diff --git a/.changelog/unreleased/breaking-changes/70-reorganize-crate-to-lib.md b/.changelog/unreleased/breaking-changes/70-reorganize-crate-to-lib.md new file mode 100644 index 00000000..508fc2ba --- /dev/null +++ b/.changelog/unreleased/breaking-changes/70-reorganize-crate-to-lib.md @@ -0,0 +1,3 @@ +Enhance the maintainability and usability of the tendermint-basecoin by +reorganizing it into a library and adding a CLI bin +([#70](https://github.com/informalsystems/basecoin-rs/issues/70)) \ No newline at end of file diff --git a/Cargo.lock b/Cargo.lock index a7b70713..c70f55fe 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4,11 +4,11 @@ version = 3 [[package]] name = "ahash" -version = "0.7.6" +version = "0.8.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fcb51a0695d8f838b1ee009b3fbf66bda078cd64590202a864a8f3e8c4315c47" +checksum = "2c99f64d1e06488f620f932677e24bc6e2897582980441ae90a671415bd7ec2f" dependencies = [ - "getrandom", + "cfg-if", "once_cell", "version_check", ] @@ -24,9 +24,9 @@ dependencies = [ [[package]] name = "anyhow" -version = "1.0.68" +version = "1.0.69" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2cb2f989d18dd141ab8ae82f64d1a8cdd37e0840f73a406896cf5e99502fab61" +checksum = "224afbd727c3d6e4b90103ece64b8d1b67fbb1973b1046c2281eed3f3803f800" [[package]] name = "arrayvec" @@ -36,19 +36,20 @@ checksum = "8da52d66c7071e2e3fa2a1e5c6d088fec47b593032b254f5e980de8ea54454d6" [[package]] name = "async-stream" -version = "0.3.3" +version = "0.3.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dad5c83079eae9969be7fadefe640a1c566901f05ff91ab221de4b6f68d9507e" +checksum = "ad445822218ce64be7a341abfb0b1ea43b5c23aa83902542a4542e78309d8e5e" dependencies = [ "async-stream-impl", "futures-core", + "pin-project-lite", ] [[package]] name = "async-stream-impl" -version = "0.3.3" +version = "0.3.4" source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "10f203db73a71dfa2fb6dd22763990fa26f3d2625a6da2da900d23b87d26be27" +checksum = "e4655ae1a7b0cdf149156f780c5bf3f1352bc53cbd9e0a361a7ef7b22947e965" dependencies = [ "proc-macro2", "quote", @@ -85,9 +86,9 @@ checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa" [[package]] name = "axum" -version = "0.6.4" +version = "0.6.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e5694b64066a2459918d8074c2ce0d5a88f409431994c2356617c8ae0c4721fc" +checksum = "6137c6234afb339e75e764c866e3594900f0211e1315d33779f269bbe2ec6967" dependencies = [ "async-trait", "axum-core", @@ -149,9 +150,9 @@ checksum = "a4a4ddaa51a5bc52a6948f74c06d20aaaddb71924eab79b8c97a8c556e942d6a" [[package]] name = "base64ct" -version = "1.5.3" +version = "1.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b645a089122eccb6111b4f81cbc1a49f5900ac4666bb93ac027feaecf15607bf" +checksum = "8c3c1a368f70d6cf7302d78f8f7093da241fb8e8807c05cc9e51a125895a6d5b" [[package]] name = "bip32" @@ -224,19 +225,19 @@ dependencies = [ [[package]] name = "borsh" -version = "0.10.0" +version = "0.10.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "822462c1e7b17b31961798a6874b36daea6818e99e0cb7d3b7b0fa3c477751c3" +checksum = "40f9ca3698b2e4cb7c15571db0abc5551dca417a21ae8140460b50309bb2cc62" dependencies = [ "borsh-derive", - "hashbrown", + "hashbrown 0.13.2", ] [[package]] name = "borsh-derive" -version = "0.10.0" +version = "0.10.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "37626c9e941a687ee9abef6065b44c379478ae563b7483c613dd705ef1dff59e" +checksum = "598b3eacc6db9c3ee57b22707ad8f6a8d2f6d442bfe24ffeb8cbb70ca59e6a35" dependencies = [ "borsh-derive-internal", "borsh-schema-derive-internal", @@ -247,9 +248,9 @@ dependencies = [ [[package]] name = "borsh-derive-internal" -version = "0.10.0" +version = "0.10.2" source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "61820b4c5693eafb998b1e67485423c923db4a75f72585c247bdee32bad81e7b" +checksum = "186b734fa1c9f6743e90c95d7233c9faab6360d1a96d4ffa19d9cfd1e9350f8a" dependencies = [ "proc-macro2", "quote", @@ -258,9 +259,9 @@ dependencies = [ [[package]] name = "borsh-schema-derive-internal" -version = "0.10.0" +version = "0.10.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c76cdbfa13def20d1f8af3ae7b3c6771f06352a74221d8851262ac384c122b8e" +checksum = "99b7ff1008316626f485991b960ade129253d4034014616b94f309a15366cc49" dependencies = [ "proc-macro2", "quote", @@ -303,6 +304,12 @@ dependencies = [ "serde", ] +[[package]] +name = "cc" +version = "1.0.79" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "50d30906286121d95be3d479533b458f87493b30a4b5f79a607db8f5d11aa91f" + [[package]] name = "cfg-if" version = "1.0.0" @@ -326,9 +333,9 @@ dependencies = [ [[package]] name = "const-oid" -version = "0.9.1" +version = "0.9.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cec318a675afcb6a1ea1d4340e2d377e56e47c266f28043ceccbf4412ddfdd3b" +checksum = "520fbf3c07483f94e3e3ca9d0cfd913d7718ef2483d2cfd91c0d9e91474ab913" [[package]] name = "cosmos-sdk-proto" @@ -478,9 +485,9 @@ dependencies = [ [[package]] name = "dyn-clone" -version = "1.0.10" +version = "1.0.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c9b0705efd4599c15a38151f4721f7bc388306f61084d3bfd50bd07fbca5cb60" +checksum = "68b0cf012f1230e43cd00ebb729c6bb58707ecfa8ad08b52ef3a4ccd2697fc30" [[package]] name = "ecdsa" @@ -572,6 +579,27 @@ dependencies = [ "serde", ] +[[package]] +name = "errno" +version = "0.2.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f639046355ee4f37944e44f60642c6f3a7efa3cf6b78c78a0d989a8ce6c396a1" +dependencies = [ + "errno-dragonfly", + "libc", + "winapi", +] + +[[package]] +name = "errno-dragonfly" +version = "0.1.2" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "aa68f1b12764fab894d2755d2518754e71b4fd80ecfb822714a1206c2aab39bf" +dependencies = [ + "cc", + "libc", +] + [[package]] name = "eyre" version = "0.6.8" @@ -584,9 +612,9 @@ dependencies = [ [[package]] name = "fastrand" -version = "1.8.0" +version = "1.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a7a407cfaa3385c4ae6b23e84623d48c2798d06e3e6a1878f7f59f17b3f86499" +checksum = "e51093e27b0797c359783294ca4f0a911c270184cb10f85783b118614a1501be" dependencies = [ "instant", ] @@ -732,9 +760,9 @@ dependencies = [ [[package]] name = "h2" -version = "0.3.15" +version = "0.3.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5f9f29bc9dda355256b2916cf526ab02ce0aeaaaf2bad60d65ef3f12f11dd0f4" +checksum = "5be7b54589b581f624f566bf5d8eb2bab1db736c51528720b6bd36b96b55924d" dependencies = [ "bytes", "fnv", @@ -754,6 +782,12 @@ name = "hashbrown" version = "0.12.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888" + +[[package]] +name = "hashbrown" +version = "0.13.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "43a3c133739dddd0d2990f9a4bdf8eb4b21ef50e4851ca85ab661199821d510e" dependencies = [ "ahash", ] @@ -802,9 +836,9 @@ dependencies = [ [[package]] name = "http" -version = "0.2.8" +version = "0.2.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "75f43d41e26995c17e71ee126451dd3941010b0514a81a9d11f3b341debc2399" +checksum = "bd6effc99afb63425aff9b05836f029929e345a6148a14b7ecd5ab67af944482" dependencies = [ "bytes", "fnv", @@ -898,9 +932,9 @@ dependencies = [ "serde_json", "sha2 0.10.6", "subtle-encoding", - "tendermint 0.29.0", + "tendermint 0.29.1", "tendermint-light-client-verifier", - "tendermint-proto 0.29.0", + "tendermint-proto 0.29.1", "time", "tracing", "uint", @@ -921,7 +955,7 @@ 
dependencies = [ "scale-info", "serde", "subtle-encoding", - "tendermint-proto 0.29.0", + "tendermint-proto 0.29.1", "tonic", ] @@ -982,7 +1016,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1885e79c1fc4b10f0e172c475f458b7f7b93061064d98c3293e98c5ba0c8b399" dependencies = [ "autocfg", - "hashbrown", + "hashbrown 0.12.3", ] [[package]] @@ -994,6 +1028,16 @@ dependencies = [ "cfg-if", ] +[[package]] +name = "io-lifetimes" +version = "1.0.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1abeb7a0dd0f8181267ff8adc397075586500b81b28a73e8a0208b00fc170fb3" +dependencies = [ + "libc", + "windows-sys 0.45.0", +] + [[package]] name = "itertools" version = "0.10.5" @@ -1052,6 +1096,18 @@ version = "0.2.139" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "201de327520df007757c1f0adce6e827fe8562fbc28bfd9c15571c66ca1f5f79" +[[package]] +name = "libm" +version = "0.2.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "348108ab3fba42ec82ff6e9564fc4ca0247bdccdc68dd8af9764bbc79c3c8ffb" + +[[package]] +name = "linux-raw-sys" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f051f77a7c8e6957c0696eac88f26b0117e54f52d3fc682ab19397a8812846a4" + [[package]] name = "log" version = "0.4.17" @@ -1081,23 +1137,14 @@ checksum = "2a60c7ce501c71e03a9c9c0d35b861413ae925bd979cc7a4e30d060069aaac8d" [[package]] name = "mio" -version = "0.8.5" +version = "0.8.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e5d732bc30207a6423068df043e3d02e0735b155ad7ce1a6f76fe2baa5b158de" +checksum = "5b9d9a46eff5b4ff64b45a9e316a6d1e0bc719ef429cbec4dc630684212bfdf9" dependencies = [ "libc", "log", "wasi", - "windows-sys", -] - -[[package]] -name = "nom8" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ae01545c9c7fc4486ab7debaf2aad7003ac19431791868fb2e8066df97fad2f8" 
-dependencies = [ - "memchr", + "windows-sys 0.45.0", ] [[package]] @@ -1128,6 +1175,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "578ede34cf02f8924ab9447f50c28075b4d3e5b269972345e7e0372b38c6cdcd" dependencies = [ "autocfg", + "libm", ] [[package]] @@ -1142,9 +1190,9 @@ dependencies = [ [[package]] name = "once_cell" -version = "1.17.0" +version = "1.17.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6f61fba1741ea2b3d6a1e3178721804bb716a68a6aeba1149b5d52e3d464ea66" +checksum = "b7e5500299e16ebb147ae15a00a942af264cf3688f47923b8fc2cd5858f23ad3" [[package]] name = "opaque-debug" @@ -1160,9 +1208,9 @@ checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39" [[package]] name = "parity-scale-codec" -version = "3.3.0" +version = "3.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c3840933452adf7b3b9145e27086a5a3376c619dca1a21b1e5a5af0d54979bed" +checksum = "637935964ff85a605d114591d4d2c13c5d1ba2806dae97cea6bf180238a749ac" dependencies = [ "arrayvec", "bitvec", @@ -1178,7 +1226,7 @@ version = "3.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "86b26a931f824dd4eca30b3e43bb4f31cd5f0d3a403c5f5ff27106b805bfde7b" dependencies = [ - "proc-macro-crate 1.3.0", + "proc-macro-crate 1.3.1", "proc-macro2", "quote", "syn", @@ -1276,9 +1324,9 @@ dependencies = [ [[package]] name = "proc-macro-crate" -version = "1.3.0" +version = "1.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "66618389e4ec1c7afe67d51a9bf34ff9236480f8d51e7489b7d5ab0303c13f34" +checksum = "7f4c021e1093a56626774e81216a4ce732a735e5bad4868a03f3ed65ca0c3919" dependencies = [ "once_cell", "toml_edit", @@ -1310,18 +1358,18 @@ dependencies = [ [[package]] name = "proc-macro2" -version = "1.0.50" +version = "1.0.51" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"6ef7d57beacfaf2d8aee5937dab7b7f28de3cb8b1828479bb5de2a7106f2bae2" +checksum = "5d727cae5b39d21da60fa540906919ad737832fe0b1c165da3a34d6548c849d6" dependencies = [ "unicode-ident", ] [[package]] name = "proptest" -version = "1.0.0" +version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e0d9cc07f18492d879586c92b485def06bc850da3118075cd45d50e9c95b0e5" +checksum = "29f1b898011ce9595050a68e60f90bad083ff2987a695a42357134c8381fba70" dependencies = [ "bit-set", "bitflags", @@ -1335,13 +1383,14 @@ dependencies = [ "regex-syntax", "rusty-fork", "tempfile", + "unarray", ] [[package]] name = "prost" -version = "0.11.6" +version = "0.11.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "21dc42e00223fc37204bd4aa177e69420c604ca4a183209a8f9de30c6d934698" +checksum = "e48e50df39172a3e7eb17e14642445da64996989bc212b583015435d39a58537" dependencies = [ "bytes", "prost-derive", @@ -1349,9 +1398,9 @@ dependencies = [ [[package]] name = "prost-derive" -version = "0.11.6" +version = "0.11.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8bda8c0881ea9f722eb9629376db3d0b903b462477c1aafcb0566610ac28ac5d" +checksum = "4ea9b0f8cbe5e15a8a042d030bd96668db28ecb567ec37d691971ff5731d2b1b" dependencies = [ "anyhow", "itertools", @@ -1362,11 +1411,10 @@ dependencies = [ [[package]] name = "prost-types" -version = "0.11.6" +version = "0.11.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a5e0526209433e96d83d750dd81a99118edbc55739e7e61a46764fd2ad537788" +checksum = "379119666929a1afd7a043aa6cf96fa67a6dce9af60c88095a4686dbce4c9c88" dependencies = [ - "bytes", "prost", ] @@ -1457,15 +1505,6 @@ version = "0.6.28" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "456c603be3e8d448b072f410900c09faf164fbce2d480456f50eea6e25f9c848" -[[package]] -name = "remove_dir_all" -version = "0.5.3" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "3acd125665422973a33ac9d3dd2df85edad0f4ae9b00dafb1a05e43a9f5ef8e7" -dependencies = [ - "winapi", -] - [[package]] name = "rfc6979" version = "0.3.1" @@ -1503,6 +1542,20 @@ version = "2.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3e75f6a532d0fd9f7f13144f392b6ad56a32696bfcd9c78f797f16bbb6f072d6" +[[package]] +name = "rustix" +version = "0.36.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f43abb88211988493c1abb44a70efa56ff0ce98f233b7b276146f1f3f7ba9644" +dependencies = [ + "bitflags", + "errno", + "io-lifetimes", + "libc", + "linux-raw-sys", + "windows-sys 0.45.0", +] + [[package]] name = "rustversion" version = "1.0.11" @@ -1592,7 +1645,7 @@ version = "2.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "303959cf613a6f6efd19ed4b4ad5bf79966a13352716299ad532cfb115f4205c" dependencies = [ - "proc-macro-crate 1.3.0", + "proc-macro-crate 1.3.1", "proc-macro2", "quote", "syn", @@ -1623,9 +1676,9 @@ dependencies = [ [[package]] name = "serde_bytes" -version = "0.11.8" +version = "0.11.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "718dc5fff5b36f99093fc49b280cfc96ce6fc824317783bff5a1fed0c7a64819" +checksum = "416bda436f9aab92e02c8e10d49a15ddd339cea90b6e340fe51ed97abb548294" dependencies = [ "serde", ] @@ -1643,9 +1696,9 @@ dependencies = [ [[package]] name = "serde_json" -version = "1.0.91" +version = "1.0.93" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "877c235533714907a8c2464236f5c4b2a17262ef1bd71f38f35ea592c8da6883" +checksum = "cad406b69c91885b5107daf2c29572f6c8cdb3c66826821e286c533490c0bc76" dependencies = [ "itoa", "ryu", @@ -1724,9 +1777,9 @@ checksum = "8fe458c98333f9c8152221191a77e2a44e8325d0193484af2e9421a53019e57d" [[package]] name = "slab" -version = "0.4.7" +version = "0.4.8" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "4614a76b2a8be0058caa9dbbaf66d988527d86d003c11a94fbd335d7661edcef" +checksum = "6528351c9bc8ab22353f9d776db39a20288e8d6c37ef8cfe3317cf875eecfc2d" dependencies = [ "autocfg", ] @@ -1816,9 +1869,9 @@ checksum = "734676eb262c623cec13c3155096e08d1f8f29adce39ba17948b18dad1e54142" [[package]] name = "syn" -version = "1.0.107" +version = "1.0.109" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1f4064b5b16e03ae50984a5a8ed5d4f8803e6bc1fd170a3cda91a1be4b18e3f5" +checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237" dependencies = [ "proc-macro2", "quote", @@ -1851,16 +1904,15 @@ checksum = "55937e1799185b12863d447f42597ed69d9928686b8d88a1df17376a097d8369" [[package]] name = "tempfile" -version = "3.3.0" +version = "3.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5cdb1ef4eaeeaddc8fbd371e5017057064af0911902ef36b39801f67cc6d79e4" +checksum = "af18f7ae1acd354b992402e9ec5864359d693cd8a79dcbef59f76891701c1e95" dependencies = [ "cfg-if", "fastrand", - "libc", "redox_syscall", - "remove_dir_all", - "winapi", + "rustix", + "windows-sys 0.42.0", ] [[package]] @@ -1895,9 +1947,9 @@ dependencies = [ [[package]] name = "tendermint" -version = "0.29.0" +version = "0.29.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0bb661ab5325af3741b7c37e81ce4bddbf9fdd4e2fb602357843becc8f235041" +checksum = "cda53c85447577769cdfc94c10a56f34afef2c00e4108badb57fce6b1a0c75eb" dependencies = [ "bytes", "digest 0.10.6", @@ -1917,21 +1969,21 @@ dependencies = [ "signature 1.6.4", "subtle", "subtle-encoding", - "tendermint-proto 0.29.0", + "tendermint-proto 0.29.1", "time", "zeroize", ] [[package]] name = "tendermint-abci" -version = "0.29.0" +version = "0.29.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5a11232def53b1a5b531179445ff20c6d4ec1f81169f9759e989af19d86e674f" +checksum = 
"7bfbe8f0131aa45769055496404359f4f5d3e2cf5d66c7457a5986806941de34" dependencies = [ "bytes", "flex-error", "prost", - "tendermint-proto 0.29.0", + "tendermint-proto 0.29.1", "tracing", ] @@ -1956,9 +2008,9 @@ dependencies = [ "serde_json", "sha2 0.10.6", "structopt", - "tendermint 0.29.0", + "tendermint 0.29.1", "tendermint-abci", - "tendermint-proto 0.29.0", + "tendermint-proto 0.29.1", "tokio", "tonic", "tracing", @@ -1967,14 +2019,14 @@ dependencies = [ [[package]] name = "tendermint-light-client-verifier" -version = "0.29.0" +version = "0.29.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d4f6b770b0ca7b68fcf29aef2100c2462444d3ced71fc68f6545cab66930cd6f" +checksum = "11c3dc3c75f7a5708ac0bf98374b2b1a2cf17b3a45ddfd5faab3c111aff7fc0e" dependencies = [ "derive_more", "flex-error", "serde", - "tendermint 0.29.0", + "tendermint 0.29.1", "time", ] @@ -1998,9 +2050,9 @@ dependencies = [ [[package]] name = "tendermint-proto" -version = "0.29.0" +version = "0.29.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8c2e8dc89de3ab71cf63adcc71eb76200fae5a9ba7afd659fee54e0810beac8f" +checksum = "c943f78c929cdf14553842f705f2c30324bc35b9179caaa5c9b80620f60652e6" dependencies = [ "bytes", "flex-error", @@ -2045,18 +2097,19 @@ dependencies = [ [[package]] name = "thread_local" -version = "1.1.4" +version = "1.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5516c27b78311c50bf42c071425c560ac799b11c30b31f87e3081965fe5e0180" +checksum = "3fdd6f064ccff2d6567adcb3873ca630700f00b5ad3f060c25b5dcfd9a4ce152" dependencies = [ + "cfg-if", "once_cell", ] [[package]] name = "time" -version = "0.3.17" +version = "0.3.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a561bf4617eebd33bca6434b988f39ed798e527f51a1e797d0ee4f61c0a38376" +checksum = "cd0cbfecb4d19b5ea75bb31ad904eb5b9fa13f21079c3b92017ebdf4999a5890" dependencies = [ "serde", "time-core", @@ -2071,18 +2124,18 
@@ checksum = "2e153e1f1acaef8acc537e68b44906d2db6436e2b35ac2c6b42640fff91f00fd" [[package]] name = "time-macros" -version = "0.2.6" +version = "0.2.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d967f99f534ca7e495c575c62638eebc2898a8c84c119b89e250477bc4ba16b2" +checksum = "fd80a657e71da814b8e5d60d3374fc6d35045062245d80224748ae522dd76f36" dependencies = [ "time-core", ] [[package]] name = "tokio" -version = "1.25.0" +version = "1.26.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c8e00990ebabbe4c14c08aca901caed183ecd5c09562a12c824bb53d3c3fd3af" +checksum = "03201d01c3c27a29c8a5cee5b55a93ddae1ccf6f08f65365c2c918f8c1b76f64" dependencies = [ "autocfg", "bytes", @@ -2093,7 +2146,7 @@ dependencies = [ "pin-project-lite", "socket2", "tokio-macros", - "windows-sys", + "windows-sys 0.45.0", ] [[package]] @@ -2119,9 +2172,9 @@ dependencies = [ [[package]] name = "tokio-stream" -version = "0.1.11" +version = "0.1.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d660770404473ccd7bc9f8b28494a811bc18542b915c0855c51e8f419d5223ce" +checksum = "8fb52b74f05dbf495a8fba459fdc331812b96aa086d9eb78101fa0d4569c3313" dependencies = [ "futures-core", "pin-project-lite", @@ -2130,9 +2183,9 @@ dependencies = [ [[package]] name = "tokio-util" -version = "0.7.4" +version = "0.7.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0bb2e075f03b3d66d8d8785356224ba688d2906a371015e225beeb65ca92c740" +checksum = "5427d89453009325de0d8f342c9490009f76e999cb7672d77e46267448f7e6b2" dependencies = [ "bytes", "futures-core", @@ -2153,19 +2206,19 @@ dependencies = [ [[package]] name = "toml_datetime" -version = "0.5.1" +version = "0.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4553f467ac8e3d374bc9a177a26801e5d0f9b211aa1673fb137a403afd1c9cf5" +checksum = "3ab8ed2edee10b50132aed5f331333428b011c99402b5a534154ed15746f9622" [[package]] name = 
"toml_edit" -version = "0.18.1" +version = "0.19.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "56c59d8dd7d0dcbc6428bf7aa2f0e823e26e43b3c9aca15bbc9475d23e5fa12b" +checksum = "9a1eb0622d28f4b9c90adc4ea4b2b46b47663fde9ac5fafcb14a1369d5508825" dependencies = [ "indexmap", - "nom8", "toml_datetime", + "winnow", ] [[package]] @@ -2222,9 +2275,9 @@ dependencies = [ [[package]] name = "tower-http" -version = "0.3.5" +version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f873044bf02dd1e8239e9c1293ea39dad76dc594ec16185d0a1bf31d8dc8d858" +checksum = "5d1d42a9b3f3ec46ba828e8d376aec14592ea199f70a06a548587ecd1c4ab658" dependencies = [ "bitflags", "bytes", @@ -2344,6 +2397,12 @@ dependencies = [ "static_assertions", ] +[[package]] +name = "unarray" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eaea85b334db583fe3274d12b4cd1880032beab409c0d774be044d4480ab9a94" + [[package]] name = "unicode-ident" version = "1.0.6" @@ -2502,6 +2561,30 @@ dependencies = [ "windows_x86_64_msvc", ] +[[package]] +name = "windows-sys" +version = "0.45.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "75283be5efb2831d37ea142365f009c02ec203cd29a3ebecbc093d52315b66d0" +dependencies = [ + "windows-targets", +] + +[[package]] +name = "windows-targets" +version = "0.42.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e2522491fbfcd58cc84d47aeb2958948c4b8982e9a2d8a2a35bbaed431390e7" +dependencies = [ + "windows_aarch64_gnullvm", + "windows_aarch64_msvc", + "windows_i686_gnu", + "windows_i686_msvc", + "windows_x86_64_gnu", + "windows_x86_64_gnullvm", + "windows_x86_64_msvc", +] + [[package]] name = "windows_aarch64_gnullvm" version = "0.42.1" @@ -2544,6 +2627,15 @@ version = "0.42.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"447660ad36a13288b1db4d4248e857b510e8c3a225c822ba4fb748c0aafecffd" +[[package]] +name = "winnow" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "faf09497b8f8b5ac5d3bb4d05c0a99be20f26fd3d5f2db7b0716e946d5103658" +dependencies = [ + "memchr", +] + [[package]] name = "wyz" version = "0.5.1" diff --git a/Cargo.toml b/Cargo.toml index a9a4565b..a3d23072 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,7 +1,7 @@ [package] name = "tendermint-basecoin" version = "0.1.0" -edition = "2018" +edition = "2021" license = "Apache-2.0" readme = "README.md" categories = ["cryptography::cryptocurrencies"] @@ -12,6 +12,9 @@ description = """ use of tendermint-rs. """ +[[bin]] +name = "basecoin" + [dependencies] base64 = { version = "0.21", default-features = false, features = ["alloc"] } cosmrs = "0.11.0" @@ -26,9 +29,9 @@ serde = "1.0" serde_json = "1.0" sha2 = "0.10.2" structopt = "0.3.21" -tendermint = "=0.29.0" -tendermint-abci = "=0.29.0" -tendermint-proto = "=0.29.0" +tendermint = "=0.29.1" +tendermint-abci = "=0.29.1" +tendermint-proto = "=0.29.1" tokio = { version = "1.0", features = ["macros", "rt-multi-thread"] } tonic = "0.8.3" tracing = "0.1.26" diff --git a/ci/entrypoint.sh b/ci/entrypoint.sh index c15f21db..8df69558 100755 --- a/ci/entrypoint.sh +++ b/ci/entrypoint.sh @@ -6,7 +6,7 @@ BASECOIN_SRC=${BASECOIN_SRC:-/src/basecoin-rs} BUILD_ROOT="${HOME}/build" HERMES_BUILD="${BUILD_ROOT}/hermes" BASECOIN_BUILD="${BUILD_ROOT}/basecoin-rs" -BASECOIN_BIN="${BASECOIN_BUILD}/debug/tendermint-basecoin" +BASECOIN_BIN="${BASECOIN_BUILD}/debug/basecoin" HERMES_BIN="${HERMES_BUILD}/release/hermes" HERMES_REPO=https://github.com/informalsystems/hermes.git HERMES_COMMITISH=${HERMES_COMMITISH:-master} @@ -37,7 +37,7 @@ cargo build --release --bin hermes --target-dir "${HERMES_BUILD}/" cd "${BASECOIN_SRC}" echo "" echo "Building basecoin-rs..." 
-cargo build --all-features --target-dir "${BASECOIN_BUILD}" +cargo build --bin basecoin --all-features --target-dir "${BASECOIN_BUILD}" echo "" echo "Setting up chain ibc-0..." diff --git a/src/app/mod.rs b/src/app.rs similarity index 94% rename from src/app/mod.rs rename to src/app.rs index 3341a209..bb45347e 100644 --- a/src/app/mod.rs +++ b/src/app.rs @@ -1,9 +1,3 @@ -//! The basecoin ABCI application. - -pub(crate) mod modules; -mod response; -pub(crate) mod store; - use std::{ convert::TryInto, sync::{Arc, RwLock}, @@ -46,41 +40,27 @@ use tendermint_proto::{ use tonic::{Request, Response, Status}; use tracing::{debug, error, info}; -use crate::app::{ - modules::{Error, Module, ACCOUNT_PREFIX}, - response::ResponseFromErrorExt, - store::{Height, Identifier, Path, ProvableStore, RevertibleStore, SharedStore, Store}, +use crate::{ + error::Error, + helper::macros::ResponseFromErrorExt, + helper::{Height, Identifier, Path}, + modules::{ + auth::account::ACCOUNT_PREFIX, + types::{IdentifiedModule, ModuleList, ModuleStore}, + Module, + }, + store::{MainStore, ProvableStore, RevertibleStore, SharedRw, SharedStore, Store}, }; +pub(crate) const CHAIN_REVISION_NUMBER: u64 = 0; -const CHAIN_REVISION_NUMBER: u64 = 0; - -type MainStore = SharedStore>; -type ModuleStore = RevertibleStore; -type ModuleList = Vec>; -type SharedRw = Arc>; - -struct IdentifiedModule { - id: Identifier, - module: Box>>, -} - -/// BaseCoin ABCI application. -/// -/// Can be safely cloned and sent across threads, but not shared. -#[derive(Clone)] -pub(crate) struct BaseCoinApp { - store: MainStore, - modules: SharedRw>, -} - -pub(crate) struct Builder { +pub struct Builder { store: MainStore, modules: SharedRw>, } impl Builder { /// Constructor. 
- pub(crate) fn new(store: S) -> Self { + pub fn new(store: S) -> Self { Self { store: SharedStore::new(RevertibleStore::new(store)), modules: Arc::new(RwLock::new(vec![])), @@ -89,7 +69,7 @@ impl Builder { /// Returns a share to the module's store if a module with specified identifier was previously /// added, otherwise creates a new module store and returns it. - pub(crate) fn module_store(&self, prefix: &Identifier) -> SharedStore> { + pub fn module_store(&self, prefix: &Identifier) -> SharedStore> { let modules = self.modules.read().unwrap(); modules .iter() @@ -104,7 +84,7 @@ impl Builder { } /// Adds a new module. Panics if a module with the specified identifier was previously added. - pub(crate) fn add_module( + pub fn add_module( self, prefix: Identifier, module: impl Module> + 'static, @@ -117,7 +97,7 @@ impl Builder { self } - pub(crate) fn build(self) -> BaseCoinApp { + pub fn build(self) -> BaseCoinApp { BaseCoinApp { store: self.store, modules: self.modules, @@ -125,6 +105,15 @@ impl Builder { } } +/// BaseCoin ABCI application. +/// +/// Can be safely cloned and sent across threads, but not shared. 
+#[derive(Clone)] +pub struct BaseCoinApp { + store: MainStore, + modules: SharedRw>, +} + impl BaseCoinApp { // try to deliver the message to all registered modules // if `module.deliver()` returns `Error::NotHandled`, try next module diff --git a/src/app/modules/auth.rs b/src/app/modules/auth.rs deleted file mode 100644 index ee51dab1..00000000 --- a/src/app/modules/auth.rs +++ /dev/null @@ -1,346 +0,0 @@ -use std::{ - collections::HashMap, - convert::{TryFrom, TryInto}, - str::FromStr, -}; - -use cosmrs::AccountId; -use ibc_proto::{ - cosmos::auth::v1beta1::{ - query_server::{Query, QueryServer}, - AddressBytesToStringRequest, AddressBytesToStringResponse, AddressStringToBytesRequest, - AddressStringToBytesResponse, BaseAccount, Bech32PrefixRequest, Bech32PrefixResponse, - QueryAccountAddressByIdRequest, QueryAccountAddressByIdResponse, QueryAccountRequest, - QueryAccountResponse, QueryAccountsRequest, QueryAccountsResponse, - QueryModuleAccountByNameRequest, QueryModuleAccountByNameResponse, - QueryModuleAccountsRequest, QueryModuleAccountsResponse, QueryParamsRequest, - QueryParamsResponse, - }, - google::protobuf::Any, -}; -use prost::Message; -use serde_json::Value; -use tendermint_proto::abci::Event; -use tonic::{Request, Response, Status}; -use tracing::{debug, trace}; - -use crate::app::{ - modules::{bank::Denom, Error as ModuleError, Module}, - store::{Height, Path, ProtobufStore, ProvableStore, SharedStore, Store, TypedStore}, -}; - -/// Address of the account that the relayer uses to sign basecoin transactions. -/// This is hardcoded as we don't verify signatures currently. 
-const RELAYER_ACCOUNT: &str = "cosmos12xpmzmfpf7tn57xg93rne2hc2q26lcfql5efws"; -pub(crate) const ACCOUNT_PREFIX: &str = "cosmos"; - -#[derive(Clone)] -struct AccountsPath(AccountId); - -impl From for Path { - fn from(path: AccountsPath) -> Self { - format!("accounts/{}", path.0).try_into().unwrap() // safety - cannot fail as AccountsPath is correct-by-construction - } -} - -pub trait Account { - /// Account address type - type Address; - /// Account public key type - type PubKey; - - /// Returns the account's address. - fn address(&self) -> &Self::Address; - - /// Returns the account's public key. - fn pub_key(&self) -> &Self::PubKey; - - /// Returns the account's sequence. (used for replay protection) - fn sequence(&self) -> u64; -} - -#[derive(Clone)] -pub struct AuthAccount { - address: AccountId, - number: u64, - sequence: u64, -} - -impl AuthAccount { - pub fn new(address: AccountId) -> Self { - Self { - address, - number: 0, - sequence: 0, - } - } -} - -impl Account for AuthAccount { - type Address = AccountId; - type PubKey = Vec; - - fn address(&self) -> &Self::Address { - &self.address - } - - fn pub_key(&self) -> &Self::PubKey { - unimplemented!() - } - - fn sequence(&self) -> u64 { - self.sequence - } -} - -impl ibc_proto::protobuf::Protobuf for AuthAccount {} - -impl TryFrom for AuthAccount { - type Error = String; - - fn try_from(account: BaseAccount) -> Result { - Ok(AuthAccount { - address: account - .address - .parse() - .map_err(|_| "Failed to parse address".to_string())?, - number: account.account_number, - sequence: account.sequence, - }) - } -} - -impl From for BaseAccount { - fn from(account: AuthAccount) -> Self { - BaseAccount { - address: account.address.to_string(), - pub_key: None, - account_number: account.number, - sequence: account.sequence, - } - } -} - -impl From for Any { - fn from(account: AuthAccount) -> Self { - let account = BaseAccount::from(account); - Any { - type_url: "/cosmos.auth.v1beta1.BaseAccount".to_string(), - value: 
account.encode_to_vec(), - } - } -} - -pub trait AccountReader { - type Error; - type Address; - type Account: Account; - - fn get_account(&self, address: Self::Address) -> Result; -} - -pub trait AccountKeeper { - type Error; - type Account: Account; - - fn set_account(&mut self, account: Self::Account) -> Result<(), Self::Error>; - - fn remove_account(&mut self, account: Self::Account) -> Result<(), Self::Error>; -} - -#[derive(Clone)] -pub struct Auth { - store: SharedStore, - account_reader: AuthAccountReader, - account_keeper: AuthAccountKeeper, -} - -impl Auth { - pub fn new(store: SharedStore) -> Self { - Self { - store: store.clone(), - account_reader: AuthAccountReader { - account_store: TypedStore::new(store.clone()), - }, - account_keeper: AuthAccountKeeper { - account_store: TypedStore::new(store), - }, - } - } - - pub fn service(&self) -> QueryServer> { - QueryServer::new(AuthService { - account_reader: self.account_reader().clone(), - }) - } - - pub fn account_reader(&self) -> &AuthAccountReader { - &self.account_reader - } - - pub fn account_keeper(&self) -> &AuthAccountKeeper { - &self.account_keeper - } -} - -impl Module for Auth { - type Store = S; - - fn init(&mut self, app_state: Value) { - debug!("Initializing auth module"); - // safety - we panic on errors to prevent chain creation with invalid genesis config - let accounts: HashMap> = - serde_json::from_value(app_state).unwrap(); - for (account, _) in accounts { - trace!("Adding account: {}", account); - - let account_id = AccountId::from_str(&account).unwrap(); - self.account_keeper - .set_account(AuthAccount::new(account_id.clone())) - .map_err(|_| "Failed to create account") - .unwrap(); - } - } - - fn deliver(&mut self, _message: Any, signer: &AccountId) -> Result, ModuleError> { - let mut account = self - .account_reader - .get_account(signer.clone()) - .map_err(|_| ModuleError::Custom { - reason: "unknown signer".to_string(), - })?; - account.sequence += 1; - - self.account_keeper - 
.set_account(account) - .map_err(|_| ModuleError::Custom { - reason: "failed to increment signer sequence".to_string(), - })?; - - // we're only intercepting the deliverTx here, so return unhandled. - Err(ModuleError::NotHandled) - } - - fn store_mut(&mut self) -> &mut SharedStore { - &mut self.store - } - - fn store(&self) -> &SharedStore { - &self.store - } -} - -#[derive(Clone)] -pub struct AuthAccountReader { - account_store: ProtobufStore, AccountsPath, AuthAccount, BaseAccount>, -} - -impl AccountReader for AuthAccountReader { - type Error = (); - type Address = AccountId; - type Account = AuthAccount; - - fn get_account(&self, address: Self::Address) -> Result { - self.account_store - .get(Height::Pending, &AccountsPath(address)) - .ok_or(()) - } -} - -#[derive(Clone)] -pub struct AuthAccountKeeper { - account_store: ProtobufStore, AccountsPath, AuthAccount, BaseAccount>, -} - -impl AccountKeeper for AuthAccountKeeper { - type Error = (); - type Account = AuthAccount; - - fn set_account(&mut self, account: Self::Account) -> Result<(), Self::Error> { - self.account_store - .set(AccountsPath(account.address().clone()), account) - .map(|_| ()) - .map_err(|_| ()) - } - - fn remove_account(&mut self, _account: Self::Account) -> Result<(), Self::Error> { - unimplemented!() - } -} - -pub struct AuthService { - account_reader: AuthAccountReader, -} - -#[tonic::async_trait] -impl Query for AuthService { - async fn accounts( - &self, - _request: Request, - ) -> Result, Status> { - unimplemented!() - } - - async fn account( - &self, - _request: Request, - ) -> Result, Status> { - debug!("Got auth account request"); - - let account_id = RELAYER_ACCOUNT.parse().unwrap(); - let account = self.account_reader.get_account(account_id).unwrap(); - - Ok(Response::new(QueryAccountResponse { - account: Some(account.into()), - })) - } - - async fn params( - &self, - _request: Request, - ) -> Result, Status> { - unimplemented!() - } - - async fn account_address_by_id( - &self, - 
_request: Request, - ) -> Result, Status> { - unimplemented!() - } - - async fn module_accounts( - &self, - _request: Request, - ) -> Result, Status> { - unimplemented!() - } - - async fn module_account_by_name( - &self, - _request: Request, - ) -> Result, Status> { - unimplemented!() - } - - async fn bech32_prefix( - &self, - _request: Request, - ) -> Result, Status> { - unimplemented!() - } - - async fn address_bytes_to_string( - &self, - _request: Request, - ) -> Result, Status> { - unimplemented!() - } - - async fn address_string_to_bytes( - &self, - _request: Request, - ) -> Result, Status> { - unimplemented!() - } -} diff --git a/src/app/modules/ibc.rs b/src/app/modules/ibc.rs deleted file mode 100644 index d6e6ace3..00000000 --- a/src/app/modules/ibc.rs +++ /dev/null @@ -1,2148 +0,0 @@ -use core::fmt::Debug; -use cosmrs::AccountId; -use ibc::{ - applications::transfer::{ - context::{ - cosmos_adr028_escrow_address, TokenTransferExecutionContext, - TokenTransferValidationContext, - }, - error::TokenTransferError, - msgs::transfer::MsgTransfer, - relay::send_transfer::send_transfer, - PrefixedCoin, - }, - clients::ics07_tendermint::{ - client_state::ClientState as TmClientState, - consensus_state::ConsensusState as TmConsensusState, - }, - core::{ - context::{ExecutionContext, Router as ContextRouter, ValidationContext}, - ics02_client::{ - client_state::ClientState, client_type::ClientType, consensus_state::ConsensusState, - error::ClientError, - }, - ics03_connection::{ - connection::{ConnectionEnd, IdentifiedConnectionEnd}, - error::ConnectionError, - version::{pick_version, Version as ConnectionVersion}, - }, - ics04_channel::{ - channel::{ChannelEnd, Counterparty, IdentifiedChannelEnd, Order}, - commitment::{AcknowledgementCommitment, PacketCommitment}, - context::{ - calculate_block_delay, SendPacketExecutionContext, SendPacketValidationContext, - }, - error::{ChannelError, PacketError}, - handler::ModuleExtras, - msgs::acknowledgement::Acknowledgement, 
- packet::{Packet, Receipt, Sequence}, - timeout::TimeoutHeight, - Version as ChannelVersion, - }, - ics05_port::error::PortError, - ics23_commitment::commitment::{CommitmentPrefix, CommitmentRoot}, - ics24_host::{ - identifier::{ChannelId, ClientId, ConnectionId, PortId}, - path::{ - AckPath, ChannelEndPath, ClientConnectionPath, ClientConsensusStatePath, - ClientStatePath, ClientTypePath, CommitmentPath, ConnectionPath, PathError, - ReceiptPath, SeqAckPath, SeqRecvPath, SeqSendPath, - }, - Path as IbcPath, IBC_QUERY_PATH, - }, - ics26_routing::{ - context::{Module as IbcModule, ModuleId}, - msgs::MsgEnvelope, - }, - ContextError, - }, - events::IbcEvent, - signer::Signer, - timestamp::Timestamp, - Height as IbcHeight, -}; -use ibc_proto::{ - google::protobuf::Any, - ibc::core::{ - channel::v1::{ - query_server::{Query as ChannelQuery, QueryServer as ChannelQueryServer}, - Channel as RawChannelEnd, IdentifiedChannel as RawIdentifiedChannel, PacketState, - QueryChannelClientStateRequest, QueryChannelClientStateResponse, - QueryChannelConsensusStateRequest, QueryChannelConsensusStateResponse, - QueryChannelRequest, QueryChannelResponse, QueryChannelsRequest, QueryChannelsResponse, - QueryConnectionChannelsRequest, QueryConnectionChannelsResponse, - QueryNextSequenceReceiveRequest, QueryNextSequenceReceiveResponse, - QueryPacketAcknowledgementRequest, QueryPacketAcknowledgementResponse, - QueryPacketAcknowledgementsRequest, QueryPacketAcknowledgementsResponse, - QueryPacketCommitmentRequest, QueryPacketCommitmentResponse, - QueryPacketCommitmentsRequest, QueryPacketCommitmentsResponse, - QueryPacketReceiptRequest, QueryPacketReceiptResponse, QueryUnreceivedAcksRequest, - QueryUnreceivedAcksResponse, QueryUnreceivedPacketsRequest, - QueryUnreceivedPacketsResponse, - }, - client::v1::{ - query_server::{Query as ClientQuery, QueryServer as ClientQueryServer}, - ConsensusStateWithHeight, Height as RawHeight, IdentifiedClientState, - QueryClientParamsRequest, 
QueryClientParamsResponse, QueryClientStateRequest, - QueryClientStateResponse, QueryClientStatesRequest, QueryClientStatesResponse, - QueryClientStatusRequest, QueryClientStatusResponse, QueryConsensusStateHeightsRequest, - QueryConsensusStateHeightsResponse, QueryConsensusStateRequest, - QueryConsensusStateResponse, QueryConsensusStatesRequest, QueryConsensusStatesResponse, - QueryUpgradedClientStateRequest, QueryUpgradedClientStateResponse, - QueryUpgradedConsensusStateRequest, QueryUpgradedConsensusStateResponse, - }, - connection::v1::{ - query_server::{Query as ConnectionQuery, QueryServer as ConnectionQueryServer}, - ConnectionEnd as RawConnectionEnd, IdentifiedConnection as RawIdentifiedConnection, - QueryClientConnectionsRequest, QueryClientConnectionsResponse, - QueryConnectionClientStateRequest, QueryConnectionClientStateResponse, - QueryConnectionConsensusStateRequest, QueryConnectionConsensusStateResponse, - QueryConnectionRequest, QueryConnectionResponse, QueryConnectionsRequest, - QueryConnectionsResponse, - }, - }, -}; -use prost::Message; -use sha2::Digest; -use std::{ - borrow::Borrow, - collections::{BTreeMap, HashMap}, - convert::{TryFrom, TryInto}, - str::FromStr, - sync::Arc, - time::Duration, -}; -use tendermint::{abci::Event as TendermintEvent, block::Header}; -use tendermint_proto::{ - abci::{Event, EventAttribute}, - crypto::ProofOp, -}; -use tonic::{Request, Response, Status}; -use tracing::{debug, trace}; - -use crate::{ - app::{ - modules::{ - bank::{BankBalanceKeeper, BankKeeper, Coin, Denom}, - Error as ModuleError, Identifiable, Module, QueryResult, ACCOUNT_PREFIX, - }, - store::{ - BinStore, Height, JsonStore, Path, ProtobufStore, ProvableStore, SharedStore, Store, - TypedSet, TypedStore, - }, - CHAIN_REVISION_NUMBER, - }, - IBC_TRANSFER_MODULE_ID, -}; - -use ibc::applications::transfer::VERSION; - -use ibc::core::handler::dispatch; - -use ibc::applications::transfer::context::{ - on_acknowledgement_packet_validate, 
on_chan_open_ack_validate, on_chan_open_confirm_validate, - on_chan_open_init_execute, on_chan_open_init_validate, on_chan_open_try_execute, - on_chan_open_try_validate, on_recv_packet_execute, on_timeout_packet_execute, - on_timeout_packet_validate, -}; - -pub(crate) type Error = ibc::core::ics26_routing::error::RouterError; - -impl From for ModuleError { - fn from(e: Error) -> Self { - ModuleError::Ibc(e) - } -} - -impl TryFrom for IbcPath { - type Error = PathError; - - fn try_from(path: Path) -> Result { - Self::from_str(path.to_string().as_str()) - } -} - -impl From for Path { - fn from(ibc_path: IbcPath) -> Self { - Self::try_from(ibc_path.to_string()).unwrap() // safety - `IbcPath`s are correct-by-construction - } -} - -macro_rules! impl_into_path_for { - ($($path:ty),+) => { - $(impl From<$path> for Path { - fn from(ibc_path: $path) -> Self { - Self::try_from(ibc_path.to_string()).unwrap() // safety - `IbcPath`s are correct-by-construction - } - })+ - }; -} - -impl_into_path_for!( - ClientTypePath, - ClientStatePath, - ClientConsensusStatePath, - ConnectionPath, - ClientConnectionPath, - ChannelEndPath, - SeqSendPath, - SeqRecvPath, - SeqAckPath, - CommitmentPath, - ReceiptPath, - AckPath -); - -/// The Ibc module -/// Implements all ibc-rs `Reader`s and `Keeper`s -/// Also implements gRPC endpoints required by `hermes` -#[derive(Clone)] -pub struct Ibc { - /// Handle to store instance. - /// The module is guaranteed exclusive access to all paths in the store key-space. 
- store: SharedStore, - /// Mapping of which IBC modules own which port - port_to_module_map: BTreeMap, - /// ICS26 router impl - router: IbcRouter, - /// Counter for clients - client_counter: u64, - /// Counter for connections - conn_counter: u64, - /// Counter for channels - channel_counter: u64, - /// Tracks the processed time for client updates - client_processed_times: HashMap<(ClientId, IbcHeight), Timestamp>, - /// Tracks the processed height for client updates - client_processed_heights: HashMap<(ClientId, IbcHeight), IbcHeight>, - /// Map of host consensus states - consensus_states: HashMap, - /// A typed-store for ClientType - client_type_store: JsonStore, ClientTypePath, ClientType>, - /// A typed-store for AnyClientState - client_state_store: ProtobufStore, ClientStatePath, TmClientState, Any>, - /// A typed-store for AnyConsensusState - consensus_state_store: - ProtobufStore, ClientConsensusStatePath, TmConsensusState, Any>, - /// A typed-store for ConnectionEnd - connection_end_store: - ProtobufStore, ConnectionPath, ConnectionEnd, RawConnectionEnd>, - /// A typed-store for ConnectionIds - connection_ids_store: JsonStore, ClientConnectionPath, Vec>, - /// A typed-store for ChannelEnd - channel_end_store: ProtobufStore, ChannelEndPath, ChannelEnd, RawChannelEnd>, - /// A typed-store for send sequences - send_sequence_store: JsonStore, SeqSendPath, Sequence>, - /// A typed-store for receive sequences - recv_sequence_store: JsonStore, SeqRecvPath, Sequence>, - /// A typed-store for ack sequences - ack_sequence_store: JsonStore, SeqAckPath, Sequence>, - /// A typed-store for packet commitments - packet_commitment_store: BinStore, CommitmentPath, PacketCommitment>, - /// A typed-store for packet receipts - packet_receipt_store: TypedSet, ReceiptPath>, - /// A typed-store for packet ack - packet_ack_store: BinStore, AckPath, AcknowledgementCommitment>, - /// IBC Events - events: Vec, - /// message logs - logs: Vec, -} - -impl Ibc { - pub fn new(store: 
SharedStore) -> Self { - Self { - port_to_module_map: Default::default(), - router: Default::default(), - client_counter: 0, - conn_counter: 0, - channel_counter: 0, - client_processed_times: Default::default(), - client_processed_heights: Default::default(), - consensus_states: Default::default(), - client_type_store: TypedStore::new(store.clone()), - client_state_store: TypedStore::new(store.clone()), - consensus_state_store: TypedStore::new(store.clone()), - connection_end_store: TypedStore::new(store.clone()), - connection_ids_store: TypedStore::new(store.clone()), - channel_end_store: TypedStore::new(store.clone()), - send_sequence_store: TypedStore::new(store.clone()), - recv_sequence_store: TypedStore::new(store.clone()), - ack_sequence_store: TypedStore::new(store.clone()), - packet_commitment_store: TypedStore::new(store.clone()), - packet_receipt_store: TypedStore::new(store.clone()), - packet_ack_store: TypedStore::new(store.clone()), - store, - events: Vec::new(), - logs: Vec::new(), - } - } - - pub fn add_route( - &mut self, - module_id: ModuleId, - module: impl IbcModuleWrapper, - ) -> Result<(), String> { - self.router.add_route(module_id, module) - } - - pub fn scope_port_to_module(&mut self, port_id: PortId, module_id: ModuleId) { - self.port_to_module_map.insert(port_id, module_id); - } - - pub fn client_service(&self) -> ClientQueryServer> { - ClientQueryServer::new(IbcClientService::new(self.store.clone())) - } - - pub fn connection_service(&self) -> ConnectionQueryServer> { - ConnectionQueryServer::new(IbcConnectionService::new(self.store.clone())) - } - - pub fn channel_service(&self) -> ChannelQueryServer> { - ChannelQueryServer::new(IbcChannelService::new(self.store.clone())) - } -} - -impl Ibc { - fn get_proof(&self, height: Height, path: &Path) -> Option> { - if let Some(p) = self.store.get_proof(height, path) { - let mut buffer = Vec::new(); - if p.encode(&mut buffer).is_ok() { - return Some(buffer); - } - } - None - } -} - -impl Module 
for Ibc { - type Store = S; - - fn deliver(&mut self, message: Any, _signer: &AccountId) -> Result, ModuleError> { - if let Ok(msg) = MsgEnvelope::try_from(message.clone()) { - debug!("Dispatching message: {:?}", msg); - - dispatch(self, msg)?; - let events = self - .events - .drain(..) - .into_iter() - .map(|ev| TmEvent(ev.try_into().unwrap()).into()) - .collect(); - Ok(events) - } else if let Ok(transfer_msg) = MsgTransfer::try_from(message) { - debug!("Dispatching message: {:?}", transfer_msg); - - let transfer_module_id: ModuleId = IBC_TRANSFER_MODULE_ID.parse().unwrap(); - let transfer_module = { - let transfer_module = self - .router - .get_route_mut(&transfer_module_id) - .ok_or(ModuleError::NotHandled)?; - transfer_module - .as_any_mut() - .downcast_mut::>>() - .expect("Transfer Module <-> ModuleId mismatch") - }; - - send_transfer(transfer_module, transfer_msg).map_err(|e| ModuleError::Custom { - reason: e.to_string(), - })?; - - Ok(transfer_module - .events - .clone() - .into_iter() - .map(|ev| TmEvent(ev.try_into().unwrap()).into()) - .collect()) - } else { - Err(ModuleError::NotHandled) - } - } - - fn query( - &self, - data: &[u8], - path: Option<&Path>, - height: Height, - prove: bool, - ) -> Result { - let path = path.ok_or(ModuleError::NotHandled)?; - if path.to_string() != IBC_QUERY_PATH { - return Err(ModuleError::NotHandled); - } - - let path: Path = String::from_utf8(data.to_vec()) - .map_err(|_| ContextError::ClientError(ClientError::ImplementationSpecific))? 
- .try_into()?; - - let _ = IbcPath::try_from(path.clone()) - .map_err(|_| ContextError::ClientError(ClientError::ImplementationSpecific))?; - - debug!( - "Querying for path ({}) at height {:?}", - path.to_string(), - height - ); - - let proof = if prove { - let proof = self - .get_proof(height, &path) - .ok_or(ContextError::ClientError( - ClientError::ImplementationSpecific, - ))?; - Some(vec![ProofOp { - r#type: "".to_string(), - key: path.to_string().into_bytes(), - data: proof, - }]) - } else { - None - }; - - let data = self - .store - .get(height, &path) - .ok_or(ContextError::ClientError( - ClientError::ImplementationSpecific, - ))?; - Ok(QueryResult { data, proof }) - } - - fn begin_block(&mut self, header: &Header) -> Vec { - let consensus_state = TmConsensusState::new( - CommitmentRoot::from_bytes(header.app_hash.as_ref()), - header.time, - header.next_validators_hash, - ); - self.consensus_states - .insert(header.height.value(), consensus_state); - vec![] - } - - fn store_mut(&mut self) -> &mut SharedStore { - &mut self.store - } - - fn store(&self) -> &SharedStore { - &self.store - } -} - -struct TmEvent(TendermintEvent); - -impl From for Event { - fn from(value: TmEvent) -> Self { - Self { - r#type: value.0.kind, - attributes: value - .0 - .attributes - .into_iter() - .map(|attr| EventAttribute { - key: attr.key.into(), - value: attr.value.into(), - index: true, - }) - .collect(), - } - } -} - -pub struct IbcClientService { - client_state_store: ProtobufStore, ClientStatePath, TmClientState, Any>, - consensus_state_store: - ProtobufStore, ClientConsensusStatePath, TmConsensusState, Any>, -} - -impl IbcClientService { - pub fn new(store: SharedStore) -> Self { - Self { - client_state_store: TypedStore::new(store.clone()), - consensus_state_store: TypedStore::new(store), - } - } -} - -#[tonic::async_trait] -impl ClientQuery for IbcClientService { - async fn client_state( - &self, - _request: Request, - ) -> Result, Status> { - unimplemented!() - } - - 
async fn client_states( - &self, - request: Request, - ) -> Result, Status> { - trace!("Got client states request: {:?}", request); - - let path = "clients" - .to_owned() - .try_into() - .map_err(|e| Status::invalid_argument(format!("{e}")))?; - - let client_state_paths = |path: Path| -> Option { - match path.try_into() { - Ok(IbcPath::ClientState(p)) => Some(p), - _ => None, - } - }; - - let identified_client_state = |path: ClientStatePath| { - let client_state = self.client_state_store.get(Height::Pending, &path).unwrap(); - IdentifiedClientState { - client_id: path.0.to_string(), - client_state: Some(client_state.into()), - } - }; - - let keys = self.client_state_store.get_keys(&path); - let client_states = keys - .into_iter() - .filter_map(client_state_paths) - .map(identified_client_state) - .collect(); - - Ok(Response::new(QueryClientStatesResponse { - client_states, - pagination: None, // TODO(hu55a1n1): add pagination support - })) - } - - async fn consensus_state( - &self, - _request: Request, - ) -> Result, Status> { - unimplemented!() - } - - async fn consensus_states( - &self, - request: Request, - ) -> Result, Status> { - trace!("Got consensus states request: {:?}", request); - - let path = format!("clients/{}/consensusStates", request.get_ref().client_id) - .try_into() - .map_err(|e| Status::invalid_argument(format!("{e}")))?; - - let keys = self.consensus_state_store.get_keys(&path); - let consensus_states = keys - .into_iter() - .map(|path| { - if let Ok(IbcPath::ClientConsensusState(path)) = path.try_into() { - let consensus_state = self.consensus_state_store.get(Height::Pending, &path); - ConsensusStateWithHeight { - height: Some(RawHeight { - revision_number: path.epoch, - revision_height: path.height, - }), - consensus_state: consensus_state.map(|cs| cs.into()), - } - } else { - panic!("unexpected path") // safety - store paths are assumed to be well-formed - } - }) - .collect(); - - Ok(Response::new(QueryConsensusStatesResponse { - 
consensus_states, - pagination: None, // TODO(hu55a1n1): add pagination support - })) - } - - async fn consensus_state_heights( - &self, - _request: Request, - ) -> Result, Status> { - unimplemented!() - } - - async fn client_status( - &self, - _request: Request, - ) -> Result, Status> { - unimplemented!() - } - - async fn client_params( - &self, - _request: Request, - ) -> Result, Status> { - unimplemented!() - } - - async fn upgraded_client_state( - &self, - _request: Request, - ) -> Result, Status> { - unimplemented!() - } - - async fn upgraded_consensus_state( - &self, - _request: Request, - ) -> Result, Status> { - unimplemented!() - } -} - -pub struct IbcConnectionService { - connection_end_store: - ProtobufStore, ConnectionPath, ConnectionEnd, RawConnectionEnd>, - connection_ids_store: JsonStore, ClientConnectionPath, Vec>, -} - -impl IbcConnectionService { - pub fn new(store: SharedStore) -> Self { - Self { - connection_end_store: TypedStore::new(store.clone()), - connection_ids_store: TypedStore::new(store), - } - } -} - -#[tonic::async_trait] -impl ConnectionQuery for IbcConnectionService { - async fn connection( - &self, - request: Request, - ) -> Result, Status> { - let conn_id = ConnectionId::from_str(&request.get_ref().connection_id) - .map_err(|_| Status::invalid_argument("invalid connection id"))?; - let conn = self - .connection_end_store - .get(Height::Pending, &ConnectionPath::new(&conn_id)); - Ok(Response::new(QueryConnectionResponse { - connection: conn.map(|c| c.into()), - proof: vec![], - proof_height: None, - })) - } - - async fn connections( - &self, - _request: Request, - ) -> Result, Status> { - let connection_path_prefix: Path = String::from("connections") - .try_into() - .expect("'connections' expected to be a valid Path"); - - let connection_paths = self.connection_end_store.get_keys(&connection_path_prefix); - - let identified_connections: Vec = connection_paths - .into_iter() - .map(|path| match path.try_into() { - 
Ok(IbcPath::Connection(connections_path)) => { - let connection_end = self - .connection_end_store - .get(Height::Pending, &connections_path) - .unwrap(); - IdentifiedConnectionEnd::new(connections_path.0, connection_end).into() - } - _ => panic!("unexpected path"), - }) - .collect(); - - Ok(Response::new(QueryConnectionsResponse { - connections: identified_connections, - pagination: None, - height: None, - })) - } - - async fn client_connections( - &self, - request: Request, - ) -> Result, Status> { - trace!("Got client connections request: {:?}", request); - - let client_id = request - .get_ref() - .client_id - .parse() - .map_err(|e| Status::invalid_argument(format!("{e}")))?; - let path = ClientConnectionPath::new(&client_id); - let connection_ids = self - .connection_ids_store - .get(Height::Pending, &path) - .unwrap_or_default(); - let connection_paths = connection_ids - .into_iter() - .map(|conn_id| conn_id.to_string()) - .collect(); - - Ok(Response::new(QueryClientConnectionsResponse { - connection_paths, - // Note: proofs aren't being used by hermes currently - proof: vec![], - proof_height: None, - })) - } - - async fn connection_client_state( - &self, - _request: Request, - ) -> Result, Status> { - todo!() - } - - async fn connection_consensus_state( - &self, - _request: Request, - ) -> Result, Status> { - todo!() - } -} - -pub struct IbcChannelService { - channel_end_store: ProtobufStore, ChannelEndPath, ChannelEnd, RawChannelEnd>, - packet_commitment_store: BinStore, CommitmentPath, PacketCommitment>, - packet_ack_store: BinStore, AckPath, AcknowledgementCommitment>, - packet_receipt_store: TypedSet, ReceiptPath>, -} - -impl IbcChannelService { - pub fn new(store: SharedStore) -> Self { - Self { - channel_end_store: TypedStore::new(store.clone()), - packet_commitment_store: TypedStore::new(store.clone()), - packet_ack_store: TypedStore::new(store.clone()), - packet_receipt_store: TypedStore::new(store), - } - } -} - -#[tonic::async_trait] -impl 
ChannelQuery for IbcChannelService { - async fn channel( - &self, - request: Request, - ) -> Result, Status> { - let request = request.into_inner(); - let port_id = PortId::from_str(&request.port_id) - .map_err(|_| Status::invalid_argument("invalid port id"))?; - let channel_id = ChannelId::from_str(&request.channel_id) - .map_err(|_| Status::invalid_argument("invalid channel id"))?; - - let channel = self - .channel_end_store - .get(Height::Pending, &ChannelEndPath(port_id, channel_id)) - .map(|channel_end| channel_end.into()); - - Ok(Response::new(QueryChannelResponse { - channel, - proof: vec![], - proof_height: None, - })) - } - /// Channels queries all the IBC channels of a chain. - async fn channels( - &self, - _request: Request, - ) -> Result, Status> { - let channel_path_prefix: Path = String::from("channelEnds/ports") - .try_into() - .expect("'channelEnds/ports' expected to be a valid Path"); - - let channel_paths = self.channel_end_store.get_keys(&channel_path_prefix); - let identified_channels: Vec = channel_paths - .into_iter() - .map(|path| match path.try_into() { - Ok(IbcPath::ChannelEnd(channels_path)) => { - let channel_end = self - .channel_end_store - .get(Height::Pending, &channels_path) - .expect("channel path returned by get_keys() had no associated channel"); - IdentifiedChannelEnd::new(channels_path.0, channels_path.1, channel_end).into() - } - _ => panic!("unexpected path"), - }) - .collect(); - - Ok(Response::new(QueryChannelsResponse { - channels: identified_channels, - pagination: None, - height: Some(RawHeight { - revision_number: CHAIN_REVISION_NUMBER, - revision_height: self.channel_end_store.current_height(), - }), - })) - } - /// ConnectionChannels queries all the channels associated with a connection - /// end. 
- async fn connection_channels( - &self, - request: Request, - ) -> Result, Status> { - let conn_id = ConnectionId::from_str(&request.get_ref().connection) - .map_err(|_| Status::invalid_argument("invalid connection id"))?; - - let path = "channelEnds" - .to_owned() - .try_into() - .expect("'commitments/ports' expected to be a valid Path"); - - let keys = self.channel_end_store.get_keys(&path); - let channels = keys - .into_iter() - .filter_map(|path| { - if let Ok(IbcPath::ChannelEnd(path)) = path.try_into() { - let channel_end = self.channel_end_store.get(Height::Pending, &path)?; - if channel_end.connection_hops.first() == Some(&conn_id) { - return Some(IdentifiedChannelEnd::new(path.0, path.1, channel_end).into()); - } - } - - None - }) - .collect(); - - Ok(Response::new(QueryConnectionChannelsResponse { - channels, - pagination: None, - height: Some(RawHeight { - revision_number: CHAIN_REVISION_NUMBER, - revision_height: self.channel_end_store.current_height(), - }), - })) - } - /// ChannelClientState queries for the client state for the channel associated - /// with the provided channel identifiers. - async fn channel_client_state( - &self, - _request: Request, - ) -> Result, Status> { - todo!() - } - /// ChannelConsensusState queries for the consensus state for the channel - /// associated with the provided channel identifiers. - async fn channel_consensus_state( - &self, - _request: Request, - ) -> Result, Status> { - todo!() - } - /// PacketCommitment queries a stored packet commitment hash. - async fn packet_commitment( - &self, - _request: Request, - ) -> Result, Status> { - todo!() - } - /// PacketCommitments returns all the packet commitments hashes associated - /// with a channel. 
- async fn packet_commitments( - &self, - request: Request, - ) -> Result, Status> { - let request = request.into_inner(); - let port_id = PortId::from_str(&request.port_id) - .map_err(|_| Status::invalid_argument("invalid port id"))?; - let channel_id = ChannelId::from_str(&request.channel_id) - .map_err(|_| Status::invalid_argument("invalid channel id"))?; - - let commitment_paths = { - let prefix: Path = String::from("commitments/ports") - .try_into() - .expect("'commitments/ports' expected to be a valid Path"); - self.packet_commitment_store.get_keys(&prefix) - }; - - let matching_commitment_paths = |path: Path| -> Option { - match path.try_into() { - Ok(IbcPath::Commitment(p)) - if p.port_id == port_id && p.channel_id == channel_id => - { - Some(p) - } - _ => None, - } - }; - - let packet_state = |path: CommitmentPath| -> Option { - let commitment = self - .packet_commitment_store - .get(Height::Pending, &path) - .unwrap(); - let data = commitment.into_vec(); - (!data.is_empty()).then(|| PacketState { - port_id: path.port_id.to_string(), - channel_id: path.channel_id.to_string(), - sequence: path.sequence.into(), - data, - }) - }; - - let packet_states: Vec = commitment_paths - .into_iter() - .filter_map(matching_commitment_paths) - .filter_map(packet_state) - .collect(); - - Ok(Response::new(QueryPacketCommitmentsResponse { - commitments: packet_states, - pagination: None, - height: Some(RawHeight { - revision_number: CHAIN_REVISION_NUMBER, - revision_height: self.packet_commitment_store.current_height(), - }), - })) - } - - /// PacketReceipt queries if a given packet sequence has been received on the - /// queried chain - async fn packet_receipt( - &self, - _request: Request, - ) -> Result, Status> { - todo!() - } - - /// PacketAcknowledgement queries a stored packet acknowledgement hash. 
- async fn packet_acknowledgement( - &self, - _request: Request, - ) -> Result, Status> { - todo!() - } - - /// PacketAcknowledgements returns all the packet acknowledgements associated - /// with a channel. - async fn packet_acknowledgements( - &self, - request: Request, - ) -> Result, Status> { - let request = request.into_inner(); - let port_id = PortId::from_str(&request.port_id) - .map_err(|_| Status::invalid_argument("invalid port id"))?; - let channel_id = ChannelId::from_str(&request.channel_id) - .map_err(|_| Status::invalid_argument("invalid channel id"))?; - - let ack_paths = { - let prefix: Path = String::from("acks/ports") - .try_into() - .expect("'acks/ports' expected to be a valid Path"); - self.packet_ack_store.get_keys(&prefix) - }; - - let matching_ack_paths = |path: Path| -> Option { - match path.try_into() { - Ok(IbcPath::Ack(p)) if p.port_id == port_id && p.channel_id == channel_id => { - Some(p) - } - _ => None, - } - }; - - let packet_state = |path: AckPath| -> Option { - let commitment = self.packet_ack_store.get(Height::Pending, &path).unwrap(); - let data = commitment.into_vec(); - (!data.is_empty()).then(|| PacketState { - port_id: path.port_id.to_string(), - channel_id: path.channel_id.to_string(), - sequence: path.sequence.into(), - data, - }) - }; - - let packet_states: Vec = ack_paths - .into_iter() - .filter_map(matching_ack_paths) - .filter_map(packet_state) - .collect(); - - Ok(Response::new(QueryPacketAcknowledgementsResponse { - acknowledgements: packet_states, - pagination: None, - height: Some(RawHeight { - revision_number: CHAIN_REVISION_NUMBER, - revision_height: self.packet_ack_store.current_height(), - }), - })) - } - - /// UnreceivedPackets returns all the unreceived IBC packets associated with - /// a channel and sequences. - /// - /// QUESTION. Currently only works for unordered channels; ordered channels - /// don't use receipts. However, ibc-go does it this way. 
Investigate if - /// this query only ever makes sense on unordered channels. - async fn unreceived_packets( - &self, - request: Request, - ) -> Result, Status> { - let request = request.into_inner(); - let port_id = PortId::from_str(&request.port_id) - .map_err(|_| Status::invalid_argument("invalid port id"))?; - let channel_id = ChannelId::from_str(&request.channel_id) - .map_err(|_| Status::invalid_argument("invalid channel id"))?; - let sequences_to_check: Vec = request.packet_commitment_sequences; - - let unreceived_sequences: Vec = sequences_to_check - .into_iter() - .filter(|seq| { - let receipts_path = ReceiptPath::new(&port_id, &channel_id, Sequence::from(*seq)); - self.packet_receipt_store - .get(Height::Pending, &receipts_path) - .is_none() - }) - .collect(); - - Ok(Response::new(QueryUnreceivedPacketsResponse { - sequences: unreceived_sequences, - height: Some(RawHeight { - revision_number: CHAIN_REVISION_NUMBER, - revision_height: self.packet_receipt_store.current_height(), - }), - })) - } - - /// UnreceivedAcks returns all the unreceived IBC acknowledgements associated - /// with a channel and sequences. - async fn unreceived_acks( - &self, - request: Request, - ) -> Result, Status> { - let request = request.into_inner(); - let port_id = PortId::from_str(&request.port_id) - .map_err(|_| Status::invalid_argument("invalid port id"))?; - let channel_id = ChannelId::from_str(&request.channel_id) - .map_err(|_| Status::invalid_argument("invalid channel id"))?; - let sequences_to_check: Vec = request.packet_ack_sequences; - - let unreceived_sequences: Vec = sequences_to_check - .into_iter() - .filter(|seq| { - // To check if we received an acknowledgement, we check if we still have the sent packet - // commitment (upon receiving an ack, the sent packet commitment is deleted). 
- let commitments_path = - CommitmentPath::new(&port_id, &channel_id, Sequence::from(*seq)); - self.packet_commitment_store - .get(Height::Pending, &commitments_path) - .is_some() - }) - .collect(); - - Ok(Response::new(QueryUnreceivedAcksResponse { - sequences: unreceived_sequences, - height: Some(RawHeight { - revision_number: CHAIN_REVISION_NUMBER, - revision_height: self.packet_commitment_store.current_height(), - }), - })) - } - - /// NextSequenceReceive returns the next receive sequence for a given channel. - async fn next_sequence_receive( - &self, - _request: Request, - ) -> Result, Status> { - todo!() - } -} - -pub trait IbcModuleWrapper: IbcModule + Send + Sync { - fn as_ibc_module(&self) -> &dyn IbcModule; - fn as_ibc_module_mut(&mut self) -> &mut dyn IbcModule; -} - -#[derive(Clone, Default, Debug)] -pub struct IbcRouter(BTreeMap>); - -impl IbcRouter { - pub fn get_route(&self, module_id: &impl Borrow) -> Option<&dyn IbcModule> { - self.0 - .get(module_id.borrow()) - .map(|mod_wrapper| mod_wrapper.as_ibc_module()) - } - - pub fn get_route_mut( - &mut self, - module_id: &impl Borrow, - ) -> Option<&mut dyn IbcModule> { - self.0 - .get_mut(module_id.borrow()) - .and_then(Arc::get_mut) - .map(|mod_wrapper| mod_wrapper.as_ibc_module_mut()) - } - - pub fn add_route( - &mut self, - module_id: ModuleId, - module: impl IbcModuleWrapper, - ) -> Result<(), String> { - match self.0.insert(module_id, Arc::new(module)) { - None => Ok(()), - Some(_) => Err("Duplicate module_id".to_owned()), - } - } -} -#[derive(Clone, Debug)] -pub struct IbcTransferModule { - // store: SharedStore, - /// A bank keeper to enable sending, minting and burning of tokens - bank_keeper: BK, - /// A typed-store for AnyClientState - client_state_store: ProtobufStore, ClientStatePath, TmClientState, Any>, - /// A typed-store for AnyConsensusState - consensus_state_store: - ProtobufStore, ClientConsensusStatePath, TmConsensusState, Any>, - /// A typed-store for ConnectionEnd - 
connection_end_store: - ProtobufStore, ConnectionPath, ConnectionEnd, RawConnectionEnd>, - /// A typed-store for ChannelEnd - channel_end_store: ProtobufStore, ChannelEndPath, ChannelEnd, RawChannelEnd>, - /// A typed-store for send sequences - send_sequence_store: JsonStore, SeqSendPath, Sequence>, - /// A typed-store for packet commitments - packet_commitment_store: BinStore, CommitmentPath, PacketCommitment>, - - pub events: Vec, - - log: Vec, -} - -impl> - IbcTransferModule -{ - pub fn new(store: SharedStore, bank_keeper: BK) -> Self { - Self { - bank_keeper, - client_state_store: TypedStore::new(store.clone()), - consensus_state_store: TypedStore::new(store.clone()), - connection_end_store: TypedStore::new(store.clone()), - channel_end_store: TypedStore::new(store.clone()), - send_sequence_store: TypedStore::new(store.clone()), - packet_commitment_store: TypedStore::new(store), - events: Vec::new(), - log: Vec::new(), - } - } -} - -impl> - IbcModule for IbcTransferModule -{ - #[allow(clippy::too_many_arguments)] - fn on_chan_open_init_validate( - &self, - order: Order, - connection_hops: &[ConnectionId], - port_id: &PortId, - channel_id: &ChannelId, - counterparty: &Counterparty, - version: &ChannelVersion, - ) -> Result { - on_chan_open_init_validate( - self, - order, - connection_hops, - port_id, - channel_id, - counterparty, - version, - ) - .map_err(|e: TokenTransferError| ChannelError::AppModule { - description: e.to_string(), - })?; - Ok(ChannelVersion::new(VERSION.to_string())) - } - - #[allow(clippy::too_many_arguments)] - fn on_chan_open_init_execute( - &mut self, - order: Order, - connection_hops: &[ConnectionId], - port_id: &PortId, - channel_id: &ChannelId, - counterparty: &Counterparty, - version: &ChannelVersion, - ) -> Result<(ModuleExtras, ChannelVersion), ChannelError> { - on_chan_open_init_execute( - self, - order, - connection_hops, - port_id, - channel_id, - counterparty, - version, - ) - .map_err(|e: TokenTransferError| 
ChannelError::AppModule { - description: e.to_string(), - }) - } - - #[allow(clippy::too_many_arguments)] - fn on_chan_open_try_validate( - &self, - order: Order, - connection_hops: &[ConnectionId], - port_id: &PortId, - channel_id: &ChannelId, - counterparty: &Counterparty, - counterparty_version: &ChannelVersion, - ) -> Result { - on_chan_open_try_validate( - self, - order, - connection_hops, - port_id, - channel_id, - counterparty, - counterparty_version, - ) - .map_err(|e: TokenTransferError| ChannelError::AppModule { - description: e.to_string(), - })?; - Ok(ChannelVersion::new(VERSION.to_string())) - } - - #[allow(clippy::too_many_arguments)] - fn on_chan_open_try_execute( - &mut self, - order: Order, - connection_hops: &[ConnectionId], - port_id: &PortId, - channel_id: &ChannelId, - counterparty: &Counterparty, - counterparty_version: &ChannelVersion, - ) -> Result<(ModuleExtras, ChannelVersion), ChannelError> { - on_chan_open_try_execute( - self, - order, - connection_hops, - port_id, - channel_id, - counterparty, - counterparty_version, - ) - .map_err(|e: TokenTransferError| ChannelError::AppModule { - description: e.to_string(), - }) - } - - fn on_chan_open_ack_validate( - &self, - port_id: &PortId, - channel_id: &ChannelId, - counterparty_version: &ChannelVersion, - ) -> Result<(), ChannelError> { - on_chan_open_ack_validate(self, port_id, channel_id, counterparty_version).map_err( - |e: TokenTransferError| ChannelError::AppModule { - description: e.to_string(), - }, - ) - } - - fn on_chan_open_ack_execute( - &mut self, - _port_id: &PortId, - _channel_id: &ChannelId, - _counterparty_version: &ChannelVersion, - ) -> Result { - Ok(ModuleExtras::empty()) - } - - fn on_chan_open_confirm_validate( - &self, - port_id: &PortId, - channel_id: &ChannelId, - ) -> Result<(), ChannelError> { - on_chan_open_confirm_validate(self, port_id, channel_id).map_err(|e: TokenTransferError| { - ChannelError::AppModule { - description: e.to_string(), - } - }) - } - - fn 
on_chan_open_confirm_execute( - &mut self, - _port_id: &PortId, - _channel_id: &ChannelId, - ) -> Result { - Ok(ModuleExtras::empty()) - } - - fn on_chan_close_init_validate( - &self, - _port_id: &PortId, - _channel_id: &ChannelId, - ) -> Result<(), ChannelError> { - Ok(()) - } - - fn on_chan_close_init_execute( - &mut self, - _port_id: &PortId, - _channel_id: &ChannelId, - ) -> Result { - Ok(ModuleExtras::empty()) - } - - fn on_chan_close_confirm_validate( - &self, - _port_id: &PortId, - _channel_id: &ChannelId, - ) -> Result<(), ChannelError> { - Ok(()) - } - - fn on_chan_close_confirm_execute( - &mut self, - _port_id: &PortId, - _channel_id: &ChannelId, - ) -> Result { - Ok(ModuleExtras::empty()) - } - - fn on_recv_packet_execute( - &mut self, - packet: &Packet, - _relayer: &Signer, - ) -> (ModuleExtras, Acknowledgement) { - on_recv_packet_execute(self, packet) - } - - fn on_acknowledgement_packet_validate( - &self, - packet: &Packet, - acknowledgement: &Acknowledgement, - relayer: &Signer, - ) -> Result<(), PacketError> { - on_acknowledgement_packet_validate(self, packet, acknowledgement, relayer).map_err( - |e: TokenTransferError| PacketError::AppModule { - description: e.to_string(), - }, - ) - } - - fn on_acknowledgement_packet_execute( - &mut self, - _packet: &Packet, - _acknowledgement: &Acknowledgement, - _relayer: &Signer, - ) -> (ModuleExtras, Result<(), PacketError>) { - (ModuleExtras::empty(), Ok(())) - } - - /// Note: `MsgTimeout` and `MsgTimeoutOnClose` use the same callback - fn on_timeout_packet_validate( - &self, - packet: &Packet, - relayer: &Signer, - ) -> Result<(), PacketError> { - on_timeout_packet_validate(self, packet, relayer).map_err(|e: TokenTransferError| { - PacketError::AppModule { - description: e.to_string(), - } - }) - } - - /// Note: `MsgTimeout` and `MsgTimeoutOnClose` use the same callback - fn on_timeout_packet_execute( - &mut self, - packet: &Packet, - relayer: &Signer, - ) -> (ModuleExtras, Result<(), PacketError>) { - let 
res = on_timeout_packet_execute(self, packet, relayer); - ( - res.0, - res.1 - .map_err(|e: TokenTransferError| PacketError::AppModule { - description: e.to_string(), - }), - ) - } -} - -impl + Send + Sync + Debug + 'static> - IbcModuleWrapper for IbcTransferModule -{ - fn as_ibc_module(&self) -> &dyn IbcModule { - self - } - - fn as_ibc_module_mut(&mut self) -> &mut dyn IbcModule { - self - } -} - -impl> TokenTransferExecutionContext - for IbcTransferModule -{ - fn send_coins( - &mut self, - from: &Self::AccountId, - to: &Self::AccountId, - amt: &PrefixedCoin, - ) -> Result<(), TokenTransferError> { - let from = from - .to_string() - .parse() - .map_err(|_| TokenTransferError::ParseAccountFailure)?; - let to = to - .to_string() - .parse() - .map_err(|_| TokenTransferError::ParseAccountFailure)?; - let coins = vec![Coin { - denom: Denom(amt.denom.to_string()), - amount: amt.amount.into(), - }]; - self.bank_keeper.send_coins(from, to, coins).unwrap(); // Fixme(hu55a1n1) - Ok(()) - } - - fn mint_coins( - &mut self, - account: &Self::AccountId, - amt: &PrefixedCoin, - ) -> Result<(), TokenTransferError> { - let account = account - .to_string() - .parse() - .map_err(|_| TokenTransferError::ParseAccountFailure)?; - let coins = vec![Coin { - denom: Denom(amt.denom.to_string()), - amount: amt.amount.into(), - }]; - self.bank_keeper.mint_coins(account, coins).unwrap(); // Fixme(hu55a1n1) - Ok(()) - } - - fn burn_coins( - &mut self, - account: &Self::AccountId, - amt: &PrefixedCoin, - ) -> Result<(), TokenTransferError> { - let account = account - .to_string() - .parse() - .map_err(|_| TokenTransferError::ParseAccountFailure)?; - let coins = vec![Coin { - denom: Denom(amt.denom.to_string()), - amount: amt.amount.into(), - }]; - self.bank_keeper.burn_coins(account, coins).unwrap(); // Fixme(hu55a1n1) - Ok(()) - } -} - -impl TokenTransferValidationContext for IbcTransferModule { - type AccountId = Signer; - - fn get_port(&self) -> Result { - Ok(PortId::transfer()) - } - - fn 
get_channel_escrow_address( - &self, - port_id: &PortId, - channel_id: &ChannelId, - ) -> Result { - let account_id = AccountId::new( - ACCOUNT_PREFIX, - &cosmos_adr028_escrow_address(port_id, channel_id), - ) - .map_err(|_| TokenTransferError::ParseAccountFailure)?; - account_id - .to_string() - .parse() - .map_err(|_| TokenTransferError::ParseAccountFailure) - } - - fn is_send_enabled(&self) -> bool { - true - } - - fn is_receive_enabled(&self) -> bool { - true - } -} - -impl SendPacketValidationContext for IbcTransferModule { - fn channel_end(&self, channel_end_path: &ChannelEndPath) -> Result { - self.channel_end_store - .get(Height::Pending, channel_end_path) - .ok_or(ContextError::ChannelError(ChannelError::ChannelNotFound { - port_id: channel_end_path.0.clone(), - channel_id: channel_end_path.1.clone(), - })) - } - - fn connection_end(&self, connection_id: &ConnectionId) -> Result { - self.connection_end_store - .get(Height::Pending, &ConnectionPath::new(connection_id)) - .ok_or(ContextError::ConnectionError( - ConnectionError::ConnectionNotFound { - connection_id: connection_id.clone(), - }, - )) - } - - fn client_state(&self, client_id: &ClientId) -> Result, ContextError> { - self.client_state_store - .get(Height::Pending, &ClientStatePath::new(client_id)) - .ok_or(ContextError::ClientError(ClientError::ClientNotFound { - client_id: client_id.clone(), - })) - .map(|cs| Box::new(cs) as Box) - } - - fn client_consensus_state( - &self, - client_cons_state_path: &ClientConsensusStatePath, - ) -> Result, ContextError> { - let height = IbcHeight::new(client_cons_state_path.epoch, client_cons_state_path.height) - .map_err(|_| ContextError::ClientError(ClientError::InvalidHeight))?; - self.consensus_state_store - .get(Height::Pending, client_cons_state_path) - .ok_or(ContextError::ClientError( - ClientError::ConsensusStateNotFound { - client_id: client_cons_state_path.client_id.clone(), - height, - }, - )) - .map(|cs| Box::new(cs) as Box) - } - - fn 
get_next_sequence_send( - &self, - seq_send_path: &SeqSendPath, - ) -> Result { - self.send_sequence_store - .get(Height::Pending, seq_send_path) - .ok_or(ContextError::PacketError(PacketError::MissingNextSendSeq { - port_id: seq_send_path.0.clone(), - channel_id: seq_send_path.1.clone(), - })) - } - - fn hash(&self, value: &[u8]) -> Vec { - sha2::Sha256::digest(value).to_vec() - } - - fn compute_packet_commitment( - &self, - packet_data: &[u8], - timeout_height: &TimeoutHeight, - timeout_timestamp: &Timestamp, - ) -> PacketCommitment { - // copy/pasted for now; see https://github.com/cosmos/ibc-rs/issues/470 - let mut hash_input = timeout_timestamp.nanoseconds().to_be_bytes().to_vec(); - - let revision_number = timeout_height.commitment_revision_number().to_be_bytes(); - hash_input.append(&mut revision_number.to_vec()); - - let revision_height = timeout_height.commitment_revision_height().to_be_bytes(); - hash_input.append(&mut revision_height.to_vec()); - - let packet_data_hash = self.hash(packet_data); - hash_input.append(&mut packet_data_hash.to_vec()); - - self.hash(&hash_input).into() - } -} - -impl> SendPacketExecutionContext - for IbcTransferModule -{ - fn store_packet_commitment( - &mut self, - commitment_path: &CommitmentPath, - commitment: PacketCommitment, - ) -> Result<(), ContextError> { - self.packet_commitment_store - .set(commitment_path.clone(), commitment) - .map_err(|_| PacketError::ImplementationSpecific)?; - Ok(()) - } - - fn store_next_sequence_send( - &mut self, - seq_send_path: &SeqSendPath, - seq: Sequence, - ) -> Result<(), ContextError> { - self.send_sequence_store - .set(seq_send_path.clone(), seq) - .map_err(|_| PacketError::ImplementationSpecific)?; - Ok(()) - } - - fn emit_ibc_event(&mut self, event: IbcEvent) { - self.events.push(event) - } - - fn log_message(&mut self, message: String) { - self.log.push(message) - } -} - -impl ContextRouter for Ibc { - fn get_route(&self, module_id: &ModuleId) -> Option<&dyn IbcModule> { - 
self.router.get_route(module_id) - } - - fn get_route_mut(&mut self, module_id: &ModuleId) -> Option<&mut dyn IbcModule> { - self.router.get_route_mut(module_id) - } - - fn has_route(&self, module_id: &ModuleId) -> bool { - self.router.0.get(module_id).is_some() - } - - fn lookup_module_by_port(&self, port_id: &PortId) -> Option { - self.port_to_module_map - .get(port_id) - .ok_or(PortError::UnknownPort { - port_id: port_id.clone(), - }) - .map(Clone::clone) - .ok() - } -} - -impl ValidationContext for Ibc { - fn client_state(&self, client_id: &ClientId) -> Result, ContextError> { - self.client_state_store - .get(Height::Pending, &ClientStatePath(client_id.clone())) - .ok_or(ClientError::ImplementationSpecific) - .map_err(ContextError::from) - .map(|cs| Box::new(cs) as Box) - } - - fn decode_client_state(&self, client_state: Any) -> Result, ContextError> { - if let Ok(client_state) = TmClientState::try_from(client_state.clone()) { - Ok(client_state.into_box()) - } else { - Err(ClientError::UnknownClientStateType { - client_state_type: client_state.type_url, - }) - .map_err(ContextError::from) - } - } - - fn consensus_state( - &self, - client_cons_state_path: &ClientConsensusStatePath, - ) -> Result, ContextError> { - let height = IbcHeight::new(client_cons_state_path.epoch, client_cons_state_path.height) - .map_err(|_| ClientError::InvalidHeight)?; - let consensus_state = self - .consensus_state_store - .get(Height::Pending, client_cons_state_path) - .ok_or(ClientError::ConsensusStateNotFound { - client_id: client_cons_state_path.client_id.clone(), - height, - })?; - Ok(Box::new(consensus_state) as Box) - } - - fn next_consensus_state( - &self, - client_id: &ClientId, - height: &IbcHeight, - ) -> Result>, ContextError> { - let path = format!("clients/{client_id}/consensusStates") - .try_into() - .unwrap(); // safety - path must be valid since ClientId and height are valid Identifiers - - let keys = self.store.get_keys(&path); - let found_path = 
keys.into_iter().find_map(|path| { - if let Ok(IbcPath::ClientConsensusState(path)) = IbcPath::try_from(path) { - if height > &IbcHeight::new(path.epoch, path.height).unwrap() { - return Some(path); - } - } - None - }); - - if let Some(path) = found_path { - let consensus_state = self - .consensus_state_store - .get(Height::Pending, &path) - .ok_or(ClientError::ConsensusStateNotFound { - client_id: client_id.clone(), - height: *height, - })?; - Ok(Some(Box::new(consensus_state))) - } else { - Ok(None) - } - } - - fn prev_consensus_state( - &self, - client_id: &ClientId, - height: &IbcHeight, - ) -> Result>, ContextError> { - let path = format!("clients/{client_id}/consensusStates") - .try_into() - .unwrap(); // safety - path must be valid since ClientId and height are valid Identifiers - - let keys = self.store.get_keys(&path); - let pos = keys.iter().position(|path| { - if let Ok(IbcPath::ClientConsensusState(path)) = IbcPath::try_from(path.clone()) { - height >= &IbcHeight::new(path.epoch, path.height).unwrap() - } else { - false - } - }); - - if let Some(pos) = pos { - if pos > 0 { - let prev_path = match IbcPath::try_from(keys[pos - 1].clone()) { - Ok(IbcPath::ClientConsensusState(p)) => p, - _ => unreachable!(), // safety - path retrieved from store - }; - let consensus_state = self - .consensus_state_store - .get(Height::Pending, &prev_path) - .ok_or(ClientError::ConsensusStateNotFound { - client_id: client_id.clone(), - height: *height, - })?; - return Ok(Some(Box::new(consensus_state))); - } - } - Ok(None) - } - - fn host_height(&self) -> Result { - IbcHeight::new(0, self.store.current_height()).map_err(ContextError::from) - } - - fn host_timestamp(&self) -> Result { - let host_height = self.host_height()?; - let host_cons_state = self.host_consensus_state(&host_height)?; - Ok(host_cons_state.timestamp()) - } - - fn host_consensus_state( - &self, - height: &IbcHeight, - ) -> Result, ContextError> { - let consensus_state = self - .consensus_states - 
.get(&height.revision_height()) - .ok_or(ClientError::MissingLocalConsensusState { height: *height })?; - Ok(Box::new(consensus_state.clone())) - } - - fn client_counter(&self) -> Result { - Ok(self.client_counter) - } - - fn connection_end(&self, conn_id: &ConnectionId) -> Result { - self.connection_end_store - .get(Height::Pending, &ConnectionPath::new(conn_id)) - .ok_or(ConnectionError::Client(ClientError::ImplementationSpecific)) - .map_err(ContextError::from) - } - - fn validate_self_client(&self, _counterparty_client_state: Any) -> Result<(), ContextError> { - Ok(()) - } - - fn commitment_prefix(&self) -> CommitmentPrefix { - use crate::prefix::Ibc as IbcPrefix; - CommitmentPrefix::try_from(IbcPrefix {}.identifier().as_bytes().to_vec()) - .expect("empty prefix") - } - - fn connection_counter(&self) -> Result { - Ok(self.conn_counter) - } - - fn get_compatible_versions(&self) -> Vec { - vec![ConnectionVersion::default()] - } - - fn pick_version( - &self, - supported_versions: &[ConnectionVersion], - counterparty_candidate_versions: &[ConnectionVersion], - ) -> Result { - pick_version(supported_versions, counterparty_candidate_versions) - .map_err(ContextError::ConnectionError) - } - - fn channel_end(&self, channel_end_path: &ChannelEndPath) -> Result { - self.channel_end_store - .get( - Height::Pending, - &ChannelEndPath::new(&channel_end_path.0, &channel_end_path.1), - ) - .ok_or(ChannelError::Connection(ConnectionError::Client( - ClientError::ImplementationSpecific, - ))) - .map_err(ContextError::ChannelError) - } - - fn get_next_sequence_send( - &self, - seq_send_path: &SeqSendPath, - ) -> Result { - self.send_sequence_store - .get( - Height::Pending, - &SeqSendPath::new(&seq_send_path.0, &seq_send_path.1), - ) - .ok_or(PacketError::ImplementationSpecific) - .map_err(ContextError::PacketError) - } - - fn get_next_sequence_recv( - &self, - seq_recv_path: &SeqRecvPath, - ) -> Result { - self.recv_sequence_store - .get( - Height::Pending, - 
&SeqRecvPath::new(&seq_recv_path.0, &seq_recv_path.1), - ) - .ok_or(PacketError::ImplementationSpecific) - .map_err(ContextError::PacketError) - } - - fn get_next_sequence_ack(&self, seq_ack_path: &SeqAckPath) -> Result { - self.ack_sequence_store - .get( - Height::Pending, - &SeqAckPath::new(&seq_ack_path.0, &seq_ack_path.1), - ) - .ok_or(PacketError::ImplementationSpecific) - .map_err(ContextError::PacketError) - } - - fn get_packet_commitment( - &self, - commitment_path: &CommitmentPath, - ) -> Result { - self.packet_commitment_store - .get( - Height::Pending, - &CommitmentPath::new( - &commitment_path.port_id, - &commitment_path.channel_id, - commitment_path.sequence, - ), - ) - .ok_or(PacketError::ImplementationSpecific) - .map_err(ContextError::PacketError) - } - - fn get_packet_receipt(&self, receipt_path: &ReceiptPath) -> Result { - self.packet_receipt_store - .is_path_set( - Height::Pending, - &ReceiptPath::new( - &receipt_path.port_id, - &receipt_path.channel_id, - receipt_path.sequence, - ), - ) - .then_some(Receipt::Ok) - .ok_or(PacketError::PacketReceiptNotFound { - sequence: receipt_path.sequence, - }) - .map_err(ContextError::PacketError) - } - - fn get_packet_acknowledgement( - &self, - ack_path: &AckPath, - ) -> Result { - self.packet_ack_store - .get( - Height::Pending, - &AckPath::new(&ack_path.port_id, &ack_path.channel_id, ack_path.sequence), - ) - .ok_or(PacketError::PacketAcknowledgementNotFound { - sequence: ack_path.sequence, - }) - .map_err(ContextError::PacketError) - } - - /// A hashing function for packet commitments - fn hash(&self, value: &[u8]) -> Vec { - sha2::Sha256::digest(value).to_vec() - } - - /// Returns the time when the client state for the given [`ClientId`] was updated with a header for the given [`Height`] - fn client_update_time( - &self, - client_id: &ClientId, - height: &IbcHeight, - ) -> Result { - self.client_processed_times - .get(&(client_id.clone(), *height)) - .cloned() - 
.ok_or(ChannelError::Connection(ConnectionError::Client( - ClientError::ImplementationSpecific, - ))) - .map_err(ContextError::ChannelError) - } - - /// Returns the height when the client state for the given [`ClientId`] was updated with a header for the given [`Height`] - fn client_update_height( - &self, - client_id: &ClientId, - height: &IbcHeight, - ) -> Result { - self.client_processed_heights - .get(&(client_id.clone(), *height)) - .cloned() - .ok_or(ChannelError::Connection(ConnectionError::Client( - ClientError::ImplementationSpecific, - ))) - .map_err(ContextError::ChannelError) - } - - /// Returns a counter on the number of channel ids have been created thus far. - /// The value of this counter should increase only via method - /// `ChannelKeeper::increase_channel_counter`. - fn channel_counter(&self) -> Result { - Ok(self.channel_counter) - } - - /// Returns the maximum expected time per block - fn max_expected_time_per_block(&self) -> Duration { - Duration::from_secs(8) - } - - /// Calculates the block delay period using the connection's delay period and the maximum - /// expected time per block. 
- fn block_delay(&self, delay_period_time: &Duration) -> u64 { - calculate_block_delay( - delay_period_time, - &::max_expected_time_per_block(self), - ) - } -} - -impl ExecutionContext for Ibc { - /// Called upon successful client creation - fn store_client_type( - &mut self, - client_type_path: ClientTypePath, - client_type: ClientType, - ) -> Result<(), ContextError> { - self.client_type_store - .set(client_type_path, client_type) - .map(|_| ()) - .map_err(|_| ClientError::ImplementationSpecific) - .map_err(ContextError::ClientError) - } - - /// Called upon successful client creation and update - fn store_client_state( - &mut self, - client_state_path: ClientStatePath, - client_state: Box, - ) -> Result<(), ContextError> { - let tm_client_state = client_state - .as_any() - .downcast_ref::() - .ok_or(ClientError::ImplementationSpecific)?; - self.client_state_store - .set(client_state_path, tm_client_state.clone()) - .map(|_| ()) - .map_err(|_| ClientError::ImplementationSpecific) - .map_err(ContextError::ClientError) - } - - /// Called upon successful client creation and update - fn store_consensus_state( - &mut self, - consensus_state_path: ClientConsensusStatePath, - consensus_state: Box, - ) -> Result<(), ContextError> { - let tm_consensus_state = consensus_state - .as_any() - .downcast_ref::() - .ok_or(ClientError::ImplementationSpecific)?; - self.consensus_state_store - .set(consensus_state_path, tm_consensus_state.clone()) - .map_err(|_| ClientError::ImplementationSpecific)?; - Ok(()) - } - - /// Called upon client creation. - /// Increases the counter which keeps track of how many clients have been created. - /// Should never fail. - fn increase_client_counter(&mut self) { - self.client_counter += 1; - } - - /// Called upon successful client update. - /// Implementations are expected to use this to record the specified time as the time at which - /// this update (or header) was processed. 
- fn store_update_time( - &mut self, - client_id: ClientId, - height: IbcHeight, - timestamp: Timestamp, - ) -> Result<(), ContextError> { - self.client_processed_times - .insert((client_id, height), timestamp); - Ok(()) - } - - /// Called upon successful client update. - /// Implementations are expected to use this to record the specified height as the height at - /// at which this update (or header) was processed. - fn store_update_height( - &mut self, - client_id: ClientId, - height: IbcHeight, - host_height: IbcHeight, - ) -> Result<(), ContextError> { - self.client_processed_heights - .insert((client_id, height), host_height); - Ok(()) - } - - /// Stores the given connection_end at path - fn store_connection( - &mut self, - connection_path: &ConnectionPath, - connection_end: ConnectionEnd, - ) -> Result<(), ContextError> { - self.connection_end_store - .set(connection_path.clone(), connection_end) - .map_err(|_| ConnectionError::Client(ClientError::ImplementationSpecific))?; - Ok(()) - } - - /// Stores the given connection_id at a path associated with the client_id. - fn store_connection_to_client( - &mut self, - client_connection_path: &ClientConnectionPath, - conn_id: ConnectionId, - ) -> Result<(), ContextError> { - let mut conn_ids: Vec = self - .connection_ids_store - .get(Height::Pending, client_connection_path) - .unwrap_or_default(); - conn_ids.push(conn_id); - self.connection_ids_store - .set(client_connection_path.clone(), conn_ids) - .map_err(|_| ConnectionError::Client(ClientError::ImplementationSpecific))?; - Ok(()) - } - - /// Called upon connection identifier creation (Init or Try process). - /// Increases the counter which keeps track of how many connections have been created. - /// Should never fail. 
- fn increase_connection_counter(&mut self) { - self.conn_counter += 1; - } - - fn store_packet_commitment( - &mut self, - commitment_path: &CommitmentPath, - commitment: PacketCommitment, - ) -> Result<(), ContextError> { - self.packet_commitment_store - .set(commitment_path.clone(), commitment) - .map_err(|_| PacketError::ImplementationSpecific)?; - Ok(()) - } - - fn delete_packet_commitment(&mut self, key: &CommitmentPath) -> Result<(), ContextError> { - self.packet_commitment_store - .set(key.clone(), vec![].into()) - .map_err(|_| PacketError::ImplementationSpecific)?; - Ok(()) - } - - fn store_packet_receipt( - &mut self, - receipt_path: &ReceiptPath, - _receipt: Receipt, - ) -> Result<(), ContextError> { - self.packet_receipt_store - .set_path(receipt_path.clone()) - .map_err(|_| PacketError::ImplementationSpecific)?; - Ok(()) - } - - fn store_packet_acknowledgement( - &mut self, - ack_path: &AckPath, - ack_commitment: AcknowledgementCommitment, - ) -> Result<(), ContextError> { - self.packet_ack_store - .set(ack_path.clone(), ack_commitment) - .map_err(|_| PacketError::ImplementationSpecific)?; - Ok(()) - } - - fn delete_packet_acknowledgement(&mut self, ack_path: &AckPath) -> Result<(), ContextError> { - self.packet_ack_store - .set(ack_path.clone(), vec![].into()) - .map_err(|_| PacketError::ImplementationSpecific)?; - Ok(()) - } - - /// Stores the given channel_end at a path associated with the port_id and channel_id. 
- fn store_channel( - &mut self, - channel_end_path: &ChannelEndPath, - channel_end: ChannelEnd, - ) -> Result<(), ContextError> { - self.channel_end_store - .set(channel_end_path.clone(), channel_end) - .map_err(|_| ClientError::ImplementationSpecific)?; - Ok(()) - } - - fn store_next_sequence_send( - &mut self, - seq_send_path: &SeqSendPath, - seq: Sequence, - ) -> Result<(), ContextError> { - self.send_sequence_store - .set(seq_send_path.clone(), seq) - .map_err(|_| PacketError::ImplementationSpecific)?; - Ok(()) - } - - fn store_next_sequence_recv( - &mut self, - seq_recv_path: &SeqRecvPath, - seq: Sequence, - ) -> Result<(), ContextError> { - self.recv_sequence_store - .set(seq_recv_path.clone(), seq) - .map_err(|_| PacketError::ImplementationSpecific)?; - Ok(()) - } - - fn store_next_sequence_ack( - &mut self, - seq_ack_path: &SeqAckPath, - seq: Sequence, - ) -> Result<(), ContextError> { - self.ack_sequence_store - .set(seq_ack_path.clone(), seq) - .map_err(|_| PacketError::ImplementationSpecific)?; - Ok(()) - } - - fn increase_channel_counter(&mut self) { - self.channel_counter += 1; - } - - fn emit_ibc_event(&mut self, event: IbcEvent) { - self.events.push(event); - } - - fn log_message(&mut self, message: String) { - self.logs.push(message); - } -} diff --git a/src/app/response.rs b/src/app/response.rs deleted file mode 100644 index 30d2c022..00000000 --- a/src/app/response.rs +++ /dev/null @@ -1,22 +0,0 @@ -use tendermint_proto::abci::{ResponseCheckTx, ResponseDeliverTx, ResponseQuery}; - -pub(crate) trait ResponseFromErrorExt { - fn from_error(code: u32, log: impl ToString) -> Self; -} - -macro_rules! 
impl_response_error_for { - ($($resp:ty),+) => { - $(impl ResponseFromErrorExt for $resp { - fn from_error(code: u32, log: impl ToString) -> Self { - let log = log.to_string(); - Self { - code, - log, - ..Self::default() - } - } - })+ - }; -} - -impl_response_error_for!(ResponseQuery, ResponseCheckTx, ResponseDeliverTx); diff --git a/src/app/store/mod.rs b/src/app/store/mod.rs deleted file mode 100644 index 36a82642..00000000 --- a/src/app/store/mod.rs +++ /dev/null @@ -1,716 +0,0 @@ -mod avl; -mod memory; - -use displaydoc::Display; -use ibc::core::ics24_host::{error::ValidationError, validate::validate_identifier}; -use ics23::CommitmentProof; -pub(crate) use memory::InMemoryStore; -use serde::{de::DeserializeOwned, Serialize}; -use std::{ - convert::{TryFrom, TryInto}, - fmt::{Debug, Display, Formatter}, - marker::PhantomData, - ops::{Deref, DerefMut}, - str::{from_utf8, Utf8Error}, - sync::{Arc, RwLock}, -}; -use tracing::trace; - -use crate::app::modules::Error as ModuleError; - -/// A `TypedStore` that uses the `JsonCodec` -pub(crate) type JsonStore = TypedStore>; - -/// A `TypedStore` that uses the `ProtobufCodec` -pub(crate) type ProtobufStore = TypedStore>; - -/// A `TypedSet` that stores only paths and no values -pub(crate) type TypedSet = TypedStore; - -/// A `TypedStore` that uses the `BinCodec` -pub(crate) type BinStore = TypedStore>; - -/// A newtype representing a valid ICS024 identifier. -/// Implements `Deref`. -#[derive(Debug, Ord, PartialOrd, Eq, PartialEq, Clone)] -pub struct Identifier(String); - -impl Identifier { - /// Identifiers MUST be non-empty (of positive integer length). 
- /// Identifiers MUST consist of characters in one of the following categories only: - /// * Alphanumeric - /// * `.`, `_`, `+`, `-`, `#` - /// * `[`, `]`, `<`, `>` - fn validate(s: impl AsRef) -> Result<(), Error> { - let s = s.as_ref(); - - // give a `min` parameter of 0 here to allow id's of arbitrary - // length as inputs; `validate_identifier` itself checks for - // empty inputs and returns an error as appropriate - validate_identifier(s, 0, s.len()).map_err(|v| Error::InvalidIdentifier { - identifier: s.to_string(), - error: v, - }) - } -} - -impl Deref for Identifier { - type Target = String; - - fn deref(&self) -> &Self::Target { - &self.0 - } -} - -impl TryFrom for Identifier { - type Error = Error; - - fn try_from(s: String) -> Result { - Identifier::validate(&s).map(|_| Self(s)) - } -} - -impl Display for Identifier { - fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { - write!(f, "{}", self.0) - } -} - -/// A new type representing a valid ICS024 `Path`. -#[derive(Debug, Ord, PartialOrd, Eq, PartialEq, Clone)] - -pub struct Path(Vec); - -impl Path { - pub fn get(&self, index: usize) -> Option<&Identifier> { - self.0.get(index) - } -} - -impl TryFrom for Path { - type Error = Error; - - fn try_from(s: String) -> Result { - let mut identifiers = vec![]; - let parts = s.split('/'); // split will never return an empty iterator - for part in parts { - identifiers.push(Identifier::try_from(part.to_owned())?); - } - Ok(Self(identifiers)) - } -} - -impl TryFrom<&[u8]> for Path { - type Error = Error; - - fn try_from(value: &[u8]) -> Result { - let s = from_utf8(value).map_err(|e| Error::MalformedPathString { error: e })?; - s.to_owned().try_into() - } -} - -impl From for Path { - fn from(id: Identifier) -> Self { - Self(vec![id]) - } -} - -impl Display for Path { - fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { - write!( - f, - "{}", - self.0 - .iter() - .map(|iden| iden.as_str().to_owned()) - .collect::>() - .join("/") - ) - } -} - 
-#[derive(Debug, Display)] -pub enum Error { - /// '{identifier}' is not a valid identifier: `{error}` - InvalidIdentifier { - identifier: String, - error: ValidationError, - }, - /// path isn't a valid string: `{error}` - MalformedPathString { error: Utf8Error }, -} - -impl From for ModuleError { - fn from(e: Error) -> Self { - ModuleError::Store(e) - } -} - -/// Block height -pub(crate) type RawHeight = u64; - -/// Store height to query -#[derive(Debug, Copy, Clone)] -pub enum Height { - Pending, - Latest, - Stable(RawHeight), // or equivalently `tendermint::block::Height` -} - -impl From for Height { - fn from(value: u64) -> Self { - match value { - 0 => Height::Latest, // see https://docs.tendermint.com/master/spec/abci/abci.html#query - _ => Height::Stable(value), - } - } -} - -/// Store trait - maybe provableStore or privateStore -pub trait Store: Send + Sync + Clone { - /// Error type - expected to envelope all possible errors in store - type Error: core::fmt::Debug; - - /// Set `value` for `path` - fn set(&mut self, path: Path, value: Vec) -> Result>, Self::Error>; - - /// Get associated `value` for `path` at specified `height` - fn get(&self, height: Height, path: &Path) -> Option>; - - /// Delete specified `path` - fn delete(&mut self, path: &Path); - - /// Commit `Pending` block to canonical chain and create new `Pending` - fn commit(&mut self) -> Result, Self::Error>; - - /// Apply accumulated changes to `Pending` - fn apply(&mut self) -> Result<(), Self::Error> { - Ok(()) - } - - /// Reset accumulated changes - fn reset(&mut self) {} - - /// Prune historic blocks upto specified `height` - fn prune(&mut self, height: RawHeight) -> Result { - Ok(height) - } - - /// Return the current height of the chain - fn current_height(&self) -> RawHeight; - - /// Return all keys that start with specified prefix - fn get_keys(&self, key_prefix: &Path) -> Vec; // TODO(hu55a1n1): implement support for all heights -} - -/// ProvableStore trait -pub trait ProvableStore: 
Store { - /// Return a vector commitment - fn root_hash(&self) -> Vec; - - /// Return proof of existence for key - fn get_proof(&self, height: Height, key: &Path) -> Option; -} - -/// Wraps a store to make it shareable by cloning -#[derive(Clone, Debug)] -pub struct SharedStore(Arc>); - -impl SharedStore { - pub(crate) fn new(store: S) -> Self { - Self(Arc::new(RwLock::new(store))) - } - - pub(crate) fn share(&self) -> Self { - Self(self.0.clone()) - } -} - -impl Default for SharedStore -where - S: Default + Store, -{ - fn default() -> Self { - Self::new(S::default()) - } -} - -impl Store for SharedStore -where - S: Store, -{ - type Error = S::Error; - - #[inline] - fn set(&mut self, path: Path, value: Vec) -> Result>, Self::Error> { - self.write().unwrap().set(path, value) - } - - #[inline] - fn get(&self, height: Height, path: &Path) -> Option> { - self.read().unwrap().get(height, path) - } - - #[inline] - fn delete(&mut self, path: &Path) { - self.write().unwrap().delete(path) - } - - #[inline] - fn commit(&mut self) -> Result, Self::Error> { - self.write().unwrap().commit() - } - - #[inline] - fn apply(&mut self) -> Result<(), Self::Error> { - self.write().unwrap().apply() - } - - #[inline] - fn reset(&mut self) { - self.write().unwrap().reset() - } - - #[inline] - fn current_height(&self) -> RawHeight { - self.read().unwrap().current_height() - } - - #[inline] - fn get_keys(&self, key_prefix: &Path) -> Vec { - self.read().unwrap().get_keys(key_prefix) - } -} - -impl ProvableStore for SharedStore -where - S: ProvableStore, -{ - #[inline] - fn root_hash(&self) -> Vec { - self.read().unwrap().root_hash() - } - - #[inline] - fn get_proof(&self, height: Height, key: &Path) -> Option { - self.read().unwrap().get_proof(height, key) - } -} - -impl Deref for SharedStore { - type Target = Arc>; - - fn deref(&self) -> &Self::Target { - &self.0 - } -} - -impl DerefMut for SharedStore { - fn deref_mut(&mut self) -> &mut Self::Target { - &mut self.0 - } -} - -/// A wrapper 
store that implements rudimentary `apply()`/`reset()` support for other stores -#[derive(Clone, Debug)] -pub(crate) struct RevertibleStore { - /// backing store - store: S, - /// operation log for recording rollback operations in preserved order - op_log: Vec, -} - -#[derive(Clone, Debug)] -enum RevertOp { - Delete(Path), - Set(Path, Vec), -} - -impl RevertibleStore -where - S: Store, -{ - pub(crate) fn new(store: S) -> Self { - Self { - store, - op_log: vec![], - } - } -} - -impl Default for RevertibleStore -where - S: Default + Store, -{ - fn default() -> Self { - Self::new(S::default()) - } -} - -impl Store for RevertibleStore -where - S: Store, -{ - type Error = S::Error; - - #[inline] - fn set(&mut self, path: Path, value: Vec) -> Result>, Self::Error> { - let old_value = self.store.set(path.clone(), value)?; - match old_value { - // None implies this was an insert op, so we record the revert op as delete op - None => self.op_log.push(RevertOp::Delete(path)), - // Some old value implies this was an update op, so we record the revert op as a set op - // with the old value - Some(ref old_value) => self.op_log.push(RevertOp::Set(path, old_value.clone())), - } - Ok(old_value) - } - - #[inline] - fn get(&self, height: Height, path: &Path) -> Option> { - self.store.get(height, path) - } - - #[inline] - fn delete(&mut self, _path: &Path) { - unimplemented!("RevertibleStore doesn't support delete operations yet!") - } - - #[inline] - fn commit(&mut self) -> Result, Self::Error> { - // call `apply()` before `commit()` to make sure all operations are applied - self.apply()?; - self.store.commit() - } - - #[inline] - fn apply(&mut self) -> Result<(), Self::Error> { - // note that we do NOT call the backing store's apply here - this allows users to create - // multilayered `WalStore`s - self.op_log.clear(); - Ok(()) - } - - #[inline] - fn reset(&mut self) { - // note that we do NOT call the backing store's reset here - this allows users to create - // multilayered 
`WalStore`s - trace!("Rollback operation log changes"); - while let Some(op) = self.op_log.pop() { - match op { - RevertOp::Delete(path) => self.delete(&path), - RevertOp::Set(path, value) => { - self.set(path, value).unwrap(); // safety - reset failures are unrecoverable - } - } - } - } - - #[inline] - fn current_height(&self) -> u64 { - self.store.current_height() - } - - #[inline] - fn get_keys(&self, key_prefix: &Path) -> Vec { - self.store.get_keys(key_prefix) - } -} - -impl ProvableStore for RevertibleStore -where - S: ProvableStore, -{ - #[inline] - fn root_hash(&self) -> Vec { - self.store.root_hash() - } - - #[inline] - fn get_proof(&self, height: Height, key: &Path) -> Option { - self.store.get_proof(height, key) - } -} - -/// A trait that defines how types are decoded/encoded. -pub(crate) trait Codec { - type Type; - type Encoded: AsRef<[u8]>; - - fn encode(d: &Self::Type) -> Option; - - fn decode(bytes: &[u8]) -> Option; -} - -/// A JSON codec that uses `serde_json` to encode/decode as a JSON string -#[derive(Clone, Debug)] -pub(crate) struct JsonCodec(PhantomData); - -impl Codec for JsonCodec -where - T: Serialize + DeserializeOwned, -{ - type Type = T; - type Encoded = String; - - fn encode(d: &Self::Type) -> Option { - serde_json::to_string(d).ok() - } - - fn decode(bytes: &[u8]) -> Option { - let json_string = String::from_utf8(bytes.to_vec()).ok()?; - serde_json::from_str(&json_string).ok() - } -} - -/// A Null codec that can be used for paths that are only meant to be set/reset and do not hold any -/// typed value. 
-#[derive(Clone)] -pub(crate) struct NullCodec; - -impl Codec for NullCodec { - type Type = (); - type Encoded = Vec; - - fn encode(_d: &Self::Type) -> Option { - Some(vec![]) - } - - fn decode(bytes: &[u8]) -> Option { - assert!(bytes.is_empty()); - Some(()) - } -} - -/// A Protobuf codec that uses `prost` to encode/decode -#[derive(Clone, Debug)] -pub(crate) struct ProtobufCodec { - domain_type: PhantomData, - raw_type: PhantomData, -} - -impl Codec for ProtobufCodec -where - T: Into + Clone, - R: TryInto + Default + prost::Message, -{ - type Type = T; - type Encoded = Vec; - - fn encode(d: &Self::Type) -> Option { - let r = d.clone().into(); - Some(r.encode_to_vec()) - } - - fn decode(bytes: &[u8]) -> Option { - let r = R::decode(bytes).ok()?; - r.try_into().ok() - } -} - -/// A binary codec that uses `AsRef<[u8]>` and `From>` to encode and decode respectively. -#[derive(Clone, Debug)] -pub(crate) struct BinCodec(PhantomData); - -impl Codec for BinCodec -where - T: AsRef<[u8]> + From>, -{ - type Type = T; - type Encoded = Vec; - - fn encode(d: &Self::Type) -> Option { - Some(d.as_ref().to_vec()) - } - - fn decode(bytes: &[u8]) -> Option { - Some(bytes.to_vec().into()) - } -} - -/// The `TypedStore` provides methods to treat the data stored at given store paths as given Rust types. -/// -/// It is designed to be aliased for each concrete codec. 
For example, -/// ```rust -/// type CandyStore = TypedStore>; -/// ``` -#[derive(Clone, Debug)] -pub(crate) struct TypedStore { - store: S, - _key: PhantomData, - _codec: PhantomData, -} - -impl TypedStore -where - S: Store, - C: Codec, - K: Into + Clone, -{ - #[inline] - pub(crate) fn new(store: S) -> Self { - Self { - store, - _codec: PhantomData, - _key: PhantomData, - } - } - - #[inline] - pub(crate) fn set(&mut self, path: K, value: V) -> Result, S::Error> { - self.store - .set(path.into(), C::encode(&value).unwrap().as_ref().to_vec()) - .map(|prev_val| prev_val.and_then(|v| C::decode(&v))) - } - - #[inline] - pub(crate) fn get(&self, height: Height, path: &K) -> Option { - self.store - .get(height, &path.clone().into()) - .and_then(|v| C::decode(&v)) - } - - #[inline] - pub(crate) fn get_keys(&self, key_prefix: &Path) -> Vec { - self.store.get_keys(key_prefix) - } - - #[inline] - pub(crate) fn current_height(&self) -> RawHeight { - self.store.current_height() - } -} - -impl TypedStore -where - S: Store, - K: Into + Clone, -{ - #[inline] - pub(crate) fn set_path(&mut self, path: K) -> Result<(), S::Error> { - self.store.set(path.into(), vec![]).map(|_| ()) - } - - #[inline] - pub(crate) fn is_path_set(&self, height: Height, path: &K) -> bool { - self.store.get(height, &path.clone().into()).is_some() - } -} - -#[cfg(test)] -mod tests { - use std::{collections::HashSet, convert::TryFrom}; - - use lazy_static::lazy_static; - use proptest::prelude::*; - use rand::{distributions::Standard, seq::SliceRandom}; - - use super::{Identifier, Path}; - - const ALLOWED_CHARS: &[u8] = b"ABCDEFGHIJKLMNOPQRSTUVWXYZ\ - abcdefghijklmnopqrstuvwxyz\ - ._+-#[]<>"; - - lazy_static! 
{ - static ref VALID_CHARS: HashSet = { - ALLOWED_CHARS - .iter() - .map(|c| char::from(*c)) - .collect::>() - }; - } - - fn gen_valid_identifier(len: usize) -> String { - let mut rng = rand::thread_rng(); - - (0..=len) - .map(|_| { - let idx = rng.gen_range(0..ALLOWED_CHARS.len()); - ALLOWED_CHARS[idx] as char - }) - .collect::() - } - - fn gen_invalid_identifier(len: usize) -> String { - let mut rng = rand::thread_rng(); - - (0..=len) - .map(|_| loop { - let c = rng.sample::(Standard); - - if c.is_ascii() && !VALID_CHARS.contains(&c) { - return c; - } - }) - .collect::() - } - - proptest! { - #[test] - fn validate_method_doesnt_crash(s in "\\PC*") { - let _ = Identifier::validate(s); - } - - #[test] - fn valid_identifier_is_ok(l in 1usize..=10) { - let id = gen_valid_identifier(l); - let validated = Identifier::validate(id); - - assert!(validated.is_ok()) - } - - #[test] - #[ignore] - fn invalid_identifier_errors(l in 1usize..=10) { - let id = gen_invalid_identifier(l); - let validated = Identifier::validate(id); - - assert!(validated.is_err()) - } - - #[test] - fn path_with_valid_parts_is_valid(n_parts in 1usize..=10) { - let mut rng = rand::thread_rng(); - - let parts = (0..n_parts) - .map(|_| { - let len = rng.gen_range(1usize..=10); - gen_valid_identifier(len) - }) - .collect::>(); - - let path = parts.join("/"); - - assert!(Path::try_from(path).is_ok()); - } - - #[test] - #[ignore] - fn path_with_invalid_parts_is_invalid(n_parts in 1usize..=10) { - let mut rng = rand::thread_rng(); - let n_invalid_parts = rng.gen_range(1usize..=n_parts); - let n_valid_parts = n_parts - n_invalid_parts; - - let mut parts = (0..n_invalid_parts) - .map(|_| { - let len = rng.gen_range(1usize..=10); - gen_invalid_identifier(len) - }) - .collect::>(); - - let mut valid_parts = (0..n_valid_parts) - .map(|_| { - let len = rng.gen_range(1usize..=10); - gen_valid_identifier(len) - }) - .collect::>(); - - parts.append(&mut valid_parts); - parts.shuffle(&mut rng); - - let path = 
parts.join("/"); - - assert!(Path::try_from(path).is_err()); - } - } -} diff --git a/src/main.rs b/src/bin/basecoin/main.rs similarity index 76% rename from src/main.rs rename to src/bin/basecoin/main.rs index cf951061..0f179239 100644 --- a/src/main.rs +++ b/src/bin/basecoin/main.rs @@ -1,4 +1,7 @@ -mod app; +//! Main entry point for Cli + +#![deny(warnings, missing_docs, trivial_casts, unused_qualifications)] +#![forbid(unsafe_code)] use ibc::{ applications::transfer::MODULE_ID_STR as IBC_TRANSFER_MODULE_ID, @@ -10,44 +13,17 @@ use ibc_proto::cosmos::{ }; use structopt::StructOpt; use tendermint_abci::ServerBuilder; +use tendermint_basecoin::{ + app::Builder, + cli::option::Opt, + modules::{prefix, Identifiable, Module}, + modules::{Auth, Bank, Ibc, IbcTransferModule, Staking}, + store::memory::InMemoryStore, +}; use tokio::runtime::Runtime; use tonic::transport::Server; use tracing_subscriber::filter::LevelFilter; -use crate::app::{ - modules::{prefix, Auth, Bank, Ibc, IbcTransferModule, Identifiable, Module, Staking}, - store::InMemoryStore, - Builder, -}; - -#[derive(Debug, StructOpt)] -struct Opt { - /// Bind the TCP server to this host. - #[structopt(short, long, default_value = "127.0.0.1")] - host: String, - - /// Bind the TCP server to this port. - #[structopt(short, long, default_value = "26358")] - port: u16, - - /// Bind the gRPC server to this port. - #[structopt(short, long, default_value = "9093")] - grpc_port: u16, - - /// The default server read buffer size, in bytes, for each incoming client - /// connection. - #[structopt(short, long, default_value = "1048576")] - read_buf_size: usize, - - /// Increase output logging verbosity to DEBUG level. - #[structopt(short, long)] - verbose: bool, - - /// Suppress all output logging (overrides --verbose). 
- #[structopt(short, long)] - quiet: bool, -} - fn main() { let opt: Opt = Opt::from_args(); let log_level = if opt.quiet { @@ -65,15 +41,11 @@ fn main() { // instantiate modules and setup inter-module communication (if required) let auth = Auth::new(app_builder.module_store(&prefix::Auth {}.identifier())); - let auth_service = auth.service(); - let bank = Bank::new( app_builder.module_store(&prefix::Bank {}.identifier()), auth.account_reader().clone(), auth.account_keeper().clone(), ); - let bank_service = bank.service(); - let staking = Staking::new(app_builder.module_store(&prefix::Staking {}.identifier())); let ibc = { @@ -87,6 +59,11 @@ fn main() { ibc }; + + // instantiate gRPC services for each module + let auth_service = auth.service(); + let bank_service = bank.service(); + let staking_service = staking.service(); let ibc_client_service = ibc.client_service(); let ibc_conn_service = ibc.connection_service(); let ibc_channel_service = ibc.channel_service(); @@ -115,7 +92,7 @@ fn main() { .add_service(ibc_channel_service) .add_service(auth_service) .add_service(bank_service) - .add_service(staking.service()) + .add_service(staking_service) .serve(format!("{}:{}", opt.host, opt.grpc_port).parse().unwrap()); Runtime::new() .unwrap() diff --git a/src/cli/mod.rs b/src/cli/mod.rs new file mode 100644 index 00000000..ba0e3833 --- /dev/null +++ b/src/cli/mod.rs @@ -0,0 +1 @@ +pub mod option; diff --git a/src/cli/option.rs b/src/cli/option.rs new file mode 100644 index 00000000..62527bb0 --- /dev/null +++ b/src/cli/option.rs @@ -0,0 +1,29 @@ +use structopt::StructOpt; + +#[derive(Debug, StructOpt)] +pub struct Opt { + /// Bind the TCP server to this host. + #[structopt(short, long, default_value = "127.0.0.1")] + pub host: String, + + /// Bind the TCP server to this port. + #[structopt(short, long, default_value = "26358")] + pub port: u16, + + /// Bind the gRPC server to this port. 
+ #[structopt(short, long, default_value = "9093")] + pub grpc_port: u16, + + /// The default server read buffer size, in bytes, for each incoming client + /// connection. + #[structopt(short, long, default_value = "1048576")] + pub read_buf_size: usize, + + /// Increase output logging verbosity to DEBUG level. + #[structopt(short, long)] + pub verbose: bool, + + /// Suppress all output logging (overrides --verbose). + #[structopt(short, long)] + pub quiet: bool, +} diff --git a/src/error.rs b/src/error.rs new file mode 100644 index 00000000..ca92671e --- /dev/null +++ b/src/error.rs @@ -0,0 +1,25 @@ +use super::helper::error::Error as HelperError; +use super::modules::bank::error::Error as BankError; +use super::modules::ibc::error::Error as IbcError; +use displaydoc::Display; +use ibc::core::ContextError; + +#[derive(Debug, Display)] +pub enum Error { + /// no module could handle specified message + NotHandled, + /// custom error: `{reason}` + Custom { reason: String }, + /// helper error + Helper(HelperError), + /// bank module error + Bank(BankError), + /// IBC module error + Ibc(IbcError), +} + +impl From for Error { + fn from(error: ContextError) -> Self { + Self::Ibc(error.into()) + } +} diff --git a/src/helper/adapts.rs b/src/helper/adapts.rs new file mode 100644 index 00000000..39f4afd1 --- /dev/null +++ b/src/helper/adapts.rs @@ -0,0 +1,276 @@ +use super::error::Error; +use crate::store::avl::{AsBytes, ByteSlice}; +use ibc::core::ics24_host::{ + path::{Path as IbcPath, PathError}, + validate::validate_identifier, +}; +use std::{ + convert::{TryFrom, TryInto}, + fmt::{Debug, Display, Formatter}, + ops::Deref, + str::from_utf8, + str::FromStr, +}; +use tendermint_proto::crypto::ProofOp; + +/// A new type representing a valid ICS024 identifier. +/// Implements `Deref`. +#[derive(Debug, Ord, PartialOrd, Eq, PartialEq, Clone)] +pub struct Identifier(String); + +impl Identifier { + /// Identifiers MUST be non-empty (of positive integer length). 
+ /// Identifiers MUST consist of characters in one of the following categories only: + /// * Alphanumeric + /// * `.`, `_`, `+`, `-`, `#` + /// * `[`, `]`, `<`, `>` + fn validate(s: impl AsRef) -> Result<(), Error> { + let s = s.as_ref(); + + // give a `min` parameter of 0 here to allow id's of arbitrary + // length as inputs; `validate_identifier` itself checks for + // empty inputs and returns an error as appropriate + validate_identifier(s, 0, s.len()).map_err(|v| Error::InvalidIdentifier { + identifier: s.to_string(), + error: v, + }) + } +} + +impl Deref for Identifier { + type Target = String; + + fn deref(&self) -> &Self::Target { + &self.0 + } +} + +impl TryFrom for Identifier { + type Error = Error; + + fn try_from(s: String) -> Result { + Identifier::validate(&s).map(|_| Self(s)) + } +} + +impl Display for Identifier { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + write!(f, "{}", self.0) + } +} + +/// A new type representing a valid ICS024 `Path`. +#[derive(Debug, Ord, PartialOrd, Eq, PartialEq, Clone)] + +pub struct Path(Vec); + +impl Path { + pub fn get(&self, index: usize) -> Option<&Identifier> { + self.0.get(index) + } +} + +impl TryFrom for Path { + type Error = Error; + + fn try_from(s: String) -> Result { + let mut identifiers = vec![]; + let parts = s.split('/'); // split will never return an empty iterator + for part in parts { + identifiers.push(Identifier::try_from(part.to_owned())?); + } + Ok(Self(identifiers)) + } +} + +impl TryFrom<&[u8]> for Path { + type Error = Error; + + fn try_from(value: &[u8]) -> Result { + let s = from_utf8(value).map_err(|e| Error::MalformedPathString { error: e })?; + s.to_owned().try_into() + } +} + +impl From for Path { + fn from(id: Identifier) -> Self { + Self(vec![id]) + } +} + +impl Display for Path { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + write!( + f, + "{}", + self.0 + .iter() + .map(|iden| iden.as_str().to_owned()) + .collect::>() + .join("/") + ) + } +} + +impl 
TryFrom for IbcPath { + type Error = PathError; + + fn try_from(path: Path) -> Result { + Self::from_str(path.to_string().as_str()) + } +} + +impl From for Path { + fn from(ibc_path: IbcPath) -> Self { + Self::try_from(ibc_path.to_string()).unwrap() // safety - `IbcPath`s are correct-by-construction + } +} + +impl AsBytes for Path { + fn as_bytes(&self) -> ByteSlice<'_> { + ByteSlice::Vector(self.to_string().into_bytes()) + } +} + +/// Block height +pub type RawHeight = u64; + +/// Store height to query +#[derive(Debug, Copy, Clone)] +pub enum Height { + Pending, + Latest, + Stable(RawHeight), // or equivalently `tendermint::block::Height` +} + +impl From for Height { + fn from(value: u64) -> Self { + match value { + 0 => Height::Latest, // see https://docs.tendermint.com/master/spec/abci/abci.html#query + _ => Height::Stable(value), + } + } +} + +pub struct QueryResult { + pub data: Vec, + pub proof: Option>, +} + +#[cfg(test)] +mod tests { + use std::{collections::HashSet, convert::TryFrom}; + + use lazy_static::lazy_static; + use proptest::prelude::*; + use rand::{distributions::Standard, seq::SliceRandom}; + + use super::{Identifier, Path}; + + const ALLOWED_CHARS: &[u8] = b"ABCDEFGHIJKLMNOPQRSTUVWXYZ\ + abcdefghijklmnopqrstuvwxyz\ + ._+-#[]<>"; + + lazy_static! { + static ref VALID_CHARS: HashSet = { + ALLOWED_CHARS + .iter() + .map(|c| char::from(*c)) + .collect::>() + }; + } + + fn gen_valid_identifier(len: usize) -> String { + let mut rng = rand::thread_rng(); + + (0..=len) + .map(|_| { + let idx = rng.gen_range(0..ALLOWED_CHARS.len()); + ALLOWED_CHARS[idx] as char + }) + .collect::() + } + + fn gen_invalid_identifier(len: usize) -> String { + let mut rng = rand::thread_rng(); + + (0..=len) + .map(|_| loop { + let c = rng.sample::(Standard); + + if c.is_ascii() && !VALID_CHARS.contains(&c) { + return c; + } + }) + .collect::() + } + + proptest! 
{ + #[test] + fn validate_method_doesnt_crash(s in "\\PC*") { + let _ = Identifier::validate(s); + } + + #[test] + fn valid_identifier_is_ok(l in 1usize..=10) { + let id = gen_valid_identifier(l); + let validated = Identifier::validate(id); + + assert!(validated.is_ok()) + } + + #[test] + #[ignore] + fn invalid_identifier_errors(l in 1usize..=10) { + let id = gen_invalid_identifier(l); + let validated = Identifier::validate(id); + + assert!(validated.is_err()) + } + + #[test] + fn path_with_valid_parts_is_valid(n_parts in 1usize..=10) { + let mut rng = rand::thread_rng(); + + let parts = (0..n_parts) + .map(|_| { + let len = rng.gen_range(1usize..=10); + gen_valid_identifier(len) + }) + .collect::>(); + + let path = parts.join("/"); + + assert!(Path::try_from(path).is_ok()); + } + + #[test] + #[ignore] + fn path_with_invalid_parts_is_invalid(n_parts in 1usize..=10) { + let mut rng = rand::thread_rng(); + let n_invalid_parts = rng.gen_range(1usize..=n_parts); + let n_valid_parts = n_parts - n_invalid_parts; + + let mut parts = (0..n_invalid_parts) + .map(|_| { + let len = rng.gen_range(1usize..=10); + gen_invalid_identifier(len) + }) + .collect::>(); + + let mut valid_parts = (0..n_valid_parts) + .map(|_| { + let len = rng.gen_range(1usize..=10); + gen_valid_identifier(len) + }) + .collect::>(); + + parts.append(&mut valid_parts); + parts.shuffle(&mut rng); + + let path = parts.join("/"); + + assert!(Path::try_from(path).is_err()); + } + } +} diff --git a/src/helper/error.rs b/src/helper/error.rs new file mode 100644 index 00000000..a36d987b --- /dev/null +++ b/src/helper/error.rs @@ -0,0 +1,21 @@ +use crate::error::Error as AppError; +use displaydoc::Display; +use ibc::core::ics24_host::error::ValidationError; +use std::str::Utf8Error; + +#[derive(Debug, Display)] +pub enum Error { + /// '{identifier}' is not a valid identifier: `{error}` + InvalidIdentifier { + identifier: String, + error: ValidationError, + }, + /// path isn't a valid string: `{error}` + 
MalformedPathString { error: Utf8Error }, +} + +impl From for AppError { + fn from(e: Error) -> Self { + AppError::Helper(e) + } +} diff --git a/src/helper/macros.rs b/src/helper/macros.rs new file mode 100644 index 00000000..56484ba0 --- /dev/null +++ b/src/helper/macros.rs @@ -0,0 +1,52 @@ +use super::Path; +use ibc::core::ics24_host::path::{ + AckPath, ChannelEndPath, ClientConnectionPath, ClientConsensusStatePath, ClientStatePath, + ClientTypePath, CommitmentPath, ConnectionPath, ReceiptPath, SeqAckPath, SeqRecvPath, + SeqSendPath, +}; +use tendermint_proto::abci::{ResponseCheckTx, ResponseDeliverTx, ResponseQuery}; +pub(crate) trait ResponseFromErrorExt { + fn from_error(code: u32, log: impl ToString) -> Self; +} + +macro_rules! impl_response_error_for { + ($($resp:ty),+) => { + $(impl ResponseFromErrorExt for $resp { + fn from_error(code: u32, log: impl ToString) -> Self { + let log = log.to_string(); + Self { + code, + log, + ..Self::default() + } + } + })+ + }; +} + +impl_response_error_for!(ResponseQuery, ResponseCheckTx, ResponseDeliverTx); + +macro_rules! impl_into_path_for { + ($($path:ty),+) => { + $(impl From<$path> for Path { + fn from(ibc_path: $path) -> Self { + Self::try_from(ibc_path.to_string()).unwrap() // safety - `IbcPath`s are correct-by-construction + } + })+ + }; +} + +impl_into_path_for!( + ClientTypePath, + ClientStatePath, + ClientConsensusStatePath, + ConnectionPath, + ClientConnectionPath, + ChannelEndPath, + SeqSendPath, + SeqRecvPath, + SeqAckPath, + CommitmentPath, + ReceiptPath, + AckPath +); diff --git a/src/helper/mod.rs b/src/helper/mod.rs new file mode 100644 index 00000000..f3ac940b --- /dev/null +++ b/src/helper/mod.rs @@ -0,0 +1,5 @@ +pub mod adapts; +pub mod error; +pub mod macros; + +pub use adapts::{Height, Identifier, Path, QueryResult, RawHeight}; diff --git a/src/lib.rs b/src/lib.rs new file mode 100644 index 00000000..575a8084 --- /dev/null +++ b/src/lib.rs @@ -0,0 +1,15 @@ +//! The basecoin ABCI application. 
+#![deny( + warnings, + trivial_numeric_casts, + unused_import_braces, + unused_qualifications, + rust_2018_idioms +)] +#![forbid(unsafe_code)] +pub mod app; +pub mod cli; +pub mod error; +mod helper; +pub mod modules; +pub mod store; diff --git a/src/modules/auth/account.rs b/src/modules/auth/account.rs new file mode 100644 index 00000000..f14936ba --- /dev/null +++ b/src/modules/auth/account.rs @@ -0,0 +1,94 @@ +use std::convert::{TryFrom, TryInto}; + +use crate::helper::Path; +use cosmrs::AccountId; +use ibc_proto::{cosmos::auth::v1beta1::BaseAccount, google::protobuf::Any}; +use prost::Message; + +use super::context::Account; + +/// Address of the account that the relayer uses to sign basecoin transactions. +/// This is hardcoded as we don't verify signatures currently. +pub const RELAYER_ACCOUNT: &str = "cosmos12xpmzmfpf7tn57xg93rne2hc2q26lcfql5efws"; +pub const ACCOUNT_PREFIX: &str = "cosmos"; + +#[derive(Clone)] +pub struct AccountsPath(pub AccountId); + +impl From for Path { + fn from(path: AccountsPath) -> Self { + format!("accounts/{}", path.0).try_into().unwrap() // safety - cannot fail as AccountsPath is correct-by-construction + } +} + +#[derive(Clone)] +pub struct AuthAccount { + address: AccountId, + number: u64, + pub sequence: u64, +} + +impl AuthAccount { + pub fn new(address: AccountId) -> Self { + Self { + address, + number: 0, + sequence: 0, + } + } +} + +impl Account for AuthAccount { + type Address = AccountId; + type PubKey = Vec; + + fn address(&self) -> &Self::Address { + &self.address + } + + fn pub_key(&self) -> &Self::PubKey { + unimplemented!() + } + + fn sequence(&self) -> u64 { + self.sequence + } +} + +impl ibc_proto::protobuf::Protobuf for AuthAccount {} + +impl TryFrom for AuthAccount { + type Error = String; + + fn try_from(account: BaseAccount) -> Result { + Ok(AuthAccount { + address: account + .address + .parse() + .map_err(|_| "Failed to parse address".to_string())?, + number: account.account_number, + sequence: 
account.sequence, + }) + } +} + +impl From for BaseAccount { + fn from(account: AuthAccount) -> Self { + BaseAccount { + address: account.address.to_string(), + pub_key: None, + account_number: account.number, + sequence: account.sequence, + } + } +} + +impl From for Any { + fn from(account: AuthAccount) -> Self { + let account = BaseAccount::from(account); + Any { + type_url: "/cosmos.auth.v1beta1.BaseAccount".to_string(), + value: account.encode_to_vec(), + } + } +} diff --git a/src/modules/auth/context.rs b/src/modules/auth/context.rs new file mode 100644 index 00000000..64fcfcd1 --- /dev/null +++ b/src/modules/auth/context.rs @@ -0,0 +1,32 @@ +pub trait Account { + /// Account address type + type Address; + /// Account public key type + type PubKey; + + /// Returns the account's address. + fn address(&self) -> &Self::Address; + + /// Returns the account's public key. + fn pub_key(&self) -> &Self::PubKey; + + /// Returns the account's sequence. (used for replay protection) + fn sequence(&self) -> u64; +} + +pub trait AccountReader { + type Error; + type Address; + type Account: Account; + + fn get_account(&self, address: Self::Address) -> Result; +} + +pub trait AccountKeeper { + type Error; + type Account: Account; + + fn set_account(&mut self, account: Self::Account) -> Result<(), Self::Error>; + + fn remove_account(&mut self, account: Self::Account) -> Result<(), Self::Error>; +} diff --git a/src/modules/auth/impls.rs b/src/modules/auth/impls.rs new file mode 100644 index 00000000..d81965b1 --- /dev/null +++ b/src/modules/auth/impls.rs @@ -0,0 +1,144 @@ +use std::{collections::HashMap, str::FromStr}; + +use crate::modules::auth::account::AuthAccount; +use crate::modules::bank::util::Denom; +use crate::{ + error::Error as AppError, + helper::Height, + modules::Module, + store::{ProtobufStore, ProvableStore, SharedStore, Store, TypedStore}, +}; +use cosmrs::AccountId; +use ibc_proto::{ + cosmos::auth::v1beta1::{query_server::QueryServer, BaseAccount}, + 
google::protobuf::Any, +}; +use serde_json::Value; +use tendermint_proto::abci::Event; +use tracing::{debug, trace}; + +use super::account::AccountsPath; +use super::{ + context::{Account, AccountKeeper, AccountReader}, + service::AuthService, +}; + +#[derive(Clone)] +pub struct Auth { + store: SharedStore, + account_reader: AuthAccountReader, + account_keeper: AuthAccountKeeper, +} + +impl Auth { + pub fn new(store: SharedStore) -> Self { + Self { + store: store.clone(), + account_reader: AuthAccountReader { + account_store: TypedStore::new(store.clone()), + }, + account_keeper: AuthAccountKeeper { + account_store: TypedStore::new(store), + }, + } + } + + pub fn service(&self) -> QueryServer> { + QueryServer::new(AuthService { + account_reader: self.account_reader().clone(), + }) + } + + pub fn account_reader(&self) -> &AuthAccountReader { + &self.account_reader + } + + pub fn account_keeper(&self) -> &AuthAccountKeeper { + &self.account_keeper + } +} + +impl Module for Auth { + type Store = S; + + fn init(&mut self, app_state: Value) { + debug!("Initializing auth module"); + // safety - we panic on errors to prevent chain creation with invalid genesis config + let accounts: HashMap> = + serde_json::from_value(app_state).unwrap(); + for (account, _) in accounts { + trace!("Adding account: {}", account); + + let account_id = AccountId::from_str(&account).unwrap(); + self.account_keeper + .set_account(AuthAccount::new(account_id.clone())) + .map_err(|_| "Failed to create account") + .unwrap(); + } + } + + fn deliver(&mut self, _message: Any, signer: &AccountId) -> Result, AppError> { + let mut account = self + .account_reader + .get_account(signer.clone()) + .map_err(|_| AppError::Custom { + reason: "unknown signer".to_string(), + })?; + account.sequence += 1; + + self.account_keeper + .set_account(account) + .map_err(|_| AppError::Custom { + reason: "failed to increment signer sequence".to_string(), + })?; + + // we're only intercepting the deliverTx here, so 
return unhandled. + Err(AppError::NotHandled) + } + + fn store_mut(&mut self) -> &mut SharedStore { + &mut self.store + } + + fn store(&self) -> &SharedStore { + &self.store + } +} + +#[derive(Clone)] +pub struct AuthAccountReader { + account_store: ProtobufStore, AccountsPath, AuthAccount, BaseAccount>, +} + +impl AccountReader for AuthAccountReader { + type Error = (); + type Address = AccountId; + type Account = AuthAccount; + + fn get_account(&self, address: Self::Address) -> Result { + self.account_store + .get(Height::Pending, &AccountsPath(address)) + .ok_or(()) + } +} + +#[derive(Clone)] +pub struct AuthAccountKeeper { + account_store: ProtobufStore, AccountsPath, AuthAccount, BaseAccount>, +} + +impl AccountKeeper for AuthAccountKeeper { + type Error = (); + type Account = AuthAccount; + + fn set_account(&mut self, account: Self::Account) -> Result<(), Self::Error> { + self.account_store + .set(AccountsPath(account.address().clone()), account) + .map(|_| ()) + .map_err(|_| ()) + } + + fn remove_account(&mut self, _account: Self::Account) -> Result<(), Self::Error> { + unimplemented!() + } +} diff --git a/src/modules/auth/mod.rs b/src/modules/auth/mod.rs new file mode 100644 index 00000000..f95b042d --- /dev/null +++ b/src/modules/auth/mod.rs @@ -0,0 +1,4 @@ +pub(crate) mod account; +pub mod context; +pub(crate) mod impls; +pub mod service; diff --git a/src/modules/auth/service.rs b/src/modules/auth/service.rs new file mode 100644 index 00000000..de67842b --- /dev/null +++ b/src/modules/auth/service.rs @@ -0,0 +1,94 @@ +use crate::{ + modules::auth::{account::RELAYER_ACCOUNT, context::AccountReader}, + store::ProvableStore, +}; +use ibc_proto::cosmos::auth::v1beta1::{ + query_server::Query, AddressBytesToStringRequest, AddressBytesToStringResponse, + AddressStringToBytesRequest, AddressStringToBytesResponse, Bech32PrefixRequest, + Bech32PrefixResponse, QueryAccountAddressByIdRequest, QueryAccountAddressByIdResponse, + QueryAccountRequest, 
QueryAccountResponse, QueryAccountsRequest, QueryAccountsResponse, + QueryModuleAccountByNameRequest, QueryModuleAccountByNameResponse, QueryModuleAccountsRequest, + QueryModuleAccountsResponse, QueryParamsRequest, QueryParamsResponse, +}; + +use tonic::{Request, Response, Status}; +use tracing::debug; + +use super::impls::AuthAccountReader; + +pub struct AuthService { + pub account_reader: AuthAccountReader, +} + +#[tonic::async_trait] +impl Query for AuthService { + async fn accounts( + &self, + _request: Request, + ) -> Result, Status> { + unimplemented!() + } + + async fn account( + &self, + _request: Request, + ) -> Result, Status> { + debug!("Got auth account request"); + + let account_id = RELAYER_ACCOUNT.parse().unwrap(); + let account = self.account_reader.get_account(account_id).unwrap(); + + Ok(Response::new(QueryAccountResponse { + account: Some(account.into()), + })) + } + + async fn params( + &self, + _request: Request, + ) -> Result, Status> { + unimplemented!() + } + + async fn account_address_by_id( + &self, + _request: Request, + ) -> Result, Status> { + unimplemented!() + } + + async fn module_accounts( + &self, + _request: Request, + ) -> Result, Status> { + unimplemented!() + } + + async fn module_account_by_name( + &self, + _request: Request, + ) -> Result, Status> { + unimplemented!() + } + + async fn bech32_prefix( + &self, + _request: Request, + ) -> Result, Status> { + unimplemented!() + } + + async fn address_bytes_to_string( + &self, + _request: Request, + ) -> Result, Status> { + unimplemented!() + } + + async fn address_string_to_bytes( + &self, + _request: Request, + ) -> Result, Status> { + unimplemented!() + } +} diff --git a/src/modules/bank/context.rs b/src/modules/bank/context.rs new file mode 100644 index 00000000..e838d6a9 --- /dev/null +++ b/src/modules/bank/context.rs @@ -0,0 +1,45 @@ +use std::{fmt::Debug, str::FromStr}; + +use crate::helper::Height; + +pub trait BankReader { + type Address; + type Denom; + type Coin; + type 
Coins: IntoIterator; + + fn get_all_balances_at_height(&self, height: Height, address: Self::Address) -> Self::Coins; + + fn get_all_balances(&self, address: Self::Address) -> Self::Coins { + self.get_all_balances_at_height(Height::Pending, address) + } +} + +pub trait BankKeeper { + type Error: Debug; + type Address: FromStr; + type Denom; + type Coin; + + /// This function should enable sending ibc fungible tokens from one account to another + fn send_coins( + &mut self, + from: Self::Address, + to: Self::Address, + amount: impl IntoIterator, + ) -> Result<(), Self::Error>; + + /// This function to enable minting ibc tokens to a user account + fn mint_coins( + &mut self, + account: Self::Address, + amount: impl IntoIterator, + ) -> Result<(), Self::Error>; + + /// This function should enable burning of minted tokens in a user account + fn burn_coins( + &mut self, + account: Self::Address, + amount: impl IntoIterator, + ) -> Result<(), Self::Error>; +} diff --git a/src/modules/bank/error.rs b/src/modules/bank/error.rs new file mode 100644 index 00000000..cfcf2a28 --- /dev/null +++ b/src/modules/bank/error.rs @@ -0,0 +1,26 @@ +use cosmrs::AccountId; + +pub use crate::error::Error as AppError; +pub use displaydoc::Display; + +#[derive(Debug, Display)] +pub enum Error { + /// failed to decode message + MsgDecodeFailure, + /// failed to validate message: `{reason}` + MsgValidationFailure { reason: String }, + /// account `{account}` doesn't exist + NonExistentAccount { account: AccountId }, + /// insufficient funds in sender account + InsufficientSourceFunds, + /// receiver account funds overflow + DestFundOverflow, + /// Store error: `{reason}` + Store { reason: String }, +} + +impl From for AppError { + fn from(e: Error) -> Self { + AppError::Bank(e) + } +} diff --git a/src/app/modules/bank.rs b/src/modules/bank/impls.rs similarity index 57% rename from src/app/modules/bank.rs rename to src/modules/bank/impls.rs index 1aeff8bb..34c23e74 100644 --- 
a/src/app/modules/bank.rs +++ b/src/modules/bank/impls.rs @@ -1,150 +1,28 @@ -use cosmrs::{bank::MsgSend, proto, AccountId, Coin as MsgCoin}; -use displaydoc::Display; -use ibc_proto::{ - cosmos::{ - bank::v1beta1::{ - query_server::{Query, QueryServer}, - QueryAllBalancesRequest, QueryAllBalancesResponse, QueryBalanceRequest, - QueryBalanceResponse, QueryDenomMetadataRequest, QueryDenomMetadataResponse, - QueryDenomOwnersRequest, QueryDenomOwnersResponse, QueryDenomsMetadataRequest, - QueryDenomsMetadataResponse, QueryParamsRequest, QueryParamsResponse, - QuerySpendableBalancesRequest, QuerySpendableBalancesResponse, QuerySupplyOfRequest, - QuerySupplyOfResponse, QueryTotalSupplyRequest, QueryTotalSupplyResponse, - }, - base::v1beta1::Coin as RawCoin, - }, - google::protobuf::Any, -}; +use super::context::{BankKeeper, BankReader}; +use super::error::Error; +use super::service::BankService; +use super::util::{Balances, BalancesPath, Coin, Denom}; +use cosmrs::{bank::MsgSend, proto, AccountId}; +use ibc_proto::{cosmos::bank::v1beta1::query_server::QueryServer, google::protobuf::Any}; use primitive_types::U256; use prost::Message; -use serde::{Deserialize, Serialize}; use std::{collections::HashMap, convert::TryInto, fmt::Debug, str::FromStr}; use tendermint_proto::abci::Event; -use tonic::{Request, Response, Status}; use tracing::{debug, trace}; -use crate::app::{ - modules::{ - auth::{AccountKeeper, AccountReader, AuthAccount, ACCOUNT_PREFIX}, - Error as ModuleError, Module, QueryResult, - }, +use crate::modules::Module; +use crate::{ + error::Error as AppError, + helper::{Height, Path, QueryResult}, + modules::auth::account::{AuthAccount, ACCOUNT_PREFIX}, + modules::auth::context::{AccountKeeper, AccountReader}, store::{ - Codec, Height, JsonCodec, JsonStore, Path, ProvableStore, SharedStore, Store, TypedStore, + SharedStore, + {codec::JsonCodec, Codec}, + {JsonStore, TypedStore}, {ProvableStore, Store}, }, }; -#[derive(Debug, Display)] -pub enum Error { - /// 
failed to decode message - MsgDecodeFailure, - /// failed to validate message: `{reason}` - MsgValidationFailure { reason: String }, - /// account `{account}` doesn't exist - NonExistentAccount { account: AccountId }, - /// insufficient funds in sender account - InsufficientSourceFunds, - /// receiver account funds overflow - DestFundOverflow, - /// Store error: `{reason}` - Store { reason: String }, -} - -impl From for ModuleError { - fn from(e: Error) -> Self { - ModuleError::Bank(e) - } -} - -#[derive(Serialize, Deserialize, Debug, Default, PartialEq, Clone, Hash, Eq)] -#[serde(transparent)] -pub struct Denom(pub String); - -#[derive(Clone, Serialize, Deserialize, Debug, Default)] -pub struct Coin { - pub denom: Denom, - pub amount: U256, -} - -impl Coin { - fn new_empty(denom: Denom) -> Self { - Self { - denom, - amount: 0u64.into(), - } - } -} - -impl From<(Denom, U256)> for Coin { - fn from((denom, amount): (Denom, U256)) -> Self { - Self { denom, amount } - } -} - -impl From<&MsgCoin> for Coin { - fn from(coin: &MsgCoin) -> Self { - Self { - denom: Denom(coin.denom.to_string()), - amount: coin.amount.to_string().parse().unwrap(), - } - } -} - -/// A mapping of currency denomination identifiers to balances. 
-#[derive(Clone, Serialize, Deserialize, Debug, Default)] -#[serde(transparent)] -pub struct Balances(Vec); - -#[derive(Clone, Debug)] -struct BalancesPath(AccountId); - -impl From for Path { - fn from(path: BalancesPath) -> Self { - format!("balances/{}", path.0).try_into().unwrap() // safety - cannot fail as AccountsPath is correct-by-construction - } -} - -pub trait BankReader { - type Address; - type Denom; - type Coin; - type Coins: IntoIterator; - - fn get_all_balances_at_height(&self, height: Height, address: Self::Address) -> Self::Coins; - - fn get_all_balances(&self, address: Self::Address) -> Self::Coins { - self.get_all_balances_at_height(Height::Pending, address) - } -} - -pub trait BankKeeper { - type Error: Debug; - type Address: FromStr; - type Denom; - type Coin; - - /// This function should enable sending ibc fungible tokens from one account to another - fn send_coins( - &mut self, - from: Self::Address, - to: Self::Address, - amount: impl IntoIterator, - ) -> Result<(), Self::Error>; - - /// This function to enable minting ibc tokens to a user account - fn mint_coins( - &mut self, - account: Self::Address, - amount: impl IntoIterator, - ) -> Result<(), Self::Error>; - - /// This function should enable burning of minted tokens in a user account - fn burn_coins( - &mut self, - account: Self::Address, - amount: impl IntoIterator, - ) -> Result<(), Self::Error>; -} - #[derive(Clone)] pub struct BankBalanceReader { balance_store: JsonStore, BalancesPath, Balances>, @@ -349,9 +227,9 @@ impl } impl Bank { - fn decode(message: Any) -> Result { + fn decode(message: Any) -> Result { if message.type_url != "/cosmos.bank.v1beta1.MsgSend" { - return Err(ModuleError::NotHandled); + return Err(AppError::NotHandled); } Message::decode(message.value.as_ref()).map_err(|_| Error::MsgDecodeFailure.into()) } @@ -360,12 +238,12 @@ impl Bank { impl Module for Bank where - ::Address: From, + ::Address: From, ::Account: From, { type Store = S; - fn deliver(&mut self, 
message: Any, _signer: &AccountId) -> Result, ModuleError> { + fn deliver(&mut self, message: Any, _signer: &AccountId) -> Result, AppError> { let message: MsgSend = Self::decode::(message)? .try_into() .map_err(|e| Error::MsgValidationFailure { @@ -414,13 +292,13 @@ where _path: Option<&Path>, height: Height, _prove: bool, - ) -> Result { + ) -> Result { let account_id = match String::from_utf8(data.to_vec()) { Ok(s) if s.starts_with(ACCOUNT_PREFIX) => s, // TODO(hu55a1n1): check if valid identifier - _ => return Err(ModuleError::NotHandled), + _ => return Err(AppError::NotHandled), }; - let account_id = AccountId::from_str(&account_id).map_err(|_| ModuleError::NotHandled)?; + let account_id = AccountId::from_str(&account_id).map_err(|_| AppError::NotHandled)?; trace!("Attempting to get account ID: {}", account_id); @@ -448,92 +326,12 @@ where fn store(&self) -> &SharedStore { &self.store } -} - -pub struct BankService { - bank_reader: BankBalanceReader, -} - -#[tonic::async_trait] -impl Query for BankService { - async fn balance( - &self, - request: Request, - ) -> Result, Status> { - debug!("Got bank balance request: {:?}", request); - - let account_id = request - .get_ref() - .address - .parse() - .map_err(|e| Status::invalid_argument(format!("{e}")))?; - let denom = Denom(request.get_ref().denom.clone()); - let balances = self.bank_reader.get_all_balances(account_id); - - Ok(Response::new(QueryBalanceResponse { - balance: balances - .into_iter() - .find(|c| c.denom == denom) - .map(|coin| RawCoin { - denom: coin.denom.0, - amount: coin.amount.to_string(), - }), - })) - } - - async fn all_balances( - &self, - _request: Request, - ) -> Result, Status> { - unimplemented!() - } - - async fn spendable_balances( - &self, - _request: Request, - ) -> Result, Status> { - unimplemented!() - } - - async fn total_supply( - &self, - _request: Request, - ) -> Result, Status> { - unimplemented!() - } - - async fn supply_of( - &self, - _request: Request, - ) -> Result, 
Status> { - unimplemented!() - } - - async fn params( - &self, - _request: Request, - ) -> Result, Status> { - unimplemented!() - } - - async fn denom_metadata( - &self, - _request: Request, - ) -> Result, Status> { - unimplemented!() - } - async fn denoms_metadata( - &self, - _request: Request, - ) -> Result, Status> { - unimplemented!() + fn check(&self, _message: Any) -> Result<(), AppError> { + Ok(()) } - async fn denom_owners( - &self, - _request: Request, - ) -> Result, Status> { - unimplemented!() + fn begin_block(&mut self, _header: &tendermint::block::Header) -> Vec { + vec![] } } diff --git a/src/modules/bank/mod.rs b/src/modules/bank/mod.rs new file mode 100644 index 00000000..3a689e40 --- /dev/null +++ b/src/modules/bank/mod.rs @@ -0,0 +1,5 @@ +pub mod context; +pub mod error; +pub mod impls; +pub mod service; +pub mod util; diff --git a/src/modules/bank/service.rs b/src/modules/bank/service.rs new file mode 100644 index 00000000..7d470d17 --- /dev/null +++ b/src/modules/bank/service.rs @@ -0,0 +1,107 @@ +use ibc_proto::cosmos::{ + bank::v1beta1::{ + query_server::Query, QueryAllBalancesRequest, QueryAllBalancesResponse, + QueryBalanceRequest, QueryBalanceResponse, QueryDenomMetadataRequest, + QueryDenomMetadataResponse, QueryDenomOwnersRequest, QueryDenomOwnersResponse, + QueryDenomsMetadataRequest, QueryDenomsMetadataResponse, QueryParamsRequest, + QueryParamsResponse, QuerySpendableBalancesRequest, QuerySpendableBalancesResponse, + QuerySupplyOfRequest, QuerySupplyOfResponse, QueryTotalSupplyRequest, + QueryTotalSupplyResponse, + }, + base::v1beta1::Coin as RawCoin, +}; +use tonic::{Request, Response, Status}; + +use crate::{modules::bank::util::Denom, store::ProvableStore}; +use tracing::debug; + +use super::context::BankReader; +use super::impls::BankBalanceReader; + +pub struct BankService { + pub bank_reader: BankBalanceReader, +} + +#[tonic::async_trait] +impl Query for BankService { + async fn balance( + &self, + request: Request, + ) -> 
Result, Status> { + debug!("Got bank balance request: {:?}", request); + + let account_id = request + .get_ref() + .address + .parse() + .map_err(|e| Status::invalid_argument(format!("{e}")))?; + let denom = Denom(request.get_ref().denom.clone()); + let balances = self.bank_reader.get_all_balances(account_id); + + Ok(Response::new(QueryBalanceResponse { + balance: balances + .into_iter() + .find(|c| c.denom == denom) + .map(|coin| RawCoin { + denom: coin.denom.0, + amount: coin.amount.to_string(), + }), + })) + } + + async fn all_balances( + &self, + _request: Request, + ) -> Result, Status> { + unimplemented!() + } + + async fn spendable_balances( + &self, + _request: Request, + ) -> Result, Status> { + unimplemented!() + } + + async fn total_supply( + &self, + _request: Request, + ) -> Result, Status> { + unimplemented!() + } + + async fn supply_of( + &self, + _request: Request, + ) -> Result, Status> { + unimplemented!() + } + + async fn params( + &self, + _request: Request, + ) -> Result, Status> { + unimplemented!() + } + + async fn denom_metadata( + &self, + _request: Request, + ) -> Result, Status> { + unimplemented!() + } + + async fn denoms_metadata( + &self, + _request: Request, + ) -> Result, Status> { + unimplemented!() + } + + async fn denom_owners( + &self, + _request: Request, + ) -> Result, Status> { + unimplemented!() + } +} diff --git a/src/modules/bank/util.rs b/src/modules/bank/util.rs new file mode 100644 index 00000000..99993997 --- /dev/null +++ b/src/modules/bank/util.rs @@ -0,0 +1,53 @@ +use cosmrs::{AccountId, Coin as MsgCoin}; +use primitive_types::U256; +use serde::{Deserialize, Serialize}; + +use crate::helper::Path; + +#[derive(Serialize, Deserialize, Debug, Default, PartialEq, Clone, Hash, Eq)] +#[serde(transparent)] +pub struct Denom(pub String); + +#[derive(Clone, Serialize, Deserialize, Debug, Default)] +pub struct Coin { + pub denom: Denom, + pub amount: U256, +} + +impl Coin { + pub fn new_empty(denom: Denom) -> Self { + Self { + 
denom, + amount: 0u64.into(), + } + } +} + +impl From<(Denom, U256)> for Coin { + fn from((denom, amount): (Denom, U256)) -> Self { + Self { denom, amount } + } +} + +impl From<&MsgCoin> for Coin { + fn from(coin: &MsgCoin) -> Self { + Self { + denom: Denom(coin.denom.to_string()), + amount: coin.amount.to_string().parse().unwrap(), + } + } +} + +/// A mapping of currency denomination identifiers to balances. +#[derive(Clone, Serialize, Deserialize, Debug, Default)] +#[serde(transparent)] +pub struct Balances(pub Vec); + +#[derive(Clone, Debug)] +pub(super) struct BalancesPath(pub AccountId); + +impl From for Path { + fn from(path: BalancesPath) -> Self { + format!("balances/{}", path.0).try_into().unwrap() // safety - cannot fail as AccountsPath is correct-by-construction + } +} diff --git a/src/modules/ibc/error.rs b/src/modules/ibc/error.rs new file mode 100644 index 00000000..48d3a2c5 --- /dev/null +++ b/src/modules/ibc/error.rs @@ -0,0 +1,9 @@ +pub use crate::error::Error as AppError; +pub use ibc::core::ics26_routing::error::RouterError; +pub type Error = RouterError; + +impl From for AppError { + fn from(e: Error) -> Self { + AppError::Ibc(e) + } +} diff --git a/src/modules/ibc/impls.rs b/src/modules/ibc/impls.rs new file mode 100644 index 00000000..a95581e4 --- /dev/null +++ b/src/modules/ibc/impls.rs @@ -0,0 +1,907 @@ +use super::{ + router::{IbcModuleWrapper, IbcRouter}, + service::{IbcChannelService, IbcClientService, IbcConnectionService}, +}; +use crate::{ + error::Error as AppError, + helper::{Height, Path, QueryResult}, + modules::{bank::impls::BankBalanceKeeper, IbcTransferModule, Identifiable, Module}, + store::{ + SharedStore, {BinStore, JsonStore, ProtobufStore, TypedSet, TypedStore}, + {ProvableStore, Store}, + }, +}; +use cosmrs::AccountId; +use ibc::{ + applications::transfer::MODULE_ID_STR as IBC_TRANSFER_MODULE_ID, + core::{ics24_host::identifier::PortId, ics26_routing::context::ModuleId}, +}; +use ibc::{ + 
applications::transfer::{msgs::transfer::MsgTransfer, relay::send_transfer::send_transfer}, + clients::ics07_tendermint::{ + client_state::ClientState as TmClientState, + consensus_state::ConsensusState as TmConsensusState, + }, + core::{ + context::{ExecutionContext, Router as ContextRouter, ValidationContext}, + ics02_client::{ + client_state::ClientState, client_type::ClientType, consensus_state::ConsensusState, + error::ClientError, + }, + ics03_connection::{ + connection::ConnectionEnd, + error::ConnectionError, + version::{pick_version, Version as ConnectionVersion}, + }, + ics04_channel::{ + channel::ChannelEnd, + commitment::{AcknowledgementCommitment, PacketCommitment}, + context::calculate_block_delay, + error::{ChannelError, PacketError}, + packet::{Receipt, Sequence}, + }, + ics05_port::error::PortError, + ics23_commitment::commitment::{CommitmentPrefix, CommitmentRoot}, + ics24_host::{ + identifier::{ClientId, ConnectionId}, + path::{ + AckPath, ChannelEndPath, ClientConnectionPath, ClientConsensusStatePath, + ClientStatePath, ClientTypePath, CommitmentPath, ConnectionPath, ReceiptPath, + SeqAckPath, SeqRecvPath, SeqSendPath, + }, + Path as IbcPath, IBC_QUERY_PATH, + }, + ics26_routing::{context::Module as IbcModule, msgs::MsgEnvelope}, + ContextError, + }, + events::IbcEvent, + timestamp::Timestamp, + Height as IbcHeight, +}; +use ibc_proto::{ + google::protobuf::Any, + ibc::core::{ + channel::v1::{query_server::QueryServer as ChannelQueryServer, Channel as RawChannelEnd}, + client::v1::query_server::QueryServer as ClientQueryServer, + connection::v1::{ + query_server::QueryServer as ConnectionQueryServer, ConnectionEnd as RawConnectionEnd, + }, + }, +}; +use prost::Message; +use sha2::Digest; +use std::{ + collections::{BTreeMap, HashMap}, + convert::{TryFrom, TryInto}, + time::Duration, +}; +use tendermint::{abci::Event as TendermintEvent, block::Header}; +use tendermint_proto::{ + abci::{Event, EventAttribute}, + crypto::ProofOp, +}; +use 
tracing::debug; + +use ibc::core::handler::dispatch; + +/// The Ibc module +/// Implements all ibc-rs `Reader`s and `Keeper`s +/// Also implements gRPC endpoints required by `hermes` +#[derive(Clone)] +pub struct Ibc { + /// Handle to store instance. + /// The module is guaranteed exclusive access to all paths in the store key-space. + store: SharedStore, + /// Mapping of which IBC modules own which port + port_to_module_map: BTreeMap, + /// ICS26 router impl + router: IbcRouter, + /// Counter for clients + client_counter: u64, + /// Counter for connections + conn_counter: u64, + /// Counter for channels + channel_counter: u64, + /// Tracks the processed time for client updates + client_processed_times: HashMap<(ClientId, IbcHeight), Timestamp>, + /// Tracks the processed height for client updates + client_processed_heights: HashMap<(ClientId, IbcHeight), IbcHeight>, + /// Map of host consensus states + consensus_states: HashMap, + /// A typed-store for ClientType + client_type_store: JsonStore, ClientTypePath, ClientType>, + /// A typed-store for AnyClientState + client_state_store: ProtobufStore, ClientStatePath, TmClientState, Any>, + /// A typed-store for AnyConsensusState + consensus_state_store: + ProtobufStore, ClientConsensusStatePath, TmConsensusState, Any>, + /// A typed-store for ConnectionEnd + connection_end_store: + ProtobufStore, ConnectionPath, ConnectionEnd, RawConnectionEnd>, + /// A typed-store for ConnectionIds + connection_ids_store: JsonStore, ClientConnectionPath, Vec>, + /// A typed-store for ChannelEnd + channel_end_store: ProtobufStore, ChannelEndPath, ChannelEnd, RawChannelEnd>, + /// A typed-store for send sequences + send_sequence_store: JsonStore, SeqSendPath, Sequence>, + /// A typed-store for receive sequences + recv_sequence_store: JsonStore, SeqRecvPath, Sequence>, + /// A typed-store for ack sequences + ack_sequence_store: JsonStore, SeqAckPath, Sequence>, + /// A typed-store for packet commitments + packet_commitment_store: 
BinStore, CommitmentPath, PacketCommitment>, + /// A typed-store for packet receipts + packet_receipt_store: TypedSet, ReceiptPath>, + /// A typed-store for packet ack + packet_ack_store: BinStore, AckPath, AcknowledgementCommitment>, + /// IBC Events + events: Vec, + /// message logs + logs: Vec, +} + +impl Ibc { + pub fn new(store: SharedStore) -> Self { + Self { + port_to_module_map: Default::default(), + router: Default::default(), + client_counter: 0, + conn_counter: 0, + channel_counter: 0, + client_processed_times: Default::default(), + client_processed_heights: Default::default(), + consensus_states: Default::default(), + client_type_store: TypedStore::new(store.clone()), + client_state_store: TypedStore::new(store.clone()), + consensus_state_store: TypedStore::new(store.clone()), + connection_end_store: TypedStore::new(store.clone()), + connection_ids_store: TypedStore::new(store.clone()), + channel_end_store: TypedStore::new(store.clone()), + send_sequence_store: TypedStore::new(store.clone()), + recv_sequence_store: TypedStore::new(store.clone()), + ack_sequence_store: TypedStore::new(store.clone()), + packet_commitment_store: TypedStore::new(store.clone()), + packet_receipt_store: TypedStore::new(store.clone()), + packet_ack_store: TypedStore::new(store.clone()), + store, + events: Vec::new(), + logs: Vec::new(), + } + } + + pub fn add_route( + &mut self, + module_id: ModuleId, + module: impl IbcModuleWrapper, + ) -> Result<(), String> { + self.router.add_route(module_id, module) + } + + pub fn scope_port_to_module(&mut self, port_id: PortId, module_id: ModuleId) { + self.port_to_module_map.insert(port_id, module_id); + } + + pub fn client_service(&self) -> ClientQueryServer> { + ClientQueryServer::new(IbcClientService::new(self.store.clone())) + } + + pub fn connection_service(&self) -> ConnectionQueryServer> { + ConnectionQueryServer::new(IbcConnectionService::new(self.store.clone())) + } + + pub fn channel_service(&self) -> ChannelQueryServer> { + 
ChannelQueryServer::new(IbcChannelService::new(self.store.clone())) + } +} + +impl Ibc { + fn get_proof(&self, height: Height, path: &Path) -> Option> { + if let Some(p) = self.store.get_proof(height, path) { + let mut buffer = Vec::new(); + if p.encode(&mut buffer).is_ok() { + return Some(buffer); + } + } + None + } +} + +impl Module for Ibc { + type Store = S; + + fn deliver(&mut self, message: Any, _signer: &AccountId) -> Result, AppError> { + if let Ok(msg) = MsgEnvelope::try_from(message.clone()) { + debug!("Dispatching message: {:?}", msg); + + dispatch(self, msg)?; + let events = self + .events + .drain(..) + .into_iter() + .map(|ev| TmEvent(ev.try_into().unwrap()).into()) + .collect(); + Ok(events) + } else if let Ok(transfer_msg) = MsgTransfer::try_from(message) { + debug!("Dispatching message: {:?}", transfer_msg); + + let transfer_module_id: ModuleId = IBC_TRANSFER_MODULE_ID.parse().unwrap(); + let transfer_module = { + let transfer_module = self + .router + .get_route_mut(&transfer_module_id) + .ok_or(AppError::NotHandled)?; + transfer_module + .as_any_mut() + .downcast_mut::>>() + .expect("Transfer Module <-> ModuleId mismatch") + }; + + send_transfer(transfer_module, transfer_msg).map_err(|e| AppError::Custom { + reason: e.to_string(), + })?; + + Ok(transfer_module + .events + .clone() + .into_iter() + .map(|ev| TmEvent(ev.try_into().unwrap()).into()) + .collect()) + } else { + Err(AppError::NotHandled) + } + } + + fn query( + &self, + data: &[u8], + path: Option<&Path>, + height: Height, + prove: bool, + ) -> Result { + let path = path.ok_or(AppError::NotHandled)?; + if path.to_string() != IBC_QUERY_PATH { + return Err(AppError::NotHandled); + } + + let path: Path = String::from_utf8(data.to_vec()) + .map_err(|_| ContextError::ClientError(ClientError::ImplementationSpecific))? 
+ .try_into()?; + + let _ = IbcPath::try_from(path.clone()) + .map_err(|_| ContextError::ClientError(ClientError::ImplementationSpecific))?; + + debug!( + "Querying for path ({}) at height {:?}", + path.to_string(), + height + ); + + let proof = if prove { + let proof = self + .get_proof(height, &path) + .ok_or(ContextError::ClientError( + ClientError::ImplementationSpecific, + ))?; + Some(vec![ProofOp { + r#type: "".to_string(), + key: path.to_string().into_bytes(), + data: proof, + }]) + } else { + None + }; + + let data = self + .store + .get(height, &path) + .ok_or(ContextError::ClientError( + ClientError::ImplementationSpecific, + ))?; + Ok(QueryResult { data, proof }) + } + + fn begin_block(&mut self, header: &Header) -> Vec { + let consensus_state = TmConsensusState::new( + CommitmentRoot::from_bytes(header.app_hash.as_ref()), + header.time, + header.next_validators_hash, + ); + self.consensus_states + .insert(header.height.value(), consensus_state); + vec![] + } + + fn store_mut(&mut self) -> &mut SharedStore { + &mut self.store + } + + fn store(&self) -> &SharedStore { + &self.store + } +} + +struct TmEvent(TendermintEvent); + +impl From for Event { + fn from(value: TmEvent) -> Self { + Self { + r#type: value.0.kind, + attributes: value + .0 + .attributes + .into_iter() + .map(|attr| EventAttribute { + key: attr.key.into(), + value: attr.value.into(), + index: true, + }) + .collect(), + } + } +} + +impl ContextRouter for Ibc { + fn get_route(&self, module_id: &ModuleId) -> Option<&dyn IbcModule> { + self.router.get_route(module_id) + } + + fn get_route_mut(&mut self, module_id: &ModuleId) -> Option<&mut dyn IbcModule> { + self.router.get_route_mut(module_id) + } + + fn has_route(&self, module_id: &ModuleId) -> bool { + self.router.0.get(module_id).is_some() + } + + fn lookup_module_by_port(&self, port_id: &PortId) -> Option { + self.port_to_module_map + .get(port_id) + .ok_or(PortError::UnknownPort { + port_id: port_id.clone(), + }) + .map(Clone::clone) + 
.ok() + } +} + +impl ValidationContext for Ibc { + fn client_state(&self, client_id: &ClientId) -> Result, ContextError> { + self.client_state_store + .get(Height::Pending, &ClientStatePath(client_id.clone())) + .ok_or(ClientError::ImplementationSpecific) + .map_err(ContextError::from) + .map(|cs| Box::new(cs) as Box) + } + + fn decode_client_state(&self, client_state: Any) -> Result, ContextError> { + if let Ok(client_state) = TmClientState::try_from(client_state.clone()) { + Ok(client_state.into_box()) + } else { + Err(ClientError::UnknownClientStateType { + client_state_type: client_state.type_url, + }) + .map_err(ContextError::from) + } + } + + fn consensus_state( + &self, + client_cons_state_path: &ClientConsensusStatePath, + ) -> Result, ContextError> { + let height = IbcHeight::new(client_cons_state_path.epoch, client_cons_state_path.height) + .map_err(|_| ClientError::InvalidHeight)?; + let consensus_state = self + .consensus_state_store + .get(Height::Pending, client_cons_state_path) + .ok_or(ClientError::ConsensusStateNotFound { + client_id: client_cons_state_path.client_id.clone(), + height, + })?; + Ok(Box::new(consensus_state) as Box) + } + + fn next_consensus_state( + &self, + client_id: &ClientId, + height: &IbcHeight, + ) -> Result>, ContextError> { + let path = format!("clients/{client_id}/consensusStates") + .try_into() + .unwrap(); // safety - path must be valid since ClientId and height are valid Identifiers + + let keys = self.store.get_keys(&path); + let found_path = keys.into_iter().find_map(|path| { + if let Ok(IbcPath::ClientConsensusState(path)) = IbcPath::try_from(path) { + if height > &IbcHeight::new(path.epoch, path.height).unwrap() { + return Some(path); + } + } + None + }); + + if let Some(path) = found_path { + let consensus_state = self + .consensus_state_store + .get(Height::Pending, &path) + .ok_or(ClientError::ConsensusStateNotFound { + client_id: client_id.clone(), + height: *height, + })?; + Ok(Some(Box::new(consensus_state))) 
+ } else { + Ok(None) + } + } + + fn prev_consensus_state( + &self, + client_id: &ClientId, + height: &IbcHeight, + ) -> Result>, ContextError> { + let path = format!("clients/{client_id}/consensusStates") + .try_into() + .unwrap(); // safety - path must be valid since ClientId and height are valid Identifiers + + let keys = self.store.get_keys(&path); + let pos = keys.iter().position(|path| { + if let Ok(IbcPath::ClientConsensusState(path)) = IbcPath::try_from(path.clone()) { + height >= &IbcHeight::new(path.epoch, path.height).unwrap() + } else { + false + } + }); + + if let Some(pos) = pos { + if pos > 0 { + let prev_path = match IbcPath::try_from(keys[pos - 1].clone()) { + Ok(IbcPath::ClientConsensusState(p)) => p, + _ => unreachable!(), // safety - path retrieved from store + }; + let consensus_state = self + .consensus_state_store + .get(Height::Pending, &prev_path) + .ok_or(ClientError::ConsensusStateNotFound { + client_id: client_id.clone(), + height: *height, + })?; + return Ok(Some(Box::new(consensus_state))); + } + } + Ok(None) + } + + fn host_height(&self) -> Result { + IbcHeight::new(0, self.store.current_height()).map_err(ContextError::from) + } + + fn host_timestamp(&self) -> Result { + let host_height = self.host_height()?; + let host_cons_state = self.host_consensus_state(&host_height)?; + Ok(host_cons_state.timestamp()) + } + + fn host_consensus_state( + &self, + height: &IbcHeight, + ) -> Result, ContextError> { + let consensus_state = self + .consensus_states + .get(&height.revision_height()) + .ok_or(ClientError::MissingLocalConsensusState { height: *height })?; + Ok(Box::new(consensus_state.clone())) + } + + fn client_counter(&self) -> Result { + Ok(self.client_counter) + } + + fn connection_end(&self, conn_id: &ConnectionId) -> Result { + self.connection_end_store + .get(Height::Pending, &ConnectionPath::new(conn_id)) + .ok_or(ConnectionError::Client(ClientError::ImplementationSpecific)) + .map_err(ContextError::from) + } + + fn 
validate_self_client(&self, _counterparty_client_state: Any) -> Result<(), ContextError> { + Ok(()) + } + + fn commitment_prefix(&self) -> CommitmentPrefix { + use crate::modules::module::prefix::Ibc as IbcPrefix; + CommitmentPrefix::try_from(IbcPrefix {}.identifier().as_bytes().to_vec()) + .expect("empty prefix") + } + + fn connection_counter(&self) -> Result { + Ok(self.conn_counter) + } + + fn get_compatible_versions(&self) -> Vec { + vec![ConnectionVersion::default()] + } + + fn pick_version( + &self, + supported_versions: &[ConnectionVersion], + counterparty_candidate_versions: &[ConnectionVersion], + ) -> Result { + pick_version(supported_versions, counterparty_candidate_versions) + .map_err(ContextError::ConnectionError) + } + + fn channel_end(&self, channel_end_path: &ChannelEndPath) -> Result { + self.channel_end_store + .get( + Height::Pending, + &ChannelEndPath::new(&channel_end_path.0, &channel_end_path.1), + ) + .ok_or(ChannelError::Connection(ConnectionError::Client( + ClientError::ImplementationSpecific, + ))) + .map_err(ContextError::ChannelError) + } + + fn get_next_sequence_send( + &self, + seq_send_path: &SeqSendPath, + ) -> Result { + self.send_sequence_store + .get( + Height::Pending, + &SeqSendPath::new(&seq_send_path.0, &seq_send_path.1), + ) + .ok_or(PacketError::ImplementationSpecific) + .map_err(ContextError::PacketError) + } + + fn get_next_sequence_recv( + &self, + seq_recv_path: &SeqRecvPath, + ) -> Result { + self.recv_sequence_store + .get( + Height::Pending, + &SeqRecvPath::new(&seq_recv_path.0, &seq_recv_path.1), + ) + .ok_or(PacketError::ImplementationSpecific) + .map_err(ContextError::PacketError) + } + + fn get_next_sequence_ack(&self, seq_ack_path: &SeqAckPath) -> Result { + self.ack_sequence_store + .get( + Height::Pending, + &SeqAckPath::new(&seq_ack_path.0, &seq_ack_path.1), + ) + .ok_or(PacketError::ImplementationSpecific) + .map_err(ContextError::PacketError) + } + + fn get_packet_commitment( + &self, + commitment_path: 
&CommitmentPath, + ) -> Result { + self.packet_commitment_store + .get( + Height::Pending, + &CommitmentPath::new( + &commitment_path.port_id, + &commitment_path.channel_id, + commitment_path.sequence, + ), + ) + .ok_or(PacketError::ImplementationSpecific) + .map_err(ContextError::PacketError) + } + + fn get_packet_receipt(&self, receipt_path: &ReceiptPath) -> Result { + self.packet_receipt_store + .is_path_set( + Height::Pending, + &ReceiptPath::new( + &receipt_path.port_id, + &receipt_path.channel_id, + receipt_path.sequence, + ), + ) + .then_some(Receipt::Ok) + .ok_or(PacketError::PacketReceiptNotFound { + sequence: receipt_path.sequence, + }) + .map_err(ContextError::PacketError) + } + + fn get_packet_acknowledgement( + &self, + ack_path: &AckPath, + ) -> Result { + self.packet_ack_store + .get( + Height::Pending, + &AckPath::new(&ack_path.port_id, &ack_path.channel_id, ack_path.sequence), + ) + .ok_or(PacketError::PacketAcknowledgementNotFound { + sequence: ack_path.sequence, + }) + .map_err(ContextError::PacketError) + } + + /// A hashing function for packet commitments + fn hash(&self, value: &[u8]) -> Vec { + sha2::Sha256::digest(value).to_vec() + } + + /// Returns the time when the client state for the given [`ClientId`] was updated with a header for the given [`Height`] + fn client_update_time( + &self, + client_id: &ClientId, + height: &IbcHeight, + ) -> Result { + self.client_processed_times + .get(&(client_id.clone(), *height)) + .cloned() + .ok_or(ChannelError::Connection(ConnectionError::Client( + ClientError::ImplementationSpecific, + ))) + .map_err(ContextError::ChannelError) + } + + /// Returns the height when the client state for the given [`ClientId`] was updated with a header for the given [`Height`] + fn client_update_height( + &self, + client_id: &ClientId, + height: &IbcHeight, + ) -> Result { + self.client_processed_heights + .get(&(client_id.clone(), *height)) + .cloned() + .ok_or(ChannelError::Connection(ConnectionError::Client( + 
ClientError::ImplementationSpecific, + ))) + .map_err(ContextError::ChannelError) + } + + /// Returns a counter on the number of channel ids have been created thus far. + /// The value of this counter should increase only via method + /// `ChannelKeeper::increase_channel_counter`. + fn channel_counter(&self) -> Result { + Ok(self.channel_counter) + } + + /// Returns the maximum expected time per block + fn max_expected_time_per_block(&self) -> Duration { + Duration::from_secs(8) + } + + /// Calculates the block delay period using the connection's delay period and the maximum + /// expected time per block. + fn block_delay(&self, delay_period_time: &Duration) -> u64 { + calculate_block_delay( + delay_period_time, + &::max_expected_time_per_block(self), + ) + } +} + +impl ExecutionContext for Ibc { + /// Called upon successful client creation + fn store_client_type( + &mut self, + client_type_path: ClientTypePath, + client_type: ClientType, + ) -> Result<(), ContextError> { + self.client_type_store + .set(client_type_path, client_type) + .map(|_| ()) + .map_err(|_| ClientError::ImplementationSpecific) + .map_err(ContextError::ClientError) + } + + /// Called upon successful client creation and update + fn store_client_state( + &mut self, + client_state_path: ClientStatePath, + client_state: Box, + ) -> Result<(), ContextError> { + let tm_client_state = client_state + .as_any() + .downcast_ref::() + .ok_or(ClientError::ImplementationSpecific)?; + self.client_state_store + .set(client_state_path, tm_client_state.clone()) + .map(|_| ()) + .map_err(|_| ClientError::ImplementationSpecific) + .map_err(ContextError::ClientError) + } + + /// Called upon successful client creation and update + fn store_consensus_state( + &mut self, + consensus_state_path: ClientConsensusStatePath, + consensus_state: Box, + ) -> Result<(), ContextError> { + let tm_consensus_state = consensus_state + .as_any() + .downcast_ref::() + .ok_or(ClientError::ImplementationSpecific)?; + 
self.consensus_state_store + .set(consensus_state_path, tm_consensus_state.clone()) + .map_err(|_| ClientError::ImplementationSpecific)?; + Ok(()) + } + + /// Called upon client creation. + /// Increases the counter which keeps track of how many clients have been created. + /// Should never fail. + fn increase_client_counter(&mut self) { + self.client_counter += 1; + } + + /// Called upon successful client update. + /// Implementations are expected to use this to record the specified time as the time at which + /// this update (or header) was processed. + fn store_update_time( + &mut self, + client_id: ClientId, + height: IbcHeight, + timestamp: Timestamp, + ) -> Result<(), ContextError> { + self.client_processed_times + .insert((client_id, height), timestamp); + Ok(()) + } + + /// Called upon successful client update. + /// Implementations are expected to use this to record the specified height as the height at + /// at which this update (or header) was processed. + fn store_update_height( + &mut self, + client_id: ClientId, + height: IbcHeight, + host_height: IbcHeight, + ) -> Result<(), ContextError> { + self.client_processed_heights + .insert((client_id, height), host_height); + Ok(()) + } + + /// Stores the given connection_end at path + fn store_connection( + &mut self, + connection_path: &ConnectionPath, + connection_end: ConnectionEnd, + ) -> Result<(), ContextError> { + self.connection_end_store + .set(connection_path.clone(), connection_end) + .map_err(|_| ConnectionError::Client(ClientError::ImplementationSpecific))?; + Ok(()) + } + + /// Stores the given connection_id at a path associated with the client_id. 
+ fn store_connection_to_client( + &mut self, + client_connection_path: &ClientConnectionPath, + conn_id: ConnectionId, + ) -> Result<(), ContextError> { + let mut conn_ids: Vec = self + .connection_ids_store + .get(Height::Pending, client_connection_path) + .unwrap_or_default(); + conn_ids.push(conn_id); + self.connection_ids_store + .set(client_connection_path.clone(), conn_ids) + .map_err(|_| ConnectionError::Client(ClientError::ImplementationSpecific))?; + Ok(()) + } + + /// Called upon connection identifier creation (Init or Try process). + /// Increases the counter which keeps track of how many connections have been created. + /// Should never fail. + fn increase_connection_counter(&mut self) { + self.conn_counter += 1; + } + + fn store_packet_commitment( + &mut self, + commitment_path: &CommitmentPath, + commitment: PacketCommitment, + ) -> Result<(), ContextError> { + self.packet_commitment_store + .set(commitment_path.clone(), commitment) + .map_err(|_| PacketError::ImplementationSpecific)?; + Ok(()) + } + + fn delete_packet_commitment(&mut self, key: &CommitmentPath) -> Result<(), ContextError> { + self.packet_commitment_store + .set(key.clone(), vec![].into()) + .map_err(|_| PacketError::ImplementationSpecific)?; + Ok(()) + } + + fn store_packet_receipt( + &mut self, + receipt_path: &ReceiptPath, + _receipt: Receipt, + ) -> Result<(), ContextError> { + self.packet_receipt_store + .set_path(receipt_path.clone()) + .map_err(|_| PacketError::ImplementationSpecific)?; + Ok(()) + } + + fn store_packet_acknowledgement( + &mut self, + ack_path: &AckPath, + ack_commitment: AcknowledgementCommitment, + ) -> Result<(), ContextError> { + self.packet_ack_store + .set(ack_path.clone(), ack_commitment) + .map_err(|_| PacketError::ImplementationSpecific)?; + Ok(()) + } + + fn delete_packet_acknowledgement(&mut self, ack_path: &AckPath) -> Result<(), ContextError> { + self.packet_ack_store + .set(ack_path.clone(), vec![].into()) + .map_err(|_| 
PacketError::ImplementationSpecific)?; + Ok(()) + } + + /// Stores the given channel_end at a path associated with the port_id and channel_id. + fn store_channel( + &mut self, + channel_end_path: &ChannelEndPath, + channel_end: ChannelEnd, + ) -> Result<(), ContextError> { + self.channel_end_store + .set(channel_end_path.clone(), channel_end) + .map_err(|_| ClientError::ImplementationSpecific)?; + Ok(()) + } + + fn store_next_sequence_send( + &mut self, + seq_send_path: &SeqSendPath, + seq: Sequence, + ) -> Result<(), ContextError> { + self.send_sequence_store + .set(seq_send_path.clone(), seq) + .map_err(|_| PacketError::ImplementationSpecific)?; + Ok(()) + } + + fn store_next_sequence_recv( + &mut self, + seq_recv_path: &SeqRecvPath, + seq: Sequence, + ) -> Result<(), ContextError> { + self.recv_sequence_store + .set(seq_recv_path.clone(), seq) + .map_err(|_| PacketError::ImplementationSpecific)?; + Ok(()) + } + + fn store_next_sequence_ack( + &mut self, + seq_ack_path: &SeqAckPath, + seq: Sequence, + ) -> Result<(), ContextError> { + self.ack_sequence_store + .set(seq_ack_path.clone(), seq) + .map_err(|_| PacketError::ImplementationSpecific)?; + Ok(()) + } + + fn increase_channel_counter(&mut self) { + self.channel_counter += 1; + } + + fn emit_ibc_event(&mut self, event: IbcEvent) { + self.events.push(event); + } + + fn log_message(&mut self, message: String) { + self.logs.push(message); + } +} diff --git a/src/modules/ibc/mod.rs b/src/modules/ibc/mod.rs new file mode 100644 index 00000000..153d80ef --- /dev/null +++ b/src/modules/ibc/mod.rs @@ -0,0 +1,5 @@ +pub mod error; +pub mod impls; +mod router; +pub mod service; +pub mod transfer; diff --git a/src/modules/ibc/router.rs b/src/modules/ibc/router.rs new file mode 100644 index 00000000..d2da195e --- /dev/null +++ b/src/modules/ibc/router.rs @@ -0,0 +1,37 @@ +use std::{borrow::Borrow, collections::BTreeMap, sync::Arc}; + +use ibc::core::ics26_routing::context::{Module, ModuleId}; + +pub trait 
IbcModuleWrapper: Module + Send + Sync { + fn as_ibc_module(&self) -> &dyn Module; + fn as_ibc_module_mut(&mut self) -> &mut dyn Module; +} + +#[derive(Clone, Default, Debug)] +pub struct IbcRouter(pub BTreeMap>); + +impl IbcRouter { + pub fn get_route(&self, module_id: &impl Borrow) -> Option<&dyn Module> { + self.0 + .get(module_id.borrow()) + .map(|mod_wrapper| mod_wrapper.as_ibc_module()) + } + + pub fn get_route_mut(&mut self, module_id: &impl Borrow) -> Option<&mut dyn Module> { + self.0 + .get_mut(module_id.borrow()) + .and_then(Arc::get_mut) + .map(|mod_wrapper| mod_wrapper.as_ibc_module_mut()) + } + + pub fn add_route( + &mut self, + module_id: ModuleId, + module: impl IbcModuleWrapper, + ) -> Result<(), String> { + match self.0.insert(module_id, Arc::new(module)) { + None => Ok(()), + Some(_) => Err("Duplicate module_id".to_owned()), + } + } +} diff --git a/src/modules/ibc/service.rs b/src/modules/ibc/service.rs new file mode 100644 index 00000000..985f58d7 --- /dev/null +++ b/src/modules/ibc/service.rs @@ -0,0 +1,670 @@ +use std::str::FromStr; + +use crate::{ + app::CHAIN_REVISION_NUMBER, + helper::{Height, Path}, + store::{ + BinStore, JsonStore, ProtobufStore, ProvableStore, SharedStore, Store, TypedSet, TypedStore, + }, +}; +use ibc::core::ics24_host::identifier::PortId; +use ibc::{ + clients::ics07_tendermint::{ + client_state::ClientState as TmClientState, + consensus_state::ConsensusState as TmConsensusState, + }, + core::{ + ics03_connection::connection::{ConnectionEnd, IdentifiedConnectionEnd}, + ics04_channel::{ + channel::{ChannelEnd, IdentifiedChannelEnd}, + commitment::{AcknowledgementCommitment, PacketCommitment}, + packet::Sequence, + }, + ics24_host::{ + identifier::{ChannelId, ConnectionId}, + path::{ + AckPath, ChannelEndPath, ClientConnectionPath, ClientConsensusStatePath, + ClientStatePath, CommitmentPath, ConnectionPath, ReceiptPath, + }, + Path as IbcPath, + }, + }, +}; + +use ibc_proto::{ + google::protobuf::Any, + ibc::core::{ + 
channel::v1::{ + query_server::Query as ChannelQuery, Channel as RawChannelEnd, + IdentifiedChannel as RawIdentifiedChannel, PacketState, QueryChannelClientStateRequest, + QueryChannelClientStateResponse, QueryChannelConsensusStateRequest, + QueryChannelConsensusStateResponse, QueryChannelRequest, QueryChannelResponse, + QueryChannelsRequest, QueryChannelsResponse, QueryConnectionChannelsRequest, + QueryConnectionChannelsResponse, QueryNextSequenceReceiveRequest, + QueryNextSequenceReceiveResponse, QueryPacketAcknowledgementRequest, + QueryPacketAcknowledgementResponse, QueryPacketAcknowledgementsRequest, + QueryPacketAcknowledgementsResponse, QueryPacketCommitmentRequest, + QueryPacketCommitmentResponse, QueryPacketCommitmentsRequest, + QueryPacketCommitmentsResponse, QueryPacketReceiptRequest, QueryPacketReceiptResponse, + QueryUnreceivedAcksRequest, QueryUnreceivedAcksResponse, QueryUnreceivedPacketsRequest, + QueryUnreceivedPacketsResponse, + }, + client::v1::{ + query_server::Query as ClientQuery, ConsensusStateWithHeight, Height as RawHeight, + IdentifiedClientState, QueryClientParamsRequest, QueryClientParamsResponse, + QueryClientStateRequest, QueryClientStateResponse, QueryClientStatesRequest, + QueryClientStatesResponse, QueryClientStatusRequest, QueryClientStatusResponse, + QueryConsensusStateHeightsRequest, QueryConsensusStateHeightsResponse, + QueryConsensusStateRequest, QueryConsensusStateResponse, QueryConsensusStatesRequest, + QueryConsensusStatesResponse, QueryUpgradedClientStateRequest, + QueryUpgradedClientStateResponse, QueryUpgradedConsensusStateRequest, + QueryUpgradedConsensusStateResponse, + }, + connection::v1::{ + query_server::Query as ConnectionQuery, ConnectionEnd as RawConnectionEnd, + IdentifiedConnection as RawIdentifiedConnection, QueryClientConnectionsRequest, + QueryClientConnectionsResponse, QueryConnectionClientStateRequest, + QueryConnectionClientStateResponse, QueryConnectionConsensusStateRequest, + 
QueryConnectionConsensusStateResponse, QueryConnectionRequest, QueryConnectionResponse, + QueryConnectionsRequest, QueryConnectionsResponse, + }, + }, +}; +use tonic::{Request, Response, Status}; +use tracing::trace; + +pub struct IbcClientService { + client_state_store: ProtobufStore, ClientStatePath, TmClientState, Any>, + consensus_state_store: + ProtobufStore, ClientConsensusStatePath, TmConsensusState, Any>, +} + +impl IbcClientService { + pub fn new(store: SharedStore) -> Self { + Self { + client_state_store: TypedStore::new(store.clone()), + consensus_state_store: TypedStore::new(store), + } + } +} + +#[tonic::async_trait] +impl ClientQuery for IbcClientService { + async fn client_state( + &self, + _request: Request, + ) -> Result, Status> { + unimplemented!() + } + + async fn client_states( + &self, + request: Request, + ) -> Result, Status> { + trace!("Got client states request: {:?}", request); + + let path = "clients" + .to_owned() + .try_into() + .map_err(|e| Status::invalid_argument(format!("{e}")))?; + + let client_state_paths = |path: Path| -> Option { + match path.try_into() { + Ok(IbcPath::ClientState(p)) => Some(p), + _ => None, + } + }; + + let identified_client_state = |path: ClientStatePath| { + let client_state = self.client_state_store.get(Height::Pending, &path).unwrap(); + IdentifiedClientState { + client_id: path.0.to_string(), + client_state: Some(client_state.into()), + } + }; + + let keys = self.client_state_store.get_keys(&path); + let client_states = keys + .into_iter() + .filter_map(client_state_paths) + .map(identified_client_state) + .collect(); + + Ok(Response::new(QueryClientStatesResponse { + client_states, + pagination: None, // TODO(hu55a1n1): add pagination support + })) + } + + async fn consensus_state( + &self, + _request: Request, + ) -> Result, Status> { + unimplemented!() + } + + async fn consensus_states( + &self, + request: Request, + ) -> Result, Status> { + trace!("Got consensus states request: {:?}", request); + + 
let path = format!("clients/{}/consensusStates", request.get_ref().client_id) + .try_into() + .map_err(|e| Status::invalid_argument(format!("{e}")))?; + + let keys = self.consensus_state_store.get_keys(&path); + let consensus_states = keys + .into_iter() + .map(|path| { + if let Ok(IbcPath::ClientConsensusState(path)) = path.try_into() { + let consensus_state = self.consensus_state_store.get(Height::Pending, &path); + ConsensusStateWithHeight { + height: Some(RawHeight { + revision_number: path.epoch, + revision_height: path.height, + }), + consensus_state: consensus_state.map(|cs| cs.into()), + } + } else { + panic!("unexpected path") // safety - store paths are assumed to be well-formed + } + }) + .collect(); + + Ok(Response::new(QueryConsensusStatesResponse { + consensus_states, + pagination: None, // TODO(hu55a1n1): add pagination support + })) + } + + async fn consensus_state_heights( + &self, + _request: Request, + ) -> Result, Status> { + unimplemented!() + } + + async fn client_status( + &self, + _request: Request, + ) -> Result, Status> { + unimplemented!() + } + + async fn client_params( + &self, + _request: Request, + ) -> Result, Status> { + unimplemented!() + } + + async fn upgraded_client_state( + &self, + _request: Request, + ) -> Result, Status> { + unimplemented!() + } + + async fn upgraded_consensus_state( + &self, + _request: Request, + ) -> Result, Status> { + unimplemented!() + } +} + +pub struct IbcConnectionService { + connection_end_store: + ProtobufStore, ConnectionPath, ConnectionEnd, RawConnectionEnd>, + connection_ids_store: JsonStore, ClientConnectionPath, Vec>, +} + +impl IbcConnectionService { + pub fn new(store: SharedStore) -> Self { + Self { + connection_end_store: TypedStore::new(store.clone()), + connection_ids_store: TypedStore::new(store), + } + } +} + +#[tonic::async_trait] +impl ConnectionQuery for IbcConnectionService { + async fn connection( + &self, + request: Request, + ) -> Result, Status> { + let conn_id = 
ConnectionId::from_str(&request.get_ref().connection_id) + .map_err(|_| Status::invalid_argument("invalid connection id"))?; + let conn = self + .connection_end_store + .get(Height::Pending, &ConnectionPath::new(&conn_id)); + Ok(Response::new(QueryConnectionResponse { + connection: conn.map(|c| c.into()), + proof: vec![], + proof_height: None, + })) + } + + async fn connections( + &self, + _request: Request, + ) -> Result, Status> { + let connection_path_prefix: Path = String::from("connections") + .try_into() + .expect("'connections' expected to be a valid Path"); + + let connection_paths = self.connection_end_store.get_keys(&connection_path_prefix); + + let identified_connections: Vec = connection_paths + .into_iter() + .map(|path| match path.try_into() { + Ok(IbcPath::Connection(connections_path)) => { + let connection_end = self + .connection_end_store + .get(Height::Pending, &connections_path) + .unwrap(); + IdentifiedConnectionEnd::new(connections_path.0, connection_end).into() + } + _ => panic!("unexpected path"), + }) + .collect(); + + Ok(Response::new(QueryConnectionsResponse { + connections: identified_connections, + pagination: None, + height: None, + })) + } + + async fn client_connections( + &self, + request: Request, + ) -> Result, Status> { + trace!("Got client connections request: {:?}", request); + + let client_id = request + .get_ref() + .client_id + .parse() + .map_err(|e| Status::invalid_argument(format!("{e}")))?; + let path = ClientConnectionPath::new(&client_id); + let connection_ids = self + .connection_ids_store + .get(Height::Pending, &path) + .unwrap_or_default(); + let connection_paths = connection_ids + .into_iter() + .map(|conn_id| conn_id.to_string()) + .collect(); + + Ok(Response::new(QueryClientConnectionsResponse { + connection_paths, + // Note: proofs aren't being used by hermes currently + proof: vec![], + proof_height: None, + })) + } + + async fn connection_client_state( + &self, + _request: Request, + ) -> Result, Status> { + 
todo!() + } + + async fn connection_consensus_state( + &self, + _request: Request, + ) -> Result, Status> { + todo!() + } +} + +pub struct IbcChannelService { + channel_end_store: ProtobufStore, ChannelEndPath, ChannelEnd, RawChannelEnd>, + packet_commitment_store: BinStore, CommitmentPath, PacketCommitment>, + packet_ack_store: BinStore, AckPath, AcknowledgementCommitment>, + packet_receipt_store: TypedSet, ReceiptPath>, +} + +impl IbcChannelService { + pub fn new(store: SharedStore) -> Self { + Self { + channel_end_store: TypedStore::new(store.clone()), + packet_commitment_store: TypedStore::new(store.clone()), + packet_ack_store: TypedStore::new(store.clone()), + packet_receipt_store: TypedStore::new(store), + } + } +} + +#[tonic::async_trait] +impl ChannelQuery for IbcChannelService { + async fn channel( + &self, + request: Request, + ) -> Result, Status> { + let request = request.into_inner(); + let port_id = PortId::from_str(&request.port_id) + .map_err(|_| Status::invalid_argument("invalid port id"))?; + let channel_id = ChannelId::from_str(&request.channel_id) + .map_err(|_| Status::invalid_argument("invalid channel id"))?; + + let channel = self + .channel_end_store + .get(Height::Pending, &ChannelEndPath(port_id, channel_id)) + .map(|channel_end| channel_end.into()); + + Ok(Response::new(QueryChannelResponse { + channel, + proof: vec![], + proof_height: None, + })) + } + /// Channels queries all the IBC channels of a chain. 
+ async fn channels( + &self, + _request: Request, + ) -> Result, Status> { + let channel_path_prefix: Path = String::from("channelEnds/ports") + .try_into() + .expect("'channelEnds/ports' expected to be a valid Path"); + + let channel_paths = self.channel_end_store.get_keys(&channel_path_prefix); + let identified_channels: Vec = channel_paths + .into_iter() + .map(|path| match path.try_into() { + Ok(IbcPath::ChannelEnd(channels_path)) => { + let channel_end = self + .channel_end_store + .get(Height::Pending, &channels_path) + .expect("channel path returned by get_keys() had no associated channel"); + IdentifiedChannelEnd::new(channels_path.0, channels_path.1, channel_end).into() + } + _ => panic!("unexpected path"), + }) + .collect(); + + Ok(Response::new(QueryChannelsResponse { + channels: identified_channels, + pagination: None, + height: Some(RawHeight { + revision_number: CHAIN_REVISION_NUMBER, + revision_height: self.channel_end_store.current_height(), + }), + })) + } + /// ConnectionChannels queries all the channels associated with a connection + /// end. 
+ async fn connection_channels( + &self, + request: Request, + ) -> Result, Status> { + let conn_id = ConnectionId::from_str(&request.get_ref().connection) + .map_err(|_| Status::invalid_argument("invalid connection id"))?; + + let path = "channelEnds" + .to_owned() + .try_into() + .expect("'commitments/ports' expected to be a valid Path"); + + let keys = self.channel_end_store.get_keys(&path); + let channels = keys + .into_iter() + .filter_map(|path| { + if let Ok(IbcPath::ChannelEnd(path)) = path.try_into() { + let channel_end = self.channel_end_store.get(Height::Pending, &path)?; + if channel_end.connection_hops.first() == Some(&conn_id) { + return Some(IdentifiedChannelEnd::new(path.0, path.1, channel_end).into()); + } + } + + None + }) + .collect(); + + Ok(Response::new(QueryConnectionChannelsResponse { + channels, + pagination: None, + height: Some(RawHeight { + revision_number: CHAIN_REVISION_NUMBER, + revision_height: self.channel_end_store.current_height(), + }), + })) + } + /// ChannelClientState queries for the client state for the channel associated + /// with the provided channel identifiers. + async fn channel_client_state( + &self, + _request: Request, + ) -> Result, Status> { + todo!() + } + /// ChannelConsensusState queries for the consensus state for the channel + /// associated with the provided channel identifiers. + async fn channel_consensus_state( + &self, + _request: Request, + ) -> Result, Status> { + todo!() + } + /// PacketCommitment queries a stored packet commitment hash. + async fn packet_commitment( + &self, + _request: Request, + ) -> Result, Status> { + todo!() + } + /// PacketCommitments returns all the packet commitments hashes associated + /// with a channel. 
+ async fn packet_commitments( + &self, + request: Request, + ) -> Result, Status> { + let request = request.into_inner(); + let port_id = PortId::from_str(&request.port_id) + .map_err(|_| Status::invalid_argument("invalid port id"))?; + let channel_id = ChannelId::from_str(&request.channel_id) + .map_err(|_| Status::invalid_argument("invalid channel id"))?; + + let commitment_paths = { + let prefix: Path = String::from("commitments/ports") + .try_into() + .expect("'commitments/ports' expected to be a valid Path"); + self.packet_commitment_store.get_keys(&prefix) + }; + + let matching_commitment_paths = |path: Path| -> Option { + match path.try_into() { + Ok(IbcPath::Commitment(p)) + if p.port_id == port_id && p.channel_id == channel_id => + { + Some(p) + } + _ => None, + } + }; + + let packet_state = |path: CommitmentPath| -> Option { + let commitment = self + .packet_commitment_store + .get(Height::Pending, &path) + .unwrap(); + let data = commitment.into_vec(); + (!data.is_empty()).then(|| PacketState { + port_id: path.port_id.to_string(), + channel_id: path.channel_id.to_string(), + sequence: path.sequence.into(), + data, + }) + }; + + let packet_states: Vec = commitment_paths + .into_iter() + .filter_map(matching_commitment_paths) + .filter_map(packet_state) + .collect(); + + Ok(Response::new(QueryPacketCommitmentsResponse { + commitments: packet_states, + pagination: None, + height: Some(RawHeight { + revision_number: CHAIN_REVISION_NUMBER, + revision_height: self.packet_commitment_store.current_height(), + }), + })) + } + + /// PacketReceipt queries if a given packet sequence has been received on the + /// queried chain + async fn packet_receipt( + &self, + _request: Request, + ) -> Result, Status> { + todo!() + } + + /// PacketAcknowledgement queries a stored packet acknowledgement hash. 
+ async fn packet_acknowledgement( + &self, + _request: Request, + ) -> Result, Status> { + todo!() + } + + /// PacketAcknowledgements returns all the packet acknowledgements associated + /// with a channel. + async fn packet_acknowledgements( + &self, + request: Request, + ) -> Result, Status> { + let request = request.into_inner(); + let port_id = PortId::from_str(&request.port_id) + .map_err(|_| Status::invalid_argument("invalid port id"))?; + let channel_id = ChannelId::from_str(&request.channel_id) + .map_err(|_| Status::invalid_argument("invalid channel id"))?; + + let ack_paths = { + let prefix: Path = String::from("acks/ports") + .try_into() + .expect("'acks/ports' expected to be a valid Path"); + self.packet_ack_store.get_keys(&prefix) + }; + + let matching_ack_paths = |path: Path| -> Option { + match path.try_into() { + Ok(IbcPath::Ack(p)) if p.port_id == port_id && p.channel_id == channel_id => { + Some(p) + } + _ => None, + } + }; + + let packet_state = |path: AckPath| -> Option { + let commitment = self.packet_ack_store.get(Height::Pending, &path).unwrap(); + let data = commitment.into_vec(); + (!data.is_empty()).then(|| PacketState { + port_id: path.port_id.to_string(), + channel_id: path.channel_id.to_string(), + sequence: path.sequence.into(), + data, + }) + }; + + let packet_states: Vec = ack_paths + .into_iter() + .filter_map(matching_ack_paths) + .filter_map(packet_state) + .collect(); + + Ok(Response::new(QueryPacketAcknowledgementsResponse { + acknowledgements: packet_states, + pagination: None, + height: Some(RawHeight { + revision_number: CHAIN_REVISION_NUMBER, + revision_height: self.packet_ack_store.current_height(), + }), + })) + } + + /// UnreceivedPackets returns all the unreceived IBC packets associated with + /// a channel and sequences. + /// + /// QUESTION. Currently only works for unordered channels; ordered channels + /// don't use receipts. However, ibc-go does it this way. 
Investigate if + /// this query only ever makes sense on unordered channels. + async fn unreceived_packets( + &self, + request: Request, + ) -> Result, Status> { + let request = request.into_inner(); + let port_id = PortId::from_str(&request.port_id) + .map_err(|_| Status::invalid_argument("invalid port id"))?; + let channel_id = ChannelId::from_str(&request.channel_id) + .map_err(|_| Status::invalid_argument("invalid channel id"))?; + let sequences_to_check: Vec = request.packet_commitment_sequences; + + let unreceived_sequences: Vec = sequences_to_check + .into_iter() + .filter(|seq| { + let receipts_path = ReceiptPath::new(&port_id, &channel_id, Sequence::from(*seq)); + self.packet_receipt_store + .get(Height::Pending, &receipts_path) + .is_none() + }) + .collect(); + + Ok(Response::new(QueryUnreceivedPacketsResponse { + sequences: unreceived_sequences, + height: Some(RawHeight { + revision_number: CHAIN_REVISION_NUMBER, + revision_height: self.packet_receipt_store.current_height(), + }), + })) + } + + /// UnreceivedAcks returns all the unreceived IBC acknowledgements associated + /// with a channel and sequences. + async fn unreceived_acks( + &self, + request: Request, + ) -> Result, Status> { + let request = request.into_inner(); + let port_id = PortId::from_str(&request.port_id) + .map_err(|_| Status::invalid_argument("invalid port id"))?; + let channel_id = ChannelId::from_str(&request.channel_id) + .map_err(|_| Status::invalid_argument("invalid channel id"))?; + let sequences_to_check: Vec = request.packet_ack_sequences; + + let unreceived_sequences: Vec = sequences_to_check + .into_iter() + .filter(|seq| { + // To check if we received an acknowledgement, we check if we still have the sent packet + // commitment (upon receiving an ack, the sent packet commitment is deleted). 
+ let commitments_path = + CommitmentPath::new(&port_id, &channel_id, Sequence::from(*seq)); + self.packet_commitment_store + .get(Height::Pending, &commitments_path) + .is_some() + }) + .collect(); + + Ok(Response::new(QueryUnreceivedAcksResponse { + sequences: unreceived_sequences, + height: Some(RawHeight { + revision_number: CHAIN_REVISION_NUMBER, + revision_height: self.packet_commitment_store.current_height(), + }), + })) + } + + /// NextSequenceReceive returns the next receive sequence for a given channel. + async fn next_sequence_receive( + &self, + _request: Request, + ) -> Result, Status> { + todo!() + } +} diff --git a/src/modules/ibc/transfer.rs b/src/modules/ibc/transfer.rs new file mode 100644 index 00000000..f3ae6daf --- /dev/null +++ b/src/modules/ibc/transfer.rs @@ -0,0 +1,575 @@ +use crate::{ + helper::Height, + modules::{ + auth::account::ACCOUNT_PREFIX, + bank::context::BankKeeper, + bank::util::{Coin, Denom}, + }, + store::{ + SharedStore, Store, {BinStore, JsonStore, ProtobufStore, TypedStore}, + }, +}; +use core::fmt::Debug; +use cosmrs::AccountId; +use ibc::core::ics24_host::identifier::PortId; +use ibc::{applications::transfer::VERSION, core::ics24_host::path::SeqSendPath}; +use ibc::{ + applications::transfer::{ + context::{ + cosmos_adr028_escrow_address, TokenTransferExecutionContext, + TokenTransferValidationContext, + }, + error::TokenTransferError, + PrefixedCoin, + }, + clients::ics07_tendermint::{ + client_state::ClientState as TmClientState, + consensus_state::ConsensusState as TmConsensusState, + }, + core::{ + ics02_client::{ + client_state::ClientState, consensus_state::ConsensusState, error::ClientError, + }, + ics03_connection::{connection::ConnectionEnd, error::ConnectionError}, + ics04_channel::{ + channel::{ChannelEnd, Counterparty, Order}, + commitment::PacketCommitment, + context::{SendPacketExecutionContext, SendPacketValidationContext}, + error::{ChannelError, PacketError}, + handler::ModuleExtras, + 
msgs::acknowledgement::Acknowledgement, + packet::{Packet, Sequence}, + timeout::TimeoutHeight, + Version as ChannelVersion, + }, + ics24_host::{ + identifier::{ChannelId, ClientId, ConnectionId}, + path::{ + ChannelEndPath, ClientConsensusStatePath, ClientStatePath, CommitmentPath, + ConnectionPath, + }, + }, + ics26_routing::context::Module as IbcModule, + ContextError, + }, + events::IbcEvent, + signer::Signer, + timestamp::Timestamp, + Height as IbcHeight, +}; +use ibc_proto::{ + google::protobuf::Any, + ibc::core::{ + channel::v1::Channel as RawChannelEnd, connection::v1::ConnectionEnd as RawConnectionEnd, + }, +}; +use sha2::Digest; + +use ibc::applications::transfer::context::{ + on_acknowledgement_packet_validate, on_chan_open_ack_validate, on_chan_open_confirm_validate, + on_chan_open_init_execute, on_chan_open_init_validate, on_chan_open_try_execute, + on_chan_open_try_validate, on_recv_packet_execute, on_timeout_packet_execute, + on_timeout_packet_validate, +}; + +use super::router::IbcModuleWrapper; + +#[derive(Clone, Debug)] +pub struct IbcTransferModule { + // store: SharedStore, + /// A bank keeper to enable sending, minting and burning of tokens + bank_keeper: BK, + /// A typed-store for AnyClientState + client_state_store: ProtobufStore, ClientStatePath, TmClientState, Any>, + /// A typed-store for AnyConsensusState + consensus_state_store: + ProtobufStore, ClientConsensusStatePath, TmConsensusState, Any>, + /// A typed-store for ConnectionEnd + connection_end_store: + ProtobufStore, ConnectionPath, ConnectionEnd, RawConnectionEnd>, + /// A typed-store for ChannelEnd + channel_end_store: ProtobufStore, ChannelEndPath, ChannelEnd, RawChannelEnd>, + /// A typed-store for send sequences + send_sequence_store: JsonStore, SeqSendPath, Sequence>, + /// A typed-store for packet commitments + packet_commitment_store: BinStore, CommitmentPath, PacketCommitment>, + + pub events: Vec, + + log: Vec, +} + +impl> + IbcTransferModule +{ + pub fn new(store: 
SharedStore, bank_keeper: BK) -> Self { + Self { + bank_keeper, + client_state_store: TypedStore::new(store.clone()), + consensus_state_store: TypedStore::new(store.clone()), + connection_end_store: TypedStore::new(store.clone()), + channel_end_store: TypedStore::new(store.clone()), + send_sequence_store: TypedStore::new(store.clone()), + packet_commitment_store: TypedStore::new(store), + events: Vec::new(), + log: Vec::new(), + } + } +} + +impl> + IbcModule for IbcTransferModule +{ + #[allow(clippy::too_many_arguments)] + fn on_chan_open_init_validate( + &self, + order: Order, + connection_hops: &[ConnectionId], + port_id: &PortId, + channel_id: &ChannelId, + counterparty: &Counterparty, + version: &ChannelVersion, + ) -> Result { + on_chan_open_init_validate( + self, + order, + connection_hops, + port_id, + channel_id, + counterparty, + version, + ) + .map_err(|e: TokenTransferError| ChannelError::AppModule { + description: e.to_string(), + })?; + Ok(ChannelVersion::new(VERSION.to_string())) + } + + #[allow(clippy::too_many_arguments)] + fn on_chan_open_init_execute( + &mut self, + order: Order, + connection_hops: &[ConnectionId], + port_id: &PortId, + channel_id: &ChannelId, + counterparty: &Counterparty, + version: &ChannelVersion, + ) -> Result<(ModuleExtras, ChannelVersion), ChannelError> { + on_chan_open_init_execute( + self, + order, + connection_hops, + port_id, + channel_id, + counterparty, + version, + ) + .map_err(|e: TokenTransferError| ChannelError::AppModule { + description: e.to_string(), + }) + } + + #[allow(clippy::too_many_arguments)] + fn on_chan_open_try_validate( + &self, + order: Order, + connection_hops: &[ConnectionId], + port_id: &PortId, + channel_id: &ChannelId, + counterparty: &Counterparty, + counterparty_version: &ChannelVersion, + ) -> Result { + on_chan_open_try_validate( + self, + order, + connection_hops, + port_id, + channel_id, + counterparty, + counterparty_version, + ) + .map_err(|e: TokenTransferError| ChannelError::AppModule 
{ + description: e.to_string(), + })?; + Ok(ChannelVersion::new(VERSION.to_string())) + } + + #[allow(clippy::too_many_arguments)] + fn on_chan_open_try_execute( + &mut self, + order: Order, + connection_hops: &[ConnectionId], + port_id: &PortId, + channel_id: &ChannelId, + counterparty: &Counterparty, + counterparty_version: &ChannelVersion, + ) -> Result<(ModuleExtras, ChannelVersion), ChannelError> { + on_chan_open_try_execute( + self, + order, + connection_hops, + port_id, + channel_id, + counterparty, + counterparty_version, + ) + .map_err(|e: TokenTransferError| ChannelError::AppModule { + description: e.to_string(), + }) + } + + fn on_chan_open_ack_validate( + &self, + port_id: &PortId, + channel_id: &ChannelId, + counterparty_version: &ChannelVersion, + ) -> Result<(), ChannelError> { + on_chan_open_ack_validate(self, port_id, channel_id, counterparty_version).map_err( + |e: TokenTransferError| ChannelError::AppModule { + description: e.to_string(), + }, + ) + } + + fn on_chan_open_ack_execute( + &mut self, + _port_id: &PortId, + _channel_id: &ChannelId, + _counterparty_version: &ChannelVersion, + ) -> Result { + Ok(ModuleExtras::empty()) + } + + fn on_chan_open_confirm_validate( + &self, + port_id: &PortId, + channel_id: &ChannelId, + ) -> Result<(), ChannelError> { + on_chan_open_confirm_validate(self, port_id, channel_id).map_err(|e: TokenTransferError| { + ChannelError::AppModule { + description: e.to_string(), + } + }) + } + + fn on_chan_open_confirm_execute( + &mut self, + _port_id: &PortId, + _channel_id: &ChannelId, + ) -> Result { + Ok(ModuleExtras::empty()) + } + + fn on_chan_close_init_validate( + &self, + _port_id: &PortId, + _channel_id: &ChannelId, + ) -> Result<(), ChannelError> { + Ok(()) + } + + fn on_chan_close_init_execute( + &mut self, + _port_id: &PortId, + _channel_id: &ChannelId, + ) -> Result { + Ok(ModuleExtras::empty()) + } + + fn on_chan_close_confirm_validate( + &self, + _port_id: &PortId, + _channel_id: &ChannelId, + ) -> 
Result<(), ChannelError> { + Ok(()) + } + + fn on_chan_close_confirm_execute( + &mut self, + _port_id: &PortId, + _channel_id: &ChannelId, + ) -> Result { + Ok(ModuleExtras::empty()) + } + + fn on_recv_packet_execute( + &mut self, + packet: &Packet, + _relayer: &Signer, + ) -> (ModuleExtras, Acknowledgement) { + on_recv_packet_execute(self, packet) + } + + fn on_acknowledgement_packet_validate( + &self, + packet: &Packet, + acknowledgement: &Acknowledgement, + relayer: &Signer, + ) -> Result<(), PacketError> { + on_acknowledgement_packet_validate(self, packet, acknowledgement, relayer).map_err( + |e: TokenTransferError| PacketError::AppModule { + description: e.to_string(), + }, + ) + } + + fn on_acknowledgement_packet_execute( + &mut self, + _packet: &Packet, + _acknowledgement: &Acknowledgement, + _relayer: &Signer, + ) -> (ModuleExtras, Result<(), PacketError>) { + (ModuleExtras::empty(), Ok(())) + } + + /// Note: `MsgTimeout` and `MsgTimeoutOnClose` use the same callback + fn on_timeout_packet_validate( + &self, + packet: &Packet, + relayer: &Signer, + ) -> Result<(), PacketError> { + on_timeout_packet_validate(self, packet, relayer).map_err(|e: TokenTransferError| { + PacketError::AppModule { + description: e.to_string(), + } + }) + } + + /// Note: `MsgTimeout` and `MsgTimeoutOnClose` use the same callback + fn on_timeout_packet_execute( + &mut self, + packet: &Packet, + relayer: &Signer, + ) -> (ModuleExtras, Result<(), PacketError>) { + let res = on_timeout_packet_execute(self, packet, relayer); + ( + res.0, + res.1 + .map_err(|e: TokenTransferError| PacketError::AppModule { + description: e.to_string(), + }), + ) + } +} + +impl + Send + Sync + Debug + 'static> + IbcModuleWrapper for IbcTransferModule +{ + fn as_ibc_module(&self) -> &dyn IbcModule { + self + } + + fn as_ibc_module_mut(&mut self) -> &mut dyn IbcModule { + self + } +} + +impl> TokenTransferExecutionContext + for IbcTransferModule +{ + fn send_coins( + &mut self, + from: &Self::AccountId, + to: 
&Self::AccountId, + amt: &PrefixedCoin, + ) -> Result<(), TokenTransferError> { + let from = from + .to_string() + .parse() + .map_err(|_| TokenTransferError::ParseAccountFailure)?; + let to = to + .to_string() + .parse() + .map_err(|_| TokenTransferError::ParseAccountFailure)?; + let coins = vec![Coin { + denom: Denom(amt.denom.to_string()), + amount: amt.amount.into(), + }]; + self.bank_keeper.send_coins(from, to, coins).unwrap(); // Fixme(hu55a1n1) + Ok(()) + } + + fn mint_coins( + &mut self, + account: &Self::AccountId, + amt: &PrefixedCoin, + ) -> Result<(), TokenTransferError> { + let account = account + .to_string() + .parse() + .map_err(|_| TokenTransferError::ParseAccountFailure)?; + let coins = vec![Coin { + denom: Denom(amt.denom.to_string()), + amount: amt.amount.into(), + }]; + self.bank_keeper.mint_coins(account, coins).unwrap(); // Fixme(hu55a1n1) + Ok(()) + } + + fn burn_coins( + &mut self, + account: &Self::AccountId, + amt: &PrefixedCoin, + ) -> Result<(), TokenTransferError> { + let account = account + .to_string() + .parse() + .map_err(|_| TokenTransferError::ParseAccountFailure)?; + let coins = vec![Coin { + denom: Denom(amt.denom.to_string()), + amount: amt.amount.into(), + }]; + self.bank_keeper.burn_coins(account, coins).unwrap(); // Fixme(hu55a1n1) + Ok(()) + } +} + +impl TokenTransferValidationContext for IbcTransferModule { + type AccountId = Signer; + + fn get_port(&self) -> Result { + Ok(PortId::transfer()) + } + + fn get_channel_escrow_address( + &self, + port_id: &PortId, + channel_id: &ChannelId, + ) -> Result { + let account_id = AccountId::new( + ACCOUNT_PREFIX, + &cosmos_adr028_escrow_address(port_id, channel_id), + ) + .map_err(|_| TokenTransferError::ParseAccountFailure)?; + account_id + .to_string() + .parse() + .map_err(|_| TokenTransferError::ParseAccountFailure) + } + + fn is_send_enabled(&self) -> bool { + true + } + + fn is_receive_enabled(&self) -> bool { + true + } +} + +impl SendPacketValidationContext for 
IbcTransferModule { + fn channel_end(&self, channel_end_path: &ChannelEndPath) -> Result { + self.channel_end_store + .get(Height::Pending, channel_end_path) + .ok_or(ContextError::ChannelError(ChannelError::ChannelNotFound { + port_id: channel_end_path.0.clone(), + channel_id: channel_end_path.1.clone(), + })) + } + + fn connection_end(&self, connection_id: &ConnectionId) -> Result { + self.connection_end_store + .get(Height::Pending, &ConnectionPath::new(connection_id)) + .ok_or(ContextError::ConnectionError( + ConnectionError::ConnectionNotFound { + connection_id: connection_id.clone(), + }, + )) + } + + fn client_state(&self, client_id: &ClientId) -> Result, ContextError> { + self.client_state_store + .get(Height::Pending, &ClientStatePath::new(client_id)) + .ok_or(ContextError::ClientError(ClientError::ClientNotFound { + client_id: client_id.clone(), + })) + .map(|cs| Box::new(cs) as Box) + } + + fn client_consensus_state( + &self, + client_cons_state_path: &ClientConsensusStatePath, + ) -> Result, ContextError> { + let height = IbcHeight::new(client_cons_state_path.epoch, client_cons_state_path.height) + .map_err(|_| ContextError::ClientError(ClientError::InvalidHeight))?; + self.consensus_state_store + .get(Height::Pending, client_cons_state_path) + .ok_or(ContextError::ClientError( + ClientError::ConsensusStateNotFound { + client_id: client_cons_state_path.client_id.clone(), + height, + }, + )) + .map(|cs| Box::new(cs) as Box) + } + + fn get_next_sequence_send( + &self, + seq_send_path: &SeqSendPath, + ) -> Result { + self.send_sequence_store + .get(Height::Pending, seq_send_path) + .ok_or(ContextError::PacketError(PacketError::MissingNextSendSeq { + port_id: seq_send_path.0.clone(), + channel_id: seq_send_path.1.clone(), + })) + } + + fn hash(&self, value: &[u8]) -> Vec { + sha2::Sha256::digest(value).to_vec() + } + + fn compute_packet_commitment( + &self, + packet_data: &[u8], + timeout_height: &TimeoutHeight, + timeout_timestamp: &Timestamp, + ) -> 
PacketCommitment { + // copy/pasted for now; see https://github.com/cosmos/ibc-rs/issues/470 + let mut hash_input = timeout_timestamp.nanoseconds().to_be_bytes().to_vec(); + + let revision_number = timeout_height.commitment_revision_number().to_be_bytes(); + hash_input.append(&mut revision_number.to_vec()); + + let revision_height = timeout_height.commitment_revision_height().to_be_bytes(); + hash_input.append(&mut revision_height.to_vec()); + + let packet_data_hash = self.hash(packet_data); + hash_input.append(&mut packet_data_hash.to_vec()); + + self.hash(&hash_input).into() + } +} + +impl> SendPacketExecutionContext + for IbcTransferModule +{ + fn store_packet_commitment( + &mut self, + commitment_path: &CommitmentPath, + commitment: PacketCommitment, + ) -> Result<(), ContextError> { + self.packet_commitment_store + .set(commitment_path.clone(), commitment) + .map_err(|_| PacketError::ImplementationSpecific)?; + Ok(()) + } + + fn store_next_sequence_send( + &mut self, + seq_send_path: &SeqSendPath, + seq: Sequence, + ) -> Result<(), ContextError> { + self.send_sequence_store + .set(seq_send_path.clone(), seq) + .map_err(|_| PacketError::ImplementationSpecific)?; + Ok(()) + } + + fn emit_ibc_event(&mut self, event: IbcEvent) { + self.events.push(event) + } + + fn log_message(&mut self, message: String) { + self.log.push(message) + } +} diff --git a/src/modules/mod.rs b/src/modules/mod.rs new file mode 100644 index 00000000..0073e1a2 --- /dev/null +++ b/src/modules/mod.rs @@ -0,0 +1,13 @@ +pub(crate) mod auth; +pub(crate) mod bank; +pub(crate) mod ibc; +pub(crate) mod staking; + +pub mod module; +pub mod types; + +pub use self::ibc::{impls::Ibc, transfer::IbcTransferModule}; +pub use auth::impls::Auth; +pub use bank::impls::Bank; +pub use module::{prefix, Identifiable, Module}; +pub use staking::impls::Staking; diff --git a/src/app/modules/mod.rs b/src/modules/module.rs similarity index 73% rename from src/app/modules/mod.rs rename to src/modules/module.rs index 
5df79d43..0a9fad4b 100644 --- a/src/app/modules/mod.rs +++ b/src/modules/module.rs @@ -1,43 +1,12 @@ -mod auth; -mod bank; -mod ibc; -mod staking; - -pub(crate) use self::{ - auth::{Auth, ACCOUNT_PREFIX}, - bank::Bank, - ibc::{Ibc, IbcTransferModule}, - staking::Staking, -}; -use crate::app::store::{self, Height, Path, SharedStore}; -use ::ibc::core::ContextError; +use crate::error::Error; +use crate::helper::{Height, Identifier as StoreIdentifier, Path, QueryResult}; +use crate::store::impls::SharedStore; use cosmrs::AccountId; -use displaydoc::Display; use ibc_proto::google::protobuf::Any; use tendermint::block::Header; -use tendermint_proto::{abci::Event, crypto::ProofOp}; - -#[derive(Debug, Display)] -pub enum Error { - /// no module could handle specified message - NotHandled, - /// custom error: `{reason}` - Custom { reason: String }, - /// store error - Store(store::Error), - /// bank module error - Bank(bank::Error), - /// IBC module error - Ibc(ibc::Error), -} - -impl From for Error { - fn from(error: ContextError) -> Self { - Self::Ibc(error.into()) - } -} +use tendermint_proto::abci::Event; -pub(crate) trait Module: Send + Sync { +pub trait Module: Send + Sync { /// The module's store type. 
type Store; @@ -97,32 +66,27 @@ pub(crate) trait Module: Send + Sync { fn store(&self) -> &SharedStore; } -pub(crate) struct QueryResult { - pub(crate) data: Vec, - pub(crate) proof: Option>, -} - /// Trait for identifying modules /// This is used to get `Module` prefixes that are used for creating prefixed key-space proxy-stores -pub(crate) trait Identifiable { - type Identifier: Into; +pub trait Identifiable { + type Identifier: Into; /// Return an identifier fn identifier(&self) -> Self::Identifier; } -pub(crate) mod prefix { +pub mod prefix { use core::convert::TryInto; use super::Identifiable; - use crate::app::store; + use crate::helper::Identifier as StoreIdentifier; /// Bank module prefix #[derive(Clone)] - pub(crate) struct Bank; + pub struct Bank; impl Identifiable for Bank { - type Identifier = store::Identifier; + type Identifier = StoreIdentifier; fn identifier(&self) -> Self::Identifier { "bank".to_owned().try_into().unwrap() @@ -131,10 +95,10 @@ pub(crate) mod prefix { /// Ibc module prefix #[derive(Clone)] - pub(crate) struct Ibc; + pub struct Ibc; impl Identifiable for Ibc { - type Identifier = store::Identifier; + type Identifier = StoreIdentifier; fn identifier(&self) -> Self::Identifier { "ibc".to_owned().try_into().unwrap() @@ -143,10 +107,10 @@ pub(crate) mod prefix { /// Auth module prefix #[derive(Clone)] - pub(crate) struct Auth; + pub struct Auth; impl Identifiable for Auth { - type Identifier = store::Identifier; + type Identifier = StoreIdentifier; fn identifier(&self) -> Self::Identifier { "auth".to_owned().try_into().unwrap() @@ -155,10 +119,10 @@ pub(crate) mod prefix { /// Staking module prefix #[derive(Clone)] - pub(crate) struct Staking; + pub struct Staking; impl Identifiable for Staking { - type Identifier = store::Identifier; + type Identifier = StoreIdentifier; fn identifier(&self) -> Self::Identifier { "staking".to_owned().try_into().unwrap() diff --git a/src/modules/staking/impls.rs b/src/modules/staking/impls.rs new file 
mode 100644 index 00000000..468a49f1 --- /dev/null +++ b/src/modules/staking/impls.rs @@ -0,0 +1,19 @@ +use std::marker::PhantomData; + +use ibc_proto::cosmos::staking::v1beta1::query_server::QueryServer; + +use crate::store::{ProvableStore, SharedStore}; + +use super::service::StakingService; + +pub struct Staking(PhantomData); + +impl Staking { + pub fn new(_store: SharedStore) -> Self { + Self(PhantomData) + } + + pub fn service(&self) -> QueryServer> { + QueryServer::new(StakingService(PhantomData)) + } +} diff --git a/src/modules/staking/mod.rs b/src/modules/staking/mod.rs new file mode 100644 index 00000000..0042c2da --- /dev/null +++ b/src/modules/staking/mod.rs @@ -0,0 +1,2 @@ +pub mod impls; +pub mod service; diff --git a/src/app/modules/staking.rs b/src/modules/staking/service.rs similarity index 78% rename from src/app/modules/staking.rs rename to src/modules/staking/service.rs index 3009db56..c00fb521 100644 --- a/src/app/modules/staking.rs +++ b/src/modules/staking/service.rs @@ -2,14 +2,14 @@ use std::marker::PhantomData; use ibc_proto::{ cosmos::staking::v1beta1::{ - query_server::{Query, QueryServer}, - Params, QueryDelegationRequest, QueryDelegationResponse, QueryDelegatorDelegationsRequest, - QueryDelegatorDelegationsResponse, QueryDelegatorUnbondingDelegationsRequest, - QueryDelegatorUnbondingDelegationsResponse, QueryDelegatorValidatorRequest, - QueryDelegatorValidatorResponse, QueryDelegatorValidatorsRequest, - QueryDelegatorValidatorsResponse, QueryHistoricalInfoRequest, QueryHistoricalInfoResponse, - QueryParamsRequest, QueryParamsResponse, QueryPoolRequest, QueryPoolResponse, - QueryRedelegationsRequest, QueryRedelegationsResponse, QueryUnbondingDelegationRequest, + query_server::Query, Params, QueryDelegationRequest, QueryDelegationResponse, + QueryDelegatorDelegationsRequest, QueryDelegatorDelegationsResponse, + QueryDelegatorUnbondingDelegationsRequest, QueryDelegatorUnbondingDelegationsResponse, + QueryDelegatorValidatorRequest, 
QueryDelegatorValidatorResponse, + QueryDelegatorValidatorsRequest, QueryDelegatorValidatorsResponse, + QueryHistoricalInfoRequest, QueryHistoricalInfoResponse, QueryParamsRequest, + QueryParamsResponse, QueryPoolRequest, QueryPoolResponse, QueryRedelegationsRequest, + QueryRedelegationsResponse, QueryUnbondingDelegationRequest, QueryUnbondingDelegationResponse, QueryValidatorDelegationsRequest, QueryValidatorDelegationsResponse, QueryValidatorRequest, QueryValidatorResponse, QueryValidatorUnbondingDelegationsRequest, QueryValidatorUnbondingDelegationsResponse, @@ -17,24 +17,13 @@ use ibc_proto::{ }, google::protobuf::Duration, }; + use tonic::{Request, Response, Status}; use tracing::debug; -use crate::app::store::{ProvableStore, SharedStore}; - -pub struct Staking(PhantomData); - -impl Staking { - pub fn new(_store: SharedStore) -> Self { - Self(PhantomData) - } - - pub fn service(&self) -> QueryServer> { - QueryServer::new(StakingService(PhantomData)) - } -} +use crate::store::ProvableStore; -pub struct StakingService(PhantomData); +pub struct StakingService(pub PhantomData); #[tonic::async_trait] impl Query for StakingService { diff --git a/src/modules/types.rs b/src/modules/types.rs new file mode 100644 index 00000000..b2b794b9 --- /dev/null +++ b/src/modules/types.rs @@ -0,0 +1,10 @@ +use super::Module; +use crate::{helper::Identifier, store::RevertibleStore}; + +pub(crate) type ModuleList = Vec>; +pub(crate) type ModuleStore = RevertibleStore; + +pub struct IdentifiedModule { + pub id: Identifier, + pub module: Box>>, +} diff --git a/src/app/store/avl/as_bytes.rs b/src/store/avl/as_bytes.rs similarity index 100% rename from src/app/store/avl/as_bytes.rs rename to src/store/avl/as_bytes.rs diff --git a/src/app/store/avl/mod.rs b/src/store/avl/mod.rs similarity index 100% rename from src/app/store/avl/mod.rs rename to src/store/avl/mod.rs diff --git a/src/app/store/avl/node.rs b/src/store/avl/node.rs similarity index 98% rename from src/app/store/avl/node.rs 
rename to src/store/avl/node.rs index b8b72a4d..e3f1cec9 100644 --- a/src/app/store/avl/node.rs +++ b/src/store/avl/node.rs @@ -3,7 +3,7 @@ use std::{borrow::Borrow, mem}; use sha2::{Digest, Sha256}; use tendermint::hash::Hash; -use crate::app::store::avl::{as_bytes::AsBytes, proof, HASH_ALGO}; +use crate::store::avl::{as_bytes::AsBytes, proof, HASH_ALGO}; pub type NodeRef = Option>>; diff --git a/src/app/store/avl/proof.rs b/src/store/avl/proof.rs similarity index 100% rename from src/app/store/avl/proof.rs rename to src/store/avl/proof.rs diff --git a/src/app/store/avl/tests.rs b/src/store/avl/tests.rs similarity index 99% rename from src/app/store/avl/tests.rs rename to src/store/avl/tests.rs index 37332a98..60ceca7b 100644 --- a/src/app/store/avl/tests.rs +++ b/src/store/avl/tests.rs @@ -3,7 +3,7 @@ use ics23::{commitment_proof::Proof, verify_membership, HostFunctionsManager}; use sha2::{Digest, Sha256}; -use crate::app::store::avl::{ +use crate::store::avl::{ node::{as_node_ref, NodeRef}, tree::AvlTree, *, diff --git a/src/app/store/avl/tree.rs b/src/store/avl/tree.rs similarity index 99% rename from src/app/store/avl/tree.rs rename to src/store/avl/tree.rs index 9a8b498d..54db5327 100644 --- a/src/app/store/avl/tree.rs +++ b/src/store/avl/tree.rs @@ -13,7 +13,7 @@ use ics23::{ }; use tendermint::hash::Hash; -use crate::app::store::avl::{ +use crate::store::avl::{ node::{as_node_ref, NodeRef}, proof, AsBytes, }; diff --git a/src/store/codec.rs b/src/store/codec.rs new file mode 100644 index 00000000..36aa3fcd --- /dev/null +++ b/src/store/codec.rs @@ -0,0 +1,98 @@ +use serde::{de::DeserializeOwned, Serialize}; +use std::marker::PhantomData; + +/// A trait that defines how types are decoded/encoded. 
+pub trait Codec { + type Type; + type Encoded: AsRef<[u8]>; + + fn encode(d: &Self::Type) -> Option; + + fn decode(bytes: &[u8]) -> Option; +} + +/// A JSON codec that uses `serde_json` to encode/decode as a JSON string +#[derive(Clone, Debug)] +pub struct JsonCodec(PhantomData); + +impl Codec for JsonCodec +where + T: Serialize + DeserializeOwned, +{ + type Type = T; + type Encoded = String; + + fn encode(d: &Self::Type) -> Option { + serde_json::to_string(d).ok() + } + + fn decode(bytes: &[u8]) -> Option { + let json_string = String::from_utf8(bytes.to_vec()).ok()?; + serde_json::from_str(&json_string).ok() + } +} + +/// A Null codec that can be used for paths that are only meant to be set/reset and do not hold any +/// typed value. +#[derive(Clone)] +pub struct NullCodec; + +impl Codec for NullCodec { + type Type = (); + type Encoded = Vec; + + fn encode(_d: &Self::Type) -> Option { + Some(vec![]) + } + + fn decode(bytes: &[u8]) -> Option { + assert!(bytes.is_empty()); + Some(()) + } +} + +/// A Protobuf codec that uses `prost` to encode/decode +#[derive(Clone, Debug)] +pub struct ProtobufCodec { + domain_type: PhantomData, + raw_type: PhantomData, +} + +impl Codec for ProtobufCodec +where + T: Into + Clone, + R: TryInto + Default + prost::Message, +{ + type Type = T; + type Encoded = Vec; + + fn encode(d: &Self::Type) -> Option { + let r = d.clone().into(); + Some(r.encode_to_vec()) + } + + fn decode(bytes: &[u8]) -> Option { + let r = R::decode(bytes).ok()?; + r.try_into().ok() + } +} + +/// A binary codec that uses `AsRef<[u8]>` and `From>` to encode and decode respectively. 
+#[derive(Clone, Debug)] +pub struct BinCodec(PhantomData); + +impl Codec for BinCodec +where + T: AsRef<[u8]> + From>, +{ + type Type = T; + type Encoded = Vec; + + fn encode(d: &Self::Type) -> Option { + Some(d.as_ref().to_vec()) + } + + fn decode(bytes: &[u8]) -> Option { + Some(bytes.to_vec().into()) + } +} diff --git a/src/store/context.rs b/src/store/context.rs new file mode 100644 index 00000000..50c96a54 --- /dev/null +++ b/src/store/context.rs @@ -0,0 +1,49 @@ +use crate::helper::{Height, Path, RawHeight}; +use ics23::CommitmentProof; +use std::fmt::Debug; + +/// Store trait - maybe provableStore or privateStore +pub trait Store: Send + Sync + Clone { + /// Error type - expected to envelope all possible errors in store + type Error: Debug; + + /// Set `value` for `path` + fn set(&mut self, path: Path, value: Vec) -> Result>, Self::Error>; + + /// Get associated `value` for `path` at specified `height` + fn get(&self, height: Height, path: &Path) -> Option>; + + /// Delete specified `path` + fn delete(&mut self, path: &Path); + + /// Commit `Pending` block to canonical chain and create new `Pending` + fn commit(&mut self) -> Result, Self::Error>; + + /// Apply accumulated changes to `Pending` + fn apply(&mut self) -> Result<(), Self::Error> { + Ok(()) + } + + /// Reset accumulated changes + fn reset(&mut self) {} + + /// Prune historic blocks upto specified `height` + fn prune(&mut self, height: RawHeight) -> Result { + Ok(height) + } + + /// Return the current height of the chain + fn current_height(&self) -> RawHeight; + + /// Return all keys that start with specified prefix + fn get_keys(&self, key_prefix: &Path) -> Vec; // TODO(hu55a1n1): implement support for all heights +} + +/// ProvableStore trait +pub trait ProvableStore: Store { + /// Return a vector commitment + fn root_hash(&self) -> Vec; + + /// Return proof of existence for key + fn get_proof(&self, height: Height, key: &Path) -> Option; +} diff --git a/src/store/impls.rs b/src/store/impls.rs 
new file mode 100644 index 00000000..ab0e5b3f --- /dev/null +++ b/src/store/impls.rs @@ -0,0 +1,228 @@ +use super::context::{ProvableStore, Store}; +use crate::helper::{Height, Path, RawHeight}; +use ics23::CommitmentProof; +use std::{ + ops::{Deref, DerefMut}, + sync::{Arc, RwLock}, +}; +use tracing::trace; + +/// Wraps a store to make it shareable by cloning +#[derive(Clone, Debug)] +pub struct SharedStore(Arc>); + +impl SharedStore { + pub fn new(store: S) -> Self { + Self(Arc::new(RwLock::new(store))) + } + + pub fn share(&self) -> Self { + Self(self.0.clone()) + } +} + +impl Default for SharedStore +where + S: Default + Store, +{ + fn default() -> Self { + Self::new(S::default()) + } +} + +impl Store for SharedStore +where + S: Store, +{ + type Error = S::Error; + + #[inline] + fn set(&mut self, path: Path, value: Vec) -> Result>, Self::Error> { + self.write().unwrap().set(path, value) + } + + #[inline] + fn get(&self, height: Height, path: &Path) -> Option> { + self.read().unwrap().get(height, path) + } + + #[inline] + fn delete(&mut self, path: &Path) { + self.write().unwrap().delete(path) + } + + #[inline] + fn commit(&mut self) -> Result, Self::Error> { + self.write().unwrap().commit() + } + + #[inline] + fn apply(&mut self) -> Result<(), Self::Error> { + self.write().unwrap().apply() + } + + #[inline] + fn reset(&mut self) { + self.write().unwrap().reset() + } + + #[inline] + fn current_height(&self) -> RawHeight { + self.read().unwrap().current_height() + } + + #[inline] + fn get_keys(&self, key_prefix: &Path) -> Vec { + self.read().unwrap().get_keys(key_prefix) + } +} + +impl ProvableStore for SharedStore +where + S: ProvableStore, +{ + #[inline] + fn root_hash(&self) -> Vec { + self.read().unwrap().root_hash() + } + + #[inline] + fn get_proof(&self, height: Height, key: &Path) -> Option { + self.read().unwrap().get_proof(height, key) + } +} + +impl Deref for SharedStore { + type Target = Arc>; + + fn deref(&self) -> &Self::Target { + &self.0 + } +} + 
+impl DerefMut for SharedStore { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.0 + } +} + +/// A wrapper store that implements rudimentary `apply()`/`reset()` support for other stores +#[derive(Clone, Debug)] +pub struct RevertibleStore { + /// backing store + store: S, + /// operation log for recording rollback operations in preserved order + op_log: Vec, +} + +#[derive(Clone, Debug)] +enum RevertOp { + Delete(Path), + Set(Path, Vec), +} + +impl RevertibleStore +where + S: Store, +{ + pub fn new(store: S) -> Self { + Self { + store, + op_log: vec![], + } + } +} + +impl Default for RevertibleStore +where + S: Default + Store, +{ + fn default() -> Self { + Self::new(S::default()) + } +} + +impl Store for RevertibleStore +where + S: Store, +{ + type Error = S::Error; + + #[inline] + fn set(&mut self, path: Path, value: Vec) -> Result>, Self::Error> { + let old_value = self.store.set(path.clone(), value)?; + match old_value { + // None implies this was an insert op, so we record the revert op as delete op + None => self.op_log.push(RevertOp::Delete(path)), + // Some old value implies this was an update op, so we record the revert op as a set op + // with the old value + Some(ref old_value) => self.op_log.push(RevertOp::Set(path, old_value.clone())), + } + Ok(old_value) + } + + #[inline] + fn get(&self, height: Height, path: &Path) -> Option> { + self.store.get(height, path) + } + + #[inline] + fn delete(&mut self, _path: &Path) { + unimplemented!("RevertibleStore doesn't support delete operations yet!") + } + + #[inline] + fn commit(&mut self) -> Result, Self::Error> { + // call `apply()` before `commit()` to make sure all operations are applied + self.apply()?; + self.store.commit() + } + + #[inline] + fn apply(&mut self) -> Result<(), Self::Error> { + // note that we do NOT call the backing store's apply here - this allows users to create + // multilayered `WalStore`s + self.op_log.clear(); + Ok(()) + } + + #[inline] + fn reset(&mut self) { + // note 
that we do NOT call the backing store's reset here - this allows users to create
+        // multilayered `WalStore`s
+        trace!("Rollback operation log changes");
+        while let Some(op) = self.op_log.pop() {
+            match op {
+                RevertOp::Delete(path) => self.delete(&path),
+                RevertOp::Set(path, value) => {
+                    self.set(path, value).unwrap(); // safety - reset failures are unrecoverable
+                }
+            }
+        }
+    }
+
+    #[inline]
+    fn current_height(&self) -> u64 {
+        self.store.current_height()
+    }
+
+    #[inline]
+    fn get_keys(&self, key_prefix: &Path) -> Vec<Path> {
+        self.store.get_keys(key_prefix)
+    }
+}
+
+impl<S> ProvableStore for RevertibleStore<S>
+where
+    S: ProvableStore,
+{
+    #[inline]
+    fn root_hash(&self) -> Vec<u8> {
+        self.store.root_hash()
+    }
+
+    #[inline]
+    fn get_proof(&self, height: Height, key: &Path) -> Option<CommitmentProof> {
+        self.store.get_proof(height, key)
+    }
+}
diff --git a/src/app/store/memory.rs b/src/store/memory.rs
similarity index 86%
rename from src/app/store/memory.rs
rename to src/store/memory.rs
index 1ed1f24b..83ced601 100644
--- a/src/app/store/memory.rs
+++ b/src/store/memory.rs
@@ -2,18 +2,16 @@ use ics23::CommitmentProof;
 use tendermint::{hash::Algorithm, Hash};
 use tracing::trace;
 
-use crate::app::store::{
-    avl::{AsBytes, AvlTree, ByteSlice},
-    Height, Path, ProvableStore, Store,
+use super::State;
+use crate::helper::{Height, Path};
+use crate::store::{
+    avl::{AsBytes, AvlTree},
+    context::{ProvableStore, Store},
 };
 
-// A state type that represents a snapshot of the store at every block.
-// The value is a `Vec<u8>` to allow stored types to choose their own serde.
-type State = AvlTree<Path, Vec<u8>>;
-
 /// An in-memory store backed by an AvlTree. 
 #[derive(Clone, Debug)]
-pub(crate) struct InMemoryStore {
+pub struct InMemoryStore {
     /// collection of states corresponding to every committed block height
     store: Vec<State>,
     /// pending block state
@@ -113,10 +111,4 @@ impl ProvableStore for InMemoryStore {
     }
 }
 
-impl AsBytes for Path {
-    fn as_bytes(&self) -> ByteSlice<'_> {
-        ByteSlice::Vector(self.to_string().into_bytes())
-    }
-}
-
 // TODO(hu55a1n1): import tests
diff --git a/src/store/mod.rs b/src/store/mod.rs
new file mode 100644
index 00000000..68db592b
--- /dev/null
+++ b/src/store/mod.rs
@@ -0,0 +1,14 @@
+pub(crate) mod avl;
+pub(crate) mod codec;
+mod context;
+pub(crate) mod impls;
+pub mod memory;
+mod types;
+
+pub use codec::Codec;
+pub use context::{ProvableStore, Store};
+pub(crate) use impls::{RevertibleStore, SharedStore};
+pub use memory::InMemoryStore;
+pub(crate) use types::{
+    BinStore, JsonStore, MainStore, ProtobufStore, SharedRw, State, TypedSet, TypedStore,
+};
diff --git a/src/store/types.rs b/src/store/types.rs
new file mode 100644
index 00000000..ea729ce9
--- /dev/null
+++ b/src/store/types.rs
@@ -0,0 +1,92 @@
+use super::{
+    codec::{BinCodec, Codec, JsonCodec, NullCodec, ProtobufCodec},
+    context::Store,
+    RevertibleStore, SharedStore,
+};
+use crate::helper::{Height, Path, RawHeight};
+use crate::store::avl::AvlTree;
+use std::sync::{Arc, RwLock};
+use std::{fmt::Debug, marker::PhantomData};
+
+// A state type that represents a snapshot of the store at every block.
+// The value is a `Vec<u8>` to allow stored types to choose their own serde. 
+pub type State = AvlTree<Path, Vec<u8>>;
+
+pub type MainStore<S> = SharedStore<RevertibleStore<S>>;
+
+pub type SharedRw<S> = Arc<RwLock<S>>;
+
+/// A `TypedStore` that uses the `JsonCodec`
+pub type JsonStore<S, K, V> = TypedStore<S, K, JsonCodec<V>>;
+
+/// A `TypedStore` that uses the `ProtobufCodec`
+pub type ProtobufStore<S, K, V, R> = TypedStore<S, K, ProtobufCodec<V, R>>;
+
+/// A `TypedSet` that stores only paths and no values
+pub type TypedSet<S, K> = TypedStore<S, K, NullCodec>;
+
+/// A `TypedStore` that uses the `BinCodec`
+pub type BinStore<S, K, V> = TypedStore<S, K, BinCodec<V>>;
+
+#[derive(Clone, Debug)]
+pub struct TypedStore<S, K, C> {
+    store: S,
+    _key: PhantomData<K>,
+    _codec: PhantomData<C>,
+}
+
+impl<S, K, C, V> TypedStore<S, K, C>
+where
+    S: Store,
+    C: Codec<Type = V>,
+    K: Into<Path> + Clone,
+{
+    #[inline]
+    pub fn new(store: S) -> Self {
+        Self {
+            store,
+            _codec: PhantomData,
+            _key: PhantomData,
+        }
+    }
+
+    #[inline]
+    pub fn set(&mut self, path: K, value: V) -> Result<Option<V>, S::Error> {
+        self.store
+            .set(path.into(), C::encode(&value).unwrap().as_ref().to_vec())
+            .map(|prev_val| prev_val.and_then(|v| C::decode(&v)))
+    }
+
+    #[inline]
+    pub fn get(&self, height: Height, path: &K) -> Option<V> {
+        self.store
+            .get(height, &path.clone().into())
+            .and_then(|v| C::decode(&v))
+    }
+
+    #[inline]
+    pub fn get_keys(&self, key_prefix: &Path) -> Vec<Path> {
+        self.store.get_keys(key_prefix)
+    }
+
+    #[inline]
+    pub fn current_height(&self) -> RawHeight {
+        self.store.current_height()
+    }
+}
+
+impl<S, K> TypedStore<S, K, NullCodec>
+where
+    S: Store,
+    K: Into<Path> + Clone,
+{
+    #[inline]
+    pub fn set_path(&mut self, path: K) -> Result<(), S::Error> {
+        self.store.set(path.into(), vec![]).map(|_| ())
+    }
+
+    #[inline]
+    pub fn is_path_set(&self, height: Height, path: &K) -> bool {
+        self.store.get(height, &path.clone().into()).is_some()
+    }
+}