From 3bf1c82491a8982090fd59ecdba6c23eed082d9a Mon Sep 17 00:00:00 2001 From: Xavier Lau Date: Thu, 30 Mar 2023 03:26:01 +0800 Subject: [PATCH 1/7] Brainstorming Signed-off-by: Xavier Lau --- .github/workflows/checks.yml | 37 +- .../workflows/{pages.yml => pages.yml.bak} | 0 .github/workflows/release.yml | 82 +- .github/workflows/staging.yml | 87 - Cargo.lock | 1506 +++++++++++++++++ Cargo.toml | 42 +- README.md | 14 +- build.rs | 17 - src/apeye.rs | 140 ++ src/apeye/api.rs | 44 + src/apeye/api/base.rs | 19 + src/apeye/api/base/chain.rs | 66 + src/apeye/api/base/net.rs | 51 + src/apeye/api/base/state.rs | 81 + src/apeye/api/ext.rs | 199 +++ src/apeye/api/frontier.rs | 10 + src/apeye/runtime.rs | 30 + src/error.rs | 92 + src/jsonrpc.rs | 187 ++ src/jsonrpc/ws.rs | 297 ++++ src/jsonrpc/ws/initializer.rs | 285 ++++ src/lib.rs | 22 + src/main.rs | 71 + 23 files changed, 3167 insertions(+), 212 deletions(-) rename .github/workflows/{pages.yml => pages.yml.bak} (100%) delete mode 100644 .github/workflows/staging.yml create mode 100644 Cargo.lock delete mode 100644 build.rs create mode 100644 src/apeye.rs create mode 100644 src/apeye/api.rs create mode 100644 src/apeye/api/base.rs create mode 100644 src/apeye/api/base/chain.rs create mode 100644 src/apeye/api/base/net.rs create mode 100644 src/apeye/api/base/state.rs create mode 100644 src/apeye/api/ext.rs create mode 100644 src/apeye/api/frontier.rs create mode 100644 src/apeye/runtime.rs create mode 100644 src/error.rs create mode 100644 src/jsonrpc.rs create mode 100644 src/jsonrpc/ws.rs create mode 100644 src/jsonrpc/ws/initializer.rs create mode 100644 src/main.rs diff --git a/.github/workflows/checks.yml b/.github/workflows/checks.yml index 0df7e8f..16f889d 100644 --- a/.github/workflows/checks.yml +++ b/.github/workflows/checks.yml @@ -8,34 +8,24 @@ on: - main env: + CARGO_INCREMENTAL: 1 CARGO_TERM_COLOR: always GITHUB_CACHE_VERSION: 1 GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} RUST_BACKTRACE: full - RUSTC_WRAPPER: sccache - - SCCACHE_LINK: https://github.com/mozilla/sccache/releases/download - SCCACHE_VERSION: v0.3.0 - SCCACHE_DIR: /home/runner/.cache/sccache jobs: - cargo_checks: + cargo-checks: name: Task cargo ${{ matrix.action }} runs-on: ubuntu-latest strategy: matrix: - action: [clippy, nextest] + action: [clippy, fmt, nextest] steps: - name: Fetch latest code uses: actions/checkout@v3 - - name: Install Sccache - run: | - export SCCACHE_FILE=sccache-${{ env.SCCACHE_VERSION }}-x86_64-unknown-linux-musl - curl -L ${{ env.SCCACHE_LINK }}/${{ env.SCCACHE_VERSION }}/$SCCACHE_FILE.tar.gz | tar xz - sudo mv $SCCACHE_FILE/sccache /usr/bin - sudo chmod u+x /usr/bin/sccache - name: Cache cargo uses: actions/cache@v3 with: @@ -45,24 +35,25 @@ jobs: target key: cargo-${{ env.GITHUB_CACHE_VERSION }}-${{ matrix.action }}-${{ hashFiles('**/Cargo.lock') }} restore-keys: cargo-${{ env.GITHUB_CACHE_VERSION }}-${{ matrix.action }}- - - name: Cache sccache - uses: actions/cache@v3 - with: - path: ${{ env.SCCACHE_DIR}} - key: sccache-${{ env.GITHUB_CACHE_VERSION }}-${{ matrix.action }}--${{ hashFiles('**/Cargo.lock') }} - restore-keys: sccache-${{ env.GITHUB_CACHE_VERSION }}-${{ matrix.action }}- - name: Cargo ${{ matrix.action }} if: matrix.action == 'clippy' uses: actions-rs/clippy-check@v1 with: token: ${{ secrets.GITHUB_TOKEN }} args: --workspace --all-features --all-targets --locked + - name: Cargo ${{ matrix.action }} + if: matrix.action == 'fmt' + run: cargo ${{ matrix.action }} --all -- --check - name: Install cargo-nextest if: matrix.action == 
'nextest' uses: taiki-e/install-action@nextest - - name: Cargo nextest + - name: Cargo ${{ matrix.action }} if: matrix.action == 'nextest' - uses: actions-rs/cargo@v1 + run: cargo ${{ matrix.action }} run --release --workspace --all-features --all-targets --locked + - name: Fast fail + uses: vishnudxb/cancel-workflow@v1.2 + if: failure() with: - command: nextest - args: run --workspace --all-features --all-targets --locked + repo: hack-ink/subapeye + workflow_id: ${{ github.run_id }} + access_token: ${{ github.token }} diff --git a/.github/workflows/pages.yml b/.github/workflows/pages.yml.bak similarity index 100% rename from .github/workflows/pages.yml rename to .github/workflows/pages.yml.bak diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index b6eebc4..3a4ac35 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -8,88 +8,24 @@ env: CARGO_TERM_COLOR: always jobs: - build: - name: Build ${{ matrix.target.name }} package - runs-on: ${{ matrix.target.os }} - strategy: - matrix: - target: - [ - { name: x86_64-unknown-linux-gnu, os: ubuntu-latest }, - { name: aarch64-apple-darwin, os: macos-latest }, - { name: x86_64-apple-darwin, os: macos-latest }, - { - name: x86_64-pc-windows-msvc, - os: windows-latest, - extension: .exe, - }, - ] - steps: - - name: Fetch latest code - uses: actions/checkout@v3 - - name: Setup Rust toolchain - run: rustup target add ${{ matrix.target.name }} - - name: Build - uses: actions-rs/cargo@v1 - with: - command: build - args: --release --locked --target ${{ matrix.target.name }} - - name: Compress - run: | - mv target/${{ matrix.target.name }}/release/${{ matrix.target.extension }} . - zstd --ultra -22 -o -${{ matrix.target.name }}.zst ${{ matrix.target.extension }} - - name: Collect artifact - run: | - mkdir -p artifacts - mv -${{ matrix.target.name }}.zst artifacts - - name: Upload artifact - uses: actions/upload-artifact@v3.1.2 - with: - name: artifacts - path: artifacts - release: name: Release runs-on: ubuntu-latest needs: [build] steps: - - name: Download artifacts - uses: actions/download-artifact@v3 - with: - name: artifacts - path: artifacts - - name: Hash - run: | - cd artifacts - sha256sum * | tee ../SHA256 - md5sum * | tee ../MD5 - mv ../SHA256 . - mv ../MD5 . 
- name: Publish uses: softprops/action-gh-release@v1 with: discussion_category_name: Announcements generate_release_notes: true - files: artifacts/* - # publish-on-crates-io: - # name: Publish on crates.io - # runs-on: ubuntu-latest - # steps: - # - name: Fetch latest code - # uses: actions/checkout@v3 - # - name: Login - # run: cargo login ${{ secrets.CARGO_REGISTRY_TOKEN }} - # - name: Publish - # run: .maintain/release.sh - - clean-artifacts: - name: Clean artifacts - if: always() - needs: [release] - steps: - - name: Clean artifacts - uses: geekyeggo/delete-artifact@v2 - with: - name: artifacts + publish-on-crates-io: + name: Publish on crates.io runs-on: ubuntu-latest + steps: + - name: Fetch latest code + uses: actions/checkout@v3 + - name: Login + run: cargo login ${{ secrets.CARGO_REGISTRY_TOKEN }} + - name: Publish + run: cargo publish --locked diff --git a/.github/workflows/staging.yml b/.github/workflows/staging.yml deleted file mode 100644 index 8278e8f..0000000 --- a/.github/workflows/staging.yml +++ /dev/null @@ -1,87 +0,0 @@ -name: Staging -on: - workflow_dispatch: - -env: - CARGO_TERM_COLOR: always - -jobs: - build: - name: Build ${{ matrix.target.name }} package - runs-on: ${{ matrix.target.os }} - strategy: - matrix: - target: - [ - { name: x86_64-unknown-linux-gnu, os: ubuntu-latest }, - { name: aarch64-apple-darwin, os: macos-latest }, - { name: x86_64-apple-darwin, os: macos-latest }, - { - name: x86_64-pc-windows-msvc, - os: windows-latest, - extension: .exe, - }, - ] - steps: - - name: Fetch latest code - uses: actions/checkout@v3 - - name: Setup Rust toolchain - run: rustup target add ${{ matrix.target.name }} - - name: Build - uses: actions-rs/cargo@v1 - with: - command: build - args: --release --locked --target ${{ matrix.target.name }} - - name: Compress - run: | - mv target/${{ matrix.target.name }}/release/${{ matrix.target.extension }} . - zstd --ultra -22 -o -${{ matrix.target.name }}.zst ${{ matrix.target.extension }} - - name: Collect artifact - run: | - mkdir -p artifacts - mv -${{ matrix.target.name }}.zst artifacts - - name: Upload artifact - uses: actions/upload-artifact@v3.1.2 - with: - name: artifacts - path: artifacts - - staging: - name: Staging - runs-on: ubuntu-latest - needs: [build] - steps: - - name: Download artifacts - uses: actions/download-artifact@v3 - with: - name: artifacts - path: artifacts - - name: Hash - run: | - cd artifacts - sha256sum * | tee ../SHA256 - md5sum * | tee ../MD5 - mv ../SHA256 . - mv ../MD5 . - - # publish-on-crates-io: - # name: Publish on crates.io - # runs-on: ubuntu-latest - # steps: - # - name: Fetch latest code - # uses: actions/checkout@v3 - # - name: Login - # run: cargo login ${{ secrets.CARGO_REGISTRY_TOKEN }} - # - name: Publish - # run: .maintain/release.sh - - clean-artifacts: - name: Clean artifacts - if: always() - needs: [staging] - steps: - - name: Clean artifacts - uses: geekyeggo/delete-artifact@v2 - with: - name: artifacts - runs-on: ubuntu-latest diff --git a/Cargo.lock b/Cargo.lock new file mode 100644 index 0000000..541fb22 --- /dev/null +++ b/Cargo.lock @@ -0,0 +1,1506 @@ +# This file is automatically @generated by Cargo. +# It is not intended for manual editing. 
+version = 3 + +[[package]] +name = "affix" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "50e7ea84d3fa2009f355f8429a0b418a96849135a4188fadf384f59127d5d4bc" +dependencies = [ + "convert_case", +] + +[[package]] +name = "array-bytes" +version = "6.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d9b1c5a481ec30a5abd8dfbd94ab5cf1bb4e9a66be7f1b3b322f2f1170c200fd" + +[[package]] +name = "arrayvec" +version = "0.4.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cd9fd44efafa8690358b7408d253adf110036b88f55672a933f01d616ad9b1b9" +dependencies = [ + "nodrop", +] + +[[package]] +name = "arrayvec" +version = "0.7.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8da52d66c7071e2e3fa2a1e5c6d088fec47b593032b254f5e980de8ea54454d6" + +[[package]] +name = "async-trait" +version = "0.1.68" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b9ccdd8f2a161be9bd5c023df56f1b2a0bd1d83872ae53b71a84a12c9bf6e842" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.15", +] + +[[package]] +name = "autocfg" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa" + +[[package]] +name = "base64" +version = "0.13.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9e1b586273c5702936fe7b7d6896644d8be71e6314cfe09d3167c95f712589e8" + +[[package]] +name = "base64" +version = "0.21.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a4a4ddaa51a5bc52a6948f74c06d20aaaddb71924eab79b8c97a8c556e942d6a" + +[[package]] +name = "bitflags" +version = "1.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" + +[[package]] +name = "bitvec" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1bc2832c24239b0141d5674bb9174f9d68a8b5b3f2753311927c172ca46f7e9c" +dependencies = [ + "funty", + "radium", + "tap", + "wyz", +] + +[[package]] +name = "blake2-rfc" +version = "0.2.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5d6d530bdd2d52966a6d03b7a964add7ae1a288d25214066fd4b600f0f796400" +dependencies = [ + "arrayvec 0.4.12", + "constant_time_eq", +] + +[[package]] +name = "block-buffer" +version = "0.10.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3078c7629b62d3f0439517fa394996acacc5cbc91c5a20d8c658e77abd503a71" +dependencies = [ + "generic-array", +] + +[[package]] +name = "bumpalo" +version = "3.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9b1ce199063694f33ffb7dd4e0ee620741495c32833cde5aa08f02a0bf96f0c8" + +[[package]] +name = "byte-slice-cast" +version = "1.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c3ac9f8b63eca6fd385229b3675f6cc0dc5c8a5c8a54a59d4f52ffd670d87b0c" + +[[package]] +name = "byteorder" +version = "1.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "14c189c53d098945499cdfa7ecc63567cf3886b3332b312a5b4585d8d3a6a610" + +[[package]] +name = "bytes" +version = "1.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "89b2fd2a0dcf38d7971e2194b6b6eebab45ae01067456a7fd93d5547a61b70be" + +[[package]] +name = "cc" +version = "1.0.79" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "50d30906286121d95be3d479533b458f87493b30a4b5f79a607db8f5d11aa91f" + +[[package]] +name = "cfg-if" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" + +[[package]] +name = "constant_time_eq" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "245097e9a4535ee1e3e3931fcfcd55a796a44c643e8596ff6566d68f09b87bbc" + +[[package]] +name = "convert_case" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fb4a24b1aaf0fd0ce8b45161144d6f42cd91677fd5940fd431183eb023b3a2b8" + +[[package]] +name = "core-foundation" +version = "0.9.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "194a7a9e6de53fa55116934067c844d9d749312f75c6f6d0980e8c252f8c2146" +dependencies = [ + "core-foundation-sys", + "libc", +] + +[[package]] +name = "core-foundation-sys" +version = "0.8.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e496a50fda8aacccc86d7529e2c1e0892dbd0f898a6b5645b5561b89c3210efa" + +[[package]] +name = "cpufeatures" +version = "0.2.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3e4c1eaa2012c47becbbad2ab175484c2a84d1185b566fb2cc5b8707343dfe58" +dependencies = [ + "libc", +] + +[[package]] +name = "crunchy" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a81dae078cea95a014a339291cec439d2f232ebe854a9d672b796c6afafa9b7" + +[[package]] +name = "crypto-common" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3" +dependencies = [ + "generic-array", + "typenum", +] + +[[package]] +name = "derive_more" +version = "0.99.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4fb810d30a7c1953f91334de7244731fc3f3c10d7fe163338a35b9f640960321" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "digest" +version = "0.10.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8168378f4e5023e7218c89c891c0fd8ecdb5e5e4f18cb78f38cf245dd021e76f" +dependencies = [ + "block-buffer", + "crypto-common", +] + +[[package]] +name = "fnv" +version = "1.0.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" + +[[package]] +name = "form_urlencoded" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a9c384f161156f5260c24a097c56119f9be8c798586aecc13afbcbe7b7e26bf8" +dependencies = [ + "percent-encoding", +] + +[[package]] +name = "frame-metadata" +version = "15.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "878babb0b136e731cc77ec2fd883ff02745ff21e6fb662729953d44923df009c" +dependencies = [ + "cfg-if", + "parity-scale-codec", + "scale-info", + "serde", +] + +[[package]] +name = "funty" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e6d5a32815ae3f33302d95fdcb2ce17862f8c65363dcfd29360480ba1001fc9c" + +[[package]] +name = "futures" +version = "0.3.28" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "23342abe12aba583913b2e62f22225ff9c950774065e4bfb61a19cd9770fec40" +dependencies 
= [ + "futures-channel", + "futures-core", + "futures-executor", + "futures-io", + "futures-sink", + "futures-task", + "futures-util", +] + +[[package]] +name = "futures-channel" +version = "0.3.28" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "955518d47e09b25bbebc7a18df10b81f0c766eaf4c4f1cccef2fca5f2a4fb5f2" +dependencies = [ + "futures-core", + "futures-sink", +] + +[[package]] +name = "futures-core" +version = "0.3.28" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4bca583b7e26f571124fe5b7561d49cb2868d79116cfa0eefce955557c6fee8c" + +[[package]] +name = "futures-executor" +version = "0.3.28" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ccecee823288125bd88b4d7f565c9e58e41858e47ab72e8ea2d64e93624386e0" +dependencies = [ + "futures-core", + "futures-task", + "futures-util", +] + +[[package]] +name = "futures-io" +version = "0.3.28" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4fff74096e71ed47f8e023204cfd0aa1289cd54ae5430a9523be060cdb849964" + +[[package]] +name = "futures-macro" +version = "0.3.28" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "89ca545a94061b6365f2c7355b4b32bd20df3ff95f02da9329b34ccc3bd6ee72" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.15", +] + +[[package]] +name = "futures-sink" +version = "0.3.28" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f43be4fe21a13b9781a69afa4985b0f6ee0e1afab2c6f454a8cf30e2b2237b6e" + +[[package]] +name = "futures-task" +version = "0.3.28" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "76d3d132be6c0e6aa1534069c705a74a5997a356c0dc2f86a47765e5617c5b65" + +[[package]] +name = "futures-util" +version = "0.3.28" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "26b01e40b772d54cf6c6d721c1d1abd0647a0106a12ecaa1c186273392a69533" +dependencies = [ + "futures-channel", + "futures-core", + "futures-io", + "futures-macro", + "futures-sink", + "futures-task", + "memchr", + "pin-project-lite", + "pin-utils", + "slab", +] + +[[package]] +name = "fxhash" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c31b6d751ae2c7f11320402d34e41349dd1016f8d5d45e48c4312bc8625af50c" +dependencies = [ + "byteorder", +] + +[[package]] +name = "generic-array" +version = "0.14.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85649ca51fd72272d7821adaf274ad91c288277713d9c18820d8499a7ff69e9a" +dependencies = [ + "typenum", + "version_check", +] + +[[package]] +name = "getrandom" +version = "0.2.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c85e1d9ab2eadba7e5040d4e09cbd6d072b76a557ad64e797c2cb9d4da21d7e4" +dependencies = [ + "cfg-if", + "libc", + "wasi", +] + +[[package]] +name = "hashbrown" +version = "0.12.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888" + +[[package]] +name = "hermit-abi" +version = "0.2.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ee512640fe35acbfb4bb779db6f0d80704c2cacfa2e39b601ef3e3f47d1ae4c7" +dependencies = [ + "libc", +] + +[[package]] +name = "http" +version = "0.2.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bd6effc99afb63425aff9b05836f029929e345a6148a14b7ecd5ab67af944482" +dependencies = [ + "bytes", + 
"fnv", + "itoa", +] + +[[package]] +name = "httparse" +version = "1.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d897f394bad6a705d5f4104762e116a75639e470d80901eed05a860a95cb1904" + +[[package]] +name = "idna" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e14ddfc70884202db2244c223200c204c2bda1bc6e0998d11b5e024d657209e6" +dependencies = [ + "unicode-bidi", + "unicode-normalization", +] + +[[package]] +name = "impl-trait-for-tuples" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "11d7a9f6330b71fea57921c9b61c47ee6e84f72d394754eff6163ae67e7395eb" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "indexmap" +version = "1.9.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bd070e393353796e801d209ad339e89596eb4c8d430d18ede6a1cced8fafbd99" +dependencies = [ + "autocfg", + "hashbrown", +] + +[[package]] +name = "itoa" +version = "1.0.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "453ad9f582a441959e5f0d088b02ce04cfe8d51a8eaf077f12ac6d3e94164ca6" + +[[package]] +name = "js-sys" +version = "0.3.61" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "445dde2150c55e483f3d8416706b97ec8e8237c307e5b7b4b8dd15e6af2a0730" +dependencies = [ + "wasm-bindgen", +] + +[[package]] +name = "lazy_static" +version = "1.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" + +[[package]] +name = "libc" +version = "0.2.142" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6a987beff54b60ffa6d51982e1aa1146bc42f19bd26be28b0586f252fccf5317" + +[[package]] +name = "log" +version = "0.4.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "abb12e687cfb44aa40f41fc3978ef76448f9b6038cad6aef4259d3c095a2382e" +dependencies = [ + "cfg-if", +] + +[[package]] +name = "memchr" +version = "2.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2dffe52ecf27772e601905b7522cb4ef790d2cc203488bbd0e2fe85fcb74566d" + +[[package]] +name = "mio" +version = "0.8.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5b9d9a46eff5b4ff64b45a9e316a6d1e0bc719ef429cbec4dc630684212bfdf9" +dependencies = [ + "libc", + "log", + "wasi", + "windows-sys 0.45.0", +] + +[[package]] +name = "nodrop" +version = "0.1.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72ef4a56884ca558e5ddb05a1d1e7e1bfd9a68d9ed024c21704cc98872dae1bb" + +[[package]] +name = "nu-ansi-term" +version = "0.46.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "77a8165726e8236064dbb45459242600304b42a5ea24ee2948e18e023bf7ba84" +dependencies = [ + "overload", + "winapi", +] + +[[package]] +name = "num_cpus" +version = "1.15.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0fac9e2da13b5eb447a6ce3d392f23a29d8694bff781bf03a16cd9ac8697593b" +dependencies = [ + "hermit-abi", + "libc", +] + +[[package]] +name = "once_cell" +version = "1.17.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b7e5500299e16ebb147ae15a00a942af264cf3688f47923b8fc2cd5858f23ad3" + +[[package]] +name = "openssl-probe" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"ff011a302c396a5197692431fc1948019154afc178baf7d8e37367442a4601cf" + +[[package]] +name = "overload" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39" + +[[package]] +name = "parity-scale-codec" +version = "3.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "637935964ff85a605d114591d4d2c13c5d1ba2806dae97cea6bf180238a749ac" +dependencies = [ + "arrayvec 0.7.2", + "bitvec", + "byte-slice-cast", + "impl-trait-for-tuples", + "parity-scale-codec-derive", + "serde", +] + +[[package]] +name = "parity-scale-codec-derive" +version = "3.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "86b26a931f824dd4eca30b3e43bb4f31cd5f0d3a403c5f5ff27106b805bfde7b" +dependencies = [ + "proc-macro-crate", + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "percent-encoding" +version = "2.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "478c572c3d73181ff3c2539045f6eb99e5491218eae919370993b890cdbdd98e" + +[[package]] +name = "pin-project-lite" +version = "0.2.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e0a7ae3ac2f1173085d398531c705756c94a4c56843785df85a60c1a0afac116" + +[[package]] +name = "pin-utils" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" + +[[package]] +name = "ppv-lite86" +version = "0.2.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5b40af805b3121feab8a3c29f04d8ad262fa8e0561883e7653e024ae4479e6de" + +[[package]] +name = "proc-macro-crate" +version = "1.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f4c021e1093a56626774e81216a4ce732a735e5bad4868a03f3ed65ca0c3919" +dependencies = [ + "once_cell", + "toml_edit", +] + +[[package]] +name = "proc-macro2" +version = "1.0.56" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2b63bdb0cd06f1f4dedf69b254734f9b45af66e4a031e42a7480257d9898b435" +dependencies = [ + "unicode-ident", +] + +[[package]] +name = "quote" +version = "1.0.26" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4424af4bf778aae2051a77b60283332f386554255d722233d09fbfc7e30da2fc" +dependencies = [ + "proc-macro2", +] + +[[package]] +name = "radium" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dc33ff2d4973d518d823d61aa239014831e521c75da58e3df4840d3f47749d09" + +[[package]] +name = "rand" +version = "0.8.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" +dependencies = [ + "libc", + "rand_chacha", + "rand_core", +] + +[[package]] +name = "rand_chacha" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88" +dependencies = [ + "ppv-lite86", + "rand_core", +] + +[[package]] +name = "rand_core" +version = "0.6.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" +dependencies = [ + "getrandom", +] + +[[package]] +name = "ring" +version = "0.16.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"3053cf52e236a3ed746dfc745aa9cacf1b791d846bdaf412f60a8d7d6e17c8fc" +dependencies = [ + "cc", + "libc", + "once_cell", + "spin", + "untrusted", + "web-sys", + "winapi", +] + +[[package]] +name = "rustls" +version = "0.20.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fff78fc74d175294f4e83b28343315ffcfb114b156f0185e9741cb5570f50e2f" +dependencies = [ + "log", + "ring", + "sct", + "webpki", +] + +[[package]] +name = "rustls-native-certs" +version = "0.6.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0167bac7a9f490495f3c33013e7722b53cb087ecbe082fb0c6387c96f634ea50" +dependencies = [ + "openssl-probe", + "rustls-pemfile", + "schannel", + "security-framework", +] + +[[package]] +name = "rustls-pemfile" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d194b56d58803a43635bdc398cd17e383d6f71f9182b9a192c127ca42494a59b" +dependencies = [ + "base64 0.21.0", +] + +[[package]] +name = "ryu" +version = "1.0.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f91339c0467de62360649f8d3e185ca8de4224ff281f66000de5eb2a77a79041" + +[[package]] +name = "scale-info" +version = "2.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dfdef77228a4c05dc94211441595746732131ad7f6530c6c18f045da7b7ab937" +dependencies = [ + "bitvec", + "cfg-if", + "derive_more", + "parity-scale-codec", + "scale-info-derive", + "serde", +] + +[[package]] +name = "scale-info-derive" +version = "2.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "53012eae69e5aa5c14671942a5dd47de59d4cdcff8532a6dd0e081faf1119482" +dependencies = [ + "proc-macro-crate", + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "schannel" +version = "0.1.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "713cfb06c7059f3588fb8044c0fad1d09e3c01d225e25b9220dbfdcf16dbb1b3" +dependencies = [ + "windows-sys 0.42.0", +] + +[[package]] +name = "sct" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d53dcdb7c9f8158937a7981b48accfd39a43af418591a5d008c7b22b5e1b7ca4" +dependencies = [ + "ring", + "untrusted", +] + +[[package]] +name = "security-framework" +version = "2.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a332be01508d814fed64bf28f798a146d73792121129962fdf335bb3c49a4254" +dependencies = [ + "bitflags", + "core-foundation", + "core-foundation-sys", + "libc", + "security-framework-sys", +] + +[[package]] +name = "security-framework-sys" +version = "2.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "31c9bb296072e961fcbd8853511dd39c2d8be2deb1e17c6860b1d30732b323b4" +dependencies = [ + "core-foundation-sys", + "libc", +] + +[[package]] +name = "serde" +version = "1.0.160" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bb2f3770c8bce3bcda7e149193a069a0f4365bda1fa5cd88e03bca26afc1216c" +dependencies = [ + "serde_derive", +] + +[[package]] +name = "serde_derive" +version = "1.0.160" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "291a097c63d8497e00160b166a967a4a79c64f3facdd01cbd7502231688d77df" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.15", +] + +[[package]] +name = "serde_json" +version = "1.0.96" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"057d394a50403bcac12672b2b18fb387ab6d289d957dab67dd201875391e52f1" +dependencies = [ + "itoa", + "ryu", + "serde", +] + +[[package]] +name = "sha1" +version = "0.10.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f04293dc80c3993519f2d7f6f511707ee7094fe0c6d3406feb330cdb3540eba3" +dependencies = [ + "cfg-if", + "cpufeatures", + "digest", +] + +[[package]] +name = "sha2" +version = "0.10.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "82e6b795fe2e3b1e845bafcb27aa35405c4d47cdfc92af5fc8d3002f76cebdc0" +dependencies = [ + "cfg-if", + "cpufeatures", + "digest", +] + +[[package]] +name = "sharded-slab" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "900fba806f70c630b0a382d0d825e17a0f19fcd059a2ade1ff237bcddf446b31" +dependencies = [ + "lazy_static", +] + +[[package]] +name = "slab" +version = "0.4.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6528351c9bc8ab22353f9d776db39a20288e8d6c37ef8cfe3317cf875eecfc2d" +dependencies = [ + "autocfg", +] + +[[package]] +name = "smallvec" +version = "1.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a507befe795404456341dfab10cef66ead4c041f62b8b11bbb92bffe5d0953e0" + +[[package]] +name = "socket2" +version = "0.4.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "64a4a911eed85daf18834cfaa86a79b7d266ff93ff5ba14005426219480ed662" +dependencies = [ + "libc", + "winapi", +] + +[[package]] +name = "spin" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6e63cff320ae2c57904679ba7cb63280a3dc4613885beafb148ee7bf9aa9042d" + +[[package]] +name = "static_assertions" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f" + +[[package]] +name = "subapeye" +version = "0.1.0" +dependencies = [ + "array-bytes", + "async-trait", + "futures", + "fxhash", + "num_cpus", + "parity-scale-codec", + "serde", + "serde_json", + "submetadatan", + "subrpcer", + "substorager", + "thiserror", + "tokio", + "tokio-stream", + "tokio-tungstenite", + "tracing", + "tracing-subscriber", +] + +[[package]] +name = "subhasher" +version = "0.10.0" +dependencies = [ + "blake2-rfc", + "byteorder", + "sha2", + "tiny-keccak", + "twox-hash", +] + +[[package]] +name = "submetadatan" +version = "0.10.0" +dependencies = [ + "array-bytes", + "frame-metadata", + "fxhash", + "parity-scale-codec", + "scale-info", + "substorager", + "thiserror", +] + +[[package]] +name = "subrpcer" +version = "0.10.0" +dependencies = [ + "affix", + "serde", + "serde_json", + "tracing", +] + +[[package]] +name = "substorager" +version = "0.10.0" +dependencies = [ + "array-bytes", + "subhasher", +] + +[[package]] +name = "syn" +version = "1.0.109" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + +[[package]] +name = "syn" +version = "2.0.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a34fcf3e8b60f57e6a14301a2e916d323af98b0ea63c599441eec8558660c822" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + +[[package]] +name = "tap" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"55937e1799185b12863d447f42597ed69d9928686b8d88a1df17376a097d8369" + +[[package]] +name = "thiserror" +version = "1.0.40" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "978c9a314bd8dc99be594bc3c175faaa9794be04a5a5e153caba6915336cebac" +dependencies = [ + "thiserror-impl", +] + +[[package]] +name = "thiserror-impl" +version = "1.0.40" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f9456a42c5b0d803c8cd86e73dd7cc9edd429499f37a3550d286d5e86720569f" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.15", +] + +[[package]] +name = "thread_local" +version = "1.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3fdd6f064ccff2d6567adcb3873ca630700f00b5ad3f060c25b5dcfd9a4ce152" +dependencies = [ + "cfg-if", + "once_cell", +] + +[[package]] +name = "tiny-keccak" +version = "2.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2c9d3793400a45f954c52e73d068316d76b6f4e36977e3fcebb13a2721e80237" +dependencies = [ + "crunchy", +] + +[[package]] +name = "tinyvec" +version = "1.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "87cc5ceb3875bb20c2890005a4e226a4651264a5c75edb2421b52861a0a0cb50" +dependencies = [ + "tinyvec_macros", +] + +[[package]] +name = "tinyvec_macros" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" + +[[package]] +name = "tokio" +version = "1.28.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c3c786bf8134e5a3a166db9b29ab8f48134739014a3eca7bc6bfa95d673b136f" +dependencies = [ + "autocfg", + "bytes", + "libc", + "mio", + "num_cpus", + "pin-project-lite", + "socket2", + "tokio-macros", + "windows-sys 0.48.0", +] + +[[package]] +name = "tokio-macros" +version = "2.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "630bdcf245f78637c13ec01ffae6187cca34625e8c63150d424b59e55af2675e" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.15", +] + +[[package]] +name = "tokio-rustls" +version = "0.23.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c43ee83903113e03984cb9e5cebe6c04a5116269e900e3ddba8f068a62adda59" +dependencies = [ + "rustls", + "tokio", + "webpki", +] + +[[package]] +name = "tokio-stream" +version = "0.1.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "397c988d37662c7dda6d2208364a706264bf3d6138b11d436cbac0ad38832842" +dependencies = [ + "futures-core", + "pin-project-lite", + "tokio", +] + +[[package]] +name = "tokio-tungstenite" +version = "0.18.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "54319c93411147bced34cb5609a80e0a8e44c5999c93903a81cd866630ec0bfd" +dependencies = [ + "futures-util", + "log", + "rustls", + "rustls-native-certs", + "tokio", + "tokio-rustls", + "tungstenite", + "webpki", +] + +[[package]] +name = "toml_datetime" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3ab8ed2edee10b50132aed5f331333428b011c99402b5a534154ed15746f9622" + +[[package]] +name = "toml_edit" +version = "0.19.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "239410c8609e8125456927e6707163a3b1fdb40561e4b803bc041f466ccfdc13" +dependencies = [ + "indexmap", + "toml_datetime", + "winnow", +] + +[[package]] +name = "tracing" +version = "0.1.37" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "8ce8c33a8d48bd45d624a6e523445fd21ec13d3653cd51f681abf67418f54eb8" +dependencies = [ + "cfg-if", + "pin-project-lite", + "tracing-attributes", + "tracing-core", +] + +[[package]] +name = "tracing-attributes" +version = "0.1.24" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0f57e3ca2a01450b1a921183a9c9cbfda207fd822cef4ccb00a65402cbba7a74" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.15", +] + +[[package]] +name = "tracing-core" +version = "0.1.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "24eb03ba0eab1fd845050058ce5e616558e8f8d8fca633e6b163fe25c797213a" +dependencies = [ + "once_cell", + "valuable", +] + +[[package]] +name = "tracing-log" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "78ddad33d2d10b1ed7eb9d1f518a5674713876e97e5bb9b7345a7984fbb4f922" +dependencies = [ + "lazy_static", + "log", + "tracing-core", +] + +[[package]] +name = "tracing-subscriber" +version = "0.3.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "30a651bc37f915e81f087d86e62a18eec5f79550c7faff886f7090b4ea757c77" +dependencies = [ + "nu-ansi-term", + "sharded-slab", + "smallvec", + "thread_local", + "tracing-core", + "tracing-log", +] + +[[package]] +name = "tungstenite" +version = "0.18.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "30ee6ab729cd4cf0fd55218530c4522ed30b7b6081752839b68fcec8d0960788" +dependencies = [ + "base64 0.13.1", + "byteorder", + "bytes", + "http", + "httparse", + "log", + "rand", + "rustls", + "sha1", + "thiserror", + "url", + "utf-8", + "webpki", +] + +[[package]] +name = "twox-hash" +version = "1.6.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "97fee6b57c6a41524a810daee9286c02d7752c4253064d0b05472833a438f675" +dependencies = [ + "cfg-if", + "rand", + "static_assertions", +] + +[[package]] +name = "typenum" +version = "1.16.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "497961ef93d974e23eb6f433eb5fe1b7930b659f06d12dec6fc44a8f554c0bba" + +[[package]] +name = "unicode-bidi" +version = "0.3.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "92888ba5573ff080736b3648696b70cafad7d250551175acbaa4e0385b3e1460" + +[[package]] +name = "unicode-ident" +version = "1.0.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e5464a87b239f13a63a501f2701565754bae92d243d4bb7eb12f6d57d2269bf4" + +[[package]] +name = "unicode-normalization" +version = "0.1.22" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c5713f0fc4b5db668a2ac63cdb7bb4469d8c9fed047b1d0292cc7b0ce2ba921" +dependencies = [ + "tinyvec", +] + +[[package]] +name = "untrusted" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a156c684c91ea7d62626509bce3cb4e1d9ed5c4d978f7b4352658f96a4c26b4a" + +[[package]] +name = "url" +version = "2.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0d68c799ae75762b8c3fe375feb6600ef5602c883c5d21eb51c09f22b83c4643" +dependencies = [ + "form_urlencoded", + "idna", + "percent-encoding", +] + +[[package]] +name = "utf-8" +version = "0.7.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09cc8ee72d2a9becf2f2febe0205bbed8fc6615b7cb429ad062dc7b7ddd036a9" + +[[package]] +name = "valuable" 
+version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "830b7e5d4d90034032940e4ace0d9a9a057e7a45cd94e6c007832e39edb82f6d" + +[[package]] +name = "version_check" +version = "0.9.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f" + +[[package]] +name = "wasi" +version = "0.11.0+wasi-snapshot-preview1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" + +[[package]] +name = "wasm-bindgen" +version = "0.2.84" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "31f8dcbc21f30d9b8f2ea926ecb58f6b91192c17e9d33594b3df58b2007ca53b" +dependencies = [ + "cfg-if", + "wasm-bindgen-macro", +] + +[[package]] +name = "wasm-bindgen-backend" +version = "0.2.84" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "95ce90fd5bcc06af55a641a86428ee4229e44e07033963a2290a8e241607ccb9" +dependencies = [ + "bumpalo", + "log", + "once_cell", + "proc-macro2", + "quote", + "syn 1.0.109", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-macro" +version = "0.2.84" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4c21f77c0bedc37fd5dc21f897894a5ca01e7bb159884559461862ae90c0b4c5" +dependencies = [ + "quote", + "wasm-bindgen-macro-support", +] + +[[package]] +name = "wasm-bindgen-macro-support" +version = "0.2.84" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2aff81306fcac3c7515ad4e177f521b5c9a15f2b08f4e32d823066102f35a5f6" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.109", + "wasm-bindgen-backend", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-shared" +version = "0.2.84" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0046fef7e28c3804e5e38bfa31ea2a0f73905319b677e57ebe37e49358989b5d" + +[[package]] +name = "web-sys" +version = "0.3.61" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e33b99f4b23ba3eec1a53ac264e35a755f00e966e0065077d6027c0f575b0b97" +dependencies = [ + "js-sys", + "wasm-bindgen", +] + +[[package]] +name = "webpki" +version = "0.22.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f095d78192e208183081cc07bc5515ef55216397af48b873e5edcd72637fa1bd" +dependencies = [ + "ring", + "untrusted", +] + +[[package]] +name = "winapi" +version = "0.3.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419" +dependencies = [ + "winapi-i686-pc-windows-gnu", + "winapi-x86_64-pc-windows-gnu", +] + +[[package]] +name = "winapi-i686-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" + +[[package]] +name = "winapi-x86_64-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" + +[[package]] +name = "windows-sys" +version = "0.42.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5a3e1820f08b8513f676f7ab6c1f99ff312fb97b553d30ff4dd86f9f15728aa7" +dependencies = [ + "windows_aarch64_gnullvm 0.42.2", + "windows_aarch64_msvc 0.42.2", + "windows_i686_gnu 0.42.2", + "windows_i686_msvc 
0.42.2", + "windows_x86_64_gnu 0.42.2", + "windows_x86_64_gnullvm 0.42.2", + "windows_x86_64_msvc 0.42.2", +] + +[[package]] +name = "windows-sys" +version = "0.45.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "75283be5efb2831d37ea142365f009c02ec203cd29a3ebecbc093d52315b66d0" +dependencies = [ + "windows-targets 0.42.2", +] + +[[package]] +name = "windows-sys" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9" +dependencies = [ + "windows-targets 0.48.0", +] + +[[package]] +name = "windows-targets" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e5180c00cd44c9b1c88adb3693291f1cd93605ded80c250a75d472756b4d071" +dependencies = [ + "windows_aarch64_gnullvm 0.42.2", + "windows_aarch64_msvc 0.42.2", + "windows_i686_gnu 0.42.2", + "windows_i686_msvc 0.42.2", + "windows_x86_64_gnu 0.42.2", + "windows_x86_64_gnullvm 0.42.2", + "windows_x86_64_msvc 0.42.2", +] + +[[package]] +name = "windows-targets" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7b1eb6f0cd7c80c79759c929114ef071b87354ce476d9d94271031c0497adfd5" +dependencies = [ + "windows_aarch64_gnullvm 0.48.0", + "windows_aarch64_msvc 0.48.0", + "windows_i686_gnu 0.48.0", + "windows_i686_msvc 0.48.0", + "windows_x86_64_gnu 0.48.0", + "windows_x86_64_gnullvm 0.48.0", + "windows_x86_64_msvc 0.48.0", +] + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "597a5118570b68bc08d8d59125332c54f1ba9d9adeedeef5b99b02ba2b0698f8" + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "91ae572e1b79dba883e0d315474df7305d12f569b400fcf90581b06062f7e1bc" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e08e8864a60f06ef0d0ff4ba04124db8b0fb3be5776a5cd47641e942e58c4d43" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b2ef27e0d7bdfcfc7b868b317c1d32c641a6fe4629c171b8928c7b08d98d7cf3" + +[[package]] +name = "windows_i686_gnu" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c61d927d8da41da96a81f029489353e68739737d3beca43145c8afec9a31a84f" + +[[package]] +name = "windows_i686_gnu" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "622a1962a7db830d6fd0a69683c80a18fda201879f0f447f065a3b7467daa241" + +[[package]] +name = "windows_i686_msvc" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "44d840b6ec649f480a41c8d80f9c65108b92d89345dd94027bfe06ac444d1060" + +[[package]] +name = "windows_i686_msvc" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4542c6e364ce21bf45d69fdd2a8e455fa38d316158cfd43b3ac1c5b1b19f8e00" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8de912b8b8feb55c064867cf047dda097f92d51efad5b491dfb98f6bbb70cb36" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"ca2b8a661f7628cbd23440e50b05d705db3686f894fc9580820623656af974b1" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "26d41b46a36d453748aedef1486d5c7a85db22e56aff34643984ea85514e94a3" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7896dbc1f41e08872e9d5e8f8baa8fdd2677f29468c4e156210174edc7f7b953" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9aec5da331524158c6d1a4ac0ab1541149c0b9505fde06423b02f5ef0106b9f0" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1a515f5799fe4961cb532f983ce2b23082366b898e52ffbce459c86f67c8378a" + +[[package]] +name = "winnow" +version = "0.4.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5617da7e1f97bf363947d767b91aaf3c2bbc19db7fda9c65af1278713d58e0a2" +dependencies = [ + "memchr", +] + +[[package]] +name = "wyz" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "05f360fc0b24296329c78fda852a1e9ae82de9cf7b27dae4b7f62f118f77b9ed" +dependencies = [ + "tap", +] diff --git a/Cargo.toml b/Cargo.toml index 5cf288e..64332a5 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,11 +1,43 @@ [package] authors = ["Xavier Lau "] -build = "build.rs" -description = "" +description = "Substrate API client." edition = "2021" -homepage = "https://.hack.ink" +homepage = "https://subapeye.hack.ink" license = "GPL-3.0" -name = "" +name = "subapeye" readme = "README.md" -repository = "https://github.com/hack-ink/" +repository = "https://github.com/hack-ink/subapeye" version = "0.1.0" + +[features] +debug = [] +trace = [ + # crates.io + "subrpcer/trace", +] + +[dependencies] +# crates.io +array-bytes = { version = "6.1" } +async-trait = { version = "0.1" } +futures = { version = "0.3" } +fxhash = { version = "0.2" } +num_cpus = { version = "1.15" } +parity-scale-codec = { version = "3.4" } +serde = { version = "1.0", features = ["derive"] } +serde_json = { version = "1.0", features = ["raw_value"] } +submetadatan = { version = "0.10" } +subrpcer = { version = "0.10" } +substorager = { version = "0.10" } +thiserror = { version = "1.0" } +tokio = { version = "1.27", features = ["macros", "rt-multi-thread", "sync", "time"] } +tokio-stream = { version = "0.1" } +tokio-tungstenite = { version = "0.18", features = ["rustls-tls-native-roots"] } +tracing = { version = "0.1" } + +tracing-subscriber = { version = "0.3" } + +[patch.crates-io] +submetadatan = { version = "0.10", path = "../substrate-minimal/submetadatan" } +subrpcer = { version = "0.10", path = "../substrate-minimal/subrpcer" } +substorager = { version = "0.10", path = "../substrate-minimal/substorager" } diff --git a/README.md b/README.md index 00d358c..4e76685 100644 --- a/README.md +++ b/README.md @@ -1,13 +1,13 @@
-# \ -### \ +# Subapeye +### Substrate API client [![License](https://img.shields.io/badge/License-GPLv3-blue.svg)](https://www.gnu.org/licenses/gpl-3.0) -[![Checks](https://github.com/hack-ink//actions/workflows/checks.yml/badge.svg?branch=main)](https://github.com/hack-ink//actions/workflows/checks.yml) -[![Release](https://github.com/hack-ink//actions/workflows/release.yml/badge.svg)](https://github.com/hack-ink//actions/workflows/release.yml) -[![GitHub tag (latest by date)](https://img.shields.io/github/v/tag/hack-ink/)](https://github.com/hack-ink//tags) -[![GitHub code lines](https://tokei.rs/b1/github/hack-ink/)](https://github.com/hack-ink/) -[![GitHub last commit](https://img.shields.io/github/last-commit/hack-ink/?color=red&style=plastic)](https://github.com/hack-ink/) +[![Checks](https://github.com/hack-ink/subapeye/actions/workflows/checks.yml/badge.svg?branch=main)](https://github.com/hack-ink/subapeye/actions/workflows/checks.yml) +[![Release](https://github.com/hack-ink/subapeye/actions/workflows/release.yml/badge.svg)](https://github.com/hack-ink/subapeye/actions/workflows/release.yml) +[![GitHub tag (latest by date)](https://img.shields.io/github/v/tag/hack-ink/subapeye)](https://github.com/hack-ink/subapeye/tags) +[![GitHub code lines](https://tokei.rs/b1/github/hack-ink/subapeye)](https://github.com/hack-ink/subapeye) +[![GitHub last commit](https://img.shields.io/github/last-commit/hack-ink/subapeye?color=red&style=plastic)](https://github.com/hack-ink/subapeye)
diff --git a/build.rs b/build.rs
deleted file mode 100644
index f6072f5..0000000
--- a/build.rs
+++ /dev/null
@@ -1,17 +0,0 @@
-// crates.io
-use vergen::{Config, ShaKind};
-
-fn main() {
-	let mut config = Config::default();
-
-	*config.git_mut().sha_kind_mut() = ShaKind::Short;
-
-	// Disable the git version if installed from <crates.io>.
-	if vergen::vergen(config.clone()).is_err() {
-		*config.git_mut().enabled_mut() = false;
-
-		println!("cargo:rustc-env=VERGEN_GIT_SHA_SHORT=crates.io");
-
-		vergen::vergen(config).unwrap();
-	}
-}
diff --git a/src/apeye.rs b/src/apeye.rs
new file mode 100644
index 0000000..14c0752
--- /dev/null
+++ b/src/apeye.rs
@@ -0,0 +1,140 @@
+//!
+
+pub mod api;
+use api::ApiState;
+
+pub mod runtime;
+use runtime::Runtime;
+
+// std
+use std::{marker::PhantomData, sync::Arc};
+// crates.io
+use serde::de::DeserializeOwned;
+use submetadatan::{Meta, Metadata, StorageEntry};
+// subapeye
+use crate::{
+	jsonrpc::{Connection, Initialize, IntoRequestRaw, JsonrpcResult},
+	prelude::*,
+};
+
+///
+pub trait Layer: Invoker + Runtime {}
+impl<T> Layer for T where T: Invoker + Runtime {}
+
+///
+#[async_trait::async_trait]
+pub trait Invoker: Send + Sync {
+	///
+	fn map_result<R>(r: JsonrpcResult) -> Result<R>
+	where
+		R: DeserializeOwned,
+	{
+		Ok(r.map_err(|e| error::Jsonrpc::ResponseError(e.error))?)
+			.and_then(|r| Ok(serde_json::from_value::<R>(r.result).map_err(error::Generic::Serde)?))
+	}
+
+	///
+	async fn request<'a, Req, R>(&self, raw_request: Req) -> Result<Result<R>>
+	where
+		Req: IntoRequestRaw<'a>,
+		R: DeserializeOwned;
+
+	///
+	async fn batch<'a, Req, R>(&self, raw_requests: Vec<Req>) -> Result<Vec<Result<R>>>
+	where
+		Req: IntoRequestRaw<'a>,
+		R: DeserializeOwned;
+}
+#[async_trait::async_trait]
+impl<T> Invoker for T
+where
+	T: Connection,
+{
+	async fn request<'a, Req, R>(&self, raw_request: Req) -> Result<Result<R>>
+	where
+		Req: IntoRequestRaw<'a>,
+		R: DeserializeOwned,
+	{
+		self.request(raw_request).await.map(Self::map_result)
+	}
+
+	async fn batch<'a, Req, R>(&self, raw_requests: Vec<Req>) -> Result<Vec<Result<R>>>
+	where
+		Req: IntoRequestRaw<'a>,
+		R: DeserializeOwned,
+	{
+		self.batch(raw_requests).await.map(|v| v.into_iter().map(Self::map_result).collect())
+	}
+}
+
+/// The API client for Substrate-like chain.
+#[derive(Clone, Debug)]
+pub struct Apeye<I, R>
+where
+	I: Invoker,
+	R: Runtime,
+{
+	///
+	pub invoker: Arc<I>,
+	///
+	pub metadata: Metadata,
+	///
+	pub runtime: PhantomData<R>,
+}
+impl<I, R> Apeye<I, R>
+where
+	I: Invoker,
+	R: Runtime,
+{
+	/// Initialize the API client with the given initializer.
+	pub async fn initialize<Iz>(initializer: Iz) -> Result<Self>
+	where
+		Iz: Initialize,
+	{
+		let invoker = Arc::new(initializer.initialize().await?);
+		let mut apeye = Self { invoker, metadata: Default::default(), runtime: Default::default() };
+
+		apeye.metadata =
+			submetadatan::unprefix_raw_metadata_minimal(apeye.get_metadata::<String>(None).await??)
+				.map_err(error::Generic::Submetadatan)?;
+
+		#[cfg(feature = "trace")]
+		tracing::trace!("Metadata({:?})", apeye.metadata);
+
+		Ok(apeye)
+	}
+}
+#[async_trait::async_trait]
+impl<I, R> Invoker for Apeye<I, R>
+where
+	I: Invoker,
+	R: Runtime,
+{
+	async fn request<'a, Req, Resp>(&self, raw_request: Req) -> Result<Result<Resp>>
+	where
+		Req: IntoRequestRaw<'a>,
+		Resp: DeserializeOwned,
+	{
+		I::request::<_, _>(&self.invoker, raw_request).await
+	}
+
+	async fn batch<'a, Req, Resp>(&self, raw_requests: Vec<Req>) -> Result<Vec<Result<Resp>>>
+	where
+		Req: IntoRequestRaw<'a>,
+		Resp: DeserializeOwned,
+	{
+		I::batch::<_, _>(&self.invoker, raw_requests).await
+	}
+}
+impl<I, R> Meta for Apeye<I, R>
+where
+	I: Invoker,
+	R: Runtime,
+{
+	fn storage<'a, 'b>(&'a self, pallet: &str, item: &'b str) -> Option<StorageEntry<'b>>
+	where
+		'a: 'b,
+	{
+		self.metadata.storage(pallet, item)
+	}
+}
diff --git a/src/apeye/api.rs b/src/apeye/api.rs
new file mode 100644
index 0000000..8f6dc3a
--- /dev/null
+++ b/src/apeye/api.rs
@@ -0,0 +1,44 @@
+//!
+
+pub mod prelude {
+	//!
+
+	pub use crate::{
+		apeye::{
+			api::{Argument, Deserialization, Parameter},
+			runtime::Runtime,
+			Layer,
+		},
+		jsonrpc::Response,
+	};
+}
+
+pub mod base;
+pub use base::*;
+
+pub mod ext;
+pub use ext::*;
+
+pub mod frontier;
+pub use frontier::*;
+
+// std
+use std::fmt::Debug;
+// crates.io
+use serde::{de::DeserializeOwned, Serialize};
+
+///
+pub trait Api: ApiBase + ApiExt + ApiFrontier {}
+impl<T> Api for T where T: ApiBase + ApiExt + ApiFrontier {}
+
+///
+pub trait Argument: Debug + Send + Sync {}
+impl<T> Argument for T where T: Debug + Send + Sync {}
+
+///
+pub trait Parameter: Serialize + Argument {}
+impl<T> Parameter for T where T: Serialize + Argument {}
+
+///
+pub trait Deserialization: Debug + DeserializeOwned {}
+impl<T> Deserialization for T where T: Debug + DeserializeOwned {}
diff --git a/src/apeye/api/base.rs b/src/apeye/api/base.rs
new file mode 100644
index 0000000..96cf5a1
--- /dev/null
+++ b/src/apeye/api/base.rs
@@ -0,0 +1,19 @@
+//!
+
+pub mod chain;
+pub use chain::Api as ApiChain;
+
+pub mod net;
+pub use net::Api as ApiNet;
+
+pub mod state;
+pub use state::Api as ApiState;
+
+pub use Api as ApiBase;
+
+// subapeye
+use crate::apeye::api::prelude::*;
+
+///
+pub trait Api: ApiChain + ApiNet + ApiState {}
+impl<T> Api for T where T: Layer {}
diff --git a/src/apeye/api/base/chain.rs b/src/apeye/api/base/chain.rs
new file mode 100644
index 0000000..cfe5bf2
--- /dev/null
+++ b/src/apeye/api/base/chain.rs
@@ -0,0 +1,66 @@
+//!
+ +// crates.io +use subrpcer::chain; +// subapeye +use crate::{apeye::api::prelude::*, prelude::*}; + +/// +#[async_trait::async_trait] +pub trait Api { + /// + async fn get_block(&self, hash: Option<&str>) -> Result> + where + R: Deserialization; + + /// + async fn get_block_hash(&self, list_or_value: Option) -> Result> + where + LoV: Parameter, + R: Deserialization; + + /// + async fn get_finalized_head(&self) -> Result> + where + R: Deserialization; + + /// + async fn get_header(&self, hash: Option<&str>) -> Result> + where + R: Deserialization; +} +#[async_trait::async_trait] +impl Api for T +where + T: Layer, +{ + async fn get_block(&self, hash: Option<&str>) -> Result> + where + R: Deserialization, + { + self.request::<_, R>(chain::get_block_raw(hash)).await + } + + async fn get_block_hash(&self, list_or_value: Option) -> Result> + where + LoV: Parameter, + R: Deserialization, + { + self.request::<_, R>(chain::get_block_hash_raw(list_or_value)).await + } + + async fn get_finalized_head(&self) -> Result> + where + R: Deserialization, + { + self.request::<_, R>(chain::get_finalized_head_raw()).await + } + + /// + async fn get_header(&self, hash: Option<&str>) -> Result> + where + R: Deserialization, + { + self.request::<_, R>(chain::get_header_raw(hash)).await + } +} diff --git a/src/apeye/api/base/net.rs b/src/apeye/api/base/net.rs new file mode 100644 index 0000000..8daa339 --- /dev/null +++ b/src/apeye/api/base/net.rs @@ -0,0 +1,51 @@ +//! + +// crates.io +use subrpcer::net; +// subapeye +use crate::{apeye::api::prelude::*, prelude::*}; + +/// +#[async_trait::async_trait] +pub trait Api { + /// + async fn listening(&self) -> Result> + where + R: Deserialization; + + /// + async fn peer_count(&self) -> Result> + where + R: Deserialization; + + /// + async fn version(&self) -> Result> + where + R: Deserialization; +} +#[async_trait::async_trait] +impl Api for T +where + T: Layer, +{ + async fn listening(&self) -> Result> + where + R: Deserialization, + { + self.request::<_, R>(net::listening_raw()).await + } + + async fn peer_count(&self) -> Result> + where + R: Deserialization, + { + self.request::<_, R>(net::peer_count_raw()).await + } + + async fn version(&self) -> Result> + where + R: Deserialization, + { + self.request::<_, R>(net::version_raw()).await + } +} diff --git a/src/apeye/api/base/state.rs b/src/apeye/api/base/state.rs new file mode 100644 index 0000000..cbc635c --- /dev/null +++ b/src/apeye/api/base/state.rs @@ -0,0 +1,81 @@ +//! 
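On the wire, each helper such as `chain::get_block_hash_raw` resolves to a plain JSON-RPC 2.0 exchange. A minimal sketch of that traffic for `chain_getBlockHash`; the id is whatever the client assigned, and the hash value is purely illustrative:

```rust
use serde_json::json;

fn main() {
    // Client -> node
    let request = json!({
        "jsonrpc": "2.0",
        "id": 1,
        "method": "chain_getBlockHash",
        "params": [0],
    });
    // Node -> client (the hash shown is just an example value)
    let response = json!({
        "jsonrpc": "2.0",
        "id": 1,
        "result": "0xb0a8d493285c2df73290dfb7e61f870f17b41801197a149ca93654499ea3dafe",
    });

    println!("-> {request}");
    println!("<- {response}");
}
```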
+ +// crates.io +use subrpcer::state; +// subapeye +use crate::{apeye::api::prelude::*, prelude::*}; + +/// +#[async_trait::async_trait] +pub trait Api { + /// + async fn get_keys(&self, prefix: P, hash: Option<&str>) -> Result> + where + P: Parameter, + R: Deserialization; + + /// + async fn get_metadata(&self, hash: Option<&str>) -> Result> + where + R: Deserialization; + + /// + async fn get_pairs(&self, prefix: P, hash: Option<&str>) -> Result> + where + P: Parameter, + R: Deserialization; + + /// + async fn get_read_proof(&self, keys: K, hash: Option<&str>) -> Result> + where + K: Parameter, + R: Deserialization; + + /// + async fn get_runtime_version(&self, hash: Option<&str>) -> Result> + where + R: Deserialization; +} +#[async_trait::async_trait] +impl Api for T +where + T: Layer, +{ + async fn get_keys(&self, prefix: P, hash: Option<&str>) -> Result> + where + P: Parameter, + R: Deserialization, + { + self.request::<_, R>(state::get_keys_raw(prefix, hash)).await + } + + async fn get_metadata(&self, hash: Option<&str>) -> Result> + where + R: Deserialization, + { + self.request::<_, R>(state::get_metadata_raw(hash)).await + } + + async fn get_pairs(&self, prefix: P, hash: Option<&str>) -> Result> + where + P: Parameter, + R: Deserialization, + { + self.request::<_, R>(state::get_pairs_raw(prefix, hash)).await + } + + async fn get_read_proof(&self, keys: K, hash: Option<&str>) -> Result> + where + K: Parameter, + R: Deserialization, + { + self.request::<_, R>(state::get_read_proof_raw(keys, hash)).await + } + + async fn get_runtime_version(&self, hash: Option<&str>) -> Result> + where + R: Deserialization, + { + self.request::<_, R>(state::get_runtime_version_raw(hash)).await + } +} diff --git a/src/apeye/api/ext.rs b/src/apeye/api/ext.rs new file mode 100644 index 0000000..b252c2c --- /dev/null +++ b/src/apeye/api/ext.rs @@ -0,0 +1,199 @@ +//! 
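The extension API below builds storage keys from SCALE-encoded map keys (`EncodableArgs`), returning a fixed-size array via the nightly `generic_const_exprs` feature. A simplified, stable-Rust stand-in for the same idea, using `parity-scale-codec` which the crate already depends on; names and values here are illustrative:

```rust
// Illustration only: a stable-Rust stand-in for `EncodableArgs`, returning a Vec
// instead of a `[Vec<u8>; LENGTH]` so it does not need `generic_const_exprs`.
use parity_scale_codec::Encode;

fn encode_args<A: Encode, B: Encode>(args: &(A, B)) -> Vec<Vec<u8>> {
    vec![args.0.encode(), args.1.encode()]
}

fn main() {
    // Mirrors the (era, account) key pair used by the `Staking::ErasValidatorPrefs`
    // query in `src/main.rs`, with a zeroed account id as a placeholder.
    let encoded = encode_args(&(5_044_u32, [0u8; 32]));

    // SCALE encodes a `u32` as 4 little-endian bytes and a `[u8; 32]` as its raw bytes.
    assert_eq!(encoded[0], vec![0xb4_u8, 0x13, 0x00, 0x00]);
    assert_eq!(encoded[1].len(), 32);
}
```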
+ +pub use Api as ApiExt; + +// crates.io +use parity_scale_codec::Encode; +use submetadatan::{Meta, StorageEntry, StorageEntryType}; +use subrpcer::state; +// subapeye +use crate::{apeye::api::prelude::*, prelude::*}; + +/// +pub trait Api: ApiQuery {} +impl Api for T where T: Layer + Meta {} + +/// +#[async_trait::async_trait] +pub trait ApiQuery { + /// + fn query_of<'a, E>(&'a self, pallet: &'a str, item: &'a str) -> Result> + where + E: EncodableArgs; + + /// + async fn query(&self, storage_query: &StorageQuery) -> Result> + where + R: Deserialization; +} +#[async_trait::async_trait] +impl ApiQuery for T +where + T: Layer + Meta, +{ + fn query_of<'a, E>(&'a self, pallet: &'a str, item: &'a str) -> Result> + where + E: EncodableArgs, + { + let storage_entry = self + .storage(pallet, item) + .ok_or_else(|| error::Apeye::StorageNotFound(format!("{pallet}::{item}")))?; + + Ok(StorageQueryArgs::new(storage_entry)) + } + + async fn query(&self, storage_query: &StorageQuery) -> Result> + where + R: Deserialization, + { + self.request::<_, R>(state::get_storage_raw(&storage_query.key, storage_query.at.as_ref())) + .await + } +} + +/// +pub trait EncodableArgs { + /// + const LENGTH: usize; + + /// + fn encode(&self) -> [Vec; Self::LENGTH]; +} +impl EncodableArgs for () { + const LENGTH: usize = 0; + + fn encode(&self) -> [Vec; Self::LENGTH] { + [] + } +} +impl EncodableArgs for (E,) +where + E: Encode, +{ + const LENGTH: usize = 1; + + fn encode(&self) -> [Vec; Self::LENGTH] { + [self.0.encode()] + } +} +impl EncodableArgs for (E, E1) +where + E: Encode, + E1: Encode, +{ + const LENGTH: usize = 2; + + fn encode(&self) -> [Vec; Self::LENGTH] { + [self.0.encode(), self.1.encode()] + } +} +impl EncodableArgs for (E, E1, E2) +where + E: Encode, + E1: Encode, + E2: Encode, +{ + const LENGTH: usize = 3; + + fn encode(&self) -> [Vec; Self::LENGTH] { + [self.0.encode(), self.1.encode(), self.2.encode()] + } +} +impl EncodableArgs for (E, E1, E2, E3) +where + E: Encode, + E1: Encode, + E2: Encode, + E3: Encode, +{ + const LENGTH: usize = 4; + + fn encode(&self) -> [Vec; Self::LENGTH] { + [self.0.encode(), self.1.encode(), self.2.encode(), self.3.encode()] + } +} + +/// +pub struct StorageQuery<'a> { + /// + pub key: String, + /// + pub at: Option<&'a str>, +} +/// +pub struct StorageQueryArgs<'a, E> +where + E: EncodableArgs, +{ + /// + pub storage_entry: StorageEntry<'a>, + /// + pub keys: Option>, + /// + pub at: Option<&'a str>, +} +impl<'a, E> StorageQueryArgs<'a, E> +where + E: EncodableArgs, +{ + /// + pub fn new(storage_entry: StorageEntry<'a>) -> Self { + Self { storage_entry, keys: None, at: None } + } + + /// + pub fn keys(mut self, keys: Keys<'a, E>) -> Self { + self.keys = Some(keys); + + self + } + + /// + pub fn at(mut self, at: &'a str) -> Self { + self.at = Some(at); + + self + } + + /// + pub fn construct(self) -> Result> + where + [(); E::LENGTH]:, + { + let key = match &self.storage_entry.r#type { + StorageEntryType::Plain => + substorager::storage_value_key(self.storage_entry.prefix, self.storage_entry.item), + StorageEntryType::Map(hashers) => match self.keys.ok_or(error::Apeye::KeysNotFound)? 
{ + Keys::Raw(keys) => substorager::storage_n_map_key( + self.storage_entry.prefix, + self.storage_entry.item, + hashers.iter().zip(keys.encode().iter()).collect::>(), + ), + Keys::Encoded(keys) => substorager::storage_n_map_key( + self.storage_entry.prefix, + self.storage_entry.item, + hashers.iter().zip(keys.iter()).collect::>(), + ), + }, + } + .to_string(); + + #[cfg(feature = "trace")] + tracing::trace!("StorageKey({key})"); + + Ok(StorageQuery { key, at: self.at }) + } +} + +/// +pub enum Keys<'a, E> +where + E: EncodableArgs, +{ + /// + Raw(&'a E), + /// + Encoded(&'a [&'a [u8]]), + // /// + // Hashed(&'a [&'a [u8]]), +} diff --git a/src/apeye/api/frontier.rs b/src/apeye/api/frontier.rs new file mode 100644 index 0000000..12cc323 --- /dev/null +++ b/src/apeye/api/frontier.rs @@ -0,0 +1,10 @@ +//! + +pub use Api as ApiFrontier; + +// subapeye +use crate::apeye::api::prelude::*; + +/// +pub trait Api {} +impl Api for T where T: Layer {} diff --git a/src/apeye/runtime.rs b/src/apeye/runtime.rs new file mode 100644 index 0000000..faf6c82 --- /dev/null +++ b/src/apeye/runtime.rs @@ -0,0 +1,30 @@ +//! + +// crates.io +use array_bytes::{Hex, TryFromHex}; +// subapeye +use crate::apeye::{Apeye, Invoker}; + +/// +pub trait Runtime: Send + Sync { + /// + type BlockNumber: ParameterConvertor; + /// + type Hash: ParameterConvertor; + /// + type AccountId: ParameterConvertor; +} + +/// +pub trait ParameterConvertor: Hex + TryFromHex {} +impl ParameterConvertor for T where T: Hex + TryFromHex {} + +impl Runtime for Apeye +where + I: Invoker, + R: Runtime, +{ + type AccountId = R::AccountId; + type BlockNumber = R::BlockNumber; + type Hash = R::Hash; +} diff --git a/src/error.rs b/src/error.rs new file mode 100644 index 0000000..2bb1739 --- /dev/null +++ b/src/error.rs @@ -0,0 +1,92 @@ +//! Subapeye error collections. + +use thiserror::Error as ThisError; + +/// Main error. +#[allow(missing_docs)] +#[derive(Debug, ThisError)] +pub enum Error { + #[error(transparent)] + Quick(#[from] Quick), + + #[error(transparent)] + Apeye(#[from] Apeye), + #[error(transparent)] + Generic(#[from] Generic), + #[error(transparent)] + Jsonrpc(#[from] Jsonrpc), + #[error(transparent)] + Tokio(#[from] Tokio), + #[error(transparent)] + Tungstenite(#[from] tokio_tungstenite::tungstenite::Error), +} + +/// An error helper/wrapper to debug/print the error quickly. +#[derive(Debug)] +pub struct Quick(String); +impl std::fmt::Display for Quick { + fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { + std::fmt::Debug::fmt(self, f) + } +} +impl std::error::Error for Quick {} +/// Wrap the error with [`Quick`]. +pub fn quick_err(e: E) -> Quick +where + E: std::fmt::Debug, +{ + Quick(format!("{e:?}")) +} + +/// Api error. +#[allow(missing_docs)] +#[derive(Debug, ThisError)] +pub enum Apeye { + #[error("[apeye] can not find keys of the storage map")] + KeysNotFound, + #[error("[apeye] can not find the storage from runtime, {0:?}")] + StorageNotFound(String), +} + +/// Generic error. +#[allow(missing_docs)] +#[derive(Debug, ThisError)] +pub enum Generic { + #[error("{0:?}")] + AlmostImpossible(&'static str), + // #[error(transparent)] + // Codec(#[from] parity_scale_codec::Error), + #[error(transparent)] + Serde(#[from] serde_json::Error), + #[error(transparent)] + Submetadatan(#[from] submetadatan::Error), +} +/// Wrap the error with [`Generic::AlmostImpossible`]. +pub fn almost_impossible(e_msg: &'static str) -> Generic { + Generic::AlmostImpossible(e_msg) +} + +/// JSONRPC error. 
+#[allow(missing_docs)] +#[derive(Debug, ThisError)] +pub enum Jsonrpc { + #[error("[jsonrpc] empty batch")] + EmptyBatch, + #[error("[jsonrpc] exceeded the maximum number of request queue size, {0:?}")] + ExceededRequestQueueMaxSize(crate::jsonrpc::Id), + #[error("[jsonrpc] response error, {0:?}")] + ResponseError(serde_json::Value), +} + +/// Tokio error. +#[allow(missing_docs)] +#[derive(Debug, ThisError)] +pub enum Tokio { + #[error(transparent)] + OneshotRecv(tokio::sync::oneshot::error::RecvError), + // e.g. https://github.com/tokio-rs/tokio/blob/master/tokio/src/sync/mpsc/error.rs#L12 + #[error("channel closed")] + ChannelClosed, + #[error(transparent)] + Elapsed(tokio::time::error::Elapsed), +} diff --git a/src/jsonrpc.rs b/src/jsonrpc.rs new file mode 100644 index 0000000..fa8f667 --- /dev/null +++ b/src/jsonrpc.rs @@ -0,0 +1,187 @@ +//! Subapeye core JSONRPC library. + +pub mod ws; +pub use ws::{Ws, WsInitializer}; + +// std +use std::sync::{ + atomic::{AtomicUsize, Ordering}, + Arc, +}; +// crates.io +use serde::{Deserialize, Serialize}; +use serde_json::Value; +// subapeye +use crate::prelude::*; + +/// JSONRPC Id. +pub type Id = usize; + +/// +pub type JsonrpcResult = StdResult; + +/// JSONRPC version. +pub const VERSION: &str = "2.0"; + +/// +#[async_trait::async_trait] +pub trait Initialize { + /// + type Connection; + + /// + async fn initialize(self) -> Result; +} +#[async_trait::async_trait] +impl<'a> Initialize for WsInitializer<'a> { + type Connection = Ws; + + async fn initialize(self) -> Result { + // #[cfg(feature = "trace")] + // tracing::trace!("Connecting({uri})"); + + self.initialize().await + } +} + +/// +pub trait IntoRequestRaw<'a>: Send + Into> {} +impl<'a, T> IntoRequestRaw<'a> for T where T: Send + Into> {} + +/// +#[async_trait::async_trait] +pub trait Jsonrpc { + /// Send a single request. + async fn request<'a, Req>(&self, raw_request: Req) -> Result + where + Req: IntoRequestRaw<'a>; + + /// Send a single request. + async fn batch<'a, Req>(&self, raw_requests: Vec) -> Result> + where + Req: IntoRequestRaw<'a>; +} + +/// +pub trait Connection: Send + Sync + Jsonrpc {} +impl Connection for T where T: Send + Sync + Jsonrpc {} + +/// +pub trait Response { + /// + fn id(&self) -> Id; + + // /// + // fn deserialize(self) -> Self; +} +impl Response for JsonrpcResult { + fn id(&self) -> Id { + match self { + Self::Ok(r) => r.id, + Self::Err(e) => e.id, + } + } + + // fn deserialize(self) -> Result { + // Ok(match self { + // Self::Ok(r) => Self::Ok(ResponseResult { + // jsonrpc: r.jsonrpc, + // id: r.id, + // result: serde_json::to_value(r.result).map_err(error::Generic::Serde)?, + // }), + // Self::Err(e) => Self::Err(ResponseError { + // jsonrpc: e.jsonrpc, + // id: e.id, + // error: serde_json::to_value(e.error).map_err(error::Generic::Serde)?, + // }), + // }) + // } +} + +/// Generic JSONRPC request. +#[allow(missing_docs)] +#[derive(Clone, Debug, Serialize)] +pub struct Request<'a, P> { + #[serde(borrow)] + pub jsonrpc: &'a str, + pub id: Id, + #[serde(borrow)] + pub method: &'a str, + pub params: P, +} +/// Raw JSONRPC request. +#[allow(missing_docs)] +#[derive(Clone, Debug)] +pub struct RequestRaw<'a, P> { + pub method: &'a str, + pub params: P, +} +impl<'a, P> From<(&'a str, P)> for RequestRaw<'a, P> { + fn from(raw: (&'a str, P)) -> Self { + Self { method: raw.0, params: raw.1 } + } +} + +/// Generic JSONRPC result. 
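The `Response::id` accessor above exists so the client can re-pair responses with their requests: a batch may come back in any order, and the WS layer sorts it by id before handing it to the caller. A small standalone sketch of that reordering, using plain `serde_json` values:

```rust
use serde_json::{json, Value};

fn main() {
    // Responses to a batch of three requests, delivered out of order by the node.
    let mut responses: Vec<Value> = vec![
        json!({"jsonrpc": "2.0", "id": 3, "result": "peer-count"}),
        json!({"jsonrpc": "2.0", "id": 1, "result": "block-hash"}),
        json!({"jsonrpc": "2.0", "id": 2, "error": {"code": -32601, "message": "Method not found"}}),
    ];

    // Sorting by id restores the order in which the requests were sent.
    responses.sort_unstable_by_key(|r| r["id"].as_u64().unwrap_or_default());

    assert_eq!(responses[0]["result"], "block-hash");
    assert_eq!(responses[2]["result"], "peer-count");
}
```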
+#[allow(missing_docs)] +#[derive(Clone, Debug, Deserialize)] +pub struct ResponseResult { + pub jsonrpc: String, + pub id: Id, + pub result: Value, +} + +/// Generic JSONRPC error. +#[allow(missing_docs)] +#[derive(Clone, Debug, Deserialize)] +pub struct ResponseError { + pub jsonrpc: String, + pub id: Id, + pub error: Value, +} + +#[derive(Debug)] +struct RequestQueue { + size: Id, + active: Arc<()>, + next: AtomicUsize, +} +impl RequestQueue { + fn consume_once(&self) -> Result> { + let active = Arc::strong_count(&self.active); + + #[cfg(feature = "trace")] + tracing::trace!("RequestQueue({active}/{})", self.size); + + if active == self.size { + Err(error::Jsonrpc::ExceededRequestQueueMaxSize(self.size))? + } else { + Ok(RequestQueueGuard { + lock: self.next.fetch_add(1, Ordering::SeqCst), + _strong: self.active.clone(), + }) + } + } + + fn consume(&self, count: Id) -> Result>> { + let active = Arc::strong_count(&self.active); + + #[cfg(feature = "trace")] + tracing::trace!("RequestQueue({active}/{})", self.size); + + if active == self.size { + Err(error::Jsonrpc::ExceededRequestQueueMaxSize(self.size))? + } else { + Ok(RequestQueueGuard { + lock: (0..count).map(|_| self.next.fetch_add(1, Ordering::SeqCst)).collect(), + _strong: self.active.clone(), + }) + } + } +} + +#[derive(Debug)] +struct RequestQueueGuard { + lock: L, + _strong: Arc<()>, +} diff --git a/src/jsonrpc/ws.rs b/src/jsonrpc/ws.rs new file mode 100644 index 0000000..0d2c5eb --- /dev/null +++ b/src/jsonrpc/ws.rs @@ -0,0 +1,297 @@ +//! Full functionality WS JSONRPC client implementation. +//! Follow specification. + +pub mod initializer; +pub use initializer::*; + +// std +use std::{fmt::Debug, time::Duration}; +// crates.io +use futures::{ + stream::{SplitSink, SplitStream}, + SinkExt, +}; +use fxhash::FxHashMap; +use serde_json::value::RawValue; +use tokio::{ + net::TcpStream, + sync::{mpsc, oneshot}, + time, +}; +use tokio_tungstenite::{ + tungstenite::{error::Result as WsResult, Message}, + MaybeTlsStream, WebSocketStream, +}; +// subapeye +use crate::{ + jsonrpc::*, + prelude::{Error, *}, +}; + +type CallSender = mpsc::Sender; +type CallReceiver = mpsc::Receiver; + +type WsSender = SplitSink>, Message>; +type WsReceiver = SplitStream>>; + +type ErrorSender = oneshot::Sender; +type ErrorReceiver = oneshot::Receiver; + +type ExitSender = oneshot::Sender<()>; +type ExitReceiver = oneshot::Receiver<()>; + +type Notifier = oneshot::Sender>; +type Pool = FxHashMap; + +type RequestResponse = JsonrpcResult; +type RequestNotifier = Notifier; +type RequestPool = Pool; + +type BatchResponse = Vec; +type BatchNotifier = Notifier; +type BatchPool = Pool; + +/// A Ws instance. +/// +/// Use this to interact with the server. +#[derive(Debug)] +pub struct Ws { + messenger: CallSender, + request_queue: RequestQueue, + request_timeout: Duration, + reporter: Option, + closer: Option, +} +impl Drop for Ws { + fn drop(&mut self) { + if let Some(c) = self.closer.take() { + let _ = c.send(()); + } else { + // + } + } +} +#[async_trait::async_trait] +impl Jsonrpc for Ws { + /// Send a single request. + async fn request<'a, Req>(&self, raw_request: Req) -> Result + where + Req: Send + Into>, + { + let RequestQueueGuard { lock: id, .. 
} = self.request_queue.consume_once()?; + let RequestRaw { method, params } = raw_request.into(); + let (tx, rx) = oneshot::channel(); + + #[cfg(feature = "debug")] + self.messenger.send(Call::Debug(id)).await.map_err(|_| error::Tokio::ChannelClosed)?; + self.messenger + .send(Call::Single(CallInner { + id, + request: serde_json::to_string(&Request { jsonrpc: VERSION, id, method, params }) + .map_err(error::Generic::Serde)?, + notifier: tx, + })) + .await + .map_err(|_| error::Tokio::ChannelClosed)?; + + time::timeout(self.request_timeout, rx) + .await + .map_err(error::Tokio::Elapsed)? + .map_err(error::Tokio::OneshotRecv)? + } + + /// Send a batch of requests. + async fn batch<'a, Req>(&self, raw_requests: Vec) -> Result> + where + Req: Send + Into>, + { + if raw_requests.is_empty() { + Err(error::Jsonrpc::EmptyBatch)?; + } + + let RequestQueueGuard { lock: ids, .. } = self.request_queue.consume(raw_requests.len())?; + let id = ids + .first() + .ok_or(error::almost_impossible("[jsonrpc::ws] acquired `lock` is empty"))? + .to_owned(); + let requests = ids + .into_iter() + .zip(raw_requests.into_iter()) + .map(|(id, raw_request)| { + let RequestRaw { method, params } = raw_request.into(); + + Request { jsonrpc: VERSION, id, method, params } + }) + .collect::>(); + let request = serde_json::to_string(&requests).map_err(error::Generic::Serde)?; + let (tx, rx) = oneshot::channel(); + + self.messenger + .send(Call::Batch(CallInner { id, request, notifier: tx })) + .await + .map_err(|_| error::Tokio::ChannelClosed)?; + + let mut responses = time::timeout(self.request_timeout, rx) + .await + .map_err(error::Tokio::Elapsed)? + .map_err(error::Tokio::OneshotRecv)?; + // Each id is unique. + let _ = responses.as_mut().map(|r| r.sort_unstable_by_key(|r| r.id())); + + responses + } +} + +#[derive(Debug)] +enum Call { + #[cfg(feature = "debug")] + Debug(Id), + Single(CallInner), + Batch(CallInner), +} +impl Call { + async fn try_send(self, ws_tx: &mut WsSender, pool: &mut Pools) -> bool { + match self { + #[cfg(feature = "debug")] + Call::Debug(_) => {}, + Call::Single(c) => + if !c.try_send(ws_tx, &mut pool.requests).await { + return false; + }, + Call::Batch(c) => + if !c.try_send(ws_tx, &mut pool.batches).await { + return false; + }, + } + + true + } +} +// A single request object. +// `id`: Request Id. +// +// Or +// +// A batch requests object to send several request objects simultaneously. +// `id`: The first request's id. 
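Both `request` and `batch` above gate concurrency through `RequestQueue`: the number of live `Arc<()>` clones tracks how many calls are in flight, and an atomic counter hands out the ids. A standalone sketch of that mechanism, with bounds and names chosen only for illustration:

```rust
use std::sync::{
    atomic::{AtomicUsize, Ordering},
    Arc,
};

struct Queue {
    size: usize,
    active: Arc<()>,
    next: AtomicUsize,
}
struct Guard {
    id: usize,
    // Dropping the guard drops this clone and frees one slot.
    _strong: Arc<()>,
}
impl Queue {
    fn consume_once(&self) -> Result<Guard, &'static str> {
        // `strong_count` is 1 for the queue itself plus 1 per outstanding guard.
        if Arc::strong_count(&self.active) > self.size {
            Err("request queue is full")
        } else {
            Ok(Guard {
                id: self.next.fetch_add(1, Ordering::SeqCst),
                _strong: self.active.clone(),
            })
        }
    }
}

fn main() {
    let queue = Queue { size: 2, active: Arc::new(()), next: AtomicUsize::new(1) };
    let first = queue.consume_once().unwrap();
    let _second = queue.consume_once().unwrap();

    assert_eq!(first.id, 1);
    assert!(queue.consume_once().is_err()); // both slots taken
    drop(first); // finishing a call releases its slot
    assert!(queue.consume_once().is_ok());
}
```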
+#[derive(Debug)] +struct CallInner { + id: Id, + request: String, + notifier: Notifier, +} +impl CallInner +where + T: Debug, +{ + async fn try_send(self, ws_tx: &mut WsSender, pool: &mut Pool>) -> bool { + if let Err(e) = ws_tx.send(Message::Text(self.request)).await { + try_send(self.notifier, Err(e.into())) + } else { + pool.insert(self.id, self.notifier); + + true + } + } +} + +fn try_send(tx: oneshot::Sender, any: T) -> bool +where + T: Debug, +{ + if let Err(e) = tx.send(any) { + tracing::error!("[jsonrpc::ws] failed to send error to outside, {e:?}"); + + return false; + } + + true +} + +#[derive(Debug, Default)] +struct Pools { + requests: RequestPool, + batches: BatchPool, +} +impl Pools { + fn new() -> Self { + Default::default() + } + + async fn on_ws_recv(&mut self, response: WsResult) -> Result<()> { + match response { + Ok(msg) => { + match msg { + Message::Binary(r) => self.process_response(&r).await, + Message::Text(r) => self.process_response(r.as_bytes()).await, + Message::Ping(_) => tracing::trace!("ping"), + Message::Pong(_) => tracing::trace!("pong"), + Message::Close(_) => tracing::trace!("close"), + Message::Frame(_) => tracing::trace!("frame"), + } + + Ok(()) + }, + Err(e) => Err(e)?, + } + } + + // TODO: error handling + async fn process_response(&mut self, response: &[u8]) { + #[cfg(feature = "trace")] + tracing::trace!("Response({response:?})"); + + let response = response.trim_ascii_start(); + let Some(first) = response.first() else { + tracing::error!("[jsonrpc::ws] empty response"); + + return; + }; + + match first { + b'{' => + if let Ok(r) = serde_json::from_slice::(response) { + if r.id == 0 { + return; + } + + let notifier = self.requests.remove(&r.id).unwrap(); + + if let Err(e) = notifier.send(Ok(Ok(r))) { + tracing::error!("{e:?}"); + } + } else if let Ok(e) = serde_json::from_slice::(response) { + dbg!(e); + // Response({"jsonrpc":"2.0","error":{"code":-32601,"message":"Method not + // found"},"id":2}) + // TODO: return + }, + b'[' => + if let Ok(r) = serde_json::from_slice::>(response) { + let r = r + .into_iter() + .map(|r| { + if let Ok(r) = serde_json::from_str::(r.get()) { + Ok(Ok(r)) + } else if let Ok(r) = serde_json::from_str::(r.get()) { + Ok(Err(r)) + } else { + Err(error::almost_impossible("TODO"))? + } + }) + .collect::>>() + .unwrap(); + + let notifier = self.batches.remove(&r.first().unwrap().id()).unwrap(); + + if let Err(e) = notifier.send(Ok(r)) { + tracing::error!("{e:?}"); + } + }, + _ => { + tracing::error!("unable to process response, {response:?}"); + // TODO: return + }, + } + } +} diff --git a/src/jsonrpc/ws/initializer.rs b/src/jsonrpc/ws/initializer.rs new file mode 100644 index 0000000..ede19d7 --- /dev/null +++ b/src/jsonrpc/ws/initializer.rs @@ -0,0 +1,285 @@ +//! + +pub use Initializer as WsInitializer; + +// std +use std::{future::Future, pin::Pin, str, time::Duration}; +// crates.io +use futures::{ + future::{self, Either, Fuse}, + stream, FutureExt, StreamExt, +}; +use tokio_stream::wrappers::IntervalStream; +// subapeye +use crate::{jsonrpc::ws::*, prelude::*}; + +type GenericConnect = Box< + dyn FnOnce( + Duration, + WsSender, + WsReceiver, + CallReceiver, + ErrorSender, + ExitReceiver, + ) -> Pin + Send>> + + Send, +>; + +/// [`Ws`] initializer. +#[derive(Clone, Debug)] +pub struct Initializer<'a> { + /// URI to connect to. + /// + /// Default: `ws://127.0.0.1:9944`. + pub uri: &'a str, + /// Maximum concurrent task count. + pub max_concurrency: Id, + /// Send tick with this interval to keep the WS alive. 
+ pub interval: Duration, + /// Request timeout. + pub request_timeout: Duration, + /// Future selector. + pub future_selector: FutureSelector, +} +impl<'a> Initializer<'a> { + /// Create a initializer with default configurations. + pub fn new() -> Self { + Self::default() + } + + /// Set the [`uri`](#structfield.uri). + pub fn uri(mut self, uri: &'a str) -> Self { + self.uri = uri; + + self + } + + /// Set the [`max_concurrency`](#structfield.max_concurrency). + pub fn max_concurrency(mut self, max_concurrency: Id) -> Self { + self.max_concurrency = max_concurrency; + + self + } + + /// Set the [`interval`](#structfield.interval). + pub fn interval(mut self, interval: Duration) -> Self { + self.interval = interval; + + self + } + + /// Set the [`request_timeout`](#structfield.request_timeout). + pub fn request_timeout(mut self, request_timeout: Duration) -> Self { + self.request_timeout = request_timeout; + + self + } + + /// Set the [`future_selector`](#structfield.future_selector). + pub fn future_selector(mut self, future_selector: FutureSelector) -> Self { + self.future_selector = future_selector; + + self + } + + /// Initialize the connection. + pub async fn initialize(self) -> Result { + let (messenger, reporter, closer) = self + .connect(match self.future_selector { + FutureSelector::Futures => Box::new(connect_futures), + FutureSelector::Tokio => Box::new(connect_tokio), + }) + .await?; + + Ok(Ws { + messenger, + request_queue: RequestQueue { + size: self.max_concurrency, + active: Arc::new(()), + // Id 0 is reserved for system health check. + next: AtomicUsize::new(1), + }, + request_timeout: self.request_timeout, + reporter: Some(reporter), + closer: Some(closer), + }) + } + + async fn connect( + &self, + connect_inner: GenericConnect, + ) -> Result<(CallSender, ErrorReceiver, ExitSender)> { + let interval = self.interval; + let (ws_tx, ws_rx) = tokio_tungstenite::connect_async(self.uri).await?.0.split(); + let (call_tx, call_rx) = mpsc::channel(self.max_concurrency); + let (error_tx, error_rx) = oneshot::channel(); + let (exit_tx, exit_rx) = oneshot::channel(); + + tokio::spawn(async move { + connect_inner(interval, ws_tx, ws_rx, call_rx, error_tx, exit_rx).await + }); + + Ok((call_tx, error_rx, exit_tx)) + } +} +impl<'a> Default for Initializer<'a> { + fn default() -> Self { + Self { + uri: "ws://127.0.0.1:9944", + max_concurrency: num_cpus::get(), + interval: Duration::from_secs(10), + request_timeout: Duration::from_secs(30), + future_selector: FutureSelector::default(), + } + } +} + +/// Async future selectors. +#[derive(Clone, Debug)] +pub enum FutureSelector { + /// Use [`futures::future::select`]. + Futures, + /// Use [`tokio::select!`]. + Tokio, +} +impl Default for FutureSelector { + fn default() -> Self { + Self::Tokio + } +} + +fn connect_futures( + interval: Duration, + mut ws_tx: WsSender, + mut ws_rx: WsReceiver, + call_rx: CallReceiver, + error_tx: ErrorSender, + exit_rx: ExitReceiver, +) -> Pin + Send>> { + Box::pin(async move { + let call_rx = stream::unfold(call_rx, |mut r| async { r.recv().await.map(|c| (c, r)) }); + + futures::pin_mut!(call_rx); + + let mut rxs_fut = future::select(call_rx.next(), ws_rx.next()); + // TODO: clean dead items? + let mut pool = Pools::new(); + // Minimum interval is 1ms. + let interval_max = interval.max(Duration::from_millis(1)); + let mut interval_max = IntervalStream::new(time::interval(interval_max)); + // Disable the tick, if the interval is zero. 
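The keep-alive branch right below is switched off by substituting `Fuse::terminated()`, a future that never resolves, so a zero `interval` simply removes the tick from the select loop. A self-contained sketch of that pattern; the durations and messages are arbitrary:

```rust
use std::time::Duration;

use futures::{future::Fuse, FutureExt, StreamExt};
use tokio_stream::wrappers::IntervalStream;

async fn first_event(interval: Duration) -> &'static str {
    let mut ticks =
        IntervalStream::new(tokio::time::interval(interval.max(Duration::from_millis(1))));
    // A terminated fuse never completes, so the tick branch is effectively disabled.
    let mut tick_fut =
        if interval.is_zero() { Fuse::terminated() } else { ticks.next().fuse() };

    tokio::select! {
        _ = &mut tick_fut => "tick: a keep-alive message would be sent here",
        _ = tokio::time::sleep(Duration::from_millis(50)) => "no tick fired",
    }
}

#[tokio::main]
async fn main() {
    assert_eq!(
        first_event(Duration::from_millis(1)).await,
        "tick: a keep-alive message would be sent here"
    );
    assert_eq!(first_event(Duration::ZERO).await, "no tick fired");
}
```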
+ let mut exit_or_interval_fut = future::select( + exit_rx, + if interval.is_zero() { Fuse::terminated() } else { interval_max.next().fuse() }, + ); + + loop { + match future::select(rxs_fut, exit_or_interval_fut).await { + Either::Left((Either::Left((maybe_call, ws_rx_next)), exit_or_interval_fut_)) => { + if let Some(c) = maybe_call { + #[cfg(feature = "trace")] + tracing::trace!("Call({c:?})"); + + if !c.try_send(&mut ws_tx, &mut pool).await { + return; + } + } else { + try_send(error_tx, error::Tokio::ChannelClosed.into()); + + return; + } + + rxs_fut = future::select(call_rx.next(), ws_rx_next); + exit_or_interval_fut = exit_or_interval_fut_; + }, + Either::Left(( + Either::Right((maybe_response, call_rx_next)), + exit_or_interval_fut_, + )) => { + if let Some(response) = maybe_response { + pool.on_ws_recv(response).await.unwrap() + } else { + // TODO?: closed + } + + rxs_fut = future::select(call_rx_next, ws_rx.next()); + exit_or_interval_fut = exit_or_interval_fut_; + }, + Either::Right((Either::Left((_, _)), _)) => return, + Either::Right((Either::Right((_, exit_rx)), rxs_fut_)) => { + #[cfg(feature = "trace")] + tracing::trace!("TickRequest(Ping)"); + + ws_tx.send(Message::Text("Ping".into())).await.unwrap(); + + rxs_fut = rxs_fut_; + exit_or_interval_fut = future::select( + exit_rx, + if interval.is_zero() { + Fuse::terminated() + } else { + interval_max.next().fuse() + }, + ); + }, + } + } + }) +} + +fn connect_tokio( + interval: Duration, + mut ws_tx: WsSender, + mut ws_rx: WsReceiver, + mut call_rx: CallReceiver, + error_tx: ErrorSender, + mut exit_rx: ExitReceiver, +) -> Pin + Send>> { + Box::pin(async move { + // TODO: clean dead items? + let mut pool = Pools::new(); + // Minimum interval is 1ms. + let interval_max = interval.max(Duration::from_millis(1)); + let mut interval_max = IntervalStream::new(time::interval(interval_max)); + // Disable the tick, if the interval is zero. + let mut interval_fut = + if interval.is_zero() { Fuse::terminated() } else { interval_max.next().fuse() }; + + loop { + tokio::select! { + maybe_request = call_rx.recv() => { + if let Some(c) = maybe_request { + #[cfg(feature = "trace")] + tracing::trace!("Call({c:?})"); + + if !c.try_send(&mut ws_tx, &mut pool).await { + return; + } + } else { + try_send(error_tx, error::Tokio::ChannelClosed.into()); + + return; + } + }, + maybe_response = ws_rx.next() => { + if let Some(response) = maybe_response { + pool.on_ws_recv(response).await.unwrap() + } else { + // TODO?: closed + } + } + _ = &mut interval_fut => { + #[cfg(feature = "trace")] + tracing::trace!("TickRequest(Ping)"); + + ws_tx.send(Message::Ping(Vec::new())).await.unwrap(); + + interval_fut = interval_max.next().fuse(); + }, + _ = &mut exit_rx => { + return; + }, + } + } + }) +} diff --git a/src/lib.rs b/src/lib.rs index e69de29..a7fa874 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -0,0 +1,22 @@ +//! Substrate API client. + +#![deny(missing_docs)] +// https://github.com/rust-lang/rust/issues/60551 +#![allow(incomplete_features)] +#![feature(generic_const_exprs)] +#![feature(byte_slice_trim_ascii)] + +pub mod prelude { + //! Subapeye core prelude. + + pub use std::result::Result as StdResult; + + pub use crate::error::{self, Error}; + + /// Subapeye's `Result` type. 
+ pub type Result = StdResult; +} + +pub mod apeye; +pub mod error; +pub mod jsonrpc; diff --git a/src/main.rs b/src/main.rs new file mode 100644 index 0000000..a008684 --- /dev/null +++ b/src/main.rs @@ -0,0 +1,71 @@ +use array_bytes::TryFromHex; +use serde_json::Value; +use subapeye::{ + apeye::{api::*, runtime::Runtime, Apeye, Invoker}, + jsonrpc::WsInitializer, +}; +use subrpcer::{chain, net}; + +#[tokio::main] +async fn main() { + tracing_subscriber::fmt::init(); + + enum R {} + impl Runtime for R { + type AccountId = [u8; 32]; + type BlockNumber = u32; + type Hash = [u8; 32]; + } + + let apeye = + >::initialize(WsInitializer::default().uri("wss://kusama-rpc.polkadot.io")) + .await + .unwrap(); + + // let hashes = apeye.get_block_hash::<_, Vec>(Some([0, 1, 2])).await.unwrap(); + + // dbg!(hashes); + + // for h in hashes { + // dbg!(apeye.get_block::(Some(&h)).await.unwrap()); + // dbg!(apeye.get_header::(Some(&h)).await.unwrap()); + // } + + // dbg!(apeye.get_finalized_head::().await.unwrap()); + // dbg!(apeye.get_metadata::(None).await.unwrap()); + + // dbg!(apeye + // .query::(&apeye.query_of::<()>("System", "Number").unwrap().construct().unwrap()) + // .await + // .unwrap()); + dbg!(apeye + .query::( + &apeye + .query_of("Staking", "ErasValidatorPrefs") + .unwrap() + .keys(Keys::Raw(&( + 5_044_u32, + ::AccountId::try_from_hex( + "0x305b1689cfee594c19a642a2fcd554074c93d62181c0d4117ebe196bd7c62b79" + ) + .unwrap() + ))) + .construct() + .unwrap() + ) + .await + .unwrap() + .unwrap()); + + let v = apeye + .batch::<_, Value>(vec![ + chain::get_block_hash_raw(>::None), + chain::get_finalized_head_raw(), + net::version_raw(), + ]) + .await + .unwrap(); + dbg!(v); + + dbg!(apeye.version::().await.unwrap()); +} From b72ac1758947f7bef1e1bf6e5c5c19562cc1cb69 Mon Sep 17 00:00:00 2001 From: Xavier Lau Date: Tue, 2 May 2023 02:22:53 +0800 Subject: [PATCH 2/7] New JSONRPC error --- src/apeye.rs | 9 +- src/error.rs | 30 +---- src/jsonrpc.rs | 25 +++- src/jsonrpc/error.rs | 58 ++++++++ src/jsonrpc/ws.rs | 245 ++++++++++++++++++++++++++++++---- src/jsonrpc/ws/initializer.rs | 192 +------------------------- src/main.rs | 86 ++++++------ 7 files changed, 355 insertions(+), 290 deletions(-) create mode 100644 src/jsonrpc/error.rs diff --git a/src/apeye.rs b/src/apeye.rs index 14c0752..e3d2b23 100644 --- a/src/apeye.rs +++ b/src/apeye.rs @@ -29,7 +29,7 @@ pub trait Invoker: Send + Sync { where R: DeserializeOwned, { - Ok(r.map_err(|e| error::Jsonrpc::ResponseError(e.error))?) + Ok(r.map_err(|e| error::Net::JsonrpcResponse(e.error))?) .and_then(|r| Ok(serde_json::from_value::(r.result).map_err(error::Generic::Serde)?)) } @@ -55,7 +55,7 @@ where Req: IntoRequestRaw<'a>, R: DeserializeOwned, { - self.request(raw_request).await.map(Self::map_result) + Ok(self.request(raw_request).await.map_err(error::Net::Jsonrpc)?).map(Self::map_result) } async fn batch<'a, Req, R>(&self, raw_requests: Vec) -> Result>> @@ -63,7 +63,8 @@ where Req: IntoRequestRaw<'a>, R: DeserializeOwned, { - self.batch(raw_requests).await.map(|v| v.into_iter().map(Self::map_result).collect()) + Ok(self.batch(raw_requests).await.map_err(error::Net::Jsonrpc)?) 
+ .map(|v| v.into_iter().map(Self::map_result).collect()) } } @@ -91,7 +92,7 @@ where where Iz: Initialize, { - let invoker = Arc::new(initializer.initialize().await?); + let invoker = Arc::new(initializer.initialize().await.map_err(error::Net::Jsonrpc)?); let mut apeye = Self { invoker, metadata: Default::default(), runtime: Default::default() }; apeye.metadata = diff --git a/src/error.rs b/src/error.rs index 2bb1739..235036e 100644 --- a/src/error.rs +++ b/src/error.rs @@ -1,5 +1,6 @@ //! Subapeye error collections. +// crates.io use thiserror::Error as ThisError; /// Main error. @@ -14,11 +15,7 @@ pub enum Error { #[error(transparent)] Generic(#[from] Generic), #[error(transparent)] - Jsonrpc(#[from] Jsonrpc), - #[error(transparent)] - Tokio(#[from] Tokio), - #[error(transparent)] - Tungstenite(#[from] tokio_tungstenite::tungstenite::Error), + Net(#[from] Net), } /// An error helper/wrapper to debug/print the error quickly. @@ -69,24 +66,9 @@ pub fn almost_impossible(e_msg: &'static str) -> Generic { /// JSONRPC error. #[allow(missing_docs)] #[derive(Debug, ThisError)] -pub enum Jsonrpc { - #[error("[jsonrpc] empty batch")] - EmptyBatch, - #[error("[jsonrpc] exceeded the maximum number of request queue size, {0:?}")] - ExceededRequestQueueMaxSize(crate::jsonrpc::Id), - #[error("[jsonrpc] response error, {0:?}")] - ResponseError(serde_json::Value), -} - -/// Tokio error. -#[allow(missing_docs)] -#[derive(Debug, ThisError)] -pub enum Tokio { +pub enum Net { #[error(transparent)] - OneshotRecv(tokio::sync::oneshot::error::RecvError), - // e.g. https://github.com/tokio-rs/tokio/blob/master/tokio/src/sync/mpsc/error.rs#L12 - #[error("channel closed")] - ChannelClosed, - #[error(transparent)] - Elapsed(tokio::time::error::Elapsed), + Jsonrpc(#[from] crate::jsonrpc::Error), + #[error("[jsonrpc] response error, {0:?}")] + JsonrpcResponse(serde_json::Value), } diff --git a/src/jsonrpc.rs b/src/jsonrpc.rs index fa8f667..f59cd54 100644 --- a/src/jsonrpc.rs +++ b/src/jsonrpc.rs @@ -1,8 +1,23 @@ -//! Subapeye core JSONRPC library. +//! JSONRPC client library. pub mod ws; pub use ws::{Ws, WsInitializer}; +pub mod error; +pub use error::Error; + +pub mod prelude { + //! JSONRPC prelude. + + pub use std::result::Result as StdResult; + + pub use crate::jsonrpc::error::{self, Error}; + + /// Subapeye's `Result` type. + pub type Result = StdResult; +} +use prelude::*; + // std use std::sync::{ atomic::{AtomicUsize, Ordering}, @@ -11,8 +26,6 @@ use std::sync::{ // crates.io use serde::{Deserialize, Serialize}; use serde_json::Value; -// subapeye -use crate::prelude::*; /// JSONRPC Id. pub type Id = usize; @@ -89,7 +102,7 @@ impl Response for JsonrpcResult { // id: r.id, // result: serde_json::to_value(r.result).map_err(error::Generic::Serde)?, // }), - // Self::Err(e) => Self::Err(ResponseError { + // Self::Err(e) => Self::Err(Response { // jsonrpc: e.jsonrpc, // id: e.id, // error: serde_json::to_value(e.error).map_err(error::Generic::Serde)?, @@ -147,6 +160,10 @@ struct RequestQueue { next: AtomicUsize, } impl RequestQueue { + fn with_size(size: Id) -> Self { + Self { size, active: Default::default(), next: Default::default() } + } + fn consume_once(&self) -> Result> { let active = Arc::strong_count(&self.active); diff --git a/src/jsonrpc/error.rs b/src/jsonrpc/error.rs new file mode 100644 index 0000000..59234a6 --- /dev/null +++ b/src/jsonrpc/error.rs @@ -0,0 +1,58 @@ +//! JSONRPC error collections. + +// crates.io +use thiserror::Error as ThisError; + +/// Main error. 
+#[allow(missing_docs)] +#[derive(Debug, ThisError)] +pub enum Error { + #[error(transparent)] + Jsonrpc(#[from] Jsonrpc), + #[error(transparent)] + Generic(#[from] Generic), + #[error(transparent)] + Tungstenite(#[from] tokio_tungstenite::tungstenite::Error), +} + +/// Generic error. +#[allow(missing_docs)] +#[derive(Debug, ThisError)] +pub enum Generic { + #[error("{0:?}")] + AlmostImpossible(&'static str), + #[error(transparent)] + Serde(#[from] serde_json::Error), +} +/// Wrap the error with [`Generic::AlmostImpossible`]. +pub fn almost_impossible(e_msg: &'static str) -> Generic { + Generic::AlmostImpossible(e_msg) +} + +/// JSONRPC error. +#[allow(missing_docs)] +#[derive(Debug, ThisError)] +pub enum Jsonrpc { + #[error(transparent)] + ChannelClosed(#[from] ChannelClosed), + #[error("[jsonrpc] empty batch")] + EmptyBatch, + #[error("[jsonrpc] exceeded the maximum number of request queue size, {0:?}")] + ExceededRequestQueueMaxSize(crate::jsonrpc::Id), + #[error("[jsonrpc] response error, {0:?}")] + Response(serde_json::Value), + #[error(transparent)] + Timeout(#[from] tokio::time::error::Elapsed), +} + +/// Channel closed error. +#[allow(missing_docs)] +#[derive(Debug, ThisError)] +pub enum ChannelClosed { + #[error("[jsonrpc] messenger channel closed")] + Messenger, + #[error("[jsonrpc] reporter channel closed")] + Reporter, + #[error("[jsonrpc] notifier channel closed")] + Notifier(#[from] tokio::sync::oneshot::error::RecvError), +} diff --git a/src/jsonrpc/ws.rs b/src/jsonrpc/ws.rs index 0d2c5eb..d17012e 100644 --- a/src/jsonrpc/ws.rs +++ b/src/jsonrpc/ws.rs @@ -5,11 +5,12 @@ pub mod initializer; pub use initializer::*; // std -use std::{fmt::Debug, time::Duration}; +use std::{fmt::Debug, future::Future, pin::Pin, time::Duration}; // crates.io use futures::{ - stream::{SplitSink, SplitStream}, - SinkExt, + future::{self, Either, Fuse}, + stream::{self, SplitSink, SplitStream}, + FutureExt, SinkExt, StreamExt, }; use fxhash::FxHashMap; use serde_json::value::RawValue; @@ -18,15 +19,25 @@ use tokio::{ sync::{mpsc, oneshot}, time, }; +use tokio_stream::wrappers::IntervalStream; use tokio_tungstenite::{ tungstenite::{error::Result as WsResult, Message}, MaybeTlsStream, WebSocketStream, }; // subapeye -use crate::{ - jsonrpc::*, - prelude::{Error, *}, -}; +use crate::jsonrpc::{prelude::*, *}; + +type GenericConnect = Box< + dyn FnOnce( + Duration, + WsSender, + WsReceiver, + CallReceiver, + ErrorSender, + ExitReceiver, + ) -> Pin + Send>> + + Send, +>; type CallSender = mpsc::Sender; type CallReceiver = mpsc::Receiver; @@ -83,7 +94,10 @@ impl Jsonrpc for Ws { let (tx, rx) = oneshot::channel(); #[cfg(feature = "debug")] - self.messenger.send(Call::Debug(id)).await.map_err(|_| error::Tokio::ChannelClosed)?; + self.messenger + .send(Call::Debug(id)) + .await + .map_err(|_| error::Jsonrpc::from(error::ChannelClosed::Messenger))?; self.messenger .send(Call::Single(CallInner { id, @@ -92,12 +106,12 @@ impl Jsonrpc for Ws { notifier: tx, })) .await - .map_err(|_| error::Tokio::ChannelClosed)?; + .map_err(|_| error::Jsonrpc::from(error::ChannelClosed::Messenger))?; time::timeout(self.request_timeout, rx) .await - .map_err(error::Tokio::Elapsed)? - .map_err(error::Tokio::OneshotRecv)? + .map_err(error::Jsonrpc::Timeout)? + .map_err(|e| error::Jsonrpc::from(error::ChannelClosed::Notifier(e)))? } /// Send a batch of requests. 
@@ -129,12 +143,12 @@ impl Jsonrpc for Ws { self.messenger .send(Call::Batch(CallInner { id, request, notifier: tx })) .await - .map_err(|_| error::Tokio::ChannelClosed)?; + .map_err(|_| error::Jsonrpc::from(error::ChannelClosed::Messenger))?; let mut responses = time::timeout(self.request_timeout, rx) .await - .map_err(error::Tokio::Elapsed)? - .map_err(error::Tokio::OneshotRecv)?; + .map_err(error::Jsonrpc::Timeout)? + .map_err(|e| error::Jsonrpc::from(error::ChannelClosed::Notifier(e)))?; // Each id is unique. let _ = responses.as_mut().map(|r| r.sort_unstable_by_key(|r| r.id())); @@ -142,6 +156,183 @@ impl Jsonrpc for Ws { } } +/// Async future selectors. +#[derive(Clone, Debug)] +pub enum FutureSelector { + /// Use [`futures::future::select`]. + Futures, + /// Use [`tokio::select!`]. + Tokio, +} +impl FutureSelector { + fn connector(&self) -> GenericConnect { + Box::new(match self { + FutureSelector::Futures => Self::connect_futures, + FutureSelector::Tokio => Self::connect_tokio, + }) + } + + fn connect_futures( + interval: Duration, + mut ws_tx: WsSender, + mut ws_rx: WsReceiver, + call_rx: CallReceiver, + error_tx: ErrorSender, + exit_rx: ExitReceiver, + ) -> Pin + Send>> { + Box::pin(async move { + let call_rx = stream::unfold(call_rx, |mut r| async { r.recv().await.map(|c| (c, r)) }); + + futures::pin_mut!(call_rx); + + let mut rxs_fut = future::select(call_rx.next(), ws_rx.next()); + // TODO: clean dead items? + let mut pool = Pools::new(); + // Minimum interval is 1ms. + let interval_max = interval.max(Duration::from_millis(1)); + let mut interval_max = IntervalStream::new(time::interval(interval_max)); + // Disable the tick, if the interval is zero. + let mut exit_or_interval_fut = future::select( + exit_rx, + if interval.is_zero() { Fuse::terminated() } else { interval_max.next().fuse() }, + ); + + loop { + match future::select(rxs_fut, exit_or_interval_fut).await { + Either::Left(( + Either::Left((maybe_call, ws_rx_next)), + exit_or_interval_fut_, + )) => { + if let Some(c) = maybe_call { + #[cfg(feature = "trace")] + tracing::trace!("Call({c:?})"); + + if !c.try_send(&mut ws_tx, &mut pool).await { + return; + } + } else { + try_send( + error_tx, + error::Jsonrpc::from(error::ChannelClosed::Reporter).into(), + false, + ); + + return; + } + + rxs_fut = future::select(call_rx.next(), ws_rx_next); + exit_or_interval_fut = exit_or_interval_fut_; + }, + Either::Left(( + Either::Right((maybe_response, call_rx_next)), + exit_or_interval_fut_, + )) => { + if let Some(response) = maybe_response { + pool.on_ws_recv(response).await.unwrap() + } else { + // TODO?: closed + } + + rxs_fut = future::select(call_rx_next, ws_rx.next()); + exit_or_interval_fut = exit_or_interval_fut_; + }, + Either::Right((Either::Left((_, _)), _)) => return, + Either::Right((Either::Right((_, exit_rx)), rxs_fut_)) => { + #[cfg(feature = "trace")] + tracing::trace!("Tick(Ping)"); + + if ws_tx.send(Message::Text("Ping".into())).await.is_err() { + try_send( + error_tx, + error::Jsonrpc::from(error::ChannelClosed::Reporter).into(), + false, + ); + + return; + } + + rxs_fut = rxs_fut_; + exit_or_interval_fut = future::select( + exit_rx, + if interval.is_zero() { + Fuse::terminated() + } else { + interval_max.next().fuse() + }, + ); + }, + } + } + }) + } + + fn connect_tokio( + interval: Duration, + mut ws_tx: WsSender, + mut ws_rx: WsReceiver, + mut call_rx: CallReceiver, + error_tx: ErrorSender, + mut exit_rx: ExitReceiver, + ) -> Pin + Send>> { + Box::pin(async move { + // TODO: clean dead items? 
+ let mut pool = Pools::new(); + // Minimum interval is 1ms. + let interval_max = interval.max(Duration::from_millis(1)); + let mut interval_max = IntervalStream::new(time::interval(interval_max)); + // Disable the tick, if the interval is zero. + let mut interval_fut = + if interval.is_zero() { Fuse::terminated() } else { interval_max.next().fuse() }; + + loop { + tokio::select! { + maybe_request = call_rx.recv() => { + if let Some(c) = maybe_request { + #[cfg(feature = "trace")] + tracing::trace!("Call({c:?})"); + + if !c.try_send(&mut ws_tx, &mut pool).await { + return; + } + } else { + try_send(error_tx, error::Jsonrpc::from(error::ChannelClosed::Reporter).into(), false); + + return; + } + }, + maybe_response = ws_rx.next() => { + if let Some(response) = maybe_response { + pool.on_ws_recv(response).await.unwrap() + } else { + // TODO?: closed + } + } + _ = &mut interval_fut => { + #[cfg(feature = "trace")] + tracing::trace!("Tick(Ping)"); + + if ws_tx.send(Message::Ping(Vec::new())).await.is_err() { + try_send(error_tx, error::Jsonrpc::from(error::ChannelClosed::Reporter).into(), false); + + return + }; + + interval_fut = interval_max.next().fuse(); + }, + _ = &mut exit_rx => { + return; + }, + } + } + }) + } +} +impl Default for FutureSelector { + fn default() -> Self { + Self::Tokio + } +} + #[derive(Debug)] enum Call { #[cfg(feature = "debug")] @@ -186,7 +377,7 @@ where { async fn try_send(self, ws_tx: &mut WsSender, pool: &mut Pool>) -> bool { if let Err(e) = ws_tx.send(Message::Text(self.request)).await { - try_send(self.notifier, Err(e.into())) + try_send(self.notifier, Err(e.into()), true) } else { pool.insert(self.id, self.notifier); @@ -195,12 +386,14 @@ where } } -fn try_send(tx: oneshot::Sender, any: T) -> bool +fn try_send(tx: oneshot::Sender, any: T, log: bool) -> bool where T: Debug, { if let Err(e) = tx.send(any) { - tracing::error!("[jsonrpc::ws] failed to send error to outside, {e:?}"); + if log { + tracing::error!("[jsonrpc::ws] failed to send error to outside, {e:?}"); + } return false; } @@ -251,20 +444,22 @@ impl Pools { match first { b'{' => if let Ok(r) = serde_json::from_slice::(response) { - if r.id == 0 { - return; - } - let notifier = self.requests.remove(&r.id).unwrap(); if let Err(e) = notifier.send(Ok(Ok(r))) { tracing::error!("{e:?}"); } } else if let Ok(e) = serde_json::from_slice::(response) { - dbg!(e); - // Response({"jsonrpc":"2.0","error":{"code":-32601,"message":"Method not - // found"},"id":2}) - // TODO: return + // E.g. 
+ // ``` + // Response({"jsonrpc":"2.0","error":{"code":-32601,"message":"Method not found"},"id":2}) + // ``` + + let notifier = self.requests.remove(&e.id).unwrap(); + + if let Err(e) = notifier.send(Ok(Err(e))) { + tracing::error!("{e:?}"); + } }, b'[' => if let Ok(r) = serde_json::from_slice::>(response) { diff --git a/src/jsonrpc/ws/initializer.rs b/src/jsonrpc/ws/initializer.rs index ede19d7..bdd6532 100644 --- a/src/jsonrpc/ws/initializer.rs +++ b/src/jsonrpc/ws/initializer.rs @@ -3,27 +3,11 @@ pub use Initializer as WsInitializer; // std -use std::{future::Future, pin::Pin, str, time::Duration}; +use std::{str, time::Duration}; // crates.io -use futures::{ - future::{self, Either, Fuse}, - stream, FutureExt, StreamExt, -}; -use tokio_stream::wrappers::IntervalStream; +use futures::StreamExt; // subapeye -use crate::{jsonrpc::ws::*, prelude::*}; - -type GenericConnect = Box< - dyn FnOnce( - Duration, - WsSender, - WsReceiver, - CallReceiver, - ErrorSender, - ExitReceiver, - ) -> Pin + Send>> - + Send, ->; +use crate::jsonrpc::{prelude::*, ws::*}; /// [`Ws`] initializer. #[derive(Clone, Debug)] @@ -84,31 +68,19 @@ impl<'a> Initializer<'a> { /// Initialize the connection. pub async fn initialize(self) -> Result { - let (messenger, reporter, closer) = self - .connect(match self.future_selector { - FutureSelector::Futures => Box::new(connect_futures), - FutureSelector::Tokio => Box::new(connect_tokio), - }) - .await?; + let (messenger, reporter, closer) = self.connect().await?; Ok(Ws { messenger, - request_queue: RequestQueue { - size: self.max_concurrency, - active: Arc::new(()), - // Id 0 is reserved for system health check. - next: AtomicUsize::new(1), - }, + request_queue: RequestQueue::with_size(self.max_concurrency), request_timeout: self.request_timeout, reporter: Some(reporter), closer: Some(closer), }) } - async fn connect( - &self, - connect_inner: GenericConnect, - ) -> Result<(CallSender, ErrorReceiver, ExitSender)> { + async fn connect(&self) -> Result<(CallSender, ErrorReceiver, ExitSender)> { + let connect_inner = self.future_selector.connector(); let interval = self.interval; let (ws_tx, ws_rx) = tokio_tungstenite::connect_async(self.uri).await?.0.split(); let (call_tx, call_rx) = mpsc::channel(self.max_concurrency); @@ -133,153 +105,3 @@ impl<'a> Default for Initializer<'a> { } } } - -/// Async future selectors. -#[derive(Clone, Debug)] -pub enum FutureSelector { - /// Use [`futures::future::select`]. - Futures, - /// Use [`tokio::select!`]. - Tokio, -} -impl Default for FutureSelector { - fn default() -> Self { - Self::Tokio - } -} - -fn connect_futures( - interval: Duration, - mut ws_tx: WsSender, - mut ws_rx: WsReceiver, - call_rx: CallReceiver, - error_tx: ErrorSender, - exit_rx: ExitReceiver, -) -> Pin + Send>> { - Box::pin(async move { - let call_rx = stream::unfold(call_rx, |mut r| async { r.recv().await.map(|c| (c, r)) }); - - futures::pin_mut!(call_rx); - - let mut rxs_fut = future::select(call_rx.next(), ws_rx.next()); - // TODO: clean dead items? - let mut pool = Pools::new(); - // Minimum interval is 1ms. - let interval_max = interval.max(Duration::from_millis(1)); - let mut interval_max = IntervalStream::new(time::interval(interval_max)); - // Disable the tick, if the interval is zero. 
- let mut exit_or_interval_fut = future::select( - exit_rx, - if interval.is_zero() { Fuse::terminated() } else { interval_max.next().fuse() }, - ); - - loop { - match future::select(rxs_fut, exit_or_interval_fut).await { - Either::Left((Either::Left((maybe_call, ws_rx_next)), exit_or_interval_fut_)) => { - if let Some(c) = maybe_call { - #[cfg(feature = "trace")] - tracing::trace!("Call({c:?})"); - - if !c.try_send(&mut ws_tx, &mut pool).await { - return; - } - } else { - try_send(error_tx, error::Tokio::ChannelClosed.into()); - - return; - } - - rxs_fut = future::select(call_rx.next(), ws_rx_next); - exit_or_interval_fut = exit_or_interval_fut_; - }, - Either::Left(( - Either::Right((maybe_response, call_rx_next)), - exit_or_interval_fut_, - )) => { - if let Some(response) = maybe_response { - pool.on_ws_recv(response).await.unwrap() - } else { - // TODO?: closed - } - - rxs_fut = future::select(call_rx_next, ws_rx.next()); - exit_or_interval_fut = exit_or_interval_fut_; - }, - Either::Right((Either::Left((_, _)), _)) => return, - Either::Right((Either::Right((_, exit_rx)), rxs_fut_)) => { - #[cfg(feature = "trace")] - tracing::trace!("TickRequest(Ping)"); - - ws_tx.send(Message::Text("Ping".into())).await.unwrap(); - - rxs_fut = rxs_fut_; - exit_or_interval_fut = future::select( - exit_rx, - if interval.is_zero() { - Fuse::terminated() - } else { - interval_max.next().fuse() - }, - ); - }, - } - } - }) -} - -fn connect_tokio( - interval: Duration, - mut ws_tx: WsSender, - mut ws_rx: WsReceiver, - mut call_rx: CallReceiver, - error_tx: ErrorSender, - mut exit_rx: ExitReceiver, -) -> Pin + Send>> { - Box::pin(async move { - // TODO: clean dead items? - let mut pool = Pools::new(); - // Minimum interval is 1ms. - let interval_max = interval.max(Duration::from_millis(1)); - let mut interval_max = IntervalStream::new(time::interval(interval_max)); - // Disable the tick, if the interval is zero. - let mut interval_fut = - if interval.is_zero() { Fuse::terminated() } else { interval_max.next().fuse() }; - - loop { - tokio::select! 
{ - maybe_request = call_rx.recv() => { - if let Some(c) = maybe_request { - #[cfg(feature = "trace")] - tracing::trace!("Call({c:?})"); - - if !c.try_send(&mut ws_tx, &mut pool).await { - return; - } - } else { - try_send(error_tx, error::Tokio::ChannelClosed.into()); - - return; - } - }, - maybe_response = ws_rx.next() => { - if let Some(response) = maybe_response { - pool.on_ws_recv(response).await.unwrap() - } else { - // TODO?: closed - } - } - _ = &mut interval_fut => { - #[cfg(feature = "trace")] - tracing::trace!("TickRequest(Ping)"); - - ws_tx.send(Message::Ping(Vec::new())).await.unwrap(); - - interval_fut = interval_max.next().fuse(); - }, - _ = &mut exit_rx => { - return; - }, - } - } - }) -} diff --git a/src/main.rs b/src/main.rs index a008684..0a53588 100644 --- a/src/main.rs +++ b/src/main.rs @@ -1,3 +1,5 @@ +use std::error::Error; + use array_bytes::TryFromHex; use serde_json::Value; use subapeye::{ @@ -7,7 +9,7 @@ use subapeye::{ use subrpcer::{chain, net}; #[tokio::main] -async fn main() { +async fn main() -> Result<(), Box> { tracing_subscriber::fmt::init(); enum R {} @@ -19,53 +21,41 @@ async fn main() { let apeye = >::initialize(WsInitializer::default().uri("wss://kusama-rpc.polkadot.io")) - .await - .unwrap(); - - // let hashes = apeye.get_block_hash::<_, Vec>(Some([0, 1, 2])).await.unwrap(); - - // dbg!(hashes); - - // for h in hashes { - // dbg!(apeye.get_block::(Some(&h)).await.unwrap()); - // dbg!(apeye.get_header::(Some(&h)).await.unwrap()); - // } + .await?; - // dbg!(apeye.get_finalized_head::().await.unwrap()); - // dbg!(apeye.get_metadata::(None).await.unwrap()); - - // dbg!(apeye - // .query::(&apeye.query_of::<()>("System", "Number").unwrap().construct().unwrap()) - // .await - // .unwrap()); - dbg!(apeye - .query::( - &apeye - .query_of("Staking", "ErasValidatorPrefs") - .unwrap() - .keys(Keys::Raw(&( - 5_044_u32, - ::AccountId::try_from_hex( - "0x305b1689cfee594c19a642a2fcd554074c93d62181c0d4117ebe196bd7c62b79" - ) - .unwrap() - ))) - .construct() - .unwrap() - ) - .await - .unwrap() - .unwrap()); - - let v = apeye - .batch::<_, Value>(vec![ - chain::get_block_hash_raw(>::None), - chain::get_finalized_head_raw(), - net::version_raw(), - ]) - .await - .unwrap(); - dbg!(v); + for h in apeye.get_block_hash::<_, Vec>(Some([0, 1, 2])).await?? { + dbg!(apeye.get_block::(Some(&h)).await??); + dbg!(apeye.get_header::(Some(&h)).await??); + } - dbg!(apeye.version::().await.unwrap()); + dbg!(apeye.get_finalized_head::().await??); + dbg!(apeye.query::(&apeye.query_of::<()>("System", "Number")?.construct()?).await??); + dbg!( + apeye + .query::( + &apeye + .query_of("Staking", "ErasValidatorPrefs")? + .keys(Keys::Raw(&( + 5_044_u32, + ::AccountId::try_from_hex( + "0x305b1689cfee594c19a642a2fcd554074c93d62181c0d4117ebe196bd7c62b79" + ) + .unwrap() + ))) + .construct()? + ) + .await?? + ); + dbg!( + apeye + .batch::<_, Value>(vec![ + chain::get_block_hash_raw(>::None), + chain::get_finalized_head_raw(), + net::version_raw(), + ]) + .await? 
+ ); + dbg!(apeye.version::().await??); + + Ok(()) } From 43bf546e64ba0b982939d9c6f19e88bbd30aff59 Mon Sep 17 00:00:00 2001 From: Xavier Lau Date: Tue, 2 May 2023 03:59:45 +0800 Subject: [PATCH 3/7] Improve error handling --- src/apeye.rs | 2 +- src/error.rs | 2 - src/jsonrpc/error.rs | 7 +-- src/jsonrpc/ws.rs | 126 ++++++++++++++++++++++++------------------- 4 files changed, 76 insertions(+), 61 deletions(-) diff --git a/src/apeye.rs b/src/apeye.rs index e3d2b23..4ac537f 100644 --- a/src/apeye.rs +++ b/src/apeye.rs @@ -97,7 +97,7 @@ where apeye.metadata = submetadatan::unprefix_raw_metadata_minimal(apeye.get_metadata::(None).await??) - .map_err(error::Generic::Submetadatan)?; + .expect("[apeye] failed to parse metadata"); #[cfg(feature = "trace")] tracing::trace!("Metadata({:?})", apeye.metadata); diff --git a/src/error.rs b/src/error.rs index 235036e..4e42b13 100644 --- a/src/error.rs +++ b/src/error.rs @@ -55,8 +55,6 @@ pub enum Generic { // Codec(#[from] parity_scale_codec::Error), #[error(transparent)] Serde(#[from] serde_json::Error), - #[error(transparent)] - Submetadatan(#[from] submetadatan::Error), } /// Wrap the error with [`Generic::AlmostImpossible`]. pub fn almost_impossible(e_msg: &'static str) -> Generic { diff --git a/src/jsonrpc/error.rs b/src/jsonrpc/error.rs index 59234a6..61b54df 100644 --- a/src/jsonrpc/error.rs +++ b/src/jsonrpc/error.rs @@ -11,6 +11,7 @@ pub enum Error { Jsonrpc(#[from] Jsonrpc), #[error(transparent)] Generic(#[from] Generic), + // Move to Websocket error? #[error(transparent)] Tungstenite(#[from] tokio_tungstenite::tungstenite::Error), } @@ -37,6 +38,8 @@ pub enum Jsonrpc { ChannelClosed(#[from] ChannelClosed), #[error("[jsonrpc] empty batch")] EmptyBatch, + #[error("[jsonrpc] empty response")] + EmptyResponse, #[error("[jsonrpc] exceeded the maximum number of request queue size, {0:?}")] ExceededRequestQueueMaxSize(crate::jsonrpc::Id), #[error("[jsonrpc] response error, {0:?}")] @@ -51,8 +54,6 @@ pub enum Jsonrpc { pub enum ChannelClosed { #[error("[jsonrpc] messenger channel closed")] Messenger, - #[error("[jsonrpc] reporter channel closed")] - Reporter, #[error("[jsonrpc] notifier channel closed")] - Notifier(#[from] tokio::sync::oneshot::error::RecvError), + Notifier, } diff --git a/src/jsonrpc/ws.rs b/src/jsonrpc/ws.rs index d17012e..7c764e1 100644 --- a/src/jsonrpc/ws.rs +++ b/src/jsonrpc/ws.rs @@ -62,6 +62,9 @@ type BatchResponse = Vec; type BatchNotifier = Notifier; type BatchPool = Pool; +const E_EMPTY_LOCK: &str = "[jsonrpc::ws] acquired `lock` is empty"; +const E_ID_NOT_FOUND: &str = "[jsonrpc::ws] id not found in the pool"; + /// A Ws instance. /// /// Use this to interact with the server. @@ -111,7 +114,7 @@ impl Jsonrpc for Ws { time::timeout(self.request_timeout, rx) .await .map_err(error::Jsonrpc::Timeout)? - .map_err(|e| error::Jsonrpc::from(error::ChannelClosed::Notifier(e)))? + .map_err(|_| error::Jsonrpc::from(error::ChannelClosed::Notifier))? } /// Send a batch of requests. @@ -124,10 +127,7 @@ impl Jsonrpc for Ws { } let RequestQueueGuard { lock: ids, .. } = self.request_queue.consume(raw_requests.len())?; - let id = ids - .first() - .ok_or(error::almost_impossible("[jsonrpc::ws] acquired `lock` is empty"))? - .to_owned(); + let id = ids.first().ok_or(error::almost_impossible(E_EMPTY_LOCK))?.to_owned(); let requests = ids .into_iter() .zip(raw_requests.into_iter()) @@ -148,7 +148,7 @@ impl Jsonrpc for Ws { let mut responses = time::timeout(self.request_timeout, rx) .await .map_err(error::Jsonrpc::Timeout)? 
- .map_err(|e| error::Jsonrpc::from(error::ChannelClosed::Notifier(e)))?; + .map_err(|_| error::Jsonrpc::from(error::ChannelClosed::Notifier))?; // Each id is unique. let _ = responses.as_mut().map(|r| r.sort_unstable_by_key(|r| r.id())); @@ -213,7 +213,7 @@ impl FutureSelector { } else { try_send( error_tx, - error::Jsonrpc::from(error::ChannelClosed::Reporter).into(), + error::Jsonrpc::from(error::ChannelClosed::Notifier).into(), false, ); @@ -228,7 +228,11 @@ impl FutureSelector { exit_or_interval_fut_, )) => { if let Some(response) = maybe_response { - pool.on_ws_recv(response).await.unwrap() + if let Err(e) = pool.on_response(response).await { + try_send(error_tx, e, true); + + return; + } } else { // TODO?: closed } @@ -241,15 +245,11 @@ impl FutureSelector { #[cfg(feature = "trace")] tracing::trace!("Tick(Ping)"); - if ws_tx.send(Message::Text("Ping".into())).await.is_err() { - try_send( - error_tx, - error::Jsonrpc::from(error::ChannelClosed::Reporter).into(), - false, - ); + if let Err(e) = ws_tx.send(Message::Ping(Vec::new())).await { + try_send(error_tx, e.into(), false); return; - } + }; rxs_fut = rxs_fut_; exit_or_interval_fut = future::select( @@ -295,14 +295,18 @@ impl FutureSelector { return; } } else { - try_send(error_tx, error::Jsonrpc::from(error::ChannelClosed::Reporter).into(), false); + try_send(error_tx, error::Jsonrpc::from(error::ChannelClosed::Notifier).into(), false); return; } }, maybe_response = ws_rx.next() => { if let Some(response) = maybe_response { - pool.on_ws_recv(response).await.unwrap() + if let Err(e) = pool.on_response(response).await { + try_send(error_tx, e, true); + + return; + } } else { // TODO?: closed } @@ -311,8 +315,8 @@ impl FutureSelector { #[cfg(feature = "trace")] tracing::trace!("Tick(Ping)"); - if ws_tx.send(Message::Ping(Vec::new())).await.is_err() { - try_send(error_tx, error::Jsonrpc::from(error::ChannelClosed::Reporter).into(), false); + if let Err(e) = ws_tx.send(Message::Ping(Vec::new())).await { + try_send(error_tx, e.into(), false); return }; @@ -411,55 +415,62 @@ impl Pools { Default::default() } - async fn on_ws_recv(&mut self, response: WsResult) -> Result<()> { + async fn on_response(&mut self, response: WsResult) -> Result<()> { match response { - Ok(msg) => { - match msg { - Message::Binary(r) => self.process_response(&r).await, - Message::Text(r) => self.process_response(r.as_bytes()).await, - Message::Ping(_) => tracing::trace!("ping"), - Message::Pong(_) => tracing::trace!("pong"), - Message::Close(_) => tracing::trace!("close"), - Message::Frame(_) => tracing::trace!("frame"), - } + Ok(m) => match m { + Message::Binary(r) => self.process_response(&r).await, + Message::Text(r) => self.process_response(r.as_bytes()).await, + Message::Ping(_) => { + tracing::trace!("ping"); - Ok(()) + Ok(()) + }, + Message::Pong(_) => { + tracing::trace!("pong"); + + Ok(()) + }, + Message::Close(_) => { + tracing::trace!("close"); + + Ok(()) + }, + Message::Frame(_) => { + tracing::trace!("frame"); + + Ok(()) + }, }, Err(e) => Err(e)?, } } - // TODO: error handling - async fn process_response(&mut self, response: &[u8]) { + async fn process_response(&mut self, response: &[u8]) -> Result<()> { #[cfg(feature = "trace")] tracing::trace!("Response({response:?})"); let response = response.trim_ascii_start(); - let Some(first) = response.first() else { - tracing::error!("[jsonrpc::ws] empty response"); - - return; - }; + let first = response.first().ok_or(error::Jsonrpc::EmptyResponse)?; match first { b'{' => if let Ok(r) = 
serde_json::from_slice::(response) { - let notifier = self.requests.remove(&r.id).unwrap(); + try_take_notifier(&mut self.requests, &r.id)? + .send(Ok(Ok(r))) + .map_err(|_| error::Jsonrpc::from(error::ChannelClosed::Notifier))?; - if let Err(e) = notifier.send(Ok(Ok(r))) { - tracing::error!("{e:?}"); - } + return Ok(()); } else if let Ok(e) = serde_json::from_slice::(response) { // E.g. // ``` // Response({"jsonrpc":"2.0","error":{"code":-32601,"message":"Method not found"},"id":2}) // ``` - let notifier = self.requests.remove(&e.id).unwrap(); + try_take_notifier(&mut self.requests, &e.id)? + .send(Ok(Err(e))) + .map_err(|_| error::Jsonrpc::from(error::ChannelClosed::Notifier))?; - if let Err(e) = notifier.send(Ok(Err(e))) { - tracing::error!("{e:?}"); - } + return Ok(()); }, b'[' => if let Ok(r) = serde_json::from_slice::>(response) { @@ -474,19 +485,24 @@ impl Pools { Err(error::almost_impossible("TODO"))? } }) - .collect::>>() - .unwrap(); + .collect::>>()?; - let notifier = self.batches.remove(&r.first().unwrap().id()).unwrap(); + try_take_notifier( + &mut self.batches, + &r.first().ok_or(error::Jsonrpc::EmptyBatch)?.id(), + )? + .send(Ok(r)) + .map_err(|_| error::Jsonrpc::from(error::ChannelClosed::Notifier))?; - if let Err(e) = notifier.send(Ok(r)) { - tracing::error!("{e:?}"); - } + return Ok(()); }, - _ => { - tracing::error!("unable to process response, {response:?}"); - // TODO: return - }, + _ => (), } + + Err(error::almost_impossible("[jsonrpc::ws] unable to process response, {response:?}"))? } } + +fn try_take_notifier(pool: &mut Pool>, id: &Id) -> Result> { + Ok(pool.remove(id).ok_or_else(|| error::almost_impossible(E_ID_NOT_FOUND))?) +} From 7b1e87d914743ae8426ff19b53fc333037dd0208 Mon Sep 17 00:00:00 2001 From: Xavier Lau Date: Thu, 4 May 2023 01:11:07 +0800 Subject: [PATCH 4/7] Stash --- Cargo.lock | 1 - Cargo.toml | 1 - src/apeye.rs | 78 +++++-- src/apeye/api.rs | 11 +- src/error.rs | 16 +- src/jsonrpc.rs | 284 +++++++++++++++++++++--- src/jsonrpc/error.rs | 33 +-- src/jsonrpc/ws.rs | 406 ++++++++++++++-------------------- src/jsonrpc/ws/initializer.rs | 30 +-- src/main.rs | 77 ++++--- 10 files changed, 557 insertions(+), 380 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 541fb22..813e82c 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -864,7 +864,6 @@ dependencies = [ "async-trait", "futures", "fxhash", - "num_cpus", "parity-scale-codec", "serde", "serde_json", diff --git a/Cargo.toml b/Cargo.toml index 64332a5..e4b150a 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -22,7 +22,6 @@ array-bytes = { version = "6.1" } async-trait = { version = "0.1" } futures = { version = "0.3" } fxhash = { version = "0.2" } -num_cpus = { version = "1.15" } parity-scale-codec = { version = "3.4" } serde = { version = "1.0", features = ["derive"] } serde_json = { version = "1.0", features = ["raw_value"] } diff --git a/src/apeye.rs b/src/apeye.rs index 4ac537f..7752b89 100644 --- a/src/apeye.rs +++ b/src/apeye.rs @@ -13,7 +13,7 @@ use serde::de::DeserializeOwned; use submetadatan::{Meta, Metadata, StorageEntry}; // subapeye use crate::{ - jsonrpc::{Connection, Initialize, IntoRequestRaw, JsonrpcResult}, + jsonrpc::{Connection, Initialize, IntoRequestRaw, ResponseResult, ResultExt, Subscriber}, prelude::*, }; @@ -25,46 +25,73 @@ impl Layer for T where T: Invoker + Runtime {} #[async_trait::async_trait] pub trait Invoker: Send + Sync { /// - fn map_result(r: JsonrpcResult) -> Result + type Connection: Connection; + + /// + fn map_result(r: ResponseResult) -> Result where R: 
DeserializeOwned, { - Ok(r.map_err(|e| error::Net::JsonrpcResponse(e.error))?) - .and_then(|r| Ok(serde_json::from_value::(r.result).map_err(error::Generic::Serde)?)) + Ok(serde_json::from_value(r.extract_err()?.result).map_err(error::Generic::Serde)?) } /// - async fn request<'a, Req, R>(&self, raw_request: Req) -> Result> + async fn request<'a, Req, Resp>(&self, raw_request: Req) -> Result> where Req: IntoRequestRaw<'a>, - R: DeserializeOwned; + Resp: DeserializeOwned; /// - async fn batch<'a, Req, R>(&self, raw_requests: Vec) -> Result>> + async fn batch<'a, Req, Resp>(&self, raw_requests: Vec) -> Result>> where Req: IntoRequestRaw<'a>, - R: DeserializeOwned; + Resp: DeserializeOwned; + + /// + async fn subscribe<'a, Req, M, Resp>( + &self, + raw_request: Req, + unsubscribe_method: M, + ) -> Result> + where + Req: IntoRequestRaw<'a>, + M: Send + AsRef, + Resp: DeserializeOwned; } #[async_trait::async_trait] impl Invoker for T where T: Connection, { + type Connection = T; + async fn request<'a, Req, R>(&self, raw_request: Req) -> Result> where Req: IntoRequestRaw<'a>, R: DeserializeOwned, { - Ok(self.request(raw_request).await.map_err(error::Net::Jsonrpc)?).map(Self::map_result) + Ok(self.request(raw_request).await.map(Self::map_result)?) } - async fn batch<'a, Req, R>(&self, raw_requests: Vec) -> Result>> + async fn batch<'a, Req, Resp>(&self, raw_requests: Vec) -> Result>> where Req: IntoRequestRaw<'a>, - R: DeserializeOwned, + Resp: DeserializeOwned, { - Ok(self.batch(raw_requests).await.map_err(error::Net::Jsonrpc)?) - .map(|v| v.into_iter().map(Self::map_result).collect()) + Ok(self.batch(raw_requests).await?.into_iter().map(Self::map_result).collect()) + } + + async fn subscribe<'a, Req, M, Resp>( + &self, + raw_request: Req, + unsubscribe_method: M, + ) -> Result> + where + Req: IntoRequestRaw<'a>, + M: Send + AsRef, + Resp: DeserializeOwned, + { + Ok(self.subscribe(raw_request, unsubscribe_method).await.unwrap()) } } @@ -92,15 +119,15 @@ where where Iz: Initialize, { - let invoker = Arc::new(initializer.initialize().await.map_err(error::Net::Jsonrpc)?); + let invoker = Arc::new(initializer.initialize().await?); let mut apeye = Self { invoker, metadata: Default::default(), runtime: Default::default() }; apeye.metadata = submetadatan::unprefix_raw_metadata_minimal(apeye.get_metadata::(None).await??) 
.expect("[apeye] failed to parse metadata"); - #[cfg(feature = "trace")] - tracing::trace!("Metadata({:?})", apeye.metadata); + // #[cfg(feature = "trace")] + // tracing::trace!("Metadata({:?})", apeye.metadata); Ok(apeye) } @@ -111,12 +138,14 @@ where I: Invoker, R: Runtime, { + type Connection = I::Connection; + async fn request<'a, Req, Resp>(&self, raw_request: Req) -> Result> where Req: IntoRequestRaw<'a>, Resp: DeserializeOwned, { - I::request::<_, _>(&self.invoker, raw_request).await + self.invoker.request(raw_request).await } async fn batch<'a, Req, Resp>(&self, raw_requests: Vec) -> Result>> @@ -124,7 +153,20 @@ where Req: IntoRequestRaw<'a>, Resp: DeserializeOwned, { - I::batch::<_, _>(&self.invoker, raw_requests).await + self.invoker.batch(raw_requests).await + } + + async fn subscribe<'a, Req, M, Resp>( + &self, + raw_request: Req, + unsubscribe_method: M, + ) -> Result> + where + Req: IntoRequestRaw<'a>, + M: Send + AsRef, + Resp: DeserializeOwned, + { + self.invoker.subscribe(raw_request, unsubscribe_method).await } } impl Meta for Apeye diff --git a/src/apeye/api.rs b/src/apeye/api.rs index 8f6dc3a..d0e9a32 100644 --- a/src/apeye/api.rs +++ b/src/apeye/api.rs @@ -3,13 +3,10 @@ pub mod prelude { //! - pub use crate::{ - apeye::{ - api::{Argument, Deserialization, Parameter}, - runtime::Runtime, - Layer, - }, - jsonrpc::Response, + pub use crate::apeye::{ + api::{Argument, Deserialization, Parameter}, + runtime::Runtime, + Layer, }; } diff --git a/src/error.rs b/src/error.rs index 4e42b13..0514c85 100644 --- a/src/error.rs +++ b/src/error.rs @@ -15,7 +15,7 @@ pub enum Error { #[error(transparent)] Generic(#[from] Generic), #[error(transparent)] - Net(#[from] Net), + Jsonrpc(#[from] crate::jsonrpc::Error), } /// An error helper/wrapper to debug/print the error quickly. @@ -57,16 +57,6 @@ pub enum Generic { Serde(#[from] serde_json::Error), } /// Wrap the error with [`Generic::AlmostImpossible`]. -pub fn almost_impossible(e_msg: &'static str) -> Generic { - Generic::AlmostImpossible(e_msg) -} - -/// JSONRPC error. -#[allow(missing_docs)] -#[derive(Debug, ThisError)] -pub enum Net { - #[error(transparent)] - Jsonrpc(#[from] crate::jsonrpc::Error), - #[error("[jsonrpc] response error, {0:?}")] - JsonrpcResponse(serde_json::Value), +pub fn almost_impossible(error: &'static str) -> Generic { + Generic::AlmostImpossible(error) } diff --git a/src/jsonrpc.rs b/src/jsonrpc.rs index f59cd54..dca62a6 100644 --- a/src/jsonrpc.rs +++ b/src/jsonrpc.rs @@ -19,23 +19,65 @@ pub mod prelude { use prelude::*; // std -use std::sync::{ - atomic::{AtomicUsize, Ordering}, - Arc, +use std::{ + fmt::{Debug, Formatter, Result as FmtResult}, + future::Future, + hash::Hash, + marker::PhantomData, + pin::Pin, + sync::{ + atomic::{AtomicUsize, Ordering}, + Arc, + }, }; // crates.io +use fxhash::FxHashMap; use serde::{Deserialize, Serialize}; -use serde_json::Value; +use serde_json::{value::RawValue, Value}; +use tokio::sync::{mpsc, oneshot}; /// JSONRPC Id. pub type Id = usize; +/// Subscription Id. 
+pub type SubscriptionId = String; /// -pub type JsonrpcResult = StdResult; +pub type ResponseResult = StdResult; +/// +pub type SubscriptionResult = StdResult; + +type MessageTx = mpsc::Sender; +type MessageRx = mpsc::Receiver; + +type ErrorTx = oneshot::Sender; +type ErrorRx = oneshot::Receiver; + +type ExitTx = oneshot::Sender<()>; +type ExitRx = oneshot::Receiver<()>; + +type ResponseTx = oneshot::Sender>; + +type SubscriptionTx = mpsc::Sender; +type SubscriptionRx = mpsc::Receiver; + +type RequestTx = ResponseTx; +type BatchTx = ResponseTx>; + +type Pool = FxHashMap; +type RequestPool = Pool; +type BatchPool = Pool; +type SubscriptionPool = Pool; /// JSONRPC version. pub const VERSION: &str = "2.0"; +const E_EMPTY_LOCK: &str = "[jsonrpc] acquired `lock` is empty"; +const E_INVALID_RESPONSE: &str = "[jsonrpc] unable to process response"; +const E_MESSAGE_CHANNEL_CLOSED: &str = "[jsonrpc] message channel closed"; +const E_NO_ERROR: &str = "[jsonrpc] no error to report"; +const E_REPORTER_CHANNEL_CLOSED: &str = "[jsonrpc] reporter channel closed"; +const E_TX_NOT_FOUND: &str = "[jsonrpc] tx not found in the pool"; + /// #[async_trait::async_trait] pub trait Initialize { @@ -50,9 +92,6 @@ impl<'a> Initialize for WsInitializer<'a> { type Connection = Ws; async fn initialize(self) -> Result { - // #[cfg(feature = "trace")] - // tracing::trace!("Connecting({uri})"); - self.initialize().await } } @@ -63,16 +102,26 @@ impl<'a, T> IntoRequestRaw<'a> for T where T: Send + Into> /// #[async_trait::async_trait] -pub trait Jsonrpc { +pub trait Jsonrpc: Sized { /// Send a single request. - async fn request<'a, Req>(&self, raw_request: Req) -> Result + async fn request<'a, R>(&self, raw_request: R) -> Result where - Req: IntoRequestRaw<'a>; + R: IntoRequestRaw<'a>; - /// Send a single request. - async fn batch<'a, Req>(&self, raw_requests: Vec) -> Result> + /// Send a batch of requests. + async fn batch<'a, R>(&self, raw_requests: Vec) -> Result> where - Req: IntoRequestRaw<'a>; + R: IntoRequestRaw<'a>; + + /// Send a subscription. + async fn subscribe<'a, R, M, D>( + &self, + raw_request: R, + unsubscribe_method: M, + ) -> Result> + where + R: IntoRequestRaw<'a>, + M: Send + AsRef; } /// @@ -80,35 +129,47 @@ pub trait Connection: Send + Sync + Jsonrpc {} impl Connection for T where T: Send + Sync + Jsonrpc {} /// -pub trait Response { +pub trait ResultExt { + /// + type Ok; + /// fn id(&self) -> Id; - // /// - // fn deserialize(self) -> Self; + /// + fn extract_err(self) -> Result; } -impl Response for JsonrpcResult { +impl ResultExt for ResponseResult { + type Ok = ResponseOk; + fn id(&self) -> Id { match self { - Self::Ok(r) => r.id, + Self::Ok(o) => o.id, Self::Err(e) => e.id, } } - // fn deserialize(self) -> Result { - // Ok(match self { - // Self::Ok(r) => Self::Ok(ResponseResult { - // jsonrpc: r.jsonrpc, - // id: r.id, - // result: serde_json::to_value(r.result).map_err(error::Generic::Serde)?, - // }), - // Self::Err(e) => Self::Err(Response { - // jsonrpc: e.jsonrpc, - // id: e.id, - // error: serde_json::to_value(e.error).map_err(error::Generic::Serde)?, - // }), - // }) - // } + fn extract_err(self) -> Result<::Ok> { + Ok(self.map_err(|e| error::Jsonrpc::Response(e.error))?) 
+ } +} + +trait PoolExt { + type Key: PartialEq + Eq + Hash; + type Value; + + fn take_tx(&mut self, key: &Self::Key) -> Self::Value; +} +impl PoolExt for Pool +where + K: PartialEq + Eq + Hash, +{ + type Key = K; + type Value = V; + + fn take_tx(&mut self, key: &Self::Key) -> Self::Value { + self.remove(key).expect(E_TX_NOT_FOUND) + } } /// Generic JSONRPC request. @@ -135,10 +196,10 @@ impl<'a, P> From<(&'a str, P)> for RequestRaw<'a, P> { } } -/// Generic JSONRPC result. +/// Generic JSONRPC response result. #[allow(missing_docs)] #[derive(Clone, Debug, Deserialize)] -pub struct ResponseResult { +pub struct ResponseOk { pub jsonrpc: String, pub id: Id, pub result: Value, @@ -147,12 +208,50 @@ pub struct ResponseResult { /// Generic JSONRPC error. #[allow(missing_docs)] #[derive(Clone, Debug, Deserialize)] -pub struct ResponseError { +pub struct JsonrpcError { pub jsonrpc: String, pub id: Id, pub error: Value, } +/// Generic JSONRPC notification. +#[allow(missing_docs)] +#[derive(Clone, Debug, Deserialize)] +pub struct SubscriptionOk { + pub jsonrpc: String, + pub method: String, + pub params: Value, +} + +/// +#[derive(Debug)] +pub struct Subscriber<'a, J, R, D> +where + J: Jsonrpc, + R: IntoRequestRaw<'a>, +{ + unsubscribe_fut: UnsubscribeFut<'a, J, R>, + subscription_rx: SubscriptionRx, + _deserialize: D, +} +struct UnsubscribeFut<'a, J, R> +where + J: Jsonrpc, + R: IntoRequestRaw<'a>, +{ + f: Box Pin>>>, + _lifetime: PhantomData<&'a ()>, +} +impl<'a, J, R> Debug for UnsubscribeFut<'a, J, R> +where + J: Jsonrpc, + R: IntoRequestRaw<'a>, +{ + fn fmt(&self, f: &mut Formatter) -> FmtResult { + write!(f, "UnsubscribeFut") + } +} + #[derive(Debug)] struct RequestQueue { size: Id, @@ -202,3 +301,116 @@ struct RequestQueueGuard { lock: L, _strong: Arc<()>, } + +#[derive(Debug)] +enum Message { + #[cfg(feature = "debug")] + Debug(Id), + Request(Call), + Batch(Call>), + Subscription(Subscription), +} +// A single request object. +// `id`: Request Id. +// +// Or +// +// A batch requests object to send several request objects simultaneously. +// `id`: The first request's id. +struct Call { + id: Id, + request: String, + tx: ResponseTx, +} +impl Debug for Call +where + T: Debug, +{ + fn fmt(&self, f: &mut Formatter) -> FmtResult { + write!(f, "Call {{ id: {}, request: {}, tx: {:?} }}", self.id, self.request, self.tx) + } +} +#[derive(Debug)] +struct Subscription { + id: String, + tx: SubscriptionTx, +} + +#[derive(Debug, Default)] +struct Pools { + requests: RequestPool, + batches: BatchPool, + subscriptions: SubscriptionPool, +} +impl Pools { + fn new() -> Self { + Default::default() + } + + async fn process_response(&mut self, response: &[u8]) -> Result<()> { + let r = response.trim_ascii_start(); + let first = r.first().ok_or(error::Jsonrpc::EmptyResponse)?; + + match first { + b'{' => + if let Ok(o) = serde_json::from_slice::(r) { + self.requests.take_tx(&o.id).send(Ok(Ok(o))).expect(E_MESSAGE_CHANNEL_CLOSED); + + return Ok(()); + } else if let Ok(e) = serde_json::from_slice::(r) { + // E.g. 
+					// ```
+					// Response({"jsonrpc":"2.0","error":{"code":-32601,"message":"Method not found"},"id":2})
+					// ```
+
+					self.requests.take_tx(&e.id).send(Ok(Err(e))).expect(E_MESSAGE_CHANNEL_CLOSED);
+
+					return Ok(());
+				} else if let Ok(o) = serde_json::from_slice::<SubscriptionOk>(r) {
+					dbg!(o);
+
+					return Ok(());
+				},
+			b'[' =>
+				if let Ok(r) = serde_json::from_slice::<Vec<&RawValue>>(r) {
+					let r = r
+						.into_iter()
+						.map(|r| {
+							if let Ok(o) = serde_json::from_str::<ResponseOk>(r.get()) {
+								Ok(Ok(o))
+							} else if let Ok(e) = serde_json::from_str::<JsonrpcError>(r.get()) {
+								Ok(Err(e))
+							} else {
+								Err(error::almost_impossible(E_INVALID_RESPONSE))?
+							}
+						})
+						.collect::<Result<Vec<_>>>()?;
+
+					self.batches
+						.take_tx(&r.first().ok_or(error::Jsonrpc::EmptyBatch)?.id())
+						.send(Ok(r))
+						.expect(E_MESSAGE_CHANNEL_CLOSED);
+
+					return Ok(());
+				},
+			_ => (),
+		}
+
+		Err(error::almost_impossible(E_INVALID_RESPONSE))?
+	}
+}
+
+fn try_send<T>(tx: oneshot::Sender<T>, any: T, log: bool) -> bool
+where
+	T: Debug,
+{
+	if let Err(e) = tx.send(any) {
+		if log {
+			tracing::error!("[jsonrpc] failed to throw this error to outside, {e:?}");
+		}
+
+		return false;
+	}
+
+	true
+}
diff --git a/src/jsonrpc/error.rs b/src/jsonrpc/error.rs
index 61b54df..af2f65b 100644
--- a/src/jsonrpc/error.rs
+++ b/src/jsonrpc/error.rs
@@ -7,13 +7,12 @@ use thiserror::Error as ThisError;
 #[allow(missing_docs)]
 #[derive(Debug, ThisError)]
 pub enum Error {
-	#[error(transparent)]
-	Jsonrpc(#[from] Jsonrpc),
 	#[error(transparent)]
 	Generic(#[from] Generic),
-	// Move to Websocket error?
 	#[error(transparent)]
-	Tungstenite(#[from] tokio_tungstenite::tungstenite::Error),
+	Jsonrpc(#[from] Jsonrpc),
+	#[error(transparent)]
+	Websocket(#[from] Websocket),
 }
 
 /// Generic error.
@@ -22,38 +21,40 @@ pub enum Generic {
 	#[error("{0:?}")]
 	AlmostImpossible(&'static str),
+	#[error("[jsonrpc] {0:?}")]
+	Plain(String),
 	#[error(transparent)]
 	Serde(#[from] serde_json::Error),
+	#[error(transparent)]
+	Timeout(#[from] tokio::time::error::Elapsed),
 }
 /// Wrap the error with [`Generic::AlmostImpossible`].
-pub fn almost_impossible(e_msg: &'static str) -> Generic {
-	Generic::AlmostImpossible(e_msg)
+pub fn almost_impossible(error: &'static str) -> Generic {
+	Generic::AlmostImpossible(error)
 }
 
 /// JSONRPC error.
 #[allow(missing_docs)]
 #[derive(Debug, ThisError)]
 pub enum Jsonrpc {
-	#[error(transparent)]
-	ChannelClosed(#[from] ChannelClosed),
 	#[error("[jsonrpc] empty batch")]
 	EmptyBatch,
 	#[error("[jsonrpc] empty response")]
 	EmptyResponse,
 	#[error("[jsonrpc] exceeded the maximum number of request queue size, {0:?}")]
 	ExceededRequestQueueMaxSize(crate::jsonrpc::Id),
+	#[error("[jsonrpc] invalid subscription id")]
+	InvalidSubscriptionId,
 	#[error("[jsonrpc] response error, {0:?}")]
 	Response(serde_json::Value),
-	#[error(transparent)]
-	Timeout(#[from] tokio::time::error::Elapsed),
 }
 
-/// Channel closed error.
+/// Websocket error.
 #[allow(missing_docs)]
 #[derive(Debug, ThisError)]
-pub enum ChannelClosed {
-	#[error("[jsonrpc] messenger channel closed")]
-	Messenger,
-	#[error("[jsonrpc] notifier channel closed")]
-	Notifier,
+pub enum Websocket {
+	#[error("[jsonrpc::ws] websocket closed")]
+	Closed,
+	#[error(transparent)]
+	Tungstenite(#[from] tokio_tungstenite::tungstenite::Error),
 }
diff --git a/src/jsonrpc/ws.rs b/src/jsonrpc/ws.rs
index 7c764e1..997e897 100644
--- a/src/jsonrpc/ws.rs
+++ b/src/jsonrpc/ws.rs
@@ -5,23 +5,17 @@ pub mod initializer;
 pub use initializer::*;
 
 // std
-use std::{fmt::Debug, future::Future, pin::Pin, time::Duration};
+use std::{mem, time::Duration};
 // crates.io
 use futures::{
 	future::{self, Either, Fuse},
 	stream::{self, SplitSink, SplitStream},
 	FutureExt, SinkExt, StreamExt,
 };
-use fxhash::FxHashMap;
-use serde_json::value::RawValue;
-use tokio::{
-	net::TcpStream,
-	sync::{mpsc, oneshot},
-	time,
-};
+use tokio::{net::TcpStream, sync::Mutex, time};
 use tokio_stream::wrappers::IntervalStream;
 use tokio_tungstenite::{
-	tungstenite::{error::Result as WsResult, Message},
+	tungstenite::{error::Result as WsResult, Message as WsMessage},
 	MaybeTlsStream, WebSocketStream,
 };
 // subapeye
@@ -32,49 +26,54 @@ type GenericConnect = Box<
 		Duration,
 		WsSender,
 		WsReceiver,
-		CallReceiver,
-		ErrorSender,
-		ExitReceiver,
+		MessageRx,
+		ErrorTx,
+		ExitRx,
 	) -> Pin<Box<dyn Future<Output = ()> + Send>> + Send,
 >;
 
-type CallSender = mpsc::Sender<Call>;
-type CallReceiver = mpsc::Receiver<Call>;
-
-type WsSender = SplitSink<WebSocketStream<MaybeTlsStream<TcpStream>>, Message>;
+type WsSender = SplitSink<WebSocketStream<MaybeTlsStream<TcpStream>>, WsMessage>;
 type WsReceiver = SplitStream<WebSocketStream<MaybeTlsStream<TcpStream>>>;
 
-type ErrorSender = oneshot::Sender<Error>;
-type ErrorReceiver = oneshot::Receiver<Error>;
-
-type ExitSender = oneshot::Sender<()>;
-type ExitReceiver = oneshot::Receiver<()>;
-
-type Notifier<T> = oneshot::Sender<Result<T>>;
-type Pool<T> = FxHashMap<Id, T>;
-
-type RequestResponse = JsonrpcResult;
-type RequestNotifier = Notifier<RequestResponse>;
-type RequestPool = Pool<RequestNotifier>;
-
-type BatchResponse = Vec<RequestResponse>;
-type BatchNotifier = Notifier<BatchResponse>;
-type BatchPool = Pool<BatchNotifier>;
-
-const E_EMPTY_LOCK: &str = "[jsonrpc::ws] acquired `lock` is empty";
-const E_ID_NOT_FOUND: &str = "[jsonrpc::ws] id not found in the pool";
-
 /// A Ws instance.
 ///
 /// Use this to interact with the server.
 #[derive(Debug)]
 pub struct Ws {
-	messenger: CallSender,
+	messenger: MessageTx,
 	request_queue: RequestQueue,
 	request_timeout: Duration,
-	reporter: Option<ErrorReceiver>,
-	closer: Option<ExitSender>,
+	reporter: Mutex>,
+	closer: Option<ExitTx>,
+}
+impl Ws {
+	// Don't call this if code hasn't encountered any error yet,
+	// as it will block the asynchronous process.
+	async fn report(&self) -> Result<()> {
+		let mut reporter = self.reporter.lock().await;
+		let e = match mem::replace(
+			&mut *reporter,
+			Err("[jsonrpc::ws] temporary error placeholder".into()),
+		) {
+			Ok(r) => r
+				.await
+				.map_err(|_| error::almost_impossible(E_REPORTER_CHANNEL_CLOSED))?
+				.to_string(),
+			Err(e) => e,
+		};
+
+		*reporter = Err(e.clone());
+
+		Err(error::Generic::Plain(e))?
+	}
+
+	async fn execute(&self, future: F) -> Result<::Output>
+	where
+		F: Future,
+	{
+		Ok(time::timeout(self.request_timeout, future).await.map_err(error::Generic::Timeout)?)
+	}
 }
 impl Drop for Ws {
 	fn drop(&mut self) {
@@ -88,39 +87,42 @@
 #[async_trait::async_trait]
 impl Jsonrpc for Ws {
 	/// Send a single request.
-	async fn request<'a, Req>(&self, raw_request: Req) -> Result
+	async fn request<'a, R>(&self, raw_request: R) -> Result
 	where
-		Req: Send + Into>,
+		R: IntoRequestRaw<'a>,
 	{
 		let RequestQueueGuard { lock: id, ..
} = self.request_queue.consume_once()?; let RequestRaw { method, params } = raw_request.into(); let (tx, rx) = oneshot::channel(); #[cfg(feature = "debug")] - self.messenger - .send(Call::Debug(id)) - .await - .map_err(|_| error::Jsonrpc::from(error::ChannelClosed::Messenger))?; - self.messenger - .send(Call::Single(CallInner { + if self.messenger.send(Message::Debug(id)).await.is_err() { + self.report().await?; + } + if self + .messenger + .send(Message::Request(Call { id, request: serde_json::to_string(&Request { jsonrpc: VERSION, id, method, params }) .map_err(error::Generic::Serde)?, - notifier: tx, + tx, })) .await - .map_err(|_| error::Jsonrpc::from(error::ChannelClosed::Messenger))?; - - time::timeout(self.request_timeout, rx) - .await - .map_err(error::Jsonrpc::Timeout)? - .map_err(|_| error::Jsonrpc::from(error::ChannelClosed::Notifier))? + .is_err() + { + self.report().await?; + } + if let Ok(r) = self.execute(rx).await? { + r + } else { + self.report().await.and(Err(error::almost_impossible(E_NO_ERROR))?)? + } } /// Send a batch of requests. - async fn batch<'a, Req>(&self, raw_requests: Vec) -> Result> + async fn batch<'a, R>(&self, raw_requests: Vec) -> Result> where - Req: Send + Into>, + R: IntoRequestRaw<'a>, { if raw_requests.is_empty() { Err(error::Jsonrpc::EmptyBatch)?; @@ -140,19 +142,45 @@ impl Jsonrpc for Ws { let request = serde_json::to_string(&requests).map_err(error::Generic::Serde)?; let (tx, rx) = oneshot::channel(); - self.messenger - .send(Call::Batch(CallInner { id, request, notifier: tx })) - .await - .map_err(|_| error::Jsonrpc::from(error::ChannelClosed::Messenger))?; + if self.messenger.send(Message::Batch(Call { id, request, tx })).await.is_err() { + self.report().await?; + } + if let Ok(mut r) = self.execute(rx).await? { + // Each id is unique. + let _ = r.as_mut().map(|r| r.sort_unstable_by_key(|r| r.id())); - let mut responses = time::timeout(self.request_timeout, rx) - .await - .map_err(error::Jsonrpc::Timeout)? - .map_err(|_| error::Jsonrpc::from(error::ChannelClosed::Notifier))?; - // Each id is unique. - let _ = responses.as_mut().map(|r| r.sort_unstable_by_key(|r| r.id())); + r + } else { + self.report().await.and(Err(error::almost_impossible(E_NO_ERROR))?)? + } + } + + async fn subscribe<'a, R, M, D>( + &self, + raw_request: R, + unsubscribe_method: M, + ) -> Result> + where + R: IntoRequestRaw<'a>, + M: Send + AsRef, + { + let id = self + .request(raw_request) + .await? + .extract_err()? + .result + .as_str() + .ok_or(error::Jsonrpc::InvalidSubscriptionId)? + .to_owned(); + // TODO?: Configurable channel size. 
+ let (tx, rx) = mpsc::channel(self.request_queue.size); + + if self.messenger.send(Message::Subscription(Subscription { id, tx })).await.is_err() { + self.report().await?; + } - responses + todo!() + // Ok(Subscriber { message_tx: tx, subscription_rx: rx }) } } @@ -176,16 +204,17 @@ impl FutureSelector { interval: Duration, mut ws_tx: WsSender, mut ws_rx: WsReceiver, - call_rx: CallReceiver, - error_tx: ErrorSender, - exit_rx: ExitReceiver, + message_rx: MessageRx, + error_tx: ErrorTx, + exit_rx: ExitRx, ) -> Pin + Send>> { Box::pin(async move { - let call_rx = stream::unfold(call_rx, |mut r| async { r.recv().await.map(|c| (c, r)) }); + let message_rx = + stream::unfold(message_rx, |mut r| async { r.recv().await.map(|m| (m, r)) }); - futures::pin_mut!(call_rx); + futures::pin_mut!(message_rx); - let mut rxs_fut = future::select(call_rx.next(), ws_rx.next()); + let mut rxs_fut = future::select(message_rx.next(), ws_rx.next()); // TODO: clean dead items? let mut pool = Pools::new(); // Minimum interval is 1ms. @@ -200,27 +229,20 @@ impl FutureSelector { loop { match future::select(rxs_fut, exit_or_interval_fut).await { Either::Left(( - Either::Left((maybe_call, ws_rx_next)), + Either::Left((maybe_message, ws_rx_next)), exit_or_interval_fut_, )) => { - if let Some(c) = maybe_call { - #[cfg(feature = "trace")] - tracing::trace!("Call({c:?})"); - - if !c.try_send(&mut ws_tx, &mut pool).await { - return; - } - } else { - try_send( - error_tx, - error::Jsonrpc::from(error::ChannelClosed::Notifier).into(), - false, - ); - + if !pool + .on_message_ws( + maybe_message.expect(E_MESSAGE_CHANNEL_CLOSED), + &mut ws_tx, + ) + .await + { return; } - rxs_fut = future::select(call_rx.next(), ws_rx_next); + rxs_fut = future::select(message_rx.next(), ws_rx_next); exit_or_interval_fut = exit_or_interval_fut_; }, Either::Left(( @@ -228,13 +250,15 @@ impl FutureSelector { exit_or_interval_fut_, )) => { if let Some(response) = maybe_response { - if let Err(e) = pool.on_response(response).await { + if let Err(e) = pool.on_response_ws(response).await { try_send(error_tx, e, true); return; } } else { - // TODO?: closed + try_send(error_tx, error::Websocket::Closed.into(), true); + + return; } rxs_fut = future::select(call_rx_next, ws_rx.next()); @@ -245,8 +269,8 @@ impl FutureSelector { #[cfg(feature = "trace")] tracing::trace!("Tick(Ping)"); - if let Err(e) = ws_tx.send(Message::Ping(Vec::new())).await { - try_send(error_tx, e.into(), false); + if let Err(e) = ws_tx.send(WsMessage::Ping(Vec::new())).await { + try_send(error_tx, error::Websocket::Tungstenite(e).into(), false); return; }; @@ -270,9 +294,9 @@ impl FutureSelector { interval: Duration, mut ws_tx: WsSender, mut ws_rx: WsReceiver, - mut call_rx: CallReceiver, - error_tx: ErrorSender, - mut exit_rx: ExitReceiver, + mut message_rx: MessageRx, + error_tx: ErrorTx, + mut exit_rx: ExitRx, ) -> Pin + Send>> { Box::pin(async move { // TODO: clean dead items? @@ -286,37 +310,30 @@ impl FutureSelector { loop { tokio::select! 
{ - maybe_request = call_rx.recv() => { - if let Some(c) = maybe_request { - #[cfg(feature = "trace")] - tracing::trace!("Call({c:?})"); - - if !c.try_send(&mut ws_tx, &mut pool).await { - return; - } - } else { - try_send(error_tx, error::Jsonrpc::from(error::ChannelClosed::Notifier).into(), false); - + maybe_message = message_rx.recv() => { + if !pool.on_message_ws(maybe_message.expect(E_MESSAGE_CHANNEL_CLOSED), &mut ws_tx).await { return; } }, maybe_response = ws_rx.next() => { if let Some(response) = maybe_response { - if let Err(e) = pool.on_response(response).await { + if let Err(e) = pool.on_response_ws(response).await { try_send(error_tx, e, true); return; } } else { - // TODO?: closed + try_send(error_tx, error::Websocket::Closed.into(), true); + + return; } } _ = &mut interval_fut => { #[cfg(feature = "trace")] tracing::trace!("Tick(Ping)"); - if let Err(e) = ws_tx.send(Message::Ping(Vec::new())).await { - try_send(error_tx, e.into(), false); + if let Err(e) = ws_tx.send(WsMessage::Ping(Vec::new())).await { + try_send(error_tx, error::Websocket::Tungstenite(e).into(), false); return }; @@ -337,172 +354,75 @@ impl Default for FutureSelector { } } -#[derive(Debug)] -enum Call { - #[cfg(feature = "debug")] - Debug(Id), - Single(CallInner), - Batch(CallInner), -} -impl Call { - async fn try_send(self, ws_tx: &mut WsSender, pool: &mut Pools) -> bool { - match self { - #[cfg(feature = "debug")] - Call::Debug(_) => {}, - Call::Single(c) => - if !c.try_send(ws_tx, &mut pool.requests).await { - return false; - }, - Call::Batch(c) => - if !c.try_send(ws_tx, &mut pool.batches).await { - return false; - }, - } - - true - } -} -// A single request object. -// `id`: Request Id. -// -// Or -// -// A batch requests object to send several request objects simultaneously. -// `id`: The first request's id. 
-#[derive(Debug)] -struct CallInner { - id: Id, - request: String, - notifier: Notifier, -} -impl CallInner +impl Call where T: Debug, { - async fn try_send(self, ws_tx: &mut WsSender, pool: &mut Pool>) -> bool { - if let Err(e) = ws_tx.send(Message::Text(self.request)).await { - try_send(self.notifier, Err(e.into()), true) + async fn try_send_ws(self, tx: &mut WsSender, pool: &mut Pool>) -> bool { + if let Err(e) = tx.send(WsMessage::Text(self.request)).await { + try_send(self.tx, Err(error::Websocket::Tungstenite(e).into()), true) } else { - pool.insert(self.id, self.notifier); + pool.insert(self.id, self.tx); true } } } -fn try_send(tx: oneshot::Sender, any: T, log: bool) -> bool -where - T: Debug, -{ - if let Err(e) = tx.send(any) { - if log { - tracing::error!("[jsonrpc::ws] failed to send error to outside, {e:?}"); +impl Pools { + async fn on_message_ws(&mut self, message: Message, tx: &mut WsSender) -> bool { + #[cfg(feature = "trace")] + tracing::trace!("Message({message:?})"); + + match message { + #[cfg(feature = "debug")] + Message::Debug(_) => {}, + Message::Request(c) => + if !c.try_send_ws(tx, &mut self.requests).await { + return false; + }, + Message::Batch(c) => + if !c.try_send_ws(tx, &mut self.batches).await { + return false; + }, + Message::Subscription(s) => { + self.subscriptions.insert(s.id, s.tx); + }, } - return false; + true } - true -} - -#[derive(Debug, Default)] -struct Pools { - requests: RequestPool, - batches: BatchPool, -} -impl Pools { - fn new() -> Self { - Default::default() - } + async fn on_response_ws(&mut self, response: WsResult) -> Result<()> { + #[cfg(feature = "trace")] + tracing::trace!("Response({response:?})"); - async fn on_response(&mut self, response: WsResult) -> Result<()> { match response { Ok(m) => match m { - Message::Binary(r) => self.process_response(&r).await, - Message::Text(r) => self.process_response(r.as_bytes()).await, - Message::Ping(_) => { + WsMessage::Binary(r) => self.process_response(&r).await, + WsMessage::Text(r) => self.process_response(r.as_bytes()).await, + WsMessage::Ping(_) => { tracing::trace!("ping"); Ok(()) }, - Message::Pong(_) => { + WsMessage::Pong(_) => { tracing::trace!("pong"); Ok(()) }, - Message::Close(_) => { + WsMessage::Close(_) => { tracing::trace!("close"); Ok(()) }, - Message::Frame(_) => { + WsMessage::Frame(_) => { tracing::trace!("frame"); Ok(()) }, }, - Err(e) => Err(e)?, - } - } - - async fn process_response(&mut self, response: &[u8]) -> Result<()> { - #[cfg(feature = "trace")] - tracing::trace!("Response({response:?})"); - - let response = response.trim_ascii_start(); - let first = response.first().ok_or(error::Jsonrpc::EmptyResponse)?; - - match first { - b'{' => - if let Ok(r) = serde_json::from_slice::(response) { - try_take_notifier(&mut self.requests, &r.id)? - .send(Ok(Ok(r))) - .map_err(|_| error::Jsonrpc::from(error::ChannelClosed::Notifier))?; - - return Ok(()); - } else if let Ok(e) = serde_json::from_slice::(response) { - // E.g. - // ``` - // Response({"jsonrpc":"2.0","error":{"code":-32601,"message":"Method not found"},"id":2}) - // ``` - - try_take_notifier(&mut self.requests, &e.id)? 
- .send(Ok(Err(e))) - .map_err(|_| error::Jsonrpc::from(error::ChannelClosed::Notifier))?; - - return Ok(()); - }, - b'[' => - if let Ok(r) = serde_json::from_slice::>(response) { - let r = r - .into_iter() - .map(|r| { - if let Ok(r) = serde_json::from_str::(r.get()) { - Ok(Ok(r)) - } else if let Ok(r) = serde_json::from_str::(r.get()) { - Ok(Err(r)) - } else { - Err(error::almost_impossible("TODO"))? - } - }) - .collect::>>()?; - - try_take_notifier( - &mut self.batches, - &r.first().ok_or(error::Jsonrpc::EmptyBatch)?.id(), - )? - .send(Ok(r)) - .map_err(|_| error::Jsonrpc::from(error::ChannelClosed::Notifier))?; - - return Ok(()); - }, - _ => (), + Err(e) => Err(error::Websocket::Tungstenite(e))?, } - - Err(error::almost_impossible("[jsonrpc::ws] unable to process response, {response:?}"))? } } - -fn try_take_notifier(pool: &mut Pool>, id: &Id) -> Result> { - Ok(pool.remove(id).ok_or_else(|| error::almost_impossible(E_ID_NOT_FOUND))?) -} diff --git a/src/jsonrpc/ws/initializer.rs b/src/jsonrpc/ws/initializer.rs index bdd6532..c0b543a 100644 --- a/src/jsonrpc/ws/initializer.rs +++ b/src/jsonrpc/ws/initializer.rs @@ -16,8 +16,8 @@ pub struct Initializer<'a> { /// /// Default: `ws://127.0.0.1:9944`. pub uri: &'a str, - /// Maximum concurrent task count. - pub max_concurrency: Id, + /// Request pool's size. + pub pool_size: Id, /// Send tick with this interval to keep the WS alive. pub interval: Duration, /// Request timeout. @@ -38,9 +38,9 @@ impl<'a> Initializer<'a> { self } - /// Set the [`max_concurrency`](#structfield.max_concurrency). - pub fn max_concurrency(mut self, max_concurrency: Id) -> Self { - self.max_concurrency = max_concurrency; + /// Set the [`pool_size`](#structfield.pool_size). + pub fn pool_size(mut self, pool_size: Id) -> Self { + self.pool_size = pool_size; self } @@ -72,33 +72,37 @@ impl<'a> Initializer<'a> { Ok(Ws { messenger, - request_queue: RequestQueue::with_size(self.max_concurrency), + request_queue: RequestQueue::with_size(self.pool_size), request_timeout: self.request_timeout, - reporter: Some(reporter), + reporter: Mutex::new(Ok(reporter)), closer: Some(closer), }) } - async fn connect(&self) -> Result<(CallSender, ErrorReceiver, ExitSender)> { + async fn connect(&self) -> Result<(MessageTx, ErrorRx, ExitTx)> { let connect_inner = self.future_selector.connector(); let interval = self.interval; - let (ws_tx, ws_rx) = tokio_tungstenite::connect_async(self.uri).await?.0.split(); - let (call_tx, call_rx) = mpsc::channel(self.max_concurrency); + let (ws_tx, ws_rx) = tokio_tungstenite::connect_async(self.uri) + .await + .map_err(error::Websocket::Tungstenite)? 
+ .0 + .split(); + let (message_tx, message_rx) = mpsc::channel(self.pool_size); let (error_tx, error_rx) = oneshot::channel(); let (exit_tx, exit_rx) = oneshot::channel(); tokio::spawn(async move { - connect_inner(interval, ws_tx, ws_rx, call_rx, error_tx, exit_rx).await + connect_inner(interval, ws_tx, ws_rx, message_rx, error_tx, exit_rx).await }); - Ok((call_tx, error_rx, exit_tx)) + Ok((message_tx, error_rx, exit_tx)) } } impl<'a> Default for Initializer<'a> { fn default() -> Self { Self { uri: "ws://127.0.0.1:9944", - max_concurrency: num_cpus::get(), + pool_size: 1_024, interval: Duration::from_secs(10), request_timeout: Duration::from_secs(30), future_selector: FutureSelector::default(), diff --git a/src/main.rs b/src/main.rs index 0a53588..5e765c4 100644 --- a/src/main.rs +++ b/src/main.rs @@ -23,39 +23,52 @@ async fn main() -> Result<(), Box> { >::initialize(WsInitializer::default().uri("wss://kusama-rpc.polkadot.io")) .await?; - for h in apeye.get_block_hash::<_, Vec>(Some([0, 1, 2])).await?? { - dbg!(apeye.get_block::(Some(&h)).await??); - dbg!(apeye.get_header::(Some(&h)).await??); - } + // for h in apeye.get_block_hash::<_, Vec>(Some([0, 1, 2])).await? { + // dbg!(apeye.get_block::(Some(&h)).await?); + // dbg!(apeye.get_header::(Some(&h)).await?); + // } + + // dbg!(apeye.get_finalized_head::().await?); + // dbg!(apeye.query::(&apeye.query_of::<()>("System", "Number")?.construct()?).await?); + // dbg!( + // apeye + // .query::( + // &apeye + // .query_of("Staking", "ErasValidatorPrefs")? + // .keys(Keys::Raw(&( + // 5_044_u32, + // ::AccountId::try_from_hex( + // "0x305b1689cfee594c19a642a2fcd554074c93d62181c0d4117ebe196bd7c62b79" + // ) + // .unwrap() + // ))) + // .construct()? + // ) + // .await? + // ); + // dbg!( + // apeye + // .batch::<_, Value>(vec![ + // chain::get_block_hash_raw(>::None), + // chain::get_finalized_head_raw(), + // net::version_raw(), + // ]) + // .await? + // ); + dbg!(apeye.version::().await?); - dbg!(apeye.get_finalized_head::().await??); - dbg!(apeye.query::(&apeye.query_of::<()>("System", "Number")?.construct()?).await??); - dbg!( - apeye - .query::( - &apeye - .query_of("Staking", "ErasValidatorPrefs")? - .keys(Keys::Raw(&( - 5_044_u32, - ::AccountId::try_from_hex( - "0x305b1689cfee594c19a642a2fcd554074c93d62181c0d4117ebe196bd7c62b79" - ) - .unwrap() - ))) - .construct()? - ) - .await?? - ); - dbg!( - apeye - .batch::<_, Value>(vec![ - chain::get_block_hash_raw(>::None), - chain::get_finalized_head_raw(), - net::version_raw(), - ]) - .await? 
- ); - dbg!(apeye.version::().await??); + apeye + .subscribe::<_, _, ()>( + ( + "state_subscribeStorage", + serde_json::json!([[ + "0x26aa394eea5630e07c48ae0c9558cef70a98fdbe9ce6c55837576c60c7af3850" + ]]), + ), + "state_unsubscribeStorage", + ) + .await + .unwrap(); Ok(()) } From 8b098baa7ab082843de5f2f490312fa4e462e7ed Mon Sep 17 00:00:00 2001 From: Xavier Lau Date: Thu, 4 May 2023 19:34:05 +0800 Subject: [PATCH 5/7] Subscribe v1 --- src/apeye.rs | 147 +++++++++++---------- src/apeye/api.rs | 2 +- src/apeye/api/ext.rs | 10 +- src/apeye/runtime.rs | 12 -- src/jsonrpc.rs | 144 ++++++++++---------- src/jsonrpc/ws.rs | 240 +++++++++++++++++++++++++--------- src/jsonrpc/ws/initializer.rs | 10 +- src/main.rs | 20 +-- 8 files changed, 354 insertions(+), 231 deletions(-) diff --git a/src/apeye.rs b/src/apeye.rs index 7752b89..fa57c3d 100644 --- a/src/apeye.rs +++ b/src/apeye.rs @@ -13,20 +13,21 @@ use serde::de::DeserializeOwned; use submetadatan::{Meta, Metadata, StorageEntry}; // subapeye use crate::{ - jsonrpc::{Connection, Initialize, IntoRequestRaw, ResponseResult, ResultExt, Subscriber}, - prelude::*, + jsonrpc::*, + prelude::{error, Result}, }; /// pub trait Layer: Invoker + Runtime {} impl Layer for T where T: Invoker + Runtime {} +/// +pub trait LayerExt: Layer + InvokerExt {} +impl LayerExt for T where T: Layer + InvokerExt {} + /// #[async_trait::async_trait] pub trait Invoker: Send + Sync { - /// - type Connection: Connection; - /// fn map_result(r: ResponseResult) -> Result where @@ -36,72 +37,77 @@ pub trait Invoker: Send + Sync { } /// - async fn request<'a, Req, Resp>(&self, raw_request: Req) -> Result> - where - Req: IntoRequestRaw<'a>, - Resp: DeserializeOwned; - - /// - async fn batch<'a, Req, Resp>(&self, raw_requests: Vec) -> Result>> + async fn request<'a, Req, Resp>(&self, request_raw: Req) -> Result> where - Req: IntoRequestRaw<'a>, + Req: IntoRequestRaw<&'a str>, Resp: DeserializeOwned; /// - async fn subscribe<'a, Req, M, Resp>( - &self, - raw_request: Req, - unsubscribe_method: M, - ) -> Result> + async fn batch<'a, Req, Resp>(&self, requests_raw: Vec) -> Result>> where - Req: IntoRequestRaw<'a>, - M: Send + AsRef, + Req: IntoRequestRaw<&'a str>, Resp: DeserializeOwned; } #[async_trait::async_trait] impl Invoker for T where - T: Connection, + T: Jsonrpc, { - type Connection = T; - - async fn request<'a, Req, R>(&self, raw_request: Req) -> Result> + async fn request<'a, Req, R>(&self, request_raw: Req) -> Result> where - Req: IntoRequestRaw<'a>, + Req: IntoRequestRaw<&'a str>, R: DeserializeOwned, { - Ok(self.request(raw_request).await.map(Self::map_result)?) + let r = request_raw.into(); + + Ok(self + .request(RequestRaw { method: r.method.into(), params: r.params }) + .await + .map(Self::map_result)?) 
} - async fn batch<'a, Req, Resp>(&self, raw_requests: Vec) -> Result>> + async fn batch<'a, Req, Resp>(&self, requests_raw: Vec) -> Result>> where - Req: IntoRequestRaw<'a>, + Req: IntoRequestRaw<&'a str>, Resp: DeserializeOwned, { - Ok(self.batch(raw_requests).await?.into_iter().map(Self::map_result).collect()) + Ok(self.batch(requests_raw).await?.into_iter().map(Self::map_result).collect()) } - - async fn subscribe<'a, Req, M, Resp>( +} +/// +#[async_trait::async_trait] +pub trait InvokerExt: Invoker { + /// + async fn subscribe<'a, Req, Resp>( + &self, + request_raw: Req, + unsubscribe_method: String, + ) -> Result> + where + Req: IntoRequestRaw<&'a str>, + Resp: DeserializeOwned; +} +#[async_trait::async_trait] +impl InvokerExt for T +where + T: Invoker + JsonrpcExt, +{ + async fn subscribe<'a, Req, Resp>( &self, - raw_request: Req, - unsubscribe_method: M, - ) -> Result> + request_raw: Req, + unsubscribe_method: String, + ) -> Result> where - Req: IntoRequestRaw<'a>, - M: Send + AsRef, + Req: IntoRequestRaw<&'a str>, Resp: DeserializeOwned, { - Ok(self.subscribe(raw_request, unsubscribe_method).await.unwrap()) + Ok(self.subscribe(request_raw, unsubscribe_method).await.unwrap()) } } /// The API client for Substrate-like chain. #[derive(Clone, Debug)] -pub struct Apeye -where - I: Invoker, - R: Runtime, -{ +pub struct Apeye { /// pub invoker: Arc, /// @@ -109,15 +115,15 @@ where /// pub runtime: PhantomData, } -impl Apeye +impl Apeye where - I: Invoker, + Ivk: Invoker, R: Runtime, { /// Initialize the API client with the given initializer. pub async fn initialize(initializer: Iz) -> Result where - Iz: Initialize, + Iz: Initialize, { let invoker = Arc::new(initializer.initialize().await?); let mut apeye = Self { invoker, metadata: Default::default(), runtime: Default::default() }; @@ -126,49 +132,58 @@ where submetadatan::unprefix_raw_metadata_minimal(apeye.get_metadata::(None).await??) 
.expect("[apeye] failed to parse metadata"); - // #[cfg(feature = "trace")] - // tracing::trace!("Metadata({:?})", apeye.metadata); - Ok(apeye) } } #[async_trait::async_trait] -impl Invoker for Apeye +impl Invoker for Apeye where I: Invoker, - R: Runtime, + Rt: Runtime, { - type Connection = I::Connection; - - async fn request<'a, Req, Resp>(&self, raw_request: Req) -> Result> + async fn request<'a, Req, Resp>(&self, request_raw: Req) -> Result> where - Req: IntoRequestRaw<'a>, + Req: IntoRequestRaw<&'a str>, Resp: DeserializeOwned, { - self.invoker.request(raw_request).await + self.invoker.request(request_raw).await } - async fn batch<'a, Req, Resp>(&self, raw_requests: Vec) -> Result>> + async fn batch<'a, Req, Resp>(&self, requests_raw: Vec) -> Result>> where - Req: IntoRequestRaw<'a>, + Req: IntoRequestRaw<&'a str>, Resp: DeserializeOwned, { - self.invoker.batch(raw_requests).await + self.invoker.batch(requests_raw).await } - - async fn subscribe<'a, Req, M, Resp>( +} +#[async_trait::async_trait] +impl InvokerExt for Apeye +where + I: InvokerExt, + Rt: Runtime, +{ + async fn subscribe<'a, Req, Resp>( &self, - raw_request: Req, - unsubscribe_method: M, - ) -> Result> + request_raw: Req, + unsubscribe_method: String, + ) -> Result> where - Req: IntoRequestRaw<'a>, - M: Send + AsRef, + Req: IntoRequestRaw<&'a str>, Resp: DeserializeOwned, { - self.invoker.subscribe(raw_request, unsubscribe_method).await + self.invoker.subscribe(request_raw, unsubscribe_method).await } } +impl Runtime for Apeye +where + I: Send + Sync, + R: Runtime, +{ + type AccountId = R::AccountId; + type BlockNumber = R::BlockNumber; + type Hash = R::Hash; +} impl Meta for Apeye where I: Invoker, diff --git a/src/apeye/api.rs b/src/apeye/api.rs index d0e9a32..02d0c1b 100644 --- a/src/apeye/api.rs +++ b/src/apeye/api.rs @@ -6,7 +6,7 @@ pub mod prelude { pub use crate::apeye::{ api::{Argument, Deserialization, Parameter}, runtime::Runtime, - Layer, + Layer, LayerExt, }; } diff --git a/src/apeye/api/ext.rs b/src/apeye/api/ext.rs index b252c2c..a23b48e 100644 --- a/src/apeye/api/ext.rs +++ b/src/apeye/api/ext.rs @@ -121,10 +121,7 @@ pub struct StorageQuery<'a> { pub at: Option<&'a str>, } /// -pub struct StorageQueryArgs<'a, E> -where - E: EncodableArgs, -{ +pub struct StorageQueryArgs<'a, E> { /// pub storage_entry: StorageEntry<'a>, /// @@ -186,10 +183,7 @@ where } /// -pub enum Keys<'a, E> -where - E: EncodableArgs, -{ +pub enum Keys<'a, E> { /// Raw(&'a E), /// diff --git a/src/apeye/runtime.rs b/src/apeye/runtime.rs index faf6c82..04e3a70 100644 --- a/src/apeye/runtime.rs +++ b/src/apeye/runtime.rs @@ -2,8 +2,6 @@ // crates.io use array_bytes::{Hex, TryFromHex}; -// subapeye -use crate::apeye::{Apeye, Invoker}; /// pub trait Runtime: Send + Sync { @@ -18,13 +16,3 @@ pub trait Runtime: Send + Sync { /// pub trait ParameterConvertor: Hex + TryFromHex {} impl ParameterConvertor for T where T: Hex + TryFromHex {} - -impl Runtime for Apeye -where - I: Invoker, - R: Runtime, -{ - type AccountId = R::AccountId; - type BlockNumber = R::BlockNumber; - type Hash = R::Hash; -} diff --git a/src/jsonrpc.rs b/src/jsonrpc.rs index dca62a6..9f45d14 100644 --- a/src/jsonrpc.rs +++ b/src/jsonrpc.rs @@ -1,7 +1,7 @@ //! JSONRPC client library. 
pub mod ws; -pub use ws::{Ws, WsInitializer}; +pub use ws::{Subscriber, Ws, WsInitializer}; pub mod error; pub use error::Error; @@ -20,6 +20,7 @@ use prelude::*; // std use std::{ + borrow::Cow, fmt::{Debug, Formatter, Result as FmtResult}, future::Future, hash::Hash, @@ -44,7 +45,7 @@ pub type SubscriptionId = String; /// pub type ResponseResult = StdResult; /// -pub type SubscriptionResult = StdResult; +pub type SubscriptionResult = StdResult; type MessageTx = mpsc::Sender; type MessageRx = mpsc::Receiver; @@ -72,62 +73,61 @@ type SubscriptionPool = Pool; pub const VERSION: &str = "2.0"; const E_EMPTY_LOCK: &str = "[jsonrpc] acquired `lock` is empty"; +const E_ERROR_CHANNEL_CLOSED: &str = "[jsonrpc] error channel closed"; const E_INVALID_RESPONSE: &str = "[jsonrpc] unable to process response"; const E_MESSAGE_CHANNEL_CLOSED: &str = "[jsonrpc] message channel closed"; const E_NO_ERROR: &str = "[jsonrpc] no error to report"; -const E_REPORTER_CHANNEL_CLOSED: &str = "[jsonrpc] reporter channel closed"; +const E_RESPONSE_CHANNEL_CLOSED: &str = "[jsonrpc] response channel closed"; const E_TX_NOT_FOUND: &str = "[jsonrpc] tx not found in the pool"; /// #[async_trait::async_trait] pub trait Initialize { /// - type Connection; + type Protocol; /// - async fn initialize(self) -> Result; + async fn initialize(self) -> Result; } #[async_trait::async_trait] impl<'a> Initialize for WsInitializer<'a> { - type Connection = Ws; + type Protocol = Ws; - async fn initialize(self) -> Result { + async fn initialize(self) -> Result { self.initialize().await } } /// -pub trait IntoRequestRaw<'a>: Send + Into> {} -impl<'a, T> IntoRequestRaw<'a> for T where T: Send + Into> {} +pub trait IntoRequestRaw: Send + Into> {} +impl IntoRequestRaw for T where T: Send + Into> {} /// #[async_trait::async_trait] -pub trait Jsonrpc: Sized { +pub trait Jsonrpc: Sync + Send { /// Send a single request. - async fn request<'a, R>(&self, raw_request: R) -> Result + async fn request<'a, R>(&self, request_raw: R) -> Result where - R: IntoRequestRaw<'a>; + R: IntoRequestRaw>; /// Send a batch of requests. - async fn batch<'a, R>(&self, raw_requests: Vec) -> Result> + async fn batch<'a, R>(&self, requests_raw: Vec) -> Result> where - R: IntoRequestRaw<'a>; - + R: IntoRequestRaw<&'a str>; +} +/// +#[async_trait::async_trait] +pub trait JsonrpcExt: Jsonrpc { /// Send a subscription. - async fn subscribe<'a, R, M, D>( + async fn subscribe<'a, R, D>( &self, - raw_request: R, - unsubscribe_method: M, - ) -> Result> + request_raw: R, + unsubscribe_method: String, + ) -> Result> where - R: IntoRequestRaw<'a>, - M: Send + AsRef; + R: IntoRequestRaw<&'a str>; } -/// -pub trait Connection: Send + Sync + Jsonrpc {} -impl Connection for T where T: Send + Sync + Jsonrpc {} - /// pub trait ResultExt { /// @@ -186,69 +186,74 @@ pub struct Request<'a, P> { /// Raw JSONRPC request. #[allow(missing_docs)] #[derive(Clone, Debug)] -pub struct RequestRaw<'a, P> { - pub method: &'a str, - pub params: P, +pub struct RequestRaw { + pub method: T, + pub params: Value, } -impl<'a, P> From<(&'a str, P)> for RequestRaw<'a, P> { - fn from(raw: (&'a str, P)) -> Self { - Self { method: raw.0, params: raw.1 } +impl From<(T, Value)> for RequestRaw { + fn from(v: (T, Value)) -> Self { + Self { method: v.0, params: v.1 } } } /// Generic JSONRPC response result. 
#[allow(missing_docs)] -#[derive(Clone, Debug, Deserialize)] +#[derive(Clone, Deserialize)] pub struct ResponseOk { pub jsonrpc: String, pub id: Id, pub result: Value, } +impl Debug for ResponseOk { + fn fmt(&self, f: &mut Formatter) -> FmtResult { + write!( + f, + "ResponseOk {{ jsonrpc: {}, id: {}, result: {} }}", + self.jsonrpc, self.id, self.result + ) + } +} /// Generic JSONRPC error. #[allow(missing_docs)] -#[derive(Clone, Debug, Deserialize)] +#[derive(Clone, Deserialize)] pub struct JsonrpcError { pub jsonrpc: String, pub id: Id, pub error: Value, } +impl Debug for JsonrpcError { + fn fmt(&self, f: &mut Formatter) -> FmtResult { + write!( + f, + "JsonrpcError {{ jsonrpc: {}, id: {}, error: {} }}", + self.jsonrpc, self.id, self.error + ) + } +} /// Generic JSONRPC notification. #[allow(missing_docs)] #[derive(Clone, Debug, Deserialize)] -pub struct SubscriptionOk { +pub struct NotificationOk { pub jsonrpc: String, pub method: String, - pub params: Value, -} - -/// -#[derive(Debug)] -pub struct Subscriber<'a, J, R, D> -where - J: Jsonrpc, - R: IntoRequestRaw<'a>, -{ - unsubscribe_fut: UnsubscribeFut<'a, J, R>, - subscription_rx: SubscriptionRx, - _deserialize: D, + pub params: NotificationParams, } -struct UnsubscribeFut<'a, J, R> -where - J: Jsonrpc, - R: IntoRequestRaw<'a>, -{ - f: Box Pin>>>, - _lifetime: PhantomData<&'a ()>, +/// Generic JSONRPC notification params. +#[allow(missing_docs)] +#[derive(Clone, Deserialize)] +pub struct NotificationParams { + subscription: SubscriptionId, + result: Value, } -impl<'a, J, R> Debug for UnsubscribeFut<'a, J, R> -where - J: Jsonrpc, - R: IntoRequestRaw<'a>, -{ +impl Debug for NotificationParams { fn fmt(&self, f: &mut Formatter) -> FmtResult { - write!(f, "UnsubscribeFut") + write!( + f, + "NotificationParams {{ subscription: {}, result: {} }}", + self.subscription, self.result + ) } } @@ -308,7 +313,8 @@ enum Message { Debug(Id), Request(Call), Batch(Call>), - Subscription(Subscription), + Subscribe(Subscription), + Unsubscribe(SubscriptionId), } // A single request object. // `id`: Request Id. @@ -354,20 +360,24 @@ impl Pools { match first { b'{' => if let Ok(o) = serde_json::from_slice::(r) { - self.requests.take_tx(&o.id).send(Ok(Ok(o))).expect(E_MESSAGE_CHANNEL_CLOSED); + self.requests.take_tx(&o.id).send(Ok(Ok(o))).expect(E_RESPONSE_CHANNEL_CLOSED); return Ok(()); } else if let Ok(e) = serde_json::from_slice::(r) { // E.g. 
// ``` - // Response({"jsonrpc":"2.0","error":{"code":-32601,"message":"Method not found"},"id":2}) + // {"jsonrpc":"2.0","error":{"code":-32601,"message":"Method not found"},"id":2} // ``` - self.requests.take_tx(&e.id).send(Ok(Err(e))).expect(E_MESSAGE_CHANNEL_CLOSED); + self.requests.take_tx(&e.id).send(Ok(Err(e))).expect(E_RESPONSE_CHANNEL_CLOSED); return Ok(()); - } else if let Ok(o) = serde_json::from_slice::(r) { - dbg!(o); + } else if let Ok(o) = serde_json::from_slice::(r) { + self.subscriptions + .take_tx(&o.params.subscription) + .send(Ok(o)) + .await + .expect(E_RESPONSE_CHANNEL_CLOSED); return Ok(()); }, @@ -389,7 +399,7 @@ impl Pools { self.batches .take_tx(&r.first().ok_or(error::Jsonrpc::EmptyBatch)?.id()) .send(Ok(r)) - .expect(E_MESSAGE_CHANNEL_CLOSED); + .expect(E_RESPONSE_CHANNEL_CLOSED); return Ok(()); }, diff --git a/src/jsonrpc/ws.rs b/src/jsonrpc/ws.rs index 997e897..7a0d79e 100644 --- a/src/jsonrpc/ws.rs +++ b/src/jsonrpc/ws.rs @@ -5,13 +5,18 @@ pub mod initializer; pub use initializer::*; // std -use std::{mem, time::Duration}; +use std::{ + mem, + task::{Context, Poll}, + time::Duration, +}; // crates.io use futures::{ future::{self, Either, Fuse}, stream::{self, SplitSink, SplitStream}, - FutureExt, SinkExt, StreamExt, + FutureExt, SinkExt, Stream, StreamExt, }; +use serde::de::DeserializeOwned; use tokio::{net::TcpStream, sync::Mutex, time}; use tokio_stream::wrappers::IntervalStream; use tokio_tungstenite::{ @@ -41,13 +46,84 @@ type WsReceiver = SplitStream>>; /// Use this to interact with the server. #[derive(Debug)] pub struct Ws { + inner: Arc, + closer: Option, +} +impl Drop for Ws { + fn drop(&mut self) { + if let Some(c) = self.closer.take() { + let _ = c.send(()); + } else { + // + } + } +} +#[async_trait::async_trait] +impl Jsonrpc for Ws { + async fn request<'a, R>(&self, request_raw: R) -> Result + where + R: IntoRequestRaw>, + { + self.inner.request(request_raw).await + } + + async fn batch<'a, R>(&self, requests_raw: Vec) -> Result> + where + R: IntoRequestRaw<&'a str>, + { + self.inner.batch(requests_raw).await + } +} +#[async_trait::async_trait] +impl JsonrpcExt for Ws { + async fn subscribe<'a, R, D>( + &self, + request_raw: R, + unsubscribe_method: String, + ) -> Result> + where + R: IntoRequestRaw<&'a str>, + { + let r = request_raw.into(); + let id = self + .inner + .request(RequestRaw { method: r.method.into(), params: r.params }) + .await? + .extract_err()? + .result + .as_str() + .ok_or(error::Jsonrpc::InvalidSubscriptionId)? + .to_owned(); + // TODO?: Configurable channel size. + let (tx, rx) = mpsc::channel(self.inner.request_queue.size); + + if self + .inner + .messenger + .send(Message::Subscribe(Subscription { id: id.clone(), tx })) + .await + .is_err() + { + self.inner.report().await?; + } + + Ok(Subscriber { + subscription_id: id, + subscription_rx: rx, + unsubscriber: self.inner.clone(), + unsubscribe_method, + _deserialize: Default::default(), + }) + } +} +#[derive(Debug)] +struct WsInner { messenger: MessageTx, request_queue: RequestQueue, request_timeout: Duration, reporter: Mutex>, - closer: Option, } -impl Ws { +impl WsInner { // Don't call this if code hasn't encountered any error yet, // as it will block the asynchronous process. async fn report(&self) -> Result<()> { @@ -58,7 +134,7 @@ impl Ws { ) { Ok(r) => r .await - .map_err(|_| error::almost_impossible(E_REPORTER_CHANNEL_CLOSED))? + .map_err(|_| error::almost_impossible(E_ERROR_CHANNEL_CLOSED))? 
.to_string(), Err(e) => e, }; @@ -75,24 +151,14 @@ impl Ws { Ok(time::timeout(self.request_timeout, future).await.map_err(error::Generic::Timeout)?) } } -impl Drop for Ws { - fn drop(&mut self) { - if let Some(c) = self.closer.take() { - let _ = c.send(()); - } else { - // - } - } -} #[async_trait::async_trait] -impl Jsonrpc for Ws { - /// Send a single request. - async fn request<'a, R>(&self, raw_request: R) -> Result +impl Jsonrpc for WsInner { + async fn request<'a, R>(&self, request_raw: R) -> Result where - R: IntoRequestRaw<'a>, + R: IntoRequestRaw>, { let RequestQueueGuard { lock: id, .. } = self.request_queue.consume_once()?; - let RequestRaw { method, params } = raw_request.into(); + let RequestRaw { method, params } = request_raw.into(); let (tx, rx) = oneshot::channel(); #[cfg(feature = "debug")] @@ -103,8 +169,13 @@ impl Jsonrpc for Ws { .messenger .send(Message::Request(Call { id, - request: serde_json::to_string(&Request { jsonrpc: VERSION, id, method, params }) - .map_err(error::Generic::Serde)?, + request: serde_json::to_string(&Request { + jsonrpc: VERSION, + id, + method: &method, + params, + }) + .map_err(error::Generic::Serde)?, tx, })) .await @@ -119,22 +190,21 @@ impl Jsonrpc for Ws { } } - /// Send a batch of requests. - async fn batch<'a, R>(&self, raw_requests: Vec) -> Result> + async fn batch<'a, R>(&self, requests_raw: Vec) -> Result> where - R: IntoRequestRaw<'a>, + R: IntoRequestRaw<&'a str>, { - if raw_requests.is_empty() { + if requests_raw.is_empty() { Err(error::Jsonrpc::EmptyBatch)?; } - let RequestQueueGuard { lock: ids, .. } = self.request_queue.consume(raw_requests.len())?; + let RequestQueueGuard { lock: ids, .. } = self.request_queue.consume(requests_raw.len())?; let id = ids.first().ok_or(error::almost_impossible(E_EMPTY_LOCK))?.to_owned(); let requests = ids .into_iter() - .zip(raw_requests.into_iter()) - .map(|(id, raw_request)| { - let RequestRaw { method, params } = raw_request.into(); + .zip(requests_raw.into_iter()) + .map(|(id, request_raw)| { + let RequestRaw { method, params } = request_raw.into(); Request { jsonrpc: VERSION, id, method, params } }) @@ -154,34 +224,6 @@ impl Jsonrpc for Ws { self.report().await.and(Err(error::almost_impossible(E_NO_ERROR))?)? } } - - async fn subscribe<'a, R, M, D>( - &self, - raw_request: R, - unsubscribe_method: M, - ) -> Result> - where - R: IntoRequestRaw<'a>, - M: Send + AsRef, - { - let id = self - .request(raw_request) - .await? - .extract_err()? - .result - .as_str() - .ok_or(error::Jsonrpc::InvalidSubscriptionId)? - .to_owned(); - // TODO?: Configurable channel size. - let (tx, rx) = mpsc::channel(self.request_queue.size); - - if self.messenger.send(Message::Subscription(Subscription { id, tx })).await.is_err() { - self.report().await?; - } - - todo!() - // Ok(Subscriber { message_tx: tx, subscription_rx: rx }) - } } /// Async future selectors. 
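
The `request` and `batch` implementations above all follow the same shape: the caller takes an id from the request queue, packs the serialized request together with a `oneshot` sender into the message channel, and the background task later completes that sender when a response carrying the matching id comes back. A minimal, self-contained sketch of this id-to-oneshot routing pattern follows; `DemoCall` and `router` are illustrative names, not types from this crate, and the immediate echo stands in for the real WebSocket round trip.

```
// Sketch of the id -> oneshot response-routing pattern used by `request`/`batch`.
// `DemoCall` and `router` are illustrative; the real code writes to a WebSocket
// and only completes the stored sender later, inside `process_response`.
// Needs tokio with the `macros`, `rt-multi-thread` and `sync` features.
use std::collections::HashMap;

use tokio::sync::{mpsc, oneshot};

struct DemoCall {
	id: usize,
	request: String,
	tx: oneshot::Sender<String>,
}

#[tokio::main]
async fn main() {
	let (call_tx, mut call_rx) = mpsc::channel::<DemoCall>(16);
	// Background task: owns the transport end and a pool keyed by request id.
	let router = tokio::spawn(async move {
		let mut pool = HashMap::<usize, oneshot::Sender<String>>::new();

		while let Some(call) = call_rx.recv().await {
			pool.insert(call.id, call.tx);

			// The real implementation completes the sender only when a response
			// with this id arrives from the socket; here we answer immediately
			// to keep the sketch self-contained.
			if let Some(tx) = pool.remove(&call.id) {
				let _ = tx.send(format!("response to `{}`", call.request));
			}
		}
	});
	// Caller side: ship the request plus a oneshot sender, then await the reply.
	let (tx, rx) = oneshot::channel();

	call_tx.send(DemoCall { id: 0, request: "chain_getBlockHash".into(), tx }).await.unwrap();

	println!("{}", rx.await.unwrap());
	drop(call_tx);
	router.await.unwrap();
}
```
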
@@ -354,6 +396,68 @@ impl Default for FutureSelector { } } +/// +#[derive(Debug)] +pub struct Subscriber { + subscription_id: SubscriptionId, + subscription_rx: SubscriptionRx, + unsubscriber: Arc, + unsubscribe_method: String, + _deserialize: PhantomData, +} +impl Subscriber { + /// + pub async fn unsubscribe(&self) -> Result<()> { + self.unsubscriber + .messenger + .send(Message::Unsubscribe(self.subscription_id.clone())) + .await + .map_err(|_| error::almost_impossible(E_MESSAGE_CHANNEL_CLOSED))?; + + let _ = self + .unsubscriber + .request(( + self.unsubscribe_method.clone().into(), + Value::Array(vec![Value::Array(vec![Value::String(self.subscription_id.clone())])]), + )) + .await?; + + Ok(()) + } +} +impl Subscriber +where + D: Unpin, +{ + /// + pub async fn next_raw(&mut self) -> Option { + StreamExt::next(self).await + } +} +impl Subscriber +where + D: DeserializeOwned + Unpin, +{ + /// + pub async fn next(&mut self) -> Option> { + self.next_raw().await.map(|r| { + r.map_err(|e| error::Error::Jsonrpc(error::Jsonrpc::Response(e.error))).and_then(|o| { + Ok(serde_json::from_value(o.params.result).map_err(error::Generic::Serde)?) + }) + }) + } +} +impl Stream for Subscriber +where + D: Unpin, +{ + type Item = SubscriptionResult; + + fn poll_next(mut self: Pin<&mut Self>, cx: &mut Context) -> Poll> { + self.subscription_rx.poll_recv(cx) + } +} + impl Call where T: Debug, @@ -385,22 +489,32 @@ impl Pools { if !c.try_send_ws(tx, &mut self.batches).await { return false; }, - Message::Subscription(s) => { + Message::Subscribe(s) => { self.subscriptions.insert(s.id, s.tx); }, + Message::Unsubscribe(s) => { + let _ = self.subscriptions.remove(&s); + }, } true } async fn on_response_ws(&mut self, response: WsResult) -> Result<()> { - #[cfg(feature = "trace")] - tracing::trace!("Response({response:?})"); - match response { Ok(m) => match m { - WsMessage::Binary(r) => self.process_response(&r).await, - WsMessage::Text(r) => self.process_response(r.as_bytes()).await, + WsMessage::Binary(r) => { + #[cfg(feature = "trace")] + tracing::trace!("Response({})", String::from_utf8_lossy(&r)); + + self.process_response(&r).await + }, + WsMessage::Text(r) => { + #[cfg(feature = "trace")] + tracing::trace!("Response({r})"); + + self.process_response(r.as_bytes()).await + }, WsMessage::Ping(_) => { tracing::trace!("ping"); diff --git a/src/jsonrpc/ws/initializer.rs b/src/jsonrpc/ws/initializer.rs index c0b543a..63e1f4e 100644 --- a/src/jsonrpc/ws/initializer.rs +++ b/src/jsonrpc/ws/initializer.rs @@ -71,10 +71,12 @@ impl<'a> Initializer<'a> { let (messenger, reporter, closer) = self.connect().await?; Ok(Ws { - messenger, - request_queue: RequestQueue::with_size(self.pool_size), - request_timeout: self.request_timeout, - reporter: Mutex::new(Ok(reporter)), + inner: Arc::new(WsInner { + messenger, + request_queue: RequestQueue::with_size(self.pool_size), + request_timeout: self.request_timeout, + reporter: Mutex::new(Ok(reporter)), + }), closer: Some(closer), }) } diff --git a/src/main.rs b/src/main.rs index 5e765c4..14db2fa 100644 --- a/src/main.rs +++ b/src/main.rs @@ -3,7 +3,7 @@ use std::error::Error; use array_bytes::TryFromHex; use serde_json::Value; use subapeye::{ - apeye::{api::*, runtime::Runtime, Apeye, Invoker}, + apeye::{api::*, runtime::Runtime, Apeye, Invoker, InvokerExt}, jsonrpc::WsInitializer, }; use subrpcer::{chain, net}; @@ -23,13 +23,13 @@ async fn main() -> Result<(), Box> { >::initialize(WsInitializer::default().uri("wss://kusama-rpc.polkadot.io")) .await?; - // for h in 
apeye.get_block_hash::<_, Vec>(Some([0, 1, 2])).await? { - // dbg!(apeye.get_block::(Some(&h)).await?); - // dbg!(apeye.get_header::(Some(&h)).await?); + // for h in apeye.get_block_hash::<_, Vec>(Some([0, 1, 2])).await?? { + // dbg!(apeye.get_block::(Some(&h)).await??); + // dbg!(apeye.get_header::(Some(&h)).await??); // } - // dbg!(apeye.get_finalized_head::().await?); - // dbg!(apeye.query::(&apeye.query_of::<()>("System", "Number")?.construct()?).await?); + // dbg!(apeye.get_finalized_head::().await??); + // dbg!(apeye.query::(&apeye.query_of::<()>("System", "Number")?.construct()?).await??); // dbg!( // apeye // .query::( @@ -44,7 +44,7 @@ async fn main() -> Result<(), Box> { // ))) // .construct()? // ) - // .await? + // .await?? // ); // dbg!( // apeye @@ -55,17 +55,17 @@ async fn main() -> Result<(), Box> { // ]) // .await? // ); - dbg!(apeye.version::().await?); + // dbg!(apeye.version::().await?.unwrap_err()); apeye - .subscribe::<_, _, ()>( + .subscribe::<_, ()>( ( "state_subscribeStorage", serde_json::json!([[ "0x26aa394eea5630e07c48ae0c9558cef70a98fdbe9ce6c55837576c60c7af3850" ]]), ), - "state_unsubscribeStorage", + "state_unsubscribeStorage".into(), ) .await .unwrap(); From 9fef5f093f48d25a5eece12875c440d5035d0e36 Mon Sep 17 00:00:00 2001 From: Xavier Lau Date: Fri, 5 May 2023 02:31:29 +0800 Subject: [PATCH 6/7] Subscribe v2 --- src/apeye.rs | 7 +- src/jsonrpc.rs | 146 +++++----- src/jsonrpc/ws.rs | 503 ++++++++++++++++++++++------------ src/jsonrpc/ws/initializer.rs | 26 +- src/main.rs | 8 +- 5 files changed, 405 insertions(+), 285 deletions(-) diff --git a/src/apeye.rs b/src/apeye.rs index fa57c3d..2ff3be4 100644 --- a/src/apeye.rs +++ b/src/apeye.rs @@ -58,12 +58,7 @@ where Req: IntoRequestRaw<&'a str>, R: DeserializeOwned, { - let r = request_raw.into(); - - Ok(self - .request(RequestRaw { method: r.method.into(), params: r.params }) - .await - .map(Self::map_result)?) + Ok(self.request(request_raw).await.map(Self::map_result)?) } async fn batch<'a, Req, Resp>(&self, requests_raw: Vec) -> Result>> diff --git a/src/jsonrpc.rs b/src/jsonrpc.rs index 9f45d14..4b13308 100644 --- a/src/jsonrpc.rs +++ b/src/jsonrpc.rs @@ -20,22 +20,26 @@ use prelude::*; // std use std::{ - borrow::Cow, fmt::{Debug, Formatter, Result as FmtResult}, future::Future, hash::Hash, marker::PhantomData, + mem, pin::Pin, sync::{ atomic::{AtomicUsize, Ordering}, Arc, }, + time::Duration, }; // crates.io use fxhash::FxHashMap; use serde::{Deserialize, Serialize}; use serde_json::{value::RawValue, Value}; -use tokio::sync::{mpsc, oneshot}; +use tokio::{ + sync::{mpsc, oneshot, Mutex}, + time, +}; /// JSONRPC Id. pub type Id = usize; @@ -47,8 +51,8 @@ pub type ResponseResult = StdResult; /// pub type SubscriptionResult = StdResult; -type MessageTx = mpsc::Sender; -type MessageRx = mpsc::Receiver; +// type MessageTx = mpsc::Sender; +// type MessageRx = mpsc::Receiver; type ErrorTx = oneshot::Sender; type ErrorRx = oneshot::Receiver; @@ -58,27 +62,23 @@ type ExitRx = oneshot::Receiver<()>; type ResponseTx = oneshot::Sender>; -type SubscriptionTx = mpsc::Sender; -type SubscriptionRx = mpsc::Receiver; - type RequestTx = ResponseTx; type BatchTx = ResponseTx>; type Pool = FxHashMap; type RequestPool = Pool; type BatchPool = Pool; -type SubscriptionPool = Pool; /// JSONRPC version. 
pub const VERSION: &str = "2.0"; -const E_EMPTY_LOCK: &str = "[jsonrpc] acquired `lock` is empty"; const E_ERROR_CHANNEL_CLOSED: &str = "[jsonrpc] error channel closed"; const E_INVALID_RESPONSE: &str = "[jsonrpc] unable to process response"; const E_MESSAGE_CHANNEL_CLOSED: &str = "[jsonrpc] message channel closed"; const E_NO_ERROR: &str = "[jsonrpc] no error to report"; const E_RESPONSE_CHANNEL_CLOSED: &str = "[jsonrpc] response channel closed"; const E_TX_NOT_FOUND: &str = "[jsonrpc] tx not found in the pool"; +const E_UNEXPECTED_LOCK_COUNT: &str = "[jsonrpc] unexpected lock count"; /// #[async_trait::async_trait] @@ -108,7 +108,7 @@ pub trait Jsonrpc: Sync + Send { /// Send a single request. async fn request<'a, R>(&self, request_raw: R) -> Result where - R: IntoRequestRaw>; + R: IntoRequestRaw<&'a str>; /// Send a batch of requests. async fn batch<'a, R>(&self, requests_raw: Vec) -> Result> @@ -158,6 +158,8 @@ trait PoolExt { type Key: PartialEq + Eq + Hash; type Value; + fn get_tx(&self, key: &Self::Key) -> &Self::Value; + fn take_tx(&mut self, key: &Self::Key) -> Self::Value; } impl PoolExt for Pool @@ -167,6 +169,10 @@ where type Key = K; type Value = V; + fn get_tx(&self, key: &Self::Key) -> &Self::Value { + self.get(key).expect(E_TX_NOT_FOUND) + } + fn take_tx(&mut self, key: &Self::Key) -> Self::Value { self.remove(key).expect(E_TX_NOT_FOUND) } @@ -307,14 +313,23 @@ struct RequestQueueGuard { _strong: Arc<()>, } +#[derive(Debug, Default)] +struct Pools { + requests: RequestPool, + batches: BatchPool, +} +// impl Pools { +// fn new() -> Self { +// Default::default() +// } +// } + #[derive(Debug)] enum Message { #[cfg(feature = "debug")] Debug(Id), - Request(Call), - Batch(Call>), - Subscribe(Subscription), - Unsubscribe(SubscriptionId), + Request(CallOnce), + Batch(CallOnce>), } // A single request object. // `id`: Request Id. @@ -323,93 +338,54 @@ enum Message { // // A batch requests object to send several request objects simultaneously. // `id`: The first request's id. -struct Call { +struct CallOnce { id: Id, request: String, tx: ResponseTx, } -impl Debug for Call +impl Debug for CallOnce where T: Debug, { fn fmt(&self, f: &mut Formatter) -> FmtResult { - write!(f, "Call {{ id: {}, request: {}, tx: {:?} }}", self.id, self.request, self.tx) + write!(f, "CallOnce {{ id: {}, request: {}, tx: {:?} }}", self.id, self.request, self.tx) } } -#[derive(Debug)] -struct Subscription { - id: String, - tx: SubscriptionTx, -} -#[derive(Debug, Default)] -struct Pools { - requests: RequestPool, - batches: BatchPool, - subscriptions: SubscriptionPool, -} -impl Pools { - fn new() -> Self { - Default::default() +#[derive(Clone, Debug)] +struct Reporter(Arc>>); +impl Reporter { + fn new(error_rx: ErrorRx) -> Self { + Self(Arc::new(Mutex::new(Ok(error_rx)))) } - async fn process_response(&mut self, response: &[u8]) -> Result<()> { - let r = response.trim_ascii_start(); - let first = r.first().ok_or(error::Jsonrpc::EmptyResponse)?; - - match first { - b'{' => - if let Ok(o) = serde_json::from_slice::(r) { - self.requests.take_tx(&o.id).send(Ok(Ok(o))).expect(E_RESPONSE_CHANNEL_CLOSED); - - return Ok(()); - } else if let Ok(e) = serde_json::from_slice::(r) { - // E.g. 
- // ``` - // {"jsonrpc":"2.0","error":{"code":-32601,"message":"Method not found"},"id":2} - // ``` - - self.requests.take_tx(&e.id).send(Ok(Err(e))).expect(E_RESPONSE_CHANNEL_CLOSED); - - return Ok(()); - } else if let Ok(o) = serde_json::from_slice::(r) { - self.subscriptions - .take_tx(&o.params.subscription) - .send(Ok(o)) - .await - .expect(E_RESPONSE_CHANNEL_CLOSED); - - return Ok(()); - }, - b'[' => - if let Ok(r) = serde_json::from_slice::>(r) { - let r = r - .into_iter() - .map(|r| { - if let Ok(o) = serde_json::from_str::(r.get()) { - Ok(Ok(o)) - } else if let Ok(e) = serde_json::from_str::(r.get()) { - Ok(Err(e)) - } else { - Err(error::almost_impossible(E_INVALID_RESPONSE))? - } - }) - .collect::>>()?; - - self.batches - .take_tx(&r.first().ok_or(error::Jsonrpc::EmptyBatch)?.id()) - .send(Ok(r)) - .expect(E_RESPONSE_CHANNEL_CLOSED); - - return Ok(()); - }, - _ => (), - } - - Err(error::almost_impossible(E_INVALID_RESPONSE))? + // Don't call this if code hasn't encountered any error yet, + // as it will block the asynchronous process. + async fn report(&self) -> Result<()> { + let mut error_rx = self.0.lock().await; + let e = + match mem::replace(&mut *error_rx, Err("[jsonrpc] temporary error placeholder".into())) + { + Ok(r) => r + .await + .map_err(|_| error::almost_impossible(E_ERROR_CHANNEL_CLOSED))? + .to_string(), + Err(e) => e, + }; + + *error_rx = Err(e.clone()); + + Err(error::Generic::Plain(e))? } } +async fn execute(future: F, timeout: Duration) -> Result<::Output> +where + F: Future, +{ + Ok(time::timeout(timeout, future).await.map_err(error::Generic::Timeout)?) +} + fn try_send(tx: oneshot::Sender, any: T, log: bool) -> bool where T: Debug, diff --git a/src/jsonrpc/ws.rs b/src/jsonrpc/ws.rs index 7a0d79e..27c8e0e 100644 --- a/src/jsonrpc/ws.rs +++ b/src/jsonrpc/ws.rs @@ -5,11 +5,7 @@ pub mod initializer; pub use initializer::*; // std -use std::{ - mem, - task::{Context, Poll}, - time::Duration, -}; +use std::task::{Context, Poll}; // crates.io use futures::{ future::{self, Either, Fuse}, @@ -17,7 +13,7 @@ use futures::{ FutureExt, SinkExt, Stream, StreamExt, }; use serde::de::DeserializeOwned; -use tokio::{net::TcpStream, sync::Mutex, time}; +use tokio::net::TcpStream; use tokio_stream::wrappers::IntervalStream; use tokio_tungstenite::{ tungstenite::{error::Result as WsResult, Message as WsMessage}, @@ -31,7 +27,7 @@ type GenericConnect = Box< Duration, WsSender, WsReceiver, - MessageRx, + MessageExtRx, ErrorTx, ExitRx, ) -> Pin + Send>> @@ -41,17 +37,43 @@ type GenericConnect = Box< type WsSender = SplitSink>, WsMessage>; type WsReceiver = SplitStream>>; +type MessageExtTx = mpsc::Sender; +type MessageExtRx = mpsc::Receiver; + +type NotificationTx = mpsc::Sender; +type NotificationRx = mpsc::Receiver; + +type ResultTx = oneshot::Sender>; +type SubscriptionIdTx = oneshot::Sender; + +type SubscriptionPool = Pool; +type NotificationPool = Pool; + +const E_INVALID_SUBSCRIPTION_ID: &str = "[jsonrpc::ws] invalid subscription id"; +const E_SUBSCRIPTION_ID_CHANNEL_CLOSED: &str = "[jsonrpc::ws] subscription id channel closed"; + /// A Ws instance. /// /// Use this to interact with the server. 
#[derive(Debug)] pub struct Ws { - inner: Arc, - closer: Option, + message_tx: MessageExtTx, + request_queue: RequestQueue, + timeout: Duration, + reporter: Reporter, + exit_tx: Option, +} +impl Ws { + async fn execute(&self, future: F) -> Result<::Output> + where + F: Future, + { + execute(future, self.timeout).await + } } impl Drop for Ws { fn drop(&mut self) { - if let Some(c) = self.closer.take() { + if let Some(c) = self.exit_tx.take() { let _ = c.send(()); } else { // @@ -61,132 +83,34 @@ impl Drop for Ws { #[async_trait::async_trait] impl Jsonrpc for Ws { async fn request<'a, R>(&self, request_raw: R) -> Result - where - R: IntoRequestRaw>, - { - self.inner.request(request_raw).await - } - - async fn batch<'a, R>(&self, requests_raw: Vec) -> Result> where R: IntoRequestRaw<&'a str>, - { - self.inner.batch(requests_raw).await - } -} -#[async_trait::async_trait] -impl JsonrpcExt for Ws { - async fn subscribe<'a, R, D>( - &self, - request_raw: R, - unsubscribe_method: String, - ) -> Result> - where - R: IntoRequestRaw<&'a str>, - { - let r = request_raw.into(); - let id = self - .inner - .request(RequestRaw { method: r.method.into(), params: r.params }) - .await? - .extract_err()? - .result - .as_str() - .ok_or(error::Jsonrpc::InvalidSubscriptionId)? - .to_owned(); - // TODO?: Configurable channel size. - let (tx, rx) = mpsc::channel(self.inner.request_queue.size); - - if self - .inner - .messenger - .send(Message::Subscribe(Subscription { id: id.clone(), tx })) - .await - .is_err() - { - self.inner.report().await?; - } - - Ok(Subscriber { - subscription_id: id, - subscription_rx: rx, - unsubscriber: self.inner.clone(), - unsubscribe_method, - _deserialize: Default::default(), - }) - } -} -#[derive(Debug)] -struct WsInner { - messenger: MessageTx, - request_queue: RequestQueue, - request_timeout: Duration, - reporter: Mutex>, -} -impl WsInner { - // Don't call this if code hasn't encountered any error yet, - // as it will block the asynchronous process. - async fn report(&self) -> Result<()> { - let mut reporter = self.reporter.lock().await; - let e = match mem::replace( - &mut *reporter, - Err("[jsonrpc::ws] temporary error placeholder".into()), - ) { - Ok(r) => r - .await - .map_err(|_| error::almost_impossible(E_ERROR_CHANNEL_CLOSED))? - .to_string(), - Err(e) => e, - }; - - *reporter = Err(e.clone()); - - Err(error::Generic::Plain(e))? - } - - async fn execute(&self, future: F) -> Result<::Output> - where - F: Future, - { - Ok(time::timeout(self.request_timeout, future).await.map_err(error::Generic::Timeout)?) - } -} -#[async_trait::async_trait] -impl Jsonrpc for WsInner { - async fn request<'a, R>(&self, request_raw: R) -> Result - where - R: IntoRequestRaw>, { let RequestQueueGuard { lock: id, .. 
} = self.request_queue.consume_once()?; let RequestRaw { method, params } = request_raw.into(); let (tx, rx) = oneshot::channel(); #[cfg(feature = "debug")] - if self.messenger.send(Message::Debug(id)).await.is_err() { - self.report().await?; + if self.message_tx.send(MessageExt::Debug(id)).await.is_err() { + self.reporter.report().await?; } if self - .messenger - .send(Message::Request(Call { + .message_tx + .send(MessageExt::Message(Message::Request(CallOnce { id, - request: serde_json::to_string(&Request { - jsonrpc: VERSION, - id, - method: &method, - params, - }) - .map_err(error::Generic::Serde)?, + request: serde_json::to_string(&Request { jsonrpc: VERSION, id, method, params }) + .map_err(error::Generic::Serde)?, tx, - })) + }))) .await .is_err() { - self.report().await?; + self.reporter.report().await?; } if let Ok(r) = self.execute(rx).await? { r } else { - self.report().await.and(Err(error::almost_impossible(E_NO_ERROR))?)? + self.reporter.report().await.and(Err(error::almost_impossible(E_NO_ERROR))?)? } } @@ -198,10 +122,15 @@ impl Jsonrpc for WsInner { Err(error::Jsonrpc::EmptyBatch)?; } - let RequestQueueGuard { lock: ids, .. } = self.request_queue.consume(requests_raw.len())?; - let id = ids.first().ok_or(error::almost_impossible(E_EMPTY_LOCK))?.to_owned(); + let RequestQueueGuard { lock: ids, .. } = + self.request_queue.consume(requests_raw.len() + 1)?; + + assert_eq!(requests_raw.len(), ids.len(), "{}", E_UNEXPECTED_LOCK_COUNT); + + let id = ids.get(0).expect(E_UNEXPECTED_LOCK_COUNT).to_owned(); let requests = ids .into_iter() + .skip(1) .zip(requests_raw.into_iter()) .map(|(id, request_raw)| { let RequestRaw { method, params } = request_raw.into(); @@ -212,8 +141,13 @@ impl Jsonrpc for WsInner { let request = serde_json::to_string(&requests).map_err(error::Generic::Serde)?; let (tx, rx) = oneshot::channel(); - if self.messenger.send(Message::Batch(Call { id, request, tx })).await.is_err() { - self.report().await?; + if self + .message_tx + .send(MessageExt::Message(Message::Batch(CallOnce { id, request, tx }))) + .await + .is_err() + { + self.reporter.report().await?; } if let Ok(mut r) = self.execute(rx).await? { // Each id is unique. @@ -221,8 +155,67 @@ impl Jsonrpc for WsInner { r } else { - self.report().await.and(Err(error::almost_impossible(E_NO_ERROR))?)? + self.reporter.report().await.and(Err(error::almost_impossible(E_NO_ERROR))?)? + } + } +} +#[async_trait::async_trait] +impl JsonrpcExt for Ws { + async fn subscribe<'a, R, D>( + &self, + request_raw: R, + unsubscribe_method: String, + ) -> Result> + where + R: IntoRequestRaw<&'a str>, + { + let RequestQueueGuard { lock: ids, .. } = self.request_queue.consume(2)?; + let id = ids.get(0).expect(E_UNEXPECTED_LOCK_COUNT).to_owned(); + let unsubscribe_id = ids.get(1).expect(E_UNEXPECTED_LOCK_COUNT).to_owned(); + let RequestRaw { method, params } = request_raw.into(); + // TODO?: Configurable channel size. + let (notification_tx, notification_rx) = mpsc::channel(self.request_queue.size); + let (result_tx, result_rx) = oneshot::channel(); + let (subscription_tx, subscription_rx) = oneshot::channel(); + + if self + .message_tx + .send(MessageExt::Subscribe(Call { + id, + request: serde_json::to_string(&Request { jsonrpc: VERSION, id, method, params }) + .map_err(error::Generic::Serde)?, + notification_tx, + result_tx, + subscription_tx, + })) + .await + .is_err() + { + self.reporter.report().await?; + } + if let Ok(r) = self.execute(result_rx).await? 
{ + r?; + } else { + self.reporter.report().await.and(Err(error::almost_impossible(E_NO_ERROR))?)?; } + + let subscription_id = if let Ok(r) = self.execute(subscription_rx).await? { + r + } else { + self.reporter.report().await.and(Err(error::almost_impossible(E_NO_ERROR))?)? + }; + + Ok(Subscriber { + message_tx: self.message_tx.clone(), + id, + subscription_id, + notification_rx, + unsubscribe_id, + unsubscribe_method, + reporter: self.reporter.clone(), + timeout: self.timeout.clone(), + _deserialize: Default::default(), + }) } } @@ -246,7 +239,7 @@ impl FutureSelector { interval: Duration, mut ws_tx: WsSender, mut ws_rx: WsReceiver, - message_rx: MessageRx, + message_rx: MessageExtRx, error_tx: ErrorTx, exit_rx: ExitRx, ) -> Pin + Send>> { @@ -258,7 +251,7 @@ impl FutureSelector { let mut rxs_fut = future::select(message_rx.next(), ws_rx.next()); // TODO: clean dead items? - let mut pool = Pools::new(); + let mut pool = PoolsExt::new(); // Minimum interval is 1ms. let interval_max = interval.max(Duration::from_millis(1)); let mut interval_max = IntervalStream::new(time::interval(interval_max)); @@ -275,10 +268,7 @@ impl FutureSelector { exit_or_interval_fut_, )) => { if !pool - .on_message_ws( - maybe_message.expect(E_MESSAGE_CHANNEL_CLOSED), - &mut ws_tx, - ) + .on_message(maybe_message.expect(E_MESSAGE_CHANNEL_CLOSED), &mut ws_tx) .await { return; @@ -292,7 +282,7 @@ impl FutureSelector { exit_or_interval_fut_, )) => { if let Some(response) = maybe_response { - if let Err(e) = pool.on_response_ws(response).await { + if let Err(e) = pool.on_response(response).await { try_send(error_tx, e, true); return; @@ -336,13 +326,13 @@ impl FutureSelector { interval: Duration, mut ws_tx: WsSender, mut ws_rx: WsReceiver, - mut message_rx: MessageRx, + mut message_rx: MessageExtRx, error_tx: ErrorTx, mut exit_rx: ExitRx, ) -> Pin + Send>> { Box::pin(async move { // TODO: clean dead items? - let mut pool = Pools::new(); + let mut pool = PoolsExt::new(); // Minimum interval is 1ms. let interval_max = interval.max(Duration::from_millis(1)); let mut interval_max = IntervalStream::new(time::interval(interval_max)); @@ -353,13 +343,13 @@ impl FutureSelector { loop { tokio::select! 
{ maybe_message = message_rx.recv() => { - if !pool.on_message_ws(maybe_message.expect(E_MESSAGE_CHANNEL_CLOSED), &mut ws_tx).await { + if !pool.on_message(maybe_message.expect(E_MESSAGE_CHANNEL_CLOSED), &mut ws_tx).await { return; } }, maybe_response = ws_rx.next() => { if let Some(response) = maybe_response { - if let Err(e) = pool.on_response_ws(response).await { + if let Err(e) = pool.on_response(response).await { try_send(error_tx, e, true); return; @@ -399,30 +389,58 @@ impl Default for FutureSelector { /// #[derive(Debug)] pub struct Subscriber { + message_tx: MessageExtTx, + id: Id, subscription_id: SubscriptionId, - subscription_rx: SubscriptionRx, - unsubscriber: Arc, + notification_rx: NotificationRx, + unsubscribe_id: Id, unsubscribe_method: String, + reporter: Reporter, + /// + pub timeout: Duration, _deserialize: PhantomData, } impl Subscriber { /// - pub async fn unsubscribe(&self) -> Result<()> { - self.unsubscriber - .messenger - .send(Message::Unsubscribe(self.subscription_id.clone())) + pub fn timeout(&mut self, timeout: Duration) -> &mut Self { + self.timeout = timeout; + + self + } + + /// + pub async fn unsubscribe(&self) -> Result { + self.message_tx + .send(MessageExt::Unsubscribe((self.id, self.subscription_id.clone()))) .await .map_err(|_| error::almost_impossible(E_MESSAGE_CHANNEL_CLOSED))?; + let (tx, rx) = oneshot::channel(); + let id = self.unsubscribe_id; - let _ = self - .unsubscriber - .request(( - self.unsubscribe_method.clone().into(), - Value::Array(vec![Value::Array(vec![Value::String(self.subscription_id.clone())])]), - )) - .await?; + if self + .message_tx + .send(MessageExt::Message(Message::Request(CallOnce { + id, + request: serde_json::to_string(&Request { + jsonrpc: VERSION, + id, + method: &self.unsubscribe_method, + params: [[&self.subscription_id]], + }) + .map_err(error::Generic::Serde)?, - Ok(()) + tx, + }))) + .await + .is_err() + { + self.reporter.report().await; + } + if let Ok(r) = execute(rx, self.timeout).await? { + r + } else { + self.reporter.report().await.and(Err(error::almost_impossible(E_NO_ERROR))?)? + } } } impl Subscriber @@ -430,8 +448,10 @@ where D: Unpin, { /// - pub async fn next_raw(&mut self) -> Option { - StreamExt::next(self).await + pub async fn next_raw(&mut self) -> Result> { + let timeout = self.timeout.clone(); + + execute(StreamExt::next(self), timeout).await } } impl Subscriber @@ -439,12 +459,12 @@ where D: DeserializeOwned + Unpin, { /// - pub async fn next(&mut self) -> Option> { - self.next_raw().await.map(|r| { + pub async fn next(&mut self) -> Result>> { + Ok(self.next_raw().await?.map(|r| { r.map_err(|e| error::Error::Jsonrpc(error::Jsonrpc::Response(e.error))).and_then(|o| { Ok(serde_json::from_value(o.params.result).map_err(error::Generic::Serde)?) 
}) - }) + })) } } impl Stream for Subscriber @@ -454,53 +474,130 @@ where type Item = SubscriptionResult; fn poll_next(mut self: Pin<&mut Self>, cx: &mut Context) -> Poll> { - self.subscription_rx.poll_recv(cx) + self.notification_rx.poll_recv(cx) } } -impl Call -where - T: Debug, -{ - async fn try_send_ws(self, tx: &mut WsSender, pool: &mut Pool>) -> bool { - if let Err(e) = tx.send(WsMessage::Text(self.request)).await { - try_send(self.tx, Err(error::Websocket::Tungstenite(e).into()), true) - } else { - pool.insert(self.id, self.tx); +#[derive(Debug, Default)] +struct PoolsExt { + regular_pools: Pools, + subscriptions: SubscriptionPool, + notifications: NotificationPool, +} +impl PoolsExt { + fn new() -> Self { + Default::default() + } - true + async fn process_response(&mut self, response: &[u8]) -> Result<()> { + let r = response.trim_ascii_start(); + let first = r.first().ok_or(error::Jsonrpc::EmptyResponse)?; + + match first { + b'{' => + if let Ok(o) = serde_json::from_slice::(r) { + if let Some((notification, subscription_tx)) = self.subscriptions.remove(&o.id) + { + let subscription_id = o + .result + .as_str() + .ok_or(error::almost_impossible(E_INVALID_SUBSCRIPTION_ID))? + .to_owned(); + + subscription_tx + .send(subscription_id.clone()) + .expect(E_SUBSCRIPTION_ID_CHANNEL_CLOSED); + + self.notifications.insert(subscription_id, notification); + } else { + self.regular_pools + .requests + .take_tx(&o.id) + .send(Ok(Ok(o))) + .expect(E_RESPONSE_CHANNEL_CLOSED); + } + + return Ok(()); + } else if let Ok(e) = serde_json::from_slice::(r) { + // E.g. + // ``` + // {"jsonrpc":"2.0","error":{"code":-32601,"message":"Method not found"},"id":2} + // ``` + + self.regular_pools + .requests + .take_tx(&e.id) + .send(Ok(Err(e))) + .expect(E_RESPONSE_CHANNEL_CLOSED); + + return Ok(()); + } else if let Ok(o) = serde_json::from_slice::(r) { + self.notifications + .get_tx(&o.params.subscription) + .send(Ok(o)) + .await + .expect(E_RESPONSE_CHANNEL_CLOSED); + + return Ok(()); + }, + b'[' => + if let Ok(r) = serde_json::from_slice::>(r) { + let r = r + .into_iter() + .map(|r| { + if let Ok(o) = serde_json::from_str::(r.get()) { + Ok(Ok(o)) + } else if let Ok(e) = serde_json::from_str::(r.get()) { + Ok(Err(e)) + } else { + Err(error::almost_impossible(E_INVALID_RESPONSE))? + } + }) + .collect::>>()?; + + self.regular_pools + .batches + .take_tx(&r.first().ok_or(error::Jsonrpc::EmptyBatch)?.id()) + .send(Ok(r)) + .expect(E_RESPONSE_CHANNEL_CLOSED); + + return Ok(()); + }, + _ => (), } + + Err(error::almost_impossible(E_INVALID_RESPONSE))? 
} -} -impl Pools { - async fn on_message_ws(&mut self, message: Message, tx: &mut WsSender) -> bool { + async fn on_message(&mut self, message: MessageExt, tx: &mut WsSender) -> bool { #[cfg(feature = "trace")] - tracing::trace!("Message({message:?})"); + tracing::trace!("MessageExt({message:?})"); match message { #[cfg(feature = "debug")] - Message::Debug(_) => {}, - Message::Request(c) => - if !c.try_send_ws(tx, &mut self.requests).await { + MessageExt::Message(Message::Debug(_)) => {}, + MessageExt::Message(Message::Request(c)) => + if !c.try_send_ws(tx, &mut self.regular_pools.requests).await { return false; }, - Message::Batch(c) => - if !c.try_send_ws(tx, &mut self.batches).await { + MessageExt::Message(Message::Batch(c)) => + if !c.try_send_ws(tx, &mut self.regular_pools.batches).await { return false; }, - Message::Subscribe(s) => { - self.subscriptions.insert(s.id, s.tx); - }, - Message::Unsubscribe(s) => { - let _ = self.subscriptions.remove(&s); + MessageExt::Subscribe(c) => + if !c.try_send_ws(tx, &mut self.subscriptions).await { + return false; + }, + MessageExt::Unsubscribe((id, subscription_id)) => { + let _ = self.subscriptions.remove(&id); + let _ = self.notifications.remove(&subscription_id); }, } true } - async fn on_response_ws(&mut self, response: WsResult) -> Result<()> { + async fn on_response(&mut self, response: WsResult) -> Result<()> { match response { Ok(m) => match m { WsMessage::Binary(r) => { @@ -540,3 +637,53 @@ impl Pools { } } } + +#[derive(Debug)] +enum MessageExt { + Message(Message), + Subscribe(Call), + Unsubscribe((Id, SubscriptionId)), +} +struct Call { + id: Id, + request: String, + notification_tx: NotificationTx, + result_tx: ResultTx, + subscription_tx: SubscriptionIdTx, +} +impl Call { + async fn try_send_ws(self, tx: &mut WsSender, pool: &mut SubscriptionPool) -> bool { + if let Err(e) = tx.send(WsMessage::Text(self.request)).await { + try_send(self.result_tx, Err(error::Websocket::Tungstenite(e).into()), true) + } else if try_send(self.result_tx, Ok(()), true) { + pool.insert(self.id, (self.notification_tx, self.subscription_tx)); + + true + } else { + false + } + } +} +impl Debug for Call { + fn fmt(&self, f: &mut Formatter) -> FmtResult { + write!( + f, + "Call {{ id: {}, request: {}, notification_tx: {:?}, result_tx: {:?} }}", + self.id, self.request, self.notification_tx, self.result_tx + ) + } +} +impl CallOnce +where + T: Debug, +{ + async fn try_send_ws(self, tx: &mut WsSender, pool: &mut Pool>) -> bool { + if let Err(e) = tx.send(WsMessage::Text(self.request)).await { + try_send(self.tx, Err(error::Websocket::Tungstenite(e).into()), true) + } else { + pool.insert(self.id, self.tx); + + true + } + } +} diff --git a/src/jsonrpc/ws/initializer.rs b/src/jsonrpc/ws/initializer.rs index 63e1f4e..60113c3 100644 --- a/src/jsonrpc/ws/initializer.rs +++ b/src/jsonrpc/ws/initializer.rs @@ -21,7 +21,7 @@ pub struct Initializer<'a> { /// Send tick with this interval to keep the WS alive. pub interval: Duration, /// Request timeout. - pub request_timeout: Duration, + pub timeout: Duration, /// Future selector. pub future_selector: FutureSelector, } @@ -52,9 +52,9 @@ impl<'a> Initializer<'a> { self } - /// Set the [`request_timeout`](#structfield.request_timeout). - pub fn request_timeout(mut self, request_timeout: Duration) -> Self { - self.request_timeout = request_timeout; + /// Set the [`timeout`](#structfield.timeout). 
+ pub fn timeout(mut self, timeout: Duration) -> Self { + self.timeout = timeout; self } @@ -68,20 +68,18 @@ impl<'a> Initializer<'a> { /// Initialize the connection. pub async fn initialize(self) -> Result { - let (messenger, reporter, closer) = self.connect().await?; + let (message_tx, error_rx, exit_tx) = self.connect().await?; Ok(Ws { - inner: Arc::new(WsInner { - messenger, - request_queue: RequestQueue::with_size(self.pool_size), - request_timeout: self.request_timeout, - reporter: Mutex::new(Ok(reporter)), - }), - closer: Some(closer), + message_tx, + request_queue: RequestQueue::with_size(self.pool_size), + timeout: self.timeout, + reporter: Reporter::new(error_rx), + exit_tx: Some(exit_tx), }) } - async fn connect(&self) -> Result<(MessageTx, ErrorRx, ExitTx)> { + async fn connect(&self) -> Result<(MessageExtTx, ErrorRx, ExitTx)> { let connect_inner = self.future_selector.connector(); let interval = self.interval; let (ws_tx, ws_rx) = tokio_tungstenite::connect_async(self.uri) @@ -106,7 +104,7 @@ impl<'a> Default for Initializer<'a> { uri: "ws://127.0.0.1:9944", pool_size: 1_024, interval: Duration::from_secs(10), - request_timeout: Duration::from_secs(30), + timeout: Duration::from_secs(30), future_selector: FutureSelector::default(), } } diff --git a/src/main.rs b/src/main.rs index 14db2fa..b40009e 100644 --- a/src/main.rs +++ b/src/main.rs @@ -57,8 +57,8 @@ async fn main() -> Result<(), Box> { // ); // dbg!(apeye.version::().await?.unwrap_err()); - apeye - .subscribe::<_, ()>( + let mut subscriber = apeye + .subscribe::<_, Value>( ( "state_subscribeStorage", serde_json::json!([[ @@ -70,5 +70,9 @@ async fn main() -> Result<(), Box> { .await .unwrap(); + while let Some(notification) = subscriber.next().await? { + dbg!(notification); + } + Ok(()) } From 7f03f07f7a6279cbe41f3a4221274f0c3b449eef Mon Sep 17 00:00:00 2001 From: Xavier Lau Date: Fri, 5 May 2023 02:34:53 +0800 Subject: [PATCH 7/7] Test --- src/jsonrpc/ws.rs | 2 +- src/main.rs | 66 +++++++++++++++++++++++------------------------ 2 files changed, 34 insertions(+), 34 deletions(-) diff --git a/src/jsonrpc/ws.rs b/src/jsonrpc/ws.rs index 27c8e0e..290be94 100644 --- a/src/jsonrpc/ws.rs +++ b/src/jsonrpc/ws.rs @@ -434,7 +434,7 @@ impl Subscriber { .await .is_err() { - self.reporter.report().await; + self.reporter.report().await?; } if let Ok(r) = execute(rx, self.timeout).await? { r diff --git a/src/main.rs b/src/main.rs index b40009e..800bc22 100644 --- a/src/main.rs +++ b/src/main.rs @@ -23,39 +23,39 @@ async fn main() -> Result<(), Box> { >::initialize(WsInitializer::default().uri("wss://kusama-rpc.polkadot.io")) .await?; - // for h in apeye.get_block_hash::<_, Vec>(Some([0, 1, 2])).await?? { - // dbg!(apeye.get_block::(Some(&h)).await??); - // dbg!(apeye.get_header::(Some(&h)).await??); - // } + for h in apeye.get_block_hash::<_, Vec>(Some([0, 1, 2])).await?? { + dbg!(apeye.get_block::(Some(&h)).await??); + dbg!(apeye.get_header::(Some(&h)).await??); + } - // dbg!(apeye.get_finalized_head::().await??); - // dbg!(apeye.query::(&apeye.query_of::<()>("System", "Number")?.construct()?).await??); - // dbg!( - // apeye - // .query::( - // &apeye - // .query_of("Staking", "ErasValidatorPrefs")? - // .keys(Keys::Raw(&( - // 5_044_u32, - // ::AccountId::try_from_hex( - // "0x305b1689cfee594c19a642a2fcd554074c93d62181c0d4117ebe196bd7c62b79" - // ) - // .unwrap() - // ))) - // .construct()? - // ) - // .await?? 
- // ); - // dbg!( - // apeye - // .batch::<_, Value>(vec![ - // chain::get_block_hash_raw(>::None), - // chain::get_finalized_head_raw(), - // net::version_raw(), - // ]) - // .await? - // ); - // dbg!(apeye.version::().await?.unwrap_err()); + dbg!(apeye.get_finalized_head::().await??); + dbg!(apeye.query::(&apeye.query_of::<()>("System", "Number")?.construct()?).await??); + dbg!( + apeye + .query::( + &apeye + .query_of("Staking", "ErasValidatorPrefs")? + .keys(Keys::Raw(&( + 5_044_u32, + ::AccountId::try_from_hex( + "0x305b1689cfee594c19a642a2fcd554074c93d62181c0d4117ebe196bd7c62b79" + ) + .unwrap() + ))) + .construct()? + ) + .await?? + ); + dbg!( + apeye + .batch::<_, Value>(vec![ + chain::get_block_hash_raw(>::None), + chain::get_finalized_head_raw(), + net::version_raw(), + ]) + .await? + ); + dbg!(apeye.version::().await?.unwrap_err()); let mut subscriber = apeye .subscribe::<_, Value>( @@ -71,7 +71,7 @@ async fn main() -> Result<(), Box> { .unwrap(); while let Some(notification) = subscriber.next().await? { - dbg!(notification); + dbg!(notification?); } Ok(())
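
The demo above drains `subscriber.next()` until the connection closes and never tears the subscription down. Assuming only the `Subscriber` API introduced in this series (`next`, `unsubscribe`, and the two-layer `Result` separating transport/timeout failures from JSONRPC or decode errors), a bounded drop-in variant of that final loop could look like the sketch below; the notification count is arbitrary and the snippet has not been compiled against this revision.

```
// Drop-in replacement for the trailing `while let` loop in `main`; illustrative only.
let mut received = 0_u32;

while let Some(notification) = subscriber.next().await? {
	// Outer `Result`: timeout/transport errors. Inner `Result`: JSONRPC errors or
	// failures while decoding the notification payload into `Value`.
	dbg!(notification?);

	received += 1;

	if received == 3 {
		break;
	}
}

// Ask the node to stop streaming storage changes before dropping the handle.
let _ = subscriber.unsubscribe().await?;
```
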