Repository: nomic-io/nomic Branch: develop Commit: 3dccaf5d6349 Files: 144 Total size: 62.3 MB Directory structure: gitextract_c0qz_ec1/ ├── .github/ │ └── workflows/ │ └── ci.yml ├── .gitignore ├── Cargo.toml ├── LICENSE ├── README.md ├── SECURITY.md ├── build.rs ├── build.sh ├── genesis/ │ ├── stakenet-2.json │ ├── stakenet-3.json │ ├── testnet-4.json │ └── testnet-4d.json ├── networks/ │ ├── stakenet.toml │ └── testnet.toml ├── rest/ │ ├── Cargo.toml │ └── src/ │ └── main.rs ├── rust-toolchain ├── rustfmt.toml ├── src/ │ ├── airdrop.rs │ ├── app/ │ │ └── migrations.rs │ ├── app.rs │ ├── babylon/ │ │ ├── mod.rs │ │ ├── proto/ │ │ │ ├── babylon/ │ │ │ │ ├── btccheckpoint/ │ │ │ │ │ └── v1/ │ │ │ │ │ ├── btccheckpoint.proto │ │ │ │ │ ├── genesis.proto │ │ │ │ │ ├── params.proto │ │ │ │ │ ├── query.proto │ │ │ │ │ └── tx.proto │ │ │ │ ├── btclightclient/ │ │ │ │ │ └── v1/ │ │ │ │ │ ├── btclightclient.proto │ │ │ │ │ ├── event.proto │ │ │ │ │ ├── genesis.proto │ │ │ │ │ ├── params.proto │ │ │ │ │ ├── query.proto │ │ │ │ │ └── tx.proto │ │ │ │ ├── btcstaking/ │ │ │ │ │ └── v1/ │ │ │ │ │ ├── btcstaking.proto │ │ │ │ │ ├── events.proto │ │ │ │ │ ├── genesis.proto │ │ │ │ │ ├── incentive.proto │ │ │ │ │ ├── params.proto │ │ │ │ │ ├── pop.proto │ │ │ │ │ ├── query.proto │ │ │ │ │ └── tx.proto │ │ │ │ ├── checkpointing/ │ │ │ │ │ └── v1/ │ │ │ │ │ ├── bls_key.proto │ │ │ │ │ ├── checkpoint.proto │ │ │ │ │ ├── events.proto │ │ │ │ │ ├── genesis.proto │ │ │ │ │ ├── query.proto │ │ │ │ │ └── tx.proto │ │ │ │ ├── epoching/ │ │ │ │ │ └── v1/ │ │ │ │ │ ├── epoching.proto │ │ │ │ │ ├── events.proto │ │ │ │ │ ├── genesis.proto │ │ │ │ │ ├── params.proto │ │ │ │ │ ├── query.proto │ │ │ │ │ └── tx.proto │ │ │ │ ├── finality/ │ │ │ │ │ └── v1/ │ │ │ │ │ ├── events.proto │ │ │ │ │ ├── finality.proto │ │ │ │ │ ├── genesis.proto │ │ │ │ │ ├── params.proto │ │ │ │ │ ├── query.proto │ │ │ │ │ └── tx.proto │ │ │ │ ├── incentive/ │ │ │ │ │ ├── genesis.proto │ │ │ │ │ ├── incentive.proto │ │ 
│ │ │ ├── params.proto │ │ │ │ │ ├── query.proto │ │ │ │ │ └── tx.proto │ │ │ │ ├── monitor/ │ │ │ │ │ └── v1/ │ │ │ │ │ ├── genesis.proto │ │ │ │ │ └── query.proto │ │ │ │ └── zoneconcierge/ │ │ │ │ └── v1/ │ │ │ │ ├── genesis.proto │ │ │ │ ├── packet.proto │ │ │ │ ├── params.proto │ │ │ │ ├── query.proto │ │ │ │ ├── tx.proto │ │ │ │ └── zoneconcierge.proto │ │ │ ├── btccheckpoint.rs │ │ │ ├── buf.gen.yaml │ │ │ ├── buf.yaml │ │ │ ├── gen/ │ │ │ │ ├── babylon.btccheckpoint.v1.rs │ │ │ │ ├── babylon.btclightclient.v1.rs │ │ │ │ ├── babylon.btcstaking.v1.rs │ │ │ │ ├── babylon.checkpointing.v1.rs │ │ │ │ ├── babylon.epoching.v1.rs │ │ │ │ ├── babylon.finality.v1.rs │ │ │ │ ├── babylon.incentive.rs │ │ │ │ ├── babylon.monitor.v1.rs │ │ │ │ └── babylon.zoneconcierge.v1.rs │ │ │ └── mod.rs │ │ └── relayer.rs │ ├── bin/ │ │ ├── create-checkpoint.rs │ │ ├── eth-bootstrap.rs │ │ ├── get-reserve-scripts.rs │ │ └── nomic.rs │ ├── bitcoin/ │ │ ├── adapter.rs │ │ ├── checkpoint.json │ │ ├── checkpoint.rs │ │ ├── deposit_index.rs │ │ ├── header_queue.rs │ │ ├── mod.rs │ │ ├── outpoint_set.rs │ │ ├── recovery.rs │ │ ├── relayer.rs │ │ ├── signatory.rs │ │ ├── signer.rs │ │ ├── signet_checkpoint.json │ │ ├── testnet_checkpoint.json │ │ └── threshold_sig.rs │ ├── cosmos.rs │ ├── error.rs │ ├── ethereum/ │ │ ├── bootstrap/ │ │ │ └── sepolia.json │ │ ├── consensus/ │ │ │ ├── mod.rs │ │ │ ├── relayer.rs │ │ │ └── test_fixtures.json │ │ ├── contracts/ │ │ │ ├── Babylon.json │ │ │ ├── Babylon.sol │ │ │ ├── CosmosERC20.json │ │ │ ├── CosmosToken.sol │ │ │ ├── Nomic.json │ │ │ └── Nomic.sol │ │ ├── mod.rs │ │ ├── proofs.rs │ │ ├── relayer.rs │ │ └── signer.rs │ ├── frost/ │ │ ├── dkg.rs │ │ ├── encoding.rs │ │ ├── mod.rs │ │ ├── signer.rs │ │ └── signing.rs │ ├── incentives.rs │ ├── lib.rs │ ├── network.rs │ └── utils.rs ├── stakenet_reserve_scripts.csv ├── testnet_addresses.csv ├── tests/ │ ├── bitcoin.rs │ ├── data/ │ │ └── block-data │ ├── header_queue.rs │ ├── ibc.rs │ ├── node.rs │ 
├── node_spawn.rs │ └── relayer.rs └── wasm/ ├── Cargo.toml ├── index.html └── src/ ├── error.rs ├── lib.rs ├── types.rs └── web_client.rs ================================================ FILE CONTENTS ================================================ ================================================ FILE: .github/workflows/ci.yml ================================================ name: CI on: push: branches: [master, develop] pull_request: branches: [master, develop] concurrency: group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }} cancel-in-progress: true env: CARGO_TERM_COLOR: always NOMIC_CLEANUP_LEGACY_BUILD: 1 jobs: test-base: runs-on: ubuntu-latest steps: - name: Checkout uses: actions/checkout@v2 - name: Use Nightly uses: actions-rs/toolchain@v1 with: toolchain: nightly-2024-07-21 override: true - name: Cache uses: actions/cache@v4 with: path: | ~/.cargo ./target key: ${{ runner.os }}-test-base-${{ hashFiles('Cargo.lock') }} restore-keys: | ${{ runner.os }}-test-base- - name: Test uses: actions-rs/cargo@v1 with: command: test args: --verbose --no-default-features --features=full test-testnet: runs-on: ubuntu-latest steps: - name: Checkout uses: actions/checkout@v2 - name: Use Nightly uses: actions-rs/toolchain@v1 with: toolchain: nightly-2024-07-21 override: true - name: Cache uses: actions/cache@v4 with: path: | ~/.cargo ./target key: ${{ runner.os }}-test-testnet-${{ hashFiles('Cargo.lock') }} restore-keys: | ${{ runner.os }}-test-testnet- - name: Test uses: actions-rs/cargo@v1 with: command: test args: --verbose --no-default-features --features=full,testnet test-bitcoin: runs-on: ubuntu-latest steps: - name: Checkout uses: actions/checkout@v2 - name: Use Nightly uses: actions-rs/toolchain@v1 with: toolchain: nightly-2024-07-21 override: true - name: Cache uses: actions/cache@v4 with: path: | ~/.cargo ./target key: ${{ runner.os }}-test-testnet-${{ hashFiles('Cargo.lock') }} restore-keys: | ${{ runner.os }}-test-testnet- - name: 
Test env: RUST_LOG: info uses: actions-rs/cargo@v1 with: command: test args: --verbose --features=devnet bitcoin -- --ignored check-rest: runs-on: ubuntu-latest steps: - name: Checkout uses: actions/checkout@v2 - name: Use Nightly uses: actions-rs/toolchain@v1 with: toolchain: nightly-2024-07-21 override: true - name: Check Rest uses: actions-rs/cargo@v1 with: command: check args: --manifest-path rest/Cargo.toml --verbose build-wasm: runs-on: ubuntu-latest steps: - name: Checkout uses: actions/checkout@v2 - name: Use Nightly uses: actions-rs/toolchain@v1 with: toolchain: nightly-2024-07-21 override: true - name: Install run: curl https://rustwasm.github.io/wasm-pack/installer/init.sh -sSf | sh - name: Build Wasm working-directory: ./wasm run: wasm-pack -v build --target web coverage: runs-on: ubuntu-latest steps: - name: Checkout uses: actions/checkout@v2 - name: Use Nightly uses: actions-rs/toolchain@v1 with: toolchain: nightly-2024-07-21 components: llvm-tools-preview override: true - name: Cache id: cache uses: actions/cache@v4 with: path: | ~/.cargo ./target key: ${{ runner.os }}-coverage-${{ hashFiles('Cargo.lock') }} restore-keys: | ${{ runner.os }}-coverage- - if: ${{ steps.cache.outputs.cache-hit != 'true' }} name: Install Coverage Tooling uses: actions-rs/cargo@v1 with: command: install args: cargo-llvm-cov --force - name: Run Coverage uses: actions-rs/cargo@v1 with: command: llvm-cov args: --no-cfg-coverage-nightly --workspace --lcov --output-path lcov.info - name: Upload to codecov.io uses: codecov/codecov-action@v1 with: token: ${{ secrets.CODECOV_TOKEN }} files: lcov.info fail_ci_if_error: true format: runs-on: ubuntu-latest steps: - name: Checkout uses: actions/checkout@v2 - name: Use Nightly uses: actions-rs/toolchain@v1 with: toolchain: nightly-2024-07-21 components: rustfmt override: true - name: Check uses: actions-rs/cargo@v1 with: command: fmt args: --all -- --check clippy: runs-on: ubuntu-latest steps: - name: Checkout uses: actions/checkout@v2 
- name: Use Nightly uses: actions-rs/toolchain@v1 with: toolchain: nightly-2024-07-21 components: clippy override: true - name: Cache uses: actions/cache@v4 with: path: | ~/.cargo ./target key: ${{ runner.os }}-clippy-${{ hashFiles('Cargo.lock') }} restore-keys: | ${{ runner.os }}-clippy- - name: Check uses: actions-rs/clippy-check@v1 with: token: ${{ secrets.GITHUB_TOKEN }} args: -- -D warnings ================================================ FILE: .gitignore ================================================ target **/target/ merk.db/ .vscode/ .DS_Store # Added by cargo /target wasm/Cargo.lock ================================================ FILE: Cargo.toml ================================================ [package] name = "nomic" version = "9.2.0" authors = ["Nomic DAO Foundation "] edition = "2021" default-run = "nomic" [dependencies] bitcoin = { version = "0.29.2", features = ["serde", "rand"] } orga = { git = "https://github.com/nomic-io/orga.git", rev = "3b3d25ade40d81cb64f19335535e3a47bb47778f", features = [ "merk-verify", "feat-ibc", ] } thiserror = "1.0.30" ed = { git = "https://github.com/nomic-io/ed", rev = "a657be856792039ff60c2f67e7920e38cd3acffc" } clap = { version = "3.2.16", features = ["derive"], optional = true } tokio = { version = "1.39.2", features = ["full"], optional = true } base64 = "0.13.0" js-sys = { version = "0.3.55" } serde = "1.0.208" serde_json = "1.0.125" csv = { version = "1.1.6", optional = true } bech32 = { version = "0.9.1" } futures = "0.3.21" toml_edit = "0.13.4" tendermint-rpc = { version = "0.38.0", features = [ "http-client", ], optional = true } bitcoincore-rpc-async = { package = "bitcoincore-rpc-async2", version = "4.0.2", optional = true } bitcoin-script = "0.1.1" warp = { version = "0.3.2", optional = true } derive_more = "0.99.17" pretty_env_logger = { git = "https://github.com/seanmonstar/pretty-env-logger", rev = "f9e35b6dbbf06de55222c944c9e1e176ce73b3a7" } reqwest = { version = "0.11.16", optional = true, features = 
["json"] } rand = { version = "0.8.5", optional = true } sha2 = "0.10.6" bytes = "1.2.1" serde-big-array = "0.4.1" log = "0.4.17" hex = "0.4.3" toml = { version = "0.7.2", features = ["parse"] } split-iter = "0.1.0" chrono = "0.4.19" tempfile = "3" home = { version = "0.5.5", optional = true } semver = "1.0.18" ics23 = "0.12.0" cosmos-sdk-proto = { version = "0.23.0", optional = true } prometheus_exporter = "0.8.5" lazy_static = "1.4.0" prost = "0.13.3" cosmrs = "0.14.0" ripemd = "0.1.3" frost-secp256k1-tr = { git = "https://github.com/ZcashFoundation/frost", rev = "51fa7d09f3742563a35d065afcff6ad486430dac", features = [ "nightly", ], optional = true } serde-hex = "0.1.0" alloy-core = { version = "0.8.5", optional = true } alloy-sol-types = { version = "0.8.5", optional = true } alloy-contract = { version = "0.3.6", optional = true } alloy-provider = { version = "0.3.6", optional = true } alloy-signer-local = { version = "0.3.6", optional = true } helios-consensus-core = { package = "consensus-core", git = "https://github.com/a16z/helios.git", rev = "0.7.0", optional = true } ethereum_ssz = { version = "0.6.0", optional = true } ssz_types = { version = "0.7.0", optional = true } alloy-trie = { version = "0.6.0", optional = true } alloy-primitives = { version = "0.8.5", optional = true } alloy-rlp = { version = "0.3.8", optional = true } tree_hash = { version = "0.7.0", optional = true } rlp = { version = "0.5.1", optional = true } ethereum-triedb = { version = "0.1.1", optional = true } trie-db = { version = "0.28", optional = true } rlp-derive = { version = "0.2.0", optional = true } primitive-types = { version = "0.12", features = ["rlp"], optional = true } hex-literal = { version = "0.4.1", optional = true } alloy-rpc-types = { version = "0.3.6", optional = true } alloy-transport = { version = "0.3.6", optional = true } alloy-rpc-types-eth = { version = "0.3.6", optional = true } ruint = { version = "1.12.3", optional = true } [dev-dependencies] bitcoind = { 
version = "0.27.0", features = ["22_0"] } bitcoin_hashes = "0.11.0" serde_json = "1.0.68" serde = "1.0.130" mutagen = "0.1.2" curl = "0.4.44" urlencoding = "2.1.2" crossbeam-channel = "0.5.8" chrono = "0.4.19" serial_test = "2.0.0" tempfile = "3.12.0" alloy-node-bindings = "0.3.6" [build-dependencies] toml = { version = "0.7.2", features = ["parse"] } semver = "1.0.18" glob = "0.3.1" [features] default = ["full", "testnet"] full = [ "bitcoincore-rpc-async", "clap", "tokio", "orga/merk-full", "orga/abci", "orga/state-sync", "csv", "warp", "rand", "reqwest", "tendermint-rpc", "cosmos-sdk-proto", "home", ] testnet = [] devnet = [] legacy-bin = [] signet = [] ethereum = [ "alloy-core", "alloy-core/dyn-abi", "alloy-core/json-abi", "alloy-sol-types", "alloy-primitives", "alloy-trie", "ethereum-triedb", "alloy-rlp", "primitive-types", "ethereum_ssz", "ssz_types", "tree_hash", "rlp", "rlp-derive", "trie-db", "ruint", "helios-consensus-core", "alloy-rpc-types-eth", ] ethereum-full = [ "ethereum", "alloy-contract", "alloy-provider", "alloy-rpc-types", "alloy-transport", "alloy-provider/anvil-node", "alloy-provider/rpc-api", "alloy-signer-local", ] frost = ["frost-secp256k1-tr"] babylon = ["frost"] [profile.release] overflow-checks = true [lints.rust] unexpected_cfgs = { level = "warn", check-cfg = ['cfg(fuzzing)'] } [[bin]] name = "nomic" [[bin]] name = "create-checkpoint" [[bin]] name = "eth-bootstrap" required-features = ["ethereum-full"] [[test]] name = "bitcoin" required-features = ["devnet"] ================================================ FILE: LICENSE ================================================ Apache License Version 2.0, January 2004 http://www.apache.org/licenses/ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 1. Definitions. "License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. 
"Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. "Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. "You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. "Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. "Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. "Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). "Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. 
"Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." "Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. 2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. 3. Grant of Patent License. 
Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. 4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: (a) You must give any other recipients of the Work or Derivative Works a copy of this License; and (b) You must cause any modified files to carry prominent notices stating that You changed the files; and (c) You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and (d) If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the 
Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. 5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. 6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. 7. Disclaimer of Warranty. 
Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. 8. Limitation of Liability. In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. 9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. END OF TERMS AND CONDITIONS APPENDIX: How to apply the Apache License to your work. 
To apply the Apache License to your work, attach the following boilerplate notice, with the fields enclosed by brackets "[]" replaced with your own identifying information. (Don't include the brackets!) The text should be enclosed in the appropriate comment syntax for the file format. We also recommend that a file or class name and description of purpose be included on the same "printed page" as the copyright notice for easier identification within third-party archives. Copyright [yyyy] [name of copyright owner] Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ================================================ FILE: README.md ================================================

Nomic

Decentralized Custody Engine for Bitcoin

![CI](https://github.com/nomic-io/nomic/actions/workflows/ci.yml/badge.svg) Nomic is a blockchain that offers a decentralized custody solution for Bitcoin. Built on Turbofish’s [Orga](https://github.com/turbofish-org/orga), a custom high-performance blockchain application framework. Nomic mints nBTC, a token backed 1:1 with BTC, using [IBC](https://www.ibcprotocol.dev/) for secure and efficient bridging. ## Running a Node Running a node increases the health of the network by decentralizing ledger validation and data, even for non-validator nodes. Community members are encouraged to run a node, especially when regularly interacting with the network via transactions and queries. [Nomic Network Docs](https://docs.nomic.io/) ## Integrating with Nomic Integrating with nBTC enables accepting Bitcoin deposits with Interchain Deposits to any IBC-enabled blockchain. [nBTC Docs](https://github.com/nomic-io/nomic-bitcoin-js/blob/main/README.md) ## Contributing Nomic is an open-source project spearheaded by contributors. Anyone is able to contribute to Nomic via GitHub. [Contribute to Nomic](https://github.com/nomic-io/nomic/contribute) ## Security Nomic is currently undergoing security audits. Vulnerabilities should not be reported through public channels, including GitHub Issues. You can report a vulnerability via GitHub's Private Vulnerability Reporting or via the Nomic DAO Foundation at `foundation@nomic.io`. [Report a Vulnerability](https://github.com/nomic-io/nomic/security/advisories/new) ## License Licensed under the Apache License, Version 2.0 (the "License"); you may not use the files in this repository except in compliance with the License. You may obtain a copy of the License at https://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the License for the specific language governing permissions and limitations under the License. --- Copyright © 2024 Nomic DAO Foundation. ================================================ FILE: SECURITY.md ================================================ # Security Policy ## Introduction Security researchers are essential in identifying vulnerabilities that may impact the Nomic ecosystem. If you have discovered a security vulnerability in the Nomic network or any repository managed by the Nomic DAO Foundation, we encourage you to notify us using one of the methods outlined below. ### Guidelines for Responsible Vulnerability Testing and Reporting 1. **Refrain from testing vulnerabilities on publicly accessible environments**, including but not limited to: - Nomic mainnet. - Nomic frontend. - Nomic public testnets. - Nomic testnet frontend. 2. **Avoid reporting security vulnerabilities through public channels, including GitHub issues.** ## Security Audits | Date | Auditor | Scope | Report | | ---: | :---: | :--- | :---: | | October 2024 | Trail of Bits | `nomic`
`nomic-bitcoin-js` | [📄](https://github.com/trailofbits/publications/blob/master/reviews/2024-11-nomic-securityreview.pdf) | ### Dependency Security Audits | Date | Auditor | Scope | Report | | ---: | :---: | :--- | :---: | | October 2024 | Trail of Bits | `orga` `merk` `ed` `abci2` | [📄](https://github.com/trailofbits/publications/blob/master/reviews/2024-11-orgaandmerk-securityreview.pdf) | ## Reporting a Vulnerability To privately report a security vulnerability, please choose one of the following options: ### 1. Email Send your detailed vulnerability report to `security@nomic.io`. ### 2. GitHub Private Vulnerability Reporting Utilize [GitHub's Private Vulnerability Reporting](https://github.com/nomic-io/nomic/security/advisories/new) for confidential disclosure. ## Submit Vulnerability Report When reporting a vulnerability through either method, please include the following details to aid in assessment: - Type of vulnerability. - Description of the vulnerability. - Steps to reproduce the issue. - Impact of the issue. - Explanation of how an attacker could exploit it. ## Vulnerability Disclosure Process 1. **Initial Report**: Submit the vulnerability via one of the above channels. 2. **Confirmation**: We will confirm receipt of your report within 48 hours. 3. **Assessment**: Our security team will evaluate the vulnerability and inform you of its severity and the estimated time frame for resolution. 4. **Resolution**: Once fixed, you will be contacted to verify the solution. 5. **Public Disclosure**: Details of the vulnerability may be publicly disclosed after ensuring it poses no further risk. During the vulnerability disclosure process, we ask security researchers to keep vulnerabilities and communications around vulnerability submissions private and confidential until a patch is developed. Should a security issue require a network upgrade, additional time may be needed to raise a governance proposal and complete the upgrade. 
During this time: - Avoid exploiting any vulnerabilities you discover. - Demonstrate good faith by not disrupting or degrading Nomic's services. ## Severity Characterization | Severity | Description | |--------------|--------------------------------------------------------------------------| | **CRITICAL** | Immediate threat to critical systems (e.g., chain halts, funds at risk). | | **HIGH** | Significant impact on major functionality. | | **MEDIUM** | Impacts minor features or exposes non-sensitive data. | | **LOW** | Minimal impact. | ## Bug Bounty Though Nomic does not yet have an official bug bounty program, the Nomic DAO Foundation generally offers rewards to security researchers who responsibly disclose vulnerabilities. Bounties are generally awarded for vulnerabilities classified as **high** or **critical** severity. Bounty amounts will be determined during the disclosure process, after the severity has been assessed. > [!WARNING] > Targeting production environments will disqualify you from receiving any bounty. ## Feedback on this Policy For recommendations on how to improve this policy, either submit a pull request or email `security@nomic.io`. 
================================================ FILE: build.rs ================================================
// Cargo build script. Always embeds the current git branch name into the
// binary (GIT_BRANCH); with the `legacy-bin` feature it additionally resolves
// a pinned "legacy" nomic revision and builds it via build.sh.
fn main() {
    // Record the current git branch so the binary can report it at runtime.
    let branch_name = std::process::Command::new("git")
        .args(["symbolic-ref", "--short", "HEAD"])
        .output()
        .unwrap();
    let branch_name = String::from_utf8(branch_name.stdout)
        .unwrap()
        .trim()
        .to_string();
    println!("cargo:rustc-env=GIT_BRANCH={}", branch_name);

    #[cfg(feature = "legacy-bin")]
    {
        println!("cargo:rerun-if-changed=build.sh");
        println!("cargo:rerun-if-env-changed=NOMIC_LEGACY_VERSION");
        println!("cargo:rerun-if-env-changed=NOMIC_LEGACY_REV");

        // The legacy revision is resolved in priority order: an exact rev
        // from NOMIC_LEGACY_REV, a semver requirement from
        // NOMIC_LEGACY_VERSION, or the `legacy_version` key of the active
        // network's TOML config.
        let version = std::env::var("NOMIC_LEGACY_VERSION");
        let rev = std::env::var("NOMIC_LEGACY_REV");
        let rev = if let Ok(rev) = rev {
            rev
        } else {
            let mut version_req_str = if let Ok(version_req_str) = version {
                version_req_str
            } else {
                // No env override: read the requirement from the network
                // config (testnet vs. stakenet chosen by feature flag).
                #[cfg(feature = "testnet")]
                let toml = {
                    println!("cargo:rerun-if-changed=networks/testnet.toml");
                    include_str!("networks/testnet.toml")
                };
                #[cfg(not(feature = "testnet"))]
                let toml = {
                    println!("cargo:rerun-if-changed=networks/stakenet.toml");
                    include_str!("networks/stakenet.toml")
                };
                let config: toml::Value = toml::from_str(toml).unwrap();
                if let Some(legacy_version) = config.as_table().unwrap().get("legacy_version") {
                    legacy_version.as_str().unwrap().to_string()
                } else {
                    // No legacy build needed for this network: emit
                    // placeholder env vars and stop the build script early.
                    println!("No legacy_version set in network config");
                    println!("cargo:rustc-env=NOMIC_LEGACY_BUILD_PATH=/dev/null");
                    println!("cargo:rustc-env=NOMIC_LEGACY_BUILD_VERSION=");
                    return;
                }
            };
            // A bare version like "1.2.3" is treated as an exact requirement
            // ("=1.2.3"); semver would otherwise read it as a caret range.
            if version_req_str.chars().next().unwrap().is_numeric() {
                version_req_str = format!("={}", version_req_str);
            }
            let version_req = semver::VersionReq::parse(&version_req_str).unwrap();
            // Ensure all release tags are available locally before scanning.
            assert!(std::process::Command::new("git")
                .args(["fetch", "--tags", "--force"])
                .spawn()
                .unwrap()
                .wait_with_output()
                .unwrap()
                .status
                .success());
            // Pick the highest "vX.Y.Z" tag satisfying the requirement.
            let version = std::process::Command::new("git")
                .args(["tag"])
                .output()
                .unwrap()
                .stdout
                .split(|&b| b == b'\n')
                .map(|b| String::from_utf8(b.to_vec()).unwrap())
                .filter(|s| s.starts_with('v'))
                .filter_map(|s| semver::Version::parse(&s[1..]).ok())
                .filter(|v| version_req.matches(v))
                .max()
                .unwrap();
            println!(
                "Highest matching git tag for version requirement '{}': v{}",
                version_req_str, version,
            );
            format!("v{}", version)
        };
        println!("Using rev: {}", rev);

        let shell = std::env::var("SHELL").unwrap_or("/bin/bash".to_string());
        println!("Using shell: {}", shell);

        #[cfg(feature = "testnet")]
        let default_features = "full,feat-ibc,testnet";
        #[cfg(not(feature = "testnet"))]
        let default_features = "full,feat-ibc";
        let cargo_features =
            std::env::var("NOMIC_LEGACY_FEATURES").unwrap_or(default_features.to_string());

        // Run build.sh in a scrubbed environment, forwarding only the
        // variables the legacy build needs (plus the rev/features below).
        let forwarded_envvars = [
            "OUT_DIR",
            "PATH",
            "NOMIC_CLEANUP_LEGACY_BUILD",
            "ROCKSDB_LIB_DIR",
            "ROCKSDB_STATIC",
        ];
        let mut cmd = std::process::Command::new(shell);
        cmd.env_clear();
        for var in forwarded_envvars {
            if let Ok(val) = std::env::var(var) {
                cmd.env(var, val);
            }
        }
        let res = cmd
            .env("NOMIC_LEGACY_REV", rev)
            .env("CARGO_FEATURES", cargo_features)
            .args(["build.sh"])
            .spawn()
            .unwrap()
            .wait_with_output()
            .unwrap();
        assert!(res.status.success());
    }
}
================================================ FILE: build.sh ================================================
#!/bin/bash
set -e

# Builds the legacy nomic binary at rev $NOMIC_LEGACY_REV into $OUT_DIR,
# skipping the build when the artifact already exists.
BUILD_DIR=$OUT_DIR/nomic
NOMIC_LEGACY_PATH=$OUT_DIR/nomic-$NOMIC_LEGACY_REV

if [ ! -f "$NOMIC_LEGACY_PATH" ]; then
    echo "Building legacy nomic at $NOMIC_LEGACY_PATH..."
    # Clone the repo on first use; subsequent runs reuse the checkout.
    if [ ! -d "$BUILD_DIR" ]; then
        git clone https://github.com/nomic-io/nomic.git $BUILD_DIR
    fi
    cd $BUILD_DIR
    # Reset any local changes, sync main, then check out the pinned rev.
    git checkout .
    git checkout main
    git pull
    git checkout $NOMIC_LEGACY_REV
    git fetch
    rustc --version
    echo "Building with features: $CARGO_FEATURES"
    cargo build --release --no-default-features --features $CARGO_FEATURES
    cp $BUILD_DIR/target/release/nomic $NOMIC_LEGACY_PATH
else
    echo "Skipping legacy nomic binary build (already exists at $NOMIC_LEGACY_PATH)"
fi

# Optionally delete the clone (set NOMIC_CLEANUP_LEGACY_BUILD) to save space.
if [[ !
-z "${NOMIC_CLEANUP_LEGACY_BUILD}" ]]; then rm -rf $BUILD_DIR fi echo "cargo:rustc-env=NOMIC_LEGACY_BUILD_PATH=$NOMIC_LEGACY_PATH" echo "cargo:rustc-env=NOMIC_LEGACY_BUILD_VERSION=$($NOMIC_LEGACY_PATH --version)" ================================================ FILE: genesis/stakenet-2.json ================================================ { "genesis_time": "2022-03-31T16:00:00Z", "chain_id": "nomic-stakenet-2", "initial_height": "0", "consensus_params": { "block": { "max_bytes": "22020096", "max_gas": "-1", "time_iota_ms": "1000" }, "evidence": { "max_age_num_blocks": "100000", "max_age_duration": "172800000000000", "max_bytes": "1048576" }, "validator": { "pub_key_types": ["ed25519"] }, "version": {} }, "validators": [], "app_hash": "" } ================================================ FILE: genesis/stakenet-3.json ================================================ { "genesis_time": "2022-07-04T00:00:00Z", "chain_id": "nomic-stakenet-3", "initial_height": "0", "consensus_params": { "block": { "max_bytes": "22020096", "max_gas": "-1", "time_iota_ms": "1000" }, "evidence": { "max_age_num_blocks": "100000", "max_age_duration": "172800000000000", "max_bytes": "1048576" }, "validator": { "pub_key_types": ["ed25519"] }, "version": {} }, "validators": [], "app_hash": "" } ================================================ FILE: genesis/testnet-4.json ================================================ { "genesis_time": "2022-06-22T00:00:00Z", "chain_id": "nomic-testnet-4", "initial_height": "1", "consensus_params": { "block": { "max_bytes": "22020096", "max_gas": "-1", "time_iota_ms": "1000" }, "evidence": { "max_age_num_blocks": "100000", "max_age_duration": "172800000000000", "max_bytes": "1048576" }, "validator": { "pub_key_types": [ "ed25519" ] }, "version": {} }, "app_hash": "" } ================================================ FILE: genesis/testnet-4d.json ================================================ { "genesis_time": "2022-10-05T00:00:00Z", "chain_id": 
"nomic-testnet-4d", "initial_height": "0", "consensus_params": { "block": { "max_bytes": "22020096", "max_gas": "-1", "time_iota_ms": "1000" }, "evidence": { "max_age_num_blocks": "100000", "max_age_duration": "172800000000000", "max_bytes": "1048576" }, "validator": { "pub_key_types": ["ed25519"] }, "version": {} }, "validators": [], "app_hash": "" } ================================================ FILE: networks/stakenet.toml ================================================ state_sync_rpc = [ "http://161.35.51.124:26667", "http://161.35.51.124:26667" ] tendermint_flags = [ "--p2p.seeds", """ 238120dfe716082754048057c1fdc3d6f09609b5@161.35.51.124:26656 """, ] btc_relayer = [ "https://relayer.nomic.mappum.io:8443" ] legacy_version = "8.0.x" genesis = """ { "genesis_time": "2022-07-04T00:00:00Z", "chain_id": "nomic-stakenet-3", "initial_height": "0", "consensus_params": { "block": { "max_bytes": "22020096", "max_gas": "-1", "time_iota_ms": "1000" }, "evidence": { "max_age_num_blocks": "100000", "max_age_duration": "172800000000000", "max_bytes": "1048576" }, "validator": { "pub_key_types": ["ed25519"] }, "version": {} }, "validators": [], "app_hash": "" } """ ================================================ FILE: networks/testnet.toml ================================================ state_sync_rpc = [ "http://147.182.171.216:26657", "http://147.182.171.216:26657", ] tendermint_flags = ["--p2p.seeds", """ a07d56aa65e395c332a7bf226ec4e2f844519ffa@147.182.171.216:26656,\ """] btc_relayer = ["https://relayer.nomic-testnet.mappum.io:8443"] legacy_version = "9.1.x" genesis = """ { "app_hash": "", "chain_id": "nomic-testnet-6", "consensus_params": { "block": { "max_bytes": "22020096", "max_gas": "-1", "time_iota_ms": "1000" }, "evidence": { "max_age_duration": "172800000000000", "max_age_num_blocks": "100000", "max_bytes": "1048576" }, "validator": { "pub_key_types": [ "ed25519" ] }, "version": {} }, "genesis_time": "2024-09-12T01:27:04.17850332Z", "initial_height": "0", 
"validators": [ { "address": "563EAA34B8A607C49F0F89008A9542CD0F06D91C", "name": "", "power": "10", "pub_key": { "type": "tendermint/PubKeyEd25519", "value": "H/uHaF1ZUTHzLRSmOY8g87Sgpmuh/Hz6Wdxpn09WFjo=" } } ] } """ ================================================ FILE: rest/Cargo.toml ================================================ [package] name = "nomic-rest" version = "0.1.0" edition = "2021" # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [dependencies] rocket = { version = "0.5.0-rc.1", features = ["json", "tls"] } nomic = { path = "..", default-features = true, features = [ "full", "testnet", ] } hex = "0.4.3" tendermint-rpc = { version = "=0.23.7", features = ["http-client"] } tendermint = "=0.23.7" tendermint-proto = "=0.23.9" ibc = { version = "0.54.0", features = ["borsh"] } ibc-proto = { version = "0.47.0", default-features = false, features = [ "std", "borsh", "serde", ] } base64 = "0.13.0" serde = "1.0.136" serde_json = "1.0.78" lazy_static = "1.4.0" tokio = "1.19.2" chrono = { version = "0.4.31", features = ["serde"] } sha2 = "0.10.6" bech32 = { version = "0.9.1" } ================================================ FILE: rest/src/main.rs ================================================ #[macro_use] extern crate rocket; use chrono::{TimeZone, Utc}; use nomic::{ app::{InnerApp, Nom}, bitcoin::Nbtc, orga::{ client::{wallet::Unsigned, AppClient}, coins::{Address, Amount, Decimal, DelegationInfo, Symbol, ValidatorQueryInfo}, encoding::EofTerminatedString, tendermint::client::HttpClient, }, utils::DeclareInfo, }; use rocket::response::status::BadRequest; use rocket::serde::json::{json, Value}; use std::collections::HashMap; use std::str::FromStr; use std::sync::Arc; use tokio::sync::RwLock; use ibc::clients::tendermint::types::ClientState; use ibc::core::host::types::identifiers::ConnectionId as IbcConnectionId; use ibc_proto::google::protobuf::Any; use 
ibc_proto::ibc::core::client::v1::IdentifiedClientState; use ibc_proto::ibc::core::connection::v1::ConnectionEnd as RawConnectionEnd; use ibc_proto::ibc::lightclients::tendermint::v1::ClientState as RawTmClientState; use bech32::ToBase32; use sha2::Digest; use tendermint_proto::types::CommitSig as RawCommitSig; use tendermint_rpc as tm; use tm::Client as _; lazy_static::lazy_static! { static ref QUERY_CACHE: Arc>> = Arc::new(RwLock::new(HashMap::new())); } fn app_host() -> &'static str { "http://localhost:26657" } fn app_client() -> AppClient { nomic::app_client(app_host()) } // DONE /cosmos/bank/v1beta1/balances/{address} // DONE /cosmos/distribution/v1beta1/delegators/{address}/rewards // TODO /cosmos/staking/v1beta1/delegations/{address} // DONE /cosmos/staking/v1beta1/validators // DONE /cosmos/staking/v1beta1/delegators/{address}/unbonding_delegations // /cosmos/staking/v1beta1/validators/{address} // /cosmos/gov/v1beta1/proposals // /cosmos/gov/v1beta1/proposals/{proposalId} // /cosmos/gov/v1beta1/proposals/{proposalId}/votes/{address} // /cosmos/gov/v1beta1/proposals/{proposalId}/tally // /ibc/apps/transfer/v1/denom_traces/{hash} // /ibc/core/channel/v1/channels/{channelId}/ports/{portId}/client_state #[get("/cosmos/staking/v1beta1/validators?")] async fn validators(status: Option) -> Value { let all_validators: Vec = app_client() .query(|app: InnerApp| app.staking.all_validators()) .await .unwrap(); let all_keys: Vec<_> = app_client() .query(|app: InnerApp| app.staking.consensus_keys()) .await .unwrap(); let mut validators = vec![]; for validator in all_validators { let validator_status = if validator.unbonding { "BOND_STATUS_UNBONDING" } else if validator.in_active_set { "BOND_STATUS_BONDED" } else { "BOND_STATUS_UNBONDED" }; if !status.is_none() && status != Some(validator_status.to_owned()) { continue; } let cons_key = all_keys .iter() .find(|entry| (**entry).0 == validator.address.into()) .map(|entry| (*entry).1) .unwrap(); let info: DeclareInfo = 
serde_json::from_str(String::from_utf8(validator.info.to_vec()).unwrap().as_str()) .unwrap_or(DeclareInfo { details: "".to_string(), identity: "".to_string(), moniker: "".to_string(), website: "".to_string(), }); validators.push(json!( { "operator_address": validator.address.to_string(), "consensus_pubkey": { "@type": "/cosmos.crypto.ed25519.PubKey", "key": base64::encode(cons_key) }, "jailed": validator.jailed, "status": validator_status, "tokens": validator.amount_staked.to_string(), "delegator_shares": validator.amount_staked.to_string(), "description": { "moniker": info.moniker, "identity": info.identity, "website": info.website, "security_contact": "", "details": info.details }, "unbonding_height": "0", // TODO "unbonding_time": "1970-01-01T00:00:00Z", // TODO "commission": { "commission_rates": { "rate": validator.commission.rate, "max_rate": validator.commission.max, "max_change_rate": validator.commission.max_change }, "update_time": "2023-08-04T06:00:00.000000000Z" // TODO }, "min_self_delegation": validator.min_self_delegation.to_string() })); } json!({ "validators": validators, "pagination": { "next_key": null, "total": validators.len().to_string() } }) } #[get("/cosmos/staking/v1beta1/validators/
")] async fn validator(address: &str) -> Value { let address: Address = address.parse().unwrap(); // TODO: cache let all_validators: Vec = app_client() .query(|app: InnerApp| app.staking.all_validators()) .await .unwrap(); let mut validators = vec![]; for validator in all_validators { if validator.address != address.into() { continue; } let cons_key = app_client() .query(|app: InnerApp| app.staking.consensus_key(validator.address.into())) .await .unwrap(); let status = if validator.unbonding { "BOND_STATUS_UNBONDING" } else if validator.in_active_set { "BOND_STATUS_BONDED" } else { "BOND_STATUS_UNBONDED" }; let info: DeclareInfo = serde_json::from_str(String::from_utf8(validator.info.to_vec()).unwrap().as_str()) .unwrap_or(DeclareInfo { details: "".to_string(), identity: "".to_string(), moniker: "".to_string(), website: "".to_string(), }); validators.push(json!( { "operator_address": validator.address.to_string(), "consensus_pubkey": { "@type": "/cosmos.crypto.ed25519.PubKey", "key": base64::encode(cons_key) }, "jailed": validator.jailed, "status": status, "tokens": validator.amount_staked.to_string(), "delegator_shares": validator.amount_staked.to_string(), "description": { "moniker": info.moniker, "identity": info.identity, "website": info.website, "security_contact": "", "details": info.details }, "unbonding_height": "0", // TODO "unbonding_time": "1970-01-01T00:00:00Z", // TODO "commission": { "commission_rates": { "rate": validator.commission.rate, "max_rate": validator.commission.max, "max_change_rate": validator.commission.max_change }, "update_time": "2023-08-04T06:00:00.000000000Z" // TODO }, "min_self_delegation": validator.min_self_delegation.to_string() })); } let validator = validators.first().unwrap(); json!({ "validator": validator, }) } #[get("/cosmos/bank/v1beta1/balances/
")] async fn bank_balances(address: &str) -> Result> { let address: Address = address.parse().unwrap(); let nom_balance: u64 = app_client() .query(|app| app.accounts.balance(address)) .await .map_err(|e| BadRequest(format!("{:?}", e)))? .into(); let nbtc_balance: u64 = app_client() .query(|app| app.bitcoin.accounts.balance(address)) .await .map_err(|e| BadRequest(format!("{:?}", e)))? .into(); Ok(json!({ "balances": [ { "denom": "unom", "amount": nom_balance.to_string(), }, { "denom": "usat", "amount": nbtc_balance.to_string(), } ], "pagination": { "next_key": null, "total": "2" } })) } #[get("/bank/balances/
")] async fn bank_balances_2(address: &str) -> Result> { let address: Address = address.parse().unwrap(); let balance: u64 = app_client() .query(|app| app.accounts.balance(address)) .await .map_err(|e| BadRequest(format!("{:?}", e)))? .into(); Ok(json!({ "height": "0", "result": [ { "denom": "unom", "amount": balance.to_string(), } ] })) } #[get("/auth/accounts/")] async fn auth_accounts(addr_str: &str) -> Result> { let address: Address = addr_str.parse().unwrap(); let balance: u64 = app_client() .query(|app| app.accounts.balance(address)) .await .map_err(|e| BadRequest(format!("{:?}", e)))? .into(); let mut nonce: u64 = app_client() .query_root(|app| app.inner.inner.borrow().inner.inner.inner.nonce(address)) .await .map_err(|e| BadRequest(format!("{:?}", e)))?; nonce += 1; Ok(json!({ "height": "0", "result": { "type": "cosmos-sdk/BaseAccount", "value": { "address": addr_str, "coins": [ { "denom": "unom", "amount": balance.to_string(), } ], "sequence": nonce.to_string() } } })) } #[get("/cosmos/auth/v1beta1/accounts/")] async fn auth_accounts2(addr_str: &str) -> Result> { let address: Address = addr_str.parse().unwrap(); let _balance: u64 = app_client() .query(|app| app.accounts.balance(address)) .await .map_err(|e| BadRequest(format!("{:?}", e)))? 
.into(); let mut nonce: u64 = app_client() .query_root(|app| app.inner.inner.borrow().inner.inner.inner.nonce(address)) .await .map_err(|e| BadRequest(format!("{:?}", e)))?; nonce += 1; Ok(json!({ "account": { "@type": "/cosmos.auth.v1beta1.BaseAccount", "address": addr_str, "pub_key": { "@type": "/cosmos.crypto.secp256k1.PubKey", "key": "Atl2HeBoLMorGAUPTH0hXk2Sx72reuw8x2V1puqwV+jN" }, "account_number": "0", "sequence": nonce.to_string() } })) } use serde::{Deserialize, Serialize}; #[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq)] struct TxRequest { tx: serde_json::Value, mode: String, } #[post("/txs", data = "")] async fn txs(tx: &str) -> Result> { dbg!(tx); let client = tm::HttpClient::new(app_host()).unwrap(); let tx_bytes = if let Some('{') = tx.chars().next() { let tx: TxRequest = serde_json::from_str(tx).unwrap(); serde_json::to_vec(&tx.tx).unwrap() } else { base64::decode(tx).map_err(|e| BadRequest(format!("{:?}", e)))? }; let res = client .broadcast_tx_commit(tx_bytes.into()) .await .map_err(|e| BadRequest(format!("{:?}", e)))?; let tx_response = if res.check_tx.code.is_err() { &res.check_tx } else { &res.deliver_tx }; Ok(json!({ "height": "0", "txhash": res.hash, "codespace": tx_response.codespace, "code": tx_response.code, "data": "", "raw_log": "[]", "logs": [ tx_response.log ], "info": tx_response.info, "gas_wanted": tx_response.gas_wanted, "gas_used": tx_response.gas_used, "tx": null, "timestamp": "" })) } #[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq)] struct TxRequest2 { tx_bytes: String, mode: String, } #[post("/cosmos/tx/v1beta1/txs", data = "")] async fn txs2(tx: &str) -> Result> { dbg!(tx); let client = tm::HttpClient::new(app_host()).unwrap(); let tx_bytes = if let Some('{') = tx.chars().next() { let tx: TxRequest2 = serde_json::from_str(tx).unwrap(); base64::decode(tx.tx_bytes.as_str()).map_err(|e| BadRequest(format!("{:?}", e)))? } else { base64::decode(tx).map_err(|e| BadRequest(format!("{:?}", e)))? 
};
    let res = client
        .broadcast_tx_commit(tx_bytes.into())
        .await
        .map_err(|e| BadRequest(format!("{:?}", e)))?;
    // If CheckTx already failed, the DeliverTx result is meaningless —
    // surface the CheckTx response instead.
    let tx_response = if res.check_tx.code.is_err() {
        &res.check_tx
    } else {
        &res.deliver_tx
    };
    Ok(json!({
        "height": "0",
        "txhash": res.hash,
        "codespace": tx_response.codespace,
        "code": tx_response.code,
        "data": "",
        "raw_log": "[]",
        "logs": [ tx_response.log ],
        "info": tx_response.info,
        "gas_wanted": tx_response.gas_wanted,
        "gas_used": tx_response.gas_used,
        "tx": null,
        "timestamp": ""
    }))
}

/// Seconds since the Unix epoch.
fn time_now() -> u64 {
    std::time::SystemTime::now()
        .duration_since(std::time::SystemTime::UNIX_EPOCH)
        .unwrap()
        .as_secs()
}

/// Raw ABCI query passthrough. `query` is the hex-encoded query bytes;
/// the response is base64( big-endian response height (4 bytes) || value ).
/// Successful responses are recorded in QUERY_CACHE (currently write-only).
#[get("/query/<query>?<height>")]
async fn query(query: &str, height: Option<u32>) -> Result<String, BadRequest<String>> {
    let cache = QUERY_CACHE.clone();
    let lock = cache.read_owned().await;
    let cached_res = lock.get(query).cloned();
    let cache_hit = cached_res.is_some();
    drop(lock);
    dbg!((&query, cache_hit));
    let now = time_now();

    let client = tm::HttpClient::new(app_host()).unwrap();

    let query_bytes = hex::decode(query).map_err(|e| BadRequest(format!("{:?}", e)))?;
    let res = client
        .abci_query(None, query_bytes, height.map(Into::into), true)
        .await
        .map_err(|e| BadRequest(format!("{:?}", e)))?;

    let res_height: u64 = res.height.into();
    let res_height: u32 = res_height.try_into().unwrap();
    if let tendermint::abci::Code::Err(code) = res.code {
        let msg = format!("code {}: {}", code, res.log);
        return Err(BadRequest(msg));
    }

    // Prefix the value with the height it was proven at so clients can verify.
    let res_b64 = base64::encode([res_height.to_be_bytes().to_vec(), res.value].concat());

    let cache = QUERY_CACHE.clone();
    let mut lock = cache.write_owned().await;
    lock.insert(query.to_string(), (now, res_b64.clone()));
    drop(lock);

    Ok(res_b64)
}

#[get("/cosmos/staking/v1beta1/delegations/<address>
")] async fn staking_delegators_delegations(address: &str) -> Value { let address: Address = address.parse().unwrap(); let delegations = app_client() .query(|app| app.staking.delegations(address)) .await .unwrap(); let mut entries = vec![]; for (validator_address, delegation) in delegations { if delegation.staked == 0 { continue; } entries.push(json!({ "delegation": { "delegator_address": address.to_string(), "validator_address": validator_address.to_string(), "shares": delegation.staked.to_string(), }, "balance": { "denom": "unom", "amount": delegation.staked.to_string(), }, })) } json!({ "delegation_responses": entries, "pagination": { "next_key": null, "total": entries.len().to_string() } }) } #[get("/staking/delegators/
/delegations")] async fn staking_delegators_delegations_2(address: &str) -> Result> { let address: Address = address.parse().unwrap(); let delegations = app_client() .query(|app| app.staking.delegations(address)) .await .map_err(|e| BadRequest(format!("{:?}", e)))?; let total_staked: u64 = delegations .iter() .map(|(_, d)| -> u64 { d.staked.into() }) .sum(); Ok(json!({ "height": "0", "result": [ { "delegator_address": "", "validator_address": "", "shares": "0", "balance": { "denom": "NOM", "amount": total_staked.to_string(), } } ] })) } #[get("/cosmos/staking/v1beta1/delegators/
/unbonding_delegations")] async fn staking_delegators_unbonding_delegations(address: &str) -> Value { use chrono::{TimeZone, Utc}; let address: Address = address.parse().unwrap(); let delegations: Vec<(Address, DelegationInfo)> = app_client() .query(|app: InnerApp| app.staking.delegations(address)) .await .unwrap(); let mut unbonds = vec![]; for (val_address, delegation) in delegations { if delegation.unbonding.len() == 0 { continue; } let mut entries = vec![]; for unbond in delegation.unbonding { let t = Utc.timestamp_opt(unbond.start_seconds, 0).unwrap(); entries.push(json!({ "creation_height": "0", // TODO "completion_time": t, // TODO "initial_balance": unbond.amount.to_string(), "balance": unbond.amount.to_string() })) } unbonds.push(json!({ "delegator_address": address, "validator_address": val_address, "entries": entries })) } json!({ "unbonding_responses": unbonds, "pagination": { "next_key": null, "total": unbonds.len().to_string() } }) } #[get("/staking/delegators/<_address>/unbonding_delegations")] fn staking_delegators_unbonding_delegations_2(_address: &str) -> Value { json!({ "height": "0", "result": [] }) } #[get("/cosmos/staking/v1beta1/validators/
/delegations")] async fn staking_validators_delegations(address: &str) -> Value { let validator_address: Address = address.parse().unwrap(); let delegations: Vec<(Address, DelegationInfo)> = app_client() .query(|app: InnerApp| app.staking.validator_delegations(validator_address)) .await .unwrap(); let mut entries = vec![]; for (delegator_address, delegation) in delegations { if delegation.staked == 0 { continue; } entries.push(json!({ "delegation": { "delegator_address": delegator_address.to_string(), "validator_address": validator_address.to_string(), "shares": delegation.staked.to_string(), }, "balance": { "denom": "unom", "amount": delegation.staked.to_string(), }, })) } json!({ "delegation_responses": entries, "pagination": { "next_key": null, "total": entries.len().to_string() } }) } #[get("/cosmos/staking/v1beta1/validators//delegations/")] async fn staking_validator_single_delegation( validator_address: &str, delegator_address: &str, ) -> Value { let delegator_address: Address = delegator_address.parse().unwrap(); let validator_address: Address = validator_address.parse().unwrap(); let delegations: Vec<(Address, DelegationInfo)> = app_client() .query(|app: InnerApp| app.staking.delegations(delegator_address)) .await .unwrap(); let delegation: &DelegationInfo = delegations .iter() .find(|(validator, _delegation)| *validator == validator_address) .map(|(_validator, delegation)| delegation) .unwrap(); json!({ "delegation_response": { "delegation": { "delegator_address": delegator_address, "validator_address": validator_address, "shares": delegation.staked.to_string(), }, "balance": { "denom": "unom", "amount": delegation.staked.to_string(), } } }) } #[get("/cosmos/staking/v1beta1/validators/
/unbonding_delegations")] async fn staking_validators_unbonding_delegations(address: &str) -> Value { let validator_address: Address = address.parse().unwrap(); let delegations: Vec<(Address, DelegationInfo)> = app_client() .query(|app: InnerApp| app.staking.validator_delegations(validator_address)) .await .unwrap(); let mut unbonds = vec![]; for (delegator_address, delegation) in delegations { if delegation.unbonding.len() == 0 { continue; } let mut entries = vec![]; for unbond in delegation.unbonding { let t = Utc.timestamp_opt(unbond.start_seconds, 0).unwrap(); entries.push(json!({ "creation_height": "0", // TODO "completion_time": t, // TODO "initial_balance": unbond.amount.to_string(), "balance": unbond.amount.to_string() })) } unbonds.push(json!({ "delegator_address": delegator_address, "validator_address": validator_address, "entries": entries })) } json!({ "unbonding_responses": unbonds, "pagination": { "next_key": null, "total": unbonds.len().to_string() } }) } #[get("/cosmos/distribution/v1beta1/delegators/
/rewards")] async fn distribution_delegators_rewards(address: &str) -> Value { let address: Address = address.parse().unwrap(); let delegations: Vec<(Address, DelegationInfo)> = app_client() .query(|app: InnerApp| app.staking.delegations(address)) .await .unwrap(); let mut rewards = vec![]; let mut total_nom = 0; let mut total_nbtc = 0; for (validator, delegation) in delegations { let mut reward = vec![]; let liquid: u64 = delegation .liquid .iter() .map(|(_, amount)| -> u64 { (*amount).into() }) .sum(); if liquid == 0 { continue; } let liquid_nom: u64 = delegation .liquid .iter() .find(|(denom, _)| *denom == Nom::INDEX) .unwrap_or(&(0, 0.into())) .1 .into(); total_nom += liquid_nom; reward.push(json!({ "denom": "unom", "amount": liquid_nom.to_string(), })); let liquid_nbtc: u64 = delegation .liquid .iter() .find(|(denom, _)| *denom == Nbtc::INDEX) .unwrap_or(&(0, 0.into())) .1 .into(); reward.push(json!({ "denom": "usat", "amount": liquid_nbtc.to_string(), })); total_nbtc += liquid_nbtc; rewards.push(json!({ "validator_address": validator.to_string(), "reward": reward, })); } json!({ "rewards": rewards, "total": [ { "denom": "unom", "amount": total_nom.to_string(), }, { "denom": "usat", "amount": total_nbtc.to_string(), } ] }) } #[get("/cosmos/distribution/v1beta1/validators/
/commission")] async fn distribution_validator_commission(address: &str) -> Value { json!({ "commission": { "commission": [] } }) } #[get("/cosmos/distribution/v1beta1/delegators/
/rewards/")] async fn distribution_delegators_rewards_for_validator( address: &str, validator_address: &str, ) -> Value { let address: Address = address.parse().unwrap(); let validator_address: Address = validator_address.parse().unwrap(); let delegations: Vec<(Address, DelegationInfo)> = app_client() .query(|app: InnerApp| app.staking.delegations(address)) .await .unwrap(); let delegation: &DelegationInfo = delegations .iter() .find(|(validator, _delegation)| *validator == validator_address) .map(|(_validator, delegation)| delegation) .unwrap(); let mut rewards = vec![]; let liquid_nom: u64 = delegation .liquid .iter() .find(|(denom, _)| *denom == Nom::INDEX) .unwrap_or(&(0, 0.into())) .1 .into(); rewards.push(json!({ "denom": "unom", "amount": liquid_nom.to_string(), })); let liquid_nbtc: u64 = delegation .liquid .iter() .find(|(denom, _)| *denom == Nbtc::INDEX) .unwrap_or(&(0, 0.into())) .1 .into(); rewards.push(json!({ "denom": "usat", "amount": liquid_nbtc.to_string(), })); json!({ "rewards": rewards }) } #[get("/cosmos/mint/v1beta1/inflation")] async fn minting_inflation() -> Result> { let validators = app_client() .query(|app| app.staking.all_validators()) .await .map_err(|e| BadRequest(format!("{:?}", e)))?; let total_staked: u64 = validators .iter() .map(|v| -> u64 { v.amount_staked.into() }) .sum(); let total_staked = Amount::from(total_staked + 1); let yearly_inflation = Decimal::from(64_682_541_340_000); let apr = (yearly_inflation / Decimal::from(4) / Decimal::from(total_staked)) .result() .map_err(|e| BadRequest(format!("{:?}", e)))?; Ok(json!({ "inflation": apr.to_string() })) } #[get("/minting/inflation")] async fn minting_inflation_2() -> Result> { let validators = app_client() .query(|app| app.staking.all_validators()) .await .map_err(|e| BadRequest(format!("{:?}", e)))?; let total_staked: u64 = validators .iter() .map(|v| -> u64 { v.amount_staked.into() }) .sum(); let total_staked = Amount::from(total_staked + 1); let yearly_inflation = 
Decimal::from(64_682_541_340_000); let apr = (yearly_inflation / Decimal::from(4) / Decimal::from(total_staked)) .result() .map_err(|e| BadRequest(format!("{:?}", e)))?; Ok(json!({ "height": "0", "result": apr.to_string() })) } #[get("/bank/total/")] fn bank_total(denom: &str) -> Value { json!({ "height": "0", "result": "0" }) } #[get("/cosmos/staking/v1beta1/pool")] async fn staking_pool() -> Value { let validators = app_client() .query(|app| app.staking.all_validators()) .await .unwrap(); let total_bonded: u64 = validators .iter() .filter(|v| v.in_active_set) .map(|v| -> u64 { v.amount_staked.into() }) .sum(); let total_not_bonded: u64 = validators .iter() .filter(|v| !v.in_active_set) .map(|v| -> u64 { v.amount_staked.into() }) .sum(); json!({ "pool": { "bonded_tokens": total_bonded.to_string(), "not_bonded_tokens": total_not_bonded.to_string() } }) } #[get("/cosmos/bank/v1beta1/supply/unom")] async fn bank_supply_unom() -> Value { let supply = app_client().query(|app| app.total_supply()).await.unwrap(); json!({ "amount": { "denom": "unom", "amount": supply.to_string(), } }) } #[get("/cosmos/bank/v1beta1/supply")] async fn bank_supply() -> Value { let supply = app_client().query(|app| app.total_supply()).await.unwrap(); json!({ "supply": [ { "denom": "unom", "amount": supply.to_string() } ], "pagination": { "next_key": null, "total": "1", } }) } #[get("/staking/pool")] fn staking_pool_2() -> Value { json!({ "height": "0", "result": { "loose_tokens": "0", "bonded_tokens": "0", "inflation_last_time": "0", "inflation": "1", "date_last_commission_reset": "0", "prev_bonded_shares": "0" } }) } #[get("/ibc/apps/transfer/v1/params")] fn ibc_apps_transfer_params() -> Value { json!({ "params": { "send_enabled": false, "receive_enabled": false } }) } #[get("/ibc/applications/transfer/v1/params")] fn ibc_applications_transfer_params() -> Value { json!({ "params": { "send_enabled": false, "receive_enabled": false } }) } #[get("/cosmos/staking/v1beta1/params")] async fn 
staking_params() -> Value { let (unbonding_seconds, max_validators) = app_client() .query(|app| Ok((app.staking.unbonding_seconds, app.staking.max_validators))) .await .unwrap(); json!({ "params": { "unbonding_time": unbonding_seconds.to_string() + "s", "max_validators": max_validators, "max_entries": 7, "historical_entries": 10000, "bond_denom": "unom" } }) } #[get("/cosmos/slashing/v1beta1/params")] async fn slashing_params() -> Value { let ( max_offline_blocks, slash_fraction_double_sign, slash_fraction_downtime, downtime_jail_seconds, ) = app_client() .query(|app| { Ok(( app.staking.max_offline_blocks, app.staking.slash_fraction_double_sign, app.staking.slash_fraction_downtime, app.staking.downtime_jail_seconds, )) }) .await .unwrap(); json!({ "params": { "signed_blocks_window": max_offline_blocks.to_string(), "min_signed_per_window": "0.0", "downtime_jail_duration": downtime_jail_seconds.to_string() + "s", "slash_fraction_double_sign": slash_fraction_double_sign.to_string(), "slash_fraction_downtime": slash_fraction_downtime.to_string() } }) } async fn get_signing_infos() -> Vec { let client = tm::HttpClient::new(app_host()).unwrap(); let all_validators: Vec = app_client() .query(|app: InnerApp| app.staking.all_validators()) .await .unwrap(); let all_keys: Vec<_> = app_client() .query(|app: InnerApp| app.staking.consensus_keys()) .await .unwrap(); let last_signed_blocks = app_client() .query(|app: InnerApp| app.staking.last_signed_blocks()) .await .unwrap(); let latest_block_response = client.latest_block().await.unwrap(); let latest_block: u64 = latest_block_response.block.header.height.value(); let mut signing_infos = vec![]; for validator in all_validators { let cons_key = all_keys .iter() .find(|entry| (**entry).0 == validator.address.into()) .map(|entry| (*entry).1) .unwrap(); let mut hasher = sha2::Sha256::new(); hasher.update(cons_key); let hash = hasher.finalize().to_vec()[..20].to_vec(); let address = bech32::encode( "nomicvalcons", 
hash.to_vec().to_base32(), bech32::Variant::Bech32, ) .unwrap(); let last_signed_block: u64 = last_signed_blocks .iter() .find(|entry| (**entry).0 == validator.address.into()) .map(|entry| (*entry).1) .unwrap() .unwrap_or(latest_block); let skipped_blocks: u64 = latest_block - last_signed_block; signing_infos.push(json!({ "address": address, "start_height": "0", // TODO: fix, "index_offset": "0", // TODO: fix, "jailed_until": Utc.timestamp_opt(validator.jailed_until.unwrap_or(0), 0) .unwrap() .format("%Y-%m-%dT%H:%M:%SZ") .to_string(), "tombstoned": validator.tombstoned, "missed_blocks_counter": skipped_blocks.to_string(), })) } signing_infos } #[get("/cosmos/slashing/v1beta1/signing_infos")] async fn signing_infos() -> Value { let signing_infos: Vec<_> = get_signing_infos().await; json!({ "info": signing_infos, "pagination": { "next_key": null, "total": signing_infos.len().to_string(), } }) } #[get("/cosmos/slashing/v1beta1/signing_infos/")] async fn signing_info(cons_addr: &str) -> Value { let signing_infos: Vec<_> = get_signing_infos().await; let signing_info = signing_infos .iter() .find(|value| (**value).get("address").unwrap() == cons_addr) .unwrap(); json!({ "val_signing_info": signing_info }) } fn parse_block(res: tendermint_rpc::endpoint::block::Response) -> Value { let last_commit = res.block.last_commit.unwrap(); let signatures: Vec<_> = last_commit .signatures .iter() .map(|signature| -> Value { let signature_raw = RawCommitSig::from(signature.clone()); json!({ "validator_address": base64::encode(signature_raw.validator_address), "block_id_flag": match signature_raw.block_id_flag { 1 => "BLOCK_ID_FLAG_ABSENT", 2 => "BLOCK_ID_FLAG_COMMIT", 3 => "BLOCK_ID_FLAG_NIL", i32::MIN..=0_i32 | 4_i32..=i32::MAX => "BLOCK_ID_FLAG_UNKNOWN" }, "timestamp": signature_raw.timestamp, "signature": base64::encode(signature_raw.signature), }) }) .collect(); json!({ "block_id": res.block_id, "block": { "header": { "version": { "block": res.block.header.version.block, "app": 
res.block.header.version.block, }, "chain_id": res.block.header.chain_id, "height": res.block.header.height, "time": res.block.header.time, "last_block_id": res.block.header.last_block_id, "last_commit_hash": res.block.header.last_commit_hash.map(|hash| base64::encode(hash.as_bytes())), "data_hash": res.block.header.data_hash.map(|hash| base64::encode(hash.as_bytes())), "validators_hash": base64::encode(res.block.header.validators_hash.as_bytes()), "next_validators_hash": base64::encode(res.block.header.next_validators_hash.as_bytes()), "consensus_hash": base64::encode(res.block.header.consensus_hash.as_bytes()), "app_hash": base64::encode(res.block.header.app_hash.value()), "last_results_hash": res.block.header.last_results_hash.map(|hash| base64::encode(hash.as_bytes())), "evidence_hash": res.block.header.evidence_hash.map(|hash| base64::encode(hash.as_bytes())), "proposer_address": base64::encode(res.block.header.proposer_address), }, "data": res.block.data, "evidence": res.block.evidence, "last_commit": { "block_id": last_commit.block_id, "signatures": signatures } } }) } #[get("/cosmos/base/tendermint/v1beta1/blocks/latest")] async fn latest_block() -> Value { let client = tm::HttpClient::new(app_host()).unwrap(); let res = client.latest_block().await.unwrap(); parse_block(res) } #[get("/cosmos/base/tendermint/v1beta1/blocks/")] async fn block(height: u32) -> Value { let client = tm::HttpClient::new(app_host()).unwrap(); let res = client .block(tendermint::block::Height::from(height)) .await .unwrap(); parse_block(res) } fn parse_validator_set(res: tendermint_rpc::endpoint::validators::Response) -> Value { let validators: Vec<_> = res .validators .iter() .map(|validator| -> Value { json!({ "address": validator.address, "voting_power": i64::from(validator.power).to_string(), "proposer_priority": i64::from(validator.proposer_priority).to_string(), "pub_key": { "@type": "/cosmos.crypto.ed25519.PubKey", "key": 
base64::encode(validator.pub_key.ed25519().unwrap().to_bytes()), } }) }) .collect(); json!({ "block_height": res.block_height, "validators": validators, "pagination": { "next_key": null, "total": res.validators.len(), } }) } #[get("/cosmos/base/tendermint/v1beta1/validatorsets/latest")] async fn latest_validator_set() -> Value { let client = tm::HttpClient::new(app_host()).unwrap(); let block = client.latest_block().await.unwrap(); let res = client .validators(block.block.header.height, tendermint_rpc::Paging::All) .await .unwrap(); parse_validator_set(res) } #[get("/cosmos/base/tendermint/v1beta1/validatorsets/")] async fn validator_set(height: u32) -> Value { let client = tm::HttpClient::new(app_host()).unwrap(); let res = client .validators(height, tendermint_rpc::Paging::All) .await .unwrap(); parse_validator_set(res) } #[get("/cosmos/distribution/v1beta1/community_pool")] async fn community_pool() -> Value { let community_pool = app_client() .query(|app| Ok(app.community_pool.amount)) .await .unwrap(); json!({ "pool": [ { "denom": "unom", "amount": community_pool.to_string() } ] }) } #[get("/cosmos/gov/v1beta1/proposals")] fn proposals() -> Value { json!({ "proposals": [], "pagination": { "next_key": null, "total": 0 } }) } #[get("/ibc/core/connection/v1/connections//client_state")] #[allow(deprecated)] async fn ibc_connection_client_state(connection: &str) -> Value { let connection = app_client() .query(|app| { app.ibc.ctx.query_connection(EofTerminatedString( IbcConnectionId::from_str(connection).unwrap(), )) }) .await .unwrap() .unwrap(); let states: Vec = app_client() .query(|app| app.ibc.ctx.query_client_states()) .await .unwrap(); let state: &IdentifiedClientState = states .iter() .find(|state| state.client_id == connection.client_id().to_string()) .unwrap(); let state_as_any: Any = state.client_state.clone().unwrap(); let client_state_tmp: ClientState = ClientState::try_from(state_as_any).unwrap().to_owned(); let client_state = client_state_tmp.clone(); 
let raw_client_state: RawTmClientState = RawTmClientState::from(client_state_tmp); let proof_specs: Vec<_> = raw_client_state .proof_specs .iter() .map(|spec| { json!({ "inner_spec": spec.inner_spec.clone().map(|inner_spec| json!({ "child_order": inner_spec.child_order, "child_size": inner_spec.child_size, "min_prefix_length": inner_spec.child_size, "max_prefix_length": inner_spec.max_prefix_length, "empty_child": inner_spec.empty_child, "hash": inner_spec.hash })), "leaf_spec": spec.leaf_spec, }) }) .collect(); json!({ "identified_client_state": { "client_id": state.client_id, "client_state": { "@type": "/ibc.lightclients.tendermint.v1.ClientState", "chain_id": raw_client_state.chain_id, "trust_level": client_state.trust_level, "trusting_period": raw_client_state.trusting_period.map(|v| format!("{}s", v.seconds)), "unbonding_period": format!("{}s", client_state.unbonding_period.as_secs()), "max_clock_drift": raw_client_state.max_clock_drift.map(|v| format!("{}s", v.seconds)), "frozen_height": raw_client_state.frozen_height.map(|h| json!({ "revision_height": h.revision_height.to_string(), "revision_number": h.revision_number.to_string(), })), "latest_height": raw_client_state.latest_height.map(|h| json!({ "revision_height": h.revision_height.to_string(), "revision_number": h.revision_number.to_string(), })), "proof_specs": proof_specs, "upgrade_path": client_state.upgrade_path, "allow_update_after_expiry": raw_client_state.allow_update_after_expiry, "allow_update_after_misbehaviour": raw_client_state.allow_update_after_misbehaviour, } }, "proof": null, "proof_height": { "revision_number": "0", "revision_height": "0" } }) } #[get("/ibc/core/channel/v1/connections//channels")] async fn ibc_connection_channels(connection: &str) -> Value { let channels = app_client() .query(|app| { app.ibc.ctx.query_connection_channels(EofTerminatedString( IbcConnectionId::from_str(connection).unwrap(), )) }) .await .unwrap(); let json_channels: Vec<_> = channels .iter() .map(|channel| 
{ json!({ "state": match channel.state { 0 => "STATE_UNINITIALIZED_UNSPECIFIED", 1 => "STATE_INIT", 2 => "STATE_TRYOPEN", 3 => "STATE_OPEN", i32::MIN..=-1_i32 | 4_i32..=i32::MAX => "STATE_UNINITIALIZED_UNSPECIFIED" }, "ordering": match channel.ordering { 0 => "ORDER_NONE_UNSPECIFIED", 1 => "ORDER_UNORDERED", 2 => "ORDER_ORDERED", i32::MIN..=-1_i32 | 3_i32..=i32::MAX => "ORDER_NONE_UNSPECIFIED" }, "counterparty": channel.counterparty, "connection_hops": channel.connection_hops, "version": channel.version, "port_id": channel.port_id, "channel_id": channel.channel_id, }) }) .collect(); json!({ "channels": json_channels, "proof_height": { "revision_number": "0", "revision_height": "0" }, }) } #[get("/ibc/core/connection/v1/connections/")] async fn ibc_connection(connection: &str) -> Value { let connection = app_client() .query(|app| { app.ibc.ctx.query_connection(EofTerminatedString( IbcConnectionId::from_str(connection).unwrap(), )) }) .await .unwrap() .unwrap(); let raw_connection = RawConnectionEnd::from(connection); json!({ "connection": { "client_id": raw_connection.client_id, "versions": raw_connection.versions, "state": match raw_connection.state { 0 => "STATE_UNINITIALIZED_UNSPECIFIED", 1 => "STATE_INIT", 2 => "STATE_TRYOPEN", 3 => "STATE_OPEN", i32::MIN..=-1_i32 | 4_i32..=i32::MAX => "STATE_UNINITIALIZED_UNSPECIFIED" }, "counterparty": raw_connection.counterparty, "delay_period": raw_connection.delay_period, }, "proof_height": { "revision_number": "0", "revision_height": "0" }, }) } #[get("/ibc/core/connection/v1/connections")] async fn ibc_connections() -> Value { let connections = app_client() .query(|app| app.ibc.ctx.query_all_connections()) .await .unwrap(); json!({ "connections": connections, "pagination": { "next_key": null, "total": connections.len().to_string() }, "proof_height": { "revision_number": "0", "revision_height": "0" }, }) } use rocket::fairing::{Fairing, Info, Kind}; use rocket::http::Header; use rocket::{Request, Response}; pub struct 
CORS; #[rocket::async_trait] impl Fairing for CORS { fn info(&self) -> Info { Info { name: "Add CORS headers to responses", kind: Kind::Response, } } async fn on_response<'r>(&self, _request: &'r Request<'_>, response: &mut Response<'r>) { response.set_header(Header::new("Access-Control-Allow-Origin", "*")); response.set_header(Header::new( "Access-Control-Allow-Methods", "POST, GET, PATCH, OPTIONS", )); response.set_header(Header::new("Access-Control-Allow-Headers", "*")); response.set_header(Header::new("Access-Control-Allow-Credentials", "true")); } } #[launch] fn rocket() -> _ { rocket::build().attach(CORS).mount( "/", routes![ bank_balances, bank_balances_2, auth_accounts, auth_accounts2, txs, txs2, query, staking_delegators_delegations, staking_delegators_delegations_2, staking_delegators_unbonding_delegations, staking_delegators_unbonding_delegations_2, staking_validators_delegations, staking_validators_unbonding_delegations, staking_validator_single_delegation, distribution_delegators_rewards, distribution_delegators_rewards_for_validator, distribution_validator_commission, minting_inflation, minting_inflation_2, staking_pool, staking_pool_2, bank_total, ibc_apps_transfer_params, ibc_applications_transfer_params, bank_supply_unom, bank_supply, validators, validator, staking_params, slashing_params, signing_infos, signing_info, latest_block, block, latest_validator_set, validator_set, community_pool, proposals, ibc_connection, ibc_connections, ibc_connection_client_state, ibc_connection_channels, ], ) } ================================================ FILE: rust-toolchain ================================================ [toolchain] channel = "nightly-2024-07-21" ================================================ FILE: rustfmt.toml ================================================ comment_width = 80 wrap_comments = true ================================================ FILE: src/airdrop.rs ================================================ //! 
State and logic for airdrop accounts which can be claimed by users. use orga::coins::Address; #[cfg(feature = "full")] use orga::coins::{Amount, Decimal}; use orga::collections::{ChildMut, Map}; use orga::context::GetContext; use orga::migrate::MigrateFrom; use orga::orga; use orga::plugins::{Paid, Signer}; use orga::{Error, Result}; #[cfg(feature = "full")] use split_iter::Splittable; use super::app::Nom; /// The maximum units of stake counted when calculating airdrop II. #[cfg(feature = "full")] const MAX_STAKED: u64 = 1_000_000_000; /// The total amount of token units claimable by users in airdrop II. #[cfg(feature = "full")] const AIRDROP_II_TOTAL: u64 = 3_500_000_000_000; /// Airdrop account state. #[orga(version = 1)] pub struct Airdrop { accounts: Map, } impl MigrateFrom for AirdropV1 { fn migrate_from(_value: AirdropV0) -> Result { unreachable!() } } type Recipients = Vec<(Address, Vec<(u64, u64)>, u64)>; #[orga] impl Airdrop { /// Gets the account at the given address. #[query] pub fn get(&self, address: Address) -> Result> { Ok(self.accounts.get(address)?.map(|a| a.clone())) } /// Gets a mutable reference to the account at the given address. pub fn get_mut(&mut self, address: Address) -> Result>> { self.accounts.get_mut(address) } /// Gets a mutable reference to the account for the signer of the /// transaction. pub fn signer_acct_mut(&mut self) -> Result> { let signer = self .context::() .ok_or_else(|| Error::Signer("No Signer context available".into()))? .signer .ok_or_else(|| Error::Coins("Unauthorized account action".into()))?; self.accounts .get_mut(signer)? .ok_or_else(|| Error::App("No airdrop account for signer".into())) } /// Pays into the Paid context as funding. fn pay_as_funding(&mut self, amount: u64) -> Result<()> { let paid = self .context::() .ok_or_else(|| Error::Coins("No Paid context found".into()))?; paid.give::(amount) } /// Claims the signer's airdrop I balance to the funding context. 
#[call] pub fn claim_airdrop1(&mut self) -> Result<()> { let mut acct = self.signer_acct_mut()?; let amount = acct.airdrop1.claim()?; self.pay_as_funding(amount)?; Ok(()) } /// Claims the signer's airdrop II balance to the funding context. #[call] pub fn claim_airdrop2(&mut self) -> Result<()> { let mut acct = self.signer_acct_mut()?; let amount = acct.airdrop2.claim()?; self.pay_as_funding(amount)?; Ok(()) } /// Joins the signer's account to a destination account (e.g. when the user /// received the airdrop to multiple addresses but would like to consolidate /// into one account). pub fn join_accounts(&mut self, dest_addr: Address) -> Result<()> { let mut acct = self.signer_acct_mut()?; if acct.joined { return Err(Error::App("Account already joined".to_string())); } if acct.is_empty() { return Err(Error::App("Account has no airdrop balance".to_string())); } let src = acct.clone(); *acct = Account::default(); let mut dest = self.accounts.entry(dest_addr)?.or_default()?; let add_part = |dest: &mut Part, src: Part| { if dest.claimable > 0 || dest.claimed > 0 { dest.claimable += src.locked; } else { dest.locked += src.locked; } dest.claimable += src.claimable; dest.claimed += src.claimed; }; add_part(&mut dest.airdrop1, src.airdrop1); add_part(&mut dest.airdrop2, src.airdrop2); dest.joined = true; Ok(()) } /// Initializes unclaimed airdrop accounts from a CSV file. 
#[cfg(feature = "full")] pub fn init_from_airdrop2_csv(&mut self, data: &[u8]) -> Result<()> { log::info!("Initializing balances from airdrop 2 snapshot..."); let recipients = Self::get_recipients_from_csv(data); let len = recipients[0].1.len(); let mut totals = vec![0u64; len]; for (_, networks, _) in recipients.iter() { for (i, (staked, count)) in networks.iter().enumerate() { let score = Self::score(*staked, *count); totals[i] += score; } } let precision = 1_000_000u128; let unom_per_network = AIRDROP_II_TOTAL / (len as u64); let unom_per_score: Vec<_> = totals .iter() .map(|n| unom_per_network as u128 * precision / *n as u128) .collect(); let mut airdrop_total = 0; let mut accounts = 0; #[cfg(not(feature = "testnet"))] let mut testnet_locked = 0; #[cfg(not(feature = "testnet"))] let mut testnet_claimable = 0; #[allow(unused_variables)] for (address, networks, testnet_completions) in recipients.iter() { let unom: u64 = networks .iter() .zip(unom_per_score.iter()) .map(|((staked, count), unom_per_score)| { let score = Self::score(*staked, *count) as u128; (score * unom_per_score / precision) as u64 }) .sum(); let res = self.airdrop_to(*address, unom, *testnet_completions)?; airdrop_total += unom; accounts += 1; #[cfg(not(feature = "testnet"))] { testnet_locked += res.0; testnet_claimable += res.1; } } log::info!( "Total amount minted for airdrop 2: {} uNOM across {} accounts", airdrop_total, accounts, ); #[cfg(not(feature = "testnet"))] log::info!( "Testnet participation allocation: {} uNOM locked, {} uNOM claimable", testnet_locked, testnet_claimable, ); Ok(()) } /// Initializes and pays into a new airdrop account. #[allow(unused_variables)] #[cfg(feature = "full")] fn airdrop_to( &mut self, addr: Address, unom: u64, testnet_completions: u64, ) -> Result<(u64, u64)> { let mut acct = self.accounts.entry(addr)?.or_insert_default()?; acct.airdrop2.claimable = unom; Ok((0, 0)) } /// Returns the score for a given staked amount and delegation count. 
The /// score is further used in the calculation of the amount of tokens to /// receive. #[cfg(feature = "full")] fn score(staked: u64, _count: u64) -> u64 { staked.min(MAX_STAKED) } /// Parses the CSV data into a list of recipients. #[cfg(feature = "full")] fn get_recipients_from_csv(data: &[u8]) -> Recipients { let mut reader = csv::Reader::from_reader(data); reader .records() .filter_map(|row| { let row = row.unwrap(); if row[0].len() != 44 { return None; } let addr: Address = row[0].parse().unwrap(); let (claims, values) = row .into_iter() .skip(1) .split(|item| item.parse::().is_ok()); let values: Vec<_> = values.map(|s| -> u64 { s.parse().unwrap() }).collect(); let claims = claims .map(|s| -> bool { s.parse().unwrap() }) .filter(|b| *b) .count() as u64; let pairs = values.chunks_exact(2).map(|arr| (arr[0], arr[1])).collect(); Some((addr, pairs, claims)) }) .collect() } /// Initializes the airdrop I balances for a given address. #[cfg(feature = "full")] fn init_airdrop1_amount( &mut self, addr: Address, liquid: Amount, staked: Amount, ) -> Result { let liquid_capped = Amount::min(liquid, 1_000_000_000.into()); let staked_capped = Amount::min(staked, 1_000_000_000.into()); let units = (liquid_capped + staked_capped * Amount::from(4))?; let units_per_nom = Decimal::from(20_299325) / Decimal::from(1_000_000); let nom_amount = (Decimal::from(units) / units_per_nom)?.amount()?; let mut acct = self.accounts.entry(addr)?.or_insert_default()?; acct.airdrop1.claimable = nom_amount.into(); Ok(nom_amount) } /// Initializes the airdrop I balances for all the accounts in the given /// CSV. 
#[cfg(feature = "full")] pub fn init_from_airdrop1_csv(&mut self, data: &[u8]) -> Result<()> { let mut rdr = csv::Reader::from_reader(data); let snapshot = rdr.records(); println!("Initializing balances from airdrop 1 snapshot..."); let mut minted = Amount::from(0); let mut accounts = 0; for row in snapshot { let row = row.map_err(|e| Error::App(e.to_string()))?; let (_, address_b32, _) = bech32::decode(&row[0]).unwrap(); let address_vec: Vec = bech32::FromBase32::from_base32(&address_b32).unwrap(); let address_buf: [u8; 20] = address_vec.try_into().unwrap(); let liquid: u64 = row[1].parse().unwrap(); let staked: u64 = row[2].parse().unwrap(); let minted_for_account = self.init_airdrop1_amount(address_buf.into(), liquid.into(), staked.into())?; minted = (minted + minted_for_account)?; accounts += 1; } println!( "Total amount minted for airdrop 1: {} uNOM across {} accounts", minted, accounts ); Ok(()) } } /// An airdrop account. #[orga(version = 1..=2)] #[derive(Clone, Debug, PartialEq, Eq)] pub struct Account { /// The part of the airdrop received in airdrop I. pub airdrop1: Part, /// The part of the airdrop received in airdrop II. pub airdrop2: Part, /// Whether or not the account has been joined into from another account. /// This is tracked to prevent a DoS vector where an attacker could spam /// transactions by repeatedly joining into different accounts without /// paying a fee. pub joined: bool, } impl Account { /// Returns `true` if the account is empty. pub fn is_empty(&self) -> bool { self == &Self::default() } } impl MigrateFrom for AccountV2 { fn migrate_from(_value: AccountV1) -> Result { unreachable!() } } /// A part of an airdrop account, e.g. the balances from either airdrop I or /// airdrop II. #[orga] #[derive(Clone, Debug, PartialEq, Eq)] pub struct Part { /// A balance for the user which is locked and cannot be claimed. This will /// typically be unlocked after some external event occurs. 
pub locked: u64, /// A balance for the user which can be claimed by the user. pub claimable: u64, /// The amount of balance which has already been claimed. This is not a /// balance, since upon claiming the balance was moved elsewhere, e.g. the /// user's normal balance. pub claimed: u64, } impl Part { /// Unlocks the locked balance, making it claimable. pub fn unlock(&mut self) { self.claimable += self.locked; self.locked = 0; } /// Claims the claimable balance, marking the amount as claimed and /// returning the amount to be paid to the account's normal balance. pub fn claim(&mut self) -> Result { let amount = self.claimable; if amount == 0 { return Err(Error::Coins("No balance to claim".to_string())); } self.claimed += amount; self.claimable = 0; Ok(amount) } /// Returns `true` if the part has no locked or claimable balances and has /// not been claimed. pub fn is_empty(&self) -> bool { self == &Self::default() } /// Returns the total balance across all states. pub fn total(&self) -> u64 { self.locked + self.claimable + self.claimed } } #[cfg(feature = "full")] #[cfg(test)] mod test { use super::*; #[cfg(not(feature = "testnet"))] use orga::coins::Amount; use std::str::FromStr; fn assert_approx_eq(a: u64, b: u64) { assert!((a as i64 - b as i64).abs() <= 2, "{} !~= {}", a, b); } #[cfg(not(feature = "testnet"))] fn amount_airdropped(acct: &Account) -> u64 { acct.airdrop2.claimable } #[cfg(feature = "testnet")] #[test] fn airdrop_allocation_no_testnet() { let mut airdrop = Airdrop::default(); let csv = "address,evmos_9000-1_staked,evmos_9000-1_count,kaiyo-1_staked,kaiyo-1_count,cosmoshub-4_staked,cosmoshub-4_count,juno-1_staked,juno-1_count,osmosis-1_staked,osmosis-1_count,btc_deposit_claimed,btc_withdraw_claimed,ibc_transfer_claimed nomic100000aeu2lh0jrrnmn2npc88typ25u7t3aa64x,1,1,1,1,1,1,1,1,1,1,true,true,true".as_bytes(); airdrop.init_from_airdrop2_csv(csv).unwrap(); let account = airdrop 
.get_mut(Address::from_str("nomic100000aeu2lh0jrrnmn2npc88typ25u7t3aa64x").unwrap()) .unwrap() .unwrap(); let airdrop2_total = account.airdrop2.total(); assert_approx_eq(airdrop2_total, AIRDROP_II_TOTAL); } #[cfg(not(feature = "testnet"))] #[test] fn airdrop_allocation() { let mut airdrop = Airdrop::default(); let csv = "address,evmos_9000-1_staked,evmos_9000-1_count,kaiyo-1_staked,kaiyo-1_count,cosmoshub-4_staked,cosmoshub-4_count,juno-1_staked,juno-1_count,osmosis-1_staked,osmosis-1_count,btc_deposit_claimed,btc_withdraw_claimed,ibc_transfer_claimed nomic100000aeu2lh0jrrnmn2npc88typ25u7t3aa64x,1,1,1,1,1,1,1,1,1,1,true,true,true".as_bytes(); airdrop.init_from_airdrop2_csv(csv).unwrap(); let account = airdrop .get_mut(Address::from_str("nomic100000aeu2lh0jrrnmn2npc88typ25u7t3aa64x").unwrap()) .unwrap() .unwrap(); let airdrop2_total = amount_airdropped(&*account); assert_approx_eq(airdrop2_total, AIRDROP_II_TOTAL); } #[cfg(not(feature = "testnet"))] #[test] fn airdrop_allocation_multiple() { let mut airdrop = Airdrop::default(); let csv = "address,evmos_9000-1_staked,evmos_9000-1_count,kaiyo-1_staked,kaiyo-1_count,cosmoshub-4_staked,cosmoshub-4_count,juno-1_staked,juno-1_count,osmosis-1_staked,osmosis-1_count,btc_deposit_claimed,btc_withdraw_claimed,ibc_transfer_claimed nomic100000aeu2lh0jrrnmn2npc88typ25u7t3aa64x,1,1,1,1,1,1,1,1,1,1,true,true,true nomic10005vr6w230rer02rgwsvmhh0vdpk9hvxkv8zs,1,1,1,1,1,1,1,1,1,1,true,true,true".as_bytes(); airdrop.init_from_airdrop2_csv(csv).unwrap(); let account = airdrop .get_mut(Address::from_str("nomic100000aeu2lh0jrrnmn2npc88typ25u7t3aa64x").unwrap()) .unwrap() .unwrap(); let airdrop2_total = amount_airdropped(&*account); let expected: u64 = (Amount::from(AIRDROP_II_TOTAL) / Amount::from(2)) .result() .unwrap() .amount() .unwrap() .into(); assert_approx_eq(airdrop2_total, expected); let account = airdrop .get_mut(Address::from_str("nomic10005vr6w230rer02rgwsvmhh0vdpk9hvxkv8zs").unwrap()) .unwrap() .unwrap(); let airdrop2_total 
= amount_airdropped(&*account); assert_approx_eq(airdrop2_total, expected); } } ================================================ FILE: src/app/migrations.rs ================================================ #[cfg(feature = "babylon")] use crate::babylon::Babylon; #[cfg(feature = "ethereum")] use crate::ethereum::{bytes32, Connection, Ethereum, Network}; use crate::{ bitcoin::{ adapter::Adapter, header_queue::{WorkHeader, WrappedHeader}, }, incentives::Incentives, }; use super::{InnerAppV5, InnerAppV6, InnerAppV7}; use bitcoin::{ util::{uint::Uint256, BitArray}, BlockHeader, }; use orga::{ coins::Take, collections::Map, ibc::Ibc, migrate::{Migrate, MigrateFrom}, state::State, store::Store, upgrade::Upgrade, Result, }; impl MigrateFrom for InnerAppV6 { #[allow(unused_mut)] fn migrate_from(mut other: InnerAppV5) -> Result { #[cfg(not(feature = "testnet"))] { other.bitcoin.checkpoints.config.max_age = 60 * 60 * 24 * 30 * 12; other.bitcoin.headers.config.max_length = 52_560; // remove headers and revert to checkpoint so we can regain history which was // pruned other .bitcoin .headers .deque .retain_unordered(|_| Ok(false))?; let checkpoint_json = include_str!("../bitcoin/checkpoint.json"); let header: (u32, BlockHeader) = serde_json::from_str(checkpoint_json)?; let wrapped_header = WrappedHeader::new(Adapter::new(header.1), header.0); let work_header = WorkHeader::new(wrapped_header.clone(), wrapped_header.work()); other.bitcoin.headers.current_work = Adapter::new(work_header.work()); other.bitcoin.headers.deque.push_back(work_header)?; // backfill checkpoint history use bitcoin::hashes::hex::FromHex; let scripts = include_str!("../../stakenet_reserve_scripts.csv") .lines() .map(|line| { let mut parts = line.split(','); parts.next().unwrap(); parts.next().unwrap() }) .map(|script_hex| bitcoin::Script::from_hex(script_hex).unwrap()); other.bitcoin.checkpoints.backfill( 5276, scripts, other.bitcoin.checkpoints.config.sigset_threshold, )?; } Ok(Self { accounts: 
other.accounts, staking: other.staking, airdrop: other.airdrop, community_pool: other.community_pool, incentive_pool: other.incentive_pool, staking_rewards: other.staking_rewards, dev_rewards: other.dev_rewards, community_pool_rewards: other.community_pool_rewards, incentive_pool_rewards: other.incentive_pool_rewards, bitcoin: other.bitcoin, reward_timer: other.reward_timer, upgrade: other.upgrade, incentives: other.incentives, ibc: other.ibc, cosmos: other.cosmos, #[cfg(feature = "ethereum")] ethereum: Default::default(), // TODO }) } } impl MigrateFrom for InnerAppV7 { fn migrate_from(other: InnerAppV6) -> Result { // #[cfg(all(feature = "testnet", feature = "ethereum"))] // let mut ethereum = Ethereum::default(); todo!(); } } ================================================ FILE: src/app.rs ================================================ //! The top-level application state and logic of the Nomic protocol. The main //! state type is the [InnerApp] struct. #![allow(clippy::too_many_arguments)] // TODO: remove after switching from "testnet" feature flag to orga channels #![allow(unused_variables)] #![allow(unused_imports)] use crate::airdrop::Airdrop; #[cfg(feature = "babylon")] use crate::babylon::{self, Babylon, Params}; use crate::bitcoin::adapter::Adapter; use crate::bitcoin::threshold_sig::Signature; use crate::bitcoin::{exempt_from_fee, Bitcoin, Nbtc}; use crate::bitcoin::{matches_bitcoin_network, NETWORK}; use crate::cosmos::{Chain, Cosmos, Proof}; #[cfg(feature = "ethereum")] use crate::ethereum::Ethereum; #[cfg(feature = "frost")] use crate::frost::{Config as FrostConfig, Frost, FrostGroup}; #[cfg(feature = "ethereum")] use crate::ethereum::Connection; use crate::incentives::Incentives; use bitcoin::util::merkleblock::PartialMerkleTree; use bitcoin::{PublicKey, Script, Transaction, TxOut}; use orga::coins::{ Accounts, Address, Amount, Coin, Faucet, FaucetOptions, Give, Staking, Symbol, Take, }; use orga::context::{Context, GetContext}; use 
orga::cosmrs::bank::MsgSend; use orga::describe::{Describe, Descriptor}; use orga::encoding::{Decode, Encode, LengthString, LengthVec}; use orga::ibc::ibc_rs::apps::transfer::types::Memo; use orga::ibc::ClientIdKey as ClientId; use sha2::{Digest, Sha256}; use std::io::Read; use std::str::FromStr; use std::time::Duration; use orga::ibc::ibc_rs::apps::transfer::context::TokenTransferExecutionContext; use orga::ibc::ibc_rs::apps::transfer::types::msgs::transfer::MsgTransfer; use orga::ibc::ibc_rs::apps::transfer::types::packet::PacketData; use orga::ibc::ibc_rs::core::channel::types::timeout::{TimeoutHeight, TimeoutTimestamp}; use orga::ibc::ibc_rs::core::host::types::identifiers::{ChannelId, PortId}; use orga::ibc::ibc_rs::core::primitives::Timestamp; use orga::ibc::{Ibc, IbcTx}; use orga::ibc::ibc_rs::core::primitives::Signer as IbcSigner; use orga::coins::Declaration; use orga::encoding::Adapter as EdAdapter; use orga::macros::build_call; use orga::migrate::Migrate; use orga::orga; use orga::plugins::sdk_compat::{sdk, sdk::Tx as SdkTx, ConvertSdkTx}; use orga::plugins::{disable_fee, DefaultPlugins, Events, Paid, PaidCall, Signer, Time, MIN_FEE}; use orga::prelude::*; use orga::upgrade::Version; use orga::upgrade::{Upgrade, UpgradeV0}; use orga::Error; use serde::{Deserialize, Serialize}; use serde_hex::{SerHex, Strict, StrictPfx}; use std::convert::TryInto; use std::fmt::Debug; mod migrations; /// The top-level application state type, wrapped with the Orga default plugins. pub type App = DefaultPlugins; /// The symbol for the NOM token. #[derive(State, Debug, Clone, Encode, Decode, Default, Migrate, Serialize)] pub struct Nom(()); impl Symbol for Nom { const INDEX: u8 = 69; const NAME: &'static str = "unom"; } /// The recipient address for the NOM developer rewards faucet on Nomic /// Stakenet. 
#[cfg(feature = "full")] const DEV_ADDRESS: &str = "nomic14z79y3yrghqx493mwgcj0qd2udy6lm26lmduah"; /// The recipient address for the NOM strategic reserve tokens on Nomic /// Stakenet. #[cfg(feature = "full")] const STRATEGIC_RESERVE_ADDRESS: &str = "nomic1d5n325zrf4elfu0heqd59gna5j6xyunhev23cj"; /// An address to receive a small portion of the strategic reserve tokens in /// order to send a small portion of tokens to validators for declaration fees /// on Nomic Stakenet. #[cfg(feature = "full")] const VALIDATOR_BOOTSTRAP_ADDRESS: &str = "nomic1fd9mxxt84lw3jdcsmjh6jy8m6luafhqd8dcqeq"; /// The fixed amount of nBTC fee required to relay IBC messages, in /// micro-satoshis. const IBC_FEE_USATS: u64 = 1_000_000; /// The fixed amount of nBTC fee required to make any application call, in /// micro-satoshis. const CALL_FEE_USATS: u64 = 100_000_000; /// The fixed amount of nBTC fee required to create a new Ethereum connection, /// in micro-satoshis. #[cfg(feature = "ethereum")] const ETH_CREATE_CONNECTION_FEE_USATS: u64 = 10_000_000_000; pub const OSMOSIS_CHANNEL_ID: &str = "channel-1"; #[cfg(feature = "frost")] const FROST_GROUP_INTERVAL: i64 = 10 * 60; #[cfg(feature = "frost")] const FROST_TOP_N: u16 = 5; #[cfg(feature = "frost")] const FROST_THRESHOLD: u16 = 3; /// The top-level application state type and logic. This contains the major /// state types for the various subsystems of the Nomic protocol. #[orga(version = 5..=7)] pub struct InnerApp { /// Account state for the NOM token. #[call] pub accounts: Accounts, /// Staking and validator state, including the validator set and staking /// rewards. This ultimately sets the voting power of Tendermint consensus /// based on the amount staked to each validator. #[call] pub staking: Staking, /// Airdrop state, which can be claimed by eligible accounts. #[call] pub airdrop: Airdrop, /// A balance of NOM tokens that are reserved for the protocol community /// pool. 
pub community_pool: Coin, /// A balance of NOM tokens that are reserved for the protocol incentive /// pool. incentive_pool: Coin, /// A stream of tokens that pays out over time to NOM stakers, based on a /// defined inflation schedule. staking_rewards: Faucet, /// A stream of tokens that pays out over time to the NOM developer wallet, /// based on a defined inflation schedule. dev_rewards: Faucet, /// A stream of tokens that pays out over time to the NOM community pool, /// based on a defined inflation schedule. community_pool_rewards: Faucet, /// A stream of tokens that pays out over time to the NOM incentive pool, /// based on a defined inflation schedule. incentive_pool_rewards: Faucet, /// The Bitcoin state, including a chain of verified Bitcoin headers and /// logic for processing Bitcoin transactions. #[call] pub bitcoin: Bitcoin, /// A timer to support paying out accumulated Bitcoin rewards periodically. pub reward_timer: RewardTimer, /// The IBC state, including the IBC client, connection, and channel /// states. This is used to relay messages between Nomic and other IBC /// enabled blockchains. #[call] pub ibc: Ibc, /// The upgrade state, including the current version of the application and /// logic for upgrading to a new version of the protocol once sufficient /// network voting power has signaled readiness. pub upgrade: Upgrade, /// Incentive state, allowing eligible users to claim tokens based on /// participation in the Nomic ecosystem. #[call] pub incentives: Incentives, /// The Cosmos state, allowing for relaying data about remote Cosmos chains /// which is not available in the IBC module. 
pub cosmos: Cosmos, #[cfg(all(feature = "ethereum", feature = "testnet"))] #[orga(version(V5, V6))] #[call] pub ethereum: Connection, #[cfg(all(feature = "ethereum", feature = "testnet"))] #[orga(version(V7))] #[call] pub ethereum: Ethereum, #[cfg(all(feature = "babylon", feature = "testnet"))] #[orga(version(V7))] #[call] pub babylon: Babylon, #[cfg(all(feature = "frost", feature = "testnet"))] #[orga(version(V7))] #[call] pub frost: Frost, } #[orga] impl InnerApp { /// The current version of the Nomic protocol. This is incremented when /// breaking changes are made to either the state encoding or logic of the /// protocol, and requires a network upgrade to be coordinated via the /// upgrade module. pub const CONSENSUS_VERSION: u8 = 14; #[cfg(feature = "full")] fn configure_faucets(&mut self) -> Result<()> { let day = 60 * 60 * 24; let year = Duration::from_secs(60 * 60 * 24 * 365); let two_thirds = (Amount::new(2) / Amount::new(3))?; let genesis_time = self .context::