diff --git a/.cargo/mutants.toml b/.cargo/mutants.toml new file mode 100644 index 0000000000..96f3735504 --- /dev/null +++ b/.cargo/mutants.toml @@ -0,0 +1,34 @@ +additional_cargo_args = ["--all-features"] +examine_globs = ["units/src/**/*.rs", "primitives/src/**/*.rs"] +exclude_globs = [ + "units/src/amount/verification.rs" # kani tests +] +exclude_re = [ + "impl Debug", + "impl Arbitrary", + "impl Display", + ".*Error", + "deserialize", # Skip serde mutation tests + "Iterator", # Mutating operations in an iterator can result in an infinite loop + + # ----------------------------------Crate-specific exclusions---------------------------------- + # Units + # src/amount/mod.rs + "parse_signed_to_satoshi", # Can't kill all mutants since there is no denomination smaller than Satoshi + "fmt_satoshi_in", # Related to formatting/display + "dec_width", # Replacing num /= 10 with num %=10 in a loop causes a timeout due to infinite loop + # src/locktime/relative.rs + "Time::to_consensus_u32", # Mutant from replacing | with ^, this returns the same value since the XOR is taken against the u16 with an all-zero bitmask + "FeeRate::fee_vb", # Deprecated + "FeeRate::fee_wu", # Deprecated + + # primitives + "Sequence::from_512_second_intervals", # Mutant from replacing | with ^, this returns the same value since the XOR is taken against the u16 with an all-zero bitmask + "Opcode::classify", # Not possible to kill all mutants without individually checking every opcode classification + "Block::cached_witness_root", # Skip getters + "Block::transactions", # Skip getters + "Script::to_bytes", # Deprecated + "decode_cursor", # Mutating operations in decode_cursor can result in an infinite loop + "fmt_debug", # Mutants from formatting/display changes + "fmt_debug_pretty", # Mutants from formatting/display changes +] diff --git a/.github/workflows/README.md b/.github/workflows/README.md index 54da70c567..f93b40cd1c 100644 --- a/.github/workflows/README.md +++ b/.github/workflows/README.md @@ -22,11 +22,11 @@ Run from rust.yml unless stated otherwise. Unfortunately we are now exceeding th 8. `Docs` 9. `Docsrs` 10. `Bench` -11. `ASAN` -12. `WASM` -13. `Arch32bit` -14. `Cross` -15. `Embedded` +11. `Arch32bit` +12. `Cross` +13. `Embedded` +14. `ASAN` +15. `WASM` 16. `Kani` 17. `Coveralls` - run by `coveralls.yml` 18. 
`release` - run by `release.yml` diff --git a/.github/workflows/cargo-semver-checks-version b/.github/workflows/cargo-semver-checks-version index 93d4c1ef06..9b0025a785 100644 --- a/.github/workflows/cargo-semver-checks-version +++ b/.github/workflows/cargo-semver-checks-version @@ -1 +1 @@ -0.36.0 +0.40.0 diff --git a/.github/workflows/coveralls.yml b/.github/workflows/coveralls.yml index a3d37cd3c5..688481ef81 100644 --- a/.github/workflows/coveralls.yml +++ b/.github/workflows/coveralls.yml @@ -7,7 +7,7 @@ name: Code coverage with llvm-cov jobs: Coveralls: name: Code coverage - stable toolchain - runs-on: ubuntu-latest + runs-on: ubuntu-24.04 strategy: fail-fast: false steps: diff --git a/.github/workflows/cron-daily-fuzz.yml b/.github/workflows/cron-daily-fuzz.yml index 2686f70ffd..253be2af24 100644 --- a/.github/workflows/cron-daily-fuzz.yml +++ b/.github/workflows/cron-daily-fuzz.yml @@ -11,7 +11,7 @@ on: jobs: fuzz: if: ${{ !github.event.act }} - runs-on: ubuntu-latest + runs-on: ubuntu-24.04 strategy: fail-fast: false matrix: @@ -59,7 +59,7 @@ jobs: echo "Using RUSTFLAGS $RUSTFLAGS" cd fuzz && ./fuzz.sh "${{ matrix.fuzz_target }}" - run: echo "${{ matrix.fuzz_target }}" >executed_${{ matrix.fuzz_target }} - - uses: actions/upload-artifact@v3 + - uses: actions/upload-artifact@v4 with: name: executed_${{ matrix.fuzz_target }} path: executed_${{ matrix.fuzz_target }} @@ -67,10 +67,10 @@ jobs: verify-execution: if: ${{ !github.event.act }} needs: fuzz - runs-on: ubuntu-latest + runs-on: ubuntu-24.04 steps: - uses: actions/checkout@v4 - - uses: actions/download-artifact@v3 + - uses: actions/download-artifact@v4 - name: Display structure of downloaded files run: ls -R - run: find executed_* -type f -exec cat {} + | sort > executed diff --git a/.github/workflows/cron-daily-kani.yml b/.github/workflows/cron-daily-kani.yml index dff1e54be8..61ef6e3957 100644 --- a/.github/workflows/cron-daily-kani.yml +++ b/.github/workflows/cron-daily-kani.yml @@ -5,7 +5,7 @@ on: - cron: '59 23 * * *' # midnight every day. jobs: run-kani: - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 steps: - name: 'Checkout your code.' 
uses: actions/checkout@v4 diff --git a/.github/workflows/cron-weekly-cargo-mutants.yml b/.github/workflows/cron-weekly-cargo-mutants.yml new file mode 100644 index 0000000000..74ec346299 --- /dev/null +++ b/.github/workflows/cron-weekly-cargo-mutants.yml @@ -0,0 +1,41 @@ +name: Weekly cargo-mutants +on: + schedule: + - cron: "0 0 * * 0" # runs weekly on Sunday at 00:00 + workflow_dispatch: # allows manual triggering +jobs: + cargo-mutants: + runs-on: ubuntu-24.04 + steps: + - uses: actions/checkout@v4 + - uses: taiki-e/install-action@v2 + with: + tool: cargo-mutants + - run: cargo mutants --in-place --no-shuffle + - uses: actions/upload-artifact@v4 + if: always() + with: + name: mutants.out + path: mutants.out + - name: Check for new mutants + if: always() + run: | + if [ -s mutants.out/missed.txt ]; then + echo "New missed mutants found" + gh issue create \ + --title "New Mutants Found" \ + --body "$(cat <> $GITHUB_ENV + else + echo "No new mutants found" + echo "create_issue=false" >> $GITHUB_ENV + fi + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/cron-weekly-rustfmt.yml b/.github/workflows/cron-weekly-rustfmt.yml index 81a5323fb1..d35073c4a6 100644 --- a/.github/workflows/cron-weekly-rustfmt.yml +++ b/.github/workflows/cron-weekly-rustfmt.yml @@ -6,7 +6,7 @@ on: jobs: format: name: Nightly rustfmt - runs-on: ubuntu-latest + runs-on: ubuntu-24.04 steps: - uses: actions/checkout@v4 - uses: dtolnay/rust-toolchain@nightly diff --git a/.github/workflows/cron-weekly-update-cargo-semver-checks.yml b/.github/workflows/cron-weekly-update-cargo-semver-checks.yml index c3c86f9035..75f706a1cc 100644 --- a/.github/workflows/cron-weekly-update-cargo-semver-checks.yml +++ b/.github/workflows/cron-weekly-update-cargo-semver-checks.yml @@ -6,7 +6,7 @@ on: jobs: format: name: Update cargo-semver-checks - runs-on: ubuntu-latest + runs-on: ubuntu-24.04 steps: - uses: actions/checkout@v4 - name: Update semver-checks to use latest crates.io published version diff --git a/.github/workflows/cron-semi-weekly-update-nightly.yml b/.github/workflows/cron-weekly-update-nightly.yml similarity index 95% rename from .github/workflows/cron-semi-weekly-update-nightly.yml rename to .github/workflows/cron-weekly-update-nightly.yml index 8cfb698854..b31ad20cc0 100644 --- a/.github/workflows/cron-semi-weekly-update-nightly.yml +++ b/.github/workflows/cron-weekly-update-nightly.yml @@ -1,12 +1,12 @@ name: Update Nightly rustc on: schedule: - - cron: "5 0 * * 1,4" # runs every Monday and Thursday at 00:05 + - cron: "5 0 * * 6" # Saturday at 00:05 workflow_dispatch: # allows manual triggering jobs: format: name: Update nightly rustc - runs-on: ubuntu-latest + runs-on: ubuntu-24.04 steps: - uses: actions/checkout@v4 - uses: dtolnay/rust-toolchain@nightly diff --git a/.github/workflows/cron-weekly-update-stable.yml b/.github/workflows/cron-weekly-update-stable.yml index 42f49cc772..456ccd9ae4 100644 --- a/.github/workflows/cron-weekly-update-stable.yml +++ b/.github/workflows/cron-weekly-update-stable.yml @@ -6,7 +6,7 @@ on: jobs: format: name: Update stable rustc - runs-on: ubuntu-latest + runs-on: ubuntu-24.04 steps: - uses: actions/checkout@v4 - uses: dtolnay/rust-toolchain@stable diff --git a/.github/workflows/gh-release.yml b/.github/workflows/gh-release.yml index 69ea930c5e..86014a98cf 100644 --- a/.github/workflows/gh-release.yml +++ b/.github/workflows/gh-release.yml @@ -7,7 +7,7 @@ on: jobs: build: - runs-on: ubuntu-latest + runs-on: ubuntu-24.04 steps: - uses: actions/checkout@v4 - 
uses: ncipollo/release-action@v1 diff --git a/.github/workflows/manage-pr.yml b/.github/workflows/manage-pr.yml index 49407eead0..3e92b33f94 100644 --- a/.github/workflows/manage-pr.yml +++ b/.github/workflows/manage-pr.yml @@ -7,7 +7,7 @@ jobs: permissions: contents: read pull-requests: write - runs-on: ubuntu-latest + runs-on: ubuntu-24.04 steps: - name: Checkout master uses: actions/checkout@v4 diff --git a/.github/workflows/miri.yml b/.github/workflows/miri.yml index 38eaa4afd0..ed9f05a702 100644 --- a/.github/workflows/miri.yml +++ b/.github/workflows/miri.yml @@ -11,7 +11,7 @@ name: Miri jobs: Miri: name: Miri - runs-on: ubuntu-latest + runs-on: ubuntu-24.04 strategy: fail-fast: false steps: diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index cd2a872550..1241ecf9ba 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -12,7 +12,7 @@ name: Release jobs: release: name: Release - dry-run - runs-on: ubuntu-latest + runs-on: ubuntu-24.04 steps: - name: Checkout Crate uses: actions/checkout@v4 diff --git a/.github/workflows/rust.yml b/.github/workflows/rust.yml index cf06c57ae8..ea83c99cbc 100644 --- a/.github/workflows/rust.yml +++ b/.github/workflows/rust.yml @@ -10,7 +10,7 @@ name: Continuous integration jobs: Prepare: - runs-on: ubuntu-latest + runs-on: ubuntu-24.04 outputs: nightly_version: ${{ steps.read_toolchain.outputs.nightly_version }} steps: @@ -22,7 +22,7 @@ jobs: Stable: # 2 jobs, one per manifest. name: Test - stable toolchain - runs-on: ubuntu-latest + runs-on: ubuntu-24.04 strategy: fail-fast: false matrix: @@ -34,7 +34,7 @@ jobs: uses: actions/checkout@v4 with: repository: rust-bitcoin/rust-bitcoin-maintainer-tools - ref: 4f17a059a2f57c1b99b7c240a1467a5c0acebdc3 + ref: c3324024ced9bb1eb854397686919c3ff7d97e1e path: maintainer-tools - name: "Select toolchain" uses: dtolnay/rust-toolchain@stable @@ -46,7 +46,7 @@ jobs: Nightly: # 2 jobs, one per manifest. name: Test - nightly toolchain needs: Prepare - runs-on: ubuntu-latest + runs-on: ubuntu-24.04 strategy: fail-fast: false matrix: @@ -58,7 +58,7 @@ jobs: uses: actions/checkout@v4 with: repository: rust-bitcoin/rust-bitcoin-maintainer-tools - ref: 4f17a059a2f57c1b99b7c240a1467a5c0acebdc3 + ref: c3324024ced9bb1eb854397686919c3ff7d97e1e path: maintainer-tools - name: "Select toolchain" uses: dtolnay/rust-toolchain@v1 @@ -71,7 +71,7 @@ jobs: MSRV: # 2 jobs, one per manifest. 
name: Test - MSRV toolchain - runs-on: ubuntu-latest + runs-on: ubuntu-24.04 strategy: fail-fast: false matrix: @@ -83,7 +83,7 @@ jobs: uses: actions/checkout@v4 with: repository: rust-bitcoin/rust-bitcoin-maintainer-tools - ref: 4f17a059a2f57c1b99b7c240a1467a5c0acebdc3 + ref: c3324024ced9bb1eb854397686919c3ff7d97e1e path: maintainer-tools - name: "Select toolchain" uses: dtolnay/rust-toolchain@stable @@ -97,7 +97,7 @@ jobs: Lint: name: Lint - nightly toolchain needs: Prepare - runs-on: ubuntu-latest + runs-on: ubuntu-24.04 strategy: fail-fast: false matrix: @@ -109,7 +109,7 @@ jobs: uses: actions/checkout@v4 with: repository: rust-bitcoin/rust-bitcoin-maintainer-tools - ref: 4f17a059a2f57c1b99b7c240a1467a5c0acebdc3 + ref: c3324024ced9bb1eb854397686919c3ff7d97e1e path: maintainer-tools - name: "Select toolchain" uses: dtolnay/rust-toolchain@v1 @@ -124,7 +124,7 @@ jobs: Docs: name: Docs - stable toolchain - runs-on: ubuntu-latest + runs-on: ubuntu-24.04 strategy: fail-fast: false matrix: @@ -136,7 +136,7 @@ jobs: uses: actions/checkout@v4 with: repository: rust-bitcoin/rust-bitcoin-maintainer-tools - ref: 4f17a059a2f57c1b99b7c240a1467a5c0acebdc3 + ref: c3324024ced9bb1eb854397686919c3ff7d97e1e path: maintainer-tools - name: "Select toolchain" uses: dtolnay/rust-toolchain@stable @@ -148,7 +148,7 @@ jobs: Docsrs: name: Docs - nightly toolchain needs: Prepare - runs-on: ubuntu-latest + runs-on: ubuntu-24.04 strategy: fail-fast: false matrix: @@ -160,7 +160,7 @@ jobs: uses: actions/checkout@v4 with: repository: rust-bitcoin/rust-bitcoin-maintainer-tools - ref: 4f17a059a2f57c1b99b7c240a1467a5c0acebdc3 + ref: c3324024ced9bb1eb854397686919c3ff7d97e1e path: maintainer-tools - name: "Select toolchain" uses: dtolnay/rust-toolchain@v1 @@ -174,7 +174,7 @@ jobs: Bench: name: Bench - nightly toolchain needs: Prepare - runs-on: ubuntu-latest + runs-on: ubuntu-24.04 strategy: fail-fast: false matrix: @@ -186,7 +186,7 @@ jobs: uses: actions/checkout@v4 with: repository: rust-bitcoin/rust-bitcoin-maintainer-tools - ref: 4f17a059a2f57c1b99b7c240a1467a5c0acebdc3 + ref: c3324024ced9bb1eb854397686919c3ff7d97e1e path: maintainer-tools - name: "Select toolchain" uses: dtolnay/rust-toolchain@v1 @@ -199,7 +199,7 @@ jobs: Arch32bit: name: Test 32-bit version - runs-on: ubuntu-latest + runs-on: ubuntu-24.04 steps: - name: "Checkout repo" uses: actions/checkout@v4 @@ -217,7 +217,7 @@ jobs: Cross: name: Cross test - stable toolchain if: ${{ !github.event.act }} - runs-on: ubuntu-latest + runs-on: ubuntu-24.04 steps: - name: "Checkout repo" uses: actions/checkout@v4 @@ -233,7 +233,7 @@ jobs: Embedded: name: Embedded - nightly toolchain needs: Prepare - runs-on: ubuntu-latest + runs-on: ubuntu-24.04 env: RUSTFLAGS: "-C link-arg=-Tlink.x" CARGO_TARGET_THUMBV7M_NONE_EABI_RUNNER: "qemu-system-arm -cpu cortex-m3 -machine mps2-an385 -nographic -semihosting-config enable=on,target=native -kernel" @@ -261,7 +261,7 @@ jobs: ASAN: # hashes crate only. name: ASAN - nightly toolchain needs: Prepare - runs-on: ubuntu-latest + runs-on: ubuntu-24.04 strategy: fail-fast: false matrix: @@ -282,7 +282,7 @@ jobs: WASM: # hashes crate only. name: WASM - stable toolchain - runs-on: ubuntu-latest + runs-on: ubuntu-24.04 strategy: fail-fast: false # Note we do not use the recent lock file for wasm testing. 
@@ -296,7 +296,7 @@ jobs: Kani: name: Kani codegen - stable toolchain - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 steps: - name: "Checkout repo" uses: actions/checkout@v4 @@ -304,4 +304,3 @@ jobs: uses: model-checking/kani-github-action@v1.1 with: args: "--only-codegen" - diff --git a/.github/workflows/semver-checks-pr-label.yml b/.github/workflows/semver-checks-pr-label.yml index 56ce356de1..c7f9b1c66c 100644 --- a/.github/workflows/semver-checks-pr-label.yml +++ b/.github/workflows/semver-checks-pr-label.yml @@ -8,7 +8,7 @@ name: Check semver breaks - Label and Comment PR jobs: Download: name: Download, Unzip and Add Labels/Comments - runs-on: ubuntu-latest + runs-on: ubuntu-24.04 permissions: contents: read pull-requests: write diff --git a/.github/workflows/semver-checks.yml b/.github/workflows/semver-checks.yml index 93f37d2692..32d01b6b28 100644 --- a/.github/workflows/semver-checks.yml +++ b/.github/workflows/semver-checks.yml @@ -6,7 +6,7 @@ name: Check semver breaks jobs: PR: name: PR Semver - stable toolchain - runs-on: ubuntu-latest + runs-on: ubuntu-24.04 strategy: fail-fast: false steps: @@ -45,7 +45,7 @@ jobs: Feature: name: Non additive cargo features - stable toolchain - runs-on: ubuntu-latest + runs-on: ubuntu-24.04 strategy: fail-fast: false steps: diff --git a/.github/workflows/shellcheck.yml b/.github/workflows/shellcheck.yml index 8577c05033..dea02c3a04 100644 --- a/.github/workflows/shellcheck.yml +++ b/.github/workflows/shellcheck.yml @@ -6,7 +6,7 @@ on: jobs: shellcheck: name: Shellcheck - runs-on: ubuntu-latest + runs-on: ubuntu-24.04 steps: - uses: actions/checkout@v4 - name: Run ShellCheck diff --git a/.github/workflows/stable-version b/.github/workflows/stable-version index 71fae54fb2..8510ffad03 100644 --- a/.github/workflows/stable-version +++ b/.github/workflows/stable-version @@ -1 +1 @@ -1.82.0 +1.85.1 diff --git a/.gitignore b/.gitignore index 6055dd8637..48c9cedca4 100644 --- a/.gitignore +++ b/.gitignore @@ -14,6 +14,7 @@ hashes/target # Test artifacts bitcoin/dep_test +mutants.out* # Fuzz artifacts hfuzz_target diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index d9139bacc9..98107dc615 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -130,7 +130,7 @@ adhere to the ideas presented in the following two blog posts: Whenever any part of your code wants to mention the version number the code will be released in, primarily in deprecation notices, you should use the string `TBD` (verbatim), so that the release preparation script can detect the -change and the correct version number can be filled in in preparation of the +change and the correct version number can be filled in preparation of the release. ```rust @@ -145,6 +145,21 @@ test out the patch set and opine on the technical merits of the patch. Please, first review PR on the conceptual level before focusing on code style or grammar fixes. +### API changes + +The API of the following crates is almost stable. Changing it is supposed to be non-trivial. To +assist in this effort all PRs that change the public API of any of these crates must include a patch to +the `api/` text files. This should be a separate final patch to the PR that is the diff created by +running `just check-api`. + +- `hashes` +- `io` +- `primitives` +- `units` + +Check the [API text files](api/README.md) for more information +on how to install the dependencies and create the text files.
+ ### Repository maintainers Pull request merge requirements: @@ -194,12 +209,9 @@ in Bitcoin Core, with the following exceptions: If your change requires a dependency to be upgraded you must do the following: 1. Modify `Cargo.toml` -2. Copy `Cargo-minimal.lock` to `Cargo.lock` -3. Trigger cargo to update the required entries in the lock file - use `--precise` using the minimum version number that works -4. Test your change -5. Copy `Cargo.lock` to `Cargo-minimal.lock` -6. Update `Cargo-recent.lock` if it is also behind -7. Commit both lock files together with `Cargo.toml` and your code changes +2. Run `just update-lock-files`, if necessary install `just` first with `cargo install just`. +3. Test your change +4. Commit both `Cargo-minimal.lock` and `Cargo-recent.lock` together with `Cargo.toml` and your code changes ### Unsafe code @@ -348,7 +360,7 @@ All errors that live in an `error` module (eg, `foo/error.rs`) and appear in a p With respect to `expect` messages, they should follow the [Rust standard library guidelines](https://doc.rust-lang.org/std/option/enum.Option.html#recommended-message-style). -More specifically, `expect` messages should be used to to describe the reason +More specifically, `expect` messages should be used to describe the reason you expect the operation to succeed. For example, this `expect` message clearly states why the operation should succeed: diff --git a/Cargo-minimal.lock b/Cargo-minimal.lock index 5ec37a0166..f1b5097fa9 100644 --- a/Cargo-minimal.lock +++ b/Cargo-minimal.lock @@ -2,17 +2,11 @@ # It is not intended for manual editing. version = 3 -[[package]] -name = "anyhow" -version = "1.0.57" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "08f9b8508dccb7687a1d6c4ce66b2b0ecef467c94667de27d8d7fe1f8d2a9cdc" - [[package]] name = "arbitrary" -version = "1.0.1" +version = "1.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "237430fd6ed3740afe94eefcc278ae21e050285be882804e0d6e8695f0c94691" +checksum = "dde20b3d026af13f561bdd0f15edf01fc734f0dafcedbaf42bba506a9517f223" [[package]] name = "arrayvec" @@ -22,10 +16,10 @@ checksum = "96d30a06541fbafbc7f82ed10c06164cfbd2c401138f6addd8404629c4b16711" [[package]] name = "base58ck" -version = "0.1.0" +version = "0.2.0" dependencies = [ "bitcoin-internals", - "bitcoin_hashes 0.15.0", + "bitcoin_hashes 0.16.0", "hex-conservative 0.3.0", ] @@ -53,7 +47,7 @@ dependencies = [ [[package]] name = "bitcoin" -version = "0.33.0-alpha" +version = "0.33.0-alpha.0" dependencies = [ "arbitrary", "base58ck", @@ -61,15 +55,13 @@ dependencies = [ "bech32", "bincode", "bitcoin-internals", - "bitcoin-io", + "bitcoin-io 0.2.0", "bitcoin-primitives", "bitcoin-units", - "bitcoin_hashes 0.15.0", + "bitcoin_hashes 0.16.0", "bitcoinconsensus", "hex-conservative 0.3.0", "hex_lit", - "mutagen", - "ordered", "secp256k1", "serde", "serde_json", @@ -100,11 +92,18 @@ dependencies = [ "serde_json", ] +[[package]] +name = "bitcoin-io" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b47c4ab7a93edb0c7198c5535ed9b52b63095f4e9b45279c6736cec4b856baf" + [[package]] name = "bitcoin-io" version = "0.2.0" dependencies = [ "bitcoin-internals", + "bitcoin_hashes 0.16.0", ] [[package]] @@ -114,12 +113,9 @@ dependencies = [ "arbitrary", "bincode", "bitcoin-internals", - "bitcoin-io", "bitcoin-units", - "bitcoin_hashes 0.15.0", + "bitcoin_hashes 0.16.0", "hex-conservative 0.3.0", - "mutagen", - "ordered", "serde", "serde_json", ] @@ -129,6 +125,7 @@ name 
= "bitcoin-units" version = "0.2.0" dependencies = [ "arbitrary", + "bincode", "bitcoin-internals", "serde", "serde_json", @@ -141,14 +138,15 @@ version = "0.14.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bb18c03d0db0247e147a21a6faafd5a7eb851c743db062de72018b6b7e8e4d16" dependencies = [ + "bitcoin-io 0.1.3", "hex-conservative 0.2.0", ] [[package]] name = "bitcoin_hashes" -version = "0.15.0" +version = "0.16.0" dependencies = [ - "bitcoin-io", + "bitcoin-internals", "hex-conservative 0.3.0", "serde", "serde_json", @@ -241,12 +239,6 @@ version = "0.4.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1306f3464951f30e30d12373d31c79fbd52d236e5e896fd92f96ec7babbbe60b" -[[package]] -name = "json" -version = "0.12.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "078e285eafdfb6c4b434e0d31e8cfcb5115b651496faca5749b88fafd4f23bfd" - [[package]] name = "lazy_static" version = "1.4.0" @@ -268,45 +260,6 @@ dependencies = [ "libc", ] -[[package]] -name = "mutagen" -version = "0.2.0" -source = "git+https://github.com/llogiq/mutagen#a6377c4c3f360afeb7a287c1c17e4b69456d5f53" -dependencies = [ - "mutagen-core", - "mutagen-transform", -] - -[[package]] -name = "mutagen-core" -version = "0.2.0" -source = "git+https://github.com/llogiq/mutagen#a6377c4c3f360afeb7a287c1c17e4b69456d5f53" -dependencies = [ - "anyhow", - "json", - "lazy_static", - "proc-macro2", - "quote", - "serde", - "serde_json", - "syn", -] - -[[package]] -name = "mutagen-transform" -version = "0.2.0" -source = "git+https://github.com/llogiq/mutagen#a6377c4c3f360afeb7a287c1c17e4b69456d5f53" -dependencies = [ - "mutagen-core", - "proc-macro2", -] - -[[package]] -name = "ordered" -version = "0.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7f0642533dea0bb58bd5cae31bafc1872429f0f12ac8c61fe2b4ba44f80b959b" - [[package]] name = "ppv-lite86" version = "0.2.8" @@ -388,9 +341,9 @@ checksum = "c92464b447c0ee8c4fb3824ecc8383b81717b9f1e74ba2e72540aef7b9f82997" [[package]] name = "secp256k1" -version = "0.29.0" +version = "0.30.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0e0cc0f1cf93f4969faf3ea1c7d8a9faed25918d96affa959720823dfe86d4f3" +checksum = "b50c5943d326858130af85e049f2661ba3c78b26589b8ab98e65e80ae44a1252" dependencies = [ "bitcoin_hashes 0.14.0", "rand", diff --git a/Cargo-recent.lock b/Cargo-recent.lock index ed3d594ec8..558ade0a89 100644 --- a/Cargo-recent.lock +++ b/Cargo-recent.lock @@ -2,17 +2,11 @@ # It is not intended for manual editing. 
version = 3 -[[package]] -name = "anyhow" -version = "1.0.89" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "86fdf8605db99b54d3cd748a44c6d04df638eb5dafb219b135d0149bd0db01f6" - [[package]] name = "arbitrary" -version = "1.3.2" +version = "1.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7d5a26814d8dcb93b0e5a0ff3c6d80a8843bafb21b39e8e18a6f05471870e110" +checksum = "dde20b3d026af13f561bdd0f15edf01fc734f0dafcedbaf42bba506a9517f223" [[package]] name = "arrayvec" @@ -22,10 +16,10 @@ checksum = "7c02d123df017efcdfbd739ef81735b36c5ba83ec3c59c80a9d7ecc718f92e50" [[package]] name = "base58ck" -version = "0.1.0" +version = "0.2.0" dependencies = [ "bitcoin-internals", - "bitcoin_hashes 0.15.0", + "bitcoin_hashes 0.16.0", "hex-conservative 0.3.0", ] @@ -52,7 +46,7 @@ dependencies = [ [[package]] name = "bitcoin" -version = "0.33.0-alpha" +version = "0.33.0-alpha.0" dependencies = [ "arbitrary", "base58ck", @@ -60,15 +54,13 @@ dependencies = [ "bech32", "bincode", "bitcoin-internals", - "bitcoin-io", + "bitcoin-io 0.2.0", "bitcoin-primitives", "bitcoin-units", - "bitcoin_hashes 0.15.0", + "bitcoin_hashes 0.16.0", "bitcoinconsensus", "hex-conservative 0.3.0", "hex_lit", - "mutagen", - "ordered", "secp256k1", "serde", "serde_json", @@ -99,11 +91,18 @@ dependencies = [ "serde_json", ] +[[package]] +name = "bitcoin-io" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b47c4ab7a93edb0c7198c5535ed9b52b63095f4e9b45279c6736cec4b856baf" + [[package]] name = "bitcoin-io" version = "0.2.0" dependencies = [ "bitcoin-internals", + "bitcoin_hashes 0.16.0", ] [[package]] @@ -113,12 +112,9 @@ dependencies = [ "arbitrary", "bincode", "bitcoin-internals", - "bitcoin-io", "bitcoin-units", - "bitcoin_hashes 0.15.0", + "bitcoin_hashes 0.16.0", "hex-conservative 0.3.0", - "mutagen", - "ordered", "serde", "serde_json", ] @@ -128,6 +124,7 @@ name = "bitcoin-units" version = "0.2.0" dependencies = [ "arbitrary", + "bincode", "bitcoin-internals", "serde", "serde_json", @@ -140,14 +137,15 @@ version = "0.14.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bb18c03d0db0247e147a21a6faafd5a7eb851c743db062de72018b6b7e8e4d16" dependencies = [ + "bitcoin-io 0.1.3", "hex-conservative 0.2.1", ] [[package]] name = "bitcoin_hashes" -version = "0.15.0" +version = "0.16.0" dependencies = [ - "bitcoin-io", + "bitcoin-internals", "hex-conservative 0.3.0", "serde", "serde_json", @@ -243,12 +241,6 @@ version = "1.0.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "49f1f14873335454500d59611f1cf4a4b0f786f9ac11f4312a78e4cf2566695b" -[[package]] -name = "json" -version = "0.12.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "078e285eafdfb6c4b434e0d31e8cfcb5115b651496faca5749b88fafd4f23bfd" - [[package]] name = "lazy_static" version = "1.5.0" @@ -276,45 +268,6 @@ dependencies = [ "libc", ] -[[package]] -name = "mutagen" -version = "0.2.0" -source = "git+https://github.com/llogiq/mutagen#a6377c4c3f360afeb7a287c1c17e4b69456d5f53" -dependencies = [ - "mutagen-core", - "mutagen-transform", -] - -[[package]] -name = "mutagen-core" -version = "0.2.0" -source = "git+https://github.com/llogiq/mutagen#a6377c4c3f360afeb7a287c1c17e4b69456d5f53" -dependencies = [ - "anyhow", - "json", - "lazy_static", - "proc-macro2", - "quote", - "serde", - "serde_json", - "syn 1.0.109", -] - -[[package]] -name = "mutagen-transform" -version = "0.2.0" -source = 
"git+https://github.com/llogiq/mutagen#a6377c4c3f360afeb7a287c1c17e4b69456d5f53" -dependencies = [ - "mutagen-core", - "proc-macro2", -] - -[[package]] -name = "ordered" -version = "0.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7f0642533dea0bb58bd5cae31bafc1872429f0f12ac8c61fe2b4ba44f80b959b" - [[package]] name = "ppv-lite86" version = "0.2.20" @@ -389,9 +342,9 @@ checksum = "f3cb5ba0dc43242ce17de99c180e96db90b235b8a9fdc9543c96d2209116bd9f" [[package]] name = "secp256k1" -version = "0.29.1" +version = "0.30.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9465315bc9d4566e1724f0fffcbcc446268cb522e60f9a27bcded6b19c108113" +checksum = "b50c5943d326858130af85e049f2661ba3c78b26589b8ab98e65e80ae44a1252" dependencies = [ "bitcoin_hashes 0.14.0", "rand", @@ -431,7 +384,7 @@ checksum = "243902eda00fad750862fc144cea25caca5e20d615af0a81bee94ca738f1df1f" dependencies = [ "proc-macro2", "quote", - "syn 2.0.79", + "syn", ] [[package]] @@ -461,17 +414,6 @@ version = "1.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64" -[[package]] -name = "syn" -version = "1.0.109" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237" -dependencies = [ - "proc-macro2", - "quote", - "unicode-ident", -] - [[package]] name = "syn" version = "2.0.79" @@ -513,5 +455,5 @@ checksum = "fa4f8080344d4671fb4e831a13ad1e68092748387dfc4f55e356242fae12ce3e" dependencies = [ "proc-macro2", "quote", - "syn 2.0.79", + "syn", ] diff --git a/README.md b/README.md index a841d5122e..5cfe2c98a4 100644 --- a/README.md +++ b/README.md @@ -12,9 +12,9 @@ CC0 1.0 Universal Licensed CI Status API Docs - Rustc Version 1.63.0+ + Rustc Version 1.63.0+ Chat on IRC - + kani

@@ -49,7 +49,7 @@ are no plans to do so. Of course, patches to fix specific consensus incompatibil ### Support for 16-bit pointer sizes -16-bit pointer sizes are not supported and we can't promise they will be. If you care about them +16-bit pointer sizes are not supported, and we can't promise they will be. If you care about them please let us know, so we can know how large the interest is and possibly decide to support them. ### Semver compliance @@ -103,13 +103,6 @@ current stable one (see MSRV section). ## Building -The cargo feature `std` is enabled by default. At least one of the features `std` or `no-std` or -both must be enabled. - -Enabling the `no-std` feature does not disable `std`. To disable the `std` feature you must disable -default features. The `no-std` feature only enables additional features required for this crate to -be usable without `std`. Both can be enabled without conflict. - The library can be built and tested using [`cargo`](https://github.com/rust-lang/cargo/): ``` @@ -127,10 +120,19 @@ cargo test Please refer to the [`cargo` documentation](https://doc.rust-lang.org/stable/cargo/) for more detailed instructions. +### No-std support + +The `std` cargo feature is enabled by default. To build this project without the Rust standard +library, use the `--no-default-features` flag or set `default-features = false` in your dependency +declaration when adding it to your project. + +For embedded device examples, see [`bitcoin/embedded`](https://github.com/rust-bitcoin/rust-bitcoin/tree/master/bitcoin/embedded) +or [`hashes/embedded`](https://github.com/rust-bitcoin/rust-bitcoin/tree/master/hashes/embedded). + ### Just We support [`just`](https://just.systems/man/en/) for running dev workflow commands. Run `just` from -your shell to see list available sub-commands. +your shell to see a list of available sub-commands. ### Building the docs @@ -146,8 +148,8 @@ alias build-docs='RUSTDOCFLAGS="--cfg docsrs" cargo +nightly rustdoc --features= Unit and integration tests are available for those interested, along with benchmarks. For project developers, especially new contributors looking for something to work on, we do: -- Fuzz testing with [`Hongfuzz`](https://github.com/rust-fuzz/honggfuzz-rs) -- Mutation testing with [`Mutagen`](https://github.com/llogiq/mutagen) +- Fuzz testing with [`Honggfuzz`](https://github.com/rust-fuzz/honggfuzz-rs) +- Mutation testing with [`cargo-mutants`](https://github.com/sourcefrog/cargo-mutants) - Code verification with [`Kani`](https://github.com/model-checking/kani) There are always more tests to write and more bugs to find, contributions to our testing efforts @@ -165,9 +167,9 @@ bench marks use: `RUSTFLAGS='--cfg=bench' cargo +nightly bench`. ### Mutation tests -We have started doing mutation testing with [mutagen](https://github.com/llogiq/mutagen). To run -these tests first install the latest dev version with `cargo +nightly install --git https://github.com/llogiq/mutagen` -then run with `RUSTFLAGS='--cfg=mutate' cargo +nightly mutagen`. +We are doing mutation testing with [cargo-mutants](https://github.com/sourcefrog/cargo-mutants). To run +these tests first install with `cargo install --locked cargo-mutants` then run with `cargo mutants --in-place --no-shuffle`. +Note that running these mutation tests will take on the order of 10's of minutes. 
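As a rough illustration of what it takes to catch a mutant (this is a hypothetical sketch, not code from the repository — `dec_width` here only mimics the loop that the `.cargo/mutants.toml` comments mention), `cargo-mutants` swaps operators such as `/=`, `>=` and `+=`, so tests that assert exact values over inputs exercising each branch will fail under those swaps and count the mutant as caught:

```rust
/// Hypothetical digit-width helper; not the actual rust-bitcoin function.
fn dec_width(mut num: u64) -> usize {
    let mut width = 1;
    while num >= 10 {
        num /= 10;
        width += 1;
    }
    width
}

#[cfg(test)]
mod tests {
    use super::dec_width;

    #[test]
    fn dec_width_exact_values() {
        // Exact assertions on several magnitudes catch the common swaps:
        // `/=` -> `*=` makes `num` grow until the multiplication overflows
        // (a panic in debug builds), `/=` -> `%=` exits after one pass with
        // the wrong width, and `>=` -> `>` mis-handles exact powers of ten.
        assert_eq!(dec_width(0), 1);
        assert_eq!(dec_width(9), 1);
        assert_eq!(dec_width(10), 2);
        assert_eq!(dec_width(100), 3);
        assert_eq!(dec_width(12_345), 5);
    }
}
```

Functions where no test can distinguish the mutant from the original (for example the `|` vs `^` cases noted in `.cargo/mutants.toml`, where one operand never has overlapping bits) are excluded from the run instead.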
### Code verification diff --git a/base58/CHANGELOG.md b/base58/CHANGELOG.md index b219fb4776..9bd3876aa3 100644 --- a/base58/CHANGELOG.md +++ b/base58/CHANGELOG.md @@ -1,3 +1,12 @@ +# 0.2.0 - 2024-12-10 + +- Bump MSRV to `1.63` [#3100](https://github.com/rust-bitcoin/rust-bitcoin/pull/3100) +- Optimize `base58` on small inputs [#3002](https://github.com/rust-bitcoin/rust-bitcoin/pull/3002) +- Add `alloc` feature [#2996](https://github.com/rust-bitcoin/rust-bitcoin/pull/2996) +- Remove zeroed vector by pushing front [#3227](https://github.com/rust-bitcoin/rust-bitcoin/pull/3227) +- Close all errors [#3533](https://github.com/rust-bitcoin/rust-bitcoin/pull/3533) +- Bump `hex-conservative` to `0.3.0` [#3543](https://github.com/rust-bitcoin/rust-bitcoin/pull/3543) + # 0.1.0 - 2024-03-14 Initial release of the `base58ck` crate. This crate was cut out of diff --git a/base58/Cargo.toml b/base58/Cargo.toml index 198b91bf4d..06000bcbf9 100644 --- a/base58/Cargo.toml +++ b/base58/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "base58ck" -version = "0.1.0" +version = "0.2.0" authors = ["Andrew Poelstra "] license = "CC0-1.0" repository = "https://github.com/rust-bitcoin/rust-bitcoin/" @@ -18,7 +18,7 @@ std = ["alloc", "hashes/std", "internals/std"] alloc = ["hashes/alloc", "internals/alloc"] [dependencies] -hashes = { package = "bitcoin_hashes", version = "0.15.0", default-features = false } +hashes = { package = "bitcoin_hashes", version = "0.16.0", default-features = false } internals = { package = "bitcoin-internals", version = "0.4.0" } [dev-dependencies] diff --git a/base58/README.md b/base58/README.md index f28534d5a6..4b530e94ed 100644 --- a/base58/README.md +++ b/base58/README.md @@ -26,5 +26,5 @@ This library should always compile with any combination of features on **Rust 1. ## Licensing -The code in this project is licensed under the [Creative Commons CC0 1.0 Universal license](LICENSE). +The code in this project is licensed under the [Creative Commons CC0 1.0 Universal license](../LICENSE). We use the [SPDX license list](https://spdx.org/licenses/) and [SPDX IDs](https://spdx.dev/ids/). diff --git a/base58/src/error.rs b/base58/src/error.rs index 6257056ecc..678c22d930 100644 --- a/base58/src/error.rs +++ b/base58/src/error.rs @@ -2,6 +2,7 @@ //! Error code for the `base58` crate. +use core::convert::Infallible; use core::fmt; use internals::write_err; @@ -20,11 +21,16 @@ pub(super) enum ErrorInner { TooShort(TooShortError), } -internals::impl_from_infallible!(Error); -internals::impl_from_infallible!(ErrorInner); +impl From<Infallible> for Error { + fn from(never: Infallible) -> Self { match never {} } +} + +impl From<Infallible> for ErrorInner { + fn from(never: Infallible) -> Self { match never {} } +} impl Error { - /// Returns the invalid base58 ssscharacter, if encountered. + /// Returns the invalid base58 character, if encountered. pub fn invalid_character(&self) -> Option<u8> { match self.0 { ErrorInner::Decode(ref e) => Some(e.invalid_character()), @@ -95,7 +101,9 @@ pub(super) struct IncorrectChecksumError { pub(super) expected: u32, } -internals::impl_from_infallible!(IncorrectChecksumError); +impl From<Infallible> for IncorrectChecksumError { + fn from(never: Infallible) -> Self { match never {} } +} impl fmt::Display for IncorrectChecksumError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { @@ -116,8 +124,9 @@ pub(super) struct TooShortError { /// The length of the decoded data.
pub(super) length: usize, } - -internals::impl_from_infallible!(TooShortError); +impl From<Infallible> for TooShortError { + fn from(never: Infallible) -> Self { match never {} } +} impl fmt::Display for TooShortError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { @@ -141,8 +150,13 @@ pub(super) struct InvalidCharacterErrorInner { pub(super) invalid: u8, } -internals::impl_from_infallible!(InvalidCharacterError); -internals::impl_from_infallible!(InvalidCharacterErrorInner); +impl From<Infallible> for InvalidCharacterError { + fn from(never: Infallible) -> Self { match never {} } +} + +impl From<Infallible> for InvalidCharacterErrorInner { + fn from(never: Infallible) -> Self { match never {} } +} impl InvalidCharacterError { pub(super) fn new(invalid: u8) -> Self { Self(InvalidCharacterErrorInner { invalid }) } diff --git a/base58/src/lib.rs b/base58/src/lib.rs index b5b977ebc5..13db9fd1f1 100644 --- a/base58/src/lib.rs +++ b/base58/src/lib.rs @@ -20,6 +20,7 @@ // Exclude lints we don't think are valuable. #![allow(clippy::needless_question_mark)] // https://github.com/rust-bitcoin/rust-bitcoin/pull/2134 #![allow(clippy::manual_range_contains)] // More readable than clippy's format. +#![allow(clippy::incompatible_msrv)] // Has FPs and we're testing it which is more reliable anyway. extern crate alloc; @@ -40,7 +41,10 @@ use core::fmt; pub use std::{string::String, vec::Vec}; use hashes::sha256d; +use internals::array::ArrayExt; use internals::array_vec::ArrayVec; +#[allow(unused)] // MSRV polyfill +use internals::slice::SliceExt; use crate::error::{IncorrectChecksumError, TooShortError}; @@ -109,15 +113,9 @@ pub fn decode(data: &str) -> Result<Vec<u8>, InvalidCharacterError> { /// Decodes a base58check-encoded string into a byte vector verifying the checksum. pub fn decode_check(data: &str) -> Result<Vec<u8>, Error> { let mut ret: Vec<u8> = decode(data)?; - if ret.len() < 4 { - return Err(TooShortError { length: ret.len() }.into()); - } - let check_start = ret.len() - 4; + let (remaining, &data_check) = ret.split_last_chunk::<4>().ok_or(TooShortError { length: ret.len() })?; - let hash_check = sha256d::Hash::hash(&ret[..check_start]).as_byte_array()[..4] - .try_into() - .expect("4 byte slice"); - let data_check = ret[check_start..].try_into().expect("4 byte slice"); + let hash_check = *sha256d::Hash::hash(remaining).as_byte_array().sub_array::<0, 4>(); let expected = u32::from_le_bytes(hash_check); let actual = u32::from_le_bytes(data_check); @@ -126,7 +124,7 @@ pub fn decode_check(data: &str) -> Result<Vec<u8>, Error> { return Err(IncorrectChecksumError { incorrect: actual, expected }.into()); } - ret.truncate(check_start); + ret.truncate(remaining.len()); Ok(ret) } @@ -258,7 +256,7 @@ mod tests { use super::*; #[test] - fn test_base58_encode() { + fn base58_encode() { // Basics assert_eq!(&encode(&[0][..]), "1"); assert_eq!(&encode(&[1][..]), "2"); @@ -287,7 +285,7 @@ } #[test] - fn test_base58_decode() { + fn base58_decode() { // Basics assert_eq!(decode("1").ok(), Some(vec![0u8])); assert_eq!(decode("2").ok(), Some(vec![1u8])); @@ -308,7 +306,7 @@ } #[test] - fn test_base58_roundtrip() { + fn base58_roundtrip() { let s = "xprv9wTYmMFdV23N2TdNG573QoEsfRrWKQgWeibmLntzniatZvR9BmLnvSxqu53Kw1UmYPxLgboyZQaXwTCg8MSY3H2EU4pWcQDnRnrVA1xe8fs"; let v: Vec<u8> = decode_check(s).unwrap(); assert_eq!(encode_check(&v[..]), s); @@ -337,7 +335,7 @@ mod benches { #[bench] pub fn bench_encode_check_xpub(bh: &mut Bencher) { - let data: alloc::vec::Vec<_> = (0u8..78).collect(); // lenght of xpub + let data: alloc::vec::Vec<_> =
(0u8..78).collect(); // length of xpub bh.iter(|| { let r = super::encode_check(&data); diff --git a/bitcoin/CHANGELOG.md b/bitcoin/CHANGELOG.md index c6c779bd37..0972ca23b7 100644 --- a/bitcoin/CHANGELOG.md +++ b/bitcoin/CHANGELOG.md @@ -1,6 +1,113 @@ -# 0.33.0-alpha - TODO: Set date - -- Enforce script size limit when hashing scripts [#2794](https://github.com/rust-bitcoin/rust-bitcoin/pull/2794) +# Unreleased + +- TODO: Make a comment about `Amount::MAX_MONEY` (perhaps here in `bitcoin` release notes as well as in `amount`) + +- Use MAX_MONEY in serde regression test [#3950](https://github.com/rust-bitcoin/rust-bitcoin/pull/3950) + +# 0.33.0-alpha.0 - 2024-11-18 + +This series of alpha releases is meant for two things: + +1. To facilitate testing of `primitives 0.101`. +2. To allow testing of upcoming `1.0` releases of: + + - `bitcoin_hashes` + - `hex` + - `bitcoin-io` + - `primitives` + - `units` + - `ordered` + +You likely want to explicitly set the version if doing testing. `cargo` can be surprising when there +is a `-` in the version number. + +We do not currently intend on releasing `bitcoin 0.33.0` until the `1.0` releases above are done. + +For changes to our dependencies included in this release see: + +- `bitcoin_hashes 0.15`: [changelog](https://github.com/rust-bitcoin/rust-bitcoin/blob/master/hashes/CHANGELOG.md) +- `hex-conservative 0.3`: [changelog](https://github.com/rust-bitcoin/hex-conservative/blob/master/CHANGELOG.md) +- `bitcoin-io 0.2`: [changelog](https://github.com/rust-bitcoin/rust-bitcoin/blob/master/io/CHANGELOG.md) +- `bitcoin-primitives: 0.101`: [changelog]((https://github.com/rust-bitcoin/rust-bitcoin/blob/master/primitives/CHANGELOG.md)) +- `bitcoin-units 0.2`: [changelog]((https://github.com/rust-bitcoin/rust-bitcoin/blob/master/units/CHANGELOG.md)) +- `bitcoinconsensus: 0.106.0+26`: [changelog](https://github.com/rust-bitcoin/rust-bitcoinconsensus/blob/master/CHANGELOG.md) + +## Changes + +- Fix psbt fuzz crash [#3667](https://github.com/rust-bitcoin/rust-bitcoin/pull/3667) +- Update `from_next_work_required` to take an `i64` for timespan [#3660](https://github.com/rust-bitcoin/rust-bitcoin/pull/3660) +- Account for data pushing opcodes in `is_standard_op_return` [#3643](https://github.com/rust-bitcoin/rust-bitcoin/pull/3643) +- Add p2wpkh address creation example [#3642](https://github.com/rust-bitcoin/rust-bitcoin/pull/3642) +- Add `Address::into_unchecked` [#3640](https://github.com/rust-bitcoin/rust-bitcoin/pull/3640) +- Mark `checked_` functions as const [#3636](https://github.com/rust-bitcoin/rust-bitcoin/pull/3636) +- Mark functions const in `fee_rate` [#3627](https://github.com/rust-bitcoin/rust-bitcoin/pull/3627) +- Mark funtions const [#3608](https://github.com/rust-bitcoin/rust-bitcoin/pull/3608) +- Add constructor to `FeeRate` [#3604](https://github.com/rust-bitcoin/rust-bitcoin/pull/3604) +- Fix bug in witness stack getters [#3601](https://github.com/rust-bitcoin/rust-bitcoin/pull/3601) +- Split `checked_div_by_weight` into floor and ceiling version [#3587](https://github.com/rust-bitcoin/rust-bitcoin/pull/3587) +- script: remove `unsafe` marker from slice-to-script conversions [#3569](https://github.com/rust-bitcoin/rust-bitcoin/pull/3569) +- io: Bump version to `0.1.3` [#3566](https://github.com/rust-bitcoin/rust-bitcoin/pull/3566) +- Re-export `block::Header` as `BlockHeader` [#3562](https://github.com/rust-bitcoin/rust-bitcoin/pull/3562) +- Bump `hex-conservative` to `0.3.0` [#3543](https://github.com/rust-bitcoin/rust-bitcoin/pull/3543) 
+- Re-organise the `amount` module [#3541](https://github.com/rust-bitcoin/rust-bitcoin/pull/3541) +- Improve the `amount` module [#3539](https://github.com/rust-bitcoin/rust-bitcoin/pull/3539) +- base58: Close all errors [#3533](https://github.com/rust-bitcoin/rust-bitcoin/pull/3533) +- psbt: Fix bug in `Subtype` consensus_encode [#3519](https://github.com/rust-bitcoin/rust-bitcoin/pull/3519) +- Explicitly re-export stuff from crates down the stack [#3497](https://github.com/rust-bitcoin/rust-bitcoin/pull/3497) +- Expose `units::amount::ParseError` [#3496](https://github.com/rust-bitcoin/rust-bitcoin/pull/3496) +- Make `Amount::to_sat` and `SignedAmount::to_sat` const [#3493](https://github.com/rust-bitcoin/rust-bitcoin/pull/3493) +- Decode an address string based on prefix [#3481](https://github.com/rust-bitcoin/rust-bitcoin/pull/3481) +- Replace `ENABLE_RBF_NO_LOCKTIME` with `ENABLE_LOCKTIME_AND_RBF` [#3459](https://github.com/rust-bitcoin/rust-bitcoin/pull/3459) +- Add version three variant to transaction version [#3450](https://github.com/rust-bitcoin/rust-bitcoin/pull/3450) +- Input weight prediction helpers for nested P2WPKH [#3443](https://github.com/rust-bitcoin/rust-bitcoin/pull/3443) +- Clarify sequence constant name and add `FINAL` [#3439](https://github.com/rust-bitcoin/rust-bitcoin/pull/3439) +- Add checked div by weight to amount [#3430](https://github.com/rust-bitcoin/rust-bitcoin/pull/3430) +- Rename `Midstate::into_parts` to `Midstate::to_parts` since it derives `Copy` [#3429](https://github.com/rust-bitcoin/rust-bitcoin/pull/3429) +- Const locktime constructors [#3421](https://github.com/rust-bitcoin/rust-bitcoin/pull/3421) +- Fix script number overflow check for `push_int` [#3392](https://github.com/rust-bitcoin/rust-bitcoin/pull/3392) +- transaction: Remove `Default` implementations [#3386](https://github.com/rust-bitcoin/rust-bitcoin/pull/3386) +- Add `FeeRate` addition and subtraction traits [#3381](https://github.com/rust-bitcoin/rust-bitcoin/pull/3381) +- Add `Xpriv::to_xpub` and improve related method names [#3358](https://github.com/rust-bitcoin/rust-bitcoin/pull/3358) +- Support `impl AsRef<[u8]>` in `signed_msg_hash` [#3357](https://github.com/rust-bitcoin/rust-bitcoin/pull/3357) +- Fix `GetKey` for sets (plus some related changes) [#3356](https://github.com/rust-bitcoin/rust-bitcoin/pull/3356) +- Add a condition for parsing zero from string when not denominated [#3346](https://github.com/rust-bitcoin/rust-bitcoin/pull/3346) +- Add basic `miri` checks [#3328](https://github.com/rust-bitcoin/rust-bitcoin/pull/3328) +- Add coinbase associated consts [#3308](https://github.com/rust-bitcoin/rust-bitcoin/pull/3308) +- Fix bug in `ArrayVec::extend_from_slice` [#3272](https://github.com/rust-bitcoin/rust-bitcoin/pull/3272) +- Change `T::from_str(s)` to `s.parse::<T>()` in examples, docs and tests [#3262](https://github.com/rust-bitcoin/rust-bitcoin/pull/3262) +- Add `Arbitrary` to `Weight` [#3257](https://github.com/rust-bitcoin/rust-bitcoin/pull/3257) +- Bump `units` version [#3248](https://github.com/rust-bitcoin/rust-bitcoin/pull/3248) +- Rename key field in Key to key_data [#3048](https://github.com/rust-bitcoin/rust-bitcoin/pull/3048) +- Optimize `base58` on small inputs [#3002](https://github.com/rust-bitcoin/rust-bitcoin/pull/3002) +- Add `TxIdentifier` trait [#2987](https://github.com/rust-bitcoin/rust-bitcoin/pull/2987) +- Fix `Amount` decimals handling [#2951](https://github.com/rust-bitcoin/rust-bitcoin/pull/2951) +- `OP_RETURN` standardness check
[#2949](https://github.com/rust-bitcoin/rust-bitcoin/pull/2949) +- Support Testnet4 Network [#2945](https://github.com/rust-bitcoin/rust-bitcoin/pull/2945) +- Remove `VarInt` and use `ReadExt` and `WriteExt` trait methods instead [#2931](https://github.com/rust-bitcoin/rust-bitcoin/pull/2931) +- bip32: Add `From<&'a [u32]>` for `DerivationPath` [#2909](https://github.com/rust-bitcoin/rust-bitcoin/pull/2909) +- psbt: Encode keytype as a compact size unsigned integer [#2906](https://github.com/rust-bitcoin/rust-bitcoin/pull/2906) +- Pass sigs and associated types by value [#2899](https://github.com/rust-bitcoin/rust-bitcoin/pull/2899) +- Re-export `UnprefixedHexError` in the bitcoin crate root [#2895](https://github.com/rust-bitcoin/rust-bitcoin/pull/2895) +- taproot: Split errors up [#2886](https://github.com/rust-bitcoin/rust-bitcoin/pull/2886) +- Remove usage of `blockdata` from paths [#2885](https://github.com/rust-bitcoin/rust-bitcoin/pull/2885) +- Update `PushBytes::read_scriptint(x)` to `x.read_scriptint()` [#2872](https://github.com/rust-bitcoin/rust-bitcoin/pull/2872) +- Remove `Denomination::MilliSatoshi` [#2870](https://github.com/rust-bitcoin/rust-bitcoin/pull/2870) +- Pass keys by value [#2868](https://github.com/rust-bitcoin/rust-bitcoin/pull/2868) +- Clarify the meaning of `Height` & `Time` based locktime [#2858](https://github.com/rust-bitcoin/rust-bitcoin/pull/2858) +- Add API for extracting the inner payload of `RawNetworkMessage` [#2839](https://github.com/rust-bitcoin/rust-bitcoin/pull/2839) +- Update `bitcoinconsensus` version to `0.106.0+26` [#2833](https://github.com/rust-bitcoin/rust-bitcoin/pull/2833) +- Make `difficulty_float` general to all networks [#2816](https://github.com/rust-bitcoin/rust-bitcoin/pull/2816) +- Add const modifier to `Magic::from_bytes` [#2815](https://github.com/rust-bitcoin/rust-bitcoin/pull/2815) +- Add an `AddressData` type [#2808](https://github.com/rust-bitcoin/rust-bitcoin/pull/2808) +- Make `Address::p2sh_from_hash` public [#2795](https://github.com/rust-bitcoin/rust-bitcoin/pull/2795) +- Enable getting the witness program from an address [#2796](https://github.com/rust-bitcoin/rust-bitcoin/pull/2796) +- Enforce script size limit when hashing scripts [#2794](https://github.com/rust-bitcoin/rust-bitcoin/pull/2794) +- Deprecate `to_vec` in favour of `to_bytes` [#2768](https://github.com/rust-bitcoin/rust-bitcoin/pull/2768) +- Flesh out hex unit parsing API [#2765](https://github.com/rust-bitcoin/rust-bitcoin/pull/2765) +- Bench `base58` encoding and remove `SmallVec` to improve perf [#2759](https://github.com/rust-bitcoin/rust-bitcoin/pull/2759) +- Add difficulty adjustment calculation [#2740](https://github.com/rust-bitcoin/rust-bitcoin/pull/2740) +- Upgrade `base64` dependency [#2721](https://github.com/rust-bitcoin/rust-bitcoin/pull/2721) +- Some additional inspectors on `Script` and `Witness` [#2646](https://github.com/rust-bitcoin/rust-bitcoin/pull/2646) ## Upgrade notes @@ -14,6 +121,20 @@ - Change `TxIn::default()` to `TxIn::EMPTY_COINBASE` if appropriate. - Change `to_raw_hash()` to `to_byte_array()`. - `bitcoin::error::UnprefixedHexError` moved to `bitcoin::parse::UnprefixedHexError`.
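To make the upgrade notes concrete, here is a minimal, hypothetical migration sketch. It assumes the `bitcoin::{TxIn, Txid}` crate-root re-exports and the `0.33.0-alpha.0` API referred to above; it is not part of the changelog itself:

```rust
use bitcoin::{TxIn, Txid};

fn main() {
    // Old code called `to_raw_hash()` on hash-wrapper types such as `Txid`;
    // per the note above, call `to_byte_array()` to get the raw bytes instead.
    let txid = Txid::from_byte_array([0xFF; 32]); // Dummy value, as in examples/io.rs.
    let _bytes: [u8; 32] = txid.to_byte_array();

    // `Default` was removed from transaction types (#3386); code that used
    // `TxIn::default()` as a coinbase-style placeholder now names the constant.
    let _input: TxIn = TxIn::EMPTY_COINBASE;
}
```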
+# 0.32.5 - 2024-11-27 + +- Backport - Re-export `bech32` crate [#3662](https://github.com/rust-bitcoin/rust-bitcoin/pull/3662) +- Backport - Add API for extracting the inner payload of `RawNetworkMessage` [#3523](https://github.com/rust-bitcoin/rust-bitcoin/pull/3523) +- Backport - Fix bug in witness stack getters [#3626](https://github.com/rust-bitcoin/rust-bitcoin/pull/3626) +- Backport - address: Add `Address::into_unchecked` [#3655](https://github.com/rust-bitcoin/rust-bitcoin/pull/3655) + +# 0.32.4 - 2024-10-24 + +- Bound decode methods on `Read`, rather than `BufRead` [#3173](https://github.com/rust-bitcoin/rust-bitcoin/pull/3173) +- Backport - Some additional inspectors on `Script` and `Witness` [#2646](https://github.com/rust-bitcoin/rust-bitcoin/pull/2646) +- Backport - Add difficulty adjustment calculation [#3494](https://github.com/rust-bitcoin/rust-bitcoin/pull/3494) +- Backport - Add testnet 4 support [#3453](https://github.com/rust-bitcoin/rust-bitcoin/pull/3453) +- Backport - CI: Copy main workflow from master [#3418](https://github.com/rust-bitcoin/rust-bitcoin/pull/3418) # 0.32.3 - 2024-09-27 diff --git a/bitcoin/Cargo.toml b/bitcoin/Cargo.toml index c4ba294c44..7784efff71 100644 --- a/bitcoin/Cargo.toml +++ b/bitcoin/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "bitcoin" -version = "0.33.0-alpha" +version = "0.33.0-alpha.0" authors = ["Andrew Poelstra "] license = "CC0-1.0" repository = "https://github.com/rust-bitcoin/rust-bitcoin/" @@ -17,7 +17,7 @@ exclude = ["tests", "contrib"] [features] default = [ "std", "secp-recovery" ] std = ["base58/std", "bech32/std", "hashes/std", "hex/std", "internals/std", "io/std", "primitives/std", "secp256k1/std", "units/std", "bitcoinconsensus?/std"] -rand-std = ["secp256k1/rand-std", "std"] +rand-std = ["secp256k1/rand", "std"] rand = ["secp256k1/rand"] serde = ["dep:serde", "hashes/serde", "internals/serde", "primitives/serde", "secp256k1/serde", "units/serde"] secp-lowmemory = ["secp256k1/lowmemory"] @@ -25,21 +25,20 @@ secp-recovery = ["secp256k1/recovery"] arbitrary = ["dep:arbitrary", "units/arbitrary", "primitives/arbitrary"] [dependencies] -base58 = { package = "base58ck", version = "0.1.0", default-features = false, features = ["alloc"] } +base58 = { package = "base58ck", version = "0.2.0", default-features = false, features = ["alloc"] } bech32 = { version = "0.11.0", default-features = false, features = ["alloc"] } -hashes = { package = "bitcoin_hashes", version = "0.15.0", default-features = false, features = ["alloc", "bitcoin-io", "hex"] } +hashes = { package = "bitcoin_hashes", version = "0.16.0", default-features = false, features = ["alloc", "hex"] } hex = { package = "hex-conservative", version = "0.3.0", default-features = false, features = ["alloc"] } -internals = { package = "bitcoin-internals", version = "0.4.0", features = ["alloc"] } -io = { package = "bitcoin-io", version = "0.2.0", default-features = false, features = ["alloc"] } +internals = { package = "bitcoin-internals", version = "0.4.0", features = ["alloc", "hex"] } +io = { package = "bitcoin-io", version = "0.2.0", default-features = false, features = ["alloc", "hashes"] } primitives = { package = "bitcoin-primitives", version = "0.101.0", default-features = false, features = ["alloc"] } -secp256k1 = { version = "0.29.0", default-features = false, features = ["hashes", "alloc"] } +secp256k1 = { version = "0.30.0", default-features = false, features = ["hashes", "alloc", "rand"] } units = { package = "bitcoin-units", version = "0.2.0", default-features = 
false, features = ["alloc"] } -arbitrary = { version = "1.0.1", optional = true } +arbitrary = { version = "1.4", optional = true } base64 = { version = "0.22.0", optional = true } # `bitcoinconsensus` version includes metadata which indicates the version of Core. Use `cargo tree` to see it. bitcoinconsensus = { version = "0.106.0", default-features = false, optional = true } -ordered = { version = "0.2.0", optional = true } serde = { version = "1.0.103", default-features = false, features = [ "derive", "alloc" ], optional = true } [dev-dependencies] @@ -49,9 +48,6 @@ serde_test = "1.0.19" bincode = "1.3.1" hex_lit = "0.1.1" -[target.'cfg(mutate)'.dev-dependencies] -mutagen = { git = "https://github.com/llogiq/mutagen" } - [package.metadata.docs.rs] all-features = true rustdoc-args = ["--cfg", "docsrs"] @@ -71,6 +67,10 @@ required-features = ["std", "bitcoinconsensus"] name = "ecdsa-psbt-simple" required-features = ["rand-std"] +[[example]] +name = "create-p2wpkh-address" +required-features = ["rand-std"] + [[example]] name = "sign-tx-segwit-v0" required-features = ["rand-std"] @@ -90,5 +90,9 @@ required-features = ["rand-std"] [[example]] name = "sighash" +[[example]] +name = "io" +required-features = ["std"] + [lints.rust] -unexpected_cfgs = { level = "deny", check-cfg = ['cfg(bench)', 'cfg(fuzzing)', 'cfg(kani)', 'cfg(mutate)'] } +unexpected_cfgs = { level = "deny", check-cfg = ['cfg(bench)', 'cfg(fuzzing)', 'cfg(kani)'] } diff --git a/bitcoin/contrib/test_vars.sh b/bitcoin/contrib/test_vars.sh index c65af099da..f28ecdb33a 100644 --- a/bitcoin/contrib/test_vars.sh +++ b/bitcoin/contrib/test_vars.sh @@ -5,10 +5,10 @@ # shellcheck disable=SC2034 # Test all these features with "std" enabled. -FEATURES_WITH_STD="rand-std serde secp-recovery bitcoinconsensus base64 ordered arbitrary" +FEATURES_WITH_STD="rand-std serde secp-recovery bitcoinconsensus base64 arbitrary" # Test all these features without "std" or "alloc" enabled. -FEATURES_WITHOUT_STD="rand serde secp-recovery bitcoinconsensus base64 ordered arbitrary" +FEATURES_WITHOUT_STD="rand serde secp-recovery bitcoinconsensus base64 arbitrary" # Run these examples. EXAMPLES="ecdsa-psbt:std,bitcoinconsensus sign-tx-segwit-v0:rand-std sign-tx-taproot:rand-std taproot-psbt:bitcoinconsensus,rand-std sighash:std" diff --git a/bitcoin/examples/bip32.rs b/bitcoin/examples/bip32.rs index f2f36d4099..9df5ef6732 100644 --- a/bitcoin/examples/bip32.rs +++ b/bitcoin/examples/bip32.rs @@ -1,5 +1,3 @@ -extern crate bitcoin; - use std::{env, process}; use bitcoin::address::{Address, KnownHrp}; @@ -13,7 +11,7 @@ fn main() { // This example derives root xprv from a 32-byte seed, // derives the child xprv with path m/84h/0h/0h, // prints out corresponding xpub, - // calculates and prints out the first receiving segwit address. + // calculates and prints out the first receiving SegWit address. // Run this example with cargo and seed(hex-encoded) argument: // cargo run --example bip32 7934c09359b234e076b9fa5a1abfd38e3dc2a9939745b7cc3c22a48d831d14bd diff --git a/bitcoin/examples/create-p2wpkh-address.rs b/bitcoin/examples/create-p2wpkh-address.rs new file mode 100644 index 0000000000..b8ec758f82 --- /dev/null +++ b/bitcoin/examples/create-p2wpkh-address.rs @@ -0,0 +1,24 @@ +use bitcoin::secp256k1::{rand, Secp256k1}; +use bitcoin::{Address, CompressedPublicKey, Network, PrivateKey}; + +/// Generate a P2WPKH (pay-to-witness-public-key-hash) address and print it +/// along with the associated private key needed to transact. 
+fn main() { + // Create new secp256k1 instance. + let secp = Secp256k1::new(); + + // Generate secp256k1 public and private key pair. + let (secret_key, public_key) = secp.generate_keypair(&mut rand::thread_rng()); + + // Create a Bitcoin private key to be used on the Bitcoin mainnet. + let private_key = PrivateKey::new(secret_key, Network::Bitcoin); + + // Create a compressed Bitcoin public key from the secp256k1 public key. + let public_key = CompressedPublicKey(public_key); + + // Create a Bitcoin P2WPKH address. + let address = Address::p2wpkh(public_key, Network::Bitcoin); + + println!("Private Key: {}", private_key); + println!("Address: {}", address); +} diff --git a/bitcoin/examples/ecdsa-psbt-simple.rs b/bitcoin/examples/ecdsa-psbt-simple.rs index 4e2ca10855..ac5d84b43e 100644 --- a/bitcoin/examples/ecdsa-psbt-simple.rs +++ b/bitcoin/examples/ecdsa-psbt-simple.rs @@ -26,13 +26,14 @@ use std::collections::BTreeMap; use bitcoin::address::script_pubkey::ScriptBufExt as _; use bitcoin::bip32::{ChildNumber, DerivationPath, Fingerprint, IntoDerivationPath, Xpriv, Xpub}; +use bitcoin::key::WPubkeyHash; use bitcoin::locktime::absolute; use bitcoin::psbt::Input; use bitcoin::secp256k1::{Secp256k1, Signing}; use bitcoin::witness::WitnessExt as _; use bitcoin::{ consensus, transaction, Address, Amount, EcdsaSighashType, Network, OutPoint, Psbt, ScriptBuf, - Sequence, Transaction, TxIn, TxOut, Txid, WPubkeyHash, Witness, + Sequence, Transaction, TxIn, TxOut, Txid, Witness, }; // The master xpriv, from which we derive the keys we control. @@ -46,12 +47,12 @@ const BIP84_DERIVATION_PATH: &str = "m/84'/0'/0'"; const MASTER_FINGERPRINT: &str = "9680603f"; // The dummy UTXO amounts we are spending. -const DUMMY_UTXO_AMOUNT_INPUT_1: Amount = Amount::from_sat(20_000_000); -const DUMMY_UTXO_AMOUNT_INPUT_2: Amount = Amount::from_sat(10_000_000); +const DUMMY_UTXO_AMOUNT_INPUT_1: Amount = Amount::from_sat_u32(20_000_000); +const DUMMY_UTXO_AMOUNT_INPUT_2: Amount = Amount::from_sat_u32(10_000_000); // The amounts we are sending to someone, and receiving back as change. -const SPEND_AMOUNT: Amount = Amount::from_sat(25_000_000); -const CHANGE_AMOUNT: Amount = Amount::from_sat(4_990_000); // 10_000 sat fee. +const SPEND_AMOUNT: Amount = Amount::from_sat_u32(25_000_000); +const CHANGE_AMOUNT: Amount = Amount::from_sat_u32(4_990_000); // 10_000 sat fee. // Derive the external address xpriv. fn get_external_address_xpriv( diff --git a/bitcoin/examples/ecdsa-psbt.rs b/bitcoin/examples/ecdsa-psbt.rs index 54ae34767c..e6bbaf1e66 100644 --- a/bitcoin/examples/ecdsa-psbt.rs +++ b/bitcoin/examples/ecdsa-psbt.rs @@ -54,7 +54,7 @@ const INPUT_UTXO_TXID: &str = "295f06639cde6039bf0c3dbf4827f0e3f2b2c2b476408e2f9 const INPUT_UTXO_VOUT: u32 = 0; const INPUT_UTXO_SCRIPT_PUBKEY: &str = "00149891eeb8891b3e80a2a1ade180f143add23bf5de"; const INPUT_UTXO_VALUE: &str = "50 BTC"; -// Get this from the desciptor, +// Get this from the descriptor, // "wpkh([97f17dca/0'/0'/0']02749483607dafb30c66bd93ece4474be65745ce538c2d70e8e246f17e7a4e0c0c)#m9n56cx0". 
const INPUT_UTXO_DERIVATION_PATH: &str = "0h/0h/0h"; diff --git a/bitcoin/examples/handshake.rs b/bitcoin/examples/handshake.rs index 8cfa6a9e22..db5a4fb171 100644 --- a/bitcoin/examples/handshake.rs +++ b/bitcoin/examples/handshake.rs @@ -1,5 +1,3 @@ -extern crate bitcoin; - use std::io::{BufReader, Write}; use std::net::{IpAddr, Ipv4Addr, Shutdown, SocketAddr, TcpStream}; use std::time::{SystemTime, UNIX_EPOCH}; diff --git a/bitcoin/examples/io.rs b/bitcoin/examples/io.rs new file mode 100644 index 0000000000..812c6b1664 --- /dev/null +++ b/bitcoin/examples/io.rs @@ -0,0 +1,131 @@ +// SPDX-License-Identifier: CC0-1.0 + +//! Demonstrate reading and writing `rust-bitcoin` objects. +//! +//! The `std::io` module is not exposed in `no-std` Rust so building `no-std` applications which +//! require reading and writing objects via standard traits is not generally possible. To support +//! this we provide the `bitcoin_io` crate which provides `io::Read`, `io::BufRead`, and +//! `io::Write`. This module demonstrates its usage. + +use bitcoin::consensus::{Decodable, Encodable as _}; +use bitcoin::{OutPoint, Txid}; + +fn main() { + // Encode/Decode a `rust-bitcoin` type to/from a stdlib type. + encode_decode_from_stdlib_type(); + + // Encode to a custom type by implementing `bitcoin_io` traits. + encode_to_custom_type(); + + // Encode to a foreign custom type by using the `bitcoin_io::bridge::FromStd` wrapper. + encode_using_wrapper(); +} + +/// Encodes/Decodes a `rust-bitcoin` type to/from a stdlib type. +/// +/// The consensus encoding and decoding traits are generic over `bitcoin_io::Write` and +/// `bitcoin_io::Read`. However for various stdlib types we implement our traits so _most_ things +/// should just work. +fn encode_decode_from_stdlib_type() { + let data = dummy_utxo(); + + // A type that implements `std::io::Write`. + let mut v = Vec::new(); + + // Under the hood we implement our `io` traits for a bunch of stdlib types so this just works. + let _bytes_written = data.consensus_encode(&mut v).expect("failed to encode to writer"); + + // Slices implement `std::io::Read`. + let mut reader = v.as_ref(); + + let _: OutPoint = + Decodable::consensus_decode(&mut reader).expect("failed to decode from reader"); +} + +/// Encodes to a custom type by implementing the `bitcoin_io::Write` trait. +/// +/// To use the `Encodable` (and `Decodable`) traits you can implement the `bitcoin_io` traits. +fn encode_to_custom_type() { + /// A byte counter - counts how many bytes where written to it. + struct WriteCounter { + count: usize, + } + + /// This `io` is `bitcoin_io` - see `Cargo.toml` usage of `io = { package = "bitcoin-io" }`. + impl io::Write for WriteCounter { + fn write(&mut self, buf: &[u8]) -> Result { + let written = buf.len(); + self.count += written; + Ok(written) + } + fn write_all(&mut self, buf: &[u8]) -> Result<(), io::Error> { + self.count += buf.len(); + Ok(()) + } + fn flush(&mut self) -> Result<(), io::Error> { Ok(()) } + } + + let data = dummy_utxo(); + + let mut counter = WriteCounter { count: 0 }; + let bytes_written = data.consensus_encode(&mut counter).expect("failed to encode to writer"); + assert_eq!(bytes_written, 36); // 32 bytes for txid + 4 bytes for vout. +} + +/// Encodes to a custom type by using the `bitcoin_io::bridge` module. +/// +/// If you have a type that you don't control that implements `std::io::Write` you can still encode +/// to it by way of the `io::bridge::FromStd` wrapper. 
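For reference, the `bitcoin_io::Write` methods implemented by `WriteCounter` above return `Result<usize, io::Error>` from `write` and `Result<(), io::Error>` from `write_all` and `flush`. A self-contained sketch of such a counter, assuming the `bitcoin::io` re-export (the example itself depends on `bitcoin-io` directly under the name `io`):

use bitcoin::io;

/// Counts bytes without storing them.
struct ByteCounter {
    count: usize,
}

impl io::Write for ByteCounter {
    fn write(&mut self, buf: &[u8]) -> Result<usize, io::Error> {
        // Pretend every byte was written and remember how many.
        self.count += buf.len();
        Ok(buf.len())
    }

    fn write_all(&mut self, buf: &[u8]) -> Result<(), io::Error> {
        self.count += buf.len();
        Ok(())
    }

    fn flush(&mut self) -> Result<(), io::Error> { Ok(()) }
}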
+fn encode_using_wrapper() { + use pretend_this_is_some_other_crate::WriteCounter; + + let data = dummy_utxo(); + + // This will not build because `WriteCounter` does not implement `bitcoin_io::Write`. + // + // let mut counter = WriteCounter::new(); + // let bytes_written = data.consensus_encode(&mut counter)?; + + let mut counter = io::FromStd::new(WriteCounter::new()); + let bytes_written = data.consensus_encode(&mut counter).expect("failed to encode to writer"); + assert_eq!(bytes_written, 36); // 32 bytes for txid + 4 bytes for vout. + assert_eq!(bytes_written, counter.get_ref().written()); + + // Take back ownership of the `WriteCounter`. + let _ = counter.into_inner(); +} + +mod pretend_this_is_some_other_crate { + /// A byte counter - counts how many bytes where written to it. + pub struct WriteCounter { + count: usize, + } + + impl WriteCounter { + /// Constructs a new `WriteCounter`. + pub fn new() -> Self { Self { count: 0 } } + + /// Returns the number of bytes written to this counter. + pub fn written(&self) -> usize { self.count } + } + + impl std::io::Write for WriteCounter { + fn write(&mut self, buf: &[u8]) -> Result { + let written = buf.len(); + self.count += written; + Ok(written) + } + fn write_all(&mut self, buf: &[u8]) -> Result<(), std::io::Error> { + self.count += buf.len(); + Ok(()) + } + fn flush(&mut self) -> Result<(), std::io::Error> { Ok(()) } + } +} + +/// Constructs a dummy UTXO that is just to represent some `rust-bitcoin` type that implements the +/// [`consensus::Encodable`] and [`consensus::Decodable`] traits. +fn dummy_utxo() -> OutPoint { + let txid = Txid::from_byte_array([0xFF; 32]); // Arbitrary invalid dummy value. + OutPoint { txid, vout: 1 } +} diff --git a/bitcoin/examples/sighash.rs b/bitcoin/examples/sighash.rs index 18e63ea6a2..1055a52f7e 100644 --- a/bitcoin/examples/sighash.rs +++ b/bitcoin/examples/sighash.rs @@ -13,14 +13,14 @@ use hex_lit::hex; //run with: cargo run --example sighash -/// Computes segwit sighash for a transaction input that spends a p2wpkh output with "witness_v0_keyhash" scriptPubKey.type +/// Computes SegWit sighash for a transaction input that spends a p2wpkh output with "witness_v0_keyhash" scriptPubKey.type /// /// # Parameters /// /// * `raw_tx` - spending tx hex /// * `inp_idx` - spending tx input index -/// * `value` - ref tx output value in sats -fn compute_sighash_p2wpkh(raw_tx: &[u8], inp_idx: usize, value: u64) { +/// * `amount` - ref tx output value in sats +fn compute_sighash_p2wpkh(raw_tx: &[u8], inp_idx: usize, amount: Amount) { let tx: Transaction = consensus::deserialize(raw_tx).unwrap(); let inp = &tx.input[inp_idx]; let witness = &inp.witness; @@ -29,8 +29,8 @@ fn compute_sighash_p2wpkh(raw_tx: &[u8], inp_idx: usize, value: u64) { // BIP-141: The witness must consist of exactly 2 items (≤ 520 bytes each). The first one a // signature, and the second one a public key. 
assert_eq!(witness.len(), 2); - let sig_bytes = witness.nth(0).unwrap(); - let pk_bytes = witness.nth(1).unwrap(); + let sig_bytes = witness.get(0).unwrap(); + let pk_bytes = witness.get(1).unwrap(); let sig = ecdsa::Signature::from_slice(sig_bytes).expect("failed to parse sig"); @@ -43,9 +43,9 @@ fn compute_sighash_p2wpkh(raw_tx: &[u8], inp_idx: usize, value: u64) { let mut cache = sighash::SighashCache::new(&tx); let sighash = cache - .p2wpkh_signature_hash(inp_idx, &spk, Amount::from_sat(value), sig.sighash_type) + .p2wpkh_signature_hash(inp_idx, &spk, amount, sig.sighash_type) .expect("failed to compute sighash"); - println!("Segwit p2wpkh sighash: {:x}", sighash); + println!("SegWit p2wpkh sighash: {:x}", sighash); let msg = secp256k1::Message::from(sighash); println!("Message is {:x}", msg); let secp = secp256k1::Secp256k1::verification_only(); @@ -57,7 +57,7 @@ fn compute_sighash_p2wpkh(raw_tx: &[u8], inp_idx: usize, value: u64) { /// # Parameters /// /// * `raw_tx` - spending tx hex -/// * `inp_idx` - spending tx input inde +/// * `inp_idx` - spending tx input index /// * `script_pubkey_bytes_opt` - Option with scriptPubKey bytes. If None, it's p2sh case, i.e., reftx output's scriptPubKey.type is "scripthash". In this case scriptPubkey is extracted from the spending transaction's scriptSig. If Some(), it's p2ms case, i.e., reftx output's scriptPubKey.type is "multisig", and the scriptPubkey is supplied from the referenced output. fn compute_sighash_legacy(raw_tx: &[u8], inp_idx: usize, script_pubkey_bytes_opt: Option<&[u8]>) { let tx: Transaction = consensus::deserialize(raw_tx).unwrap(); @@ -98,14 +98,14 @@ fn compute_sighash_legacy(raw_tx: &[u8], inp_idx: usize, script_pubkey_bytes_opt } } -/// Computes sighash for a segwit multisig transaction input that spends a p2wsh output with "witness_v0_scripthash" scriptPubKey.type +/// Computes sighash for a SegWit multisig transaction input that spends a p2wsh output with "witness_v0_scripthash" scriptPubKey.type /// /// # Parameters /// /// * `raw_tx` - spending tx hex /// * `inp_idx` - spending tx input index -/// * `value` - ref tx output value in sats -fn compute_sighash_p2wsh(raw_tx: &[u8], inp_idx: usize, value: u64) { +/// * `amount` - ref tx output value in sats +fn compute_sighash_p2wsh(raw_tx: &[u8], inp_idx: usize, amount: Amount) { let tx: Transaction = consensus::deserialize(raw_tx).unwrap(); let inp = &tx.input[inp_idx]; let witness = &inp.witness; @@ -118,21 +118,16 @@ fn compute_sighash_p2wsh(raw_tx: &[u8], inp_idx: usize, value: u64) { //in an M of N multisig, the witness elements from 1 (0-based) to M-2 are signatures (with sighash flags as the last byte) for n in 1..=witness.len() - 2 { - let sig_bytes = witness.nth(n).expect("out of bounds"); + let sig_bytes = witness.get(n).expect("out of bounds"); let sig = ecdsa::Signature::from_slice(sig_bytes).expect("failed to parse sig"); let sig_len = sig_bytes.len() - 1; //last byte is EcdsaSighashType sighash flag //ECDSA signature in DER format lengths are between 70 and 72 bytes assert!((70..=72).contains(&sig_len), "signature length {} out of bounds", sig_len); //here we assume that all sighash_flags are the same. Can they be different? 
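The sighash helpers in this example now take `Amount` instead of a raw `u64` of satoshis, so callers build the value up front. A minimal sketch of the constructor they use; `from_sat_u32` is infallible because even `u32::MAX` satoshis (roughly 42.9 BTC) is far below the 21 million BTC cap:

use bitcoin::Amount;

const REF_OUT_VALUE: Amount = Amount::from_sat_u32(200_000_000); // 2 BTC, as in the p2wpkh test vector below.

fn main() {
    assert_eq!(REF_OUT_VALUE.to_sat(), 200_000_000);
}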
let sighash = cache - .p2wsh_signature_hash( - inp_idx, - witness_script, - Amount::from_sat(value), - sig.sighash_type, - ) + .p2wsh_signature_hash(inp_idx, witness_script, amount, sig.sighash_type) .expect("failed to compute sighash"); - println!("Segwit p2wsh sighash: {:x} ({})", sighash, sig.sighash_type); + println!("SegWit p2wsh sighash: {:x} ({})", sighash, sig.sighash_type); } } @@ -153,14 +148,14 @@ fn sighash_p2wpkh() { let inp_idx = 0; //output value from the referenced vout:0 from the referenced tx: //bitcoin-cli getrawtransaction 752d675b9cc0bd14e0bd23969effee0005ad6d7e550dcc832f0216c7ffd4e15c 3 - let ref_out_value = 200000000; + let ref_out_value = Amount::from_sat_u32(200000000); println!("\nsighash_p2wpkh:"); compute_sighash_p2wpkh(&raw_tx, inp_idx, ref_out_value); } fn sighash_p2sh_multisig_2x2() { - //Spending transactoin: + //Spending transaction: //bitcoin-cli getrawtransaction 214646c4b563cd8c788754ec94468ab71602f5ed07d5e976a2b0e41a413bcc0e 3 //after decoding ScriptSig from the input:0, its last ASM element is the scriptpubkey: //bitcoin-cli decodescript 5221032d7306898e980c66aefdfb6b377eaf71597c449bf9ce741a3380c5646354f6de2103e8c742e1f283ef810c1cd0c8875e5c2998a05fc5b23c30160d3d33add7af565752ae @@ -183,7 +178,7 @@ fn sighash_p2wsh_multisig_2x2() { //For the witness transaction sighash computation, we need its referenced output's value from the original transaction: //bitcoin-cli getrawtransaction 2845399a8cd7a52733f9f9d0e0b8b6c5d1c88aea4cee09f8d8fa762912b49e1b 3 //we need vout 0 value in sats: - let ref_out_value = 968240; + let ref_out_value = Amount::from_sat_u32(968240); println!("\nsighash_p2wsh_multisig_2x2:"); compute_sighash_p2wsh(&raw_tx, 0, ref_out_value); diff --git a/bitcoin/examples/sign-tx-segwit-v0.rs b/bitcoin/examples/sign-tx-segwit-v0.rs index d3ab610301..b163a24b08 100644 --- a/bitcoin/examples/sign-tx-segwit-v0.rs +++ b/bitcoin/examples/sign-tx-segwit-v0.rs @@ -3,18 +3,19 @@ //! Demonstrate creating a transaction that spends to and from p2wpkh outputs. use bitcoin::address::script_pubkey::ScriptBufExt as _; +use bitcoin::key::WPubkeyHash; use bitcoin::locktime::absolute; use bitcoin::secp256k1::{rand, Message, Secp256k1, SecretKey, Signing}; use bitcoin::sighash::{EcdsaSighashType, SighashCache}; use bitcoin::witness::WitnessExt as _; use bitcoin::{ transaction, Address, Amount, Network, OutPoint, ScriptBuf, Sequence, Transaction, TxIn, TxOut, - Txid, WPubkeyHash, Witness, + Txid, Witness, }; -const DUMMY_UTXO_AMOUNT: Amount = Amount::from_sat(20_000_000); -const SPEND_AMOUNT: Amount = Amount::from_sat(5_000_000); -const CHANGE_AMOUNT: Amount = Amount::from_sat(14_999_000); // 1000 sat fee. +const DUMMY_UTXO_AMOUNT: Amount = Amount::from_sat_u32(20_000_000); +const SPEND_AMOUNT: Amount = Amount::from_sat_u32(5_000_000); +const CHANGE_AMOUNT: Amount = Amount::from_sat_u32(14_999_000); // 1000 sat fee. fn main() { let secp = Secp256k1::new(); diff --git a/bitcoin/examples/sign-tx-taproot.rs b/bitcoin/examples/sign-tx-taproot.rs index b7e22aa4d8..06a3fab162 100644 --- a/bitcoin/examples/sign-tx-taproot.rs +++ b/bitcoin/examples/sign-tx-taproot.rs @@ -13,9 +13,9 @@ use bitcoin::{ Txid, Witness, }; -const DUMMY_UTXO_AMOUNT: Amount = Amount::from_sat(20_000_000); -const SPEND_AMOUNT: Amount = Amount::from_sat(5_000_000); -const CHANGE_AMOUNT: Amount = Amount::from_sat(14_999_000); // 1000 sat fee. 
+const DUMMY_UTXO_AMOUNT: Amount = Amount::from_sat_u32(20_000_000); +const SPEND_AMOUNT: Amount = Amount::from_sat_u32(5_000_000); +const CHANGE_AMOUNT: Amount = Amount::from_sat_u32(14_999_000); // 1000 sat fee. fn main() { let secp = Secp256k1::new(); @@ -71,7 +71,7 @@ fn main() { // Sign the sighash using the secp256k1 library (exported by rust-bitcoin). let tweaked: TweakedKeypair = keypair.tap_tweak(&secp, None); let msg = Message::from(sighash); - let signature = secp.sign_schnorr(&msg, &tweaked.to_inner()); + let signature = secp.sign_schnorr(msg.as_ref(), &tweaked.to_inner()); // Update the witness stack. let signature = bitcoin::taproot::Signature { signature, sighash_type }; diff --git a/bitcoin/examples/taproot-psbt-simple.rs b/bitcoin/examples/taproot-psbt-simple.rs index 372c1a9693..8181573a27 100644 --- a/bitcoin/examples/taproot-psbt-simple.rs +++ b/bitcoin/examples/taproot-psbt-simple.rs @@ -45,12 +45,12 @@ const BIP86_DERIVATION_PATH: &str = "m/86'/0'/0'"; const MASTER_FINGERPRINT: &str = "9680603f"; // The dummy UTXO amounts we are spending. -const DUMMY_UTXO_AMOUNT_INPUT_1: Amount = Amount::from_sat(20_000_000); -const DUMMY_UTXO_AMOUNT_INPUT_2: Amount = Amount::from_sat(10_000_000); +const DUMMY_UTXO_AMOUNT_INPUT_1: Amount = Amount::from_sat_u32(20_000_000); +const DUMMY_UTXO_AMOUNT_INPUT_2: Amount = Amount::from_sat_u32(10_000_000); // The amounts we are sending to someone, and receiving back as change. -const SPEND_AMOUNT: Amount = Amount::from_sat(25_000_000); -const CHANGE_AMOUNT: Amount = Amount::from_sat(4_990_000); // 10_000 sat fee. +const SPEND_AMOUNT: Amount = Amount::from_sat_u32(25_000_000); +const CHANGE_AMOUNT: Amount = Amount::from_sat_u32(4_990_000); // 10_000 sat fee. // Derive the external address xpriv. fn get_external_address_xpriv( diff --git a/bitcoin/examples/taproot-psbt.rs b/bitcoin/examples/taproot-psbt.rs index 4a69d718fd..92af851561 100644 --- a/bitcoin/examples/taproot-psbt.rs +++ b/bitcoin/examples/taproot-psbt.rs @@ -40,7 +40,7 @@ const UTXO_SCRIPT_PUBKEY: &str = "5120be27fa8b1f5278faf82cab8da23e8761f8f9bd5d5ebebbb37e0e12a70d92dd16"; const UTXO_PUBKEY: &str = "a6ac32163539c16b6b5dbbca01b725b8e8acaa5f821ba42c80e7940062140d19"; const UTXO_MASTER_FINGERPRINT: &str = "e61b318f"; -const ABSOLUTE_FEES_IN_SATS: Amount = Amount::from_sat(1_000); +const ABSOLUTE_FEES: Amount = Amount::from_sat_u32(1_000); // UTXO_1 will be used for spending example 1 const UTXO_1: P2trUtxo = P2trUtxo { @@ -49,7 +49,7 @@ const UTXO_1: P2trUtxo = P2trUtxo { script_pubkey: UTXO_SCRIPT_PUBKEY, pubkey: UTXO_PUBKEY, master_fingerprint: UTXO_MASTER_FINGERPRINT, - amount_in_sats: Amount::from_int_btc_const(50), + amount: Amount::FIFTY_BTC, derivation_path: BIP86_DERIVATION_PATH, }; @@ -60,7 +60,7 @@ const UTXO_2: P2trUtxo = P2trUtxo { script_pubkey: UTXO_SCRIPT_PUBKEY, pubkey: UTXO_PUBKEY, master_fingerprint: UTXO_MASTER_FINGERPRINT, - amount_in_sats: Amount::from_int_btc_const(50), + amount: Amount::FIFTY_BTC, derivation_path: BIP86_DERIVATION_PATH, }; @@ -71,7 +71,7 @@ const UTXO_3: P2trUtxo = P2trUtxo { script_pubkey: UTXO_SCRIPT_PUBKEY, pubkey: UTXO_PUBKEY, master_fingerprint: UTXO_MASTER_FINGERPRINT, - amount_in_sats: Amount::from_int_btc_const(50), + amount: Amount::FIFTY_BTC, derivation_path: BIP86_DERIVATION_PATH, }; @@ -106,11 +106,11 @@ fn main() -> Result<(), Box> { let change_address = "bcrt1pz449kexzydh2kaypatup5ultru3ej284t6eguhnkn6wkhswt0l7q3a7j76" .parse::>()? 
.require_network(Network::Regtest)?; - let amount_to_send_in_sats = Amount::ONE_BTC; + let amount_to_send = Amount::ONE_BTC; let change_amount = UTXO_1 - .amount_in_sats - .checked_sub(amount_to_send_in_sats) - .and_then(|x| x.checked_sub(ABSOLUTE_FEES_IN_SATS)) + .amount + .checked_sub(amount_to_send) + .and_then(|x| x.checked_sub(ABSOLUTE_FEES)) .ok_or("fees more than input amount!")?; let tx_hex_string = encode::serialize_hex(&generate_bip86_key_spend_tx( @@ -120,7 +120,7 @@ fn main() -> Result<(), Box> { // Set these fields with valid data for the UTXO from step 5 above UTXO_1, vec![ - TxOut { value: amount_to_send_in_sats, script_pubkey: to_address.script_pubkey() }, + TxOut { value: amount_to_send, script_pubkey: to_address.script_pubkey() }, TxOut { value: change_amount, script_pubkey: change_address.script_pubkey() }, ], )?); @@ -215,7 +215,7 @@ struct P2trUtxo<'a> { script_pubkey: &'a str, pubkey: &'a str, master_fingerprint: &'a str, - amount_in_sats: Amount, + amount: Amount, derivation_path: &'a str, } @@ -226,7 +226,7 @@ fn generate_bip86_key_spend_tx( input_utxo: P2trUtxo, outputs: Vec, ) -> Result> { - let from_amount = input_utxo.amount_in_sats; + let from_amount = input_utxo.amount; let input_pubkey = input_utxo.pubkey.parse::()?; // CREATOR + UPDATER @@ -274,7 +274,7 @@ fn generate_bip86_key_spend_tx( let mut input_txouts = Vec::::new(); for input in [&input_utxo].iter() { input_txouts.push(TxOut { - value: input.amount_in_sats, + value: input.amount, script_pubkey: ScriptBuf::from_hex(input.script_pubkey)?, }); } @@ -412,7 +412,8 @@ impl BenefactorWallet { taproot_spend_info.internal_key(), taproot_spend_info.merkle_root(), ); - let value = input_utxo.amount_in_sats - ABSOLUTE_FEES_IN_SATS; + let value = (input_utxo.amount - ABSOLUTE_FEES) + .expect("ABSOLUTE_FEES must be set below input amount"); // Spend a normal BIP86-like output as an input in our inheritance funding transaction let tx = generate_bip86_key_spend_tx( @@ -476,7 +477,7 @@ impl BenefactorWallet { let mut psbt = self.next_psbt.clone().expect("should have next_psbt"); let input = &mut psbt.inputs[0]; let input_value = input.witness_utxo.as_ref().unwrap().value; - let output_value = input_value - ABSOLUTE_FEES_IN_SATS; + let output_value = (input_value - ABSOLUTE_FEES).into_result()?; // We use some other derivation path in this example for our inheritance protocol. The important thing is to ensure // that we use an unhardened path so we can make use of xpubs. @@ -649,7 +650,8 @@ impl BeneficiaryWallet { psbt.unsigned_tx.lock_time = lock_time; psbt.unsigned_tx.output = vec![TxOut { script_pubkey: to_address.script_pubkey(), - value: input_value - ABSOLUTE_FEES_IN_SATS, + value: (input_value - ABSOLUTE_FEES) + .expect("ABSOLUTE_FEES must be set below input amount"), }]; psbt.outputs = vec![Output::default()]; let unsigned_tx = psbt.unsigned_tx.clone(); @@ -747,7 +749,7 @@ fn sign_psbt_taproot( }; let msg = secp256k1::Message::from(hash); - let signature = secp.sign_schnorr(&msg, &keypair); + let signature = secp.sign_schnorr(msg.as_ref(), &keypair); let final_signature = taproot::Signature { signature, sighash_type }; diff --git a/bitcoin/src/address/error.rs b/bitcoin/src/address/error.rs index b972f7b124..c174710994 100644 --- a/bitcoin/src/address/error.rs +++ b/bitcoin/src/address/error.rs @@ -1,5 +1,6 @@ //! Error code for the address module. 
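The hunks that follow, and the matching ones later in bip152, bip158 and bip32, replace the internal `impl_from_infallible!` macro with hand-written conversions from `core::convert::Infallible`. The pattern, written out on an illustrative error type:

use core::convert::Infallible;

#[derive(Debug)]
enum MyError {
    Oops,
}

impl From<Infallible> for MyError {
    // `Infallible` has no values, so this match is vacuously exhaustive and the
    // conversion can never actually be invoked.
    fn from(never: Infallible) -> Self { match never {} }
}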
+use core::convert::Infallible; use core::fmt; use internals::write_err; @@ -21,7 +22,9 @@ pub enum FromScriptError { WitnessVersion(witness_version::TryFromError), } -internals::impl_from_infallible!(FromScriptError); +impl From for FromScriptError { + fn from(never: Infallible) -> Self { match never {} } +} impl fmt::Display for FromScriptError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { @@ -78,13 +81,15 @@ impl std::error::Error for UnknownAddressTypeError { pub enum ParseError { /// Base58 legacy decoding error. Base58(Base58Error), - /// Bech32 segwit decoding error. + /// Bech32 SegWit decoding error. Bech32(Bech32Error), /// Address's network differs from required one. NetworkValidation(NetworkValidationError), } -internals::impl_from_infallible!(ParseError); +impl From for ParseError { + fn from(never: Infallible) -> Self { match never {} } +} impl fmt::Display for ParseError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { @@ -165,7 +170,7 @@ impl std::error::Error for NetworkValidationError {} #[derive(Debug, Clone, PartialEq, Eq)] #[non_exhaustive] pub enum Bech32Error { - /// Parse segwit Bech32 error. + /// Parse SegWit Bech32 error. ParseBech32(ParseBech32Error), /// A witness version conversion/parsing error. WitnessVersion(witness_version::TryFromError), @@ -175,14 +180,16 @@ pub enum Bech32Error { UnknownHrp(UnknownHrpError), } -internals::impl_from_infallible!(Bech32Error); +impl From for Bech32Error { + fn from(never: Infallible) -> Self { match never {} } +} impl fmt::Display for Bech32Error { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { use Bech32Error::*; match *self { - ParseBech32(ref e) => write_err!(f, "segwit parsing error"; e), + ParseBech32(ref e) => write_err!(f, "SegWit parsing error"; e), WitnessVersion(ref e) => write_err!(f, "witness version conversion/parsing error"; e), WitnessProgram(ref e) => write_err!(f, "witness program error"; e), UnknownHrp(ref e) => write_err!(f, "unknown hrp error"; e), @@ -221,7 +228,9 @@ impl From for Bech32Error { #[derive(Debug, Clone, PartialEq, Eq)] pub struct ParseBech32Error(pub(crate) bech32::segwit::DecodeError); -internals::impl_from_infallible!(ParseBech32Error); +impl From for ParseBech32Error { + fn from(never: Infallible) -> Self { match never {} } +} impl fmt::Display for ParseBech32Error { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { @@ -248,7 +257,9 @@ pub enum Base58Error { InvalidLegacyPrefix(InvalidLegacyPrefixError), } -internals::impl_from_infallible!(Base58Error); +impl From for Base58Error { + fn from(never: Infallible) -> Self { match never {} } +} impl fmt::Display for Base58Error { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { @@ -323,7 +334,11 @@ pub struct LegacyAddressTooLongError { impl LegacyAddressTooLongError { /// Returns the invalid legacy address length. - pub fn invalid_legcay_address_length(&self) -> usize { self.length } + pub fn invalid_legacy_address_length(&self) -> usize { self.length } + + #[doc(hidden)] + #[deprecated = "Use invalid_legacy_address_length() instead"] + pub fn invalid_legcay_address_length(&self) -> usize { self.invalid_legacy_address_length() } } impl fmt::Display for LegacyAddressTooLongError { diff --git a/bitcoin/src/address/mod.rs b/bitcoin/src/address/mod.rs index c7053f6674..93bc290d87 100644 --- a/bitcoin/src/address/mod.rs +++ b/bitcoin/src/address/mod.rs @@ -2,28 +2,41 @@ //! Bitcoin addresses. //! -//! Support for ordinary base58 Bitcoin addresses and private keys. +//! 
Support for segwit and legacy addresses (bech32 and base58 respectively). //! -//! # Example: creating a new address from a randomly-generated key pair +//! # Examples +//! +//! ### Creating a new address from a randomly-generated key pair. //! //! ```rust -//! # #[cfg(feature = "rand-std")] { -//! use bitcoin::{Address, PublicKey, Network}; +//! #[cfg(feature = "rand-std")] { //! use bitcoin::secp256k1::{rand, Secp256k1}; +//! use bitcoin::{Address, Network, PublicKey}; //! //! // Generate random key pair. -//! let s = Secp256k1::new(); -//! let public_key = PublicKey::new(s.generate_keypair(&mut rand::thread_rng()).1); +//! let secp = Secp256k1::new(); +//! let (_sk, pk) = secp.generate_keypair(&mut rand::thread_rng()); +//! let public_key = PublicKey::new(pk); // Or `PublicKey::from(pk)`. //! -//! // Generate pay-to-pubkey-hash address. +//! // Generate a mainnet pay-to-pubkey-hash address. //! let address = Address::p2pkh(&public_key, Network::Bitcoin); -//! # } +//! } //! ``` //! -//! # Note: creating a new address requires the rand-std feature flag +//! ### Using an `Address` as a struct field. //! -//! ```toml -//! bitcoin = { version = "...", features = ["rand-std"] } +//! ```rust +//! # #[cfg(feature = "serde")] { +//! # use serde::{self, Deserialize, Serialize}; +//! use bitcoin::address::{Address, NetworkValidation, NetworkValidationUnchecked}; +//! #[derive(Serialize, Deserialize)] +//! struct Foo +//! where V: NetworkValidation, +//! { +//! #[serde(bound(deserialize = "V: NetworkValidationUnchecked"))] +//! address: Address, +//! } +//! # } //! ``` pub mod error; @@ -36,6 +49,7 @@ use core::str::FromStr; use bech32::primitives::gf32::Fe32; use bech32::primitives::hrp::Hrp; use hashes::{hash160, HashEngine}; +use internals::array::ArrayExt; use secp256k1::{Secp256k1, Verification, XOnlyPublicKey}; use crate::address::script_pubkey::ScriptBufExt as _; @@ -78,6 +92,8 @@ pub enum AddressType { P2wsh, /// Pay to taproot. P2tr, + /// Pay to anchor. + P2a, } impl fmt::Display for AddressType { @@ -88,6 +104,7 @@ impl fmt::Display for AddressType { AddressType::P2wpkh => "p2wpkh", AddressType::P2wsh => "p2wsh", AddressType::P2tr => "p2tr", + AddressType::P2a => "p2a", }) } } @@ -101,6 +118,7 @@ impl FromStr for AddressType { "p2wpkh" => Ok(AddressType::P2wpkh), "p2wsh" => Ok(AddressType::P2wsh), "p2tr" => Ok(AddressType::P2tr), + "p2a" => Ok(AddressType::P2a), _ => Err(UnknownAddressTypeError(s.to_owned())), } } @@ -110,6 +128,9 @@ mod sealed { pub trait NetworkValidation {} impl NetworkValidation for super::NetworkChecked {} impl NetworkValidation for super::NetworkUnchecked {} + + pub trait NetworkValidationUnchecked {} + impl NetworkValidationUnchecked for super::NetworkUnchecked {} } /// Marker of status of address's network validation. See section [*Parsing addresses*](Address#parsing-addresses) @@ -119,14 +140,25 @@ pub trait NetworkValidation: sealed::NetworkValidation + Sync + Send + Sized + U const IS_CHECKED: bool; } +/// Marker trait for `FromStr` and `serde::Deserialize`. +/// +/// This allows users to use `V: NetworkValidation` in conjunction with derives. Is only ever +/// implemented for `NetworkUnchecked`. +pub trait NetworkValidationUnchecked: + NetworkValidation + sealed::NetworkValidationUnchecked + Sync + Send + Sized + Unpin +{ +} + /// Marker that address's network has been successfully validated. See section [*Parsing addresses*](Address#parsing-addresses) /// on [`Address`] for details. 
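The new module-level doc example and the `NetworkValidationUnchecked` marker trait above exist so that `Address` can be used as a struct field while staying generic over network validation. Spelled out with its type parameters, mirroring the serde test added at the bottom of this module (struct name illustrative):

use bitcoin::address::{Address, NetworkValidation, NetworkValidationUnchecked};
use serde::{Deserialize, Serialize};

#[derive(Serialize, Deserialize)]
struct Wallet<V>
where
    V: NetworkValidation,
{
    // Deserializing is only ever possible into the unchecked state, hence the tighter bound.
    #[serde(bound(deserialize = "V: NetworkValidationUnchecked"))]
    address: Address<V>,
}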
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] +#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] pub enum NetworkChecked {} /// Marker that address's network has not yet been validated. See section [*Parsing addresses*](Address#parsing-addresses) /// on [`Address`] for details. #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] +#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] pub enum NetworkUnchecked {} impl NetworkValidation for NetworkChecked { @@ -136,6 +168,8 @@ impl NetworkValidation for NetworkUnchecked { const IS_CHECKED: bool = false; } +impl NetworkValidationUnchecked for NetworkUnchecked {} + /// The inner representation of an address, without the network validation tag. /// /// This struct represents the inner representation of an address without the network validation @@ -240,6 +274,16 @@ impl From for KnownHrp { fn from(n: Network) -> Self { Self::from_network(n) } } +impl From for NetworkKind { + fn from(hrp: KnownHrp) -> Self { + match hrp { + KnownHrp::Mainnet => NetworkKind::Main, + KnownHrp::Testnets => NetworkKind::Test, + KnownHrp::Regtest => NetworkKind::Test, + } + } +} + /// The data encoded by an `Address`. /// /// This is the data used to encumber an output that pays to this address i.e., it is the address @@ -257,7 +301,7 @@ pub enum AddressData { /// The script hash used to encumber outputs to this address. script_hash: ScriptHash, }, - /// Data encoded by a Segwit address. + /// Data encoded by a SegWit address. Segwit { /// The witness program used to encumber outputs to this address. witness_program: WitnessProgram, @@ -364,10 +408,41 @@ impl fmt::Display for DisplayUnchecked<'_, N> { } #[cfg(feature = "serde")] -internals::serde_string_deserialize_impl!(Address, "a Bitcoin address"); +impl<'de, U: NetworkValidationUnchecked> serde::Deserialize<'de> for Address { + fn deserialize(deserializer: D) -> Result, D::Error> + where + D: serde::de::Deserializer<'de>, + { + use core::fmt::Formatter; + + struct Visitor(PhantomData); + impl serde::de::Visitor<'_> for Visitor + where + U: NetworkValidationUnchecked + NetworkValidation, + Address: FromStr, + { + type Value = Address; + + fn expecting(&self, f: &mut Formatter) -> core::fmt::Result { + f.write_str("A Bitcoin address") + } + + fn visit_str(self, v: &str) -> core::result::Result + where + E: serde::de::Error, + { + // We know that `U` is only ever `NetworkUnchecked` but the compiler does not. + let address = v.parse::>().map_err(E::custom)?; + Ok(Address(address.0, PhantomData::)) + } + } + + deserializer.deserialize_str(Visitor(PhantomData::)) + } +} #[cfg(feature = "serde")] -impl serde::Serialize for Address { +impl serde::Serialize for Address { fn serialize(&self, serializer: S) -> Result where S: serde::Serializer, @@ -383,6 +458,19 @@ impl Address { pub fn as_unchecked(&self) -> &Address { unsafe { &*(self as *const Address as *const Address) } } + + /// Marks the network of this address as unchecked. + pub fn into_unchecked(self) -> Address { Address(self.0, PhantomData) } + + /// Returns the [`NetworkKind`] of this address. + pub fn network_kind(&self) -> NetworkKind { + use AddressInner::*; + match self.0 { + P2pkh { hash: _, ref network } => *network, + P2sh { hash: _, ref network } => *network, + Segwit { program: _, ref hrp } => NetworkKind::from(*hrp), + } + } } /// Methods and functions that can be called only on `Address`. 
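The `into_unchecked` and `network_kind` helpers added above complement the existing checking flow. A short usage sketch; the address string is one of the vectors used in the tests further down:

use bitcoin::address::{Address, NetworkUnchecked};
use bitcoin::{Network, NetworkKind};

fn main() {
    let unchecked: Address<NetworkUnchecked> = "bc1qhvd6suvqzjcu9pxjhrwhtrlj85ny3n2mqql5w4"
        .parse()
        .expect("valid bech32 address");

    // Prove the network before using the address.
    let checked = unchecked.require_network(Network::Bitcoin).expect("mainnet address");
    assert_eq!(checked.network_kind(), NetworkKind::Main);

    // Drop back to the unchecked state, e.g. before storing the address in a config file.
    let _unchecked_again = checked.into_unchecked();
}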
@@ -421,7 +509,7 @@ impl Address { /// Constructs a new pay-to-witness-public-key-hash (P2WPKH) [`Address`] from a public key. /// - /// This is the native segwit address type for an output redeemable with a single signature. + /// This is the native SegWit address type for an output redeemable with a single signature. pub fn p2wpkh(pk: CompressedPublicKey, hrp: impl Into) -> Self { let program = WitnessProgram::p2wpkh(pk); Address::from_witness_program(program, hrp) @@ -430,7 +518,7 @@ impl Address { /// Constructs a new pay-to-script-hash (P2SH) [`Address`] that embeds a /// pay-to-witness-public-key-hash (P2WPKH). /// - /// This is a segwit address type that looks familiar (as p2sh) to legacy clients. + /// This is a SegWit address type that looks familiar (as p2sh) to legacy clients. pub fn p2shwpkh(pk: CompressedPublicKey, network: impl Into) -> Address { let builder = script::Builder::new().push_int_unchecked(0).push_slice(pk.wpubkey_hash()); let script_hash = builder.as_script().script_hash().expect("script is less than 520 bytes"); @@ -455,7 +543,7 @@ impl Address { /// Constructs a new pay-to-script-hash (P2SH) [`Address`] that embeds a /// pay-to-witness-script-hash (P2WSH). /// - /// This is a segwit address type that looks familiar (as p2sh) to legacy clients. + /// This is a SegWit address type that looks familiar (as p2sh) to legacy clients. pub fn p2shwsh( witness_script: &Script, network: impl Into, @@ -509,6 +597,8 @@ impl Address { Some(AddressType::P2wsh) } else if program.is_p2tr() { Some(AddressType::P2tr) + } else if program.is_p2a() { + Some(AddressType::P2a) } else { None }, @@ -546,7 +636,7 @@ impl Address { } } - /// Gets the witness program for this address if this is a segwit address. + /// Gets the witness program for this address if this is a SegWit address. pub fn witness_program(&self) -> Option { use AddressInner::*; @@ -642,7 +732,7 @@ impl Address { /// Returns true if the given pubkey is directly related to the address payload. /// /// This is determined by directly comparing the address payload with either the - /// hash of the given public key or the segwit redeem hash generated from the + /// hash of the given public key or the SegWit redeem hash generated from the /// given key. For Taproot addresses, the supplied key is assumed to be tweaked pub fn is_related_to_pubkey(&self, pubkey: PublicKey) -> bool { let pubkey_hash = pubkey.pubkey_hash(); @@ -684,7 +774,7 @@ impl Address { /// /// - For p2sh, the payload is the script hash. /// - For p2pkh, the payload is the pubkey hash. - /// - For segwit addresses, the payload is the witness program. + /// - For SegWit addresses, the payload is the witness program. fn payload_as_bytes(&self) -> &[u8] { use AddressInner::*; match self.0 { @@ -705,7 +795,7 @@ impl Address { } /// Parsed addresses do not always have *one* network. The problem is that legacy testnet, - /// regtest and signet addresse use the same prefix instead of multiple different ones. When + /// regtest and signet addresses use the same prefix instead of multiple different ones. When /// parsing, such addresses are always assumed to be testnet addresses (the same is true for /// bech32 signet addresses). So if one wants to check if an address belongs to a certain /// network a simple comparison is not enough anymore. Instead this function can be used. @@ -792,16 +882,7 @@ impl Address { /// For details about this mechanism, see section [*Parsing addresses*](Address#parsing-addresses) /// on [`Address`]. 
#[inline] - pub fn assume_checked(self) -> Address { - use AddressInner::*; - - let inner = match self.0 { - P2pkh { hash, network } => P2pkh { hash, network }, - P2sh { hash, network } => P2sh { hash, network }, - Segwit { program, hrp } => Segwit { program, hrp }, - }; - Address(inner, PhantomData) - } + pub fn assume_checked(self) -> Address { Address(self.0, PhantomData) } /// Parse a bech32 Address string pub fn from_bech32_str(s: &str) -> Result, Bech32Error> { @@ -822,12 +903,9 @@ impl Address { return Err(LegacyAddressTooLongError { length: s.len() }.into()); } let data = base58::decode_check(s)?; - if data.len() != 21 { - return Err(InvalidBase58PayloadLengthError { length: s.len() }.into()); - } + let data: &[u8; 21] = (&*data).try_into().map_err(|_| InvalidBase58PayloadLengthError { length: s.len() })?; - let (prefix, data) = data.split_first().expect("length checked above"); - let data: [u8; 20] = data.try_into().expect("length checked above"); + let (prefix, &data) = data.split_first(); let inner = match *prefix { PUBKEY_ADDRESS_PREFIX_MAIN => { @@ -877,27 +955,30 @@ impl fmt::Debug for Address { /// Address can be parsed only with `NetworkUnchecked`. /// -/// Only segwit bech32 addresses prefixed with `bc`, `bcrt` or `tb` and legacy base58 addresses -/// prefixed with `1`, `2, `3`, `m` or `n` are supported. +/// Only SegWit bech32 addresses prefixed with `bc`, `bcrt` or `tb` and legacy base58 addresses +/// prefixed with `1`, `2`, `3`, `m` or `n` are supported. /// /// # Errors /// -/// - [`ParseError::Bech32`] if the segwit address begins with a `bc`, `bcrt` or `tb` and is not a +/// - [`ParseError::Bech32`] if the SegWit address begins with a `bc`, `bcrt` or `tb` and is not a /// valid bech32 address. /// /// - [`ParseError::Base58`] if the legacy address begins with a `1`, `2`, `3`, `m` or `n` and is /// not a valid base58 address. /// -/// - [`UnknownHrpError`] if the address does not begin with one of the above segwit or -/// legacy prifixes. -impl FromStr for Address { +/// - [`UnknownHrpError`] if the address does not begin with one of the above SegWit or +/// legacy prefixes. +impl FromStr for Address { type Err = ParseError; - fn from_str(s: &str) -> Result, ParseError> { + fn from_str(s: &str) -> Result { if ["bc1", "bcrt1", "tb1"].iter().any(|&prefix| s.to_lowercase().starts_with(prefix)) { - Ok(Address::from_bech32_str(s)?) + let address = Address::from_bech32_str(s)?; + // We know that `U` is only ever `NetworkUnchecked` but the compiler does not. + Ok(Address(address.0, PhantomData::)) } else if ["1", "2", "3", "m", "n"].iter().any(|&prefix| s.starts_with(prefix)) { - Ok(Address::from_base58_str(s)?) 
+ let address = Address::from_base58_str(s)?; + Ok(Address(address.0, PhantomData::)) } else { let hrp = match s.rfind('1') { Some(pos) => &s[..pos], @@ -908,7 +989,7 @@ impl FromStr for Address { } } -/// Convert a byte array of a pubkey hash into a segwit redeem hash +/// Convert a byte array of a pubkey hash into a SegWit redeem hash fn segwit_redeem_hash(pubkey_hash: PubkeyHash) -> hash160::Hash { let mut sha_engine = hash160::Hash::engine(); sha_engine.input(&[0, 20]); @@ -950,7 +1031,7 @@ mod tests { } #[test] - fn test_p2pkh_address_58() { + fn p2pkh_address_58() { let hash = "162c5ea71c0b23f5b9022ef047c4a86470a5b070".parse::().unwrap(); let addr = Address::p2pkh(hash, NetworkKind::Main); @@ -964,7 +1045,7 @@ mod tests { } #[test] - fn test_p2pkh_from_key() { + fn p2pkh_from_key() { let key = "048d5141948c1702e8c95f438815794b87f706a8d4cd2bffad1dc1570971032c9b6042a0431ded2478b5c9cf2d81c124a5e57347a3c63ef0e7716cf54d613ba183".parse::().unwrap(); let addr = Address::p2pkh(key, NetworkKind::Main); assert_eq!(&addr.to_string(), "1QJVDzdqb1VpbDK7uDeyVXy9mR27CJiyhY"); @@ -979,7 +1060,7 @@ mod tests { } #[test] - fn test_p2sh_address_58() { + fn p2sh_address_58() { let hash = "162c5ea71c0b23f5b9022ef047c4a86470a5b070".parse::().unwrap(); let addr = Address::p2sh_from_hash(hash, NetworkKind::Main); @@ -993,7 +1074,7 @@ mod tests { } #[test] - fn test_p2sh_parse() { + fn p2sh_parse() { let script = ScriptBuf::from_hex("552103a765fc35b3f210b95223846b36ef62a4e53e34e2925270c2c7906b92c9f718eb2103c327511374246759ec8d0b89fa6c6b23b33e11f92c5bc155409d86de0c79180121038cae7406af1f12f4786d820a1466eec7bc5785a1b5e4a387eca6d797753ef6db2103252bfb9dcaab0cd00353f2ac328954d791270203d66c2be8b430f115f451b8a12103e79412d42372c55dd336f2eb6eb639ef9d74a22041ba79382c74da2338fe58ad21035049459a4ebc00e876a9eef02e72a3e70202d3d1f591fc0dd542f93f642021f82102016f682920d9723c61b27f562eb530c926c00106004798b6471e8c52c60ee02057ae").unwrap(); let addr = Address::p2sh(&script, NetworkKind::Test).unwrap(); assert_eq!(&addr.to_string(), "2N3zXjbwdTcPsJiy8sUK9FhWJhqQCxA8Jjr"); @@ -1002,16 +1083,14 @@ mod tests { } #[test] - fn test_p2sh_parse_for_large_script() { + fn p2sh_parse_for_large_script() { let script = 
ScriptBuf::from_hex("552103a765fc35b3f210b95223846b36ef62a4e53e34e2925270c2c7906b92c9f718eb2103c327511374246759ec8d0b89fa6c6b23b33e11f92c5bc155409d86de0c79180121038cae7406af1f12f4786d820a1466eec7bc5785a1b5e4a387eca6d797753ef6db2103252bfb9dcaab0cd00353f2ac328954d791270203d66c2be8b430f115f451b8a12103e79412d42372c55dd336f2eb6eb639ef9d74a22041ba79382c74da2338fe58ad21035049459a4ebc00e876a9eef02e72a3e70202d3d1f591fc0dd542f93f642021f82102016f682920d9723c61b27f562eb530c926c00106004798b6471e8c52c60ee02057ae12123122313123123ac1231231231231313123131231231231313212313213123123552103a765fc35b3f210b95223846b36ef62a4e53e34e2925270c2c7906b92c9f718eb2103c327511374246759ec8d0b89fa6c6b23b33e11f92c5bc155409d86de0c79180121038cae7406af1f12f4786d820a1466eec7bc5785a1b5e4a387eca6d797753ef6db2103252bfb9dcaab0cd00353f2ac328954d791270203d66c2be8b430f115f451b8a12103e79412d42372c55dd336f2eb6eb639ef9d74a22041ba79382c74da2338fe58ad21035049459a4ebc00e876a9eef02e72a3e70202d3d1f591fc0dd542f93f642021f82102016f682920d9723c61b27f562eb530c926c00106004798b6471e8c52c60ee02057ae12123122313123123ac1231231231231313123131231231231313212313213123123552103a765fc35b3f210b95223846b36ef62a4e53e34e2925270c2c7906b92c9f718eb2103c327511374246759ec8d0b89fa6c6b23b33e11f92c5bc155409d86de0c79180121038cae7406af1f12f4786d820a1466eec7bc5785a1b5e4a387eca6d797753ef6db2103252bfb9dcaab0cd00353f2ac328954d791270203d66c2be8b430f115f451b8a12103e79412d42372c55dd336f2eb6eb639ef9d74a22041ba79382c74da2338fe58ad21035049459a4ebc00e876a9eef02e72a3e70202d3d1f591fc0dd542f93f642021f82102016f682920d9723c61b27f562eb530c926c00106004798b6471e8c52c60ee02057ae12123122313123123ac1231231231231313123131231231231313212313213123123").unwrap(); - assert_eq!( - Address::p2sh(&script, NetworkKind::Test), - Err(RedeemScriptSizeError { size: script.len() }) - ); + let res = Address::p2sh(&script, NetworkKind::Test); + assert_eq!(res.unwrap_err().invalid_size(), script.len()) } #[test] - fn test_p2wpkh() { + fn p2wpkh() { // stolen from Bitcoin transaction: b3c8c2b6cfc335abbcb2c7823a8453f55d64b2b5125a9a61e8737230cdb8ce20 let key = "033bc8c83c52df5712229a2f72206d90192366c36428cb0c12b6af98324d97bfbc" .parse::() @@ -1023,7 +1102,7 @@ mod tests { } #[test] - fn test_p2wsh() { + fn p2wsh() { // stolen from Bitcoin transaction 5df912fda4becb1c29e928bec8d64d93e9ba8efa9b5b405bd683c86fd2c65667 let script = ScriptBuf::from_hex("52210375e00eb72e29da82b89367947f29ef34afb75e8654f6ea368e0acdfd92976b7c2103a1b26313f430c4b15bb1fdce663207659d8cac749a0e53d70eff01874496feff2103c96d495bfdd5ba4145e3e046fee45e84a8a48ad05bd8dbb395c011a32cf9f88053ae").unwrap(); let addr = Address::p2wsh(&script, KnownHrp::Mainnet).expect("script is valid"); @@ -1036,7 +1115,7 @@ mod tests { } #[test] - fn test_p2shwpkh() { + fn p2shwpkh() { // stolen from Bitcoin transaction: ad3fd9c6b52e752ba21425435ff3dd361d6ac271531fc1d2144843a9f550ad01 let key = "026c468be64d22761c30cd2f12cbc7de255d592d7904b1bab07236897cc4c2e766" .parse::() @@ -1048,7 +1127,7 @@ mod tests { } #[test] - fn test_p2shwsh() { + fn p2shwsh() { // stolen from Bitcoin transaction f9ee2be4df05041d0e0a35d7caa3157495ca4f93b233234c9967b6901dacf7a9 let script = ScriptBuf::from_hex("522103e5529d8eaa3d559903adb2e881eb06c86ac2574ffa503c45f4e942e2a693b33e2102e5f10fcdcdbab211e0af6a481f5532536ec61a5fdbf7183770cf8680fe729d8152ae").unwrap(); let addr = Address::p2shwsh(&script, NetworkKind::Main).expect("script is valid"); @@ -1058,7 +1137,7 @@ mod tests { } #[test] - fn test_non_existent_segwit_version() { + fn non_existent_segwit_version() { // 40-byte program let program 
= hex!( "654f6ea368e0acdfd92976b7c2103a1b26313f430654f6ea368e0acdfd92976b7c2103a1b26313f4" @@ -1070,7 +1149,7 @@ mod tests { } #[test] - fn test_address_debug() { + fn address_debug() { // This is not really testing output of Debug but the ability and proper functioning // of Debug derivation on structs generic in NetworkValidation. #[derive(Debug)] @@ -1094,7 +1173,7 @@ mod tests { } #[test] - fn test_address_type() { + fn address_type() { let addresses = [ ("1QJVDzdqb1VpbDK7uDeyVXy9mR27CJiyhY", Some(AddressType::P2pkh)), ("33iFwdLuRpW1uK1RTRqsoi8rR4NpDzk66k", Some(AddressType::P2sh)), @@ -1108,9 +1187,9 @@ mod tests { Some(AddressType::P2tr), ), // Related to future extensions, addresses are valid but have no type - // segwit v1 and len != 32 + // SegWit v1 and len != 32 ("bc1pw508d6qejxtdg4y5r3zarvary0c5xw7kw508d6qejxtdg4y5r3zarvary0c5xw7kt5nd6y", None), - // segwit v2 + // SegWit v2 ("bc1zw508d6qejxtdg4y5r3zarvaryvaxxpcs", None), ]; for (address, expected_type) in &addresses { @@ -1125,7 +1204,7 @@ mod tests { #[test] #[cfg(feature = "serde")] - fn test_json_serialize() { + fn json_serialize() { use serde_json; let addr = @@ -1207,7 +1286,7 @@ mod tests { } #[test] - fn test_qr_string() { + fn qr_string() { for el in ["132F25rTsvBdp9JzLLBHP5mvGY66i1xdiM", "33iFwdLuRpW1uK1RTRqsoi8rR4NpDzk66k"].iter() { @@ -1247,7 +1326,7 @@ mod tests { } #[test] - fn test_is_related_to_pubkey_p2wpkh() { + fn is_related_to_pubkey_p2wpkh() { let address_string = "bc1qhvd6suvqzjcu9pxjhrwhtrlj85ny3n2mqql5w4"; let address = address_string .parse::>() @@ -1268,7 +1347,7 @@ mod tests { } #[test] - fn test_is_related_to_pubkey_p2shwpkh() { + fn is_related_to_pubkey_p2shwpkh() { let address_string = "3EZQk4F8GURH5sqVMLTFisD17yNeKa7Dfs"; let address = address_string .parse::>() @@ -1289,7 +1368,7 @@ mod tests { } #[test] - fn test_is_related_to_pubkey_p2pkh() { + fn is_related_to_pubkey_p2pkh() { let address_string = "1J4LVanjHMu3JkXbVrahNuQCTGCRRgfWWx"; let address = address_string .parse::>() @@ -1310,7 +1389,7 @@ mod tests { } #[test] - fn test_is_related_to_pubkey_p2pkh_uncompressed_key() { + fn is_related_to_pubkey_p2pkh_uncompressed_key() { let address_string = "msvS7KzhReCDpQEJaV2hmGNvuQqVUDuC6p"; let address = address_string .parse::>() @@ -1331,7 +1410,7 @@ mod tests { } #[test] - fn test_is_related_to_pubkey_p2tr() { + fn is_related_to_pubkey_p2tr() { let pubkey_string = "0347ff3dacd07a1f43805ec6808e801505a6e18245178609972a68afbc2777ff2b"; let pubkey = pubkey_string.parse::().expect("pubkey"); let xonly_pubkey = XOnlyPublicKey::from(pubkey.inner); @@ -1357,7 +1436,7 @@ mod tests { } #[test] - fn test_is_related_to_xonly_pubkey() { + fn is_related_to_xonly_pubkey() { let pubkey_string = "0347ff3dacd07a1f43805ec6808e801505a6e18245178609972a68afbc2777ff2b"; let pubkey = pubkey_string.parse::().expect("pubkey"); let xonly_pubkey = XOnlyPublicKey::from(pubkey.inner); @@ -1378,7 +1457,7 @@ mod tests { } #[test] - fn test_fail_address_from_script() { + fn fail_address_from_script() { use crate::witness_program; let bad_p2wpkh = ScriptBuf::from_hex("0014dbc5b0a8f9d4353b4b54c3db48846bb15abfec").unwrap(); @@ -1412,7 +1491,7 @@ mod tests { } #[test] - fn test_matches_script_pubkey() { + fn matches_script_pubkey() { let addresses = [ "1QJVDzdqb1VpbDK7uDeyVXy9mR27CJiyhY", "1J4LVanjHMu3JkXbVrahNuQCTGCRRgfWWx", @@ -1436,4 +1515,55 @@ mod tests { } } } + + #[test] + #[cfg(feature = "serde")] + fn serde_address_usage_in_struct() { + use serde::{self, Deserialize, Serialize}; + + #[derive(Debug, Clone, PartialEq, Eq, 
Serialize, Deserialize)] + struct Foo + where + V: NetworkValidation, + { + #[serde(bound(deserialize = "V: NetworkValidationUnchecked"))] + address: Address, + } + + let addr_str = "33iFwdLuRpW1uK1RTRqsoi8rR4NpDzk66k"; + let unchecked = addr_str.parse::>().unwrap(); + + // Serialize with an unchecked address. + let foo_unchecked = Foo { address: unchecked.clone() }; + let ser = serde_json::to_string(&foo_unchecked).expect("failed to serialize"); + let rinsed: Foo = + serde_json::from_str(&ser).expect("failed to deserialize"); + assert_eq!(rinsed, foo_unchecked); + + // Serialize with a checked address. + let foo_checked = Foo { address: unchecked.assume_checked() }; + let ser = serde_json::to_string(&foo_checked).expect("failed to serialize"); + let rinsed: Foo = + serde_json::from_str(&ser).expect("failed to deserialize"); + assert_eq!(&rinsed.address, foo_checked.address.as_unchecked()); + assert_eq!(rinsed, foo_unchecked); + } + + #[test] + fn pay_to_anchor_address_regtest() { + // Verify that p2a uses the expected address for regtest. + // This test-vector is borrowed from the bitcoin source code. + let address_str = "bcrt1pfeesnyr2tx"; + + let script = ScriptBuf::new_p2a(); + let address_unchecked = address_str.parse().unwrap(); + let address = Address::from_script(&script, Network::Regtest).unwrap(); + assert_eq!(address.as_unchecked(), &address_unchecked); + assert_eq!(address.to_string(), address_str); + + // Verify that the address is considered standard + // and that the output type is P2a + assert!(address.is_spend_standard()); + assert_eq!(address.address_type(), Some(AddressType::P2a)); + } } diff --git a/bitcoin/src/address/script_pubkey.rs b/bitcoin/src/address/script_pubkey.rs index 2d801b0c87..b89008ca33 100644 --- a/bitcoin/src/address/script_pubkey.rs +++ b/bitcoin/src/address/script_pubkey.rs @@ -2,6 +2,7 @@ //! Bitcoin scriptPubkey script extensions. +use internals::array::ArrayExt; use secp256k1::{Secp256k1, Verification}; use crate::internal_macros::define_extension_trait; @@ -10,7 +11,7 @@ use crate::key::{ XOnlyPublicKey, }; use crate::opcodes::all::*; -use crate::script::witness_program::WitnessProgram; +use crate::script::witness_program::{WitnessProgram, P2A_PROGRAM}; use crate::script::witness_version::WitnessVersion; use crate::script::{ self, Builder, PushBytes, RedeemScriptSizeError, Script, ScriptBuf, ScriptExt as _, ScriptHash, @@ -99,14 +100,16 @@ define_extension_trait! { pub(crate) trait ScriptExtPrivate impl for Script { /// Returns the bytes of the (possibly invalid) public key if this script is P2PK. fn p2pk_pubkey_bytes(&self) -> Option<&[u8]> { - match self.len() { - 67 if self.as_bytes()[0] == OP_PUSHBYTES_65.to_u8() - && self.as_bytes()[66] == OP_CHECKSIG.to_u8() => - Some(&self.as_bytes()[1..66]), - 35 if self.as_bytes()[0] == OP_PUSHBYTES_33.to_u8() - && self.as_bytes()[34] == OP_CHECKSIG.to_u8() => - Some(&self.as_bytes()[1..34]), - _ => None, + if let Ok(bytes) = <&[u8; 67]>::try_from(self.as_bytes()) { + let (&first, bytes) = bytes.split_first::<66>(); + let (&last, pubkey) = bytes.split_last::<65>(); + (first == OP_PUSHBYTES_65.to_u8() && last == OP_CHECKSIG.to_u8()).then_some(pubkey) + } else if let Ok(bytes) = <&[u8; 35]>::try_from(self.as_bytes()) { + let (&first, bytes) = bytes.split_first::<34>(); + let (&last, pubkey) = bytes.split_last::<33>(); + (first == OP_PUSHBYTES_33.to_u8() && last == OP_CHECKSIG.to_u8()).then_some(pubkey) + } else { + None } } } @@ -170,6 +173,11 @@ define_extension_trait! 
{ new_witness_program_unchecked(WitnessVersion::V1, output_key.serialize()) } + /// Generates pay to anchor output. + fn new_p2a() -> Self { + new_witness_program_unchecked(WitnessVersion::V1, P2A_PROGRAM) + } + /// Generates P2WSH-type of scriptPubkey with a given [`WitnessProgram`]. fn new_witness_program(witness_program: &WitnessProgram) -> Self { Builder::new() @@ -183,14 +191,14 @@ define_extension_trait! { /// Generates P2WSH-type of scriptPubkey with a given [`WitnessVersion`] and the program bytes. /// Does not do any checks on version or program length. /// -/// Convenience method used by `new_p2wpkh`, `new_p2wsh`, `new_p2tr`, and `new_p2tr_tweaked`. +/// Convenience method used by `new_p2a`, `new_p2wpkh`, `new_p2wsh`, `new_p2tr`, and `new_p2tr_tweaked`. pub(super) fn new_witness_program_unchecked>( version: WitnessVersion, program: T, ) -> ScriptBuf { let program = program.as_ref(); debug_assert!(program.len() >= 2 && program.len() <= 40); - // In segwit v0, the program must be 20 or 32 bytes long. + // In SegWit v0, the program must be either 20 (P2WPKH) bytes or 32 (P2WSH) bytes long debug_assert!(version != WitnessVersion::V0 || program.len() == 20 || program.len() == 32); Builder::new().push_opcode(version.into()).push_slice(program).into_script() } diff --git a/bitcoin/src/bip152.rs b/bitcoin/src/bip152.rs index 7fcfe38fca..bb25066d18 100644 --- a/bitcoin/src/bip152.rs +++ b/bitcoin/src/bip152.rs @@ -4,12 +4,13 @@ //! //! Implementation of compact blocks data structure and algorithms. +use core::convert::Infallible; use core::{convert, fmt, mem}; #[cfg(feature = "std")] use std::error; use hashes::{sha256, siphash24}; -use internals::ToU64 as _; +use internals::{ToU64 as _, array::ArrayExt as _}; use io::{BufRead, Write}; use crate::consensus::encode::{self, Decodable, Encodable, ReadExt, WriteExt}; @@ -30,7 +31,9 @@ pub enum Error { InvalidPrefill, } -internals::impl_from_infallible!(Error); +impl From for Error { + fn from(never: Infallible) -> Self { match never {} } +} impl fmt::Display for Error { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { @@ -114,8 +117,8 @@ impl ShortId { // 2. Running SipHash-2-4 with the input being the transaction ID and the keys (k0/k1) // set to the first two little-endian 64-bit integers from the above hash, respectively. 
( - u64::from_le_bytes(h.as_byte_array()[0..8].try_into().expect("8 byte slice")), - u64::from_le_bytes(h.as_byte_array()[8..16].try_into().expect("8 byte slice")), + u64::from_le_bytes(*h.as_byte_array().sub_array::<0, 8>()), + u64::from_le_bytes(*h.as_byte_array().sub_array::<8, 8>()), ) } @@ -410,8 +413,8 @@ mod test { use crate::merkle_tree::TxMerkleNode; use crate::transaction::OutPointExt; use crate::{ - transaction, Amount, BlockChecked, CompactTarget, OutPoint, ScriptBuf, Sequence, TxIn, - TxOut, Txid, Witness, + transaction, Amount, BlockChecked, BlockTime, CompactTarget, OutPoint, ScriptBuf, Sequence, + TxIn, TxOut, Txid, Witness, }; fn dummy_tx(nonce: &[u8]) -> Transaction { @@ -434,7 +437,7 @@ mod test { version: block::Version::ONE, prev_blockhash: BlockHash::from_byte_array([0x99; 32]), merkle_root: TxMerkleNode::from_byte_array([0x77; 32]), - time: 2, + time: BlockTime::from_u32(2), bits: CompactTarget::from_consensus(3), nonce: 4, }; @@ -443,7 +446,7 @@ mod test { } #[test] - fn test_header_and_short_ids_from_block() { + fn header_and_short_ids_from_block() { let block = dummy_block(); let compact = HeaderAndShortIds::from_block(&block, 42, 2, &[]).unwrap(); @@ -463,7 +466,7 @@ mod test { } #[test] - fn test_compact_block_vector() { + fn compact_block_vector() { // Tested with Elements implementation of compact blocks. let raw_block = Vec::::from_hex("000000206c750a364035aefd5f81508a08769975116d9195312ee4520dceac39e1fdc62c4dc67473b8e354358c1e610afeaff7410858bd45df43e2940f8a62bd3d5e3ac943c2975cffff7f200000000002020000000001010000000000000000000000000000000000000000000000000000000000000000ffffffff04016b0101ffffffff020006062a0100000001510000000000000000266a24aa21a9ed4a3d9f3343dafcc0d6f6d4310f2ee5ce273ed34edca6c75db3a73e7f368734200120000000000000000000000000000000000000000000000000000000000000000000000000020000000001021fc20ba2bd745507b8e00679e3b362558f9457db374ca28ffa5243f4c23a4d5f00000000171600147c9dea14ffbcaec4b575e03f05ceb7a81cd3fcbffdffffff915d689be87b43337f42e26033df59807b768223368f189a023d0242d837768900000000171600147c9dea14ffbcaec4b575e03f05ceb7a81cd3fcbffdffffff0200cdf5050000000017a9146803c72d9154a6a20f404bed6d3dcee07986235a8700e1f5050000000017a9144e6a4c7cb5b5562904843bdf816342f4db9f5797870247304402205e9bf6e70eb0e4b495bf483fd8e6e02da64900f290ef8aaa64bb32600d973c450220670896f5d0e5f33473e5f399ab680cc1d25c2d2afd15abd722f04978f28be887012103e4e4d9312b2261af508b367d8ba9be4f01b61d6d6e78bec499845b4f410bcf2702473044022045ac80596a6ac9c8c572f94708709adaf106677221122e08daf8b9741a04f66a022003ccd52a3b78f8fd08058fc04fc0cffa5f4c196c84eae9e37e2a85babe731b57012103e4e4d9312b2261af508b367d8ba9be4f01b61d6d6e78bec499845b4f410bcf276a000000").unwrap(); let raw_compact = Vec::::from_hex("000000206c750a364035aefd5f81508a08769975116d9195312ee4520dceac39e1fdc62c4dc67473b8e354358c1e610afeaff7410858bd45df43e2940f8a62bd3d5e3ac943c2975cffff7f2000000000a4df3c3744da89fa010a6979e971450100020000000001010000000000000000000000000000000000000000000000000000000000000000ffffffff04016b0101ffffffff020006062a0100000001510000000000000000266a24aa21a9ed4a3d9f3343dafcc0d6f6d4310f2ee5ce273ed34edca6c75db3a73e7f368734200120000000000000000000000000000000000000000000000000000000000000000000000000").unwrap(); @@ -478,7 +481,7 @@ mod test { } #[test] - fn test_getblocktx_differential_encoding_de_and_serialization() { + fn getblocktx_differential_encoding_de_and_serialization() { let testcases = vec![ // differentially encoded VarInts, indicies (vec![4, 0, 5, 1, 10], vec![0, 6, 8, 19]), @@ -523,7 +526,7 @@ mod test { #[test] 
#[cfg(debug_assertions)] #[should_panic] // 'attempt to add with overflow' in consensus_encode() - fn test_getblocktx_panic_when_encoding_u64_max() { + fn getblocktx_panic_when_encoding_u64_max() { serialize(&BlockTransactionsRequest { block_hash: BlockHash::from_byte_array([0; 32]), indexes: vec![u64::MAX], diff --git a/bitcoin/src/bip158.rs b/bitcoin/src/bip158.rs index 8a9188a2ce..abdde2a9c2 100644 --- a/bitcoin/src/bip158.rs +++ b/bitcoin/src/bip158.rs @@ -38,10 +38,11 @@ //! ``` use core::cmp::{self, Ordering}; +use core::convert::Infallible; use core::fmt; use hashes::{sha256d, siphash24, HashEngine as _}; -use internals::{write_err, ToU64 as _}; +use internals::{write_err, ToU64 as _, array::ArrayExt as _}; use io::{BufRead, Write}; use crate::block::{Block, BlockHash, Checked}; @@ -75,11 +76,13 @@ impl_hashencode!(FilterHeader); pub enum Error { /// Missing UTXO, cannot calculate script filter. UtxoMissing(OutPoint), - /// IO error reading or writing binary serialization of the filter. + /// I/O error reading or writing binary serialization of the filter. Io(io::Error), } -internals::impl_from_infallible!(Error); +impl From for Error { + fn from(never: Infallible) -> Self { match never {} } +} impl fmt::Display for Error { fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { @@ -87,7 +90,7 @@ impl fmt::Display for Error { match *self { UtxoMissing(ref coin) => write!(f, "unresolved UTXO {}", coin), - Io(ref e) => write_err!(f, "IO error"; e), + Io(ref e) => write_err!(f, "I/O error"; e), } } } @@ -192,8 +195,8 @@ impl<'a, W: Write> BlockFilterWriter<'a, W> { /// Constructs a new [`BlockFilterWriter`] from `block`. pub fn new(writer: &'a mut W, block: &'a Block) -> BlockFilterWriter<'a, W> { let block_hash_as_int = block.block_hash().to_byte_array(); - let k0 = u64::from_le_bytes(block_hash_as_int[0..8].try_into().expect("8 byte slice")); - let k1 = u64::from_le_bytes(block_hash_as_int[8..16].try_into().expect("8 byte slice")); + let k0 = u64::from_le_bytes(*block_hash_as_int.sub_array::<0, 8>()); + let k1 = u64::from_le_bytes(*block_hash_as_int.sub_array::<8, 8>()); let writer = GcsFilterWriter::new(writer, k0, k1, M, P); BlockFilterWriter { block, writer } } @@ -247,8 +250,8 @@ impl BlockFilterReader { /// Constructs a new [`BlockFilterReader`] from `block_hash`. 
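Both `BlockFilterWriter::new` above and `BlockFilterReader::new` below derive the two SipHash keys the same way: the first and second little-endian `u64`s of the block hash. The same step written with plain slice conversions, for reference:

fn siphash_keys(block_hash: [u8; 32]) -> (u64, u64) {
    // k0 is bytes 0..8, k1 is bytes 8..16, both read as little-endian integers.
    let k0 = u64::from_le_bytes(block_hash[0..8].try_into().expect("8 byte slice"));
    let k1 = u64::from_le_bytes(block_hash[8..16].try_into().expect("8 byte slice"));
    (k0, k1)
}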
pub fn new(block_hash: BlockHash) -> BlockFilterReader { let block_hash_as_int = block_hash.to_byte_array(); - let k0 = u64::from_le_bytes(block_hash_as_int[0..8].try_into().expect("8 byte slice")); - let k1 = u64::from_le_bytes(block_hash_as_int[8..16].try_into().expect("8 byte slice")); + let k0 = u64::from_le_bytes(*block_hash_as_int.sub_array::<0, 8>()); + let k1 = u64::from_le_bytes(*block_hash_as_int.sub_array::<8, 8>()); BlockFilterReader { reader: GcsFilterReader::new(k0, k1, M, P) } } @@ -582,7 +585,7 @@ mod test { use crate::ScriptBuf; #[test] - fn test_blockfilters() { + fn blockfilters() { // test vectors from: https://github.com/jimpo/bitcoin/blob/c7efb652f3543b001b4dd22186a354605b14f47e/src/test/data/blockfilters.json let data = include_str!("../tests/data/blockfilters.json"); @@ -649,7 +652,7 @@ mod test { } #[test] - fn test_filter() { + fn filter() { let mut patterns = BTreeSet::new(); patterns.insert(hex!("000000")); @@ -718,7 +721,7 @@ mod test { } #[test] - fn test_bit_stream() { + fn bit_stream() { let mut out = Vec::new(); { let mut writer = BitStreamWriter::new(&mut out); diff --git a/bitcoin/src/bip32.rs b/bitcoin/src/bip32.rs index 8f11e67b83..73d74ffc82 100644 --- a/bitcoin/src/bip32.rs +++ b/bitcoin/src/bip32.rs @@ -5,11 +5,13 @@ //! Implementation of BIP32 hierarchical deterministic wallets, as defined //! at . +use core::convert::Infallible; use core::ops::Index; use core::str::FromStr; use core::{fmt, slice}; -use hashes::{hash160, hash_newtype, sha512, GeneralHash, HashEngine, Hmac, HmacEngine}; +use hashes::{hash160, hash_newtype, sha512, Hash, HashEngine, Hmac, HmacEngine}; +use internals::array::ArrayExt; use internals::write_err; use secp256k1::{Secp256k1, XOnlyPublicKey}; @@ -43,7 +45,7 @@ impl_array_newtype_stringify!(ChainCode, 32); impl ChainCode { fn from_hmac(hmac: Hmac) -> Self { - hmac.as_ref()[32..].try_into().expect("half of hmac is guaranteed to be 32 bytes") + ChainCode(*hmac.as_byte_array().split_array::<32, 32>().1) } } @@ -517,9 +519,17 @@ pub enum Error { InvalidPublicKeyHexLength(usize), /// Base58 decoded data was an invalid length. 
InvalidBase58PayloadLength(InvalidBase58PayloadLengthError), + /// Invalid private key prefix (byte 45 must be 0) + InvalidPrivateKeyPrefix, + /// Non-zero parent fingerprint for a master key (depth 0) + NonZeroParentFingerprintForMasterKey, + /// Non-zero child number for a master key (depth 0) + NonZeroChildNumberForMasterKey, } -internals::impl_from_infallible!(Error); +impl From for Error { + fn from(never: Infallible) -> Self { match never {} } +} impl fmt::Display for Error { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { @@ -541,6 +551,11 @@ impl fmt::Display for Error { InvalidPublicKeyHexLength(got) => write!(f, "PublicKey hex should be 66 or 130 digits long, got: {}", got), InvalidBase58PayloadLength(ref e) => write_err!(f, "base58 payload"; e), + InvalidPrivateKeyPrefix => + f.write_str("invalid private key prefix, byte 45 must be 0 as required by BIP-32"), + NonZeroParentFingerprintForMasterKey => + f.write_str("non-zero parent fingerprint in master key"), + NonZeroChildNumberForMasterKey => f.write_str("non-zero child number in master key"), } } } @@ -562,6 +577,9 @@ impl std::error::Error for Error { | UnknownVersion(_) | WrongExtendedKeyLength(_) | InvalidPublicKeyHexLength(_) => None, + InvalidPrivateKeyPrefix => None, + NonZeroParentFingerprintForMasterKey => None, + NonZeroChildNumberForMasterKey => None, } } } @@ -581,17 +599,19 @@ impl From for Error { impl Xpriv { /// Constructs a new master key from a seed value pub fn new_master(network: impl Into, seed: &[u8]) -> Result { - let mut hmac_engine: HmacEngine = HmacEngine::new(b"Bitcoin seed"); - hmac_engine.input(seed); - let hmac_result: Hmac = Hmac::from_engine(hmac_engine); + let mut engine = HmacEngine::::new(b"Bitcoin seed"); + engine.input(seed); + let hmac = engine.finalize(); Ok(Xpriv { network: network.into(), depth: 0, parent_fingerprint: Default::default(), child_number: ChildNumber::ZERO_NORMAL, - private_key: secp256k1::SecretKey::from_slice(&hmac_result.as_ref()[..32])?, - chain_code: ChainCode::from_hmac(hmac_result), + private_key: secp256k1::SecretKey::from_byte_array( + hmac.as_byte_array().split_array::<32, 32>().0, + )?, + chain_code: ChainCode::from_hmac(hmac), }) } @@ -605,8 +625,8 @@ impl Xpriv { } /// Constructs a new extended public key from this extended private key. - pub fn to_xpub(&self, secp: &Secp256k1) -> Xpub { - Xpub::from_xpriv(secp, self) + pub fn to_xpub(self, secp: &Secp256k1) -> Xpub { + Xpub::from_xpriv(secp, &self) } /// Constructs a new BIP340 keypair for Schnorr signatures and Taproot use matching the internal @@ -645,25 +665,27 @@ impl Xpriv { /// Private->Private child key derivation fn ckd_priv(&self, secp: &Secp256k1, i: ChildNumber) -> Xpriv { - let mut hmac_engine: HmacEngine = HmacEngine::new(&self.chain_code[..]); + let mut engine = HmacEngine::::new(&self.chain_code[..]); match i { ChildNumber::Normal { .. } => { // Non-hardened key: compute public data and use that - hmac_engine.input( + engine.input( &secp256k1::PublicKey::from_secret_key(secp, &self.private_key).serialize()[..], ); } ChildNumber::Hardened { .. 
} => { // Hardened key: use only secret data to prevent public derivation - hmac_engine.input(&[0u8]); - hmac_engine.input(&self.private_key[..]); + engine.input(&[0u8]); + engine.input(&self.private_key[..]); } } - hmac_engine.input(&u32::from(i).to_be_bytes()); - let hmac_result: Hmac = Hmac::from_engine(hmac_engine); - let sk = secp256k1::SecretKey::from_slice(&hmac_result.as_ref()[..32]) - .expect("statistically impossible to hit"); + engine.input(&u32::from(i).to_be_bytes()); + let hmac: Hmac = engine.finalize(); + let sk = secp256k1::SecretKey::from_byte_array( + hmac.as_byte_array().split_array::<32, 32>().0, + ) + .expect("statistically impossible to hit"); let tweaked = sk.add_tweak(&self.private_key.into()).expect("statistically impossible to hit"); @@ -673,36 +695,39 @@ impl Xpriv { parent_fingerprint: self.fingerprint(secp), child_number: i, private_key: tweaked, - chain_code: ChainCode::from_hmac(hmac_result), + chain_code: ChainCode::from_hmac(hmac), } } /// Decoding extended private key from binary data according to BIP 32 pub fn decode(data: &[u8]) -> Result { - if data.len() != 78 { - return Err(Error::WrongExtendedKeyLength(data.len())); - } + let Common { + network, + depth, + parent_fingerprint, + child_number, + chain_code, + key, + } = Common::decode(data)?; - let network = if data.starts_with(&VERSION_BYTES_MAINNET_PRIVATE) { - NetworkKind::Main - } else if data.starts_with(&VERSION_BYTES_TESTNETS_PRIVATE) { - NetworkKind::Test - } else { - let (b0, b1, b2, b3) = (data[0], data[1], data[2], data[3]); - return Err(Error::UnknownVersion([b0, b1, b2, b3])); + let network = match network { + VERSION_BYTES_MAINNET_PRIVATE => NetworkKind::Main, + VERSION_BYTES_TESTNETS_PRIVATE => NetworkKind::Test, + unknown => return Err(Error::UnknownVersion(unknown)), }; + let (&zero, private_key) = key.split_first(); + if zero != 0 { + return Err(Error::InvalidPrivateKeyPrefix); + } + Ok(Xpriv { network, - depth: data[4], - parent_fingerprint: data[5..9] - .try_into() - .expect("9 - 5 == 4, which is the Fingerprint length"), - child_number: u32::from_be_bytes(data[9..13].try_into().expect("4 byte slice")).into(), - chain_code: data[13..45] - .try_into() - .expect("45 - 13 == 32, which is the ChainCode length"), - private_key: secp256k1::SecretKey::from_slice(&data[46..78])?, + depth, + parent_fingerprint, + child_number, + chain_code, + private_key: secp256k1::SecretKey::from_byte_array(private_key)?, }) } @@ -729,7 +754,7 @@ impl Xpriv { /// Returns the first four bytes of the identifier pub fn fingerprint(&self, secp: &Secp256k1) -> Fingerprint { - self.identifier(secp).as_byte_array()[0..4].try_into().expect("4 is the fingerprint length") + self.identifier(secp).as_byte_array().sub_array::<0, 4>().into() } } @@ -803,15 +828,15 @@ impl Xpub { match i { ChildNumber::Hardened { .. 
} => Err(Error::CannotDeriveFromHardenedKey), ChildNumber::Normal { index: n } => { - let mut hmac_engine: HmacEngine = - HmacEngine::new(&self.chain_code[..]); - hmac_engine.input(&self.public_key.serialize()[..]); - hmac_engine.input(&n.to_be_bytes()); - - let hmac_result: Hmac = Hmac::from_engine(hmac_engine); - - let private_key = secp256k1::SecretKey::from_slice(&hmac_result.as_ref()[..32])?; - let chain_code = ChainCode::from_hmac(hmac_result); + let mut engine = HmacEngine::::new(&self.chain_code[..]); + engine.input(&self.public_key.serialize()[..]); + engine.input(&n.to_be_bytes()); + + let hmac = engine.finalize(); + let private_key = secp256k1::SecretKey::from_byte_array( + hmac.as_byte_array().split_array::<32, 32>().0 + )?; + let chain_code = ChainCode::from_hmac(hmac); Ok((private_key, chain_code)) } } @@ -838,30 +863,28 @@ impl Xpub { /// Decoding extended public key from binary data according to BIP 32 pub fn decode(data: &[u8]) -> Result { - if data.len() != 78 { - return Err(Error::WrongExtendedKeyLength(data.len())); - } + let Common { + network, + depth, + parent_fingerprint, + child_number, + chain_code, + key, + } = Common::decode(data)?; - let network = if data.starts_with(&VERSION_BYTES_MAINNET_PUBLIC) { - NetworkKind::Main - } else if data.starts_with(&VERSION_BYTES_TESTNETS_PUBLIC) { - NetworkKind::Test - } else { - let (b0, b1, b2, b3) = (data[0], data[1], data[2], data[3]); - return Err(Error::UnknownVersion([b0, b1, b2, b3])); + let network = match network { + VERSION_BYTES_MAINNET_PUBLIC => NetworkKind::Main, + VERSION_BYTES_TESTNETS_PUBLIC => NetworkKind::Test, + unknown => return Err(Error::UnknownVersion(unknown)), }; Ok(Xpub { network, - depth: data[4], - parent_fingerprint: data[5..9] - .try_into() - .expect("9 - 5 == 4, which is the Fingerprint length"), - child_number: u32::from_be_bytes(data[9..13].try_into().expect("4 byte slice")).into(), - chain_code: data[13..45] - .try_into() - .expect("45 - 13 == 32, which is the ChainCode length"), - public_key: secp256k1::PublicKey::from_slice(&data[45..78])?, + depth, + parent_fingerprint, + child_number, + chain_code, + public_key: secp256k1::PublicKey::from_slice(&key)?, }) } @@ -880,14 +903,14 @@ impl Xpub { ret } - /// Returns the HASH160 of the chaincode + /// Returns the HASH160 of the public key component of the xpub pub fn identifier(&self) -> XKeyIdentifier { XKeyIdentifier(hash160::Hash::hash(&self.public_key.serialize())) } /// Returns the first four bytes of the identifier pub fn fingerprint(&self) -> Fingerprint { - self.identifier().as_byte_array()[0..4].try_into().expect("4 is the fingerprint length") + self.identifier().as_byte_array().sub_array::<0, 4>().into() } } @@ -964,6 +987,48 @@ impl fmt::Display for InvalidBase58PayloadLengthError { #[cfg(feature = "std")] impl std::error::Error for InvalidBase58PayloadLengthError {} +// Helps unify decoding +struct Common { + network: [u8; 4], + depth: u8, + parent_fingerprint: Fingerprint, + child_number: ChildNumber, + chain_code: ChainCode, + // public key (compressed) or 0 byte followed by a private key + key: [u8; 33], +} + +impl Common { + fn decode(data: &[u8]) -> Result { + let data: &[u8; 78] = data.try_into().map_err(|_| Error::WrongExtendedKeyLength(data.len()))?; + + let (&network, data) = data.split_array::<4, 74>(); + let (&depth, data) = data.split_first::<73>(); + let (&parent_fingerprint, data) = data.split_array::<4, 69>(); + let (&child_number, data) = data.split_array::<4, 65>(); + let (&chain_code, &key) = data.split_array::<32, 
33>(); + + if depth == 0 { + if parent_fingerprint != [0u8; 4] { + return Err(Error::NonZeroParentFingerprintForMasterKey); + } + + if child_number != [0u8; 4] { + return Err(Error::NonZeroChildNumberForMasterKey); + } + } + + Ok(Common { + network, + depth, + parent_fingerprint: parent_fingerprint.into(), + child_number: u32::from_be_bytes(child_number).into(), + chain_code: chain_code.into(), + key, + }) + } +} + #[cfg(test)] mod tests { use hex::test_hex_unwrap as hex; @@ -974,7 +1039,7 @@ mod tests { use super::*; #[test] - fn test_parse_derivation_path() { + fn parse_derivation_path() { assert_eq!("n/0'/0".parse::(), Err(Error::InvalidChildNumberFormat)); assert_eq!("4/m/5".parse::(), Err(Error::InvalidChildNumberFormat)); assert_eq!("//3/0'".parse::(), Err(Error::InvalidChildNumberFormat)); @@ -1036,7 +1101,7 @@ mod tests { } #[test] - fn test_derivation_path_conversion_index() { + fn derivation_path_conversion_index() { let path = "0h/1/2'".parse::().unwrap(); let numbers: Vec = path.clone().into(); let path2: DerivationPath = numbers.into(); @@ -1096,7 +1161,7 @@ mod tests { } #[test] - fn test_increment() { + fn increment() { let idx = 9345497; // randomly generated, I promise let cn = ChildNumber::from_normal_idx(idx).unwrap(); assert_eq!(cn.increment().ok(), Some(ChildNumber::from_normal_idx(idx + 1).unwrap())); @@ -1139,7 +1204,7 @@ mod tests { } #[test] - fn test_vector_1() { + fn vector_1() { let secp = Secp256k1::new(); let seed = hex!("000102030405060708090a0b0c0d0e0f"); @@ -1175,7 +1240,7 @@ mod tests { } #[test] - fn test_vector_2() { + fn vector_2() { let secp = Secp256k1::new(); let seed = hex!("fffcf9f6f3f0edeae7e4e1dedbd8d5d2cfccc9c6c3c0bdbab7b4b1aeaba8a5a29f9c999693908d8a8784817e7b7875726f6c696663605d5a5754514e4b484542"); @@ -1211,7 +1276,7 @@ mod tests { } #[test] - fn test_vector_3() { + fn vector_3() { let secp = Secp256k1::new(); let seed = hex!("4b381541583be4423346c643850da4b320e46a87ae3d2a4e6da11eba819cd4acba45d239319ac14f863b8d5ab5a0d0c64d2e8a1e7d1457df2e5a3c51c73235be"); @@ -1226,6 +1291,44 @@ mod tests { "xpub68NZiKmJWnxxS6aaHmn81bvJeTESw724CRDs6HbuccFQN9Ku14VQrADWgqbhhTHBaohPX4CjNLf9fq9MYo6oDaPPLPxSb7gwQN3ih19Zm4Y"); } + #[test] + fn test_reject_xpriv_with_non_zero_byte_at_index_45() { + let mut xpriv = base58::decode_check("xprv9wSp6B7kry3Vj9m1zSnLvN3xH8RdsPP1Mh7fAaR7aRLcQMKTR2vidYEeEg2mUCTAwCd6vnxVrcjfy2kRgVsFawNzmjuHc2YmYRmagcEPdU9").unwrap(); + + // Modify byte at index 45 to be non-zero (e.g., 1) + xpriv[45] = 1; + + let result = Xpriv::decode(&xpriv); + assert!(result.is_err()); + + match result { + Err(Error::InvalidPrivateKeyPrefix) => {} + _ => panic!("Expected InvalidPrivateKeyPrefix error, got {:?}", result), + } + } + + #[test] + fn test_reject_xpriv_with_zero_depth_and_non_zero_index() { + let result = "xprv9s21ZrQH4r4TsiLvyLXqM9P7k1K3EYhA1kkD6xuquB5i39AU8KF42acDyL3qsDbU9NmZn6MsGSUYZEsuoePmjzsB3eFKSUEh3Gu1N3cqVUN".parse::(); + assert!(result.is_err()); + + match result { + Err(Error::NonZeroChildNumberForMasterKey) => {} + _ => panic!("Expected NonZeroChildNumberForMasterKey error, got {:?}", result), + } + } + + #[test] + fn test_reject_xpriv_with_zero_depth_and_non_zero_parent_fingerprint() { + let result = "xprv9s2SPatNQ9Vc6GTbVMFPFo7jsaZySyzk7L8n2uqKXJen3KUmvQNTuLh3fhZMBoG3G4ZW1N2kZuHEPY53qmbZzCHshoQnNf4GvELZfqTUrcv".parse::(); + assert!(result.is_err()); + + match result { + Err(Error::NonZeroParentFingerprintForMasterKey) => {} + _ => panic!("Expected NonZeroParentFingerprintForMasterKey error, got {:?}", result), + } + } + #[test] 
#[cfg(feature = "serde")] pub fn encode_decode_childnumber() { @@ -1307,4 +1410,33 @@ mod tests { let xpriv_str = "xprv9s21ZrQH143K24Mfq5zL5MhWK9hUhhGbd45hLXo2Pq2oqzMMo63oStZzFAzHGBP2UuGCqWLTAPLcMtD9y5gkZ6Eq3Rjuahrv17fENZ3QzxW"; xpriv_str.parse::().unwrap(); } + + #[test] + fn official_vectors_5() { + let invalid_keys = [ + "xpub661MyMwAqRbcEYS8w7XLSVeEsBXy79zSzH1J8vCdxAZningWLdN3zgtU6LBpB85b3D2yc8sfvZU521AAwdZafEz7mnzBBsz4wKY5fTtTQBm", + "xprv9s21ZrQH143K24Mfq5zL5MhWK9hUhhGbd45hLXo2Pq2oqzMMo63oStZzFGTQQD3dC4H2D5GBj7vWvSQaaBv5cxi9gafk7NF3pnBju6dwKvH", + "xpub661MyMwAqRbcEYS8w7XLSVeEsBXy79zSzH1J8vCdxAZningWLdN3zgtU6Txnt3siSujt9RCVYsx4qHZGc62TG4McvMGcAUjeuwZdduYEvFn", + "xprv9s21ZrQH143K24Mfq5zL5MhWK9hUhhGbd45hLXo2Pq2oqzMMo63oStZzFGpWnsj83BHtEy5Zt8CcDr1UiRXuWCmTQLxEK9vbz5gPstX92JQ", + "xpub661MyMwAqRbcEYS8w7XLSVeEsBXy79zSzH1J8vCdxAZningWLdN3zgtU6N8ZMMXctdiCjxTNq964yKkwrkBJJwpzZS4HS2fxvyYUA4q2Xe4", + "xprv9s21ZrQH143K24Mfq5zL5MhWK9hUhhGbd45hLXo2Pq2oqzMMo63oStZzFAzHGBP2UuGCqWLTAPLcMtD9y5gkZ6Eq3Rjuahrv17fEQ3Qen6J", + "xprv9s2SPatNQ9Vc6GTbVMFPFo7jsaZySyzk7L8n2uqKXJen3KUmvQNTuLh3fhZMBoG3G4ZW1N2kZuHEPY53qmbZzCHshoQnNf4GvELZfqTUrcv", + "xpub661no6RGEX3uJkY4bNnPcw4URcQTrSibUZ4NqJEw5eBkv7ovTwgiT91XX27VbEXGENhYRCf7hyEbWrR3FewATdCEebj6znwMfQkhRYHRLpJ", + "xprv9s21ZrQH4r4TsiLvyLXqM9P7k1K3EYhA1kkD6xuquB5i39AU8KF42acDyL3qsDbU9NmZn6MsGSUYZEsuoePmjzsB3eFKSUEh3Gu1N3cqVUN", + "xpub661MyMwAuDcm6CRQ5N4qiHKrJ39Xe1R1NyfouMKTTWcguwVcfrZJaNvhpebzGerh7gucBvzEQWRugZDuDXjNDRmXzSZe4c7mnTK97pTvGS8", + "DMwo58pR1QLEFihHiXPVykYB6fJmsTeHvyTp7hRThAtCX8CvYzgPcn8XnmdfHGMQzT7ayAmfo4z3gY5KfbrZWZ6St24UVf2Qgo6oujFktLHdHY4", + "DMwo58pR1QLEFihHiXPVykYB6fJmsTeHvyTp7hRThAtCX8CvYzgPcn8XnmdfHPmHJiEDXkTiJTVV9rHEBUem2mwVbbNfvT2MTcAqj3nesx8uBf9", + "xprv9s21ZrQH143K24Mfq5zL5MhWK9hUhhGbd45hLXo2Pq2oqzMMo63oStZzF93Y5wvzdUayhgkkFoicQZcP3y52uPPxFnfoLZB21Teqt1VvEHx", + "xprv9s21ZrQH143K24Mfq5zL5MhWK9hUhhGbd45hLXo2Pq2oqzMMo63oStZzFAzHGBP2UuGCqWLTAPLcMtD5SDKr24z3aiUvKr9bJpdrcLg1y3G", + "xpub661MyMwAqRbcEYS8w7XLSVeEsBXy79zSzH1J8vCdxAZningWLdN3zgtU6Q5JXayek4PRsn35jii4veMimro1xefsM58PgBMrvdYre8QyULY", + "xprv9s21ZrQH143K3QTDL4LXw2F7HEK3wJUD2nW2nRk4stbPy6cq3jPPqjiChkVvvNKmPGJxWUtg6LnF5kejMRNNU3TGtRBeJgk33yuGBxrMPHL", + ]; + for key in invalid_keys { + if key.starts_with("xpub") { + key.parse::().unwrap_err(); + } else { + key.parse::().unwrap_err(); + } + } + } } diff --git a/bitcoin/src/blockdata/block.rs b/bitcoin/src/blockdata/block.rs index 88971c6089..a8dae5e4ba 100644 --- a/bitcoin/src/blockdata/block.rs +++ b/bitcoin/src/blockdata/block.rs @@ -7,11 +7,13 @@ //! module describes structures and functions needed to describe //! these blocks and the blockchain. +use core::convert::Infallible; use core::fmt; use hashes::{sha256d, HashEngine}; -use internals::compact_size; +use internals::{compact_size, ToU64}; use io::{BufRead, Write}; +use units::BlockTime; use super::Weight; use crate::consensus::encode::WriteExt as _; @@ -84,6 +86,18 @@ impl Decodable for Version { } } +impl Encodable for BlockTime { + fn consensus_encode(&self, w: &mut W) -> Result { + self.to_u32().consensus_encode(w) + } +} + +impl Decodable for BlockTime { + fn consensus_decode(r: &mut R) -> Result { + Decodable::consensus_decode(r).map(BlockTime::from_u32) + } +} + /// Extension functionality for the [`Block`] type. pub trait BlockUncheckedExt: sealed::Sealed { /// Validates (or checks) a block. @@ -263,7 +277,7 @@ impl BlockCheckedExt for Block { fn weight(&self) -> Weight { // This is the exact definition of a weight unit, as defined by BIP-141 (quote above). 
let wu = block_base_size(self.transactions()) * 3 + self.total_size(); - Weight::from_wu_usize(wu) + Weight::from_wu(wu.to_u64()) } fn total_size(&self) -> usize { @@ -383,7 +397,9 @@ pub enum InvalidBlockError { InvalidWitnessCommitment, } -internals::impl_from_infallible!(InvalidBlockError); +impl From for InvalidBlockError { + fn from(never: Infallible) -> Self { match never {} } +} impl fmt::Display for InvalidBlockError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { @@ -413,7 +429,9 @@ pub enum Bip34Error { NegativeHeight, } -internals::impl_from_infallible!(Bip34Error); +impl From for Bip34Error { + fn from(never: Infallible) -> Self { match never {} } +} impl fmt::Display for Bip34Error { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { @@ -457,7 +475,9 @@ pub enum ValidationError { BadTarget, } -internals::impl_from_infallible!(ValidationError); +impl From for ValidationError { + fn from(never: Infallible) -> Self { match never {} } +} impl fmt::Display for ValidationError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { @@ -508,7 +528,7 @@ mod tests { } #[test] - fn test_coinbase_and_bip34() { + fn coinbase_and_bip34() { // testnet block 100,000 const BLOCK_HEX: &str = "0200000035ab154183570282ce9afc0b494c9fc6a3cfea05aa8c1add2ecc56490000000038ba3d78e4500a5a7570dbe61960398add4410d278b21cd9708e6d9743f374d544fc055227f1001c29c1ea3b0101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff3703a08601000427f1001c046a510100522cfabe6d6d0000000000000000000068692066726f6d20706f6f6c7365727665726aac1eeeed88ffffffff0100f2052a010000001976a914912e2b234f941f30b18afbb4fa46171214bf66c888ac00000000"; let block: Block = deserialize(&hex!(BLOCK_HEX)).unwrap(); @@ -589,7 +609,7 @@ mod tests { block::compute_merkle_root(&transactions).unwrap() ); assert_eq!(serialize(&real_decode.header().merkle_root), merkle); - assert_eq!(real_decode.header().time, 1231965655); + assert_eq!(real_decode.header().time, BlockTime::from_u32(1231965655)); assert_eq!(real_decode.header().bits, CompactTarget::from_consensus(486604799)); assert_eq!(real_decode.header().nonce, 2067413810); assert_eq!(real_decode.header().work(), work); @@ -640,7 +660,7 @@ mod tests { real_decode.header().merkle_root, block::compute_merkle_root(&transactions).unwrap() ); - assert_eq!(real_decode.header().time, 1472004949); + assert_eq!(real_decode.header().time, BlockTime::from_u32(1472004949)); assert_eq!(real_decode.header().bits, CompactTarget::from_consensus(0x1a06d450)); assert_eq!(real_decode.header().nonce, 1879759182); assert_eq!(real_decode.header().work(), work); @@ -723,7 +743,7 @@ mod tests { #[test] fn soft_fork_signalling() { for i in 0..31 { - let version_int = (0x20000000u32 ^ 1 << i) as i32; + let version_int = (0x20000000u32 ^ (1 << i)) as i32; let version = Version::from_consensus(version_int); if i < 29 { assert!(version.is_signalling_soft_fork(i)); @@ -732,7 +752,7 @@ mod tests { } } - let segwit_signal = Version::from_consensus(0x20000000 ^ 1 << 1); + let segwit_signal = Version::from_consensus(0x20000000 ^ (1 << 1)); assert!(!segwit_signal.is_signalling_soft_fork(0)); assert!(segwit_signal.is_signalling_soft_fork(1)); assert!(!segwit_signal.is_signalling_soft_fork(2)); diff --git a/bitcoin/src/blockdata/constants.rs b/bitcoin/src/blockdata/constants.rs index 6073570e9a..60d528862c 100644 --- a/bitcoin/src/blockdata/constants.rs +++ b/bitcoin/src/blockdata/constants.rs @@ -6,8 +6,6 @@ //! consensus code. In particular, it defines the genesis block and its //! 
single transaction. -use hashes::sha256d; - use crate::block::{self, Block, Checked}; use crate::internal_macros::{impl_array_newtype, impl_array_newtype_stringify}; use crate::locktime::absolute; @@ -16,7 +14,7 @@ use crate::opcodes::all::*; use crate::pow::CompactTarget; use crate::transaction::{self, OutPoint, Transaction, TxIn, TxOut}; use crate::witness::Witness; -use crate::{script, Amount, BlockHash, Sequence, TestnetVersion}; +use crate::{script, Amount, BlockHash, BlockTime, Sequence, TestnetVersion}; /// How many seconds between blocks we expect on average. pub const TARGET_BLOCK_SPACING: u32 = 600; @@ -38,9 +36,9 @@ pub const PUBKEY_ADDRESS_PREFIX_TEST: u8 = 111; // 0x6f /// Test (tesnet, signet, regtest) script address prefix. pub const SCRIPT_ADDRESS_PREFIX_TEST: u8 = 196; // 0xc4 /// The maximum allowed redeem script size for a P2SH output. -pub const MAX_REDEEM_SCRIPT_SIZE: usize = 520; +pub const MAX_REDEEM_SCRIPT_SIZE: usize = primitives::script::MAX_REDEEM_SCRIPT_SIZE; // 520 /// The maximum allowed redeem script size of the witness script. -pub const MAX_WITNESS_SCRIPT_SIZE: usize = 10_000; +pub const MAX_WITNESS_SCRIPT_SIZE: usize = primitives::script::MAX_WITNESS_SCRIPT_SIZE; // 10_000 /// The maximum allowed size of any single witness stack element. pub const MAX_STACK_ELEMENT_SIZE: usize = 520; /// How may blocks between halvings. @@ -52,6 +50,8 @@ pub const SUBSIDY_HALVING_INTERVAL: u32 = 210_000; pub const MAX_SCRIPTNUM_VALUE: u32 = 0x80000000; // 2^31 /// Number of blocks needed for an output from a coinbase transaction to be spendable. pub const COINBASE_MATURITY: u32 = 100; +/// The maximum allowed size for a serialized block, in bytes (only for buffer size limits) +pub const MAX_BLOCK_SERIALIZED_SIZE: usize = 4_000_000; // This is the 65 byte (uncompressed) pubkey used as the one-and-only output of the genesis transaction. 
// @@ -112,7 +112,7 @@ fn bitcoin_genesis_tx(params: &Params) -> Transaction { witness: Witness::default(), }); - ret.output.push(TxOut { value: Amount::from_sat(50 * 100_000_000), script_pubkey: out_script }); + ret.output.push(TxOut { value: Amount::FIFTY_BTC, script_pubkey: out_script }); // end ret @@ -122,8 +122,7 @@ fn bitcoin_genesis_tx(params: &Params) -> Transaction { pub fn genesis_block(params: impl AsRef) -> Block { let params = params.as_ref(); let transactions = vec![bitcoin_genesis_tx(params)]; - let hash: sha256d::Hash = transactions[0].compute_txid().into(); - let merkle_root: crate::TxMerkleNode = hash.into(); + let merkle_root = block::compute_merkle_root(&transactions).expect("transactions is not empty"); let witness_root = block::compute_witness_root(&transactions); match params.network { @@ -132,7 +131,7 @@ pub fn genesis_block(params: impl AsRef) -> Block { version: block::Version::ONE, prev_blockhash: BlockHash::GENESIS_PREVIOUS_BLOCK_HASH, merkle_root, - time: 1231006505, + time: BlockTime::from_u32(1231006505), bits: CompactTarget::from_consensus(0x1d00ffff), nonce: 2083236893, }, @@ -144,7 +143,7 @@ pub fn genesis_block(params: impl AsRef) -> Block { version: block::Version::ONE, prev_blockhash: BlockHash::GENESIS_PREVIOUS_BLOCK_HASH, merkle_root, - time: 1296688602, + time: BlockTime::from_u32(1296688602), bits: CompactTarget::from_consensus(0x1d00ffff), nonce: 414098458, }, @@ -156,7 +155,7 @@ pub fn genesis_block(params: impl AsRef) -> Block { version: block::Version::ONE, prev_blockhash: BlockHash::GENESIS_PREVIOUS_BLOCK_HASH, merkle_root, - time: 1714777860, + time: BlockTime::from_u32(1714777860), bits: CompactTarget::from_consensus(0x1d00ffff), nonce: 393743547, }, @@ -168,7 +167,7 @@ pub fn genesis_block(params: impl AsRef) -> Block { version: block::Version::ONE, prev_blockhash: BlockHash::GENESIS_PREVIOUS_BLOCK_HASH, merkle_root, - time: 1598918400, + time: BlockTime::from_u32(1598918400), bits: CompactTarget::from_consensus(0x1e0377ae), nonce: 52613770, }, @@ -180,7 +179,7 @@ pub fn genesis_block(params: impl AsRef) -> Block { version: block::Version::ONE, prev_blockhash: BlockHash::GENESIS_PREVIOUS_BLOCK_HASH, merkle_root, - time: 1296688602, + time: BlockTime::from_u32(1296688602), bits: CompactTarget::from_consensus(0x207fffff), nonce: 2, }, @@ -320,7 +319,7 @@ mod test { "4a5e1e4baab89f3a32518a88c31bc87f618f76673e2cc77ab2127b7afdeda33b" ); - assert_eq!(gen.header().time, 1231006505); + assert_eq!(gen.header().time, BlockTime::from_u32(1231006505)); assert_eq!(gen.header().bits, CompactTarget::from_consensus(0x1d00ffff)); assert_eq!(gen.header().nonce, 2083236893); assert_eq!( @@ -338,7 +337,7 @@ mod test { gen.header().merkle_root.to_string(), "4a5e1e4baab89f3a32518a88c31bc87f618f76673e2cc77ab2127b7afdeda33b" ); - assert_eq!(gen.header().time, 1296688602); + assert_eq!(gen.header().time, BlockTime::from_u32(1296688602)); assert_eq!(gen.header().bits, CompactTarget::from_consensus(0x1d00ffff)); assert_eq!(gen.header().nonce, 414098458); assert_eq!( @@ -356,7 +355,7 @@ mod test { gen.header().merkle_root.to_string(), "4a5e1e4baab89f3a32518a88c31bc87f618f76673e2cc77ab2127b7afdeda33b" ); - assert_eq!(gen.header().time, 1598918400); + assert_eq!(gen.header().time, BlockTime::from_u32(1598918400)); assert_eq!(gen.header().bits, CompactTarget::from_consensus(0x1e0377ae)); assert_eq!(gen.header().nonce, 52613770); assert_eq!( @@ -373,7 +372,7 @@ mod test { // The genesis block hash is a double-sha256 and it is displayed backwards. 
let genesis_hash = genesis_block(network).block_hash(); // We abuse the sha256 hash here so we get a LowerHex impl that does not print the hex backwards. - let hash = sha256::Hash::from_slice(genesis_hash.as_byte_array()).unwrap(); + let hash = sha256::Hash::from_byte_array(genesis_hash.to_byte_array()); let want = format!("{:02x}", hash); let chain_hash = ChainHash::using_genesis_block_const(network); diff --git a/bitcoin/src/blockdata/mod.rs b/bitcoin/src/blockdata/mod.rs index a8d0af44bf..a6f39ba9a8 100644 --- a/bitcoin/src/blockdata/mod.rs +++ b/bitcoin/src/blockdata/mod.rs @@ -7,6 +7,7 @@ pub mod block; pub mod constants; +pub mod opcodes; pub mod script; pub mod transaction; pub mod witness; @@ -20,32 +21,10 @@ pub use self::{ /// Implements `FeeRate` and assoctiated features. pub mod fee_rate { + #[cfg(feature = "serde")] + pub use units::fee_rate::serde; /// Re-export everything from the [`units::fee_rate`] module. pub use units::fee_rate::FeeRate; - - #[cfg(test)] - mod tests { - use internals::ToU64 as _; - - use super::*; - - #[test] - fn fee_convenience_functions_agree() { - use hex::test_hex_unwrap as hex; - - use crate::consensus::Decodable; - use crate::transaction::{Transaction, TransactionExt as _}; - - const SOME_TX: &str = "0100000001a15d57094aa7a21a28cb20b59aab8fc7d1149a3bdbcddba9c622e4f5f6a99ece010000006c493046022100f93bb0e7d8db7bd46e40132d1f8242026e045f03a0efe71bbb8e3f475e970d790221009337cd7f1f929f00cc6ff01f03729b069a7c21b59b1736ddfee5db5946c5da8c0121033b9b137ee87d5a812d6f506efdd37f0affa7ffc310711c06c7f3e097c9447c52ffffffff0100e1f505000000001976a9140389035a9225b3839e2bbf32d826a1e222031fd888ac00000000"; - - let raw_tx = hex!(SOME_TX); - let tx: Transaction = Decodable::consensus_decode(&mut raw_tx.as_slice()).unwrap(); - - let rate = FeeRate::from_sat_per_vb(1).expect("1 sat/byte is valid"); - - assert_eq!(rate.fee_vb(tx.vsize().to_u64()), rate.fee_wu(tx.weight())); - } - } } /// Provides absolute and relative locktimes. @@ -94,12 +73,6 @@ pub mod locktime { } } -/// Bitcoin script opcodes. -pub mod opcodes { - /// Re-export everything from the [`primitives::opcodes`] module. - pub use primitives::opcodes::*; -} - /// Implements `Weight` and associated features. pub mod weight { /// Re-export everything from the [`units::weight`] module. diff --git a/bitcoin/src/blockdata/opcodes.rs b/bitcoin/src/blockdata/opcodes.rs new file mode 100644 index 0000000000..5da0295d39 --- /dev/null +++ b/bitcoin/src/blockdata/opcodes.rs @@ -0,0 +1,901 @@ +// SPDX-License-Identifier: CC0-1.0 + +//! Bitcoin script opcodes. +//! +//! Bitcoin's script uses a stack-based assembly language. This module defines +//! all of the opcodes for that language. + +#![allow(non_camel_case_types)] + +use core::fmt; + +#[cfg(feature = "serde")] +use crate::prelude::ToString; + +/// A script Opcode. +/// +/// We do not implement Ord on this type because there is no natural ordering on opcodes, but there +/// may appear to be one (e.g. because all the push opcodes appear in a consecutive block) and we +/// don't want to encourage subtly buggy code. Please use [`Opcode::classify`] to distinguish different +/// types of opcodes. +/// +///
+/// <details> +/// <summary>Example of Core bug caused by assuming ordering</summary> +/// +/// Bitcoin Core's `IsPushOnly` considers `OP_RESERVED` to be a "push code", allowing this opcode +/// in contexts where only pushes are supposed to be allowed. +/// </details>
+#[derive(Copy, Clone, PartialEq, Eq)] +pub struct Opcode { + code: u8, +} + +use self::all::*; + +macro_rules! all_opcodes { + ($($op:ident => $val:expr, $doc:expr);*) => { + /// Enables wildcard imports to bring into scope all opcodes and nothing else. + /// + /// The `all` module is provided so one can use a wildcard import `use bitcoin::opcodes::all::*` to + /// get all the `OP_FOO` opcodes without getting other types defined in `opcodes` (e.g. `Opcode`, `Class`). + /// + /// This module is guaranteed to never contain anything except opcode constants and all opcode + /// constants are guaranteed to begin with OP_. + pub mod all { + use super::Opcode; + $( + #[doc = $doc] + pub const $op: Opcode = Opcode { code: $val}; + )* + } + + /// Push an empty array onto the stack. + pub static OP_0: Opcode = OP_PUSHBYTES_0; + /// Empty stack is also FALSE. + pub static OP_FALSE: Opcode = OP_PUSHBYTES_0; + /// Number 1 is also TRUE. + pub static OP_TRUE: Opcode = OP_PUSHNUM_1; + /// Previously called OP_NOP2. + pub static OP_NOP2: Opcode = OP_CLTV; + /// Previously called OP_NOP3. + pub static OP_NOP3: Opcode = OP_CSV; + + impl fmt::Display for Opcode { + fn fmt(&self, f: &mut core::fmt::Formatter) -> core::fmt::Result { + match *self { + $( + $op => core::fmt::Display::fmt(stringify!($op), f), + )+ + } + } + } + } +} + +all_opcodes! { + OP_PUSHBYTES_0 => 0x00, "Push an empty array onto the stack."; + OP_PUSHBYTES_1 => 0x01, "Push the next byte as an array onto the stack."; + OP_PUSHBYTES_2 => 0x02, "Push the next 2 bytes as an array onto the stack."; + OP_PUSHBYTES_3 => 0x03, "Push the next 3 bytes as an array onto the stack."; + OP_PUSHBYTES_4 => 0x04, "Push the next 4 bytes as an array onto the stack."; + OP_PUSHBYTES_5 => 0x05, "Push the next 5 bytes as an array onto the stack."; + OP_PUSHBYTES_6 => 0x06, "Push the next 6 bytes as an array onto the stack."; + OP_PUSHBYTES_7 => 0x07, "Push the next 7 bytes as an array onto the stack."; + OP_PUSHBYTES_8 => 0x08, "Push the next 8 bytes as an array onto the stack."; + OP_PUSHBYTES_9 => 0x09, "Push the next 9 bytes as an array onto the stack."; + OP_PUSHBYTES_10 => 0x0a, "Push the next 10 bytes as an array onto the stack."; + OP_PUSHBYTES_11 => 0x0b, "Push the next 11 bytes as an array onto the stack."; + OP_PUSHBYTES_12 => 0x0c, "Push the next 12 bytes as an array onto the stack."; + OP_PUSHBYTES_13 => 0x0d, "Push the next 13 bytes as an array onto the stack."; + OP_PUSHBYTES_14 => 0x0e, "Push the next 14 bytes as an array onto the stack."; + OP_PUSHBYTES_15 => 0x0f, "Push the next 15 bytes as an array onto the stack."; + OP_PUSHBYTES_16 => 0x10, "Push the next 16 bytes as an array onto the stack."; + OP_PUSHBYTES_17 => 0x11, "Push the next 17 bytes as an array onto the stack."; + OP_PUSHBYTES_18 => 0x12, "Push the next 18 bytes as an array onto the stack."; + OP_PUSHBYTES_19 => 0x13, "Push the next 19 bytes as an array onto the stack."; + OP_PUSHBYTES_20 => 0x14, "Push the next 20 bytes as an array onto the stack."; + OP_PUSHBYTES_21 => 0x15, "Push the next 21 bytes as an array onto the stack."; + OP_PUSHBYTES_22 => 0x16, "Push the next 22 bytes as an array onto the stack."; + OP_PUSHBYTES_23 => 0x17, "Push the next 23 bytes as an array onto the stack."; + OP_PUSHBYTES_24 => 0x18, "Push the next 24 bytes as an array onto the stack."; + OP_PUSHBYTES_25 => 0x19, "Push the next 25 bytes as an array onto the stack."; + OP_PUSHBYTES_26 => 0x1a, "Push the next 26 bytes as an array onto the stack."; + OP_PUSHBYTES_27 => 0x1b, "Push the next 27 bytes as 
an array onto the stack."; + OP_PUSHBYTES_28 => 0x1c, "Push the next 28 bytes as an array onto the stack."; + OP_PUSHBYTES_29 => 0x1d, "Push the next 29 bytes as an array onto the stack."; + OP_PUSHBYTES_30 => 0x1e, "Push the next 30 bytes as an array onto the stack."; + OP_PUSHBYTES_31 => 0x1f, "Push the next 31 bytes as an array onto the stack."; + OP_PUSHBYTES_32 => 0x20, "Push the next 32 bytes as an array onto the stack."; + OP_PUSHBYTES_33 => 0x21, "Push the next 33 bytes as an array onto the stack."; + OP_PUSHBYTES_34 => 0x22, "Push the next 34 bytes as an array onto the stack."; + OP_PUSHBYTES_35 => 0x23, "Push the next 35 bytes as an array onto the stack."; + OP_PUSHBYTES_36 => 0x24, "Push the next 36 bytes as an array onto the stack."; + OP_PUSHBYTES_37 => 0x25, "Push the next 37 bytes as an array onto the stack."; + OP_PUSHBYTES_38 => 0x26, "Push the next 38 bytes as an array onto the stack."; + OP_PUSHBYTES_39 => 0x27, "Push the next 39 bytes as an array onto the stack."; + OP_PUSHBYTES_40 => 0x28, "Push the next 40 bytes as an array onto the stack."; + OP_PUSHBYTES_41 => 0x29, "Push the next 41 bytes as an array onto the stack."; + OP_PUSHBYTES_42 => 0x2a, "Push the next 42 bytes as an array onto the stack."; + OP_PUSHBYTES_43 => 0x2b, "Push the next 43 bytes as an array onto the stack."; + OP_PUSHBYTES_44 => 0x2c, "Push the next 44 bytes as an array onto the stack."; + OP_PUSHBYTES_45 => 0x2d, "Push the next 45 bytes as an array onto the stack."; + OP_PUSHBYTES_46 => 0x2e, "Push the next 46 bytes as an array onto the stack."; + OP_PUSHBYTES_47 => 0x2f, "Push the next 47 bytes as an array onto the stack."; + OP_PUSHBYTES_48 => 0x30, "Push the next 48 bytes as an array onto the stack."; + OP_PUSHBYTES_49 => 0x31, "Push the next 49 bytes as an array onto the stack."; + OP_PUSHBYTES_50 => 0x32, "Push the next 50 bytes as an array onto the stack."; + OP_PUSHBYTES_51 => 0x33, "Push the next 51 bytes as an array onto the stack."; + OP_PUSHBYTES_52 => 0x34, "Push the next 52 bytes as an array onto the stack."; + OP_PUSHBYTES_53 => 0x35, "Push the next 53 bytes as an array onto the stack."; + OP_PUSHBYTES_54 => 0x36, "Push the next 54 bytes as an array onto the stack."; + OP_PUSHBYTES_55 => 0x37, "Push the next 55 bytes as an array onto the stack."; + OP_PUSHBYTES_56 => 0x38, "Push the next 56 bytes as an array onto the stack."; + OP_PUSHBYTES_57 => 0x39, "Push the next 57 bytes as an array onto the stack."; + OP_PUSHBYTES_58 => 0x3a, "Push the next 58 bytes as an array onto the stack."; + OP_PUSHBYTES_59 => 0x3b, "Push the next 59 bytes as an array onto the stack."; + OP_PUSHBYTES_60 => 0x3c, "Push the next 60 bytes as an array onto the stack."; + OP_PUSHBYTES_61 => 0x3d, "Push the next 61 bytes as an array onto the stack."; + OP_PUSHBYTES_62 => 0x3e, "Push the next 62 bytes as an array onto the stack."; + OP_PUSHBYTES_63 => 0x3f, "Push the next 63 bytes as an array onto the stack."; + OP_PUSHBYTES_64 => 0x40, "Push the next 64 bytes as an array onto the stack."; + OP_PUSHBYTES_65 => 0x41, "Push the next 65 bytes as an array onto the stack."; + OP_PUSHBYTES_66 => 0x42, "Push the next 66 bytes as an array onto the stack."; + OP_PUSHBYTES_67 => 0x43, "Push the next 67 bytes as an array onto the stack."; + OP_PUSHBYTES_68 => 0x44, "Push the next 68 bytes as an array onto the stack."; + OP_PUSHBYTES_69 => 0x45, "Push the next 69 bytes as an array onto the stack."; + OP_PUSHBYTES_70 => 0x46, "Push the next 70 bytes as an array onto the stack."; + OP_PUSHBYTES_71 => 0x47, "Push the next 71 
bytes as an array onto the stack."; + OP_PUSHBYTES_72 => 0x48, "Push the next 72 bytes as an array onto the stack."; + OP_PUSHBYTES_73 => 0x49, "Push the next 73 bytes as an array onto the stack."; + OP_PUSHBYTES_74 => 0x4a, "Push the next 74 bytes as an array onto the stack."; + OP_PUSHBYTES_75 => 0x4b, "Push the next 75 bytes as an array onto the stack."; + OP_PUSHDATA1 => 0x4c, "Read the next byte as N; push the next N bytes as an array onto the stack."; + OP_PUSHDATA2 => 0x4d, "Read the next 2 bytes as N; push the next N bytes as an array onto the stack."; + OP_PUSHDATA4 => 0x4e, "Read the next 4 bytes as N; push the next N bytes as an array onto the stack."; + OP_PUSHNUM_NEG1 => 0x4f, "Push the array `0x81` onto the stack."; + OP_RESERVED => 0x50, "Synonym for OP_RETURN."; + OP_PUSHNUM_1 => 0x51, "Push the array `0x01` onto the stack."; + OP_PUSHNUM_2 => 0x52, "Push the array `0x02` onto the stack."; + OP_PUSHNUM_3 => 0x53, "Push the array `0x03` onto the stack."; + OP_PUSHNUM_4 => 0x54, "Push the array `0x04` onto the stack."; + OP_PUSHNUM_5 => 0x55, "Push the array `0x05` onto the stack."; + OP_PUSHNUM_6 => 0x56, "Push the array `0x06` onto the stack."; + OP_PUSHNUM_7 => 0x57, "Push the array `0x07` onto the stack."; + OP_PUSHNUM_8 => 0x58, "Push the array `0x08` onto the stack."; + OP_PUSHNUM_9 => 0x59, "Push the array `0x09` onto the stack."; + OP_PUSHNUM_10 => 0x5a, "Push the array `0x0a` onto the stack."; + OP_PUSHNUM_11 => 0x5b, "Push the array `0x0b` onto the stack."; + OP_PUSHNUM_12 => 0x5c, "Push the array `0x0c` onto the stack."; + OP_PUSHNUM_13 => 0x5d, "Push the array `0x0d` onto the stack."; + OP_PUSHNUM_14 => 0x5e, "Push the array `0x0e` onto the stack."; + OP_PUSHNUM_15 => 0x5f, "Push the array `0x0f` onto the stack."; + OP_PUSHNUM_16 => 0x60, "Push the array `0x10` onto the stack."; + OP_NOP => 0x61, "Does nothing."; + OP_VER => 0x62, "Synonym for OP_RETURN."; + OP_IF => 0x63, "Pop and execute the next statements if a nonzero element was popped."; + OP_NOTIF => 0x64, "Pop and execute the next statements if a zero element was popped."; + OP_VERIF => 0x65, "Fail the script unconditionally, does not even need to be executed."; + OP_VERNOTIF => 0x66, "Fail the script unconditionally, does not even need to be executed."; + OP_ELSE => 0x67, "Execute statements if those after the previous OP_IF were not, and vice-versa. \ + If there is no previous OP_IF, this acts as a RETURN."; + OP_ENDIF => 0x68, "Pop and execute the next statements if a zero element was popped."; + OP_VERIFY => 0x69, "If the top value is zero or the stack is empty, fail; otherwise, pop the stack."; + OP_RETURN => 0x6a, "Fail the script immediately. 
(Must be executed.)."; + OP_TOALTSTACK => 0x6b, "Pop one element from the main stack onto the alt stack."; + OP_FROMALTSTACK => 0x6c, "Pop one element from the alt stack onto the main stack."; + OP_2DROP => 0x6d, "Drops the top two stack items."; + OP_2DUP => 0x6e, "Duplicates the top two stack items as AB -> ABAB."; + OP_3DUP => 0x6f, "Duplicates the two three stack items as ABC -> ABCABC."; + OP_2OVER => 0x70, "Copies the two stack items of items two spaces back to the front, as xxAB -> ABxxAB."; + OP_2ROT => 0x71, "Moves the two stack items four spaces back to the front, as xxxxAB -> ABxxxx."; + OP_2SWAP => 0x72, "Swaps the top two pairs, as ABCD -> CDAB."; + OP_IFDUP => 0x73, "Duplicate the top stack element unless it is zero."; + OP_DEPTH => 0x74, "Push the current number of stack items onto the stack."; + OP_DROP => 0x75, "Drops the top stack item."; + OP_DUP => 0x76, "Duplicates the top stack item."; + OP_NIP => 0x77, "Drops the second-to-top stack item."; + OP_OVER => 0x78, "Copies the second-to-top stack item, as xA -> AxA."; + OP_PICK => 0x79, "Pop the top stack element as N. Copy the Nth stack element to the top."; + OP_ROLL => 0x7a, "Pop the top stack element as N. Move the Nth stack element to the top."; + OP_ROT => 0x7b, "Rotate the top three stack items, as [top next1 next2] -> [next2 top next1]."; + OP_SWAP => 0x7c, "Swap the top two stack items."; + OP_TUCK => 0x7d, "Copy the top stack item to before the second item, as [top next] -> [top next top]."; + OP_CAT => 0x7e, "Fail the script unconditionally, does not even need to be executed."; + OP_SUBSTR => 0x7f, "Fail the script unconditionally, does not even need to be executed."; + OP_LEFT => 0x80, "Fail the script unconditionally, does not even need to be executed."; + OP_RIGHT => 0x81, "Fail the script unconditionally, does not even need to be executed."; + OP_SIZE => 0x82, "Pushes the length of the top stack item onto the stack."; + OP_INVERT => 0x83, "Fail the script unconditionally, does not even need to be executed."; + OP_AND => 0x84, "Fail the script unconditionally, does not even need to be executed."; + OP_OR => 0x85, "Fail the script unconditionally, does not even need to be executed."; + OP_XOR => 0x86, "Fail the script unconditionally, does not even need to be executed."; + OP_EQUAL => 0x87, "Pushes 1 if the inputs are exactly equal, 0 otherwise."; + OP_EQUALVERIFY => 0x88, "Returns success if the inputs are exactly equal, failure otherwise."; + OP_RESERVED1 => 0x89, "Synonym for OP_RETURN."; + OP_RESERVED2 => 0x8a, "Synonym for OP_RETURN."; + OP_1ADD => 0x8b, "Increment the top stack element in place."; + OP_1SUB => 0x8c, "Decrement the top stack element in place."; + OP_2MUL => 0x8d, "Fail the script unconditionally, does not even need to be executed."; + OP_2DIV => 0x8e, "Fail the script unconditionally, does not even need to be executed."; + OP_NEGATE => 0x8f, "Multiply the top stack item by -1 in place."; + OP_ABS => 0x90, "Absolute value the top stack item in place."; + OP_NOT => 0x91, "Map 0 to 1 and everything else to 0, in place."; + OP_0NOTEQUAL => 0x92, "Map 0 to 0 and everything else to 1, in place."; + OP_ADD => 0x93, "Pop two stack items and push their sum."; + OP_SUB => 0x94, "Pop two stack items and push the second minus the top."; + OP_MUL => 0x95, "Fail the script unconditionally, does not even need to be executed."; + OP_DIV => 0x96, "Fail the script unconditionally, does not even need to be executed."; + OP_MOD => 0x97, "Fail the script unconditionally, does not even need to be executed."; + 
OP_LSHIFT => 0x98, "Fail the script unconditionally, does not even need to be executed."; + OP_RSHIFT => 0x99, "Fail the script unconditionally, does not even need to be executed."; + OP_BOOLAND => 0x9a, "Pop the top two stack items and push 1 if both are nonzero, else push 0."; + OP_BOOLOR => 0x9b, "Pop the top two stack items and push 1 if either is nonzero, else push 0."; + OP_NUMEQUAL => 0x9c, "Pop the top two stack items and push 1 if both are numerically equal, else push 0."; + OP_NUMEQUALVERIFY => 0x9d, "Pop the top two stack items and return success if both are numerically equal, else return failure."; + OP_NUMNOTEQUAL => 0x9e, "Pop the top two stack items and push 0 if both are numerically equal, else push 1."; + OP_LESSTHAN => 0x9f, "Pop the top two items; push 1 if the second is less than the top, 0 otherwise."; + OP_GREATERTHAN => 0xa0, "Pop the top two items; push 1 if the second is greater than the top, 0 otherwise."; + OP_LESSTHANOREQUAL => 0xa1, "Pop the top two items; push 1 if the second is <= the top, 0 otherwise."; + OP_GREATERTHANOREQUAL => 0xa2, "Pop the top two items; push 1 if the second is >= the top, 0 otherwise."; + OP_MIN => 0xa3, "Pop the top two items; push the smaller."; + OP_MAX => 0xa4, "Pop the top two items; push the larger."; + OP_WITHIN => 0xa5, "Pop the top three items; if the top is >= the second and < the third, push 1, otherwise push 0."; + OP_RIPEMD160 => 0xa6, "Pop the top stack item and push its RIPEMD160 hash."; + OP_SHA1 => 0xa7, "Pop the top stack item and push its SHA1 hash."; + OP_SHA256 => 0xa8, "Pop the top stack item and push its SHA256 hash."; + OP_HASH160 => 0xa9, "Pop the top stack item and push its RIPEMD(SHA256) hash."; + OP_HASH256 => 0xaa, "Pop the top stack item and push its SHA256(SHA256) hash."; + OP_CODESEPARATOR => 0xab, "Ignore this and everything preceding when deciding what to sign when signature-checking."; + OP_CHECKSIG => 0xac, " pushing 1/0 for success/failure."; + OP_CHECKSIGVERIFY => 0xad, " returning success/failure."; + OP_CHECKMULTISIG => 0xae, "Pop N, N pubkeys, M, M signatures, a dummy (due to bug in reference code), \ + and verify that all M signatures are valid. 
Push 1 for 'all valid', 0 otherwise."; + OP_CHECKMULTISIGVERIFY => 0xaf, "Like the above but return success/failure."; + OP_NOP1 => 0xb0, "Does nothing."; + OP_CLTV => 0xb1, ""; + OP_CSV => 0xb2, ""; + OP_NOP4 => 0xb3, "Does nothing."; + OP_NOP5 => 0xb4, "Does nothing."; + OP_NOP6 => 0xb5, "Does nothing."; + OP_NOP7 => 0xb6, "Does nothing."; + OP_NOP8 => 0xb7, "Does nothing."; + OP_NOP9 => 0xb8, "Does nothing."; + OP_NOP10 => 0xb9, "Does nothing."; + // Every other opcode acts as OP_RETURN + OP_CHECKSIGADD => 0xba, "OP_CHECKSIGADD post tapscript."; + OP_RETURN_187 => 0xbb, "Synonym for OP_RETURN."; + OP_RETURN_188 => 0xbc, "Synonym for OP_RETURN."; + OP_RETURN_189 => 0xbd, "Synonym for OP_RETURN."; + OP_RETURN_190 => 0xbe, "Synonym for OP_RETURN."; + OP_RETURN_191 => 0xbf, "Synonym for OP_RETURN."; + OP_RETURN_192 => 0xc0, "Synonym for OP_RETURN."; + OP_RETURN_193 => 0xc1, "Synonym for OP_RETURN."; + OP_RETURN_194 => 0xc2, "Synonym for OP_RETURN."; + OP_RETURN_195 => 0xc3, "Synonym for OP_RETURN."; + OP_RETURN_196 => 0xc4, "Synonym for OP_RETURN."; + OP_RETURN_197 => 0xc5, "Synonym for OP_RETURN."; + OP_RETURN_198 => 0xc6, "Synonym for OP_RETURN."; + OP_RETURN_199 => 0xc7, "Synonym for OP_RETURN."; + OP_RETURN_200 => 0xc8, "Synonym for OP_RETURN."; + OP_RETURN_201 => 0xc9, "Synonym for OP_RETURN."; + OP_RETURN_202 => 0xca, "Synonym for OP_RETURN."; + OP_RETURN_203 => 0xcb, "Synonym for OP_RETURN."; + OP_RETURN_204 => 0xcc, "Synonym for OP_RETURN."; + OP_RETURN_205 => 0xcd, "Synonym for OP_RETURN."; + OP_RETURN_206 => 0xce, "Synonym for OP_RETURN."; + OP_RETURN_207 => 0xcf, "Synonym for OP_RETURN."; + OP_RETURN_208 => 0xd0, "Synonym for OP_RETURN."; + OP_RETURN_209 => 0xd1, "Synonym for OP_RETURN."; + OP_RETURN_210 => 0xd2, "Synonym for OP_RETURN."; + OP_RETURN_211 => 0xd3, "Synonym for OP_RETURN."; + OP_RETURN_212 => 0xd4, "Synonym for OP_RETURN."; + OP_RETURN_213 => 0xd5, "Synonym for OP_RETURN."; + OP_RETURN_214 => 0xd6, "Synonym for OP_RETURN."; + OP_RETURN_215 => 0xd7, "Synonym for OP_RETURN."; + OP_RETURN_216 => 0xd8, "Synonym for OP_RETURN."; + OP_RETURN_217 => 0xd9, "Synonym for OP_RETURN."; + OP_RETURN_218 => 0xda, "Synonym for OP_RETURN."; + OP_RETURN_219 => 0xdb, "Synonym for OP_RETURN."; + OP_RETURN_220 => 0xdc, "Synonym for OP_RETURN."; + OP_RETURN_221 => 0xdd, "Synonym for OP_RETURN."; + OP_RETURN_222 => 0xde, "Synonym for OP_RETURN."; + OP_RETURN_223 => 0xdf, "Synonym for OP_RETURN."; + OP_RETURN_224 => 0xe0, "Synonym for OP_RETURN."; + OP_RETURN_225 => 0xe1, "Synonym for OP_RETURN."; + OP_RETURN_226 => 0xe2, "Synonym for OP_RETURN."; + OP_RETURN_227 => 0xe3, "Synonym for OP_RETURN."; + OP_RETURN_228 => 0xe4, "Synonym for OP_RETURN."; + OP_RETURN_229 => 0xe5, "Synonym for OP_RETURN."; + OP_RETURN_230 => 0xe6, "Synonym for OP_RETURN."; + OP_RETURN_231 => 0xe7, "Synonym for OP_RETURN."; + OP_RETURN_232 => 0xe8, "Synonym for OP_RETURN."; + OP_RETURN_233 => 0xe9, "Synonym for OP_RETURN."; + OP_RETURN_234 => 0xea, "Synonym for OP_RETURN."; + OP_RETURN_235 => 0xeb, "Synonym for OP_RETURN."; + OP_RETURN_236 => 0xec, "Synonym for OP_RETURN."; + OP_RETURN_237 => 0xed, "Synonym for OP_RETURN."; + OP_RETURN_238 => 0xee, "Synonym for OP_RETURN."; + OP_RETURN_239 => 0xef, "Synonym for OP_RETURN."; + OP_RETURN_240 => 0xf0, "Synonym for OP_RETURN."; + OP_RETURN_241 => 0xf1, "Synonym for OP_RETURN."; + OP_RETURN_242 => 0xf2, "Synonym for OP_RETURN."; + OP_RETURN_243 => 0xf3, "Synonym for OP_RETURN."; + OP_RETURN_244 => 0xf4, "Synonym for OP_RETURN."; + OP_RETURN_245 => 0xf5, "Synonym for 
OP_RETURN."; + OP_RETURN_246 => 0xf6, "Synonym for OP_RETURN."; + OP_RETURN_247 => 0xf7, "Synonym for OP_RETURN."; + OP_RETURN_248 => 0xf8, "Synonym for OP_RETURN."; + OP_RETURN_249 => 0xf9, "Synonym for OP_RETURN."; + OP_RETURN_250 => 0xfa, "Synonym for OP_RETURN."; + OP_RETURN_251 => 0xfb, "Synonym for OP_RETURN."; + OP_RETURN_252 => 0xfc, "Synonym for OP_RETURN."; + OP_RETURN_253 => 0xfd, "Synonym for OP_RETURN."; + OP_RETURN_254 => 0xfe, "Synonym for OP_RETURN."; + OP_INVALIDOPCODE => 0xff, "Synonym for OP_RETURN." +} + +/// Classification context for the opcode. +/// +/// Some opcodes like [`OP_RESERVED`] abort the script in `ClassifyContext::Legacy` context, +/// but will act as `OP_SUCCESSx` in `ClassifyContext::TapScript` (see BIP342 for full list). +#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] +pub enum ClassifyContext { + /// Opcode used in tapscript context. + TapScript, + /// Opcode used in legacy context. + Legacy, +} + +impl Opcode { + /// Classifies an Opcode into a broad class. + #[inline] + #[must_use] + pub fn classify(self, ctx: ClassifyContext) -> Class { + match (self, ctx) { + // 3 opcodes illegal in all contexts + (OP_VERIF, _) | (OP_VERNOTIF, _) | (OP_INVALIDOPCODE, _) => Class::IllegalOp, + + // 15 opcodes illegal in Legacy context + #[rustfmt::skip] + (OP_CAT, ctx) | (OP_SUBSTR, ctx) + | (OP_LEFT, ctx) | (OP_RIGHT, ctx) + | (OP_INVERT, ctx) + | (OP_AND, ctx) | (OP_OR, ctx) | (OP_XOR, ctx) + | (OP_2MUL, ctx) | (OP_2DIV, ctx) + | (OP_MUL, ctx) | (OP_DIV, ctx) | (OP_MOD, ctx) + | (OP_LSHIFT, ctx) | (OP_RSHIFT, ctx) if ctx == ClassifyContext::Legacy => Class::IllegalOp, + + // 87 opcodes of SuccessOp class only in TapScript context + (op, ClassifyContext::TapScript) + if op.code == 80 + || op.code == 98 + || (op.code >= 126 && op.code <= 129) + || (op.code >= 131 && op.code <= 134) + || (op.code >= 137 && op.code <= 138) + || (op.code >= 141 && op.code <= 142) + || (op.code >= 149 && op.code <= 153) + || (op.code >= 187 && op.code <= 254) => + Class::SuccessOp, + + // 11 opcodes of NoOp class + (OP_NOP, _) => Class::NoOp, + (op, _) if op.code >= OP_NOP1.code && op.code <= OP_NOP10.code => Class::NoOp, + + // 1 opcode for `OP_RETURN` + (OP_RETURN, _) => Class::ReturnOp, + + // 4 opcodes operating equally to `OP_RETURN` only in Legacy context + (OP_RESERVED, ctx) | (OP_RESERVED1, ctx) | (OP_RESERVED2, ctx) | (OP_VER, ctx) + if ctx == ClassifyContext::Legacy => + Class::ReturnOp, + + // 71 opcodes operating equally to `OP_RETURN` only in Legacy context + (op, ClassifyContext::Legacy) if op.code >= OP_CHECKSIGADD.code => Class::ReturnOp, + + // 2 opcodes operating equally to `OP_RETURN` only in TapScript context + (OP_CHECKMULTISIG, ClassifyContext::TapScript) + | (OP_CHECKMULTISIGVERIFY, ClassifyContext::TapScript) => Class::ReturnOp, + + // 1 opcode of PushNum class + (OP_PUSHNUM_NEG1, _) => Class::PushNum(-1), + + // 16 opcodes of PushNum class + (op, _) if op.code >= OP_PUSHNUM_1.code && op.code <= OP_PUSHNUM_16.code => + Class::PushNum(1 + self.code as i32 - OP_PUSHNUM_1.code as i32), + + // 76 opcodes of PushBytes class + (op, _) if op.code <= OP_PUSHBYTES_75.code => Class::PushBytes(self.code as u32), + + // opcodes of Ordinary class: 61 for Legacy and 60 for TapScript context + (_, _) => Class::Ordinary(Ordinary::with(self)), + } + } + + /// Encodes [`Opcode`] as a byte. + #[inline] + pub const fn to_u8(self) -> u8 { self.code } + + /// Encodes PUSHNUM [`Opcode`] as a `u8` representing its number (1-16). 
+ /// + /// Does not convert `OP_FALSE` to 0. Only `1` to `OP_PUSHNUM_16` are covered. + /// + /// # Returns + /// + /// Returns `None` if `self` is not a PUSHNUM. + #[inline] + #[must_use] + pub const fn decode_pushnum(self) -> Option { + const START: u8 = OP_PUSHNUM_1.code; + const END: u8 = OP_PUSHNUM_16.code; + match self.code { + START..=END => Some(self.code - START + 1), + _ => None, + } + } +} + +impl From for Opcode { + #[inline] + fn from(b: u8) -> Opcode { Opcode { code: b } } +} + +impl fmt::Debug for Opcode { + fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { fmt::Display::fmt(self, f) } +} + +#[cfg(feature = "serde")] +impl serde::Serialize for Opcode { + fn serialize(&self, serializer: S) -> Result + where + S: serde::Serializer, + { + serializer.serialize_str(&self.to_string()) + } +} + +/// Broad categories of opcodes with similar behavior. +#[derive(Copy, Clone, PartialEq, Eq, Debug)] +pub enum Class { + /// Pushes the given number onto the stack. + PushNum(i32), + /// Pushes the given number of bytes onto the stack. + PushBytes(u32), + /// Fails the script if executed. + ReturnOp, + /// Succeeds the script even if not executed. + SuccessOp, + /// Fails the script even if not executed. + IllegalOp, + /// Does nothing. + NoOp, + /// Any opcode not covered above. + Ordinary(Ordinary), +} + +macro_rules! ordinary_opcode { + ($($op:ident),*) => ( + #[repr(u8)] + #[doc(hidden)] + #[derive(Copy, Clone, PartialEq, Eq, Debug)] + pub enum Ordinary { + $( $op = $op.code ),* + } + + impl fmt::Display for Ordinary { + fn fmt(&self, f: &mut fmt::Formatter) -> core::fmt::Result { + match *self { + $(Ordinary::$op => { f.pad(stringify!($op)) }),* + } + } + } + + impl Ordinary { + fn with(b: Opcode) -> Self { + match b { + $( $op => { Ordinary::$op } ),* + _ => unreachable!("construction of `Ordinary` type from non-ordinary opcode {}", b), + } + } + + /// Constructs a new [`Ordinary`] from an [`Opcode`]. + pub fn from_opcode(b: Opcode) -> Option { + match b { + $( $op => { Some(Ordinary::$op) } ),* + _ => None, + } + } + } + ); +} + +// "Ordinary" opcodes -- should be 61 of these +ordinary_opcode! { + // pushdata + OP_PUSHDATA1, OP_PUSHDATA2, OP_PUSHDATA4, + // control flow + OP_IF, OP_NOTIF, OP_ELSE, OP_ENDIF, OP_VERIFY, + // stack + OP_TOALTSTACK, OP_FROMALTSTACK, + OP_2DROP, OP_2DUP, OP_3DUP, OP_2OVER, OP_2ROT, OP_2SWAP, + OP_DROP, OP_DUP, OP_NIP, OP_OVER, OP_PICK, OP_ROLL, OP_ROT, OP_SWAP, OP_TUCK, + OP_IFDUP, OP_DEPTH, OP_SIZE, + // equality + OP_EQUAL, OP_EQUALVERIFY, + // arithmetic + OP_1ADD, OP_1SUB, OP_NEGATE, OP_ABS, OP_NOT, OP_0NOTEQUAL, + OP_ADD, OP_SUB, OP_BOOLAND, OP_BOOLOR, + OP_NUMEQUAL, OP_NUMEQUALVERIFY, OP_NUMNOTEQUAL, OP_LESSTHAN, + OP_GREATERTHAN, OP_LESSTHANOREQUAL, OP_GREATERTHANOREQUAL, + OP_MIN, OP_MAX, OP_WITHIN, + // crypto + OP_RIPEMD160, OP_SHA1, OP_SHA256, OP_HASH160, OP_HASH256, + OP_CODESEPARATOR, OP_CHECKSIG, OP_CHECKSIGVERIFY, + OP_CHECKMULTISIG, OP_CHECKMULTISIGVERIFY, + OP_CHECKSIGADD +} + +impl Ordinary { + /// Encodes [`Opcode`] as a byte. + #[inline] + pub fn to_u8(self) -> u8 { self as u8 } +} + +#[cfg(test)] +mod tests { + use std::collections::HashSet; + + use super::*; + + macro_rules! 
roundtrip { + ($unique:expr, $op:ident) => { + assert_eq!($op, Opcode::from($op.to_u8())); + + let s1 = format!("{}", $op); + let s2 = format!("{:?}", $op); + assert_eq!(s1, s2); + assert_eq!(s1, stringify!($op)); + assert!($unique.insert(s1)); + }; + } + + #[test] + fn formatting_works() { + let op = all::OP_NOP; + let s = format!("{:>10}", op); + assert_eq!(s, " OP_NOP"); + } + + #[test] + fn ordinary_op_code() { + let ordinary_op = Ordinary::from_opcode(OP_PUSHDATA1).expect("0x4C"); + assert_eq!(ordinary_op.to_u8(), 0x4C_u8); + } + + #[test] + fn decode_pushnum() { + // Test all possible opcodes + // - Sanity check + assert_eq!(OP_PUSHNUM_1.code, 0x51_u8); + assert_eq!(OP_PUSHNUM_16.code, 0x60_u8); + for i in 0x00..=0xff_u8 { + let expected = match i { + // OP_PUSHNUM_1 ..= OP_PUSHNUM_16 + 0x51..=0x60 => Some(i - 0x50), + _ => None, + }; + assert_eq!(Opcode::from(i).decode_pushnum(), expected); + } + + // Test the named opcode constants + // - This is the OP right before PUSHNUMs start + assert!(OP_RESERVED.decode_pushnum().is_none()); + assert_eq!(OP_PUSHNUM_1.decode_pushnum().expect("pushnum"), 1); + assert_eq!(OP_PUSHNUM_2.decode_pushnum().expect("pushnum"), 2); + assert_eq!(OP_PUSHNUM_3.decode_pushnum().expect("pushnum"), 3); + assert_eq!(OP_PUSHNUM_4.decode_pushnum().expect("pushnum"), 4); + assert_eq!(OP_PUSHNUM_5.decode_pushnum().expect("pushnum"), 5); + assert_eq!(OP_PUSHNUM_6.decode_pushnum().expect("pushnum"), 6); + assert_eq!(OP_PUSHNUM_7.decode_pushnum().expect("pushnum"), 7); + assert_eq!(OP_PUSHNUM_8.decode_pushnum().expect("pushnum"), 8); + assert_eq!(OP_PUSHNUM_9.decode_pushnum().expect("pushnum"), 9); + assert_eq!(OP_PUSHNUM_10.decode_pushnum().expect("pushnum"), 10); + assert_eq!(OP_PUSHNUM_11.decode_pushnum().expect("pushnum"), 11); + assert_eq!(OP_PUSHNUM_12.decode_pushnum().expect("pushnum"), 12); + assert_eq!(OP_PUSHNUM_13.decode_pushnum().expect("pushnum"), 13); + assert_eq!(OP_PUSHNUM_14.decode_pushnum().expect("pushnum"), 14); + assert_eq!(OP_PUSHNUM_15.decode_pushnum().expect("pushnum"), 15); + assert_eq!(OP_PUSHNUM_16.decode_pushnum().expect("pushnum"), 16); + // - This is the OP right after PUSHNUMs end + assert!(OP_NOP.decode_pushnum().is_none()); + } + + #[test] + fn classify_test() { + let op174 = OP_CHECKMULTISIG; + assert_eq!( + op174.classify(ClassifyContext::Legacy), + Class::Ordinary(Ordinary::OP_CHECKMULTISIG) + ); + assert_eq!(op174.classify(ClassifyContext::TapScript), Class::ReturnOp); + + let op175 = OP_CHECKMULTISIGVERIFY; + assert_eq!( + op175.classify(ClassifyContext::Legacy), + Class::Ordinary(Ordinary::OP_CHECKMULTISIGVERIFY) + ); + assert_eq!(op175.classify(ClassifyContext::TapScript), Class::ReturnOp); + + let op186 = OP_CHECKSIGADD; + assert_eq!(op186.classify(ClassifyContext::Legacy), Class::ReturnOp); + assert_eq!( + op186.classify(ClassifyContext::TapScript), + Class::Ordinary(Ordinary::OP_CHECKSIGADD) + ); + + let op187 = OP_RETURN_187; + assert_eq!(op187.classify(ClassifyContext::Legacy), Class::ReturnOp); + assert_eq!(op187.classify(ClassifyContext::TapScript), Class::SuccessOp); + } + + #[test] + fn str_roundtrip() { + let mut unique = HashSet::new(); + roundtrip!(unique, OP_PUSHBYTES_0); + roundtrip!(unique, OP_PUSHBYTES_1); + roundtrip!(unique, OP_PUSHBYTES_2); + roundtrip!(unique, OP_PUSHBYTES_3); + roundtrip!(unique, OP_PUSHBYTES_4); + roundtrip!(unique, OP_PUSHBYTES_5); + roundtrip!(unique, OP_PUSHBYTES_6); + roundtrip!(unique, OP_PUSHBYTES_7); + roundtrip!(unique, OP_PUSHBYTES_8); + roundtrip!(unique, OP_PUSHBYTES_9); + 
roundtrip!(unique, OP_PUSHBYTES_10); + roundtrip!(unique, OP_PUSHBYTES_11); + roundtrip!(unique, OP_PUSHBYTES_12); + roundtrip!(unique, OP_PUSHBYTES_13); + roundtrip!(unique, OP_PUSHBYTES_14); + roundtrip!(unique, OP_PUSHBYTES_15); + roundtrip!(unique, OP_PUSHBYTES_16); + roundtrip!(unique, OP_PUSHBYTES_17); + roundtrip!(unique, OP_PUSHBYTES_18); + roundtrip!(unique, OP_PUSHBYTES_19); + roundtrip!(unique, OP_PUSHBYTES_20); + roundtrip!(unique, OP_PUSHBYTES_21); + roundtrip!(unique, OP_PUSHBYTES_22); + roundtrip!(unique, OP_PUSHBYTES_23); + roundtrip!(unique, OP_PUSHBYTES_24); + roundtrip!(unique, OP_PUSHBYTES_25); + roundtrip!(unique, OP_PUSHBYTES_26); + roundtrip!(unique, OP_PUSHBYTES_27); + roundtrip!(unique, OP_PUSHBYTES_28); + roundtrip!(unique, OP_PUSHBYTES_29); + roundtrip!(unique, OP_PUSHBYTES_30); + roundtrip!(unique, OP_PUSHBYTES_31); + roundtrip!(unique, OP_PUSHBYTES_32); + roundtrip!(unique, OP_PUSHBYTES_33); + roundtrip!(unique, OP_PUSHBYTES_34); + roundtrip!(unique, OP_PUSHBYTES_35); + roundtrip!(unique, OP_PUSHBYTES_36); + roundtrip!(unique, OP_PUSHBYTES_37); + roundtrip!(unique, OP_PUSHBYTES_38); + roundtrip!(unique, OP_PUSHBYTES_39); + roundtrip!(unique, OP_PUSHBYTES_40); + roundtrip!(unique, OP_PUSHBYTES_41); + roundtrip!(unique, OP_PUSHBYTES_42); + roundtrip!(unique, OP_PUSHBYTES_43); + roundtrip!(unique, OP_PUSHBYTES_44); + roundtrip!(unique, OP_PUSHBYTES_45); + roundtrip!(unique, OP_PUSHBYTES_46); + roundtrip!(unique, OP_PUSHBYTES_47); + roundtrip!(unique, OP_PUSHBYTES_48); + roundtrip!(unique, OP_PUSHBYTES_49); + roundtrip!(unique, OP_PUSHBYTES_50); + roundtrip!(unique, OP_PUSHBYTES_51); + roundtrip!(unique, OP_PUSHBYTES_52); + roundtrip!(unique, OP_PUSHBYTES_53); + roundtrip!(unique, OP_PUSHBYTES_54); + roundtrip!(unique, OP_PUSHBYTES_55); + roundtrip!(unique, OP_PUSHBYTES_56); + roundtrip!(unique, OP_PUSHBYTES_57); + roundtrip!(unique, OP_PUSHBYTES_58); + roundtrip!(unique, OP_PUSHBYTES_59); + roundtrip!(unique, OP_PUSHBYTES_60); + roundtrip!(unique, OP_PUSHBYTES_61); + roundtrip!(unique, OP_PUSHBYTES_62); + roundtrip!(unique, OP_PUSHBYTES_63); + roundtrip!(unique, OP_PUSHBYTES_64); + roundtrip!(unique, OP_PUSHBYTES_65); + roundtrip!(unique, OP_PUSHBYTES_66); + roundtrip!(unique, OP_PUSHBYTES_67); + roundtrip!(unique, OP_PUSHBYTES_68); + roundtrip!(unique, OP_PUSHBYTES_69); + roundtrip!(unique, OP_PUSHBYTES_70); + roundtrip!(unique, OP_PUSHBYTES_71); + roundtrip!(unique, OP_PUSHBYTES_72); + roundtrip!(unique, OP_PUSHBYTES_73); + roundtrip!(unique, OP_PUSHBYTES_74); + roundtrip!(unique, OP_PUSHBYTES_75); + roundtrip!(unique, OP_PUSHDATA1); + roundtrip!(unique, OP_PUSHDATA2); + roundtrip!(unique, OP_PUSHDATA4); + roundtrip!(unique, OP_PUSHNUM_NEG1); + roundtrip!(unique, OP_RESERVED); + roundtrip!(unique, OP_PUSHNUM_1); + roundtrip!(unique, OP_PUSHNUM_2); + roundtrip!(unique, OP_PUSHNUM_3); + roundtrip!(unique, OP_PUSHNUM_4); + roundtrip!(unique, OP_PUSHNUM_5); + roundtrip!(unique, OP_PUSHNUM_6); + roundtrip!(unique, OP_PUSHNUM_7); + roundtrip!(unique, OP_PUSHNUM_8); + roundtrip!(unique, OP_PUSHNUM_9); + roundtrip!(unique, OP_PUSHNUM_10); + roundtrip!(unique, OP_PUSHNUM_11); + roundtrip!(unique, OP_PUSHNUM_12); + roundtrip!(unique, OP_PUSHNUM_13); + roundtrip!(unique, OP_PUSHNUM_14); + roundtrip!(unique, OP_PUSHNUM_15); + roundtrip!(unique, OP_PUSHNUM_16); + roundtrip!(unique, OP_NOP); + roundtrip!(unique, OP_VER); + roundtrip!(unique, OP_IF); + roundtrip!(unique, OP_NOTIF); + roundtrip!(unique, OP_VERIF); + roundtrip!(unique, OP_VERNOTIF); + roundtrip!(unique, 
OP_ELSE); + roundtrip!(unique, OP_ENDIF); + roundtrip!(unique, OP_VERIFY); + roundtrip!(unique, OP_RETURN); + roundtrip!(unique, OP_TOALTSTACK); + roundtrip!(unique, OP_FROMALTSTACK); + roundtrip!(unique, OP_2DROP); + roundtrip!(unique, OP_2DUP); + roundtrip!(unique, OP_3DUP); + roundtrip!(unique, OP_2OVER); + roundtrip!(unique, OP_2ROT); + roundtrip!(unique, OP_2SWAP); + roundtrip!(unique, OP_IFDUP); + roundtrip!(unique, OP_DEPTH); + roundtrip!(unique, OP_DROP); + roundtrip!(unique, OP_DUP); + roundtrip!(unique, OP_NIP); + roundtrip!(unique, OP_OVER); + roundtrip!(unique, OP_PICK); + roundtrip!(unique, OP_ROLL); + roundtrip!(unique, OP_ROT); + roundtrip!(unique, OP_SWAP); + roundtrip!(unique, OP_TUCK); + roundtrip!(unique, OP_CAT); + roundtrip!(unique, OP_SUBSTR); + roundtrip!(unique, OP_LEFT); + roundtrip!(unique, OP_RIGHT); + roundtrip!(unique, OP_SIZE); + roundtrip!(unique, OP_INVERT); + roundtrip!(unique, OP_AND); + roundtrip!(unique, OP_OR); + roundtrip!(unique, OP_XOR); + roundtrip!(unique, OP_EQUAL); + roundtrip!(unique, OP_EQUALVERIFY); + roundtrip!(unique, OP_RESERVED1); + roundtrip!(unique, OP_RESERVED2); + roundtrip!(unique, OP_1ADD); + roundtrip!(unique, OP_1SUB); + roundtrip!(unique, OP_2MUL); + roundtrip!(unique, OP_2DIV); + roundtrip!(unique, OP_NEGATE); + roundtrip!(unique, OP_ABS); + roundtrip!(unique, OP_NOT); + roundtrip!(unique, OP_0NOTEQUAL); + roundtrip!(unique, OP_ADD); + roundtrip!(unique, OP_SUB); + roundtrip!(unique, OP_MUL); + roundtrip!(unique, OP_DIV); + roundtrip!(unique, OP_MOD); + roundtrip!(unique, OP_LSHIFT); + roundtrip!(unique, OP_RSHIFT); + roundtrip!(unique, OP_BOOLAND); + roundtrip!(unique, OP_BOOLOR); + roundtrip!(unique, OP_NUMEQUAL); + roundtrip!(unique, OP_NUMEQUALVERIFY); + roundtrip!(unique, OP_NUMNOTEQUAL); + roundtrip!(unique, OP_LESSTHAN); + roundtrip!(unique, OP_GREATERTHAN); + roundtrip!(unique, OP_LESSTHANOREQUAL); + roundtrip!(unique, OP_GREATERTHANOREQUAL); + roundtrip!(unique, OP_MIN); + roundtrip!(unique, OP_MAX); + roundtrip!(unique, OP_WITHIN); + roundtrip!(unique, OP_RIPEMD160); + roundtrip!(unique, OP_SHA1); + roundtrip!(unique, OP_SHA256); + roundtrip!(unique, OP_HASH160); + roundtrip!(unique, OP_HASH256); + roundtrip!(unique, OP_CODESEPARATOR); + roundtrip!(unique, OP_CHECKSIG); + roundtrip!(unique, OP_CHECKSIGVERIFY); + roundtrip!(unique, OP_CHECKMULTISIG); + roundtrip!(unique, OP_CHECKMULTISIGVERIFY); + roundtrip!(unique, OP_NOP1); + roundtrip!(unique, OP_CLTV); + roundtrip!(unique, OP_CSV); + roundtrip!(unique, OP_NOP4); + roundtrip!(unique, OP_NOP5); + roundtrip!(unique, OP_NOP6); + roundtrip!(unique, OP_NOP7); + roundtrip!(unique, OP_NOP8); + roundtrip!(unique, OP_NOP9); + roundtrip!(unique, OP_NOP10); + roundtrip!(unique, OP_CHECKSIGADD); + roundtrip!(unique, OP_RETURN_187); + roundtrip!(unique, OP_RETURN_188); + roundtrip!(unique, OP_RETURN_189); + roundtrip!(unique, OP_RETURN_190); + roundtrip!(unique, OP_RETURN_191); + roundtrip!(unique, OP_RETURN_192); + roundtrip!(unique, OP_RETURN_193); + roundtrip!(unique, OP_RETURN_194); + roundtrip!(unique, OP_RETURN_195); + roundtrip!(unique, OP_RETURN_196); + roundtrip!(unique, OP_RETURN_197); + roundtrip!(unique, OP_RETURN_198); + roundtrip!(unique, OP_RETURN_199); + roundtrip!(unique, OP_RETURN_200); + roundtrip!(unique, OP_RETURN_201); + roundtrip!(unique, OP_RETURN_202); + roundtrip!(unique, OP_RETURN_203); + roundtrip!(unique, OP_RETURN_204); + roundtrip!(unique, OP_RETURN_205); + roundtrip!(unique, OP_RETURN_206); + roundtrip!(unique, OP_RETURN_207); + roundtrip!(unique, 
OP_RETURN_208); + roundtrip!(unique, OP_RETURN_209); + roundtrip!(unique, OP_RETURN_210); + roundtrip!(unique, OP_RETURN_211); + roundtrip!(unique, OP_RETURN_212); + roundtrip!(unique, OP_RETURN_213); + roundtrip!(unique, OP_RETURN_214); + roundtrip!(unique, OP_RETURN_215); + roundtrip!(unique, OP_RETURN_216); + roundtrip!(unique, OP_RETURN_217); + roundtrip!(unique, OP_RETURN_218); + roundtrip!(unique, OP_RETURN_219); + roundtrip!(unique, OP_RETURN_220); + roundtrip!(unique, OP_RETURN_221); + roundtrip!(unique, OP_RETURN_222); + roundtrip!(unique, OP_RETURN_223); + roundtrip!(unique, OP_RETURN_224); + roundtrip!(unique, OP_RETURN_225); + roundtrip!(unique, OP_RETURN_226); + roundtrip!(unique, OP_RETURN_227); + roundtrip!(unique, OP_RETURN_228); + roundtrip!(unique, OP_RETURN_229); + roundtrip!(unique, OP_RETURN_230); + roundtrip!(unique, OP_RETURN_231); + roundtrip!(unique, OP_RETURN_232); + roundtrip!(unique, OP_RETURN_233); + roundtrip!(unique, OP_RETURN_234); + roundtrip!(unique, OP_RETURN_235); + roundtrip!(unique, OP_RETURN_236); + roundtrip!(unique, OP_RETURN_237); + roundtrip!(unique, OP_RETURN_238); + roundtrip!(unique, OP_RETURN_239); + roundtrip!(unique, OP_RETURN_240); + roundtrip!(unique, OP_RETURN_241); + roundtrip!(unique, OP_RETURN_242); + roundtrip!(unique, OP_RETURN_243); + roundtrip!(unique, OP_RETURN_244); + roundtrip!(unique, OP_RETURN_245); + roundtrip!(unique, OP_RETURN_246); + roundtrip!(unique, OP_RETURN_247); + roundtrip!(unique, OP_RETURN_248); + roundtrip!(unique, OP_RETURN_249); + roundtrip!(unique, OP_RETURN_250); + roundtrip!(unique, OP_RETURN_251); + roundtrip!(unique, OP_RETURN_252); + roundtrip!(unique, OP_RETURN_253); + roundtrip!(unique, OP_RETURN_254); + roundtrip!(unique, OP_INVALIDOPCODE); + assert_eq!(unique.len(), 256); + } +} diff --git a/bitcoin/src/blockdata/script/borrowed.rs b/bitcoin/src/blockdata/script/borrowed.rs index e1fe91b781..c048ba3670 100644 --- a/bitcoin/src/blockdata/script/borrowed.rs +++ b/bitcoin/src/blockdata/script/borrowed.rs @@ -12,10 +12,10 @@ use super::{ use crate::consensus::Encodable; use crate::opcodes::all::*; use crate::opcodes::{self, Opcode}; -use crate::policy::DUST_RELAY_TX_FEE; +use crate::policy::{DUST_RELAY_TX_FEE, MAX_OP_RETURN_RELAY}; use crate::prelude::{sink, DisplayHex, String, ToString}; -use crate::taproot::{LeafVersion, TapLeafHash, TapLeafHashExt as _}; -use crate::FeeRate; +use crate::taproot::{LeafVersion, TapLeafHash}; +use crate::{Amount, FeeRate}; #[rustfmt::skip] // Keep public re-exports separate. #[doc(inline)] @@ -172,7 +172,7 @@ crate::internal_macros::define_extension_trait! { instructions.next().is_none() } - /// Checks whether a script pubkey is a Segregated Witness (segwit) program. + /// Checks whether a script pubkey is a Segregated Witness (SegWit) program. #[inline] fn is_witness_program(&self) -> bool { self.witness_version().is_some() } @@ -217,7 +217,7 @@ crate::internal_macros::define_extension_trait! { /// What this function considers to be standard may change without warning pending Bitcoin Core /// changes. #[inline] - fn is_standard_op_return(&self) -> bool { self.is_op_return() && self.len() <= 80 } + fn is_standard_op_return(&self) -> bool { self.is_op_return() && self.len() <= MAX_OP_RETURN_RELAY } /// Checks whether a script is trivially known to have no satisfying input. /// @@ -261,7 +261,7 @@ crate::internal_macros::define_extension_trait! { /// Returns the minimum value an output with this script should have in order to be /// broadcastable on today’s Bitcoin network. 
#[deprecated(since = "0.32.0", note = "use `minimal_non_dust` etc. instead")] - fn dust_value(&self) -> crate::Amount { self.minimal_non_dust() } + fn dust_value(&self) -> Option<Amount> { self.minimal_non_dust() } /// Returns the minimum value an output with this script should have in order to be /// broadcastable on today's Bitcoin network. @@ -272,7 +272,7 @@ crate::internal_macros::define_extension_trait! { /// To use a custom value, use [`minimal_non_dust_custom`]. /// /// [`minimal_non_dust_custom`]: Script::minimal_non_dust_custom - fn minimal_non_dust(&self) -> crate::Amount { + fn minimal_non_dust(&self) -> Option<Amount> { self.minimal_non_dust_internal(DUST_RELAY_TX_FEE.into()) } @@ -287,7 +287,7 @@ crate::internal_macros::define_extension_trait! { /// To use the default Bitcoin Core value, use [`minimal_non_dust`]. /// /// [`minimal_non_dust`]: Script::minimal_non_dust - fn minimal_non_dust_custom(&self, dust_relay_fee: FeeRate) -> crate::Amount { + fn minimal_non_dust_custom(&self, dust_relay_fee: FeeRate) -> Option<Amount> { self.minimal_non_dust_internal(dust_relay_fee.to_sat_per_kwu() * 4) } @@ -394,7 +394,7 @@ mod sealed { crate::internal_macros::define_extension_trait! { pub(crate) trait ScriptExtPriv impl for Script { - fn minimal_non_dust_internal(&self, dust_relay_fee: u64) -> crate::Amount { + fn minimal_non_dust_internal(&self, dust_relay_fee: u64) -> Option<Amount> { // This must never be lower than Bitcoin Core's GetDustThreshold() (as of v0.21) as it may // otherwise allow users to create transactions which likely can never be broadcast/confirmed. let sats = dust_relay_fee @@ -408,13 +408,12 @@ crate::internal_macros::define_extension_trait! { 32 + 4 + 1 + 107 + 4 + // The spend cost copied from Core 8 + // The serialized size of the TxOut's amount field self.consensus_encode(&mut sink()).expect("sinks don't error").to_u64() // The serialized size of this script_pubkey - }) - .expect("dust_relay_fee or script length should not be absurdly large") + })? / 1000; // divide by 1000 like in Core to get value as it cancels out DEFAULT_MIN_RELAY_TX_FEE // Note: We ensure the division happens at the end, since Core performs the division at the end. // This will make sure none of the implicit floor operations mess with the value. - crate::Amount::from_sat(sats) + Amount::from_sat(sats).ok() } fn count_sigops_internal(&self, accurate: bool) -> usize { diff --git a/bitcoin/src/blockdata/script/builder.rs b/bitcoin/src/blockdata/script/builder.rs index 6e0c7875c5..b8a52ea91a 100644 --- a/bitcoin/src/blockdata/script/builder.rs +++ b/bitcoin/src/blockdata/script/builder.rs @@ -150,4 +150,6 @@ impl fmt::Display for Builder { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { fmt::Display::fmt(&self.0, f) } } -internals::debug_from_display!(Builder); +impl fmt::Debug for Builder { + fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { fmt::Display::fmt(self, f) } +} diff --git a/bitcoin/src/blockdata/script/instruction.rs b/bitcoin/src/blockdata/script/instruction.rs index 8bf30cdd76..16cd1df458 100644 --- a/bitcoin/src/blockdata/script/instruction.rs +++ b/bitcoin/src/blockdata/script/instruction.rs @@ -31,7 +31,7 @@ impl Instruction<'_> { } } - /// Returns the number interpretted by the script parser + /// Returns the number interpreted by the script parser /// if it can be coerced into a number. /// /// This does not require the script num to be minimal.
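// Worked example (not part of the patch) of the arithmetic in `minimal_non_dust_internal`
// above, using the default dust relay fee of 3000 sat/kvB and the non-witness spend cost
// shown in the hunk (32 + 4 + 1 + 107 + 4 bytes):
//
//   P2PKH: 148 (spend cost) + 8 (amount field) + 26 (25-byte scriptPubKey + length prefix) = 182
//          3000 * 182 / 1000 = 546 sats
//
// The witness branch (outside the hunk) divides the 107-byte scriptSig estimate by 4, which
// is what produces the 294-sat threshold asserted in the tests further down. Callers now get
// an `Option`; a hedged sketch of handling it:
fn dust_floor(script_pubkey: &bitcoin::script::Script) -> bitcoin::Amount {
    use bitcoin::script::ScriptExt as _;
    // `None` is only returned for absurdly large scripts or fee rates; fall back to zero here.
    script_pubkey.minimal_non_dust().unwrap_or(bitcoin::Amount::ZERO)
}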
@@ -47,12 +47,8 @@ impl Instruction<'_> { _ => None, } } - Instruction::PushBytes(bytes) => { - match super::read_scriptint_non_minimal(bytes.as_bytes()) { - Ok(v) => Some(v), - _ => None, - } - } + Instruction::PushBytes(bytes) => + super::read_scriptint_non_minimal(bytes.as_bytes()).ok(), } } @@ -78,10 +74,7 @@ impl Instruction<'_> { _ => None, } } - Instruction::PushBytes(bytes) => match bytes.read_scriptint() { - Ok(v) => Some(v), - _ => None, - }, + Instruction::PushBytes(bytes) => bytes.read_scriptint().ok(), } } } @@ -238,7 +231,7 @@ impl<'a> InstructionIndices<'a> { let prev_remaining = self.remaining_bytes(); let prev_pos = self.pos; let instruction = next_fn(self)?; - // No underflow: there must be less remaining bytes now than previously + // No overflow: there must be less remaining bytes now than previously let consumed = prev_remaining - self.remaining_bytes(); // No overflow: sum will never exceed slice length which itself can't exceed `usize` self.pos += consumed; diff --git a/bitcoin/src/blockdata/script/mod.rs b/bitcoin/src/blockdata/script/mod.rs index 55947e43a0..1bd2bddf35 100644 --- a/bitcoin/src/blockdata/script/mod.rs +++ b/bitcoin/src/blockdata/script/mod.rs @@ -57,17 +57,16 @@ mod tests; pub mod witness_program; pub mod witness_version; +use core::convert::Infallible; use core::fmt; -use hashes::{hash160, sha256}; use io::{BufRead, Write}; -use primitives::opcodes::all::*; -use primitives::opcodes::Opcode; use crate::consensus::{encode, Decodable, Encodable}; -use crate::constants::{MAX_REDEEM_SCRIPT_SIZE, MAX_WITNESS_SCRIPT_SIZE}; use crate::internal_macros::impl_asref_push_bytes; use crate::key::WPubkeyHash; +use crate::opcodes::all::*; +use crate::opcodes::Opcode; use crate::prelude::Vec; use crate::OutPoint; @@ -81,130 +80,15 @@ pub use self::{ push_bytes::{PushBytes, PushBytesBuf, PushBytesError, PushBytesErrorReport}, }; #[doc(inline)] -pub use primitives::script::*; +pub use primitives::script::{ + RedeemScriptSizeError, Script, ScriptBuf, ScriptHash, WScriptHash, WitnessScriptSizeError, +}; pub(crate) use self::borrowed::ScriptExtPriv; pub(crate) use self::owned::ScriptBufExtPriv; -hashes::hash_newtype! { - /// A hash of Bitcoin Script bytecode. - pub struct ScriptHash(hash160::Hash); - /// SegWit version of a Bitcoin Script bytecode hash. - pub struct WScriptHash(sha256::Hash); -} - -hashes::impl_hex_for_newtype!(ScriptHash, WScriptHash); -#[cfg(feature = "serde")] -hashes::impl_serde_for_newtype!(ScriptHash, WScriptHash); - impl_asref_push_bytes!(ScriptHash, WScriptHash); -impl ScriptHash { - /// Constructs a new `ScriptHash` after first checking the script size. - /// - /// # 520-byte limitation on serialized script size - /// - /// > As a consequence of the requirement for backwards compatibility the serialized script is - /// > itself subject to the same rules as any other PUSHDATA operation, including the rule that - /// > no data greater than 520 bytes may be pushed to the stack. Thus it is not possible to - /// > spend a P2SH output if the redemption script it refers to is >520 bytes in length. 
- /// - /// ref: [BIP-16](https://github.com/bitcoin/bips/blob/master/bip-0016.mediawiki#user-content-520byte_limitation_on_serialized_script_size) - pub fn from_script(redeem_script: &Script) -> Result<Self, RedeemScriptSizeError> { - if redeem_script.len() > MAX_REDEEM_SCRIPT_SIZE { - return Err(RedeemScriptSizeError { size: redeem_script.len() }); - } - - Ok(ScriptHash(hash160::Hash::hash(redeem_script.as_bytes()))) - } - - /// Constructs a new `ScriptHash` from any script irrespective of script size. - /// - /// If you hash a script that exceeds 520 bytes in size and use it to create a P2SH output - /// then the output will be unspendable (see [BIP-16]). - /// - /// [BIP-16]: <https://github.com/bitcoin/bips/blob/master/bip-0016.mediawiki#user-content-520byte_limitation_on_serialized_script_size> - pub fn from_script_unchecked(script: &Script) -> Self { - ScriptHash(hash160::Hash::hash(script.as_bytes())) - } -} - -impl WScriptHash { - /// Constructs a new `WScriptHash` after first checking the script size. - /// - /// # 10,000-byte limit on the witness script - /// - /// > The witnessScript (≤ 10,000 bytes) is popped off the initial witness stack. SHA256 of the - /// > witnessScript must match the 32-byte witness program. - /// - /// ref: [BIP-141](https://github.com/bitcoin/bips/blob/master/bip-0141.mediawiki) - pub fn from_script(witness_script: &Script) -> Result<Self, WitnessScriptSizeError> { - if witness_script.len() > MAX_WITNESS_SCRIPT_SIZE { - return Err(WitnessScriptSizeError { size: witness_script.len() }); - } - - Ok(WScriptHash(sha256::Hash::hash(witness_script.as_bytes()))) - } - - /// Constructs a new `WScriptHash` from any script irrespective of script size. - /// - /// If you hash a script that exceeds 10,000 bytes in size and use it to create a Segwit - /// output then the output will be unspendable (see [BIP-141]). - /// - /// ref: [BIP-141](https://github.com/bitcoin/bips/blob/master/bip-0141.mediawiki) - pub fn from_script_unchecked(script: &Script) -> Self { - WScriptHash(sha256::Hash::hash(script.as_bytes())) - } -} - -impl TryFrom<ScriptBuf> for ScriptHash { - type Error = RedeemScriptSizeError; - - fn try_from(redeem_script: ScriptBuf) -> Result<Self, Self::Error> { - Self::from_script(&redeem_script) - } -} - -impl TryFrom<&ScriptBuf> for ScriptHash { - type Error = RedeemScriptSizeError; - - fn try_from(redeem_script: &ScriptBuf) -> Result<Self, Self::Error> { - Self::from_script(redeem_script) - } -} - -impl TryFrom<&Script> for ScriptHash { - type Error = RedeemScriptSizeError; - - fn try_from(redeem_script: &Script) -> Result<Self, Self::Error> { - Self::from_script(redeem_script) - } -} - -impl TryFrom<ScriptBuf> for WScriptHash { - type Error = WitnessScriptSizeError; - - fn try_from(witness_script: ScriptBuf) -> Result<Self, Self::Error> { - Self::from_script(&witness_script) - } -} - -impl TryFrom<&ScriptBuf> for WScriptHash { - type Error = WitnessScriptSizeError; - - fn try_from(witness_script: &ScriptBuf) -> Result<Self, Self::Error> { - Self::from_script(witness_script) - } -} - -impl TryFrom<&Script> for WScriptHash { - type Error = WitnessScriptSizeError; - - fn try_from(witness_script: &Script) -> Result<Self, Self::Error> { - Self::from_script(witness_script) - } -} - /// Constructs a new [`ScriptBuf`] containing the script code used for spending a P2WPKH output. /// /// The `scriptCode` is described in [BIP143]. @@ -351,7 +235,9 @@ pub enum Error { Serialization, } -internals::impl_from_infallible!(Error); +impl From<Infallible> for Error { + fn from(never: Infallible) -> Self { match never {} } +} impl fmt::Display for Error { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { @@ -383,39 +269,3 @@ impl std::error::Error for Error { } } } - -/// Error while hashing a redeem script.
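// Sketch (not part of the patch): `ScriptHash`, `WScriptHash` and their size errors now come
// from `primitives` via the re-export above, so the checked constructors removed here keep
// the same shape for callers. The helper below is a hypothetical illustration of that API.
fn p2sh_script_hash(redeem_script: &bitcoin::script::Script) -> Option<bitcoin::script::ScriptHash> {
    // `from_script` enforces the BIP-16 520-byte redeem script limit; hashing an oversized
    // script with `from_script_unchecked` would yield an unspendable P2SH output.
    bitcoin::script::ScriptHash::from_script(redeem_script).ok()
}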
-#[derive(Debug, Clone, PartialEq, Eq)] -pub struct RedeemScriptSizeError { - /// Invalid redeem script size (cannot exceed 520 bytes). - pub size: usize, -} - -internals::impl_from_infallible!(RedeemScriptSizeError); - -impl fmt::Display for RedeemScriptSizeError { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - write!(f, "redeem script size exceeds {} bytes: {}", MAX_REDEEM_SCRIPT_SIZE, self.size) - } -} - -#[cfg(feature = "std")] -impl std::error::Error for RedeemScriptSizeError {} - -/// Error while hashing a witness script. -#[derive(Debug, Clone, PartialEq, Eq)] -pub struct WitnessScriptSizeError { - /// Invalid witness script size (cannot exceed 10,000 bytes). - pub size: usize, -} - -internals::impl_from_infallible!(WitnessScriptSizeError); - -impl fmt::Display for WitnessScriptSizeError { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - write!(f, "witness script size exceeds {} bytes: {}", MAX_WITNESS_SCRIPT_SIZE, self.size) - } -} - -#[cfg(feature = "std")] -impl std::error::Error for WitnessScriptSizeError {} diff --git a/bitcoin/src/blockdata/script/owned.rs b/bitcoin/src/blockdata/script/owned.rs index ccc82ca894..8b8b73e385 100644 --- a/bitcoin/src/blockdata/script/owned.rs +++ b/bitcoin/src/blockdata/script/owned.rs @@ -203,7 +203,7 @@ impl<'a> Extend> for ScriptBuf { /// Pretends that this is a mutable reference to [`ScriptBuf`]'s internal buffer. /// /// In reality the backing `Vec` is swapped with an empty one and this is holding both the -/// reference and the vec. The vec is put back when this drops so it also covers paics. (But not +/// reference and the vec. The vec is put back when this drops so it also covers panics. (But not /// leaks, which is OK since we never leak.) pub(crate) struct ScriptBufAsVec<'a>(&'a mut ScriptBuf, Vec); diff --git a/bitcoin/src/blockdata/script/push_bytes.rs b/bitcoin/src/blockdata/script/push_bytes.rs index 59f6513149..bae7902b3b 100644 --- a/bitcoin/src/blockdata/script/push_bytes.rs +++ b/bitcoin/src/blockdata/script/push_bytes.rs @@ -413,7 +413,7 @@ mod error { /// Error returned on attempt to create too large `PushBytes`. #[allow(unused)] - #[derive(Debug, Clone, Copy, PartialEq, Eq)] + #[derive(Debug, Clone, PartialEq, Eq)] pub struct PushBytesError { never: core::convert::Infallible, } @@ -435,7 +435,7 @@ mod error { use core::fmt; /// Error returned on attempt to create too large `PushBytes`. - #[derive(Debug, Clone, Copy, PartialEq, Eq)] + #[derive(Debug, Clone, PartialEq, Eq)] pub struct PushBytesError { /// How long the input was. 
pub(super) len: usize, diff --git a/bitcoin/src/blockdata/script/tests.rs b/bitcoin/src/blockdata/script/tests.rs index 5a3409af6c..27dcd32ea3 100644 --- a/bitcoin/src/blockdata/script/tests.rs +++ b/bitcoin/src/blockdata/script/tests.rs @@ -1,7 +1,6 @@ // SPDX-License-Identifier: CC0-1.0 use hex_lit::hex; -use primitives::opcodes; use super::*; use crate::address::script_pubkey::{ @@ -9,7 +8,7 @@ use crate::address::script_pubkey::{ }; use crate::consensus::encode::{deserialize, serialize}; use crate::crypto::key::{PublicKey, XOnlyPublicKey}; -use crate::FeeRate; +use crate::{opcodes, Amount, FeeRate}; #[test] #[rustfmt::skip] @@ -416,8 +415,10 @@ fn standard_op_return() { assert!(ScriptBuf::from_hex("6a48656c6c6f2c2074686973206973206d7920666972737420636f6e747269627574696f6e20746f207275737420626974636f696e2e20506c6561736520617070726f7665206d79205052206672656e") .unwrap() .is_standard_op_return()); - - assert!(!ScriptBuf::from_hex("6a48656c6c6f2c2074686973206973206d7920666972737420636f6e747269627574696f6e20746f207275737420626974636f696e2e20506c6561736520617070726f7665206d79205052206672656e21") + assert!(ScriptBuf::from_hex("6a48656c6c6f2c2074686973206973206d7920666972737420636f6e747269627574696f6e20746f207275737420626974636f696e2e20506c6561736520617070726f7665206d79205052206672656e21") + .unwrap() + .is_standard_op_return()); + assert!(!ScriptBuf::from_hex("6a48656c6c6f2c2074686973206973206d7920666972737420636f6e747269627574696f6e20746f207275737420626974636f696e2e20506c6561736520617070726f7665206d79205052206672656e21524f42") .unwrap() .is_standard_op_return()); } @@ -483,10 +484,6 @@ fn script_json_serialize() { #[test] fn script_asm() { - assert_eq!( - ScriptBuf::from_hex("6363636363686868686800").unwrap().to_string(), - "OP_IF OP_IF OP_IF OP_IF OP_IF OP_ENDIF OP_ENDIF OP_ENDIF OP_ENDIF OP_ENDIF OP_0" - ); assert_eq!( ScriptBuf::from_hex("6363636363686868686800").unwrap().to_string(), "OP_IF OP_IF OP_IF OP_IF OP_IF OP_ENDIF OP_ENDIF OP_ENDIF OP_ENDIF OP_ENDIF OP_0" @@ -497,7 +494,7 @@ fn script_asm() { assert_eq!(ScriptBuf::from_hex("0047304402202457e78cc1b7f50d0543863c27de75d07982bde8359b9e3316adec0aec165f2f02200203fd331c4e4a4a02f48cf1c291e2c0d6b2f7078a784b5b3649fca41f8794d401004cf1552103244e602b46755f24327142a0517288cebd159eccb6ccf41ea6edf1f601e9af952103bbbacc302d19d29dbfa62d23f37944ae19853cf260c745c2bea739c95328fcb721039227e83246bd51140fe93538b2301c9048be82ef2fb3c7fc5d78426ed6f609ad210229bf310c379b90033e2ecb07f77ecf9b8d59acb623ab7be25a0caed539e2e6472103703e2ed676936f10b3ce9149fa2d4a32060fb86fa9a70a4efe3f21d7ab90611921031e9b7c6022400a6bb0424bbcde14cff6c016b91ee3803926f3440abf5c146d05210334667f975f55a8455d515a2ef1c94fdfa3315f12319a14515d2a13d82831f62f57ae").unwrap().to_string(), "OP_0 OP_PUSHBYTES_71 304402202457e78cc1b7f50d0543863c27de75d07982bde8359b9e3316adec0aec165f2f02200203fd331c4e4a4a02f48cf1c291e2c0d6b2f7078a784b5b3649fca41f8794d401 OP_0 OP_PUSHDATA1 552103244e602b46755f24327142a0517288cebd159eccb6ccf41ea6edf1f601e9af952103bbbacc302d19d29dbfa62d23f37944ae19853cf260c745c2bea739c95328fcb721039227e83246bd51140fe93538b2301c9048be82ef2fb3c7fc5d78426ed6f609ad210229bf310c379b90033e2ecb07f77ecf9b8d59acb623ab7be25a0caed539e2e6472103703e2ed676936f10b3ce9149fa2d4a32060fb86fa9a70a4efe3f21d7ab90611921031e9b7c6022400a6bb0424bbcde14cff6c016b91ee3803926f3440abf5c146d05210334667f975f55a8455d515a2ef1c94fdfa3315f12319a14515d2a13d82831f62f57ae"); // Various weird scripts found in transaction 6d7ed9914625c73c0288694a6819196a27ef6c08f98e1270d975a8e65a3dc09a - // which triggerred overflow bugs on 
32-bit machines in script formatting in the past. + // which triggered overflow bugs on 32-bit machines in script formatting in the past. assert_eq!(ScriptBuf::from_hex("01").unwrap().to_string(), "OP_PUSHBYTES_1 "); assert_eq!(ScriptBuf::from_hex("0201").unwrap().to_string(), "OP_PUSHBYTES_2 "); assert_eq!(ScriptBuf::from_hex("4c").unwrap().to_string(), ""); @@ -593,7 +590,7 @@ macro_rules! unwrap_all { } #[test] -fn test_iterator() { +fn iterator() { let zero = ScriptBuf::from_hex("00").unwrap(); let zeropush = ScriptBuf::from_hex("0100").unwrap(); @@ -671,14 +668,14 @@ fn script_ord() { #[test] #[cfg(feature = "bitcoinconsensus")] -fn test_bitcoinconsensus() { +fn bitcoinconsensus() { use crate::consensus_validation::ScriptExt as _; - // a random segwit transaction from the blockchain using native segwit + // a random SegWit transaction from the blockchain using native SegWit let spent_bytes = hex!("0020701a8d401c84fb13e6baf169d59684e17abd9fa216c8cc5b9fc63d622ff8c58d"); let spent = Script::from_bytes(&spent_bytes); let spending = hex!("010000000001011f97548fbbe7a0db7588a66e18d803d0089315aa7d4cc28360b6ec50ef36718a0100000000ffffffff02df1776000000000017a9146c002a686959067f4866b8fb493ad7970290ab728757d29f0000000000220020701a8d401c84fb13e6baf169d59684e17abd9fa216c8cc5b9fc63d622ff8c58d04004730440220565d170eed95ff95027a69b313758450ba84a01224e1f7f130dda46e94d13f8602207bdd20e307f062594022f12ed5017bbf4a055a06aea91c10110a0e3bb23117fc014730440220647d2dc5b15f60bc37dc42618a370b2a1490293f9e5c8464f53ec4fe1dfe067302203598773895b4b16d37485cbe21b337f4e4b650739880098c592553add7dd4355016952210375e00eb72e29da82b89367947f29ef34afb75e8654f6ea368e0acdfd92976b7c2103a1b26313f430c4b15bb1fdce663207659d8cac749a0e53d70eff01874496feff2103c96d495bfdd5ba4145e3e046fee45e84a8a48ad05bd8dbb395c011a32cf9f88053ae00000000"); - spent.verify(0, crate::Amount::from_sat(18393430), &spending).unwrap(); + spent.verify(0, Amount::from_sat_u32(18393430), &spending).unwrap(); } #[test] @@ -687,10 +684,10 @@ fn default_dust_value() { // well-known scriptPubKey types. 
let script_p2wpkh = Builder::new().push_int_unchecked(0).push_slice([42; 20]).into_script(); assert!(script_p2wpkh.is_p2wpkh()); - assert_eq!(script_p2wpkh.minimal_non_dust(), crate::Amount::from_sat(294)); + assert_eq!(script_p2wpkh.minimal_non_dust(), Some(Amount::from_sat_u32(294))); assert_eq!( script_p2wpkh.minimal_non_dust_custom(FeeRate::from_sat_per_vb_unchecked(6)), - crate::Amount::from_sat(588) + Some(Amount::from_sat_u32(588)) ); let script_p2pkh = Builder::new() @@ -701,15 +698,15 @@ fn default_dust_value() { .push_opcode(OP_CHECKSIG) .into_script(); assert!(script_p2pkh.is_p2pkh()); - assert_eq!(script_p2pkh.minimal_non_dust(), crate::Amount::from_sat(546)); + assert_eq!(script_p2pkh.minimal_non_dust(), Some(Amount::from_sat_u32(546))); assert_eq!( script_p2pkh.minimal_non_dust_custom(FeeRate::from_sat_per_vb_unchecked(6)), - crate::Amount::from_sat(1092) + Some(Amount::from_sat_u32(1092)) ); } #[test] -fn test_script_get_sigop_count() { +fn script_get_sigop_count() { assert_eq!( Builder::new() .push_opcode(OP_DUP) @@ -786,7 +783,7 @@ fn test_script_get_sigop_count() { #[test] #[cfg(feature = "serde")] -fn test_script_serde_human_and_not() { +fn script_serde_human_and_not() { let script = ScriptBuf::from(vec![0u8, 1u8, 2u8]); // Serialize @@ -801,7 +798,7 @@ fn test_script_serde_human_and_not() { } #[test] -fn test_instructions_are_fused() { +fn instructions_are_fused() { let script = ScriptBuf::new(); let mut instructions = script.instructions(); assert!(instructions.next().is_none()); diff --git a/bitcoin/src/blockdata/script/witness_program.rs b/bitcoin/src/blockdata/script/witness_program.rs index 3b0cd4cad4..e0fe7bc60e 100644 --- a/bitcoin/src/blockdata/script/witness_program.rs +++ b/bitcoin/src/blockdata/script/witness_program.rs @@ -7,6 +7,7 @@ //! //! [BIP141]: +use core::convert::Infallible; use core::fmt; use internals::array_vec::ArrayVec; @@ -24,6 +25,9 @@ pub const MIN_SIZE: usize = 2; /// The maximum byte size of a segregated witness program. pub const MAX_SIZE: usize = 40; +/// The P2A program which is given by 0x4e73. +pub(crate) const P2A_PROGRAM: [u8; 2] = [78, 115]; + /// The segregated witness program. /// /// The segregated witness program is technically only the program bytes _excluding_ the witness @@ -31,7 +35,7 @@ pub const MAX_SIZE: usize = 40; /// number, therefore we carry the version number around along with the program bytes. #[derive(Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] pub struct WitnessProgram { - /// The segwit version associated with this witness program. + /// The SegWit version associated with this witness program. version: WitnessVersion, /// The witness program (between 2 and 40 bytes). program: ArrayVec, @@ -47,7 +51,7 @@ impl WitnessProgram { return Err(InvalidLength(program_len)); } - // Specific segwit v0 check. These addresses can never spend funds sent to them. + // Specific SegWit v0 check. These addresses can never spend funds sent to them. if version == WitnessVersion::V0 && (program_len != 20 && program_len != 32) { return Err(InvalidSegwitV0Length(program_len)); } @@ -104,6 +108,11 @@ impl WitnessProgram { WitnessProgram::new_p2tr(pubkey) } + /// Constructs a new pay to anchor address + pub const fn p2a() -> Self { + WitnessProgram { version: WitnessVersion::V1, program: ArrayVec::from_slice(&P2A_PROGRAM) } + } + /// Returns the witness program version. 
pub fn version(&self) -> WitnessVersion { self.version } @@ -127,6 +136,11 @@ impl WitnessProgram { /// Returns true if this witness program is for a P2TR output. pub fn is_p2tr(&self) -> bool { self.version == WitnessVersion::V1 && self.program.len() == 32 } + + /// Returns true if this is a pay to anchor output. + pub fn is_p2a(&self) -> bool { + self.version == WitnessVersion::V1 && self.program == P2A_PROGRAM + } } /// Witness program error. @@ -139,7 +153,9 @@ pub enum Error { InvalidSegwitV0Length(usize), } -internals::impl_from_infallible!(Error); +impl From<Infallible> for Error { + fn from(never: Infallible) -> Self { match never {} } +} impl fmt::Display for Error { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { diff --git a/bitcoin/src/blockdata/script/witness_version.rs b/bitcoin/src/blockdata/script/witness_version.rs index e9a1126d33..3c8f408dbc 100644 --- a/bitcoin/src/blockdata/script/witness_version.rs +++ b/bitcoin/src/blockdata/script/witness_version.rs @@ -7,6 +7,7 @@ //! //! [BIP141]: <https://github.com/bitcoin/bips/blob/master/bip-0141.mediawiki> +use core::convert::Infallible; use core::fmt; use core::str::FromStr; @@ -81,7 +82,7 @@ impl FromStr for WitnessVersion { type Err = FromStrError; fn from_str(s: &str) -> Result<Self, Self::Err> { - let version: u8 = parse::int(s)?; + let version: u8 = parse::int_from_str(s)?; Ok(WitnessVersion::try_from(version)?) } } @@ -159,7 +160,9 @@ pub enum FromStrError { Invalid(TryFromError), } -internals::impl_from_infallible!(FromStrError); +impl From<Infallible> for FromStrError { + fn from(never: Infallible) -> Self { match never {} } +} impl fmt::Display for FromStrError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { @@ -202,7 +205,9 @@ pub enum TryFromInstructionError { DataPush, } -internals::impl_from_infallible!(TryFromInstructionError); +impl From<Infallible> for TryFromInstructionError { + fn from(never: Infallible) -> Self { match never {} } +} impl fmt::Display for TryFromInstructionError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { diff --git a/bitcoin/src/blockdata/transaction.rs b/bitcoin/src/blockdata/transaction.rs index 0dd69f1c7c..0c60d3c654 100644 --- a/bitcoin/src/blockdata/transaction.rs +++ b/bitcoin/src/blockdata/transaction.rs @@ -115,8 +115,8 @@ crate::internal_macros::define_extension_trait! { Weight::from_non_witness_data_size(self.base_size().to_u64()) } - /// The weight of the TxIn when it's included in a segwit transaction (i.e., a transaction - /// having at least one segwit input). + /// The weight of the TxIn when it's included in a SegWit transaction (i.e., a transaction + /// having at least one SegWit input). /// /// This always takes into account the witness, even when empty, in which /// case 1WU for the witness length varint (`00`) is included. @@ -125,7 +125,7 @@ crate::internal_macros::define_extension_trait! { /// might increase more than `TxIn::segwit_weight`. This happens when: /// - the new input added causes the input length `VarInt` to increase its encoding length /// - the new input is the first segwit input added - this will add an additional 2WU to the - /// transaction weight to take into account the segwit marker + /// transaction weight to take into account the SegWit marker fn segwit_weight(&self) -> Weight { Weight::from_non_witness_data_size(self.base_size().to_u64()) + Weight::from_witness_data_size(self.witness.size().to_u64()) @@ -162,7 +162,7 @@ crate::internal_macros::define_extension_trait! { /// # Panics /// /// If output size * 4 overflows, this should never happen under normal conditions.
Use - /// `Weght::from_vb_checked(self.size().to_u64())` if you are concerned. + /// `Weght::from_vb_checked(self.size() as u64)` if you are concerned. fn weight(&self) -> Weight { // Size is equivalent to virtual size since all bytes of a TxOut are non-witness bytes. Weight::from_vb(self.size().to_u64()) @@ -183,8 +183,8 @@ crate::internal_macros::define_extension_trait! { /// To use a custom value, use [`minimal_non_dust_custom`]. /// /// [`minimal_non_dust_custom`]: TxOut::minimal_non_dust_custom - fn minimal_non_dust(script_pubkey: ScriptBuf) -> Self { - TxOut { value: script_pubkey.minimal_non_dust(), script_pubkey } + fn minimal_non_dust(script_pubkey: ScriptBuf) -> Option { + Some(TxOut { value: script_pubkey.minimal_non_dust()?, script_pubkey }) } /// Constructs a new `TxOut` with given script and the smallest possible `value` that is **not** dust @@ -198,8 +198,8 @@ crate::internal_macros::define_extension_trait! { /// To use the default Bitcoin Core value, use [`minimal_non_dust`]. /// /// [`minimal_non_dust`]: TxOut::minimal_non_dust - fn minimal_non_dust_custom(script_pubkey: ScriptBuf, dust_relay_fee: FeeRate) -> Self { - TxOut { value: script_pubkey.minimal_non_dust_custom(dust_relay_fee), script_pubkey } + fn minimal_non_dust_custom(script_pubkey: ScriptBuf, dust_relay_fee: FeeRate) -> Option { + Some(TxOut { value: script_pubkey.minimal_non_dust_custom(dust_relay_fee)?, script_pubkey }) } } } @@ -213,23 +213,14 @@ fn size_from_script_pubkey(script_pubkey: &Script) -> usize { /// Extension functionality for the [`Transaction`] type. pub trait TransactionExt: sealed::Sealed { /// Computes a "normalized TXID" which does not include any signatures. - /// - /// This method is deprecated. `ntxid` has been renamed to `compute_ntxid` to note that it's - /// computationally expensive. Use `compute_ntxid` instead. #[deprecated(since = "0.31.0", note = "use `compute_ntxid()` instead")] fn ntxid(&self) -> sha256d::Hash; /// Computes the [`Txid`]. - /// - /// This method is deprecated. `txid` has been renamed to `compute_txid` to note that it's - /// computationally expensive. Use `compute_txid` instead. #[deprecated(since = "0.31.0", note = "use `compute_txid()` instead")] fn txid(&self) -> Txid; - /// Computes the segwit version of the transaction id. - /// - /// This method is deprecated. `wtxid` has been renamed to `compute_wtxid` to note that it's - /// computationally expensive. Use `compute_wtxid` instead. + /// Computes the SegWit version of the transaction id. #[deprecated(since = "0.31.0", note = "use `compute_wtxid()` instead")] fn wtxid(&self) -> Wtxid; @@ -243,13 +234,13 @@ pub trait TransactionExt: sealed::Sealed { /// multiplied by three plus the with-witness consensus-serialized size. /// /// For transactions with no inputs, this function will return a value 2 less than the actual - /// weight of the serialized transaction. The reason is that zero-input transactions, post-segwit, + /// weight of the serialized transaction. The reason is that zero-input transactions, post-SegWit, /// cannot be unambiguously serialized; we make a choice that adds two extra bytes. For more /// details see [BIP 141](https://github.com/bitcoin/bips/blob/master/bip-0141.mediawiki) - /// which uses a "input count" of `0x00` as a `marker` for a Segwit-encoded transaction. + /// which uses a "input count" of `0x00` as a `marker` for a SegWit-encoded transaction. /// /// If you need to use 0-input transactions, we strongly recommend you do so using the PSBT - /// API. 
The unsigned transaction encoded within PSBT is always a non-segwit transaction + /// API. The unsigned transaction encoded within PSBT is always a non-SegWit transaction /// and can therefore avoid this ambiguity. fn weight(&self) -> Weight; @@ -274,7 +265,7 @@ pub trait TransactionExt: sealed::Sealed { /// > Virtual transaction size is defined as Transaction weight / 4 (rounded up to the next integer). /// /// [`BIP141`]: https://github.com/bitcoin/bips/blob/master/bip-0141.mediawiki - /// [`policy`]: ../../policy/index.html + /// [`policy`]: crate::policy fn vsize(&self) -> usize; /// Checks if this is a coinbase transaction. @@ -329,7 +320,7 @@ pub trait TransactionExt: sealed::Sealed { /// The `spent` parameter is a closure/function that looks up the output being spent by each input /// It takes in an [`OutPoint`] and returns a [`TxOut`]. If you can't provide this, a placeholder of /// `|_| None` can be used. Without access to the previous [`TxOut`], any sigops in a redeemScript (P2SH) - /// as well as any segwit sigops will not be counted for that input. + /// as well as any SegWit sigops will not be counted for that input. fn total_sigop_cost(&self, spent: S) -> usize where S: FnMut(&OutPoint) -> Option; @@ -352,7 +343,7 @@ impl TransactionExt for Transaction { fn weight(&self) -> Weight { // This is the exact definition of a weight unit, as defined by BIP-141 (quote above). let wu = self.base_size() * 3 + self.total_size(); - Weight::from_wu_usize(wu) + Weight::from_wu(wu.to_u64()) } fn base_size(&self) -> usize { @@ -423,7 +414,7 @@ impl TransactionExt for Transaction { // coinbase tx is correctly handled because `spent` will always returns None. cost = cost.saturating_add(self.count_p2sh_sigops(&mut spent).saturating_mul(4)); - cost.saturating_add(self.count_witness_sigops(&mut spent)) + cost.saturating_add(self.count_witness_sigops(spent)) } #[inline] @@ -442,7 +433,7 @@ impl TransactionExt for Transaction { } /// Iterates over transaction outputs and for each output yields the length of the scriptPubkey. -// This exists to hardcode the type of the closure crated by `map`. +// This exists to hardcode the type of the closure created by `map`. pub struct TxOutToScriptPubkeyLengthIter<'a> { inner: core::slice::Iter<'a, TxOut>, } @@ -461,13 +452,13 @@ trait TransactionExtPriv { /// `count_p2sh_sigops` and `count_witness_sigops` respectively). fn count_p2pk_p2pkh_sigops(&self) -> usize; - /// Does not include wrapped segwit (see `count_witness_sigops`). - fn count_p2sh_sigops(&self, spent: &mut S) -> usize + /// Does not include wrapped SegWit (see `count_witness_sigops`). + fn count_p2sh_sigops(&self, spent: S) -> usize where S: FnMut(&OutPoint) -> Option; - /// Includes wrapped segwit (returns 0 for Taproot spends). - fn count_witness_sigops(&self, spent: &mut S) -> usize + /// Includes wrapped SegWit (returns 0 for Taproot spends). + fn count_witness_sigops(&self, spent: S) -> usize where S: FnMut(&OutPoint) -> Option; @@ -480,7 +471,7 @@ impl TransactionExtPriv for Transaction { fn count_p2pk_p2pkh_sigops(&self) -> usize { let mut count: usize = 0; for input in &self.input { - // 0 for p2wpkh, p2wsh, and p2sh (including wrapped segwit). + // 0 for p2wpkh, p2wsh, and p2sh (including wrapped SegWit). count = count.saturating_add(input.script_sig.count_sigops_legacy()); } for output in &self.output { @@ -489,8 +480,8 @@ impl TransactionExtPriv for Transaction { count } - /// Does not include wrapped segwit (see `count_witness_sigops`). 
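// Usage sketch (not part of the patch) for `total_sigop_cost` as documented above; `utxos`
// is a hypothetical lookup of the outputs being spent, and the trait path is an assumption.
fn sigop_cost(
    tx: &bitcoin::Transaction,
    utxos: &std::collections::HashMap<bitcoin::OutPoint, bitcoin::TxOut>,
) -> usize {
    use bitcoin::transaction::TransactionExt as _;
    // Passing `|_| None` also works, but then P2SH and SegWit sigops cannot be counted.
    tx.total_sigop_cost(|outpoint| utxos.get(outpoint).cloned())
}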
- fn count_p2sh_sigops(&self, spent: &mut S) -> usize + /// Does not include wrapped SegWit (see `count_witness_sigops`). + fn count_p2sh_sigops(&self, mut spent: S) -> usize where S: FnMut(&OutPoint) -> Option, { @@ -514,8 +505,8 @@ impl TransactionExtPriv for Transaction { count } - /// Includes wrapped segwit (returns 0 for Taproot spends). - fn count_witness_sigops(&self, spent: &mut S) -> usize + /// Includes wrapped SegWit (returns 0 for Taproot spends). + fn count_witness_sigops(&self, mut spent: S) -> usize where S: FnMut(&OutPoint) -> Option, { @@ -636,26 +627,15 @@ impl std::error::Error for IndexOutOfBoundsError { fn source(&self) -> Option<&(dyn std::error::Error + 'static)> { None } } -crate::internal_macros::define_extension_trait! { - /// Extension functionality for the [`Version`] type. - pub trait VersionExt impl for Version { - /// Constructs a new non-standard transaction version. - fn non_standard(version: i32) -> Version { Self(version) } - - /// Returns true if this transaction version number is considered standard. - fn is_standard(&self) -> bool { *self == Version::ONE || *self == Version::TWO || *self == Version::THREE } - } -} - impl Encodable for Version { fn consensus_encode(&self, w: &mut W) -> Result { - self.0.consensus_encode(w) + self.to_u32().consensus_encode(w) } } impl Decodable for Version { fn consensus_decode(r: &mut R) -> Result { - Decodable::consensus_decode(r).map(Version) + Decodable::consensus_decode(r).map(Version::maybe_non_standard) } } @@ -721,7 +701,7 @@ impl Encodable for Transaction { len += self.input.consensus_encode(w)?; len += self.output.consensus_encode(w)?; } else { - // BIP-141 (segwit) transaction serialization also includes marker, flag, and witness data. + // BIP-141 (SegWit) transaction serialization also includes marker, flag, and witness data. len += SEGWIT_MARKER.consensus_encode(w)?; len += SEGWIT_FLAG.consensus_encode(w)?; len += self.input.consensus_encode(w)?; @@ -741,7 +721,7 @@ impl Decodable for Transaction { ) -> Result { let version = Version::consensus_decode_from_finite_reader(r)?; let input = Vec::::consensus_decode_from_finite_reader(r)?; - // segwit + // SegWit if input.is_empty() { let segwit_flag = u8::consensus_decode_from_finite_reader(r)?; match segwit_flag { @@ -768,7 +748,7 @@ impl Decodable for Transaction { // We don't support anything else x => Err(encode::ParseError::UnsupportedSegwitFlag(x).into()), } - // non-segwit + // non-SegWit } else { Ok(Transaction { version, @@ -782,11 +762,11 @@ impl Decodable for Transaction { /// Computes the value of an output accounting for the cost of spending it. /// -/// The effective value is the value of an output value minus the amount to spend it. That is, the +/// The effective value is the value of an output value minus the amount to spend it. That is, the /// effective_value can be calculated as: value - (fee_rate * weight). /// /// Note: the effective value of a [`Transaction`] may increase less than the effective value of -/// a [`TxOut`] when adding another [`TxOut`] to the transaction. This happens when the new +/// a [`TxOut`] when adding another [`TxOut`] to the transaction. This happens when the new /// [`TxOut`] added causes the output length `VarInt` to increase its encoding length. 
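// Worked example (not part of the patch) of `effective_value` as described above. The test
// near the end of this file uses a 10 sat/kwu fee rate and a 204 wu satisfaction weight: the
// input weighs 204 wu plus TX_IN_BASE_WEIGHT (txid, index and sequence bytes), the fee rounds
// up to 4 sats, and the effective value is the input value minus those 4 sats. The values in
// the sketch below are hypothetical.
fn spendable_value() -> Option<bitcoin::SignedAmount> {
    use bitcoin::transaction::effective_value;
    use bitcoin::{Amount, FeeRate, Weight};

    let fee_rate = FeeRate::from_sat_per_vb_unchecked(2); // assumed fee rate
    let satisfaction_weight = Weight::from_wu(272); // assumed witness/scriptSig weight
    let value = Amount::from_sat_u32(10_000); // assumed value of the output being spent
    // `None` signals overflow, e.g. an absurdly large fee rate.
    effective_value(fee_rate, satisfaction_weight, value)
}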
/// /// # Parameters @@ -799,8 +779,8 @@ pub fn effective_value( value: Amount, ) -> Option { let weight = satisfaction_weight.checked_add(TX_IN_BASE_WEIGHT)?; - let signed_input_fee = fee_rate.checked_mul_by_weight(weight)?.to_signed().ok()?; - value.to_signed().ok()?.checked_sub(signed_input_fee) + let signed_input_fee = fee_rate.to_fee(weight)?.to_signed(); + value.to_signed().checked_sub(signed_input_fee) } /// Predicts the weight of a to-be-constructed transaction. @@ -851,38 +831,23 @@ where I: IntoIterator, O: IntoIterator, { - // This fold() does three things: - // 1) Counts the inputs and returns the sum as `input_count`. - // 2) Sums all of the input weights and returns the sum as `partial_input_weight` - // For every input: script_size * 4 + witness_size - // Since script_size is non-witness data, it gets a 4x multiplier. - // 3) Counts the number of inputs that have a witness data and returns the count as - // `num_inputs_with_witnesses`. - let (input_count, partial_input_weight, inputs_with_witnesses) = inputs.into_iter().fold( - (0, 0, 0), - |(count, partial_input_weight, inputs_with_witnesses), prediction| { + let (input_count, input_weight, inputs_with_witnesses) = + inputs.into_iter().fold((0, 0, 0), |(count, weight, with_witnesses), prediction| { ( count + 1, - partial_input_weight + prediction.weight().to_wu() as usize, - inputs_with_witnesses + (prediction.witness_size > 0) as usize, + weight + prediction.total_weight().to_wu() as usize, + with_witnesses + (prediction.witness_size > 0) as usize, ) - }, - ); - - // This fold() does two things: - // 1) Counts the outputs and returns the sum as `output_count`. - // 2) Sums the output script sizes and returns the sum as `output_scripts_size`. - // script_len + the length of a VarInt struct that stores the value of script_len - let (output_count, output_scripts_size) = output_script_lens.into_iter().fold( - (0, 0), - |(output_count, total_scripts_size), script_len| { - let script_size = script_len + compact_size::encoded_size(script_len); - (output_count + 1, total_scripts_size + script_size) - }, - ); + }); + + let (output_count, output_scripts_size) = + output_script_lens.into_iter().fold((0, 0), |(count, scripts_size), script_len| { + (count + 1, scripts_size + script_len + compact_size::encoded_size(script_len)) + }); + predict_weight_internal( input_count, - partial_input_weight, + input_weight, inputs_with_witnesses, output_count, output_scripts_size, @@ -891,26 +856,18 @@ where const fn predict_weight_internal( input_count: usize, - partial_input_weight: usize, + input_weight: usize, inputs_with_witnesses: usize, output_count: usize, output_scripts_size: usize, ) -> Weight { - // Lengths of txid, index and sequence: (32, 4, 4). - // Multiply the lengths by 4 since the fields are all non-witness fields. - let input_weight = partial_input_weight + input_count * 4 * (32 + 4 + 4); - // The value field of a TxOut is 8 bytes. let output_size = 8 * output_count + output_scripts_size; - let non_input_size = - // version: - 4 + - // count varints: - compact_size::encoded_size_const(input_count as u64) + - compact_size::encoded_size_const(output_count as u64) + - output_size + - // lock_time - 4; + let non_input_size = 4 // version + + compact_size::encoded_size_const(input_count as u64) // Can't use ToU64 in const context. 
+ + compact_size::encoded_size_const(output_count as u64) + + output_size + + 4; // locktime let weight = if inputs_with_witnesses == 0 { non_input_size * 4 + input_weight } else { @@ -930,14 +887,14 @@ pub const fn predict_weight_from_slices( inputs: &[InputWeightPrediction], output_script_lens: &[usize], ) -> Weight { - let mut partial_input_weight = 0; + let mut input_weight = 0; let mut inputs_with_witnesses = 0; // for loops not supported in const fn let mut i = 0; while i < inputs.len() { let prediction = inputs[i]; - partial_input_weight += prediction.weight().to_wu() as usize; + input_weight += prediction.total_weight().to_wu() as usize; inputs_with_witnesses += (prediction.witness_size > 0) as usize; i += 1; } @@ -953,7 +910,7 @@ pub const fn predict_weight_from_slices( predict_weight_internal( inputs.len(), - partial_input_weight, + input_weight, inputs_with_witnesses, output_script_lens.len(), output_scripts_size, @@ -1042,7 +999,7 @@ impl InputWeightPrediction { /// /// # Panics /// - /// The funcion panics in const context and debug builds if `bytes_to_grind` is higher than 62. + /// The function panics in const context and debug builds if `bytes_to_grind` is higher than 62. /// /// [signature grinding]: https://bitcoin.stackexchange.com/questions/111660/what-is-signature-grinding pub const fn ground_p2wpkh(bytes_to_grind: usize) -> Self { @@ -1062,7 +1019,7 @@ impl InputWeightPrediction { /// /// # Panics /// - /// The funcion panics in const context and debug builds if `bytes_to_grind` is higher than 62. + /// The function panics in const context and debug builds if `bytes_to_grind` is higher than 62. /// /// [nested P2WPKH]: https://github.com/bitcoin/bips/blob/master/bip-0141.mediawiki#p2wpkh-nested-in-bip16-p2sh /// [signature grinding]: https://bitcoin.stackexchange.com/questions/111660/what-is-signature-grinding @@ -1083,7 +1040,7 @@ impl InputWeightPrediction { /// /// # Panics /// - /// The funcion panics in const context and debug builds if `bytes_to_grind` is higher than 62. + /// The function panics in const context and debug builds if `bytes_to_grind` is higher than 62. /// /// [signature grinding]: https://bitcoin.stackexchange.com/questions/111660/what-is-signature-grinding pub const fn ground_p2pkh_compressed(bytes_to_grind: usize) -> Self { @@ -1142,8 +1099,28 @@ impl InputWeightPrediction { /// Computes the **signature weight** added to a transaction by an input with this weight prediction, /// not counting the prevout (txid, index), sequence, potential witness flag bytes or the witness count varint. - pub const fn weight(&self) -> Weight { - Weight::from_wu_usize(self.script_size * 4 + self.witness_size) + #[deprecated(since = "TBD", note = "use `InputWeightPrediction::witness_weight()` instead")] + pub const fn weight(&self) -> Weight { Self::witness_weight(self) } + + /// Computes the signature, prevout (txid, index), and sequence weights of this weight + /// prediction. + /// + /// See also [`InputWeightPrediction::witness_weight`] + pub const fn total_weight(&self) -> Weight { + // `impl const Trait` is currently unavailable: rust/issues/67792 + // Convert to u64s because we can't use `Add` in const context. + let weight = TX_IN_BASE_WEIGHT.to_wu() + Self::witness_weight(self).to_wu(); + Weight::from_wu(weight) + } + + /// Computes the **signature weight** added to a transaction by an input with this weight prediction, + /// not counting the prevout (txid, index), sequence, potential witness flag bytes or the witness count varint. 
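// Usage sketch (not part of the patch) for `predict_weight` together with the
// `total_weight`/`witness_weight` split introduced above. The input prediction and output
// script lengths below are illustrative assumptions.
fn planned_tx_weight() -> bitcoin::Weight {
    use bitcoin::transaction::{predict_weight, InputWeightPrediction};

    predict_weight(
        // One P2WPKH key-spend input predicted at its maximum signature size.
        [InputWeightPrediction::P2WPKH_MAX],
        // scriptPubKey lengths of the planned outputs: one P2WPKH (22) and one P2TR (34).
        [22usize, 34],
    )
}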
+ /// + /// See also [`InputWeightPrediction::total_weight`] + pub const fn witness_weight(&self) -> Weight { + let wu = self.script_size * 4 + self.witness_size; + let wu = wu as u64; // Can't use `ToU64` in const context. + Weight::from_wu(wu) } } @@ -1226,7 +1203,7 @@ mod tests { assert_eq!( "5df6e0e2761359d30a8275058e299fcc0381534545f55cf43e41983f5d4c9456:lol" .parse::(), - Err(ParseOutPointError::Vout(parse::int::("lol").unwrap_err())) + Err(ParseOutPointError::Vout(parse::int_from_str::("lol").unwrap_err())) ); assert_eq!( @@ -1381,17 +1358,11 @@ mod tests { #[test] fn transaction_version() { - let tx_bytes = hex!("ffffff7f0100000000000000000000000000000000000000000000000000000000000000000000000000ffffffff0100f2052a01000000434104678afdb0fe5548271967f1a67130b7105cd6a828e03909a67962e0ea1f61deb649f6bc3f4cef38c4f35504e51ec112de5c384df7ba0b8d578a4c702b6bf11d5fac00000000"); + let tx_bytes = hex!("ffffffff0100000000000000000000000000000000000000000000000000000000000000000000000000ffffffff0100f2052a01000000434104678afdb0fe5548271967f1a67130b7105cd6a828e03909a67962e0ea1f61deb649f6bc3f4cef38c4f35504e51ec112de5c384df7ba0b8d578a4c702b6bf11d5fac00000000"); let tx: Result = deserialize(&tx_bytes); assert!(tx.is_ok()); let realtx = tx.unwrap(); - assert_eq!(realtx.version, Version::non_standard(2147483647)); - - let tx2_bytes = hex!("000000800100000000000000000000000000000000000000000000000000000000000000000000000000ffffffff0100f2052a01000000434104678afdb0fe5548271967f1a67130b7105cd6a828e03909a67962e0ea1f61deb649f6bc3f4cef38c4f35504e51ec112de5c384df7ba0b8d578a4c702b6bf11d5fac00000000"); - let tx2: Result = deserialize(&tx2_bytes); - assert!(tx2.is_ok()); - let realtx2 = tx2.unwrap(); - assert_eq!(realtx2.version, Version::non_standard(-2147483648)); + assert_eq!(realtx.version, Version::maybe_non_standard(u32::MAX)); } #[test] @@ -1428,7 +1399,7 @@ mod tests { #[test] fn txid() { - // segwit tx from Liquid integration tests, txid/hash from Core decoderawtransaction + // SegWit tx from Liquid integration tests, txid/hash from Core decoderawtransaction let tx_bytes = hex!( "01000000000102ff34f95a672bb6a4f6ff4a7e90fa8c7b3be7e70ffc39bc99be3bda67942e836c00000000\ 23220020cde476664d3fa347b8d54ef3aee33dcb686a65ced2b5207cbf4ec5eda6b9b46e4f414d4c934ad8\ @@ -1472,7 +1443,7 @@ mod tests { assert_eq!(format!("{:.10x}", tx.compute_txid()), "9652aa62b0"); assert_eq!(tx.weight(), Weight::from_wu(2718)); - // non-segwit tx from my mempool + // non-SegWit tx from my mempool let tx_bytes = hex!( "01000000010c7196428403d8b0c88fcb3ee8d64f56f55c8973c9ab7dd106bb4f3527f5888d000000006a47\ 30440220503a696f55f2c00eee2ac5e65b17767cd88ed04866b5637d3c1d5d996a70656d02202c9aff698f\ @@ -1563,7 +1534,7 @@ mod tests { use crate::consensus_validation::{TransactionExt as _, TxVerifyError}; use crate::witness::Witness; - // a random recent segwit transaction from blockchain using both old and segwit inputs + // a random recent SegWit transaction from blockchain using both old and SegWit inputs let mut spending: Transaction = 
deserialize(hex!("020000000001031cfbc8f54fbfa4a33a30068841371f80dbfe166211242213188428f437445c91000000006a47304402206fbcec8d2d2e740d824d3d36cc345b37d9f65d665a99f5bd5c9e8d42270a03a8022013959632492332200c2908459547bf8dbf97c65ab1a28dec377d6f1d41d3d63e012103d7279dfb90ce17fe139ba60a7c41ddf605b25e1c07a4ddcb9dfef4e7d6710f48feffffff476222484f5e35b3f0e43f65fc76e21d8be7818dd6a989c160b1e5039b7835fc00000000171600140914414d3c94af70ac7e25407b0689e0baa10c77feffffffa83d954a62568bbc99cc644c62eb7383d7c2a2563041a0aeb891a6a4055895570000000017160014795d04cc2d4f31480d9a3710993fbd80d04301dffeffffff06fef72f000000000017a91476fd7035cd26f1a32a5ab979e056713aac25796887a5000f00000000001976a914b8332d502a529571c6af4be66399cd33379071c588ac3fda0500000000001976a914fc1d692f8de10ae33295f090bea5fe49527d975c88ac522e1b00000000001976a914808406b54d1044c429ac54c0e189b0d8061667e088ac6eb68501000000001976a914dfab6085f3a8fb3e6710206a5a959313c5618f4d88acbba20000000000001976a914eb3026552d7e3f3073457d0bee5d4757de48160d88ac0002483045022100bee24b63212939d33d513e767bc79300051f7a0d433c3fcf1e0e3bf03b9eb1d70220588dc45a9ce3a939103b4459ce47500b64e23ab118dfc03c9caa7d6bfc32b9c601210354fd80328da0f9ae6eef2b3a81f74f9a6f66761fadf96f1d1d22b1fd6845876402483045022100e29c7e3a5efc10da6269e5fc20b6a1cb8beb92130cc52c67e46ef40aaa5cac5f0220644dd1b049727d991aece98a105563416e10a5ac4221abac7d16931842d5c322012103960b87412d6e169f30e12106bdf70122aabb9eb61f455518322a18b920a4dfa887d30700") .as_slice()).unwrap(); let spent1: Transaction = deserialize(hex!("020000000001040aacd2c49f5f3c0968cfa8caf9d5761436d95385252e3abb4de8f5dcf8a582f20000000017160014bcadb2baea98af0d9a902e53a7e9adff43b191e9feffffff96cd3c93cac3db114aafe753122bd7d1afa5aa4155ae04b3256344ecca69d72001000000171600141d9984579ceb5c67ebfbfb47124f056662fe7adbfeffffffc878dd74d3a44072eae6178bb94b9253177db1a5aaa6d068eb0e4db7631762e20000000017160014df2a48cdc53dae1aba7aa71cb1f9de089d75aac3feffffffe49f99275bc8363f5f593f4eec371c51f62c34ff11cc6d8d778787d340d6896c0100000017160014229b3b297a0587e03375ab4174ef56eeb0968735feffffff03360d0f00000000001976a9149f44b06f6ee92ddbc4686f71afe528c09727a5c788ac24281b00000000001976a9140277b4f68ff20307a2a9f9b4487a38b501eb955888ac227c0000000000001976a9148020cd422f55eef8747a9d418f5441030f7c9c7788ac0247304402204aa3bd9682f9a8e101505f6358aacd1749ecf53a62b8370b97d59243b3d6984f02200384ad449870b0e6e89c92505880411285ecd41cf11e7439b973f13bad97e53901210205b392ffcb83124b1c7ce6dd594688198ef600d34500a7f3552d67947bbe392802473044022033dfd8d190a4ae36b9f60999b217c775b96eb10dee3a1ff50fb6a75325719106022005872e4e36d194e49ced2ebcf8bb9d843d842e7b7e0eb042f4028396088d292f012103c9d7cbf369410b090480de2aa15c6c73d91b9ffa7d88b90724614b70be41e98e0247304402207d952de9e59e4684efed069797e3e2d993e9f98ec8a9ccd599de43005fe3f713022076d190cc93d9513fc061b1ba565afac574e02027c9efbfa1d7b71ab8dbb21e0501210313ad44bc030cc6cb111798c2bf3d2139418d751c1e79ec4e837ce360cc03b97a024730440220029e75edb5e9413eb98d684d62a077b17fa5b7cc19349c1e8cc6c4733b7b7452022048d4b9cae594f03741029ff841e35996ef233701c1ea9aa55c301362ea2e2f68012103590657108a72feb8dc1dec022cf6a230bb23dc7aaa52f4032384853b9f8388baf9d20700") @@ -1685,7 +1656,7 @@ mod tests { // 10 sat/kwu * (204wu + BASE_WEIGHT) = 4 sats let expected_fee = "4 sats".parse::().unwrap(); - let expected_effective_value = value.to_signed().unwrap() - expected_fee; + let expected_effective_value = (value.to_signed() - expected_fee).unwrap(); assert_eq!(effective_value, expected_effective_value); } @@ -1701,25 +1672,19 @@ mod tests { assert!(eff_value.is_none()); } - #[test] - fn 
effective_value_value_does_not_overflow() { - let eff_value = effective_value(FeeRate::ZERO, Weight::ZERO, Amount::MAX); - assert!(eff_value.is_none()); - } - #[test] fn txin_txout_weight() { // [(is_segwit, tx_hex, expected_weight)] let txs = [ - // one segwit input (P2WPKH) + // one SegWit input (P2WPKH) (true, "020000000001018a763b78d3e17acea0625bf9e52b0dc1beb2241b2502185348ba8ff4a253176e0100000000ffffffff0280d725000000000017a914c07ed639bd46bf7087f2ae1dfde63b815a5f8b488767fda20300000000160014869ec8520fa2801c8a01bfdd2e82b19833cd0daf02473044022016243edad96b18c78b545325aaff80131689f681079fb107a67018cb7fb7830e02205520dae761d89728f73f1a7182157f6b5aecf653525855adb7ccb998c8e6143b012103b9489bde92afbcfa85129a82ffa512897105d1a27ad9806bded27e0532fc84e700000000", Weight::from_wu(565)), - // one segwit input (P2WSH) + // one SegWit input (P2WSH) (true, "01000000000101a3ccad197118a2d4975fadc47b90eacfdeaf8268adfdf10ed3b4c3b7e1ad14530300000000ffffffff0200cc5501000000001976a91428ec6f21f4727bff84bb844e9697366feeb69f4d88aca2a5100d00000000220020701a8d401c84fb13e6baf169d59684e17abd9fa216c8cc5b9fc63d622ff8c58d04004730440220548f11130353b3a8f943d2f14260345fc7c20bde91704c9f1cbb5456355078cd0220383ed4ed39b079b618bcb279bbc1f2ca18cb028c4641cb522c9c5868c52a0dc20147304402203c332ecccb3181ca82c0600520ee51fee80d3b4a6ab110945e59475ec71e44ac0220679a11f3ca9993b04ccebda3c834876f353b065bb08f50076b25f5bb93c72ae1016952210375e00eb72e29da82b89367947f29ef34afb75e8654f6ea368e0acdfd92976b7c2103a1b26313f430c4b15bb1fdce663207659d8cac749a0e53d70eff01874496feff2103c96d495bfdd5ba4145e3e046fee45e84a8a48ad05bd8dbb395c011a32cf9f88053ae00000000", Weight::from_wu(766)), - // one segwit input (P2WPKH) and two legacy inputs (P2PKH) + // one SegWit input (P2WPKH) and two legacy inputs (P2PKH) (true, "010000000001036b6b6ac7e34e97c53c1cc74c99c7948af2e6aac75d8778004ae458d813456764000000006a473044022001deec7d9075109306320b3754188f81a8236d0d232b44bc69f8309115638b8f02204e17a5194a519cf994d0afeea1268740bdc10616b031a521113681cc415e815c012103488d3272a9fad78ee887f0684cb8ebcfc06d0945e1401d002e590c7338b163feffffffffc75bd7aa6424aee972789ec28ba181254ee6d8311b058d165bd045154d7660b0000000006b483045022100c8641bcbee3e4c47a00417875015d8c5d5ea918fb7e96f18c6ffe51bc555b401022074e2c46f5b1109cd79e39a9aa203eadd1d75356415e51d80928a5fb5feb0efee0121033504b4c6dfc3a5daaf7c425aead4c2dbbe4e7387ce8e6be2648805939ecf7054ffffffff494df3b205cd9430a26f8e8c0dc0bb80496fbc555a524d6ea307724bc7e60eee0100000000ffffffff026d861500000000001976a9145c54ed1360072ebaf56e87693b88482d2c6a101588ace407000000000000160014761e31e2629c6e11936f2f9888179d60a5d4c1f900000247304402201fa38a67a63e58b67b6cfffd02f59121ca1c8a1b22e1efe2573ae7e4b4f06c2b022002b9b431b58f6e36b3334fb14eaecee7d2f06967a77ef50d8d5f90dda1057f0c01210257dc6ce3b1100903306f518ee8fa113d778e403f118c080b50ce079fba40e09a00000000", Weight::from_wu(1755)), // three legacy inputs (P2PKH) (false, 
"0100000003e4d7be4314204a239d8e00691128dca7927e19a7339c7948bde56f669d27d797010000006b483045022100b988a858e2982e2daaf0755b37ad46775d6132057934877a5badc91dee2f66ff022020b967c1a2f0916007662ec609987e951baafa6d4fda23faaad70715611d6a2501210254a2dccd8c8832d4677dc6f0e562eaaa5d11feb9f1de2c50a33832e7c6190796ffffffff9e22eb1b3f24c260187d716a8a6c2a7efb5af14a30a4792a6eeac3643172379c000000006a47304402207df07f0cd30dca2cf7bed7686fa78d8a37fe9c2254dfdca2befed54e06b779790220684417b8ff9f0f6b480546a9e90ecee86a625b3ea1e4ca29b080da6bd6c5f67e01210254a2dccd8c8832d4677dc6f0e562eaaa5d11feb9f1de2c50a33832e7c6190796ffffffff1123df3bfb503b59769731da103d4371bc029f57979ebce68067768b958091a1000000006a47304402207a016023c2b0c4db9a7d4f9232fcec2193c2f119a69125ad5bcedcba56dd525e02206a734b3a321286c896759ac98ebfd9d808df47f1ce1fbfbe949891cc3134294701210254a2dccd8c8832d4677dc6f0e562eaaa5d11feb9f1de2c50a33832e7c6190796ffffffff0200c2eb0b000000001976a914e5eb3e05efad136b1405f5c2f9adb14e15a35bb488ac88cfff1b000000001976a9144846db516db3130b7a3c92253599edec6bc9630b88ac00000000", Weight::from_wu(2080)), - // one segwit input (P2TR) + // one SegWit input (P2TR) (true, "01000000000101b5cee87f1a60915c38bb0bc26aaf2b67be2b890bbc54bb4be1e40272e0d2fe0b0000000000ffffffff025529000000000000225120106daad8a5cb2e6fc74783714273bad554a148ca2d054e7a19250e9935366f3033760000000000002200205e6d83c44f57484fd2ef2a62b6d36cdcd6b3e06b661e33fd65588a28ad0dbe060141df9d1bfce71f90d68bf9e9461910b3716466bfe035c7dbabaa7791383af6c7ef405a3a1f481488a91d33cd90b098d13cb904323a3e215523aceaa04e1bb35cdb0100000000", Weight::from_wu(617)), // one legacy input (P2PKH) (false, "0100000001c336895d9fa674f8b1e294fd006b1ac8266939161600e04788c515089991b50a030000006a47304402204213769e823984b31dcb7104f2c99279e74249eacd4246dabcf2575f85b365aa02200c3ee89c84344ae326b637101a92448664a8d39a009c8ad5d147c752cbe112970121028b1b44b4903c9103c07d5a23e3c7cf7aeb0ba45ddbd2cfdce469ab197381f195fdffffff040000000000000000536a4c5058325bb7b7251cf9e36cac35d691bd37431eeea426d42cbdecca4db20794f9a4030e6cb5211fabf887642bcad98c9994430facb712da8ae5e12c9ae5ff314127d33665000bb26c0067000bb0bf00322a50c300000000000017a9145ca04fdc0a6d2f4e3f67cfeb97e438bb6287725f8750c30000000000001976a91423086a767de0143523e818d4273ddfe6d9e4bbcc88acc8465003000000001976a914c95cbacc416f757c65c942f9b6b8a20038b9b12988ac00000000", Weight::from_wu(1396)), @@ -1742,7 +1707,7 @@ mod tests { + tx.input.iter().fold(Weight::ZERO, |sum, i| sum + txin_weight(i)) + tx.output.iter().fold(Weight::ZERO, |sum, o| sum + o.weight()); - // The empty tx uses segwit serialization but a legacy tx does not. + // The empty tx uses SegWit serialization but a legacy tx does not. if !tx.uses_segwit_serialization() { calculated_weight -= Weight::from_wu(2); } @@ -1941,19 +1906,34 @@ mod tests { // Confirm signature grinding input weight predictions are aligned with constants. 
assert_eq!( - InputWeightPrediction::ground_p2wpkh(0).weight(), - InputWeightPrediction::P2WPKH_MAX.weight() + InputWeightPrediction::ground_p2wpkh(0).witness_weight(), + InputWeightPrediction::P2WPKH_MAX.witness_weight() ); assert_eq!( - InputWeightPrediction::ground_nested_p2wpkh(0).weight(), - InputWeightPrediction::NESTED_P2WPKH_MAX.weight() + InputWeightPrediction::ground_nested_p2wpkh(0).witness_weight(), + InputWeightPrediction::NESTED_P2WPKH_MAX.witness_weight() ); assert_eq!( - InputWeightPrediction::ground_p2pkh_compressed(0).weight(), - InputWeightPrediction::P2PKH_COMPRESSED_MAX.weight() + InputWeightPrediction::ground_p2pkh_compressed(0).witness_weight(), + InputWeightPrediction::P2PKH_COMPRESSED_MAX.witness_weight() ); } + #[test] + fn weight_prediction_const_from_slices() { + let predict = [ + InputWeightPrediction::P2WPKH_MAX, + InputWeightPrediction::NESTED_P2WPKH_MAX, + InputWeightPrediction::P2PKH_COMPRESSED_MAX, + InputWeightPrediction::P2PKH_UNCOMPRESSED_MAX, + InputWeightPrediction::P2TR_KEY_DEFAULT_SIGHASH, + InputWeightPrediction::P2TR_KEY_NON_DEFAULT_SIGHASH, + ]; + + let weight = predict_weight_from_slices(&predict, &[1]); + assert_eq!(weight, Weight::from_wu(2493)); + } + #[test] fn sequence_debug_output() { let seq = Sequence::from_seconds_floor(1000); diff --git a/bitcoin/src/blockdata/witness.rs b/bitcoin/src/blockdata/witness.rs index 318baaf508..f8e8abe5ba 100644 --- a/bitcoin/src/blockdata/witness.rs +++ b/bitcoin/src/blockdata/witness.rs @@ -13,7 +13,10 @@ use crate::crypto::ecdsa; use crate::prelude::Vec; #[cfg(doc)] use crate::script::ScriptExt as _; -use crate::taproot::{self, TAPROOT_ANNEX_PREFIX}; +use crate::taproot::{ + self, ControlBlock, LeafScript, LeafVersion, TAPROOT_ANNEX_PREFIX, TAPROOT_CONTROL_BASE_SIZE, + TAPROOT_LEAF_MASK, TaprootMerkleBranch, +}; use crate::Script; #[rustfmt::skip] // Keep public re-exports separate. @@ -132,6 +135,15 @@ crate::internal_macros::define_extension_trait! { witness } + /// Finishes constructing the P2TR script spend witness by pushing the required items. + fn push_p2tr_script_spend(&mut self, script: &Script, control_block: &ControlBlock>, annex: Option<&[u8]>) { + self.push(script.as_bytes()); + self.push(&*control_block.encode_to_arrayvec()); + if let Some(annex) = annex { + self.push(annex); + } + } + /// Pushes, as a new element on the witness, an ECDSA signature. /// /// Pushes the DER encoded signature + sighash_type, requires an allocation. @@ -147,14 +159,28 @@ crate::internal_macros::define_extension_trait! { /// /// See [`Script::is_p2tr`] to check whether this is actually a Taproot witness. fn tapscript(&self) -> Option<&Script> { - if self.is_empty() { - return None; + match P2TrSpend::from_witness(self) { + // Note: the method is named "tapscript" but historically it was actually returning + // leaf script. This is broken but we now keep the behavior the same to not subtly + // break someone. + Some(P2TrSpend::Script { leaf_script, .. }) => Some(leaf_script), + _ => None, } + } - if self.taproot_annex().is_some() { - self.third_to_last().map(Script::from_bytes) - } else { - self.second_to_last().map(Script::from_bytes) + /// Returns the leaf script with its version but without the merkle proof. + /// + /// This does not guarantee that this represents a P2TR [`Witness`]. It + /// merely gets the second to last or third to last element depending on + /// the first byte of the last element being equal to 0x50 and the associated + /// version. 
+ fn taproot_leaf_script(&self) -> Option> { + match P2TrSpend::from_witness(self) { + Some(P2TrSpend::Script { leaf_script, control_block, .. }) if control_block.len() >= TAPROOT_CONTROL_BASE_SIZE => { + let version = LeafVersion::from_consensus(control_block[0] & TAPROOT_LEAF_MASK).ok()?; + Some(LeafScript { version, script: leaf_script, }) + }, + _ => None, } } @@ -166,14 +192,9 @@ crate::internal_macros::define_extension_trait! { /// /// See [`Script::is_p2tr`] to check whether this is actually a Taproot witness. fn taproot_control_block(&self) -> Option<&[u8]> { - if self.is_empty() { - return None; - } - - if self.taproot_annex().is_some() { - self.second_to_last() - } else { - self.last() + match P2TrSpend::from_witness(self) { + Some(P2TrSpend::Script { control_block, .. }) => Some(control_block), + _ => None, } } @@ -183,17 +204,7 @@ crate::internal_macros::define_extension_trait! { /// /// See [`Script::is_p2tr`] to check whether this is actually a Taproot witness. fn taproot_annex(&self) -> Option<&[u8]> { - self.last().and_then(|last| { - // From BIP341: - // If there are at least two witness elements, and the first byte of - // the last element is 0x50, this last element is called annex a - // and is removed from the witness stack. - if self.len() >= 2 && last.first() == Some(&TAPROOT_ANNEX_PREFIX) { - Some(last) - } else { - None - } - }) + P2TrSpend::from_witness(self)?.annex() } /// Get the p2wsh witness script following BIP141 rules. @@ -206,6 +217,90 @@ crate::internal_macros::define_extension_trait! { } } +/// Represents a possible Taproot spend. +/// +/// Taproot can be spent as key spend or script spend and, depending on which it is, different data +/// is in the witness. This type helps representing that data more cleanly when parsing the witness +/// because there are a lot of conditions that make reasoning hard. It's better to parse it at one +/// place and pass it along. +/// +/// This type is so far private but it could be published eventually. The design is geared towards +/// it but it's not fully finished. +enum P2TrSpend<'a> { + Key { + // This field is technically present in witness in case of key spend but none of our code + // uses it yet. Rather than deleting it, it's kept here commented as documentation and as + // an easy way to add it if anything needs it - by just uncommenting. + // signature: &'a [u8], + annex: Option<&'a [u8]>, + }, + Script { + leaf_script: &'a Script, + control_block: &'a [u8], + annex: Option<&'a [u8]>, + }, +} + +impl<'a> P2TrSpend<'a> { + /// Parses `Witness` to determine what kind of taproot spend this is. + /// + /// Note: this assumes `witness` is a taproot spend. The function cannot figure it out for sure + /// (without knowing the output), so it doesn't attempt to check anything other than what is + /// required for the program to not crash. + /// + /// In other words, if the caller is certain that the witness is a valid p2tr spend (e.g. + /// obtained from Bitcoin Core) then it's OK to unwrap this but not vice versa - `Some` does + /// not imply correctness. + fn from_witness(witness: &'a Witness) -> Option { + // BIP341 says: + // If there are at least two witness elements, and the first byte of + // the last element is 0x50, this last element is called annex a + // and is removed from the witness stack. + // + // However here we're not removing anything, so we have to adjust the numbers to account + // for the fact that annex is still there. 
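        // Illustrative summary of the stack layouts the match below distinguishes (annex still on
        // the stack, see the BIP341 quote above):
        //   [signature]                              key spend, no annex
        //   [signature, annex]                       key spend with annex (last item starts 0x50)
        //   [.., leaf_script, control_block]         script spend, no annex
        //   [.., leaf_script, control_block, annex]  script spend with annex (last item starts 0x50)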
+ match witness.len() { + 0 => None, + 1 => Some(P2TrSpend::Key { + /* signature: witness.last().expect("len > 0") ,*/ annex: None, + }), + 2 if witness.last().expect("len > 0").starts_with(&[TAPROOT_ANNEX_PREFIX]) => { + let spend = P2TrSpend::Key { + // signature: witness.get_back(1).expect("len > 1"), + annex: witness.last(), + }; + Some(spend) + } + // 2 => this is script spend without annex - same as when there are 3+ elements and the + // last one does NOT start with TAPROOT_ANNEX_PREFIX. This is handled in the catchall + // arm. + 3.. if witness.last().expect("len > 0").starts_with(&[TAPROOT_ANNEX_PREFIX]) => { + let spend = P2TrSpend::Script { + leaf_script: Script::from_bytes(witness.get_back(2).expect("len > 2")), + control_block: witness.get_back(1).expect("len > 1"), + annex: witness.last(), + }; + Some(spend) + } + _ => { + let spend = P2TrSpend::Script { + leaf_script: Script::from_bytes(witness.get_back(1).expect("len > 1")), + control_block: witness.last().expect("len > 0"), + annex: None, + }; + Some(spend) + } + } + } + + fn annex(&self) -> Option<&'a [u8]> { + match self { + P2TrSpend::Key { annex, .. } => *annex, + P2TrSpend::Script { annex, .. } => *annex, + } + } +} + mod sealed { pub trait Sealed {} impl Sealed for super::Witness {} @@ -246,7 +341,7 @@ mod test { } #[test] - fn test_push_ecdsa_sig() { + fn push_ecdsa_sig() { // The very first signature in block 734,958 let sig_bytes = hex!("304402207c800d698f4b0298c5aac830b822f011bb02df41eb114ade9a6702f364d5e39c0220366900d2a60cab903e77ef7dd415d46509b1f78ac78906e3296f495aa1b1b541"); @@ -286,20 +381,14 @@ mod test { } #[test] - fn test_get_tapscript() { + fn get_tapscript() { let tapscript = hex!("deadbeef"); let control_block = hex!("02"); // annex starting with 0x50 causes the branching logic. let annex = hex!("50"); - let witness_vec = vec![tapscript.clone(), control_block.clone()]; - let witness_vec_annex = vec![tapscript.clone(), control_block, annex]; - - let witness_serialized: Vec = serialize(&witness_vec); - let witness_serialized_annex: Vec = serialize(&witness_vec_annex); - - let witness = deserialize::(&witness_serialized[..]).unwrap(); - let witness_annex = deserialize::(&witness_serialized_annex[..]).unwrap(); + let witness = Witness::from([&*tapscript, &control_block]); + let witness_annex = Witness::from([&*tapscript, &control_block, &annex]); // With or without annex, the tapscript should be returned. assert_eq!(witness.tapscript(), Some(Script::from_bytes(&tapscript[..]))); @@ -307,19 +396,32 @@ mod test { } #[test] - fn test_get_tapscript_from_keypath() { - let signature = hex!("deadbeef"); + fn get_taproot_leaf_script() { + let tapscript = hex!("deadbeef"); + let control_block = + hex!("c0ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff"); // annex starting with 0x50 causes the branching logic. let annex = hex!("50"); - let witness_vec = vec![signature.clone()]; - let witness_vec_annex = vec![signature.clone(), annex]; + let witness = Witness::from([&*tapscript, &control_block]); + let witness_annex = Witness::from([&*tapscript, &control_block, &annex]); + + let expected_leaf_script = + LeafScript { version: LeafVersion::TapScript, script: Script::from_bytes(&tapscript) }; + + // With or without annex, the tapscript should be returned. 
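        // The control block's first byte is 0xc0; masked with TAPROOT_LEAF_MASK (0xfe) it is still
        // 0xc0, i.e. `LeafVersion::TapScript`, which is why that version is expected below.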
+ assert_eq!(witness.taproot_leaf_script().unwrap(), expected_leaf_script); + assert_eq!(witness_annex.taproot_leaf_script().unwrap(), expected_leaf_script); + } - let witness_serialized: Vec = serialize(&witness_vec); - let witness_serialized_annex: Vec = serialize(&witness_vec_annex); + #[test] + fn get_tapscript_from_keypath() { + let signature = hex!("deadbeef"); + // annex starting with 0x50 causes the branching logic. + let annex = hex!("50"); - let witness = deserialize::(&witness_serialized[..]).unwrap(); - let witness_annex = deserialize::(&witness_serialized_annex[..]).unwrap(); + let witness = Witness::from([&*signature]); + let witness_annex = Witness::from([&*signature, &annex]); // With or without annex, no tapscript should be returned. assert_eq!(witness.tapscript(), None); @@ -327,41 +429,32 @@ mod test { } #[test] - fn test_get_control_block() { + fn get_control_block() { let tapscript = hex!("deadbeef"); let control_block = hex!("02"); // annex starting with 0x50 causes the branching logic. let annex = hex!("50"); + let signature = vec![0xff; 64]; - let witness_vec = vec![tapscript.clone(), control_block.clone()]; - let witness_vec_annex = vec![tapscript.clone(), control_block.clone(), annex]; - - let witness_serialized: Vec = serialize(&witness_vec); - let witness_serialized_annex: Vec = serialize(&witness_vec_annex); - - let witness = deserialize::(&witness_serialized[..]).unwrap(); - let witness_annex = deserialize::(&witness_serialized_annex[..]).unwrap(); + let witness = Witness::from([&*tapscript, &control_block]); + let witness_annex = Witness::from([&*tapscript, &control_block, &annex]); + let witness_key_spend_annex = Witness::from([&*signature, &annex]); // With or without annex, the tapscript should be returned. assert_eq!(witness.taproot_control_block(), Some(&control_block[..])); assert_eq!(witness_annex.taproot_control_block(), Some(&control_block[..])); + assert!(witness_key_spend_annex.taproot_control_block().is_none()) } #[test] - fn test_get_annex() { + fn get_annex() { let tapscript = hex!("deadbeef"); let control_block = hex!("02"); // annex starting with 0x50 causes the branching logic. let annex = hex!("50"); - let witness_vec = vec![tapscript.clone(), control_block.clone()]; - let witness_vec_annex = vec![tapscript.clone(), control_block.clone(), annex.clone()]; - - let witness_serialized: Vec = serialize(&witness_vec); - let witness_serialized_annex: Vec = serialize(&witness_vec_annex); - - let witness = deserialize::(&witness_serialized[..]).unwrap(); - let witness_annex = deserialize::(&witness_serialized_annex[..]).unwrap(); + let witness = Witness::from([&*tapscript, &control_block]); + let witness_annex = Witness::from([&*tapscript, &control_block, &annex]); // With or without annex, the tapscript should be returned. assert_eq!(witness.taproot_annex(), None); @@ -372,14 +465,8 @@ mod test { // annex starting with 0x50 causes the branching logic. let annex = hex!("50"); - let witness_vec = vec![signature.clone()]; - let witness_vec_annex = vec![signature.clone(), annex.clone()]; - - let witness_serialized: Vec = serialize(&witness_vec); - let witness_serialized_annex: Vec = serialize(&witness_vec_annex); - - let witness = deserialize::(&witness_serialized[..]).unwrap(); - let witness_annex = deserialize::(&witness_serialized_annex[..]).unwrap(); + let witness = Witness::from([&*signature]); + let witness_annex = Witness::from([&*signature, &annex]); // With or without annex, the tapscript should be returned. 
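        // Only the three-element stack carries an annex here; the two-element stack is parsed as a
        // script spend without one, so `taproot_annex` returns `None` for it.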
assert_eq!(witness.taproot_annex(), None); @@ -387,7 +474,7 @@ mod test { } #[test] - fn test_tx() { + fn tx() { const S: &str = "02000000000102b44f26b275b8ad7b81146ba3dbecd081f9c1ea0dc05b97516f56045cfcd3df030100000000ffffffff1cb4749ae827c0b75f3d0a31e63efc8c71b47b5e3634a4c698cd53661cab09170100000000ffffffff020b3a0500000000001976a9143ea74de92762212c96f4dd66c4d72a4deb20b75788ac630500000000000016001493a8dfd1f0b6a600ab01df52b138cda0b82bb7080248304502210084622878c94f4c356ce49c8e33a063ec90f6ee9c0208540888cfab056cd1fca9022014e8dbfdfa46d318c6887afd92dcfa54510e057565e091d64d2ee3a66488f82c0121026e181ffb98ebfe5a64c983073398ea4bcd1548e7b971b4c175346a25a1c12e950247304402203ef00489a0d549114977df2820fab02df75bebb374f5eee9e615107121658cfa02204751f2d1784f8e841bff6d3bcf2396af2f1a5537c0e4397224873fbd3bfbe9cf012102ae6aa498ce2dd204e9180e71b4fb1260fe3d1a95c8025b34e56a9adf5f278af200000000"; let tx_bytes = hex!(S); let tx: Transaction = deserialize(&tx_bytes).unwrap(); @@ -397,13 +484,10 @@ mod test { assert_eq!(expected_wit[i], wit_el.to_lower_hex_string()); } assert_eq!(expected_wit[1], tx.input[0].witness.last().unwrap().to_lower_hex_string()); - assert_eq!( - expected_wit[0], - tx.input[0].witness.second_to_last().unwrap().to_lower_hex_string() - ); - assert_eq!(expected_wit[0], tx.input[0].witness.nth(0).unwrap().to_lower_hex_string()); - assert_eq!(expected_wit[1], tx.input[0].witness.nth(1).unwrap().to_lower_hex_string()); - assert_eq!(None, tx.input[0].witness.nth(2)); + assert_eq!(expected_wit[0], tx.input[0].witness.get_back(1).unwrap().to_lower_hex_string()); + assert_eq!(expected_wit[0], tx.input[0].witness.get(0).unwrap().to_lower_hex_string()); + assert_eq!(expected_wit[1], tx.input[0].witness.get(1).unwrap().to_lower_hex_string()); + assert_eq!(None, tx.input[0].witness.get(2)); assert_eq!(expected_wit[0], tx.input[0].witness[0].to_lower_hex_string()); assert_eq!(expected_wit[1], tx.input[0].witness[1].to_lower_hex_string()); diff --git a/bitcoin/src/consensus/encode.rs b/bitcoin/src/consensus/encode.rs index e2946b16b6..c2a751a0fe 100644 --- a/bitcoin/src/consensus/encode.rs +++ b/bitcoin/src/consensus/encode.rs @@ -16,7 +16,7 @@ use core::mem; -use hashes::{sha256, sha256d, GeneralHash, Hash}; +use hashes::{sha256, sha256d, Hash}; use hex::DisplayHex as _; use internals::{compact_size, ToU64}; use io::{BufRead, Cursor, Read, Write}; @@ -286,7 +286,7 @@ pub trait Decodable: Sized { /// /// ### Rules for trait implementations /// - /// * Simple types that that have a fixed size (own and member fields), don't have to overwrite + /// * Simple types that have a fixed size (own and member fields), don't have to overwrite /// this method, or be concern with it. /// * Types that deserialize using externally provided length should implement it: /// * Make `consensus_decode` forward to `consensus_decode_bytes_from_finite_reader` with the @@ -629,7 +629,7 @@ impl Decodable for Box<[u8]> { /// Does a double-SHA256 on `data` and returns the first 4 bytes. fn sha2_checksum(data: &[u8]) -> [u8; 4] { - let checksum = ::hash(data); + let checksum = sha256d::hash(data); let checksum = checksum.to_byte_array(); [checksum[0], checksum[1], checksum[2], checksum[3]] } diff --git a/bitcoin/src/consensus/error.rs b/bitcoin/src/consensus/error.rs index 21fd89bbe2..be0bccc9de 100644 --- a/bitcoin/src/consensus/error.rs +++ b/bitcoin/src/consensus/error.rs @@ -2,6 +2,7 @@ //! Consensus encoding errors. 
+use core::convert::Infallible; use core::fmt; use hex::error::{InvalidCharError, OddLengthStringError}; @@ -12,7 +13,7 @@ use internals::write_err; use super::IterReader; /// Error deserializing from a slice. -#[derive(Debug, PartialEq, Eq)] +#[derive(Debug, Clone, PartialEq, Eq)] #[non_exhaustive] pub enum DeserializeError { /// Error parsing encoded object. @@ -21,7 +22,9 @@ pub enum DeserializeError { Unconsumed, } -internals::impl_from_infallible!(DeserializeError); +impl From for DeserializeError { + fn from(never: Infallible) -> Self { match never {} } +} impl fmt::Display for DeserializeError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { @@ -64,7 +67,9 @@ pub enum DecodeError { Other(E), // Yielded by the inner iterator. } -internals::impl_from_infallible!(DecodeError); +impl From for DecodeError { + fn from(never: Infallible) -> Self { match never {} } +} impl fmt::Display for DecodeError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { @@ -101,14 +106,16 @@ pub enum Error { Parse(ParseError), } -internals::impl_from_infallible!(Error); +impl From for Error { + fn from(never: Infallible) -> Self { match never {} } +} impl fmt::Display for Error { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { use Error::*; match *self { - Io(ref e) => write_err!(f, "IO error"; e), + Io(ref e) => write_err!(f, "I/O error"; e), Parse(ref e) => write_err!(f, "error parsing encoded object"; e), } } @@ -142,7 +149,7 @@ impl From for Error { } /// Encoding is invalid. -#[derive(Debug, PartialEq, Eq)] +#[derive(Debug, Clone, PartialEq, Eq)] #[non_exhaustive] pub enum ParseError { /// Missing data (early end of file or slice too short). @@ -165,11 +172,13 @@ pub enum ParseError { NonMinimalVarInt, /// Parsing error. ParseFailed(&'static str), - /// Unsupported Segwit flag. + /// Unsupported SegWit flag. 
UnsupportedSegwitFlag(u8), } -internals::impl_from_infallible!(ParseError); +impl From for ParseError { + fn from(never: Infallible) -> Self { match never {} } +} impl fmt::Display for ParseError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { @@ -184,7 +193,7 @@ impl fmt::Display for ParseError { NonMinimalVarInt => write!(f, "non-minimal varint"), ParseFailed(ref s) => write!(f, "parse failed: {}", s), UnsupportedSegwitFlag(ref swflag) => - write!(f, "unsupported segwit version: {}", swflag), + write!(f, "unsupported SegWit version: {}", swflag), } } } diff --git a/bitcoin/src/consensus/mod.rs b/bitcoin/src/consensus/mod.rs index 708ff6347a..ecf4b6fdb0 100644 --- a/bitcoin/src/consensus/mod.rs +++ b/bitcoin/src/consensus/mod.rs @@ -44,8 +44,8 @@ impl>> IterReader { (Err(consensus::encode::Error::Io(io_error)), Some(de_error)) if io_error.kind() == io::ErrorKind::Other && io_error.get_ref().is_none() => Err(DecodeError::Other(de_error)), (Err(consensus::encode::Error::Parse(parse_error)), None) => Err(DecodeError::Parse(parse_error)), - (Err(consensus::encode::Error::Io(io_error)), de_error) => panic!("unexpected IO error {:?} returned from {}::consensus_decode(), deserialization error: {:?}", io_error, core::any::type_name::(), de_error), - (Err(consensus_error), Some(de_error)) => panic!("{} should've returned `Other` IO error because of deserialization error {:?} but it returned consensus error {:?} instead", core::any::type_name::(), de_error, consensus_error), + (Err(consensus::encode::Error::Io(io_error)), de_error) => panic!("unexpected I/O error {:?} returned from {}::consensus_decode(), deserialization error: {:?}", io_error, core::any::type_name::(), de_error), + (Err(consensus_error), Some(de_error)) => panic!("{} should've returned `Other` I/O error because of deserialization error {:?} but it returned consensus error {:?} instead", core::any::type_name::(), de_error, consensus_error), } } } diff --git a/bitcoin/src/consensus/serde.rs b/bitcoin/src/consensus/serde.rs index 8db3989b21..a182e8640f 100644 --- a/bitcoin/src/consensus/serde.rs +++ b/bitcoin/src/consensus/serde.rs @@ -443,12 +443,12 @@ impl With { match (result, writer.error) { (Ok(_), None) => writer.serializer.end(), (Ok(_), Some(error)) => - panic!("{} silently ate an IO error: {:?}", core::any::type_name::(), error), + panic!("{} silently ate an I/O error: {:?}", core::any::type_name::(), error), (Err(io_error), Some(ser_error)) if io_error.kind() == io::ErrorKind::Other && io_error.get_ref().is_none() => Err(ser_error), (Err(io_error), ser_error) => panic!( - "{} returned an unexpected IO error: {:?} serialization error: {:?}", + "{} returned an unexpected I/O error: {:?} serialization error: {:?}", core::any::type_name::(), io_error, ser_error diff --git a/bitcoin/src/consensus_validation.rs b/bitcoin/src/consensus_validation.rs index 4f814c904b..e80994f905 100644 --- a/bitcoin/src/consensus_validation.rs +++ b/bitcoin/src/consensus_validation.rs @@ -4,6 +4,7 @@ //! //! Relies on the `bitcoinconsensus` crate that uses Bitcoin Core libconsensus to perform validation. +use core::convert::Infallible; use core::fmt; use internals::write_err; @@ -133,7 +134,7 @@ define_extension_trait! { fn verify( &self, index: usize, - amount: crate::Amount, + amount: Amount, spending_tx: &[u8], ) -> Result<(), BitcoinconsensusError> { verify_script(self, index, amount, spending_tx) @@ -152,7 +153,7 @@ define_extension_trait! 
{ fn verify_with_flags( &self, index: usize, - amount: crate::Amount, + amount: Amount, spending_tx: &[u8], flags: impl Into, ) -> Result<(), BitcoinconsensusError> { @@ -237,7 +238,9 @@ pub enum TxVerifyError { UnknownSpentOutput(OutPoint), } -internals::impl_from_infallible!(TxVerifyError); +impl From for TxVerifyError { + fn from(never: Infallible) -> Self { match never {} } +} impl fmt::Display for TxVerifyError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { diff --git a/bitcoin/src/crypto/ecdsa.rs b/bitcoin/src/crypto/ecdsa.rs index 2755f0b511..58c0b028b4 100644 --- a/bitcoin/src/crypto/ecdsa.rs +++ b/bitcoin/src/crypto/ecdsa.rs @@ -4,6 +4,7 @@ //! //! This module provides ECDSA signatures used by Bitcoin that can be roundtrip (de)serialized. +use core::convert::Infallible; use core::str::FromStr; use core::{fmt, iter}; @@ -213,7 +214,9 @@ pub enum DecodeError { Secp256k1(secp256k1::Error), } -internals::impl_from_infallible!(DecodeError); +impl From for DecodeError { + fn from(never: Infallible) -> Self { match never {} } +} impl fmt::Display for DecodeError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { @@ -258,7 +261,9 @@ pub enum ParseSignatureError { Decode(DecodeError), } -internals::impl_from_infallible!(ParseSignatureError); +impl From for ParseSignatureError { + fn from(never: Infallible) -> Self { match never {} } +} impl fmt::Display for ParseSignatureError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { diff --git a/bitcoin/src/crypto/key.rs b/bitcoin/src/crypto/key.rs index 9772f39d40..9db3079069 100644 --- a/bitcoin/src/crypto/key.rs +++ b/bitcoin/src/crypto/key.rs @@ -5,12 +5,14 @@ //! This module provides keys used in Bitcoin that can be roundtrip //! (de)serialized. +use core::convert::Infallible; use core::fmt::{self, Write as _}; use core::ops; use core::str::FromStr; use hashes::hash160; use hex::{FromHex, HexToArrayError}; +use internals::array::ArrayExt; use internals::array_vec::ArrayVec; use internals::{impl_to_hex_from_lower_hex, write_err}; use io::{Read, Write}; @@ -20,7 +22,7 @@ use crate::internal_macros::impl_asref_push_bytes; use crate::network::NetworkKind; use crate::prelude::{DisplayHex, String, Vec}; use crate::script::{self, ScriptBuf}; -use crate::taproot::{TapNodeHash, TapTweakHash, TapTweakHashExt as _}; +use crate::taproot::{TapNodeHash, TapTweakHash}; #[rustfmt::skip] // Keep public re-exports separate. pub use secp256k1::{constants, Keypair, Parity, Secp256k1, Verification, XOnlyPublicKey}; @@ -241,14 +243,14 @@ impl FromStr for PublicKey { match s.len() { 66 => { let bytes = <[u8; 33]>::from_hex(s).map_err(|e| match e { - InvalidChar(e) => ParsePublicKeyError::InvalidChar(e.invalid_char()), + InvalidChar(e) => ParsePublicKeyError::InvalidChar(e), InvalidLength(_) => unreachable!("length checked already"), })?; Ok(PublicKey::from_slice(&bytes)?) } 130 => { let bytes = <[u8; 65]>::from_hex(s).map_err(|e| match e { - InvalidChar(e) => ParsePublicKeyError::InvalidChar(e.invalid_char()), + InvalidChar(e) => ParsePublicKeyError::InvalidChar(e), InvalidLength(_) => unreachable!("length checked already"), })?; Ok(PublicKey::from_slice(&bytes)?) @@ -323,11 +325,11 @@ impl CompressedPublicKey { /// Serializes the public key. /// - /// As the type name suggests, the key is serialzied in compressed format. + /// As the type name suggests, the key is serialized in compressed format. /// /// Note that this can be used as a sort key to get BIP67-compliant sorting. 
/// That's why this type doesn't have the `to_sort_key` method - it would duplicate this one. - pub fn to_bytes(&self) -> [u8; 33] { self.0.serialize() } + pub fn to_bytes(self) -> [u8; 33] { self.0.serialize() } /// Deserializes a public key from a slice. pub fn from_slice(data: &[u8]) -> Result { @@ -458,12 +460,22 @@ impl PrivateKey { /// Serializes the private key to bytes. pub fn to_vec(self) -> Vec { self.inner[..].to_vec() } + /// Deserializes a private key from a byte array. + pub fn from_byte_array( + data: [u8; 32], + network: impl Into, + ) -> Result { + Ok(PrivateKey::new(secp256k1::SecretKey::from_byte_array(&data)?, network)) + } + /// Deserializes a private key from a slice. + #[deprecated(since = "TBD", note = "use from_byte_array instead")] pub fn from_slice( data: &[u8], network: impl Into, ) -> Result { - Ok(PrivateKey::new(secp256k1::SecretKey::from_slice(data)?, network)) + let array = data.try_into().map_err(|_| secp256k1::Error::InvalidSecretKey)?; + Self::from_byte_array(array, network) } /// Formats the private key to WIF format. @@ -494,15 +506,20 @@ impl PrivateKey { pub fn from_wif(wif: &str) -> Result { let data = base58::decode_check(wif)?; - let compressed = match data.len() { - 33 => false, - 34 => true, - length => { - return Err(InvalidBase58PayloadLengthError { length }.into()); + let (compressed, data) = if let Ok(data) = <&[u8; 33]>::try_from(&*data) { + (false, data) + } else if let Ok(data) = <&[u8; 34]>::try_from(&*data) { + let (compressed_flag, data) = data.split_last::<33>(); + if *compressed_flag != 1 { + return Err(InvalidWifCompressionFlagError { invalid: *compressed_flag }.into()); } + (true, data) + } else { + return Err(InvalidBase58PayloadLengthError { length: data.len() }.into()); }; - let network = match data[0] { + let (network, key) = data.split_first(); + let network = match *network { 128 => NetworkKind::Main, 239 => NetworkKind::Test, invalid => { @@ -513,9 +530,23 @@ impl PrivateKey { Ok(PrivateKey { compressed, network, - inner: secp256k1::SecretKey::from_slice(&data[1..33])?, + inner: secp256k1::SecretKey::from_byte_array(key)?, }) } + + /// Returns a new private key with the negated secret value. + /// + /// The resulting key corresponds to the same x-only public key (identical x-coordinate) + /// but with the opposite y-coordinate parity. This is useful for ensuring compatibility + /// with specific public key formats and BIP-340 requirements. + #[inline] + pub fn negate(&self) -> Self { + PrivateKey { + compressed: self.compressed, + network: self.network, + inner: self.inner.negate(), + } + } } impl fmt::Display for PrivateKey { @@ -918,7 +949,9 @@ pub enum FromSliceError { InvalidLength(usize), } -internals::impl_from_infallible!(FromSliceError); +impl From for FromSliceError { + fn from(never: Infallible) -> Self { match never {} } +} impl fmt::Display for FromSliceError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { @@ -960,9 +993,13 @@ pub enum FromWifError { InvalidAddressVersion(InvalidAddressVersionError), /// A secp256k1 error. Secp256k1(secp256k1::Error), + /// Invalid WIF compression flag. 
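    // Context for the new variant below: `from_wif` above distinguishes a 33-byte payload (version
    // byte plus 32-byte key, uncompressed) from a 34-byte payload whose trailing byte must be 0x01
    // (compressed); any other trailing byte yields this error.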
+ InvalidWifCompressionFlag(InvalidWifCompressionFlagError), } -internals::impl_from_infallible!(FromWifError); +impl From for FromWifError { + fn from(never: Infallible) -> Self { match never {} } +} impl fmt::Display for FromWifError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { @@ -973,8 +1010,9 @@ impl fmt::Display for FromWifError { InvalidBase58PayloadLength(ref e) => write_err!(f, "decoded base58 data was an invalid length"; e), InvalidAddressVersion(ref e) => - write_err!(f, "decoded base58 data contained an invalid address version btye"; e), + write_err!(f, "decoded base58 data contained an invalid address version byte"; e), Secp256k1(ref e) => write_err!(f, "private key validation failed"; e), + InvalidWifCompressionFlag(ref e) => write_err!(f, "invalid WIF compression flag";e), } } } @@ -989,6 +1027,7 @@ impl std::error::Error for FromWifError { InvalidBase58PayloadLength(ref e) => Some(e), InvalidAddressVersion(ref e) => Some(e), Secp256k1(ref e) => Some(e), + InvalidWifCompressionFlag(ref e) => Some(e), } } } @@ -1011,25 +1050,33 @@ impl From for FromWifError { fn from(e: InvalidAddressVersionError) -> FromWifError { Self::InvalidAddressVersion(e) } } +impl From for FromWifError { + fn from(e: InvalidWifCompressionFlagError) -> FromWifError { + Self::InvalidWifCompressionFlag(e) + } +} + /// Error returned while constructing public key from string. #[derive(Debug, Clone, PartialEq, Eq)] pub enum ParsePublicKeyError { /// Error originated while parsing string. Encoding(FromSliceError), /// Hex decoding error. - InvalidChar(u8), + InvalidChar(hex::InvalidCharError), /// `PublicKey` hex should be 66 or 130 digits long. InvalidHexLength(usize), } -internals::impl_from_infallible!(ParsePublicKeyError); +impl From for ParsePublicKeyError { + fn from(never: Infallible) -> Self { match never {} } +} impl fmt::Display for ParsePublicKeyError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { use ParsePublicKeyError::*; - match self { - Encoding(e) => write_err!(f, "string error"; e), - InvalidChar(char) => write!(f, "hex error {}", char), + match *self { + Encoding(ref e) => write_err!(f, "string error"; e), + InvalidChar(ref e) => write_err!(f, "hex decoding"; e), InvalidHexLength(got) => write!(f, "pubkey string should be 66 or 130 digits long, got: {}", got), } @@ -1041,9 +1088,10 @@ impl std::error::Error for ParsePublicKeyError { fn source(&self) -> Option<&(dyn std::error::Error + 'static)> { use ParsePublicKeyError::*; - match self { - Encoding(e) => Some(e), - InvalidChar(_) | InvalidHexLength(_) => None, + match *self { + Encoding(ref e) => Some(e), + InvalidChar(ref e) => Some(e), + InvalidHexLength(_) => None, } } } @@ -1061,7 +1109,9 @@ pub enum ParseCompressedPublicKeyError { Hex(hex::HexToArrayError), } -internals::impl_from_infallible!(ParseCompressedPublicKeyError); +impl From for ParseCompressedPublicKeyError { + fn from(never: Infallible) -> Self { match never {} } +} impl fmt::Display for ParseCompressedPublicKeyError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { @@ -1093,14 +1143,14 @@ impl From for ParseCompressedPublicKeyError { fn from(e: hex::HexToArrayError) -> Self { Self::Hex(e) } } -/// Segwit public keys must always be compressed. +/// SegWit public keys must always be compressed. 
#[derive(Debug, Clone, PartialEq, Eq)] #[non_exhaustive] pub struct UncompressedPublicKeyError; impl fmt::Display for UncompressedPublicKeyError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - f.write_str("segwit public keys must always be compressed") + f.write_str("SegWit public keys must always be compressed") } } @@ -1151,13 +1201,43 @@ impl fmt::Display for InvalidAddressVersionError { #[cfg(feature = "std")] impl std::error::Error for InvalidAddressVersionError {} +/// Invalid compression flag for a WIF key +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct InvalidWifCompressionFlagError { + /// The invalid compression flag. + pub(crate) invalid: u8, +} + +impl InvalidWifCompressionFlagError { + /// Returns the invalid compression flag. + pub fn invalid_compression_flag(&self) -> u8 { self.invalid } +} + +impl fmt::Display for InvalidWifCompressionFlagError { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + write!(f, "invalid WIF compression flag. Expected a 0x01 byte at the end of the key but found: {}", self.invalid) + } +} + +#[cfg(feature = "std")] +impl std::error::Error for InvalidWifCompressionFlagError {} + #[cfg(test)] mod tests { use super::*; use crate::address::Address; #[test] - fn test_key_derivation() { + fn key_derivation() { + // mainnet compressed WIF with invalid compression flag. + let sk = PrivateKey::from_wif("L2x4uC2YgfFWZm9tF4pjDnVR6nJkheizFhEr2KvDNnTEmEqVzPJY"); + assert!(matches!( + sk, + Err(FromWifError::InvalidWifCompressionFlag(InvalidWifCompressionFlagError { + invalid: 49 + })) + )); + // testnet compressed let sk = PrivateKey::from_wif("cVt4o7BGAig1UXywgGSmARhxMdzP5qvQsxKkSsc1XEkw3tDTQFpy").unwrap(); @@ -1204,7 +1284,7 @@ mod tests { } #[test] - fn test_pubkey_hash() { + fn pubkey_hash() { let pk = "032e58afe51f9ed8ad3cc7897f634d881fdbe49a81564629ded8156bebd2ffd1af" .parse::() .unwrap(); @@ -1215,7 +1295,7 @@ mod tests { } #[test] - fn test_wpubkey_hash() { + fn wpubkey_hash() { let pk = "032e58afe51f9ed8ad3cc7897f634d881fdbe49a81564629ded8156bebd2ffd1af" .parse::() .unwrap(); @@ -1229,7 +1309,7 @@ mod tests { #[test] #[cfg(feature = "serde")] - fn test_key_serde() { + fn skey_serde() { use serde_test::{assert_tokens, Configure, Token}; static KEY_WIF: &str = "cVt4o7BGAig1UXywgGSmARhxMdzP5qvQsxKkSsc1XEkw3tDTQFpy"; @@ -1520,12 +1600,31 @@ mod tests { assert_eq!(s.len(), 130); let res = s.parse::(); assert!(res.is_err()); - assert_eq!(res.unwrap_err(), ParsePublicKeyError::InvalidChar(103)); + if let Err(ParsePublicKeyError::InvalidChar(err)) = res { + assert_eq!(err.invalid_char(), b'g'); + assert_eq!(err.pos(), 129); + } else { + panic!("Expected Invalid char error"); + } let s = "032e58afe51f9ed8ad3cc7897f634d881fdbe49a81564629ded8156bebd2ffd1ag"; assert_eq!(s.len(), 66); let res = s.parse::(); assert!(res.is_err()); - assert_eq!(res.unwrap_err(), ParsePublicKeyError::InvalidChar(103)); + if let Err(ParsePublicKeyError::InvalidChar(err)) = res { + assert_eq!(err.invalid_char(), b'g'); + assert_eq!(err.pos(), 65); + } else { + panic!("Expected Invalid char error"); + } + } + + #[test] + #[allow(deprecated)] // tests the deprecated function + #[allow(deprecated_in_future)] + fn invalid_private_key_len() { + use crate::Network; + assert!(PrivateKey::from_slice(&[1u8; 31], Network::Regtest).is_err()); + assert!(PrivateKey::from_slice(&[1u8; 33], Network::Regtest).is_err()); } } diff --git a/bitcoin/src/crypto/sighash.rs b/bitcoin/src/crypto/sighash.rs index d5651de35f..8ef0047d4e 100644 --- a/bitcoin/src/crypto/sighash.rs +++ 
b/bitcoin/src/crypto/sighash.rs @@ -11,6 +11,7 @@ //! handle its complexity efficiently. Computing these hashes is as simple as creating //! [`SighashCache`] and calling its methods. +use core::convert::Infallible; use core::{fmt, str}; #[cfg(feature = "arbitrary")] @@ -21,11 +22,11 @@ use io::Write; use crate::address::script_pubkey::ScriptExt as _; use crate::consensus::{encode, Encodable}; -use crate::prelude::{Borrow, BorrowMut, String, ToOwned, Vec}; +use crate::prelude::{Borrow, BorrowMut, String, ToOwned}; use crate::taproot::{LeafVersion, TapLeafHash, TapLeafTag, TAPROOT_ANNEX_PREFIX}; use crate::transaction::TransactionExt as _; use crate::witness::Witness; -use crate::{transaction, Amount, Script, ScriptBuf, Sequence, Transaction, TxIn, TxOut}; +use crate::{transaction, Amount, Script, Sequence, Transaction, TxOut}; /// Used for signature hash for invalid use of SIGHASH_SINGLE. #[rustfmt::skip] @@ -51,7 +52,7 @@ hash_newtype! { #[hash_newtype(forward)] pub struct LegacySighash(sha256d::Hash); - /// Hash of a transaction according to the segwit version 0 signature algorithm. + /// Hash of a transaction according to the SegWit version 0 signature algorithm. #[hash_newtype(forward)] pub struct SegwitV0Sighash(sha256d::Hash); } @@ -92,7 +93,7 @@ hashes::impl_serde_for_newtype!(TapSighash); impl_message_from_hash!(TapSighash); -/// Efficiently calculates signature hash message for legacy, segwit and Taproot inputs. +/// Efficiently calculates signature hash message for legacy, SegWit and Taproot inputs. #[derive(Debug)] pub struct SighashCache> { /// Access to transaction required for transaction introspection. Moreover, type @@ -100,17 +101,17 @@ pub struct SighashCache> { /// the latter in particular is necessary for [`SighashCache::witness_mut`]. tx: T, - /// Common cache for Taproot and segwit inputs, `None` for legacy inputs. + /// Common cache for Taproot and SegWit inputs, `None` for legacy inputs. common_cache: Option, - /// Cache for segwit v0 inputs (the result of another round of sha256 on `common_cache`). + /// Cache for SegWit v0 inputs (the result of another round of sha256 on `common_cache`). segwit_cache: Option, /// Cache for Taproot v1 inputs. taproot_cache: Option, } -/// Common values cached between segwit and Taproot inputs. +/// Common values cached between SegWit and Taproot inputs. #[derive(Debug)] struct CommonCache { prevouts: sha256::Hash, @@ -121,7 +122,7 @@ struct CommonCache { outputs: sha256::Hash, } -/// Values cached for segwit inputs, equivalent to [`CommonCache`] plus another round of `sha256`. +/// Values cached for SegWit inputs, equivalent to [`CommonCache`] plus another round of `sha256`. #[derive(Debug)] struct SegwitCache { prevouts: sha256d::Hash, @@ -302,7 +303,9 @@ pub enum PrevoutsIndexError { InvalidAllIndex, } -internals::impl_from_infallible!(PrevoutsIndexError); +impl From for PrevoutsIndexError { + fn from(never: Infallible) -> Self { match never {} } +} impl fmt::Display for PrevoutsIndexError { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { @@ -426,6 +429,14 @@ impl EcdsaSighashType { } } + /// Checks if the sighash type is [`Self::Single`] or [`Self::SinglePlusAnyoneCanPay`]. + /// + /// This matches Bitcoin Core's behavior where SIGHASH_SINGLE bug check is based on the base + /// type (after masking with 0x1f), regardless of the ANYONECANPAY flag. 
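    // For instance, `Single | ANYONECANPAY` is 0x83 and 0x83 & 0x1f == 0x03, the base `Single`
    // value, so both variants are matched by the check below.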
+ /// + /// See: + pub fn is_single(&self) -> bool { matches!(self, Self::Single | Self::SinglePlusAnyoneCanPay) } + /// Constructs a new [`EcdsaSighashType`] from a raw `u32`. /// /// **Note**: this replicates consensus behaviour, for current standardness rules correctness @@ -859,8 +870,8 @@ impl> SighashCache { /// Computes the BIP143 sighash to spend a p2wpkh transaction for any flag type. /// - /// `script_pubkey` is the `scriptPubkey` (native segwit) of the spend transaction - /// ([`TxOut::script_pubkey`]) or the `redeemScript` (wrapped segwit). + /// `script_pubkey` is the `scriptPubkey` (native SegWit) of the spend transaction + /// ([`TxOut::script_pubkey`]) or the `redeemScript` (wrapped SegWit). pub fn p2wpkh_signature_hash( &mut self, input_index: usize, @@ -883,6 +894,9 @@ impl> SighashCache { } /// Computes the BIP143 sighash to spend a p2wsh transaction for any flag type. + /// + /// `witness_script` is the script that goes into the [`Witness`], + /// not the one that goes into `script_pubkey` of a [`TxOut`]. pub fn p2wsh_signature_hash( &mut self, input_index: usize, @@ -956,63 +970,60 @@ impl> SighashCache { script_pubkey: &Script, sighash_type: u32, ) -> Result<(), io::Error> { + use crate::consensus::encode::WriteExt; + let (sighash, anyone_can_pay) = EcdsaSighashType::from_consensus(sighash_type).split_anyonecanpay_flag(); - // Build tx to sign - let mut tx = Transaction { - version: self_.version, - lock_time: self_.lock_time, - input: vec![], - output: vec![], - }; + self_.version.consensus_encode(writer)?; // Add all inputs necessary.. if anyone_can_pay { - tx.input = vec![TxIn { - previous_output: self_.input[input_index].previous_output, - script_sig: script_pubkey.to_owned(), - sequence: self_.input[input_index].sequence, - witness: Witness::default(), - }]; + writer.emit_compact_size(1u8)?; + self_.input[input_index].previous_output.consensus_encode(writer)?; + script_pubkey.consensus_encode(writer)?; + self_.input[input_index].sequence.consensus_encode(writer)?; } else { - tx.input = Vec::with_capacity(self_.input.len()); + writer.emit_compact_size(self_.input.len())?; for (n, input) in self_.input.iter().enumerate() { - tx.input.push(TxIn { - previous_output: input.previous_output, - script_sig: if n == input_index { - script_pubkey.to_owned() - } else { - ScriptBuf::new() - }, - sequence: if n != input_index - && (sighash == EcdsaSighashType::Single - || sighash == EcdsaSighashType::None) - { - Sequence::ZERO - } else { - input.sequence - }, - witness: Witness::default(), - }); + input.previous_output.consensus_encode(writer)?; + if n == input_index { + script_pubkey.consensus_encode(writer)?; + } else { + Script::new().consensus_encode(writer)?; + } + if n != input_index + && (sighash == EcdsaSighashType::Single + || sighash == EcdsaSighashType::None) + { + Sequence::ZERO.consensus_encode(writer)?; + } else { + input.sequence.consensus_encode(writer)?; + } } } // ..then all outputs - tx.output = match sighash { - EcdsaSighashType::All => self_.output.clone(), + match sighash { + EcdsaSighashType::All => { + self_.output.consensus_encode(writer)?; + } EcdsaSighashType::Single => { - let output_iter = self_ - .output - .iter() - .take(input_index + 1) // sign all outputs up to and including this one, but erase - .enumerate() // all of them except for this one - .map(|(n, out)| if n == input_index { out.clone() } else { TxOut::NULL }); - output_iter.collect() + // sign all outputs up to and including this one, but erase + // all of them except for this one + 
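                    // `count` placeholder outputs are written followed by the one real output
                    // being signed, mirroring the `.take(input_index + 1)` behaviour of the
                    // removed iterator code when `input_index` is in range. Each placeholder is
                    // nine bytes: an all-ones 8-byte amount plus a zero-length script.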
let count = input_index.min(self_.output.len() - 1); + writer.emit_compact_size(count + 1)?; + for _ in 0..count { + // consensus encoding of the "NULL txout" - max amount, empty script_pubkey + writer + .write_all(&[0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x00])?; + } + self_.output[count].consensus_encode(writer)?; + } + EcdsaSighashType::None => { + writer.emit_compact_size(0u8)?; } - EcdsaSighashType::None => vec![], _ => unreachable!(), }; - // hash the result - tx.consensus_encode(writer)?; + self_.lock_time.consensus_encode(writer)?; sighash_type.to_le_bytes().consensus_encode(writer)?; Ok(()) } @@ -1150,9 +1161,9 @@ impl> SighashCache { /// *sighasher.witness_mut(input_index).unwrap() = Witness::p2wpkh(&signature, &pk); /// ``` /// - /// For full signing code see the [`segwit v0`] and [`taproot`] signing examples. + /// For full signing code see the [`SegWit v0`] and [`taproot`] signing examples. /// - /// [`segwit v0`]: + /// [`SegWit v0`]: /// [`taproot`]: pub fn witness_mut(&mut self, input_index: usize) -> Option<&mut Witness> { self.tx.borrow_mut().input.get_mut(input_index).map(|i| &mut i.witness) @@ -1203,7 +1214,9 @@ pub enum TaprootError { InvalidSighashType(u32), } -internals::impl_from_infallible!(TaprootError); +impl From for TaprootError { + fn from(never: Infallible) -> Self { match never {} } +} impl fmt::Display for TaprootError { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { @@ -1262,7 +1275,9 @@ pub enum P2wpkhError { NotP2wpkhScript, } -internals::impl_from_infallible!(P2wpkhError); +impl From for P2wpkhError { + fn from(never: Infallible) -> Self { match never {} } +} impl From for P2wpkhError { fn from(value: transaction::InputsIndexError) -> Self { P2wpkhError::Sighash(value) } @@ -1273,7 +1288,7 @@ impl fmt::Display for P2wpkhError { use P2wpkhError::*; match *self { - Sighash(ref e) => write_err!(f, "error encoding segwit v0 signing data"; e), + Sighash(ref e) => write_err!(f, "error encoding SegWit v0 signing data"; e), NotP2wpkhScript => write!(f, "script is not a script pubkey for a p2wpkh output"), } } @@ -1327,7 +1342,9 @@ pub enum AnnexError { IncorrectPrefix(u8), } -internals::impl_from_infallible!(AnnexError); +impl From for AnnexError { + fn from(never: Infallible) -> Self { match never {} } +} impl fmt::Display for AnnexError { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { @@ -1354,7 +1371,7 @@ impl std::error::Error for AnnexError { fn is_invalid_use_of_sighash_single(sighash: u32, input_index: usize, outputs_len: usize) -> bool { let ty = EcdsaSighashType::from_consensus(sighash); - ty == EcdsaSighashType::Single && input_index >= outputs_len + ty.is_single() && input_index >= outputs_len } /// Result of [`SighashCache::legacy_encode_signing_data_to`]. @@ -1401,6 +1418,7 @@ impl EncodeSigningDataResult { /// // use the hash from `writer` /// } /// ``` + #[allow(clippy::wrong_self_convention)] // Consume self so we can take the error. pub fn is_sighash_single_bug(self) -> Result { match self { EncodeSigningDataResult::SighashSingleBug => Ok(true), @@ -1437,12 +1455,14 @@ pub enum SigningDataError { Sighash(E), } -internals::impl_from_infallible!(SigningDataError); +impl From for SigningDataError { + fn from(never: Infallible) -> Self { match never {} } +} impl SigningDataError { - /// Returns the sighash variant, panicking if it's IO. + /// Returns the sighash variant, panicking if it's I/O. /// - /// This is used when encoding to hash engine when we know that IO doesn't fail. 
+ /// This is used when encoding to hash engine when we know that I/O doesn't fail. fn unwrap_sighash(self) -> E { match self { Self::Sighash(error) => error, @@ -1517,28 +1537,35 @@ mod tests { use super::*; use crate::consensus::deserialize; use crate::locktime::absolute; - use crate::script::ScriptBufExt as _; + use crate::script::{ScriptBuf, ScriptBufExt as _}; + use crate::TxIn; extern crate serde_json; + const DUMMY_TXOUT: TxOut = TxOut { value: Amount::MIN, script_pubkey: ScriptBuf::new() }; + #[test] fn sighash_single_bug() { - const SIGHASH_SINGLE: u32 = 3; - // We need a tx with more inputs than outputs. let tx = Transaction { version: transaction::Version::ONE, lock_time: absolute::LockTime::ZERO, input: vec![TxIn::EMPTY_COINBASE, TxIn::EMPTY_COINBASE], - output: vec![TxOut::NULL], + output: vec![DUMMY_TXOUT], }; let script = ScriptBuf::new(); let cache = SighashCache::new(&tx); - let got = cache.legacy_signature_hash(1, &script, SIGHASH_SINGLE).expect("sighash"); + let sighash_single = 3; + let got = cache.legacy_signature_hash(1, &script, sighash_single).expect("sighash"); let want = LegacySighash::from_byte_array(UINT256_ONE); + assert_eq!(got, want); - assert_eq!(got, want) + // https://github.com/rust-bitcoin/rust-bitcoin/issues/4112 + let sighash_single = 131; + let got = cache.legacy_signature_hash(1, &script, sighash_single).expect("sighash"); + let want = LegacySighash::from_byte_array(UINT256_ONE); + assert_eq!(got, want); } #[test] @@ -1585,7 +1612,7 @@ mod tests { } #[test] - fn test_tap_sighash_hash() { + fn tap_sighash_hash() { let bytes = hex!("00011b96877db45ffa23b307e9f0ac87b80ef9a80b4c5f0db3fbe734422453e83cc5576f3d542c5d4898fb2b696c15d43332534a7c1d1255fda38993545882df92c3e353ff6d36fbfadc4d168452afd8467f02fe53d71714fcea5dfe2ea759bd00185c4cb02bc76d42620393ca358a1a713f4997f9fc222911890afb3fe56c6a19b202df7bffdcfad08003821294279043746631b00e2dc5e52a111e213bbfe6ef09a19428d418dab0d50000000000"); let expected = hex!("04e808aad07a40b3767a1442fead79af6ef7e7c9316d82dec409bb31e77699b0"); let mut enc = sha256t::Hash::::engine(); @@ -1595,7 +1622,7 @@ mod tests { } #[test] - fn test_sighashes_keyspending() { + fn sighashes_keyspending() { // following test case has been taken from Bitcoin Core test framework test_taproot_sighash( @@ -1656,7 +1683,7 @@ mod tests { } #[test] - fn test_sighashes_with_annex() { + fn sighashes_with_annex() { test_taproot_sighash( "0200000001df8123752e8f37d132c4e9f1ff7e4f9b986ade9211267e9ebd5fd22a5e718dec6d01000000ce4023b903cb7b23000000000017a914a18b36ea7a094db2f4940fc09edf154e86de7bd787580200000000000017a914afd0d512a2c5c2b40e25669e9cc460303c325b8b87580200000000000017a914a18b36ea7a094db2f4940fc09edf154e86de7bd787f6020000", "01ea49260000000000225120ab5e9800806bf18cb246edcf5fe63441208fe955a4b5a35bbff65f5db622a010", @@ -1670,7 +1697,7 @@ mod tests { } #[test] - fn test_sighashes_with_script_path() { + fn sighashes_with_script_path() { test_taproot_sighash( "020000000189fc651483f9296b906455dd939813bf086b1bbe7c77635e157c8e14ae29062195010000004445b5c7044561320000000000160014331414dbdada7fb578f700f38fb69995fc9b5ab958020000000000001976a914268db0a8104cc6d8afd91233cc8b3d1ace8ac3ef88ac580200000000000017a914ec00dcb368d6a693e11986d265f659d2f59e8be2875802000000000000160014c715799a49a0bae3956df9c17cb4440a673ac0df6f010000", "011bec34000000000022512028055142ea437db73382e991861446040b61dd2185c4891d7daf6893d79f7182", @@ -1684,7 +1711,7 @@ mod tests { } #[test] - fn test_sighashes_with_script_path_raw_hash() { + fn sighashes_with_script_path_raw_hash() { 
test_taproot_sighash( "020000000189fc651483f9296b906455dd939813bf086b1bbe7c77635e157c8e14ae29062195010000004445b5c7044561320000000000160014331414dbdada7fb578f700f38fb69995fc9b5ab958020000000000001976a914268db0a8104cc6d8afd91233cc8b3d1ace8ac3ef88ac580200000000000017a914ec00dcb368d6a693e11986d265f659d2f59e8be2875802000000000000160014c715799a49a0bae3956df9c17cb4440a673ac0df6f010000", "011bec34000000000022512028055142ea437db73382e991861446040b61dd2185c4891d7daf6893d79f7182", @@ -1698,7 +1725,7 @@ mod tests { } #[test] - fn test_sighashes_with_annex_and_script() { + fn sighashes_with_annex_and_script() { test_taproot_sighash( "020000000132fb72cb8fba496755f027a9743e2d698c831fdb8304e4d1a346ac92cbf51acba50100000026bdc7df044aad34000000000017a9144fa2554ed6174586854fa3bc01de58dcf33567d0875802000000000000160014950367e1e62cdf240b35b883fc2f5e39f0eb9ab95802000000000000160014950367e1e62cdf240b35b883fc2f5e39f0eb9ab958020000000000001600141b31217d48ccc8760dcc0710fade5866d628e733a02d5122", "011458360000000000225120a7baec3fb9f84614e3899fcc010c638f80f13539344120e1f4d8b68a9a011a13", @@ -1713,7 +1740,7 @@ mod tests { #[test] #[rustfmt::skip] // Allow long function call `taproot_signature_hash`. - fn test_sighash_errors() { + fn sighash_errors() { use crate::transaction::{IndexOutOfBoundsError, InputsIndexError}; let dumb_tx = Transaction { @@ -1731,13 +1758,13 @@ mod tests { c.taproot_signature_hash(0, &empty_prevouts, None, None, TapSighashType::All), Err(TaprootError::PrevoutsSize(PrevoutsSizeError)) ); - let two = [TxOut::NULL, TxOut::NULL]; + let two = [DUMMY_TXOUT, DUMMY_TXOUT]; let too_many_prevouts = Prevouts::All(&two); assert_eq!( c.taproot_signature_hash(0, &too_many_prevouts, None, None, TapSighashType::All), Err(TaprootError::PrevoutsSize(PrevoutsSizeError)) ); - let tx_out = TxOut::NULL; + let tx_out = DUMMY_TXOUT; let prevout = Prevouts::One(1, &tx_out); assert_eq!( c.taproot_signature_hash(0, &prevout, None, None, TapSighashType::All), @@ -1772,7 +1799,7 @@ mod tests { } #[test] - fn test_annex_errors() { + fn annex_errors() { assert_eq!(Annex::new(&[]), Err(AnnexError::Empty)); assert_eq!(Annex::new(&[0x51]), Err(AnnexError::IncorrectPrefix(0x51))); assert_eq!(Annex::new(&[0x51, 0x50]), Err(AnnexError::IncorrectPrefix(0x51))); @@ -1835,8 +1862,6 @@ mod tests { fn bip_341_sighash_tests() { use hex::DisplayHex; - use crate::taproot::TapTweakHashExt as _; - fn sighash_deser_numeric<'de, D>(deserializer: D) -> Result where D: serde::Deserializer<'de>, @@ -1862,6 +1887,7 @@ mod tests { #[serde(rename = "scriptPubKey")] script_pubkey: ScriptBuf, #[serde(rename = "amountSats")] + #[serde(with = "crate::amount::serde::as_sat")] value: Amount, } @@ -1999,7 +2025,8 @@ mod tests { .unwrap(); let msg = secp256k1::Message::from(sighash); - let key_spend_sig = secp.sign_schnorr_with_aux_rand(&msg, &tweaked_keypair, &[0u8; 32]); + let key_spend_sig = + secp.sign_schnorr_with_aux_rand(msg.as_ref(), &tweaked_keypair, &[0u8; 32]); assert_eq!(expected.internal_pubkey, internal_key); assert_eq!(expected.tweak, tweak); @@ -2062,7 +2089,7 @@ mod tests { ).unwrap(); let spk = ScriptBuf::from_hex("00141d0f172a0ecb48aee1be1f2687d2963ae33f71a1").unwrap(); - let value = Amount::from_sat(600_000_000); + let value = Amount::from_sat_u32(600_000_000); let mut cache = SighashCache::new(&tx); assert_eq!( @@ -2103,7 +2130,7 @@ mod tests { let redeem_script = ScriptBuf::from_hex("001479091972186c449eb1ded22b78e40d009bdf0089").unwrap(); - let value = Amount::from_sat(1_000_000_000); + let value = 
Amount::from_sat_u32(1_000_000_000); let mut cache = SighashCache::new(&tx); assert_eq!( @@ -2153,7 +2180,7 @@ mod tests { ) .unwrap(); - let value = Amount::from_sat(987_654_321); + let value = Amount::from_sat_u32(987_654_321); (tx, witness_script, value) } diff --git a/bitcoin/src/crypto/taproot.rs b/bitcoin/src/crypto/taproot.rs index bfa857aa03..fe66028b87 100644 --- a/bitcoin/src/crypto/taproot.rs +++ b/bitcoin/src/crypto/taproot.rs @@ -4,11 +4,13 @@ //! //! This module provides Taproot keys used in Bitcoin (including reexporting secp256k1 keys). +use core::convert::Infallible; use core::fmt; #[cfg(feature = "arbitrary")] use arbitrary::{Arbitrary, Unstructured}; use internals::write_err; +use internals::array::ArrayExt; use io::Write; use crate::prelude::Vec; @@ -28,19 +30,17 @@ pub struct Signature { impl Signature { /// Deserializes the signature from a slice. pub fn from_slice(sl: &[u8]) -> Result { - match sl.len() { - 64 => { - // default type - let signature = secp256k1::schnorr::Signature::from_slice(sl)?; - Ok(Signature { signature, sighash_type: TapSighashType::Default }) - } - 65 => { - let (sighash_type, signature) = sl.split_last().expect("slice len checked == 65"); - let sighash_type = TapSighashType::from_consensus_u8(*sighash_type)?; - let signature = secp256k1::schnorr::Signature::from_slice(signature)?; - Ok(Signature { signature, sighash_type }) - } - len => Err(SigFromSliceError::InvalidSignatureSize(len)), + if let Ok(signature) = <[u8; 64]>::try_from(sl) { + // default type + let signature = secp256k1::schnorr::Signature::from_byte_array(signature); + Ok(Signature { signature, sighash_type: TapSighashType::Default }) + } else if let Ok(signature) = <[u8; 65]>::try_from(sl) { + let (sighash_type, signature) = signature.split_last(); + let sighash_type = TapSighashType::from_consensus_u8(*sighash_type)?; + let signature = secp256k1::schnorr::Signature::from_byte_array(*signature); + Ok(Signature { signature, sighash_type }) + } else { + Err(SigFromSliceError::InvalidSignatureSize(sl.len())) } } @@ -49,9 +49,8 @@ impl Signature { /// Note: this allocates on the heap, prefer [`serialize`](Self::serialize) if vec is not needed. pub fn to_vec(self) -> Vec { let mut ser_sig = self.signature.as_ref().to_vec(); - if self.sighash_type == TapSighashType::Default { - // default sighash type, don't add extra sighash byte - } else { + // If default sighash type, don't add extra sighash byte + if self.sighash_type != TapSighashType::Default { ser_sig.push(self.sighash_type as u8); } ser_sig @@ -70,7 +69,7 @@ impl Signature { /// You can get a slice from it using deref coercions or turn it into an iterator. 
pub fn serialize(self) -> SerializedSignature { let mut buf = [0; serialized_signature::MAX_LEN]; - let ser_sig = self.signature.serialize(); + let ser_sig = self.signature.to_byte_array(); buf[..64].copy_from_slice(&ser_sig); let len = if self.sighash_type == TapSighashType::Default { // default sighash type, don't add extra sighash byte @@ -97,7 +96,9 @@ pub enum SigFromSliceError { InvalidSignatureSize(usize), } -internals::impl_from_infallible!(SigFromSliceError); +impl From for SigFromSliceError { + fn from(never: Infallible) -> Self { match never {} } +} impl fmt::Display for SigFromSliceError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { @@ -138,7 +139,7 @@ impl<'a> Arbitrary<'a> for Signature { let arbitrary_bytes: [u8; secp256k1::constants::SCHNORR_SIGNATURE_SIZE] = u.arbitrary()?; Ok(Signature { - signature: secp256k1::schnorr::Signature::from_slice(&arbitrary_bytes).unwrap(), + signature: secp256k1::schnorr::Signature::from_byte_array(arbitrary_bytes), sighash_type: TapSighashType::arbitrary(u)?, }) } diff --git a/bitcoin/src/hash_types.rs b/bitcoin/src/hash_types.rs index 10a995de68..e564999ca2 100644 --- a/bitcoin/src/hash_types.rs +++ b/bitcoin/src/hash_types.rs @@ -13,10 +13,9 @@ pub use crate::{ #[cfg(test)] mod tests { use super::*; - use crate::{ - LegacySighash, PubkeyHash, ScriptHash, SegwitV0Sighash, TapSighash, WPubkeyHash, - WScriptHash, XKeyIdentifier, - }; + use crate::key::{PubkeyHash, WPubkeyHash}; + use crate::script::{ScriptHash, WScriptHash}; + use crate::{LegacySighash, SegwitV0Sighash, TapSighash, XKeyIdentifier}; #[rustfmt::skip] /// sha256d of the empty string diff --git a/bitcoin/src/internal_macros.rs b/bitcoin/src/internal_macros.rs index dfecac978d..9b97bb9bbc 100644 --- a/bitcoin/src/internal_macros.rs +++ b/bitcoin/src/internal_macros.rs @@ -303,7 +303,7 @@ macro_rules! impl_array_newtype { /// Copies the underlying bytes into a new `Vec`. #[inline] - pub fn to_vec(&self) -> alloc::vec::Vec { self.0.to_vec() } + pub fn to_vec(self) -> alloc::vec::Vec { self.0.to_vec() } /// Returns a slice of the underlying bytes. #[inline] @@ -312,7 +312,7 @@ macro_rules! impl_array_newtype { /// Copies the underlying bytes into a new `Vec`. #[inline] #[deprecated(since = "TBD", note = "use to_vec instead")] - pub fn to_bytes(&self) -> alloc::vec::Vec { self.to_vec() } + pub fn to_bytes(self) -> alloc::vec::Vec { self.to_vec() } /// Converts the object to a raw pointer. #[inline] diff --git a/bitcoin/src/lib.rs b/bitcoin/src/lib.rs index 7a73dc2243..ed14a8bad0 100644 --- a/bitcoin/src/lib.rs +++ b/bitcoin/src/lib.rs @@ -16,7 +16,6 @@ //! * `base64` (dependency) - enables encoding of PSBTs and message signatures. //! * `bitcoinconsensus` (dependency) - enables validating scripts and transactions. //! * `default` - enables `std` and `secp-recovery`. -//! * `ordered` (dependency) - adds implementations of `ArbitraryOrd` to some structs. //! * `rand` (transitive dependency) - makes it more convenient to generate random values. //! * `rand-std` - same as `rand` but also enables `std` here and in `secp256k1`. //! * `serde` (dependency) - implements `serde`-based serialization and deserialization. @@ -39,6 +38,7 @@ // Exclude lints we don't think are valuable. #![allow(clippy::needless_question_mark)] // https://github.com/rust-bitcoin/rust-bitcoin/pull/2134 #![allow(clippy::manual_range_contains)] // More readable than clippy's format. +#![allow(clippy::incompatible_msrv)] // Has FPs and we're testing it which is more reliable anyway. 
// We only support machines with index size of 4 bytes or more. // @@ -61,13 +61,16 @@ extern crate test; #[macro_use] extern crate alloc; -#[cfg(feature = "base64")] /// Encodes and decodes base64 as bytes or utf8. +#[cfg(feature = "base64")] pub extern crate base64; /// Bitcoin base58 encoding and decoding. pub extern crate base58; +/// Re-export the `bech32` crate. +pub extern crate bech32; + /// Rust implementation of cryptographic hash function algorithms. pub extern crate hashes; @@ -77,11 +80,9 @@ pub extern crate hex; /// Re-export the `bitcoin-io` crate. pub extern crate io; -/// Re-export the `ordered` crate. -#[cfg(feature = "ordered")] -pub extern crate ordered; - -/// Rust wrapper library for Pieter Wuille's libsecp256k1. Implements ECDSA and BIP 340 signatures +/// Re-export the `rust-secp256k1` crate. +/// +/// Rust wrapper library for Pieter Wuille's libsecp256k1. Implements ECDSA and BIP-340 signatures /// for the SECG elliptic curve group secp256k1 and related utilities. pub extern crate secp256k1; @@ -120,14 +121,14 @@ pub mod taproot; pub use primitives::{ block::{ Block, BlockHash, Checked as BlockChecked, Header as BlockHeader, - Unchecked as BockUnchecked, Validation as BlockValidation, WitnessCommitment, + Unchecked as BlockUnchecked, Validation as BlockValidation, Version as BlockVersion, + WitnessCommitment, }, merkle_tree::{TxMerkleNode, WitnessMerkleNode}, - opcodes::Opcode, pow::CompactTarget, // No `pow` module outside of `primitives`. script::{Script, ScriptBuf}, sequence::{self, Sequence}, // No `sequence` module outside of `primitives`. - transaction::{OutPoint, Transaction, TxIn, TxOut, Txid, Wtxid}, + transaction::{OutPoint, Transaction, TxIn, TxOut, Txid, Version as TransactionVersion, Wtxid}, witness::Witness, }; #[doc(inline)] @@ -135,6 +136,7 @@ pub use units::{ amount::{Amount, Denomination, SignedAmount}, block::{BlockHeight, BlockInterval}, fee_rate::FeeRate, + time::{self, BlockTime}, weight::Weight, }; @@ -144,9 +146,7 @@ pub use crate::{ bip158::{FilterHash, FilterHeader}, bip32::XKeyIdentifier, crypto::ecdsa, - crypto::key::{ - self, CompressedPublicKey, PrivateKey, PubkeyHash, PublicKey, WPubkeyHash, XOnlyPublicKey, - }, + crypto::key::{self, CompressedPublicKey, PrivateKey, PublicKey, XOnlyPublicKey}, crypto::sighash::{self, LegacySighash, SegwitV0Sighash, TapSighash, TapSighashTag}, merkle_tree::MerkleBlock, network::params::{self, Params}, @@ -161,13 +161,11 @@ pub use crate::{ pub use crate::{ // Also, re-export types and modules from `blockdata` that don't come from `primitives`. blockdata::locktime::{absolute, relative}, + blockdata::opcodes::{self, Opcode}, blockdata::script::witness_program::{self, WitnessProgram}, blockdata::script::witness_version::{self, WitnessVersion}, - blockdata::script::{ScriptHash, WScriptHash}, // TODO: Move these down below after they are in primitives. // These modules also re-export all the respective `primitives` types. - blockdata::{ - block, constants, fee_rate, locktime, opcodes, script, transaction, weight, witness, - }, + blockdata::{block, constants, fee_rate, locktime, script, transaction, weight, witness}, }; #[rustfmt::skip] @@ -196,10 +194,10 @@ mod prelude { pub mod amount { //! Bitcoin amounts. //! - //! This module mainly introduces the [Amount] and [SignedAmount] types. + //! This module mainly introduces the [`Amount`] and [`SignedAmount`] types. //! We refer to the documentation on the types for more information. 
- use crate::consensus::{encode, Decodable, Encodable}; + use crate::consensus::{self, encode, Decodable, Encodable}; use crate::io::{BufRead, Write}; #[rustfmt::skip] // Keep public re-exports separate. @@ -216,7 +214,9 @@ pub mod amount { impl Decodable for Amount { #[inline] fn consensus_decode(r: &mut R) -> Result { - Ok(Amount::from_sat(Decodable::consensus_decode(r)?)) + Amount::from_sat(Decodable::consensus_decode(r)?).map_err(|_| { + consensus::parse_failed_error("amount is greater than Amount::MAX_MONEY") + }) } } @@ -234,8 +234,8 @@ pub mod parse { #[doc(inline)] pub use units::parse::{ hex_check_unprefixed, hex_remove_prefix, hex_u128, hex_u128_unchecked, hex_u128_unprefixed, - hex_u32, hex_u32_unchecked, hex_u32_unprefixed, int, ContainsPrefixError, - MissingPrefixError, ParseIntError, PrefixedHexError, UnprefixedHexError, + hex_u32, hex_u32_unchecked, hex_u32_unprefixed, int_from_box, int_from_str, + int_from_string, ParseIntError, PrefixedHexError, UnprefixedHexError, }; } diff --git a/bitcoin/src/merkle_tree/block.rs b/bitcoin/src/merkle_tree/block.rs index 4410c8a942..e39fab19af 100644 --- a/bitcoin/src/merkle_tree/block.rs +++ b/bitcoin/src/merkle_tree/block.rs @@ -9,6 +9,7 @@ //! //! Support proofs that transaction(s) belong to a block. +use core::convert::Infallible; use core::fmt; use internals::ToU64 as _; @@ -146,7 +147,7 @@ impl Decodable for MerkleBlock { /// stored, and its children are not explored further. Otherwise, no hash is /// stored, but we recurse into both (or the only) child branch. During /// decoding, the same depth-first traversal is performed, consuming bits and -/// hashes as they written during encoding. +/// hashes as they are written during encoding. /// /// The serialization is fixed and provides a hard guarantee about the /// encoded size: @@ -473,7 +474,9 @@ pub enum MerkleBlockError { IdenticalHashesFound, } -internals::impl_from_infallible!(MerkleBlockError); +impl From for MerkleBlockError { + fn from(never: Infallible) -> Self { match never {} } +} impl fmt::Display for MerkleBlockError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { diff --git a/bitcoin/src/merkle_tree/mod.rs b/bitcoin/src/merkle_tree/mod.rs index b656ce7e78..82801fdd43 100644 --- a/bitcoin/src/merkle_tree/mod.rs +++ b/bitcoin/src/merkle_tree/mod.rs @@ -41,7 +41,7 @@ impl_hashencode!(WitnessMerkleNode); /// Other Merkle trees in Bitcoin, such as those used in Taproot commitments, /// do not use this algorithm and cannot use this trait. pub trait MerkleNode: Copy { - /// The hash (TXID or WTXID) of a transaciton in the tree. + /// The hash (TXID or WTXID) of a transaction in the tree. type Leaf: TxIdentifier; /// Convert a hash to a leaf node of the tree. diff --git a/bitcoin/src/network/params.rs b/bitcoin/src/network/params.rs index eecc42cb86..e322009be9 100644 --- a/bitcoin/src/network/params.rs +++ b/bitcoin/src/network/params.rs @@ -113,7 +113,7 @@ pub struct Params { /// Expected amount of time to mine one block. pub pow_target_spacing: u64, /// Difficulty recalculation interval. - pub pow_target_timespan: u64, + pub pow_target_timespan: u32, /// Determines whether minimal difficulty may be used for blocks or not. pub allow_min_difficulty_blocks: bool, /// Determines whether retargeting is disabled for this network or not. @@ -261,7 +261,7 @@ impl Params { /// Calculates the number of blocks between difficulty adjustments. 
pub fn difficulty_adjustment_interval(&self) -> u64 { - self.pow_target_timespan / self.pow_target_spacing + u64::from(self.pow_target_timespan) / self.pow_target_spacing } } diff --git a/bitcoin/src/p2p/address.rs index d761d95437..accb7cc345 100644 --- a/bitcoin/src/p2p/address.rs +++ b/bitcoin/src/p2p/address.rs @@ -363,7 +363,7 @@ mod test { } #[test] - fn test_socket_addr() { + fn socket_addr() { let s4 = SocketAddr::new(IpAddr::V4(Ipv4Addr::new(111, 222, 123, 4)), 5555); let a4 = Address::new(&s4, ServiceFlags::NETWORK | ServiceFlags::WITNESS); assert_eq!(a4.socket_addr().unwrap(), s4); diff --git a/bitcoin/src/p2p/message.rs index 04eb15d618..e9522a7c51 100644 --- a/bitcoin/src/p2p/message.rs +++ b/bitcoin/src/p2p/message.rs @@ -27,7 +27,7 @@ use crate::{block, consensus, transaction}; pub const MAX_INV_SIZE: usize = 50_000; /// Maximum size, in bytes, of an encoded message -/// This by neccessity should be larger tham `MAX_VEC_SIZE` +/// This by necessity should be larger than `MAX_VEC_SIZE` pub const MAX_MSG_SIZE: usize = 5_000_000; /// Serializer for command string @@ -148,7 +148,7 @@ impl std::error::Error for CommandStringError { fn source(&self) -> Option<&(dyn std::error::Error + 'static)> { None } } -/// A Network message +/// A Network message using the v1 p2p protocol. #[derive(Clone, Debug, PartialEq, Eq)] pub struct RawNetworkMessage { magic: Magic, @@ -157,6 +157,12 @@ pub struct RawNetworkMessage { checksum: [u8; 4], } +/// A Network message using the v2 p2p protocol defined in BIP324. +#[derive(Clone, Debug, PartialEq, Eq)] +pub struct V2NetworkMessage { + payload: NetworkMessage, +} + /// A Network message payload. Proper documentation is available on at /// [Bitcoin Wiki: Protocol Specification](https://en.bitcoin.it/wiki/Protocol_specification) #[derive(Clone, PartialEq, Eq, Debug)] @@ -332,6 +338,27 @@ impl RawNetworkMessage { pub fn command(&self) -> CommandString { self.payload.command() } } +impl V2NetworkMessage { + /// Constructs a new [V2NetworkMessage]. + pub fn new(payload: NetworkMessage) -> Self { Self { payload } } + + /// Consumes the [V2NetworkMessage] instance and returns the inner payload. + pub fn into_payload(self) -> NetworkMessage { self.payload } + + /// The actual message data. + pub fn payload(&self) -> &NetworkMessage { &self.payload } + + /// Return the message command as a static string reference. + /// + /// This returns `"unknown"` for [NetworkMessage::Unknown], + /// regardless of the actual command in the unknown message. + /// Use the [Self::command] method to get the command for unknown messages. + pub fn cmd(&self) -> &'static str { self.payload.cmd() } + + /// Return the CommandString for the message command. + pub fn command(&self) -> CommandString { self.payload.command() } +} + struct HeaderSerializationWrapper<'a>(&'a Vec); impl Encodable for HeaderSerializationWrapper<'_> { @@ -404,6 +431,62 @@ impl Encodable for RawNetworkMessage { } } +impl Encodable for V2NetworkMessage { + fn consensus_encode(&self, writer: &mut W) -> Result { + // A subset of message types are optimized to only use one byte to encode the command. + // Non-optimized message types use the zero-byte flag and the following twelve bytes to encode the command.
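+ // For example, `NetworkMessage::Ping` is written as its one-byte short ID 0x12 followed by the ping nonce, while `NetworkMessage::Verack` has no short ID and is written as the 0x00 flag followed by the twelve-byte "verack" command string.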
+ let (command_byte, full_command) = match self.payload { + NetworkMessage::Addr(_) => (1u8, None), + NetworkMessage::Inv(_) => (14u8, None), + NetworkMessage::GetData(_) => (11u8, None), + NetworkMessage::NotFound(_) => (17u8, None), + NetworkMessage::GetBlocks(_) => (9u8, None), + NetworkMessage::GetHeaders(_) => (12u8, None), + NetworkMessage::MemPool => (15u8, None), + NetworkMessage::Tx(_) => (21u8, None), + NetworkMessage::Block(_) => (2u8, None), + NetworkMessage::Headers(_) => (13u8, None), + NetworkMessage::Ping(_) => (18u8, None), + NetworkMessage::Pong(_) => (19u8, None), + NetworkMessage::MerkleBlock(_) => (16u8, None), + NetworkMessage::FilterLoad(_) => (8u8, None), + NetworkMessage::FilterAdd(_) => (6u8, None), + NetworkMessage::FilterClear => (7u8, None), + NetworkMessage::GetCFilters(_) => (22u8, None), + NetworkMessage::CFilter(_) => (23u8, None), + NetworkMessage::GetCFHeaders(_) => (24u8, None), + NetworkMessage::CFHeaders(_) => (25u8, None), + NetworkMessage::GetCFCheckpt(_) => (26u8, None), + NetworkMessage::CFCheckpt(_) => (27u8, None), + NetworkMessage::SendCmpct(_) => (20u8, None), + NetworkMessage::CmpctBlock(_) => (4u8, None), + NetworkMessage::GetBlockTxn(_) => (10u8, None), + NetworkMessage::BlockTxn(_) => (3u8, None), + NetworkMessage::FeeFilter(_) => (5u8, None), + NetworkMessage::AddrV2(_) => (28u8, None), + NetworkMessage::Version(_) + | NetworkMessage::Verack + | NetworkMessage::SendHeaders + | NetworkMessage::GetAddr + | NetworkMessage::WtxidRelay + | NetworkMessage::SendAddrV2 + | NetworkMessage::Alert(_) + | NetworkMessage::Reject(_) + | NetworkMessage::Unknown { .. } => (0u8, Some(self.payload.command())), + }; + + let mut len = command_byte.consensus_encode(writer)?; + if let Some(cmd) = full_command { + len += cmd.consensus_encode(writer)?; + } + + // Encode the payload. + len += self.payload.consensus_encode(writer)?; + + Ok(len) + } +} + struct HeaderDeserializationWrapper(Vec); impl Decodable for HeaderDeserializationWrapper { @@ -537,6 +620,79 @@ impl Decodable for RawNetworkMessage { } } +impl Decodable for V2NetworkMessage { + fn consensus_decode_from_finite_reader( + r: &mut R, + ) -> Result { + let short_id: u8 = Decodable::consensus_decode_from_finite_reader(r)?; + let payload = match short_id { + 0u8 => { + // Full command encoding. + let cmd = CommandString::consensus_decode_from_finite_reader(r)?; + match &cmd.0[..] 
{ + "version" => + NetworkMessage::Version(Decodable::consensus_decode_from_finite_reader(r)?), + "verack" => NetworkMessage::Verack, + "sendheaders" => NetworkMessage::SendHeaders, + "getaddr" => NetworkMessage::GetAddr, + "wtxidrelay" => NetworkMessage::WtxidRelay, + "sendaddrv2" => NetworkMessage::SendAddrV2, + "alert" => + NetworkMessage::Alert(Decodable::consensus_decode_from_finite_reader(r)?), + "reject" => + NetworkMessage::Reject(Decodable::consensus_decode_from_finite_reader(r)?), + _ => NetworkMessage::Unknown { + command: cmd, + payload: Vec::consensus_decode_from_finite_reader(r)?, + }, + } + } + 1u8 => NetworkMessage::Addr(Decodable::consensus_decode_from_finite_reader(r)?), + 2u8 => NetworkMessage::Block(Decodable::consensus_decode_from_finite_reader(r)?), + 3u8 => NetworkMessage::BlockTxn(Decodable::consensus_decode_from_finite_reader(r)?), + 4u8 => NetworkMessage::CmpctBlock(Decodable::consensus_decode_from_finite_reader(r)?), + 5u8 => NetworkMessage::FeeFilter(Decodable::consensus_decode_from_finite_reader(r)?), + 6u8 => NetworkMessage::FilterAdd(Decodable::consensus_decode_from_finite_reader(r)?), + 7u8 => NetworkMessage::FilterClear, + 8u8 => NetworkMessage::FilterLoad(Decodable::consensus_decode_from_finite_reader(r)?), + 9u8 => NetworkMessage::GetBlocks(Decodable::consensus_decode_from_finite_reader(r)?), + 10u8 => NetworkMessage::GetBlockTxn(Decodable::consensus_decode_from_finite_reader(r)?), + 11u8 => NetworkMessage::GetData(Decodable::consensus_decode_from_finite_reader(r)?), + 12u8 => NetworkMessage::GetHeaders(Decodable::consensus_decode_from_finite_reader(r)?), + 13u8 => NetworkMessage::Headers( + HeaderDeserializationWrapper::consensus_decode_from_finite_reader(r)?.0, + ), + 14u8 => NetworkMessage::Inv(Decodable::consensus_decode_from_finite_reader(r)?), + 15u8 => NetworkMessage::MemPool, + 16u8 => NetworkMessage::MerkleBlock(Decodable::consensus_decode_from_finite_reader(r)?), + 17u8 => NetworkMessage::NotFound(Decodable::consensus_decode_from_finite_reader(r)?), + 18u8 => NetworkMessage::Ping(Decodable::consensus_decode_from_finite_reader(r)?), + 19u8 => NetworkMessage::Pong(Decodable::consensus_decode_from_finite_reader(r)?), + 20u8 => NetworkMessage::SendCmpct(Decodable::consensus_decode_from_finite_reader(r)?), + 21u8 => NetworkMessage::Tx(Decodable::consensus_decode_from_finite_reader(r)?), + 22u8 => NetworkMessage::GetCFilters(Decodable::consensus_decode_from_finite_reader(r)?), + 23u8 => NetworkMessage::CFilter(Decodable::consensus_decode_from_finite_reader(r)?), + 24u8 => + NetworkMessage::GetCFHeaders(Decodable::consensus_decode_from_finite_reader(r)?), + 25u8 => NetworkMessage::CFHeaders(Decodable::consensus_decode_from_finite_reader(r)?), + 26u8 => + NetworkMessage::GetCFCheckpt(Decodable::consensus_decode_from_finite_reader(r)?), + 27u8 => NetworkMessage::CFCheckpt(Decodable::consensus_decode_from_finite_reader(r)?), + 28u8 => NetworkMessage::AddrV2(Decodable::consensus_decode_from_finite_reader(r)?), + _ => + return Err(encode::Error::Parse(encode::ParseError::ParseFailed( + "Unknown short ID", + ))), + }; + Ok(V2NetworkMessage { payload }) + } + + #[inline] + fn consensus_decode(r: &mut R) -> Result { + Self::consensus_decode_from_finite_reader(&mut r.take(MAX_MSG_SIZE.to_u64())) + } +} + #[cfg(test)] mod test { use std::net::Ipv4Addr; @@ -546,7 +702,8 @@ mod test { use super::*; use crate::bip152::BlockTransactionsRequest; - use crate::block::Block; + use crate::bip158::{FilterHeader, FilterHash}; + use crate::block::{Block, BlockHash}; use 
crate::consensus::encode::{deserialize, deserialize_partial, serialize}; use crate::p2p::address::AddrV2; use crate::p2p::message_blockdata::{GetBlocksMessage, GetHeadersMessage, Inventory}; @@ -558,9 +715,9 @@ mod test { use crate::p2p::message_network::{Reject, RejectReason, VersionMessage}; use crate::p2p::ServiceFlags; use crate::script::ScriptBuf; - use crate::transaction::Transaction; + use crate::transaction::{Transaction, Txid}; - fn hash(slice: [u8; 32]) -> sha256d::Hash { sha256d::Hash::from_slice(&slice).unwrap() } + fn hash(array: [u8; 32]) -> sha256d::Hash { sha256d::Hash::from_byte_array(array) } #[test] fn full_round_ser_der_raw_network_message() { @@ -581,16 +738,20 @@ mod test { 45, Address::new(&([123, 255, 000, 100], 833).into(), ServiceFlags::NETWORK), )]), - NetworkMessage::Inv(vec![Inventory::Block(hash([8u8; 32]).into())]), - NetworkMessage::GetData(vec![Inventory::Transaction(hash([45u8; 32]).into())]), - NetworkMessage::NotFound(vec![Inventory::Error]), + NetworkMessage::Inv(vec![Inventory::Block(BlockHash::from_byte_array(hash([8u8; 32]).to_byte_array()))]), + NetworkMessage::GetData(vec![Inventory::Transaction(Txid::from_byte_array(hash([45u8; 32]).to_byte_array()))]), + NetworkMessage::NotFound(vec![Inventory::Error([0u8; 32])]), NetworkMessage::GetBlocks(GetBlocksMessage::new( - vec![hash([1u8; 32]).into(), hash([4u8; 32]).into()], - hash([5u8; 32]).into(), + vec![ + BlockHash::from_byte_array(hash([1u8; 32]).to_byte_array()), + BlockHash::from_byte_array(hash([4u8; 32]).to_byte_array())], + BlockHash::from_byte_array(hash([5u8; 32]).to_byte_array()), )), NetworkMessage::GetHeaders(GetHeadersMessage::new( - vec![hash([10u8; 32]).into(), hash([40u8; 32]).into()], - hash([50u8; 32]).into(), + vec![ + BlockHash::from_byte_array(hash([10u8; 32]).to_byte_array()), + BlockHash::from_byte_array(hash([40u8; 32]).to_byte_array())], + BlockHash::from_byte_array(hash([50u8; 32]).to_byte_array()), )), NetworkMessage::MemPool, NetworkMessage::Tx(tx), @@ -615,32 +776,32 @@ mod test { NetworkMessage::GetCFilters(GetCFilters { filter_type: 2, start_height: BlockHeight::from(52), - stop_hash: hash([42u8; 32]).into(), + stop_hash: BlockHash::from_byte_array(hash([42u8; 32]).to_byte_array()), }), NetworkMessage::CFilter(CFilter { filter_type: 7, - block_hash: hash([25u8; 32]).into(), + block_hash: BlockHash::from_byte_array(hash([25u8; 32]).to_byte_array()), filter: vec![1, 2, 3], }), NetworkMessage::GetCFHeaders(GetCFHeaders { filter_type: 4, start_height: BlockHeight::from(102), - stop_hash: hash([47u8; 32]).into(), + stop_hash: BlockHash::from_byte_array(hash([47u8; 32]).to_byte_array()), }), NetworkMessage::CFHeaders(CFHeaders { filter_type: 13, - stop_hash: hash([53u8; 32]).into(), - previous_filter_header: hash([12u8; 32]).into(), - filter_hashes: vec![hash([4u8; 32]).into(), hash([12u8; 32]).into()], + stop_hash: BlockHash::from_byte_array(hash([53u8; 32]).to_byte_array()), + previous_filter_header: FilterHeader::from_byte_array(hash([12u8; 32]).to_byte_array()), + filter_hashes: vec![FilterHash::from_byte_array(hash([4u8; 32]).to_byte_array()), FilterHash::from_byte_array(hash([12u8; 32]).to_byte_array())], }), NetworkMessage::GetCFCheckpt(GetCFCheckpt { filter_type: 17, - stop_hash: hash([25u8; 32]).into(), + stop_hash: BlockHash::from_byte_array(hash([25u8; 32]).to_byte_array()), }), NetworkMessage::CFCheckpt(CFCheckpt { filter_type: 27, - stop_hash: hash([77u8; 32]).into(), - filter_headers: vec![hash([3u8; 32]).into(), hash([99u8; 32]).into()], + stop_hash: 
BlockHash::from_byte_array(hash([77u8; 32]).to_byte_array()), + filter_headers: vec![FilterHeader::from_byte_array(hash([3u8; 32]).to_byte_array()), FilterHeader::from_byte_array(hash([99u8; 32]).to_byte_array())], }), NetworkMessage::Alert(vec![45, 66, 3, 2, 6, 8, 9, 12, 3, 130]), NetworkMessage::Reject(Reject { @@ -661,7 +822,7 @@ mod test { NetworkMessage::CmpctBlock(cmptblock), NetworkMessage::GetBlockTxn(GetBlockTxn { txs_request: BlockTransactionsRequest { - block_hash: hash([11u8; 32]).into(), + block_hash: BlockHash::from_byte_array(hash([11u8; 32]).to_byte_array()), indexes: vec![0, 1, 2, 3, 10, 3002], }, }), @@ -669,9 +830,14 @@ mod test { NetworkMessage::SendCmpct(SendCmpct { send_compact: true, version: 8333 }), ]; - for msg in msgs { - let raw_msg = RawNetworkMessage::new(Magic::from_bytes([57, 0, 0, 0]), msg); + for msg in &msgs { + // V1 messages. + let raw_msg = RawNetworkMessage::new(Magic::from_bytes([57, 0, 0, 0]), msg.clone()); assert_eq!(deserialize::(&serialize(&raw_msg)).unwrap(), raw_msg); + + // V2 messages. + let v2_msg = V2NetworkMessage::new(msg.clone()); + assert_eq!(deserialize::(&serialize(&v2_msg)).unwrap(), v2_msg); } } @@ -709,6 +875,17 @@ mod test { 0x00, 0x00, 0x00, 0x00, 0x5d, 0xf6, 0xe0, 0xe2]); } + #[test] + fn serialize_v2_verack() { + assert_eq!( + serialize(&V2NetworkMessage::new(NetworkMessage::Verack)), + [ + 0x00, // Full command encoding flag. + 0x76, 0x65, 0x72, 0x61, 0x63, 0x6B, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + ] + ); + } + #[test] #[rustfmt::skip] fn serialize_ping() { @@ -719,6 +896,17 @@ mod test { 0x64, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]); } + #[test] + fn serialize_v2_ping() { + assert_eq!( + serialize(&V2NetworkMessage::new(NetworkMessage::Ping(100))), + [ + 0x12, // Ping command short ID + 0x64, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + ] + ); + } + #[test] #[rustfmt::skip] fn serialize_mempool() { @@ -728,6 +916,16 @@ mod test { 0x00, 0x00, 0x00, 0x00, 0x5d, 0xf6, 0xe0, 0xe2]); } + #[test] + fn serialize_v2_mempool() { + assert_eq!( + serialize(&V2NetworkMessage::new(NetworkMessage::MemPool)), + [ + 0x0F, // MemPool command short ID + ] + ); + } + #[test] #[rustfmt::skip] fn serialize_getaddr() { @@ -737,6 +935,17 @@ mod test { 0x00, 0x00, 0x00, 0x00, 0x5d, 0xf6, 0xe0, 0xe2]); } + #[test] + fn serialize_v2_getaddr() { + assert_eq!( + serialize(&V2NetworkMessage::new(NetworkMessage::GetAddr)), + [ + 0x00, // Full command encoding flag. 
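+ // "getaddr" command string, zero-padded to twelve bytes: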
+ 0x67, 0x65, 0x74, 0x61, 0x64, 0x64, 0x72, 0x00, 0x00, 0x00, 0x00, 0x00, + ] + ); + } + #[test] fn deserialize_getaddr() { #[rustfmt::skip] @@ -752,6 +961,19 @@ mod test { assert_eq!(preimage.payload, msg.payload); } + #[test] + fn deserialize_v2_getaddr() { + let msg = deserialize(&[ + 0x00, // Full command encoding flag + 0x67, 0x65, 0x74, 0x61, 0x64, 0x64, 0x72, 0x00, 0x00, 0x00, 0x00, 0x00, + ]); + + let preimage = V2NetworkMessage::new(NetworkMessage::GetAddr); + assert!(msg.is_ok()); + let msg: V2NetworkMessage = msg.unwrap(); + assert_eq!(preimage, msg); + } + #[test] fn deserialize_version() { #[rustfmt::skip] @@ -796,6 +1018,44 @@ mod test { } } + #[test] + fn deserialize_v2_version() { + #[rustfmt::skip] + let msg = deserialize::(&[ + 0x00, // Full command encoding flag + 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x00, 0x00, 0x00, 0x00, 0x00, // "version" command + 0x7f, 0x11, 0x01, 0x00, // version: 70015 + 0x0d, 0x04, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // services + 0xf0, 0x0f, 0x4d, 0x5c, 0x00, 0x00, 0x00, 0x00, // timestamp: 1548554224 + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // receiver services: NONE + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xff, 0xff, 0x5b, 0xf0, 0x8c, 0x80, 0xb4, 0xbd, // addr_recv + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // sender services: NONE + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xff, 0xff, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // addr_from + 0xfa, 0xa9, 0x95, 0x59, 0xcc, 0x68, 0xa1, 0xc1, // nonce + 0x10, 0x2f, 0x53, 0x61, 0x74, 0x6f, 0x73, 0x68, 0x69, 0x3a, 0x30, 0x2e, 0x31, 0x37, 0x2e, 0x31, 0x2f, // user_agent: "/Satoshi:0.17.1/" + 0x93, 0x8c, 0x08, 0x00, // start_height: 560275 + 0x01 // relay: true + ]).unwrap(); + + if let NetworkMessage::Version(version_msg) = msg.payload { + assert_eq!(version_msg.version, 70015); + assert_eq!( + version_msg.services, + ServiceFlags::NETWORK + | ServiceFlags::BLOOM + | ServiceFlags::WITNESS + | ServiceFlags::NETWORK_LIMITED + ); + assert_eq!(version_msg.timestamp, 1548554224); + assert_eq!(version_msg.nonce, 13952548347456104954); + assert_eq!(version_msg.user_agent, "/Satoshi:0.17.1/"); + assert_eq!(version_msg.start_height, 560275); + assert!(version_msg.relay); + } else { + panic!("wrong message type"); + } + } + #[test] fn deserialize_partial_message() { #[rustfmt::skip] diff --git a/bitcoin/src/p2p/message_blockdata.rs b/bitcoin/src/p2p/message_blockdata.rs index 90b0bba72b..6ccff60d91 100644 --- a/bitcoin/src/p2p/message_blockdata.rs +++ b/bitcoin/src/p2p/message_blockdata.rs @@ -16,8 +16,9 @@ use crate::transaction::{Txid, Wtxid}; /// An inventory item. #[derive(PartialEq, Eq, Clone, Debug, Copy, Hash, PartialOrd, Ord)] pub enum Inventory { - /// Error --- these inventories can be ignored - Error, + /// Error --- these inventories can be ignored. + /// While a 32 byte hash is expected over the wire, the value is meaningless. + Error([u8; 32]), /// Transaction Transaction(Txid), /// Block @@ -42,10 +43,10 @@ pub enum Inventory { impl Inventory { /// Return the item value represented as a SHA256-d hash. /// - /// Returns [None] only for [Inventory::Error]. + /// Returns [None] only for [Inventory::Error] who's hash value is meaningless. 
pub fn network_hash(&self) -> Option<[u8; 32]> { match self { - Inventory::Error => None, + Inventory::Error(_) => None, Inventory::Transaction(t) => Some(t.to_byte_array()), Inventory::Block(b) => Some(b.to_byte_array()), Inventory::CompactBlock(b) => Some(b.to_byte_array()), @@ -66,7 +67,7 @@ impl Encodable for Inventory { }; } Ok(match *self { - Inventory::Error => encode_inv!(0, [0; 32]), + Inventory::Error(_) => encode_inv!(0, [0; 32]), Inventory::Transaction(ref t) => encode_inv!(1, t), Inventory::Block(ref b) => encode_inv!(2, b), Inventory::CompactBlock(ref b) => encode_inv!(4, b), @@ -83,7 +84,7 @@ impl Decodable for Inventory { fn consensus_decode(r: &mut R) -> Result { let inv_type: u32 = Decodable::consensus_decode(r)?; Ok(match inv_type { - 0 => Inventory::Error, + 0 => Inventory::Error(Decodable::consensus_decode(r)?), 1 => Inventory::Transaction(Decodable::consensus_decode(r)?), 2 => Inventory::Block(Decodable::consensus_decode(r)?), 4 => Inventory::CompactBlock(Decodable::consensus_decode(r)?), diff --git a/bitcoin/src/p2p/mod.rs b/bitcoin/src/p2p/mod.rs index 074a225c23..cb9c1f5a86 100644 --- a/bitcoin/src/p2p/mod.rs +++ b/bitcoin/src/p2p/mod.rs @@ -24,7 +24,7 @@ use core::str::FromStr; use core::{fmt, ops}; use hex::FromHex; -use internals::{debug_from_display, impl_to_hex_from_lower_hex, write_err}; +use internals::{impl_to_hex_from_lower_hex, write_err}; use io::{BufRead, Write}; use crate::consensus::encode::{self, Decodable, Encodable}; @@ -68,12 +68,12 @@ impl ServiceFlags { /// clients. pub const NETWORK: ServiceFlags = ServiceFlags(1 << 0); - /// GETUTXO means the node is capable of responding to the getutxo protocol request. Bitcoin + /// GETUTXO means the node is capable of responding to the getutxo protocol request. Bitcoin /// Core does not support this but a patch set called Bitcoin XT does. /// See BIP 64 for details on how this is implemented. pub const GETUTXO: ServiceFlags = ServiceFlags(1 << 1); - /// BLOOM means the node is capable and willing to handle bloom-filtered connections. Bitcoin + /// BLOOM means the node is capable and willing to handle bloom-filtered connections. Bitcoin /// Core nodes used to support this by default, without advertising this bit, but no longer do /// as of protocol version 70011 (= NO_BLOOM_VERSION) pub const BLOOM: ServiceFlags = ServiceFlags(1 << 2); @@ -291,7 +291,10 @@ impl fmt::Display for Magic { Ok(()) } } -debug_from_display!(Magic); + +impl fmt::Debug for Magic { + fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { fmt::Display::fmt(self, f) } +} impl fmt::LowerHex for Magic { fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { @@ -402,6 +405,7 @@ mod tests { ServiceFlags::WITNESS, ServiceFlags::COMPACT_FILTERS, ServiceFlags::NETWORK_LIMITED, + ServiceFlags::P2P_V2, ]; let mut flags = ServiceFlags::NONE; @@ -427,8 +431,12 @@ mod tests { // Test formatting. 
assert_eq!("ServiceFlags(NONE)", ServiceFlags::NONE.to_string()); assert_eq!("ServiceFlags(WITNESS)", ServiceFlags::WITNESS.to_string()); - let flag = ServiceFlags::WITNESS | ServiceFlags::BLOOM | ServiceFlags::NETWORK; - assert_eq!("ServiceFlags(NETWORK|BLOOM|WITNESS)", flag.to_string()); + assert_eq!("ServiceFlags(P2P_V2)", ServiceFlags::P2P_V2.to_string()); + let flag = ServiceFlags::WITNESS + | ServiceFlags::BLOOM + | ServiceFlags::NETWORK + | ServiceFlags::P2P_V2; + assert_eq!("ServiceFlags(NETWORK|BLOOM|WITNESS|P2P_V2)", flag.to_string()); let flag = ServiceFlags::WITNESS | 0xf0.into(); assert_eq!("ServiceFlags(WITNESS|COMPACT_FILTERS|0xb0)", flag.to_string()); } diff --git a/bitcoin/src/policy.rs b/bitcoin/src/policy.rs index 35c2dda8b5..96b77bbab0 100644 --- a/bitcoin/src/policy.rs +++ b/bitcoin/src/policy.rs @@ -18,8 +18,8 @@ use super::constants::{MAX_BLOCK_SIGOPS_COST, WITNESS_SCALE_FACTOR}; /// Maximum weight of a transaction for it to be relayed by most nodes on the network pub const MAX_STANDARD_TX_WEIGHT: u32 = 400_000; -/// Minimum non-witness size for a standard transaction (1 segwit input + 1 P2WPKH output = 82 bytes) -pub const MIN_STANDARD_TX_NONWITNESS_SIZE: u32 = 82; +/// Minimum non-witness size for a standard transaction, set to 65 bytes. +pub const MIN_STANDARD_TX_NONWITNESS_SIZE: u32 = 65; /// Maximum number of sigops in a standard tx. pub const MAX_STANDARD_TX_SIGOPS_COST: u32 = MAX_BLOCK_SIGOPS_COST as u32 / 5; @@ -43,6 +43,9 @@ pub const DEFAULT_MIN_RELAY_TX_FEE: u32 = 1_000; /// mempools. pub const DEFAULT_MEMPOOL_EXPIRY: u32 = 336; +// 80 bytes of data, +1 for OP_RETURN, +2 for the pushdata opcodes. +pub(crate) const MAX_OP_RETURN_RELAY: usize = 83; + /// The virtual transaction size, as computed by default by bitcoind node. pub fn get_virtual_tx_size(weight: i64, n_sigops: i64) -> i64 { (cmp::max(weight, n_sigops * DEFAULT_BYTES_PER_SIGOP as i64) + WITNESS_SCALE_FACTOR as i64 - 1) diff --git a/bitcoin/src/pow.rs b/bitcoin/src/pow.rs index 7566e73b2f..ae295a3b7d 100644 --- a/bitcoin/src/pow.rs +++ b/bitcoin/src/pow.rs @@ -10,8 +10,6 @@ use core::{cmp, fmt}; use internals::impl_to_hex_from_lower_hex; use io::{BufRead, Write}; -#[cfg(all(test, mutate))] -use mutagen::mutate; use units::parse::{self, ParseIntError, PrefixedHexError, UnprefixedHexError}; use crate::block::{BlockHash, Header}; @@ -218,7 +216,6 @@ impl Target { /// /// Proof-of-work validity for a block requires the hash of the block to be less than or equal /// to the target. - #[cfg_attr(all(test, mutate), mutate)] pub fn is_met_by(&self, hash: BlockHash) -> bool { let hash = U256::from_le_bytes(hash.to_byte_array()); hash <= self.0 @@ -255,9 +252,8 @@ impl Target { /// /// [max]: Target::max /// [target]: crate::block::HeaderExt::target - #[cfg_attr(all(test, mutate), mutate)] pub fn difficulty(&self, params: impl AsRef) -> u128 { - // Panic here may be eaiser to debug than during the actual division. + // Panic here may be easier to debug than during the actual division. assert_ne!(self.0, U256::ZERO, "divide by zero"); let max = params.as_ref().max_attainable_target; @@ -274,7 +270,6 @@ impl Target { /// Panics if `self` is zero (divide by zero). /// /// [`difficulty`]: Target::difficulty - #[cfg_attr(all(test, mutate), mutate)] pub fn difficulty_float(&self, params: impl AsRef) -> f64 { // We want to explicitly panic to be uniform with `difficulty()` // (float division by zero does not panic). @@ -357,7 +352,7 @@ define_extension_trait! 
{ /// ref: /// /// Given the previous Target, represented as a [`CompactTarget`], the difficulty is adjusted - /// by taking the timespan between them, and multipling the current [`CompactTarget`] by a factor + /// by taking the timespan between them, and multiplying the current [`CompactTarget`] by a factor /// of the net timespan and expected timespan. The [`CompactTarget`] may not adjust by more than /// a factor of 4, or adjust beyond the maximum threshold for the network. /// @@ -371,12 +366,14 @@ define_extension_trait! { /// Take the example of the first difficulty adjustment. Block 2016 introduces a new [`CompactTarget`], /// which takes the net timespan between Block 2015 and Block 0, and recomputes the difficulty. /// + /// To calculate the timespan, users should first convert their u32 timestamps to i64s before subtracting them + /// /// # Returns /// /// The expected [`CompactTarget`] recalculation. fn from_next_work_required( last: CompactTarget, - timespan: u64, + timespan: i64, params: impl AsRef, ) -> CompactTarget { let params = params.as_ref(); @@ -387,11 +384,11 @@ define_extension_trait! { // ref: let min_timespan = params.pow_target_timespan >> 2; // Lines 56/57 let max_timespan = params.pow_target_timespan << 2; // Lines 58/59 - let actual_timespan = timespan.clamp(min_timespan, max_timespan); + let actual_timespan = timespan.clamp(min_timespan.into(), max_timespan.into()); let prev_target: Target = last.into(); let maximum_retarget = prev_target.max_transition_threshold(params); // bnPowLimit let retarget = prev_target.0; // bnNew - let retarget = retarget.mul(actual_timespan.into()); + let retarget = retarget.mul(u128::try_from(actual_timespan).expect("clamped value won't be negative").into()); let retarget = retarget.div(params.pow_target_timespan.into()); let retarget = Target(retarget); if retarget.ge(&maximum_retarget) { @@ -423,9 +420,9 @@ define_extension_trait! { current: Header, params: impl AsRef, ) -> CompactTarget { - let timespan = current.time - last_epoch_boundary.time; + let timespan = i64::from(current.time.to_u32()) - i64::from(last_epoch_boundary.time.to_u32()); let bits = current.bits; - CompactTarget::from_next_work_required(bits, timespan.into(), params) + CompactTarget::from_next_work_required(bits, timespan, params) } } } @@ -497,7 +494,6 @@ impl U256 { } /// Constructs a new `U256` from a big-endian array of `u8`s. - #[cfg_attr(all(test, mutate), mutate)] fn from_be_bytes(a: [u8; 32]) -> U256 { let (high, low) = split_in_half(a); let big = u128::from_be_bytes(high); @@ -506,7 +502,6 @@ impl U256 { } /// Constructs a new `U256` from a little-endian array of `u8`s. - #[cfg_attr(all(test, mutate), mutate)] fn from_le_bytes(a: [u8; 32]) -> U256 { let (high, low) = split_in_half(a); let little = u128::from_le_bytes(high); @@ -515,7 +510,6 @@ impl U256 { } /// Converts `U256` to a big-endian array of `u8`s. - #[cfg_attr(all(test, mutate), mutate)] fn to_be_bytes(self) -> [u8; 32] { let mut out = [0; 32]; out[..16].copy_from_slice(&self.0.to_be_bytes()); @@ -524,7 +518,6 @@ impl U256 { } /// Converts `U256` to a little-endian array of `u8`s. 
- #[cfg_attr(all(test, mutate), mutate)] fn to_le_bytes(self) -> [u8; 32] { let mut out = [0; 32]; out[..16].copy_from_slice(&self.1.to_le_bytes()); @@ -556,13 +549,10 @@ impl U256 { ret.wrapping_inc() } - #[cfg_attr(all(test, mutate), mutate)] fn is_zero(&self) -> bool { self.0 == 0 && self.1 == 0 } - #[cfg_attr(all(test, mutate), mutate)] fn is_one(&self) -> bool { self.0 == 0 && self.1 == 1 } - #[cfg_attr(all(test, mutate), mutate)] fn is_max(&self) -> bool { self.0 == u128::MAX && self.1 == u128::MAX } /// Returns the low 32 bits. @@ -585,7 +575,6 @@ impl U256 { } /// Returns the least number of bits needed to represent the number. - #[cfg_attr(all(test, mutate), mutate)] fn bits(&self) -> u32 { if self.0 > 0 { 256 - self.0.leading_zeros() @@ -600,9 +589,6 @@ impl U256 { /// /// The multiplication result along with a boolean indicating whether an arithmetic overflow /// occurred. If an overflow occurred then the wrapped value is returned. - // mutagen false pos mul_u64: replace `|` with `^` (XOR is same as OR when combined with <<) - // mutagen false pos mul_u64: replace `|` with `^` - #[cfg_attr(all(test, mutate), mutate)] fn mul_u64(self, rhs: u64) -> (U256, bool) { let mut carry: u128 = 0; let mut split_le = @@ -616,8 +602,8 @@ impl U256 { carry = n >> 64; // and carry the high bits. } - let low = u128::from(split_le[0]) | u128::from(split_le[1]) << 64; - let high = u128::from(split_le[2]) | u128::from(split_le[3]) << 64; + let low = u128::from(split_le[0]) | (u128::from(split_le[1]) << 64); + let high = u128::from(split_le[2]) | (u128::from(split_le[3]) << 64); (Self(high, low), carry != 0) } @@ -630,7 +616,6 @@ impl U256 { /// # Panics /// /// If `rhs` is zero. - #[cfg_attr(all(test, mutate), mutate)] fn div_rem(self, rhs: Self) -> (Self, Self) { let mut sub_copy = self; let mut shift_copy = rhs; @@ -670,7 +655,6 @@ impl U256 { /// Returns a tuple of the addition along with a boolean indicating whether an arithmetic /// overflow would occur. If an overflow would have occurred then the wrapped value is returned. #[must_use = "this returns the result of the operation, without modifying the original"] - #[cfg_attr(all(test, mutate), mutate)] fn overflowing_add(self, rhs: Self) -> (Self, bool) { let mut ret = U256::ZERO; let mut ret_overflow = false; @@ -695,7 +679,6 @@ impl U256 { /// Returns a tuple of the subtraction along with a boolean indicating whether an arithmetic /// overflow would occur. If an overflow would have occurred then the wrapped value is returned. #[must_use = "this returns the result of the operation, without modifying the original"] - #[cfg_attr(all(test, mutate), mutate)] fn overflowing_sub(self, rhs: Self) -> (Self, bool) { let ret = self.wrapping_add(!rhs).wrapping_add(Self::ONE); let overflow = rhs > self; @@ -708,7 +691,6 @@ impl U256 { /// indicating whether an arithmetic overflow would occur. If an /// overflow would have occurred then the wrapped value is returned. #[must_use = "this returns the result of the operation, without modifying the original"] - #[cfg_attr(all(test, mutate), mutate)] fn overflowing_mul(self, rhs: Self) -> (Self, bool) { let mut ret = U256::ZERO; let mut ret_overflow = false; @@ -756,7 +738,6 @@ impl U256 { /// Returns `self` incremented by 1 wrapping around at the boundary of the type. 
#[must_use = "this returns the result of the increment, without modifying the original"] - #[cfg_attr(all(test, mutate), mutate)] fn wrapping_inc(&self) -> U256 { let mut ret = U256::ZERO; @@ -776,7 +757,6 @@ impl U256 { /// restricted to the range of the type, rather than the bits shifted out of the LHS being /// returned to the other end. We do not currently support `rotate_left`. #[must_use = "this returns the result of the operation, without modifying the original"] - #[cfg_attr(all(test, mutate), mutate)] fn wrapping_shl(self, rhs: u32) -> Self { let shift = rhs & 0x000000ff; @@ -803,7 +783,6 @@ impl U256 { /// restricted to the range of the type, rather than the bits shifted out of the LHS being /// returned to the other end. We do not currently support `rotate_right`. #[must_use = "this returns the result of the operation, without modifying the original"] - #[cfg_attr(all(test, mutate), mutate)] fn wrapping_shr(self, rhs: u32) -> Self { let shift = rhs & 0x000000ff; @@ -870,7 +849,7 @@ impl U256 { // (This is why we only care if the other non-msb dropped bits are all 0 or not, // so we can just OR them to make sure any bits show up somewhere.) let mantissa = - (mantissa + ((dropped_bits - (dropped_bits >> 127 & !mantissa)) >> 127)) as u64; + (mantissa + ((dropped_bits - ((dropped_bits >> 127) & !mantissa)) >> 127)) as u64; // Step 6: Calculate the exponent // If self is 0, exponent should be 0 (special meaning) and mantissa will end up 0 too // Otherwise, (255 - n) + 1022 so it simplifies to 1277 - n @@ -1099,6 +1078,7 @@ pub mod test_utils { mod tests { use super::*; use crate::pow::test_utils::{u128_to_work, u32_to_target, u64_to_target}; + use crate::BlockTime; impl U256 { fn bit_at(&self, index: usize) -> bool { @@ -1113,8 +1093,8 @@ mod tests { /// Constructs a new U256 from a big-endian array of u64's fn from_array(a: [u64; 4]) -> Self { let mut ret = U256::ZERO; - ret.0 = (a[0] as u128) << 64 ^ (a[1] as u128); - ret.1 = (a[2] as u128) << 64 ^ (a[3] as u128); + ret.0 = ((a[0] as u128) << 64) ^ (a[1] as u128); + ret.1 = ((a[2] as u128) << 64) ^ (a[3] as u128); ret } } @@ -1557,11 +1537,11 @@ mod tests { #[test] fn u256_multiplication_bits_in_each_word() { // Put a digit in the least significant bit of each 64 bit word. - let u = 1_u128 << 64 | 1_u128; + let u = (1_u128 << 64) | 1_u128; let x = U256(u, u); // Put a digit in the second least significant bit of each 64 bit word. 
- let u = 2_u128 << 64 | 2_u128; + let u = (2_u128 << 64) | 2_u128; let y = U256(u, u); let (got, overflow) = x.overflowing_mul(y); @@ -1750,8 +1730,8 @@ mod tests { fn compact_target_from_upwards_difficulty_adjustment() { let params = Params::new(crate::Network::Signet); let starting_bits = CompactTarget::from_consensus(503543726); // Genesis compact target on Signet - let start_time: u64 = 1598918400; // Genesis block unix time - let end_time: u64 = 1599332177; // Block 2015 unix time + let start_time: i64 = 1598918400; // Genesis block unix time + let end_time: i64 = 1599332177; // Block 2015 unix time let timespan = end_time - start_time; // Faster than expected let adjustment = CompactTarget::from_next_work_required(starting_bits, timespan, ¶ms); let adjustment_bits = CompactTarget::from_consensus(503394215); // Block 2016 compact target @@ -1762,8 +1742,8 @@ mod tests { fn compact_target_from_downwards_difficulty_adjustment() { let params = Params::new(crate::Network::Signet); let starting_bits = CompactTarget::from_consensus(503394215); // Block 2016 compact target - let start_time: u64 = 1599332844; // Block 2016 unix time - let end_time: u64 = 1600591200; // Block 4031 unix time + let start_time: i64 = 1599332844; // Block 2016 unix time + let end_time: i64 = 1600591200; // Block 4031 unix time let timespan = end_time - start_time; // Slower than expected let adjustment = CompactTarget::from_next_work_required(starting_bits, timespan, ¶ms); let adjustment_bits = CompactTarget::from_consensus(503397348); // Block 4032 compact target @@ -1783,7 +1763,7 @@ mod tests { version: Version::ONE, prev_blockhash: BlockHash::from_byte_array([0; 32]), merkle_root: TxMerkleNode::from_byte_array([0; 32]), - time: 1599332177, + time: BlockTime::from_u32(1599332177), bits: epoch_start.bits, nonce: epoch_start.nonce, }; @@ -1805,7 +1785,7 @@ mod tests { version: Version::ONE, prev_blockhash: BlockHash::from_byte_array([0; 32]), merkle_root: TxMerkleNode::from_byte_array([0; 32]), - time: 1599332844, + time: BlockTime::from_u32(1599332844), bits: starting_bits, nonce: 0, }; @@ -1815,7 +1795,7 @@ mod tests { version: Version::ONE, prev_blockhash: BlockHash::from_byte_array([0; 32]), merkle_root: TxMerkleNode::from_byte_array([0; 32]), - time: 1600591200, + time: BlockTime::from_u32(1600591200), bits: starting_bits, nonce: 0, }; @@ -1829,7 +1809,18 @@ mod tests { fn compact_target_from_maximum_upward_difficulty_adjustment() { let params = Params::new(crate::Network::Signet); let starting_bits = CompactTarget::from_consensus(503403001); - let timespan = (0.2 * params.pow_target_timespan as f64) as u64; + let timespan = params.pow_target_timespan / 5; + let got = CompactTarget::from_next_work_required(starting_bits, timespan.into(), params); + let want = + Target::from_compact(starting_bits).min_transition_threshold().to_compact_lossy(); + assert_eq!(got, want); + } + + #[test] + fn compact_target_from_maximum_upward_difficulty_adjustment_with_negative_timespan() { + let params = Params::new(crate::Network::Signet); + let starting_bits = CompactTarget::from_consensus(503403001); + let timespan: i64 = -i64::from(params.pow_target_timespan); let got = CompactTarget::from_next_work_required(starting_bits, timespan, params); let want = Target::from_compact(starting_bits).min_transition_threshold().to_compact_lossy(); @@ -1841,7 +1832,7 @@ mod tests { let params = Params::new(crate::Network::Signet); let starting_bits = CompactTarget::from_consensus(403403001); // High difficulty for Signet let timespan = 5 
* params.pow_target_timespan; // Really slow. - let got = CompactTarget::from_next_work_required(starting_bits, timespan, ¶ms); + let got = CompactTarget::from_next_work_required(starting_bits, timespan.into(), ¶ms); let want = Target::from_compact(starting_bits).max_transition_threshold(params).to_compact_lossy(); assert_eq!(got, want); @@ -1852,7 +1843,7 @@ mod tests { let params = Params::new(crate::Network::Signet); let starting_bits = CompactTarget::from_consensus(503543726); // Genesis compact target on Signet let timespan = 5 * params.pow_target_timespan; // Really slow. - let got = CompactTarget::from_next_work_required(starting_bits, timespan, ¶ms); + let got = CompactTarget::from_next_work_required(starting_bits, timespan.into(), ¶ms); let want = params.max_attainable_target.to_compact_lossy(); assert_eq!(got, want); } diff --git a/bitcoin/src/psbt/error.rs b/bitcoin/src/psbt/error.rs index 26da3e4f67..dcf244ad38 100644 --- a/bitcoin/src/psbt/error.rs +++ b/bitcoin/src/psbt/error.rs @@ -1,5 +1,6 @@ // SPDX-License-Identifier: CC0-1.0 +use core::convert::Infallible; use core::fmt; use internals::write_err; @@ -107,7 +108,9 @@ pub enum Error { Io(io::Error), } -internals::impl_from_infallible!(Error); +impl From for Error { + fn from(never: Infallible) -> Self { match never {} } +} impl fmt::Display for Error { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { diff --git a/bitcoin/src/psbt/macros.rs b/bitcoin/src/psbt/macros.rs index 089d705b48..4044a5e527 100644 --- a/bitcoin/src/psbt/macros.rs +++ b/bitcoin/src/psbt/macros.rs @@ -113,15 +113,15 @@ macro_rules! impl_psbt_insert_pair { #[rustfmt::skip] macro_rules! psbt_insert_hash_pair { - (&mut $slf:ident.$map:ident <= $raw_key:ident|$raw_value:ident|$hash:path|$hash_type_error:path) => { + (&mut $slf:ident.$map:ident <= $raw_key:ident|$raw_value:ident|$hash:ident|$hash_type_error:path) => { if $raw_key.key_data.is_empty() { return Err($crate::psbt::Error::InvalidKey($raw_key)); } - let key_val: $hash = Deserialize::deserialize(&$raw_key.key_data)?; + let key_val: $hash::Hash = Deserialize::deserialize(&$raw_key.key_data)?; match $slf.$map.entry(key_val) { btree_map::Entry::Vacant(empty_key) => { let val: Vec = Deserialize::deserialize(&$raw_value)?; - if <$hash as hashes::GeneralHash>::hash(&val) != key_val { + if $hash::hash(&val) != key_val { return Err($crate::psbt::Error::InvalidPreimageHashPair { preimage: val.into_boxed_slice(), hash: Box::from(key_val.borrow()), diff --git a/bitcoin/src/psbt/map/input.rs b/bitcoin/src/psbt/map/input.rs index cfe9cfb467..66802623b1 100644 --- a/bitcoin/src/psbt/map/input.rs +++ b/bitcoin/src/psbt/map/input.rs @@ -69,11 +69,11 @@ const PSBT_IN_PROPRIETARY: u64 = 0xFC; #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] pub struct Input { /// The non-witness transaction this input spends from. Should only be - /// `Option::Some` for inputs which spend non-segwit outputs or - /// if it is unknown whether an input spends a segwit output. + /// `Option::Some` for inputs which spend non-SegWit outputs or + /// if it is unknown whether an input spends a SegWit output. pub non_witness_utxo: Option, /// The transaction output this input spends from. Should only be - /// `Option::Some` for inputs which spend segwit outputs, + /// `Option::Some` for inputs which spend SegWit outputs, /// including P2SH embedded ones. 
pub witness_utxo: Option, /// A map from public keys to their corresponding signature as would be @@ -317,22 +317,22 @@ impl Input { } PSBT_IN_RIPEMD160 => { psbt_insert_hash_pair! { - &mut self.ripemd160_preimages <= raw_key|raw_value|ripemd160::Hash|error::PsbtHash::Ripemd + &mut self.ripemd160_preimages <= raw_key|raw_value|ripemd160|error::PsbtHash::Ripemd } } PSBT_IN_SHA256 => { psbt_insert_hash_pair! { - &mut self.sha256_preimages <= raw_key|raw_value|sha256::Hash|error::PsbtHash::Sha256 + &mut self.sha256_preimages <= raw_key|raw_value|sha256|error::PsbtHash::Sha256 } } PSBT_IN_HASH160 => { psbt_insert_hash_pair! { - &mut self.hash160_preimages <= raw_key|raw_value|hash160::Hash|error::PsbtHash::Hash160 + &mut self.hash160_preimages <= raw_key|raw_value|hash160|error::PsbtHash::Hash160 } } PSBT_IN_HASH256 => { psbt_insert_hash_pair! { - &mut self.hash256_preimages <= raw_key|raw_value|sha256d::Hash|error::PsbtHash::Hash256 + &mut self.hash256_preimages <= raw_key|raw_value|sha256d|error::PsbtHash::Hash256 } } PSBT_IN_TAP_KEY_SIG => { diff --git a/bitcoin/src/psbt/mod.rs b/bitcoin/src/psbt/mod.rs index c9eac7165b..6f0ce3fc4b 100644 --- a/bitcoin/src/psbt/mod.rs +++ b/bitcoin/src/psbt/mod.rs @@ -13,6 +13,7 @@ mod map; pub mod raw; pub mod serialize; +use core::convert::Infallible; use core::{cmp, fmt}; #[cfg(feature = "std")] use std::collections::{HashMap, HashSet}; @@ -134,6 +135,7 @@ impl Psbt { /// An alias for [`extract_tx_fee_rate_limit`]. /// /// [`extract_tx_fee_rate_limit`]: Psbt::extract_tx_fee_rate_limit + #[allow(clippy::result_large_err)] // The PSBT returned in `SendingToomuch` is large. pub fn extract_tx(self) -> Result { self.internal_extract_tx_with_fee_rate_limit(Self::DEFAULT_MAX_FEE_RATE) } @@ -145,6 +147,7 @@ impl Psbt { /// [`ExtractTxError`] variants will contain either the [`Psbt`] itself or the [`Transaction`] /// that was extracted. These can be extracted from the Errors in order to recover. /// See the error documentation for info on the variants. In general, it covers large fees. + #[allow(clippy::result_large_err)] // The PSBT returned in `SendingToomuch` is large. pub fn extract_tx_fee_rate_limit(self) -> Result { self.internal_extract_tx_with_fee_rate_limit(Self::DEFAULT_MAX_FEE_RATE) } @@ -156,6 +159,7 @@ impl Psbt { /// See [`extract_tx`]. /// /// [`extract_tx`]: Psbt::extract_tx + #[allow(clippy::result_large_err)] // The PSBT returned in `SendingToomuch` is large. pub fn extract_tx_with_fee_rate_limit( self, max_fee_rate: FeeRate, @@ -183,6 +187,7 @@ impl Psbt { } #[inline] + #[allow(clippy::result_large_err)] // The PSBT returned in `SendingToomuch` is large. fn internal_extract_tx_with_fee_rate_limit( self, max_fee_rate: FeeRate, @@ -255,8 +260,8 @@ impl Psbt { == derivation2[derivation2.len() - derivation1.len()..]) { continue; - } else if derivation2[..] - == derivation1[derivation1.len() - derivation2.len()..] + } else if derivation2.len() <= derivation1.len() + && derivation2[..] == derivation1[derivation1.len() - derivation2.len()..] 
{ entry.insert((fingerprint1, derivation1)); continue; @@ -422,6 +427,8 @@ impl Psbt { k.get_key(&KeyRequest::Bip32(key_source.clone()), secp) { secret_key + } else if let Ok(Some(sk)) = k.get_key(&KeyRequest::XOnlyPubkey(xonly), secp) { + sk } else { continue; }; @@ -444,9 +451,9 @@ impl Psbt { .to_inner(); #[cfg(feature = "rand-std")] - let signature = secp.sign_schnorr(&msg, &key_pair); + let signature = secp.sign_schnorr(msg.as_ref(), &key_pair); #[cfg(not(feature = "rand-std"))] - let signature = secp.sign_schnorr_no_aux_rand(&msg, &key_pair); + let signature = secp.sign_schnorr_no_aux_rand(msg.as_ref(), &key_pair); let signature = taproot::Signature { signature, sighash_type }; input.tap_key_sig = Some(signature); @@ -471,9 +478,9 @@ impl Psbt { self.sighash_taproot(input_index, cache, Some(lh))?; #[cfg(feature = "rand-std")] - let signature = secp.sign_schnorr(&msg, &key_pair); + let signature = secp.sign_schnorr(msg.as_ref(), &key_pair); #[cfg(not(feature = "rand-std"))] - let signature = secp.sign_schnorr_no_aux_rand(&msg, &key_pair); + let signature = secp.sign_schnorr_no_aux_rand(msg.as_ref(), &key_pair); let signature = taproot::Signature { signature, sighash_type }; input.tap_script_sigs.insert((xonly, lh), signature); @@ -627,8 +634,11 @@ impl Psbt { /// Gets the input at `input_index` after checking that it is a valid index. fn checked_input(&self, input_index: usize) -> Result<&Input, IndexOutOfBoundsError> { - self.check_index_is_within_bounds(input_index)?; - Ok(&self.inputs[input_index]) + // No `?` operator in const context. + match self.check_index_is_within_bounds(input_index) { + Ok(_) => Ok(&self.inputs[input_index]), + Err(e) => Err(e), + } } /// Checks `input_index` is within bounds for the PSBT `inputs` array and @@ -666,7 +676,7 @@ impl Psbt { let utxo = self.spend_utxo(input_index)?; let spk = utxo.script_pubkey.clone(); - // Anything that is not segwit and is not p2sh is `Bare`. + // Anything that is not SegWit and is not p2sh is `Bare`. if !(spk.is_witness_program() || spk.is_p2sh()) { return Ok(OutputType::Bare); } @@ -709,15 +719,15 @@ impl Psbt { /// - [`Error::NegativeFee`] if calculated value is negative. /// - [`Error::FeeOverflow`] if an integer overflow occurs. pub fn fee(&self) -> Result { - let mut inputs: u64 = 0; + let mut inputs = Amount::ZERO; for utxo in self.iter_funding_utxos() { - inputs = inputs.checked_add(utxo?.value.to_sat()).ok_or(Error::FeeOverflow)?; + inputs = inputs.checked_add(utxo?.value).ok_or(Error::FeeOverflow)?; } - let mut outputs: u64 = 0; + let mut outputs = Amount::ZERO; for out in &self.unsigned_tx.output { - outputs = outputs.checked_add(out.value.to_sat()).ok_or(Error::FeeOverflow)?; + outputs = outputs.checked_add(out.value).ok_or(Error::FeeOverflow)?; } - inputs.checked_sub(outputs).map(Amount::from_sat).ok_or(Error::NegativeFee) + inputs.checked_sub(outputs).ok_or(Error::NegativeFee) } } @@ -729,6 +739,8 @@ pub enum KeyRequest { Pubkey(PublicKey), /// Request a private key using BIP-32 fingerprint and derivation path. Bip32(KeySource), + /// Request a private key using the associated x-only public key. + XOnlyPubkey(XOnlyPublicKey), } /// Trait to get a private key from a key request, key is then used to sign an input. 
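To illustrate the new `KeyRequest::XOnlyPubkey` variant together with the `GetKey` implementation for maps keyed by x-only public keys added above, here is a minimal sketch. It is not part of the diff; the import paths and the `expect` handling are assumptions made for the example.

use std::collections::BTreeMap;

use bitcoin::psbt::{GetKey, KeyRequest};
use bitcoin::secp256k1::Secp256k1;
use bitcoin::{PrivateKey, XOnlyPublicKey};

/// Resolves a signing key for a Taproot input from a map keyed by x-only public keys.
fn lookup_xonly(
    keys: &BTreeMap<XOnlyPublicKey, PrivateKey>,
    xonly: XOnlyPublicKey,
) -> Option<PrivateKey> {
    let secp = Secp256k1::new();
    // The map answers `XOnlyPubkey` requests directly; when asked for a full `PublicKey`
    // it reconciles parity by negating the stored key if necessary (see the impl above).
    keys.get_key(&KeyRequest::XOnlyPubkey(xonly), &secp)
        .expect("x-only requests are supported by this map")
}

A map like this can be handed to any signer that accepts a `GetKey` implementation, for example `Psbt::sign`, which now issues `KeyRequest::XOnlyPubkey` lookups for Taproot inputs as shown in the hunk above.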
@@ -760,6 +772,7 @@ impl GetKey for Xpriv { ) -> Result, Self::Error> { match key_request { KeyRequest::Pubkey(_) => Err(GetKeyError::NotSupported), + KeyRequest::XOnlyPubkey(_) => Err(GetKeyError::NotSupported), KeyRequest::Bip32((fingerprint, path)) => { let key = if self.fingerprint(secp) == *fingerprint { let k = self.derive_xpriv(secp, &path); @@ -823,7 +836,7 @@ impl_get_key_for_set!(BTreeSet); impl_get_key_for_set!(HashSet); #[rustfmt::skip] -macro_rules! impl_get_key_for_map { +macro_rules! impl_get_key_for_pubkey_map { ($map:ident) => { impl GetKey for $map { @@ -836,13 +849,67 @@ impl GetKey for $map { ) -> Result, Self::Error> { match key_request { KeyRequest::Pubkey(pk) => Ok(self.get(&pk).cloned()), + KeyRequest::XOnlyPubkey(xonly) => { + let pubkey_even = PublicKey::new(xonly.public_key(secp256k1::Parity::Even)); + let key = self.get(&pubkey_even).cloned(); + + if key.is_some() { + return Ok(key); + } + + let pubkey_odd = PublicKey::new(xonly.public_key(secp256k1::Parity::Odd)); + if let Some(priv_key) = self.get(&pubkey_odd).copied() { + let negated_priv_key = priv_key.negate(); + return Ok(Some(negated_priv_key)); + } + + Ok(None) + }, KeyRequest::Bip32(_) => Err(GetKeyError::NotSupported), } } }}} -impl_get_key_for_map!(BTreeMap); +impl_get_key_for_pubkey_map!(BTreeMap); #[cfg(feature = "std")] -impl_get_key_for_map!(HashMap); +impl_get_key_for_pubkey_map!(HashMap); + +#[rustfmt::skip] +macro_rules! impl_get_key_for_xonly_map { + ($map:ident) => { + +impl GetKey for $map { + type Error = GetKeyError; + + fn get_key( + &self, + key_request: &KeyRequest, + secp: &Secp256k1, + ) -> Result, Self::Error> { + match key_request { + KeyRequest::XOnlyPubkey(xonly) => Ok(self.get(xonly).cloned()), + KeyRequest::Pubkey(pk) => { + let (xonly, parity) = pk.inner.x_only_public_key(); + + if let Some(mut priv_key) = self.get(&XOnlyPublicKey::from(xonly)).cloned() { + let computed_pk = priv_key.public_key(&secp); + let (_, computed_parity) = computed_pk.inner.x_only_public_key(); + + if computed_parity != parity { + priv_key = priv_key.negate(); + } + + return Ok(Some(priv_key)); + } + + Ok(None) + }, + KeyRequest::Bip32(_) => Err(GetKeyError::NotSupported), + } + } +}}} +impl_get_key_for_xonly_map!(BTreeMap); +#[cfg(feature = "std")] +impl_get_key_for_xonly_map!(HashMap); /// Errors when getting a key. #[derive(Debug, Clone, PartialEq, Eq)] @@ -854,7 +921,9 @@ pub enum GetKeyError { NotSupported, } -internals::impl_from_infallible!(GetKeyError); +impl From for GetKeyError { + fn from(never: Infallible) -> Self { match never {} } +} impl fmt::Display for GetKeyError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { @@ -894,11 +963,11 @@ pub enum OutputType { Wpkh, /// A pay-to-witness-script-hash output (P2WSH). Wsh, - /// A nested segwit output, pay-to-witness-pubkey-hash nested in a pay-to-script-hash. + /// A nested SegWit output, pay-to-witness-pubkey-hash nested in a pay-to-script-hash. ShWpkh, - /// A nested segwit output, pay-to-witness-script-hash nested in a pay-to-script-hash. + /// A nested SegWit output, pay-to-witness-script-hash nested in a pay-to-script-hash. ShWsh, - /// A pay-to-script-hash output excluding wrapped segwit (P2SH). + /// A pay-to-script-hash output excluding wrapped SegWit (P2SH). Sh, /// A Taproot output (P2TR). Tr, @@ -951,7 +1020,7 @@ pub enum SignError { NotEcdsa, /// The `scriptPubkey` is not a P2WPKH script. NotWpkh, - /// Sighash computation error (segwit v0 input). + /// Sighash computation error (SegWit v0 input). 
SegwitV0Sighash(transaction::InputsIndexError), /// Sighash computation error (p2wpkh input). P2wpkhSighash(sighash::P2wpkhError), @@ -967,7 +1036,9 @@ pub enum SignError { Unsupported, } -internals::impl_from_infallible!(SignError); +impl From for SignError { + fn from(never: Infallible) -> Self { match never {} } +} impl fmt::Display for SignError { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { @@ -983,7 +1054,7 @@ impl fmt::Display for SignError { MismatchedAlgoKey => write!(f, "signing algorithm and key type does not match"), NotEcdsa => write!(f, "attempted to ECDSA sign an non-ECDSA input"), NotWpkh => write!(f, "the scriptPubkey is not a P2WPKH script"), - SegwitV0Sighash(ref e) => write_err!(f, "segwit v0 sighash"; e), + SegwitV0Sighash(ref e) => write_err!(f, "SegWit v0 sighash"; e), P2wpkhSighash(ref e) => write_err!(f, "p2wpkh sighash"; e), TaprootError(ref e) => write_err!(f, "Taproot sighash"; e), UnknownOutputType => write!(f, "unable to determine the output type"), @@ -1056,7 +1127,9 @@ pub enum ExtractTxError { }, } -internals::impl_from_infallible!(ExtractTxError); +impl From for ExtractTxError { + fn from(never: Infallible) -> Self { match never {} } +} impl fmt::Display for ExtractTxError { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { @@ -1108,7 +1181,9 @@ pub enum IndexOutOfBoundsError { }, } -internals::impl_from_infallible!(IndexOutOfBoundsError); +impl From for IndexOutOfBoundsError { + fn from(never: Infallible) -> Self { match never {} } +} impl fmt::Display for IndexOutOfBoundsError { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { @@ -1142,6 +1217,7 @@ impl std::error::Error for IndexOutOfBoundsError { #[cfg(feature = "base64")] mod display_from_str { + use core::convert::Infallible; use core::fmt; use core::str::FromStr; @@ -1161,7 +1237,9 @@ mod display_from_str { Base64Encoding(::base64::DecodeError), } - internals::impl_from_infallible!(PsbtParseError); + impl From for PsbtParseError { + fn from(never: Infallible) -> Self { match never {} } + } impl fmt::Display for PsbtParseError { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { @@ -1209,7 +1287,14 @@ mod tests { use hashes::{hash160, ripemd160, sha256}; use hex::{test_hex_unwrap as hex, FromHex}; #[cfg(feature = "rand-std")] - use secp256k1::{All, SecretKey}; + use { + crate::address::script_pubkey::ScriptBufExt as _, + crate::bip32::{DerivationPath, Fingerprint}, + crate::locktime, + crate::witness_version::WitnessVersion, + crate::WitnessProgram, + secp256k1::{All, SecretKey}, + }; use super::*; use crate::address::script_pubkey::ScriptExt as _; @@ -1249,7 +1334,7 @@ mod tests { witness: Witness::default(), }], output: vec![TxOut { - value: Amount::from_sat(output), + value: Amount::from_sat(output).unwrap(), script_pubkey: ScriptBuf::from_hex( "a9143545e6e33b832c47050f24d3eeb93c9c03948bc787", ) @@ -1263,7 +1348,7 @@ mod tests { inputs: vec![Input { witness_utxo: Some(TxOut { - value: Amount::from_sat(input), + value: Amount::from_sat(input).unwrap(), script_pubkey: ScriptBuf::from_hex( "a914339725ba21efd62ac753a9bcd067d6c7a6a39d0587", ) @@ -1297,7 +1382,7 @@ mod tests { #[test] fn psbt_uncompressed_key() { - let psbt: Psbt = 
hex_psbt("70736274ff01003302000000010000000000000000000000000000000000000000000000000000000000000000ffffffff00ffffffff000000000000420204bb0d5d0cca36e7b9c80f63bc04c1240babb83bcd2803ef7ac8b6e2af594291daec281e856c98d210c5ab14dfd5828761f8ee7d5f45ca21ad3e4c4b41b747a3a047304402204f67e2afb76142d44fae58a2495d33a3419daa26cd0db8d04f3452b63289ac0f022010762a9fb67e94cc5cad9026f6dc99ff7f070f4278d30fbc7d0c869dd38c7fe70100").unwrap(); + let psbt = hex_psbt("70736274ff01003302000000010000000000000000000000000000000000000000000000000000000000000000ffffffff00ffffffff000000000000420204bb0d5d0cca36e7b9c80f63bc04c1240babb83bcd2803ef7ac8b6e2af594291daec281e856c98d210c5ab14dfd5828761f8ee7d5f45ca21ad3e4c4b41b747a3a047304402204f67e2afb76142d44fae58a2495d33a3419daa26cd0db8d04f3452b63289ac0f022010762a9fb67e94cc5cad9026f6dc99ff7f070f4278d30fbc7d0c869dd38c7fe70100").unwrap(); assert!(psbt.inputs[0].partial_sigs.len() == 1); let pk = psbt.inputs[0].partial_sigs.iter().next().unwrap().0; assert!(!pk.compressed); @@ -1410,14 +1495,14 @@ mod tests { }], output: vec![ TxOut { - value: Amount::from_sat(99_999_699), + value: Amount::from_sat_u32(99_999_699), script_pubkey: ScriptBuf::from_hex( "76a914d0c59903c5bac2868760e90fd521a4665aa7652088ac", ) .unwrap(), }, TxOut { - value: Amount::from_sat(100_000_000), + value: Amount::from_sat_u32(100_000_000), script_pubkey: ScriptBuf::from_hex( "a9143545e6e33b832c47050f24d3eeb93c9c03948bc787", ) @@ -1452,13 +1537,13 @@ mod tests { #[test] fn deserialize_and_serialize_psbt_with_two_partial_sigs() { let hex = "70736274ff0100890200000001207ae985d787dfe6143d5c58fad79cc7105e0e799fcf033b7f2ba17e62d7b3200000000000ffffffff02563d03000000000022002019899534b9a011043c0dd57c3ff9a381c3522c5f27c6a42319085b56ca543a1d6adc020000000000220020618b47a07ebecca4e156edb1b9ea7c24bdee0139fc049237965ffdaf56d5ee73000000000001012b801a0600000000002200201148e93e9315e37dbed2121be5239257af35adc03ffdfc5d914b083afa44dab82202025fe7371376d53cf8a2783917c28bf30bd690b0a4d4a207690093ca2b920ee076473044022007e06b362e89912abd4661f47945430739b006a85d1b2a16c01dc1a4bd07acab022061576d7aa834988b7ab94ef21d8eebd996ea59ea20529a19b15f0c9cebe3d8ac01220202b3fe93530020a8294f0e527e33fbdff184f047eb6b5a1558a352f62c29972f8a473044022002787f926d6817504431ee281183b8119b6845bfaa6befae45e13b6d430c9d2f02202859f149a6cd26ae2f03a107e7f33c7d91730dade305fe077bae677b5d44952a01010547522102b3fe93530020a8294f0e527e33fbdff184f047eb6b5a1558a352f62c29972f8a21025fe7371376d53cf8a2783917c28bf30bd690b0a4d4a207690093ca2b920ee07652ae0001014752210283ef76537f2d58ae3aa3a4bd8ae41c3f230ccadffb1a0bd3ca504d871cff05e7210353d79cc0cb1396f4ce278d005f16d948e02a6aec9ed1109f13747ecb1507b37b52ae00010147522102b3937241777b6665e0d694e52f9c1b188433641df852da6fc42187b5d8a368a321034cdd474f01cc5aa7ff834ad8bcc882a87e854affc775486bc2a9f62e8f49bd7852ae00"; - let psbt: Psbt = hex_psbt(hex).unwrap(); + let psbt = hex_psbt(hex).unwrap(); assert_eq!(hex, psbt.serialize_hex()); } #[cfg(feature = "serde")] #[test] - fn test_serde_psbt() { + fn serde_psbt() { //! Create a full PSBT value with various fields filled and make sure it can be JSONized. 
use hashes::sha256d; @@ -1483,7 +1568,7 @@ mod tests { )]), }], output: vec![TxOut { - value: Amount::from_sat(190_303_501_938), + value: Amount::from_sat(190_303_501_938).unwrap(), script_pubkey: ScriptBuf::from_hex( "a914339725ba21efd62ac753a9bcd067d6c7a6a39d0587", ) @@ -1534,7 +1619,7 @@ mod tests { Input { non_witness_utxo: Some(tx), witness_utxo: Some(TxOut { - value: Amount::from_sat(190_303_501_938), + value: Amount::from_sat(190_303_501_938).unwrap(), script_pubkey: ScriptBuf::from_hex("a914339725ba21efd62ac753a9bcd067d6c7a6a39d0587").unwrap(), }), sighash_type: Some("SIGHASH_SINGLE|SIGHASH_ANYONECANPAY".parse::().unwrap()), @@ -1659,11 +1744,11 @@ mod tests { ], output: vec![ TxOut { - value: Amount::from_sat(99_999_699), + value: Amount::from_sat_u32(99_999_699), script_pubkey: ScriptBuf::from_hex("76a914d0c59903c5bac2868760e90fd521a4665aa7652088ac").unwrap(), }, TxOut { - value: Amount::from_sat(100_000_000), + value: Amount::from_sat_u32(100_000_000), script_pubkey: ScriptBuf::from_hex("a9143545e6e33b832c47050f24d3eeb93c9c03948bc787").unwrap(), }, ], @@ -1706,11 +1791,11 @@ mod tests { ], output: vec![ TxOut { - value: Amount::from_sat(200_000_000), + value: Amount::from_sat_u32(200_000_000), script_pubkey: ScriptBuf::from_hex("76a91485cff1097fd9e008bb34af709c62197b38978a4888ac").unwrap(), }, TxOut { - value: Amount::from_sat(190_303_501_938), + value: Amount::from_sat(190_303_501_938).unwrap(), script_pubkey: ScriptBuf::from_hex("a914339725ba21efd62ac753a9bcd067d6c7a6a39d0587").unwrap(), }, ], @@ -1744,7 +1829,7 @@ mod tests { #[test] fn valid_vector_2() { - let psbt: Psbt = hex_psbt("70736274ff0100a00200000002ab0949a08c5af7c49b8212f417e2f15ab3f5c33dcf153821a8139f877a5b7be40000000000feffffffab0949a08c5af7c49b8212f417e2f15ab3f5c33dcf153821a8139f877a5b7be40100000000feffffff02603bea0b000000001976a914768a40bbd740cbe81d988e71de2a4d5c71396b1d88ac8e240000000000001976a9146f4620b553fa095e721b9ee0efe9fa039cca459788ac000000000001076a47304402204759661797c01b036b25928948686218347d89864b719e1f7fcf57d1e511658702205309eabf56aa4d8891ffd111fdf1336f3a29da866d7f8486d75546ceedaf93190121035cdc61fc7ba971c0b501a646a2a83b102cb43881217ca682dc86e2d73fa882920001012000e1f5050000000017a9143545e6e33b832c47050f24d3eeb93c9c03948bc787010416001485d13537f2e265405a34dbafa9e3dda01fb82308000000").unwrap(); + let psbt = hex_psbt("70736274ff0100a00200000002ab0949a08c5af7c49b8212f417e2f15ab3f5c33dcf153821a8139f877a5b7be40000000000feffffffab0949a08c5af7c49b8212f417e2f15ab3f5c33dcf153821a8139f877a5b7be40100000000feffffff02603bea0b000000001976a914768a40bbd740cbe81d988e71de2a4d5c71396b1d88ac8e240000000000001976a9146f4620b553fa095e721b9ee0efe9fa039cca459788ac000000000001076a47304402204759661797c01b036b25928948686218347d89864b719e1f7fcf57d1e511658702205309eabf56aa4d8891ffd111fdf1336f3a29da866d7f8486d75546ceedaf93190121035cdc61fc7ba971c0b501a646a2a83b102cb43881217ca682dc86e2d73fa882920001012000e1f5050000000017a9143545e6e33b832c47050f24d3eeb93c9c03948bc787010416001485d13537f2e265405a34dbafa9e3dda01fb82308000000").unwrap(); assert_eq!(psbt.inputs.len(), 2); assert_eq!(psbt.outputs.len(), 2); @@ -1769,7 +1854,7 @@ mod tests { #[test] fn valid_vector_3() { - let psbt: Psbt = 
hex_psbt("70736274ff0100750200000001268171371edff285e937adeea4b37b78000c0566cbb3ad64641713ca42171bf60000000000feffffff02d3dff505000000001976a914d0c59903c5bac2868760e90fd521a4665aa7652088ac00e1f5050000000017a9143545e6e33b832c47050f24d3eeb93c9c03948bc787b32e1300000100fda5010100000000010289a3c71eab4d20e0371bbba4cc698fa295c9463afa2e397f8533ccb62f9567e50100000017160014be18d152a9b012039daf3da7de4f53349eecb985ffffffff86f8aa43a71dff1448893a530a7237ef6b4608bbb2dd2d0171e63aec6a4890b40100000017160014fe3e9ef1a745e974d902c4355943abcb34bd5353ffffffff0200c2eb0b000000001976a91485cff1097fd9e008bb34af709c62197b38978a4888ac72fef84e2c00000017a914339725ba21efd62ac753a9bcd067d6c7a6a39d05870247304402202712be22e0270f394f568311dc7ca9a68970b8025fdd3b240229f07f8a5f3a240220018b38d7dcd314e734c9276bd6fb40f673325bc4baa144c800d2f2f02db2765c012103d2e15674941bad4a996372cb87e1856d3652606d98562fe39c5e9e7e413f210502483045022100d12b852d85dcd961d2f5f4ab660654df6eedcc794c0c33ce5cc309ffb5fce58d022067338a8e0e1725c197fb1a88af59f51e44e4255b20167c8684031c05d1f2592a01210223b72beef0965d10be0778efecd61fcac6f79a4ea169393380734464f84f2ab30000000001030401000000000000").unwrap(); + let psbt = hex_psbt("70736274ff0100750200000001268171371edff285e937adeea4b37b78000c0566cbb3ad64641713ca42171bf60000000000feffffff02d3dff505000000001976a914d0c59903c5bac2868760e90fd521a4665aa7652088ac00e1f5050000000017a9143545e6e33b832c47050f24d3eeb93c9c03948bc787b32e1300000100fda5010100000000010289a3c71eab4d20e0371bbba4cc698fa295c9463afa2e397f8533ccb62f9567e50100000017160014be18d152a9b012039daf3da7de4f53349eecb985ffffffff86f8aa43a71dff1448893a530a7237ef6b4608bbb2dd2d0171e63aec6a4890b40100000017160014fe3e9ef1a745e974d902c4355943abcb34bd5353ffffffff0200c2eb0b000000001976a91485cff1097fd9e008bb34af709c62197b38978a4888ac72fef84e2c00000017a914339725ba21efd62ac753a9bcd067d6c7a6a39d05870247304402202712be22e0270f394f568311dc7ca9a68970b8025fdd3b240229f07f8a5f3a240220018b38d7dcd314e734c9276bd6fb40f673325bc4baa144c800d2f2f02db2765c012103d2e15674941bad4a996372cb87e1856d3652606d98562fe39c5e9e7e413f210502483045022100d12b852d85dcd961d2f5f4ab660654df6eedcc794c0c33ce5cc309ffb5fce58d022067338a8e0e1725c197fb1a88af59f51e44e4255b20167c8684031c05d1f2592a01210223b72beef0965d10be0778efecd61fcac6f79a4ea169393380734464f84f2ab30000000001030401000000000000").unwrap(); assert_eq!(psbt.inputs.len(), 1); assert_eq!(psbt.outputs.len(), 2); @@ -1789,7 +1874,7 @@ mod tests { #[test] fn valid_vector_4() { - let psbt: Psbt = 
hex_psbt("70736274ff0100a00200000002ab0949a08c5af7c49b8212f417e2f15ab3f5c33dcf153821a8139f877a5b7be40000000000feffffffab0949a08c5af7c49b8212f417e2f15ab3f5c33dcf153821a8139f877a5b7be40100000000feffffff02603bea0b000000001976a914768a40bbd740cbe81d988e71de2a4d5c71396b1d88ac8e240000000000001976a9146f4620b553fa095e721b9ee0efe9fa039cca459788ac00000000000100df0200000001268171371edff285e937adeea4b37b78000c0566cbb3ad64641713ca42171bf6000000006a473044022070b2245123e6bf474d60c5b50c043d4c691a5d2435f09a34a7662a9dc251790a022001329ca9dacf280bdf30740ec0390422422c81cb45839457aeb76fc12edd95b3012102657d118d3357b8e0f4c2cd46db7b39f6d9c38d9a70abcb9b2de5dc8dbfe4ce31feffffff02d3dff505000000001976a914d0c59903c5bac2868760e90fd521a4665aa7652088ac00e1f5050000000017a9143545e6e33b832c47050f24d3eeb93c9c03948bc787b32e13000001012000e1f5050000000017a9143545e6e33b832c47050f24d3eeb93c9c03948bc787010416001485d13537f2e265405a34dbafa9e3dda01fb8230800220202ead596687ca806043edc3de116cdf29d5e9257c196cd055cf698c8d02bf24e9910b4a6ba670000008000000080020000800022020394f62be9df19952c5587768aeb7698061ad2c4a25c894f47d8c162b4d7213d0510b4a6ba6700000080010000800200008000").unwrap(); + let psbt = hex_psbt("70736274ff0100a00200000002ab0949a08c5af7c49b8212f417e2f15ab3f5c33dcf153821a8139f877a5b7be40000000000feffffffab0949a08c5af7c49b8212f417e2f15ab3f5c33dcf153821a8139f877a5b7be40100000000feffffff02603bea0b000000001976a914768a40bbd740cbe81d988e71de2a4d5c71396b1d88ac8e240000000000001976a9146f4620b553fa095e721b9ee0efe9fa039cca459788ac00000000000100df0200000001268171371edff285e937adeea4b37b78000c0566cbb3ad64641713ca42171bf6000000006a473044022070b2245123e6bf474d60c5b50c043d4c691a5d2435f09a34a7662a9dc251790a022001329ca9dacf280bdf30740ec0390422422c81cb45839457aeb76fc12edd95b3012102657d118d3357b8e0f4c2cd46db7b39f6d9c38d9a70abcb9b2de5dc8dbfe4ce31feffffff02d3dff505000000001976a914d0c59903c5bac2868760e90fd521a4665aa7652088ac00e1f5050000000017a9143545e6e33b832c47050f24d3eeb93c9c03948bc787b32e13000001012000e1f5050000000017a9143545e6e33b832c47050f24d3eeb93c9c03948bc787010416001485d13537f2e265405a34dbafa9e3dda01fb8230800220202ead596687ca806043edc3de116cdf29d5e9257c196cd055cf698c8d02bf24e9910b4a6ba670000008000000080020000800022020394f62be9df19952c5587768aeb7698061ad2c4a25c894f47d8c162b4d7213d0510b4a6ba6700000080010000800200008000").unwrap(); assert_eq!(psbt.inputs.len(), 2); assert_eq!(psbt.outputs.len(), 2); @@ -1815,7 +1900,7 @@ mod tests { #[test] fn valid_vector_5() { - let psbt: Psbt = hex_psbt("70736274ff0100550200000001279a2323a5dfb51fc45f220fa58b0fc13e1e3342792a85d7e36cd6333b5cbc390000000000ffffffff01a05aea0b000000001976a914ffe9c0061097cc3b636f2cb0460fa4fc427d2b4588ac0000000000010120955eea0b0000000017a9146345200f68d189e1adc0df1c4d16ea8f14c0dbeb87220203b1341ccba7683b6af4f1238cd6e97e7167d569fac47f1e48d47541844355bd4646304302200424b58effaaa694e1559ea5c93bbfd4a89064224055cdf070b6771469442d07021f5c8eb0fea6516d60b8acb33ad64ede60e8785bfb3aa94b99bdf86151db9a9a010104220020771fd18ad459666dd49f3d564e3dbc42f4c84774e360ada16816a8ed488d5681010547522103b1341ccba7683b6af4f1238cd6e97e7167d569fac47f1e48d47541844355bd462103de55d1e1dac805e3f8a58c1fbf9b94c02f3dbaafe127fefca4995f26f82083bd52ae220603b1341ccba7683b6af4f1238cd6e97e7167d569fac47f1e48d47541844355bd4610b4a6ba67000000800000008004000080220603de55d1e1dac805e3f8a58c1fbf9b94c02f3dbaafe127fefca4995f26f82083bd10b4a6ba670000008000000080050000800000").unwrap(); + let psbt = 
hex_psbt("70736274ff0100550200000001279a2323a5dfb51fc45f220fa58b0fc13e1e3342792a85d7e36cd6333b5cbc390000000000ffffffff01a05aea0b000000001976a914ffe9c0061097cc3b636f2cb0460fa4fc427d2b4588ac0000000000010120955eea0b0000000017a9146345200f68d189e1adc0df1c4d16ea8f14c0dbeb87220203b1341ccba7683b6af4f1238cd6e97e7167d569fac47f1e48d47541844355bd4646304302200424b58effaaa694e1559ea5c93bbfd4a89064224055cdf070b6771469442d07021f5c8eb0fea6516d60b8acb33ad64ede60e8785bfb3aa94b99bdf86151db9a9a010104220020771fd18ad459666dd49f3d564e3dbc42f4c84774e360ada16816a8ed488d5681010547522103b1341ccba7683b6af4f1238cd6e97e7167d569fac47f1e48d47541844355bd462103de55d1e1dac805e3f8a58c1fbf9b94c02f3dbaafe127fefca4995f26f82083bd52ae220603b1341ccba7683b6af4f1238cd6e97e7167d569fac47f1e48d47541844355bd4610b4a6ba67000000800000008004000080220603de55d1e1dac805e3f8a58c1fbf9b94c02f3dbaafe127fefca4995f26f82083bd10b4a6ba670000008000000080050000800000").unwrap(); assert_eq!(psbt.inputs.len(), 1); assert_eq!(psbt.outputs.len(), 1); @@ -1837,7 +1922,7 @@ mod tests { #[test] fn valid_vector_6() { - let psbt: Psbt = hex_psbt("70736274ff01003f0200000001ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff0000000000ffffffff010000000000000000036a010000000000000a0f0102030405060708090f0102030405060708090a0b0c0d0e0f0000").unwrap(); + let psbt = hex_psbt("70736274ff01003f0200000001ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff0000000000ffffffff010000000000000000036a010000000000000a0f0102030405060708090f0102030405060708090a0b0c0d0e0f0000").unwrap(); assert_eq!(psbt.inputs.len(), 1); assert_eq!(psbt.outputs.len(), 1); @@ -1992,11 +2077,11 @@ mod tests { ], output: vec![ TxOut { - value: Amount::from_sat(99_999_699), + value: Amount::from_sat_u32(99_999_699), script_pubkey: ScriptBuf::from_hex("76a914d0c59903c5bac2868760e90fd521a4665aa7652088ac").unwrap(), }, TxOut { - value: Amount::from_sat(100_000_000), + value: Amount::from_sat_u32(100_000_000), script_pubkey: ScriptBuf::from_hex("a9143545e6e33b832c47050f24d3eeb93c9c03948bc787").unwrap(), }, ], @@ -2039,11 +2124,11 @@ mod tests { ], output: vec![ TxOut { - value: Amount::from_sat(200_000_000), + value: Amount::from_sat_u32(200_000_000), script_pubkey: ScriptBuf::from_hex("76a91485cff1097fd9e008bb34af709c62197b38978a4888ac").unwrap(), }, TxOut { - value: Amount::from_sat(190_303_501_938), + value: Amount::from_sat(190_303_501_938).unwrap(), script_pubkey: ScriptBuf::from_hex("a914339725ba21efd62ac753a9bcd067d6c7a6a39d0587").unwrap(), }, ], @@ -2063,7 +2148,7 @@ mod tests { unserialized.inputs[0].hash160_preimages = hash160_preimages; unserialized.inputs[0].sha256_preimages = sha256_preimages; - let rtt: Psbt = hex_psbt(&unserialized.serialize_hex()).unwrap(); + let rtt = hex_psbt(&unserialized.serialize_hex()).unwrap(); assert_eq!(rtt, unserialized); // Now add an ripemd160 with incorrect preimage @@ -2078,13 +2163,13 @@ mod tests { #[test] fn serialize_and_deserialize_proprietary() { - let mut psbt: Psbt = 
hex_psbt("70736274ff0100a00200000002ab0949a08c5af7c49b8212f417e2f15ab3f5c33dcf153821a8139f877a5b7be40000000000feffffffab0949a08c5af7c49b8212f417e2f15ab3f5c33dcf153821a8139f877a5b7be40100000000feffffff02603bea0b000000001976a914768a40bbd740cbe81d988e71de2a4d5c71396b1d88ac8e240000000000001976a9146f4620b553fa095e721b9ee0efe9fa039cca459788ac000000000001076a47304402204759661797c01b036b25928948686218347d89864b719e1f7fcf57d1e511658702205309eabf56aa4d8891ffd111fdf1336f3a29da866d7f8486d75546ceedaf93190121035cdc61fc7ba971c0b501a646a2a83b102cb43881217ca682dc86e2d73fa882920001012000e1f5050000000017a9143545e6e33b832c47050f24d3eeb93c9c03948bc787010416001485d13537f2e265405a34dbafa9e3dda01fb82308000000").unwrap(); + let mut psbt = hex_psbt("70736274ff0100a00200000002ab0949a08c5af7c49b8212f417e2f15ab3f5c33dcf153821a8139f877a5b7be40000000000feffffffab0949a08c5af7c49b8212f417e2f15ab3f5c33dcf153821a8139f877a5b7be40100000000feffffff02603bea0b000000001976a914768a40bbd740cbe81d988e71de2a4d5c71396b1d88ac8e240000000000001976a9146f4620b553fa095e721b9ee0efe9fa039cca459788ac000000000001076a47304402204759661797c01b036b25928948686218347d89864b719e1f7fcf57d1e511658702205309eabf56aa4d8891ffd111fdf1336f3a29da866d7f8486d75546ceedaf93190121035cdc61fc7ba971c0b501a646a2a83b102cb43881217ca682dc86e2d73fa882920001012000e1f5050000000017a9143545e6e33b832c47050f24d3eeb93c9c03948bc787010416001485d13537f2e265405a34dbafa9e3dda01fb82308000000").unwrap(); psbt.proprietary.insert( raw::ProprietaryKey { prefix: b"test".to_vec(), subtype: 0u64, key: b"test".to_vec() }, b"test".to_vec(), ); assert!(!psbt.proprietary.is_empty()); - let rtt: Psbt = hex_psbt(&psbt.serialize_hex()).unwrap(); + let rtt = hex_psbt(&psbt.serialize_hex()).unwrap(); assert!(!rtt.proprietary.is_empty()); } @@ -2113,6 +2198,18 @@ mod tests { assert_eq!(psbt1, psbt2); } + // https://github.com/rust-bitcoin/rust-bitcoin/issues/3628 + #[test] + fn combine_psbt_fuzz_3628() { + let mut psbt1 = hex_psbt(include_str!("../../tests/data/psbt_fuzz1.hex")).unwrap(); + let psbt2 = hex_psbt(include_str!("../../tests/data/psbt_fuzz2.hex")).unwrap(); + + assert!(matches!( + psbt1.combine(psbt2).unwrap_err(), + Error::CombineInconsistentKeySources(_) + )); + } + #[cfg(feature = "rand-std")] fn gen_keys() -> (PrivateKey, PublicKey, Secp256k1) { use secp256k1::rand::thread_rng; @@ -2139,12 +2236,48 @@ mod tests { } #[test] - fn test_fee() { - let output_0_val = Amount::from_sat(99_999_699); - let output_1_val = Amount::from_sat(100_000_000); - let prev_output_val = Amount::from_sat(200_000_000); + #[cfg(feature = "rand-std")] + fn pubkey_map_get_key_negates_odd_parity_keys() { + use crate::psbt::{GetKey, KeyRequest}; + + let (mut priv_key, mut pk, secp) = gen_keys(); + let (xonly, parity) = pk.inner.x_only_public_key(); + + let mut pubkey_map: HashMap = HashMap::new(); + + if parity == secp256k1::Parity::Even { + priv_key = PrivateKey { + compressed: priv_key.compressed, + network: priv_key.network, + inner: priv_key.inner.negate(), + }; + pk = priv_key.public_key(&secp); + } + + pubkey_map.insert(pk, priv_key); + + let req_result = pubkey_map.get_key(&KeyRequest::XOnlyPubkey(xonly), &secp).unwrap(); + + let retrieved_key = req_result.unwrap(); + + let retrieved_pub_key = retrieved_key.public_key(&secp); + let (retrieved_xonly, retrieved_parity) = retrieved_pub_key.inner.x_only_public_key(); + + assert_eq!(xonly, retrieved_xonly); + assert_eq!( + retrieved_parity, + secp256k1::Parity::Even, + "Key should be normalized to have even parity, even when original had odd parity" + ); + } + + 
#[test] + fn fee() { + let output_0_val = Amount::from_sat_u32(99_999_699); + let output_1_val = Amount::from_sat_u32(100_000_000); + let prev_output_val = Amount::from_sat_u32(200_000_000); - let mut t = Psbt { + let t = Psbt { unsigned_tx: Transaction { version: transaction::Version::TWO, lock_time: absolute::LockTime::from_consensus(1257139), @@ -2203,7 +2336,7 @@ mod tests { script_pubkey: ScriptBuf::new() }, TxOut { - value: Amount::from_sat(190_303_501_938), + value: Amount::from_sat(190_303_501_938).unwrap(), script_pubkey: ScriptBuf::new() }, ], @@ -2222,7 +2355,7 @@ mod tests { }; assert_eq!( t.fee().expect("fee calculation"), - prev_output_val - (output_0_val + output_1_val) + (prev_output_val - (output_0_val + output_1_val)).unwrap() ); // no previous output let mut t2 = t.clone(); @@ -2238,28 +2371,82 @@ mod tests { Error::NegativeFee => {} e => panic!("unexpected error: {:?}", e), } - // overflow - t.unsigned_tx.output[0].value = Amount::MAX; - t.unsigned_tx.output[1].value = Amount::MAX; - match t.fee().unwrap_err() { - Error::FeeOverflow => {} - e => panic!("unexpected error: {:?}", e), - } } #[test] #[cfg(feature = "rand-std")] - fn sign_psbt() { - use crate::address::script_pubkey::ScriptBufExt as _; - use crate::bip32::{DerivationPath, Fingerprint}; - use crate::witness_version::WitnessVersion; - use crate::WitnessProgram; + fn hashmap_can_sign_taproot() { + let (priv_key, pk, secp) = gen_keys(); + let internal_key: XOnlyPublicKey = pk.inner.into(); + + let tx = Transaction { + version: transaction::Version::TWO, + lock_time: locktime::absolute::LockTime::ZERO, + input: vec![TxIn::EMPTY_COINBASE], + output: vec![TxOut { value: Amount::ZERO, script_pubkey: ScriptBuf::new() }], + }; + + let mut psbt = Psbt::from_unsigned_tx(tx).unwrap(); + psbt.inputs[0].tap_internal_key = Some(internal_key); + psbt.inputs[0].witness_utxo = Some(transaction::TxOut { + value: Amount::from_sat_u32(10), + script_pubkey: ScriptBuf::new_p2tr(&secp, internal_key, None), + }); + + let mut key_map: HashMap = HashMap::new(); + key_map.insert(pk, priv_key); + + let key_source = (Fingerprint::default(), DerivationPath::default()); + let mut tap_key_origins = std::collections::BTreeMap::new(); + tap_key_origins.insert(internal_key, (vec![], key_source)); + psbt.inputs[0].tap_key_origins = tap_key_origins; + let signing_keys = psbt.sign(&key_map, &secp).unwrap(); + assert_eq!(signing_keys.len(), 1); + assert_eq!(signing_keys[&0], SigningKeys::Schnorr(vec![internal_key])); + } + + #[test] + #[cfg(feature = "rand-std")] + fn xonly_hashmap_can_sign_taproot() { + let (priv_key, pk, secp) = gen_keys(); + let internal_key: XOnlyPublicKey = pk.inner.into(); + + let tx = Transaction { + version: transaction::Version::TWO, + lock_time: locktime::absolute::LockTime::ZERO, + input: vec![TxIn::EMPTY_COINBASE], + output: vec![TxOut { value: Amount::ZERO, script_pubkey: ScriptBuf::new() }], + }; + + let mut psbt = Psbt::from_unsigned_tx(tx).unwrap(); + psbt.inputs[0].tap_internal_key = Some(internal_key); + psbt.inputs[0].witness_utxo = Some(transaction::TxOut { + value: Amount::from_sat_u32(10), + script_pubkey: ScriptBuf::new_p2tr(&secp, internal_key, None), + }); + + let mut xonly_key_map: HashMap = HashMap::new(); + xonly_key_map.insert(internal_key, priv_key); + + let key_source = (Fingerprint::default(), DerivationPath::default()); + let mut tap_key_origins = std::collections::BTreeMap::new(); + tap_key_origins.insert(internal_key, (vec![], key_source)); + psbt.inputs[0].tap_key_origins = tap_key_origins; + + 
let signing_keys = psbt.sign(&xonly_key_map, &secp).unwrap(); + assert_eq!(signing_keys.len(), 1); + assert_eq!(signing_keys[&0], SigningKeys::Schnorr(vec![internal_key])); + } + + #[test] + #[cfg(feature = "rand-std")] + fn sign_psbt() { let unsigned_tx = Transaction { version: transaction::Version::TWO, lock_time: absolute::LockTime::ZERO, input: vec![TxIn::EMPTY_COINBASE, TxIn::EMPTY_COINBASE], - output: vec![TxOut::NULL], + output: vec![TxOut { value: Amount::ZERO, script_pubkey: ScriptBuf::new() }], }; let mut psbt = Psbt::from_unsigned_tx(unsigned_tx).unwrap(); @@ -2272,7 +2459,7 @@ mod tests { // First input we can spend. See comment above on key_map for why we use defaults here. let txout_wpkh = TxOut { - value: Amount::from_sat(10), + value: Amount::from_sat_u32(10), script_pubkey: ScriptBuf::new_p2wpkh(pk.wpubkey_hash().unwrap()), }; psbt.inputs[0].witness_utxo = Some(txout_wpkh); @@ -2284,7 +2471,7 @@ mod tests { // Second input is unspendable by us e.g., from another wallet that supports future upgrades. let unknown_prog = WitnessProgram::new(WitnessVersion::V4, &[0xaa; 34]).unwrap(); let txout_unknown_future = TxOut { - value: Amount::from_sat(10), + value: Amount::from_sat_u32(10), script_pubkey: ScriptBuf::new_witness_program(&unknown_prog), }; psbt.inputs[1].witness_utxo = Some(txout_unknown_future); diff --git a/bitcoin/src/psbt/serialize.rs b/bitcoin/src/psbt/serialize.rs index 5b505d4b77..4c2b0f4815 100644 --- a/bitcoin/src/psbt/serialize.rs +++ b/bitcoin/src/psbt/serialize.rs @@ -7,6 +7,8 @@ use hashes::{hash160, ripemd160, sha256, sha256d}; use internals::compact_size; +#[allow(unused)] // MSRV polyfill +use internals::slice::SliceExt; use secp256k1::XOnlyPublicKey; use super::map::{Input, Map, Output, PsbtSighashType}; @@ -214,14 +216,12 @@ impl Serialize for KeySource { impl Deserialize for KeySource { fn deserialize(bytes: &[u8]) -> Result { - if bytes.len() < 4 { - return Err(io::Error::from(io::ErrorKind::UnexpectedEof).into()); - } + let (fingerprint, mut d) = bytes.split_first_chunk::<4>() + .ok_or(io::Error::from(io::ErrorKind::UnexpectedEof))?; - let fprint: Fingerprint = bytes[0..4].try_into().expect("4 is the fingerprint length"); + let fprint: Fingerprint = fingerprint.into(); let mut dpath: Vec = Default::default(); - let mut d = &bytes[4..]; while !d.is_empty() { match u32::consensus_decode(&mut d) { Ok(index) => dpath.push(index.into()), @@ -260,7 +260,8 @@ impl Serialize for XOnlyPublicKey { impl Deserialize for XOnlyPublicKey { fn deserialize(bytes: &[u8]) -> Result { - XOnlyPublicKey::from_slice(bytes).map_err(|_| Error::InvalidXOnlyPublicKey) + XOnlyPublicKey::from_byte_array(bytes.try_into().map_err(|_| Error::InvalidXOnlyPublicKey)?) + .map_err(|_| Error::InvalidXOnlyPublicKey) } } @@ -365,7 +366,7 @@ impl Serialize for TapTree { for leaf_info in self.script_leaves() { // # Cast Safety: // - // TaprootMerkleBranch can only have len atmost 128(TAPROOT_CONTROL_MAX_NODE_COUNT). + // TaprootMerkleBranch can only have len at most 128(TAPROOT_CONTROL_MAX_NODE_COUNT). 
// safe to cast from usize to u8 buf.push(leaf_info.merkle_branch().len() as u8); buf.push(leaf_info.version().to_consensus()); @@ -458,6 +459,12 @@ mod tests { assert!(sighash.is_ok()) } + #[test] + fn deserialize_xonly_public_key_len() { + assert!(XOnlyPublicKey::deserialize(&[1; 31]).is_err()); + assert!(XOnlyPublicKey::deserialize(&[1; 33]).is_err()); + } + #[test] #[should_panic(expected = "InvalidMagic")] fn invalid_vector_1() { diff --git a/bitcoin/src/sign_message.rs b/bitcoin/src/sign_message.rs index bbc2b44de9..d5f2be8481 100644 --- a/bitcoin/src/sign_message.rs +++ b/bitcoin/src/sign_message.rs @@ -21,6 +21,7 @@ pub const BITCOIN_SIGNED_MSG_PREFIX: &[u8] = b"\x18Bitcoin Signed Message:\n"; #[cfg(feature = "secp-recovery")] mod message_signing { + use core::convert::Infallible; use core::fmt; use hashes::sha256d; @@ -44,7 +45,9 @@ mod message_signing { UnsupportedAddressType(AddressType), } - internals::impl_from_infallible!(MessageSignatureError); + impl From for MessageSignatureError { + fn from(never: Infallible) -> Self { match never {} } + } impl fmt::Display for MessageSignatureError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { @@ -101,7 +104,7 @@ mod message_signing { pub fn serialize(&self) -> [u8; 65] { let (recid, raw) = self.signature.serialize_compact(); let mut serialized = [0u8; 65]; - serialized[0] = recid.to_i32() as u8 + if self.compressed { 31 } else { 27 }; + serialized[0] = i32::from(recid) as u8 + if self.compressed { 31 } else { 27 }; serialized[1..].copy_from_slice(&raw[..]); serialized } @@ -112,7 +115,7 @@ mod message_signing { if bytes[0] < 27 { return Err(secp256k1::Error::InvalidRecoveryId); }; - let recid = RecoveryId::from_i32(((bytes[0] - 27) & 0x03) as i32)?; + let recid = RecoveryId::try_from(((bytes[0] - 27) & 0x03) as i32)?; Ok(MessageSignature { signature: RecoverableSignature::from_compact(&bytes[1..], recid)?, compressed: ((bytes[0] - 27) & 0x04) != 0, @@ -240,7 +243,7 @@ mod tests { #[test] #[cfg(all(feature = "secp-recovery", feature = "base64", feature = "rand-std"))] - fn test_message_signature() { + fn message_signature() { use secp256k1; use crate::{Address, AddressType, Network, NetworkKind}; @@ -286,7 +289,7 @@ mod tests { #[test] #[cfg(all(feature = "secp-recovery", feature = "base64"))] - fn test_incorrect_message_signature() { + fn incorrect_message_signature() { use base64::prelude::{Engine as _, BASE64_STANDARD}; use secp256k1; diff --git a/bitcoin/src/taproot/merkle_branch/borrowed.rs b/bitcoin/src/taproot/merkle_branch/borrowed.rs new file mode 100644 index 0000000000..58538f09d2 --- /dev/null +++ b/bitcoin/src/taproot/merkle_branch/borrowed.rs @@ -0,0 +1,290 @@ +use core::borrow::{Borrow, BorrowMut}; +use internals::slice::SliceExt; + +use super::{DecodeError, InvalidMerkleBranchSizeError, InvalidMerkleTreeDepthError, TaprootMerkleBranchBuf, TapNodeHash, TAPROOT_CONTROL_MAX_NODE_COUNT, TAPROOT_CONTROL_NODE_SIZE}; + +pub use privacy_boundary::TaprootMerkleBranch; + +/// Makes sure only the allowed conversions are accessible to external code. +mod privacy_boundary { + use super::*; + + /// The Merkle proof for inclusion of a tree in a Taproot tree hash. + #[repr(transparent)] + #[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Hash)] + pub struct TaprootMerkleBranch([TapNodeHash]); + + impl TaprootMerkleBranch { + /// Returns a reference to the slice of hashes. + #[inline] + pub const fn as_slice(&self) -> &[TapNodeHash] { &self.0 } + + /// Returns a reference to the mutable slice of hashes. 
+ #[inline] + pub fn as_mut_slice(&mut self) -> &mut [TapNodeHash] { &mut self.0 } + + pub(super) const fn from_hashes_unchecked(hashes: &[TapNodeHash]) -> &Self { + unsafe { + &*(hashes as *const _ as *const Self) + } + } + + pub(super) fn from_mut_hashes_unchecked(hashes: &mut [TapNodeHash]) -> &mut Self { + unsafe { + &mut *(hashes as *mut _ as *mut Self) + } + } + } +} + +impl TaprootMerkleBranch { + /// Returns an empty branch. + pub const fn new() -> &'static Self { + Self::from_hashes_unchecked(&[]) + } + + /// Returns the number of nodes in this Merkle proof. + #[inline] + pub fn len(&self) -> usize { self.as_slice().len() } + + /// Checks if this Merkle proof is empty. + #[inline] + pub fn is_empty(&self) -> bool { self.as_slice().is_empty() } + + /// Creates an iterator over the node hashes. + #[inline] + pub fn iter(&self) -> core::slice::Iter<'_, TapNodeHash> { self.into_iter() } + + /// Creates an iterator over the mutable node hashes. + #[inline] + pub fn iter_mut(&mut self) -> core::slice::IterMut<'_, TapNodeHash> { self.into_iter() } + + /// Casts `TaprootMerkleBranch` to a byte slice. + pub(crate) fn as_bytes(&self) -> &[u8] { + let ptr = self.as_slice().as_ptr(); + let num_bytes = self.len() * TAPROOT_CONTROL_NODE_SIZE; + // SAFETY: + // The pointer points to memory that's borrowed and the returned slice has the same + // lifetime. The alignment is of the types is the same (as checked in the test), the + // length is within the bounds - as computed above by multiplication. + unsafe { core::slice::from_raw_parts(ptr.cast::(), num_bytes) } + } + + /// Serializes to a writer. + /// + /// # Returns + /// + /// The number of bytes written to the writer. + pub fn encode(&self, writer: &mut Write) -> io::Result { + let bytes = self.as_bytes(); + writer.write_all(bytes)?; + Ok(bytes.len()) + } + + /// Zero-copy decodes `bytes` as Taproot Merkle branch. + /// + /// Note that "decoding" is quite trivial: it only performs appropriate bound checks and casts + /// the reference. + pub fn decode(bytes: &[u8]) -> Result<&Self, DecodeError> { + let (nodes, remainder) = bytes.bitcoin_as_chunks(); + if remainder.is_empty() { + Self::decode_exact(nodes).map_err(Into::into) + } else { + Err(InvalidMerkleBranchSizeError(bytes.len()).into()) + } + } + + /// Decodes a byte slice that is statically known to be multiple of 32. + /// + /// This can be used as a building block for other ways of decoding. + fn decode_exact(nodes: &[[u8; TAPROOT_CONTROL_NODE_SIZE]]) -> Result<&Self, InvalidMerkleTreeDepthError> { + // SAFETY: + // The lifetime of the returned reference is the same as the lifetime of the input + // reference, the size of `TapNodeHash` is equal to `TAPROOT_CONTROL_NODE_SIZE` and the + // alignment of `TapNodeHash` is equal to the alignment of `u8` (see tests below). 
+ Self::from_hashes(unsafe { &*(nodes as *const _ as *const [TapNodeHash]) }) + } + + fn from_hashes(nodes: &[TapNodeHash]) -> Result<&Self, InvalidMerkleTreeDepthError>{ + if nodes.len() <= TAPROOT_CONTROL_MAX_NODE_COUNT { + Ok(Self::from_hashes_unchecked(nodes)) + } else { + Err(InvalidMerkleTreeDepthError(nodes.len())) + } + } +} + +impl Default for &'_ TaprootMerkleBranch { + fn default() -> Self { + TaprootMerkleBranch::new() + } +} + +impl AsRef for TaprootMerkleBranch { + fn as_ref(&self) -> &TaprootMerkleBranch { + self + } +} + +impl AsMut for TaprootMerkleBranch { + fn as_mut(&mut self) -> &mut TaprootMerkleBranch { + self + } +} + +impl AsRef for TaprootMerkleBranchBuf { + fn as_ref(&self) -> &TaprootMerkleBranch { + // TaprootMerkleBranchBuf maintains the invariant that the node count is in range. + TaprootMerkleBranch::from_hashes_unchecked(self.as_slice()) + } +} + +impl AsMut for TaprootMerkleBranchBuf { + fn as_mut(&mut self) -> &mut TaprootMerkleBranch { + // TaprootMerkleBranchBuf maintains the invariant that the node count is in range. + TaprootMerkleBranch::from_mut_hashes_unchecked(self.as_mut_slice()) + } +} + +impl Borrow for TaprootMerkleBranchBuf { + #[inline] + fn borrow(&self) -> &TaprootMerkleBranch { self.as_ref() } +} + +impl BorrowMut for TaprootMerkleBranchBuf { + #[inline] + fn borrow_mut(&mut self) -> &mut TaprootMerkleBranch { self.as_mut() } +} + +impl<'a> TryFrom<&'a [TapNodeHash]> for &'a TaprootMerkleBranch { + type Error = InvalidMerkleTreeDepthError; + + fn try_from(value: &'a [TapNodeHash]) -> Result { + TaprootMerkleBranch::from_hashes(value) + } +} + +macro_rules! impl_from_array { + ($($len:expr),* $(,)?) => { + $( + impl AsRef for [TapNodeHash; $len] { + fn as_ref(&self) -> &TaprootMerkleBranch { + #[allow(unused_comparisons)] + const _: () = { assert!($len <= TAPROOT_CONTROL_MAX_NODE_COUNT) }; + // There's a static check to ensure correct macro usage above. + TaprootMerkleBranch::from_hashes_unchecked(self) + } + } + + impl AsMut for [TapNodeHash; $len] { + fn as_mut(&mut self) -> &mut TaprootMerkleBranch { + #[allow(unused_comparisons)] + const _: () = { assert!($len <= TAPROOT_CONTROL_MAX_NODE_COUNT) }; + // There's a static check to ensure correct macro usage above. + TaprootMerkleBranch::from_mut_hashes_unchecked(self) + } + } + + impl Borrow for [TapNodeHash; $len] { + fn borrow(&self) -> &TaprootMerkleBranch { + self.as_ref() + } + } + + impl BorrowMut for [TapNodeHash; $len] { + fn borrow_mut(&mut self) -> &mut TaprootMerkleBranch { + self.as_mut() + } + } + + impl<'a> From<&'a [TapNodeHash; $len]> for &'a TaprootMerkleBranch { + #[inline] + fn from(branch: &'a [TapNodeHash; $len]) -> Self { + branch.as_ref() + } + } + + impl<'a> From<&'a mut [TapNodeHash; $len]> for &'a mut TaprootMerkleBranch { + #[inline] + fn from(branch: &'a mut [TapNodeHash; $len]) -> Self { + branch.as_mut() + } + } + )* + } +} + +// Implement for all values [0, 128] inclusive. +// +// The reason zero is included is that `TaprootMerkleBranchBuf` doesn't contain the hash of the node +// that's being proven - it's not needed because the script is already right before control block. 
+impl_from_array!( + 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, + 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, + 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, + 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, + 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, + 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128 +); + +impl AsRef<[TapNodeHash]> for TaprootMerkleBranch { + #[inline] + fn as_ref(&self) -> &[TapNodeHash] { self.as_slice() } +} + +impl AsMut<[TapNodeHash]> for TaprootMerkleBranch { + #[inline] + fn as_mut(&mut self) -> &mut [TapNodeHash] { self.as_mut_slice() } +} + +impl Borrow<[TapNodeHash]> for TaprootMerkleBranch { + #[inline] + fn borrow(&self) -> &[TapNodeHash] { self.as_ref() } +} + +impl BorrowMut<[TapNodeHash]> for TaprootMerkleBranch { + #[inline] + fn borrow_mut(&mut self) -> &mut [TapNodeHash] { self.as_mut() } +} + +impl alloc::borrow::ToOwned for TaprootMerkleBranch { + // It could be argued that this should've been a stack-allocated type. + // However such type would be huge and this trait interacts with `Cow`. + // If someone wants to pass it around they're better off just always copying rather than using + // `Cow`. + type Owned = TaprootMerkleBranchBuf; + + fn to_owned(&self) -> Self::Owned { + self.into() + } +} + +impl<'a> IntoIterator for &'a TaprootMerkleBranch { + type IntoIter = core::slice::Iter<'a, TapNodeHash>; + type Item = &'a TapNodeHash; + + fn into_iter(self) -> Self::IntoIter { + self.as_slice().iter() + } +} + +impl<'a> IntoIterator for &'a mut TaprootMerkleBranch { + type IntoIter = core::slice::IterMut<'a, TapNodeHash>; + type Item = &'a mut TapNodeHash; + + #[inline] + fn into_iter(self) -> Self::IntoIter { self.as_mut_slice().iter_mut() } +} + +#[cfg(test)] +mod tests { + #[test] + fn alignment() { + assert!(core::mem::align_of_val(super::TaprootMerkleBranch::new()) == core::mem::align_of::()); + } + + const _: () = { + assert!(core::mem::size_of::() == super::TAPROOT_CONTROL_NODE_SIZE); + assert!(core::mem::align_of::() == core::mem::align_of::()); + }; +} diff --git a/bitcoin/src/taproot/merkle_branch.rs b/bitcoin/src/taproot/merkle_branch/buf.rs similarity index 75% rename from bitcoin/src/taproot/merkle_branch.rs rename to bitcoin/src/taproot/merkle_branch/buf.rs index c629eacb69..a6e0e7526f 100644 --- a/bitcoin/src/taproot/merkle_branch.rs +++ b/bitcoin/src/taproot/merkle_branch/buf.rs @@ -1,12 +1,12 @@ // SPDX-License-Identifier: CC0-1.0 -//! Contains `TaprootMerkleBranch` and its associated types. +//! Contains `TaprootMerkleBranchBuf` and its associated types. 
use hashes::Hash; use super::{ - InvalidMerkleBranchSizeError, InvalidMerkleTreeDepthError, TapNodeHash, TaprootError, - TAPROOT_CONTROL_MAX_NODE_COUNT, TAPROOT_CONTROL_NODE_SIZE, + InvalidMerkleTreeDepthError, TapNodeHash, TaprootError, TaprootMerkleBranch, + TAPROOT_CONTROL_MAX_NODE_COUNT, }; use crate::prelude::{Borrow, BorrowMut, Box, Vec}; @@ -15,9 +15,9 @@ use crate::prelude::{Borrow, BorrowMut, Box, Vec}; #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[cfg_attr(feature = "serde", serde(into = "Vec"))] #[cfg_attr(feature = "serde", serde(try_from = "Vec"))] -pub struct TaprootMerkleBranch(Vec); +pub struct TaprootMerkleBranchBuf(Vec); -impl TaprootMerkleBranch { +impl TaprootMerkleBranchBuf { /// Returns a reference to the slice of hashes. #[deprecated(since = "0.32.0", note = "use `as_slice` instead")] #[inline] @@ -27,6 +27,10 @@ impl TaprootMerkleBranch { #[inline] pub fn as_slice(&self) -> &[TapNodeHash] { &self.0 } + /// Returns a mutable reference to the slice of hashes. + #[inline] + pub fn as_mut_slice(&mut self) -> &mut [TapNodeHash] { &mut self.0 } + /// Returns the number of nodes in this Merkle proof. #[inline] pub fn len(&self) -> usize { self.0.len() } @@ -45,22 +49,7 @@ impl TaprootMerkleBranch { /// The function returns an error if the number of bytes is not an integer multiple of 32 or /// if the number of hashes exceeds 128. pub fn decode(sl: &[u8]) -> Result { - if sl.len() % TAPROOT_CONTROL_NODE_SIZE != 0 { - Err(InvalidMerkleBranchSizeError(sl.len()).into()) - } else if sl.len() > TAPROOT_CONTROL_NODE_SIZE * TAPROOT_CONTROL_MAX_NODE_COUNT { - Err(InvalidMerkleTreeDepthError(sl.len() / TAPROOT_CONTROL_NODE_SIZE).into()) - } else { - let inner = sl - .chunks_exact(TAPROOT_CONTROL_NODE_SIZE) - .map(|chunk| { - let bytes = <[u8; 32]>::try_from(chunk) - .expect("chunks_exact always returns the correct size"); - TapNodeHash::from_byte_array(bytes) - }) - .collect(); - - Ok(TaprootMerkleBranch(inner)) - } + TaprootMerkleBranch::decode(sl).map(alloc::borrow::ToOwned::to_owned).map_err(Into::into) } /// Constructs a new Merkle proof from list of hashes. @@ -75,7 +64,7 @@ impl TaprootMerkleBranch { if collection.as_ref().len() > TAPROOT_CONTROL_MAX_NODE_COUNT { Err(InvalidMerkleTreeDepthError(collection.as_ref().len())) } else { - Ok(TaprootMerkleBranch(collection.into())) + Ok(TaprootMerkleBranchBuf(collection.into())) } } @@ -97,7 +86,7 @@ impl TaprootMerkleBranch { } /// Appends elements to proof. - pub(super) fn push(&mut self, h: TapNodeHash) -> Result<(), InvalidMerkleTreeDepthError> { + pub(in super::super) fn push(&mut self, h: TapNodeHash) -> Result<(), InvalidMerkleTreeDepthError> { if self.len() >= TAPROOT_CONTROL_MAX_NODE_COUNT { Err(InvalidMerkleTreeDepthError(self.0.len())) } else { @@ -118,7 +107,7 @@ impl TaprootMerkleBranch { macro_rules! impl_try_from { ($from:ty) => { - impl TryFrom<$from> for TaprootMerkleBranch { + impl TryFrom<$from> for TaprootMerkleBranchBuf { type Error = InvalidMerkleTreeDepthError; /// Constructs a new Merkle proof from list of hashes. @@ -128,7 +117,7 @@ macro_rules! impl_try_from { /// If inner proof length is more than [`TAPROOT_CONTROL_MAX_NODE_COUNT`] (128). #[inline] fn try_from(v: $from) -> Result { - TaprootMerkleBranch::from_collection(v) + TaprootMerkleBranchBuf::from_collection(v) } } }; @@ -137,10 +126,10 @@ impl_try_from!(&[TapNodeHash]); impl_try_from!(Vec); impl_try_from!(Box<[TapNodeHash]>); -macro_rules! impl_try_from_array { +macro_rules! impl_from_array { ($($len:expr),* $(,)?) 
=> { $( - impl From<[TapNodeHash; $len]> for TaprootMerkleBranch { + impl From<[TapNodeHash; $len]> for TaprootMerkleBranchBuf { #[inline] fn from(a: [TapNodeHash; $len]) -> Self { Self(a.to_vec()) @@ -151,9 +140,9 @@ macro_rules! impl_try_from_array { } // Implement for all values [0, 128] inclusive. // -// The reason zero is included is that `TaprootMerkleBranch` doesn't contain the hash of the node +// The reason zero is included is that `TaprootMerkleBranchBuf` doesn't contain the hash of the node // that's being proven - it's not needed because the script is already right before control block. -impl_try_from_array!( +impl_from_array!( 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, @@ -162,12 +151,12 @@ impl_try_from_array!( 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128 ); -impl From for Vec { +impl From for Vec { #[inline] - fn from(branch: TaprootMerkleBranch) -> Self { branch.0 } + fn from(branch: TaprootMerkleBranchBuf) -> Self { branch.0 } } -impl IntoIterator for TaprootMerkleBranch { +impl IntoIterator for TaprootMerkleBranchBuf { type IntoIter = IntoIter; type Item = TapNodeHash; @@ -175,7 +164,7 @@ impl IntoIterator for TaprootMerkleBranch { fn into_iter(self) -> Self::IntoIter { IntoIter(self.0.into_iter()) } } -impl<'a> IntoIterator for &'a TaprootMerkleBranch { +impl<'a> IntoIterator for &'a TaprootMerkleBranchBuf { type IntoIter = core::slice::Iter<'a, TapNodeHash>; type Item = &'a TapNodeHash; @@ -183,7 +172,7 @@ impl<'a> IntoIterator for &'a TaprootMerkleBranch { fn into_iter(self) -> Self::IntoIter { self.0.iter() } } -impl<'a> IntoIterator for &'a mut TaprootMerkleBranch { +impl<'a> IntoIterator for &'a mut TaprootMerkleBranchBuf { type IntoIter = core::slice::IterMut<'a, TapNodeHash>; type Item = &'a mut TapNodeHash; @@ -191,41 +180,47 @@ impl<'a> IntoIterator for &'a mut TaprootMerkleBranch { fn into_iter(self) -> Self::IntoIter { self.0.iter_mut() } } -impl core::ops::Deref for TaprootMerkleBranch { - type Target = [TapNodeHash]; +impl core::ops::Deref for TaprootMerkleBranchBuf { + type Target = TaprootMerkleBranch; #[inline] - fn deref(&self) -> &Self::Target { &self.0 } + fn deref(&self) -> &Self::Target { self.as_ref() } } -impl core::ops::DerefMut for TaprootMerkleBranch { +impl core::ops::DerefMut for TaprootMerkleBranchBuf { #[inline] - fn deref_mut(&mut self) -> &mut Self::Target { &mut self.0 } + fn deref_mut(&mut self) -> &mut Self::Target { self.as_mut() } } -impl AsRef<[TapNodeHash]> for TaprootMerkleBranch { +impl AsRef<[TapNodeHash]> for TaprootMerkleBranchBuf { #[inline] fn as_ref(&self) -> &[TapNodeHash] { &self.0 } } -impl AsMut<[TapNodeHash]> for TaprootMerkleBranch { +impl AsMut<[TapNodeHash]> for TaprootMerkleBranchBuf { #[inline] fn as_mut(&mut self) -> &mut [TapNodeHash] { &mut self.0 } } -impl Borrow<[TapNodeHash]> for TaprootMerkleBranch { +impl Borrow<[TapNodeHash]> for TaprootMerkleBranchBuf { #[inline] fn borrow(&self) -> &[TapNodeHash] { &self.0 } } -impl BorrowMut<[TapNodeHash]> for TaprootMerkleBranch { +impl BorrowMut<[TapNodeHash]> for TaprootMerkleBranchBuf { #[inline] fn borrow_mut(&mut self) -> &mut [TapNodeHash] { &mut self.0 } } +impl<'a> From<&'a TaprootMerkleBranch> for TaprootMerkleBranchBuf { + fn from(value: &'a TaprootMerkleBranch) -> Self { + Self(value.as_slice().into()) + } 
+} + /// Iterator over node hashes within Taproot Merkle branch. /// -/// This is created by `into_iter` method on `TaprootMerkleBranch` (via `IntoIterator` trait). +/// This is created by `into_iter` method on `TaprootMerkleBranchBuf` (via `IntoIterator` trait). #[derive(Clone, Debug)] pub struct IntoIter(alloc::vec::IntoIter); @@ -252,6 +247,7 @@ impl Iterator for IntoIter { fn nth(&mut self, n: usize) -> Option { self.0.nth(n) } #[inline] + #[allow(clippy::double_ended_iterator_last)] // False positive. Using `.last()` on a `vec::IntoIter` is correct. fn last(self) -> Option { self.0.last() } #[inline] diff --git a/bitcoin/src/taproot/merkle_branch/mod.rs b/bitcoin/src/taproot/merkle_branch/mod.rs new file mode 100644 index 0000000000..0fd32c2594 --- /dev/null +++ b/bitcoin/src/taproot/merkle_branch/mod.rs @@ -0,0 +1,67 @@ +//! Contains `TaprootMerkleBranchBuf` and its associated types. + +mod buf; +mod borrowed; + +pub use buf::TaprootMerkleBranchBuf; +pub use borrowed::TaprootMerkleBranch; + +use core::fmt; +use super::{ + InvalidMerkleBranchSizeError, InvalidMerkleTreeDepthError, TapNodeHash, TaprootError, + TAPROOT_CONTROL_MAX_NODE_COUNT, TAPROOT_CONTROL_NODE_SIZE, +}; + +/// Returned when decoding of merkle branch fails. +#[derive(Debug)] +pub struct DecodeError { + /// Represents the invalid number of bytes. It may be invalid in two ways: it might not be a + /// multiple of 32, in which case it is guaranteed to be wrong for that reason; + /// only if it is a multiple of 32 do we check that it does not exceed 32 * 128, in which case + /// it is wrong for that reason. + /// + /// This error type is used in `Result<&TaprootMerkleBranch, DecodeError>`, so by keeping its + /// size down to a single `usize` (by not using enum) and considering the niche optimization on + /// the *fat reference* `&TaprootMerkleBranch`, the `Result` will have the same size as just + /// `&TaprootMerkleBranch`. 
+ num_bytes: usize, +} + +impl From for DecodeError { + fn from(value: InvalidMerkleBranchSizeError) -> Self { + Self { + num_bytes: value.0, + } + } +} + +impl From for DecodeError { + fn from(value: InvalidMerkleTreeDepthError) -> Self { + Self { + num_bytes: value.0 * TAPROOT_CONTROL_NODE_SIZE, + } + } +} + +impl fmt::Display for DecodeError { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + if self.num_bytes % TAPROOT_CONTROL_NODE_SIZE == 0 { + write!(f, "the Merkle branch has {} nodes which is more than the limit {}", self.num_bytes / TAPROOT_CONTROL_NODE_SIZE, TAPROOT_CONTROL_MAX_NODE_COUNT) + } else { + write!(f, "the Merkle branch is {} bytes long which is not an integer multiple of {}", self.num_bytes, TAPROOT_CONTROL_NODE_SIZE) + } + } +} + +#[cfg(feature = "std")] +impl std::error::Error for DecodeError {} + +impl From for TaprootError { + fn from(value: DecodeError) -> Self { + if value.num_bytes % TAPROOT_CONTROL_NODE_SIZE == 0 { + InvalidMerkleTreeDepthError(value.num_bytes / TAPROOT_CONTROL_NODE_SIZE).into() + } else { + InvalidMerkleBranchSizeError(value.num_bytes).into() + } + } +} diff --git a/bitcoin/src/taproot/mod.rs b/bitcoin/src/taproot/mod.rs index b8bd003423..b14264f249 100644 --- a/bitcoin/src/taproot/mod.rs +++ b/bitcoin/src/taproot/mod.rs @@ -7,12 +7,17 @@ pub mod merkle_branch; pub mod serialized_signature; -use core::cmp::Reverse; +use core::cmp::{Ordering, Reverse}; +use core::convert::Infallible; use core::fmt; use core::iter::FusedIterator; -use hashes::{sha256t, HashEngine}; +use hashes::{hash_newtype, sha256t, sha256t_tag, HashEngine}; +use internals::array::ArrayExt; use internals::{impl_to_hex_from_lower_hex, write_err}; +#[allow(unused)] // MSRV polyfill +use internals::slice::SliceExt; + use io::Write; use secp256k1::{Scalar, Secp256k1}; @@ -26,51 +31,97 @@ use crate::{Script, ScriptBuf}; #[doc(inline)] pub use crate::crypto::taproot::{SigFromSliceError, Signature}; #[doc(inline)] +pub use merkle_branch::TaprootMerkleBranchBuf; +#[doc(inline)] pub use merkle_branch::TaprootMerkleBranch; -pub use primitives::taproot::{ - TapBranchTag, TapLeafHash, TapLeafTag, TapNodeHash, TapTweakHash, TapTweakTag, -}; - -crate::internal_macros::define_extension_trait! { - /// Extension functionality for the [`TapTweakHash`] type. - pub trait TapTweakHashExt impl for TapTweakHash { - /// Constructs a new BIP341 [`TapTweakHash`] from key and tweak. Produces `H_taptweak(P||R)` where - /// `P` is the internal key and `R` is the Merkle root. - fn from_key_and_tweak( - internal_key: UntweakedPublicKey, - merkle_root: Option, - ) -> TapTweakHash { - let mut eng = sha256t::Hash::::engine(); - // always hash the key - eng.input(&internal_key.serialize()); - if let Some(h) = merkle_root { - eng.input(h.as_ref()); - } else { - // nothing to hash - } - let inner = sha256t::Hash::::from_engine(eng); - TapTweakHash::from_byte_array(inner.to_byte_array()) - } - /// Converts a `TapTweakHash` into a `Scalar` ready for use with key tweaking API. - fn to_scalar(self) -> Scalar { - // This is statistically extremely unlikely to panic. - Scalar::from_be_bytes(self.to_byte_array()).expect("hash value greater than curve order") +type ControlBlockArrayVec = internals::array_vec::ArrayVec; + +// Taproot test vectors from BIP-341 state the hashes without any reversing +sha256t_tag! { + pub struct TapLeafTag = hash_str("TapLeaf"); +} + +hash_newtype! { + /// Taproot-tagged hash with tag \"TapLeaf\". + /// + /// This is used for computing tapscript script spend hash. 
+ pub struct TapLeafHash(sha256t::Hash); +} + +hashes::impl_hex_for_newtype!(TapLeafHash); +#[cfg(feature = "serde")] +hashes::impl_serde_for_newtype!(TapLeafHash); + +sha256t_tag! { + pub struct TapBranchTag = hash_str("TapBranch"); +} + +hash_newtype! { + /// Tagged hash used in Taproot trees. + /// + /// See BIP-340 for tagging rules. + #[repr(transparent)] + pub struct TapNodeHash(sha256t::Hash); +} + +hashes::impl_hex_for_newtype!(TapNodeHash); +#[cfg(feature = "serde")] +hashes::impl_serde_for_newtype!(TapNodeHash); + +sha256t_tag! { + pub struct TapTweakTag = hash_str("TapTweak"); +} + +hash_newtype! { + /// Taproot-tagged hash with tag \"TapTweak\". + /// + /// This hash type is used while computing the tweaked public key. + pub struct TapTweakHash(sha256t::Hash); +} + +hashes::impl_hex_for_newtype!(TapTweakHash); +#[cfg(feature = "serde")] +hashes::impl_serde_for_newtype!(TapTweakHash); + +impl From for TapNodeHash { + fn from(leaf: TapLeafHash) -> TapNodeHash { TapNodeHash::from_byte_array(leaf.to_byte_array()) } +} + +impl TapTweakHash { + /// Constructs a new BIP341 [`TapTweakHash`] from key and tweak. Produces `H_taptweak(P||R)` where + /// `P` is the internal key and `R` is the Merkle root. + pub fn from_key_and_tweak( + internal_key: UntweakedPublicKey, + merkle_root: Option, + ) -> TapTweakHash { + let mut eng = sha256t::Hash::::engine(); + // always hash the key + eng.input(&internal_key.serialize()); + if let Some(h) = merkle_root { + eng.input(h.as_ref()); + } else { + // nothing to hash } + let inner = sha256t::Hash::::from_engine(eng); + TapTweakHash::from_byte_array(inner.to_byte_array()) + } + + /// Converts a `TapTweakHash` into a `Scalar` ready for use with key tweaking API. + pub fn to_scalar(self) -> Scalar { + // This is statistically extremely unlikely to panic. + Scalar::from_be_bytes(self.to_byte_array()).expect("hash value greater than curve order") } } -crate::internal_macros::define_extension_trait! { - /// Extension functionality for the [`TapLeafHash`] type. - pub trait TapLeafHashExt impl for TapLeafHash { - /// Computes the leaf hash from components. - fn from_script(script: &Script, ver: LeafVersion) -> TapLeafHash { - let mut eng = sha256t::Hash::::engine(); - ver.to_consensus().consensus_encode(&mut eng).expect("engines don't error"); - script.consensus_encode(&mut eng).expect("engines don't error"); - let inner = sha256t::Hash::::from_engine(eng); - TapLeafHash::from_byte_array(inner.to_byte_array()) - } +impl TapLeafHash { + /// Computes the leaf hash from components. + pub fn from_script(script: &Script, ver: LeafVersion) -> TapLeafHash { + let mut eng = sha256t::Hash::::engine(); + ver.to_consensus().consensus_encode(&mut eng).expect("engines don't error"); + script.consensus_encode(&mut eng).expect("engines don't error"); + let inner = sha256t::Hash::::from_engine(eng); + TapLeafHash::from_byte_array(inner.to_byte_array()) } } @@ -82,25 +133,22 @@ impl From<&LeafNode> for TapNodeHash { fn from(leaf: &LeafNode) -> TapNodeHash { leaf.node_hash() } } -crate::internal_macros::define_extension_trait! { - /// Extension functionality for the [`TapNodeHash`] type. - pub trait TapNodeHashExt impl for TapNodeHash { - /// Computes branch hash given two hashes of the nodes underneath it. - fn from_node_hashes(a: TapNodeHash, b: TapNodeHash) -> TapNodeHash { - combine_node_hashes(a, b).0 - } +impl TapNodeHash { + /// Computes branch hash given two hashes of the nodes underneath it. 
+ pub fn from_node_hashes(a: TapNodeHash, b: TapNodeHash) -> TapNodeHash { + combine_node_hashes(a, b).0 + } - /// Assumes the given 32 byte array as hidden [`TapNodeHash`]. - /// - /// Similar to [`TapLeafHash::from_byte_array`], but explicitly conveys that the - /// hash is constructed from a hidden node. This also has better ergonomics - /// because it does not require the caller to import the Hash trait. - fn assume_hidden(hash: [u8; 32]) -> TapNodeHash { TapNodeHash::from_byte_array(hash) } + /// Assumes the given 32 byte array as hidden [`TapNodeHash`]. + /// + /// Similar to [`TapLeafHash::from_byte_array`], but explicitly conveys that the + /// hash is constructed from a hidden node. This also has better ergonomics + /// because it does not require the caller to import the Hash trait. + pub fn assume_hidden(hash: [u8; 32]) -> TapNodeHash { TapNodeHash::from_byte_array(hash) } - /// Computes the [`TapNodeHash`] from a script and a leaf version. - fn from_script(script: &Script, ver: LeafVersion) -> TapNodeHash { - TapNodeHash::from(TapLeafHash::from_script(script, ver)) - } + /// Computes the [`TapNodeHash`] from a script and a leaf version. + pub fn from_script(script: &Script, ver: LeafVersion) -> TapNodeHash { + TapNodeHash::from(TapLeafHash::from_script(script, ver)) } } @@ -141,8 +189,17 @@ pub const TAPROOT_CONTROL_BASE_SIZE: usize = 33; pub const TAPROOT_CONTROL_MAX_SIZE: usize = TAPROOT_CONTROL_BASE_SIZE + TAPROOT_CONTROL_NODE_SIZE * TAPROOT_CONTROL_MAX_NODE_COUNT; +/// The leaf script with its version. +#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)] +pub struct LeafScript { + /// The version of the script. + pub version: LeafVersion, + /// The script, usually `ScriptBuf` or `&Script`. + pub script: S, +} + // type alias for versioned tap script corresponding Merkle proof -type ScriptMerkleProofMap = BTreeMap<(ScriptBuf, LeafVersion), BTreeSet>; +type ScriptMerkleProofMap = BTreeMap<(ScriptBuf, LeafVersion), BTreeSet>; /// Represents Taproot spending information. /// @@ -173,7 +230,7 @@ pub struct TaprootSpendInfo { output_key_parity: secp256k1::Parity, /// The tweaked output key. output_key: TweakedPublicKey, - /// Map from (script, leaf_version) to (sets of) [`TaprootMerkleBranch`]. More than one control + /// Map from (script, leaf_version) to (sets of) [`TaprootMerkleBranchBuf`]. More than one control /// block for a given script is only possible if it appears in multiple branches of the tree. In /// all cases, keeping one should be enough for spending funds, but we keep all of the paths so /// that a full tree can be constructed again from spending data if required. @@ -585,7 +642,9 @@ pub enum IncompleteBuilderError { HiddenParts(TaprootBuilder), } -internals::impl_from_infallible!(IncompleteBuilderError); +impl From for IncompleteBuilderError { + fn from(never: Infallible) -> Self { match never {} } +} impl IncompleteBuilderError { /// Converts error into the original incomplete [`TaprootBuilder`] instance. @@ -631,7 +690,9 @@ pub enum HiddenNodesError { HiddenParts(NodeInfo), } -internals::impl_from_infallible!(HiddenNodesError); +impl From for HiddenNodesError { + fn from(never: Infallible) -> Self { match never {} } +} impl HiddenNodesError { /// Converts error into the original incomplete [`NodeInfo`] instance. @@ -789,7 +850,7 @@ impl DoubleEndedIterator for LeafNodes<'_> { /// /// You can use [`TaprootSpendInfo::from_node_info`] to a get a [`TaprootSpendInfo`] from the Merkle /// root [`NodeInfo`]. 
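The hunks above fold the `TapTweakHash`/`TapLeafHash`/`TapNodeHash` extension traits into inherent impls. As a minimal usage sketch of the surviving API (not part of this patch; the import paths are assumptions based on the current crate layout), deriving the BIP341 tweak scalar for a key-path-only output looks like this:

```rust
// Minimal sketch, not part of the patch: derive the BIP341 tweak scalar for a
// key-path-only output using the inherent methods shown above. Import paths
// are assumptions based on the current crate layout.
use bitcoin::key::UntweakedPublicKey;
use bitcoin::secp256k1::Scalar;
use bitcoin::taproot::TapTweakHash;

fn key_path_tweak(internal_key: UntweakedPublicKey) -> Scalar {
    // `None` means there is no script tree, so the tweak commits only to the
    // internal key: t = H_TapTweak(P).
    TapTweakHash::from_key_and_tweak(internal_key, None).to_scalar()
}
```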
-#[derive(Debug, Clone, PartialOrd, Ord)] +#[derive(Debug, Clone)] pub struct NodeInfo { /// Merkle hash for this node. pub(crate) hash: TapNodeHash, @@ -799,6 +860,24 @@ pub struct NodeInfo { pub(crate) has_hidden_nodes: bool, } +/// Explicitly implement Ord so future changes to NodeInfo (e.g. adding a new field) won't result in +/// potentially changing addresses out from under users +impl Ord for NodeInfo { + fn cmp(&self, other: &Self) -> Ordering { + match self.hash.cmp(&other.hash) { + Ordering::Equal => match self.leaves.cmp(&other.leaves) { + Ordering::Equal => self.has_hidden_nodes.cmp(&other.has_hidden_nodes), + other => other, + }, + other => other, + } + } +} + +impl PartialOrd for NodeInfo { + fn partial_cmp(&self, other: &NodeInfo) -> Option { Some(self.cmp(other)) } +} + impl PartialEq for NodeInfo { fn eq(&self, other: &Self) -> bool { self.hash.eq(&other.hash) } } @@ -967,7 +1046,7 @@ pub struct LeafNode { /// The [`TapLeaf`] leaf: TapLeaf, /// The Merkle proof (hashing partners) to get this node. - merkle_branch: TaprootMerkleBranch, + merkle_branch: TaprootMerkleBranchBuf, } impl LeafNode { @@ -1062,7 +1141,7 @@ impl<'leaf> ScriptLeaf<'leaf> { /// Control block data structure used in Tapscript satisfaction. #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -pub struct ControlBlock { +pub struct ControlBlock where Branch: ?Sized { /// The tapleaf version. pub leaf_version: LeafVersion, /// The parity of the output key (NOT THE INTERNAL KEY WHICH IS ALWAYS XONLY). @@ -1070,7 +1149,7 @@ pub struct ControlBlock { /// The internal key. pub internal_key: UntweakedPublicKey, /// The Merkle proof of a script associated with this leaf. - pub merkle_branch: TaprootMerkleBranch, + pub merkle_branch: Branch, } impl ControlBlock { @@ -1087,27 +1166,29 @@ impl ControlBlock { /// - [`TaprootError::InvalidInternalKey`] if internal key is invalid (first 32 bytes after the parity byte). /// - [`TaprootError::InvalidMerkleTreeDepth`] if Merkle tree is too deep (more than 128 levels). pub fn decode(sl: &[u8]) -> Result { - if sl.len() < TAPROOT_CONTROL_BASE_SIZE - || (sl.len() - TAPROOT_CONTROL_BASE_SIZE) % TAPROOT_CONTROL_NODE_SIZE != 0 - { - return Err(InvalidControlBlockSizeError(sl.len()).into()); - } - let output_key_parity = match sl[0] & 1 { + let (base, merkle_branch) = sl.split_first_chunk::() + .ok_or(InvalidControlBlockSizeError(sl.len()))?; + + let (&first, internal_key) = base.split_first(); + + let output_key_parity = match first & 1 { 0 => secp256k1::Parity::Even, _ => secp256k1::Parity::Odd, }; - let leaf_version = LeafVersion::from_consensus(sl[0] & TAPROOT_LEAF_MASK)?; - let internal_key = UntweakedPublicKey::from_slice(&sl[1..TAPROOT_CONTROL_BASE_SIZE]) + let leaf_version = LeafVersion::from_consensus(first & TAPROOT_LEAF_MASK)?; + let internal_key = UntweakedPublicKey::from_byte_array(internal_key) .map_err(TaprootError::InvalidInternalKey)?; - let merkle_branch = TaprootMerkleBranch::decode(&sl[TAPROOT_CONTROL_BASE_SIZE..])?; + let merkle_branch = TaprootMerkleBranchBuf::decode(merkle_branch)?; Ok(ControlBlock { leaf_version, output_key_parity, internal_key, merkle_branch }) } +} +impl + ?Sized> ControlBlock { /// Returns the size of control block. Faster and more efficient than calling /// `Self::serialize().len()`. Can be handy for fee estimation. 
pub fn size(&self) -> usize { - TAPROOT_CONTROL_BASE_SIZE + TAPROOT_CONTROL_NODE_SIZE * self.merkle_branch.len() + TAPROOT_CONTROL_BASE_SIZE + TAPROOT_CONTROL_NODE_SIZE * self.merkle_branch.as_ref().len() } /// Serializes to a writer. @@ -1116,12 +1197,26 @@ impl ControlBlock { /// /// The number of bytes written to the writer. pub fn encode(&self, writer: &mut W) -> io::Result { + self.encode_inner(move |bytes| writer.write_all(bytes))?; + Ok(self.size()) + } + + pub(crate) fn encode_to_arrayvec(&self) -> ControlBlockArrayVec { + let mut result = ControlBlockArrayVec::new(); + self.encode_inner(|bytes| -> Result<(), core::convert::Infallible> { + result.extend_from_slice(bytes); + Ok(()) + }).unwrap_or_else(|never| match never {}); + result + } + + fn encode_inner(&self, mut write: impl FnMut(&[u8]) -> Result<(), E>) -> Result<(), E> { let first_byte: u8 = i32::from(self.output_key_parity) as u8 | self.leaf_version.to_consensus(); - writer.write_all(&[first_byte])?; - writer.write_all(&self.internal_key.serialize())?; - self.merkle_branch.encode(writer)?; - Ok(self.size()) + write(&[first_byte])?; + write(&self.internal_key.serialize())?; + write(self.merkle_branch.as_ref().as_bytes())?; + Ok(()) } /// Serializes the control block. @@ -1149,7 +1244,7 @@ impl ControlBlock { // Initially the curr_hash is the leaf hash let mut curr_hash = TapNodeHash::from_script(script, self.leaf_version); // Verify the proof - for elem in &self.merkle_branch { + for elem in self.merkle_branch.as_ref() { // Recalculate the curr hash as parent hash curr_hash = TapNodeHash::from_node_hashes(curr_hash, *elem); } @@ -1323,7 +1418,9 @@ pub enum TaprootBuilderError { EmptyTree, } -internals::impl_from_infallible!(TaprootBuilderError); +impl From for TaprootBuilderError { + fn from(never: Infallible) -> Self { match never {} } +} impl fmt::Display for TaprootBuilderError { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { @@ -1380,7 +1477,9 @@ pub enum TaprootError { EmptyTree, } -internals::impl_from_infallible!(TaprootError); +impl From for TaprootError { + fn from(never: Infallible) -> Self { match never {} } +} impl fmt::Display for TaprootError { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { @@ -1436,7 +1535,9 @@ impl InvalidMerkleBranchSizeError { pub fn invalid_merkle_branch_size(&self) -> usize { self.0 } } -internals::impl_from_infallible!(InvalidMerkleBranchSizeError); +impl From for InvalidMerkleBranchSizeError { + fn from(never: Infallible) -> Self { match never {} } +} impl fmt::Display for InvalidMerkleBranchSizeError { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { @@ -1460,7 +1561,9 @@ impl InvalidMerkleTreeDepthError { pub fn invalid_merkle_tree_depth(&self) -> usize { self.0 } } -internals::impl_from_infallible!(InvalidMerkleTreeDepthError); +impl From for InvalidMerkleTreeDepthError { + fn from(never: Infallible) -> Self { match never {} } +} impl fmt::Display for InvalidMerkleTreeDepthError { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { @@ -1484,7 +1587,9 @@ impl InvalidTaprootLeafVersionError { pub fn invalid_leaf_version(&self) -> u8 { self.0 } } -internals::impl_from_infallible!(InvalidTaprootLeafVersionError); +impl From for InvalidTaprootLeafVersionError { + fn from(never: Infallible) -> Self { match never {} } +} impl fmt::Display for InvalidTaprootLeafVersionError { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { @@ -1504,7 +1609,9 @@ impl InvalidControlBlockSizeError { pub fn invalid_control_block_size(&self) -> usize { self.0 } 
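For context on the verification loop in the hunk above, the sketch below restates (outside the patch) how `verify_taproot_commitment` recomputes the Merkle root: start from the leaf hash of the revealed script, then fold in each node of the control block's branch. The final comparison against the tweaked output key is omitted here.

```rust
// Illustrative restatement, not part of the patch: recompute the Merkle root
// committed to by a control block's branch for a revealed script.
use bitcoin::taproot::{LeafVersion, TapNodeHash};
use bitcoin::Script;

fn recompute_merkle_root(
    script: &Script,
    leaf_version: LeafVersion,
    merkle_branch: &[TapNodeHash],
) -> TapNodeHash {
    // The leaf hash commits to the leaf version and the script bytes.
    let mut curr_hash = TapNodeHash::from_script(script, leaf_version);
    // Each step hashes the running value together with its sibling node.
    for elem in merkle_branch {
        curr_hash = TapNodeHash::from_node_hashes(curr_hash, *elem);
    }
    curr_hash
}
```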
} -internals::impl_from_infallible!(InvalidControlBlockSizeError); +impl From for InvalidControlBlockSizeError { + fn from(never: Infallible) -> Self { match never {} } +} impl fmt::Display for InvalidControlBlockSizeError { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { @@ -1519,17 +1626,9 @@ impl fmt::Display for InvalidControlBlockSizeError { #[cfg(feature = "std")] impl std::error::Error for InvalidControlBlockSizeError {} -mod sealed { - pub trait Sealed {} - impl Sealed for super::TapTweakHash {} - impl Sealed for super::TapLeafHash {} - impl Sealed for super::TapNodeHash {} -} - #[cfg(test)] mod test { use hashes::sha256; - use hashes::sha256t::Tag; use hex::{DisplayHex, FromHex}; use secp256k1::VerifyOnly; @@ -1554,19 +1653,19 @@ mod test { } #[test] - fn test_midstates() { - use sha256t::Hash; + fn midstates() { + use sha256t::{Hash, Tag}; // test that engine creation roundtrips - assert_eq!(tag_engine("TapLeaf").midstate(), TapLeafTag::engine().midstate()); - assert_eq!(tag_engine("TapBranch").midstate(), TapBranchTag::engine().midstate()); - assert_eq!(tag_engine("TapTweak").midstate(), TapTweakTag::engine().midstate()); - assert_eq!(tag_engine("TapSighash").midstate(), TapSighashTag::engine().midstate()); + assert_eq!(tag_engine("TapLeaf").midstate().unwrap(), TapLeafTag::MIDSTATE); + assert_eq!(tag_engine("TapBranch").midstate().unwrap(), TapBranchTag::MIDSTATE); + assert_eq!(tag_engine("TapTweak").midstate().unwrap(), TapTweakTag::MIDSTATE); + assert_eq!(tag_engine("TapSighash").midstate().unwrap(), TapSighashTag::MIDSTATE); // check that hash creation is the same as building into the same engine fn empty_hash(tag_name: &str) -> [u8; 32] { let mut e = tag_engine(tag_name); e.input(&[]); - Hash::::from_engine(e).to_byte_array() + sha256::Hash::from_engine(e).to_byte_array() } assert_eq!(empty_hash("TapLeaf"), Hash::::hash(&[]).to_byte_array()); assert_eq!(empty_hash("TapBranch"), Hash::::hash(&[]).to_byte_array()); @@ -1575,7 +1674,7 @@ mod test { } #[test] - fn test_vectors_core() { + fn vectors_core() { //! 
Test vectors taken from Core use sha256t::Hash; @@ -1583,19 +1682,19 @@ mod test { // CHashWriter writer = HasherTapLeaf; // writer.GetSHA256().GetHex() assert_eq!( - Hash::::from_engine(TapLeafTag::engine()).to_string(), + Hash::::from_engine(Hash::::engine()).to_string(), "5212c288a377d1f8164962a5a13429f9ba6a7b84e59776a52c6637df2106facb" ); assert_eq!( - Hash::::from_engine(TapBranchTag::engine()).to_string(), + Hash::::from_engine(Hash::::engine()).to_string(), "53c373ec4d6f3c53c1f5fb2ff506dcefe1a0ed74874f93fa93c8214cbe9ffddf" ); assert_eq!( - Hash::::from_engine(TapTweakTag::engine()).to_string(), + Hash::::from_engine(Hash::::engine()).to_string(), "8aa4229474ab0100b2d6f0687f031d1fc9d8eef92a042ad97d279bff456b15e4" ); assert_eq!( - Hash::::from_engine(TapSighashTag::engine()).to_string(), + Hash::::from_engine(Hash::::engine()).to_string(), "dabc11914abcd8072900042a2681e52f8dba99ce82e224f97b5fdb7cd4b9c803" ); @@ -1822,7 +1921,7 @@ mod test { #[test] #[cfg(feature = "serde")] - fn test_leaf_version_serde() { + fn leaf_version_serde() { let leaf_version = LeafVersion::TapScript; // use serde_test to test serialization and deserialization assert_tokens(&leaf_version, &[Token::U8(192)]); @@ -1834,7 +1933,7 @@ mod test { #[test] #[cfg(feature = "serde")] - fn test_merkle_branch_serde() { + fn merkle_branch_serde() { let hash1 = TapNodeHash::from_byte_array( "03ba2a4dcd914fed29a1c630c7e811271b081a0e2f2f52cf1c197583dfd46c1b" .parse::>() @@ -1847,7 +1946,7 @@ mod test { .unwrap() .to_byte_array(), ); - let merkle_branch = TaprootMerkleBranch::from([hash1, hash2]); + let merkle_branch = TaprootMerkleBranchBuf::from([hash1, hash2]); // use serde_test to test serialization and deserialization serde_test::assert_tokens( &merkle_branch.readable(), diff --git a/bitcoin/src/taproot/serialized_signature.rs b/bitcoin/src/taproot/serialized_signature.rs index 14cfa22be0..be388e2c3c 100644 --- a/bitcoin/src/taproot/serialized_signature.rs +++ b/bitcoin/src/taproot/serialized_signature.rs @@ -155,8 +155,8 @@ impl SerializedSignature { /// Convert the serialized signature into the Signature struct. /// (This deserializes it) #[inline] - pub fn to_signature(&self) -> Result { - Signature::from_slice(self) + pub fn to_signature(self) -> Result { + Signature::from_slice(&self) } /// Constructs a new SerializedSignature from a Signature. 
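The `midstates` and `vectors_core` tests above compare precomputed `MIDSTATE` constants against a locally built `tag_engine` helper. That helper's body is not part of this diff; it is assumed to be the standard BIP340 tagged-hash construction, roughly:

```rust
// Assumed shape of the `tag_engine` test helper (not shown in this diff): a
// BIP340 tagged hash seeds a fresh SHA256 engine with SHA256(tag) twice.
use bitcoin::hashes::{sha256, HashEngine as _};

fn tag_engine(tag_name: &str) -> sha256::HashEngine {
    let tag_hash = sha256::Hash::hash(tag_name.as_bytes());
    let mut engine = sha256::HashEngine::default();
    engine.input(tag_hash.as_byte_array());
    engine.input(tag_hash.as_byte_array());
    engine
}
```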
@@ -217,7 +217,7 @@ mod into_iter { #[inline] fn size_hint(&self) -> (usize, Option) { - // can't underlflow thanks to the invariant + // can't overflow thanks to the invariant let len = self.signature.len() - self.pos; (len, Some(len)) } diff --git a/bitcoin/tests/data/psbt_fuzz1.hex b/bitcoin/tests/data/psbt_fuzz1.hex new file mode 100644 index 0000000000..dd3c9b7ed9 --- /dev/null +++ b/bitcoin/tests/data/psbt_fuzz1.hex @@ -0,0 +1 @@ +70736274ff01000a000000ff0000000074ff4f010488b21eff02000001004a92244992244902030203030303030303030303030303030303030303030303030303030303030303f4000000000000000a000208ffffffff08080804000000000000000c080808000b0000000000010000 \ No newline at end of file diff --git a/bitcoin/tests/data/psbt_fuzz2.hex b/bitcoin/tests/data/psbt_fuzz2.hex new file mode 100644 index 0000000000..8a33cf01c8 --- /dev/null +++ b/bitcoin/tests/data/psbt_fuzz2.hex @@ -0,0 +1 @@ +70736274ff01000a000000ff0000000074ff4f010488b21eff02000001004a92244992244902030203030303030303030303030303030303030303030303030303030303030303f4000000000000000a000208ffffffff080808040000000000000008000000000000001000 \ No newline at end of file diff --git a/bitcoin/tests/data/serde/txout_bincode b/bitcoin/tests/data/serde/txout_bincode index 2c3d207774..1bbd8c3317 100644 Binary files a/bitcoin/tests/data/serde/txout_bincode and b/bitcoin/tests/data/serde/txout_bincode differ diff --git a/bitcoin/tests/psbt-sign-taproot.rs b/bitcoin/tests/psbt-sign-taproot.rs index 15ff88115a..0f8cf5933c 100644 --- a/bitcoin/tests/psbt-sign-taproot.rs +++ b/bitcoin/tests/psbt-sign-taproot.rs @@ -205,13 +205,13 @@ fn create_psbt_for_taproot_key_path_spend( ) -> Psbt { let send_value = 6400; let out_puts = vec![TxOut { - value: Amount::from_sat(send_value), + value: Amount::from_sat(send_value).unwrap(), script_pubkey: to_address.script_pubkey(), }]; let prev_tx_id = "06980ca116f74c7845a897461dd0e1d15b114130176de5004957da516b4dee3a"; let transaction = Transaction { - version: Version(2), + version: Version::TWO, lock_time: absolute::LockTime::ZERO, input: vec![TxIn { previous_output: OutPoint { txid: prev_tx_id.parse().unwrap(), vout: 0 }, @@ -243,7 +243,7 @@ fn create_psbt_for_taproot_key_path_spend( let mut input = Input { witness_utxo: { let script_pubkey = from_address.script_pubkey(); - Some(TxOut { value: Amount::from_sat(utxo_value), script_pubkey }) + Some(TxOut { value: Amount::from_sat(utxo_value).unwrap(), script_pubkey }) }, tap_key_origins: origins, ..Default::default() @@ -283,12 +283,12 @@ fn create_psbt_for_taproot_script_path_spend( let mfp = "73c5da0a"; let out_puts = vec![TxOut { - value: Amount::from_sat(send_value), + value: Amount::from_sat(send_value).unwrap(), script_pubkey: to_address.script_pubkey(), }]; let prev_tx_id = "9d7c6770fca57285babab60c51834cfcfd10ad302119cae842d7216b4ac9a376"; let transaction = Transaction { - version: Version(2), + version: Version::TWO, lock_time: absolute::LockTime::ZERO, input: vec![TxIn { previous_output: OutPoint { txid: prev_tx_id.parse().unwrap(), vout: 0 }, @@ -322,7 +322,7 @@ fn create_psbt_for_taproot_script_path_spend( let mut input = Input { witness_utxo: { let script_pubkey = from_address.script_pubkey(); - Some(TxOut { value: Amount::from_sat(utxo_value), script_pubkey }) + Some(TxOut { value: Amount::from_sat(utxo_value).unwrap(), script_pubkey }) }, tap_key_origins: origins, tap_scripts, diff --git a/bitcoin/tests/serde.rs b/bitcoin/tests/serde.rs index af6f26ad76..25c89306c0 100644 --- a/bitcoin/tests/serde.rs +++ b/bitcoin/tests/serde.rs @@ -109,10 +109,7 @@ 
fn serde_regression_txin() { #[test] fn serde_regression_txout() { - let txout = TxOut { - value: Amount::from_sat(0xDEADBEEFCAFEBABE), - script_pubkey: ScriptBuf::from(vec![0u8, 1u8, 2u8]), - }; + let txout = TxOut { value: Amount::MAX, script_pubkey: ScriptBuf::from(vec![0u8, 1u8, 2u8]) }; let got = serialize(&txout).unwrap(); let want = include_bytes!("data/serde/txout_bincode") as &[_]; assert_eq!(got, want) @@ -238,7 +235,7 @@ fn serde_regression_psbt() { .unwrap()]), }], output: vec![TxOut { - value: Amount::from_sat(190_303_501_938), + value: Amount::from_sat(190_303_501_938).unwrap(), script_pubkey: ScriptBuf::from_hex("a914339725ba21efd62ac753a9bcd067d6c7a6a39d0587") .unwrap(), }], @@ -285,7 +282,7 @@ fn serde_regression_psbt() { inputs: vec![Input { non_witness_utxo: Some(tx), witness_utxo: Some(TxOut { - value: Amount::from_sat(190_303_501_938), + value: Amount::from_sat(190_303_501_938).unwrap(), script_pubkey: ScriptBuf::from_hex("a914339725ba21efd62ac753a9bcd067d6c7a6a39d0587").unwrap(), }), sighash_type: Some(PsbtSighashType::from("SIGHASH_SINGLE|SIGHASH_ANYONECANPAY".parse::().unwrap())), diff --git a/chacha20_poly1305/Cargo.toml b/chacha20_poly1305/Cargo.toml index 7e4e453588..da5242bf99 100644 --- a/chacha20_poly1305/Cargo.toml +++ b/chacha20_poly1305/Cargo.toml @@ -22,3 +22,6 @@ hex = { package = "hex-conservative", version = "0.3.0", default-features = fals [package.metadata.docs.rs] all-features = true rustdoc-args = ["--cfg", "docsrs"] + +[lints.rust] +unexpected_cfgs = { level = "deny", check-cfg = ['cfg(bench)'] } diff --git a/chacha20_poly1305/src/benches.rs b/chacha20_poly1305/src/benches.rs new file mode 100644 index 0000000000..ebe086c3f0 --- /dev/null +++ b/chacha20_poly1305/src/benches.rs @@ -0,0 +1,42 @@ +use test::Bencher; + +use crate::{ChaCha20, Key, Nonce}; + +#[bench] +pub fn chacha20_10(bh: &mut Bencher) { + let key = Key::new([0u8; 32]); + let nonce = Nonce::new([0u8; 12]); + let count = 1; + let mut chacha = ChaCha20::new(key, nonce, count); + let mut bytes = [0u8; 10]; + bh.iter(|| { + chacha.apply_keystream(&mut bytes[..]); + }); + bh.bytes = bytes.len() as u64; +} + +#[bench] +pub fn chacha20_1k(bh: &mut Bencher) { + let key = Key::new([0u8; 32]); + let nonce = Nonce::new([0u8; 12]); + let count = 1; + let mut chacha = ChaCha20::new(key, nonce, count); + let mut bytes = [0u8; 1024]; + bh.iter(|| { + chacha.apply_keystream(&mut bytes[..]); + }); + bh.bytes = bytes.len() as u64; +} + +#[bench] +pub fn chacha20_64k(bh: &mut Bencher) { + let key = Key::new([0u8; 32]); + let nonce = Nonce::new([0u8; 12]); + let count = 1; + let mut chacha = ChaCha20::new(key, nonce, count); + let mut bytes = [0u8; 65536]; + bh.iter(|| { + chacha.apply_keystream(&mut bytes[..]); + }); + bh.bytes = bytes.len() as u64; +} diff --git a/chacha20_poly1305/src/chacha20.rs b/chacha20_poly1305/src/chacha20.rs index 51d0e4b88d..0f332fbaba 100644 --- a/chacha20_poly1305/src/chacha20.rs +++ b/chacha20_poly1305/src/chacha20.rs @@ -45,7 +45,7 @@ impl Nonce { /// /// This type is attempting to be as close as possible to the experimental [`core::simd::u32x4`] /// which at this time is feature gated and well beyond the project's MSRV. But ideally -/// an easy transistion can be made in the future. +/// an easy transition can be made in the future. /// /// A few SIMD relevant design choices: /// * Heavy use of inline functions to help the compiler recognize vectorizable sections. @@ -243,7 +243,7 @@ pub struct ChaCha20 { nonce: Nonce, /// Internal block index of keystream. 
block_count: u32, - /// Interal byte offset index of the block_count. + /// Internal byte offset index of the block_count. seek_offset_bytes: usize, } @@ -333,7 +333,7 @@ mod tests { use super::*; #[test] - fn test_chacha_block() { + fn chacha_block() { let mut state = State { matrix: [ U32x4([0x61707865, 0x3320646e, 0x79622d32, 0x6b206574]), @@ -357,7 +357,7 @@ mod tests { } #[test] - fn test_prepare_state() { + fn prepare_state() { let key = Key(Vec::from_hex("000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f") .unwrap() @@ -374,7 +374,7 @@ mod tests { } #[test] - fn test_small_plaintext() { + fn small_plaintext() { let key = Key(Vec::from_hex("000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f") .unwrap() @@ -391,7 +391,7 @@ mod tests { } #[test] - fn test_modulo_64() { + fn modulo_64() { let key = Key(Vec::from_hex("000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f") .unwrap() @@ -408,7 +408,7 @@ mod tests { } #[test] - fn test_rfc_standard() { + fn rfc_standard() { let key = Key(Vec::from_hex("000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f") .unwrap() @@ -428,7 +428,7 @@ mod tests { } #[test] - fn test_new_from_block() { + fn new_from_block() { let key = Key(Vec::from_hex("000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f") .unwrap() diff --git a/chacha20_poly1305/src/lib.rs b/chacha20_poly1305/src/lib.rs index 330f5389e8..0deea843df 100644 --- a/chacha20_poly1305/src/lib.rs +++ b/chacha20_poly1305/src/lib.rs @@ -1,10 +1,14 @@ // SPDX-License-Identifier: CC0-1.0 +//! # ChaCha20 - Poly1305 +//! //! Combine the ChaCha20 stream cipher with the Poly1305 message authentication code //! to form an authenticated encryption with additional data (AEAD) algorithm. + #![no_std] // Experimental features we need. #![cfg_attr(docsrs, feature(doc_auto_cfg))] +#![cfg_attr(bench, feature(test))] // Coding conventions. #![warn(missing_docs)] #![warn(deprecated_in_future)] @@ -18,6 +22,10 @@ extern crate alloc; #[cfg(feature = "std")] extern crate std; +#[cfg(bench)] +mod benches; +#[cfg(bench)] +extern crate test; pub mod chacha20; pub mod poly1305; @@ -165,7 +173,7 @@ mod tests { use super::*; #[test] - fn test_rfc7539() { + fn rfc7539() { let mut message = *b"Ladies and Gentlemen of the class of '99: If I could offer you only one tip for the future, sunscreen would be it."; let aad = Vec::from_hex("50515253c0c1c2c3c4c5c6c7").unwrap(); let key = Key::new( diff --git a/chacha20_poly1305/src/poly1305.rs b/chacha20_poly1305/src/poly1305.rs index 39e4e7a83c..4aef1343ca 100644 --- a/chacha20_poly1305/src/poly1305.rs +++ b/chacha20_poly1305/src/poly1305.rs @@ -134,10 +134,10 @@ impl Poly1305 { self.acc[i] = t & mask | self.acc[i] & !mask; } // Voodoo from donna to convert to [u32; 4]. - let a0 = self.acc[0] | self.acc[1] << 26; - let a1 = self.acc[1] >> 6 | self.acc[2] << 20; - let a2 = self.acc[2] >> 12 | self.acc[3] << 14; - let a3 = self.acc[3] >> 18 | self.acc[4] << 8; + let a0 = self.acc[0] | (self.acc[1] << 26); + let a1 = (self.acc[1] >> 6) | (self.acc[2] << 20); + let a2 = (self.acc[2] >> 12) | (self.acc[3] << 14); + let a3 = (self.acc[3] >> 18) | (self.acc[4] << 8); let a = [a0, a1, a2, a3]; // a + s let mut tag: [u64; 4] = [0; 4]; @@ -196,21 +196,21 @@ fn prepare_padded_message_slice(msg: &[u8], is_last: bool) -> [u32; 5] { // Encode number in five 26-bit limbs. 
let m0 = u32::from_le_bytes(fmt_msg[0..4].try_into().expect("Valid subset of 32.")) & BITMASK; let m1 = - u32::from_le_bytes(fmt_msg[3..7].try_into().expect("Valid subset of 32.")) >> 2 & BITMASK; - let m2 = - u32::from_le_bytes(fmt_msg[6..10].try_into().expect("Valid subset of 32.")) >> 4 & BITMASK; - let m3 = - u32::from_le_bytes(fmt_msg[9..13].try_into().expect("Valid subset of 32.")) >> 6 & BITMASK; - let m4 = - u32::from_le_bytes(fmt_msg[12..16].try_into().expect("Valid subset of 32.")) >> 8 | hi_bit; + (u32::from_le_bytes(fmt_msg[3..7].try_into().expect("Valid subset of 32.")) >> 2) & BITMASK; + let m2 = (u32::from_le_bytes(fmt_msg[6..10].try_into().expect("Valid subset of 32.")) >> 4) + & BITMASK; + let m3 = (u32::from_le_bytes(fmt_msg[9..13].try_into().expect("Valid subset of 32.")) >> 6) + & BITMASK; + let m4 = (u32::from_le_bytes(fmt_msg[12..16].try_into().expect("Valid subset of 32.")) >> 8) + | hi_bit; [m0, m1, m2, m3, m4] } fn _print_acc(num: &[u32; 5]) { - let a0 = num[0] | num[1] << 26; - let a1 = num[1] >> 6 | num[2] << 20; - let a2 = num[2] >> 12 | num[3] << 14; - let a3 = num[3] >> 18 | num[4] << 8; + let a0 = num[0] | (num[1] << 26); + let a1 = (num[1] >> 6) | (num[2] << 20); + let a2 = (num[2] >> 12) | (num[3] << 14); + let a3 = (num[3] >> 18) | (num[4] << 8); let a = [a0, a1, a2, a3]; let mut ret: [u8; 16] = [0; 16]; for i in 0..a.len() { @@ -230,7 +230,7 @@ mod tests { use super::*; #[test] - fn test_rfc7539() { + fn rfc7539() { let key = Vec::from_hex("85d6be7857556d337f4452fe42d506a80103808afb0db2fd4abff6af4149f51b") .unwrap() .as_slice() diff --git a/clippy.toml b/clippy.toml index ddca6b4cb8..47eac63c91 100644 --- a/clippy.toml +++ b/clippy.toml @@ -1,2 +1,4 @@ msrv = "1.63.0" too-many-arguments-threshold = 13 +avoid-breaking-exported-api = false +doc-valid-idents = ["SegWit", "OpenSSL"] diff --git a/contrib/api.sh b/contrib/api.sh new file mode 100755 index 0000000000..03bfe1203b --- /dev/null +++ b/contrib/api.sh @@ -0,0 +1,149 @@ +#!/usr/bin/env bash +# +# Script for querying the API. +# +# Shellcheck can't search dynamic paths +# shellcheck source=/dev/null + +set -euo pipefail + +file="" # File name of the all-features API text file. +crate_full_name="" # Full crate name using underscores e.g., `bitcoin_primitives`. +crate="" # Short name e.g., `primitives`. + +# Set to false to turn off verbose output. +flag_verbose=false + +usage() { + cat <]+>)?(?=\(|;| |$)' "$file" | sed "s/^${crate_full_name}:://" +} + +# Print all public structs and enums excluding error types. +structs_and_enums_no_err() { + grep -oP 'pub (struct|enum) \K[\w:]+(?:<[^>]+>)?(?=\(|;| |$)' "$file" | sed "s/^${crate_full_name}:://" | grep -v Error +} + +# Print all public traits. +traits() { + grep -oP '^pub trait \K[\w:]+' "$file" | sed "s/^${crate_full_name}:://" | sed 's/:$//' +} + +# Check all the commands we use are present in the current environment. +check_required_commands() { + need_cmd grep +} + +say() { + echo "api: $1" +} + +say_err() { + say "$1" >&2 +} + +verbose_say() { + if [ "$flag_verbose" = true ]; then + say "$1" + fi +} + +err() { + echo "$1" >&2 + exit 1 +} + +need_cmd() { + if ! 
command -v "$1" > /dev/null 2>&1 + then err "need '$1' (command not found)" + fi +} + +# +# Main script +# +main "$@" +exit 0 diff --git a/contrib/check-for-api-changes.sh b/contrib/check-for-api-changes.sh new file mode 100755 index 0000000000..3bfe415715 --- /dev/null +++ b/contrib/check-for-api-changes.sh @@ -0,0 +1,101 @@ +#!/usr/bin/env bash +# +# Checks the public API of crates, exits with non-zero if there are currently +# changes to the public API not already committed to in the various api/*.txt +# files. + +set -euo pipefail + +REPO_DIR=$(git rev-parse --show-toplevel) +API_DIR="$REPO_DIR/api" + +NIGHTLY=$(cat nightly-version) +# Our docs have broken intra doc links if all features are not enabled. +RUSTDOCFLAGS="-A rustdoc::broken_intra_doc_links" + +# `sort -n -u` doesn't work for some reason. +SORT="sort --numeric-sort" + +# Sort order is effected by locale. See `man sort`. +# > Set LC_ALL=C to get the traditional sort order that uses native byte values. +export LC_ALL=C + +main() { + need_nightly + need_cargo_public_api + + generate_api_files "hashes" + generate_api_files "io" + generate_api_files "primitives" + generate_api_files "units" + + check_for_changes +} + +# Uses `CARGO` to generate API files in the specified crate. +# +# Files: +# +# - no-features.txt +# - alloc-only.txt +# - all-features.txt +generate_api_files() { + local crate=$1 + pushd "$REPO_DIR/$crate" > /dev/null + + run_cargo --no-default-features | $SORT | uniq > "$API_DIR/$crate/no-features.txt" + run_cargo --no-default-features --features=alloc | $SORT | uniq > "$API_DIR/$crate/alloc-only.txt" + run_cargo_all_features | $SORT | uniq > "$API_DIR/$crate/all-features.txt" + + popd > /dev/null +} + +# Check if there are changes (dirty git index) to the `api/` directory. +check_for_changes() { + pushd "$REPO_DIR" > /dev/null + + if [[ $(git status --porcelain api) ]]; then + git diff --color=always + echo + err "You have introduced changes to the public API, commit the changes to api/ currently in your working directory" + else + echo "No changes to the current public API" + fi + + popd > /dev/null +} + +# Run cargo when --all-features is not used. +run_cargo() { + RUSTDOCFLAGS="$RUSTDOCFLAGS" cargo +"$NIGHTLY" public-api --simplified "$@" +} + +# Run cargo with all features enabled. +run_cargo_all_features() { + cargo +"$NIGHTLY" public-api --simplified --all-features +} + +need_nightly() { + cargo_ver=$(cargo +"$NIGHTLY" --version) + if echo "$cargo_ver" | grep -q -v nightly; then + err "Need a nightly compiler; have $cargo_ver" + fi +} + +need_cargo_public_api() { + if command -v cargo-public-api > /dev/null; then + return + fi + err "cargo-public-api is not installed; please run 'cargo +nightly install cargo-public-api --locked'" +} + +err() { + echo "$1" >&2 + exit 1 +} + +# +# Main script +# +main "$@" +exit 0 diff --git a/contrib/release.sh b/contrib/release.sh index b30f2fb846..c02657948d 100755 --- a/contrib/release.sh +++ b/contrib/release.sh @@ -6,7 +6,7 @@ set -euox pipefail main () { - for crate in "internals" "hashes" "bitcoin"; do + for crate in "bitcoin" "hashes" "internals" "units"; do if release_changes $crate; then echo "$crate has changes implying this is a release PR, checking if we can publish ..." @@ -35,12 +35,14 @@ release_changes() { # We use `set -e` so this will fail the script if the dry-run fails. 
publish_dry_run() { local crate=$1 - if [ "$crate" == "hashes" ]; then + if [ "$crate" == "bitcoin" ]; then + cargo publish -p "bitcoin" --dry-run + elif [ "$crate" == "hashes" ]; then cargo publish -p "bitcoin_hashes" --dry-run elif [ "$crate" == "internals" ]; then cargo publish -p "bitcoin-internals" --dry-run - elif [ "$crate" == "bitcoin" ]; then - cargo publish -p "bitcoin" --dry-run + elif [ "$crate" == "units" ]; then + cargo publish -p "bitcoin-units" --dry-run fi } diff --git a/contrib/test-miri.sh b/contrib/test-miri.sh index 6ca2dab512..5e979a6f46 100755 --- a/contrib/test-miri.sh +++ b/contrib/test-miri.sh @@ -6,7 +6,7 @@ cd "$(dirname "$0")/.." . contrib/test_vars.sh -target_features="$(rustc --print target-features | awk '{ if ($1 == "") { exit 0 } if (NR != 1 && $1 != "crt-static") { if (NR == 2) { printf "+%s", $1 } else { printf ",+%s", $1 } } }')" +target_features="$(rustc --print target-features | awk '{ if ($1 == "") { exit 0 } if (NR != 1 && $1 != "crt-static" && $1 != "soft-float") { if (NR == 2) { printf "+%s", $1 } else { printf ",+%s", $1 } } }')" for crate in $CRATES; do diff --git a/contrib/update-lock-files.sh b/contrib/update-lock-files.sh index 02ba1f7c82..90b1865068 100755 --- a/contrib/update-lock-files.sh +++ b/contrib/update-lock-files.sh @@ -5,7 +5,7 @@ set -euo pipefail for file in Cargo-minimal.lock Cargo-recent.lock; do - cp --force "$file" Cargo.lock + cp -f "$file" Cargo.lock cargo check - cp --force Cargo.lock "$file" + cp -f Cargo.lock "$file" done diff --git a/fuzz/fuzz_targets/bitcoin/deserialize_psbt.rs b/fuzz/fuzz_targets/bitcoin/deserialize_psbt.rs index 59d28142cb..7b36cf6975 100644 --- a/fuzz/fuzz_targets/bitcoin/deserialize_psbt.rs +++ b/fuzz/fuzz_targets/bitcoin/deserialize_psbt.rs @@ -1,17 +1,6 @@ +use bitcoin_fuzz::fuzz_utils::consume_random_bytes; use honggfuzz::fuzz; -fn consume_random_bytes<'a>(data: &mut &'a [u8]) -> &'a [u8] { - if data.is_empty() { - return &[]; - } - - let length = (data[0] as usize) % (data.len() + 1); - let (bytes, rest) = data.split_at(length); - *data = rest; - - bytes -} - fn do_test(data: &[u8]) { let mut new_data = data; let bytes = consume_random_bytes(&mut new_data); diff --git a/fuzz/fuzz_targets/bitcoin/deserialize_script.rs b/fuzz/fuzz_targets/bitcoin/deserialize_script.rs index 8bd904fff0..41ef804933 100644 --- a/fuzz/fuzz_targets/bitcoin/deserialize_script.rs +++ b/fuzz/fuzz_targets/bitcoin/deserialize_script.rs @@ -1,14 +1,25 @@ use bitcoin::address::Address; use bitcoin::consensus::encode; use bitcoin::script::{self, ScriptExt as _}; -use bitcoin::Network; +use bitcoin::{FeeRate, Network}; +use bitcoin_fuzz::fuzz_utils::{consume_random_bytes, consume_u64}; use honggfuzz::fuzz; fn do_test(data: &[u8]) { - let s: Result = encode::deserialize(data); + let mut new_data = data; + let bytes = consume_random_bytes(&mut new_data); + let s: Result = encode::deserialize(bytes); if let Ok(script) = s { let _: Result, script::Error> = script.instructions().collect(); + let _ = script.to_string(); + let _ = script.count_sigops(); + let _ = script.count_sigops_legacy(); + let _ = script.minimal_non_dust(); + + let fee_rate = FeeRate::from_sat_per_kwu(consume_u64(&mut new_data)); + let _ = script.minimal_non_dust_custom(fee_rate); + let mut b = script::Builder::new(); for ins in script.instructions_minimal() { if ins.is_err() { diff --git a/fuzz/fuzz_targets/bitcoin/deserialize_transaction.rs b/fuzz/fuzz_targets/bitcoin/deserialize_transaction.rs index 0ded475ad6..c37e8e7e21 100644 --- 
a/fuzz/fuzz_targets/bitcoin/deserialize_transaction.rs +++ b/fuzz/fuzz_targets/bitcoin/deserialize_transaction.rs @@ -16,7 +16,7 @@ fn do_test(data: &[u8]) { } let no_witness_len = bitcoin::consensus::encode::serialize(&tx).len(); // For 0-input transactions, `no_witness_len` will be incorrect because - // we serialize as segwit even after "stripping the witnesses". We need + // we serialize as SegWit even after "stripping the witnesses". We need // to drop two bytes (i.e. eight weight). Similarly, calculated_weight is // incorrect and needs 2 wu removing for the marker/flag bytes. if tx.input.is_empty() { diff --git a/fuzz/generate-files.sh b/fuzz/generate-files.sh index e1c8a9d463..e85f20ed54 100755 --- a/fuzz/generate-files.sh +++ b/fuzz/generate-files.sh @@ -57,7 +57,7 @@ on: jobs: fuzz: if: \${{ !github.event.act }} - runs-on: ubuntu-latest + runs-on: ubuntu-24.04 strategy: fail-fast: false matrix: @@ -89,7 +89,7 @@ $(for name in $(listTargetNames); do echo " $name,"; done) echo "Using RUSTFLAGS \$RUSTFLAGS" cd fuzz && ./fuzz.sh "\${{ matrix.fuzz_target }}" - run: echo "\${{ matrix.fuzz_target }}" >executed_\${{ matrix.fuzz_target }} - - uses: actions/upload-artifact@v3 + - uses: actions/upload-artifact@v4 with: name: executed_\${{ matrix.fuzz_target }} path: executed_\${{ matrix.fuzz_target }} @@ -97,10 +97,10 @@ $(for name in $(listTargetNames); do echo " $name,"; done) verify-execution: if: \${{ !github.event.act }} needs: fuzz - runs-on: ubuntu-latest + runs-on: ubuntu-24.04 steps: - uses: actions/checkout@v4 - - uses: actions/download-artifact@v3 + - uses: actions/download-artifact@v4 - name: Display structure of downloaded files run: ls -R - run: find executed_* -type f -exec cat {} + | sort > executed diff --git a/fuzz/src/fuzz_utils.rs b/fuzz/src/fuzz_utils.rs new file mode 100644 index 0000000000..8a072db98b --- /dev/null +++ b/fuzz/src/fuzz_utils.rs @@ -0,0 +1,37 @@ +// SPDX-License-Identifier: CC0-1.0 + +//! Helper functions for fuzzing. + +pub fn consume_random_bytes<'a>(data: &mut &'a [u8]) -> &'a [u8] { + if data.is_empty() { + return &[]; + } + + let length = (data[0] as usize) % (data.len() + 1); + let (bytes, rest) = data.split_at(length); + *data = rest; + + bytes +} + +#[allow(dead_code)] +pub fn consume_u64(data: &mut &[u8]) -> u64 { + // We need at least 8 bytes to read a u64 + if data.len() < 8 { + return 0; + } + + let (u64_bytes, rest) = data.split_at(8); + *data = rest; + + u64::from_le_bytes([ + u64_bytes[0], + u64_bytes[1], + u64_bytes[2], + u64_bytes[3], + u64_bytes[4], + u64_bytes[5], + u64_bytes[6], + u64_bytes[7], + ]) +} diff --git a/fuzz/src/lib.rs b/fuzz/src/lib.rs new file mode 100644 index 0000000000..1be440fab2 --- /dev/null +++ b/fuzz/src/lib.rs @@ -0,0 +1,5 @@ +// SPDX-License-Identifier: CC0-1.0 + +//! # Fuzzing + +pub mod fuzz_utils; diff --git a/hashes/CHANGELOG.md b/hashes/CHANGELOG.md index eccea78f9d..bd9aed7337 100644 --- a/hashes/CHANGELOG.md +++ b/hashes/CHANGELOG.md @@ -1,6 +1,12 @@ +# 0.16.0 - 2024-12-12 + +* Make `hex-conservative` an optional dependency [#3611](https://github.com/rust-bitcoin/rust-bitcoin/pull/3611) +* Bump `hex-conservative` to `v0.3.0` [#3543](https://github.com/rust-bitcoin/rust-bitcoin/pull/3543) +* Hide error internals [#3579](https://github.com/rust-bitcoin/rust-bitcoin/pull/3579) + # 0.15.0 - 2024-10-16 -This release is massive. The biggest visable changes are to the `Hash` trait, which has mostly been replaced +This release is massive. 
The biggest visible changes are to the `Hash` trait, which has mostly been replaced by inherent functions. You should not need to import it at all anymore for normal usage. Check out how we are using `hashes` in `rust-bitcoin` to see an example. Enjoy! @@ -46,7 +52,7 @@ using `hashes` in `rust-bitcoin` to see an example. Enjoy! * Bump MSRV to Rust version 1.56.1 [#2188](https://github.com/rust-bitcoin/rust-bitcoin/pull/2188) -## API improvemnts +## API improvements * Add support for SHA384 [#2538](https://github.com/rust-bitcoin/rust-bitcoin/pull/2538) * Make from_hex inherent for byte-like types [#2491](https://github.com/rust-bitcoin/rust-bitcoin/pull/2491) diff --git a/hashes/Cargo.toml b/hashes/Cargo.toml index c05e318809..3e020d4b9a 100644 --- a/hashes/Cargo.toml +++ b/hashes/Cargo.toml @@ -1,12 +1,12 @@ [package] name = "bitcoin_hashes" -version = "0.15.0" +version = "0.16.0" authors = ["Andrew Poelstra "] license = "CC0-1.0" repository = "https://github.com/rust-bitcoin/rust-bitcoin" documentation = "https://docs.rs/bitcoin_hashes/" description = "Hash functions used by the rust-bitcoin eccosystem" -categories = ["algorithms"] +categories = ["algorithms", "cryptography", "no-std"] keywords = [ "crypto", "bitcoin", "hash", "digest" ] readme = "README.md" edition = "2021" @@ -15,16 +15,15 @@ exclude = ["tests", "contrib"] [features] default = ["std"] -std = ["alloc", "bitcoin-io?/std", "hex/std"] -alloc = ["bitcoin-io?/alloc", "hex/alloc"] +std = ["alloc", "hex?/std"] +alloc = ["hex?/alloc"] serde = ["dep:serde", "hex"] # Smaller (but slower) implementation of sha256, sha512 and ripemd160 small-hash = [] [dependencies] - +internals = { package = "bitcoin-internals", version = "0.4.0" } hex = { package = "hex-conservative", version = "0.3.0", default-features = false, optional = true } -bitcoin-io = { version = "0.2.0", default-features = false, optional = true } serde = { version = "1.0", default-features = false, optional = true } [dev-dependencies] diff --git a/hashes/README.md b/hashes/README.md index 87ec9b345a..6052b3603a 100644 --- a/hashes/README.md +++ b/hashes/README.md @@ -3,7 +3,7 @@ This is a simple, no-dependency library which implements the hash functions needed by Bitcoin. These are SHA1, SHA256, SHA256d, SHA512, and RIPEMD160. As an ancillary thing, it exposes hexadecimal serialization and deserialization, -since these are needed to display hashes anway. +since these are needed to display hashes anyway. [Documentation](https://docs.rs/bitcoin_hashes/) @@ -29,5 +29,5 @@ Alternatively add symlinks in your `.git/hooks` directory to any of the githooks ### Running Benchmarks -We use a custom Rust compiler configuration conditional to guard the bench mark code. To run the -bench marks use: `RUSTFLAGS='--cfg=bench' cargo +nightly bench`. +We use a custom Rust compiler configuration conditional to guard the benchmark code. To run the +benchmarks use: `RUSTFLAGS='--cfg=bench' cargo +nightly bench`. diff --git a/hashes/contrib/sanitizer.sh b/hashes/contrib/sanitizer.sh index e3d1e81521..495e404c18 100755 --- a/hashes/contrib/sanitizer.sh +++ b/hashes/contrib/sanitizer.sh @@ -5,7 +5,7 @@ set -euox pipefail # Run the sanitizer with these features. 
-FEATURES="std bitcoin-io serde" +FEATURES="std serde" cargo clean CC='clang -fsanitize=address -fno-omit-frame-pointer' \ diff --git a/hashes/contrib/test_vars.sh b/hashes/contrib/test_vars.sh index 35360740a3..efcd3dcfc6 100644 --- a/hashes/contrib/test_vars.sh +++ b/hashes/contrib/test_vars.sh @@ -5,10 +5,10 @@ # shellcheck disable=SC2034 # Test all these features with "std" enabled. -FEATURES_WITH_STD="bitcoin-io serde small-hash" +FEATURES_WITH_STD="serde small-hash" # Test all these features without "std" enabled. -FEATURES_WITHOUT_STD="alloc bitcoin-io serde small-hash" +FEATURES_WITHOUT_STD="alloc serde small-hash" # Run these examples. EXAMPLES="" diff --git a/hashes/embedded/Cargo.toml b/hashes/embedded/Cargo.toml index be73fe563d..dcf5a7ecbc 100644 --- a/hashes/embedded/Cargo.toml +++ b/hashes/embedded/Cargo.toml @@ -19,8 +19,8 @@ cortex-m-rt = "0.6.10" cortex-m-semihosting = "0.3.3" panic-halt = "0.2.0" alloc-cortex-m = { version = "0.4.1", optional = true } -bitcoin_hashes = { path="../", default-features = false, features = ["bitcoin-io"] } -bitcoin-io = { path = "../../io", default_features = false } +bitcoin_hashes = { path="../", default-features = false, features = [] } +bitcoin-io = { path = "../../io", default_features = false, features = ["hashes"] } [[bin]] name = "embedded" @@ -32,6 +32,9 @@ codegen-units = 1 # better optimizations debug = true # symbols are nice and they don't increase the size on Flash lto = true # better optimizations +[patch.crates-io.bitcoin_hashes] +path = "../../hashes" + [patch.crates-io.bitcoin-internals] path = "../../internals" diff --git a/hashes/src/cmp.rs b/hashes/src/cmp.rs index b19aeb4003..3a72b389ff 100644 --- a/hashes/src/cmp.rs +++ b/hashes/src/cmp.rs @@ -95,6 +95,7 @@ mod tests { } #[cfg(bench)] +#[cfg(feature = "hex")] mod benches { use test::Bencher; diff --git a/hashes/src/error.rs b/hashes/src/error.rs index bac9ba6492..ce2c82af2d 100644 --- a/hashes/src/error.rs +++ b/hashes/src/error.rs @@ -2,13 +2,16 @@ //! Error code for the `hashes` crate. +use core::convert::Infallible; use core::fmt; /// Attempted to create a hash from an invalid length slice. #[derive(Debug, Clone, PartialEq, Eq)] pub struct FromSliceError(pub(crate) FromSliceErrorInner); -impl_from_infallible!(FromSliceError); +impl From for FromSliceError { + fn from(never: Infallible) -> Self { match never {} } +} impl FromSliceError { /// Returns the expected slice length. @@ -25,7 +28,9 @@ pub(crate) struct FromSliceErrorInner { pub(crate) got: usize, } -impl_from_infallible!(FromSliceErrorInner); +impl From for FromSliceErrorInner { + fn from(never: Infallible) -> Self { match never {} } +} impl fmt::Display for FromSliceError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { @@ -35,19 +40,3 @@ impl fmt::Display for FromSliceError { #[cfg(feature = "std")] impl std::error::Error for FromSliceError {} - -/// Derives `From` for the given type. -// This is a duplicate of `internals::impl_from_infallible`, see there for complete docs. -#[doc(hidden)] -macro_rules! impl_from_infallible { - ( $name:ident $(< $( $lt:tt $( : $clt:tt $(+ $dlt:tt )* )? ),+ >)? ) => { - impl $(< $( $lt $( : $clt $(+ $dlt )* )? ),+ >)? - From - for $name - $(< $( $lt ),+ >)? 
- { - fn from(never: core::convert::Infallible) -> Self { match never {} } - } - } -} -pub(crate) use impl_from_infallible; diff --git a/hashes/src/hash160.rs b/hashes/src/hash160/mod.rs similarity index 94% rename from hashes/src/hash160.rs rename to hashes/src/hash160/mod.rs index 2e90ca3914..3c457c1aac 100644 --- a/hashes/src/hash160.rs +++ b/hashes/src/hash160/mod.rs @@ -15,8 +15,17 @@ crate::internal_macros::general_hash_type! { "Output of the Bitcoin HASH160 hash function. (RIPEMD160(SHA256))" } +fn from_engine(e: HashEngine) -> Hash { + let sha2 = sha256::Hash::from_engine(e.0); + let rmd = ripemd160::Hash::hash(sha2.as_byte_array()); + + let mut ret = [0; 20]; + ret.copy_from_slice(rmd.as_byte_array()); + Hash(ret) +} + /// Engine to compute HASH160 hash function. -#[derive(Clone)] +#[derive(Debug, Clone)] pub struct HashEngine(sha256::HashEngine); impl HashEngine { @@ -29,24 +38,20 @@ impl Default for HashEngine { } impl crate::HashEngine for HashEngine { + type Hash = Hash; + type Bytes = [u8; 20]; const BLOCK_SIZE: usize = 64; // Same as sha256::HashEngine::BLOCK_SIZE; + fn input(&mut self, data: &[u8]) { self.0.input(data) } fn n_bytes_hashed(&self) -> u64 { self.0.n_bytes_hashed() } -} - -fn from_engine(e: HashEngine) -> Hash { - let sha2 = sha256::Hash::from_engine(e.0); - let rmd = ripemd160::Hash::hash(sha2.as_byte_array()); - - let mut ret = [0; 20]; - ret.copy_from_slice(rmd.as_byte_array()); - Hash(ret) + fn finalize(self) -> Self::Hash { Hash::from_engine(self) } } #[cfg(test)] mod tests { #[test] #[cfg(feature = "alloc")] + #[cfg(feature = "hex")] fn test() { use alloc::string::ToString; @@ -117,7 +122,7 @@ mod tests { 0xf1, 0x4a, 0xca, 0xd7, ]; - let hash = hash160::Hash::from_slice(&HASH_BYTES).expect("right number of bytes"); + let hash = hash160::Hash::from_byte_array(HASH_BYTES); assert_tokens(&hash.compact(), &[Token::BorrowedBytes(&HASH_BYTES[..])]); assert_tokens(&hash.readable(), &[Token::Str("132072df690933835eb8b6ad0b77e7b6f14acad7")]); } @@ -127,7 +132,7 @@ mod tests { mod benches { use test::Bencher; - use crate::{hash160, GeneralHash as _, Hash as _, HashEngine}; + use crate::{hash160, Hash as _, HashEngine}; #[bench] pub fn hash160_10(bh: &mut Bencher) { diff --git a/hashes/src/hkdf.rs b/hashes/src/hkdf/mod.rs similarity index 73% rename from hashes/src/hkdf.rs rename to hashes/src/hkdf/mod.rs index cfe641f690..c381422da4 100644 --- a/hashes/src/hkdf.rs +++ b/hashes/src/hkdf/mod.rs @@ -11,7 +11,7 @@ use alloc::vec; use alloc::vec::Vec; use core::fmt; -use crate::{GeneralHash, HashEngine, Hmac, HmacEngine, IsByteArray}; +use crate::{HashEngine, Hmac, HmacEngine, IsByteArray}; /// Output keying material max length multiple. const MAX_OUTPUT_BLOCKS: usize = 255; @@ -32,20 +32,21 @@ impl fmt::Display for MaxLengthError { impl std::error::Error for MaxLengthError {} /// HMAC-based Extract-and-Expand Key Derivation Function (HKDF). -pub struct Hkdf { +#[derive(Copy, Clone)] +pub struct Hkdf { /// Pseudorandom key based on the extract step. - prk: Hmac, + prk: Hmac, } -impl Hkdf +impl Hkdf where - ::Engine: Default, + T: Default, { /// Initialize a HKDF by performing the extract step. pub fn new(salt: &[u8], ikm: &[u8]) -> Self { - let mut hmac_engine: HmacEngine = HmacEngine::new(salt); - hmac_engine.input(ikm); - Self { prk: Hmac::from_engine(hmac_engine) } + let mut engine: HmacEngine = HmacEngine::new(salt); + engine.input(ikm); + Self { prk: engine.finalize() } } /// Expand the key to generate output key material in okm. 
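As a usage note for the engine-parameterized `Hkdf` above (mirroring the RFC 5869 tests further down), a minimal sketch; it assumes a feature set that exposes the `hkdf` module:

```rust
// Minimal usage sketch, not part of the patch: the HKDF type parameter is now
// the hash *engine* rather than the hash type.
use bitcoin_hashes::hkdf::Hkdf;
use bitcoin_hashes::sha256;

fn derive_32_bytes(salt: &[u8], ikm: &[u8], info: &[u8]) -> [u8; 32] {
    // Extract step.
    let hkdf = Hkdf::<sha256::HashEngine>::new(salt, ikm);
    // Expand step; this errors only if the requested length exceeds 255 blocks.
    let mut okm = [0u8; 32];
    hkdf.expand(info, &mut okm).expect("32 bytes is well within the limit");
    okm
}
```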
@@ -64,19 +65,19 @@ where let total_blocks = (okm.len() + T::Bytes::LEN - 1) / T::Bytes::LEN; while counter <= total_blocks as u8 { - let mut hmac_engine: HmacEngine = HmacEngine::new(self.prk.as_ref()); + let mut engine: HmacEngine = HmacEngine::new(self.prk.as_ref()); // First block does not have a previous block, // all other blocks include last block in the HMAC input. if counter != 1u8 { let previous_start_index = (counter as usize - 2) * T::Bytes::LEN; let previous_end_index = (counter as usize - 1) * T::Bytes::LEN; - hmac_engine.input(&okm[previous_start_index..previous_end_index]); + engine.input(&okm[previous_start_index..previous_end_index]); } - hmac_engine.input(info); - hmac_engine.input(&[counter]); + engine.input(info); + engine.input(&[counter]); - let t = Hmac::from_engine(hmac_engine); + let t = engine.finalize(); let start_index = (counter as usize - 1) * T::Bytes::LEN; // Last block might not take full hash length. let end_index = if counter == (total_blocks as u8) { @@ -105,8 +106,29 @@ where } } +impl fmt::Debug for Hkdf { + fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result { + use crate::{sha256t, sha256t_tag}; + + struct Fingerprint([u8; 8]); // Print 16 hex characters as a fingerprint. + + impl fmt::Debug for Fingerprint { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { crate::debug_hex(&self.0, f) } + } + + sha256t_tag! { + pub struct Tag = hash_str("bitcoin_hashes1DEBUG"); + } + + let hash = sha256t::Hash::::hash(self.prk.as_ref()); + let fingerprint = Fingerprint(core::array::from_fn(|i| hash.as_byte_array()[i])); + f.debug_tuple("Hkdf").field(&format_args!("#{:?}", fingerprint)).finish() + } +} + #[cfg(test)] #[cfg(feature = "alloc")] +#[cfg(feature = "hex")] mod tests { use hex::prelude::{DisplayHex, FromHex}; @@ -114,12 +136,12 @@ mod tests { use crate::sha256; #[test] - fn test_rfc5869_basic() { + fn rfc5869_basic() { let salt = Vec::from_hex("000102030405060708090a0b0c").unwrap(); let ikm = Vec::from_hex("0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b").unwrap(); let info = Vec::from_hex("f0f1f2f3f4f5f6f7f8f9").unwrap(); - let hkdf = Hkdf::::new(&salt, &ikm); + let hkdf = Hkdf::::new(&salt, &ikm); let mut okm = [0u8; 42]; hkdf.expand(&info, &mut okm).unwrap(); @@ -130,7 +152,7 @@ mod tests { } #[test] - fn test_rfc5869_longer_inputs_outputs() { + fn rfc5869_longer_inputs_outputs() { let salt = Vec::from_hex( "606162636465666768696a6b6c6d6e6f707172737475767778797a7b7c7d7e7f808182838485868788898a8b8c8d8e8f909192939495969798999a9b9c9d9e9fa0a1a2a3a4a5a6a7a8a9aaabacadaeaf" ).unwrap(); @@ -141,7 +163,7 @@ mod tests { "b0b1b2b3b4b5b6b7b8b9babbbcbdbebfc0c1c2c3c4c5c6c7c8c9cacbcccdcecfd0d1d2d3d4d5d6d7d8d9dadbdcdddedfe0e1e2e3e4e5e6e7e8e9eaebecedeeeff0f1f2f3f4f5f6f7f8f9fafbfcfdfeff" ).unwrap(); - let hkdf = Hkdf::::new(&salt, &ikm); + let hkdf = Hkdf::::new(&salt, &ikm); let mut okm = [0u8; 82]; hkdf.expand(&info, &mut okm).unwrap(); @@ -152,12 +174,12 @@ mod tests { } #[test] - fn test_too_long_okm() { + fn too_long_okm() { let salt = Vec::from_hex("000102030405060708090a0b0c").unwrap(); let ikm = Vec::from_hex("0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b").unwrap(); let info = Vec::from_hex("f0f1f2f3f4f5f6f7f8f9").unwrap(); - let hkdf = Hkdf::::new(&salt, &ikm); + let hkdf = Hkdf::::new(&salt, &ikm); let mut okm = [0u8; 256 * 32]; let e = hkdf.expand(&info, &mut okm); @@ -165,12 +187,12 @@ mod tests { } #[test] - fn test_short_okm() { + fn short_okm() { let salt = Vec::from_hex("000102030405060708090a0b0c").unwrap(); let ikm = 
Vec::from_hex("0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b").unwrap(); let info = Vec::from_hex("f0f1f2f3f4f5f6f7f8f9").unwrap(); - let hkdf = Hkdf::::new(&salt, &ikm); + let hkdf = Hkdf::::new(&salt, &ikm); let mut okm = [0u8; 1]; hkdf.expand(&info, &mut okm).unwrap(); @@ -178,12 +200,12 @@ mod tests { } #[test] - fn test_alloc_wrapper() { + fn alloc_wrapper() { let salt = Vec::from_hex("000102030405060708090a0b0c").unwrap(); let ikm = Vec::from_hex("0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b").unwrap(); let info = Vec::from_hex("f0f1f2f3f4f5f6f7f8f9").unwrap(); - let hkdf = Hkdf::::new(&salt, &ikm); + let hkdf = Hkdf::::new(&salt, &ikm); let okm = hkdf.expand_to_len(&info, 42).unwrap(); assert_eq!( @@ -191,4 +213,15 @@ mod tests { "3cb25f25faacd57a90434f64d0362f2a2d2d0a90cf1a5a4c5db02d56ecc4c5bf34007208d5b887185865" ); } + + #[test] + fn debug() { + let salt = Vec::from_hex("000102030405060708090a0b0c").unwrap(); + let ikm = Vec::from_hex("0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b").unwrap(); + + let hkdf = Hkdf::::new(&salt, &ikm); + let debug = alloc::format!("{:?}", hkdf); + + assert_eq!(debug, "Hkdf(#ec7bd36ab2ed4045)"); + } } diff --git a/hashes/src/hmac.rs b/hashes/src/hmac/mod.rs similarity index 78% rename from hashes/src/hmac.rs rename to hashes/src/hmac/mod.rs index 283bdc7d0b..2d70253fca 100644 --- a/hashes/src/hmac.rs +++ b/hashes/src/hmac/mod.rs @@ -12,34 +12,27 @@ use core::{convert, fmt, str}; #[cfg(feature = "serde")] use serde::{Deserialize, Deserializer, Serialize, Serializer}; -use crate::{FromSliceError, GeneralHash, Hash, HashEngine}; +use crate::{Hash, HashEngine}; /// A hash computed from a RFC 2104 HMAC. Parameterized by the underlying hash function. #[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] #[repr(transparent)] -pub struct Hmac(T); +pub struct Hmac(T); -impl str::FromStr for Hmac { +impl str::FromStr for Hmac { type Err = ::Err; fn from_str(s: &str) -> Result { Ok(Hmac(str::FromStr::from_str(s)?)) } } /// Pair of underlying hash engines, used for the inner and outer hash of HMAC. -#[derive(Clone)] -pub struct HmacEngine { - iengine: T::Engine, - oengine: T::Engine, +#[derive(Debug, Clone)] +pub struct HmacEngine { + iengine: T, + oengine: T, } -impl Default for HmacEngine -where - ::Engine: Default, -{ - fn default() -> Self { HmacEngine::new(&[]) } -} - -impl HmacEngine { - /// Constructs a new keyed HMAC from `key`. +impl HmacEngine { + /// Constructs a new keyed HMAC engine from `key`. /// /// We only support underlying hashes whose block sizes are ≤ 128 bytes. /// @@ -48,24 +41,23 @@ impl HmacEngine { /// Larger hashes will result in a panic. 
pub fn new(key: &[u8]) -> HmacEngine where - ::Engine: Default, + T: Default, { - debug_assert!(T::Engine::BLOCK_SIZE <= 128); + debug_assert!(T::BLOCK_SIZE <= 128); let mut ipad = [0x36u8; 128]; let mut opad = [0x5cu8; 128]; - let mut ret = HmacEngine { - iengine: ::engine(), - oengine: ::engine(), - }; + let mut ret = HmacEngine { iengine: T::default(), oengine: T::default() }; - if key.len() > T::Engine::BLOCK_SIZE { - let hash = ::hash(key); - let hash = hash.as_byte_array().as_ref(); - for (b_i, b_h) in ipad.iter_mut().zip(hash) { + if key.len() > T::BLOCK_SIZE { + let mut engine = T::default(); + engine.input(key); + let hash = engine.finalize(); + + for (b_i, b_h) in ipad.iter_mut().zip(hash.as_ref()) { *b_i ^= *b_h; } - for (b_o, b_h) in opad.iter_mut().zip(hash) { + for (b_o, b_h) in opad.iter_mut().zip(hash.as_ref()) { *b_o ^= *b_h; } } else { @@ -77,60 +69,57 @@ impl HmacEngine { } }; - HashEngine::input(&mut ret.iengine, &ipad[..T::Engine::BLOCK_SIZE]); - HashEngine::input(&mut ret.oengine, &opad[..T::Engine::BLOCK_SIZE]); + ret.iengine.input(&ipad[..T::BLOCK_SIZE]); + ret.oengine.input(&opad[..T::BLOCK_SIZE]); ret } /// A special constructor giving direct access to the underlying "inner" and "outer" engines. - pub fn from_inner_engines(iengine: T::Engine, oengine: T::Engine) -> HmacEngine { + pub fn from_inner_engines(iengine: T, oengine: T) -> HmacEngine { HmacEngine { iengine, oengine } } } -impl HashEngine for HmacEngine { - const BLOCK_SIZE: usize = T::Engine::BLOCK_SIZE; +impl HashEngine for HmacEngine { + type Hash = Hmac; + type Bytes = T::Bytes; + const BLOCK_SIZE: usize = T::BLOCK_SIZE; fn n_bytes_hashed(&self) -> u64 { self.iengine.n_bytes_hashed() } - fn input(&mut self, buf: &[u8]) { self.iengine.input(buf) } + fn finalize(mut self) -> Self::Hash { + let ihash = self.iengine.finalize(); + self.oengine.input(ihash.as_ref()); + Hmac(self.oengine.finalize()) + } } -impl fmt::Debug for Hmac { +impl fmt::Debug for Hmac { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { fmt::Debug::fmt(&self.0, f) } } -impl fmt::Display for Hmac { +impl fmt::Display for Hmac { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { fmt::Display::fmt(&self.0, f) } } -impl fmt::LowerHex for Hmac { +impl fmt::LowerHex for Hmac { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { fmt::LowerHex::fmt(&self.0, f) } } -impl convert::AsRef<[u8]> for Hmac { +impl convert::AsRef<[u8]> for Hmac { // Calling as_byte_array is more reliable fn as_ref(&self) -> &[u8] { self.0.as_byte_array().as_ref() } } -impl GeneralHash for Hmac { - type Engine = HmacEngine; - - fn from_engine(mut e: HmacEngine) -> Hmac { - let ihash = T::from_engine(e.iengine); - e.oengine.input(ihash.as_byte_array().as_ref()); - let ohash = T::from_engine(e.oengine); - Hmac(ohash) - } -} - -impl Hash for Hmac { +impl Hash for Hmac { type Bytes = T::Bytes; fn from_byte_array(bytes: T::Bytes) -> Self { Hmac(T::from_byte_array(bytes)) } - #[allow(deprecated)] - fn from_slice(sl: &[u8]) -> Result, FromSliceError> { T::from_slice(sl).map(Hmac) } + #[allow(deprecated_in_future)] // Because of `FromSliceError`. 
+ fn from_slice(sl: &[u8]) -> Result, crate::FromSliceError> { + T::from_slice(sl).map(Hmac) + } fn to_byte_array(self) -> Self::Bytes { self.0.to_byte_array() } @@ -138,35 +127,36 @@ impl Hash for Hmac { } #[cfg(feature = "serde")] -impl Serialize for Hmac { +impl Serialize for Hmac { fn serialize(&self, s: S) -> Result { Serialize::serialize(&self.0, s) } } #[cfg(feature = "serde")] -impl<'de, T: GeneralHash + Deserialize<'de>> Deserialize<'de> for Hmac { +impl<'de, T: Hash + Deserialize<'de>> Deserialize<'de> for Hmac { fn deserialize>(d: D) -> Result, D::Error> { let bytes = Deserialize::deserialize(d)?; Ok(Hmac(bytes)) } } -crate::internal_macros::impl_io_write!( +#[cfg(feature = "std")] +crate::internal_macros::impl_write!( HmacEngine, |us: &mut HmacEngine, buf| { us.input(buf); Ok(buf.len()) }, |_us| { Ok(()) }, - T: crate::GeneralHash + T: crate::HashEngine ); #[cfg(test)] mod tests { #[test] fn test() { - use crate::{sha256, GeneralHash as _, Hash as _, HashEngine, Hmac, HmacEngine}; + use crate::{sha256, Hash as _, HashEngine, HmacEngine}; #[derive(Clone)] struct Test { @@ -281,9 +271,9 @@ mod tests { ]; for test in tests { - let mut engine = HmacEngine::::new(test.key); + let mut engine = HmacEngine::::new(test.key); engine.input(test.input); - let hash = Hmac::::from_engine(engine); + let hash = engine.finalize(); assert_eq!(hash.as_ref(), test.output); assert_eq!(hash.to_byte_array(), test.output); } @@ -324,11 +314,11 @@ mod tests { mod benches { use test::Bencher; - use crate::{sha256, GeneralHash as _, HashEngine, Hmac}; + use crate::{sha256, HashEngine as _, HmacEngine}; #[bench] pub fn hmac_sha256_10(bh: &mut Bencher) { - let mut engine = Hmac::::engine(); + let mut engine = HmacEngine::::new(&[]); let bytes = [1u8; 10]; bh.iter(|| { engine.input(&bytes); @@ -338,7 +328,7 @@ mod benches { #[bench] pub fn hmac_sha256_1k(bh: &mut Bencher) { - let mut engine = Hmac::::engine(); + let mut engine = HmacEngine::::new(&[]); let bytes = [1u8; 1024]; bh.iter(|| { engine.input(&bytes); @@ -348,7 +338,7 @@ mod benches { #[bench] pub fn hmac_sha256_64k(bh: &mut Bencher) { - let mut engine = Hmac::::engine(); + let mut engine = HmacEngine::::new(&[]); let bytes = [1u8; 65536]; bh.iter(|| { engine.input(&bytes); diff --git a/hashes/src/internal_macros.rs b/hashes/src/internal_macros.rs index 39999b3ddf..5e9599322a 100644 --- a/hashes/src/internal_macros.rs +++ b/hashes/src/internal_macros.rs @@ -9,7 +9,7 @@ /// /// * `$bits` - number of bits this hash type has /// * `$reverse` - `bool` - `true` if the hash type should be displayed backwards, `false` -/// otherwise. +/// otherwise. /// * `$gen: $gent` - generic type(s) and trait bound(s) /// /// Restrictions on usage: @@ -26,12 +26,6 @@ macro_rules! hash_trait_impls { #[cfg(not(feature = "hex"))] $crate::impl_debug_only!(Hash, { $bits / 8 }, $reverse $(, $gen: $gent)*); - impl<$($gen: $gent),*> $crate::GeneralHash for Hash<$($gen),*> { - type Engine = HashEngine; - - fn from_engine(e: HashEngine) -> Hash<$($gen),*> { Self::from_engine(e) } - } - #[cfg(feature = "serde")] $crate::serde_impl!(Hash, { $bits / 8} $(, $gen: $gent)*); @@ -42,7 +36,8 @@ macro_rules! hash_trait_impls { fn from_byte_array(bytes: Self::Bytes) -> Self { Self::from_byte_array(bytes) } - #[allow(deprecated)] + #[allow(deprecated_in_future)] // Because of `FromSliceError`. + #[allow(deprecated)] // Because of `from_slice`. 
fn from_slice(sl: &[u8]) -> $crate::_export::_core::result::Result, $crate::FromSliceError> { Self::from_slice(sl) } @@ -70,6 +65,30 @@ pub(crate) use hash_trait_impls; /// [`hash_trait_impls`]. macro_rules! general_hash_type { ($bits:expr, $reverse:expr, $doc:literal) => { + /// Hashes some bytes. + pub fn hash(data: &[u8]) -> Hash { + use crate::HashEngine as _; + + let mut engine = Hash::engine(); + engine.input(data); + engine.finalize() + } + + /// Hashes all the byte slices retrieved from the iterator together. + pub fn hash_byte_chunks(byte_slices: I) -> Hash + where + B: AsRef<[u8]>, + I: IntoIterator, + { + use crate::HashEngine as _; + + let mut engine = Hash::engine(); + for slice in byte_slices { + engine.input(slice.as_ref()); + } + engine.finalize() + } + $crate::internal_macros::hash_type_no_default!($bits, $reverse, $doc); impl Hash { @@ -81,7 +100,7 @@ macro_rules! general_hash_type { /// Hashes some bytes. #[allow(clippy::self_named_constructors)] // Hash is a noun and a verb. - pub fn hash(data: &[u8]) -> Self { ::hash(data) } + pub fn hash(data: &[u8]) -> Self { hash(data) } /// Hashes all the byte slices retrieved from the iterator together. pub fn hash_byte_chunks(byte_slices: I) -> Self @@ -89,13 +108,7 @@ macro_rules! general_hash_type { B: AsRef<[u8]>, I: IntoIterator, { - ::hash_byte_chunks(byte_slices) - } - - /// Hashes the entire contents of the `reader`. - #[cfg(feature = "bitcoin-io")] - pub fn hash_reader(reader: &mut R) -> Result { - ::hash_reader(reader) + hash_byte_chunks(byte_slices) } } }; @@ -132,11 +145,13 @@ macro_rules! hash_type_no_default { } /// Copies a byte slice into a hash object. + #[deprecated(since = "0.15.0", note = "use `from_byte_array` instead")] + #[allow(deprecated_in_future)] // Because of `FromSliceError`. pub fn from_slice( sl: &[u8], ) -> $crate::_export::_core::result::Result { if sl.len() != $bits / 8 { - Err($crate::FromSliceError($crate::error::FromSliceErrorInner { + Err($crate::error::FromSliceError($crate::error::FromSliceErrorInner { expected: $bits / 8, got: sl.len(), })) @@ -156,7 +171,7 @@ macro_rules! hash_type_no_default { $crate::internal_macros::hash_trait_impls!($bits, $reverse); - $crate::internal_macros::impl_io_write!( + $crate::internal_macros::impl_write!( HashEngine, |us: &mut HashEngine, buf| { crate::HashEngine::input(us, buf); @@ -168,29 +183,16 @@ macro_rules! hash_type_no_default { } pub(crate) use hash_type_no_default; -// We do not use the `bitcoin_io::impl_write` macro because we don't have an unconditional -// dependency on `bitcoin-io` and we want to implement `std:io::Write` even when we don't depend on -// `bitcoin-io`. -macro_rules! impl_io_write { +macro_rules! impl_write { ($ty: ty, $write_fn: expr, $flush_fn: expr $(, $bounded_ty: ident : $bounds: path),*) => { - #[cfg(feature = "bitcoin-io")] - impl<$($bounded_ty: $bounds),*> bitcoin_io::Write for $ty { - #[inline] - fn write(&mut self, buf: &[u8]) -> bitcoin_io::Result { - $write_fn(self, buf) - } - #[inline] - fn flush(&mut self) -> bitcoin_io::Result<()> { - $flush_fn(self) - } - } - + // `bitcoin_io::Write` is implemented in `bitcoin_io`. #[cfg(feature = "std")] impl<$($bounded_ty: $bounds),*> std::io::Write for $ty { #[inline] fn write(&mut self, buf: &[u8]) -> std::io::Result { $write_fn(self, buf) } + #[inline] fn flush(&mut self) -> std::io::Result<()> { $flush_fn(self) @@ -198,7 +200,7 @@ macro_rules! impl_io_write { } } } -pub(crate) use impl_io_write; +pub(crate) use impl_write; macro_rules! 
engine_input_impl( () => ( diff --git a/hashes/src/lib.rs b/hashes/src/lib.rs index bc6206d305..b3962214ca 100644 --- a/hashes/src/lib.rs +++ b/hashes/src/lib.rs @@ -1,11 +1,11 @@ // SPDX-License-Identifier: CC0-1.0 -//! Rust hashes library. +//! # Rust Bitcoin Hashes Library //! //! This library implements the hash functions needed by Bitcoin. As an ancillary thing, it exposes //! hexadecimal serialization and deserialization, since these are needed to display hashes. //! -//! ## Commonly used operations +//! # Examples //! //! Hashing a single byte slice or a string: //! @@ -26,7 +26,7 @@ //! //! let mut reader: &[u8] = b"hello"; // In real code, this could be a `File` or `TcpStream`. //! let mut engine = Sha256::engine(); -//! std::io::copy(&mut reader, &mut engine).unwrap(); +//! std::io::copy(&mut reader, &mut engine).expect("engine writes don't error"); //! let _hash = Sha256::from_engine(engine); //! # } //! ``` @@ -58,6 +58,8 @@ #![warn(missing_docs)] #![warn(deprecated_in_future)] #![doc(test(attr(warn(unused))))] +// Pedantic lints that we enforce. +#![warn(clippy::return_self_not_must_use)] // Instead of littering the codebase for non-fuzzing and bench code just globally allow. #![cfg_attr(hashes_fuzz, allow(dead_code, unused_imports))] #![cfg_attr(bench, allow(dead_code, unused_imports))] @@ -73,9 +75,6 @@ extern crate core; #[cfg(feature = "std")] extern crate std; -#[cfg(feature = "bitcoin-io")] -extern crate bitcoin_io as io; - /// A generic serialization/deserialization framework. #[cfg(feature = "serde")] pub extern crate serde; @@ -97,10 +96,11 @@ pub mod _export { } } +#[deprecated(since = "TBD", note = "unused now that `Hash::from_slice` is deprecated")] +mod error; mod internal_macros; pub mod cmp; -pub mod error; pub mod hash160; pub mod hkdf; pub mod hmac; @@ -133,11 +133,9 @@ use core::{convert, hash}; #[rustfmt::skip] // Keep public re-exports separate. #[doc(inline)] pub use self::{ - error::FromSliceError, hkdf::Hkdf, hmac::{Hmac, HmacEngine}, }; - /// HASH-160: Alias for the [`hash160::Hash`] hash type. #[doc(inline)] pub use hash160::Hash as Hash160; @@ -166,6 +164,11 @@ pub use sha512_256::Hash as Sha512_256; #[doc(inline)] pub use siphash24::Hash as Siphash24; +/// Attempted to create a hash from an invalid length slice. +#[deprecated(since = "TBD", note = "unused now that `Hash::from_slice` is deprecated")] +#[allow(deprecated_in_future)] +pub type FromSliceError = crate::error::FromSliceError; // Alias instead of re-export so we can deprecate it. + /// Tagged SHA-256: Type alias for the [`sha256t::Hash`] hash type. pub type Sha256t = sha256t::Hash; @@ -183,6 +186,15 @@ pub type HkdfSha512 = Hkdf; /// A hashing engine which bytes can be serialized into. pub trait HashEngine: Clone { + /// The `Hash` type returned when finalizing this engine. + type Hash: Hash; + + /// The byte array that is used internally in `finalize`. + type Bytes: Copy + IsByteArray; + + /// Length of the hash, in bytes. + const LEN: usize = Self::Bytes::LEN; + /// Length of the hash's internal block size, in bytes. const BLOCK_SIZE: usize; @@ -191,74 +203,9 @@ pub trait HashEngine: Clone { /// Return the number of bytes already input into the engine. fn n_bytes_hashed(&self) -> u64; -} -/// Trait describing hash digests which can be constructed by hashing arbitrary data. -/// -/// Some methods have been bound to engines which implement Default, which is -/// generally an unkeyed hash function. -pub trait GeneralHash: Hash { - /// A hashing engine which bytes can be serialized into. 
It is expected - /// to implement the `io::Write` trait, and to never return errors under - /// any conditions. - type Engine: HashEngine; - - /// Constructs a new engine. - fn engine() -> Self::Engine - where - Self::Engine: Default, - { - Self::Engine::default() - } - - /// Produces a hash from the current state of a given engine. - fn from_engine(e: Self::Engine) -> Self; - - /// Hashes some bytes. - fn hash(data: &[u8]) -> Self - where - Self::Engine: Default, - { - let mut engine = Self::engine(); - engine.input(data); - Self::from_engine(engine) - } - - /// Hashes all the byte slices retrieved from the iterator together. - fn hash_byte_chunks(byte_slices: I) -> Self - where - B: AsRef<[u8]>, - I: IntoIterator, - Self::Engine: Default, - { - let mut engine = Self::engine(); - for slice in byte_slices { - engine.input(slice.as_ref()); - } - Self::from_engine(engine) - } - - /// Hashes the entire contents of the `reader`. - #[cfg(feature = "bitcoin-io")] - fn hash_reader(reader: &mut R) -> Result - where - Self::Engine: Default, - { - let mut engine = Self::engine(); - loop { - let bytes = reader.fill_buf()?; - - let read = bytes.len(); - // Empty slice means EOF. - if read == 0 { - break; - } - - engine.input(bytes); - reader.consume(read); - } - Ok(Self::from_engine(engine)) - } + /// Finalizes this engine. + fn finalize(self) -> Self::Hash; } /// Trait which applies to hashes of all types. @@ -280,7 +227,8 @@ pub trait Hash: fn from_byte_array(bytes: Self::Bytes) -> Self; /// Copies a byte slice into a hash object. - #[deprecated(since = "0.15.0", note = "use `from_byte_array` instead")] + #[allow(deprecated_in_future)] // Because of `FromSliceError`. + #[deprecated(since = "TBD", note = "use `from_byte_array` instead")] fn from_slice(sl: &[u8]) -> Result; /// Returns the underlying byte array. @@ -301,7 +249,6 @@ impl IsByteArray for [u8; N] { } mod sealed { - #[doc(hidden)] pub trait IsByteArray {} impl IsByteArray for [u8; N] {} @@ -350,6 +297,7 @@ mod tests { #[test] #[cfg(feature = "alloc")] + #[cfg(feature = "hex")] fn newtype_fmt_roundtrip() { use alloc::format; @@ -366,13 +314,4 @@ mod tests { let rinsed = hex.parse::().expect("failed to parse hex"); assert_eq!(rinsed, orig) } - - #[test] - #[cfg(feature = "bitcoin-io")] - fn hash_reader() { - use crate::sha256; - - let mut reader: &[u8] = b"hello"; - assert_eq!(sha256::Hash::hash_reader(&mut reader).unwrap(), sha256::Hash::hash(b"hello"),) - } } diff --git a/hashes/src/macros.rs b/hashes/src/macros.rs index 3d5382aec8..2c9aa1bcf7 100644 --- a/hashes/src/macros.rs +++ b/hashes/src/macros.rs @@ -33,11 +33,7 @@ macro_rules! sha256t_tag { $crate::sha256t_tag_struct!($tag_vis, $tag, stringify!($hash_name), $(#[$($tag_attr)*])*); impl $crate::sha256t::Tag for $tag { - #[inline] - fn engine() -> $crate::sha256::HashEngine { - const MIDSTATE: $crate::sha256::Midstate = $crate::sha256t_tag_constructor!($constructor, $($tag_value)+); - $crate::sha256::HashEngine::from_midstate(MIDSTATE) - } + const MIDSTATE: $crate::sha256::Midstate = $crate::sha256t_tag_constructor!($constructor, $($tag_value)+); } } } @@ -145,7 +141,8 @@ macro_rules! hash_newtype { /// Copies a byte slice into a hash object. #[deprecated(since = "0.15.0", note = "use `from_byte_array` instead")] - #[allow(deprecated)] + #[allow(deprecated_in_future)] // Because of `FromSliceError`. + #[allow(deprecated)] // Because of `from_slice`. 
pub fn from_slice(sl: &[u8]) -> $crate::_export::_core::result::Result<$newtype, $crate::FromSliceError> { Ok($newtype(<$hash as $crate::Hash>::from_slice(sl)?)) } @@ -161,19 +158,6 @@ macro_rules! hash_newtype { } } - impl $crate::_export::_core::convert::From<$hash> for $newtype { - fn from(inner: $hash) -> $newtype { - // Due to rust 1.22 we have to use this instead of simple `Self(inner)` - Self { 0: inner } - } - } - - impl $crate::_export::_core::convert::From<$newtype> for $hash { - fn from(hashtype: $newtype) -> $hash { - hashtype.0 - } - } - impl $crate::Hash for $newtype { type Bytes = <$hash as $crate::Hash>::Bytes; @@ -182,7 +166,8 @@ macro_rules! hash_newtype { fn from_byte_array(bytes: Self::Bytes) -> Self { Self::from_byte_array(bytes) } #[inline] - #[allow(deprecated)] + #[allow(deprecated_in_future)] // Because of `FromSliceError`. + #[allow(deprecated)] // Because of `from_slice`. fn from_slice(sl: &[u8]) -> $crate::_export::_core::result::Result<$newtype, $crate::FromSliceError> { Self::from_slice(sl) } @@ -195,7 +180,7 @@ macro_rules! hash_newtype { }; } -/// Implements string functions using hex for a new type crated with [`crate::hash_newtype`] macro. +/// Implements string functions using hex for a new type created with [`crate::hash_newtype`] macro. /// /// Implements: /// @@ -212,7 +197,7 @@ macro_rules! impl_hex_for_newtype { } } -/// Implements `fmt::Debug` using hex for a new type crated with [`crate::hash_newtype`] macro. +/// Implements `fmt::Debug` using hex for a new type created with [`crate::hash_newtype`] macro. /// /// This is provided in case you do not want to use the `hex` feature. #[macro_export] @@ -533,6 +518,7 @@ macro_rules! serde_impl( )); /// Does an "empty" serde implementation for the configuration without serde feature. +#[doc(hidden)] #[macro_export] #[cfg(not(feature = "serde"))] macro_rules! serde_impl( @@ -579,6 +565,25 @@ mod test { fn all_zeros() -> Self { Self::from_byte_array([0; 32]) } } + #[test] + fn macros_work_in_function_scope() { + use crate::sha256t; + + sha256t_tag! { + #[repr(align(2))] // This tests that we can add additional attributes. + pub struct FunctionScopeTag = hash_str("It works"); + } + + hash_newtype! { + /// Some docs. + #[repr(align(4))] // This tests that we can add additional attributes. + pub struct FunctionScopeHash(pub(crate) sha256t::Hash); + } + + assert_eq!(2, core::mem::align_of::()); + assert_eq!(4, core::mem::align_of::()); + } + // NB: This runs with and without `hex` feature enabled, testing different code paths for each. 
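    // A minimal sketch (with a hypothetical tag string) combining the two macros to
    // actually hash with a user-defined tag: `sha256t_tag!` now only supplies the
    // tag's `MIDSTATE` constant and `sha256t::Hash` does the rest.
    #[test]
    fn macros_tagged_hash_sketch() {
        use crate::sha256t;

        sha256t_tag! {
            /// Tag for an example tagged hash.
            pub struct ExampleTag = hash_str("example/tag");
        }

        let first = sha256t::Hash::<ExampleTag>::hash(b"some data");
        let second = sha256t::Hash::<ExampleTag>::hash(b"some data");
        assert_eq!(first, second); // Tagged hashing is deterministic.
    }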
#[test] #[cfg(feature = "alloc")] diff --git a/hashes/src/ripemd160/benches.rs b/hashes/src/ripemd160/benches.rs new file mode 100644 index 0000000000..cb023a933a --- /dev/null +++ b/hashes/src/ripemd160/benches.rs @@ -0,0 +1,33 @@ +use test::Bencher; + +use crate::{ripemd160, Hash, HashEngine}; + +#[bench] +pub fn ripemd160_10(bh: &mut Bencher) { + let mut engine = ripemd160::Hash::engine(); + let bytes = [1u8; 10]; + bh.iter(|| { + engine.input(&bytes); + }); + bh.bytes = bytes.len() as u64; +} + +#[bench] +pub fn ripemd160_1k(bh: &mut Bencher) { + let mut engine = ripemd160::Hash::engine(); + let bytes = [1u8; 1024]; + bh.iter(|| { + engine.input(&bytes); + }); + bh.bytes = bytes.len() as u64; +} + +#[bench] +pub fn ripemd160_64k(bh: &mut Bencher) { + let mut engine = ripemd160::Hash::engine(); + let bytes = [1u8; 65536]; + bh.iter(|| { + engine.input(&bytes); + }); + bh.bytes = bytes.len() as u64; +} diff --git a/hashes/src/ripemd160.rs b/hashes/src/ripemd160/crypto.rs similarity index 72% rename from hashes/src/ripemd160.rs rename to hashes/src/ripemd160/crypto.rs index fa0893d7a0..70e92f66d6 100644 --- a/hashes/src/ripemd160.rs +++ b/hashes/src/ripemd160/crypto.rs @@ -1,92 +1,8 @@ // SPDX-License-Identifier: CC0-1.0 -//! RIPEMD160 implementation. +use internals::slice::SliceExt; -use core::cmp; - -use crate::{incomplete_block_len, HashEngine as _}; - -crate::internal_macros::general_hash_type! { - 160, - false, - "Output of the RIPEMD160 hash function." -} - -#[cfg(not(hashes_fuzz))] -fn from_engine(mut e: HashEngine) -> Hash { - // pad buffer with a single 1-bit then all 0s, until there are exactly 8 bytes remaining - let n_bytes_hashed = e.bytes_hashed; - - let zeroes = [0; BLOCK_SIZE - 8]; - e.input(&[0x80]); - if crate::incomplete_block_len(&e) > zeroes.len() { - e.input(&zeroes); - } - let pad_length = zeroes.len() - incomplete_block_len(&e); - e.input(&zeroes[..pad_length]); - debug_assert_eq!(incomplete_block_len(&e), zeroes.len()); - - e.input(&(8 * n_bytes_hashed).to_le_bytes()); - debug_assert_eq!(incomplete_block_len(&e), 0); - - Hash(e.midstate()) -} - -#[cfg(hashes_fuzz)] -fn from_engine(e: HashEngine) -> Hash { - let mut res = e.midstate(); - res[0] ^= (e.bytes_hashed & 0xff) as u8; - Hash(res) -} - -const BLOCK_SIZE: usize = 64; - -/// Engine to compute RIPEMD160 hash function. -#[derive(Clone)] -pub struct HashEngine { - buffer: [u8; BLOCK_SIZE], - h: [u32; 5], - bytes_hashed: u64, -} - -impl HashEngine { - /// Constructs a new SHA256 hash engine. - pub const fn new() -> Self { - Self { - h: [0x67452301, 0xefcdab89, 0x98badcfe, 0x10325476, 0xc3d2e1f0], - bytes_hashed: 0, - buffer: [0; BLOCK_SIZE], - } - } - - #[cfg(not(hashes_fuzz))] - fn midstate(&self) -> [u8; 20] { - let mut ret = [0; 20]; - for (val, ret_bytes) in self.h.iter().zip(ret.chunks_exact_mut(4)) { - ret_bytes.copy_from_slice(&(*val).to_le_bytes()); - } - ret - } - - #[cfg(hashes_fuzz)] - fn midstate(&self) -> [u8; 20] { - let mut ret = [0; 20]; - ret.copy_from_slice(&self.buffer[..20]); - ret - } -} - -impl Default for HashEngine { - fn default() -> Self { Self::new() } -} - -impl crate::HashEngine for HashEngine { - const BLOCK_SIZE: usize = 64; - - fn n_bytes_hashed(&self) -> u64 { self.bytes_hashed } - - crate::internal_macros::engine_input_impl!(); -} +use super::{HashEngine, BLOCK_SIZE}; #[cfg(feature = "small-hash")] #[macro_use] @@ -214,12 +130,12 @@ macro_rules! 
process_block( ); impl HashEngine { - fn process_block(&mut self) { + pub(super) fn process_block(&mut self) { debug_assert_eq!(self.buffer.len(), BLOCK_SIZE); let mut w = [0u32; 16]; - for (w_val, buff_bytes) in w.iter_mut().zip(self.buffer.chunks_exact(4)) { - *w_val = u32::from_le_bytes(buff_bytes.try_into().expect("4 byte slice")) + for (w_val, buff_bytes) in w.iter_mut().zip(self.buffer.bitcoin_as_chunks().0) { + *w_val = u32::from_le_bytes(*buff_bytes) } process_block!(self.h, w, @@ -405,149 +321,3 @@ impl HashEngine { ); } } - -#[cfg(test)] -mod tests { - #[test] - #[cfg(feature = "alloc")] - fn test() { - use alloc::string::ToString; - - use crate::{ripemd160, HashEngine}; - - #[derive(Clone)] - struct Test { - input: &'static str, - output: [u8; 20], - output_str: &'static str, - } - - #[rustfmt::skip] - let tests = [ - // Test messages from FIPS 180-1 - Test { - input: "abc", - output: [ - 0x8e, 0xb2, 0x08, 0xf7, - 0xe0, 0x5d, 0x98, 0x7a, - 0x9b, 0x04, 0x4a, 0x8e, - 0x98, 0xc6, 0xb0, 0x87, - 0xf1, 0x5a, 0x0b, 0xfc, - ], - output_str: "8eb208f7e05d987a9b044a8e98c6b087f15a0bfc" - }, - Test { - input: - "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq", - output: [ - 0x12, 0xa0, 0x53, 0x38, - 0x4a, 0x9c, 0x0c, 0x88, - 0xe4, 0x05, 0xa0, 0x6c, - 0x27, 0xdc, 0xf4, 0x9a, - 0xda, 0x62, 0xeb, 0x2b, - ], - output_str: "12a053384a9c0c88e405a06c27dcf49ada62eb2b" - }, - // Examples from wikipedia - Test { - input: "The quick brown fox jumps over the lazy dog", - output: [ - 0x37, 0xf3, 0x32, 0xf6, - 0x8d, 0xb7, 0x7b, 0xd9, - 0xd7, 0xed, 0xd4, 0x96, - 0x95, 0x71, 0xad, 0x67, - 0x1c, 0xf9, 0xdd, 0x3b, - ], - output_str: "37f332f68db77bd9d7edd4969571ad671cf9dd3b", - }, - Test { - input: "The quick brown fox jumps over the lazy cog", - output: [ - 0x13, 0x20, 0x72, 0xdf, - 0x69, 0x09, 0x33, 0x83, - 0x5e, 0xb8, 0xb6, 0xad, - 0x0b, 0x77, 0xe7, 0xb6, - 0xf1, 0x4a, 0xca, 0xd7, - ], - output_str: "132072df690933835eb8b6ad0b77e7b6f14acad7", - }, - ]; - - for mut test in tests { - // Hash through high-level API, check hex encoding/decoding - let hash = ripemd160::Hash::hash(test.input.as_bytes()); - assert_eq!(hash, test.output_str.parse::().expect("parse hex")); - assert_eq!(hash.as_byte_array(), &test.output); - assert_eq!(hash.to_string(), test.output_str); - assert_eq!(ripemd160::Hash::from_bytes_ref(&test.output), &hash); - assert_eq!(ripemd160::Hash::from_bytes_mut(&mut test.output), &hash); - - // Hash through engine, checking that we can input byte by byte - let mut engine = ripemd160::Hash::engine(); - for ch in test.input.as_bytes() { - engine.input(&[*ch]); - } - let manual_hash = ripemd160::Hash::from_engine(engine); - assert_eq!(hash, manual_hash); - assert_eq!(hash.to_byte_array(), test.output); - } - } - - #[test] - #[cfg(feature = "serde")] - fn ripemd_serde() { - use serde_test::{assert_tokens, Configure, Token}; - - use crate::ripemd160; - - #[rustfmt::skip] - static HASH_BYTES: [u8; 20] = [ - 0x13, 0x20, 0x72, 0xdf, - 0x69, 0x09, 0x33, 0x83, - 0x5e, 0xb8, 0xb6, 0xad, - 0x0b, 0x77, 0xe7, 0xb6, - 0xf1, 0x4a, 0xca, 0xd7, - ]; - - let hash = ripemd160::Hash::from_slice(&HASH_BYTES).expect("right number of bytes"); - assert_tokens(&hash.compact(), &[Token::BorrowedBytes(&HASH_BYTES[..])]); - assert_tokens(&hash.readable(), &[Token::Str("132072df690933835eb8b6ad0b77e7b6f14acad7")]); - } -} - -#[cfg(bench)] -mod benches { - use test::Bencher; - - use crate::{ripemd160, Hash, HashEngine}; - - #[bench] - pub fn ripemd160_10(bh: &mut Bencher) { - let mut engine = 
ripemd160::Hash::engine(); - let bytes = [1u8; 10]; - bh.iter(|| { - engine.input(&bytes); - }); - bh.bytes = bytes.len() as u64; - } - - #[bench] - pub fn ripemd160_1k(bh: &mut Bencher) { - let mut engine = ripemd160::Hash::engine(); - let bytes = [1u8; 1024]; - bh.iter(|| { - engine.input(&bytes); - }); - bh.bytes = bytes.len() as u64; - } - - #[bench] - pub fn ripemd160_64k(bh: &mut Bencher) { - let mut engine = ripemd160::Hash::engine(); - let bytes = [1u8; 65536]; - bh.iter(|| { - engine.input(&bytes); - }); - bh.bytes = bytes.len() as u64; - } -} diff --git a/hashes/src/ripemd160/mod.rs b/hashes/src/ripemd160/mod.rs new file mode 100644 index 0000000000..bb2941455a --- /dev/null +++ b/hashes/src/ripemd160/mod.rs @@ -0,0 +1,99 @@ +// SPDX-License-Identifier: CC0-1.0 + +//! RIPEMD160 implementation. + +use internals::slice::SliceExt; + +#[cfg(bench)] +mod benches; +mod crypto; +#[cfg(bench)] +mod tests; + +use core::cmp; + +use crate::{incomplete_block_len, HashEngine as _}; + +crate::internal_macros::general_hash_type! { + 160, + false, + "Output of the RIPEMD160 hash function." +} + +#[cfg(not(hashes_fuzz))] +fn from_engine(mut e: HashEngine) -> Hash { + // pad buffer with a single 1-bit then all 0s, until there are exactly 8 bytes remaining + let n_bytes_hashed = e.bytes_hashed; + + let zeroes = [0; BLOCK_SIZE - 8]; + e.input(&[0x80]); + if crate::incomplete_block_len(&e) > zeroes.len() { + e.input(&zeroes); + } + let pad_length = zeroes.len() - incomplete_block_len(&e); + e.input(&zeroes[..pad_length]); + debug_assert_eq!(incomplete_block_len(&e), zeroes.len()); + + e.input(&(8 * n_bytes_hashed).to_le_bytes()); + debug_assert_eq!(incomplete_block_len(&e), 0); + + Hash(e.midstate()) +} + +#[cfg(hashes_fuzz)] +fn from_engine(e: HashEngine) -> Hash { + let mut res = e.midstate(); + res[0] ^= (e.bytes_hashed & 0xff) as u8; + Hash(res) +} + +const BLOCK_SIZE: usize = 64; + +/// Engine to compute RIPEMD160 hash function. +#[derive(Debug, Clone)] +pub struct HashEngine { + buffer: [u8; BLOCK_SIZE], + h: [u32; 5], + bytes_hashed: u64, +} + +impl HashEngine { + /// Constructs a new SHA256 hash engine. 
+ pub const fn new() -> Self { + Self { + h: [0x67452301, 0xefcdab89, 0x98badcfe, 0x10325476, 0xc3d2e1f0], + bytes_hashed: 0, + buffer: [0; BLOCK_SIZE], + } + } + + #[cfg(not(hashes_fuzz))] + fn midstate(&self) -> [u8; 20] { + let mut ret = [0; 20]; + for (val, ret_bytes) in self.h.iter().zip(ret.bitcoin_as_chunks_mut().0) { + *ret_bytes = val.to_le_bytes(); + } + ret + } + + #[cfg(hashes_fuzz)] + fn midstate(&self) -> [u8; 20] { + let mut ret = [0; 20]; + ret.copy_from_slice(&self.buffer[..20]); + ret + } +} + +impl Default for HashEngine { + fn default() -> Self { Self::new() } +} + +impl crate::HashEngine for HashEngine { + type Hash = Hash; + type Bytes = [u8; 20]; + const BLOCK_SIZE: usize = 64; + + fn n_bytes_hashed(&self) -> u64 { self.bytes_hashed } + crate::internal_macros::engine_input_impl!(); + fn finalize(self) -> Self::Hash { Hash::from_engine(self) } +} diff --git a/hashes/src/ripemd160/tests.rs b/hashes/src/ripemd160/tests.rs new file mode 100644 index 0000000000..3b9a324b68 --- /dev/null +++ b/hashes/src/ripemd160/tests.rs @@ -0,0 +1,106 @@ +#[test] +#[cfg(feature = "alloc")] +#[cfg(feature = "hex")] +fn test() { + use alloc::string::ToString; + + use crate::{ripemd160, HashEngine}; + + #[derive(Clone)] + struct Test { + input: &'static str, + output: [u8; 20], + output_str: &'static str, + } + + #[rustfmt::skip] + let tests = [ + // Test messages from FIPS 180-1 + Test { + input: "abc", + output: [ + 0x8e, 0xb2, 0x08, 0xf7, + 0xe0, 0x5d, 0x98, 0x7a, + 0x9b, 0x04, 0x4a, 0x8e, + 0x98, 0xc6, 0xb0, 0x87, + 0xf1, 0x5a, 0x0b, 0xfc, + ], + output_str: "8eb208f7e05d987a9b044a8e98c6b087f15a0bfc" + }, + Test { + input: + "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq", + output: [ + 0x12, 0xa0, 0x53, 0x38, + 0x4a, 0x9c, 0x0c, 0x88, + 0xe4, 0x05, 0xa0, 0x6c, + 0x27, 0xdc, 0xf4, 0x9a, + 0xda, 0x62, 0xeb, 0x2b, + ], + output_str: "12a053384a9c0c88e405a06c27dcf49ada62eb2b" + }, + // Examples from wikipedia + Test { + input: "The quick brown fox jumps over the lazy dog", + output: [ + 0x37, 0xf3, 0x32, 0xf6, + 0x8d, 0xb7, 0x7b, 0xd9, + 0xd7, 0xed, 0xd4, 0x96, + 0x95, 0x71, 0xad, 0x67, + 0x1c, 0xf9, 0xdd, 0x3b, + ], + output_str: "37f332f68db77bd9d7edd4969571ad671cf9dd3b", + }, + Test { + input: "The quick brown fox jumps over the lazy cog", + output: [ + 0x13, 0x20, 0x72, 0xdf, + 0x69, 0x09, 0x33, 0x83, + 0x5e, 0xb8, 0xb6, 0xad, + 0x0b, 0x77, 0xe7, 0xb6, + 0xf1, 0x4a, 0xca, 0xd7, + ], + output_str: "132072df690933835eb8b6ad0b77e7b6f14acad7", + }, + ]; + + for mut test in tests { + // Hash through high-level API, check hex encoding/decoding + let hash = ripemd160::Hash::hash(test.input.as_bytes()); + assert_eq!(hash, test.output_str.parse::().expect("parse hex")); + assert_eq!(hash.as_byte_array(), &test.output); + assert_eq!(hash.to_string(), test.output_str); + assert_eq!(ripemd160::Hash::from_bytes_ref(&test.output), &hash); + assert_eq!(ripemd160::Hash::from_bytes_mut(&mut test.output), &hash); + + // Hash through engine, checking that we can input byte by byte + let mut engine = ripemd160::Hash::engine(); + for ch in test.input.as_bytes() { + engine.input(&[*ch]); + } + let manual_hash = ripemd160::Hash::from_engine(engine); + assert_eq!(hash, manual_hash); + assert_eq!(hash.to_byte_array(), test.output); + } +} + +#[test] +#[cfg(feature = "serde")] +fn ripemd_serde() { + use serde_test::{assert_tokens, Configure, Token}; + + use crate::ripemd160; + + #[rustfmt::skip] + static HASH_BYTES: [u8; 20] = [ + 0x13, 0x20, 0x72, 0xdf, + 0x69, 0x09, 0x33, 0x83, + 0x5e, 0xb8, 0xb6, 
0xad, + 0x0b, 0x77, 0xe7, 0xb6, + 0xf1, 0x4a, 0xca, 0xd7, + ]; + + let hash = ripemd160::Hash::from_slice(&HASH_BYTES).expect("right number of bytes"); + assert_tokens(&hash.compact(), &[Token::BorrowedBytes(&HASH_BYTES[..])]); + assert_tokens(&hash.readable(), &[Token::Str("132072df690933835eb8b6ad0b77e7b6f14acad7")]); +} diff --git a/hashes/src/sha1.rs b/hashes/src/sha1.rs deleted file mode 100644 index 152a8f3489..0000000000 --- a/hashes/src/sha1.rs +++ /dev/null @@ -1,257 +0,0 @@ -// SPDX-License-Identifier: CC0-1.0 - -//! SHA1 implementation. - -use core::cmp; - -use crate::{incomplete_block_len, HashEngine as _}; - -crate::internal_macros::general_hash_type! { - 160, - false, - "Output of the SHA1 hash function." -} - -fn from_engine(mut e: HashEngine) -> Hash { - // pad buffer with a single 1-bit then all 0s, until there are exactly 8 bytes remaining - let n_bytes_hashed = e.bytes_hashed; - - let zeroes = [0; BLOCK_SIZE - 8]; - e.input(&[0x80]); - if incomplete_block_len(&e) > zeroes.len() { - e.input(&zeroes); - } - let pad_length = zeroes.len() - incomplete_block_len(&e); - e.input(&zeroes[..pad_length]); - debug_assert_eq!(incomplete_block_len(&e), zeroes.len()); - - e.input(&(8 * n_bytes_hashed).to_be_bytes()); - debug_assert_eq!(incomplete_block_len(&e), 0); - - Hash(e.midstate()) -} - -const BLOCK_SIZE: usize = 64; - -/// Engine to compute SHA1 hash function. -#[derive(Clone)] -pub struct HashEngine { - buffer: [u8; BLOCK_SIZE], - h: [u32; 5], - bytes_hashed: u64, -} - -impl HashEngine { - /// Constructs a new SHA1 hash engine. - pub const fn new() -> Self { - Self { - h: [0x67452301, 0xefcdab89, 0x98badcfe, 0x10325476, 0xc3d2e1f0], - bytes_hashed: 0, - buffer: [0; BLOCK_SIZE], - } - } - - #[cfg(not(hashes_fuzz))] - pub(crate) fn midstate(&self) -> [u8; 20] { - let mut ret = [0; 20]; - for (val, ret_bytes) in self.h.iter().zip(ret.chunks_exact_mut(4)) { - ret_bytes.copy_from_slice(&val.to_be_bytes()) - } - ret - } - - #[cfg(hashes_fuzz)] - pub(crate) fn midstate(&self) -> [u8; 20] { - let mut ret = [0; 20]; - ret.copy_from_slice(&self.buffer[..20]); - ret - } -} - -impl Default for HashEngine { - fn default() -> Self { Self::new() } -} - -impl crate::HashEngine for HashEngine { - const BLOCK_SIZE: usize = 64; - - fn n_bytes_hashed(&self) -> u64 { self.bytes_hashed } - - crate::internal_macros::engine_input_impl!(); -} - -impl HashEngine { - // Basic unoptimized algorithm from Wikipedia - fn process_block(&mut self) { - debug_assert_eq!(self.buffer.len(), BLOCK_SIZE); - - let mut w = [0u32; 80]; - for (w_val, buff_bytes) in w.iter_mut().zip(self.buffer.chunks_exact(4)) { - *w_val = u32::from_be_bytes(buff_bytes.try_into().expect("4 bytes slice")) - } - for i in 16..80 { - w[i] = (w[i - 3] ^ w[i - 8] ^ w[i - 14] ^ w[i - 16]).rotate_left(1); - } - - let mut a = self.h[0]; - let mut b = self.h[1]; - let mut c = self.h[2]; - let mut d = self.h[3]; - let mut e = self.h[4]; - - for (i, &wi) in w.iter().enumerate() { - let (f, k) = match i { - 0..=19 => ((b & c) | (!b & d), 0x5a827999), - 20..=39 => (b ^ c ^ d, 0x6ed9eba1), - 40..=59 => ((b & c) | (b & d) | (c & d), 0x8f1bbcdc), - 60..=79 => (b ^ c ^ d, 0xca62c1d6), - _ => unreachable!(), - }; - - let new_a = - a.rotate_left(5).wrapping_add(f).wrapping_add(e).wrapping_add(k).wrapping_add(wi); - e = d; - d = c; - c = b.rotate_left(30); - b = a; - a = new_a; - } - - self.h[0] = self.h[0].wrapping_add(a); - self.h[1] = self.h[1].wrapping_add(b); - self.h[2] = self.h[2].wrapping_add(c); - self.h[3] = self.h[3].wrapping_add(d); - self.h[4] 
= self.h[4].wrapping_add(e); - } -} - -#[cfg(test)] -mod tests { - #[test] - #[cfg(feature = "alloc")] - fn test() { - use alloc::string::ToString; - - use crate::{sha1, HashEngine}; - - #[derive(Clone)] - struct Test { - input: &'static str, - output: [u8; 20], - output_str: &'static str, - } - - #[rustfmt::skip] - let tests = [ - // Examples from wikipedia - Test { - input: "", - output: [ - 0xda, 0x39, 0xa3, 0xee, - 0x5e, 0x6b, 0x4b, 0x0d, - 0x32, 0x55, 0xbf, 0xef, - 0x95, 0x60, 0x18, 0x90, - 0xaf, 0xd8, 0x07, 0x09, - ], - output_str: "da39a3ee5e6b4b0d3255bfef95601890afd80709" - }, - Test { - input: "The quick brown fox jumps over the lazy dog", - output: [ - 0x2f, 0xd4, 0xe1, 0xc6, - 0x7a, 0x2d, 0x28, 0xfc, - 0xed, 0x84, 0x9e, 0xe1, - 0xbb, 0x76, 0xe7, 0x39, - 0x1b, 0x93, 0xeb, 0x12, - ], - output_str: "2fd4e1c67a2d28fced849ee1bb76e7391b93eb12", - }, - Test { - input: "The quick brown fox jumps over the lazy cog", - output: [ - 0xde, 0x9f, 0x2c, 0x7f, - 0xd2, 0x5e, 0x1b, 0x3a, - 0xfa, 0xd3, 0xe8, 0x5a, - 0x0b, 0xd1, 0x7d, 0x9b, - 0x10, 0x0d, 0xb4, 0xb3, - ], - output_str: "de9f2c7fd25e1b3afad3e85a0bd17d9b100db4b3", - }, - ]; - - for test in tests { - // Hash through high-level API, check hex encoding/decoding - let hash = sha1::Hash::hash(test.input.as_bytes()); - assert_eq!(hash, test.output_str.parse::().expect("parse hex")); - assert_eq!(hash.as_byte_array(), &test.output); - assert_eq!(hash.to_string(), test.output_str); - - // Hash through engine, checking that we can input byte by byte - let mut engine = sha1::Hash::engine(); - for ch in test.input.as_bytes() { - engine.input(&[*ch]); - } - let manual_hash = sha1::Hash::from_engine(engine); - assert_eq!(hash, manual_hash); - assert_eq!(hash.to_byte_array(), test.output); - } - } - - #[test] - #[cfg(feature = "serde")] - fn sha1_serde() { - use serde_test::{assert_tokens, Configure, Token}; - - use crate::sha1; - - #[rustfmt::skip] - static HASH_BYTES: [u8; 20] = [ - 0x13, 0x20, 0x72, 0xdf, - 0x69, 0x09, 0x33, 0x83, - 0x5e, 0xb8, 0xb6, 0xad, - 0x0b, 0x77, 0xe7, 0xb6, - 0xf1, 0x4a, 0xca, 0xd7, - ]; - - let hash = sha1::Hash::from_slice(&HASH_BYTES).expect("right number of bytes"); - assert_tokens(&hash.compact(), &[Token::BorrowedBytes(&HASH_BYTES[..])]); - assert_tokens(&hash.readable(), &[Token::Str("132072df690933835eb8b6ad0b77e7b6f14acad7")]); - } -} - -#[cfg(bench)] -mod benches { - use test::Bencher; - - use crate::{sha1, Hash, HashEngine}; - - #[bench] - pub fn sha1_10(bh: &mut Bencher) { - let mut engine = sha1::Hash::engine(); - let bytes = [1u8; 10]; - bh.iter(|| { - engine.input(&bytes); - }); - bh.bytes = bytes.len() as u64; - } - - #[bench] - pub fn sha1_1k(bh: &mut Bencher) { - let mut engine = sha1::Hash::engine(); - let bytes = [1u8; 1024]; - bh.iter(|| { - engine.input(&bytes); - }); - bh.bytes = bytes.len() as u64; - } - - #[bench] - pub fn sha1_64k(bh: &mut Bencher) { - let mut engine = sha1::Hash::engine(); - let bytes = [1u8; 65536]; - bh.iter(|| { - engine.input(&bytes); - }); - bh.bytes = bytes.len() as u64; - } -} diff --git a/hashes/src/sha1/benches.rs b/hashes/src/sha1/benches.rs new file mode 100644 index 0000000000..fcf2518df9 --- /dev/null +++ b/hashes/src/sha1/benches.rs @@ -0,0 +1,33 @@ +use test::Bencher; + +use crate::{sha1, Hash, HashEngine}; + +#[bench] +pub fn sha1_10(bh: &mut Bencher) { + let mut engine = sha1::Hash::engine(); + let bytes = [1u8; 10]; + bh.iter(|| { + engine.input(&bytes); + }); + bh.bytes = bytes.len() as u64; +} + +#[bench] +pub fn sha1_1k(bh: &mut Bencher) { + let mut engine 
= sha1::Hash::engine(); + let bytes = [1u8; 1024]; + bh.iter(|| { + engine.input(&bytes); + }); + bh.bytes = bytes.len() as u64; +} + +#[bench] +pub fn sha1_64k(bh: &mut Bencher) { + let mut engine = sha1::Hash::engine(); + let bytes = [1u8; 65536]; + bh.iter(|| { + engine.input(&bytes); + }); + bh.bytes = bytes.len() as u64; +} diff --git a/hashes/src/sha1/crypto.rs b/hashes/src/sha1/crypto.rs new file mode 100644 index 0000000000..70d04f506c --- /dev/null +++ b/hashes/src/sha1/crypto.rs @@ -0,0 +1,50 @@ +// SPDX-License-Identifier: CC0-1.0 + +use internals::slice::SliceExt; + +use super::{HashEngine, BLOCK_SIZE}; + +impl HashEngine { + // Basic unoptimized algorithm from Wikipedia + pub(super) fn process_block(&mut self) { + debug_assert_eq!(self.buffer.len(), BLOCK_SIZE); + + let mut w = [0u32; 80]; + for (w_val, buff_bytes) in w.iter_mut().zip(self.buffer.bitcoin_as_chunks().0) { + *w_val = u32::from_be_bytes(*buff_bytes) + } + for i in 16..80 { + w[i] = (w[i - 3] ^ w[i - 8] ^ w[i - 14] ^ w[i - 16]).rotate_left(1); + } + + let mut a = self.h[0]; + let mut b = self.h[1]; + let mut c = self.h[2]; + let mut d = self.h[3]; + let mut e = self.h[4]; + + for (i, &wi) in w.iter().enumerate() { + let (f, k) = match i { + 0..=19 => ((b & c) | (!b & d), 0x5a827999), + 20..=39 => (b ^ c ^ d, 0x6ed9eba1), + 40..=59 => ((b & c) | (b & d) | (c & d), 0x8f1bbcdc), + 60..=79 => (b ^ c ^ d, 0xca62c1d6), + _ => unreachable!(), + }; + + let new_a = + a.rotate_left(5).wrapping_add(f).wrapping_add(e).wrapping_add(k).wrapping_add(wi); + e = d; + d = c; + c = b.rotate_left(30); + b = a; + a = new_a; + } + + self.h[0] = self.h[0].wrapping_add(a); + self.h[1] = self.h[1].wrapping_add(b); + self.h[2] = self.h[2].wrapping_add(c); + self.h[3] = self.h[3].wrapping_add(d); + self.h[4] = self.h[4].wrapping_add(e); + } +} diff --git a/hashes/src/sha1/mod.rs b/hashes/src/sha1/mod.rs new file mode 100644 index 0000000000..69ca308978 --- /dev/null +++ b/hashes/src/sha1/mod.rs @@ -0,0 +1,93 @@ +// SPDX-License-Identifier: CC0-1.0 + +//! SHA1 implementation. + +use internals::slice::SliceExt; + +#[cfg(bench)] +mod benches; +mod crypto; +#[cfg(bench)] +mod tests; + +use core::cmp; + +use crate::{incomplete_block_len, HashEngine as _}; + +crate::internal_macros::general_hash_type! { + 160, + false, + "Output of the SHA1 hash function." +} + +fn from_engine(mut e: HashEngine) -> Hash { + // pad buffer with a single 1-bit then all 0s, until there are exactly 8 bytes remaining + let n_bytes_hashed = e.bytes_hashed; + + let zeroes = [0; BLOCK_SIZE - 8]; + e.input(&[0x80]); + if incomplete_block_len(&e) > zeroes.len() { + e.input(&zeroes); + } + let pad_length = zeroes.len() - incomplete_block_len(&e); + e.input(&zeroes[..pad_length]); + debug_assert_eq!(incomplete_block_len(&e), zeroes.len()); + + e.input(&(8 * n_bytes_hashed).to_be_bytes()); + debug_assert_eq!(incomplete_block_len(&e), 0); + + Hash(e.midstate()) +} + +const BLOCK_SIZE: usize = 64; + +/// Engine to compute SHA1 hash function. +#[derive(Debug, Clone)] +pub struct HashEngine { + buffer: [u8; BLOCK_SIZE], + h: [u32; 5], + bytes_hashed: u64, +} + +impl HashEngine { + /// Constructs a new SHA1 hash engine. 
+ pub const fn new() -> Self { + Self { + h: [0x67452301, 0xefcdab89, 0x98badcfe, 0x10325476, 0xc3d2e1f0], + bytes_hashed: 0, + buffer: [0; BLOCK_SIZE], + } + } + + #[cfg(not(hashes_fuzz))] + pub(crate) fn midstate(&self) -> [u8; 20] { + let mut ret = [0; 20]; + for (val, ret_bytes) in self.h.iter().zip(ret.bitcoin_as_chunks_mut().0) { + *ret_bytes = val.to_be_bytes(); + } + ret + } + + #[cfg(hashes_fuzz)] + pub(crate) fn midstate(&self) -> [u8; 20] { + let mut ret = [0; 20]; + ret.copy_from_slice(&self.buffer[..20]); + ret + } +} + +impl Default for HashEngine { + fn default() -> Self { Self::new() } +} + +impl crate::HashEngine for HashEngine { + type Hash = Hash; + type Bytes = [u8; 20]; + const BLOCK_SIZE: usize = 64; + + fn n_bytes_hashed(&self) -> u64 { self.bytes_hashed } + + crate::internal_macros::engine_input_impl!(); + + fn finalize(self) -> Self::Hash { Hash::from_engine(self) } +} diff --git a/hashes/src/sha1/tests.rs b/hashes/src/sha1/tests.rs new file mode 100644 index 0000000000..ce30cd3342 --- /dev/null +++ b/hashes/src/sha1/tests.rs @@ -0,0 +1,91 @@ +#[test] +#[cfg(feature = "alloc")] +#[cfg(feature = "hex")] +fn test() { + use alloc::string::ToString; + + use crate::{sha1, HashEngine}; + + #[derive(Clone)] + struct Test { + input: &'static str, + output: [u8; 20], + output_str: &'static str, + } + + #[rustfmt::skip] + let tests = [ + // Examples from wikipedia + Test { + input: "", + output: [ + 0xda, 0x39, 0xa3, 0xee, + 0x5e, 0x6b, 0x4b, 0x0d, + 0x32, 0x55, 0xbf, 0xef, + 0x95, 0x60, 0x18, 0x90, + 0xaf, 0xd8, 0x07, 0x09, + ], + output_str: "da39a3ee5e6b4b0d3255bfef95601890afd80709" + }, + Test { + input: "The quick brown fox jumps over the lazy dog", + output: [ + 0x2f, 0xd4, 0xe1, 0xc6, + 0x7a, 0x2d, 0x28, 0xfc, + 0xed, 0x84, 0x9e, 0xe1, + 0xbb, 0x76, 0xe7, 0x39, + 0x1b, 0x93, 0xeb, 0x12, + ], + output_str: "2fd4e1c67a2d28fced849ee1bb76e7391b93eb12", + }, + Test { + input: "The quick brown fox jumps over the lazy cog", + output: [ + 0xde, 0x9f, 0x2c, 0x7f, + 0xd2, 0x5e, 0x1b, 0x3a, + 0xfa, 0xd3, 0xe8, 0x5a, + 0x0b, 0xd1, 0x7d, 0x9b, + 0x10, 0x0d, 0xb4, 0xb3, + ], + output_str: "de9f2c7fd25e1b3afad3e85a0bd17d9b100db4b3", + }, + ]; + + for test in tests { + // Hash through high-level API, check hex encoding/decoding + let hash = sha1::Hash::hash(test.input.as_bytes()); + assert_eq!(hash, test.output_str.parse::().expect("parse hex")); + assert_eq!(hash.as_byte_array(), &test.output); + assert_eq!(hash.to_string(), test.output_str); + + // Hash through engine, checking that we can input byte by byte + let mut engine = sha1::Hash::engine(); + for ch in test.input.as_bytes() { + engine.input(&[*ch]); + } + let manual_hash = sha1::Hash::from_engine(engine); + assert_eq!(hash, manual_hash); + assert_eq!(hash.to_byte_array(), test.output); + } +} + +#[test] +#[cfg(feature = "serde")] +fn sha1_serde() { + use serde_test::{assert_tokens, Configure, Token}; + + use crate::sha1; + + #[rustfmt::skip] + static HASH_BYTES: [u8; 20] = [ + 0x13, 0x20, 0x72, 0xdf, + 0x69, 0x09, 0x33, 0x83, + 0x5e, 0xb8, 0xb6, 0xad, + 0x0b, 0x77, 0xe7, 0xb6, + 0xf1, 0x4a, 0xca, 0xd7, + ]; + + let hash = sha1::Hash::from_slice(&HASH_BYTES).expect("right number of bytes"); + assert_tokens(&hash.compact(), &[Token::BorrowedBytes(&HASH_BYTES[..])]); + assert_tokens(&hash.readable(), &[Token::Str("132072df690933835eb8b6ad0b77e7b6f14acad7")]); +} diff --git a/hashes/src/sha256/benches.rs b/hashes/src/sha256/benches.rs new file mode 100644 index 0000000000..f6e270d903 --- /dev/null +++ 
b/hashes/src/sha256/benches.rs @@ -0,0 +1,33 @@ +use test::Bencher; + +use crate::{sha256, Hash, HashEngine}; + +#[bench] +pub fn sha256_10(bh: &mut Bencher) { + let mut engine = sha256::Hash::engine(); + let bytes = [1u8; 10]; + bh.iter(|| { + engine.input(&bytes); + }); + bh.bytes = bytes.len() as u64; +} + +#[bench] +pub fn sha256_1k(bh: &mut Bencher) { + let mut engine = sha256::Hash::engine(); + let bytes = [1u8; 1024]; + bh.iter(|| { + engine.input(&bytes); + }); + bh.bytes = bytes.len() as u64; +} + +#[bench] +pub fn sha256_64k(bh: &mut Bencher) { + let mut engine = sha256::Hash::engine(); + let bytes = [1u8; 65536]; + bh.iter(|| { + engine.input(&bytes); + }); + bh.bytes = bytes.len() as u64; +} diff --git a/hashes/src/sha256.rs b/hashes/src/sha256/crypto.rs similarity index 61% rename from hashes/src/sha256.rs rename to hashes/src/sha256/crypto.rs index 98e8ec1327..eea9ff7a04 100644 --- a/hashes/src/sha256.rs +++ b/hashes/src/sha256/crypto.rs @@ -1,258 +1,13 @@ // SPDX-License-Identifier: CC0-1.0 -//! SHA256 implementation. - #[cfg(all(feature = "std", target_arch = "x86"))] use core::arch::x86::*; #[cfg(all(feature = "std", target_arch = "x86_64"))] use core::arch::x86_64::*; -use core::{cmp, convert, fmt}; - -use crate::{incomplete_block_len, sha256d, HashEngine as _}; -#[cfg(doc)] -use crate::{sha256t, sha256t_tag}; - -crate::internal_macros::general_hash_type! { - 256, - false, - "Output of the SHA256 hash function." -} - -#[cfg(not(hashes_fuzz))] -fn from_engine(mut e: HashEngine) -> Hash { - // pad buffer with a single 1-bit then all 0s, until there are exactly 8 bytes remaining - let n_bytes_hashed = e.bytes_hashed; - - let zeroes = [0; BLOCK_SIZE - 8]; - e.input(&[0x80]); - if incomplete_block_len(&e) > zeroes.len() { - e.input(&zeroes); - } - let pad_length = zeroes.len() - incomplete_block_len(&e); - e.input(&zeroes[..pad_length]); - debug_assert_eq!(incomplete_block_len(&e), zeroes.len()); - - e.input(&(8 * n_bytes_hashed).to_be_bytes()); - debug_assert_eq!(incomplete_block_len(&e), 0); - - Hash(e.midstate_unchecked().bytes) -} - -#[cfg(hashes_fuzz)] -fn from_engine(e: HashEngine) -> Hash { - let mut hash = e.midstate_unchecked().bytes; - if hash == [0; 32] { - // Assume sha256 is secure and never generate 0-hashes (which represent invalid - // secp256k1 secret keys, causing downstream application breakage). - hash[0] = 1; - } - Hash(hash) -} - -const BLOCK_SIZE: usize = 64; - -/// Engine to compute SHA256 hash function. -#[derive(Clone)] -pub struct HashEngine { - buffer: [u8; BLOCK_SIZE], - h: [u32; 8], - bytes_hashed: u64, -} - -impl HashEngine { - /// Constructs a new SHA256 hash engine. - pub const fn new() -> Self { - Self { - h: [ - 0x6a09e667, 0xbb67ae85, 0x3c6ef372, 0xa54ff53a, 0x510e527f, 0x9b05688c, 0x1f83d9ab, - 0x5be0cd19, - ], - bytes_hashed: 0, - buffer: [0; BLOCK_SIZE], - } - } - /// Constructs a new [`HashEngine`] from a [`Midstate`]. - /// - /// Please see docs on [`Midstate`] before using this function. - pub fn from_midstate(midstate: Midstate) -> HashEngine { - let mut ret = [0; 8]; - for (ret_val, midstate_bytes) in ret.iter_mut().zip(midstate.as_ref().chunks_exact(4)) { - *ret_val = u32::from_be_bytes(midstate_bytes.try_into().expect("4 byte slice")); - } +use internals::slice::SliceExt; - HashEngine { buffer: [0; BLOCK_SIZE], h: ret, bytes_hashed: midstate.bytes_hashed } - } - - /// Returns `true` if the midstate can be extracted from this engine. 
- /// - /// The midstate can only be extracted if the number of bytes input into - /// the hash engine is a multiple of 64. See caveat on [`Self::midstate`]. - /// - /// Please see docs on [`Midstate`] before using this function. - pub const fn can_extract_midstate(&self) -> bool { self.bytes_hashed % 64 == 0 } - - /// Outputs the midstate of the hash engine. - /// - /// Please see docs on [`Midstate`] before using this function. - pub fn midstate(&self) -> Result { - if !self.can_extract_midstate() { - return Err(MidstateError { invalid_n_bytes_hashed: self.bytes_hashed }); - } - Ok(self.midstate_unchecked()) - } - - // Does not check that `HashEngine::can_extract_midstate`. - #[cfg(not(hashes_fuzz))] - fn midstate_unchecked(&self) -> Midstate { - let mut ret = [0; 32]; - for (val, ret_bytes) in self.h.iter().zip(ret.chunks_exact_mut(4)) { - ret_bytes.copy_from_slice(&val.to_be_bytes()); - } - Midstate { bytes: ret, bytes_hashed: self.bytes_hashed } - } - - // Does not check that `HashEngine::can_extract_midstate`. - #[cfg(hashes_fuzz)] - fn midstate_unchecked(&self) -> Midstate { - let mut ret = [0; 32]; - ret.copy_from_slice(&self.buffer[..32]); - Midstate { bytes: ret, bytes_hashed: self.bytes_hashed } - } -} - -impl Default for HashEngine { - fn default() -> Self { Self::new() } -} - -impl crate::HashEngine for HashEngine { - const BLOCK_SIZE: usize = 64; - - fn n_bytes_hashed(&self) -> u64 { self.bytes_hashed } - - crate::internal_macros::engine_input_impl!(); -} - -impl Hash { - /// Iterate the sha256 algorithm to turn a sha256 hash into a sha256d hash - pub fn hash_again(&self) -> sha256d::Hash { - crate::Hash::from_byte_array(::hash(&self.0).0) - } - - /// Computes hash from `bytes` in `const` context. - /// - /// Warning: this function is inefficient. It should be only used in `const` context. - #[deprecated(since = "0.15.0", note = "use `Self::hash_unoptimized` instead")] - pub const fn const_hash(bytes: &[u8]) -> Self { Hash::hash_unoptimized(bytes) } - - /// Computes hash from `bytes` in `const` context. - /// - /// Warning: this function is inefficient. It should be only used in `const` context. - pub const fn hash_unoptimized(bytes: &[u8]) -> Self { - Hash(Midstate::compute_midstate_unoptimized(bytes, true).bytes) - } -} - -/// Unfinalized output of the SHA256 hash function. -/// -/// The `Midstate` type is obscure and specialized and should not be used unless you are sure of -/// what you are doing. -/// -/// It represents "partially hashed data" but does not itself have properties of cryptographic -/// hashes. For example, when (ab)used as hashes, midstates are vulnerable to trivial -/// length-extension attacks. They are typically used to optimize the computation of full hashes. -/// For example, when implementing BIP-340 tagged hashes, which always begin by hashing the same -/// fixed 64-byte prefix, it makes sense to hash the prefix once, store the midstate as a constant, -/// and hash any future data starting from the constant rather than from a fresh hash engine. -/// -/// For BIP-340 support we provide the [`sha256t`] module, and the [`sha256t_tag`] macro which will -/// create the midstate for you in const context. -#[derive(Copy, Clone, Default, PartialEq, Eq, PartialOrd, Ord, Hash)] -pub struct Midstate { - /// Raw bytes of the midstate i.e., the already-hashed contents of the hash engine. - bytes: [u8; 32], - /// Number of bytes hashed to achieve this midstate. - // INVARIANT must always be a multiple of 64. 
- bytes_hashed: u64, -} - -impl Midstate { - /// Construct a new [`Midstate`] from the `state` and the `bytes_hashed` to get to that state. - /// - /// # Panics - /// - /// Panics if `bytes_hashed` is not a multiple of 64. - pub const fn new(state: [u8; 32], bytes_hashed: u64) -> Self { - if bytes_hashed % 64 != 0 { - panic!("bytes hashed is not a multiple of 64"); - } - - Midstate { bytes: state, bytes_hashed } - } - - /// Deconstructs the [`Midstate`], returning the underlying byte array and number of bytes hashed. - pub const fn as_parts(&self) -> (&[u8; 32], u64) { (&self.bytes, self.bytes_hashed) } - - /// Deconstructs the [`Midstate`], returning the underlying byte array and number of bytes hashed. - pub const fn to_parts(self) -> ([u8; 32], u64) { (self.bytes, self.bytes_hashed) } - - /// Constructs a new midstate for tagged hashes. - /// - /// Warning: this function is inefficient. It should be only used in `const` context. - /// - /// Computes non-finalized hash of `sha256(tag) || sha256(tag)` for use in [`sha256t`]. It's - /// provided for use with [`sha256t`]. - pub const fn hash_tag(tag: &[u8]) -> Self { - let hash = Hash::hash_unoptimized(tag); - let mut buf = [0u8; 64]; - let mut i = 0usize; - while i < buf.len() { - buf[i] = hash.0[i % hash.0.len()]; - i += 1; - } - Self::compute_midstate_unoptimized(&buf, false) - } -} - -impl fmt::Debug for Midstate { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - struct Encoder<'a> { - bytes: &'a [u8; 32], - } - impl fmt::Debug for Encoder<'_> { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { crate::debug_hex(self.bytes, f) } - } - - f.debug_struct("Midstate") - .field("bytes", &Encoder { bytes: &self.bytes }) - .field("length", &self.bytes_hashed) - .finish() - } -} - -impl convert::AsRef<[u8]> for Midstate { - fn as_ref(&self) -> &[u8] { &self.bytes } -} - -/// `Midstate` invariant violated (not a multiple of 64). -#[derive(Debug, Clone, PartialEq, Eq)] -pub struct MidstateError { - /// The invalid number of bytes hashed. 
- invalid_n_bytes_hashed: u64, -} - -impl fmt::Display for MidstateError { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - write!( - f, - "invalid number of bytes hashed {} (should have been a multiple of 64)", - self.invalid_n_bytes_hashed - ) - } -} - -#[cfg(feature = "std")] -impl std::error::Error for MidstateError {} +use super::{HashEngine, Midstate, BLOCK_SIZE}; #[allow(non_snake_case)] const fn Ch(x: u32, y: u32, z: u32) -> u32 { z ^ (x & (y ^ z)) } @@ -343,7 +98,7 @@ impl Midstate { w } - const fn compute_midstate_unoptimized(bytes: &[u8], finalize: bool) -> Self { + pub(super) const fn compute_midstate_unoptimized(bytes: &[u8], finalize: bool) -> Self { let mut state = [ 0x6a09e667u32, 0xbb67ae85, @@ -493,7 +248,7 @@ impl Midstate { } impl HashEngine { - fn process_block(&mut self) { + pub(super) fn process_block(&mut self) { #[cfg(all(feature = "std", any(target_arch = "x86", target_arch = "x86_64")))] { if std::is_x86_feature_detected!("sse4.1") @@ -773,8 +528,8 @@ impl HashEngine { debug_assert_eq!(self.buffer.len(), BLOCK_SIZE); let mut w = [0u32; 16]; - for (w_val, buff_bytes) in w.iter_mut().zip(self.buffer.chunks_exact(4)) { - *w_val = u32::from_be_bytes(buff_bytes.try_into().expect("4 byte slice")); + for (w_val, buff_bytes) in w.iter_mut().zip(self.buffer.bitcoin_as_chunks().0) { + *w_val = u32::from_be_bytes(*buff_bytes); } let mut a = self.h[0]; @@ -864,277 +619,3 @@ impl HashEngine { self.h[7] = self.h[7].wrapping_add(h); } } - -#[cfg(test)] -mod tests { - use core::array; - - use super::*; - use crate::{sha256, HashEngine}; - - #[test] - #[cfg(feature = "alloc")] - fn test() { - use alloc::string::ToString; - - #[derive(Clone)] - struct Test { - input: &'static str, - output: [u8; 32], - output_str: &'static str, - } - - #[rustfmt::skip] - let tests = [ - // Examples from wikipedia - Test { - input: "", - output: [ - 0xe3, 0xb0, 0xc4, 0x42, 0x98, 0xfc, 0x1c, 0x14, - 0x9a, 0xfb, 0xf4, 0xc8, 0x99, 0x6f, 0xb9, 0x24, - 0x27, 0xae, 0x41, 0xe4, 0x64, 0x9b, 0x93, 0x4c, - 0xa4, 0x95, 0x99, 0x1b, 0x78, 0x52, 0xb8, 0x55, - ], - output_str: "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855" - }, - Test { - input: "The quick brown fox jumps over the lazy dog", - output: [ - 0xd7, 0xa8, 0xfb, 0xb3, 0x07, 0xd7, 0x80, 0x94, - 0x69, 0xca, 0x9a, 0xbc, 0xb0, 0x08, 0x2e, 0x4f, - 0x8d, 0x56, 0x51, 0xe4, 0x6d, 0x3c, 0xdb, 0x76, - 0x2d, 0x02, 0xd0, 0xbf, 0x37, 0xc9, 0xe5, 0x92, - ], - output_str: "d7a8fbb307d7809469ca9abcb0082e4f8d5651e46d3cdb762d02d0bf37c9e592", - }, - Test { - input: "The quick brown fox jumps over the lazy dog.", - output: [ - 0xef, 0x53, 0x7f, 0x25, 0xc8, 0x95, 0xbf, 0xa7, - 0x82, 0x52, 0x65, 0x29, 0xa9, 0xb6, 0x3d, 0x97, - 0xaa, 0x63, 0x15, 0x64, 0xd5, 0xd7, 0x89, 0xc2, - 0xb7, 0x65, 0x44, 0x8c, 0x86, 0x35, 0xfb, 0x6c, - ], - output_str: "ef537f25c895bfa782526529a9b63d97aa631564d5d789c2b765448c8635fb6c", - }, - ]; - - for test in tests { - // Hash through high-level API, check hex encoding/decoding - let hash = sha256::Hash::hash(test.input.as_bytes()); - assert_eq!(hash, test.output_str.parse::().expect("parse hex")); - assert_eq!(hash.as_byte_array(), &test.output); - assert_eq!(hash.to_string(), test.output_str); - - // Hash through engine, checking that we can input byte by byte - let mut engine = sha256::Hash::engine(); - for ch in test.input.as_bytes() { - engine.input(&[*ch]); - } - let manual_hash = sha256::Hash::from_engine(engine); - assert_eq!(hash, manual_hash); - assert_eq!(hash.to_byte_array(), test.output); - } - } - - 
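// A minimal sketch of the midstate optimization described above: compute the state
// after a fixed 64-byte prefix once, then resume hashing from it rather than from a
// fresh engine. The all-zero state below is a stand-in for a real precomputed
// midstate (e.g. one saved from `HashEngine::midstate` or built in const context
// with `Midstate::hash_tag`).
use crate::{sha256, HashEngine};

let precomputed: [u8; 32] = [0; 32]; // Hypothetical saved state.
let midstate = sha256::Midstate::new(precomputed, 64); // 64 bytes already hashed.
let mut engine = sha256::HashEngine::from_midstate(midstate);
engine.input(b"message body");
let _hash = sha256::Hash::from_engine(engine);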
#[test] - #[cfg(feature = "alloc")] - fn fmt_roundtrips() { - use alloc::format; - - let hash = sha256::Hash::hash(b"some arbitrary bytes"); - let hex = format!("{}", hash); - let rinsed = hex.parse::().expect("failed to parse hex"); - assert_eq!(rinsed, hash) - } - - #[test] - #[rustfmt::skip] - pub(crate) fn midstate() { - // Test vector obtained by doing an asset issuance on Elements - let mut engine = sha256::Hash::engine(); - // sha256dhash of outpoint - // 73828cbc65fd68ab78dc86992b76ae50ae2bf8ceedbe8de0483172f0886219f7:0 - engine.input(&[ - 0x9d, 0xd0, 0x1b, 0x56, 0xb1, 0x56, 0x45, 0x14, - 0x3e, 0xad, 0x15, 0x8d, 0xec, 0x19, 0xf8, 0xce, - 0xa9, 0x0b, 0xd0, 0xa9, 0xb2, 0xf8, 0x1d, 0x21, - 0xff, 0xa3, 0xa4, 0xc6, 0x44, 0x81, 0xd4, 0x1c, - ]); - // 32 bytes of zeroes representing "new asset" - engine.input(&[0; 32]); - - // RPC output - static WANT: Midstate = sha256::Midstate::new([ - 0x0b, 0xcf, 0xe0, 0xe5, 0x4e, 0x6c, 0xc7, 0xd3, - 0x4f, 0x4f, 0x7c, 0x1d, 0xf0, 0xb0, 0xf5, 0x03, - 0xf2, 0xf7, 0x12, 0x91, 0x2a, 0x06, 0x05, 0xb4, - 0x14, 0xed, 0x33, 0x7f, 0x7f, 0x03, 0x2e, 0x03, - ], 64); - - assert_eq!( - engine.midstate().expect("total_bytes_hashed is valid"), - WANT, - ); - } - - #[test] - fn engine_with_state() { - let mut engine = sha256::Hash::engine(); - let midstate_engine = sha256::HashEngine::from_midstate(engine.midstate_unchecked()); - // Fresh engine and engine initialized with fresh state should have same state - assert_eq!(engine.h, midstate_engine.h); - - // Midstate changes after writing 64 bytes - engine.input(&[1; 63]); - assert_eq!(engine.h, midstate_engine.h); - engine.input(&[2; 1]); - assert_ne!(engine.h, midstate_engine.h); - - // Initializing an engine with midstate from another engine should result in - // both engines producing the same hashes - let data_vec: &[&[u8]] = &[&[3u8; 1], &[4u8; 63], &[5u8; 65], &[6u8; 66]]; - for data in data_vec { - let mut engine = engine.clone(); - let mut midstate_engine = - sha256::HashEngine::from_midstate(engine.midstate_unchecked()); - assert_eq!(engine.h, midstate_engine.h); - assert_eq!(engine.bytes_hashed, midstate_engine.bytes_hashed); - engine.input(data); - midstate_engine.input(data); - assert_eq!(engine.h, midstate_engine.h); - let hash1 = sha256::Hash::from_engine(engine); - let hash2 = sha256::Hash::from_engine(midstate_engine); - assert_eq!(hash1, hash2); - } - - // Test that a specific midstate results in a specific hash. Midstate was - // obtained by applying sha256 to sha256("MuSig coefficient")||sha256("MuSig - // coefficient"). 
- #[rustfmt::skip] - static MIDSTATE: [u8; 32] = [ - 0x0f, 0xd0, 0x69, 0x0c, 0xfe, 0xfe, 0xae, 0x97, - 0x99, 0x6e, 0xac, 0x7f, 0x5c, 0x30, 0xd8, 0x64, - 0x8c, 0x4a, 0x05, 0x73, 0xac, 0xa1, 0xa2, 0x2f, - 0x6f, 0x43, 0xb8, 0x01, 0x85, 0xce, 0x27, 0xcd, - ]; - #[rustfmt::skip] - static HASH_EXPECTED: [u8; 32] = [ - 0x18, 0x84, 0xe4, 0x72, 0x40, 0x4e, 0xf4, 0x5a, - 0xb4, 0x9c, 0x4e, 0xa4, 0x9a, 0xe6, 0x23, 0xa8, - 0x88, 0x52, 0x7f, 0x7d, 0x8a, 0x06, 0x94, 0x20, - 0x8f, 0xf1, 0xf7, 0xa9, 0xd5, 0x69, 0x09, 0x59, - ]; - let midstate_engine = - sha256::HashEngine::from_midstate(sha256::Midstate::new(MIDSTATE, 64)); - let hash = sha256::Hash::from_engine(midstate_engine); - assert_eq!(hash, sha256::Hash(HASH_EXPECTED)); - } - - #[test] - fn hash_unoptimized() { - let bytes: [u8; 256] = array::from_fn(|i| i as u8); - - for i in 0..=256 { - let bytes = &bytes[0..i]; - assert_eq!( - Hash::hash(bytes), - Hash::hash_unoptimized(bytes), - "hashes don't match for n_bytes_hashed {}", - i + 1 - ); - } - } - - // The midstate of an empty hash engine tagged with "TapLeaf". - const TAP_LEAF_MIDSTATE: Midstate = Midstate::new( - [ - 156, 224, 228, 230, 124, 17, 108, 57, 56, 179, 202, 242, 195, 15, 80, 137, 211, 243, - 147, 108, 71, 99, 110, 96, 125, 179, 62, 234, 221, 198, 240, 201, - ], - 64, - ); - - #[test] - fn const_midstate() { assert_eq!(Midstate::hash_tag(b"TapLeaf"), TAP_LEAF_MIDSTATE,) } - - #[test] - #[cfg(feature = "alloc")] - fn regression_midstate_debug_format() { - use alloc::format; - - let want = "Midstate { bytes: 9ce0e4e67c116c3938b3caf2c30f5089d3f3936c47636e607db33eeaddc6f0c9, length: 64 }"; - let got = format!("{:?}", TAP_LEAF_MIDSTATE); - assert_eq!(got, want); - } - - #[test] - #[cfg(feature = "serde")] - fn sha256_serde() { - use serde_test::{assert_tokens, Configure, Token}; - - #[rustfmt::skip] - static HASH_BYTES: [u8; 32] = [ - 0xef, 0x53, 0x7f, 0x25, 0xc8, 0x95, 0xbf, 0xa7, - 0x82, 0x52, 0x65, 0x29, 0xa9, 0xb6, 0x3d, 0x97, - 0xaa, 0x63, 0x15, 0x64, 0xd5, 0xd7, 0x89, 0xc2, - 0xb7, 0x65, 0x44, 0x8c, 0x86, 0x35, 0xfb, 0x6c, - ]; - - let hash = sha256::Hash::from_slice(&HASH_BYTES).expect("right number of bytes"); - assert_tokens(&hash.compact(), &[Token::BorrowedBytes(&HASH_BYTES[..])]); - assert_tokens( - &hash.readable(), - &[Token::Str("ef537f25c895bfa782526529a9b63d97aa631564d5d789c2b765448c8635fb6c")], - ); - } - - #[cfg(target_arch = "wasm32")] - mod wasm_tests { - use super::*; - #[test] - #[wasm_bindgen_test::wasm_bindgen_test] - fn sha256_tests() { - test(); - midstate(); - engine_with_state(); - } - } -} - -#[cfg(bench)] -mod benches { - use test::Bencher; - - use crate::{sha256, Hash, HashEngine}; - - #[bench] - pub fn sha256_10(bh: &mut Bencher) { - let mut engine = sha256::Hash::engine(); - let bytes = [1u8; 10]; - bh.iter(|| { - engine.input(&bytes); - }); - bh.bytes = bytes.len() as u64; - } - - #[bench] - pub fn sha256_1k(bh: &mut Bencher) { - let mut engine = sha256::Hash::engine(); - let bytes = [1u8; 1024]; - bh.iter(|| { - engine.input(&bytes); - }); - bh.bytes = bytes.len() as u64; - } - - #[bench] - pub fn sha256_64k(bh: &mut Bencher) { - let mut engine = sha256::Hash::engine(); - let bytes = [1u8; 65536]; - bh.iter(|| { - engine.input(&bytes); - }); - bh.bytes = bytes.len() as u64; - } -} diff --git a/hashes/src/sha256/mod.rs b/hashes/src/sha256/mod.rs new file mode 100644 index 0000000000..a45eb0bfee --- /dev/null +++ b/hashes/src/sha256/mod.rs @@ -0,0 +1,262 @@ +// SPDX-License-Identifier: CC0-1.0 + +//! SHA256 implementation. 
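The `engine_with_state` test moved out above exercises the midstate round-trip that this module exposes through `midstate()` and `from_midstate()`. A condensed sketch of that property, assuming the public API exactly as it appears in this diff (the fallible `midstate()` and the `finalize()` method on the engine trait):

```rust
use bitcoin_hashes::{sha256, HashEngine as _};

fn main() {
    let mut engine = sha256::HashEngine::new();
    engine.input(&[1u8; 64]); // a whole block, so the midstate is extractable

    // Resume hashing from the captured midstate in a second engine.
    let midstate = engine.midstate().expect("64 bytes hashed is a multiple of 64");
    let mut resumed = sha256::HashEngine::from_midstate(midstate);

    engine.input(b"more data");
    resumed.input(b"more data");

    // Both engines must now produce the same digest.
    assert_eq!(engine.finalize(), resumed.finalize());
}
```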
+ +#[cfg(bench)] +mod benches; +mod crypto; +#[cfg(bench)] +mod tests; + +use core::{cmp, convert, fmt}; + +use internals::slice::SliceExt; + +use crate::{incomplete_block_len, sha256d, HashEngine as _}; +#[cfg(doc)] +use crate::{sha256t, sha256t_tag}; + +crate::internal_macros::general_hash_type! { + 256, + false, + "Output of the SHA256 hash function." +} + +#[cfg(not(hashes_fuzz))] +fn from_engine(mut e: HashEngine) -> Hash { + // pad buffer with a single 1-bit then all 0s, until there are exactly 8 bytes remaining + let n_bytes_hashed = e.bytes_hashed; + + let zeroes = [0; BLOCK_SIZE - 8]; + e.input(&[0x80]); + if incomplete_block_len(&e) > zeroes.len() { + e.input(&zeroes); + } + let pad_length = zeroes.len() - incomplete_block_len(&e); + e.input(&zeroes[..pad_length]); + debug_assert_eq!(incomplete_block_len(&e), zeroes.len()); + + e.input(&(8 * n_bytes_hashed).to_be_bytes()); + debug_assert_eq!(incomplete_block_len(&e), 0); + + Hash(e.midstate_unchecked().bytes) +} + +#[cfg(hashes_fuzz)] +fn from_engine(e: HashEngine) -> Hash { + let mut hash = e.midstate_unchecked().bytes; + if hash == [0; 32] { + // Assume sha256 is secure and never generate 0-hashes (which represent invalid + // secp256k1 secret keys, causing downstream application breakage). + hash[0] = 1; + } + Hash(hash) +} + +const BLOCK_SIZE: usize = 64; + +/// Engine to compute SHA256 hash function. +#[derive(Debug, Clone)] +pub struct HashEngine { + buffer: [u8; BLOCK_SIZE], + h: [u32; 8], + bytes_hashed: u64, +} + +impl HashEngine { + /// Constructs a new SHA256 hash engine. + pub const fn new() -> Self { + Self { + h: [ + 0x6a09e667, 0xbb67ae85, 0x3c6ef372, 0xa54ff53a, 0x510e527f, 0x9b05688c, 0x1f83d9ab, + 0x5be0cd19, + ], + bytes_hashed: 0, + buffer: [0; BLOCK_SIZE], + } + } + + /// Constructs a new [`HashEngine`] from a [`Midstate`]. + /// + /// Please see docs on [`Midstate`] before using this function. + pub fn from_midstate(midstate: Midstate) -> HashEngine { + let mut ret = [0; 8]; + for (ret_val, midstate_bytes) in ret.iter_mut().zip(midstate.as_ref().bitcoin_as_chunks().0) + { + *ret_val = u32::from_be_bytes(*midstate_bytes); + } + + HashEngine { buffer: [0; BLOCK_SIZE], h: ret, bytes_hashed: midstate.bytes_hashed } + } + + /// Returns `true` if the midstate can be extracted from this engine. + /// + /// The midstate can only be extracted if the number of bytes input into + /// the hash engine is a multiple of 64. See caveat on [`Self::midstate`]. + /// + /// Please see docs on [`Midstate`] before using this function. + pub const fn can_extract_midstate(&self) -> bool { self.bytes_hashed % 64 == 0 } + + /// Outputs the midstate of the hash engine. + /// + /// Please see docs on [`Midstate`] before using this function. + pub fn midstate(&self) -> Result<Midstate, MidstateError> { + if !self.can_extract_midstate() { + return Err(MidstateError { invalid_n_bytes_hashed: self.bytes_hashed }); + } + Ok(self.midstate_unchecked()) + } + + // Does not check that `HashEngine::can_extract_midstate`. + #[cfg(not(hashes_fuzz))] + fn midstate_unchecked(&self) -> Midstate { + let mut ret = [0; 32]; + for (val, ret_bytes) in self.h.iter().zip(ret.bitcoin_as_chunks_mut::<4>().0) { + *ret_bytes = val.to_be_bytes(); + } + Midstate { bytes: ret, bytes_hashed: self.bytes_hashed } + } + + // Does not check that `HashEngine::can_extract_midstate`. 
+ #[cfg(hashes_fuzz)] + fn midstate_unchecked(&self) -> Midstate { + let mut ret = [0; 32]; + ret.copy_from_slice(&self.buffer[..32]); + Midstate { bytes: ret, bytes_hashed: self.bytes_hashed } + } +} + +impl Default for HashEngine { + fn default() -> Self { Self::new() } +} + +impl crate::HashEngine for HashEngine { + type Hash = Hash; + type Bytes = [u8; 32]; + const BLOCK_SIZE: usize = 64; + + fn n_bytes_hashed(&self) -> u64 { self.bytes_hashed } + crate::internal_macros::engine_input_impl!(); + fn finalize(self) -> Self::Hash { Hash::from_engine(self) } +} + +impl Hash { + /// Iterate the sha256 algorithm to turn a sha256 hash into a sha256d hash + #[must_use] + pub fn hash_again(&self) -> sha256d::Hash { sha256d::Hash::from_byte_array(hash(&self.0).0) } + + /// Computes hash from `bytes` in `const` context. + /// + /// Warning: this function is inefficient. It should be only used in `const` context. + #[deprecated(since = "0.15.0", note = "use `Self::hash_unoptimized` instead")] + pub const fn const_hash(bytes: &[u8]) -> Self { Hash::hash_unoptimized(bytes) } + + /// Computes hash from `bytes` in `const` context. + /// + /// Warning: this function is inefficient. It should be only used in `const` context. + pub const fn hash_unoptimized(bytes: &[u8]) -> Self { + Hash(Midstate::compute_midstate_unoptimized(bytes, true).bytes) + } +} + +/// Unfinalized output of the SHA256 hash function. +/// +/// The `Midstate` type is obscure and specialized and should not be used unless you are sure of +/// what you are doing. +/// +/// It represents "partially hashed data" but does not itself have properties of cryptographic +/// hashes. For example, when (ab)used as hashes, midstates are vulnerable to trivial +/// length-extension attacks. They are typically used to optimize the computation of full hashes. +/// For example, when implementing BIP-340 tagged hashes, which always begin by hashing the same +/// fixed 64-byte prefix, it makes sense to hash the prefix once, store the midstate as a constant, +/// and hash any future data starting from the constant rather than from a fresh hash engine. +/// +/// For BIP-340 support we provide the [`sha256t`] module, and the [`sha256t_tag`] macro which will +/// create the midstate for you in const context. +#[derive(Copy, Clone, Default, PartialEq, Eq, PartialOrd, Ord, Hash)] +pub struct Midstate { + /// Raw bytes of the midstate i.e., the already-hashed contents of the hash engine. + bytes: [u8; 32], + /// Number of bytes hashed to achieve this midstate. + // INVARIANT must always be a multiple of 64. + bytes_hashed: u64, +} + +impl Midstate { + /// Construct a new [`Midstate`] from the `state` and the `bytes_hashed` to get to that state. + /// + /// # Panics + /// + /// Panics if `bytes_hashed` is not a multiple of 64. + pub const fn new(state: [u8; 32], bytes_hashed: u64) -> Self { + if bytes_hashed % 64 != 0 { + panic!("bytes hashed is not a multiple of 64"); + } + + Midstate { bytes: state, bytes_hashed } + } + + /// Deconstructs the [`Midstate`], returning the underlying byte array and number of bytes hashed. + pub const fn as_parts(&self) -> (&[u8; 32], u64) { (&self.bytes, self.bytes_hashed) } + + /// Deconstructs the [`Midstate`], returning the underlying byte array and number of bytes hashed. + pub const fn to_parts(self) -> ([u8; 32], u64) { (self.bytes, self.bytes_hashed) } + + /// Constructs a new midstate for tagged hashes. + /// + /// Warning: this function is inefficient. It should be only used in `const` context. 
+ /// + /// Computes non-finalized hash of `sha256(tag) || sha256(tag)` for use in [`sha256t`]. It's + /// provided for use with [`sha256t`]. + #[must_use] + pub const fn hash_tag(tag: &[u8]) -> Self { + let hash = Hash::hash_unoptimized(tag); + let mut buf = [0u8; 64]; + let mut i = 0usize; + while i < buf.len() { + buf[i] = hash.0[i % hash.0.len()]; + i += 1; + } + Self::compute_midstate_unoptimized(&buf, false) + } +} + +impl fmt::Debug for Midstate { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + struct Encoder<'a> { + bytes: &'a [u8; 32], + } + impl fmt::Debug for Encoder<'_> { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { crate::debug_hex(self.bytes, f) } + } + + f.debug_struct("Midstate") + .field("bytes", &Encoder { bytes: &self.bytes }) + .field("length", &self.bytes_hashed) + .finish() + } +} + +impl convert::AsRef<[u8]> for Midstate { + fn as_ref(&self) -> &[u8] { &self.bytes } +} + +/// `Midstate` invariant violated (not a multiple of 64). +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct MidstateError { + /// The invalid number of bytes hashed. + invalid_n_bytes_hashed: u64, +} + +impl fmt::Display for MidstateError { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + write!( + f, + "invalid number of bytes hashed {} (should have been a multiple of 64)", + self.invalid_n_bytes_hashed + ) + } +} + +#[cfg(feature = "std")] +impl std::error::Error for MidstateError {} diff --git a/hashes/src/sha256/tests.rs b/hashes/src/sha256/tests.rs new file mode 100644 index 0000000000..4e45ae6545 --- /dev/null +++ b/hashes/src/sha256/tests.rs @@ -0,0 +1,233 @@ +use core::array; + +use super::*; +use crate::{sha256, HashEngine}; + +#[test] +#[cfg(feature = "alloc")] +#[cfg(feature = "hex")] +fn test() { + use alloc::string::ToString; + + #[derive(Clone)] + struct Test { + input: &'static str, + output: [u8; 32], + output_str: &'static str, + } + + #[rustfmt::skip] + let tests = [ + // Examples from wikipedia + Test { + input: "", + output: [ + 0xe3, 0xb0, 0xc4, 0x42, 0x98, 0xfc, 0x1c, 0x14, + 0x9a, 0xfb, 0xf4, 0xc8, 0x99, 0x6f, 0xb9, 0x24, + 0x27, 0xae, 0x41, 0xe4, 0x64, 0x9b, 0x93, 0x4c, + 0xa4, 0x95, 0x99, 0x1b, 0x78, 0x52, 0xb8, 0x55, + ], + output_str: "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855" + }, + Test { + input: "The quick brown fox jumps over the lazy dog", + output: [ + 0xd7, 0xa8, 0xfb, 0xb3, 0x07, 0xd7, 0x80, 0x94, + 0x69, 0xca, 0x9a, 0xbc, 0xb0, 0x08, 0x2e, 0x4f, + 0x8d, 0x56, 0x51, 0xe4, 0x6d, 0x3c, 0xdb, 0x76, + 0x2d, 0x02, 0xd0, 0xbf, 0x37, 0xc9, 0xe5, 0x92, + ], + output_str: "d7a8fbb307d7809469ca9abcb0082e4f8d5651e46d3cdb762d02d0bf37c9e592", + }, + Test { + input: "The quick brown fox jumps over the lazy dog.", + output: [ + 0xef, 0x53, 0x7f, 0x25, 0xc8, 0x95, 0xbf, 0xa7, + 0x82, 0x52, 0x65, 0x29, 0xa9, 0xb6, 0x3d, 0x97, + 0xaa, 0x63, 0x15, 0x64, 0xd5, 0xd7, 0x89, 0xc2, + 0xb7, 0x65, 0x44, 0x8c, 0x86, 0x35, 0xfb, 0x6c, + ], + output_str: "ef537f25c895bfa782526529a9b63d97aa631564d5d789c2b765448c8635fb6c", + }, + ]; + + for test in tests { + // Hash through high-level API, check hex encoding/decoding + let hash = sha256::Hash::hash(test.input.as_bytes()); + assert_eq!(hash, test.output_str.parse::().expect("parse hex")); + assert_eq!(hash.as_byte_array(), &test.output); + assert_eq!(hash.to_string(), test.output_str); + + // Hash through engine, checking that we can input byte by byte + let mut engine = sha256::Hash::engine(); + for ch in test.input.as_bytes() { + engine.input(&[*ch]); + } + let manual_hash = 
sha256::Hash::from_engine(engine); + assert_eq!(hash, manual_hash); + assert_eq!(hash.to_byte_array(), test.output); + } +} + +#[test] +#[cfg(feature = "alloc")] +#[cfg(feature = "hex")] +fn fmt_roundtrips() { + use alloc::format; + + let hash = sha256::Hash::hash(b"some arbitrary bytes"); + let hex = format!("{}", hash); + let rinsed = hex.parse::().expect("failed to parse hex"); + assert_eq!(rinsed, hash) +} + +#[test] +#[rustfmt::skip] +pub(crate) fn midstate() { + // Test vector obtained by doing an asset issuance on Elements + let mut engine = sha256::Hash::engine(); + // sha256dhash of outpoint + // 73828cbc65fd68ab78dc86992b76ae50ae2bf8ceedbe8de0483172f0886219f7:0 + engine.input(&[ + 0x9d, 0xd0, 0x1b, 0x56, 0xb1, 0x56, 0x45, 0x14, + 0x3e, 0xad, 0x15, 0x8d, 0xec, 0x19, 0xf8, 0xce, + 0xa9, 0x0b, 0xd0, 0xa9, 0xb2, 0xf8, 0x1d, 0x21, + 0xff, 0xa3, 0xa4, 0xc6, 0x44, 0x81, 0xd4, 0x1c, + ]); + // 32 bytes of zeroes representing "new asset" + engine.input(&[0; 32]); + + // RPC output + static WANT: Midstate = sha256::Midstate::new([ + 0x0b, 0xcf, 0xe0, 0xe5, 0x4e, 0x6c, 0xc7, 0xd3, + 0x4f, 0x4f, 0x7c, 0x1d, 0xf0, 0xb0, 0xf5, 0x03, + 0xf2, 0xf7, 0x12, 0x91, 0x2a, 0x06, 0x05, 0xb4, + 0x14, 0xed, 0x33, 0x7f, 0x7f, 0x03, 0x2e, 0x03, + ], 64); + + assert_eq!( + engine.midstate().expect("total_bytes_hashed is valid"), + WANT, + ); +} + +#[test] +fn engine_with_state() { + let mut engine = sha256::Hash::engine(); + let midstate_engine = sha256::HashEngine::from_midstate(engine.midstate_unchecked()); + // Fresh engine and engine initialized with fresh state should have same state + assert_eq!(engine.h, midstate_engine.h); + + // Midstate changes after writing 64 bytes + engine.input(&[1; 63]); + assert_eq!(engine.h, midstate_engine.h); + engine.input(&[2; 1]); + assert_ne!(engine.h, midstate_engine.h); + + // Initializing an engine with midstate from another engine should result in + // both engines producing the same hashes + let data_vec: &[&[u8]] = &[&[3u8; 1], &[4u8; 63], &[5u8; 65], &[6u8; 66]]; + for data in data_vec { + let mut engine = engine.clone(); + let mut midstate_engine = sha256::HashEngine::from_midstate(engine.midstate_unchecked()); + assert_eq!(engine.h, midstate_engine.h); + assert_eq!(engine.bytes_hashed, midstate_engine.bytes_hashed); + engine.input(data); + midstate_engine.input(data); + assert_eq!(engine.h, midstate_engine.h); + let hash1 = sha256::Hash::from_engine(engine); + let hash2 = sha256::Hash::from_engine(midstate_engine); + assert_eq!(hash1, hash2); + } + + // Test that a specific midstate results in a specific hash. Midstate was + // obtained by applying sha256 to sha256("MuSig coefficient")||sha256("MuSig + // coefficient"). 
+ #[rustfmt::skip] + static MIDSTATE: [u8; 32] = [ + 0x0f, 0xd0, 0x69, 0x0c, 0xfe, 0xfe, 0xae, 0x97, + 0x99, 0x6e, 0xac, 0x7f, 0x5c, 0x30, 0xd8, 0x64, + 0x8c, 0x4a, 0x05, 0x73, 0xac, 0xa1, 0xa2, 0x2f, + 0x6f, 0x43, 0xb8, 0x01, 0x85, 0xce, 0x27, 0xcd, + ]; + #[rustfmt::skip] + static HASH_EXPECTED: [u8; 32] = [ + 0x18, 0x84, 0xe4, 0x72, 0x40, 0x4e, 0xf4, 0x5a, + 0xb4, 0x9c, 0x4e, 0xa4, 0x9a, 0xe6, 0x23, 0xa8, + 0x88, 0x52, 0x7f, 0x7d, 0x8a, 0x06, 0x94, 0x20, + 0x8f, 0xf1, 0xf7, 0xa9, 0xd5, 0x69, 0x09, 0x59, + ]; + let midstate_engine = sha256::HashEngine::from_midstate(sha256::Midstate::new(MIDSTATE, 64)); + let hash = sha256::Hash::from_engine(midstate_engine); + assert_eq!(hash, sha256::Hash(HASH_EXPECTED)); +} + +#[test] +fn hash_unoptimized() { + let bytes: [u8; 256] = array::from_fn(|i| i as u8); + + for i in 0..=256 { + let bytes = &bytes[0..i]; + assert_eq!( + Hash::hash(bytes), + Hash::hash_unoptimized(bytes), + "hashes don't match for n_bytes_hashed {}", + i + 1 + ); + } +} + +// The midstate of an empty hash engine tagged with "TapLeaf". +const TAP_LEAF_MIDSTATE: Midstate = Midstate::new( + [ + 156, 224, 228, 230, 124, 17, 108, 57, 56, 179, 202, 242, 195, 15, 80, 137, 211, 243, 147, + 108, 71, 99, 110, 96, 125, 179, 62, 234, 221, 198, 240, 201, + ], + 64, +); + +#[test] +fn const_midstate() { assert_eq!(Midstate::hash_tag(b"TapLeaf"), TAP_LEAF_MIDSTATE,) } + +#[test] +#[cfg(feature = "alloc")] +fn regression_midstate_debug_format() { + use alloc::format; + + let want = "Midstate { bytes: 9ce0e4e67c116c3938b3caf2c30f5089d3f3936c47636e607db33eeaddc6f0c9, length: 64 }"; + let got = format!("{:?}", TAP_LEAF_MIDSTATE); + assert_eq!(got, want); +} + +#[test] +#[cfg(feature = "serde")] +fn sha256_serde() { + use serde_test::{assert_tokens, Configure, Token}; + + #[rustfmt::skip] + static HASH_BYTES: [u8; 32] = [ + 0xef, 0x53, 0x7f, 0x25, 0xc8, 0x95, 0xbf, 0xa7, + 0x82, 0x52, 0x65, 0x29, 0xa9, 0xb6, 0x3d, 0x97, + 0xaa, 0x63, 0x15, 0x64, 0xd5, 0xd7, 0x89, 0xc2, + 0xb7, 0x65, 0x44, 0x8c, 0x86, 0x35, 0xfb, 0x6c, + ]; + + let hash = sha256::Hash::from_slice(&HASH_BYTES).expect("right number of bytes"); + assert_tokens(&hash.compact(), &[Token::BorrowedBytes(&HASH_BYTES[..])]); + assert_tokens( + &hash.readable(), + &[Token::Str("ef537f25c895bfa782526529a9b63d97aa631564d5d789c2b765448c8635fb6c")], + ); +} + +#[cfg(target_arch = "wasm32")] +mod wasm_tests { + use super::*; + #[test] + #[wasm_bindgen_test::wasm_bindgen_test] + fn sha256_tests() { + test(); + midstate(); + engine_with_state(); + } +} diff --git a/hashes/src/sha256d.rs b/hashes/src/sha256d/mod.rs similarity index 95% rename from hashes/src/sha256d.rs rename to hashes/src/sha256d/mod.rs index 332d6bc118..69bb8a72a3 100644 --- a/hashes/src/sha256d.rs +++ b/hashes/src/sha256d/mod.rs @@ -10,8 +10,17 @@ crate::internal_macros::general_hash_type! { "Output of the SHA256d hash function." } +fn from_engine(e: HashEngine) -> Hash { + let sha2 = sha256::Hash::from_engine(e.0); + let sha2d = sha256::Hash::hash(sha2.as_byte_array()); + + let mut ret = [0; 32]; + ret.copy_from_slice(sha2d.as_byte_array()); + Hash(ret) +} + /// Engine to compute SHA256d hash function. 
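The `from_engine` shown above for `sha256d` spells out what SHA256d is: the SHA256 of a SHA256 digest. The `hash_again` method on `sha256::Hash` expresses the same relationship, so the two always agree. A small sketch, assuming the `bitcoin_hashes` API as shown in this diff:

```rust
use bitcoin_hashes::{sha256, sha256d};

fn main() {
    let data = b"abc";
    let single = sha256::Hash::hash(data);
    let double = sha256d::Hash::hash(data);

    // SHA256d(data) == SHA256(SHA256(data)); `hash_again` performs the second pass.
    assert_eq!(single.hash_again(), double);
}
```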
-#[derive(Clone)] +#[derive(Debug, Clone)] pub struct HashEngine(sha256::HashEngine); impl HashEngine { @@ -24,18 +33,13 @@ impl Default for HashEngine { } impl crate::HashEngine for HashEngine { + type Hash = Hash; + type Bytes = [u8; 32]; const BLOCK_SIZE: usize = 64; // Same as sha256::HashEngine::BLOCK_SIZE; + fn input(&mut self, data: &[u8]) { self.0.input(data) } fn n_bytes_hashed(&self) -> u64 { self.0.n_bytes_hashed() } -} - -fn from_engine(e: HashEngine) -> Hash { - let sha2 = sha256::Hash::from_engine(e.0); - let sha2d = sha256::Hash::hash(sha2.as_byte_array()); - - let mut ret = [0; 32]; - ret.copy_from_slice(sha2d.as_byte_array()); - Hash(ret) + fn finalize(self) -> Self::Hash { Hash::from_engine(self) } } #[cfg(test)] @@ -45,6 +49,7 @@ mod tests { #[test] #[cfg(feature = "alloc")] + #[cfg(feature = "hex")] fn test() { use alloc::string::ToString; @@ -98,6 +103,7 @@ mod tests { #[test] #[cfg(feature = "alloc")] + #[cfg(feature = "hex")] fn fmt_roundtrips() { use alloc::format; @@ -120,7 +126,7 @@ mod tests { 0xb7, 0x65, 0x44, 0x8c, 0x86, 0x35, 0xfb, 0x6c, ]; - let hash = sha256d::Hash::from_slice(&HASH_BYTES).expect("right number of bytes"); + let hash = sha256d::Hash::from_byte_array(HASH_BYTES); assert_tokens(&hash.compact(), &[Token::BorrowedBytes(&HASH_BYTES[..])]); assert_tokens( &hash.readable(), diff --git a/hashes/src/sha256t.rs b/hashes/src/sha256t/mod.rs similarity index 58% rename from hashes/src/sha256t.rs rename to hashes/src/sha256t/mod.rs index cb3f4aaaac..d5acd5edfd 100644 --- a/hashes/src/sha256t.rs +++ b/hashes/src/sha256t/mod.rs @@ -5,14 +5,42 @@ use core::cmp; use core::marker::PhantomData; -use crate::{sha256, FromSliceError, HashEngine as _}; +#[cfg(doc)] +use crate::sha256::Midstate; +use crate::{sha256, HashEngine as _}; -type HashEngine = sha256::HashEngine; +/// Hashes some bytes. +pub fn hash(data: &[u8]) -> Hash +where + T: Tag, +{ + use crate::HashEngine as _; + + let mut engine = HashEngine::default(); + engine.input(data); + engine.finalize() +} + +/// Hashes all the byte slices retrieved from the iterator together. +pub fn hash_byte_chunks(byte_slices: I) -> Hash +where + B: AsRef<[u8]>, + I: IntoIterator, + T: Tag, +{ + use crate::HashEngine as _; + + let mut engine = HashEngine::default(); + for slice in byte_slices { + engine.input(slice.as_ref()); + } + engine.finalize() +} /// Trait representing a tag that can be used as a context for SHA256t hashes. pub trait Tag { - /// Returns a hash engine that is pre-tagged and is ready to be used for the data. - fn engine() -> sha256::HashEngine; + /// The [`Midstate`] after pre-tagging the hash engine. + const MIDSTATE: sha256::Midstate; } /// Output of the SHA256t hash function. @@ -44,11 +72,12 @@ where /// Copies a byte slice into a hash object. #[deprecated(since = "0.15.0", note = "use `from_byte_array` instead")] - pub fn from_slice(sl: &[u8]) -> Result, FromSliceError> { + #[allow(deprecated_in_future)] // Because of `FromSliceError`. + pub fn from_slice(sl: &[u8]) -> Result, crate::FromSliceError> { use crate::error::FromSliceErrorInner; if sl.len() != 32 { - Err(FromSliceError(FromSliceErrorInner { expected: 32, got: sl.len() })) + Err(crate::error::FromSliceError(FromSliceErrorInner { expected: 32, got: sl.len() })) } else { let mut ret = [0; 32]; ret.copy_from_slice(sl); @@ -57,10 +86,12 @@ where } /// Produces a hash from the current state of a given engine. 
- pub fn from_engine(e: HashEngine) -> Hash { from_engine(e) } + pub fn from_engine(e: HashEngine) -> Hash { + Hash::from_byte_array(sha256::Hash::from_engine(e.0).to_byte_array()) + } /// Constructs a new engine. - pub fn engine() -> HashEngine { T::engine() } + pub fn engine() -> HashEngine { HashEngine::default() } /// Hashes some bytes. #[allow(clippy::self_named_constructors)] // Hash is a noun and a verb. @@ -85,25 +116,6 @@ where Self::from_engine(engine) } - /// Hashes the entire contents of the `reader`. - #[cfg(feature = "bitcoin-io")] - pub fn hash_reader(reader: &mut R) -> Result { - let mut engine = Self::engine(); - loop { - let bytes = reader.fill_buf()?; - - let read = bytes.len(); - // Empty slice means EOF. - if read == 0 { - break; - } - - engine.input(bytes); - reader.consume(read); - } - Ok(Self::from_engine(engine)) - } - /// Returns the underlying byte array. pub const fn to_byte_array(self) -> [u8; 32] { self.0 } @@ -119,9 +131,6 @@ impl PartialEq for Hash { fn eq(&self, other: &Hash) -> bool { self.0 == other.0 } } impl Eq for Hash {} -impl Default for Hash { - fn default() -> Self { Hash([0; 32], PhantomData) } -} impl PartialOrd for Hash { fn partial_cmp(&self, other: &Hash) -> Option { Some(cmp::Ord::cmp(self, other)) @@ -136,112 +145,41 @@ impl core::hash::Hash for Hash { crate::internal_macros::hash_trait_impls!(256, false, T: Tag); -fn from_engine(e: sha256::HashEngine) -> Hash -where - T: Tag, -{ - Hash::from_byte_array(sha256::Hash::from_engine(e).to_byte_array()) -} - -/// Macro used to define a newtype tagged hash. -/// -/// This macro creates two types: -/// -/// * a tag struct -/// * a hash wrapper -/// -/// The syntax is: -/// -/// ``` -/// # #[allow(deprecated)] { -/// # use bitcoin_hashes::sha256t_hash_newtype; -/// sha256t_hash_newtype! { -/// /// Optional documentation details here. -/// /// Summary is always generated. -/// pub struct FooTag = hash_str("foo"); -/// -/// /// A foo hash. -/// // Direction works just like the hash_newtype! macro. -/// #[hash_newtype(backward)] -/// pub struct FooHash(_); -/// } -/// # } -/// ``` -/// -/// The structs must be defined in this order - tag first, then hash type. `hash_str` marker -/// says the midstate should be generated by hashing the supplied string in a way described in -/// BIP-341. Alternatively, you can supply `hash_bytes` to hash raw bytes. If you have the midstate -/// already pre-computed and prefer **compiler** performance to readability you may use -/// `raw(MIDSTATE_BYTES, HASHED_BYTES_LENGTH)` instead. -/// -/// Both visibility modifiers and attributes are optional and passed to inner structs (excluding -/// `#[hash_newtype(...)]`). The attributes suffer same compiler performance limitations as in -/// [`hash_newtype`] macro. -/// -/// [`hash_newtype`]: crate::hash_newtype -#[macro_export] -#[deprecated(since = "0.15.0", note = "use `sha256_tag!` combined with `hash_newtype!` instead")] -macro_rules! 
sha256t_hash_newtype { - ($(#[$($tag_attr:tt)*])* $tag_vis:vis struct $tag:ident = $constructor:tt($($tag_value:tt)+); $(#[$($hash_attr:tt)*])* $hash_vis:vis struct $hash_name:ident($(#[$($field_attr:tt)*])* _);) => { - $crate::sha256t_tag_struct!($tag_vis, $tag, stringify!($hash_name), $(#[$($tag_attr)*])*); - - impl $crate::sha256t::Tag for $tag { - #[inline] - fn engine() -> $crate::sha256::HashEngine { - const MIDSTATE: $crate::sha256::Midstate = $crate::sha256t_tag_constructor!($constructor, $($tag_value)+); - $crate::sha256::HashEngine::from_midstate(MIDSTATE) - } - } - - $crate::hash_newtype! { - $(#[$($hash_attr)*])* - $hash_vis struct $hash_name($(#[$($field_attr)*])* $crate::sha256t::Hash<$tag>); - } +/// Engine to compute SHA256t hash function. +#[derive(Debug)] +pub struct HashEngine(sha256::HashEngine, PhantomData); - impl $hash_name { - /// Constructs a new engine. - #[allow(unused)] // the user of macro may not need this - pub fn engine() -> <$hash_name as $crate::GeneralHash>::Engine { - <$hash_name as $crate::GeneralHash>::engine() - } - - /// Produces a hash from the current state of a given engine. - #[allow(unused)] // the user of macro may not need this - pub fn from_engine(e: <$hash_name as $crate::GeneralHash>::Engine) -> Self { - <$hash_name as $crate::GeneralHash>::from_engine(e) - } - - /// Hashes some bytes. - #[allow(unused)] // the user of macro may not need this - pub fn hash(data: &[u8]) -> Self { - <$hash_name as $crate::GeneralHash>::hash(data) - } - - /// Hashes all the byte slices retrieved from the iterator together. - #[allow(unused)] // the user of macro may not need this - pub fn hash_byte_chunks(byte_slices: I) -> Self - where - B: AsRef<[u8]>, - I: IntoIterator, - { - <$hash_name as $crate::GeneralHash>::hash_byte_chunks(byte_slices) - } - } +impl Default for HashEngine { + fn default() -> Self { + let tagged = sha256::HashEngine::from_midstate(T::MIDSTATE); + HashEngine(tagged, PhantomData) + } +} - impl $crate::GeneralHash for $hash_name { - type Engine = <$crate::sha256t::Hash<$tag> as $crate::GeneralHash>::Engine; +impl Clone for HashEngine { + fn clone(&self) -> Self { Self(self.0.clone(), PhantomData) } +} - fn engine() -> Self::Engine { - <$crate::sha256t::Hash<$tag> as $crate::GeneralHash>::engine() - } +impl crate::HashEngine for HashEngine { + type Hash = Hash; + type Bytes = [u8; 32]; + const BLOCK_SIZE: usize = 64; // Same as sha256::HashEngine::BLOCK_SIZE; - fn from_engine(e: Self::Engine) -> $hash_name { - Self::from(<$crate::sha256t::Hash<$tag> as $crate::GeneralHash>::from_engine(e)) - } - } - } + fn input(&mut self, data: &[u8]) { self.0.input(data) } + fn n_bytes_hashed(&self) -> u64 { self.0.n_bytes_hashed() } + fn finalize(self) -> Self::Hash { Hash::from_engine(self) } } +crate::internal_macros::impl_write!( + HashEngine, + |us: &mut HashEngine, buf| { + us.input(buf); + Ok(buf.len()) + }, + |_us| { Ok(()) }, + T: crate::sha256t::Tag +); + // Workaround macros being unavailable in attributes. #[doc(hidden)] #[macro_export] @@ -281,10 +219,12 @@ mod tests { // The digest created by sha256 hashing `&[0]` starting with `TEST_MIDSTATE`. #[cfg(feature = "alloc")] + #[cfg(feature = "hex")] const HASH_ZERO_BACKWARD: &str = "29589d5122ec666ab5b4695070b6debc63881a4f85d88d93ddc90078038213ed"; // And the same thing, forward. 
#[cfg(feature = "alloc")] + #[cfg(feature = "hex")] const HASH_ZERO_FORWARD: &str = "ed1382037800c9dd938dd8854f1a8863bcdeb6705069b4b56a66ec22519d5829"; @@ -292,19 +232,17 @@ mod tests { pub struct TestHashTag; impl sha256t::Tag for TestHashTag { - fn engine() -> sha256::HashEngine { - // The TapRoot TapLeaf midstate. - let midstate = sha256::Midstate::new(TEST_MIDSTATE, 64); - sha256::HashEngine::from_midstate(midstate) - } + const MIDSTATE: sha256::Midstate = sha256::Midstate::new(TEST_MIDSTATE, 64); } // We support manually implementing `Tag` and creating a tagged hash from it. #[cfg(feature = "alloc")] + #[cfg(feature = "hex")] pub type TestHash = sha256t::Hash; #[test] #[cfg(feature = "alloc")] + #[cfg(feature = "hex")] fn manually_created_sha256t_hash_type() { use alloc::string::ToString; diff --git a/hashes/src/sha384.rs b/hashes/src/sha384/mod.rs similarity index 97% rename from hashes/src/sha384.rs rename to hashes/src/sha384/mod.rs index 18f23b40a9..3d6538af08 100644 --- a/hashes/src/sha384.rs +++ b/hashes/src/sha384/mod.rs @@ -17,7 +17,7 @@ fn from_engine(e: HashEngine) -> Hash { } /// Engine to compute SHA384 hash function. -#[derive(Clone)] +#[derive(Debug, Clone)] pub struct HashEngine(sha512::HashEngine); impl HashEngine { @@ -30,17 +30,20 @@ impl Default for HashEngine { } impl crate::HashEngine for HashEngine { + type Hash = Hash; + type Bytes = [u8; 48]; const BLOCK_SIZE: usize = sha512::BLOCK_SIZE; fn n_bytes_hashed(&self) -> u64 { self.0.n_bytes_hashed() } - fn input(&mut self, inp: &[u8]) { self.0.input(inp); } + fn finalize(self) -> Self::Hash { Hash::from_engine(self) } } #[cfg(test)] mod tests { #[test] #[cfg(feature = "alloc")] + #[cfg(feature = "hex")] fn test() { use alloc::string::ToString; diff --git a/hashes/src/sha512/benches.rs b/hashes/src/sha512/benches.rs new file mode 100644 index 0000000000..56b71800be --- /dev/null +++ b/hashes/src/sha512/benches.rs @@ -0,0 +1,33 @@ +use test::Bencher; + +use crate::{sha512, Hash, HashEngine}; + +#[bench] +pub fn sha512_10(bh: &mut Bencher) { + let mut engine = sha512::Hash::engine(); + let bytes = [1u8; 10]; + bh.iter(|| { + engine.input(&bytes); + }); + bh.bytes = bytes.len() as u64; +} + +#[bench] +pub fn sha512_1k(bh: &mut Bencher) { + let mut engine = sha512::Hash::engine(); + let bytes = [1u8; 1024]; + bh.iter(|| { + engine.input(&bytes); + }); + bh.bytes = bytes.len() as u64; +} + +#[bench] +pub fn sha512_64k(bh: &mut Bencher) { + let mut engine = sha512::Hash::engine(); + let bytes = [1u8; 65536]; + bh.iter(|| { + engine.input(&bytes); + }); + bh.bytes = bytes.len() as u64; +} diff --git a/hashes/src/sha512.rs b/hashes/src/sha512/crypto.rs similarity index 52% rename from hashes/src/sha512.rs rename to hashes/src/sha512/crypto.rs index 1ecd563df8..6bf4f5ce26 100644 --- a/hashes/src/sha512.rs +++ b/hashes/src/sha512/crypto.rs @@ -1,125 +1,8 @@ // SPDX-License-Identifier: CC0-1.0 -//! SHA512 implementation. +use internals::slice::SliceExt; -use core::cmp; - -use crate::{incomplete_block_len, HashEngine as _}; - -crate::internal_macros::general_hash_type! { - 512, - false, - "Output of the SHA512 hash function." 
-} - -#[cfg(not(hashes_fuzz))] -pub(crate) fn from_engine(mut e: HashEngine) -> Hash { - // pad buffer with a single 1-bit then all 0s, until there are exactly 16 bytes remaining - let n_bytes_hashed = e.bytes_hashed; - - let zeroes = [0; BLOCK_SIZE - 16]; - e.input(&[0x80]); - if incomplete_block_len(&e) > zeroes.len() { - e.input(&zeroes); - } - let pad_length = zeroes.len() - incomplete_block_len(&e); - e.input(&zeroes[..pad_length]); - debug_assert_eq!(incomplete_block_len(&e), zeroes.len()); - - e.input(&[0; 8]); - e.input(&(8 * n_bytes_hashed).to_be_bytes()); - debug_assert_eq!(incomplete_block_len(&e), 0); - - Hash(e.midstate()) -} - -#[cfg(hashes_fuzz)] -pub(crate) fn from_engine(e: HashEngine) -> Hash { - let mut hash = e.midstate(); - hash[0] ^= 0xff; // Make this distinct from SHA-256 - Hash(hash) -} - -pub(crate) const BLOCK_SIZE: usize = 128; - -/// Engine to compute SHA512 hash function. -#[derive(Clone)] -pub struct HashEngine { - h: [u64; 8], - bytes_hashed: u64, - buffer: [u8; BLOCK_SIZE], -} - -impl HashEngine { - /// Constructs a new SHA512 hash engine. - #[rustfmt::skip] - pub const fn new() -> Self { - Self { - h: [ - 0x6a09e667f3bcc908, 0xbb67ae8584caa73b, 0x3c6ef372fe94f82b, 0xa54ff53a5f1d36f1, - 0x510e527fade682d1, 0x9b05688c2b3e6c1f, 0x1f83d9abfb41bd6b, 0x5be0cd19137e2179, - ], - bytes_hashed: 0, - buffer: [0; BLOCK_SIZE], - } - } -} - -impl Default for HashEngine { - fn default() -> Self { Self::new() } -} - -impl HashEngine { - #[cfg(not(hashes_fuzz))] - pub(crate) fn midstate(&self) -> [u8; 64] { - let mut ret = [0; 64]; - for (val, ret_bytes) in self.h.iter().zip(ret.chunks_exact_mut(8)) { - ret_bytes.copy_from_slice(&val.to_be_bytes()); - } - ret - } - - #[cfg(hashes_fuzz)] - pub(crate) fn midstate(&self) -> [u8; 64] { - let mut ret = [0; 64]; - ret.copy_from_slice(&self.buffer[..64]); - ret - } - - /// Constructs a new hash engine suitable for use constructing a `sha512_256::HashEngine`. - #[rustfmt::skip] - pub(crate) const fn sha512_256() -> Self { - HashEngine { - h: [ - 0x22312194fc2bf72c, 0x9f555fa3c84c64c2, 0x2393b86b6f53b151, 0x963877195940eabd, - 0x96283ee2a88effe3, 0xbe5e1e2553863992, 0x2b0199fc2c85b8aa, 0x0eb72ddc81c52ca2, - ], - bytes_hashed: 0, - buffer: [0; BLOCK_SIZE], - } - } - - /// Constructs a new hash engine suitable for constructing a `sha384::HashEngine`. 
- #[rustfmt::skip] - pub(crate) const fn sha384() -> Self { - HashEngine { - h: [ - 0xcbbb9d5dc1059ed8, 0x629a292a367cd507, 0x9159015a3070dd17, 0x152fecd8f70e5939, - 0x67332667ffc00b31, 0x8eb44a8768581511, 0xdb0c2e0d64f98fa7, 0x47b5481dbefa4fa4, - ], - bytes_hashed: 0, - buffer: [0; BLOCK_SIZE], - } - } -} - -impl crate::HashEngine for HashEngine { - const BLOCK_SIZE: usize = 128; - - fn n_bytes_hashed(&self) -> u64 { self.bytes_hashed } - - crate::internal_macros::engine_input_impl!(); -} +use super::{HashEngine, BLOCK_SIZE}; #[allow(non_snake_case)] fn Ch(x: u64, y: u64, z: u64) -> u64 { z ^ (x & (y ^ z)) } @@ -194,8 +77,8 @@ impl HashEngine { debug_assert_eq!(self.buffer.len(), BLOCK_SIZE); let mut w = [0u64; 16]; - for (w_val, buff_bytes) in w.iter_mut().zip(self.buffer.chunks_exact(8)) { - *w_val = u64::from_be_bytes(buff_bytes.try_into().expect("8 byte slice")); + for (w_val, buff_bytes) in w.iter_mut().zip(self.buffer.bitcoin_as_chunks().0) { + *w_val = u64::from_be_bytes(*buff_bytes); } let mut a = self.h[0]; @@ -302,152 +185,3 @@ impl HashEngine { self.h[7] = self.h[7].wrapping_add(h); } } - -#[cfg(test)] -mod tests { - #[test] - #[cfg(feature = "alloc")] - fn test() { - use alloc::string::ToString; - - use crate::{sha512, HashEngine}; - - #[derive(Clone)] - struct Test { - input: &'static str, - output: [u8; 64], - output_str: &'static str, - } - - #[rustfmt::skip] - let tests = [ - // Test vectors computed with `sha512sum` - Test { - input: "", - output: [ - 0xcf, 0x83, 0xe1, 0x35, 0x7e, 0xef, 0xb8, 0xbd, - 0xf1, 0x54, 0x28, 0x50, 0xd6, 0x6d, 0x80, 0x07, - 0xd6, 0x20, 0xe4, 0x05, 0x0b, 0x57, 0x15, 0xdc, - 0x83, 0xf4, 0xa9, 0x21, 0xd3, 0x6c, 0xe9, 0xce, - 0x47, 0xd0, 0xd1, 0x3c, 0x5d, 0x85, 0xf2, 0xb0, - 0xff, 0x83, 0x18, 0xd2, 0x87, 0x7e, 0xec, 0x2f, - 0x63, 0xb9, 0x31, 0xbd, 0x47, 0x41, 0x7a, 0x81, - 0xa5, 0x38, 0x32, 0x7a, 0xf9, 0x27, 0xda, 0x3e, - ], - output_str: "cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e" - }, - Test { - input: "The quick brown fox jumps over the lazy dog", - output: [ - 0x07, 0xe5, 0x47, 0xd9, 0x58, 0x6f, 0x6a, 0x73, - 0xf7, 0x3f, 0xba, 0xc0, 0x43, 0x5e, 0xd7, 0x69, - 0x51, 0x21, 0x8f, 0xb7, 0xd0, 0xc8, 0xd7, 0x88, - 0xa3, 0x09, 0xd7, 0x85, 0x43, 0x6b, 0xbb, 0x64, - 0x2e, 0x93, 0xa2, 0x52, 0xa9, 0x54, 0xf2, 0x39, - 0x12, 0x54, 0x7d, 0x1e, 0x8a, 0x3b, 0x5e, 0xd6, - 0xe1, 0xbf, 0xd7, 0x09, 0x78, 0x21, 0x23, 0x3f, - 0xa0, 0x53, 0x8f, 0x3d, 0xb8, 0x54, 0xfe, 0xe6, - ], - output_str: "07e547d9586f6a73f73fbac0435ed76951218fb7d0c8d788a309d785436bbb642e93a252a954f23912547d1e8a3b5ed6e1bfd7097821233fa0538f3db854fee6", - }, - Test { - input: "The quick brown fox jumps over the lazy dog.", - output: [ - 0x91, 0xea, 0x12, 0x45, 0xf2, 0x0d, 0x46, 0xae, - 0x9a, 0x03, 0x7a, 0x98, 0x9f, 0x54, 0xf1, 0xf7, - 0x90, 0xf0, 0xa4, 0x76, 0x07, 0xee, 0xb8, 0xa1, - 0x4d, 0x12, 0x89, 0x0c, 0xea, 0x77, 0xa1, 0xbb, - 0xc6, 0xc7, 0xed, 0x9c, 0xf2, 0x05, 0xe6, 0x7b, - 0x7f, 0x2b, 0x8f, 0xd4, 0xc7, 0xdf, 0xd3, 0xa7, - 0xa8, 0x61, 0x7e, 0x45, 0xf3, 0xc4, 0x63, 0xd4, - 0x81, 0xc7, 0xe5, 0x86, 0xc3, 0x9a, 0xc1, 0xed, - ], - output_str: "91ea1245f20d46ae9a037a989f54f1f790f0a47607eeb8a14d12890cea77a1bbc6c7ed9cf205e67b7f2b8fd4c7dfd3a7a8617e45f3c463d481c7e586c39ac1ed", - }, - ]; - - for test in tests { - // Hash through high-level API, check hex encoding/decoding - let hash = sha512::Hash::hash(test.input.as_bytes()); - assert_eq!(hash, test.output_str.parse::().expect("parse hex")); - 
assert_eq!(hash.as_byte_array(), &test.output); - assert_eq!(hash.to_string(), test.output_str); - - // Hash through engine, checking that we can input byte by byte - let mut engine = sha512::Hash::engine(); - for ch in test.input.as_bytes() { - engine.input(&[*ch]); - } - let manual_hash = sha512::Hash::from_engine(engine); - assert_eq!(hash, manual_hash); - assert_eq!(hash.to_byte_array(), test.output); - } - } - - #[test] - #[cfg(feature = "serde")] - fn sha512_serde() { - use serde_test::{assert_tokens, Configure, Token}; - - use crate::sha512; - - #[rustfmt::skip] - static HASH_BYTES: [u8; 64] = [ - 0x8b, 0x41, 0xe1, 0xb7, 0x8a, 0xd1, 0x15, 0x21, - 0x11, 0x3c, 0x52, 0xff, 0x18, 0x2a, 0x1b, 0x8e, - 0x0a, 0x19, 0x57, 0x54, 0xaa, 0x52, 0x7f, 0xcd, - 0x00, 0xa4, 0x11, 0x62, 0x0b, 0x46, 0xf2, 0x0f, - 0xff, 0xfb, 0x80, 0x88, 0xcc, 0xf8, 0x54, 0x97, - 0x12, 0x1a, 0xd4, 0x49, 0x9e, 0x08, 0x45, 0xb8, - 0x76, 0xf6, 0xdd, 0x66, 0x40, 0x08, 0x8a, 0x2f, - 0x0b, 0x2d, 0x8a, 0x60, 0x0b, 0xdf, 0x4c, 0x0c, - ]; - - let hash = sha512::Hash::from_slice(&HASH_BYTES).expect("right number of bytes"); - assert_tokens(&hash.compact(), &[Token::BorrowedBytes(&HASH_BYTES[..])]); - assert_tokens( - &hash.readable(), - &[Token::Str( - "8b41e1b78ad11521113c52ff182a1b8e0a195754aa527fcd00a411620b46f20f\ - fffb8088ccf85497121ad4499e0845b876f6dd6640088a2f0b2d8a600bdf4c0c", - )], - ); - } -} - -#[cfg(bench)] -mod benches { - use test::Bencher; - - use crate::{sha512, Hash, HashEngine}; - - #[bench] - pub fn sha512_10(bh: &mut Bencher) { - let mut engine = sha512::Hash::engine(); - let bytes = [1u8; 10]; - bh.iter(|| { - engine.input(&bytes); - }); - bh.bytes = bytes.len() as u64; - } - - #[bench] - pub fn sha512_1k(bh: &mut Bencher) { - let mut engine = sha512::Hash::engine(); - let bytes = [1u8; 1024]; - bh.iter(|| { - engine.input(&bytes); - }); - bh.bytes = bytes.len() as u64; - } - - #[bench] - pub fn sha512_64k(bh: &mut Bencher) { - let mut engine = sha512::Hash::engine(); - let bytes = [1u8; 65536]; - bh.iter(|| { - engine.input(&bytes); - }); - bh.bytes = bytes.len() as u64; - } -} diff --git a/hashes/src/sha512/mod.rs b/hashes/src/sha512/mod.rs new file mode 100644 index 0000000000..31203aba4d --- /dev/null +++ b/hashes/src/sha512/mod.rs @@ -0,0 +1,132 @@ +// SPDX-License-Identifier: CC0-1.0 + +//! SHA512 implementation. + +use internals::slice::SliceExt; + +#[cfg(bench)] +mod benches; +mod crypto; +#[cfg(bench)] +mod tests; + +use core::cmp; + +use crate::{incomplete_block_len, HashEngine as _}; + +crate::internal_macros::general_hash_type! { + 512, + false, + "Output of the SHA512 hash function." +} + +#[cfg(not(hashes_fuzz))] +pub(crate) fn from_engine(mut e: HashEngine) -> Hash { + // pad buffer with a single 1-bit then all 0s, until there are exactly 16 bytes remaining + let n_bytes_hashed = e.bytes_hashed; + + let zeroes = [0; BLOCK_SIZE - 16]; + e.input(&[0x80]); + if incomplete_block_len(&e) > zeroes.len() { + e.input(&zeroes); + } + let pad_length = zeroes.len() - incomplete_block_len(&e); + e.input(&zeroes[..pad_length]); + debug_assert_eq!(incomplete_block_len(&e), zeroes.len()); + + e.input(&[0; 8]); + e.input(&(8 * n_bytes_hashed).to_be_bytes()); + debug_assert_eq!(incomplete_block_len(&e), 0); + + Hash(e.midstate()) +} + +#[cfg(hashes_fuzz)] +pub(crate) fn from_engine(e: HashEngine) -> Hash { + let mut hash = e.midstate(); + hash[0] ^= 0xff; // Make this distinct from SHA-256 + Hash(hash) +} + +pub(crate) const BLOCK_SIZE: usize = 128; + +/// Engine to compute SHA512 hash function. 
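The padding logic in the SHA512 `from_engine` above reserves the last 16 bytes of the final 128-byte block for the big-endian bit length. A small self-contained check of that arithmetic; the helper below is illustrative only and not part of the crate:

```rust
// Illustrative helper: number of zero bytes inserted between the 0x80 marker
// and the 16-byte length field so the message ends on a 128-byte boundary.
fn sha512_zero_pad_len(msg_len: u64) -> u64 {
    const BLOCK: u64 = 128;
    // bytes already used in the partial block once the 0x80 marker is appended
    let used = (msg_len + 1) % BLOCK;
    (BLOCK - 16 + BLOCK - used) % BLOCK
}

fn main() {
    for msg_len in [0u64, 111, 112, 127, 128, 1000] {
        let pad = sha512_zero_pad_len(msg_len);
        // message + 0x80 + zero pad + 16-byte length is a whole number of blocks
        assert_eq!((msg_len + 1 + pad + 16) % 128, 0);
    }
}
```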
+#[derive(Debug, Clone)] +pub struct HashEngine { + h: [u64; 8], + bytes_hashed: u64, + buffer: [u8; BLOCK_SIZE], +} + +impl HashEngine { + /// Constructs a new SHA512 hash engine. + #[rustfmt::skip] + pub const fn new() -> Self { + Self { + h: [ + 0x6a09e667f3bcc908, 0xbb67ae8584caa73b, 0x3c6ef372fe94f82b, 0xa54ff53a5f1d36f1, + 0x510e527fade682d1, 0x9b05688c2b3e6c1f, 0x1f83d9abfb41bd6b, 0x5be0cd19137e2179, + ], + bytes_hashed: 0, + buffer: [0; BLOCK_SIZE], + } + } +} + +impl Default for HashEngine { + fn default() -> Self { Self::new() } +} + +impl HashEngine { + #[cfg(not(hashes_fuzz))] + pub(crate) fn midstate(&self) -> [u8; 64] { + let mut ret = [0; 64]; + for (val, ret_bytes) in self.h.iter().zip(ret.bitcoin_as_chunks_mut().0) { + *ret_bytes = val.to_be_bytes(); + } + ret + } + + #[cfg(hashes_fuzz)] + pub(crate) fn midstate(&self) -> [u8; 64] { + let mut ret = [0; 64]; + ret.copy_from_slice(&self.buffer[..64]); + ret + } + + /// Constructs a new hash engine suitable for use constructing a `sha512_256::HashEngine`. + #[rustfmt::skip] + pub(crate) const fn sha512_256() -> Self { + HashEngine { + h: [ + 0x22312194fc2bf72c, 0x9f555fa3c84c64c2, 0x2393b86b6f53b151, 0x963877195940eabd, + 0x96283ee2a88effe3, 0xbe5e1e2553863992, 0x2b0199fc2c85b8aa, 0x0eb72ddc81c52ca2, + ], + bytes_hashed: 0, + buffer: [0; BLOCK_SIZE], + } + } + + /// Constructs a new hash engine suitable for constructing a `sha384::HashEngine`. + #[rustfmt::skip] + pub(crate) const fn sha384() -> Self { + HashEngine { + h: [ + 0xcbbb9d5dc1059ed8, 0x629a292a367cd507, 0x9159015a3070dd17, 0x152fecd8f70e5939, + 0x67332667ffc00b31, 0x8eb44a8768581511, 0xdb0c2e0d64f98fa7, 0x47b5481dbefa4fa4, + ], + bytes_hashed: 0, + buffer: [0; BLOCK_SIZE], + } + } +} + +impl crate::HashEngine for HashEngine { + type Hash = Hash; + type Bytes = [u8; 64]; + const BLOCK_SIZE: usize = 128; + + fn n_bytes_hashed(&self) -> u64 { self.bytes_hashed } + crate::internal_macros::engine_input_impl!(); + fn finalize(self) -> Self::Hash { Hash::from_engine(self) } +} diff --git a/hashes/src/sha512/tests.rs b/hashes/src/sha512/tests.rs new file mode 100644 index 0000000000..ecf78582b2 --- /dev/null +++ b/hashes/src/sha512/tests.rs @@ -0,0 +1,109 @@ +#[test] +#[cfg(feature = "alloc")] +#[cfg(feature = "hex")] +fn test() { + use alloc::string::ToString; + + use crate::{sha512, HashEngine}; + + #[derive(Clone)] + struct Test { + input: &'static str, + output: [u8; 64], + output_str: &'static str, + } + + #[rustfmt::skip] + let tests = [ + // Test vectors computed with `sha512sum` + Test { + input: "", + output: [ + 0xcf, 0x83, 0xe1, 0x35, 0x7e, 0xef, 0xb8, 0xbd, + 0xf1, 0x54, 0x28, 0x50, 0xd6, 0x6d, 0x80, 0x07, + 0xd6, 0x20, 0xe4, 0x05, 0x0b, 0x57, 0x15, 0xdc, + 0x83, 0xf4, 0xa9, 0x21, 0xd3, 0x6c, 0xe9, 0xce, + 0x47, 0xd0, 0xd1, 0x3c, 0x5d, 0x85, 0xf2, 0xb0, + 0xff, 0x83, 0x18, 0xd2, 0x87, 0x7e, 0xec, 0x2f, + 0x63, 0xb9, 0x31, 0xbd, 0x47, 0x41, 0x7a, 0x81, + 0xa5, 0x38, 0x32, 0x7a, 0xf9, 0x27, 0xda, 0x3e, + ], + output_str: "cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e" + }, + Test { + input: "The quick brown fox jumps over the lazy dog", + output: [ + 0x07, 0xe5, 0x47, 0xd9, 0x58, 0x6f, 0x6a, 0x73, + 0xf7, 0x3f, 0xba, 0xc0, 0x43, 0x5e, 0xd7, 0x69, + 0x51, 0x21, 0x8f, 0xb7, 0xd0, 0xc8, 0xd7, 0x88, + 0xa3, 0x09, 0xd7, 0x85, 0x43, 0x6b, 0xbb, 0x64, + 0x2e, 0x93, 0xa2, 0x52, 0xa9, 0x54, 0xf2, 0x39, + 0x12, 0x54, 0x7d, 0x1e, 0x8a, 0x3b, 0x5e, 0xd6, + 0xe1, 0xbf, 0xd7, 0x09, 0x78, 0x21, 
0x23, 0x3f, + 0xa0, 0x53, 0x8f, 0x3d, 0xb8, 0x54, 0xfe, 0xe6, + ], + output_str: "07e547d9586f6a73f73fbac0435ed76951218fb7d0c8d788a309d785436bbb642e93a252a954f23912547d1e8a3b5ed6e1bfd7097821233fa0538f3db854fee6", + }, + Test { + input: "The quick brown fox jumps over the lazy dog.", + output: [ + 0x91, 0xea, 0x12, 0x45, 0xf2, 0x0d, 0x46, 0xae, + 0x9a, 0x03, 0x7a, 0x98, 0x9f, 0x54, 0xf1, 0xf7, + 0x90, 0xf0, 0xa4, 0x76, 0x07, 0xee, 0xb8, 0xa1, + 0x4d, 0x12, 0x89, 0x0c, 0xea, 0x77, 0xa1, 0xbb, + 0xc6, 0xc7, 0xed, 0x9c, 0xf2, 0x05, 0xe6, 0x7b, + 0x7f, 0x2b, 0x8f, 0xd4, 0xc7, 0xdf, 0xd3, 0xa7, + 0xa8, 0x61, 0x7e, 0x45, 0xf3, 0xc4, 0x63, 0xd4, + 0x81, 0xc7, 0xe5, 0x86, 0xc3, 0x9a, 0xc1, 0xed, + ], + output_str: "91ea1245f20d46ae9a037a989f54f1f790f0a47607eeb8a14d12890cea77a1bbc6c7ed9cf205e67b7f2b8fd4c7dfd3a7a8617e45f3c463d481c7e586c39ac1ed", + }, + ]; + + for test in tests { + // Hash through high-level API, check hex encoding/decoding + let hash = sha512::Hash::hash(test.input.as_bytes()); + assert_eq!(hash, test.output_str.parse::().expect("parse hex")); + assert_eq!(hash.as_byte_array(), &test.output); + assert_eq!(hash.to_string(), test.output_str); + + // Hash through engine, checking that we can input byte by byte + let mut engine = sha512::Hash::engine(); + for ch in test.input.as_bytes() { + engine.input(&[*ch]); + } + let manual_hash = sha512::Hash::from_engine(engine); + assert_eq!(hash, manual_hash); + assert_eq!(hash.to_byte_array(), test.output); + } +} + +#[test] +#[cfg(feature = "serde")] +fn sha512_serde() { + use serde_test::{assert_tokens, Configure, Token}; + + use crate::sha512; + + #[rustfmt::skip] + static HASH_BYTES: [u8; 64] = [ + 0x8b, 0x41, 0xe1, 0xb7, 0x8a, 0xd1, 0x15, 0x21, + 0x11, 0x3c, 0x52, 0xff, 0x18, 0x2a, 0x1b, 0x8e, + 0x0a, 0x19, 0x57, 0x54, 0xaa, 0x52, 0x7f, 0xcd, + 0x00, 0xa4, 0x11, 0x62, 0x0b, 0x46, 0xf2, 0x0f, + 0xff, 0xfb, 0x80, 0x88, 0xcc, 0xf8, 0x54, 0x97, + 0x12, 0x1a, 0xd4, 0x49, 0x9e, 0x08, 0x45, 0xb8, + 0x76, 0xf6, 0xdd, 0x66, 0x40, 0x08, 0x8a, 0x2f, + 0x0b, 0x2d, 0x8a, 0x60, 0x0b, 0xdf, 0x4c, 0x0c, + ]; + + let hash = sha512::Hash::from_slice(&HASH_BYTES).expect("right number of bytes"); + assert_tokens(&hash.compact(), &[Token::BorrowedBytes(&HASH_BYTES[..])]); + assert_tokens( + &hash.readable(), + &[Token::Str( + "8b41e1b78ad11521113c52ff182a1b8e0a195754aa527fcd00a411620b46f20f\ + fffb8088ccf85497121ad4499e0845b876f6dd6640088a2f0b2d8a600bdf4c0c", + )], + ); +} diff --git a/hashes/src/sha512_256.rs b/hashes/src/sha512_256/mod.rs similarity index 97% rename from hashes/src/sha512_256.rs rename to hashes/src/sha512_256/mod.rs index cb1d5d372f..cf12d46047 100644 --- a/hashes/src/sha512_256.rs +++ b/hashes/src/sha512_256/mod.rs @@ -27,7 +27,7 @@ fn from_engine(e: HashEngine) -> Hash { /// the output to 256 bits. It has different initial constants than sha512 so it /// produces an entirely different hash compared to sha512. More information at /// . 
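As the doc comment above notes, SHA512/256 starts from its own initial constants (the `sha512::HashEngine::sha512_256()` constructor shown in this diff), so its output is not simply a truncation of SHA512. A tiny sketch of that distinction, assuming the crate API as shown here:

```rust
use bitcoin_hashes::{sha512, sha512_256};

fn main() {
    let data = b"abc";
    let full = sha512::Hash::hash(data).to_byte_array();
    let truncated_iv = sha512_256::Hash::hash(data).to_byte_array();

    // Different IVs: SHA512/256 is not the first 32 bytes of SHA512.
    assert_ne!(truncated_iv[..], full[..32]);
}
```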
-#[derive(Clone)] +#[derive(Debug, Clone)] pub struct HashEngine(sha512::HashEngine); impl HashEngine { @@ -40,17 +40,20 @@ impl Default for HashEngine { } impl crate::HashEngine for HashEngine { + type Hash = Hash; + type Bytes = [u8; 64]; const BLOCK_SIZE: usize = sha512::BLOCK_SIZE; fn n_bytes_hashed(&self) -> u64 { self.0.n_bytes_hashed() } - fn input(&mut self, inp: &[u8]) { self.0.input(inp); } + fn finalize(self) -> Self::Hash { Hash::from_engine(self) } } #[cfg(test)] mod tests { #[test] #[cfg(feature = "alloc")] + #[cfg(feature = "hex")] fn test() { use alloc::string::ToString; diff --git a/hashes/src/siphash24.rs b/hashes/src/siphash24/mod.rs similarity index 97% rename from hashes/src/siphash24.rs rename to hashes/src/siphash24/mod.rs index b751604b64..50abe8575c 100644 --- a/hashes/src/siphash24.rs +++ b/hashes/src/siphash24/mod.rs @@ -121,6 +121,8 @@ impl HashEngine { } impl crate::HashEngine for HashEngine { + type Hash = Hash; + type Bytes = [u8; 8]; const BLOCK_SIZE: usize = 8; #[inline] @@ -165,9 +167,14 @@ impl crate::HashEngine for HashEngine { } fn n_bytes_hashed(&self) -> u64 { self.bytes_hashed } + + fn finalize(self) -> Self::Hash { Hash::from_engine(self) } } impl Hash { + /// Constructs a new SipHash24 engine with keys. + pub fn engine(k0: u64, k1: u64) -> HashEngine { HashEngine::with_keys(k0, k1) } + /// Produces a hash from the current state of a given engine. pub fn from_engine(e: HashEngine) -> Hash { from_engine(e) } @@ -242,7 +249,7 @@ mod tests { use super::*; #[test] - fn test_siphash_2_4() { + fn siphash_2_4() { #[rustfmt::skip] let vecs: [[u8; 8]; 64] = [ [0x31, 0x0e, 0x0e, 0xdd, 0x47, 0xdb, 0x6f, 0x72], @@ -318,7 +325,7 @@ mod tests { for i in 0..64 { vin[i] = i as u8; - let vec = Hash::from_slice(&vecs[i]).unwrap(); + let vec = Hash::from_byte_array(vecs[i]); let out = Hash::hash_with_keys(k0, k1, &vin[0..i]); assert_eq!(vec, out, "vec #{}", i); diff --git a/hashes/tests/io.rs b/hashes/tests/io.rs deleted file mode 100644 index 9151bba30a..0000000000 --- a/hashes/tests/io.rs +++ /dev/null @@ -1,129 +0,0 @@ -// SPDX-License-Identifier: CC0-1.0 - -//! Test the `bitcoin-io` implementations. - -#![cfg(feature = "bitcoin-io")] -#![cfg(feature = "hex")] - -use bitcoin_hashes::{ - hash160, hmac, ripemd160, sha1, sha256, sha256d, sha384, sha512, sha512_256, siphash24, - GeneralHash as _, -}; -use bitcoin_io::Write; - -macro_rules! 
write_test { - ($mod:ident, $exp_empty:expr, $exp_256:expr, $exp_64k:expr,) => { - #[test] - fn $mod() { - let mut engine = $mod::Hash::engine(); - engine.write_all(&[]).unwrap(); - assert_eq!(format!("{}", $mod::Hash::from_engine(engine)), $exp_empty); - - let mut engine = $mod::Hash::engine(); - engine.write_all(&[1; 256]).unwrap(); - assert_eq!(format!("{}", $mod::Hash::from_engine(engine)), $exp_256); - - let mut engine = $mod::Hash::engine(); - engine.write_all(&[99; 64000]).unwrap(); - assert_eq!(format!("{}", $mod::Hash::from_engine(engine)), $exp_64k); - } - }; -} - -write_test!( - sha1, - "da39a3ee5e6b4b0d3255bfef95601890afd80709", - "ac458b067c6b021c7e9358229b636e9d1e4cb154", - "e4b66838f9f7b6f91e5be32a02ae78094df402e7", -); - -write_test!( - sha256, - "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", - "2661920f2409dd6c8adeb0c44972959f232b6429afa913845d0fd95e7e768234", - "5c5e904f5d4fd587c7a906bf846e08a927286f388c54c39213a4884695271bbc", -); - -write_test!( - sha256d, - "56944c5d3f98413ef45cf54545538103cc9f298e0575820ad3591376e2e0f65d", - "374000d830c75d10d9417e493a7652920f30efbd300e3fb092f24c28c20baf64", - "0050d4148ad7a0437ca0643fad5bf4614cd95d9ba21fde52370b37dcc3f03307", -); - -write_test!( - sha384, - "38b060a751ac96384cd9327eb1b1e36a21fdb71114be07434c0cc7bf63f6e1da274edebfe76f65fbd51ad2f14898b95b", - "82135637ef6d6dd31a20e2bc9998681a3eecaf8f8c76d45e545214de38439d9a533848ec75f53e4b1a8805709c5124d0", - "fb7511d9a98c5686f9c2f55e242397815c9229d8759451e1710b8da6861e08d52f0357176f4b74f8cad9e23ab65411c7", -); - -write_test!( - sha512, - "cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce\ - 47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e", - "57ecf739d3a7ca647639adae80a05f4f361304bfcbfa1ceba93296b096e74287\ - 45fc10c142cecdd3bb587a3dba598c072f6f78b31cc0a06a3da0105ee51f75d6", - "dd28f78c53f3bc9bd0c2dca9642a1ad402a70412f985c1f6e54fadb98ce9c458\ - 4761df8d04ed04bb734ba48dd2106bb9ea54524f1394cdd18e6da3166e71c3ee", -); - -write_test!( - sha512_256, - "c672b8d1ef56ed28ab87c3622c5114069bdd3ad7b8f9737498d0c01ecef0967a", - "8d4bb96e7956cf5f08bf5c45f7982630c46b0b022f25cbaf722ae97c06a6e7a2", - "3367646f3e264653f7dd664ac2cb6d3b96329e86ffb7a29a1082e2a4ddc9ee7a", -); - -write_test!( - ripemd160, - "9c1185a5c5e9fc54612808977ee8f548b2258d31", - "e571a1ca5b780aa52bafdb9ec852544ffca418ba", - "ddd2ecce739e823629c7d46ab18918e9c4a51c75", -); - -write_test!( - hash160, - "b472a266d0bd89c13706a4132ccfb16f7c3b9fcb", - "671356a1a874695ad3bc20cae440f4360835bd5a", - "a9608c952c8dbcc20c53803d2ca5ad31d64d9313", -); - -#[test] -fn hmac() { - let mut engine = hmac::HmacEngine::::new(&[0xde, 0xad, 0xbe, 0xef]); - engine.write_all(&[]).unwrap(); - assert_eq!( - format!("{}", hmac::Hmac::from_engine(engine)), - "bf5515149cf797955c4d3194cca42472883281951697c8375d9d9b107f384225" - ); - - let mut engine = hmac::HmacEngine::::new(&[0xde, 0xad, 0xbe, 0xef]); - engine.write_all(&[1; 256]).unwrap(); - assert_eq!( - format!("{}", hmac::Hmac::from_engine(engine)), - "59c9aca10c81c73cb4c196d94db741b6bf2050e0153d5a45f2526bff34675ac5" - ); - - let mut engine = hmac::HmacEngine::::new(&[0xde, 0xad, 0xbe, 0xef]); - engine.write_all(&[99; 64000]).unwrap(); - assert_eq!( - format!("{}", hmac::Hmac::from_engine(engine)), - "30df499717415a395379a1eaabe50038036e4abb5afc94aa55c952f4aa57be08" - ); -} - -#[test] -fn siphash24() { - let mut engine = siphash24::HashEngine::with_keys(0, 0); - engine.write_all(&[]).unwrap(); - assert_eq!(format!("{}", siphash24::Hash::from_engine(engine)), 
"d70077739d4b921e"); - - let mut engine = siphash24::HashEngine::with_keys(0, 0); - engine.write_all(&[1; 256]).unwrap(); - assert_eq!(format!("{}", siphash24::Hash::from_engine(engine)), "3a3ccefde9b5b1e3"); - - let mut engine = siphash24::HashEngine::with_keys(0, 0); - engine.write_all(&[99; 64000]).unwrap(); - assert_eq!(format!("{}", siphash24::Hash::from_engine(engine)), "ce456e4e4ecbc5bf"); -} diff --git a/hashes/tests/regression.rs b/hashes/tests/regression.rs index d40998e361..31aeec5f80 100644 --- a/hashes/tests/regression.rs +++ b/hashes/tests/regression.rs @@ -1,12 +1,14 @@ //! Regression tests for each hash type. //! //! Note that if `bitcoin-io` is enabled then we get more regression-like testing from `./io.rs`. +//! +//! Test input data and expected hashes is the same as in `io/src/hash.rs`. #![cfg(feature = "hex")] use bitcoin_hashes::{ hash160, ripemd160, sha1, sha256, sha256d, sha256t, sha384, sha512, sha512_256, siphash24, - GeneralHash as _, HashEngine as _, Hmac, HmacEngine, + HashEngine as _, HmacEngine, }; const DATA: &str = "arbitrary data to hash as a regression test"; @@ -40,10 +42,7 @@ impl_regression_test! { pub struct RegHashTag; impl sha256t::Tag for RegHashTag { - fn engine() -> sha256::HashEngine { - let midstate = sha256::Midstate::new([0xab; 32], 64); - sha256::HashEngine::from_midstate(midstate) - } + const MIDSTATE: sha256::Midstate = sha256::Midstate::new([0xab; 32], 64); } type RegHash = sha256t::Hash; @@ -56,27 +55,11 @@ fn regression_sha256t() { assert_eq!(got, want); } -#[test] -fn regression_hmac_sha256_with_default_key() { - let hash = Hmac::::hash(DATA.as_bytes()); - let got = format!("{}", hash); - let want = "58cc7ed8567bd86eba61f7ed2d5a4edab1774dc10488e57de2eb007a2d9ae82d"; - assert_eq!(got, want); -} - -#[test] -fn regression_hmac_sha512_with_default_key() { - let hash = Hmac::::hash(DATA.as_bytes()); - let got = format!("{}", hash); - let want = "5f5db2f3e1178bf19af5db38a0ed04dc5bc52d641648542886eea9b6bbec0db658ed7a5799ca18f5bc1949f39d24151a32990ee85974e40bb8a35e2288f494ce"; - assert_eq!(got, want); -} - #[test] fn regression_hmac_sha256_with_key() { - let mut engine = HmacEngine::::new(HMAC_KEY); + let mut engine = HmacEngine::::new(HMAC_KEY); engine.input(DATA.as_bytes()); - let hash = Hmac::from_engine(engine); + let hash = engine.finalize(); let got = format!("{}", hash); let want = "d159cecaf4adf90b6a641bab767e4817d3a51c414acea3682686c35ec0b37b52"; @@ -85,9 +68,9 @@ fn regression_hmac_sha256_with_key() { #[test] fn regression_hmac_sha512_with_key() { - let mut engine = HmacEngine::::new(HMAC_KEY); + let mut engine = HmacEngine::::new(HMAC_KEY); engine.input(DATA.as_bytes()); - let hash = Hmac::from_engine(engine); + let hash = engine.finalize(); let got = format!("{}", hash); let want = "8511773748f89ba22c07fb3a2981a12c1823695119de41f4a62aead6b848bd34939acf16475c35ed7956114fead3e794cc162ecd35e447a4dabc3227d55f757b"; @@ -104,3 +87,13 @@ fn regression_siphash24_with_key() { let want = "e823ed82311d601a"; assert_eq!(got, want); } + +#[test] +fn regression_sha256_hash_again() { + let hash = sha256::Hash::hash(b"Don't explain your philosophy. 
Embody it."); + let again = hash.hash_again(); + + let got = format!("{}", again); + let want = "28273103bcd88ab99e2b1007174770ff3f0ea91ee4b3ac942879ed1a2d264b4c"; + assert_eq!(got, want); +} diff --git a/internals/Cargo.toml b/internals/Cargo.toml index 3682e47971..374fe62d79 100644 --- a/internals/Cargo.toml +++ b/internals/Cargo.toml @@ -6,7 +6,7 @@ license = "CC0-1.0" repository = "https://github.com/rust-bitcoin/rust-bitcoin/" documentation = "https://docs.rs/bitcoin-internals" description = "Internal types and macros used by rust-bitcoin ecosystem" -categories = ["cryptography::cryptocurrencies"] +categories = ["cryptography::cryptocurrencies", "no-std"] keywords = ["internal"] readme = "README.md" edition = "2021" @@ -15,13 +15,13 @@ exclude = ["tests", "contrib"] [features] default = [] -std = ["alloc", "hex/std"] -alloc = ["hex/alloc"] +std = ["alloc", "hex?/std"] +alloc = ["hex?/alloc"] test-serde = ["serde", "serde_json", "bincode"] [dependencies] -hex = { package = "hex-conservative", version = "0.3.0", default-features = false } +hex = { package = "hex-conservative", version = "0.3.0", default-features = false, optional = true } serde = { version = "1.0.103", default-features = false, optional = true } # Don't enable these directly, use `test-serde` feature instead. @@ -35,4 +35,4 @@ all-features = true rustdoc-args = ["--cfg", "docsrs"] [lints.rust] -unexpected_cfgs = { level = "deny" } +unexpected_cfgs = { level = "deny", check-cfg = ['cfg(kani)'] } diff --git a/internals/src/array.rs b/internals/src/array.rs new file mode 100644 index 0000000000..becbf6b64c --- /dev/null +++ b/internals/src/array.rs @@ -0,0 +1,105 @@ +//! Contains extensions related to arrays. + +/// Extension trait for arrays. +pub trait ArrayExt { + /// The item type the array is storing. + type Item; + + /// Just like the slicing operation, this returns an array `LEN` items long at position + /// `OFFSET`. + /// + /// The correctness of this operation is compile-time checked. + /// + /// Note that unlike slicing where the second number is the end index, here the second number + /// is array length! + fn sub_array(&self) -> &[Self::Item; LEN]; + + /// Returns an item at given statically-known index. + /// + /// This is just like normal indexing except the check happens at compile time. + fn get_static(&self) -> &Self::Item { + &self.sub_array::()[0] + } + + /// Returns the first item in an array. + /// + /// Fails to compile if the array is empty. + /// + /// Note that this method's name is intentionally shadowing the `std`'s `first` method which + /// returns `Option`. The rationale is that given the known length of the array, we always know + /// that this will not return `None` so trying to keep the `std` method around is pointless. + /// Importing the trait will also cause compile failures - that's also intentional to expose + /// the places where useless checks are made. + fn first(&self) -> &Self::Item { + self.get_static::<0>() + } + + /// Splits the array into two, non-overlaping smaller arrays covering the entire range. + /// + /// This is almost equivalent to just calling [`sub_array`](Self::sub_array) twice, except it also + /// checks that the arrays don't overlap and that they cover the full range. This is very useful + /// for demonstrating correctness, especially when chained. Using this technique even revealed + /// a bug in the past. 
([#4195](https://github.com/rust-bitcoin/rust-bitcoin/issues/4195)) + fn split_array(&self) -> (&[Self::Item; LEFT], &[Self::Item; RIGHT]); + + /// Splits the array into the first element and the remaining, one element shorter, array. + /// + /// Fails to compile if the array is empty. + /// + /// Note that this method's name is intentionally shadowing the `std`'s `split_first` method which + /// returns `Option`. The rationale is that given the known length of the array, we always know + /// that this will not return `None` so trying to keep the `std` method around is pointless. + /// Importing the trait will also cause compile failures - that's also intentional to expose + /// the places where useless checks are made. + fn split_first(&self) -> (&Self::Item, &[Self::Item; RIGHT]) { + let (first, remaining) = self.split_array::<1, RIGHT>(); + (&first[0], remaining) + } + + /// Splits the array into the last element and the remaining, one element shorter, array. + /// + /// Fails to compile if the array is empty. + /// + /// Note that this method's name is intentionally shadowing the `std`'s `split_last` method which + /// returns `Option`. The rationale is that given the known length of the array, we always know + /// that this will not return `None` so trying to keep the `std` method around is pointless. + /// Importing the trait will also cause compile failures - that's also intentional to expose + /// the places where useless checks are made. + /// + /// The returned tuple is also reversed just as `std` for consistency and simpler diffs when + /// migrating. + fn split_last(&self) -> (&Self::Item, &[Self::Item; LEFT]) { + let (remaining, last) = self.split_array::(); + (&last[0], remaining) + } +} + +impl ArrayExt for [T; N] { + type Item = T; + + fn sub_array(&self) -> &[Self::Item; LEN] { + #[allow(clippy::let_unit_value)] + let _ = Hack::::IS_VALID_RANGE; + + self[OFFSET..(OFFSET + LEN)].try_into().expect("this is also compiler-checked above") + } + + fn split_array(&self) -> (&[Self::Item; LEFT], &[Self::Item; RIGHT]) { + #[allow(clippy::let_unit_value)] + let _ = Hack2::::IS_FULL_RANGE; + + (self.sub_array::<0, LEFT>(), self.sub_array::()) + } +} + +struct Hack; + +impl Hack { + const IS_VALID_RANGE: () = assert!(OFFSET + LEN <= N); +} + +struct Hack2; + +impl Hack2 { + const IS_FULL_RANGE: () = assert!(LEFT + RIGHT == N); +} diff --git a/internals/src/array_vec.rs b/internals/src/array_vec.rs index 49fca83d80..76d0490d5f 100644 --- a/internals/src/array_vec.rs +++ b/internals/src/array_vec.rs @@ -25,7 +25,7 @@ mod safety_boundary { /// Constructs an empty `ArrayVec`. pub const fn new() -> Self { Self { len: 0, data: [MaybeUninit::uninit(); CAP] } } - /// Constructs a new `ArrayVec` initialized with the contets of `slice`. + /// Constructs a new `ArrayVec` initialized with the contents of `slice`. /// /// # Panics /// @@ -188,3 +188,45 @@ mod tests { av.extend_from_slice(b"abc"); } } + +#[cfg(kani)] +mod verification { + use super::*; + + #[kani::unwind(16)] // One greater than 15 (max number of elements). + #[kani::proof] + fn no_out_of_bounds_less_than_cap() { + const CAP: usize = 32; + let n = kani::any::(); + let elements = (n & 0x0F) as usize; // Just use 4 bits. + + let val = kani::any::(); + + let mut v = ArrayVec::::new(); + for _ in 0..elements { + v.push(val); + } + + for i in 0..elements { + assert_eq!(v[i], val); + } + } + + #[kani::unwind(16)] // One grater than 15. 
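A minimal usage sketch of the compile-time-checked accessors introduced by `ArrayExt` above (the 32 + 4 byte layout and the function name are illustrative assumptions, not part of the diff):

```rust
use bitcoin_internals::array::ArrayExt;

// Split a 36-byte buffer into a 32-byte hash part and a 4-byte index part.
// The `LEFT + RIGHT == N` requirement is enforced at compile time.
fn split_hash_and_index(bytes: &[u8; 36]) -> (&[u8; 32], u32) {
    let (hash, index) = bytes.split_array::<32, 4>();
    (hash, u32::from_le_bytes(*index))
}
```

The same shape works with `sub_array::<OFFSET, LEN>()` when only a single sub-range is needed.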
+ #[kani::proof] + fn no_out_of_bounds_upto_cap() { + const CAP: usize = 15; + let elements = CAP; + + let val = kani::any::(); + + let mut v = ArrayVec::::new(); + for _ in 0..elements { + v.push(val); + } + + for i in 0..elements { + assert_eq!(v[i], val); + } + } +} diff --git a/internals/src/error/input_string.rs b/internals/src/error/input_string.rs index 817d20e436..b544c73452 100644 --- a/internals/src/error/input_string.rs +++ b/internals/src/error/input_string.rs @@ -95,7 +95,7 @@ impl_from!(&str); /// /// This is created by `display_cannot_parse` method and should be used as /// `write_err!("{}", self.input.display_cannot_parse("what is parsed"); self.source)` in parse -/// error [`Display`](fmt::Display) imlementation if the error has source. If the error doesn't +/// error [`Display`](fmt::Display) implementation if the error has source. If the error doesn't /// have a source just use regular `write!` with same formatting arguments. pub struct CannotParse<'a, T: fmt::Display + ?Sized> { input: &'a InputString, diff --git a/internals/src/lib.rs b/internals/src/lib.rs index 87bba85f41..c2e2ce7080 100644 --- a/internals/src/lib.rs +++ b/internals/src/lib.rs @@ -35,6 +35,7 @@ pub mod rust_version { include!(concat!(env!("OUT_DIR"), "/rust_version.rs")); } +pub mod array; pub mod array_vec; pub mod compact_size; pub mod const_tools; @@ -42,6 +43,8 @@ pub mod error; pub mod macros; mod parse; pub mod script; +pub mod slice; +pub mod wrap_debug; #[cfg(feature = "serde")] #[macro_use] pub mod serde; diff --git a/internals/src/macros.rs b/internals/src/macros.rs index d0bad52d77..69725e232a 100644 --- a/internals/src/macros.rs +++ b/internals/src/macros.rs @@ -2,21 +2,6 @@ //! Various macros used by the Rust Bitcoin ecosystem. -/// Implements `Debug` by calling through to `Display`. -#[macro_export] -macro_rules! debug_from_display { - ($thing:ident) => { - impl core::fmt::Debug for $thing { - fn fmt( - &self, - f: &mut core::fmt::Formatter, - ) -> core::result::Result<(), core::fmt::Error> { - core::fmt::Display::fmt(self, f) - } - } - }; -} - /// Asserts a boolean expression at compile time. #[macro_export] macro_rules! const_assert { @@ -30,65 +15,6 @@ macro_rules! const_assert { } } -/// Derives `From` for the given type. -/// -/// Supports types with arbitrary combinations of lifetimes and type parameters. -/// -/// Note: Paths are not supported (for ex. impl_from_infallible!(Hello). 
-/// -/// # Examples -/// -/// ```rust -/// # #[allow(unused)] -/// # fn main() { -/// # use core::fmt::{Display, Debug}; -/// use bitcoin_internals::impl_from_infallible; -/// -/// enum AlphaEnum { Item } -/// impl_from_infallible!(AlphaEnum); -/// -/// enum BetaEnum<'b> { Item(&'b usize) } -/// impl_from_infallible!(BetaEnum<'b>); -/// -/// enum GammaEnum { Item(T) } -/// impl_from_infallible!(GammaEnum); -/// -/// enum DeltaEnum<'b, 'a: 'static + 'b, T: 'a, D: Debug + Display + 'a> { -/// Item((&'b usize, &'a usize, T, D)) -/// } -/// impl_from_infallible!(DeltaEnum<'b, 'a: 'static + 'b, T: 'a, D: Debug + Display + 'a>); -/// -/// struct AlphaStruct; -/// impl_from_infallible!(AlphaStruct); -/// -/// struct BetaStruct<'b>(&'b usize); -/// impl_from_infallible!(BetaStruct<'b>); -/// -/// struct GammaStruct(T); -/// impl_from_infallible!(GammaStruct); -/// -/// struct DeltaStruct<'b, 'a: 'static + 'b, T: 'a, D: Debug + Display + 'a> { -/// hello: &'a T, -/// what: &'b D, -/// } -/// impl_from_infallible!(DeltaStruct<'b, 'a: 'static + 'b, T: 'a, D: Debug + Display + 'a>); -/// # } -/// ``` -/// -/// See for more information about this macro. -#[macro_export] -macro_rules! impl_from_infallible { - ( $name:ident $(< $( $lt:tt $( : $clt:tt $(+ $dlt:tt )* )? ),+ >)? ) => { - impl $(< $( $lt $( : $clt $(+ $dlt )* )? ),+ >)? - From - for $name - $(< $( $lt ),+ >)? - { - fn from(never: core::convert::Infallible) -> Self { match never {} } - } - } -} - /// Adds an implementation of `pub fn to_hex(&self) -> String` if `alloc` feature is enabled. /// /// The added function allocates a `String` then calls through to [`core::fmt::LowerHex`]. diff --git a/internals/src/serde.rs b/internals/src/serde.rs index f48bc63463..7c25cfae49 100644 --- a/internals/src/serde.rs +++ b/internals/src/serde.rs @@ -310,9 +310,11 @@ macro_rules! serde_round_trip ( }) ); +#[cfg(feature = "hex")] /// Serializes a byte slice using the `hex` crate. pub struct SerializeBytesAsHex<'a>(pub &'a [u8]); +#[cfg(feature = "hex")] impl serde::Serialize for SerializeBytesAsHex<'_> { fn serialize(&self, serializer: S) -> Result where diff --git a/internals/src/slice.rs b/internals/src/slice.rs new file mode 100644 index 0000000000..b703238e45 --- /dev/null +++ b/internals/src/slice.rs @@ -0,0 +1,170 @@ +//! Contains extensions related to slices. + +/// Extension trait for slice. +pub trait SliceExt { + /// The item type the slice is storing. + type Item; + + /// Splits up the slice into a slice of arrays and a remainder. + /// + /// Note that `N` must not be zero: + /// + /// ```compile_fail + /// let slice = [1, 2, 3]; + /// let fail = slice.as_chunks::<0>(); + /// ``` + fn bitcoin_as_chunks(&self) -> (&[[Self::Item; N]], &[Self::Item]); + + /// Splits up the slice into a slice of arrays and a remainder. + /// + /// Note that `N` must not be zero: + /// + /// ```compile_fail + /// let mut slice = [1, 2, 3]; + /// let fail = slice.as_chunks_mut::<0>(); + /// ``` + fn bitcoin_as_chunks_mut( + &mut self, + ) -> (&mut [[Self::Item; N]], &mut [Self::Item]); + + /// Tries to access a sub-array of length `ARRAY_LEN` at the specified `offset`. + /// + /// Returns `None` in case of out-of-bounds access. + fn get_array(&self, offset: usize) -> Option<&[Self::Item; ARRAY_LEN]>; + + /// Splits the slice into an array and remainder if it's long enough. 
+ /// + /// Returns `None` if the slice is shorter than `ARRAY_LEN` + #[allow(clippy::type_complexity)] // it's not really complex and redefining would make it + // harder to understand + fn split_first_chunk(&self) -> Option<(&[Self::Item; ARRAY_LEN], &[Self::Item])>; + + /// Splits the slice into a remainder and an array if it's long enough. + /// + /// Returns `None` if the slice is shorter than `ARRAY_LEN` + #[allow(clippy::type_complexity)] // it's not really complex and redefining would make it + // harder to understand + fn split_last_chunk(&self) -> Option<(&[Self::Item], &[Self::Item; ARRAY_LEN])>; +} + +impl SliceExt for [T] { + type Item = T; + + fn bitcoin_as_chunks(&self) -> (&[[Self::Item; N]], &[Self::Item]) { + #[allow(clippy::let_unit_value)] + let _ = Hack::::IS_NONZERO; + + let chunks_count = self.len() / N; + let total_left_len = chunks_count * N; + let (left, right) = self.split_at(total_left_len); + // SAFETY: we've obtained the pointer from a slice that's still live + // we're merely casting, so no aliasing issues here + // arrays of T have same alignment as T + // the resulting slice points within the obtained slice as was computed above + let left = unsafe { + core::slice::from_raw_parts(left.as_ptr().cast::<[Self::Item; N]>(), chunks_count) + }; + (left, right) + } + + fn bitcoin_as_chunks_mut( + &mut self, + ) -> (&mut [[Self::Item; N]], &mut [Self::Item]) { + #[allow(clippy::let_unit_value)] + let _ = Hack::::IS_NONZERO; + + let chunks_count = self.len() / N; + let total_left_len = chunks_count * N; + let (left, right) = self.split_at_mut(total_left_len); + // SAFETY: we've obtained the pointer from a slice that's still live + // we're merely casting, so no aliasing issues here + // arrays of T have same alignment as T + // the resulting slice points within the obtained slice as was computed above + let left = unsafe { + core::slice::from_raw_parts_mut( + left.as_mut_ptr().cast::<[Self::Item; N]>(), + chunks_count, + ) + }; + (left, right) + } + + fn get_array(&self, offset: usize) -> Option<&[Self::Item; ARRAY_LEN]> { + self.get(offset..(offset + ARRAY_LEN)) + .map(|slice| slice.try_into().expect("the arguments to `get` evaluate to the same length the return type uses")) + } + + fn split_first_chunk(&self) -> Option<(&[Self::Item; ARRAY_LEN], &[Self::Item])> { + if self.len() < ARRAY_LEN { + return None; + } + let (first, remainder) = self.split_at(ARRAY_LEN); + Some((first.try_into().expect("we're passing `ARRAY_LEN` to `split_at` above"), remainder)) + } + + fn split_last_chunk(&self) -> Option<(&[Self::Item], &[Self::Item; ARRAY_LEN])> { + if self.len() < ARRAY_LEN { + return None; + } + let (remainder, last) = self.split_at(self.len() - ARRAY_LEN); + Some((remainder, last.try_into().expect("we're passing `self.len() - ARRAY_LEN` to `split_at` above"))) + } +} + +struct Hack; + +impl Hack { + const IS_NONZERO: () = { + assert!(N != 0); + }; +} + +#[cfg(test)] +mod tests { + use super::SliceExt; + + // some comparisons require type annotations + const EMPTY: &[i32] = &[]; + + #[test] + fn one_to_one() { + let slice = [1]; + let (left, right) = slice.bitcoin_as_chunks::<1>(); + assert_eq!(left, &[[1]]); + assert_eq!(right, EMPTY); + } + + #[test] + fn one_to_two() { + const EMPTY_LEFT: &[[i32; 2]] = &[]; + + let slice = [1i32]; + let (left, right) = slice.bitcoin_as_chunks::<2>(); + assert_eq!(left, EMPTY_LEFT); + assert_eq!(right, &[1]); + } + + #[test] + fn two_to_one() { + let slice = [1, 2]; + let (left, right) = slice.bitcoin_as_chunks::<1>(); + 
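As a usage sketch (assuming `std` for `Vec`; the function name is illustrative), `bitcoin_as_chunks` removes the manual index arithmetic when a slice is processed in fixed-size blocks:

```rust
use bitcoin_internals::slice::SliceExt;

// Interpret a byte slice as little-endian u32 words, ignoring trailing bytes
// that do not fill a complete 4-byte chunk.
fn parse_le_u32s(bytes: &[u8]) -> Vec<u32> {
    let (chunks, _remainder) = bytes.bitcoin_as_chunks::<4>();
    chunks.iter().map(|chunk| u32::from_le_bytes(*chunk)).collect()
}
```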
assert_eq!(left, &[[1], [2]]); + assert_eq!(right, EMPTY); + } + + #[test] + fn two_to_two() { + let slice = [1, 2]; + let (left, right) = slice.bitcoin_as_chunks::<2>(); + assert_eq!(left, &[[1, 2]]); + assert_eq!(right, EMPTY); + } + + #[test] + fn three_to_two() { + let slice = [1, 2, 3]; + let (left, right) = slice.bitcoin_as_chunks::<2>(); + assert_eq!(left, &[[1, 2]]); + assert_eq!(right, &[3]); + } +} diff --git a/internals/src/wrap_debug.rs b/internals/src/wrap_debug.rs new file mode 100644 index 0000000000..9a3f63d9b2 --- /dev/null +++ b/internals/src/wrap_debug.rs @@ -0,0 +1,9 @@ +//! Contains a wrapper for a function that implements `Debug`. +use core::fmt; + +/// A wrapper for a function that implements `Debug`. +pub struct WrapDebug fmt::Result>(pub F); + +impl fmt::Result> fmt::Debug for WrapDebug { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { (self.0)(f) } +} diff --git a/io/Cargo.toml b/io/Cargo.toml index 224d37804b..f59c35e00c 100644 --- a/io/Cargo.toml +++ b/io/Cargo.toml @@ -15,12 +15,17 @@ exclude = ["tests", "contrib"] [features] default = ["std"] -std = ["alloc"] -alloc = [] +std = ["alloc", "hashes?/std"] +alloc = ["hashes?/alloc"] [dependencies] internals = { package = "bitcoin-internals", version = "0.4.0" } +hashes = { package = "bitcoin_hashes", version = "0.16.0", default-features = false, optional = true } + +[dev-dependencies] +hashes = { package = "bitcoin_hashes", version = "0.16.0", default-features = false, features = ["hex"] } + [package.metadata.docs.rs] all-features = true rustdoc-args = ["--cfg", "docsrs"] diff --git a/io/README.md b/io/README.md index f276437bc3..3d5508de34 100644 --- a/io/README.md +++ b/io/README.md @@ -1,4 +1,4 @@ -# Rust-Bitcoin IO Library +# Rust-Bitcoin I/O Library The `std::io` module is not exposed in `no-std` Rust so building `no-std` applications which require reading and writing objects via standard traits is not generally possible. Thus, this library exists diff --git a/io/contrib/test_vars.sh b/io/contrib/test_vars.sh index 92dc094064..279c2507c4 100644 --- a/io/contrib/test_vars.sh +++ b/io/contrib/test_vars.sh @@ -5,10 +5,10 @@ # shellcheck disable=SC2034 # Test all these features with "std" enabled. -FEATURES_WITH_STD="" +FEATURES_WITH_STD="hashes" # Test all these features without "std" enabled. -FEATURES_WITHOUT_STD="alloc" +FEATURES_WITHOUT_STD="alloc hashes" # Run these examples. EXAMPLES="" diff --git a/io/src/bridge.rs b/io/src/bridge.rs index 204a77a51e..fc49a07719 100644 --- a/io/src/bridge.rs +++ b/io/src/bridge.rs @@ -1,43 +1,56 @@ +// SPDX-License-Identifier: CC0-1.0 + #[cfg(feature = "alloc")] use alloc::boxed::Box; use internals::rust_version; -/// A bridging wrapper providing the IO traits for types that already implement `std` IO traits. +/// A bridging wrapper providing the I/O traits for types that already implement `std` I/O traits. #[repr(transparent)] +#[derive(Debug)] pub struct FromStd(T); impl FromStd { - /// Wraps an IO type. + /// Wraps an I/O type. #[inline] pub const fn new(inner: T) -> Self { Self(inner) } + /// Wraps a mutable reference to I/O type. + #[inline] + pub fn new_mut(inner: &mut T) -> &mut Self { + // SAFETY: the type is repr(transparent) and the lifetimes match + unsafe { &mut *(inner as *mut _ as *mut Self) } + } + + /// Wraps a boxed I/O type. 
+ #[cfg(feature = "alloc")] + #[inline] + pub fn new_boxed(inner: Box) -> Box { + // SAFETY: the type is repr(transparent) and the pointer is created from Box + unsafe { Box::from_raw(Box::into_raw(inner) as *mut Self) } + } + /// Returns the wrapped value. #[inline] pub fn into_inner(self) -> T { self.0 } /// Returns a reference to the wrapped value. #[inline] - pub fn inner(&self) -> &T { &self.0 } + pub fn get_ref(&self) -> &T { &self.0 } /// Returns a mutable reference to the wrapped value. #[inline] - pub fn inner_mut(&mut self) -> &mut T { &mut self.0 } + pub fn get_mut(&mut self) -> &mut T { &mut self.0 } - /// Wraps a mutable reference to IO type. + /// Returns a reference to the wrapped value. #[inline] - pub fn new_mut(inner: &mut T) -> &mut Self { - // SAFETY: the type is repr(transparent) and the lifetimes match - unsafe { &mut *(inner as *mut _ as *mut Self) } - } + #[deprecated(since = "TBD", note = "use `get_ref()` instead")] + pub fn inner(&self) -> &T { &self.0 } - /// Wraps a boxed IO type. - #[cfg(feature = "alloc")] + /// Returns a mutable reference to the wrapped value. #[inline] - pub fn new_boxed(inner: Box) -> Box { - // SAFETY: the type is repr(transparent) and the pointer is created from Box - unsafe { Box::from_raw(Box::into_raw(inner) as *mut Self) } - } + #[deprecated(since = "TBD", note = "use `get_ref()` instead")] + pub fn inner_mut(&mut self) -> &mut T { &mut self.0 } } impl super::Read for FromStd { @@ -106,39 +119,40 @@ impl std::io::Write for FromStd { /// A bridging wrapper providing the std traits for types that already implement our traits. #[repr(transparent)] +#[derive(Debug)] pub struct ToStd(T); impl ToStd { - /// Wraps an IO type. + /// Wraps an I/O type. #[inline] pub const fn new(inner: T) -> Self { Self(inner) } - /// Returns the wrapped value. - #[inline] - pub fn into_inner(self) -> T { self.0 } - - /// Returns a reference to the wrapped value. - #[inline] - pub fn inner(&self) -> &T { &self.0 } - - /// Returns a mutable reference to the wrapped value. - #[inline] - pub fn inner_mut(&mut self) -> &mut T { &mut self.0 } - - /// Wraps a mutable reference to IO type. + /// Wraps a mutable reference to I/O type. #[inline] pub fn new_mut(inner: &mut T) -> &mut Self { // SAFETY: the type is repr(transparent) and the lifetimes match unsafe { &mut *(inner as *mut _ as *mut Self) } } - /// Wraps a boxed IO type. + /// Wraps a boxed I/O type. #[cfg(feature = "alloc")] #[inline] pub fn new_boxed(inner: Box) -> Box { // SAFETY: the type is repr(transparent) and the pointer is created from Box unsafe { Box::from_raw(Box::into_raw(inner) as *mut Self) } } + + /// Returns the wrapped value. + #[inline] + pub fn into_inner(self) -> T { self.0 } + + /// Returns a reference to the wrapped value. + #[inline] + pub fn inner(&self) -> &T { &self.0 } + + /// Returns a mutable reference to the wrapped value. + #[inline] + pub fn inner_mut(&mut self) -> &mut T { &mut self.0 } } impl std::io::Read for ToStd { diff --git a/io/src/error.rs b/io/src/error.rs index 172dd3d6a9..a79725cf2b 100644 --- a/io/src/error.rs +++ b/io/src/error.rs @@ -1,3 +1,5 @@ +// SPDX-License-Identifier: CC0-1.0 + #[cfg(all(not(feature = "std"), feature = "alloc"))] use alloc::boxed::Box; use core::fmt; @@ -6,10 +8,12 @@ use core::fmt; #[derive(Debug)] pub struct Error { kind: ErrorKind, - /// Indicates that the `struct` can pretend to own a mutable static reference - /// and an [`UnsafeCell`](core::cell::UnsafeCell), which are not unwind safe. 
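A short sketch of the bridging direction these wrappers cover: wrapping a `std::io` reader so it can be driven through the `bitcoin_io` traits (the function name and the 4-byte read are illustrative assumptions):

```rust
use bitcoin_io::{FromStd, Read as _};

fn read_magic<R: std::io::Read>(reader: R) -> bitcoin_io::Result<[u8; 4]> {
    // `FromStd` adapts the `std` reader; the wrapper converts the error type.
    let mut bridged = FromStd::new(reader);
    let mut magic = [0u8; 4];
    bridged.read_exact(&mut magic)?;
    Ok(magic)
}
```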
- /// This is so that it does not introduce non-additive cargo features. - _not_unwind_safe: core::marker::PhantomData<(&'static mut (), core::cell::UnsafeCell<()>)>, + /// We want this type to be `?UnwindSafe` and `?RefUnwindSafe` - the same as `std::io::Error`. + /// + /// In `std` builds the existence of `dyn std::error:Error` prevents `UnwindSafe` and + /// `RefUnwindSafe` from being automatically implemented. But in `no-std` builds without the + /// marker nothing prevents it. + _not_unwind_safe: core::marker::PhantomData, #[cfg(feature = "std")] error: Option>, @@ -76,19 +80,6 @@ impl std::error::Error for Error { fn source(&self) -> Option<&(dyn std::error::Error + 'static)> { self.error.as_ref().and_then(|e| e.as_ref().source()) } - - #[allow(deprecated)] - fn description(&self) -> &str { - match self.error.as_ref() { - Some(e) => e.description(), - None => self.kind.description(), - } - } - - #[allow(deprecated)] - fn cause(&self) -> Option<&dyn std::error::Error> { - self.error.as_ref().and_then(|e| e.as_ref().cause()) - } } #[cfg(feature = "std")] @@ -113,6 +104,13 @@ impl From for std::io::Error { } } +/// Useful for preventing `UnwindSafe` and `RefUnwindSafe` from being automatically implemented. +struct NotUnwindSafe { + _not_unwind_safe: core::marker::PhantomData<(&'static mut (), core::cell::UnsafeCell<()>)>, +} + +unsafe impl Sync for NotUnwindSafe {} + macro_rules! define_errorkind { ($($(#[$($attr:tt)*])* $kind:ident),*) => { #[derive(Clone, Copy, Debug, Hash, PartialEq, Eq)] @@ -190,7 +188,7 @@ define_errorkind!( WriteZero, /// This operation was interrupted. Interrupted, - /// An error returned when an operation could not be completed because an “end of file” was reached prematurely. + /// An error returned when an operation could not be completed because an "end of file" was reached prematurely. UnexpectedEof, // Note: Any time we bump the MSRV any new error kinds should be added here! /// A custom error that does not fall under any other I/O error kind diff --git a/io/src/hash.rs b/io/src/hash.rs new file mode 100644 index 0000000000..859f035b73 --- /dev/null +++ b/io/src/hash.rs @@ -0,0 +1,365 @@ +// SPDX-License-Identifier: CC0-1.0 + +//! I/O hashing support. +//! +//! Support for various hashing related things e.g. +//! +//! - Hashing to a writer. +//! - Implement I/O traits for hash engines. + +use hashes::hmac::HmacEngine; +use hashes::{ + hash160, ripemd160, sha1, sha256, sha256d, sha256t, sha384, sha512, sha512_256, siphash24, + HashEngine as _, +}; + +use crate::BufRead; + +macro_rules! impl_write { + ($ty: ty, $write_fn: expr, $flush_fn: expr $(, $bounded_ty: ident : $bounds: path),*) => { + // `std::io::Write` is implemented in `bitcoin_hashes` because of the orphan rule. 
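A minimal sketch (not part of the diff) of what the `Write` implementations in this new module enable: data can be streamed into a hash engine through `bitcoin_io::Write`, and the digest taken with the new `finalize` method. The function name is an illustrative assumption:

```rust
use bitcoin_hashes::{sha256, HashEngine as _};
use bitcoin_io::Write as _;

// Hash two buffers without concatenating them into a single allocation first.
fn hash_two_parts(a: &[u8], b: &[u8]) -> sha256::Hash {
    let mut engine = sha256::HashEngine::default();
    engine.write_all(a).expect("writing to a hash engine does not fail");
    engine.write_all(b).expect("writing to a hash engine does not fail");
    engine.finalize()
}
```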
+ impl<$($bounded_ty: $bounds),*> crate::Write for $ty { + #[inline] + fn write(&mut self, buf: &[u8]) -> crate::Result { + $write_fn(self, buf)} + + #[inline] + fn flush(&mut self) -> crate::Result<()> { + $flush_fn(self) + } + } + } +} +pub(crate) use impl_write; + +impl_write!( + hash160::HashEngine, + |us: &mut hash160::HashEngine, buf| { + hashes::HashEngine::input(us, buf); + Ok(buf.len()) + }, + |_us| { Ok(()) } +); + +impl_write!( + ripemd160::HashEngine, + |us: &mut ripemd160::HashEngine, buf| { + hashes::HashEngine::input(us, buf); + Ok(buf.len()) + }, + |_us| { Ok(()) } +); + +impl_write!( + sha1::HashEngine, + |us: &mut sha1::HashEngine, buf| { + hashes::HashEngine::input(us, buf); + Ok(buf.len()) + }, + |_us| { Ok(()) } +); + +impl_write!( + sha256::HashEngine, + |us: &mut sha256::HashEngine, buf| { + hashes::HashEngine::input(us, buf); + Ok(buf.len()) + }, + |_us| { Ok(()) } +); + +impl_write!( + sha256d::HashEngine, + |us: &mut sha256d::HashEngine, buf| { + hashes::HashEngine::input(us, buf); + Ok(buf.len()) + }, + |_us| { Ok(()) } +); + +impl_write!( + sha256t::HashEngine, + |us: &mut sha256t::HashEngine, buf| { + hashes::HashEngine::input(us, buf); + Ok(buf.len()) + }, + |_us| { Ok(()) }, + T: sha256t::Tag +); + +impl_write!( + sha384::HashEngine, + |us: &mut sha384::HashEngine, buf| { + hashes::HashEngine::input(us, buf); + Ok(buf.len()) + }, + |_us| { Ok(()) } +); + +impl_write!( + sha512::HashEngine, + |us: &mut sha512::HashEngine, buf| { + hashes::HashEngine::input(us, buf); + Ok(buf.len()) + }, + |_us| { Ok(()) } +); + +impl_write!( + sha512_256::HashEngine, + |us: &mut sha512_256::HashEngine, buf| { + hashes::HashEngine::input(us, buf); + Ok(buf.len()) + }, + |_us| { Ok(()) } +); + +impl_write!( + siphash24::HashEngine, + |us: &mut siphash24::HashEngine, buf| { + hashes::HashEngine::input(us, buf); + Ok(buf.len()) + }, + |_us| { Ok(()) } +); + +impl_write!( + HmacEngine, + |us: &mut HmacEngine, buf| { + us.input(buf); + Ok(buf.len()) + }, + |_us| { Ok(()) }, + T: hashes::HashEngine +); + +/// Hashes data from a reader. +pub fn hash_reader(reader: &mut impl BufRead) -> Result +where + T: hashes::HashEngine + Default, +{ + let mut engine = T::default(); + loop { + let bytes = reader.fill_buf()?; + + let read = bytes.len(); + // Empty slice means EOF. + if read == 0 { + break; + } + + engine.input(bytes); + reader.consume(read); + } + Ok(engine.finalize()) +} + +#[cfg(test)] +#[cfg(feature = "alloc")] +mod tests { + use alloc::format; + + use hashes::hmac; + + use super::*; + use crate::{Cursor, Write as _}; + + macro_rules! 
write_test { + ($mod:ident, $exp_empty:expr, $exp_256:expr, $exp_64k:expr,) => { + #[test] + fn $mod() { + let mut engine = $mod::Hash::engine(); + engine.write_all(&[]).unwrap(); + assert_eq!(format!("{}", $mod::Hash::from_engine(engine)), $exp_empty); + + let mut engine = $mod::Hash::engine(); + engine.write_all(&[1; 256]).unwrap(); + assert_eq!(format!("{}", $mod::Hash::from_engine(engine)), $exp_256); + + let mut engine = $mod::Hash::engine(); + engine.write_all(&[99; 64000]).unwrap(); + assert_eq!(format!("{}", $mod::Hash::from_engine(engine)), $exp_64k); + } + }; + } + + write_test!( + sha1, + "da39a3ee5e6b4b0d3255bfef95601890afd80709", + "ac458b067c6b021c7e9358229b636e9d1e4cb154", + "e4b66838f9f7b6f91e5be32a02ae78094df402e7", + ); + + write_test!( + sha256, + "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", + "2661920f2409dd6c8adeb0c44972959f232b6429afa913845d0fd95e7e768234", + "5c5e904f5d4fd587c7a906bf846e08a927286f388c54c39213a4884695271bbc", + ); + + write_test!( + sha256d, + "56944c5d3f98413ef45cf54545538103cc9f298e0575820ad3591376e2e0f65d", + "374000d830c75d10d9417e493a7652920f30efbd300e3fb092f24c28c20baf64", + "0050d4148ad7a0437ca0643fad5bf4614cd95d9ba21fde52370b37dcc3f03307", + ); + + write_test!( + sha384, + "38b060a751ac96384cd9327eb1b1e36a21fdb71114be07434c0cc7bf63f6e1da274edebfe76f65fbd51ad2f14898b95b", + "82135637ef6d6dd31a20e2bc9998681a3eecaf8f8c76d45e545214de38439d9a533848ec75f53e4b1a8805709c5124d0", + "fb7511d9a98c5686f9c2f55e242397815c9229d8759451e1710b8da6861e08d52f0357176f4b74f8cad9e23ab65411c7", + ); + + write_test!( + sha512, + "cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce\ + 47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e", + "57ecf739d3a7ca647639adae80a05f4f361304bfcbfa1ceba93296b096e74287\ + 45fc10c142cecdd3bb587a3dba598c072f6f78b31cc0a06a3da0105ee51f75d6", + "dd28f78c53f3bc9bd0c2dca9642a1ad402a70412f985c1f6e54fadb98ce9c458\ + 4761df8d04ed04bb734ba48dd2106bb9ea54524f1394cdd18e6da3166e71c3ee", + ); + + write_test!( + sha512_256, + "c672b8d1ef56ed28ab87c3622c5114069bdd3ad7b8f9737498d0c01ecef0967a", + "8d4bb96e7956cf5f08bf5c45f7982630c46b0b022f25cbaf722ae97c06a6e7a2", + "3367646f3e264653f7dd664ac2cb6d3b96329e86ffb7a29a1082e2a4ddc9ee7a", + ); + + write_test!( + ripemd160, + "9c1185a5c5e9fc54612808977ee8f548b2258d31", + "e571a1ca5b780aa52bafdb9ec852544ffca418ba", + "ddd2ecce739e823629c7d46ab18918e9c4a51c75", + ); + + write_test!( + hash160, + "b472a266d0bd89c13706a4132ccfb16f7c3b9fcb", + "671356a1a874695ad3bc20cae440f4360835bd5a", + "a9608c952c8dbcc20c53803d2ca5ad31d64d9313", + ); + + #[test] + fn hmac() { + let mut engine = hmac::HmacEngine::::new(&[0xde, 0xad, 0xbe, 0xef]); + engine.write_all(&[]).unwrap(); + assert_eq!( + format!("{}", engine.finalize()), + "bf5515149cf797955c4d3194cca42472883281951697c8375d9d9b107f384225" + ); + + let mut engine = hmac::HmacEngine::::new(&[0xde, 0xad, 0xbe, 0xef]); + engine.write_all(&[1; 256]).unwrap(); + assert_eq!( + format!("{}", engine.finalize()), + "59c9aca10c81c73cb4c196d94db741b6bf2050e0153d5a45f2526bff34675ac5" + ); + + let mut engine = hmac::HmacEngine::::new(&[0xde, 0xad, 0xbe, 0xef]); + engine.write_all(&[99; 64000]).unwrap(); + assert_eq!( + format!("{}", engine.finalize()), + "30df499717415a395379a1eaabe50038036e4abb5afc94aa55c952f4aa57be08" + ); + } + + #[test] + fn siphash24() { + let mut engine = siphash24::HashEngine::with_keys(0, 0); + engine.write_all(&[]).unwrap(); + assert_eq!(format!("{}", siphash24::Hash::from_engine(engine)), "d70077739d4b921e"); 
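For readers following the keyed tests above, a sketch of the HMAC flow with the generic parameter written out. That `HmacEngine` is parameterized by the inner hash engine is an assumption based on the `T: hashes::HashEngine` bound used earlier; hex `Display` assumes the `hex` feature, and `String` assumes `std`:

```rust
use bitcoin_io::hashes::{hmac::HmacEngine, sha256, HashEngine as _};

fn hmac_sha256_hex(key: &[u8], data: &[u8]) -> String {
    let mut engine = HmacEngine::<sha256::HashEngine>::new(key);
    engine.input(data);
    // `finalize` consumes the engine and yields the keyed digest.
    format!("{}", engine.finalize())
}
```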
+ + let mut engine = siphash24::HashEngine::with_keys(0, 0); + engine.write_all(&[1; 256]).unwrap(); + assert_eq!(format!("{}", siphash24::Hash::from_engine(engine)), "3a3ccefde9b5b1e3"); + + let mut engine = siphash24::HashEngine::with_keys(0, 0); + engine.write_all(&[99; 64000]).unwrap(); + assert_eq!(format!("{}", siphash24::Hash::from_engine(engine)), "ce456e4e4ecbc5bf"); + } + + // Data and expected hashes taken from `bitcoin_hashes/tests/regression.rs`. + const DATA: &str = "arbitrary data to hash as a regression test"; + const HMAC_KEY: &[u8] = b"some key"; + + macro_rules! impl_hash_reader_test { + ($($test_name:ident, $module:ident, $want:literal);* $(;)?) => { + $( + #[test] + fn $test_name() { + let hash = $module::Hash::hash(DATA.as_bytes()); + let got = format!("{}", hash); + assert_eq!(got, $want); + + let mut reader = Cursor::new(DATA); + let hash_from_reader = $crate::hash_reader::<$module::HashEngine>(&mut reader).unwrap(); + assert_eq!(hash_from_reader, hash) + } + )* + } + } + + impl_hash_reader_test! { + hash_from_reader_hash160, hash160, "a17909f6d5373b0085c4180ba207126e5040f74d"; + hash_from_reader_ripemd160, ripemd160, "e6801701c77a1cd85662335258c7869631b4a9a8"; + hash_from_reader_sha1, sha1, "e1e81eeabadafa3d5d41cc3f405385426b0f47fd"; + hash_from_reader_sha256, sha256, "d291c6c5a07fa1d9315cdae090ebe14169fbe0a219cd55a48d0d2104eab6ec51"; + hash_from_reader_sha256d, sha256d, "93a743b022290bde3233a619b21aaebe06c5cf5cc959464c41be35711e37731b"; + hash_from_reader_sha384, sha384, "f545bd83d297978d47a7f26b858a54188499dfb4d7d570a6a2362c765031d57a29d7e002df5e34d184e70b65a4f47153"; + hash_from_reader_sha512, sha512, "057d0a37e9e0ac9a93acde0752748da059a27bcf946c7af00692ac1a95db8d21f965f40af22efc4710f100f8d3e43f79f77b1f48e1e400a95b7344b7bc0dfd10"; + hash_from_reader_sha512_256, sha512_256, "e204244c429b5bca037a2a8a6e7ed8a42b808ceaff182560840bb8c5c8e9a2ec"; + } + + #[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Default, Hash)] + pub struct RegHashTag; // Name comes from regression tests in `bitcoin_hashes`. 
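A small usage sketch of `hash_reader` outside the test macro above, assuming the `hashes` re-export; any `BufRead` source works, `Cursor` being the one the tests use:

```rust
use bitcoin_io::hashes::sha256;
use bitcoin_io::{BufRead, Result};

// Hash everything a buffered reader yields until EOF without collecting it.
fn sha256_of_stream(reader: &mut impl BufRead) -> Result<sha256::Hash> {
    bitcoin_io::hash_reader::<sha256::HashEngine>(reader)
}
```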
+ + impl sha256t::Tag for RegHashTag { + const MIDSTATE: sha256::Midstate = sha256::Midstate::new([0xab; 32], 64); + } + + type RegHash = sha256t::Hash; + + #[test] + fn regression_sha256t() { + let hash = RegHash::hash(DATA.as_bytes()); + let got = format!("{}", hash); + let want = "17db326d7c13867376ccca1f8a211377be3cbeaeb372f167822284866ddf14ca"; + assert_eq!(got, want); + } + + #[test] + fn regression_hmac_sha256_with_key() { + let mut engine = HmacEngine::::new(HMAC_KEY); + engine.input(DATA.as_bytes()); + let hash = engine.finalize(); + + let got = format!("{}", hash); + let want = "d159cecaf4adf90b6a641bab767e4817d3a51c414acea3682686c35ec0b37b52"; + assert_eq!(got, want); + } + + #[test] + fn regression_hmac_sha512_with_key() { + let mut engine = HmacEngine::::new(HMAC_KEY); + engine.input(DATA.as_bytes()); + let hash = engine.finalize(); + + let got = format!("{}", hash); + let want = "8511773748f89ba22c07fb3a2981a12c1823695119de41f4a62aead6b848bd34939acf16475c35ed7956114fead3e794cc162ecd35e447a4dabc3227d55f757b"; + assert_eq!(got, want); + } + + #[test] + fn regression_siphash24_with_key() { + let mut engine = siphash24::HashEngine::with_keys(0, 0); + engine.input(DATA.as_bytes()); + let hash = siphash24::Hash::from_engine(engine); + + let got = format!("{}", hash); + let want = "e823ed82311d601a"; + assert_eq!(got, want); + } +} diff --git a/io/src/lib.rs b/io/src/lib.rs index b867be1919..8da2d108dc 100644 --- a/io/src/lib.rs +++ b/io/src/lib.rs @@ -1,8 +1,10 @@ -//! Rust-Bitcoin IO Library +// SPDX-License-Identifier: CC0-1.0 + +//! # Rust Bitcoin I/O Library //! -//! The `std::io` module is not exposed in `no-std` Rust so building `no-std` applications which +//! The [`std::io`] module is not exposed in `no-std` Rust so building `no-std` applications which //! require reading and writing objects via standard traits is not generally possible. Thus, this -//! library exists to export a minmal version of `std::io`'s traits which we use in `rust-bitcoin` +//! library exists to export a minimal version of `std::io`'s traits which we use in `rust-bitcoin` //! so that we can support `no-std` applications. //! //! These traits are not one-for-one drop-ins, but are as close as possible while still implementing @@ -14,6 +16,8 @@ // Coding conventions. #![warn(missing_docs)] #![doc(test(attr(warn(unused))))] +// Pedantic lints that we enforce. +#![warn(clippy::return_self_not_must_use)] // Exclude lints we don't think are valuable. #![allow(clippy::needless_question_mark)] // https://github.com/rust-bitcoin/rust-bitcoin/pull/2134 #![allow(clippy::manual_range_contains)] // More readable than clippy's format. @@ -21,11 +25,17 @@ #[cfg(feature = "alloc")] extern crate alloc; +#[cfg(feature = "hashes")] +pub extern crate hashes; + #[cfg(feature = "std")] mod bridge; mod error; mod macros; +#[cfg(feature = "hashes")] +mod hash; + #[cfg(all(not(feature = "std"), feature = "alloc"))] use alloc::vec::Vec; use core::cmp; @@ -35,16 +45,28 @@ pub use bridge::{FromStd, ToStd}; #[rustfmt::skip] // Keep public re-exports separate. pub use self::error::{Error, ErrorKind}; +#[cfg(feature = "hashes")] +pub use self::hash::hash_reader; /// Result type returned by functions in this crate. pub type Result = core::result::Result; -/// A generic trait describing an input stream. See [`std::io::Read`] for more info. +/// A generic trait describing an input stream. +/// +/// See [`std::io::Read`] for more information. pub trait Read { /// Reads bytes from source into `buf`. 
+ /// + /// # Returns + /// + /// The number of bytes read if successful or an [`Error`] if reading fails. fn read(&mut self, buf: &mut [u8]) -> Result; /// Reads bytes from source until `buf` is full. + /// + /// # Errors + /// + /// If the exact number of bytes required to fill `buf` cannot be read. #[inline] fn read_exact(&mut self, mut buf: &mut [u8]) -> Result<()> { while !buf.is_empty() { @@ -67,7 +89,11 @@ pub trait Read { /// `limit` is used to prevent a denial of service attack vector since an unbounded reader will /// exhaust all memory. /// - /// Similar to `std::io::Read::read_to_end` but with the DOS protection. + /// Similar to [`std::io::Read::read_to_end`] but with the DOS protection. + /// + /// # Returns + /// + /// The number of bytes read if successful or an [`Error`] if reading fails. #[doc(alias = "read_to_end")] #[cfg(feature = "alloc")] #[inline] @@ -79,6 +105,10 @@ pub trait Read { /// A trait describing an input stream that uses an internal buffer when reading. pub trait BufRead: Read { /// Returns data read from this reader, filling the internal buffer if needed. + /// + /// # Errors + /// + /// May error if reading fails. fn fill_buf(&mut self) -> Result<&[u8]>; /// Marks the buffered data up to amount as consumed. @@ -92,6 +122,7 @@ pub trait BufRead: Read { /// Reader adapter which limits the bytes read from an underlying reader. /// /// Created by calling `[Read::take]`. +#[derive(Debug)] pub struct Take<'a, R: Read + ?Sized> { reader: &'a mut R, remaining: u64, @@ -99,6 +130,12 @@ pub struct Take<'a, R: Read + ?Sized> { impl Take<'_, R> { /// Reads all bytes until EOF from the underlying reader into `buf`. + /// + /// Allocates space in `buf` as needed. + /// + /// # Returns + /// + /// The number of bytes read if successful or an [`Error`] if reading fails. #[cfg(feature = "alloc")] #[inline] pub fn read_to_end(&mut self, buf: &mut Vec) -> Result { @@ -189,6 +226,7 @@ impl BufRead for &[u8] { } /// Wraps an in memory reader providing the `position` function. +#[derive(Clone, Debug, Default, Eq, PartialEq)] pub struct Cursor { inner: T, pos: u64, @@ -197,18 +235,18 @@ pub struct Cursor { impl> Cursor { /// Constructs a new `Cursor` by wrapping `inner`. #[inline] - pub fn new(inner: T) -> Self { Cursor { inner, pos: 0 } } + pub const fn new(inner: T) -> Self { Cursor { inner, pos: 0 } } /// Returns the position read up to thus far. #[inline] - pub fn position(&self) -> u64 { self.pos } + pub const fn position(&self) -> u64 { self.pos } /// Sets the internal position. /// /// This method allows seeking within the wrapped memory by setting the position. /// /// Note that setting a position that is larger than the buffer length will cause reads to - /// return no bytes (EOF). + /// succeed by reading zero bytes. #[inline] pub fn set_position(&mut self, position: u64) { self.pos = position; } @@ -222,6 +260,19 @@ impl> Cursor { /// /// This is the whole wrapped buffer, including the bytes already read. #[inline] + pub const fn get_ref(&self) -> &T { &self.inner } + + /// Returns a mutable reference to the inner buffer. + /// + /// This is the whole wrapped buffer, including the bytes already read. + #[inline] + pub fn get_mut(&mut self) -> &mut T { &mut self.inner } + + /// Returns a reference to the inner buffer. + /// + /// This is the whole wrapped buffer, including the bytes already read. 
+ #[inline] + #[deprecated(since = "TBD", note = "use `get_ref()` instead")] pub fn inner(&self) -> &T { &self.inner } } @@ -230,6 +281,10 @@ impl> Read for Cursor { fn read(&mut self, buf: &mut [u8]) -> Result { let inner: &[u8] = self.inner.as_ref(); let start_pos = self.pos.try_into().unwrap_or(inner.len()); + if start_pos >= self.inner.as_ref().len() { + return Ok(0); + } + let read = core::cmp::min(inner.len().saturating_sub(start_pos), buf.len()); buf[..read].copy_from_slice(&inner[start_pos..start_pos + read]); self.pos = self.pos.saturating_add(read.try_into().unwrap_or(u64::MAX /* unreachable */)); @@ -251,7 +306,9 @@ impl> BufRead for Cursor { } } -/// A generic trait describing an output stream. See [`std::io::Write`] for more info. +/// A generic trait describing an output stream. +/// +/// See [`std::io::Write`] for more information. pub trait Write { /// Writes `buf` into this writer, returning how many bytes were written. fn write(&mut self, buf: &[u8]) -> Result; @@ -311,9 +368,10 @@ impl Write for &mut [u8] { fn flush(&mut self) -> Result<()> { Ok(()) } } -/// A sink to which all writes succeed. See [`std::io::Sink`] for more info. +/// A sink to which all writes succeed. /// -/// Created using `io::sink()`. +/// Created using [`sink()`]. See [`std::io::Sink`] for more information. +#[derive(Clone, Copy, Debug, Default)] pub struct Sink; impl Write for Sink { @@ -327,18 +385,20 @@ impl Write for Sink { fn flush(&mut self) -> Result<()> { Ok(()) } } -/// Returns a sink to which all writes succeed. See [`std::io::sink`] for more info. +/// Returns a sink to which all writes succeed. +/// +/// See [`std::io::sink`] for more information. #[inline] pub fn sink() -> Sink { Sink } -/// Wraps a `std` IO type to implement the traits from this crate. +/// Wraps a `std` I/O type to implement the traits from this crate. /// /// All methods are passed through converting the errors. #[cfg(feature = "std")] #[inline] pub const fn from_std(std_io: T) -> FromStd { FromStd::new(std_io) } -/// Wraps a mutable reference to `std` IO type to implement the traits from this crate. +/// Wraps a mutable reference to `std` I/O type to implement the traits from this crate. /// /// All methods are passed through converting the errors. #[cfg(feature = "std")] @@ -397,4 +457,61 @@ mod tests { assert_eq!(read, 2); assert_eq!(&buf, "16".as_bytes()) } + + #[test] + #[cfg(feature = "std")] + fn set_position_past_end_read_returns_eof() { + const BUF_LEN: usize = 64; // Just a small buffer. + let mut buf = [0_u8; BUF_LEN]; // We never actually write to this buffer. + + let v = [1_u8; BUF_LEN]; + + // Sanity check the stdlib Cursor's behaviour. + let mut c = std::io::Cursor::new(v); + for pos in [BUF_LEN, BUF_LEN + 1, BUF_LEN * 2] { + c.set_position(pos as u64); + let read = c.read(&mut buf).unwrap(); + assert_eq!(read, 0); + assert_eq!(buf[0], 0x00); // Double check that buffer state is sane. + } + + let mut c = Cursor::new(v); + for pos in [BUF_LEN, BUF_LEN + 1, BUF_LEN * 2] { + c.set_position(pos as u64); + let read = c.read(&mut buf).unwrap(); + assert_eq!(read, 0); + assert_eq!(buf[0], 0x00); // Double check that buffer state is sane. 
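A usage sketch of the denial-of-service protection pattern the docs above describe: `take` caps how much an untrusted reader can make the caller allocate (the 1024-byte limit and the function name are illustrative; `std` is assumed for `Vec`):

```rust
use bitcoin_io::Read;

fn read_bounded(reader: &mut impl Read) -> bitcoin_io::Result<Vec<u8>> {
    // At most 1024 bytes end up in `buf`, no matter how much the reader offers.
    let mut limited = reader.take(1024);
    let mut buf = Vec::new();
    limited.read_to_end(&mut buf)?;
    Ok(buf)
}
```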
+ } + } + + #[test] + fn read_into_zero_length_buffer() { + use crate::Read as _; + + const BUF_LEN: usize = 64; + let data = [1_u8; BUF_LEN]; + let mut buf = [0_u8; BUF_LEN]; + + let mut slice = data.as_ref(); + let mut take = Read::take(&mut slice, 32); + + let read = take.read(&mut buf[0..0]).unwrap(); + assert_eq!(read, 0); + assert_eq!(buf[0], 0x00); // Check the buffer didn't get touched. + } + + #[test] + #[cfg(feature = "alloc")] + fn take_and_read_to_end() { + const BUF_LEN: usize = 64; + let data = [1_u8; BUF_LEN]; + + let mut slice = data.as_ref(); + let mut take = Read::take(&mut slice, 32); + + let mut v = Vec::new(); + let read = take.read_to_end(&mut v).unwrap(); + assert_eq!(read, 32); + assert_eq!(data[0..32], v[0..32]); + } } diff --git a/io/src/macros.rs b/io/src/macros.rs index 5fae2a7508..55df18aa73 100644 --- a/io/src/macros.rs +++ b/io/src/macros.rs @@ -1,14 +1,9 @@ -//! Public macros for porvide.d for users to be able implement our `io::Write` trait. +// SPDX-License-Identifier: CC0-1.0 -#[macro_export] -/// Because we cannot provide a blanket implementation of [`std::io::Write`] for all implementers -/// of this crate's `io::Write` trait, we provide this macro instead. -/// -/// This macro will implement `Write` given a `write` and `flush` fn, either by implementing the -/// crate's native `io::Write` trait directly, or a more generic trait from `std` for users using -/// that feature. In any case, this crate's `io::Write` feature will be implemented for the given -/// type, even if indirectly. +/// Implements [`crate::Write`] for `$ty`. +// See below for docs (docs.rs build enables all features). #[cfg(not(feature = "std"))] +#[macro_export] macro_rules! impl_write { ($ty: ty, $write_fn: expr, $flush_fn: expr $(, $bounded_ty: ident : $bounds: path),*) => { impl<$($bounded_ty: $bounds),*> $crate::Write for $ty { @@ -24,15 +19,18 @@ macro_rules! impl_write { } } -#[macro_export] -/// Because we cannot provide a blanket implementation of [`std::io::Write`] for all implementers -/// of this crate's `io::Write` trait, we provide this macro instead. +/// Implements [`crate::Write`] for `$ty`. /// -/// This macro will implement `Write` given a `write` and `flush` fn, either by implementing the -/// crate's native `io::Write` trait directly, or a more generic trait from `std` for users using -/// that feature. In any case, this crate's `io::Write` feature will be implemented for the given -/// type, even if indirectly. +/// Also implements [`std::io::Write`] for `$ty` if `bitcoin_io` has the `std` feature enabled. +/// +/// # Arguments +/// +/// * `$ty` - the type used to implement the two traits. +/// * `write_fn` - the function called by the `Write::write` trait method. +/// * `flush_fn` - the function called by the `Write::flush` trait method. +/// * `$bounded_ty: $bounds` - optional trait bounds if required. #[cfg(feature = "std")] +#[macro_export] macro_rules! impl_write { ($ty: ty, $write_fn: expr, $flush_fn: expr $(, $bounded_ty: ident : $bounds: path),*) => { impl<$($bounded_ty: $bounds),*> std::io::Write for $ty { diff --git a/io/tests/api.rs b/io/tests/api.rs new file mode 100644 index 0000000000..efc64f2e69 --- /dev/null +++ b/io/tests/api.rs @@ -0,0 +1,144 @@ +//! Test the API surface of `io`. +//! +//! The point of these tests are to check the API surface as opposed to test the API functionality. +//! +//! 
ref: + +#![allow(dead_code)] +#![allow(unused_imports)] + +use core::cell::Cell; +use core::convert::Infallible; + +// These imports test "typical" usage by user code. +use bitcoin_io::{self as io, BufRead, Cursor, ErrorKind, Read, Sink, Take, Write}; +#[cfg(feature = "std")] +use bitcoin_io::{FromStd, ToStd}; + +/// An arbitrary error kind. +const ERROR_KIND: ErrorKind = ErrorKind::TimedOut; + +/// A struct that includes all public non-error enums. +#[derive(Debug)] // All public types implement Debug (C-DEBUG). +struct Enums { + a: ErrorKind, +} + +impl Enums { + /// Creates an arbitrary `Enums` instance. + fn new() -> Self { Self { a: ERROR_KIND } } +} + +/// A struct that includes all public non-error structs except `Take`. +#[derive(Debug)] // All public types implement Debug (C-DEBUG). +struct Structs { + #[cfg(feature = "std")] + a: FromStd, + #[cfg(feature = "std")] + b: ToStd, + c: Cursor, + d: Sink, +} + +impl Structs { + fn new() -> Self { + Self { + #[cfg(feature = "std")] + a: FromStd::new(0), + #[cfg(feature = "std")] + b: ToStd::new(DUMMY), + c: Cursor::new(DUMMY), + d: Sink, + } + } +} + +#[derive(Debug)] // `Take` implements Debug (C-DEBUG). +struct Taker<'a> { + a: Take<'a, Dummy>, +} + +/// An arbitrary `Dummy` instance. +static DUMMY: Dummy = Dummy(0); + +/// Dummy struct to implement all the traits we provide. +#[derive(Debug, Copy, Clone)] +struct Dummy(u64); + +impl Read for Dummy { + fn read(&mut self, buf: &mut [u8]) -> Result { + if buf.is_empty() { + Ok(0) + } else { + buf[0] = (self.0 & 0xFF) as u8; + Ok(1) + } + } +} + +impl BufRead for Dummy { + fn fill_buf(&mut self) -> Result<&[u8], io::Error> { Ok(&[]) } + fn consume(&mut self, _: usize) {} +} + +impl Write for Dummy { + fn write(&mut self, buf: &[u8]) -> Result { Ok(buf.len()) } + fn write_all(&mut self, _: &[u8]) -> Result<(), io::Error> { Ok(()) } + fn flush(&mut self) -> Result<(), io::Error> { Ok(()) } +} + +impl AsRef<[u8]> for Dummy { + fn as_ref(&self) -> &[u8] { &[] } +} + +/// A struct that includes all public non-error types. +#[derive(Debug)] // All public types implement Debug (C-DEBUG). +struct Types { + a: Enums, + b: Structs, +} + +impl Types { + fn new() -> Self { Self { a: Enums::new(), b: Structs::new() } } +} + +/// A struct that includes all public error types. +#[derive(Debug)] // `io::Error` only implements `Debug`. +struct Errors { + a: io::Error, +} + +// `Debug` representation is never empty (C-DEBUG-NONEMPTY). +#[test] +fn api_all_non_error_types_have_non_empty_debug() { + let t = Types::new(); + + let debug = format!("{:?}", t.a.a); + assert!(!debug.is_empty()); + + #[cfg(feature = "std")] + { + let debug = format!("{:?}", t.b.a); + assert!(!debug.is_empty()); + let debug = format!("{:?}", t.b.b); + assert!(!debug.is_empty()); + } + let debug = format!("{:?}", t.b.c); + assert!(!debug.is_empty()); + let debug = format!("{:?}", t.b.d); + assert!(!debug.is_empty()); +} + +#[test] +fn all_non_error_tyes_implement_send_sync() { + fn assert_send() {} + fn assert_sync() {} + + // Types are `Send` and `Sync` where possible (C-SEND-SYNC). 
+ assert_send::(); + assert_sync::(); + + // Error types are meaningful and well-behaved (C-GOOD-ERR) + assert_send::(); + assert_sync::(); +} diff --git a/justfile b/justfile index ce7af6170d..0be4cd940a 100644 --- a/justfile +++ b/justfile @@ -1,3 +1,5 @@ +set positional-arguments + default: @just --list @@ -36,6 +38,14 @@ sane: lint # Make an attempt to catch feature gate problems in doctests cargo test --manifest-path bitcoin/Cargo.toml --doc --no-default-features > /dev/null || exit 1 +# Check for API changes. +check-api: + contrib/check-for-api-changes.sh + +# Query the current API. +@query-api crate command: + contrib/api.sh $1 $2 + # Update the recent and minimal lock files. update-lock-files: contrib/update-lock-files.sh diff --git a/nightly-version b/nightly-version index 403284ecea..ba747383dc 100644 --- a/nightly-version +++ b/nightly-version @@ -1 +1 @@ -nightly-2024-11-17 +nightly-2025-03-21 diff --git a/primitives/Cargo.toml b/primitives/Cargo.toml index 486f3fa524..b8b0bdd4c5 100644 --- a/primitives/Cargo.toml +++ b/primitives/Cargo.toml @@ -7,7 +7,7 @@ authors = ["Andrew Poelstra "] license = "CC0-1.0" repository = "https://github.com/rust-bitcoin/rust-bitcoin" description = "Primitive types used by the rust-bitcoin ecosystem" -categories = ["cryptography::cryptocurrencies"] +categories = ["cryptography::cryptocurrencies", "no-std"] keywords = ["bitcoin", "types"] readme = "README.md" edition = "2021" @@ -16,32 +16,152 @@ exclude = ["tests", "contrib"] [features] default = ["std"] -std = ["alloc", "hashes/std", "hex/std", "internals/std", "io/std", "units/std"] -alloc = ["hashes/alloc", "hex/alloc", "internals/alloc", "io/alloc", "units/alloc"] +std = ["alloc", "hashes/std", "hex/std", "internals/std", "units/std"] +alloc = ["hashes/alloc", "hex/alloc", "internals/alloc", "units/alloc"] serde = ["dep:serde", "hashes/serde", "internals/serde", "units/serde", "alloc"] arbitrary = ["dep:arbitrary", "units/arbitrary"] [dependencies] -hashes = { package = "bitcoin_hashes", version = "0.15.0", default-features = false, features = ["bitcoin-io", "hex"] } +hashes = { package = "bitcoin_hashes", version = "0.16.0", default-features = false, features = ["hex"] } hex = { package = "hex-conservative", version = "0.3.0", default-features = false } -internals = { package = "bitcoin-internals", version = "0.4.0" } -io = { package = "bitcoin-io", version = "0.2.0", default-features = false } +internals = { package = "bitcoin-internals", version = "0.4.0", features = ["hex"] } units = { package = "bitcoin-units", version = "0.2.0", default-features = false } -arbitrary = { version = "1", optional = true } -ordered = { version = "0.2.0", optional = true } +arbitrary = { version = "1.4", optional = true } serde = { version = "1.0.103", default-features = false, features = ["derive", "alloc"], optional = true } [dev-dependencies] serde_json = "1.0.0" bincode = "1.3.1" -[target.'cfg(mutate)'.dev-dependencies] -mutagen = { git = "https://github.com/llogiq/mutagen" } - [package.metadata.docs.rs] all-features = true rustdoc-args = ["--cfg", "docsrs"] -[lints.rust] -unexpected_cfgs = { level = "deny", check-cfg = ['cfg(mutate)'] } +[lints.clippy] +# Exclude lints we don't think are valuable. +needless_question_mark = "allow" # https://github.com/rust-bitcoin/rust-bitcoin/pull/2134 +manual_range_contains = "allow" # More readable than clippy's format. 
+# Exhaustive list of pedantic clippy lints +assigning_clones = "warn" +bool_to_int_with_if = "warn" +borrow_as_ptr = "warn" +case_sensitive_file_extension_comparisons = "warn" +cast_lossless = "warn" +cast_possible_truncation = "allow" # All casts should include a code comment (except test code). +cast_possible_wrap = "allow" # Same as above re code comment. +cast_precision_loss = "warn" +cast_ptr_alignment = "warn" +cast_sign_loss = "allow" # All casts should include a code comment (except in test code). +checked_conversions = "warn" +cloned_instead_of_copied = "warn" +copy_iterator = "warn" +default_trait_access = "warn" +doc_link_with_quotes = "warn" +doc_markdown = "warn" +empty_enum = "warn" +enum_glob_use = "warn" +expl_impl_clone_on_copy = "warn" +explicit_deref_methods = "warn" +explicit_into_iter_loop = "warn" +explicit_iter_loop = "warn" +filter_map_next = "warn" +flat_map_option = "warn" +float_cmp = "allow" # Bitcoin floats are typically limited to 8 decimal places and we want them exact. +fn_params_excessive_bools = "warn" +from_iter_instead_of_collect = "warn" +if_not_else = "warn" +ignored_unit_patterns = "warn" +implicit_clone = "warn" +implicit_hasher = "warn" +inconsistent_struct_constructor = "warn" +index_refutable_slice = "warn" +inefficient_to_string = "warn" +inline_always = "warn" +into_iter_without_iter = "warn" +invalid_upcast_comparisons = "warn" +items_after_statements = "warn" +iter_filter_is_ok = "warn" +iter_filter_is_some = "warn" +iter_not_returning_iterator = "warn" +iter_without_into_iter = "warn" +large_digit_groups = "warn" +large_futures = "warn" +large_stack_arrays = "warn" +large_types_passed_by_value = "warn" +linkedlist = "warn" +macro_use_imports = "warn" +manual_assert = "warn" +manual_instant_elapsed = "warn" +manual_is_power_of_two = "warn" +manual_is_variant_and = "warn" +manual_let_else = "warn" +manual_ok_or = "warn" +manual_string_new = "warn" +many_single_char_names = "warn" +map_unwrap_or = "warn" +match_bool = "allow" # Adds extra indentation and LOC. +match_on_vec_items = "warn" +match_same_arms = "allow" # Collapses things that are conceptually unrelated to each other. +match_wild_err_arm = "warn" +match_wildcard_for_single_variants = "warn" +maybe_infinite_iter = "warn" +mismatching_type_param_order = "warn" +missing_errors_doc = "allow" # TODO: Write errors section in docs. +missing_fields_in_debug = "warn" +missing_panics_doc = "warn" +must_use_candidate = "allow" # Useful for audit but many false positives. +mut_mut = "warn" +naive_bytecount = "warn" +needless_bitwise_bool = "warn" +needless_continue = "warn" +needless_for_each = "warn" +needless_pass_by_value = "warn" +needless_raw_string_hashes = "warn" +no_effect_underscore_binding = "warn" +no_mangle_with_rust_abi = "warn" +option_as_ref_cloned = "warn" +option_option = "warn" +ptr_as_ptr = "warn" +ptr_cast_constness = "warn" +pub_underscore_fields = "warn" +range_minus_one = "warn" +range_plus_one = "warn" +redundant_closure_for_method_calls = "warn" +redundant_else = "warn" +ref_as_ptr = "warn" +ref_binding_to_reference = "warn" +ref_option = "warn" +ref_option_ref = "warn" +return_self_not_must_use = "warn" +same_functions_in_if_condition = "warn" +semicolon_if_nothing_returned = "warn" +should_panic_without_expect = "warn" +similar_names = "allow" # Too many (subjectively) false positives. 
+single_char_pattern = "warn"
+single_match_else = "warn"
+stable_sort_primitive = "warn"
+str_split_at_newline = "warn"
+string_add_assign = "warn"
+struct_excessive_bools = "warn"
+struct_field_names = "allow" # TODO: Triggers warning for `witness_elements`.
+too_many_lines = "warn"
+transmute_ptr_to_ptr = "warn"
+trivially_copy_pass_by_ref = "warn"
+unchecked_duration_subtraction = "warn"
+unicode_not_nfc = "warn"
+uninlined_format_args = "allow" # This is a subjective style choice.
+unnecessary_box_returns = "warn"
+unnecessary_join = "warn"
+unnecessary_literal_bound = "warn"
+unnecessary_wraps = "warn"
+unnested_or_patterns = "allow" # TODO
+unreadable_literal = "warn"
+unsafe_derive_deserialize = "warn"
+unused_async = "warn"
+unused_self = "warn"
+used_underscore_binding = "warn"
+used_underscore_items = "warn"
+verbose_bit_mask = "warn"
+wildcard_imports = "warn"
+zero_sized_map_values = "warn"
diff --git a/primitives/README.md b/primitives/README.md
index 6eaae88684..f6c32ef68c 100644
--- a/primitives/README.md
+++ b/primitives/README.md
@@ -14,5 +14,5 @@ This library should always compile with any combination of features on **Rust 1.
 
 ## Licensing
 
-The code in this project is licensed under the [Creative Commons CC0 1.0 Universal license](LICENSE).
+The code in this project is licensed under the [Creative Commons CC0 1.0 Universal license](../LICENSE).
 We use the [SPDX license list](https://spdx.org/licenses/) and [SPDX IDs](https://spdx.dev/ids/).
diff --git a/primitives/contrib/test_vars.sh b/primitives/contrib/test_vars.sh
index ef4753dd18..159713bb9e 100644
--- a/primitives/contrib/test_vars.sh
+++ b/primitives/contrib/test_vars.sh
@@ -5,10 +5,10 @@
 # shellcheck disable=SC2034
 
 # Test these features with "std" enabled.
-FEATURES_WITH_STD="ordered serde arbitrary"
+FEATURES_WITH_STD="serde arbitrary"
 
 # Test these features without "std" enabled.
-FEATURES_WITHOUT_STD="alloc ordered serde arbitrary"
+FEATURES_WITHOUT_STD="alloc serde arbitrary"
 
 # Run these examples.
 EXAMPLES=""
diff --git a/primitives/src/block.rs b/primitives/src/block.rs
index 22d31c9ed6..a4d49b0ae5 100644
--- a/primitives/src/block.rs
+++ b/primitives/src/block.rs
@@ -14,6 +14,7 @@ use core::marker::PhantomData;
 #[cfg(feature = "arbitrary")]
 use arbitrary::{Arbitrary, Unstructured};
 use hashes::{sha256d, HashEngine as _};
+use units::BlockTime;
 
 use crate::merkle_tree::TxMerkleNode;
 #[cfg(feature = "alloc")]
@@ -70,6 +71,7 @@ where
 #[cfg(feature = "alloc")]
 impl Block {
     /// Constructs a new `Block` without doing any validation.
+    #[inline]
     pub fn new_unchecked(header: Header, transactions: Vec<Transaction>) -> Block {
         Block { header, transactions, witness_root: None, marker: PhantomData::<Unchecked> }
     }
@@ -77,6 +79,8 @@ impl Block {
     /// Ignores block validation logic and just assumes you know what you are doing.
     ///
     /// You should only use this function if you trust the block i.e., it comes from a trusted node.
+    #[must_use]
+    #[inline]
     pub fn assume_checked(self, witness_root: Option<WitnessMerkleNode>) -> Block<Checked> {
         Block {
             header: self.header,
@@ -87,37 +91,44 @@
     }
 
     /// Decomposes block into its constituent parts.
+    #[inline]
     pub fn into_parts(self) -> (Header, Vec<Transaction>) { (self.header, self.transactions) }
 }
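The split between this impl block and the one that follows is the point of the validation marker: construction and decomposition are only available on unchecked blocks, while the getters below are only available once the block is marked checked. A usage sketch, assuming the `alloc` feature and the crate-root re-exports from lib.rs (the function itself is hypothetical):

    use bitcoin_primitives::{Block, BlockHeader};

    fn inspect(header: BlockHeader) {
        let block = Block::new_unchecked(header, vec![]);
        let _hash = block.block_hash();
        // Only assume validity for blocks that come from a trusted source.
        let checked = block.assume_checked(None);
        let _tx_count = checked.transactions().len();
        let _root = checked.cached_witness_root(); // `None`, nothing was cached above.
    }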
 #[cfg(feature = "alloc")]
 impl Block<Checked> {
     /// Gets a reference to the block header.
+    #[inline]
     pub fn header(&self) -> &Header { &self.header }
 
     /// Gets a reference to the block's list of transactions.
+    #[inline]
     pub fn transactions(&self) -> &[Transaction] { &self.transactions }
 
     /// Returns the cached witness root if one is present.
     ///
     /// It is assumed that a block will have the witness root calculated and cached as part of the
     /// validation process.
+    #[inline]
     pub fn cached_witness_root(&self) -> Option<WitnessMerkleNode> { self.witness_root }
 }
 
 #[cfg(feature = "alloc")]
 impl Block {
     /// Returns the block hash.
+    #[inline]
     pub fn block_hash(&self) -> BlockHash { self.header.block_hash() }
 }
 
 #[cfg(feature = "alloc")]
 impl From<Block> for BlockHash {
+    #[inline]
     fn from(block: Block) -> BlockHash { block.block_hash() }
 }
 
 #[cfg(feature = "alloc")]
 impl From<&Block> for BlockHash {
+    #[inline]
     fn from(block: &Block) -> BlockHash { block.block_hash() }
 }
 
@@ -169,7 +180,7 @@ pub struct Header {
     /// The root hash of the Merkle tree of transactions in the block.
     pub merkle_root: TxMerkleNode,
     /// The timestamp of the block, as claimed by the miner.
-    pub time: u32,
+    pub time: BlockTime,
     /// The target value below which the blockhash must lie.
     pub bits: CompactTarget,
     /// The nonce, selected to obtain a low enough blockhash.
@@ -188,7 +199,7 @@ impl Header {
         engine.input(&self.version.to_consensus().to_le_bytes());
         engine.input(self.prev_blockhash.as_byte_array());
         engine.input(self.merkle_root.as_byte_array());
-        engine.input(&self.time.to_le_bytes());
+        engine.input(&self.time.to_u32().to_le_bytes());
         engine.input(&self.bits.to_consensus().to_le_bytes());
         engine.input(&self.nonce.to_le_bytes());
@@ -211,10 +222,12 @@ impl fmt::Debug for Header {
 }
 
 impl From<Header>
for BlockHash { + #[inline] fn from(header: Header) -> BlockHash { header.block_hash() } } impl From<&Header> for BlockHash { + #[inline] fn from(header: &Header) -> BlockHash { header.block_hash() } } @@ -262,13 +275,14 @@ impl Version { /// Returns the inner `i32` value. /// /// This is the data type used in consensus code in Bitcoin Core. + #[inline] pub fn to_consensus(self) -> i32 { self.0 } /// Checks whether the version number is signalling a soft fork at the given bit. /// /// A block is signalling for a soft fork under BIP-9 if the first 3 bits are `001` and /// the version bit for the specific soft fork is toggled on. - pub fn is_signalling_soft_fork(&self, bit: u8) -> bool { + pub fn is_signalling_soft_fork(self, bit: u8) -> bool { // Only bits [0, 28] inclusive are used for signalling. if bit > 28 { return false; @@ -285,6 +299,7 @@ impl Version { } impl Default for Version { + #[inline] fn default() -> Version { Self::NO_SOFT_FORK_SIGNALLING } } @@ -346,29 +361,175 @@ impl<'a> Arbitrary<'a> for Version { mod tests { use super::*; + fn dummy_header() -> Header { + Header { + version: Version::ONE, + prev_blockhash: BlockHash::from_byte_array([0x99; 32]), + merkle_root: TxMerkleNode::from_byte_array([0x77; 32]), + time: BlockTime::from(2), + bits: CompactTarget::from_consensus(3), + nonce: 4, + } + } + #[test] - fn test_version_is_not_signalling_with_invalid_bit() { - let arbitrary_version = Version::from_consensus(1234567890); + fn version_is_not_signalling_with_invalid_bit() { + let arbitrary_version = Version::from_consensus(1_234_567_890); // The max bit number to signal is 28. - assert!(!Version::is_signalling_soft_fork(&arbitrary_version, 29)); + assert!(!Version::is_signalling_soft_fork(arbitrary_version, 29)); } #[test] - fn test_version_is_not_signalling_when_use_version_bit_not_set() { - let version = Version::from_consensus(0b01000000000000000000000000000000); + fn version_is_not_signalling_when_use_version_bit_not_set() { + let version = Version::from_consensus(0b0100_0000_0000_0000_0000_0000_0000_0000); // Top three bits must be 001 to signal. - assert!(!Version::is_signalling_soft_fork(&version, 1)); + assert!(!Version::is_signalling_soft_fork(version, 1)); + } + + #[test] + fn version_is_signalling() { + let version = Version::from_consensus(0b0010_0000_0000_0000_0000_0000_0000_0010); + assert!(Version::is_signalling_soft_fork(version, 1)); + let version = Version::from_consensus(0b0011_0000_0000_0000_0000_0000_0000_0000); + assert!(Version::is_signalling_soft_fork(version, 28)); + } + + #[test] + fn version_is_not_signalling() { + let version = Version::from_consensus(0b0010_0000_0000_0000_0000_0000_0000_0010); + assert!(!Version::is_signalling_soft_fork(version, 0)); + } + + #[test] + fn version_to_consensus() { + let version = Version::from_consensus(1_234_567_890); + assert_eq!(version.to_consensus(), 1_234_567_890); + } + + #[test] + fn version_default() { + let version = Version::default(); + assert_eq!(version.to_consensus(), Version::NO_SOFT_FORK_SIGNALLING.to_consensus()); + } + + // Check that the size of the header consensus serialization matches the const SIZE value + #[test] + fn header_size() { + let header = dummy_header(); + + // Calculate the size of the block header in bytes from the sum of the serialized lengths + // it's fields: version, prev_blockhash, merkle_root, time, bits, nonce. 
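For reference, those six fields serialize to 4 + 32 + 32 + 4 + 4 + 4 bytes respectively, so the sum computed below should come out to the consensus 80-byte header size that `Header::SIZE` pins down.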
+ let header_size = header.version.to_consensus().to_le_bytes().len() + + header.prev_blockhash.as_byte_array().len() + + header.merkle_root.as_byte_array().len() + + header.time.to_u32().to_le_bytes().len() + + header.bits.to_consensus().to_le_bytes().len() + + header.nonce.to_le_bytes().len(); + + assert_eq!(header_size, Header::SIZE); + } + + #[test] + #[cfg(feature = "alloc")] + fn block_new_unchecked() { + let header = dummy_header(); + let transactions = vec![]; + let block = Block::new_unchecked(header, transactions.clone()); + assert_eq!(block.header, header); + assert_eq!(block.transactions, transactions); + } + + #[test] + #[cfg(feature = "alloc")] + fn block_assume_checked() { + let header = dummy_header(); + let transactions = vec![]; + let block = Block::new_unchecked(header, transactions.clone()); + let witness_root = Some(WitnessMerkleNode::from_byte_array([0x88; 32])); + let checked_block = block.assume_checked(witness_root); + assert_eq!(checked_block.header(), &header); + assert_eq!(checked_block.transactions(), &transactions); + assert_eq!(checked_block.cached_witness_root(), witness_root); + } + + #[test] + #[cfg(feature = "alloc")] + fn block_into_parts() { + let header = dummy_header(); + let transactions = vec![]; + let block = Block::new_unchecked(header, transactions.clone()); + let (block_header, block_transactions) = block.into_parts(); + assert_eq!(block_header, header); + assert_eq!(block_transactions, transactions); + } + + #[test] + #[cfg(feature = "alloc")] + fn block_cached_witness_root() { + let header = dummy_header(); + let transactions = vec![]; + let block = Block::new_unchecked(header, transactions); + let witness_root = Some(WitnessMerkleNode::from_byte_array([0x88; 32])); + let checked_block = block.assume_checked(witness_root); + assert_eq!(checked_block.cached_witness_root(), witness_root); + } + + #[test] + #[cfg(feature = "alloc")] + fn block_block_hash() { + let header = dummy_header(); + let transactions = vec![]; + let block = Block::new_unchecked(header, transactions); + assert_eq!(block.block_hash(), header.block_hash()); + } + + #[test] + fn block_hash_from_header() { + let header = dummy_header(); + let block_hash = header.block_hash(); + assert_eq!(block_hash, BlockHash::from(header)); + } + + #[test] + fn block_hash_from_header_ref() { + let header = dummy_header(); + let block_hash: BlockHash = BlockHash::from(&header); + assert_eq!(block_hash, header.block_hash()); + } + + #[test] + #[cfg(feature = "alloc")] + fn block_hash_from_block() { + let header = dummy_header(); + let transactions = vec![]; + let block = Block::new_unchecked(header, transactions); + let block_hash: BlockHash = BlockHash::from(block); + assert_eq!(block_hash, header.block_hash()); } #[test] - fn test_version_is_signalling() { - let version = Version::from_consensus(0b00100000000000000000000000000010); - assert!(Version::is_signalling_soft_fork(&version, 1)); + #[cfg(feature = "alloc")] + fn block_hash_from_block_ref() { + let header = dummy_header(); + let transactions = vec![]; + let block = Block::new_unchecked(header, transactions); + let block_hash: BlockHash = BlockHash::from(&block); + assert_eq!(block_hash, header.block_hash()); } #[test] - fn test_version_is_not_signalling() { - let version = Version::from_consensus(0b00100000000000000000000000000010); - assert!(!Version::is_signalling_soft_fork(&version, 0)); + fn header_debug() { + let header = dummy_header(); + let expected = format!( + "Header {{ block_hash: {:?}, version: {:?}, prev_blockhash: {:?}, 
merkle_root: {:?}, time: {:?}, bits: {:?}, nonce: {:?} }}", + header.block_hash(), + header.version, + header.prev_blockhash, + header.merkle_root, + header.time, + header.bits, + header.nonce + ); + assert_eq!(format!("{:?}", header), expected); } } diff --git a/primitives/src/lib.rs b/primitives/src/lib.rs index b8a5204de6..f9dafc05a2 100644 --- a/primitives/src/lib.rs +++ b/primitives/src/lib.rs @@ -1,6 +1,6 @@ // SPDX-License-Identifier: CC0-1.0 -//! # Rust Bitcoin - primitive types. +//! # Rust Bitcoin - primitive types //! //! Primitive data types that are used throughout the [`rust-bitcoin`] ecosystem. //! @@ -16,9 +16,6 @@ #![warn(missing_docs)] #![warn(deprecated_in_future)] #![doc(test(attr(warn(unused))))] -// Exclude lints we don't think are valuable. -#![allow(clippy::needless_question_mark)] // https://github.com/rust-bitcoin/rust-bitcoin/pull/2134 -#![allow(clippy::manual_range_contains)] // More readable than clippy's format. #[cfg(feature = "alloc")] extern crate alloc; @@ -30,27 +27,32 @@ extern crate std; #[macro_use] extern crate serde; +#[doc(hidden)] +pub mod _export { + /// A re-export of `core::*`. + pub mod _core { + pub use core::*; + } +} + pub mod block; -#[cfg(feature = "alloc")] pub mod locktime; pub mod merkle_tree; -pub mod opcodes; +mod opcodes; pub mod pow; #[cfg(feature = "alloc")] pub mod script; pub mod sequence; -pub mod taproot; pub mod transaction; #[cfg(feature = "alloc")] pub mod witness; #[doc(inline)] -pub use units::amount::{self, Amount, SignedAmount}; -#[doc(inline)] -#[cfg(feature = "alloc")] pub use units::{ + amount::{self, Amount, SignedAmount}, block::{BlockHeight, BlockInterval}, fee_rate::{self, FeeRate}, + time::{self, BlockTime}, weight::{self, Weight}, }; @@ -60,18 +62,18 @@ pub use self::{ block::{ Block, Checked as BlockChecked, Unchecked as BlockUnchecked, Validation as BlockValidation, }, - locktime::{absolute, relative}, + script::{Script, ScriptBuf}, transaction::{Transaction, TxIn, TxOut}, witness::Witness, }; #[doc(inline)] pub use self::{ - block::{BlockHash, Header as BlockHeader, WitnessCommitment}, + block::{BlockHash, Header as BlockHeader, Version as BlockVersion, WitnessCommitment}, + locktime::{absolute, relative}, merkle_tree::{TxMerkleNode, WitnessMerkleNode}, pow::CompactTarget, sequence::Sequence, - taproot::{TapBranchTag, TapLeafHash, TapLeafTag, TapNodeHash, TapTweakHash, TapTweakTag}, - transaction::{Txid, Wtxid}, + transaction::{OutPoint, Txid, Version as TransactionVersion, Wtxid}, }; #[rustfmt::skip] diff --git a/primitives/src/locktime/absolute.rs b/primitives/src/locktime/absolute.rs index e54dae56d6..ad03f5102b 100644 --- a/primitives/src/locktime/absolute.rs +++ b/primitives/src/locktime/absolute.rs @@ -1,17 +1,14 @@ // SPDX-License-Identifier: CC0-1.0 -//! Provides type [`LockTime`] that implements the logic around nLockTime/OP_CHECKLOCKTIMEVERIFY. +//! Provides type [`LockTime`] that implements the logic around `nLockTime`/`OP_CHECKLOCKTIMEVERIFY`. //! //! There are two types of lock time: lock-by-blockheight and lock-by-blocktime, distinguished by //! whether `LockTime < LOCKTIME_THRESHOLD`. -use core::cmp::Ordering; use core::fmt; #[cfg(feature = "arbitrary")] use arbitrary::{Arbitrary, Unstructured}; -#[cfg(all(test, mutate))] -use mutagen::mutate; use units::parse::{self, PrefixedHexError, UnprefixedHexError}; #[cfg(all(doc, feature = "alloc"))] @@ -25,16 +22,17 @@ pub use units::locktime::absolute::{ConversionError, Height, ParseHeightError, P /// since epoch). 
/// /// Used for transaction lock time (`nLockTime` in Bitcoin Core and [`Transaction::lock_time`] -/// in this library) and also for the argument to opcode 'OP_CHECKLOCKTIMEVERIFY`. +/// in this library) and also for the argument to opcode `OP_CHECKLOCKTIMEVERIFY`. /// /// ### Note on ordering /// /// Locktimes may be height- or time-based, and these metrics are incommensurate; there is no total -/// ordering on locktimes. We therefore have implemented [`PartialOrd`] but not [`Ord`]. +/// ordering on locktimes. In order to compare locktimes, instead of using `<` or `>` we provide the +/// [`LockTime::is_satisfied_by`] API. +/// /// For [`Transaction`], which has a locktime field, we implement a total ordering to make /// it easy to store transactions in sorted data structures, and use the locktime's 32-bit integer -/// consensus encoding to order it. We also implement [`ordered::ArbitraryOrd`] if the "ordered" -/// feature is enabled. +/// consensus encoding to order it. /// /// ### Relevant BIPs /// @@ -44,13 +42,13 @@ pub use units::locktime::absolute::{ConversionError, Height, ParseHeightError, P /// # Examples /// /// ``` -/// # use bitcoin_primitives::absolute::{LockTime, LockTime::*}; -/// # let n = LockTime::from_consensus(741521); // n OP_CHECKLOCKTIMEVERIFY -/// # let lock_time = LockTime::from_consensus(741521); // nLockTime +/// use bitcoin_primitives::absolute::{self, LockTime as L}; +/// # let n = absolute::LockTime::from_consensus(741521); // n OP_CHECKLOCKTIMEVERIFY +/// # let lock_time = absolute::LockTime::from_consensus(741521); // nLockTime /// // To compare absolute lock times there are various `is_satisfied_*` methods, you may also use: /// let _is_satisfied = match (n, lock_time) { -/// (Blocks(n), Blocks(lock_time)) => n <= lock_time, -/// (Seconds(n), Seconds(lock_time)) => n <= lock_time, +/// (L::Blocks(n), L::Blocks(lock_time)) => n <= lock_time, +/// (L::Seconds(n), L::Seconds(lock_time)) => n <= lock_time, /// _ => panic!("handle invalid comparison error"), /// }; /// ``` @@ -61,10 +59,10 @@ pub enum LockTime { /// # Examples /// /// ```rust - /// use bitcoin_primitives::absolute::LockTime; + /// use bitcoin_primitives::absolute; /// /// let block: u32 = 741521; - /// let n = LockTime::from_height(block).expect("valid height"); + /// let n = absolute::LockTime::from_height(block).expect("valid height"); /// assert!(n.is_block_height()); /// assert_eq!(n.to_consensus_u32(), block); /// ``` @@ -74,10 +72,10 @@ pub enum LockTime { /// # Examples /// /// ```rust - /// use bitcoin_primitives::absolute::LockTime; + /// use bitcoin_primitives::absolute; /// /// let seconds: u32 = 1653195600; // May 22nd, 5am UTC. - /// let n = LockTime::from_time(seconds).expect("valid time"); + /// let n = absolute::LockTime::from_time(seconds).expect("valid time"); /// assert!(n.is_block_time()); /// assert_eq!(n.to_consensus_u32(), seconds); /// ``` @@ -93,29 +91,54 @@ impl LockTime { pub const SIZE: usize = 4; // Serialized length of a u32. /// Constructs a new `LockTime` from a prefixed hex string. 
+ /// + /// # Examples + /// + /// ``` + /// # use bitcoin_primitives::absolute; + /// let hex_str = "0x61cf9980"; // Unix timestamp for January 1, 2022 + /// let lock_time = absolute::LockTime::from_hex(hex_str)?; + /// assert_eq!(lock_time.to_consensus_u32(), 0x61cf9980); + /// + /// # Ok::<_, units::parse::PrefixedHexError>(()) + /// ``` + #[inline] pub fn from_hex(s: &str) -> Result { let lock_time = parse::hex_u32_prefixed(s)?; Ok(Self::from_consensus(lock_time)) } /// Constructs a new `LockTime` from an unprefixed hex string. + /// + /// # Examples + /// + /// ``` + /// # use bitcoin_primitives::absolute; + /// let hex_str = "61cf9980"; // Unix timestamp for January 1, 2022 + /// let lock_time = absolute::LockTime::from_unprefixed_hex(hex_str)?; + /// assert_eq!(lock_time.to_consensus_u32(), 0x61cf9980); + /// + /// # Ok::<_, units::parse::UnprefixedHexError>(()) + /// ``` + #[inline] pub fn from_unprefixed_hex(s: &str) -> Result { let lock_time = parse::hex_u32_unprefixed(s)?; Ok(Self::from_consensus(lock_time)) } - /// Constructs a new `LockTime` from an nLockTime value or the argument to OP_CHEKCLOCKTIMEVERIFY. + /// Constructs a new `LockTime` from an `nLockTime` value or the argument to `OP_CHEKCLOCKTIMEVERIFY`. /// /// # Examples /// /// ```rust - /// # use bitcoin_primitives::absolute::LockTime; + /// # use bitcoin_primitives::absolute; /// /// // `from_consensus` roundtrips as expected with `to_consensus_u32`. /// let n_lock_time: u32 = 741521; - /// let lock_time = LockTime::from_consensus(n_lock_time); + /// let lock_time = absolute::LockTime::from_consensus(n_lock_time); /// assert_eq!(lock_time.to_consensus_u32(), n_lock_time); #[inline] + #[allow(clippy::missing_panics_doc)] pub fn from_consensus(n: u32) -> Self { if units::locktime::absolute::is_block_height(n) { Self::Blocks(Height::from_consensus(n).expect("n is valid")) @@ -137,9 +160,9 @@ impl LockTime { /// # Examples /// /// ```rust - /// # use bitcoin_primitives::absolute::LockTime; - /// assert!(LockTime::from_height(741521).is_ok()); - /// assert!(LockTime::from_height(1653195600).is_err()); + /// # use bitcoin_primitives::absolute; + /// assert!(absolute::LockTime::from_height(741521).is_ok()); + /// assert!(absolute::LockTime::from_height(1653195600).is_err()); /// ``` #[inline] pub fn from_height(n: u32) -> Result { @@ -163,9 +186,9 @@ impl LockTime { /// # Examples /// /// ```rust - /// # use bitcoin_primitives::absolute::LockTime; - /// assert!(LockTime::from_time(1653195600).is_ok()); - /// assert!(LockTime::from_time(741521).is_err()); + /// # use bitcoin_primitives::absolute; + /// assert!(absolute::LockTime::from_time(1653195600).is_ok()); + /// assert!(absolute::LockTime::from_time(741521).is_err()); /// ``` #[inline] pub fn from_time(n: u32) -> Result { @@ -175,7 +198,7 @@ impl LockTime { /// Returns true if both lock times use the same unit i.e., both height based or both time based. #[inline] - pub const fn is_same_unit(&self, other: LockTime) -> bool { + pub const fn is_same_unit(self, other: LockTime) -> bool { matches!( (self, other), (LockTime::Blocks(_), LockTime::Blocks(_)) @@ -185,11 +208,11 @@ impl LockTime { /// Returns true if this lock time value is a block height. #[inline] - pub const fn is_block_height(&self) -> bool { matches!(*self, LockTime::Blocks(_)) } + pub const fn is_block_height(self) -> bool { matches!(self, LockTime::Blocks(_)) } /// Returns true if this lock time value is a block time (UNIX timestamp). 
#[inline] - pub const fn is_block_time(&self) -> bool { !self.is_block_height() } + pub const fn is_block_time(self) -> bool { !self.is_block_height() } /// Returns true if this timelock constraint is satisfied by the respective `height`/`time`. /// @@ -197,30 +220,29 @@ impl LockTime { /// blocktime based lock it is checked against `time`. /// /// A 'timelock constraint' refers to the `n` from `n OP_CHEKCLOCKTIMEVERIFY`, this constraint - /// is satisfied if a transaction with nLockTime ([`Transaction::lock_time`]) set to + /// is satisfied if a transaction with `nLockTime` ([`Transaction::lock_time`]) set to /// `height`/`time` is valid. /// /// # Examples /// /// ```no_run - /// # use bitcoin_primitives::absolute::{LockTime, Height, Time}; + /// # use bitcoin_primitives::absolute; /// // Can be implemented if block chain data is available. - /// fn get_height() -> Height { todo!("return the current block height") } - /// fn get_time() -> Time { todo!("return the current block time") } + /// fn get_height() -> absolute::Height { todo!("return the current block height") } + /// fn get_time() -> absolute::Time { todo!("return the current block time") } /// - /// let n = LockTime::from_consensus(741521); // `n OP_CHEKCLOCKTIMEVERIFY`. + /// let n = absolute::LockTime::from_consensus(741521); // `n OP_CHEKCLOCKTIMEVERIFY`. /// if n.is_satisfied_by(get_height(), get_time()) { /// // Can create and mine a transaction that satisfies the OP_CLTV timelock constraint. /// } /// ```` #[inline] - #[cfg_attr(all(test, mutate), mutate)] - pub fn is_satisfied_by(&self, height: Height, time: Time) -> bool { - use LockTime::*; + pub fn is_satisfied_by(self, height: Height, time: Time) -> bool { + use LockTime as L; - match *self { - Blocks(n) => n <= height, - Seconds(n) => n <= time, + match self { + L::Blocks(n) => n <= height, + L::Seconds(n) => n <= time, } } @@ -238,25 +260,24 @@ impl LockTime { /// # Examples /// /// ```rust - /// # use bitcoin_primitives::absolute::LockTime; - /// let lock_time = LockTime::from_consensus(741521); - /// let check = LockTime::from_consensus(741521 + 1); + /// # use bitcoin_primitives::absolute; + /// let lock_time = absolute::LockTime::from_consensus(741521); + /// let check = absolute::LockTime::from_consensus(741521 + 1); /// assert!(lock_time.is_implied_by(check)); /// ``` #[inline] - #[cfg_attr(all(test, mutate), mutate)] - pub fn is_implied_by(&self, other: LockTime) -> bool { - use LockTime::*; + pub fn is_implied_by(self, other: LockTime) -> bool { + use LockTime as L; - match (*self, other) { - (Blocks(this), Blocks(other)) => this <= other, - (Seconds(this), Seconds(other)) => this <= other, + match (self, other) { + (L::Blocks(this), L::Blocks(other)) => this <= other, + (L::Seconds(this), L::Seconds(other)) => this <= other, _ => false, // Not the same units. } } /// Returns the inner `u32` value. This is the value used when creating this `LockTime` - /// i.e., `n OP_CHECKLOCKTIMEVERIFY` or nLockTime. + /// i.e., `n OP_CHECKLOCKTIMEVERIFY` or `nLockTime`. 
/// /// # Warning /// @@ -267,13 +288,13 @@ impl LockTime { /// # Examples /// /// ```rust - /// # use bitcoin_primitives::absolute::{LockTime, LockTime::*}; - /// # let n = LockTime::from_consensus(741521); // n OP_CHECKLOCKTIMEVERIFY - /// # let lock_time = LockTime::from_consensus(741521 + 1); // nLockTime + /// use bitcoin_primitives::absolute::{self, LockTime as L}; + /// # let n = absolute::LockTime::from_consensus(741521); // n OP_CHECKLOCKTIMEVERIFY + /// # let lock_time = absolute::LockTime::from_consensus(741521 + 1); // nLockTime /// /// let _is_satisfied = match (n, lock_time) { - /// (Blocks(n), Blocks(lock_time)) => n <= lock_time, - /// (Seconds(n), Seconds(lock_time)) => n <= lock_time, + /// (L::Blocks(n), L::Blocks(lock_time)) => n <= lock_time, + /// (L::Seconds(n), L::Seconds(lock_time)) => n <= lock_time, /// _ => panic!("invalid comparison"), /// }; /// @@ -301,43 +322,31 @@ impl From