diff --git a/.github/env b/.github/env
index bb61e1f4cd99..730c37f1db80 100644
--- a/.github/env
+++ b/.github/env
@@ -1 +1 @@
-IMAGE="docker.io/paritytech/ci-unified:bullseye-1.81.0-2024-09-11-v202409111034"
+IMAGE="docker.io/paritytech/ci-unified:bullseye-1.81.0-2024-11-19-v202411281558"
diff --git a/.github/scripts/common/lib.sh b/.github/scripts/common/lib.sh
index 6b8f70a26d7e..00f8c089831e 100755
--- a/.github/scripts/common/lib.sh
+++ b/.github/scripts/common/lib.sh
@@ -270,20 +270,19 @@ fetch_debian_package_from_s3() {
}
# Fetch the release artifacts like binary and signatures from S3. Assumes the ENV are set:
-# - RELEASE_ID
-# - GITHUB_TOKEN
-# - REPO in the form paritytech/polkadot
+# inputs: binary (e.g. polkadot), target (e.g. aarch64-apple-darwin)
fetch_release_artifacts_from_s3() {
BINARY=$1
- OUTPUT_DIR=${OUTPUT_DIR:-"./release-artifacts/${BINARY}"}
+ TARGET=$2
+ OUTPUT_DIR=${OUTPUT_DIR:-"./release-artifacts/${TARGET}/${BINARY}"}
echo "OUTPUT_DIR : $OUTPUT_DIR"
URL_BASE=$(get_s3_url_base $BINARY)
echo "URL_BASE=$URL_BASE"
- URL_BINARY=$URL_BASE/$VERSION/$BINARY
- URL_SHA=$URL_BASE/$VERSION/$BINARY.sha256
- URL_ASC=$URL_BASE/$VERSION/$BINARY.asc
+ URL_BINARY=$URL_BASE/$VERSION/$TARGET/$BINARY
+ URL_SHA=$URL_BASE/$VERSION/$TARGET/$BINARY.sha256
+ URL_ASC=$URL_BASE/$VERSION/$TARGET/$BINARY.asc
# Fetch artifacts
mkdir -p "$OUTPUT_DIR"
@@ -298,7 +297,7 @@ fetch_release_artifacts_from_s3() {
pwd
ls -al --color
popd > /dev/null
-
+ unset OUTPUT_DIR
}
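+
+# Example (illustrative): with VERSION=polkadot-stable2412-rc1 exported,
+#   fetch_release_artifacts_from_s3 polkadot x86_64-unknown-linux-gnu
+# downloads polkadot, polkadot.sha256 and polkadot.asc into
+# ./release-artifacts/x86_64-unknown-linux-gnu/polkadot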
# Pass the name of the binary as input, it will
@@ -306,15 +305,26 @@ fetch_release_artifacts_from_s3() {
function get_s3_url_base() {
name=$1
case $name in
- polkadot | polkadot-execute-worker | polkadot-prepare-worker | staking-miner)
+ polkadot | polkadot-execute-worker | polkadot-prepare-worker )
printf "https://releases.parity.io/polkadot"
;;
- polkadot-parachain)
- printf "https://releases.parity.io/cumulus"
+ polkadot-parachain)
+ printf "https://releases.parity.io/polkadot-parachain"
+ ;;
+
+ polkadot-omni-node)
+ printf "https://releases.parity.io/polkadot-omni-node"
;;
- *)
+ chain-spec-builder)
+ printf "https://releases.parity.io/chain-spec-builder"
+ ;;
+
+ frame-omni-bencher)
+ printf "https://releases.parity.io/frame-omni-bencher"
+ ;;
+ *)
printf "UNSUPPORTED BINARY $name"
exit 1
;;
@@ -497,3 +507,16 @@ validate_stable_tag() {
exit 1
fi
}
+
+# Prepare the docker stable tag from the polkadot stable tag
+# input: tag (polkadot-stableYYMM(-X) or polkadot-stableYYMM(-X)-rcX)
+# output: stableYYMM(-X) or stableYYMM(-X)-rcX
+prepare_docker_stable_tag() {
+ tag="$1"
+ if [[ "$tag" =~ stable[0-9]{4}(-[0-9]+)?(-rc[0-9]+)? ]]; then
+ echo "${BASH_REMATCH[0]}"
+ else
+ echo "Tag is invalid: $tag"
+ exit 1
+ fi
+}
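+
+# Example (illustrative):
+#   prepare_docker_stable_tag polkadot-stable2412-1-rc2   # prints: stable2412-1-rc2
+#   prepare_docker_stable_tag polkadot-stable2412         # prints: stable2412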
diff --git a/.github/scripts/release/release_lib.sh b/.github/scripts/release/release_lib.sh
index 8b9254ec3f29..984709f2ea03 100644
--- a/.github/scripts/release/release_lib.sh
+++ b/.github/scripts/release/release_lib.sh
@@ -129,13 +129,69 @@ upload_s3_release() {
echo "Working on version: $version "
echo "Working on platform: $target "
+ URL_BASE=$(get_s3_url_base $product)
+
echo "Current content, should be empty on new uploads:"
- aws s3 ls "s3://releases.parity.io/${product}/${version}/${target}" --recursive --human-readable --summarize || true
+ aws s3 ls "s3://${URL_BASE}/${version}/${target}" --recursive --human-readable --summarize || true
echo "Content to be uploaded:"
- artifacts="artifacts/$product/"
+ artifacts="release-artifacts/$target/$product/"
ls "$artifacts"
- aws s3 sync --acl public-read "$artifacts" "s3://releases.parity.io/${product}/${version}/${target}"
+ aws s3 sync --acl public-read "$artifacts" "s3://${URL_BASE}/${version}/${target}"
echo "Uploaded files:"
- aws s3 ls "s3://releases.parity.io/${product}/${version}/${target}" --recursive --human-readable --summarize
- echo "✅ The release should be at https://releases.parity.io/${product}/${version}/${target}"
+ aws s3 ls "s3://${URL_BASE}/${version}/${target}" --recursive --human-readable --summarize
+ echo "✅ The release should be at https://${URL_BASE}/${version}/${target}"
+}
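+
+# Example (illustrative, assuming the usual product/version/target argument order):
+#   upload_s3_release polkadot polkadot-stable2412-rc1 x86_64-unknown-linux-gnu
+# syncs release-artifacts/x86_64-unknown-linux-gnu/polkadot/ to
+# s3://releases.parity.io/polkadot/polkadot-stable2412-rc1/x86_64-unknown-linux-gnu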
+
+# Upload runtime artifacts to the s3 release bucket
+#
+# input: version (stable release tag, e.g. polkadot-stable2412 or polkadot-stable2412-rc1)
+# output: none
+upload_s3_runtimes_release_artifacts() {
+ alias aws='podman run --rm -it docker.io/paritytech/awscli -e AWS_ACCESS_KEY_ID -e AWS_SECRET_ACCESS_KEY -e AWS_BUCKET aws'
+
+ version=$1
+
+ echo "Working on version: $version "
+
+ echo "Current content, should be empty on new uploads:"
+ aws s3 ls "s3://releases.parity.io/polkadot/runtimes/${version}/" --recursive --human-readable --summarize || true
+ echo "Content to be uploaded:"
+ artifacts="artifacts/runtimes/"
+ ls "$artifacts"
+ aws s3 sync --acl public-read "$artifacts" "s3://releases.parity.io/polkadot/runtimes/${version}/"
+ echo "Uploaded files:"
+ aws s3 ls "s3://releases.parity.io/polkadot/runtimes/${version}/" --recursive --human-readable --summarize
+ echo "✅ The release should be at https://releases.parity.io/polkadot/runtimes/${version}"
+}
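+
+# Example (illustrative): upload_s3_runtimes_release_artifacts polkadot-stable2412-rc1
+# syncs artifacts/runtimes/ to s3://releases.parity.io/polkadot/runtimes/polkadot-stable2412-rc1/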
+
+
+# Pass the name of the binary as input, it will
+# return the s3 base url
+function get_s3_url_base() {
+ name=$1
+ case $name in
+ polkadot | polkadot-execute-worker | polkadot-prepare-worker )
+ printf "releases.parity.io/polkadot"
+ ;;
+
+ polkadot-parachain)
+ printf "releases.parity.io/polkadot-parachain"
+ ;;
+
+ polkadot-omni-node)
+ printf "releases.parity.io/polkadot-omni-node"
+ ;;
+
+ chain-spec-builder)
+ printf "releases.parity.io/chain-spec-builder"
+ ;;
+
+ frame-omni-bencher)
+ printf "releases.parity.io/frame-omni-bencher"
+ ;;
+ *)
+ printf "UNSUPPORTED BINARY $name"
+ exit 1
+ ;;
+ esac
}
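+
+# Example (illustrative): get_s3_url_base polkadot-omni-node prints
+# "releases.parity.io/polkadot-omni-node"; upload_s3_release then appends
+# the version and target to build the final S3 destination.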
diff --git a/.github/workflows/check-links.yml b/.github/workflows/check-links.yml
index dd9d3eaf824f..cea6b9a8636a 100644
--- a/.github/workflows/check-links.yml
+++ b/.github/workflows/check-links.yml
@@ -33,7 +33,7 @@ jobs:
- uses: actions/checkout@6d193bf28034eafb982f37bd894289fe649468fc # v4.1.0 (22. Sep 2023)
- name: Lychee link checker
- uses: lycheeverse/lychee-action@7cd0af4c74a61395d455af97419279d86aafaede # for v1.9.1 (10. Jan 2024)
+ uses: lycheeverse/lychee-action@f81112d0d2814ded911bd23e3beaa9dda9093915 # for v1.9.1 (10. Jan 2024)
with:
args: >-
--config .config/lychee.toml
diff --git a/.github/workflows/check-semver.yml b/.github/workflows/check-semver.yml
index 8d77b6a31b75..e9bedd16e6d1 100644
--- a/.github/workflows/check-semver.yml
+++ b/.github/workflows/check-semver.yml
@@ -11,7 +11,7 @@ concurrency:
cancel-in-progress: true
env:
- TOOLCHAIN: nightly-2024-10-19
+ TOOLCHAIN: nightly-2024-11-19
jobs:
preflight:
@@ -74,7 +74,7 @@ jobs:
- name: install parity-publish
# Set the target dir to cache the build.
- run: CARGO_TARGET_DIR=./target/ cargo install parity-publish@0.10.1 --locked -q
+ run: CARGO_TARGET_DIR=./target/ cargo install parity-publish@0.10.2 --locked -q
- name: check semver
run: |
diff --git a/.github/workflows/checks-quick.yml b/.github/workflows/checks-quick.yml
index c733a2517cb8..4c26b85a6303 100644
--- a/.github/workflows/checks-quick.yml
+++ b/.github/workflows/checks-quick.yml
@@ -97,7 +97,6 @@ jobs:
--exclude
"substrate/frame/contracts/fixtures/build"
"substrate/frame/contracts/fixtures/contracts/common"
- "substrate/frame/revive/fixtures/build"
"substrate/frame/revive/fixtures/contracts/common"
- name: deny git deps
run: python3 .github/scripts/deny-git-deps.py .
diff --git a/.github/workflows/command-backport.yml b/.github/workflows/command-backport.yml
index eecf0ac72d2c..53dcea2f1d6d 100644
--- a/.github/workflows/command-backport.yml
+++ b/.github/workflows/command-backport.yml
@@ -29,12 +29,13 @@ jobs:
steps:
- uses: actions/checkout@v4
- - name: Generate token
- id: generate_token
- uses: tibdex/github-app-token@v2.1.0
+ - name: Generate content write token for the release automation
+ id: generate_write_token
+ uses: actions/create-github-app-token@v1
with:
- app_id: ${{ secrets.CMD_BOT_APP_ID }}
- private_key: ${{ secrets.CMD_BOT_APP_KEY }}
+ app-id: ${{ vars.RELEASE_AUTOMATION_APP_ID }}
+ private-key: ${{ secrets.RELEASE_AUTOMATION_APP_PRIVATE_KEY }}
+ owner: paritytech
- name: Create backport pull requests
uses: korthout/backport-action@v3
@@ -42,7 +43,7 @@ jobs:
with:
target_branches: stable2407 stable2409 stable2412
merge_commits: skip
- github_token: ${{ steps.generate_token.outputs.token }}
+ github_token: ${{ steps.generate_write_token.outputs.token }}
pull_description: |
Backport #${pull_number} into `${target_branch}` from ${pull_author}.
@@ -86,7 +87,7 @@ jobs:
const reviewer = '${{ github.event.pull_request.user.login }}';
for (const pullNumber of pullNumbers) {
- await github.pulls.createReviewRequest({
+ await github.pulls.requestReviewers({
owner: context.repo.owner,
repo: context.repo.repo,
pull_number: parseInt(pullNumber),
diff --git a/.github/workflows/publish-check-compile.yml b/.github/workflows/publish-check-compile.yml
new file mode 100644
index 000000000000..83cd3ff8fa90
--- /dev/null
+++ b/.github/workflows/publish-check-compile.yml
@@ -0,0 +1,48 @@
+name: Check publish build
+
+on:
+ push:
+ branches:
+ - master
+ pull_request:
+ types: [opened, synchronize, reopened, ready_for_review]
+ merge_group:
+
+concurrency:
+ group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
+ cancel-in-progress: true
+
+jobs:
+ preflight:
+ uses: ./.github/workflows/reusable-preflight.yml
+
+ check-publish:
+ timeout-minutes: 90
+ needs: [preflight]
+ runs-on: ${{ needs.preflight.outputs.RUNNER }}
+ container:
+ image: ${{ needs.preflight.outputs.IMAGE }}
+ steps:
+ - uses: actions/checkout@6d193bf28034eafb982f37bd894289fe649468fc # v4.1.7
+
+ - name: Rust Cache
+ uses: Swatinem/rust-cache@82a92a6e8fbeee089604da2575dc567ae9ddeaab # v2.7.5
+ with:
+ cache-on-failure: true
+
+ - name: install parity-publish
+ run: cargo install parity-publish@0.10.2 --locked -q
+
+ - name: parity-publish update plan
+ run: parity-publish --color always plan --skip-check --prdoc prdoc/
+
+ - name: parity-publish apply plan
+ run: parity-publish --color always apply --registry
+
+ - name: parity-publish check compile
+ run: |
+ packages="$(parity-publish apply --print)"
+
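+          # Only run cargo check when the plan output is non-empty, i.e. there are crates to compile.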
+ if [ -n "$packages" ]; then
+ cargo --color always check $(printf -- '-p %s ' $packages)
+ fi
diff --git a/.github/workflows/publish-check-crates.yml b/.github/workflows/publish-check-crates.yml
index 3fad3b641474..1e5a8054e2c7 100644
--- a/.github/workflows/publish-check-crates.yml
+++ b/.github/workflows/publish-check-crates.yml
@@ -24,7 +24,7 @@ jobs:
cache-on-failure: true
- name: install parity-publish
- run: cargo install parity-publish@0.8.0 --locked -q
+ run: cargo install parity-publish@0.10.2 --locked -q
- name: parity-publish check
run: parity-publish --color always check --allow-unpublished
diff --git a/.github/workflows/publish-claim-crates.yml b/.github/workflows/publish-claim-crates.yml
index 37bf06bb82d8..845b57a61b96 100644
--- a/.github/workflows/publish-claim-crates.yml
+++ b/.github/workflows/publish-claim-crates.yml
@@ -18,7 +18,7 @@ jobs:
cache-on-failure: true
- name: install parity-publish
- run: cargo install parity-publish@0.8.0 --locked -q
+ run: cargo install parity-publish@0.10.2 --locked -q
- name: parity-publish claim
env:
diff --git a/.github/workflows/release-branchoff-stable.yml b/.github/workflows/release-10_branchoff-stable.yml
similarity index 100%
rename from .github/workflows/release-branchoff-stable.yml
rename to .github/workflows/release-10_branchoff-stable.yml
diff --git a/.github/workflows/release-10_rc-automation.yml b/.github/workflows/release-11_rc-automation.yml
similarity index 100%
rename from .github/workflows/release-10_rc-automation.yml
rename to .github/workflows/release-11_rc-automation.yml
diff --git a/.github/workflows/release-build-rc.yml b/.github/workflows/release-20_build-rc.yml
similarity index 62%
rename from .github/workflows/release-build-rc.yml
rename to .github/workflows/release-20_build-rc.yml
index a43c2b282a8d..d4c7055c37c5 100644
--- a/.github/workflows/release-build-rc.yml
+++ b/.github/workflows/release-20_build-rc.yml
@@ -11,10 +11,12 @@ on:
- polkadot
- polkadot-parachain
- polkadot-omni-node
+ - frame-omni-bencher
+ - chain-spec-builder
- all
release_tag:
- description: Tag matching the actual release candidate with the format stableYYMM-rcX or stableYYMM
+ description: Tag matching the actual release candidate with the format polkadot-stableYYMM(-X)-rcX or polkadot-stableYYMM(-X)
type: string
jobs:
@@ -106,6 +108,50 @@ jobs:
attestations: write
contents: read
+ build-frame-omni-bencher-binary:
+ needs: [validate-inputs]
+ if: ${{ inputs.binary == 'frame-omni-bencher' || inputs.binary == 'all' }}
+ uses: "./.github/workflows/release-reusable-rc-buid.yml"
+ with:
+ binary: '["frame-omni-bencher"]'
+ package: "frame-omni-bencher"
+ release_tag: ${{ needs.validate-inputs.outputs.release_tag }}
+ target: x86_64-unknown-linux-gnu
+ secrets:
+ PGP_KMS_KEY: ${{ secrets.PGP_KMS_KEY }}
+ PGP_KMS_HASH: ${{ secrets.PGP_KMS_HASH }}
+ AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+ AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+ AWS_DEFAULT_REGION: ${{ secrets.AWS_DEFAULT_REGION }}
+ AWS_RELEASE_ACCESS_KEY_ID: ${{ secrets.AWS_RELEASE_ACCESS_KEY_ID }}
+ AWS_RELEASE_SECRET_ACCESS_KEY: ${{ secrets.AWS_RELEASE_SECRET_ACCESS_KEY }}
+ permissions:
+ id-token: write
+ attestations: write
+ contents: read
+
+ build-chain-spec-builder-binary:
+ needs: [validate-inputs]
+ if: ${{ inputs.binary == 'chain-spec-builder' || inputs.binary == 'all' }}
+ uses: "./.github/workflows/release-reusable-rc-buid.yml"
+ with:
+ binary: '["chain-spec-builder"]'
+ package: staging-chain-spec-builder
+ release_tag: ${{ needs.validate-inputs.outputs.release_tag }}
+ target: x86_64-unknown-linux-gnu
+ secrets:
+ PGP_KMS_KEY: ${{ secrets.PGP_KMS_KEY }}
+ PGP_KMS_HASH: ${{ secrets.PGP_KMS_HASH }}
+ AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+ AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+ AWS_DEFAULT_REGION: ${{ secrets.AWS_DEFAULT_REGION }}
+ AWS_RELEASE_ACCESS_KEY_ID: ${{ secrets.AWS_RELEASE_ACCESS_KEY_ID }}
+ AWS_RELEASE_SECRET_ACCESS_KEY: ${{ secrets.AWS_RELEASE_SECRET_ACCESS_KEY }}
+ permissions:
+ id-token: write
+ attestations: write
+ contents: read
+
build-polkadot-macos-binary:
needs: [validate-inputs]
if: ${{ inputs.binary == 'polkadot' || inputs.binary == 'all' }}
@@ -134,7 +180,7 @@ jobs:
uses: "./.github/workflows/release-reusable-rc-buid.yml"
with:
binary: '["polkadot-parachain"]'
- package: "polkadot-parachain-bin"
+ package: polkadot-parachain-bin
release_tag: ${{ needs.validate-inputs.outputs.release_tag }}
target: aarch64-apple-darwin
secrets:
@@ -156,7 +202,51 @@ jobs:
uses: "./.github/workflows/release-reusable-rc-buid.yml"
with:
binary: '["polkadot-omni-node"]'
- package: "polkadot-omni-node"
+ package: polkadot-omni-node
+ release_tag: ${{ needs.validate-inputs.outputs.release_tag }}
+ target: aarch64-apple-darwin
+ secrets:
+ PGP_KMS_KEY: ${{ secrets.PGP_KMS_KEY }}
+ PGP_KMS_HASH: ${{ secrets.PGP_KMS_HASH }}
+ AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+ AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+ AWS_DEFAULT_REGION: ${{ secrets.AWS_DEFAULT_REGION }}
+ AWS_RELEASE_ACCESS_KEY_ID: ${{ secrets.AWS_RELEASE_ACCESS_KEY_ID }}
+ AWS_RELEASE_SECRET_ACCESS_KEY: ${{ secrets.AWS_RELEASE_SECRET_ACCESS_KEY }}
+ permissions:
+ id-token: write
+ attestations: write
+ contents: read
+
+ build-frame-omni-bencher-macos-binary:
+ needs: [validate-inputs]
+ if: ${{ inputs.binary == 'frame-omni-bencher' || inputs.binary == 'all' }}
+ uses: "./.github/workflows/release-reusable-rc-buid.yml"
+ with:
+ binary: '["frame-omni-bencher"]'
+ package: frame-omni-bencher
+ release_tag: ${{ needs.validate-inputs.outputs.release_tag }}
+ target: aarch64-apple-darwin
+ secrets:
+ PGP_KMS_KEY: ${{ secrets.PGP_KMS_KEY }}
+ PGP_KMS_HASH: ${{ secrets.PGP_KMS_HASH }}
+ AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+ AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+ AWS_DEFAULT_REGION: ${{ secrets.AWS_DEFAULT_REGION }}
+ AWS_RELEASE_ACCESS_KEY_ID: ${{ secrets.AWS_RELEASE_ACCESS_KEY_ID }}
+ AWS_RELEASE_SECRET_ACCESS_KEY: ${{ secrets.AWS_RELEASE_SECRET_ACCESS_KEY }}
+ permissions:
+ id-token: write
+ attestations: write
+ contents: read
+
+ build-chain-spec-builder-macos-binary:
+ needs: [validate-inputs]
+ if: ${{ inputs.binary == 'chain-spec-builder' || inputs.binary == 'all' }}
+ uses: "./.github/workflows/release-reusable-rc-buid.yml"
+ with:
+ binary: '["chain-spec-builder"]'
+ package: staging-chain-spec-builder
release_tag: ${{ needs.validate-inputs.outputs.release_tag }}
target: aarch64-apple-darwin
secrets:
diff --git a/.github/workflows/release-30_publish_release_draft.yml b/.github/workflows/release-30_publish_release_draft.yml
index 4364b4f80457..78ceea91f100 100644
--- a/.github/workflows/release-30_publish_release_draft.yml
+++ b/.github/workflows/release-30_publish_release_draft.yml
@@ -1,19 +1,46 @@
name: Release - Publish draft
-on:
- push:
- tags:
- # Catches v1.2.3 and v1.2.3-rc1
- - v[0-9]+.[0-9]+.[0-9]+*
- # - polkadot-stable[0-9]+* Activate when the release process from release org is setteled
+# This workflow runs in paritytech-release and creates a full release draft with:
+# - release notes
+# - info about the runtimes
+# - attached artifacts:
+# - runtimes
+# - binaries
+# - signatures
+on:
workflow_dispatch:
inputs:
- version:
- description: Current release/rc version
+ release_tag:
+ description: Tag matching the actual release candidate with the format polkadot-stableYYMM(-X)-rcX or polkadot-stableYYMM(-X)
+ required: true
+ type: string
jobs:
+ check-synchronization:
+ uses: paritytech-release/sync-workflows/.github/workflows/check-syncronization.yml@main
+
+ validate-inputs:
+ needs: [ check-synchronization ]
+ if: ${{ needs.check-synchronization.outputs.checks_passed == 'true' }}
+ runs-on: ubuntu-latest
+ outputs:
+ release_tag: ${{ steps.validate_inputs.outputs.release_tag }}
+
+ steps:
+ - name: Checkout sources
+ uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+
+ - name: Validate inputs
+ id: validate_inputs
+ run: |
+ . ./.github/scripts/common/lib.sh
+
+ RELEASE_TAG=$(validate_stable_tag ${{ inputs.release_tag }})
+ echo "release_tag=${RELEASE_TAG}" >> $GITHUB_OUTPUT
+
get-rust-versions:
+ needs: [ validate-inputs ]
runs-on: ubuntu-latest
outputs:
rustc-stable: ${{ steps.get-rust-versions.outputs.stable }}
@@ -24,47 +51,28 @@ jobs:
echo "stable=$RUST_STABLE_VERSION" >> $GITHUB_OUTPUT
build-runtimes:
+ needs: [ validate-inputs ]
uses: "./.github/workflows/release-srtool.yml"
with:
excluded_runtimes: "asset-hub-rococo bridge-hub-rococo contracts-rococo coretime-rococo people-rococo rococo rococo-parachain substrate-test bp cumulus-test kitchensink minimal-template parachain-template penpal polkadot-test seedling shell frame-try sp solochain-template polkadot-sdk-docs-first"
build_opts: "--features on-chain-release-build"
-
- build-binaries:
- runs-on: ubuntu-latest
- strategy:
- matrix:
- # Tuples of [package, binary-name]
- binary: [ [frame-omni-bencher, frame-omni-bencher], [staging-chain-spec-builder, chain-spec-builder] ]
- steps:
- - name: Checkout sources
- uses: actions/checkout@6d193bf28034eafb982f37bd894289fe649468fc # v4.0.0
-
- - name: Install protobuf-compiler
- run: |
- sudo apt update
- sudo apt install -y protobuf-compiler
-
- - name: Build ${{ matrix.binary[1] }} binary
- run: |
- cargo build --locked --profile=production -p ${{ matrix.binary[0] }} --bin ${{ matrix.binary[1] }}
- target/production/${{ matrix.binary[1] }} --version
-
- - name: Upload ${{ matrix.binary[1] }} binary
- uses: actions/upload-artifact@5d5d22a31266ced268874388b861e4b58bb5c2f3 # v4.3.1
- with:
- name: ${{ matrix.binary[1] }}
- path: target/production/${{ matrix.binary[1] }}
-
+ profile: production
+ permissions:
+ id-token: write
+ attestations: write
+ contents: read
publish-release-draft:
runs-on: ubuntu-latest
- needs: [ get-rust-versions, build-runtimes ]
+ environment: release
+ needs: [ validate-inputs, get-rust-versions, build-runtimes ]
outputs:
release_url: ${{ steps.create-release.outputs.html_url }}
asset_upload_url: ${{ steps.create-release.outputs.upload_url }}
+
steps:
- name: Checkout
- uses: actions/checkout@6d193bf28034eafb982f37bd894289fe649468fc # v4.0.0
+ uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- name: Download artifacts
uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8
@@ -87,20 +95,21 @@ jobs:
GLUTTON_WESTEND_DIGEST: ${{ github.workspace}}/glutton-westend-runtime/glutton-westend-srtool-digest.json
PEOPLE_WESTEND_DIGEST: ${{ github.workspace}}/people-westend-runtime/people-westend-srtool-digest.json
WESTEND_DIGEST: ${{ github.workspace}}/westend-runtime/westend-srtool-digest.json
+ RELEASE_TAG: ${{ needs.validate-inputs.outputs.release_tag }}
shell: bash
run: |
. ./.github/scripts/common/lib.sh
export REF1=$(get_latest_release_tag)
- if [[ -z "${{ inputs.version }}" ]]; then
+ if [[ -z "$RELEASE_TAG" ]]; then
export REF2="${{ github.ref_name }}"
echo "REF2: ${REF2}"
else
- export REF2="${{ inputs.version }}"
+ export REF2="$RELEASE_TAG"
echo "REF2: ${REF2}"
fi
echo "REL_TAG=$REF2" >> $GITHUB_ENV
- export VERSION=$(echo "$REF2" | sed -E 's/.*(stable[0-9]+).*$/\1/')
+ export VERSION=$(echo "$REF2" | sed -E 's/.*(stable[0-9]{4}(-[0-9]+)?).*$/\1/')
./scripts/release/build-changelogs.sh
@@ -112,19 +121,29 @@ jobs:
scripts/release/context.json
**/*-srtool-digest.json
+ - name: Generate content write token for the release automation
+ id: generate_write_token
+ uses: actions/create-github-app-token@v1
+ with:
+ app-id: ${{ vars.POLKADOT_SDK_RELEASE_RW_APP_ID }}
+ private-key: ${{ secrets.POLKADOT_SDK_RELEASE_RW_APP_KEY }}
+ owner: paritytech
+ repositories: polkadot-sdk
+
- name: Create draft release
id: create-release
- uses: actions/create-release@0cb9c9b65d5d1901c1f53e5e66eaf4afd303e70e # v1.1.4
env:
- GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- with:
- tag_name: ${{ env.REL_TAG }}
- release_name: Polkadot ${{ env.REL_TAG }}
- body_path: ${{ github.workspace}}/scripts/release/RELEASE_DRAFT.md
- draft: true
+ GITHUB_TOKEN: ${{ steps.generate_write_token.outputs.token }}
+ run: |
+ gh release create ${{ env.REL_TAG }} \
+ --repo paritytech/polkadot-sdk \
+ --draft \
+ --title "Polkadot ${{ env.REL_TAG }}" \
+ --notes-file ${{ github.workspace}}/scripts/release/RELEASE_DRAFT.md
publish-runtimes:
- needs: [ build-runtimes, publish-release-draft ]
+ needs: [ validate-inputs, build-runtimes, publish-release-draft ]
+ environment: release
continue-on-error: true
runs-on: ubuntu-latest
strategy:
@@ -132,7 +151,7 @@ jobs:
steps:
- name: Checkout sources
- uses: actions/checkout@6d193bf28034eafb982f37bd894289fe649468fc # v4.0.0
+ uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- name: Download artifacts
uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8
@@ -144,44 +163,83 @@ jobs:
>>$GITHUB_ENV echo ASSET=$(find ${{ matrix.chain }}-runtime -name '*.compact.compressed.wasm')
>>$GITHUB_ENV echo SPEC=$(<${JSON} jq -r .runtimes.compact.subwasm.core_version.specVersion)
+ - name: Generate content write token for the release automation
+ id: generate_write_token
+ uses: actions/create-github-app-token@v1
+ with:
+ app-id: ${{ vars.POLKADOT_SDK_RELEASE_RW_APP_ID }}
+ private-key: ${{ secrets.POLKADOT_SDK_RELEASE_RW_APP_KEY }}
+ owner: paritytech
+ repositories: polkadot-sdk
+
- name: Upload compressed ${{ matrix.chain }} v${{ env.SPEC }} wasm
- if: ${{ matrix.chain != 'rococo-parachain' }}
- uses: actions/upload-release-asset@e8f9f06c4b078e705bd2ea027f0926603fc9b4d5 #v1.0.2
env:
- GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- with:
- upload_url: ${{ needs.publish-release-draft.outputs.asset_upload_url }}
- asset_path: ${{ env.ASSET }}
- asset_name: ${{ matrix.chain }}_runtime-v${{ env.SPEC }}.compact.compressed.wasm
- asset_content_type: application/wasm
+ GITHUB_TOKEN: ${{ steps.generate_write_token.outputs.token }}
+ run: |
+ gh release upload ${{ needs.validate-inputs.outputs.release_tag }} \
+ --repo paritytech/polkadot-sdk \
+ '${{ env.ASSET }}#${{ matrix.chain }}_runtime-v${{ env.SPEC }}.compact.compressed.wasm'
- publish-binaries:
- needs: [ publish-release-draft, build-binaries ]
+ publish-release-artifacts:
+ needs: [ validate-inputs, publish-release-draft ]
+ environment: release
continue-on-error: true
runs-on: ubuntu-latest
strategy:
matrix:
- binary: [frame-omni-bencher, chain-spec-builder]
+ binary: [ polkadot, polkadot-execute-worker, polkadot-prepare-worker, polkadot-parachain, polkadot-omni-node, frame-omni-bencher, chain-spec-builder ]
+ target: [ x86_64-unknown-linux-gnu, aarch64-apple-darwin ]
steps:
- - name: Download artifacts
- uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8
+ - name: Checkout sources
+ uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+
+ - name: Fetch binaries from s3 based on version
+ run: |
+ . ./.github/scripts/common/lib.sh
+
+ VERSION="${{ needs.validate-inputs.outputs.release_tag }}"
+ fetch_release_artifacts_from_s3 ${{ matrix.binary }} ${{ matrix.target }}
+
+ - name: Rename aarch64-apple-darwin binaries
+ if: ${{ matrix.target == 'aarch64-apple-darwin' }}
+ working-directory: ${{ github.workspace}}/release-artifacts/${{ matrix.target }}/${{ matrix.binary }}
+ run: |
+ mv ${{ matrix.binary }} ${{ matrix.binary }}-aarch64-apple-darwin
+ mv ${{ matrix.binary }}.asc ${{ matrix.binary }}-aarch64-apple-darwin.asc
+ mv ${{ matrix.binary }}.sha256 ${{ matrix.binary }}-aarch64-apple-darwin.sha256
+
+ - name: Generate content write token for the release automation
+ id: generate_write_token
+ uses: actions/create-github-app-token@v1
with:
- name: ${{ matrix.binary }}
+ app-id: ${{ vars.POLKADOT_SDK_RELEASE_RW_APP_ID }}
+ private-key: ${{ secrets.POLKADOT_SDK_RELEASE_RW_APP_KEY }}
+ owner: paritytech
+ repositories: polkadot-sdk
- - name: Upload ${{ matrix.binary }} binary
- uses: actions/upload-release-asset@e8f9f06c4b078e705bd2ea027f0926603fc9b4d5 #v1.0.2
+ - name: Upload ${{ matrix.binary }} binary to release draft
env:
- GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- with:
- upload_url: ${{ needs.publish-release-draft.outputs.asset_upload_url }}
- asset_path: ${{ github.workspace}}/${{ matrix.binary }}
- asset_name: ${{ matrix.binary }}
- asset_content_type: application/octet-stream
+ GITHUB_TOKEN: ${{ steps.generate_write_token.outputs.token }}
+ working-directory: ${{ github.workspace}}/release-artifacts/${{ matrix.target }}/${{ matrix.binary }}
+ run: |
+ if [[ ${{ matrix.target }} == "aarch64-apple-darwin" ]]; then
+ gh release upload ${{ needs.validate-inputs.outputs.release_tag }} \
+ --repo paritytech/polkadot-sdk \
+ ${{ matrix.binary }}-aarch64-apple-darwin \
+ ${{ matrix.binary }}-aarch64-apple-darwin.asc \
+ ${{ matrix.binary }}-aarch64-apple-darwin.sha256
+ else
+ gh release upload ${{ needs.validate-inputs.outputs.release_tag }} \
+ --repo paritytech/polkadot-sdk \
+ ${{ matrix.binary }} \
+ ${{ matrix.binary }}.asc \
+ ${{ matrix.binary }}.sha256
+ fi
post_to_matrix:
runs-on: ubuntu-latest
- needs: publish-release-draft
+ needs: [ validate-inputs, publish-release-draft ]
environment: release
strategy:
matrix:
@@ -197,5 +255,5 @@ jobs:
access_token: ${{ secrets.RELEASENOTES_MATRIX_V2_ACCESS_TOKEN }}
server: m.parity.io
message: |
- **New version of polkadot tagged**: ${{ github.ref_name }}
- Draft release created: ${{ needs.publish-release-draft.outputs.release_url }}
+ **New version of polkadot tagged**: ${{ needs.validate-inputs.outputs.release_tag }}
+ And the release draft is created in the [polkadot-sdk repo](https://github.com/paritytech/polkadot-sdk/releases)
diff --git a/.github/workflows/release-31_promote-rc-to-final.yml b/.github/workflows/release-31_promote-rc-to-final.yml
new file mode 100644
index 000000000000..6aa9d4bddd1d
--- /dev/null
+++ b/.github/workflows/release-31_promote-rc-to-final.yml
@@ -0,0 +1,125 @@
+name: Release - Promote RC to final candidate on S3
+
+on:
+ workflow_dispatch:
+ inputs:
+ binary:
+ description: Binary to be promoted to the final release
+ default: all
+ type: choice
+ options:
+ - polkadot
+ - polkadot-parachain
+ - polkadot-omni-node
+ - frame-omni-bencher
+ - chain-spec-builder
+ - all
+ release_tag:
+ description: Tag matching the actual release candidate with the format polkadot-stableYYMM(-X)-rcX
+ type: string
+
+
+jobs:
+
+ check-synchronization:
+ uses: paritytech-release/sync-workflows/.github/workflows/check-syncronization.yml@main
+
+ validate-inputs:
+ needs: [ check-synchronization ]
+ if: ${{ needs.check-synchronization.outputs.checks_passed == 'true' }}
+ runs-on: ubuntu-latest
+ outputs:
+ release_tag: ${{ steps.validate_inputs.outputs.release_tag }}
+ final_tag: ${{ steps.validate_inputs.outputs.final_tag }}
+
+ steps:
+ - name: Checkout sources
+ uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+
+ - name: Validate inputs
+ id: validate_inputs
+ run: |
+ . ./.github/scripts/common/lib.sh
+
+ RELEASE_TAG=$(validate_stable_tag ${{ inputs.release_tag }})
+ echo "release_tag=${RELEASE_TAG}" >> $GITHUB_OUTPUT
+
+ promote-polkadot-rc-to-final:
+ if: ${{ inputs.binary == 'polkadot' || inputs.binary == 'all' }}
+ needs: [ validate-inputs ]
+ uses: ./.github/workflows/release-reusable-promote-to-final.yml
+ strategy:
+ matrix:
+ target: [ x86_64-unknown-linux-gnu, aarch64-apple-darwin ]
+ with:
+ package: polkadot
+ release_tag: ${{ needs.validate-inputs.outputs.release_tag }}
+ target: ${{ matrix.target }}
+ secrets:
+ AWS_DEFAULT_REGION: ${{ secrets.AWS_DEFAULT_REGION }}
+ AWS_RELEASE_ACCESS_KEY_ID: ${{ secrets.AWS_RELEASE_ACCESS_KEY_ID }}
+ AWS_RELEASE_SECRET_ACCESS_KEY: ${{ secrets.AWS_RELEASE_SECRET_ACCESS_KEY }}
+
+ promote-polkadot-parachain-rc-to-final:
+ if: ${{ inputs.binary == 'polkadot-parachain' || inputs.binary == 'all' }}
+ needs: [ validate-inputs ]
+ uses: ./.github/workflows/release-reusable-promote-to-final.yml
+ strategy:
+ matrix:
+ target: [ x86_64-unknown-linux-gnu, aarch64-apple-darwin ]
+ with:
+ package: polkadot-parachain
+ release_tag: ${{ needs.validate-inputs.outputs.release_tag }}
+ target: ${{ matrix.target }}
+ secrets:
+ AWS_DEFAULT_REGION: ${{ secrets.AWS_DEFAULT_REGION }}
+ AWS_RELEASE_ACCESS_KEY_ID: ${{ secrets.AWS_RELEASE_ACCESS_KEY_ID }}
+ AWS_RELEASE_SECRET_ACCESS_KEY: ${{ secrets.AWS_RELEASE_SECRET_ACCESS_KEY }}
+
+ promote-polkadot-omni-node-rc-to-final:
+ if: ${{ inputs.binary == 'polkadot-omni-node' || inputs.binary == 'all' }}
+ needs: [ validate-inputs ]
+ uses: ./.github/workflows/release-reusable-promote-to-final.yml
+ strategy:
+ matrix:
+ target: [ x86_64-unknown-linux-gnu, aarch64-apple-darwin ]
+ with:
+ package: polkadot-omni-node
+ release_tag: ${{ needs.validate-inputs.outputs.release_tag }}
+ target: ${{ matrix.target }}
+ secrets:
+ AWS_DEFAULT_REGION: ${{ secrets.AWS_DEFAULT_REGION }}
+ AWS_RELEASE_ACCESS_KEY_ID: ${{ secrets.AWS_RELEASE_ACCESS_KEY_ID }}
+ AWS_RELEASE_SECRET_ACCESS_KEY: ${{ secrets.AWS_RELEASE_SECRET_ACCESS_KEY }}
+
+ promote-frame-omni-bencher-rc-to-final:
+ if: ${{ inputs.binary == 'frame-omni-bencher' || inputs.binary == 'all' }}
+ needs: [ validate-inputs ]
+ uses: ./.github/workflows/release-reusable-promote-to-final.yml
+ strategy:
+ matrix:
+ target: [ x86_64-unknown-linux-gnu, aarch64-apple-darwin ]
+ with:
+ package: frame-omni-bencher
+ release_tag: ${{ needs.validate-inputs.outputs.release_tag }}
+ target: ${{ matrix.target }}
+ secrets:
+ AWS_DEFAULT_REGION: ${{ secrets.AWS_DEFAULT_REGION }}
+ AWS_RELEASE_ACCESS_KEY_ID: ${{ secrets.AWS_RELEASE_ACCESS_KEY_ID }}
+ AWS_RELEASE_SECRET_ACCESS_KEY: ${{ secrets.AWS_RELEASE_SECRET_ACCESS_KEY }}
+
+ promote-chain-spec-builder-rc-to-final:
+ if: ${{ inputs.binary == 'chain-spec-builder' || inputs.binary == 'all' }}
+ needs: [ validate-inputs ]
+ uses: ./.github/workflows/release-reusable-promote-to-final.yml
+ strategy:
+ matrix:
+ target: [ x86_64-unknown-linux-gnu, aarch64-apple-darwin ]
+ with:
+ package: chain-spec-builder
+ release_tag: ${{ needs.validate-inputs.outputs.release_tag }}
+ target: ${{ matrix.target }}
+ secrets:
+ AWS_DEFAULT_REGION: ${{ secrets.AWS_DEFAULT_REGION }}
+ AWS_RELEASE_ACCESS_KEY_ID: ${{ secrets.AWS_RELEASE_ACCESS_KEY_ID }}
+ AWS_RELEASE_SECRET_ACCESS_KEY: ${{ secrets.AWS_RELEASE_SECRET_ACCESS_KEY }}
diff --git a/.github/workflows/release-50_publish-docker.yml b/.github/workflows/release-50_publish-docker.yml
index 627e53bacd88..5c3c3a6e854d 100644
--- a/.github/workflows/release-50_publish-docker.yml
+++ b/.github/workflows/release-50_publish-docker.yml
@@ -4,10 +4,6 @@ name: Release - Publish Docker Image
# It builds and published releases and rc candidates.
on:
- #TODO: activate automated run later
- # release:
- # types:
- # - published
workflow_dispatch:
inputs:
image_type:
@@ -30,16 +26,6 @@ on:
- polkadot-parachain
- chain-spec-builder
- release_id:
- description: |
- Release ID.
- You can find it using the command:
- curl -s \
- -H "Authorization: Bearer ${GITHUB_TOKEN}" https://api.github.com/repos/$OWNER/$REPO/releases | \
- jq '.[] | { name: .name, id: .id }'
- required: true
- type: number
-
registry:
description: Container registry
required: true
@@ -55,7 +41,7 @@ on:
default: parity
version:
- description: version to build/release
+ description: Version of the polkadot node release in the format v1.16.0 or v1.16.0-rc1
default: v0.9.18
required: true
@@ -78,11 +64,15 @@ env:
IMAGE_TYPE: ${{ inputs.image_type }}
jobs:
+ check-synchronization:
+ uses: paritytech-release/sync-workflows/.github/workflows/check-syncronization.yml@main
+
validate-inputs:
+ needs: [check-synchronization]
+ if: ${{ needs.check-synchronization.outputs.checks_passed == 'true' }}
runs-on: ubuntu-latest
outputs:
version: ${{ steps.validate_inputs.outputs.VERSION }}
- release_id: ${{ steps.validate_inputs.outputs.RELEASE_ID }}
stable_tag: ${{ steps.validate_inputs.outputs.stable_tag }}
steps:
@@ -97,11 +87,6 @@ jobs:
VERSION=$(filter_version_from_input "${{ inputs.version }}")
echo "VERSION=${VERSION}" >> $GITHUB_OUTPUT
- RELEASE_ID=$(check_release_id "${{ inputs.release_id }}")
- echo "RELEASE_ID=${RELEASE_ID}" >> $GITHUB_OUTPUT
-
- echo "Release ID: $RELEASE_ID"
-
STABLE_TAG=$(validate_stable_tag ${{ inputs.stable_tag }})
echo "stable_tag=${STABLE_TAG}" >> $GITHUB_OUTPUT
@@ -114,50 +99,26 @@ jobs:
- name: Checkout sources
uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0
- #TODO: this step will be needed when automated triggering will work
- #this step runs only if the workflow is triggered automatically when new release is published
- # if: ${{ env.EVENT_NAME == 'release' && env.EVENT_ACTION != '' && env.EVENT_ACTION == 'published' }}
- # run: |
- # mkdir -p release-artifacts && cd release-artifacts
-
- # for f in $BINARY $BINARY.asc $BINARY.sha256; do
- # URL="https://github.com/${{ github.event.repository.full_name }}/releases/download/${{ github.event.release.tag_name }}/$f"
- # echo " - Fetching $f from $URL"
- # wget "$URL" -O "$f"
- # done
- # chmod a+x $BINARY
- # ls -al
-
- name: Fetch rc artifacts or release artifacts from s3 based on version
- #this step runs only if the workflow is triggered manually
- if: ${{ env.EVENT_NAME == 'workflow_dispatch' && inputs.binary != 'polkadot-omni-node' && inputs.binary != 'chain-spec-builder'}}
+ # if: ${{ env.EVENT_NAME == 'workflow_dispatch' && inputs.binary != 'polkadot-omni-node' && inputs.binary != 'chain-spec-builder'}}
run: |
. ./.github/scripts/common/lib.sh
- VERSION="${{ needs.validate-inputs.outputs.VERSION }}"
+ VERSION="${{ needs.validate-inputs.outputs.stable_tag }}"
if [[ ${{ inputs.binary }} == 'polkadot' ]]; then
bins=(polkadot polkadot-prepare-worker polkadot-execute-worker)
for bin in "${bins[@]}"; do
- fetch_release_artifacts_from_s3 $bin
+ fetch_release_artifacts_from_s3 $bin x86_64-unknown-linux-gnu
done
else
- fetch_release_artifacts_from_s3 $BINARY
+ fetch_release_artifacts_from_s3 $BINARY x86_64-unknown-linux-gnu
fi
- - name: Fetch polkadot-omni-node/chain-spec-builder rc artifacts or release artifacts based on release id
- #this step runs only if the workflow is triggered manually and only for chain-spec-builder
- if: ${{ env.EVENT_NAME == 'workflow_dispatch' && (inputs.binary == 'polkadot-omni-node' || inputs.binary == 'chain-spec-builder') }}
- run: |
- . ./.github/scripts/common/lib.sh
-
- RELEASE_ID="${{ needs.validate-inputs.outputs.RELEASE_ID }}"
- fetch_release_artifacts
-
- name: Upload artifacts
uses: actions/upload-artifact@5d5d22a31266ced268874388b861e4b58bb5c2f3 # v4.3.1
with:
name: release-artifacts
- path: release-artifacts/${{ env.BINARY }}/**/*
+ path: release-artifacts/x86_64-unknown-linux-gnu/${{ env.BINARY }}/**/*
build-container: # this job will be triggered for the polkadot-parachain rc and release or polkadot rc image build
if: ${{ inputs.binary == 'polkadot-omni-node' || inputs.binary == 'polkadot-parachain' || inputs.binary == 'chain-spec-builder' || inputs.image_type == 'rc' }}
@@ -173,7 +134,7 @@ jobs:
uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8
- name: Check sha256 ${{ env.BINARY }}
- if: ${{ inputs.binary == 'polkadot-parachain' || inputs.binary == 'polkadot' }}
+ # if: ${{ inputs.binary == 'polkadot-parachain' || inputs.binary == 'polkadot' }}
working-directory: release-artifacts
run: |
. ../.github/scripts/common/lib.sh
@@ -182,7 +143,7 @@ jobs:
check_sha256 $BINARY && echo "OK" || echo "ERR"
- name: Check GPG ${{ env.BINARY }}
- if: ${{ inputs.binary == 'polkadot-parachain' || inputs.binary == 'polkadot' }}
+ # if: ${{ inputs.binary == 'polkadot-parachain' || inputs.binary == 'polkadot' }}
working-directory: release-artifacts
run: |
. ../.github/scripts/common/lib.sh
@@ -190,35 +151,29 @@ jobs:
check_gpg $BINARY
- name: Fetch rc commit and tag
+ working-directory: release-artifacts
if: ${{ env.IMAGE_TYPE == 'rc' }}
id: fetch_rc_refs
+ shell: bash
run: |
- . ./.github/scripts/common/lib.sh
-
- echo "release=${{ needs.validate-inputs.outputs.stable_tag }}" >> $GITHUB_OUTPUT
+ . ../.github/scripts/common/lib.sh
commit=$(git rev-parse --short HEAD) && \
echo "commit=${commit}" >> $GITHUB_OUTPUT
-
- echo "tag=${{ needs.validate-inputs.outputs.version }}" >> $GITHUB_OUTPUT
+ echo "release=$(echo ${{ needs.validate-inputs.outputs.version }})" >> $GITHUB_OUTPUT
+ echo "tag=$(prepare_docker_stable_tag ${{ needs.validate-inputs.outputs.stable_tag }})" >> $GITHUB_OUTPUT
- name: Fetch release tags
working-directory: release-artifacts
if: ${{ env.IMAGE_TYPE == 'release'}}
id: fetch_release_refs
+ shell: bash
run: |
- chmod a+rx $BINARY
-
- if [[ $BINARY != 'chain-spec-builder' ]]; then
- VERSION=$(./$BINARY --version | awk '{ print $2 }' )
- release=$( echo $VERSION | cut -f1 -d- )
- else
- release=$(echo ${{ needs.validate-inputs.outputs.VERSION }} | sed 's/^v//')
- fi
+ . ../.github/scripts/common/lib.sh
echo "tag=latest" >> $GITHUB_OUTPUT
- echo "release=${release}" >> $GITHUB_OUTPUT
- echo "stable=${{ needs.validate-inputs.outputs.stable_tag }}" >> $GITHUB_OUTPUT
+ echo "release=$(echo ${{ needs.validate-inputs.outputs.version }})" >> $GITHUB_OUTPUT
+ echo "stable=$(prepare_docker_stable_tag ${{ needs.validate-inputs.outputs.stable_tag }})" >> $GITHUB_OUTPUT
- name: Build Injected Container image for polkadot rc
if: ${{ env.BINARY == 'polkadot' }}
@@ -342,8 +297,10 @@ jobs:
- name: Fetch values
id: fetch-data
run: |
+ . ./.github/scripts/common/lib.sh
date=$(date -u '+%Y-%m-%dT%H:%M:%SZ')
echo "date=$date" >> $GITHUB_OUTPUT
+ echo "stable=$(prepare_docker_stable_tag ${{ needs.validate-inputs.outputs.stable_tag }})" >> $GITHUB_OUTPUT
- name: Build and push
id: docker_build
@@ -354,9 +311,9 @@ jobs:
# TODO: The owner should be used below but buildx does not resolve the VARs
# TODO: It would be good to get rid of this GHA that we don't really need.
tags: |
- parity/polkadot:${{ needs.validate-inputs.outputs.stable_tag }}
- parity/polkadot:latest
- parity/polkadot:${{ needs.fetch-latest-debian-package-version.outputs.polkadot_container_tag }}
+ egorpop/polkadot:${{ steps.fetch-data.outputs.stable }}
+ egorpop/polkadot:latest
+ egorpop/polkadot:${{ needs.fetch-latest-debian-package-version.outputs.polkadot_container_tag }}
build-args: |
VCS_REF=${{ github.ref }}
POLKADOT_VERSION=${{ needs.fetch-latest-debian-package-version.outputs.polkadot_apt_version }}
diff --git a/.github/workflows/release-reusable-promote-to-final.yml b/.github/workflows/release-reusable-promote-to-final.yml
new file mode 100644
index 000000000000..ed4a80a01e82
--- /dev/null
+++ b/.github/workflows/release-reusable-promote-to-final.yml
@@ -0,0 +1,83 @@
+name: Promote rc to final
+
+on:
+ workflow_call:
+ inputs:
+ package:
+ description: Package to be promoted
+ required: true
+ type: string
+
+ release_tag:
+ description: Tag matching the actual release candidate with the format polkadot-stableYYMM(-X)-rcX that will be promoted to the final form polkadot-stableYYMM(-X)
+ required: true
+ type: string
+
+ target:
+ description: Target triple for which the artifacts are being uploaded (e.g. aarch64-apple-darwin)
+ required: true
+ type: string
+
+ secrets:
+ AWS_DEFAULT_REGION:
+ required: true
+ AWS_RELEASE_ACCESS_KEY_ID:
+ required: true
+ AWS_RELEASE_SECRET_ACCESS_KEY:
+ required: true
+
+jobs:
+
+ promote-release-artifacts:
+ environment: release
+ runs-on: ubuntu-latest
+ env:
+ AWS_ACCESS_KEY_ID: ${{ secrets.AWS_RELEASE_ACCESS_KEY_ID }}
+ AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_RELEASE_SECRET_ACCESS_KEY }}
+ AWS_REGION: ${{ secrets.AWS_DEFAULT_REGION }}
+
+ steps:
+ - name: Checkout sources
+ uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+
+ - name: Prepare final tag
+ id: prepare_final_tag
+ shell: bash
+ run: |
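+ # Drop the trailing -rcX suffix, e.g. polkadot-stable2412-rc2 -> polkadot-stable2412.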
+ tag="$(echo ${{ inputs.release_tag }} | sed 's/-rc[0-9]*$//')"
+ echo $tag
+ echo "FINAL_TAG=${tag}" >> $GITHUB_OUTPUT
+
+ - name: Fetch binaries from s3 based on version
+ run: |
+ . ./.github/scripts/common/lib.sh
+
+ VERSION="${{ inputs.release_tag }}"
+ if [[ ${{ inputs.package }} == 'polkadot' ]]; then
+ packages=(polkadot polkadot-prepare-worker polkadot-execute-worker)
+ for package in "${packages[@]}"; do
+ fetch_release_artifacts_from_s3 $package ${{ inputs.target }}
+ done
+ else
+ fetch_release_artifacts_from_s3 ${{ inputs.package }} ${{ inputs.target }}
+ fi
+
+ - name: Configure AWS Credentials
+ uses: aws-actions/configure-aws-credentials@e3dd6a429d7300a6a4c196c26e071d42e0343502 # v4.0.2
+ with:
+ aws-access-key-id: ${{ env.AWS_ACCESS_KEY_ID }}
+ aws-secret-access-key: ${{ env.AWS_SECRET_ACCESS_KEY }}
+ aws-region: ${{ env.AWS_REGION }}
+
+ - name: Upload ${{ inputs.package }} ${{ inputs.target }} artifacts to s3
+ run: |
+ . ./.github/scripts/release/release_lib.sh
+
+ if [[ ${{ inputs.package }} == 'polkadot' ]]; then
+ packages=(polkadot polkadot-prepare-worker polkadot-execute-worker)
+ for package in "${packages[@]}"; do
+ upload_s3_release $package ${{ steps.prepare_final_tag.outputs.final_tag }} ${{ inputs.target }}
+ done
+ else
+ upload_s3_release ${{ inputs.package }} ${{ steps.prepare_final_tag.outputs.final_tag }} ${{ inputs.target }}
+ fi
diff --git a/.github/workflows/release-reusable-rc-buid.yml b/.github/workflows/release-reusable-rc-buid.yml
index 7e31a4744b59..0222b2aa91e2 100644
--- a/.github/workflows/release-reusable-rc-buid.yml
+++ b/.github/workflows/release-reusable-rc-buid.yml
@@ -104,7 +104,7 @@ jobs:
./.github/scripts/release/build-linux-release.sh ${{ matrix.binaries }} ${{ inputs.package }}
- name: Generate artifact attestation
- uses: actions/attest-build-provenance@1c608d11d69870c2092266b3f9a6f3abbf17002c # v1.4.3
+ uses: actions/attest-build-provenance@ef244123eb79f2f7a7e75d99086184180e6d0018 # v1.4.4
with:
subject-path: /artifacts/${{ matrix.binaries }}/${{ matrix.binaries }}
@@ -133,7 +133,7 @@ jobs:
- name: Upload ${{ matrix.binaries }} artifacts
uses: actions/upload-artifact@5d5d22a31266ced268874388b861e4b58bb5c2f3 # v4.3.1
with:
- name: ${{ matrix.binaries }}
+ name: ${{ matrix.binaries }}_${{ inputs.target }}
path: /artifacts/${{ matrix.binaries }}
build-macos-rc:
@@ -220,7 +220,7 @@ jobs:
./.github/scripts/release/build-macos-release.sh ${{ matrix.binaries }} ${{ inputs.package }}
- name: Generate artifact attestation
- uses: actions/attest-build-provenance@1c608d11d69870c2092266b3f9a6f3abbf17002c # v1.4.3
+ uses: actions/attest-build-provenance@ef244123eb79f2f7a7e75d99086184180e6d0018 # v1.4.4
with:
subject-path: ${{ env.ARTIFACTS_PATH }}/${{ matrix.binaries }}
@@ -278,14 +278,14 @@ jobs:
. "${GITHUB_WORKSPACE}"/.github/scripts/release/build-deb.sh ${{ inputs.package }} ${VERSION}
- name: Generate artifact attestation
- uses: actions/attest-build-provenance@1c608d11d69870c2092266b3f9a6f3abbf17002c # v1.4.3
+ uses: actions/attest-build-provenance@ef244123eb79f2f7a7e75d99086184180e6d0018 # v1.4.4
with:
subject-path: target/production/*.deb
- name: Upload ${{inputs.package }} artifacts
uses: actions/upload-artifact@5d5d22a31266ced268874388b861e4b58bb5c2f3 # v4.3.1
with:
- name: ${{ inputs.package }}
+ name: ${{ inputs.package }}_${{ inputs.target }}
path: target/production
overwrite: true
@@ -302,7 +302,6 @@ jobs:
AWS_RELEASE_ACCESS_KEY_ID: ${{ secrets.AWS_RELEASE_ACCESS_KEY_ID }}
AWS_RELEASE_SECRET_ACCESS_KEY: ${{ secrets.AWS_RELEASE_SECRET_ACCESS_KEY }}
-
upload-polkadot-parachain-artifacts-to-s3:
if: ${{ inputs.package == 'polkadot-parachain-bin' && inputs.target == 'x86_64-unknown-linux-gnu' }}
needs: [build-rc]
@@ -329,6 +328,32 @@ jobs:
AWS_RELEASE_ACCESS_KEY_ID: ${{ secrets.AWS_RELEASE_ACCESS_KEY_ID }}
AWS_RELEASE_SECRET_ACCESS_KEY: ${{ secrets.AWS_RELEASE_SECRET_ACCESS_KEY }}
+ upload-frame-omni-bencher-artifacts-to-s3:
+ if: ${{ inputs.package == 'frame-omni-bencher' && inputs.target == 'x86_64-unknown-linux-gnu' }}
+ needs: [build-rc]
+ uses: ./.github/workflows/release-reusable-s3-upload.yml
+ with:
+ package: ${{ inputs.package }}
+ release_tag: ${{ inputs.release_tag }}
+ target: ${{ inputs.target }}
+ secrets:
+ AWS_DEFAULT_REGION: ${{ secrets.AWS_DEFAULT_REGION }}
+ AWS_RELEASE_ACCESS_KEY_ID: ${{ secrets.AWS_RELEASE_ACCESS_KEY_ID }}
+ AWS_RELEASE_SECRET_ACCESS_KEY: ${{ secrets.AWS_RELEASE_SECRET_ACCESS_KEY }}
+
+ upload-chain-spec-builder-artifacts-to-s3:
+ if: ${{ inputs.package == 'staging-chain-spec-builder' && inputs.target == 'x86_64-unknown-linux-gnu' }}
+ needs: [build-rc]
+ uses: ./.github/workflows/release-reusable-s3-upload.yml
+ with:
+ package: chain-spec-builder
+ release_tag: ${{ inputs.release_tag }}
+ target: ${{ inputs.target }}
+ secrets:
+ AWS_DEFAULT_REGION: ${{ secrets.AWS_DEFAULT_REGION }}
+ AWS_RELEASE_ACCESS_KEY_ID: ${{ secrets.AWS_RELEASE_ACCESS_KEY_ID }}
+ AWS_RELEASE_SECRET_ACCESS_KEY: ${{ secrets.AWS_RELEASE_SECRET_ACCESS_KEY }}
+
upload-polkadot-macos-artifacts-to-s3:
if: ${{ inputs.package == 'polkadot' && inputs.target == 'aarch64-apple-darwin' }}
# TODO: add and use a `build-polkadot-homebrew-package` which packs all `polkadot` binaries:
@@ -395,3 +420,29 @@ jobs:
AWS_DEFAULT_REGION: ${{ secrets.AWS_DEFAULT_REGION }}
AWS_RELEASE_ACCESS_KEY_ID: ${{ secrets.AWS_RELEASE_ACCESS_KEY_ID }}
AWS_RELEASE_SECRET_ACCESS_KEY: ${{ secrets.AWS_RELEASE_SECRET_ACCESS_KEY }}
+
+ upload-frame-omni-bencher-macos-artifacts-to-s3:
+ if: ${{ inputs.package == 'frame-omni-bencher' && inputs.target == 'aarch64-apple-darwin' }}
+ needs: [build-macos-rc]
+ uses: ./.github/workflows/release-reusable-s3-upload.yml
+ with:
+ package: ${{ inputs.package }}
+ release_tag: ${{ inputs.release_tag }}
+ target: ${{ inputs.target }}
+ secrets:
+ AWS_DEFAULT_REGION: ${{ secrets.AWS_DEFAULT_REGION }}
+ AWS_RELEASE_ACCESS_KEY_ID: ${{ secrets.AWS_RELEASE_ACCESS_KEY_ID }}
+ AWS_RELEASE_SECRET_ACCESS_KEY: ${{ secrets.AWS_RELEASE_SECRET_ACCESS_KEY }}
+
+ upload-chain-spec-builder-macos-artifacts-to-s3:
+ if: ${{ inputs.package == 'staging-chain-spec-builder' && inputs.target == 'aarch64-apple-darwin' }}
+ needs: [build-macos-rc]
+ uses: ./.github/workflows/release-reusable-s3-upload.yml
+ with:
+ package: chain-spec-builder
+ release_tag: ${{ inputs.release_tag }}
+ target: ${{ inputs.target }}
+ secrets:
+ AWS_DEFAULT_REGION: ${{ secrets.AWS_DEFAULT_REGION }}
+ AWS_RELEASE_ACCESS_KEY_ID: ${{ secrets.AWS_RELEASE_ACCESS_KEY_ID }}
+ AWS_RELEASE_SECRET_ACCESS_KEY: ${{ secrets.AWS_RELEASE_SECRET_ACCESS_KEY }}
diff --git a/.github/workflows/release-reusable-s3-upload.yml b/.github/workflows/release-reusable-s3-upload.yml
index f85466bc8c07..48c7e53c6c8f 100644
--- a/.github/workflows/release-reusable-s3-upload.yml
+++ b/.github/workflows/release-reusable-s3-upload.yml
@@ -9,7 +9,7 @@ on:
type: string
release_tag:
- description: Tag matching the actual release candidate with the format stableYYMM-rcX or stableYYMM-rcX
+ description: Tag matching the actual release candidate with the format polkadot-stableYYMM(-X)-rcX or polkadot-stableYYMM(-X)
required: true
type: string
@@ -40,18 +40,10 @@ jobs:
uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0
- name: Download amd64 artifacts
- if: ${{ inputs.target == 'x86_64-unknown-linux-gnu' }}
uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8
with:
- name: ${{ inputs.package }}
- path: artifacts/${{ inputs.package }}
-
- - name: Download arm artifacts
- if: ${{ inputs.target == 'aarch64-apple-darwin' }}
- uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8
- with:
- name: ${{ inputs.package }}_aarch64-apple-darwin
- path: artifacts/${{ inputs.package }}
+ name: ${{ inputs.package }}_${{ inputs.target }}
+ path: release-artifacts/${{ inputs.target }}/${{ inputs.package }}
- name: Configure AWS Credentials
uses: aws-actions/configure-aws-credentials@e3dd6a429d7300a6a4c196c26e071d42e0343502 # v4.0.2
diff --git a/.github/workflows/release-srtool.yml b/.github/workflows/release-srtool.yml
index 9a29b46d2fc3..fc10496d481b 100644
--- a/.github/workflows/release-srtool.yml
+++ b/.github/workflows/release-srtool.yml
@@ -1,7 +1,7 @@
name: Srtool build
env:
- SUBWASM_VERSION: 0.20.0
+ SUBWASM_VERSION: 0.21.0
TOML_CLI_VERSION: 0.2.4
on:
@@ -11,14 +11,16 @@ on:
type: string
build_opts:
type: string
+ profile:
+ type: string
outputs:
published_runtimes:
value: ${{ jobs.find-runtimes.outputs.runtime }}
- schedule:
- - cron: "00 02 * * 1" # 2AM weekly on monday
-
- workflow_dispatch:
+permissions:
+ id-token: write
+ attestations: write
+ contents: read
jobs:
find-runtimes:
@@ -75,6 +77,7 @@ jobs:
with:
chain: ${{ matrix.chain }}
runtime_dir: ${{ matrix.runtime_dir }}
+ profile: ${{ inputs.profile }}
- name: Summary
run: |
@@ -83,6 +86,11 @@ jobs:
echo "Compact Runtime: ${{ steps.srtool_build.outputs.wasm }}"
echo "Compressed Runtime: ${{ steps.srtool_build.outputs.wasm_compressed }}"
+ - name: Generate artifact attestation
+ uses: actions/attest-build-provenance@1c608d11d69870c2092266b3f9a6f3abbf17002c # v1.4.3
+ with:
+ subject-path: ${{ steps.srtool_build.outputs.wasm }}
+
# We now get extra information thanks to subwasm
- name: Install subwasm
run: |
diff --git a/.github/workflows/tests-linux-stable-coverage.yml b/.github/workflows/tests-linux-stable-coverage.yml
index c5af6bcae77f..61e01cda4428 100644
--- a/.github/workflows/tests-linux-stable-coverage.yml
+++ b/.github/workflows/tests-linux-stable-coverage.yml
@@ -102,7 +102,7 @@ jobs:
merge-multiple: true
- run: ls -al reports/
- name: Upload to Codecov
- uses: codecov/codecov-action@v4
+ uses: codecov/codecov-action@v5
with:
token: ${{ secrets.CODECOV_TOKEN }}
verbose: true
diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index f508404f1efa..42a7e87bda43 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -22,7 +22,7 @@ workflow:
variables:
# CI_IMAGE: !reference [ .ci-unified, variables, CI_IMAGE ]
- CI_IMAGE: "docker.io/paritytech/ci-unified:bullseye-1.81.0-2024-09-11-v202409111034"
+ CI_IMAGE: "docker.io/paritytech/ci-unified:bullseye-1.81.0-2024-11-19-v202411281558"
# BUILDAH_IMAGE is defined in group variables
BUILDAH_COMMAND: "buildah --storage-driver overlay2"
RELENG_SCRIPTS_BRANCH: "master"
diff --git a/.gitlab/pipeline/zombienet/polkadot.yml b/.gitlab/pipeline/zombienet/polkadot.yml
index 3dab49a118e5..ac4bdac7ad15 100644
--- a/.gitlab/pipeline/zombienet/polkadot.yml
+++ b/.gitlab/pipeline/zombienet/polkadot.yml
@@ -179,7 +179,7 @@ zombienet-polkadot-elastic-scaling-0001-basic-3cores-6s-blocks:
--local-dir="${LOCAL_DIR}/elastic_scaling"
--test="0001-basic-3cores-6s-blocks.zndsl"
-zombienet-polkadot-elastic-scaling-0002-elastic-scaling-doesnt-break-parachains:
+.zombienet-polkadot-elastic-scaling-0002-elastic-scaling-doesnt-break-parachains:
extends:
- .zombienet-polkadot-common
before_script:
@@ -233,7 +233,7 @@ zombienet-polkadot-functional-0015-coretime-shared-core:
--local-dir="${LOCAL_DIR}/functional"
--test="0016-approval-voting-parallel.zndsl"
-zombienet-polkadot-functional-0017-sync-backing:
+.zombienet-polkadot-functional-0017-sync-backing:
extends:
- .zombienet-polkadot-common
script:
diff --git a/Cargo.lock b/Cargo.lock
index 5df1cfcbc2aa..cdc690ad36b5 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -5978,6 +5978,15 @@ dependencies = [
"dirs-sys-next",
]
+[[package]]
+name = "dirs"
+version = "5.0.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "44c45a9d03d6676652bcb5e724c7e988de1acad23a711b5217ab9cbecbec2225"
+dependencies = [
+ "dirs-sys",
+]
+
[[package]]
name = "dirs-sys"
version = "0.4.1"
@@ -10219,9 +10228,9 @@ dependencies = [
[[package]]
name = "litep2p"
-version = "0.8.2"
+version = "0.8.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "569e7dbec8a0d4b08d30f4942cd579cfe8db5d3f83f8604abe61697c38d17e73"
+checksum = "14e490b5a6d486711fd0284bd30e607a287343f2935a59a9192bd7109e85f443"
dependencies = [
"async-trait",
"bs58",
@@ -14627,6 +14636,7 @@ dependencies = [
"assert_matches",
"bitflags 1.3.2",
"derive_more 0.99.17",
+ "env_logger 0.11.3",
"environmental",
"ethereum-types 0.15.1",
"frame-benchmarking 28.0.0",
@@ -14649,7 +14659,7 @@ dependencies = [
"pallet-utility 28.0.0",
"parity-scale-codec",
"paste",
- "polkavm 0.13.0",
+ "polkavm 0.17.0",
"pretty_assertions",
"rlp 0.6.1",
"scale-info",
@@ -14745,12 +14755,10 @@ dependencies = [
"anyhow",
"frame-system 28.0.0",
"log",
- "parity-wasm",
- "polkavm-linker 0.14.0",
+ "polkavm-linker 0.17.1",
"sp-core 28.0.0",
"sp-io 30.0.0",
"sp-runtime 31.0.1",
- "tempfile",
"toml 0.8.12",
]
@@ -14867,7 +14875,7 @@ dependencies = [
"bitflags 1.3.2",
"parity-scale-codec",
"paste",
- "polkavm-derive 0.14.0",
+ "polkavm-derive 0.17.0",
"scale-info",
]
@@ -16658,6 +16666,7 @@ dependencies = [
"sp-runtime 31.0.1",
"staging-xcm 7.0.0",
"staging-xcm-executor 7.0.0",
+ "westend-runtime",
"westend-runtime-constants 7.0.0",
"westend-system-emulated-network",
]
@@ -18571,7 +18580,6 @@ dependencies = [
"pallet-remark 28.0.0",
"pallet-revive 0.1.0",
"pallet-revive-eth-rpc",
- "pallet-revive-fixtures 0.1.0",
"pallet-revive-mock-network 0.1.0",
"pallet-revive-proc-macro 0.1.0",
"pallet-revive-uapi 0.1.0",
@@ -19702,15 +19710,15 @@ dependencies = [
[[package]]
name = "polkavm"
-version = "0.13.0"
+version = "0.17.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "57e79a14b15ed38cb5b9a1e38d02e933f19e3d180ae5b325fed606c5e5b9177e"
+checksum = "84979be196ba2855f73616413e7b1d18258128aa396b3dc23f520a00a807720e"
dependencies = [
"libc",
"log",
- "polkavm-assembler 0.13.0",
- "polkavm-common 0.13.0",
- "polkavm-linux-raw 0.13.0",
+ "polkavm-assembler 0.17.0",
+ "polkavm-common 0.17.0",
+ "polkavm-linux-raw 0.17.0",
]
[[package]]
@@ -19733,9 +19741,9 @@ dependencies = [
[[package]]
name = "polkavm-assembler"
-version = "0.13.0"
+version = "0.17.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4e8da55465000feb0a61bbf556ed03024db58f3420eca37721fc726b3b2136bf"
+checksum = "0ba7b434ff630b0f73a1560e8baea807246ca22098abe49f97821e0e2d2accc4"
dependencies = [
"log",
]
@@ -19767,20 +19775,14 @@ dependencies = [
[[package]]
name = "polkavm-common"
-version = "0.13.0"
+version = "0.17.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "084b4339aae7dfdaaa5aa7d634110afd95970e0737b6fb2a0cb10db8b56b753c"
+checksum = "8f0dbafef4ab6ceecb4982ac3b550df430ef4f9fdbf07c108b7d4f91a0682fce"
dependencies = [
"log",
- "polkavm-assembler 0.13.0",
+ "polkavm-assembler 0.17.0",
]
-[[package]]
-name = "polkavm-common"
-version = "0.14.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "711952a783e9c5ad407cdacb1ed147f36d37c5d43417c1091d86456d2999417b"
-
[[package]]
name = "polkavm-derive"
version = "0.8.0"
@@ -19810,11 +19812,11 @@ dependencies = [
[[package]]
name = "polkavm-derive"
-version = "0.14.0"
+version = "0.17.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b4832a0aebf6cefc988bb7b2d74ea8c86c983164672e2fc96300f356a1babfc1"
+checksum = "c0c3dbb6c8c7bd3e5f5b05aa7fc9355acf14df7ce5d392911e77d01090a38d0d"
dependencies = [
- "polkavm-derive-impl-macro 0.14.0",
+ "polkavm-derive-impl-macro 0.17.0",
]
[[package]]
@@ -19855,11 +19857,11 @@ dependencies = [
[[package]]
name = "polkavm-derive-impl"
-version = "0.14.0"
+version = "0.17.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e339fc7c11310fe5adf711d9342278ac44a75c9784947937cce12bd4f30842f2"
+checksum = "42565aed4adbc4034612d0b17dea8db3681fb1bd1aed040d6edc5455a9f478a1"
dependencies = [
- "polkavm-common 0.14.0",
+ "polkavm-common 0.17.0",
"proc-macro2 1.0.86",
"quote 1.0.37",
"syn 2.0.87",
@@ -19897,11 +19899,11 @@ dependencies = [
[[package]]
name = "polkavm-derive-impl-macro"
-version = "0.14.0"
+version = "0.17.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b569754b15060d03000c09e3bf11509d527f60b75d79b4c30c3625b5071d9702"
+checksum = "86d9838e95241b0bce4fe269cdd4af96464160505840ed5a8ac8536119ba19e2"
dependencies = [
- "polkavm-derive-impl 0.14.0",
+ "polkavm-derive-impl 0.17.0",
"syn 2.0.87",
]
@@ -19937,15 +19939,16 @@ dependencies = [
[[package]]
name = "polkavm-linker"
-version = "0.14.0"
+version = "0.17.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0959ac3b0f4fd5caf5c245c637705f19493efe83dba31a83bbba928b93b0116a"
+checksum = "0422ead3030d5cde69e2206dbc7d65da872b121876507cd5363f6c6e6aa45157"
dependencies = [
+ "dirs",
"gimli 0.31.1",
"hashbrown 0.14.5",
"log",
"object 0.36.1",
- "polkavm-common 0.14.0",
+ "polkavm-common 0.17.0",
"regalloc2 0.9.3",
"rustc-demangle",
]
@@ -19964,9 +19967,9 @@ checksum = "26e45fa59c7e1bb12ef5289080601e9ec9b31435f6e32800a5c90c132453d126"
[[package]]
name = "polkavm-linux-raw"
-version = "0.13.0"
+version = "0.17.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "686c4dd9c9c16cc22565b51bdbb269792318d0fd2e6b966b5f6c788534cad0e9"
+checksum = "e64c3d93a58ffbc3099d1227f0da9675a025a9ea6c917038f266920c1de1e568"
[[package]]
name = "polling"
@@ -23716,9 +23719,9 @@ dependencies = [
[[package]]
name = "scale-info"
-version = "2.11.5"
+version = "2.11.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1aa7ffc1c0ef49b0452c6e2986abf2b07743320641ffd5fc63d552458e3b779b"
+checksum = "346a3b32eba2640d17a9cb5927056b08f3de90f65b72fe09402c2ad07d684d0b"
dependencies = [
"bitvec",
"cfg-if",
@@ -23730,9 +23733,9 @@ dependencies = [
[[package]]
name = "scale-info-derive"
-version = "2.11.5"
+version = "2.11.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "46385cc24172cf615450267463f937c10072516359b3ff1cb24228a4a08bf951"
+checksum = "c6630024bf739e2179b91fb424b28898baf819414262c5d376677dbff1fe7ebf"
dependencies = [
"proc-macro-crate 3.1.0",
"proc-macro2 1.0.86",
@@ -25606,6 +25609,7 @@ dependencies = [
"sp-api 26.0.0",
"sp-consensus",
"sp-core 28.0.0",
+ "sp-metadata-ir 0.6.0",
"sp-runtime 31.0.1",
"sp-state-machine 0.35.0",
"sp-tracing 16.0.0",
@@ -26551,7 +26555,7 @@ dependencies = [
"libsecp256k1",
"log",
"parity-scale-codec",
- "polkavm-derive 0.9.1",
+ "polkavm-derive 0.17.0",
"rustversion",
"secp256k1 0.28.2",
"sp-core 28.0.0",
@@ -27035,7 +27039,7 @@ dependencies = [
"bytes",
"impl-trait-for-tuples",
"parity-scale-codec",
- "polkavm-derive 0.9.1",
+ "polkavm-derive 0.17.0",
"primitive-types 0.13.1",
"rustversion",
"sp-core 28.0.0",
@@ -28679,7 +28683,7 @@ dependencies = [
"merkleized-metadata",
"parity-scale-codec",
"parity-wasm",
- "polkavm-linker 0.9.2",
+ "polkavm-linker 0.17.1",
"sc-executor 0.32.0",
"shlex",
"sp-core 28.0.0",
diff --git a/Cargo.toml b/Cargo.toml
index eadf65cc19a6..8e2e7a78b31e 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -851,7 +851,7 @@ linked-hash-map = { version = "0.5.4" }
linked_hash_set = { version = "0.1.4" }
linregress = { version = "0.5.1" }
lite-json = { version = "0.2.0", default-features = false }
-litep2p = { version = "0.8.2", features = ["websocket"] }
+litep2p = { version = "0.8.3", features = ["websocket"] }
log = { version = "0.4.22", default-features = false }
macro_magic = { version = "0.5.1" }
maplit = { version = "1.0.2" }
@@ -1093,8 +1093,8 @@ polkadot-test-client = { path = "polkadot/node/test/client" }
polkadot-test-runtime = { path = "polkadot/runtime/test-runtime" }
polkadot-test-service = { path = "polkadot/node/test/service" }
polkavm = { version = "0.9.3", default-features = false }
-polkavm-derive = "0.9.1"
-polkavm-linker = "0.9.2"
+polkavm-derive = "0.17.0"
+polkavm-linker = "0.17.1"
portpicker = { version = "0.1.1" }
pretty_assertions = { version = "1.3.0" }
primitive-types = { version = "0.13.1", default-features = false, features = [
@@ -1200,7 +1200,7 @@ sc-tracing-proc-macro = { path = "substrate/client/tracing/proc-macro", default-
sc-transaction-pool = { path = "substrate/client/transaction-pool", default-features = false }
sc-transaction-pool-api = { path = "substrate/client/transaction-pool/api", default-features = false }
sc-utils = { path = "substrate/client/utils", default-features = false }
-scale-info = { version = "2.11.1", default-features = false }
+scale-info = { version = "2.11.6", default-features = false }
schemars = { version = "0.8.13", default-features = false }
schnellru = { version = "0.2.3" }
schnorrkel = { version = "0.11.4", default-features = false }
diff --git a/bridges/chains/chain-polkadot-bulletin/src/lib.rs b/bridges/chains/chain-polkadot-bulletin/src/lib.rs
index c5c18beb2cad..070bc7b0ba3d 100644
--- a/bridges/chains/chain-polkadot-bulletin/src/lib.rs
+++ b/bridges/chains/chain-polkadot-bulletin/src/lib.rs
@@ -225,4 +225,4 @@ impl ChainWithMessages for PolkadotBulletin {
}
decl_bridge_finality_runtime_apis!(polkadot_bulletin, grandpa);
-decl_bridge_messages_runtime_apis!(polkadot_bulletin, bp_messages::HashedLaneId);
+decl_bridge_messages_runtime_apis!(polkadot_bulletin, bp_messages::LegacyLaneId);
diff --git a/bridges/relays/utils/src/initialize.rs b/bridges/relays/utils/src/initialize.rs
index 564ed1f0e5cc..deb9b9d059d5 100644
--- a/bridges/relays/utils/src/initialize.rs
+++ b/bridges/relays/utils/src/initialize.rs
@@ -52,9 +52,10 @@ pub fn initialize_logger(with_timestamp: bool) {
format,
);
- let env_filter = EnvFilter::from_default_env()
- .add_directive(Level::WARN.into())
- .add_directive("bridge=info".parse().expect("static filter string is valid"));
+ let env_filter = EnvFilter::builder()
+ .with_default_directive(Level::WARN.into())
+ .with_default_directive("bridge=info".parse().expect("static filter string is valid"))
+ .from_env_lossy();
let builder = SubscriberBuilder::default().with_env_filter(env_filter);
diff --git a/bridges/snowbridge/runtime/test-common/Cargo.toml b/bridges/snowbridge/runtime/test-common/Cargo.toml
index 6f8e586bf5ff..9f47f158ed4a 100644
--- a/bridges/snowbridge/runtime/test-common/Cargo.toml
+++ b/bridges/snowbridge/runtime/test-common/Cargo.toml
@@ -6,6 +6,8 @@ authors = ["Snowfork "]
edition.workspace = true
license = "Apache-2.0"
categories = ["cryptography::cryptocurrencies"]
+homepage.workspace = true
+repository.workspace = true
[lints]
workspace = true
diff --git a/cumulus/client/cli/Cargo.toml b/cumulus/client/cli/Cargo.toml
index 9b6f6b73960b..198f9428f1dd 100644
--- a/cumulus/client/cli/Cargo.toml
+++ b/cumulus/client/cli/Cargo.toml
@@ -5,6 +5,8 @@ authors.workspace = true
edition.workspace = true
description = "Parachain node CLI utilities."
license = "GPL-3.0-or-later WITH Classpath-exception-2.0"
+homepage.workspace = true
+repository.workspace = true
[lints]
workspace = true
diff --git a/cumulus/client/collator/Cargo.toml b/cumulus/client/collator/Cargo.toml
index 6ebde0c2c653..83a3f2661e7a 100644
--- a/cumulus/client/collator/Cargo.toml
+++ b/cumulus/client/collator/Cargo.toml
@@ -5,6 +5,8 @@ authors.workspace = true
edition.workspace = true
description = "Common node-side functionality and glue code to collate parachain blocks."
license = "GPL-3.0-or-later WITH Classpath-exception-2.0"
+homepage.workspace = true
+repository.workspace = true
[lints]
workspace = true
diff --git a/cumulus/client/consensus/aura/Cargo.toml b/cumulus/client/consensus/aura/Cargo.toml
index 0bb2de6bb9b8..6e0c124591cb 100644
--- a/cumulus/client/consensus/aura/Cargo.toml
+++ b/cumulus/client/consensus/aura/Cargo.toml
@@ -5,6 +5,8 @@ version = "0.7.0"
authors.workspace = true
edition.workspace = true
license = "GPL-3.0-or-later WITH Classpath-exception-2.0"
+homepage.workspace = true
+repository.workspace = true
[lints]
workspace = true
diff --git a/cumulus/client/consensus/common/Cargo.toml b/cumulus/client/consensus/common/Cargo.toml
index 4bc2f1d1e600..0f532a2101c4 100644
--- a/cumulus/client/consensus/common/Cargo.toml
+++ b/cumulus/client/consensus/common/Cargo.toml
@@ -5,6 +5,8 @@ version = "0.7.0"
authors.workspace = true
edition.workspace = true
license = "GPL-3.0-or-later WITH Classpath-exception-2.0"
+homepage.workspace = true
+repository.workspace = true
[lints]
workspace = true
diff --git a/cumulus/client/consensus/proposer/Cargo.toml b/cumulus/client/consensus/proposer/Cargo.toml
index bb760ae03f4d..e391481bc445 100644
--- a/cumulus/client/consensus/proposer/Cargo.toml
+++ b/cumulus/client/consensus/proposer/Cargo.toml
@@ -5,6 +5,8 @@ version = "0.7.0"
authors.workspace = true
edition.workspace = true
license = "GPL-3.0-or-later WITH Classpath-exception-2.0"
+homepage.workspace = true
+repository.workspace = true
[lints]
workspace = true
diff --git a/cumulus/client/consensus/relay-chain/Cargo.toml b/cumulus/client/consensus/relay-chain/Cargo.toml
index f3ee6fc2f7d2..7f0f4333c961 100644
--- a/cumulus/client/consensus/relay-chain/Cargo.toml
+++ b/cumulus/client/consensus/relay-chain/Cargo.toml
@@ -5,6 +5,8 @@ version = "0.7.0"
authors.workspace = true
edition.workspace = true
license = "GPL-3.0-or-later WITH Classpath-exception-2.0"
+homepage.workspace = true
+repository.workspace = true
[lints]
workspace = true
diff --git a/cumulus/client/network/Cargo.toml b/cumulus/client/network/Cargo.toml
index bc67678eedeb..b78df8d73eae 100644
--- a/cumulus/client/network/Cargo.toml
+++ b/cumulus/client/network/Cargo.toml
@@ -5,6 +5,8 @@ authors.workspace = true
description = "Cumulus-specific networking protocol"
edition.workspace = true
license = "GPL-3.0-or-later WITH Classpath-exception-2.0"
+homepage.workspace = true
+repository.workspace = true
[lints]
workspace = true
diff --git a/cumulus/client/parachain-inherent/Cargo.toml b/cumulus/client/parachain-inherent/Cargo.toml
index 0d82cf648743..4f53e2bc1bc2 100644
--- a/cumulus/client/parachain-inherent/Cargo.toml
+++ b/cumulus/client/parachain-inherent/Cargo.toml
@@ -5,6 +5,8 @@ authors.workspace = true
edition.workspace = true
description = "Inherent that needs to be present in every parachain block. Contains messages and a relay chain storage-proof."
license = "Apache-2.0"
+homepage.workspace = true
+repository.workspace = true
[dependencies]
async-trait = { workspace = true }
diff --git a/cumulus/client/pov-recovery/Cargo.toml b/cumulus/client/pov-recovery/Cargo.toml
index 3127dd26fcaa..762837e0bb11 100644
--- a/cumulus/client/pov-recovery/Cargo.toml
+++ b/cumulus/client/pov-recovery/Cargo.toml
@@ -5,6 +5,8 @@ authors.workspace = true
description = "Parachain PoV recovery"
edition.workspace = true
license = "GPL-3.0-or-later WITH Classpath-exception-2.0"
+homepage.workspace = true
+repository.workspace = true
[lints]
workspace = true
diff --git a/cumulus/client/relay-chain-inprocess-interface/Cargo.toml b/cumulus/client/relay-chain-inprocess-interface/Cargo.toml
index 6f1b74191be7..9e6e8da929bb 100644
--- a/cumulus/client/relay-chain-inprocess-interface/Cargo.toml
+++ b/cumulus/client/relay-chain-inprocess-interface/Cargo.toml
@@ -5,6 +5,8 @@ version = "0.7.0"
edition.workspace = true
description = "Implementation of the RelayChainInterface trait for Polkadot full-nodes."
license = "GPL-3.0-or-later WITH Classpath-exception-2.0"
+homepage.workspace = true
+repository.workspace = true
[lints]
workspace = true
diff --git a/cumulus/client/relay-chain-interface/Cargo.toml b/cumulus/client/relay-chain-interface/Cargo.toml
index a496fab050dd..2b9e72bbeca6 100644
--- a/cumulus/client/relay-chain-interface/Cargo.toml
+++ b/cumulus/client/relay-chain-interface/Cargo.toml
@@ -5,6 +5,8 @@ version = "0.7.0"
edition.workspace = true
description = "Common interface for different relay chain datasources."
license = "GPL-3.0-or-later WITH Classpath-exception-2.0"
+homepage.workspace = true
+repository.workspace = true
[lints]
workspace = true
diff --git a/cumulus/client/relay-chain-minimal-node/Cargo.toml b/cumulus/client/relay-chain-minimal-node/Cargo.toml
index 95ecadc8bd06..0fad188bb1ab 100644
--- a/cumulus/client/relay-chain-minimal-node/Cargo.toml
+++ b/cumulus/client/relay-chain-minimal-node/Cargo.toml
@@ -5,6 +5,8 @@ version = "0.7.0"
edition.workspace = true
description = "Minimal node implementation to be used in tandem with RPC or light-client mode."
license = "GPL-3.0-or-later WITH Classpath-exception-2.0"
+homepage.workspace = true
+repository.workspace = true
[lints]
workspace = true
diff --git a/cumulus/client/relay-chain-rpc-interface/Cargo.toml b/cumulus/client/relay-chain-rpc-interface/Cargo.toml
index fb4cb4ceed4e..162f5ad0e9e8 100644
--- a/cumulus/client/relay-chain-rpc-interface/Cargo.toml
+++ b/cumulus/client/relay-chain-rpc-interface/Cargo.toml
@@ -5,6 +5,8 @@ version = "0.7.0"
edition.workspace = true
description = "Implementation of the RelayChainInterface trait that connects to a remote RPC-node."
license = "GPL-3.0-or-later WITH Classpath-exception-2.0"
+homepage.workspace = true
+repository.workspace = true
[lints]
workspace = true
diff --git a/cumulus/client/service/Cargo.toml b/cumulus/client/service/Cargo.toml
index 0a77b465d96a..193283648f19 100644
--- a/cumulus/client/service/Cargo.toml
+++ b/cumulus/client/service/Cargo.toml
@@ -5,6 +5,8 @@ authors.workspace = true
edition.workspace = true
description = "Common functions used to assemble the components of a parachain node."
license = "GPL-3.0-or-later WITH Classpath-exception-2.0"
+homepage.workspace = true
+repository.workspace = true
[lints]
workspace = true
diff --git a/cumulus/pallets/aura-ext/Cargo.toml b/cumulus/pallets/aura-ext/Cargo.toml
index c08148928b7c..fcda79f1d5c1 100644
--- a/cumulus/pallets/aura-ext/Cargo.toml
+++ b/cumulus/pallets/aura-ext/Cargo.toml
@@ -5,6 +5,8 @@ authors.workspace = true
edition.workspace = true
description = "AURA consensus extension pallet for parachains"
license = "Apache-2.0"
+homepage.workspace = true
+repository.workspace = true
[lints]
workspace = true
diff --git a/cumulus/pallets/parachain-system/Cargo.toml b/cumulus/pallets/parachain-system/Cargo.toml
index 3cb0394c4b95..05498a474e42 100644
--- a/cumulus/pallets/parachain-system/Cargo.toml
+++ b/cumulus/pallets/parachain-system/Cargo.toml
@@ -5,6 +5,8 @@ authors.workspace = true
edition.workspace = true
description = "Base pallet for cumulus-based parachains"
license = "Apache-2.0"
+homepage.workspace = true
+repository.workspace = true
[lints]
workspace = true
diff --git a/cumulus/pallets/parachain-system/proc-macro/Cargo.toml b/cumulus/pallets/parachain-system/proc-macro/Cargo.toml
index da6f0fd03efb..629818f9c4cc 100644
--- a/cumulus/pallets/parachain-system/proc-macro/Cargo.toml
+++ b/cumulus/pallets/parachain-system/proc-macro/Cargo.toml
@@ -5,6 +5,8 @@ authors.workspace = true
edition.workspace = true
description = "Proc macros provided by the parachain-system pallet"
license = "Apache-2.0"
+homepage.workspace = true
+repository.workspace = true
[lints]
workspace = true
diff --git a/cumulus/pallets/solo-to-para/Cargo.toml b/cumulus/pallets/solo-to-para/Cargo.toml
index 5fd1939e93a0..2088361bf11a 100644
--- a/cumulus/pallets/solo-to-para/Cargo.toml
+++ b/cumulus/pallets/solo-to-para/Cargo.toml
@@ -5,6 +5,8 @@ authors.workspace = true
edition.workspace = true
description = "Adds functionality to migrate from a Solo to a Parachain"
license = "Apache-2.0"
+homepage.workspace = true
+repository.workspace = true
[lints]
workspace = true
diff --git a/cumulus/pallets/xcm/Cargo.toml b/cumulus/pallets/xcm/Cargo.toml
index 35d7a083b061..ff9be866d48f 100644
--- a/cumulus/pallets/xcm/Cargo.toml
+++ b/cumulus/pallets/xcm/Cargo.toml
@@ -5,6 +5,8 @@ name = "cumulus-pallet-xcm"
version = "0.7.0"
license = "Apache-2.0"
description = "Pallet for stuff specific to parachains' usage of XCM"
+homepage.workspace = true
+repository.workspace = true
[lints]
workspace = true
diff --git a/cumulus/pallets/xcmp-queue/Cargo.toml b/cumulus/pallets/xcmp-queue/Cargo.toml
index 9c7470eda6da..af70a3169d8e 100644
--- a/cumulus/pallets/xcmp-queue/Cargo.toml
+++ b/cumulus/pallets/xcmp-queue/Cargo.toml
@@ -5,6 +5,8 @@ authors.workspace = true
edition.workspace = true
description = "Pallet to queue outbound and inbound XCMP messages."
license = "Apache-2.0"
+homepage.workspace = true
+repository.workspace = true
[lints]
workspace = true
diff --git a/cumulus/parachains/common/Cargo.toml b/cumulus/parachains/common/Cargo.toml
index 6d436bdf799a..641693a6a01b 100644
--- a/cumulus/parachains/common/Cargo.toml
+++ b/cumulus/parachains/common/Cargo.toml
@@ -5,6 +5,8 @@ authors.workspace = true
edition.workspace = true
description = "Logic which is common to all parachain runtimes"
license = "Apache-2.0"
+homepage.workspace = true
+repository.workspace = true
[lints]
workspace = true
diff --git a/cumulus/parachains/integration-tests/emulated/common/Cargo.toml b/cumulus/parachains/integration-tests/emulated/common/Cargo.toml
index 23edaf6bfe65..8282d12d317f 100644
--- a/cumulus/parachains/integration-tests/emulated/common/Cargo.toml
+++ b/cumulus/parachains/integration-tests/emulated/common/Cargo.toml
@@ -5,6 +5,8 @@ authors.workspace = true
edition.workspace = true
license = "Apache-2.0"
description = "Common resources for integration testing with xcm-emulator"
+homepage.workspace = true
+repository.workspace = true
[lints]
workspace = true
diff --git a/cumulus/parachains/integration-tests/emulated/tests/people/people-westend/Cargo.toml b/cumulus/parachains/integration-tests/emulated/tests/people/people-westend/Cargo.toml
index aa6eebc5458f..53acd038cdf5 100644
--- a/cumulus/parachains/integration-tests/emulated/tests/people/people-westend/Cargo.toml
+++ b/cumulus/parachains/integration-tests/emulated/tests/people/people-westend/Cargo.toml
@@ -21,6 +21,7 @@ sp-runtime = { workspace = true }
# Polkadot
polkadot-runtime-common = { workspace = true, default-features = true }
westend-runtime-constants = { workspace = true, default-features = true }
+westend-runtime = { workspace = true }
xcm = { workspace = true }
xcm-executor = { workspace = true }
diff --git a/cumulus/parachains/integration-tests/emulated/tests/people/people-westend/src/tests/governance.rs b/cumulus/parachains/integration-tests/emulated/tests/people/people-westend/src/tests/governance.rs
new file mode 100644
index 000000000000..3dadcdd94870
--- /dev/null
+++ b/cumulus/parachains/integration-tests/emulated/tests/people/people-westend/src/tests/governance.rs
@@ -0,0 +1,503 @@
+// Copyright (C) Parity Technologies (UK) Ltd.
+// SPDX-License-Identifier: Apache-2.0
+
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+use crate::imports::*;
+use frame_support::traits::ProcessMessageError;
+
+use codec::Encode;
+use frame_support::sp_runtime::traits::Dispatchable;
+use parachains_common::AccountId;
+use people_westend_runtime::people::IdentityInfo;
+use westend_runtime::governance::pallet_custom_origins::Origin::GeneralAdmin as GeneralAdminOrigin;
+use westend_system_emulated_network::people_westend_emulated_chain::people_westend_runtime;
+
+use pallet_identity::Data;
+
+use emulated_integration_tests_common::accounts::{ALICE, BOB};
+
+#[test]
+fn relay_commands_add_registrar() {
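+ // Dispatch an XCM `Transact` from the relay chain root origin to the People chain (para 1004)
+ // and assert that the registrar is added on the People chain side.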
+ let (origin_kind, origin) = (OriginKind::Superuser, <Westend as Chain>::RuntimeOrigin::root());
+
+ let registrar: AccountId = [1; 32].into();
+ Westend::execute_with(|| {
+ type Runtime = <Westend as Chain>::Runtime;
+ type RuntimeCall = <Westend as Chain>::RuntimeCall;
+ type RuntimeEvent = <Westend as Chain>::RuntimeEvent;
+ type PeopleCall = <PeopleWestend as Chain>::RuntimeCall;
+ type PeopleRuntime = <PeopleWestend as Chain>::Runtime;
+
+ let add_registrar_call =
+ PeopleCall::Identity(pallet_identity::Call::<PeopleRuntime>::add_registrar {
+ account: registrar.into(),
+ });
+
+ let xcm_message = RuntimeCall::XcmPallet(pallet_xcm::Call::<Runtime>::send {
+ dest: bx!(VersionedLocation::from(Location::new(0, [Parachain(1004)]))),
+ message: bx!(VersionedXcm::from(Xcm(vec![
+ UnpaidExecution { weight_limit: Unlimited, check_origin: None },
+ Transact { origin_kind, call: add_registrar_call.encode().into() }
+ ]))),
+ });
+
+ assert_ok!(xcm_message.dispatch(origin));
+
+ assert_expected_events!(
+ Westend,
+ vec![
+ RuntimeEvent::XcmPallet(pallet_xcm::Event::Sent { .. }) => {},
+ ]
+ );
+ });
+
+ PeopleWestend::execute_with(|| {
+ type RuntimeEvent = <PeopleWestend as Chain>::RuntimeEvent;
+
+ assert_expected_events!(
+ PeopleWestend,
+ vec![
+ RuntimeEvent::Identity(pallet_identity::Event::RegistrarAdded { .. }) => {},
+ RuntimeEvent::MessageQueue(pallet_message_queue::Event::Processed { success: true, .. }) => {},
+ ]
+ );
+ });
+}
+
+#[test]
+fn relay_commands_add_registrar_wrong_origin() {
+ let people_westend_alice = PeopleWestend::account_id_of(ALICE);
+
+ let origins = vec![
+ (
+ OriginKind::SovereignAccount,
+ <Westend as Chain>::RuntimeOrigin::signed(people_westend_alice),
+ ),
+ (OriginKind::Xcm, GeneralAdminOrigin.into()),
+ ];
+
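+ // The first origin above is a signed one; track it, since the expected event on the
+ // People chain differs between the two iterations below.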
+ let mut signed_origin = true;
+
+ for (origin_kind, origin) in origins {
+ let registrar: AccountId = [1; 32].into();
+ Westend::execute_with(|| {
+ type Runtime = <Westend as Chain>::Runtime;
+ type RuntimeCall = <Westend as Chain>::RuntimeCall;
+ type RuntimeEvent = <Westend as Chain>::RuntimeEvent;
+ type PeopleCall = <PeopleWestend as Chain>::RuntimeCall;
+ type PeopleRuntime = <PeopleWestend as Chain>::Runtime;
+
+ let add_registrar_call =
+ PeopleCall::Identity(pallet_identity::Call::<PeopleRuntime>::add_registrar {
+ account: registrar.into(),
+ });
+
+ let xcm_message = RuntimeCall::XcmPallet(pallet_xcm::Call::<Runtime>::send {
+ dest: bx!(VersionedLocation::from(Location::new(0, [Parachain(1004)]))),
+ message: bx!(VersionedXcm::from(Xcm(vec![
+ UnpaidExecution { weight_limit: Unlimited, check_origin: None },
+ Transact { origin_kind, call: add_registrar_call.encode().into() }
+ ]))),
+ });
+
+ assert_ok!(xcm_message.dispatch(origin));
+ assert_expected_events!(
+ Westend,
+ vec![
+ RuntimeEvent::XcmPallet(pallet_xcm::Event::Sent { .. }) => {},
+ ]
+ );
+ });
+
+ PeopleWestend::execute_with(|| {
+ type RuntimeEvent = <PeopleWestend as Chain>::RuntimeEvent;
+
+ if signed_origin {
+ assert_expected_events!(
+ PeopleWestend,
+ vec![
+ RuntimeEvent::MessageQueue(pallet_message_queue::Event::ProcessingFailed { error: ProcessMessageError::Unsupported, .. }) => {},
+ ]
+ );
+ } else {
+ assert_expected_events!(
+ PeopleWestend,
+ vec![
+ RuntimeEvent::MessageQueue(pallet_message_queue::Event::Processed { success: true, .. }) => {},
+ ]
+ );
+ }
+ });
+
+ signed_origin = false;
+ }
+}
+
+#[test]
+fn relay_commands_kill_identity() {
+ // To kill an identity, first one must be set
+ PeopleWestend::execute_with(|| {
+ type PeopleRuntime = <PeopleWestend as Chain>::Runtime;
+ type PeopleRuntimeEvent = <PeopleWestend as Chain>::RuntimeEvent;
+
+ let people_westend_alice =
+ <PeopleWestend as Chain>::RuntimeOrigin::signed(PeopleWestend::account_id_of(ALICE));
+
+ let identity_info = IdentityInfo {
+ email: Data::Raw(b"test@test.io".to_vec().try_into().unwrap()),
+ ..Default::default()
+ };
+ let identity: Box<<PeopleRuntime as pallet_identity::Config>::IdentityInformation> =
+ Box::new(identity_info);
+
+ assert_ok!(<PeopleWestend as PeopleWestendPallet>::Identity::set_identity(
+ people_westend_alice,
+ identity
+ ));
+
+ assert_expected_events!(
+ PeopleWestend,
+ vec![
+ PeopleRuntimeEvent::Identity(pallet_identity::Event::IdentitySet { .. }) => {},
+ ]
+ );
+ });
+
+ let (origin_kind, origin) = (OriginKind::Superuser, <Westend as Chain>::RuntimeOrigin::root());
+
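+ // Dispatch the `kill_identity` call from the relay chain root origin.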
+ Westend::execute_with(|| {
+ type Runtime = <Westend as Chain>::Runtime;
+ type RuntimeCall = <Westend as Chain>::RuntimeCall;
+ type PeopleCall = <PeopleWestend as Chain>::RuntimeCall;
+ type RuntimeEvent = <Westend as Chain>::RuntimeEvent;
+ type PeopleRuntime = <PeopleWestend as Chain>::Runtime;
+
+ let kill_identity_call =
+ PeopleCall::Identity(pallet_identity::Call::<PeopleRuntime>::kill_identity {
+ target: people_westend_runtime::MultiAddress::Id(PeopleWestend::account_id_of(
+ ALICE,
+ )),
+ });
+
+ let xcm_message = RuntimeCall::XcmPallet(pallet_xcm::Call::<Runtime>::send {
+ dest: bx!(VersionedLocation::from(Location::new(0, [Parachain(1004)]))),
+ message: bx!(VersionedXcm::from(Xcm(vec![
+ UnpaidExecution { weight_limit: Unlimited, check_origin: None },
+ Transact { origin_kind, call: kill_identity_call.encode().into() }
+ ]))),
+ });
+
+ assert_ok!(xcm_message.dispatch(origin));
+
+ assert_expected_events!(
+ Westend,
+ vec![
+ RuntimeEvent::XcmPallet(pallet_xcm::Event::Sent { .. }) => {},
+ ]
+ );
+ });
+
+ PeopleWestend::execute_with(|| {
+ type RuntimeEvent = <PeopleWestend as Chain>::RuntimeEvent;
+
+ assert_expected_events!(
+ PeopleWestend,
+ vec![
+ RuntimeEvent::Identity(pallet_identity::Event::IdentityKilled { .. }) => {},
+ RuntimeEvent::MessageQueue(pallet_message_queue::Event::Processed { success: true, .. }) => {},
+ ]
+ );
+ });
+}
+
+#[test]
+fn relay_commands_kill_identity_wrong_origin() {
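+ // Killing an identity from non-root origins must have no effect: no Identity events are
+ // expected on the People chain.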
+ let people_westend_alice = PeopleWestend::account_id_of(BOB);
+
+ let origins = vec![
+ (
+ OriginKind::SovereignAccount,
+ <Westend as Chain>::RuntimeOrigin::signed(people_westend_alice),
+ ),
+ (OriginKind::Xcm, GeneralAdminOrigin.into()),
+ ];
+
+ for (origin_kind, origin) in origins {
+ Westend::execute_with(|| {
+ type Runtime = <Westend as Chain>::Runtime;
+ type RuntimeCall = <Westend as Chain>::RuntimeCall;
+ type PeopleCall = <PeopleWestend as Chain>::RuntimeCall;
+ type RuntimeEvent = <Westend as Chain>::RuntimeEvent;
+ type PeopleRuntime = <PeopleWestend as Chain>::Runtime;
+
+ let kill_identity_call =
+ PeopleCall::Identity(pallet_identity::Call::<PeopleRuntime>::kill_identity {
+ target: people_westend_runtime::MultiAddress::Id(PeopleWestend::account_id_of(
+ ALICE,
+ )),
+ });
+
+ let xcm_message = RuntimeCall::XcmPallet(pallet_xcm::Call::<Runtime>::send {
+ dest: bx!(VersionedLocation::from(Location::new(0, [Parachain(1004)]))),
+ message: bx!(VersionedXcm::from(Xcm(vec![
+ UnpaidExecution { weight_limit: Unlimited, check_origin: None },
+ Transact { origin_kind, call: kill_identity_call.encode().into() }
+ ]))),
+ });
+
+ assert_ok!(xcm_message.dispatch(origin));
+ assert_expected_events!(
+ Westend,
+ vec![
+ RuntimeEvent::XcmPallet(pallet_xcm::Event::Sent { .. }) => {},
+ ]
+ );
+ });
+
+ PeopleWestend::execute_with(|| {
+ assert_expected_events!(PeopleWestend, vec![]);
+ });
+ }
+}
+
+#[test]
+fn relay_commands_add_remove_username_authority() {
+ let people_westend_alice = PeopleWestend::account_id_of(ALICE);
+ let people_westend_bob = PeopleWestend::account_id_of(BOB);
+
+ let (origin_kind, origin, usr) =
+ (OriginKind::Superuser, ::RuntimeOrigin::root(), "rootusername");
+
+ // First, add a username authority.
+ Westend::execute_with(|| {
+ type Runtime = <Westend as Chain>::Runtime;
+ type RuntimeCall = <Westend as Chain>::RuntimeCall;
+ type RuntimeEvent = <Westend as Chain>::RuntimeEvent;
+ type PeopleCall = <PeopleWestend as Chain>::RuntimeCall;
+ type PeopleRuntime = <PeopleWestend as Chain>::Runtime;
+
+ let add_username_authority =
+ PeopleCall::Identity(pallet_identity::Call::<PeopleRuntime>::add_username_authority {
+ authority: people_westend_runtime::MultiAddress::Id(people_westend_alice.clone()),
+ suffix: b"suffix1".into(),
+ allocation: 10,
+ });
+
+ let add_authority_xcm_msg = RuntimeCall::XcmPallet(pallet_xcm::Call::<Runtime>::send {
+ dest: bx!(VersionedLocation::from(Location::new(0, [Parachain(1004)]))),
+ message: bx!(VersionedXcm::from(Xcm(vec![
+ UnpaidExecution { weight_limit: Unlimited, check_origin: None },
+ Transact { origin_kind, call: add_username_authority.encode().into() }
+ ]))),
+ });
+
+ assert_ok!(add_authority_xcm_msg.dispatch(origin.clone()));
+
+ assert_expected_events!(
+ Westend,
+ vec![
+ RuntimeEvent::XcmPallet(pallet_xcm::Event::Sent { .. }) => {},
+ ]
+ );
+ });
+
+ // Check events system-parachain-side
+ PeopleWestend::execute_with(|| {
+ type RuntimeEvent = <PeopleWestend as Chain>::RuntimeEvent;
+
+ assert_expected_events!(
+ PeopleWestend,
+ vec![
+ RuntimeEvent::Identity(pallet_identity::Event::AuthorityAdded { .. }) => {},
+ RuntimeEvent::MessageQueue(pallet_message_queue::Event::Processed { success: true, .. }) => {},
+ ]
+ );
+ });
+
+ // Now, use the previously added username authority to concede a username to an account.
+ PeopleWestend::execute_with(|| {
+ type PeopleRuntimeEvent = <PeopleWestend as Chain>::RuntimeEvent;
+ let full_username = [usr.to_owned(), ".suffix1".to_owned()].concat().into_bytes();
+
+ assert_ok!(<PeopleWestend as PeopleWestendPallet>::Identity::set_username_for(
+ <PeopleWestend as Chain>::RuntimeOrigin::signed(people_westend_alice.clone()),
+ people_westend_runtime::MultiAddress::Id(people_westend_bob.clone()),
+ full_username,
+ None,
+ true
+ ));
+
+ assert_expected_events!(
+ PeopleWestend,
+ vec![
+ PeopleRuntimeEvent::Identity(pallet_identity::Event::UsernameQueued { .. }) => {},
+ ]
+ );
+ });
+
+ // Accept the given username
+ PeopleWestend::execute_with(|| {
+ type PeopleRuntimeEvent = <PeopleWestend as Chain>::RuntimeEvent;
+ let full_username = [usr.to_owned(), ".suffix1".to_owned()].concat().into_bytes();
+
+ assert_ok!(<PeopleWestend as PeopleWestendPallet>::Identity::accept_username(
+ <PeopleWestend as Chain>::RuntimeOrigin::signed(people_westend_bob.clone()),
+ full_username.try_into().unwrap(),
+ ));
+
+ assert_expected_events!(
+ PeopleWestend,
+ vec![
+ PeopleRuntimeEvent::Identity(pallet_identity::Event::UsernameSet { .. }) => {},
+ ]
+ );
+ });
+
+ // Now, remove the username authority with another privileged XCM call.
+ Westend::execute_with(|| {
+ type Runtime = <Westend as Chain>::Runtime;
+ type RuntimeCall = <Westend as Chain>::RuntimeCall;
+ type RuntimeEvent = <Westend as Chain>::RuntimeEvent;
+ type PeopleCall = <PeopleWestend as Chain>::RuntimeCall;
+ type PeopleRuntime = <PeopleWestend as Chain>::Runtime;
+
+ let remove_username_authority = PeopleCall::Identity(pallet_identity::Call::<
+ PeopleRuntime,
+ >::remove_username_authority {
+ authority: people_westend_runtime::MultiAddress::Id(people_westend_alice.clone()),
+ suffix: b"suffix1".into(),
+ });
+
+ let remove_authority_xcm_msg = RuntimeCall::XcmPallet(pallet_xcm::Call::<Runtime>::send {
+ dest: bx!(VersionedLocation::from(Location::new(0, [Parachain(1004)]))),
+ message: bx!(VersionedXcm::from(Xcm(vec![
+ UnpaidExecution { weight_limit: Unlimited, check_origin: None },
+ Transact { origin_kind, call: remove_username_authority.encode().into() }
+ ]))),
+ });
+
+ assert_ok!(remove_authority_xcm_msg.dispatch(origin));
+
+ assert_expected_events!(
+ Westend,
+ vec![
+ RuntimeEvent::XcmPallet(pallet_xcm::Event::Sent { .. }) => {},
+ ]
+ );
+ });
+
+ // Final event check.
+ PeopleWestend::execute_with(|| {
+ type RuntimeEvent = <PeopleWestend as Chain>::RuntimeEvent;
+
+ assert_expected_events!(
+ PeopleWestend,
+ vec![
+ RuntimeEvent::Identity(pallet_identity::Event::AuthorityRemoved { .. }) => {},
+ RuntimeEvent::MessageQueue(pallet_message_queue::Event::Processed { success: true, .. }) => {},
+ ]
+ );
+ });
+}
+
+#[test]
+fn relay_commands_add_remove_username_authority_wrong_origin() {
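+ // Neither a signed (sovereign account) origin nor the `GeneralAdmin` origin should be able
+ // to manage username authorities: no Identity events are expected on the People chain.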
+ let people_westend_alice = PeopleWestend::account_id_of(ALICE);
+
+ let origins = vec![
+ (
+ OriginKind::SovereignAccount,
+ <Westend as Chain>::RuntimeOrigin::signed(people_westend_alice.clone()),
+ ),
+ (OriginKind::Xcm, GeneralAdminOrigin.into()),
+ ];
+
+ for (origin_kind, origin) in origins {
+ Westend::execute_with(|| {
+ type Runtime = <Westend as Chain>::Runtime;
+ type RuntimeCall = <Westend as Chain>::RuntimeCall;
+ type RuntimeEvent = <Westend as Chain>::RuntimeEvent;
+ type PeopleCall = <PeopleWestend as Chain>::RuntimeCall;
+ type PeopleRuntime = <PeopleWestend as Chain>::Runtime;
+
+ let add_username_authority = PeopleCall::Identity(pallet_identity::Call::<
+ PeopleRuntime,
+ >::add_username_authority {
+ authority: people_westend_runtime::MultiAddress::Id(people_westend_alice.clone()),
+ suffix: b"suffix1".into(),
+ allocation: 10,
+ });
+
+ let add_authority_xcm_msg = RuntimeCall::XcmPallet(pallet_xcm::Call::<Runtime>::send {
+ dest: bx!(VersionedLocation::from(Location::new(0, [Parachain(1004)]))),
+ message: bx!(VersionedXcm::from(Xcm(vec![
+ UnpaidExecution { weight_limit: Unlimited, check_origin: None },
+ Transact { origin_kind, call: add_username_authority.encode().into() }
+ ]))),
+ });
+
+ assert_ok!(add_authority_xcm_msg.dispatch(origin.clone()));
+ assert_expected_events!(
+ Westend,
+ vec![
+ RuntimeEvent::XcmPallet(pallet_xcm::Event::Sent { .. }) => {},
+ ]
+ );
+ });
+
+ // Check events system-parachain-side
+ PeopleWestend::execute_with(|| {
+ assert_expected_events!(PeopleWestend, vec![]);
+ });
+
+ Westend::execute_with(|| {
+ type Runtime = <Westend as Chain>::Runtime;
+ type RuntimeCall = <Westend as Chain>::RuntimeCall;
+ type RuntimeEvent = <Westend as Chain>::RuntimeEvent;
+ type PeopleCall = <PeopleWestend as Chain>::RuntimeCall;
+ type PeopleRuntime = <PeopleWestend as Chain>::Runtime;
+
+ let remove_username_authority = PeopleCall::Identity(pallet_identity::Call::<
+ PeopleRuntime,
+ >::remove_username_authority {
+ authority: people_westend_runtime::MultiAddress::Id(people_westend_alice.clone()),
+ suffix: b"suffix1".into(),
+ });
+
+ let remove_authority_xcm_msg =
+ RuntimeCall::XcmPallet(pallet_xcm::Call::<Runtime>::send {
+ dest: bx!(VersionedLocation::from(Location::new(0, [Parachain(1004)]))),
+ message: bx!(VersionedXcm::from(Xcm(vec![
+ UnpaidExecution { weight_limit: Unlimited, check_origin: None },
+ Transact {
+ origin_kind: OriginKind::SovereignAccount,
+ call: remove_username_authority.encode().into(),
+ }
+ ]))),
+ });
+
+ assert_ok!(remove_authority_xcm_msg.dispatch(origin));
+ assert_expected_events!(
+ Westend,
+ vec![
+ RuntimeEvent::XcmPallet(pallet_xcm::Event::Sent { .. }) => {},
+ ]
+ );
+ });
+
+ PeopleWestend::execute_with(|| {
+ assert_expected_events!(PeopleWestend, vec![]);
+ });
+ }
+}
diff --git a/cumulus/parachains/integration-tests/emulated/tests/people/people-westend/src/tests/mod.rs b/cumulus/parachains/integration-tests/emulated/tests/people/people-westend/src/tests/mod.rs
index 08749b295dc2..b9ad9e3db467 100644
--- a/cumulus/parachains/integration-tests/emulated/tests/people/people-westend/src/tests/mod.rs
+++ b/cumulus/parachains/integration-tests/emulated/tests/people/people-westend/src/tests/mod.rs
@@ -14,4 +14,5 @@
// limitations under the License.
mod claim_assets;
+mod governance;
mod teleport;
diff --git a/cumulus/parachains/pallets/collective-content/Cargo.toml b/cumulus/parachains/pallets/collective-content/Cargo.toml
index c52021f67e36..09301bd738f3 100644
--- a/cumulus/parachains/pallets/collective-content/Cargo.toml
+++ b/cumulus/parachains/pallets/collective-content/Cargo.toml
@@ -5,6 +5,8 @@ authors = ["Parity Technologies "]
edition.workspace = true
description = "Managed content"
license = "Apache-2.0"
+homepage.workspace = true
+repository.workspace = true
[lints]
workspace = true
diff --git a/cumulus/parachains/pallets/parachain-info/Cargo.toml b/cumulus/parachains/pallets/parachain-info/Cargo.toml
index e0bed23c4f8c..604441c65f29 100644
--- a/cumulus/parachains/pallets/parachain-info/Cargo.toml
+++ b/cumulus/parachains/pallets/parachain-info/Cargo.toml
@@ -5,6 +5,8 @@ name = "staging-parachain-info"
version = "0.7.0"
license = "Apache-2.0"
description = "Pallet to store the parachain ID"
+homepage.workspace = true
+repository.workspace = true
[lints]
workspace = true
diff --git a/cumulus/parachains/pallets/ping/Cargo.toml b/cumulus/parachains/pallets/ping/Cargo.toml
index 51fc384a4f14..ceb38f39fd80 100644
--- a/cumulus/parachains/pallets/ping/Cargo.toml
+++ b/cumulus/parachains/pallets/ping/Cargo.toml
@@ -5,6 +5,8 @@ name = "cumulus-ping"
version = "0.7.0"
license = "Apache-2.0"
description = "Ping Pallet for Cumulus XCM/UMP testing."
+homepage.workspace = true
+repository.workspace = true
[lints]
workspace = true
diff --git a/cumulus/parachains/runtimes/assets/asset-hub-rococo/Cargo.toml b/cumulus/parachains/runtimes/assets/asset-hub-rococo/Cargo.toml
index bfe8ed869758..949640dd4be6 100644
--- a/cumulus/parachains/runtimes/assets/asset-hub-rococo/Cargo.toml
+++ b/cumulus/parachains/runtimes/assets/asset-hub-rococo/Cargo.toml
@@ -5,6 +5,8 @@ authors.workspace = true
edition.workspace = true
description = "Rococo variant of Asset Hub parachain runtime"
license = "Apache-2.0"
+homepage.workspace = true
+repository.workspace = true
[lints]
workspace = true
diff --git a/cumulus/parachains/runtimes/assets/asset-hub-westend/Cargo.toml b/cumulus/parachains/runtimes/assets/asset-hub-westend/Cargo.toml
index a3eaebb59153..8e47146a06c3 100644
--- a/cumulus/parachains/runtimes/assets/asset-hub-westend/Cargo.toml
+++ b/cumulus/parachains/runtimes/assets/asset-hub-westend/Cargo.toml
@@ -5,6 +5,8 @@ authors.workspace = true
edition.workspace = true
description = "Westend variant of Asset Hub parachain runtime"
license = "Apache-2.0"
+homepage.workspace = true
+repository.workspace = true
[lints]
workspace = true
diff --git a/cumulus/parachains/runtimes/assets/asset-hub-westend/src/lib.rs b/cumulus/parachains/runtimes/assets/asset-hub-westend/src/lib.rs
index f20b6b1fece0..21368e9c2b4b 100644
--- a/cumulus/parachains/runtimes/assets/asset-hub-westend/src/lib.rs
+++ b/cumulus/parachains/runtimes/assets/asset-hub-westend/src/lib.rs
@@ -124,7 +124,7 @@ pub const VERSION: RuntimeVersion = RuntimeVersion {
spec_name: alloc::borrow::Cow::Borrowed("westmint"),
impl_name: alloc::borrow::Cow::Borrowed("westmint"),
authoring_version: 1,
- spec_version: 1_016_006,
+ spec_version: 1_017_002,
impl_version: 0,
apis: RUNTIME_API_VERSIONS,
transaction_version: 16,
@@ -2081,18 +2081,10 @@ impl_runtime_apis! {
let account = <Runtime as pallet_revive::Config>::AddressMapper::to_account_id(&address);
System::account_nonce(account)
}
- fn eth_transact(
- from: H160,
- dest: Option<H160>,
- value: U256,
- input: Vec<u8>,
- gas_limit: Option<Weight>,
- storage_deposit_limit: Option<Balance>,
- ) -> pallet_revive::EthContractResult<Balance>
+
+ fn eth_transact(tx: pallet_revive::evm::GenericTransaction) -> Result<pallet_revive::EthTransactInfo<Balance>, pallet_revive::EthTransactError>
{
- use pallet_revive::AddressMapper;
- let blockweights = <Runtime as frame_system::Config>::BlockWeights::get();
- let origin = <Runtime as pallet_revive::Config>::AddressMapper::to_account_id(&from);
+ let blockweights: BlockWeights = <Runtime as frame_system::Config>::BlockWeights::get();
let encoded_size = |pallet_call| {
let call = RuntimeCall::Revive(pallet_call);
@@ -2101,15 +2093,9 @@ impl_runtime_apis! {
};
Revive::bare_eth_transact(
- origin,
- dest,
- value,
- input,
- gas_limit.unwrap_or(blockweights.max_block),
- storage_deposit_limit.unwrap_or(u128::MAX),
+ tx,
+ blockweights.max_block,
encoded_size,
- pallet_revive::DebugInfo::UnsafeDebug,
- pallet_revive::CollectEvents::UnsafeCollect,
)
}
@@ -2127,7 +2113,7 @@ impl_runtime_apis! {
dest,
value,
gas_limit.unwrap_or(blockweights.max_block),
- storage_deposit_limit.unwrap_or(u128::MAX),
+ pallet_revive::DepositLimit::Balance(storage_deposit_limit.unwrap_or(u128::MAX)),
input_data,
pallet_revive::DebugInfo::UnsafeDebug,
pallet_revive::CollectEvents::UnsafeCollect,
@@ -2149,7 +2135,7 @@ impl_runtime_apis! {
RuntimeOrigin::signed(origin),
value,
gas_limit.unwrap_or(blockweights.max_block),
- storage_deposit_limit.unwrap_or(u128::MAX),
+ pallet_revive::DepositLimit::Balance(storage_deposit_limit.unwrap_or(u128::MAX)),
code,
data,
salt,
diff --git a/cumulus/parachains/runtimes/assets/common/Cargo.toml b/cumulus/parachains/runtimes/assets/common/Cargo.toml
index fb66f0de2322..fa9efbca7a39 100644
--- a/cumulus/parachains/runtimes/assets/common/Cargo.toml
+++ b/cumulus/parachains/runtimes/assets/common/Cargo.toml
@@ -5,6 +5,8 @@ authors.workspace = true
edition.workspace = true
description = "Assets common utilities"
license = "Apache-2.0"
+homepage.workspace = true
+repository.workspace = true
[lints]
workspace = true
diff --git a/cumulus/parachains/runtimes/assets/test-utils/Cargo.toml b/cumulus/parachains/runtimes/assets/test-utils/Cargo.toml
index f6b3c13e8102..393d06f95b15 100644
--- a/cumulus/parachains/runtimes/assets/test-utils/Cargo.toml
+++ b/cumulus/parachains/runtimes/assets/test-utils/Cargo.toml
@@ -5,6 +5,8 @@ authors.workspace = true
edition.workspace = true
description = "Test utils for Asset Hub runtimes."
license = "Apache-2.0"
+homepage.workspace = true
+repository.workspace = true
[lints]
workspace = true
diff --git a/cumulus/parachains/runtimes/bridge-hubs/bridge-hub-rococo/Cargo.toml b/cumulus/parachains/runtimes/bridge-hubs/bridge-hub-rococo/Cargo.toml
index 3eb06e3a18c1..a7710783a1e0 100644
--- a/cumulus/parachains/runtimes/bridge-hubs/bridge-hub-rococo/Cargo.toml
+++ b/cumulus/parachains/runtimes/bridge-hubs/bridge-hub-rococo/Cargo.toml
@@ -5,6 +5,8 @@ authors.workspace = true
edition.workspace = true
description = "Rococo's BridgeHub parachain runtime"
license = "Apache-2.0"
+homepage.workspace = true
+repository.workspace = true
[lints]
workspace = true
diff --git a/cumulus/parachains/runtimes/bridge-hubs/bridge-hub-rococo/src/bridge_to_bulletin_config.rs b/cumulus/parachains/runtimes/bridge-hubs/bridge-hub-rococo/src/bridge_to_bulletin_config.rs
index b284fa9e7af7..1e733503f43b 100644
--- a/cumulus/parachains/runtimes/bridge-hubs/bridge-hub-rococo/src/bridge_to_bulletin_config.rs
+++ b/cumulus/parachains/runtimes/bridge-hubs/bridge-hub-rococo/src/bridge_to_bulletin_config.rs
@@ -22,14 +22,13 @@
use crate::{
bridge_common_config::RelayersForPermissionlessLanesInstance, weights,
xcm_config::UniversalLocation, AccountId, Balance, Balances, BridgeRococoBulletinGrandpa,
- BridgeRococoBulletinMessages, PolkadotXcm, Runtime, RuntimeEvent, RuntimeHoldReason,
- XcmOverRococoBulletin, XcmRouter,
+ BridgeRococoBulletinMessages, Runtime, RuntimeEvent, RuntimeHoldReason, XcmOverRococoBulletin,
+ XcmRouter,
};
use bp_messages::{
source_chain::FromBridgedChainMessagesDeliveryProof,
- target_chain::FromBridgedChainMessagesProof, HashedLaneId,
+ target_chain::FromBridgedChainMessagesProof, LegacyLaneId,
};
-use bridge_hub_common::xcm_version::XcmVersionOfDestAndRemoteBridge;
use frame_support::{
parameter_types,
@@ -46,6 +45,7 @@ use testnet_parachains_constants::rococo::currency::UNITS as ROC;
use xcm::{
latest::prelude::*,
prelude::{InteriorLocation, NetworkId},
+ AlwaysV5,
};
use xcm_builder::{BridgeBlobDispatcher, ParentIsPreset, SiblingParachainConvertsVia};
@@ -120,7 +120,7 @@ impl pallet_bridge_messages::Config<WithRococoBulletinMessagesInstance> for Runt
type OutboundPayload = XcmAsPlainPayload;
type InboundPayload = XcmAsPlainPayload;
- type LaneId = HashedLaneId;
+ type LaneId = LegacyLaneId;
type DeliveryPayments = ();
type DeliveryConfirmationPayments = ();
@@ -139,8 +139,7 @@ impl pallet_xcm_bridge_hub::Config<XcmOverPolkadotBulletinInstance> for Runtime
type BridgeMessagesPalletInstance = WithRococoBulletinMessagesInstance;
type MessageExportPrice = ();
- type DestinationVersion =
- XcmVersionOfDestAndRemoteBridge<PolkadotXcm, RococoBulletinGlobalConsensusNetworkLocation>;
+ type DestinationVersion = AlwaysV5;
type ForceOrigin = EnsureRoot<AccountId>;
// We don't want to allow creating bridges for this instance.
@@ -253,7 +252,7 @@ where
let universal_source =
[GlobalConsensus(ByGenesis(ROCOCO_GENESIS_HASH)), Parachain(sibling_para_id)].into();
let universal_destination =
- [GlobalConsensus(RococoBulletinGlobalConsensusNetwork::get()), Parachain(2075)].into();
+ [GlobalConsensus(RococoBulletinGlobalConsensusNetwork::get())].into();
let bridge_id = BridgeId::new(&universal_source, &universal_destination);
// insert only bridge metadata, because the benchmarks create lanes
@@ -279,29 +278,3 @@ where
universal_source
}
-
-/// Contains the migration for the PeopleRococo<>RococoBulletin bridge.
-pub mod migration {
- use super::*;
- use frame_support::traits::ConstBool;
-
- parameter_types! {
- pub BulletinRococoLocation: InteriorLocation = [GlobalConsensus(RococoBulletinGlobalConsensusNetwork::get())].into();
- pub RococoPeopleToRococoBulletinMessagesLane: HashedLaneId = pallet_xcm_bridge_hub::Pallet::< Runtime, XcmOverPolkadotBulletinInstance >::bridge_locations(
- PeopleRococoLocation::get(),
- BulletinRococoLocation::get()
- )
- .unwrap()
- .calculate_lane_id(xcm::latest::VERSION).expect("Valid locations");
- }
-
- /// Ensure that the existing lanes for the People<>Bulletin bridge are correctly configured.
- pub type StaticToDynamicLanes = pallet_xcm_bridge_hub::migration::OpenBridgeForLane<
- Runtime,
- XcmOverPolkadotBulletinInstance,
- RococoPeopleToRococoBulletinMessagesLane,
- ConstBool<true>,
- PeopleRococoLocation,
- BulletinRococoLocation,
- >;
-}
diff --git a/cumulus/parachains/runtimes/bridge-hubs/bridge-hub-rococo/src/genesis_config_presets.rs b/cumulus/parachains/runtimes/bridge-hubs/bridge-hub-rococo/src/genesis_config_presets.rs
index 98e2450ee832..55fd499c2f54 100644
--- a/cumulus/parachains/runtimes/bridge-hubs/bridge-hub-rococo/src/genesis_config_presets.rs
+++ b/cumulus/parachains/runtimes/bridge-hubs/bridge-hub-rococo/src/genesis_config_presets.rs
@@ -61,10 +61,20 @@ fn bridge_hub_rococo_genesis(
.collect(),
},
polkadot_xcm: PolkadotXcmConfig { safe_xcm_version: Some(SAFE_XCM_VERSION) },
+ bridge_polkadot_bulletin_grandpa: BridgePolkadotBulletinGrandpaConfig {
+ owner: bridges_pallet_owner.clone(),
+ },
bridge_westend_grandpa: BridgeWestendGrandpaConfig { owner: bridges_pallet_owner.clone() },
bridge_westend_messages: BridgeWestendMessagesConfig {
owner: bridges_pallet_owner.clone(),
},
+ xcm_over_polkadot_bulletin: XcmOverPolkadotBulletinConfig {
+ opened_bridges: vec![(
+ Location::new(1, [Parachain(1004)]),
+ Junctions::from([GlobalConsensus(NetworkId::PolkadotBulletin).into()]),
+ Some(bp_messages::LegacyLaneId([0, 0, 0, 0])),
+ )],
+ },
xcm_over_bridge_hub_westend: XcmOverBridgeHubWestendConfig { opened_bridges },
ethereum_system: EthereumSystemConfig { para_id: id, asset_hub_para_id },
})
diff --git a/cumulus/parachains/runtimes/bridge-hubs/bridge-hub-rococo/src/lib.rs b/cumulus/parachains/runtimes/bridge-hubs/bridge-hub-rococo/src/lib.rs
index 598afeddb984..d87ff9b43fef 100644
--- a/cumulus/parachains/runtimes/bridge-hubs/bridge-hub-rococo/src/lib.rs
+++ b/cumulus/parachains/runtimes/bridge-hubs/bridge-hub-rococo/src/lib.rs
@@ -169,7 +169,6 @@ pub type Migrations = (
bridge_to_westend_config::WithBridgeHubWestendMessagesInstance,
>,
bridge_to_westend_config::migration::StaticToDynamicLanes,
- bridge_to_bulletin_config::migration::StaticToDynamicLanes,
frame_support::migrations::RemoveStorage<
BridgeWestendMessagesPalletName,
OutboundLanesCongestedSignalsKey,
diff --git a/cumulus/parachains/runtimes/bridge-hubs/bridge-hub-rococo/tests/tests.rs b/cumulus/parachains/runtimes/bridge-hubs/bridge-hub-rococo/tests/tests.rs
index 29f9615bff6a..44e69c31a560 100644
--- a/cumulus/parachains/runtimes/bridge-hubs/bridge-hub-rococo/tests/tests.rs
+++ b/cumulus/parachains/runtimes/bridge-hubs/bridge-hub-rococo/tests/tests.rs
@@ -501,10 +501,10 @@ mod bridge_hub_westend_tests {
mod bridge_hub_bulletin_tests {
use super::*;
- use bp_messages::{HashedLaneId, LaneIdType};
+ use bp_messages::LegacyLaneId;
use bridge_common_config::BridgeGrandpaRococoBulletinInstance;
use bridge_hub_rococo_runtime::{
- bridge_common_config::RelayersForPermissionlessLanesInstance,
+ bridge_common_config::RelayersForLegacyLaneIdsMessagesInstance,
xcm_config::LocationToAccountId,
};
use bridge_hub_test_utils::test_cases::from_grandpa_chain;
@@ -528,7 +528,7 @@ mod bridge_hub_bulletin_tests {
AllPalletsWithoutSystem,
BridgeGrandpaRococoBulletinInstance,
WithRococoBulletinMessagesInstance,
- RelayersForPermissionlessLanesInstance,
+ RelayersForLegacyLaneIdsMessagesInstance,
>;
#[test]
@@ -599,7 +599,7 @@ mod bridge_hub_bulletin_tests {
bridge_hub_test_utils::open_bridge_with_storage::<
Runtime,
XcmOverPolkadotBulletinInstance
- >(locations, HashedLaneId::try_new(1, 2).unwrap())
+ >(locations, LegacyLaneId([0, 0, 0, 0]))
}
).1
},
@@ -663,7 +663,7 @@ mod bridge_hub_bulletin_tests {
bridge_hub_test_utils::open_bridge_with_storage::<
Runtime,
XcmOverPolkadotBulletinInstance,
- >(locations, HashedLaneId::try_new(1, 2).unwrap())
+ >(locations, LegacyLaneId([0, 0, 0, 0]))
},
)
.1
@@ -697,7 +697,7 @@ mod bridge_hub_bulletin_tests {
bridge_hub_test_utils::open_bridge_with_storage::<
Runtime,
XcmOverPolkadotBulletinInstance,
- >(locations, HashedLaneId::try_new(1, 2).unwrap())
+ >(locations, LegacyLaneId([0, 0, 0, 0]))
},
)
.1
diff --git a/cumulus/parachains/runtimes/bridge-hubs/bridge-hub-westend/Cargo.toml b/cumulus/parachains/runtimes/bridge-hubs/bridge-hub-westend/Cargo.toml
index d1886a3b1c23..d287b4de89d4 100644
--- a/cumulus/parachains/runtimes/bridge-hubs/bridge-hub-westend/Cargo.toml
+++ b/cumulus/parachains/runtimes/bridge-hubs/bridge-hub-westend/Cargo.toml
@@ -5,6 +5,8 @@ authors.workspace = true
edition.workspace = true
description = "Westend's BridgeHub parachain runtime"
license = "Apache-2.0"
+homepage.workspace = true
+repository.workspace = true
[lints]
workspace = true
diff --git a/cumulus/parachains/runtimes/bridge-hubs/common/Cargo.toml b/cumulus/parachains/runtimes/bridge-hubs/common/Cargo.toml
index 9cb24a2b2820..76a89bcb2e72 100644
--- a/cumulus/parachains/runtimes/bridge-hubs/common/Cargo.toml
+++ b/cumulus/parachains/runtimes/bridge-hubs/common/Cargo.toml
@@ -5,6 +5,8 @@ authors.workspace = true
edition.workspace = true
description = "Bridge hub common utilities"
license = "Apache-2.0"
+homepage.workspace = true
+repository.workspace = true
[dependencies]
codec = { features = ["derive"], workspace = true }
diff --git a/cumulus/parachains/runtimes/bridge-hubs/test-utils/Cargo.toml b/cumulus/parachains/runtimes/bridge-hubs/test-utils/Cargo.toml
index 915b3090092f..16fef951f328 100644
--- a/cumulus/parachains/runtimes/bridge-hubs/test-utils/Cargo.toml
+++ b/cumulus/parachains/runtimes/bridge-hubs/test-utils/Cargo.toml
@@ -5,6 +5,8 @@ authors.workspace = true
edition.workspace = true
description = "Utils for BridgeHub testing"
license = "Apache-2.0"
+homepage.workspace = true
+repository.workspace = true
[lints]
workspace = true
diff --git a/cumulus/parachains/runtimes/collectives/collectives-westend/Cargo.toml b/cumulus/parachains/runtimes/collectives/collectives-westend/Cargo.toml
index 810abcf572d4..dc4b73db69e3 100644
--- a/cumulus/parachains/runtimes/collectives/collectives-westend/Cargo.toml
+++ b/cumulus/parachains/runtimes/collectives/collectives-westend/Cargo.toml
@@ -5,6 +5,8 @@ authors.workspace = true
edition.workspace = true
license = "Apache-2.0"
description = "Westend Collectives Parachain Runtime"
+homepage.workspace = true
+repository.workspace = true
[lints]
workspace = true
diff --git a/cumulus/parachains/runtimes/constants/Cargo.toml b/cumulus/parachains/runtimes/constants/Cargo.toml
index d54f1e7db6c1..01b023e0fb89 100644
--- a/cumulus/parachains/runtimes/constants/Cargo.toml
+++ b/cumulus/parachains/runtimes/constants/Cargo.toml
@@ -5,6 +5,8 @@ authors.workspace = true
edition.workspace = true
description = "Common constants for Testnet Parachains runtimes"
license = "Apache-2.0"
+homepage.workspace = true
+repository.workspace = true
[lints]
workspace = true
diff --git a/cumulus/parachains/runtimes/contracts/contracts-rococo/Cargo.toml b/cumulus/parachains/runtimes/contracts/contracts-rococo/Cargo.toml
index c98ca7ba3e74..1aeff5eb2e48 100644
--- a/cumulus/parachains/runtimes/contracts/contracts-rococo/Cargo.toml
+++ b/cumulus/parachains/runtimes/contracts/contracts-rococo/Cargo.toml
@@ -5,6 +5,8 @@ description = "Parachain testnet runtime for FRAME Contracts pallet."
authors.workspace = true
edition.workspace = true
license = "Apache-2.0"
+homepage.workspace = true
+repository.workspace = true
[lints]
workspace = true
diff --git a/cumulus/parachains/runtimes/coretime/coretime-rococo/Cargo.toml b/cumulus/parachains/runtimes/coretime/coretime-rococo/Cargo.toml
index 02807827cf92..ab621134b252 100644
--- a/cumulus/parachains/runtimes/coretime/coretime-rococo/Cargo.toml
+++ b/cumulus/parachains/runtimes/coretime/coretime-rococo/Cargo.toml
@@ -5,6 +5,8 @@ authors.workspace = true
edition.workspace = true
description = "Rococo's Coretime parachain runtime"
license = "Apache-2.0"
+homepage.workspace = true
+repository.workspace = true
[lints]
workspace = true
diff --git a/cumulus/parachains/runtimes/coretime/coretime-westend/Cargo.toml b/cumulus/parachains/runtimes/coretime/coretime-westend/Cargo.toml
index 34353d312b1f..44dfbf93c30e 100644
--- a/cumulus/parachains/runtimes/coretime/coretime-westend/Cargo.toml
+++ b/cumulus/parachains/runtimes/coretime/coretime-westend/Cargo.toml
@@ -5,6 +5,8 @@ authors.workspace = true
edition.workspace = true
description = "Westend's Coretime parachain runtime"
license = "Apache-2.0"
+homepage.workspace = true
+repository.workspace = true
[lints]
workspace = true
diff --git a/cumulus/parachains/runtimes/glutton/glutton-westend/Cargo.toml b/cumulus/parachains/runtimes/glutton/glutton-westend/Cargo.toml
index 09b4ef679d24..9bbdb8d2ee08 100644
--- a/cumulus/parachains/runtimes/glutton/glutton-westend/Cargo.toml
+++ b/cumulus/parachains/runtimes/glutton/glutton-westend/Cargo.toml
@@ -5,6 +5,8 @@ authors.workspace = true
edition.workspace = true
license = "Apache-2.0"
description = "Glutton parachain runtime."
+homepage.workspace = true
+repository.workspace = true
[lints]
workspace = true
diff --git a/cumulus/parachains/runtimes/people/people-rococo/Cargo.toml b/cumulus/parachains/runtimes/people/people-rococo/Cargo.toml
index a55143b62071..893133bf3c1a 100644
--- a/cumulus/parachains/runtimes/people/people-rococo/Cargo.toml
+++ b/cumulus/parachains/runtimes/people/people-rococo/Cargo.toml
@@ -5,6 +5,8 @@ authors.workspace = true
edition.workspace = true
description = "Rococo's People parachain runtime"
license = "Apache-2.0"
+homepage.workspace = true
+repository.workspace = true
[build-dependencies]
substrate-wasm-builder = { optional = true, workspace = true, default-features = true }
diff --git a/cumulus/parachains/runtimes/people/people-westend/Cargo.toml b/cumulus/parachains/runtimes/people/people-westend/Cargo.toml
index 4d66332e96dd..66b324b51af4 100644
--- a/cumulus/parachains/runtimes/people/people-westend/Cargo.toml
+++ b/cumulus/parachains/runtimes/people/people-westend/Cargo.toml
@@ -5,6 +5,8 @@ authors.workspace = true
edition.workspace = true
description = "Westend's People parachain runtime"
license = "Apache-2.0"
+homepage.workspace = true
+repository.workspace = true
[build-dependencies]
substrate-wasm-builder = { optional = true, workspace = true, default-features = true }
diff --git a/cumulus/parachains/runtimes/test-utils/Cargo.toml b/cumulus/parachains/runtimes/test-utils/Cargo.toml
index e9d666617ee2..17c81ae4921a 100644
--- a/cumulus/parachains/runtimes/test-utils/Cargo.toml
+++ b/cumulus/parachains/runtimes/test-utils/Cargo.toml
@@ -5,6 +5,8 @@ authors.workspace = true
edition.workspace = true
description = "Utils for Runtimes testing"
license = "Apache-2.0"
+homepage.workspace = true
+repository.workspace = true
[lints]
workspace = true
diff --git a/cumulus/parachains/runtimes/testing/rococo-parachain/Cargo.toml b/cumulus/parachains/runtimes/testing/rococo-parachain/Cargo.toml
index b0581c8d43ff..4713f4398eaa 100644
--- a/cumulus/parachains/runtimes/testing/rococo-parachain/Cargo.toml
+++ b/cumulus/parachains/runtimes/testing/rococo-parachain/Cargo.toml
@@ -5,6 +5,8 @@ authors.workspace = true
edition.workspace = true
description = "Simple runtime used by the rococo parachain(s)"
license = "Apache-2.0"
+homepage.workspace = true
+repository.workspace = true
[lints]
workspace = true
diff --git a/cumulus/polkadot-omni-node/Cargo.toml b/cumulus/polkadot-omni-node/Cargo.toml
index a736e1ef80c5..8b46bc882868 100644
--- a/cumulus/polkadot-omni-node/Cargo.toml
+++ b/cumulus/polkadot-omni-node/Cargo.toml
@@ -6,6 +6,8 @@ edition.workspace = true
build = "build.rs"
description = "Generic binary that can run a parachain node with u32 block number and Aura consensus"
license = "Apache-2.0"
+homepage.workspace = true
+repository.workspace = true
[lints]
workspace = true
diff --git a/cumulus/polkadot-omni-node/lib/Cargo.toml b/cumulus/polkadot-omni-node/lib/Cargo.toml
index a690229f1695..cca4ac3b2b69 100644
--- a/cumulus/polkadot-omni-node/lib/Cargo.toml
+++ b/cumulus/polkadot-omni-node/lib/Cargo.toml
@@ -5,6 +5,8 @@ authors.workspace = true
edition.workspace = true
description = "Helper library that can be used to build a parachain node"
license = "Apache-2.0"
+homepage.workspace = true
+repository.workspace = true
[lints]
workspace = true
diff --git a/cumulus/polkadot-parachain/Cargo.toml b/cumulus/polkadot-parachain/Cargo.toml
index 5520126d0742..f5ce040bb530 100644
--- a/cumulus/polkadot-parachain/Cargo.toml
+++ b/cumulus/polkadot-parachain/Cargo.toml
@@ -6,6 +6,8 @@ edition.workspace = true
build = "build.rs"
description = "Runs a polkadot parachain node"
license = "Apache-2.0"
+homepage.workspace = true
+repository.workspace = true
[lints]
workspace = true
diff --git a/cumulus/primitives/aura/Cargo.toml b/cumulus/primitives/aura/Cargo.toml
index 185b2d40833f..715ce3e1a03e 100644
--- a/cumulus/primitives/aura/Cargo.toml
+++ b/cumulus/primitives/aura/Cargo.toml
@@ -5,6 +5,8 @@ authors.workspace = true
edition.workspace = true
license = "Apache-2.0"
description = "Core primitives for Aura in Cumulus"
+homepage.workspace = true
+repository.workspace = true
[lints]
workspace = true
diff --git a/cumulus/primitives/core/Cargo.toml b/cumulus/primitives/core/Cargo.toml
index 533d368d3b00..b5bfe4fbc889 100644
--- a/cumulus/primitives/core/Cargo.toml
+++ b/cumulus/primitives/core/Cargo.toml
@@ -5,6 +5,8 @@ authors.workspace = true
edition.workspace = true
license = "Apache-2.0"
description = "Cumulus related core primitive types and traits"
+homepage.workspace = true
+repository.workspace = true
[lints]
workspace = true
diff --git a/cumulus/primitives/parachain-inherent/Cargo.toml b/cumulus/primitives/parachain-inherent/Cargo.toml
index a4271d3fd9cc..2ff990b8d514 100644
--- a/cumulus/primitives/parachain-inherent/Cargo.toml
+++ b/cumulus/primitives/parachain-inherent/Cargo.toml
@@ -5,6 +5,8 @@ authors.workspace = true
edition.workspace = true
description = "Inherent that needs to be present in every parachain block. Contains messages and a relay chain storage-proof."
license = "Apache-2.0"
+homepage.workspace = true
+repository.workspace = true
[lints]
workspace = true
diff --git a/cumulus/primitives/proof-size-hostfunction/Cargo.toml b/cumulus/primitives/proof-size-hostfunction/Cargo.toml
index e61c865d05fb..6e8168091892 100644
--- a/cumulus/primitives/proof-size-hostfunction/Cargo.toml
+++ b/cumulus/primitives/proof-size-hostfunction/Cargo.toml
@@ -5,6 +5,8 @@ authors.workspace = true
edition.workspace = true
description = "Hostfunction exposing storage proof size to the runtime."
license = "Apache-2.0"
+homepage.workspace = true
+repository.workspace = true
[lints]
workspace = true
diff --git a/cumulus/primitives/storage-weight-reclaim/Cargo.toml b/cumulus/primitives/storage-weight-reclaim/Cargo.toml
index e1ae6743335a..3c358bc25edb 100644
--- a/cumulus/primitives/storage-weight-reclaim/Cargo.toml
+++ b/cumulus/primitives/storage-weight-reclaim/Cargo.toml
@@ -5,6 +5,8 @@ authors.workspace = true
edition.workspace = true
description = "Utilities to reclaim storage weight."
license = "Apache-2.0"
+homepage.workspace = true
+repository.workspace = true
[lints]
workspace = true
diff --git a/cumulus/primitives/timestamp/Cargo.toml b/cumulus/primitives/timestamp/Cargo.toml
index cb328e2f2cc6..70cb3e607b98 100644
--- a/cumulus/primitives/timestamp/Cargo.toml
+++ b/cumulus/primitives/timestamp/Cargo.toml
@@ -5,6 +5,8 @@ authors.workspace = true
edition.workspace = true
description = "Provides timestamp related functionality for parachains."
license = "Apache-2.0"
+homepage.workspace = true
+repository.workspace = true
[lints]
workspace = true
diff --git a/cumulus/primitives/utility/Cargo.toml b/cumulus/primitives/utility/Cargo.toml
index 2ca8b82001d5..1444571edbe0 100644
--- a/cumulus/primitives/utility/Cargo.toml
+++ b/cumulus/primitives/utility/Cargo.toml
@@ -5,6 +5,8 @@ authors.workspace = true
edition.workspace = true
license = "Apache-2.0"
description = "Helper datatypes for Cumulus"
+homepage.workspace = true
+repository.workspace = true
[lints]
workspace = true
diff --git a/cumulus/test/relay-sproof-builder/Cargo.toml b/cumulus/test/relay-sproof-builder/Cargo.toml
index e266b5807081..c1efa141a45d 100644
--- a/cumulus/test/relay-sproof-builder/Cargo.toml
+++ b/cumulus/test/relay-sproof-builder/Cargo.toml
@@ -5,6 +5,8 @@ authors.workspace = true
edition.workspace = true
license = "Apache-2.0"
description = "Mocked relay state proof builder for testing Cumulus."
+homepage.workspace = true
+repository.workspace = true
[lints]
workspace = true
diff --git a/cumulus/xcm/xcm-emulator/Cargo.toml b/cumulus/xcm/xcm-emulator/Cargo.toml
index 8598481fae76..d0c637d64d01 100644
--- a/cumulus/xcm/xcm-emulator/Cargo.toml
+++ b/cumulus/xcm/xcm-emulator/Cargo.toml
@@ -5,6 +5,8 @@ version = "0.5.0"
authors.workspace = true
edition.workspace = true
license = "Apache-2.0"
+homepage.workspace = true
+repository.workspace = true
[lints]
workspace = true
diff --git a/docs/contributor/container.md b/docs/contributor/container.md
index ec51b8b9d7cc..e387f568d7b5 100644
--- a/docs/contributor/container.md
+++ b/docs/contributor/container.md
@@ -24,7 +24,7 @@ The command below allows building a Linux binary without having to even install
docker run --rm -it \
-w /polkadot-sdk \
-v $(pwd):/polkadot-sdk \
- docker.io/paritytech/ci-unified:bullseye-1.77.0-2024-04-10-v20240408 \
+ docker.io/paritytech/ci-unified:bullseye-1.81.0-2024-11-19-v202411281558 \
cargo build --release --locked -p polkadot-parachain-bin --bin polkadot-parachain
sudo chown -R $(id -u):$(id -g) target/
```
diff --git a/polkadot/Cargo.toml b/polkadot/Cargo.toml
index 3a939464868f..101caac0e313 100644
--- a/polkadot/Cargo.toml
+++ b/polkadot/Cargo.toml
@@ -20,6 +20,8 @@ authors.workspace = true
edition.workspace = true
version = "6.0.0"
default-run = "polkadot"
+homepage.workspace = true
+repository.workspace = true
[lints]
workspace = true
diff --git a/polkadot/cli/Cargo.toml b/polkadot/cli/Cargo.toml
index da37f6062c57..3eff525b7b1e 100644
--- a/polkadot/cli/Cargo.toml
+++ b/polkadot/cli/Cargo.toml
@@ -5,6 +5,8 @@ version = "7.0.0"
authors.workspace = true
edition.workspace = true
license.workspace = true
+homepage.workspace = true
+repository.workspace = true
[lints]
workspace = true
diff --git a/polkadot/core-primitives/Cargo.toml b/polkadot/core-primitives/Cargo.toml
index 42ca27953738..33869f216f78 100644
--- a/polkadot/core-primitives/Cargo.toml
+++ b/polkadot/core-primitives/Cargo.toml
@@ -5,6 +5,8 @@ description = "Core Polkadot types used by Relay Chains and parachains."
authors.workspace = true
edition.workspace = true
license.workspace = true
+homepage.workspace = true
+repository.workspace = true
[lints]
workspace = true
diff --git a/polkadot/erasure-coding/Cargo.toml b/polkadot/erasure-coding/Cargo.toml
index 969742c5bb0a..528b955c4db3 100644
--- a/polkadot/erasure-coding/Cargo.toml
+++ b/polkadot/erasure-coding/Cargo.toml
@@ -5,6 +5,8 @@ description = "Erasure coding used for Polkadot's availability system"
authors.workspace = true
edition.workspace = true
license.workspace = true
+homepage.workspace = true
+repository.workspace = true
[lints]
workspace = true
diff --git a/polkadot/node/collation-generation/Cargo.toml b/polkadot/node/collation-generation/Cargo.toml
index 777458673f5b..c1716e2e6eb8 100644
--- a/polkadot/node/collation-generation/Cargo.toml
+++ b/polkadot/node/collation-generation/Cargo.toml
@@ -5,6 +5,8 @@ authors.workspace = true
edition.workspace = true
license.workspace = true
description = "Collator-side subsystem that handles incoming candidate submissions from the parachain."
+homepage.workspace = true
+repository.workspace = true
[lints]
workspace = true
diff --git a/polkadot/node/core/approval-voting-parallel/Cargo.toml b/polkadot/node/core/approval-voting-parallel/Cargo.toml
index 3a98cce80e92..995687fb4c11 100644
--- a/polkadot/node/core/approval-voting-parallel/Cargo.toml
+++ b/polkadot/node/core/approval-voting-parallel/Cargo.toml
@@ -5,6 +5,8 @@ authors.workspace = true
edition.workspace = true
license.workspace = true
description = "Approval Voting Subsystem running approval work in parallel"
+homepage.workspace = true
+repository.workspace = true
[lints]
workspace = true
diff --git a/polkadot/node/core/approval-voting/Cargo.toml b/polkadot/node/core/approval-voting/Cargo.toml
index f9754d2babc9..80f5dcb7f318 100644
--- a/polkadot/node/core/approval-voting/Cargo.toml
+++ b/polkadot/node/core/approval-voting/Cargo.toml
@@ -5,6 +5,8 @@ authors.workspace = true
edition.workspace = true
license.workspace = true
description = "Approval Voting Subsystem of the Polkadot node"
+homepage.workspace = true
+repository.workspace = true
[lints]
workspace = true
diff --git a/polkadot/node/core/av-store/Cargo.toml b/polkadot/node/core/av-store/Cargo.toml
index 1d14e4cfba37..9f6864269cef 100644
--- a/polkadot/node/core/av-store/Cargo.toml
+++ b/polkadot/node/core/av-store/Cargo.toml
@@ -5,6 +5,8 @@ version = "7.0.0"
authors.workspace = true
edition.workspace = true
license.workspace = true
+homepage.workspace = true
+repository.workspace = true
[lints]
workspace = true
diff --git a/polkadot/node/core/backing/Cargo.toml b/polkadot/node/core/backing/Cargo.toml
index cd1acf9daa93..a81fe9486c63 100644
--- a/polkadot/node/core/backing/Cargo.toml
+++ b/polkadot/node/core/backing/Cargo.toml
@@ -5,6 +5,8 @@ authors.workspace = true
edition.workspace = true
license.workspace = true
description = "The Candidate Backing Subsystem. Tracks parachain candidates that can be backed, as well as the issuance of statements about candidates."
+homepage.workspace = true
+repository.workspace = true
[lints]
workspace = true
diff --git a/polkadot/node/core/bitfield-signing/Cargo.toml b/polkadot/node/core/bitfield-signing/Cargo.toml
index 126a18a14166..f00ba5712661 100644
--- a/polkadot/node/core/bitfield-signing/Cargo.toml
+++ b/polkadot/node/core/bitfield-signing/Cargo.toml
@@ -5,6 +5,8 @@ authors.workspace = true
edition.workspace = true
license.workspace = true
description = "Bitfield signing subsystem for the Polkadot node"
+homepage.workspace = true
+repository.workspace = true
[lints]
workspace = true
diff --git a/polkadot/node/core/candidate-validation/Cargo.toml b/polkadot/node/core/candidate-validation/Cargo.toml
index 87855dbce415..fea16b1c7604 100644
--- a/polkadot/node/core/candidate-validation/Cargo.toml
+++ b/polkadot/node/core/candidate-validation/Cargo.toml
@@ -5,6 +5,8 @@ version = "7.0.0"
authors.workspace = true
edition.workspace = true
license.workspace = true
+homepage.workspace = true
+repository.workspace = true
[lints]
workspace = true
diff --git a/polkadot/node/core/chain-api/Cargo.toml b/polkadot/node/core/chain-api/Cargo.toml
index a8e911e0c5c9..0f443868dada 100644
--- a/polkadot/node/core/chain-api/Cargo.toml
+++ b/polkadot/node/core/chain-api/Cargo.toml
@@ -5,6 +5,8 @@ authors.workspace = true
edition.workspace = true
license.workspace = true
description = "The Chain API subsystem provides access to chain related utility functions like block number to hash conversions."
+homepage.workspace = true
+repository.workspace = true
[lints]
workspace = true
diff --git a/polkadot/node/core/chain-selection/Cargo.toml b/polkadot/node/core/chain-selection/Cargo.toml
index 755d5cadeaaf..d2cc425a4816 100644
--- a/polkadot/node/core/chain-selection/Cargo.toml
+++ b/polkadot/node/core/chain-selection/Cargo.toml
@@ -5,6 +5,8 @@ version = "7.0.0"
authors.workspace = true
edition.workspace = true
license.workspace = true
+homepage.workspace = true
+repository.workspace = true
[lints]
workspace = true
diff --git a/polkadot/node/core/dispute-coordinator/Cargo.toml b/polkadot/node/core/dispute-coordinator/Cargo.toml
index 344b66af1933..11b4ac645c23 100644
--- a/polkadot/node/core/dispute-coordinator/Cargo.toml
+++ b/polkadot/node/core/dispute-coordinator/Cargo.toml
@@ -5,6 +5,8 @@ description = "The node-side components that participate in disputes"
authors.workspace = true
edition.workspace = true
license.workspace = true
+homepage.workspace = true
+repository.workspace = true
[lints]
workspace = true
diff --git a/polkadot/node/core/parachains-inherent/Cargo.toml b/polkadot/node/core/parachains-inherent/Cargo.toml
index 1e4953f40d0b..b1cd5e971b00 100644
--- a/polkadot/node/core/parachains-inherent/Cargo.toml
+++ b/polkadot/node/core/parachains-inherent/Cargo.toml
@@ -5,6 +5,8 @@ authors.workspace = true
edition.workspace = true
license.workspace = true
description = "Parachains inherent data provider for Polkadot node"
+homepage.workspace = true
+repository.workspace = true
[lints]
workspace = true
diff --git a/polkadot/node/core/prospective-parachains/Cargo.toml b/polkadot/node/core/prospective-parachains/Cargo.toml
index 5629e4ef7fbe..ced6c30c64b6 100644
--- a/polkadot/node/core/prospective-parachains/Cargo.toml
+++ b/polkadot/node/core/prospective-parachains/Cargo.toml
@@ -5,6 +5,8 @@ authors.workspace = true
edition.workspace = true
license.workspace = true
description = "The Prospective Parachains subsystem. Tracks and handles prospective parachain fragments."
+homepage.workspace = true
+repository.workspace = true
[lints]
workspace = true
diff --git a/polkadot/node/core/provisioner/Cargo.toml b/polkadot/node/core/provisioner/Cargo.toml
index 64a598b420f7..26dca1adbc79 100644
--- a/polkadot/node/core/provisioner/Cargo.toml
+++ b/polkadot/node/core/provisioner/Cargo.toml
@@ -5,6 +5,8 @@ description = "Responsible for assembling a relay chain block from a set of avai
authors.workspace = true
edition.workspace = true
license.workspace = true
+homepage.workspace = true
+repository.workspace = true
[lints]
workspace = true
diff --git a/polkadot/node/core/pvf-checker/Cargo.toml b/polkadot/node/core/pvf-checker/Cargo.toml
index 73ef17a2843a..cb7e3eadcf0a 100644
--- a/polkadot/node/core/pvf-checker/Cargo.toml
+++ b/polkadot/node/core/pvf-checker/Cargo.toml
@@ -5,6 +5,8 @@ version = "7.0.0"
authors.workspace = true
edition.workspace = true
license.workspace = true
+homepage.workspace = true
+repository.workspace = true
[lints]
workspace = true
diff --git a/polkadot/node/core/pvf/Cargo.toml b/polkadot/node/core/pvf/Cargo.toml
index 37d5878ea597..1b2a16ae8b55 100644
--- a/polkadot/node/core/pvf/Cargo.toml
+++ b/polkadot/node/core/pvf/Cargo.toml
@@ -5,6 +5,8 @@ version = "7.0.0"
authors.workspace = true
edition.workspace = true
license.workspace = true
+homepage.workspace = true
+repository.workspace = true
[lints]
workspace = true
diff --git a/polkadot/node/core/pvf/common/Cargo.toml b/polkadot/node/core/pvf/common/Cargo.toml
index 903c8dd1af29..d058d582fc26 100644
--- a/polkadot/node/core/pvf/common/Cargo.toml
+++ b/polkadot/node/core/pvf/common/Cargo.toml
@@ -5,6 +5,8 @@ version = "7.0.0"
authors.workspace = true
edition.workspace = true
license.workspace = true
+homepage.workspace = true
+repository.workspace = true
[lints]
workspace = true
diff --git a/polkadot/node/core/pvf/execute-worker/Cargo.toml b/polkadot/node/core/pvf/execute-worker/Cargo.toml
index 6ad340d25612..8327cf8058cd 100644
--- a/polkadot/node/core/pvf/execute-worker/Cargo.toml
+++ b/polkadot/node/core/pvf/execute-worker/Cargo.toml
@@ -5,6 +5,8 @@ version = "7.0.0"
authors.workspace = true
edition.workspace = true
license.workspace = true
+homepage.workspace = true
+repository.workspace = true
[lints]
workspace = true
diff --git a/polkadot/node/core/pvf/prepare-worker/Cargo.toml b/polkadot/node/core/pvf/prepare-worker/Cargo.toml
index 56235bd82192..9dc800a8ef56 100644
--- a/polkadot/node/core/pvf/prepare-worker/Cargo.toml
+++ b/polkadot/node/core/pvf/prepare-worker/Cargo.toml
@@ -5,6 +5,8 @@ version = "7.0.0"
authors.workspace = true
edition.workspace = true
license.workspace = true
+homepage.workspace = true
+repository.workspace = true
[lints]
workspace = true
diff --git a/polkadot/node/core/runtime-api/Cargo.toml b/polkadot/node/core/runtime-api/Cargo.toml
index 834e4b300b9e..15cbf4665d06 100644
--- a/polkadot/node/core/runtime-api/Cargo.toml
+++ b/polkadot/node/core/runtime-api/Cargo.toml
@@ -5,6 +5,8 @@ description = "Wrapper around the parachain-related runtime APIs"
authors.workspace = true
edition.workspace = true
license.workspace = true
+homepage.workspace = true
+repository.workspace = true
[lints]
workspace = true
diff --git a/polkadot/node/gum/Cargo.toml b/polkadot/node/gum/Cargo.toml
index 9b2df435a06a..84875ea121b6 100644
--- a/polkadot/node/gum/Cargo.toml
+++ b/polkadot/node/gum/Cargo.toml
@@ -5,6 +5,8 @@ authors.workspace = true
edition.workspace = true
license.workspace = true
description = "Stick logs together with the TraceID as provided by tempo"
+homepage.workspace = true
+repository.workspace = true
[lints]
workspace = true
diff --git a/polkadot/node/gum/proc-macro/Cargo.toml b/polkadot/node/gum/proc-macro/Cargo.toml
index da6364977cae..b4a3401b15e4 100644
--- a/polkadot/node/gum/proc-macro/Cargo.toml
+++ b/polkadot/node/gum/proc-macro/Cargo.toml
@@ -5,6 +5,8 @@ authors.workspace = true
edition.workspace = true
license.workspace = true
description = "Generate an overseer including builder pattern and message wrapper from a single annotated struct definition."
+homepage.workspace = true
+repository.workspace = true
[lints]
workspace = true
diff --git a/polkadot/node/metrics/Cargo.toml b/polkadot/node/metrics/Cargo.toml
index 41b08b66e9b4..05344993a75e 100644
--- a/polkadot/node/metrics/Cargo.toml
+++ b/polkadot/node/metrics/Cargo.toml
@@ -5,6 +5,8 @@ version = "7.0.0"
authors.workspace = true
edition.workspace = true
license.workspace = true
+homepage.workspace = true
+repository.workspace = true
[lints]
workspace = true
diff --git a/polkadot/node/network/approval-distribution/Cargo.toml b/polkadot/node/network/approval-distribution/Cargo.toml
index 8d674a733470..abf345552f89 100644
--- a/polkadot/node/network/approval-distribution/Cargo.toml
+++ b/polkadot/node/network/approval-distribution/Cargo.toml
@@ -5,6 +5,8 @@ description = "Polkadot Approval Distribution subsystem for the distribution of
authors.workspace = true
edition.workspace = true
license.workspace = true
+homepage.workspace = true
+repository.workspace = true
[lints]
workspace = true
diff --git a/polkadot/node/network/availability-distribution/Cargo.toml b/polkadot/node/network/availability-distribution/Cargo.toml
index 8c5574f244e4..e87103d99f72 100644
--- a/polkadot/node/network/availability-distribution/Cargo.toml
+++ b/polkadot/node/network/availability-distribution/Cargo.toml
@@ -5,6 +5,8 @@ version = "7.0.0"
authors.workspace = true
edition.workspace = true
license.workspace = true
+homepage.workspace = true
+repository.workspace = true
[lints]
workspace = true
diff --git a/polkadot/node/network/availability-recovery/Cargo.toml b/polkadot/node/network/availability-recovery/Cargo.toml
index 41f09b1f7044..be4323e74f02 100644
--- a/polkadot/node/network/availability-recovery/Cargo.toml
+++ b/polkadot/node/network/availability-recovery/Cargo.toml
@@ -5,6 +5,8 @@ version = "7.0.0"
authors.workspace = true
edition.workspace = true
license.workspace = true
+homepage.workspace = true
+repository.workspace = true
[lints]
workspace = true
diff --git a/polkadot/node/network/bitfield-distribution/Cargo.toml b/polkadot/node/network/bitfield-distribution/Cargo.toml
index 6d007255c574..2ff30489b6c1 100644
--- a/polkadot/node/network/bitfield-distribution/Cargo.toml
+++ b/polkadot/node/network/bitfield-distribution/Cargo.toml
@@ -5,6 +5,8 @@ description = "Polkadot Bitfiled Distribution subsystem, which gossips signed av
authors.workspace = true
edition.workspace = true
license.workspace = true
+homepage.workspace = true
+repository.workspace = true
[lints]
workspace = true
diff --git a/polkadot/node/network/bridge/Cargo.toml b/polkadot/node/network/bridge/Cargo.toml
index b4b5743853cd..c4b46c1dc001 100644
--- a/polkadot/node/network/bridge/Cargo.toml
+++ b/polkadot/node/network/bridge/Cargo.toml
@@ -5,6 +5,8 @@ description = "The Network Bridge Subsystem — protocol multiplexer for Polkado
authors.workspace = true
edition.workspace = true
license.workspace = true
+homepage.workspace = true
+repository.workspace = true
[lints]
workspace = true
diff --git a/polkadot/node/network/collator-protocol/Cargo.toml b/polkadot/node/network/collator-protocol/Cargo.toml
index 304cb23bb6aa..a51d24c70807 100644
--- a/polkadot/node/network/collator-protocol/Cargo.toml
+++ b/polkadot/node/network/collator-protocol/Cargo.toml
@@ -5,6 +5,8 @@ description = "Polkadot Collator Protocol subsystem. Allows collators and valida
authors.workspace = true
edition.workspace = true
license.workspace = true
+homepage.workspace = true
+repository.workspace = true
[lints]
workspace = true
diff --git a/polkadot/node/network/collator-protocol/src/error.rs b/polkadot/node/network/collator-protocol/src/error.rs
index ae7f9a8c1fbc..598cdcf43900 100644
--- a/polkadot/node/network/collator-protocol/src/error.rs
+++ b/polkadot/node/network/collator-protocol/src/error.rs
@@ -122,7 +122,7 @@ impl SecondingError {
PersistedValidationDataMismatch |
CandidateHashMismatch |
RelayParentMismatch |
- Duplicate | ParentHeadDataMismatch |
+ ParentHeadDataMismatch |
InvalidCoreIndex(_, _) |
InvalidSessionIndex(_, _) |
InvalidReceiptVersion(_)
diff --git a/polkadot/node/network/dispute-distribution/Cargo.toml b/polkadot/node/network/dispute-distribution/Cargo.toml
index b4dcafe09eb6..4f2f9ccadf8b 100644
--- a/polkadot/node/network/dispute-distribution/Cargo.toml
+++ b/polkadot/node/network/dispute-distribution/Cargo.toml
@@ -5,6 +5,8 @@ description = "Polkadot Dispute Distribution subsystem, which ensures all concer
authors.workspace = true
edition.workspace = true
license.workspace = true
+homepage.workspace = true
+repository.workspace = true
[lints]
workspace = true
diff --git a/polkadot/node/network/gossip-support/Cargo.toml b/polkadot/node/network/gossip-support/Cargo.toml
index c8c19e5de070..7d17ea45eab9 100644
--- a/polkadot/node/network/gossip-support/Cargo.toml
+++ b/polkadot/node/network/gossip-support/Cargo.toml
@@ -5,6 +5,8 @@ description = "Polkadot Gossip Support subsystem. Responsible for keeping track
authors.workspace = true
edition.workspace = true
license.workspace = true
+homepage.workspace = true
+repository.workspace = true
[lints]
workspace = true
diff --git a/polkadot/node/network/protocol/Cargo.toml b/polkadot/node/network/protocol/Cargo.toml
index 3d51d3c0a565..0bcf224332bc 100644
--- a/polkadot/node/network/protocol/Cargo.toml
+++ b/polkadot/node/network/protocol/Cargo.toml
@@ -5,6 +5,8 @@ authors.workspace = true
edition.workspace = true
license.workspace = true
description = "Primitives types for the Node-side"
+homepage.workspace = true
+repository.workspace = true
[lints]
workspace = true
diff --git a/polkadot/node/network/statement-distribution/Cargo.toml b/polkadot/node/network/statement-distribution/Cargo.toml
index de07937ffb0a..d737c7bf8968 100644
--- a/polkadot/node/network/statement-distribution/Cargo.toml
+++ b/polkadot/node/network/statement-distribution/Cargo.toml
@@ -5,6 +5,8 @@ version = "7.0.0"
authors.workspace = true
edition.workspace = true
license.workspace = true
+homepage.workspace = true
+repository.workspace = true
[lints]
workspace = true
diff --git a/polkadot/node/overseer/Cargo.toml b/polkadot/node/overseer/Cargo.toml
index 2253a5ae0c66..62634c1da090 100644
--- a/polkadot/node/overseer/Cargo.toml
+++ b/polkadot/node/overseer/Cargo.toml
@@ -5,6 +5,8 @@ authors.workspace = true
edition.workspace = true
license.workspace = true
description = "System overseer of the Polkadot node"
+homepage.workspace = true
+repository.workspace = true
[lints]
workspace = true
diff --git a/polkadot/node/primitives/Cargo.toml b/polkadot/node/primitives/Cargo.toml
index 7185205f905b..50ee3a80ddb8 100644
--- a/polkadot/node/primitives/Cargo.toml
+++ b/polkadot/node/primitives/Cargo.toml
@@ -5,6 +5,8 @@ version = "7.0.0"
authors.workspace = true
edition.workspace = true
license.workspace = true
+homepage.workspace = true
+repository.workspace = true
[lints]
workspace = true
diff --git a/polkadot/node/service/Cargo.toml b/polkadot/node/service/Cargo.toml
index 6e8eade21a43..7f58a56d5d16 100644
--- a/polkadot/node/service/Cargo.toml
+++ b/polkadot/node/service/Cargo.toml
@@ -6,6 +6,8 @@ authors.workspace = true
edition.workspace = true
license.workspace = true
description = "Utils to tie different Polkadot components together and allow instantiation of a node."
+homepage.workspace = true
+repository.workspace = true
[lints]
workspace = true
diff --git a/polkadot/node/subsystem-types/Cargo.toml b/polkadot/node/subsystem-types/Cargo.toml
index b5686ec96be1..44bb7036d63d 100644
--- a/polkadot/node/subsystem-types/Cargo.toml
+++ b/polkadot/node/subsystem-types/Cargo.toml
@@ -5,6 +5,8 @@ version = "7.0.0"
authors.workspace = true
edition.workspace = true
license.workspace = true
+homepage.workspace = true
+repository.workspace = true
[lints]
workspace = true
diff --git a/polkadot/node/subsystem-util/Cargo.toml b/polkadot/node/subsystem-util/Cargo.toml
index d12daa572055..9c21fede1c47 100644
--- a/polkadot/node/subsystem-util/Cargo.toml
+++ b/polkadot/node/subsystem-util/Cargo.toml
@@ -5,6 +5,8 @@ version = "7.0.0"
authors.workspace = true
edition.workspace = true
license.workspace = true
+homepage.workspace = true
+repository.workspace = true
[lints]
workspace = true
diff --git a/polkadot/node/subsystem/Cargo.toml b/polkadot/node/subsystem/Cargo.toml
index ce4bceec7336..4f30d3ce9c09 100644
--- a/polkadot/node/subsystem/Cargo.toml
+++ b/polkadot/node/subsystem/Cargo.toml
@@ -5,6 +5,8 @@ version = "7.0.0"
authors.workspace = true
edition.workspace = true
license.workspace = true
+homepage.workspace = true
+repository.workspace = true
[lints]
workspace = true
diff --git a/polkadot/node/tracking-allocator/Cargo.toml b/polkadot/node/tracking-allocator/Cargo.toml
index d98377e53759..0fbf526ccb8b 100644
--- a/polkadot/node/tracking-allocator/Cargo.toml
+++ b/polkadot/node/tracking-allocator/Cargo.toml
@@ -5,6 +5,8 @@ version = "2.0.0"
authors.workspace = true
edition.workspace = true
license.workspace = true
+homepage.workspace = true
+repository.workspace = true
[lints]
workspace = true
diff --git a/polkadot/parachain/Cargo.toml b/polkadot/parachain/Cargo.toml
index 9d0518fd46ad..ea6c4423dc19 100644
--- a/polkadot/parachain/Cargo.toml
+++ b/polkadot/parachain/Cargo.toml
@@ -5,6 +5,8 @@ authors.workspace = true
edition.workspace = true
license.workspace = true
version = "6.0.0"
+homepage.workspace = true
+repository.workspace = true
[lints]
workspace = true
diff --git a/polkadot/primitives/Cargo.toml b/polkadot/primitives/Cargo.toml
index dd269caa2d60..150aaf153fa7 100644
--- a/polkadot/primitives/Cargo.toml
+++ b/polkadot/primitives/Cargo.toml
@@ -5,6 +5,8 @@ authors.workspace = true
edition.workspace = true
license.workspace = true
description = "Shared primitives used by Polkadot runtime"
+homepage.workspace = true
+repository.workspace = true
[lints]
workspace = true
diff --git a/polkadot/rpc/Cargo.toml b/polkadot/rpc/Cargo.toml
index d01528d4dee0..48980dde4bbc 100644
--- a/polkadot/rpc/Cargo.toml
+++ b/polkadot/rpc/Cargo.toml
@@ -5,6 +5,8 @@ authors.workspace = true
edition.workspace = true
license.workspace = true
description = "Polkadot specific RPC functionality."
+homepage.workspace = true
+repository.workspace = true
[lints]
workspace = true
diff --git a/polkadot/runtime/common/Cargo.toml b/polkadot/runtime/common/Cargo.toml
index 01b56b31cf20..1646db54455a 100644
--- a/polkadot/runtime/common/Cargo.toml
+++ b/polkadot/runtime/common/Cargo.toml
@@ -5,6 +5,8 @@ description = "Pallets and constants used in Relay Chain networks."
authors.workspace = true
edition.workspace = true
license.workspace = true
+homepage.workspace = true
+repository.workspace = true
[lints]
workspace = true
diff --git a/polkadot/runtime/common/slot_range_helper/Cargo.toml b/polkadot/runtime/common/slot_range_helper/Cargo.toml
index 02810b75283f..3f110bdd76c6 100644
--- a/polkadot/runtime/common/slot_range_helper/Cargo.toml
+++ b/polkadot/runtime/common/slot_range_helper/Cargo.toml
@@ -5,6 +5,8 @@ authors.workspace = true
edition.workspace = true
license.workspace = true
description = "Helper crate for generating slot ranges for the Polkadot runtime."
+homepage.workspace = true
+repository.workspace = true
[lints]
workspace = true
diff --git a/polkadot/runtime/metrics/Cargo.toml b/polkadot/runtime/metrics/Cargo.toml
index 3709e1eb697e..0415e4754009 100644
--- a/polkadot/runtime/metrics/Cargo.toml
+++ b/polkadot/runtime/metrics/Cargo.toml
@@ -5,6 +5,8 @@ authors.workspace = true
edition.workspace = true
license.workspace = true
description = "Runtime metric interface for the Polkadot node"
+homepage.workspace = true
+repository.workspace = true
[lints]
workspace = true
diff --git a/polkadot/runtime/parachains/Cargo.toml b/polkadot/runtime/parachains/Cargo.toml
index a3eec3f9d961..b01778eeb424 100644
--- a/polkadot/runtime/parachains/Cargo.toml
+++ b/polkadot/runtime/parachains/Cargo.toml
@@ -5,6 +5,8 @@ description = "Relay Chain runtime code responsible for Parachains."
authors.workspace = true
edition.workspace = true
license.workspace = true
+homepage.workspace = true
+repository.workspace = true
[lints]
workspace = true
diff --git a/polkadot/runtime/rococo/Cargo.toml b/polkadot/runtime/rococo/Cargo.toml
index 3b11c977edf3..764c53abbfcb 100644
--- a/polkadot/runtime/rococo/Cargo.toml
+++ b/polkadot/runtime/rococo/Cargo.toml
@@ -6,6 +6,8 @@ description = "Rococo testnet Relay Chain runtime."
authors.workspace = true
edition.workspace = true
license.workspace = true
+homepage.workspace = true
+repository.workspace = true
[lints]
workspace = true
diff --git a/polkadot/runtime/rococo/constants/Cargo.toml b/polkadot/runtime/rococo/constants/Cargo.toml
index 1d0adac44af4..921bc8f5fe92 100644
--- a/polkadot/runtime/rococo/constants/Cargo.toml
+++ b/polkadot/runtime/rococo/constants/Cargo.toml
@@ -5,6 +5,8 @@ description = "Constants used throughout the Rococo network."
authors.workspace = true
edition.workspace = true
license.workspace = true
+homepage.workspace = true
+repository.workspace = true
[package.metadata.polkadot-sdk]
exclude-from-umbrella = true
diff --git a/polkadot/runtime/westend/Cargo.toml b/polkadot/runtime/westend/Cargo.toml
index f94301baab09..584f5855b7a4 100644
--- a/polkadot/runtime/westend/Cargo.toml
+++ b/polkadot/runtime/westend/Cargo.toml
@@ -6,6 +6,8 @@ description = "Westend testnet Relay Chain runtime."
authors.workspace = true
edition.workspace = true
license.workspace = true
+homepage.workspace = true
+repository.workspace = true
[lints]
workspace = true
diff --git a/polkadot/runtime/westend/constants/Cargo.toml b/polkadot/runtime/westend/constants/Cargo.toml
index 27d5b19b8e77..a50e2f9cc639 100644
--- a/polkadot/runtime/westend/constants/Cargo.toml
+++ b/polkadot/runtime/westend/constants/Cargo.toml
@@ -5,6 +5,8 @@ description = "Constants used throughout the Westend network."
authors.workspace = true
edition.workspace = true
license.workspace = true
+homepage.workspace = true
+repository.workspace = true
[package.metadata.polkadot-sdk]
exclude-from-umbrella = true
diff --git a/polkadot/statement-table/Cargo.toml b/polkadot/statement-table/Cargo.toml
index 53ea0b74463b..d9519dafe12d 100644
--- a/polkadot/statement-table/Cargo.toml
+++ b/polkadot/statement-table/Cargo.toml
@@ -5,6 +5,8 @@ authors.workspace = true
edition.workspace = true
license.workspace = true
description = "Stores messages other authorities issue about candidates in Polkadot."
+homepage.workspace = true
+repository.workspace = true
[lints]
workspace = true
diff --git a/polkadot/utils/generate-bags/Cargo.toml b/polkadot/utils/generate-bags/Cargo.toml
index 16205b0f51f5..3006d8325ef9 100644
--- a/polkadot/utils/generate-bags/Cargo.toml
+++ b/polkadot/utils/generate-bags/Cargo.toml
@@ -5,6 +5,8 @@ authors.workspace = true
edition.workspace = true
license.workspace = true
description = "CLI to generate voter bags for Polkadot runtimes"
+homepage.workspace = true
+repository.workspace = true
[lints]
workspace = true
diff --git a/polkadot/xcm/Cargo.toml b/polkadot/xcm/Cargo.toml
index 86c7067ad6fa..113e72c27ae1 100644
--- a/polkadot/xcm/Cargo.toml
+++ b/polkadot/xcm/Cargo.toml
@@ -5,6 +5,8 @@ version = "7.0.0"
authors.workspace = true
edition.workspace = true
license.workspace = true
+homepage.workspace = true
+repository.workspace = true
[lints]
workspace = true
diff --git a/polkadot/xcm/pallet-xcm-benchmarks/Cargo.toml b/polkadot/xcm/pallet-xcm-benchmarks/Cargo.toml
index b07bdfdca3d1..fe2b78163223 100644
--- a/polkadot/xcm/pallet-xcm-benchmarks/Cargo.toml
+++ b/polkadot/xcm/pallet-xcm-benchmarks/Cargo.toml
@@ -5,6 +5,8 @@ edition.workspace = true
license.workspace = true
version = "7.0.0"
description = "Benchmarks for the XCM pallet"
+homepage.workspace = true
+repository.workspace = true
[lints]
workspace = true
diff --git a/polkadot/xcm/pallet-xcm/Cargo.toml b/polkadot/xcm/pallet-xcm/Cargo.toml
index 4d44d75e34dd..e8cdd3b4931b 100644
--- a/polkadot/xcm/pallet-xcm/Cargo.toml
+++ b/polkadot/xcm/pallet-xcm/Cargo.toml
@@ -5,6 +5,8 @@ description = "A pallet for handling XCM programs."
authors.workspace = true
edition.workspace = true
license.workspace = true
+homepage.workspace = true
+repository.workspace = true
[lints]
workspace = true
diff --git a/polkadot/xcm/procedural/Cargo.toml b/polkadot/xcm/procedural/Cargo.toml
index 83b35d19cf7e..3167766158ff 100644
--- a/polkadot/xcm/procedural/Cargo.toml
+++ b/polkadot/xcm/procedural/Cargo.toml
@@ -6,6 +6,8 @@ edition.workspace = true
license.workspace = true
version = "7.0.0"
publish = true
+homepage.workspace = true
+repository.workspace = true
[lints]
workspace = true
diff --git a/polkadot/xcm/xcm-builder/Cargo.toml b/polkadot/xcm/xcm-builder/Cargo.toml
index eaa115740f3e..2819a0b0a555 100644
--- a/polkadot/xcm/xcm-builder/Cargo.toml
+++ b/polkadot/xcm/xcm-builder/Cargo.toml
@@ -5,6 +5,8 @@ authors.workspace = true
edition.workspace = true
license.workspace = true
version = "7.0.0"
+homepage.workspace = true
+repository.workspace = true
[lints]
workspace = true
diff --git a/polkadot/xcm/xcm-executor/Cargo.toml b/polkadot/xcm/xcm-executor/Cargo.toml
index cc966f91fe4d..20ca40de5faa 100644
--- a/polkadot/xcm/xcm-executor/Cargo.toml
+++ b/polkadot/xcm/xcm-executor/Cargo.toml
@@ -5,6 +5,8 @@ authors.workspace = true
edition.workspace = true
license.workspace = true
version = "7.0.0"
+homepage.workspace = true
+repository.workspace = true
[lints]
workspace = true
diff --git a/polkadot/xcm/xcm-simulator/Cargo.toml b/polkadot/xcm/xcm-simulator/Cargo.toml
index c7caa49393ed..47900e226d48 100644
--- a/polkadot/xcm/xcm-simulator/Cargo.toml
+++ b/polkadot/xcm/xcm-simulator/Cargo.toml
@@ -5,6 +5,8 @@ version = "7.0.0"
authors.workspace = true
edition.workspace = true
license.workspace = true
+homepage.workspace = true
+repository.workspace = true
[lints]
workspace = true
diff --git a/polkadot/xcm/xcm-simulator/example/Cargo.toml b/polkadot/xcm/xcm-simulator/example/Cargo.toml
index e0aff9b7782a..6fbe9243944a 100644
--- a/polkadot/xcm/xcm-simulator/example/Cargo.toml
+++ b/polkadot/xcm/xcm-simulator/example/Cargo.toml
@@ -5,6 +5,8 @@ authors.workspace = true
edition.workspace = true
license.workspace = true
version = "7.0.0"
+homepage.workspace = true
+repository.workspace = true
[lints]
workspace = true
diff --git a/prdoc/pr_5855.prdoc b/prdoc/pr_5855.prdoc
new file mode 100644
index 000000000000..7735cfee9f37
--- /dev/null
+++ b/prdoc/pr_5855.prdoc
@@ -0,0 +1,15 @@
+# Schema: Polkadot SDK PRDoc Schema (prdoc) v1.0.0
+# See doc at https://raw.githubusercontent.com/paritytech/polkadot-sdk/master/prdoc/schema_user.json
+
+title: Remove feature `test-helpers` from sc-service
+
+doc:
+ - audience: Node Dev
+ description: |
+ Removes feature `test-helpers` from sc-service.
+
+crates:
+ - name: sc-service
+ bump: major
+ - name: sc-rpc-spec-v2
+ bump: major
diff --git a/prdoc/pr_6405.prdoc b/prdoc/pr_6405.prdoc
new file mode 100644
index 000000000000..9e4e0b3c6c20
--- /dev/null
+++ b/prdoc/pr_6405.prdoc
@@ -0,0 +1,9 @@
+title: '`fatxpool`: handling limits and priorities improvements'
+doc:
+- audience: Node Dev
+ description: |-
+ This PR provides a number of improvements and fixes around handling limits and priorities in the fork-aware transaction pool.
+
+crates:
+- name: sc-transaction-pool
+ bump: major
diff --git a/prdoc/pr_6419.prdoc b/prdoc/pr_6419.prdoc
new file mode 100644
index 000000000000..6cc155d64b91
--- /dev/null
+++ b/prdoc/pr_6419.prdoc
@@ -0,0 +1,12 @@
+# Schema: Polkadot SDK PRDoc Schema (prdoc) v1.0.0
+# See doc at https://raw.githubusercontent.com/paritytech/polkadot-sdk/master/prdoc/schema_user.json
+
+title: Use the custom target riscv32emac-unknown-none-polkavm
+doc:
+ - audience: Runtime Dev
+ description: |
+ Closes: https://github.com/paritytech/polkadot-sdk/issues/6335
+
+crates:
+- name: substrate-wasm-builder
+ bump: patch
diff --git a/prdoc/pr_6549.prdoc b/prdoc/pr_6549.prdoc
new file mode 100644
index 000000000000..61a64c724185
--- /dev/null
+++ b/prdoc/pr_6549.prdoc
@@ -0,0 +1,247 @@
+doc: []
+
+crates:
+ - name: polkadot-sdk
+ bump: none
+ - name: asset-test-utils
+ bump: none
+ - name: cumulus-pallet-parachain-system
+ bump: none
+ - name: cumulus-pallet-parachain-system-proc-macro
+ bump: none
+ - name: cumulus-primitives-core
+ bump: none
+ - name: polkadot-core-primitives
+ bump: none
+ - name: polkadot-parachain-primitives
+ bump: none
+ - name: polkadot-primitives
+ bump: none
+ - name: staging-xcm
+ bump: none
+ - name: xcm-procedural
+ bump: none
+ - name: cumulus-primitives-parachain-inherent
+ bump: none
+ - name: cumulus-primitives-proof-size-hostfunction
+ bump: none
+ - name: polkadot-runtime-common
+ bump: none
+ - name: polkadot-runtime-parachains
+ bump: none
+ - name: polkadot-runtime-metrics
+ bump: none
+ - name: staging-xcm-executor
+ bump: none
+ - name: slot-range-helper
+ bump: none
+ - name: staging-xcm-builder
+ bump: none
+ - name: pallet-xcm
+ bump: none
+ - name: cumulus-primitives-storage-weight-reclaim
+ bump: none
+ - name: cumulus-pallet-aura-ext
+ bump: none
+ - name: cumulus-primitives-aura
+ bump: none
+ - name: staging-parachain-info
+ bump: none
+ - name: cumulus-test-relay-sproof-builder
+ bump: none
+ - name: cumulus-client-cli
+ bump: none
+ - name: cumulus-client-collator
+ bump: none
+ - name: cumulus-client-consensus-common
+ bump: none
+ - name: cumulus-client-pov-recovery
+ bump: none
+ - name: cumulus-relay-chain-interface
+ bump: none
+ - name: polkadot-overseer
+ bump: none
+ - name: tracing-gum
+ bump: none
+ - name: tracing-gum-proc-macro
+ bump: none
+ - name: polkadot-node-metrics
+ bump: none
+ - name: polkadot-node-primitives
+ bump: none
+ - name: polkadot-erasure-coding
+ bump: none
+ - name: polkadot-node-subsystem
+ bump: none
+ - name: polkadot-node-subsystem-types
+ bump: none
+ - name: polkadot-node-network-protocol
+ bump: none
+ - name: polkadot-statement-table
+ bump: none
+ - name: polkadot-rpc
+ bump: none
+ - name: polkadot-service
+ bump: none
+ - name: cumulus-client-parachain-inherent
+ bump: none
+ - name: westend-runtime
+ bump: none
+ - name: pallet-xcm-benchmarks
+ bump: none
+ - name: westend-runtime-constants
+ bump: none
+ - name: polkadot-approval-distribution
+ bump: none
+ - name: polkadot-node-subsystem-util
+ bump: none
+ - name: polkadot-availability-bitfield-distribution
+ bump: none
+ - name: polkadot-availability-distribution
+ bump: none
+ - name: polkadot-availability-recovery
+ bump: none
+ - name: polkadot-node-core-approval-voting
+ bump: none
+ - name: polkadot-node-core-approval-voting-parallel
+ bump: none
+ - name: polkadot-node-core-av-store
+ bump: none
+ - name: polkadot-node-core-chain-api
+ bump: none
+ - name: polkadot-statement-distribution
+ bump: none
+ - name: polkadot-collator-protocol
+ bump: none
+ - name: polkadot-dispute-distribution
+ bump: none
+ - name: polkadot-gossip-support
+ bump: none
+ - name: polkadot-network-bridge
+ bump: none
+ - name: polkadot-node-collation-generation
+ bump: none
+ - name: polkadot-node-core-backing
+ bump: none
+ - name: polkadot-node-core-bitfield-signing
+ bump: none
+ - name: polkadot-node-core-candidate-validation
+ bump: none
+ - name: polkadot-node-core-pvf
+ bump: none
+ - name: polkadot-node-core-pvf-common
+ bump: none
+ - name: polkadot-node-core-pvf-execute-worker
+ bump: none
+ - name: polkadot-node-core-pvf-prepare-worker
+ bump: none
+ - name: staging-tracking-allocator
+ bump: none
+ - name: rococo-runtime
+ bump: none
+ - name: rococo-runtime-constants
+ bump: none
+ - name: polkadot-node-core-chain-selection
+ bump: none
+ - name: polkadot-node-core-dispute-coordinator
+ bump: none
+ - name: polkadot-node-core-parachains-inherent
+ bump: none
+ - name: polkadot-node-core-prospective-parachains
+ bump: none
+ - name: polkadot-node-core-provisioner
+ bump: none
+ - name: polkadot-node-core-pvf-checker
+ bump: none
+ - name: polkadot-node-core-runtime-api
+ bump: none
+ - name: cumulus-client-network
+ bump: none
+ - name: cumulus-relay-chain-inprocess-interface
+ bump: none
+ - name: polkadot-cli
+ bump: none
+ - name: cumulus-client-consensus-aura
+ bump: none
+ - name: cumulus-client-consensus-proposer
+ bump: none
+ - name: cumulus-client-consensus-relay-chain
+ bump: none
+ - name: cumulus-client-service
+ bump: none
+ - name: cumulus-relay-chain-minimal-node
+ bump: none
+ - name: cumulus-relay-chain-rpc-interface
+ bump: none
+ - name: parachains-common
+ bump: none
+ - name: cumulus-primitives-utility
+ bump: none
+ - name: cumulus-pallet-xcmp-queue
+ bump: none
+ - name: parachains-runtimes-test-utils
+ bump: none
+ - name: assets-common
+ bump: none
+ - name: bridge-hub-common
+ bump: none
+ - name: bridge-hub-test-utils
+ bump: none
+ - name: cumulus-pallet-solo-to-para
+ bump: none
+ - name: cumulus-pallet-xcm
+ bump: none
+ - name: cumulus-ping
+ bump: none
+ - name: cumulus-primitives-timestamp
+ bump: none
+ - name: emulated-integration-tests-common
+ bump: none
+ - name: xcm-emulator
+ bump: none
+ - name: pallet-collective-content
+ bump: none
+ - name: xcm-simulator
+ bump: none
+ - name: pallet-revive-fixtures
+ bump: none
+ - name: polkadot-omni-node-lib
+ bump: none
+ - name: snowbridge-runtime-test-common
+ bump: none
+ - name: testnet-parachains-constants
+ bump: none
+ - name: asset-hub-rococo-runtime
+ bump: none
+ - name: asset-hub-westend-runtime
+ bump: none
+ - name: bridge-hub-rococo-runtime
+ bump: none
+ - name: bridge-hub-westend-runtime
+ bump: none
+ - name: collectives-westend-runtime
+ bump: none
+ - name: coretime-rococo-runtime
+ bump: none
+ - name: coretime-westend-runtime
+ bump: none
+ - name: people-rococo-runtime
+ bump: none
+ - name: people-westend-runtime
+ bump: none
+ - name: contracts-rococo-runtime
+ bump: none
+ - name: glutton-westend-runtime
+ bump: none
+ - name: rococo-parachain-runtime
+ bump: none
+ - name: polkadot-omni-node
+ bump: none
+ - name: polkadot-parachain-bin
+ bump: none
+ - name: polkadot
+ bump: none
+ - name: polkadot-voter-bags
+ bump: none
+ - name: xcm-simulator-example
+ bump: none
diff --git a/prdoc/pr_6565.prdoc b/prdoc/pr_6565.prdoc
new file mode 100644
index 000000000000..f9a75a16a6a7
--- /dev/null
+++ b/prdoc/pr_6565.prdoc
@@ -0,0 +1,35 @@
+title: 'pallet_revive: Switch to 64bit RISC-V'
+doc:
+- audience: Runtime Dev
+ description: |-
+ This PR updates pallet_revive to the newest PolkaVM version and adapts the test fixtures and syscall interface to work under 64bit.
+
+ Please note that after this PR no 32bit contracts can be deployed (they will be rejected at deploy time). Pre-deployed 32bit contracts are now considered defunct, since we changed how parameters are passed for functions with more than 6 arguments.
+
+ ## Fixtures
+
+ The fixtures are now built for the 64bit target. I also removed the temporary directory mechanism that triggered a full rebuild every time. It also makes it easier to find the compiled fixtures since they are now always in `target/pallet-revive-fixtures`.
+
+ ## Syscall interface
+
+ ### Passing pointer
+
+ Registers and pointers are now 64bit wide. This allows us to pass u64 arguments in a single register; before, we needed two registers for them. As before, we need one register per pointer we pass. We keep pointers as `u32` arguments by truncating the register, since the memory space of PolkaVM is 32bit.
+
+ ### Functions with more than 6 arguments
+
+ We only have 6 registers to pass arguments. This is why we pass a pointer to a struct when we need more than 6. Before this PR we expected a packed struct and interpreted it as a SCALE encoded tuple. However, this was buggy because `MaxEncodedLen` returned something larger than the packed size of the structure. This wasn't a problem before, but the memory layout changed so that data is now placed at the edges of the memory space, and those extra bytes led to an out-of-bounds access.
+
+ This is why this PR drops SCALE and expects the arguments to be passed as a pointer to a `C` aligned struct (a minimal sketch follows this prdoc). This avoids unaligned accesses. However, revive needs to adapt its codegen to properly align the structure fields.
+
+ ## TODO
+ - [ ] Add multi block migration that wipes all existing contracts as we made breaking changes to the syscall interface
+crates:
+- name: pallet-revive
+ bump: major
+- name: pallet-revive-fixtures
+ bump: major
+- name: pallet-revive-proc-macro
+ bump: major
+- name: pallet-revive-uapi
+ bump: major
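To make the "pointer to a `C` aligned struct" convention from pr_6565 concrete, here is a minimal illustrative sketch. The struct name and fields are hypothetical and do not reflect the actual pallet_revive syscall ABI; the point is only the `#[repr(C)]` layout that replaces the SCALE-encoded tuple:

```rust
// Hypothetical example of a syscall that needs more than six arguments.
// `#[repr(C)]` gives a stable, aligned layout that the host can read directly,
// avoiding the SCALE-decoding step (and the MaxEncodedLen mismatch) described
// in the prdoc. Pointer fields stay `u32` because PolkaVM's address space is 32bit.
#[repr(C)]
struct CallArgs {
    flags: u32,
    callee_ptr: u32,
    deposit_limit_ptr: u32,
    value_ptr: u32,
    input_ptr: u32,
    input_len: u32,
    output_ptr: u32,
    output_len_ptr: u32,
}

fn main() {
    let args = CallArgs {
        flags: 0,
        callee_ptr: 0,
        deposit_limit_ptr: 0,
        value_ptr: 0,
        input_ptr: 0,
        input_len: 0,
        output_ptr: 0,
        output_len_ptr: 0,
    };
    // Only the struct's address travels to the host, in a single 64bit register;
    // the host then reads the aligned fields out of guest memory.
    let addr = &args as *const CallArgs as usize;
    println!("args struct at guest address {addr:#x}");
}
```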
diff --git a/prdoc/pr_6583.prdoc b/prdoc/pr_6583.prdoc
new file mode 100644
index 000000000000..0e67ed33e27c
--- /dev/null
+++ b/prdoc/pr_6583.prdoc
@@ -0,0 +1,7 @@
+title: Bump Westend AH
+doc:
+- audience: Runtime Dev
+ description: Bump the Asset Hub Westend spec version
+crates:
+- name: asset-hub-westend-runtime
+ bump: minor
diff --git a/prdoc/pr_6608.prdoc b/prdoc/pr_6608.prdoc
new file mode 100644
index 000000000000..b9cd7008de47
--- /dev/null
+++ b/prdoc/pr_6608.prdoc
@@ -0,0 +1,14 @@
+title: '[pallet-revive] eth-rpc fix geth diff'
+doc:
+- audience: Runtime Dev
+ description: |-
+ * Add a set of differential tests to ensure that responses from eth-rpc match the ones from `geth`
+ * The EVM RPC server no longer fails gas estimation when no gas is specified; pallet-revive gained an extra `skip_transfer` boolean check to replicate this behaviour in our pallet
+ * The `eth_transact` and `bare_eth_transact` APIs have been updated to take `GenericTransaction` directly, as this is what `eth_estimateGas` and `eth_call` use
+crates:
+- name: pallet-revive-eth-rpc
+ bump: minor
+- name: pallet-revive
+ bump: minor
+- name: asset-hub-westend-runtime
+ bump: minor
diff --git a/prdoc/pr_6665.prdoc b/prdoc/pr_6665.prdoc
new file mode 100644
index 000000000000..b5aaf8a3b184
--- /dev/null
+++ b/prdoc/pr_6665.prdoc
@@ -0,0 +1,15 @@
+title: Fix runtime api impl detection by construct runtime
+doc:
+- audience: Runtime Dev
+ description: |-
+ Construct runtime uses autoref-based specialization to fetch the metadata about the implemented runtime apis, so that compilation does not fail when no runtime apis are implemented (a minimal sketch of the trick follows this prdoc). However, there was an issue with detecting runtime apis when they were implemented in a different file. The problem is solved by moving the trait implemented by `impl_runtime_apis!` to the metadata ir crate.
+
+
+ Closes: https://github.com/paritytech/polkadot-sdk/issues/6659
+crates:
+- name: frame-support-procedural
+ bump: patch
+- name: sp-api-proc-macro
+ bump: patch
+- name: sp-metadata-ir
+ bump: patch
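As a side note on pr_6665, the "autoref-based specialization" it mentions can be illustrated with a small standalone sketch. All names below (`Wrap`, `runtime_api_metadata`, the marker structs) are invented for illustration and are not the actual macro-generated items:

```rust
// Minimal sketch of autoref-based specialization as used to detect whether
// `impl_runtime_apis!` was used, without failing to compile when it was not.
use std::marker::PhantomData;

struct Wrap<T>(PhantomData<T>);

// Fallback, reachable only through an extra auto-ref on the receiver.
trait NoApisFallback {
    fn runtime_api_metadata(&self) -> &'static str {
        "no runtime apis implemented"
    }
}
impl<'a, T> NoApisFallback for &'a Wrap<T> {}

// Stand-in for the item that `impl_runtime_apis!` would generate.
struct RuntimeWithApis;
impl Wrap<RuntimeWithApis> {
    fn runtime_api_metadata(&self) -> &'static str {
        "metadata collected from impl_runtime_apis!"
    }
}

fn main() {
    struct RuntimeWithoutApis;
    // Same call syntax in both cases: the inherent impl wins when it exists...
    println!("{}", (&Wrap::<RuntimeWithApis>(PhantomData)).runtime_api_metadata());
    // ...and the compiler silently falls back to the trait impl when it does not.
    println!("{}", (&Wrap::<RuntimeWithoutApis>(PhantomData)).runtime_api_metadata());
}
```

Per the prdoc above, the actual fix is narrower: the trait implemented by `impl_runtime_apis!` now lives in `sp-metadata-ir`, so detection works even when the runtime api impls are written in a different file.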
diff --git a/prdoc/pr_6681.prdoc b/prdoc/pr_6681.prdoc
new file mode 100644
index 000000000000..93a967d4a66c
--- /dev/null
+++ b/prdoc/pr_6681.prdoc
@@ -0,0 +1,406 @@
+# Schema: Polkadot SDK PRDoc Schema (prdoc) v1.0.0
+# See doc at https://raw.githubusercontent.com/paritytech/polkadot-sdk/master/prdoc/schema_user.json
+
+title: update scale-info to 2.11.6
+
+doc:
+ - audience: Runtime Dev
+ description: |
+ Updates scale-info from 2.11.5 to 2.11.6.
+ The updated version of scale-info annotates generated code with `allow(deprecated)`.
+
+crates:
+ - name: bridge-runtime-common
+ bump: none
+ - name: bp-header-chain
+ bump: none
+ - name: bp-runtime
+ bump: none
+ - name: frame-support
+ bump: none
+ - name: sp-core
+ bump: none
+ - name: sp-trie
+ bump: none
+ - name: sp-runtime
+ bump: none
+ - name: sp-application-crypto
+ bump: none
+ - name: sp-arithmetic
+ bump: none
+ - name: sp-weights
+ bump: none
+ - name: sp-api
+ bump: none
+ - name: sp-metadata-ir
+ bump: none
+ - name: sp-version
+ bump: none
+ - name: sp-inherents
+ bump: none
+ - name: frame-executive
+ bump: none
+ - name: frame-system
+ bump: none
+ - name: pallet-balances
+ bump: none
+ - name: frame-benchmarking
+ bump: none
+ - name: pallet-migrations
+ bump: none
+ - name: cumulus-pallet-parachain-system
+ bump: none
+ - name: cumulus-primitives-core
+ bump: none
+ - name: polkadot-core-primitives
+ bump: none
+ - name: polkadot-parachain-primitives
+ bump: none
+ - name: polkadot-primitives
+ bump: none
+ - name: sp-authority-discovery
+ bump: none
+ - name: sp-consensus-slots
+ bump: none
+ - name: sp-staking
+ bump: none
+ - name: staging-xcm
+ bump: none
+ - name: cumulus-primitives-parachain-inherent
+ bump: none
+ - name: pallet-message-queue
+ bump: none
+ - name: polkadot-runtime-common
+ bump: none
+ - name: frame-election-provider-support
+ bump: none
+ - name: sp-npos-elections
+ bump: none
+ - name: sp-consensus-grandpa
+ bump: none
+ - name: polkadot-primitives
+ bump: none
+ - name: sp-authority-discovery
+ bump: none
+ - name: sp-consensus-grandpa
+ bump: none
+ - name: sp-genesis-builder
+ bump: none
+ - name: sp-consensus-babe
+ bump: none
+ - name: sp-mixnet
+ bump: none
+ - name: sc-rpc-api
+ bump: none
+ - name: sp-session
+ bump: none
+ - name: sp-statement-store
+ bump: none
+ - name: sp-transaction-storage-proof
+ bump: none
+ - name: pallet-asset-rate
+ bump: none
+ - name: pallet-authorship
+ bump: none
+ - name: pallet-babe
+ bump: none
+ - name: pallet-session
+ bump: none
+ - name: pallet-timestamp
+ bump: none
+ - name: pallet-offences
+ bump: none
+ - name: pallet-staking
+ bump: none
+ - name: pallet-bags-list
+ bump: none
+ - name: pallet-broker
+ bump: none
+ - name: pallet-election-provider-multi-phase
+ bump: none
+ - name: pallet-fast-unstake
+ bump: none
+ - name: pallet-identity
+ bump: none
+ - name: pallet-transaction-payment
+ bump: none
+ - name: pallet-treasury
+ bump: none
+ - name: pallet-utility
+ bump: none
+ - name: pallet-collective
+ bump: none
+ - name: pallet-root-testing
+ bump: none
+ - name: pallet-vesting
+ bump: none
+ - name: polkadot-runtime-parachains
+ bump: none
+ - name: pallet-authority-discovery
+ bump: none
+ - name: pallet-mmr
+ bump: none
+ - name: sp-mmr-primitives
+ bump: none
+ - name: staging-xcm-executor
+ bump: none
+ - name: staging-xcm-builder
+ bump: none
+ - name: pallet-asset-conversion
+ bump: none
+ - name: pallet-assets
+ bump: none
+ - name: pallet-salary
+ bump: none
+ - name: pallet-ranked-collective
+ bump: none
+ - name: pallet-xcm
+ bump: none
+ - name: xcm-runtime-apis
+ bump: none
+ - name: pallet-grandpa
+ bump: none
+ - name: pallet-indices
+ bump: none
+ - name: pallet-sudo
+ bump: none
+ - name: sp-consensus-beefy
+ bump: none
+ - name: cumulus-primitives-storage-weight-reclaim
+ bump: none
+ - name: cumulus-pallet-aura-ext
+ bump: none
+ - name: pallet-aura
+ bump: none
+ - name: sp-consensus-aura
+ bump: none
+ - name: pallet-collator-selection
+ bump: none
+ - name: pallet-glutton
+ bump: none
+ - name: staging-parachain-info
+ bump: none
+ - name: westend-runtime
+ bump: none
+ - name: frame-metadata-hash-extension
+ bump: none
+ - name: frame-system-benchmarking
+ bump: none
+ - name: pallet-beefy
+ bump: none
+ - name: pallet-beefy-mmr
+ bump: none
+ - name: pallet-conviction-voting
+ bump: none
+ - name: pallet-scheduler
+ bump: none
+ - name: pallet-preimage
+ bump: none
+ - name: pallet-delegated-staking
+ bump: none
+ - name: pallet-nomination-pools
+ bump: none
+ - name: pallet-democracy
+ bump: none
+ - name: pallet-elections-phragmen
+ bump: none
+ - name: pallet-membership
+ bump: none
+ - name: pallet-multisig
+ bump: none
+ - name: polkadot-sdk-frame
+ bump: none
+ - name: pallet-dev-mode
+ bump: none
+ - name: pallet-verify-signature
+ bump: none
+ - name: pallet-nomination-pools-benchmarking
+ bump: none
+ - name: pallet-offences-benchmarking
+ bump: none
+ - name: pallet-im-online
+ bump: none
+ - name: pallet-parameters
+ bump: none
+ - name: pallet-proxy
+ bump: none
+ - name: pallet-recovery
+ bump: none
+ - name: pallet-referenda
+ bump: none
+ - name: pallet-society
+ bump: none
+ - name: pallet-state-trie-migration
+ bump: none
+ - name: pallet-whitelist
+ bump: none
+ - name: pallet-xcm-benchmarks
+ bump: none
+ - name: rococo-runtime
+ bump: none
+ - name: pallet-bounties
+ bump: none
+ - name: pallet-child-bounties
+ bump: none
+ - name: pallet-nis
+ bump: none
+ - name: pallet-tips
+ bump: none
+ - name: parachains-common
+ bump: none
+ - name: pallet-asset-tx-payment
+ bump: none
+ - name: cumulus-pallet-xcmp-queue
+ bump: none
+ - name: bp-xcm-bridge-hub-router
+ bump: none
+ - name: pallet-xcm-bridge-hub-router
+ bump: none
+ - name: assets-common
+ bump: none
+ - name: bp-messages
+ bump: none
+ - name: bp-parachains
+ bump: none
+ - name: bp-polkadot-core
+ bump: none
+ - name: bp-relayers
+ bump: none
+ - name: bp-xcm-bridge-hub
+ bump: none
+ - name: bridge-hub-common
+ bump: none
+ - name: snowbridge-core
+ bump: none
+ - name: snowbridge-beacon-primitives
+ bump: none
+ - name: snowbridge-ethereum
+ bump: none
+ - name: pallet-bridge-grandpa
+ bump: none
+ - name: pallet-bridge-messages
+ bump: none
+ - name: pallet-bridge-parachains
+ bump: none
+ - name: pallet-bridge-relayers
+ bump: none
+ - name: pallet-xcm-bridge-hub
+ bump: none
+ - name: cumulus-pallet-dmp-queue
+ bump: none
+ - name: cumulus-pallet-solo-to-para
+ bump: none
+ - name: cumulus-pallet-xcm
+ bump: none
+ - name: cumulus-ping
+ bump: none
+ - name: frame-benchmarking-pallet-pov
+ bump: none
+ - name: pallet-alliance
+ bump: none
+ - name: pallet-asset-conversion-ops
+ bump: none
+ - name: pallet-asset-conversion-tx-payment
+ bump: none
+ - name: pallet-assets-freezer
+ bump: none
+ - name: pallet-atomic-swap
+ bump: none
+ - name: pallet-collective-content
+ bump: none
+ - name: pallet-contracts
+ bump: none
+ - name: pallet-contracts-uapi
+ bump: none
+ - name: pallet-insecure-randomness-collective-flip
+ bump: none
+ - name: pallet-contracts-mock-network
+ bump: none
+ - name: xcm-simulator
+ bump: none
+ - name: pallet-core-fellowship
+ bump: none
+ - name: pallet-lottery
+ bump: none
+ - name: pallet-mixnet
+ bump: none
+ - name: pallet-nft-fractionalization
+ bump: none
+ - name: pallet-nfts
+ bump: none
+ - name: pallet-node-authorization
+ bump: none
+ - name: pallet-paged-list
+ bump: none
+ - name: pallet-remark
+ bump: none
+ - name: pallet-revive
+ bump: none
+ - name: pallet-revive-uapi
+ bump: none
+ - name: pallet-revive-eth-rpc
+ bump: none
+ - name: pallet-skip-feeless-payment
+ bump: none
+ - name: pallet-revive-mock-network
+ bump: none
+ - name: pallet-root-offences
+ bump: none
+ - name: pallet-safe-mode
+ bump: none
+ - name: pallet-scored-pool
+ bump: none
+ - name: pallet-statement
+ bump: none
+ - name: pallet-transaction-storage
+ bump: none
+ - name: pallet-tx-pause
+ bump: none
+ - name: pallet-uniques
+ bump: none
+ - name: snowbridge-outbound-queue-merkle-tree
+ bump: none
+ - name: snowbridge-pallet-ethereum-client
+ bump: none
+ - name: snowbridge-pallet-inbound-queue
+ bump: none
+ - name: snowbridge-router-primitives
+ bump: none
+ - name: snowbridge-pallet-outbound-queue
+ bump: none
+ - name: snowbridge-pallet-system
+ bump: none
+ - name: bp-asset-hub-rococo
+ bump: none
+ - name: bp-asset-hub-westend
+ bump: none
+ - name: bp-polkadot-bulletin
+ bump: none
+ - name: asset-hub-rococo-runtime
+ bump: none
+ - name: asset-hub-westend-runtime
+ bump: none
+ - name: bridge-hub-rococo-runtime
+ bump: none
+ - name: bridge-hub-westend-runtime
+ bump: none
+ - name: collectives-westend-runtime
+ bump: none
+ - name: coretime-rococo-runtime
+ bump: none
+ - name: coretime-westend-runtime
+ bump: none
+ - name: people-rococo-runtime
+ bump: none
+ - name: people-westend-runtime
+ bump: none
+ - name: penpal-runtime
+ bump: none
+ - name: contracts-rococo-runtime
+ bump: none
+ - name: glutton-westend-runtime
+ bump: none
+ - name: rococo-parachain-runtime
+ bump: none
+ - name: xcm-simulator-example
+ bump: none
\ No newline at end of file
diff --git a/prdoc/pr_6695.prdoc b/prdoc/pr_6695.prdoc
new file mode 100644
index 000000000000..7a950e8546cd
--- /dev/null
+++ b/prdoc/pr_6695.prdoc
@@ -0,0 +1,8 @@
+title: '[pallet-revive] bugfix decoding 64bit args in the decoder'
+doc:
+- audience: Runtime Dev
+ description: The argument index of the next argument is dictated by the size of
+ the current one.
+crates:
+- name: pallet-revive-proc-macro
+ bump: patch
diff --git a/prdoc/pr_6703.prdoc b/prdoc/pr_6703.prdoc
new file mode 100644
index 000000000000..2dd0962a3eea
--- /dev/null
+++ b/prdoc/pr_6703.prdoc
@@ -0,0 +1,7 @@
+title: 'network/libp2p-backend: Suppress warning adding already reserved node as reserved'
+doc:
+- audience: Node Dev
+ description: Fixes https://github.com/paritytech/polkadot-sdk/issues/6598.
+crates:
+- name: sc-network
+ bump: patch
diff --git a/prdoc/pr_6742.prdoc b/prdoc/pr_6742.prdoc
new file mode 100644
index 000000000000..92c3755a3c28
--- /dev/null
+++ b/prdoc/pr_6742.prdoc
@@ -0,0 +1,11 @@
+title: Update litep2p backend to v0.8.3
+doc:
+- audience: Node Dev
+ description: |-
+ This release includes two fixes for small memory leaks that could occur in edge cases of the notification and request-response protocols.
+ While at it, a log message coming from litep2p has been downgraded.
+
+crates:
+- name: sc-network
+ bump: patch
+
diff --git a/prdoc/pr_6743.prdoc b/prdoc/pr_6743.prdoc
new file mode 100644
index 000000000000..4c35ff46ca67
--- /dev/null
+++ b/prdoc/pr_6743.prdoc
@@ -0,0 +1,10 @@
+title: 'umbrella: Remove `pallet-revive-fixtures`'
+doc:
+- audience: Runtime Dev
+ description: |-
+ There is no need to have the fixtures in the umbrella crate; including them there also makes them bleed into the normal build.
+crates:
+- name: pallet-revive-fixtures
+ bump: major
+- name: polkadot-sdk
+ bump: major
diff --git a/scripts/generate-umbrella.py b/scripts/generate-umbrella.py
index 8326909c3449..ae3873180553 100644
--- a/scripts/generate-umbrella.py
+++ b/scripts/generate-umbrella.py
@@ -120,6 +120,8 @@ def main(path, version):
"edition": { "workspace": True },
"authors": { "workspace": True },
"description": "Polkadot SDK umbrella crate.",
+ "homepage": { "workspace": True },
+ "repository": { "workspace": True },
"license": "Apache-2.0",
"metadata": { "docs": { "rs": {
"features": ["runtime-full", "node"],
diff --git a/substrate/bin/node/runtime/src/lib.rs b/substrate/bin/node/runtime/src/lib.rs
index bff263548087..faffcd23fbcf 100644
--- a/substrate/bin/node/runtime/src/lib.rs
+++ b/substrate/bin/node/runtime/src/lib.rs
@@ -3218,18 +3218,9 @@ impl_runtime_apis! {
System::account_nonce(account)
}
- fn eth_transact(
- from: H160,
- dest: Option<H160>,
- value: U256,
- input: Vec<u8>,
- gas_limit: Option<Weight>,
- storage_deposit_limit: Option<Balance>,
- ) -> pallet_revive::EthContractResult<Balance>
+ fn eth_transact(tx: pallet_revive::evm::GenericTransaction) -> Result<pallet_revive::EthTransactInfo<Balance>, pallet_revive::EthTransactError>
{
- use pallet_revive::AddressMapper;
let blockweights: BlockWeights = <Runtime as frame_system::Config>::BlockWeights::get();
- let origin = <Runtime as pallet_revive::Config>::AddressMapper::to_account_id(&from);
let encoded_size = |pallet_call| {
let call = RuntimeCall::Revive(pallet_call);
@@ -3238,15 +3229,9 @@ impl_runtime_apis! {
};
Revive::bare_eth_transact(
- origin,
- dest,
- value,
- input,
- gas_limit.unwrap_or(blockweights.max_block),
- storage_deposit_limit.unwrap_or(u128::MAX),
+ tx,
+ blockweights.max_block,
encoded_size,
- pallet_revive::DebugInfo::UnsafeDebug,
- pallet_revive::CollectEvents::UnsafeCollect,
)
}
@@ -3263,7 +3248,7 @@ impl_runtime_apis! {
dest,
value,
gas_limit.unwrap_or(RuntimeBlockWeights::get().max_block),
- storage_deposit_limit.unwrap_or(u128::MAX),
+ pallet_revive::DepositLimit::Balance(storage_deposit_limit.unwrap_or(u128::MAX)),
input_data,
pallet_revive::DebugInfo::UnsafeDebug,
pallet_revive::CollectEvents::UnsafeCollect,
@@ -3284,7 +3269,7 @@ impl_runtime_apis! {
RuntimeOrigin::signed(origin),
value,
gas_limit.unwrap_or(RuntimeBlockWeights::get().max_block),
- storage_deposit_limit.unwrap_or(u128::MAX),
+ pallet_revive::DepositLimit::Balance(storage_deposit_limit.unwrap_or(u128::MAX)),
code,
data,
salt,
diff --git a/substrate/bin/node/testing/Cargo.toml b/substrate/bin/node/testing/Cargo.toml
index 16112386ad7c..1972c03a368b 100644
--- a/substrate/bin/node/testing/Cargo.toml
+++ b/substrate/bin/node/testing/Cargo.toml
@@ -37,7 +37,7 @@ sc-client-api = { workspace = true, default-features = true }
sc-client-db = { features = ["rocksdb"], workspace = true, default-features = true }
sc-consensus = { workspace = true, default-features = true }
sc-executor = { workspace = true, default-features = true }
-sc-service = { features = ["rocksdb", "test-helpers"], workspace = true, default-features = true }
+sc-service = { features = ["rocksdb"], workspace = true, default-features = true }
sp-api = { workspace = true, default-features = true }
sp-block-builder = { workspace = true, default-features = true }
sp-blockchain = { workspace = true, default-features = true }
diff --git a/substrate/client/network/src/litep2p/mod.rs b/substrate/client/network/src/litep2p/mod.rs
index 6d3575fc2b6b..b6d64b34d64a 100644
--- a/substrate/client/network/src/litep2p/mod.rs
+++ b/substrate/client/network/src/litep2p/mod.rs
@@ -753,7 +753,7 @@ impl NetworkBackend for Litep2pNetworkBac
}
if self.litep2p.add_known_address(peer.into(), iter::once(address.clone())) == 0usize {
- log::warn!(
+ log::debug!(
target: LOG_TARGET,
"couldn't add known address ({address}) for {peer:?}, unsupported transport"
);
diff --git a/substrate/client/network/src/protocol_controller.rs b/substrate/client/network/src/protocol_controller.rs
index af7adb50907f..11f5321294d0 100644
--- a/substrate/client/network/src/protocol_controller.rs
+++ b/substrate/client/network/src/protocol_controller.rs
@@ -464,7 +464,7 @@ impl ProtocolController {
/// maintain connections with such peers.
fn on_add_reserved_peer(&mut self, peer_id: PeerId) {
if self.reserved_nodes.contains_key(&peer_id) {
- warn!(
+ debug!(
target: LOG_TARGET,
"Trying to add an already reserved node {peer_id} as reserved on {:?}.",
self.set_id,
diff --git a/substrate/client/network/test/Cargo.toml b/substrate/client/network/test/Cargo.toml
index ebece1762f29..6340d1dfb2f4 100644
--- a/substrate/client/network/test/Cargo.toml
+++ b/substrate/client/network/test/Cargo.toml
@@ -33,7 +33,7 @@ sc-network-types = { workspace = true, default-features = true }
sc-utils = { workspace = true, default-features = true }
sc-network-light = { workspace = true, default-features = true }
sc-network-sync = { workspace = true, default-features = true }
-sc-service = { features = ["test-helpers"], workspace = true }
+sc-service = { workspace = true }
sp-blockchain = { workspace = true, default-features = true }
sp-consensus = { workspace = true, default-features = true }
sp-core = { workspace = true, default-features = true }
diff --git a/substrate/client/rpc-spec-v2/Cargo.toml b/substrate/client/rpc-spec-v2/Cargo.toml
index b304bc905925..70f68436767f 100644
--- a/substrate/client/rpc-spec-v2/Cargo.toml
+++ b/substrate/client/rpc-spec-v2/Cargo.toml
@@ -56,7 +56,7 @@ sp-consensus = { workspace = true, default-features = true }
sp-externalities = { workspace = true, default-features = true }
sp-maybe-compressed-blob = { workspace = true, default-features = true }
sc-block-builder = { workspace = true, default-features = true }
-sc-service = { features = ["test-helpers"], workspace = true, default-features = true }
+sc-service = { workspace = true, default-features = true }
sc-rpc = { workspace = true, default-features = true, features = ["test-helpers"] }
assert_matches = { workspace = true }
pretty_assertions = { workspace = true }
diff --git a/substrate/client/rpc-spec-v2/src/archive/api.rs b/substrate/client/rpc-spec-v2/src/archive/api.rs
index dcfeaecb147b..a205d0502c93 100644
--- a/substrate/client/rpc-spec-v2/src/archive/api.rs
+++ b/substrate/client/rpc-spec-v2/src/archive/api.rs
@@ -20,8 +20,7 @@
use crate::{
common::events::{
- ArchiveStorageDiffEvent, ArchiveStorageDiffItem, ArchiveStorageResult,
- PaginatedStorageQuery,
+ ArchiveStorageDiffEvent, ArchiveStorageDiffItem, ArchiveStorageEvent, StorageQuery,
},
MethodResult,
};
@@ -100,13 +99,17 @@ pub trait ArchiveApi<Hash> {
/// # Unstable
///
/// This method is unstable and subject to change in the future.
- #[method(name = "archive_unstable_storage", blocking)]
+ #[subscription(
+ name = "archive_unstable_storage" => "archive_unstable_storageEvent",
+ unsubscribe = "archive_unstable_stopStorage",
+ item = ArchiveStorageEvent,
+ )]
fn archive_unstable_storage(
&self,
hash: Hash,
- items: Vec<PaginatedStorageQuery<String>>,
+ items: Vec<StorageQuery<String>>,
child_trie: Option<String>,
- ) -> RpcResult<ArchiveStorageResult>;
+ );
/// Returns the storage difference between two blocks.
///
diff --git a/substrate/client/rpc-spec-v2/src/archive/archive.rs b/substrate/client/rpc-spec-v2/src/archive/archive.rs
index 55054d91d85d..62e44a016241 100644
--- a/substrate/client/rpc-spec-v2/src/archive/archive.rs
+++ b/substrate/client/rpc-spec-v2/src/archive/archive.rs
@@ -20,13 +20,13 @@
use crate::{
archive::{
- archive_storage::{ArchiveStorage, ArchiveStorageDiff},
- error::Error as ArchiveError,
- ArchiveApiServer,
+ archive_storage::ArchiveStorageDiff, error::Error as ArchiveError, ArchiveApiServer,
},
- common::events::{
- ArchiveStorageDiffEvent, ArchiveStorageDiffItem, ArchiveStorageResult,
- PaginatedStorageQuery,
+ common::{
+ events::{
+ ArchiveStorageDiffEvent, ArchiveStorageDiffItem, ArchiveStorageEvent, StorageQuery,
+ },
+ storage::{QueryResult, StorageSubscriptionClient},
},
hex_string, MethodResult, SubscriptionTaskExecutor,
};
@@ -57,42 +57,12 @@ use tokio::sync::mpsc;
pub(crate) const LOG_TARGET: &str = "rpc-spec-v2::archive";
-/// The configuration of [`Archive`].
-pub struct ArchiveConfig {
- /// The maximum number of items the `archive_storage` can return for a descendant query before
- /// pagination is required.
- pub max_descendant_responses: usize,
- /// The maximum number of queried items allowed for the `archive_storage` at a time.
- pub max_queried_items: usize,
-}
-
-/// The maximum number of items the `archive_storage` can return for a descendant query before
-/// pagination is required.
-///
-/// Note: this is identical to the `chainHead` value.
-const MAX_DESCENDANT_RESPONSES: usize = 5;
-
-/// The maximum number of queried items allowed for the `archive_storage` at a time.
-///
-/// Note: A queried item can also be a descendant query which can return up to
-/// `MAX_DESCENDANT_RESPONSES`.
-const MAX_QUERIED_ITEMS: usize = 8;
-
/// The buffer capacity for each storage query.
///
/// This is small because the underlying JSON-RPC server has
/// its own buffer capacity per connection as well.
const STORAGE_QUERY_BUF: usize = 16;
-impl Default for ArchiveConfig {
- fn default() -> Self {
- Self {
- max_descendant_responses: MAX_DESCENDANT_RESPONSES,
- max_queried_items: MAX_QUERIED_ITEMS,
- }
- }
-}
-
/// An API for archive RPC calls.
pub struct Archive<BE: Backend<Block>, Block: BlockT, Client> {
/// Substrate client.
@@ -103,11 +73,6 @@ pub struct Archive<BE: Backend<Block>, Block: BlockT, Client> {
executor: SubscriptionTaskExecutor,
/// The hexadecimal encoded hash of the genesis block.
genesis_hash: String,
- /// The maximum number of items the `archive_storage` can return for a descendant query before
- /// pagination is required.
- storage_max_descendant_responses: usize,
- /// The maximum number of queried items allowed for the `archive_storage` at a time.
- storage_max_queried_items: usize,
/// Phantom member to pin the block type.
- _phantom: PhantomData<Block>,
}
@@ -119,18 +84,9 @@ impl<BE: Backend<Block>, Block: BlockT, Client> Archive<BE, Block, Client> {
backend: Arc<BE>,
genesis_hash: GenesisHash,
executor: SubscriptionTaskExecutor,
- config: ArchiveConfig,
) -> Self {
let genesis_hash = hex_string(&genesis_hash.as_ref());
- Self {
- client,
- backend,
- executor,
- genesis_hash,
- storage_max_descendant_responses: config.max_descendant_responses,
- storage_max_queried_items: config.max_queried_items,
- _phantom: PhantomData,
- }
+ Self { client, backend, executor, genesis_hash, _phantom: PhantomData }
}
}
@@ -260,47 +216,53 @@ where
fn archive_unstable_storage(
&self,
+ pending: PendingSubscriptionSink,
hash: Block::Hash,
- items: Vec<PaginatedStorageQuery<String>>,
+ items: Vec<StorageQuery<String>>,
child_trie: Option<String>,
- ) -> RpcResult<ArchiveStorageResult> {
- let items = items
- .into_iter()
- .map(|query| {
- let key = StorageKey(parse_hex_param(query.key)?);
- let pagination_start_key = query
- .pagination_start_key
- .map(|key| parse_hex_param(key).map(|key| StorageKey(key)))
- .transpose()?;
-
- // Paginated start key is only supported
- if pagination_start_key.is_some() && !query.query_type.is_descendant_query() {
- return Err(ArchiveError::InvalidParam(
- "Pagination start key is only supported for descendants queries"
- .to_string(),
- ))
- }
+ ) {
+ let mut storage_client =
+ StorageSubscriptionClient::<Client, Block, BE>::new(self.client.clone());
+
+ let fut = async move {
+ let Ok(mut sink) = pending.accept().await.map(Subscription::from) else { return };
- Ok(PaginatedStorageQuery {
- key,
- query_type: query.query_type,
- pagination_start_key,
+ let items = match items
+ .into_iter()
+ .map(|query| {
+ let key = StorageKey(parse_hex_param(query.key)?);
+ Ok(StorageQuery { key, query_type: query.query_type })
})
- })
- .collect::, ArchiveError>>()?;
+ .collect::, ArchiveError>>()
+ {
+ Ok(items) => items,
+ Err(error) => {
+ let _ = sink.send(&ArchiveStorageEvent::err(error.to_string()));
+ return
+ },
+ };
- let child_trie = child_trie
- .map(|child_trie| parse_hex_param(child_trie))
- .transpose()?
- .map(ChildInfo::new_default_from_vec);
+ let child_trie = child_trie.map(|child_trie| parse_hex_param(child_trie)).transpose();
+ let child_trie = match child_trie {
+ Ok(child_trie) => child_trie.map(ChildInfo::new_default_from_vec),
+ Err(error) => {
+ let _ = sink.send(&ArchiveStorageEvent::err(error.to_string()));
+ return
+ },
+ };
- let storage_client = ArchiveStorage::new(
- self.client.clone(),
- self.storage_max_descendant_responses,
- self.storage_max_queried_items,
- );
+ let (tx, mut rx) = tokio::sync::mpsc::channel(STORAGE_QUERY_BUF);
+ let storage_fut = storage_client.generate_events(hash, items, child_trie, tx);
- Ok(storage_client.handle_query(hash, items, child_trie))
+ // We don't care about the return value of this join:
+ // - process_storage_events might encounter an error (if the client disconnected)
+ // - storage_fut might encounter an error while processing the trie queries and
+ // the error is propagated via the sink.
+ let _ = futures::future::join(storage_fut, process_storage_events(&mut rx, &mut sink))
+ .await;
+ };
+
+ self.executor.spawn("substrate-rpc-subscription", Some("rpc"), fut.boxed());
}
fn archive_unstable_storage_diff(
@@ -337,24 +299,74 @@ where
// - process_events might encounter an error (if the client disconnected)
// - storage_fut might encounter an error while processing a trie queries and
// the error is propagated via the sink.
- let _ = futures::future::join(storage_fut, process_events(&mut rx, &mut sink)).await;
+ let _ =
+ futures::future::join(storage_fut, process_storage_diff_events(&mut rx, &mut sink))
+ .await;
};
self.executor.spawn("substrate-rpc-subscription", Some("rpc"), fut.boxed());
}
}
-/// Sends all the events to the sink.
-async fn process_events(rx: &mut mpsc::Receiver<ArchiveStorageDiffEvent>, sink: &mut Subscription) {
- while let Some(event) = rx.recv().await {
- if event.is_done() {
- log::debug!(target: LOG_TARGET, "Finished processing partial trie query");
- } else if event.is_err() {
- log::debug!(target: LOG_TARGET, "Error encountered while processing partial trie query");
+/// Sends all the events of the storage_diff method to the sink.
+async fn process_storage_diff_events(
+ rx: &mut mpsc::Receiver<ArchiveStorageDiffEvent>,
+ sink: &mut Subscription,
+) {
+ loop {
+ tokio::select! {
+ _ = sink.closed() => {
+ return
+ },
+
+ maybe_event = rx.recv() => {
+ let Some(event) = maybe_event else {
+ break;
+ };
+
+ if event.is_done() {
+ log::debug!(target: LOG_TARGET, "Finished processing partial trie query");
+ } else if event.is_err() {
+ log::debug!(target: LOG_TARGET, "Error encountered while processing partial trie query");
+ }
+
+ if sink.send(&event).await.is_err() {
+ return
+ }
+ }
}
+ }
+}
+
+/// Sends all the events of the storage method to the sink.
+async fn process_storage_events(rx: &mut mpsc::Receiver<QueryResult>, sink: &mut Subscription) {
+ loop {
+ tokio::select! {
+ _ = sink.closed() => {
+ break
+ }
+
+ maybe_storage = rx.recv() => {
+ let Some(event) = maybe_storage else {
+ break;
+ };
+
+ match event {
+ Ok(None) => continue,
+
+ Ok(Some(event)) =>
+ if sink.send(&ArchiveStorageEvent::result(event)).await.is_err() {
+ return
+ },
- if sink.send(&event).await.is_err() {
- return
+ Err(error) => {
+ let _ = sink.send(&ArchiveStorageEvent::err(error)).await;
+ return
+ }
+ }
+ }
}
}
+
+ let _ = sink.send(&ArchiveStorageEvent::StorageDone).await;
}
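The forwarding loop introduced above follows a common pattern: select on the subscription being closed, otherwise drain a bounded channel, and finish with a terminal event. The standalone sketch below reproduces that pattern with plain tokio channels only; `forward_events` and its `String` payloads are hypothetical stand-ins, not the jsonrpsee `Subscription` API.

```rust
// Sketch of the drain-and-forward pattern, assuming tokio with the "full" feature.
use tokio::sync::mpsc;

async fn forward_events(
    mut rx: mpsc::Receiver<Result<Option<String>, String>>,
    sink: mpsc::Sender<String>,
) {
    loop {
        tokio::select! {
            // The consumer dropped its receiver: nothing left to forward to.
            _ = sink.closed() => return,

            maybe_event = rx.recv() => {
                let Some(event) = maybe_event else { break };
                match event {
                    // No value at this key: nothing to report.
                    Ok(None) => continue,
                    // A value: forward it, stop early if the consumer is gone.
                    Ok(Some(value)) => {
                        if sink.send(value).await.is_err() { return }
                    },
                    // An error ends the stream after being reported.
                    Err(error) => {
                        let _ = sink.send(format!("error: {error}")).await;
                        return
                    },
                }
            }
        }
    }
    // The producer finished: emit a terminal marker.
    let _ = sink.send("done".to_string()).await;
}

#[tokio::main]
async fn main() {
    let (tx, rx) = mpsc::channel(16);
    let (sink_tx, mut sink_rx) = mpsc::channel(16);
    tokio::spawn(forward_events(rx, sink_tx));

    tx.send(Ok(Some("0x01".to_string()))).await.unwrap();
    tx.send(Ok(None)).await.unwrap();
    drop(tx); // the producer is done, which ends the loop above

    while let Some(msg) = sink_rx.recv().await {
        println!("{msg}"); // prints "0x01" then "done"
    }
}
```

The bounded channel plus the `sink.closed()` branch is what gives the storage subscription backpressure and an early exit when the client disconnects.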
diff --git a/substrate/client/rpc-spec-v2/src/archive/archive_storage.rs b/substrate/client/rpc-spec-v2/src/archive/archive_storage.rs
index 5a3920882f00..390db765a48f 100644
--- a/substrate/client/rpc-spec-v2/src/archive/archive_storage.rs
+++ b/substrate/client/rpc-spec-v2/src/archive/archive_storage.rs
@@ -33,114 +33,13 @@ use crate::{
common::{
events::{
ArchiveStorageDiffEvent, ArchiveStorageDiffItem, ArchiveStorageDiffOperationType,
- ArchiveStorageDiffResult, ArchiveStorageDiffType, ArchiveStorageResult,
- PaginatedStorageQuery, StorageQueryType, StorageResult,
+ ArchiveStorageDiffResult, ArchiveStorageDiffType, StorageResult,
},
- storage::{IterQueryType, QueryIter, Storage},
+ storage::Storage,
},
};
use tokio::sync::mpsc;
-/// Generates the events of the `archive_storage` method.
-pub struct ArchiveStorage<Client, Block, BE> {
- /// Storage client.
- client: Storage<Client, Block, BE>,
- /// The maximum number of responses the API can return for a descendant query at a time.
- storage_max_descendant_responses: usize,
- /// The maximum number of queried items allowed for the `archive_storage` at a time.
- storage_max_queried_items: usize,
-}
-
-impl<Client, Block, BE> ArchiveStorage<Client, Block, BE> {
- /// Constructs a new [`ArchiveStorage`].
- pub fn new(
- client: Arc<Client>,
- storage_max_descendant_responses: usize,
- storage_max_queried_items: usize,
- ) -> Self {
- Self {
- client: Storage::new(client),
- storage_max_descendant_responses,
- storage_max_queried_items,
- }
- }
-}
-
-impl<Client, Block, BE> ArchiveStorage<Client, Block, BE>
-where
- Block: BlockT + 'static,
- BE: Backend<Block> + 'static,
- Client: StorageProvider<Block, BE> + 'static,
-{
- /// Generate the response of the `archive_storage` method.
- pub fn handle_query(
- &self,
- hash: Block::Hash,
- mut items: Vec<PaginatedStorageQuery<StorageKey>>,
- child_key: Option<ChildInfo>,
- ) -> ArchiveStorageResult {
- let discarded_items = items.len().saturating_sub(self.storage_max_queried_items);
- items.truncate(self.storage_max_queried_items);
-
- let mut storage_results = Vec::with_capacity(items.len());
- for item in items {
- match item.query_type {
- StorageQueryType::Value => {
- match self.client.query_value(hash, &item.key, child_key.as_ref()) {
- Ok(Some(value)) => storage_results.push(value),
- Ok(None) => continue,
- Err(error) => return ArchiveStorageResult::err(error),
- }
- },
- StorageQueryType::Hash =>
- match self.client.query_hash(hash, &item.key, child_key.as_ref()) {
- Ok(Some(value)) => storage_results.push(value),
- Ok(None) => continue,
- Err(error) => return ArchiveStorageResult::err(error),
- },
- StorageQueryType::ClosestDescendantMerkleValue =>
- match self.client.query_merkle_value(hash, &item.key, child_key.as_ref()) {
- Ok(Some(value)) => storage_results.push(value),
- Ok(None) => continue,
- Err(error) => return ArchiveStorageResult::err(error),
- },
- StorageQueryType::DescendantsValues => {
- match self.client.query_iter_pagination(
- QueryIter {
- query_key: item.key,
- ty: IterQueryType::Value,
- pagination_start_key: item.pagination_start_key,
- },
- hash,
- child_key.as_ref(),
- self.storage_max_descendant_responses,
- ) {
- Ok((results, _)) => storage_results.extend(results),
- Err(error) => return ArchiveStorageResult::err(error),
- }
- },
- StorageQueryType::DescendantsHashes => {
- match self.client.query_iter_pagination(
- QueryIter {
- query_key: item.key,
- ty: IterQueryType::Hash,
- pagination_start_key: item.pagination_start_key,
- },
- hash,
- child_key.as_ref(),
- self.storage_max_descendant_responses,
- ) {
- Ok((results, _)) => storage_results.extend(results),
- Err(error) => return ArchiveStorageResult::err(error),
- }
- },
- };
- }
-
- ArchiveStorageResult::ok(storage_results, discarded_items)
- }
-}
-
/// Parse hex-encoded string parameter as raw bytes.
///
/// If the parsing fails, returns an error propagated to the RPC method.
diff --git a/substrate/client/rpc-spec-v2/src/archive/mod.rs b/substrate/client/rpc-spec-v2/src/archive/mod.rs
index 5f020c203eab..14fa104c113a 100644
--- a/substrate/client/rpc-spec-v2/src/archive/mod.rs
+++ b/substrate/client/rpc-spec-v2/src/archive/mod.rs
@@ -32,4 +32,4 @@ pub mod archive;
pub mod error;
pub use api::ArchiveApiServer;
-pub use archive::{Archive, ArchiveConfig};
+pub use archive::Archive;
diff --git a/substrate/client/rpc-spec-v2/src/archive/tests.rs b/substrate/client/rpc-spec-v2/src/archive/tests.rs
index 994c5d28bd61..cddaafde6659 100644
--- a/substrate/client/rpc-spec-v2/src/archive/tests.rs
+++ b/substrate/client/rpc-spec-v2/src/archive/tests.rs
@@ -19,16 +19,13 @@
use crate::{
common::events::{
ArchiveStorageDiffEvent, ArchiveStorageDiffItem, ArchiveStorageDiffOperationType,
- ArchiveStorageDiffResult, ArchiveStorageDiffType, ArchiveStorageMethodOk,
- ArchiveStorageResult, PaginatedStorageQuery, StorageQueryType, StorageResultType,
+ ArchiveStorageDiffResult, ArchiveStorageDiffType, ArchiveStorageEvent, StorageQuery,
+ StorageQueryType, StorageResult, StorageResultType,
},
hex_string, MethodResult,
};
-use super::{
- archive::{Archive, ArchiveConfig},
- *,
-};
+use super::{archive::Archive, *};
use assert_matches::assert_matches;
use codec::{Decode, Encode};
@@ -55,8 +52,6 @@ use substrate_test_runtime_client::{
const CHAIN_GENESIS: [u8; 32] = [0; 32];
const INVALID_HASH: [u8; 32] = [1; 32];
-const MAX_PAGINATION_LIMIT: usize = 5;
-const MAX_QUERIED_LIMIT: usize = 5;
const KEY: &[u8] = b":mock";
const VALUE: &[u8] = b"hello world";
const CHILD_STORAGE_KEY: &[u8] = b"child";
@@ -65,10 +60,7 @@ const CHILD_VALUE: &[u8] = b"child value";
type Header = substrate_test_runtime_client::runtime::Header;
type Block = substrate_test_runtime_client::runtime::Block;
-fn setup_api(
- max_descendant_responses: usize,
- max_queried_items: usize,
-) -> (Arc<Client<Backend>>, RpcModule<Archive<Backend, Block, Client<Backend>>>) {
+fn setup_api() -> (Arc<Client<Backend>>, RpcModule<Archive<Backend, Block, Client<Backend>>>) {
let child_info = ChildInfo::new_default(CHILD_STORAGE_KEY);
let builder = TestClientBuilder::new().add_extra_child_storage(
&child_info,
@@ -83,7 +75,6 @@ fn setup_api(
backend,
CHAIN_GENESIS,
Arc::new(TokioTestExecutor::default()),
- ArchiveConfig { max_descendant_responses, max_queried_items },
)
.into_rpc();
@@ -101,7 +92,7 @@ async fn get_next_event(sub: &mut RpcSubscriptio
#[tokio::test]
async fn archive_genesis() {
- let (_client, api) = setup_api(MAX_PAGINATION_LIMIT, MAX_QUERIED_LIMIT);
+ let (_client, api) = setup_api();
let genesis: String =
api.call("archive_unstable_genesisHash", EmptyParams::new()).await.unwrap();
@@ -110,7 +101,7 @@ async fn archive_genesis() {
#[tokio::test]
async fn archive_body() {
- let (client, api) = setup_api(MAX_PAGINATION_LIMIT, MAX_QUERIED_LIMIT);
+ let (client, api) = setup_api();
// Invalid block hash.
let invalid_hash = hex_string(&INVALID_HASH);
@@ -144,7 +135,7 @@ async fn archive_body() {
#[tokio::test]
async fn archive_header() {
- let (client, api) = setup_api(MAX_PAGINATION_LIMIT, MAX_QUERIED_LIMIT);
+ let (client, api) = setup_api();
// Invalid block hash.
let invalid_hash = hex_string(&INVALID_HASH);
@@ -178,7 +169,7 @@ async fn archive_header() {
#[tokio::test]
async fn archive_finalized_height() {
- let (client, api) = setup_api(MAX_PAGINATION_LIMIT, MAX_QUERIED_LIMIT);
+ let (client, api) = setup_api();
let client_height: u32 = client.info().finalized_number.saturated_into();
@@ -190,7 +181,7 @@ async fn archive_finalized_height() {
#[tokio::test]
async fn archive_hash_by_height() {
- let (client, api) = setup_api(MAX_PAGINATION_LIMIT, MAX_QUERIED_LIMIT);
+ let (client, api) = setup_api();
// Genesis height.
let hashes: Vec<String> = api.call("archive_unstable_hashByHeight", [0]).await.unwrap();
@@ -296,7 +287,7 @@ async fn archive_hash_by_height() {
#[tokio::test]
async fn archive_call() {
- let (client, api) = setup_api(MAX_PAGINATION_LIMIT, MAX_QUERIED_LIMIT);
+ let (client, api) = setup_api();
let invalid_hash = hex_string(&INVALID_HASH);
// Invalid parameter (non-hex).
@@ -355,7 +346,7 @@ async fn archive_call() {
#[tokio::test]
async fn archive_storage_hashes_values() {
- let (client, api) = setup_api(MAX_PAGINATION_LIMIT, MAX_QUERIED_LIMIT);
+ let (client, api) = setup_api();
let block = BlockBuilderBuilder::new(&*client)
.on_parent_block(client.chain_info().genesis_hash)
@@ -369,42 +360,23 @@ async fn archive_storage_hashes_values() {
let block_hash = format!("{:?}", block.header.hash());
let key = hex_string(&KEY);
- let items: Vec<PaginatedStorageQuery<String>> = vec![
- PaginatedStorageQuery {
- key: key.clone(),
- query_type: StorageQueryType::DescendantsHashes,
- pagination_start_key: None,
- },
- PaginatedStorageQuery {
- key: key.clone(),
- query_type: StorageQueryType::DescendantsValues,
- pagination_start_key: None,
- },
- PaginatedStorageQuery {
- key: key.clone(),
- query_type: StorageQueryType::Hash,
- pagination_start_key: None,
- },
- PaginatedStorageQuery {
- key: key.clone(),
- query_type: StorageQueryType::Value,
- pagination_start_key: None,
- },
+ let items: Vec<StorageQuery<String>> = vec![
+ StorageQuery { key: key.clone(), query_type: StorageQueryType::DescendantsHashes },
+ StorageQuery { key: key.clone(), query_type: StorageQueryType::DescendantsValues },
+ StorageQuery { key: key.clone(), query_type: StorageQueryType::Hash },
+ StorageQuery { key: key.clone(), query_type: StorageQueryType::Value },
];
- let result: ArchiveStorageResult = api
- .call("archive_unstable_storage", rpc_params![&block_hash, items.clone()])
+ let mut sub = api
+ .subscribe_unbounded("archive_unstable_storage", rpc_params![&block_hash, items.clone()])
.await
.unwrap();
- match result {
- ArchiveStorageResult::Ok(ArchiveStorageMethodOk { result, discarded_items }) => {
- // Key has not been imported yet.
- assert_eq!(result.len(), 0);
- assert_eq!(discarded_items, 0);
- },
- _ => panic!("Unexpected result"),
- };
+ // Key has not been imported yet.
+ assert_eq!(
+ get_next_event::<ArchiveStorageEvent>(&mut sub).await,
+ ArchiveStorageEvent::StorageDone,
+ );
// Import a block with the given key value pair.
let mut builder = BlockBuilderBuilder::new(&*client)
@@ -420,32 +392,103 @@ async fn archive_storage_hashes_values() {
let expected_hash = format!("{:?}", Blake2Hasher::hash(&VALUE));
let expected_value = hex_string(&VALUE);
- let result: ArchiveStorageResult = api
- .call("archive_unstable_storage", rpc_params![&block_hash, items])
+ let mut sub = api
+ .subscribe_unbounded("archive_unstable_storage", rpc_params![&block_hash, items])
.await
.unwrap();
- match result {
- ArchiveStorageResult::Ok(ArchiveStorageMethodOk { result, discarded_items }) => {
- assert_eq!(result.len(), 4);
- assert_eq!(discarded_items, 0);
-
- assert_eq!(result[0].key, key);
- assert_eq!(result[0].result, StorageResultType::Hash(expected_hash.clone()));
- assert_eq!(result[1].key, key);
- assert_eq!(result[1].result, StorageResultType::Value(expected_value.clone()));
- assert_eq!(result[2].key, key);
- assert_eq!(result[2].result, StorageResultType::Hash(expected_hash));
- assert_eq!(result[3].key, key);
- assert_eq!(result[3].result, StorageResultType::Value(expected_value));
- },
- _ => panic!("Unexpected result"),
- };
+ assert_eq!(
+ get_next_event::<ArchiveStorageEvent>(&mut sub).await,
+ ArchiveStorageEvent::Storage(StorageResult {
+ key: key.clone(),
+ result: StorageResultType::Hash(expected_hash.clone()),
+ child_trie_key: None,
+ }),
+ );
+
+ assert_eq!(
+ get_next_event::<ArchiveStorageEvent>(&mut sub).await,
+ ArchiveStorageEvent::Storage(StorageResult {
+ key: key.clone(),
+ result: StorageResultType::Value(expected_value.clone()),
+ child_trie_key: None,
+ }),
+ );
+
+ assert_eq!(
+ get_next_event::<ArchiveStorageEvent>(&mut sub).await,
+ ArchiveStorageEvent::Storage(StorageResult {
+ key: key.clone(),
+ result: StorageResultType::Hash(expected_hash),
+ child_trie_key: None,
+ }),
+ );
+
+ assert_eq!(
+ get_next_event::<ArchiveStorageEvent>(&mut sub).await,
+ ArchiveStorageEvent::Storage(StorageResult {
+ key: key.clone(),
+ result: StorageResultType::Value(expected_value),
+ child_trie_key: None,
+ }),
+ );
+
+ assert_matches!(
+ get_next_event::<ArchiveStorageEvent>(&mut sub).await,
+ ArchiveStorageEvent::StorageDone
+ );
+}
+
+#[tokio::test]
+async fn archive_storage_hashes_values_child_trie() {
+ let (client, api) = setup_api();
+
+ // Get child storage values set in `setup_api`.
+ let child_info = hex_string(&CHILD_STORAGE_KEY);
+ let key = hex_string(&KEY);
+ let genesis_hash = format!("{:?}", client.genesis_hash());
+ let expected_hash = format!("{:?}", Blake2Hasher::hash(&CHILD_VALUE));
+ let expected_value = hex_string(&CHILD_VALUE);
+
+ let items: Vec<StorageQuery<String>> = vec![
+ StorageQuery { key: key.clone(), query_type: StorageQueryType::DescendantsHashes },
+ StorageQuery { key: key.clone(), query_type: StorageQueryType::DescendantsValues },
+ ];
+ let mut sub = api
+ .subscribe_unbounded(
+ "archive_unstable_storage",
+ rpc_params![&genesis_hash, items, &child_info],
+ )
+ .await
+ .unwrap();
+
+ assert_eq!(
+ get_next_event::<ArchiveStorageEvent>(&mut sub).await,
+ ArchiveStorageEvent::Storage(StorageResult {
+ key: key.clone(),
+ result: StorageResultType::Hash(expected_hash.clone()),
+ child_trie_key: Some(child_info.clone()),
+ })
+ );
+
+ assert_eq!(
+ get_next_event::<ArchiveStorageEvent>(&mut sub).await,
+ ArchiveStorageEvent::Storage(StorageResult {
+ key: key.clone(),
+ result: StorageResultType::Value(expected_value.clone()),
+ child_trie_key: Some(child_info.clone()),
+ })
+ );
+
+ assert_eq!(
+ get_next_event::<ArchiveStorageEvent>(&mut sub).await,
+ ArchiveStorageEvent::StorageDone,
+ );
}
#[tokio::test]
async fn archive_storage_closest_merkle_value() {
- let (client, api) = setup_api(MAX_PAGINATION_LIMIT, MAX_QUERIED_LIMIT);
+ let (client, api) = setup_api();
/// The core of this test.
///
@@ -457,55 +500,47 @@ async fn archive_storage_closest_merkle_value() {
api: &RpcModule<Archive<Backend, Block, Client<Backend>>>,
block_hash: String,
) -> HashMap<String, String> {
- let result: ArchiveStorageResult = api
- .call(
+ let mut sub = api
+ .subscribe_unbounded(
"archive_unstable_storage",
rpc_params![
&block_hash,
vec![
- PaginatedStorageQuery {
+ StorageQuery {
key: hex_string(b":AAAA"),
query_type: StorageQueryType::ClosestDescendantMerkleValue,
- pagination_start_key: None,
},
- PaginatedStorageQuery {
+ StorageQuery {
key: hex_string(b":AAAB"),
query_type: StorageQueryType::ClosestDescendantMerkleValue,
- pagination_start_key: None,
},
// Key with descendant.
- PaginatedStorageQuery {
+ StorageQuery {
key: hex_string(b":A"),
query_type: StorageQueryType::ClosestDescendantMerkleValue,
- pagination_start_key: None,
},
- PaginatedStorageQuery {
+ StorageQuery {
key: hex_string(b":AA"),
query_type: StorageQueryType::ClosestDescendantMerkleValue,
- pagination_start_key: None,
},
// Keys below this comment do not produce a result.
// Key that exceed the keyspace of the trie.
- PaginatedStorageQuery {
+ StorageQuery {
key: hex_string(b":AAAAX"),
query_type: StorageQueryType::ClosestDescendantMerkleValue,
- pagination_start_key: None,
},
- PaginatedStorageQuery {
+ StorageQuery {
key: hex_string(b":AAABX"),
query_type: StorageQueryType::ClosestDescendantMerkleValue,
- pagination_start_key: None,
},
// Key that are not part of the trie.
- PaginatedStorageQuery {
+ StorageQuery {
key: hex_string(b":AAX"),
query_type: StorageQueryType::ClosestDescendantMerkleValue,
- pagination_start_key: None,
},
- PaginatedStorageQuery {
+ StorageQuery {
key: hex_string(b":AAAX"),
query_type: StorageQueryType::ClosestDescendantMerkleValue,
- pagination_start_key: None,
},
]
],
@@ -513,19 +548,21 @@ async fn archive_storage_closest_merkle_value() {
.await
.unwrap();
- let merkle_values: HashMap<_, _> = match result {
- ArchiveStorageResult::Ok(ArchiveStorageMethodOk { result, .. }) => result
- .into_iter()
- .map(|res| {
- let value = match res.result {
+ let mut merkle_values = HashMap::new();
+ loop {
+ let event = get_next_event::<ArchiveStorageEvent>(&mut sub).await;
+ match event {
+ ArchiveStorageEvent::Storage(result) => {
+ let str_result = match result.result {
StorageResultType::ClosestDescendantMerkleValue(value) => value,
- _ => panic!("Unexpected StorageResultType"),
+ _ => panic!("Unexpected result type"),
};
- (res.key, value)
- })
- .collect(),
- _ => panic!("Unexpected result"),
- };
+ merkle_values.insert(result.key, str_result);
+ },
+ ArchiveStorageEvent::StorageError(err) => panic!("Unexpected error {err:?}"),
+ ArchiveStorageEvent::StorageDone => break,
+ }
+ }
// Response for AAAA, AAAB, A and AA.
assert_eq!(merkle_values.len(), 4);
@@ -604,9 +641,9 @@ async fn archive_storage_closest_merkle_value() {
}
#[tokio::test]
-async fn archive_storage_paginate_iterations() {
+async fn archive_storage_iterations() {
// 1 iteration allowed before pagination kicks in.
- let (client, api) = setup_api(1, MAX_QUERIED_LIMIT);
+ let (client, api) = setup_api();
// Import a new block with storage changes.
let mut builder = BlockBuilderBuilder::new(&*client)
@@ -625,237 +662,94 @@ async fn archive_storage_paginate_iterations() {
// Calling with an invalid hash.
let invalid_hash = hex_string(&INVALID_HASH);
- let result: ArchiveStorageResult = api
- .call(
+ let mut sub = api
+ .subscribe_unbounded(
"archive_unstable_storage",
rpc_params![
&invalid_hash,
- vec![PaginatedStorageQuery {
- key: hex_string(b":m"),
- query_type: StorageQueryType::DescendantsValues,
- pagination_start_key: None,
- }]
- ],
- )
- .await
- .unwrap();
- match result {
- ArchiveStorageResult::Err(_) => (),
- _ => panic!("Unexpected result"),
- };
-
- // Valid call with storage at the key.
- let result: ArchiveStorageResult = api
- .call(
- "archive_unstable_storage",
- rpc_params![
- &block_hash,
- vec![PaginatedStorageQuery {
- key: hex_string(b":m"),
- query_type: StorageQueryType::DescendantsValues,
- pagination_start_key: None,
- }]
- ],
- )
- .await
- .unwrap();
- match result {
- ArchiveStorageResult::Ok(ArchiveStorageMethodOk { result, discarded_items }) => {
- assert_eq!(result.len(), 1);
- assert_eq!(discarded_items, 0);
-
- assert_eq!(result[0].key, hex_string(b":m"));
- assert_eq!(result[0].result, StorageResultType::Value(hex_string(b"a")));
- },
- _ => panic!("Unexpected result"),
- };
-
- // Continue with pagination.
- let result: ArchiveStorageResult = api
- .call(
- "archive_unstable_storage",
- rpc_params![
- &block_hash,
- vec![PaginatedStorageQuery {
- key: hex_string(b":m"),
- query_type: StorageQueryType::DescendantsValues,
- pagination_start_key: Some(hex_string(b":m")),
- }]
- ],
- )
- .await
- .unwrap();
- match result {
- ArchiveStorageResult::Ok(ArchiveStorageMethodOk { result, discarded_items }) => {
- assert_eq!(result.len(), 1);
- assert_eq!(discarded_items, 0);
-
- assert_eq!(result[0].key, hex_string(b":mo"));
- assert_eq!(result[0].result, StorageResultType::Value(hex_string(b"ab")));
- },
- _ => panic!("Unexpected result"),
- };
-
- // Continue with pagination.
- let result: ArchiveStorageResult = api
- .call(
- "archive_unstable_storage",
- rpc_params![
- &block_hash,
- vec![PaginatedStorageQuery {
+ vec![StorageQuery {
key: hex_string(b":m"),
query_type: StorageQueryType::DescendantsValues,
- pagination_start_key: Some(hex_string(b":mo")),
}]
],
)
.await
.unwrap();
- match result {
- ArchiveStorageResult::Ok(ArchiveStorageMethodOk { result, discarded_items }) => {
- assert_eq!(result.len(), 1);
- assert_eq!(discarded_items, 0);
-
- assert_eq!(result[0].key, hex_string(b":moD"));
- assert_eq!(result[0].result, StorageResultType::Value(hex_string(b"abcmoD")));
- },
- _ => panic!("Unexpected result"),
- };
- // Continue with pagination.
- let result: ArchiveStorageResult = api
- .call(
- "archive_unstable_storage",
- rpc_params![
- &block_hash,
- vec![PaginatedStorageQuery {
- key: hex_string(b":m"),
- query_type: StorageQueryType::DescendantsValues,
- pagination_start_key: Some(hex_string(b":moD")),
- }]
- ],
- )
- .await
- .unwrap();
- match result {
- ArchiveStorageResult::Ok(ArchiveStorageMethodOk { result, discarded_items }) => {
- assert_eq!(result.len(), 1);
- assert_eq!(discarded_items, 0);
-
- assert_eq!(result[0].key, hex_string(b":moc"));
- assert_eq!(result[0].result, StorageResultType::Value(hex_string(b"abc")));
- },
- _ => panic!("Unexpected result"),
- };
+ assert_matches!(
+ get_next_event::<ArchiveStorageEvent>(&mut sub).await,
+ ArchiveStorageEvent::StorageError(_)
+ );
- // Continue with pagination.
- let result: ArchiveStorageResult = api
- .call(
+ // Valid call with storage at the key.
+ let mut sub = api
+ .subscribe_unbounded(
"archive_unstable_storage",
rpc_params![
&block_hash,
- vec![PaginatedStorageQuery {
+ vec![StorageQuery {
key: hex_string(b":m"),
query_type: StorageQueryType::DescendantsValues,
- pagination_start_key: Some(hex_string(b":moc")),
}]
],
)
.await
.unwrap();
- match result {
- ArchiveStorageResult::Ok(ArchiveStorageMethodOk { result, discarded_items }) => {
- assert_eq!(result.len(), 1);
- assert_eq!(discarded_items, 0);
- assert_eq!(result[0].key, hex_string(b":mock"));
- assert_eq!(result[0].result, StorageResultType::Value(hex_string(b"abcd")));
- },
- _ => panic!("Unexpected result"),
- };
+ assert_eq!(
+ get_next_event::<ArchiveStorageEvent>(&mut sub).await,
+ ArchiveStorageEvent::Storage(StorageResult {
+ key: hex_string(b":m"),
+ result: StorageResultType::Value(hex_string(b"a")),
+ child_trie_key: None,
+ })
+ );
- // Continue with pagination until no keys are returned.
- let result: ArchiveStorageResult = api
- .call(
- "archive_unstable_storage",
- rpc_params![
- &block_hash,
- vec![PaginatedStorageQuery {
- key: hex_string(b":m"),
- query_type: StorageQueryType::DescendantsValues,
- pagination_start_key: Some(hex_string(b":mock")),
- }]
- ],
- )
- .await
- .unwrap();
- match result {
- ArchiveStorageResult::Ok(ArchiveStorageMethodOk { result, discarded_items }) => {
- assert_eq!(result.len(), 0);
- assert_eq!(discarded_items, 0);
- },
- _ => panic!("Unexpected result"),
- };
-}
+ assert_eq!(
+ get_next_event::<ArchiveStorageEvent>(&mut sub).await,
+ ArchiveStorageEvent::Storage(StorageResult {
+ key: hex_string(b":mo"),
+ result: StorageResultType::Value(hex_string(b"ab")),
+ child_trie_key: None,
+ })
+ );
-#[tokio::test]
-async fn archive_storage_discarded_items() {
- // One query at a time
- let (client, api) = setup_api(MAX_PAGINATION_LIMIT, 1);
+ assert_eq!(
+ get_next_event::<ArchiveStorageEvent>(&mut sub).await,
+ ArchiveStorageEvent::Storage(StorageResult {
+ key: hex_string(b":moD"),
+ result: StorageResultType::Value(hex_string(b"abcmoD")),
+ child_trie_key: None,
+ })
+ );
- // Import a new block with storage changes.
- let mut builder = BlockBuilderBuilder::new(&*client)
- .on_parent_block(client.chain_info().genesis_hash)
- .with_parent_block_number(0)
- .build()
- .unwrap();
- builder.push_storage_change(b":m".to_vec(), Some(b"a".to_vec())).unwrap();
- let block = builder.build().unwrap().block;
- let block_hash = format!("{:?}", block.header.hash());
- client.import(BlockOrigin::Own, block.clone()).await.unwrap();
+ assert_eq!(
+ get_next_event::<ArchiveStorageEvent>(&mut sub).await,
+ ArchiveStorageEvent::Storage(StorageResult {
+ key: hex_string(b":moc"),
+ result: StorageResultType::Value(hex_string(b"abc")),
+ child_trie_key: None,
+ })
+ );
- // Valid call with storage at the key.
- let result: ArchiveStorageResult = api
- .call(
- "archive_unstable_storage",
- rpc_params![
- &block_hash,
- vec![
- PaginatedStorageQuery {
- key: hex_string(b":m"),
- query_type: StorageQueryType::Value,
- pagination_start_key: None,
- },
- PaginatedStorageQuery {
- key: hex_string(b":m"),
- query_type: StorageQueryType::Hash,
- pagination_start_key: None,
- },
- PaginatedStorageQuery {
- key: hex_string(b":m"),
- query_type: StorageQueryType::Hash,
- pagination_start_key: None,
- }
- ]
- ],
- )
- .await
- .unwrap();
- match result {
- ArchiveStorageResult::Ok(ArchiveStorageMethodOk { result, discarded_items }) => {
- assert_eq!(result.len(), 1);
- assert_eq!(discarded_items, 2);
+ assert_eq!(
+ get_next_event::<ArchiveStorageEvent>(&mut sub).await,
+ ArchiveStorageEvent::Storage(StorageResult {
+ key: hex_string(b":mock"),
+ result: StorageResultType::Value(hex_string(b"abcd")),
+ child_trie_key: None,
+ })
+ );
- assert_eq!(result[0].key, hex_string(b":m"));
- assert_eq!(result[0].result, StorageResultType::Value(hex_string(b"a")));
- },
- _ => panic!("Unexpected result"),
- };
+ assert_matches!(
+ get_next_event::<ArchiveStorageEvent>(&mut sub).await,
+ ArchiveStorageEvent::StorageDone
+ );
}
#[tokio::test]
async fn archive_storage_diff_main_trie() {
- let (client, api) = setup_api(MAX_PAGINATION_LIMIT, MAX_QUERIED_LIMIT);
+ let (client, api) = setup_api();
let mut builder = BlockBuilderBuilder::new(&*client)
.on_parent_block(client.chain_info().genesis_hash)
@@ -965,7 +859,7 @@ async fn archive_storage_diff_main_trie() {
#[tokio::test]
async fn archive_storage_diff_no_changes() {
- let (client, api) = setup_api(MAX_PAGINATION_LIMIT, MAX_QUERIED_LIMIT);
+ let (client, api) = setup_api();
// Build 2 identical blocks.
let mut builder = BlockBuilderBuilder::new(&*client)
@@ -1012,7 +906,7 @@ async fn archive_storage_diff_no_changes() {
#[tokio::test]
async fn archive_storage_diff_deleted_changes() {
- let (client, api) = setup_api(MAX_PAGINATION_LIMIT, MAX_QUERIED_LIMIT);
+ let (client, api) = setup_api();
// Blocks are imported as forks.
let mut builder = BlockBuilderBuilder::new(&*client)
@@ -1079,7 +973,7 @@ async fn archive_storage_diff_deleted_changes() {
#[tokio::test]
async fn archive_storage_diff_invalid_params() {
let invalid_hash = hex_string(&INVALID_HASH);
- let (_, api) = setup_api(MAX_PAGINATION_LIMIT, MAX_QUERIED_LIMIT);
+ let (_, api) = setup_api();
// Invalid shape for parameters.
let items: Vec<ArchiveStorageDiffItem<String>> = Vec::new();
diff --git a/substrate/client/rpc-spec-v2/src/chain_head/event.rs b/substrate/client/rpc-spec-v2/src/chain_head/event.rs
index bd9863060910..de74145a3f08 100644
--- a/substrate/client/rpc-spec-v2/src/chain_head/event.rs
+++ b/substrate/client/rpc-spec-v2/src/chain_head/event.rs
@@ -235,7 +235,7 @@ pub struct OperationCallDone {
pub output: String,
}
-/// The response of the `chainHead_call` method.
+/// The response of the `chainHead_storage` method.
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct OperationStorageItems {
@@ -536,6 +536,7 @@ mod tests {
items: vec![StorageResult {
key: "0x1".into(),
result: StorageResultType::Value("0x123".to_string()),
+ child_trie_key: None,
}],
});
diff --git a/substrate/client/rpc-spec-v2/src/chain_head/subscription/inner.rs b/substrate/client/rpc-spec-v2/src/chain_head/subscription/inner.rs
index 95a7c7fe1832..3e1bd23776d3 100644
--- a/substrate/client/rpc-spec-v2/src/chain_head/subscription/inner.rs
+++ b/substrate/client/rpc-spec-v2/src/chain_head/subscription/inner.rs
@@ -784,7 +784,7 @@ mod tests {
use super::*;
use jsonrpsee::ConnectionId;
use sc_block_builder::BlockBuilderBuilder;
- use sc_service::client::new_in_mem;
+ use sc_service::client::new_with_backend;
use sp_consensus::BlockOrigin;
use sp_core::{testing::TaskExecutor, H256};
use substrate_test_runtime_client::{
@@ -811,13 +811,13 @@ mod tests {
)
.unwrap();
let client = Arc::new(
- new_in_mem::<_, Block, _, RuntimeApi>(
+ new_with_backend::<_, _, Block, _, RuntimeApi>(
backend.clone(),
executor,
genesis_block_builder,
+ Box::new(TaskExecutor::new()),
None,
None,
- Box::new(TaskExecutor::new()),
client_config,
)
.unwrap(),
diff --git a/substrate/client/rpc-spec-v2/src/chain_head/tests.rs b/substrate/client/rpc-spec-v2/src/chain_head/tests.rs
index c505566d887d..21e8365622a1 100644
--- a/substrate/client/rpc-spec-v2/src/chain_head/tests.rs
+++ b/substrate/client/rpc-spec-v2/src/chain_head/tests.rs
@@ -34,7 +34,7 @@ use jsonrpsee::{
use sc_block_builder::BlockBuilderBuilder;
use sc_client_api::ChildInfo;
use sc_rpc::testing::TokioTestExecutor;
-use sc_service::client::new_in_mem;
+use sc_service::client::new_with_backend;
use sp_blockchain::HeaderBackend;
use sp_consensus::BlockOrigin;
use sp_core::{
@@ -2547,13 +2547,13 @@ async fn pin_block_references() {
.unwrap();
let client = Arc::new(
- new_in_mem::<_, Block, _, RuntimeApi>(
+ new_with_backend::<_, _, Block, _, RuntimeApi>(
backend.clone(),
executor,
genesis_block_builder,
+ Box::new(TokioTestExecutor::default()),
None,
None,
- Box::new(TokioTestExecutor::default()),
client_config,
)
.unwrap(),
diff --git a/substrate/client/rpc-spec-v2/src/common/events.rs b/substrate/client/rpc-spec-v2/src/common/events.rs
index 198a60bf4cac..44f722c0c61b 100644
--- a/substrate/client/rpc-spec-v2/src/common/events.rs
+++ b/substrate/client/rpc-spec-v2/src/common/events.rs
@@ -78,6 +78,10 @@ pub struct StorageResult {
/// The result of the query.
#[serde(flatten)]
pub result: StorageResultType,
+ /// The child trie key if provided.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ #[serde(default)]
+ pub child_trie_key: Option<String>,
}
/// The type of the storage query.
@@ -105,23 +109,41 @@ pub struct StorageResultErr {
/// The result of a storage call.
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
-#[serde(untagged)]
-pub enum ArchiveStorageResult {
+#[serde(rename_all = "camelCase")]
+#[serde(tag = "event")]
+pub enum ArchiveStorageEvent {
/// Query generated a result.
- Ok(ArchiveStorageMethodOk),
+ Storage(StorageResult),
/// Query encountered an error.
- Err(ArchiveStorageMethodErr),
+ StorageError(ArchiveStorageMethodErr),
+ /// Operation storage is done.
+ StorageDone,
}
-impl ArchiveStorageResult {
- /// Create a new `ArchiveStorageResult::Ok` result.
- pub fn ok(result: Vec, discarded_items: usize) -> Self {
- Self::Ok(ArchiveStorageMethodOk { result, discarded_items })
+impl ArchiveStorageEvent {
+ /// Create a new `ArchiveStorageEvent::StorageError` event.
+ pub fn err(error: String) -> Self {
+ Self::StorageError(ArchiveStorageMethodErr { error })
}
- /// Create a new `ArchiveStorageResult::Err` result.
- pub fn err(error: String) -> Self {
- Self::Err(ArchiveStorageMethodErr { error })
+ /// Create a new `ArchiveStorageEvent::Storage` event.
+ pub fn result(result: StorageResult) -> Self {
+ Self::Storage(result)
+ }
+
+ /// Checks if the event is a `StorageDone` event.
+ pub fn is_done(&self) -> bool {
+ matches!(self, Self::StorageDone)
+ }
+
+ /// Checks if the event is a `StorageErr` event.
+ pub fn is_err(&self) -> bool {
+ matches!(self, Self::StorageError(_))
+ }
+
+ /// Checks if the event is a `StorageResult` event.
+ pub fn is_result(&self) -> bool {
+ matches!(self, Self::Storage(_))
}
}
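The enum above is internally tagged (`#[serde(tag = "event")]`) and camelCased, which determines the JSON shape that `archive_unstable_storage` subscribers receive. The snippet below is a simplified, self-contained illustration of that serialization; its `StorageResult` collapses the flattened `result` field into a plain `value` and is not the real type from `common/events.rs`.

```rust
// Sketch only: trimmed-down types with the same serde tagging scheme,
// assuming serde (with "derive") and serde_json as dependencies.
use serde::{Deserialize, Serialize};

/// Simplified stand-in for the real `StorageResult`.
#[derive(Serialize, Deserialize)]
struct StorageResult {
    key: String,
    value: String,
    #[serde(skip_serializing_if = "Option::is_none", default)]
    child_trie_key: Option<String>,
}

/// Same tagging scheme as the enum introduced above.
#[derive(Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
#[serde(tag = "event")]
enum ArchiveStorageEvent {
    Storage(StorageResult),
    StorageError { error: String },
    StorageDone,
}

fn main() {
    let item = ArchiveStorageEvent::Storage(StorageResult {
        key: "0x1".into(),
        value: "0x123".into(),
        child_trie_key: None,
    });
    // The tag is merged into the payload object rather than wrapping it.
    assert_eq!(
        serde_json::to_string(&item).unwrap(),
        r#"{"event":"storage","key":"0x1","value":"0x123"}"#
    );
    // Error events carry the tag plus the error message.
    assert_eq!(
        serde_json::to_string(&ArchiveStorageEvent::StorageError { error: "boom".into() }).unwrap(),
        r#"{"event":"storageError","error":"boom"}"#
    );
    // Unit variants carry only the tag.
    assert_eq!(
        serde_json::to_string(&ArchiveStorageEvent::StorageDone).unwrap(),
        r#"{"event":"storageDone"}"#
    );
    println!("serialization shape ok");
}
```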
@@ -354,8 +376,11 @@ mod tests {
#[test]
fn storage_result() {
// Item with Value.
- let item =
- StorageResult { key: "0x1".into(), result: StorageResultType::Value("res".into()) };
+ let item = StorageResult {
+ key: "0x1".into(),
+ result: StorageResultType::Value("res".into()),
+ child_trie_key: None,
+ };
// Encode
let ser = serde_json::to_string(&item).unwrap();
let exp = r#"{"key":"0x1","value":"res"}"#;
@@ -365,8 +390,11 @@ mod tests {
assert_eq!(dec, item);
// Item with Hash.
- let item =
- StorageResult { key: "0x1".into(), result: StorageResultType::Hash("res".into()) };
+ let item = StorageResult {
+ key: "0x1".into(),
+ result: StorageResultType::Hash("res".into()),
+ child_trie_key: None,
+ };
// Encode
let ser = serde_json::to_string(&item).unwrap();
let exp = r#"{"key":"0x1","hash":"res"}"#;
@@ -379,6 +407,7 @@ mod tests {
let item = StorageResult {
key: "0x1".into(),
result: StorageResultType::ClosestDescendantMerkleValue("res".into()),
+ child_trie_key: None,
};
// Encode
let ser = serde_json::to_string(&item).unwrap();
diff --git a/substrate/client/rpc-spec-v2/src/common/storage.rs b/substrate/client/rpc-spec-v2/src/common/storage.rs
index 673e20b2bc78..a1e34d51530e 100644
--- a/substrate/client/rpc-spec-v2/src/common/storage.rs
+++ b/substrate/client/rpc-spec-v2/src/common/storage.rs
@@ -24,7 +24,7 @@ use sc_client_api::{Backend, ChildInfo, StorageKey, StorageProvider};
use sp_runtime::traits::Block as BlockT;
use tokio::sync::mpsc;
-use super::events::{StorageResult, StorageResultType};
+use super::events::{StorageQuery, StorageQueryType, StorageResult, StorageResultType};
use crate::hex_string;
/// Call into the storage of blocks.
@@ -70,9 +70,6 @@ pub enum IterQueryType {
/// The result of making a query call.
pub type QueryResult = Result