diff --git a/.github/scripts/cmd/cmd.py b/.github/scripts/cmd/cmd.py
index 9da05cac17b9..2c017b7d0c3e 100755
--- a/.github/scripts/cmd/cmd.py
+++ b/.github/scripts/cmd/cmd.py
@@ -58,7 +58,7 @@ def setup_logging():
%(prog)s --runtime westend rococo --pallet pallet_balances pallet_multisig --quiet --clean
'''
-parser_bench = subparsers.add_parser('bench', help='Runs benchmarks', epilog=bench_example, formatter_class=argparse.RawDescriptionHelpFormatter)
+parser_bench = subparsers.add_parser('bench', help='Runs benchmarks (old CLI)', epilog=bench_example, formatter_class=argparse.RawDescriptionHelpFormatter)
for arg, config in common_args.items():
parser_bench.add_argument(arg, **config)
@@ -67,6 +67,35 @@ def setup_logging():
parser_bench.add_argument('--pallet', help='Pallet(s) space separated', nargs='*', default=[])
parser_bench.add_argument('--fail-fast', help='Fail fast on first failed benchmark', action='store_true')
+
+"""
+BENCH OMNI
+"""
+
+bench_example = '''**Examples**:
+ Runs all benchmarks
+ %(prog)s
+
+    Runs benchmarks for pallet_balances and pallet_xcm_benchmarks::generic for all runtimes which have these pallets. **--quiet** makes it output nothing to the PR except reactions
+ %(prog)s --pallet pallet_balances pallet_xcm_benchmarks::generic --quiet
+
+ Runs bench for all pallets for westend runtime and fails fast on first failed benchmark
+ %(prog)s --runtime westend --fail-fast
+
+    Outputs nothing and cleans up the bot's and the author's previous command-triggering comments in the PR
+ %(prog)s --runtime westend rococo --pallet pallet_balances pallet_multisig --quiet --clean
+'''
+
+parser_bench_omni = subparsers.add_parser('bench-omni', help='Runs benchmarks (frame omni bencher)', epilog=bench_example, formatter_class=argparse.RawDescriptionHelpFormatter)
+
+for arg, config in common_args.items():
+    parser_bench_omni.add_argument(arg, **config)
+
+parser_bench_omni.add_argument('--runtime', help='Runtime(s) space separated', choices=runtimeNames, nargs='*', default=runtimeNames)
+parser_bench_omni.add_argument('--pallet', help='Pallet(s) space separated', nargs='*', default=[])
+parser_bench_omni.add_argument('--fail-fast', help='Fail fast on first failed benchmark', action='store_true')
+
+
"""
FMT
"""
@@ -98,12 +127,12 @@ def main():
print(f'args: {args}')
- if args.command == 'bench':
+ if args.command == 'bench-omni':
runtime_pallets_map = {}
failed_benchmarks = {}
successful_benchmarks = {}
- profile = "release"
+ profile = "production"
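+        # production profile: same optimization settings as published release binaries, so weights are representative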
print(f'Provided runtimes: {args.runtime}')
# convert to mapped dict
@@ -113,11 +142,22 @@ def main():
# loop over remaining runtimes to collect available pallets
for runtime in runtimesMatrix.values():
- os.system(f"forklift cargo build -p {runtime['package']} --profile {profile} --features={runtime['bench_features']}")
+ build_command = f"forklift cargo build -p {runtime['package']} --profile {profile} --features={runtime['bench_features']}"
+ print(f'-- building "{runtime["name"]}" with `{build_command}`')
+ os.system(build_command)
print(f'-- listing pallets for benchmark for {runtime["name"]}')
wasm_file = f"target/{profile}/wbuild/{runtime['package']}/{runtime['package'].replace('-', '_')}.wasm"
- output = os.popen(
- f"frame-omni-bencher v1 benchmark pallet --no-csv-header --no-storage-info --no-min-squares --no-median-slopes --all --list --runtime={wasm_file} {runtime['bench_flags']}").read()
+ list_command = f"frame-omni-bencher v1 benchmark pallet " \
+ f"--no-csv-header " \
+ f"--no-storage-info " \
+ f"--no-min-squares " \
+ f"--no-median-slopes " \
+ f"--all " \
+ f"--list " \
+ f"--runtime={wasm_file} " \
+ f"{runtime['bench_flags']}"
+ print(f'-- running: {list_command}')
+ output = os.popen(list_command).read()
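+            # each listed row is "<pallet>, <benchmark>"; only the unique pallet names are kept below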
raw_pallets = output.strip().split('\n')
all_pallets = set()
@@ -230,6 +270,149 @@ def main():
print_and_log('✅ Successful benchmarks of runtimes/pallets:')
for runtime, pallets in successful_benchmarks.items():
print_and_log(f'-- {runtime}: {pallets}')
+
+ if args.command == 'bench':
+ runtime_pallets_map = {}
+ failed_benchmarks = {}
+ successful_benchmarks = {}
+
+ profile = "production"
+
+ print(f'Provided runtimes: {args.runtime}')
+ # convert to mapped dict
+ runtimesMatrix = list(filter(lambda x: x['name'] in args.runtime, runtimesMatrix))
+ runtimesMatrix = {x['name']: x for x in runtimesMatrix}
+        print(f'Filtered runtimes: {runtimesMatrix}')
+
+ # loop over remaining runtimes to collect available pallets
+ for runtime in runtimesMatrix.values():
+ build_command = f"forklift cargo build -p {runtime['old_package']} --profile {profile} --features={runtime['bench_features']} --locked"
+ print(f'-- building {runtime["name"]} with `{build_command}`')
+ os.system(build_command)
+
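+            # the "dev" (kitchensink) runtime uses the plain "dev" chain spec; all others use "<name>-dev"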
+ chain = runtime['name'] if runtime['name'] == 'dev' else f"{runtime['name']}-dev"
+
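+            # sanity-check: `benchmark machine` compares this host against the reference benchmarking hardware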
+ machine_test = f"target/{profile}/{runtime['old_bin']} benchmark machine --chain={chain}"
+            print(f"Running machine test: `{machine_test}`")
+ os.system(machine_test)
+
+ print(f'-- listing pallets for benchmark for {chain}')
+ list_command = f"target/{profile}/{runtime['old_bin']} " \
+ f"benchmark pallet " \
+ f"--no-csv-header " \
+ f"--no-storage-info " \
+ f"--no-min-squares " \
+ f"--no-median-slopes " \
+ f"--all " \
+ f"--list " \
+ f"--chain={chain}"
+ print(f'-- running: {list_command}')
+ output = os.popen(list_command).read()
+ raw_pallets = output.strip().split('\n')
+
+ all_pallets = set()
+ for pallet in raw_pallets:
+ if pallet:
+ all_pallets.add(pallet.split(',')[0].strip())
+
+ pallets = list(all_pallets)
+ print(f'Pallets in {runtime["name"]}: {pallets}')
+ runtime_pallets_map[runtime['name']] = pallets
+
+ print(f'\n')
+
+ # filter out only the specified pallets from collected runtimes/pallets
+ if args.pallet:
+ print(f'Pallets: {args.pallet}')
+ new_pallets_map = {}
+            # keep a runtime only if it contains every requested pallet
+ for runtime in runtime_pallets_map:
+ if set(args.pallet).issubset(set(runtime_pallets_map[runtime])):
+ new_pallets_map[runtime] = args.pallet
+
+ runtime_pallets_map = new_pallets_map
+
+        print(f'Filtered runtimes & pallets: {runtime_pallets_map}\n')
+
+ if not runtime_pallets_map:
+ if args.pallet and not args.runtime:
+ print(f"No pallets {args.pallet} found in any runtime")
+ elif args.runtime and not args.pallet:
+ print(f"{args.runtime} runtime does not have any pallets")
+ elif args.runtime and args.pallet:
+ print(f"No pallets {args.pallet} found in {args.runtime}")
+ else:
+ print('No runtimes found')
+ sys.exit(1)
+
+ for runtime in runtime_pallets_map:
+ for pallet in runtime_pallets_map[runtime]:
+ config = runtimesMatrix[runtime]
+ header_path = os.path.abspath(config['header'])
+ template = None
+
+ chain = config['name'] if runtime == 'dev' else f"{config['name']}-dev"
+
+ print(f'-- config: {config}')
+ if runtime == 'dev':
+ # to support sub-modules (https://github.com/paritytech/command-bot/issues/275)
+ search_manifest_path = f"cargo metadata --locked --format-version 1 --no-deps | jq -r '.packages[] | select(.name == \"{pallet.replace('_', '-')}\") | .manifest_path'"
+ print(f'-- running: {search_manifest_path}')
+ manifest_path = os.popen(search_manifest_path).read()
+ if not manifest_path:
+ print(f'-- pallet {pallet} not found in dev runtime')
+ if args.fail_fast:
+ print_and_log(f'Error: {pallet} not found in dev runtime')
+ sys.exit(1)
+ package_dir = os.path.dirname(manifest_path)
+ print(f'-- package_dir: {package_dir}')
+ print(f'-- manifest_path: {manifest_path}')
+ output_path = os.path.join(package_dir, "src", "weights.rs")
+ template = config['template']
+ else:
+ default_path = f"./{config['path']}/src/weights"
+ xcm_path = f"./{config['path']}/src/weights/xcm"
+ output_path = default_path
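+                # pallet_xcm_benchmarks weights use the runtime's weight template and land in src/weights/xcm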
+ if pallet.startswith("pallet_xcm_benchmarks"):
+ template = config['template']
+ output_path = xcm_path
+
+ print(f'-- benchmarking {pallet} in {runtime} into {output_path}')
+ cmd = f"target/{profile}/{config['old_bin']} benchmark pallet " \
+ f"--extrinsic=* " \
+ f"--chain={chain} " \
+ f"--pallet={pallet} " \
+ f"--header={header_path} " \
+ f"--output={output_path} " \
+ f"--wasm-execution=compiled " \
+ f"--steps=50 " \
+ f"--repeat=20 " \
+ f"--heap-pages=4096 " \
+ f"{f'--template={template} ' if template else ''}" \
+ f"--no-storage-info --no-min-squares --no-median-slopes "
+ print(f'-- Running: {cmd} \n')
+ status = os.system(cmd)
+
+ if status != 0 and args.fail_fast:
+ print_and_log(f'❌ Failed to benchmark {pallet} in {runtime}')
+ sys.exit(1)
+
+ # Otherwise collect failed benchmarks and print them at the end
+ # push failed pallets to failed_benchmarks
+ if status != 0:
+ failed_benchmarks[f'{runtime}'] = failed_benchmarks.get(f'{runtime}', []) + [pallet]
+ else:
+ successful_benchmarks[f'{runtime}'] = successful_benchmarks.get(f'{runtime}', []) + [pallet]
+
+ if failed_benchmarks:
+ print_and_log('❌ Failed benchmarks of runtimes/pallets:')
+ for runtime, pallets in failed_benchmarks.items():
+ print_and_log(f'-- {runtime}: {pallets}')
+
+ if successful_benchmarks:
+ print_and_log('✅ Successful benchmarks of runtimes/pallets:')
+ for runtime, pallets in successful_benchmarks.items():
+ print_and_log(f'-- {runtime}: {pallets}')
elif args.command == 'fmt':
command = f"cargo +nightly fmt"
diff --git a/.github/scripts/cmd/test_cmd.py b/.github/scripts/cmd/test_cmd.py
index 7b29fbfe90d8..68998b989909 100644
--- a/.github/scripts/cmd/test_cmd.py
+++ b/.github/scripts/cmd/test_cmd.py
@@ -47,7 +47,7 @@
def get_mock_bench_output(runtime, pallets, output_path, header, bench_flags, template = None):
return f"frame-omni-bencher v1 benchmark pallet --extrinsic=* " \
- f"--runtime=target/release/wbuild/{runtime}-runtime/{runtime.replace('-', '_')}_runtime.wasm " \
+ f"--runtime=target/production/wbuild/{runtime}-runtime/{runtime.replace('-', '_')}_runtime.wasm " \
f"--pallet={pallets} --header={header} " \
f"--output={output_path} " \
f"--wasm-execution=compiled " \
@@ -93,7 +93,7 @@ def tearDown(self):
def test_bench_command_normal_execution_all_runtimes(self):
self.mock_parse_args.return_value = (argparse.Namespace(
- command='bench',
+ command='bench-omni',
runtime=list(map(lambda x: x['name'], mock_runtimes_matrix)),
pallet=['pallet_balances'],
fail_fast=True,
@@ -117,10 +117,10 @@ def test_bench_command_normal_execution_all_runtimes(self):
expected_calls = [
# Build calls
- call("forklift cargo build -p kitchensink-runtime --profile release --features=runtime-benchmarks"),
- call("forklift cargo build -p westend-runtime --profile release --features=runtime-benchmarks"),
- call("forklift cargo build -p rococo-runtime --profile release --features=runtime-benchmarks"),
- call("forklift cargo build -p asset-hub-westend-runtime --profile release --features=runtime-benchmarks"),
+ call("forklift cargo build -p kitchensink-runtime --profile production --features=runtime-benchmarks"),
+ call("forklift cargo build -p westend-runtime --profile production --features=runtime-benchmarks"),
+ call("forklift cargo build -p rococo-runtime --profile production --features=runtime-benchmarks"),
+ call("forklift cargo build -p asset-hub-westend-runtime --profile production --features=runtime-benchmarks"),
call(get_mock_bench_output(
runtime='kitchensink',
@@ -150,7 +150,7 @@ def test_bench_command_normal_execution_all_runtimes(self):
def test_bench_command_normal_execution(self):
self.mock_parse_args.return_value = (argparse.Namespace(
- command='bench',
+ command='bench-omni',
runtime=['westend'],
pallet=['pallet_balances', 'pallet_staking'],
fail_fast=True,
@@ -170,7 +170,7 @@ def test_bench_command_normal_execution(self):
expected_calls = [
# Build calls
- call("forklift cargo build -p westend-runtime --profile release --features=runtime-benchmarks"),
+ call("forklift cargo build -p westend-runtime --profile production --features=runtime-benchmarks"),
# Westend runtime calls
call(get_mock_bench_output(
@@ -193,7 +193,7 @@ def test_bench_command_normal_execution(self):
def test_bench_command_normal_execution_xcm(self):
self.mock_parse_args.return_value = (argparse.Namespace(
- command='bench',
+ command='bench-omni',
runtime=['westend'],
pallet=['pallet_xcm_benchmarks::generic'],
fail_fast=True,
@@ -213,7 +213,7 @@ def test_bench_command_normal_execution_xcm(self):
expected_calls = [
# Build calls
- call("forklift cargo build -p westend-runtime --profile release --features=runtime-benchmarks"),
+ call("forklift cargo build -p westend-runtime --profile production --features=runtime-benchmarks"),
# Westend runtime calls
call(get_mock_bench_output(
@@ -229,7 +229,7 @@ def test_bench_command_normal_execution_xcm(self):
def test_bench_command_two_runtimes_two_pallets(self):
self.mock_parse_args.return_value = (argparse.Namespace(
- command='bench',
+ command='bench-omni',
runtime=['westend', 'rococo'],
pallet=['pallet_balances', 'pallet_staking'],
fail_fast=True,
@@ -250,8 +250,8 @@ def test_bench_command_two_runtimes_two_pallets(self):
expected_calls = [
# Build calls
- call("forklift cargo build -p westend-runtime --profile release --features=runtime-benchmarks"),
- call("forklift cargo build -p rococo-runtime --profile release --features=runtime-benchmarks"),
+ call("forklift cargo build -p westend-runtime --profile production --features=runtime-benchmarks"),
+ call("forklift cargo build -p rococo-runtime --profile production --features=runtime-benchmarks"),
# Westend runtime calls
call(get_mock_bench_output(
runtime='westend',
@@ -287,7 +287,7 @@ def test_bench_command_two_runtimes_two_pallets(self):
def test_bench_command_one_dev_runtime(self):
self.mock_parse_args.return_value = (argparse.Namespace(
- command='bench',
+ command='bench-omni',
runtime=['dev'],
pallet=['pallet_balances'],
fail_fast=True,
@@ -309,7 +309,7 @@ def test_bench_command_one_dev_runtime(self):
expected_calls = [
# Build calls
- call("forklift cargo build -p kitchensink-runtime --profile release --features=runtime-benchmarks"),
+ call("forklift cargo build -p kitchensink-runtime --profile production --features=runtime-benchmarks"),
# Westend runtime calls
call(get_mock_bench_output(
runtime='kitchensink',
@@ -324,7 +324,7 @@ def test_bench_command_one_dev_runtime(self):
def test_bench_command_one_cumulus_runtime(self):
self.mock_parse_args.return_value = (argparse.Namespace(
- command='bench',
+ command='bench-omni',
runtime=['asset-hub-westend'],
pallet=['pallet_assets'],
fail_fast=True,
@@ -344,7 +344,7 @@ def test_bench_command_one_cumulus_runtime(self):
expected_calls = [
# Build calls
- call("forklift cargo build -p asset-hub-westend-runtime --profile release --features=runtime-benchmarks"),
+ call("forklift cargo build -p asset-hub-westend-runtime --profile production --features=runtime-benchmarks"),
# Asset-hub-westend runtime calls
call(get_mock_bench_output(
runtime='asset-hub-westend',
@@ -359,7 +359,7 @@ def test_bench_command_one_cumulus_runtime(self):
def test_bench_command_one_cumulus_runtime_xcm(self):
self.mock_parse_args.return_value = (argparse.Namespace(
- command='bench',
+ command='bench-omni',
runtime=['asset-hub-westend'],
pallet=['pallet_xcm_benchmarks::generic', 'pallet_assets'],
fail_fast=True,
@@ -379,7 +379,7 @@ def test_bench_command_one_cumulus_runtime_xcm(self):
expected_calls = [
# Build calls
- call("forklift cargo build -p asset-hub-westend-runtime --profile release --features=runtime-benchmarks"),
+ call("forklift cargo build -p asset-hub-westend-runtime --profile production --features=runtime-benchmarks"),
# Asset-hub-westend runtime calls
call(get_mock_bench_output(
runtime='asset-hub-westend',
diff --git a/.github/scripts/common/lib.sh b/.github/scripts/common/lib.sh
index 6b8f70a26d7e..00f8c089831e 100755
--- a/.github/scripts/common/lib.sh
+++ b/.github/scripts/common/lib.sh
@@ -270,20 +270,19 @@ fetch_debian_package_from_s3() {
}
# Fetch the release artifacts like binary and signatures from S3. Assumes the ENV are set:
-# - RELEASE_ID
-# - GITHUB_TOKEN
-# - REPO in the form paritytech/polkadot
+# inputs: binary (e.g. polkadot), target (e.g. aarch64-apple-darwin); VERSION is expected in the ENV
fetch_release_artifacts_from_s3() {
BINARY=$1
- OUTPUT_DIR=${OUTPUT_DIR:-"./release-artifacts/${BINARY}"}
+ TARGET=$2
+ OUTPUT_DIR=${OUTPUT_DIR:-"./release-artifacts/${TARGET}/${BINARY}"}
echo "OUTPUT_DIR : $OUTPUT_DIR"
URL_BASE=$(get_s3_url_base $BINARY)
echo "URL_BASE=$URL_BASE"
- URL_BINARY=$URL_BASE/$VERSION/$BINARY
- URL_SHA=$URL_BASE/$VERSION/$BINARY.sha256
- URL_ASC=$URL_BASE/$VERSION/$BINARY.asc
+ URL_BINARY=$URL_BASE/$VERSION/$TARGET/$BINARY
+ URL_SHA=$URL_BASE/$VERSION/$TARGET/$BINARY.sha256
+ URL_ASC=$URL_BASE/$VERSION/$TARGET/$BINARY.asc
# Fetch artifacts
mkdir -p "$OUTPUT_DIR"
@@ -298,7 +297,7 @@ fetch_release_artifacts_from_s3() {
pwd
ls -al --color
popd > /dev/null
-
+ unset OUTPUT_DIR
}
# Pass the name of the binary as input, it will
@@ -306,15 +305,26 @@ fetch_release_artifacts_from_s3() {
function get_s3_url_base() {
name=$1
case $name in
- polkadot | polkadot-execute-worker | polkadot-prepare-worker | staking-miner)
+ polkadot | polkadot-execute-worker | polkadot-prepare-worker )
printf "https://releases.parity.io/polkadot"
;;
- polkadot-parachain)
- printf "https://releases.parity.io/cumulus"
+ polkadot-parachain)
+ printf "https://releases.parity.io/polkadot-parachain"
+ ;;
+
+ polkadot-omni-node)
+ printf "https://releases.parity.io/polkadot-omni-node"
;;
- *)
+ chain-spec-builder)
+ printf "https://releases.parity.io/chain-spec-builder"
+ ;;
+
+ frame-omni-bencher)
+ printf "https://releases.parity.io/frame-omni-bencher"
+ ;;
+ *)
printf "UNSUPPORTED BINARY $name"
exit 1
;;
@@ -497,3 +507,16 @@ validate_stable_tag() {
exit 1
fi
}
+
+# Prepare the docker stable tag from the polkadot stable tag
+# input: tag (polkadot-stableYYMM(-X) or polkadot-stableYYMM(-X)-rcX)
+# output: stableYYMM(-X) or stableYYMM(-X)-rcX
+prepare_docker_stable_tag() {
+ tag="$1"
+ if [[ "$tag" =~ stable[0-9]{4}(-[0-9]+)?(-rc[0-9]+)? ]]; then
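+    # BASH_REMATCH[0] holds the full match, e.g. polkadot-stable2412-rc2 -> stable2412-rc2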
+ echo "${BASH_REMATCH[0]}"
+ else
+ echo "Tag is invalid: $tag"
+ exit 1
+ fi
+}
diff --git a/.github/scripts/release/release_lib.sh b/.github/scripts/release/release_lib.sh
index 8b9254ec3f29..984709f2ea03 100644
--- a/.github/scripts/release/release_lib.sh
+++ b/.github/scripts/release/release_lib.sh
@@ -129,13 +129,69 @@ upload_s3_release() {
echo "Working on version: $version "
echo "Working on platform: $target "
+ URL_BASE=$(get_s3_url_base $product)
+
echo "Current content, should be empty on new uploads:"
- aws s3 ls "s3://releases.parity.io/${product}/${version}/${target}" --recursive --human-readable --summarize || true
+ aws s3 ls "s3://${URL_BASE}/${version}/${target}" --recursive --human-readable --summarize || true
echo "Content to be uploaded:"
- artifacts="artifacts/$product/"
+ artifacts="release-artifacts/$target/$product/"
ls "$artifacts"
- aws s3 sync --acl public-read "$artifacts" "s3://releases.parity.io/${product}/${version}/${target}"
+ aws s3 sync --acl public-read "$artifacts" "s3://${URL_BASE}/${version}/${target}"
echo "Uploaded files:"
- aws s3 ls "s3://releases.parity.io/${product}/${version}/${target}" --recursive --human-readable --summarize
- echo "✅ The release should be at https://releases.parity.io/${product}/${version}/${target}"
+ aws s3 ls "s3://${URL_BASE}/${version}/${target}" --recursive --human-readable --summarize
+ echo "✅ The release should be at https://${URL_BASE}/${version}/${target}"
+}
+
+# Upload runtimes artifacts to s3 release bucket
+#
+# input: version (stable release tag, e.g. polkadot-stable2412 or polkadot-stable2412-rc1)
+# output: none
+upload_s3_runtimes_release_artifacts() {
+    alias aws='podman run --rm -it -e AWS_ACCESS_KEY_ID -e AWS_SECRET_ACCESS_KEY -e AWS_BUCKET docker.io/paritytech/awscli aws'
+
+ version=$1
+
+ echo "Working on version: $version "
+
+ echo "Current content, should be empty on new uploads:"
+ aws s3 ls "s3://releases.parity.io/polkadot/runtimes/${version}/" --recursive --human-readable --summarize || true
+ echo "Content to be uploaded:"
+ artifacts="artifacts/runtimes/"
+ ls "$artifacts"
+ aws s3 sync --acl public-read "$artifacts" "s3://releases.parity.io/polkadot/runtimes/${version}/"
+ echo "Uploaded files:"
+ aws s3 ls "s3://releases.parity.io/polkadot/runtimes/${version}/" --recursive --human-readable --summarize
+ echo "✅ The release should be at https://releases.parity.io/polkadot/runtimes/${version}"
+}
+
+
+# Pass the name of the binary as input, it will
+# return the s3 base url
+function get_s3_url_base() {
+ name=$1
+ case $name in
+ polkadot | polkadot-execute-worker | polkadot-prepare-worker )
+ printf "releases.parity.io/polkadot"
+ ;;
+
+ polkadot-parachain)
+ printf "releases.parity.io/polkadot-parachain"
+ ;;
+
+ polkadot-omni-node)
+ printf "releases.parity.io/polkadot-omni-node"
+ ;;
+
+ chain-spec-builder)
+ printf "releases.parity.io/chain-spec-builder"
+ ;;
+
+ frame-omni-bencher)
+ printf "releases.parity.io/frame-omni-bencher"
+ ;;
+ *)
+ printf "UNSUPPORTED BINARY $name"
+ exit 1
+ ;;
+ esac
}
diff --git a/.github/workflows/check-semver.yml b/.github/workflows/check-semver.yml
index 8d77b6a31b75..16028c8de770 100644
--- a/.github/workflows/check-semver.yml
+++ b/.github/workflows/check-semver.yml
@@ -11,7 +11,7 @@ concurrency:
cancel-in-progress: true
env:
- TOOLCHAIN: nightly-2024-10-19
+ TOOLCHAIN: nightly-2024-11-19
jobs:
preflight:
@@ -74,10 +74,15 @@ jobs:
- name: install parity-publish
# Set the target dir to cache the build.
- run: CARGO_TARGET_DIR=./target/ cargo install parity-publish@0.10.1 --locked -q
+ run: CARGO_TARGET_DIR=./target/ cargo install parity-publish@0.10.3 --locked -q
- name: check semver
run: |
+ if [ -z "$PR" ]; then
+ echo "Skipping master/merge queue"
+ exit 0
+ fi
+
export CARGO_TARGET_DIR=target
export RUSTFLAGS='-A warnings -A missing_docs'
export SKIP_WASM_BUILD=1
diff --git a/.github/workflows/cmd.yml b/.github/workflows/cmd.yml
index 525ab0c0fc23..b6a50ea0d15e 100644
--- a/.github/workflows/cmd.yml
+++ b/.github/workflows/cmd.yml
@@ -19,10 +19,10 @@ jobs:
steps:
- name: Generate token
id: generate_token
- uses: tibdex/github-app-token@v2.1.0
+ uses: actions/create-github-app-token@v1
with:
- app_id: ${{ secrets.CMD_BOT_APP_ID }}
- private_key: ${{ secrets.CMD_BOT_APP_KEY }}
+ app-id: ${{ secrets.CMD_BOT_APP_ID }}
+ private-key: ${{ secrets.CMD_BOT_APP_KEY }}
- name: Check if user is a member of the organization
id: is-member
@@ -227,7 +227,8 @@ jobs:
cat .github/env >> $GITHUB_OUTPUT
if [ -n "$IMAGE_OVERRIDE" ]; then
- echo "IMAGE=$IMAGE_OVERRIDE" >> $GITHUB_OUTPUT
+ IMAGE=$IMAGE_OVERRIDE
+ echo "IMAGE=$IMAGE" >> $GITHUB_OUTPUT
fi
if [[ $BODY == "/cmd bench"* ]]; then
@@ -237,6 +238,10 @@ jobs:
else
echo "RUNNER=ubuntu-latest" >> $GITHUB_OUTPUT
fi
+ - name: Print outputs
+ run: |
+ echo "RUNNER=${{ steps.set-image.outputs.RUNNER }}"
+ echo "IMAGE=${{ steps.set-image.outputs.IMAGE }}"
# Get PR branch name, because the issue_comment event does not contain the PR branch name
get-pr-branch:
@@ -283,10 +288,24 @@ jobs:
env:
JOB_NAME: "cmd"
runs-on: ${{ needs.set-image.outputs.RUNNER }}
- timeout-minutes: 4320 # 72 hours -> 3 days; as it could take a long time to run all the runtimes/pallets
container:
image: ${{ needs.set-image.outputs.IMAGE }}
+ timeout-minutes: 1440 # 24 hours per runtime
steps:
+ - name: Generate token
+ uses: actions/create-github-app-token@v1
+ id: generate_token
+ with:
+ app-id: ${{ secrets.CMD_BOT_APP_ID }}
+ private-key: ${{ secrets.CMD_BOT_APP_KEY }}
+
+ - name: Checkout
+ uses: actions/checkout@v4
+ with:
+ token: ${{ steps.generate_token.outputs.token }}
+ repository: ${{ needs.get-pr-branch.outputs.repo }}
+ ref: ${{ needs.get-pr-branch.outputs.pr-branch }}
+
- name: Get command
uses: actions-ecosystem/action-regex-match@v2
id: get-pr-comment
@@ -340,13 +359,7 @@ jobs:
repo: context.repo.repo,
body: `Command "${{ steps.get-pr-comment.outputs.group2 }}" has started 🚀 [See logs here](${job_url})`
})
-
- - name: Checkout
- uses: actions/checkout@v4
- with:
- repository: ${{ needs.get-pr-branch.outputs.repo }}
- ref: ${{ needs.get-pr-branch.outputs.pr-branch }}
-
+
- name: Install dependencies for bench
if: startsWith(steps.get-pr-comment.outputs.group2, 'bench')
run: |
@@ -364,6 +377,7 @@ jobs:
# Fixes "detected dubious ownership" error in the ci
git config --global --add safe.directory '*'
git remote -v
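+          # log the runner's CPU details; helpful context when reading benchmark results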
+ cat /proc/cpuinfo
python3 -m pip install -r .github/scripts/generate-prdoc.requirements.txt
python3 .github/scripts/cmd/cmd.py $CMD $PR_ARG
git status
@@ -389,16 +403,30 @@ jobs:
- name: Commit changes
run: |
if [ -n "$(git status --porcelain)" ]; then
- git config --local user.email "action@github.com"
- git config --local user.name "GitHub Action"
+ git config --global user.name command-bot
+ git config --global user.email "<>"
+ git config --global pull.rebase false
+
+ # Push the results to the target branch
+ git remote add \
+ github \
+ "https://token:${{ steps.generate_token.outputs.token }}@github.com/${{ github.event.repository.owner.login }}/${{ github.event.repository.name }}.git" || :
+
+ push_changes() {
+ git push github "HEAD:${{ needs.get-pr-branch.outputs.pr-branch }}"
+ }
git add .
git restore --staged Cargo.lock # ignore changes in Cargo.lock
git commit -m "Update from ${{ github.actor }} running command '${{ steps.get-pr-comment.outputs.group2 }}'" || true
- git pull --rebase origin ${{ needs.get-pr-branch.outputs.pr-branch }}
-
- git push origin ${{ needs.get-pr-branch.outputs.pr-branch }}
+ # Attempt to push changes
+ if ! push_changes; then
+ echo "Push failed, trying to rebase..."
+ git pull --rebase github "${{ needs.get-pr-branch.outputs.pr-branch }}"
+ # After successful rebase, try pushing again
+ push_changes
+ fi
else
echo "Nothing to commit";
fi
diff --git a/.github/workflows/command-backport.yml b/.github/workflows/command-backport.yml
index eecf0ac72d2c..8a017a434525 100644
--- a/.github/workflows/command-backport.yml
+++ b/.github/workflows/command-backport.yml
@@ -86,7 +86,7 @@ jobs:
const reviewer = '${{ github.event.pull_request.user.login }}';
for (const pullNumber of pullNumbers) {
- await github.pulls.createReviewRequest({
+ await github.pulls.requestReviewers({
owner: context.repo.owner,
repo: context.repo.repo,
pull_number: parseInt(pullNumber),
diff --git a/.github/workflows/publish-check-compile.yml b/.github/workflows/publish-check-compile.yml
new file mode 100644
index 000000000000..ada8635e314e
--- /dev/null
+++ b/.github/workflows/publish-check-compile.yml
@@ -0,0 +1,48 @@
+name: Check publish build
+
+on:
+ push:
+ branches:
+ - master
+ pull_request:
+ types: [opened, synchronize, reopened, ready_for_review]
+ merge_group:
+
+concurrency:
+ group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
+ cancel-in-progress: true
+
+jobs:
+ preflight:
+ uses: ./.github/workflows/reusable-preflight.yml
+
+ check-publish:
+ timeout-minutes: 90
+ needs: [preflight]
+ runs-on: ${{ needs.preflight.outputs.RUNNER }}
+ container:
+ image: ${{ needs.preflight.outputs.IMAGE }}
+ steps:
+ - uses: actions/checkout@6d193bf28034eafb982f37bd894289fe649468fc # v4.1.7
+
+ - name: Rust Cache
+ uses: Swatinem/rust-cache@82a92a6e8fbeee089604da2575dc567ae9ddeaab # v2.7.5
+ with:
+ cache-on-failure: true
+
+ - name: install parity-publish
+ run: cargo install parity-publish@0.10.3 --locked -q
+
+ - name: parity-publish update plan
+ run: parity-publish --color always plan --skip-check --prdoc prdoc/
+
+ - name: parity-publish apply plan
+ run: parity-publish --color always apply --registry
+
+ - name: parity-publish check compile
+ run: |
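+          # `parity-publish apply --print` lists the crates touched by the publish plan; cargo-check only those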
+ packages="$(parity-publish apply --print)"
+
+ if [ -n "$packages" ]; then
+ cargo --color always check $(printf -- '-p %s ' $packages)
+ fi
diff --git a/.github/workflows/publish-check-crates.yml b/.github/workflows/publish-check-crates.yml
index 3fad3b641474..3150cb9dd405 100644
--- a/.github/workflows/publish-check-crates.yml
+++ b/.github/workflows/publish-check-crates.yml
@@ -24,7 +24,7 @@ jobs:
cache-on-failure: true
- name: install parity-publish
- run: cargo install parity-publish@0.8.0 --locked -q
+ run: cargo install parity-publish@0.10.3 --locked -q
- name: parity-publish check
run: parity-publish --color always check --allow-unpublished
diff --git a/.github/workflows/publish-claim-crates.yml b/.github/workflows/publish-claim-crates.yml
index 37bf06bb82d8..a6efc8a5599e 100644
--- a/.github/workflows/publish-claim-crates.yml
+++ b/.github/workflows/publish-claim-crates.yml
@@ -18,7 +18,7 @@ jobs:
cache-on-failure: true
- name: install parity-publish
- run: cargo install parity-publish@0.8.0 --locked -q
+ run: cargo install parity-publish@0.10.3 --locked -q
- name: parity-publish claim
env:
diff --git a/.github/workflows/release-branchoff-stable.yml b/.github/workflows/release-10_branchoff-stable.yml
similarity index 100%
rename from .github/workflows/release-branchoff-stable.yml
rename to .github/workflows/release-10_branchoff-stable.yml
diff --git a/.github/workflows/release-10_rc-automation.yml b/.github/workflows/release-11_rc-automation.yml
similarity index 100%
rename from .github/workflows/release-10_rc-automation.yml
rename to .github/workflows/release-11_rc-automation.yml
diff --git a/.github/workflows/release-build-rc.yml b/.github/workflows/release-20_build-rc.yml
similarity index 62%
rename from .github/workflows/release-build-rc.yml
rename to .github/workflows/release-20_build-rc.yml
index a43c2b282a8d..d4c7055c37c5 100644
--- a/.github/workflows/release-build-rc.yml
+++ b/.github/workflows/release-20_build-rc.yml
@@ -11,10 +11,12 @@ on:
- polkadot
- polkadot-parachain
- polkadot-omni-node
+ - frame-omni-bencher
+ - chain-spec-builder
- all
release_tag:
- description: Tag matching the actual release candidate with the format stableYYMM-rcX or stableYYMM
+ description: Tag matching the actual release candidate with the format polkadot-stableYYMM(-X)-rcX or polkadot-stableYYMM(-X)
type: string
jobs:
@@ -106,6 +108,50 @@ jobs:
attestations: write
contents: read
+ build-frame-omni-bencher-binary:
+ needs: [validate-inputs]
+ if: ${{ inputs.binary == 'frame-omni-bencher' || inputs.binary == 'all' }}
+ uses: "./.github/workflows/release-reusable-rc-buid.yml"
+ with:
+ binary: '["frame-omni-bencher"]'
+ package: "frame-omni-bencher"
+ release_tag: ${{ needs.validate-inputs.outputs.release_tag }}
+ target: x86_64-unknown-linux-gnu
+ secrets:
+ PGP_KMS_KEY: ${{ secrets.PGP_KMS_KEY }}
+ PGP_KMS_HASH: ${{ secrets.PGP_KMS_HASH }}
+ AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+ AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+ AWS_DEFAULT_REGION: ${{ secrets.AWS_DEFAULT_REGION }}
+ AWS_RELEASE_ACCESS_KEY_ID: ${{ secrets.AWS_RELEASE_ACCESS_KEY_ID }}
+ AWS_RELEASE_SECRET_ACCESS_KEY: ${{ secrets.AWS_RELEASE_SECRET_ACCESS_KEY }}
+ permissions:
+ id-token: write
+ attestations: write
+ contents: read
+
+ build-chain-spec-builder-binary:
+ needs: [validate-inputs]
+ if: ${{ inputs.binary == 'chain-spec-builder' || inputs.binary == 'all' }}
+ uses: "./.github/workflows/release-reusable-rc-buid.yml"
+ with:
+ binary: '["chain-spec-builder"]'
+ package: staging-chain-spec-builder
+ release_tag: ${{ needs.validate-inputs.outputs.release_tag }}
+ target: x86_64-unknown-linux-gnu
+ secrets:
+ PGP_KMS_KEY: ${{ secrets.PGP_KMS_KEY }}
+ PGP_KMS_HASH: ${{ secrets.PGP_KMS_HASH }}
+ AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+ AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+ AWS_DEFAULT_REGION: ${{ secrets.AWS_DEFAULT_REGION }}
+ AWS_RELEASE_ACCESS_KEY_ID: ${{ secrets.AWS_RELEASE_ACCESS_KEY_ID }}
+ AWS_RELEASE_SECRET_ACCESS_KEY: ${{ secrets.AWS_RELEASE_SECRET_ACCESS_KEY }}
+ permissions:
+ id-token: write
+ attestations: write
+ contents: read
+
build-polkadot-macos-binary:
needs: [validate-inputs]
if: ${{ inputs.binary == 'polkadot' || inputs.binary == 'all' }}
@@ -134,7 +180,7 @@ jobs:
uses: "./.github/workflows/release-reusable-rc-buid.yml"
with:
binary: '["polkadot-parachain"]'
- package: "polkadot-parachain-bin"
+ package: polkadot-parachain-bin
release_tag: ${{ needs.validate-inputs.outputs.release_tag }}
target: aarch64-apple-darwin
secrets:
@@ -156,7 +202,51 @@ jobs:
uses: "./.github/workflows/release-reusable-rc-buid.yml"
with:
binary: '["polkadot-omni-node"]'
- package: "polkadot-omni-node"
+ package: polkadot-omni-node
+ release_tag: ${{ needs.validate-inputs.outputs.release_tag }}
+ target: aarch64-apple-darwin
+ secrets:
+ PGP_KMS_KEY: ${{ secrets.PGP_KMS_KEY }}
+ PGP_KMS_HASH: ${{ secrets.PGP_KMS_HASH }}
+ AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+ AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+ AWS_DEFAULT_REGION: ${{ secrets.AWS_DEFAULT_REGION }}
+ AWS_RELEASE_ACCESS_KEY_ID: ${{ secrets.AWS_RELEASE_ACCESS_KEY_ID }}
+ AWS_RELEASE_SECRET_ACCESS_KEY: ${{ secrets.AWS_RELEASE_SECRET_ACCESS_KEY }}
+ permissions:
+ id-token: write
+ attestations: write
+ contents: read
+
+ build-frame-omni-bencher-macos-binary:
+ needs: [validate-inputs]
+ if: ${{ inputs.binary == 'frame-omni-bencher' || inputs.binary == 'all' }}
+ uses: "./.github/workflows/release-reusable-rc-buid.yml"
+ with:
+ binary: '["frame-omni-bencher"]'
+ package: frame-omni-bencher
+ release_tag: ${{ needs.validate-inputs.outputs.release_tag }}
+ target: aarch64-apple-darwin
+ secrets:
+ PGP_KMS_KEY: ${{ secrets.PGP_KMS_KEY }}
+ PGP_KMS_HASH: ${{ secrets.PGP_KMS_HASH }}
+ AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+ AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+ AWS_DEFAULT_REGION: ${{ secrets.AWS_DEFAULT_REGION }}
+ AWS_RELEASE_ACCESS_KEY_ID: ${{ secrets.AWS_RELEASE_ACCESS_KEY_ID }}
+ AWS_RELEASE_SECRET_ACCESS_KEY: ${{ secrets.AWS_RELEASE_SECRET_ACCESS_KEY }}
+ permissions:
+ id-token: write
+ attestations: write
+ contents: read
+
+ build-chain-spec-builder-macos-binary:
+ needs: [validate-inputs]
+ if: ${{ inputs.binary == 'chain-spec-builder' || inputs.binary == 'all' }}
+ uses: "./.github/workflows/release-reusable-rc-buid.yml"
+ with:
+ binary: '["chain-spec-builder"]'
+ package: staging-chain-spec-builder
release_tag: ${{ needs.validate-inputs.outputs.release_tag }}
target: aarch64-apple-darwin
secrets:
diff --git a/.github/workflows/release-30_publish_release_draft.yml b/.github/workflows/release-30_publish_release_draft.yml
index 4364b4f80457..78ceea91f100 100644
--- a/.github/workflows/release-30_publish_release_draft.yml
+++ b/.github/workflows/release-30_publish_release_draft.yml
@@ -1,19 +1,46 @@
name: Release - Publish draft
-on:
- push:
- tags:
- # Catches v1.2.3 and v1.2.3-rc1
- - v[0-9]+.[0-9]+.[0-9]+*
- # - polkadot-stable[0-9]+* Activate when the release process from release org is setteled
+# This workflow runs in paritytech-release and creates a full release draft with:
+# - release notes
+# - info about the runtimes
+# - attached artifacts:
+# - runtimes
+# - binaries
+# - signatures
+on:
workflow_dispatch:
inputs:
- version:
- description: Current release/rc version
+ release_tag:
+ description: Tag matching the actual release candidate with the format polkadot-stableYYMM(-X)-rcX or polkadot-stableYYMM(-X)
+ required: true
+ type: string
jobs:
+ check-synchronization:
+ uses: paritytech-release/sync-workflows/.github/workflows/check-syncronization.yml@main
+
+ validate-inputs:
+ needs: [ check-synchronization ]
+    if: ${{ needs.check-synchronization.outputs.checks_passed == 'true' }}
+ runs-on: ubuntu-latest
+ outputs:
+ release_tag: ${{ steps.validate_inputs.outputs.release_tag }}
+
+ steps:
+ - name: Checkout sources
+ uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+
+ - name: Validate inputs
+ id: validate_inputs
+ run: |
+ . ./.github/scripts/common/lib.sh
+
+ RELEASE_TAG=$(validate_stable_tag ${{ inputs.release_tag }})
+ echo "release_tag=${RELEASE_TAG}" >> $GITHUB_OUTPUT
+
get-rust-versions:
+ needs: [ validate-inputs ]
runs-on: ubuntu-latest
outputs:
rustc-stable: ${{ steps.get-rust-versions.outputs.stable }}
@@ -24,47 +51,28 @@ jobs:
echo "stable=$RUST_STABLE_VERSION" >> $GITHUB_OUTPUT
build-runtimes:
+ needs: [ validate-inputs ]
uses: "./.github/workflows/release-srtool.yml"
with:
excluded_runtimes: "asset-hub-rococo bridge-hub-rococo contracts-rococo coretime-rococo people-rococo rococo rococo-parachain substrate-test bp cumulus-test kitchensink minimal-template parachain-template penpal polkadot-test seedling shell frame-try sp solochain-template polkadot-sdk-docs-first"
build_opts: "--features on-chain-release-build"
-
- build-binaries:
- runs-on: ubuntu-latest
- strategy:
- matrix:
- # Tuples of [package, binary-name]
- binary: [ [frame-omni-bencher, frame-omni-bencher], [staging-chain-spec-builder, chain-spec-builder] ]
- steps:
- - name: Checkout sources
- uses: actions/checkout@6d193bf28034eafb982f37bd894289fe649468fc # v4.0.0
-
- - name: Install protobuf-compiler
- run: |
- sudo apt update
- sudo apt install -y protobuf-compiler
-
- - name: Build ${{ matrix.binary[1] }} binary
- run: |
- cargo build --locked --profile=production -p ${{ matrix.binary[0] }} --bin ${{ matrix.binary[1] }}
- target/production/${{ matrix.binary[1] }} --version
-
- - name: Upload ${{ matrix.binary[1] }} binary
- uses: actions/upload-artifact@5d5d22a31266ced268874388b861e4b58bb5c2f3 # v4.3.1
- with:
- name: ${{ matrix.binary[1] }}
- path: target/production/${{ matrix.binary[1] }}
-
+ profile: production
+ permissions:
+ id-token: write
+ attestations: write
+ contents: read
publish-release-draft:
runs-on: ubuntu-latest
- needs: [ get-rust-versions, build-runtimes ]
+ environment: release
+ needs: [ validate-inputs, get-rust-versions, build-runtimes ]
outputs:
release_url: ${{ steps.create-release.outputs.html_url }}
asset_upload_url: ${{ steps.create-release.outputs.upload_url }}
+
steps:
- name: Checkout
- uses: actions/checkout@6d193bf28034eafb982f37bd894289fe649468fc # v4.0.0
+ uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- name: Download artifacts
uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8
@@ -87,20 +95,21 @@ jobs:
GLUTTON_WESTEND_DIGEST: ${{ github.workspace}}/glutton-westend-runtime/glutton-westend-srtool-digest.json
PEOPLE_WESTEND_DIGEST: ${{ github.workspace}}/people-westend-runtime/people-westend-srtool-digest.json
WESTEND_DIGEST: ${{ github.workspace}}/westend-runtime/westend-srtool-digest.json
+ RELEASE_TAG: ${{ needs.validate-inputs.outputs.release_tag }}
shell: bash
run: |
. ./.github/scripts/common/lib.sh
export REF1=$(get_latest_release_tag)
- if [[ -z "${{ inputs.version }}" ]]; then
+ if [[ -z "$RELEASE_TAG" ]]; then
export REF2="${{ github.ref_name }}"
echo "REF2: ${REF2}"
else
- export REF2="${{ inputs.version }}"
+ export REF2="$RELEASE_TAG"
echo "REF2: ${REF2}"
fi
echo "REL_TAG=$REF2" >> $GITHUB_ENV
- export VERSION=$(echo "$REF2" | sed -E 's/.*(stable[0-9]+).*$/\1/')
+ export VERSION=$(echo "$REF2" | sed -E 's/.*(stable[0-9]{4}(-[0-9]+)?).*$/\1/')
./scripts/release/build-changelogs.sh
@@ -112,19 +121,29 @@ jobs:
scripts/release/context.json
**/*-srtool-digest.json
+ - name: Generate content write token for the release automation
+ id: generate_write_token
+ uses: actions/create-github-app-token@v1
+ with:
+ app-id: ${{ vars.POLKADOT_SDK_RELEASE_RW_APP_ID }}
+ private-key: ${{ secrets.POLKADOT_SDK_RELEASE_RW_APP_KEY }}
+ owner: paritytech
+ repositories: polkadot-sdk
+
- name: Create draft release
id: create-release
- uses: actions/create-release@0cb9c9b65d5d1901c1f53e5e66eaf4afd303e70e # v1.1.4
env:
- GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- with:
- tag_name: ${{ env.REL_TAG }}
- release_name: Polkadot ${{ env.REL_TAG }}
- body_path: ${{ github.workspace}}/scripts/release/RELEASE_DRAFT.md
- draft: true
+ GITHUB_TOKEN: ${{ steps.generate_write_token.outputs.token }}
+ run: |
+ gh release create ${{ env.REL_TAG }} \
+ --repo paritytech/polkadot-sdk \
+ --draft \
+ --title "Polkadot ${{ env.REL_TAG }}" \
+ --notes-file ${{ github.workspace}}/scripts/release/RELEASE_DRAFT.md
publish-runtimes:
- needs: [ build-runtimes, publish-release-draft ]
+ needs: [ validate-inputs, build-runtimes, publish-release-draft ]
+ environment: release
continue-on-error: true
runs-on: ubuntu-latest
strategy:
@@ -132,7 +151,7 @@ jobs:
steps:
- name: Checkout sources
- uses: actions/checkout@6d193bf28034eafb982f37bd894289fe649468fc # v4.0.0
+ uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- name: Download artifacts
uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8
@@ -144,44 +163,83 @@ jobs:
>>$GITHUB_ENV echo ASSET=$(find ${{ matrix.chain }}-runtime -name '*.compact.compressed.wasm')
>>$GITHUB_ENV echo SPEC=$(<${JSON} jq -r .runtimes.compact.subwasm.core_version.specVersion)
+ - name: Generate content write token for the release automation
+ id: generate_write_token
+ uses: actions/create-github-app-token@v1
+ with:
+ app-id: ${{ vars.POLKADOT_SDK_RELEASE_RW_APP_ID }}
+ private-key: ${{ secrets.POLKADOT_SDK_RELEASE_RW_APP_KEY }}
+ owner: paritytech
+ repositories: polkadot-sdk
+
- name: Upload compressed ${{ matrix.chain }} v${{ env.SPEC }} wasm
- if: ${{ matrix.chain != 'rococo-parachain' }}
- uses: actions/upload-release-asset@e8f9f06c4b078e705bd2ea027f0926603fc9b4d5 #v1.0.2
env:
- GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- with:
- upload_url: ${{ needs.publish-release-draft.outputs.asset_upload_url }}
- asset_path: ${{ env.ASSET }}
- asset_name: ${{ matrix.chain }}_runtime-v${{ env.SPEC }}.compact.compressed.wasm
- asset_content_type: application/wasm
+ GITHUB_TOKEN: ${{ steps.generate_write_token.outputs.token }}
+ run: |
+ gh release upload ${{ needs.validate-inputs.outputs.release_tag }} \
+ --repo paritytech/polkadot-sdk \
+ '${{ env.ASSET }}#${{ matrix.chain }}_runtime-v${{ env.SPEC }}.compact.compressed.wasm'
- publish-binaries:
- needs: [ publish-release-draft, build-binaries ]
+ publish-release-artifacts:
+ needs: [ validate-inputs, publish-release-draft ]
+ environment: release
continue-on-error: true
runs-on: ubuntu-latest
strategy:
matrix:
- binary: [frame-omni-bencher, chain-spec-builder]
+ binary: [ polkadot, polkadot-execute-worker, polkadot-prepare-worker, polkadot-parachain, polkadot-omni-node, frame-omni-bencher, chain-spec-builder ]
+ target: [ x86_64-unknown-linux-gnu, aarch64-apple-darwin ]
steps:
- - name: Download artifacts
- uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8
+ - name: Checkout sources
+ uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+
+ - name: Fetch binaries from s3 based on version
+ run: |
+ . ./.github/scripts/common/lib.sh
+
+ VERSION="${{ needs.validate-inputs.outputs.release_tag }}"
+ fetch_release_artifacts_from_s3 ${{ matrix.binary }} ${{ matrix.target }}
+
+ - name: Rename aarch64-apple-darwin binaries
+ if: ${{ matrix.target == 'aarch64-apple-darwin' }}
+ working-directory: ${{ github.workspace}}/release-artifacts/${{ matrix.target }}/${{ matrix.binary }}
+ run: |
+ mv ${{ matrix.binary }} ${{ matrix.binary }}-aarch64-apple-darwin
+ mv ${{ matrix.binary }}.asc ${{ matrix.binary }}-aarch64-apple-darwin.asc
+ mv ${{ matrix.binary }}.sha256 ${{ matrix.binary }}-aarch64-apple-darwin.sha256
+
+ - name: Generate content write token for the release automation
+ id: generate_write_token
+ uses: actions/create-github-app-token@v1
with:
- name: ${{ matrix.binary }}
+ app-id: ${{ vars.POLKADOT_SDK_RELEASE_RW_APP_ID }}
+ private-key: ${{ secrets.POLKADOT_SDK_RELEASE_RW_APP_KEY }}
+ owner: paritytech
+ repositories: polkadot-sdk
- - name: Upload ${{ matrix.binary }} binary
- uses: actions/upload-release-asset@e8f9f06c4b078e705bd2ea027f0926603fc9b4d5 #v1.0.2
+ - name: Upload ${{ matrix.binary }} binary to release draft
env:
- GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- with:
- upload_url: ${{ needs.publish-release-draft.outputs.asset_upload_url }}
- asset_path: ${{ github.workspace}}/${{ matrix.binary }}
- asset_name: ${{ matrix.binary }}
- asset_content_type: application/octet-stream
+ GITHUB_TOKEN: ${{ steps.generate_write_token.outputs.token }}
+ working-directory: ${{ github.workspace}}/release-artifacts/${{ matrix.target }}/${{ matrix.binary }}
+ run: |
+ if [[ ${{ matrix.target }} == "aarch64-apple-darwin" ]]; then
+ gh release upload ${{ needs.validate-inputs.outputs.release_tag }} \
+ --repo paritytech/polkadot-sdk \
+ ${{ matrix.binary }}-aarch64-apple-darwin \
+ ${{ matrix.binary }}-aarch64-apple-darwin.asc \
+ ${{ matrix.binary }}-aarch64-apple-darwin.sha256
+ else
+ gh release upload ${{ needs.validate-inputs.outputs.release_tag }} \
+ --repo paritytech/polkadot-sdk \
+ ${{ matrix.binary }} \
+ ${{ matrix.binary }}.asc \
+ ${{ matrix.binary }}.sha256
+ fi
post_to_matrix:
runs-on: ubuntu-latest
- needs: publish-release-draft
+ needs: [ validate-inputs, publish-release-draft ]
environment: release
strategy:
matrix:
@@ -197,5 +255,5 @@ jobs:
access_token: ${{ secrets.RELEASENOTES_MATRIX_V2_ACCESS_TOKEN }}
server: m.parity.io
message: |
- **New version of polkadot tagged**: ${{ github.ref_name }}
- Draft release created: ${{ needs.publish-release-draft.outputs.release_url }}
+ **New version of polkadot tagged**: ${{ needs.validate-inputs.outputs.release_tag }}
+    Draft release created in the [polkadot-sdk repo](https://github.com/paritytech/polkadot-sdk/releases)
diff --git a/.github/workflows/release-31_promote-rc-to-final.yml b/.github/workflows/release-31_promote-rc-to-final.yml
new file mode 100644
index 000000000000..6aa9d4bddd1d
--- /dev/null
+++ b/.github/workflows/release-31_promote-rc-to-final.yml
@@ -0,0 +1,125 @@
+name: Release - Promote RC to final candidate on S3
+
+on:
+ workflow_dispatch:
+ inputs:
+ binary:
+      description: Binary to be built for the release
+ default: all
+ type: choice
+ options:
+ - polkadot
+ - polkadot-parachain
+ - polkadot-omni-node
+ - frame-omni-bencher
+ - chain-spec-builder
+ - all
+ release_tag:
+ description: Tag matching the actual release candidate with the format polkadot-stableYYMM(-X)-rcX
+ type: string
+
+
+jobs:
+
+ check-synchronization:
+ uses: paritytech-release/sync-workflows/.github/workflows/check-syncronization.yml@main
+
+ validate-inputs:
+ needs: [ check-synchronization ]
+    if: ${{ needs.check-synchronization.outputs.checks_passed == 'true' }}
+ runs-on: ubuntu-latest
+ outputs:
+ release_tag: ${{ steps.validate_inputs.outputs.release_tag }}
+ final_tag: ${{ steps.validate_inputs.outputs.final_tag }}
+
+ steps:
+ - name: Checkout sources
+ uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+
+ - name: Validate inputs
+ id: validate_inputs
+ run: |
+ . ./.github/scripts/common/lib.sh
+
+ RELEASE_TAG=$(validate_stable_tag ${{ inputs.release_tag }})
+ echo "release_tag=${RELEASE_TAG}" >> $GITHUB_OUTPUT
+
+ promote-polkadot-rc-to-final:
+ if: ${{ inputs.binary == 'polkadot' || inputs.binary == 'all' }}
+ needs: [ validate-inputs ]
+ uses: ./.github/workflows/release-reusable-promote-to-final.yml
+ strategy:
+ matrix:
+ target: [ x86_64-unknown-linux-gnu, aarch64-apple-darwin ]
+ with:
+ package: polkadot
+ release_tag: ${{ needs.validate-inputs.outputs.release_tag }}
+ target: ${{ matrix.target }}
+ secrets:
+ AWS_DEFAULT_REGION: ${{ secrets.AWS_DEFAULT_REGION }}
+ AWS_RELEASE_ACCESS_KEY_ID: ${{ secrets.AWS_RELEASE_ACCESS_KEY_ID }}
+ AWS_RELEASE_SECRET_ACCESS_KEY: ${{ secrets.AWS_RELEASE_SECRET_ACCESS_KEY }}
+
+ promote-polkadot-parachain-rc-to-final:
+ if: ${{ inputs.binary == 'polkadot-parachain' || inputs.binary == 'all' }}
+ needs: [ validate-inputs ]
+ uses: ./.github/workflows/release-reusable-promote-to-final.yml
+ strategy:
+ matrix:
+ target: [ x86_64-unknown-linux-gnu, aarch64-apple-darwin ]
+ with:
+ package: polkadot-parachain
+ release_tag: ${{ needs.validate-inputs.outputs.release_tag }}
+ target: ${{ matrix.target }}
+ secrets:
+ AWS_DEFAULT_REGION: ${{ secrets.AWS_DEFAULT_REGION }}
+ AWS_RELEASE_ACCESS_KEY_ID: ${{ secrets.AWS_RELEASE_ACCESS_KEY_ID }}
+ AWS_RELEASE_SECRET_ACCESS_KEY: ${{ secrets.AWS_RELEASE_SECRET_ACCESS_KEY }}
+
+ promote-polkadot-omni-node-rc-to-final:
+ if: ${{ inputs.binary == 'polkadot-omni-node' || inputs.binary == 'all' }}
+ needs: [ validate-inputs ]
+ uses: ./.github/workflows/release-reusable-promote-to-final.yml
+ strategy:
+ matrix:
+ target: [ x86_64-unknown-linux-gnu, aarch64-apple-darwin ]
+ with:
+ package: polkadot-omni-node
+ release_tag: ${{ needs.validate-inputs.outputs.release_tag }}
+ target: ${{ matrix.target }}
+ secrets:
+ AWS_DEFAULT_REGION: ${{ secrets.AWS_DEFAULT_REGION }}
+ AWS_RELEASE_ACCESS_KEY_ID: ${{ secrets.AWS_RELEASE_ACCESS_KEY_ID }}
+ AWS_RELEASE_SECRET_ACCESS_KEY: ${{ secrets.AWS_RELEASE_SECRET_ACCESS_KEY }}
+
+ promote-frame-omni-bencher-rc-to-final:
+ if: ${{ inputs.binary == 'frame-omni-bencher' || inputs.binary == 'all' }}
+ needs: [ validate-inputs ]
+ uses: ./.github/workflows/release-reusable-promote-to-final.yml
+ strategy:
+ matrix:
+ target: [ x86_64-unknown-linux-gnu, aarch64-apple-darwin ]
+ with:
+ package: frame-omni-bencher
+ release_tag: ${{ needs.validate-inputs.outputs.release_tag }}
+ target: ${{ matrix.target }}
+ secrets:
+ AWS_DEFAULT_REGION: ${{ secrets.AWS_DEFAULT_REGION }}
+ AWS_RELEASE_ACCESS_KEY_ID: ${{ secrets.AWS_RELEASE_ACCESS_KEY_ID }}
+ AWS_RELEASE_SECRET_ACCESS_KEY: ${{ secrets.AWS_RELEASE_SECRET_ACCESS_KEY }}
+
+ promote-chain-spec-builder-rc-to-final:
+ if: ${{ inputs.binary == 'chain-spec-builder' || inputs.binary == 'all' }}
+ needs: [ validate-inputs ]
+ uses: ./.github/workflows/release-reusable-promote-to-final.yml
+ strategy:
+ matrix:
+ target: [ x86_64-unknown-linux-gnu, aarch64-apple-darwin ]
+ with:
+ package: chain-spec-builder
+ release_tag: ${{ needs.validate-inputs.outputs.release_tag }}
+ target: ${{ matrix.target }}
+ secrets:
+ AWS_DEFAULT_REGION: ${{ secrets.AWS_DEFAULT_REGION }}
+ AWS_RELEASE_ACCESS_KEY_ID: ${{ secrets.AWS_RELEASE_ACCESS_KEY_ID }}
+ AWS_RELEASE_SECRET_ACCESS_KEY: ${{ secrets.AWS_RELEASE_SECRET_ACCESS_KEY }}
diff --git a/.github/workflows/release-50_publish-docker.yml b/.github/workflows/release-50_publish-docker.yml
index 627e53bacd88..5c3c3a6e854d 100644
--- a/.github/workflows/release-50_publish-docker.yml
+++ b/.github/workflows/release-50_publish-docker.yml
@@ -4,10 +4,6 @@ name: Release - Publish Docker Image
# It builds and published releases and rc candidates.
on:
- #TODO: activate automated run later
- # release:
- # types:
- # - published
workflow_dispatch:
inputs:
image_type:
@@ -30,16 +26,6 @@ on:
- polkadot-parachain
- chain-spec-builder
- release_id:
- description: |
- Release ID.
- You can find it using the command:
- curl -s \
- -H "Authorization: Bearer ${GITHUB_TOKEN}" https://api.github.com/repos/$OWNER/$REPO/releases | \
- jq '.[] | { name: .name, id: .id }'
- required: true
- type: number
-
registry:
description: Container registry
required: true
@@ -55,7 +41,7 @@ on:
default: parity
version:
- description: version to build/release
+      description: Version of the polkadot node release in the format v1.16.0 or v1.16.0-rc1
default: v0.9.18
required: true
@@ -78,11 +64,15 @@ env:
IMAGE_TYPE: ${{ inputs.image_type }}
jobs:
+ check-synchronization:
+ uses: paritytech-release/sync-workflows/.github/workflows/check-syncronization.yml@main
+
validate-inputs:
+ needs: [check-synchronization]
+    if: ${{ needs.check-synchronization.outputs.checks_passed == 'true' }}
runs-on: ubuntu-latest
outputs:
version: ${{ steps.validate_inputs.outputs.VERSION }}
- release_id: ${{ steps.validate_inputs.outputs.RELEASE_ID }}
stable_tag: ${{ steps.validate_inputs.outputs.stable_tag }}
steps:
@@ -97,11 +87,6 @@ jobs:
VERSION=$(filter_version_from_input "${{ inputs.version }}")
echo "VERSION=${VERSION}" >> $GITHUB_OUTPUT
- RELEASE_ID=$(check_release_id "${{ inputs.release_id }}")
- echo "RELEASE_ID=${RELEASE_ID}" >> $GITHUB_OUTPUT
-
- echo "Release ID: $RELEASE_ID"
-
STABLE_TAG=$(validate_stable_tag ${{ inputs.stable_tag }})
echo "stable_tag=${STABLE_TAG}" >> $GITHUB_OUTPUT
@@ -114,50 +99,26 @@ jobs:
- name: Checkout sources
uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0
- #TODO: this step will be needed when automated triggering will work
- #this step runs only if the workflow is triggered automatically when new release is published
- # if: ${{ env.EVENT_NAME == 'release' && env.EVENT_ACTION != '' && env.EVENT_ACTION == 'published' }}
- # run: |
- # mkdir -p release-artifacts && cd release-artifacts
-
- # for f in $BINARY $BINARY.asc $BINARY.sha256; do
- # URL="https://github.com/${{ github.event.repository.full_name }}/releases/download/${{ github.event.release.tag_name }}/$f"
- # echo " - Fetching $f from $URL"
- # wget "$URL" -O "$f"
- # done
- # chmod a+x $BINARY
- # ls -al
-
- name: Fetch rc artifacts or release artifacts from s3 based on version
- #this step runs only if the workflow is triggered manually
- if: ${{ env.EVENT_NAME == 'workflow_dispatch' && inputs.binary != 'polkadot-omni-node' && inputs.binary != 'chain-spec-builder'}}
+ # if: ${{ env.EVENT_NAME == 'workflow_dispatch' && inputs.binary != 'polkadot-omni-node' && inputs.binary != 'chain-spec-builder'}}
run: |
. ./.github/scripts/common/lib.sh
- VERSION="${{ needs.validate-inputs.outputs.VERSION }}"
+ VERSION="${{ needs.validate-inputs.outputs.stable_tag }}"
if [[ ${{ inputs.binary }} == 'polkadot' ]]; then
bins=(polkadot polkadot-prepare-worker polkadot-execute-worker)
for bin in "${bins[@]}"; do
- fetch_release_artifacts_from_s3 $bin
+ fetch_release_artifacts_from_s3 $bin x86_64-unknown-linux-gnu
done
else
- fetch_release_artifacts_from_s3 $BINARY
+ fetch_release_artifacts_from_s3 $BINARY x86_64-unknown-linux-gnu
fi
- - name: Fetch polkadot-omni-node/chain-spec-builder rc artifacts or release artifacts based on release id
- #this step runs only if the workflow is triggered manually and only for chain-spec-builder
- if: ${{ env.EVENT_NAME == 'workflow_dispatch' && (inputs.binary == 'polkadot-omni-node' || inputs.binary == 'chain-spec-builder') }}
- run: |
- . ./.github/scripts/common/lib.sh
-
- RELEASE_ID="${{ needs.validate-inputs.outputs.RELEASE_ID }}"
- fetch_release_artifacts
-
- name: Upload artifacts
uses: actions/upload-artifact@5d5d22a31266ced268874388b861e4b58bb5c2f3 # v4.3.1
with:
name: release-artifacts
- path: release-artifacts/${{ env.BINARY }}/**/*
+ path: release-artifacts/x86_64-unknown-linux-gnu/${{ env.BINARY }}/**/*
build-container: # this job will be triggered for the polkadot-parachain rc and release or polkadot rc image build
if: ${{ inputs.binary == 'polkadot-omni-node' || inputs.binary == 'polkadot-parachain' || inputs.binary == 'chain-spec-builder' || inputs.image_type == 'rc' }}
@@ -173,7 +134,7 @@ jobs:
uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8
- name: Check sha256 ${{ env.BINARY }}
- if: ${{ inputs.binary == 'polkadot-parachain' || inputs.binary == 'polkadot' }}
+ # if: ${{ inputs.binary == 'polkadot-parachain' || inputs.binary == 'polkadot' }}
working-directory: release-artifacts
run: |
. ../.github/scripts/common/lib.sh
@@ -182,7 +143,7 @@ jobs:
check_sha256 $BINARY && echo "OK" || echo "ERR"
- name: Check GPG ${{ env.BINARY }}
- if: ${{ inputs.binary == 'polkadot-parachain' || inputs.binary == 'polkadot' }}
+ # if: ${{ inputs.binary == 'polkadot-parachain' || inputs.binary == 'polkadot' }}
working-directory: release-artifacts
run: |
. ../.github/scripts/common/lib.sh
@@ -190,35 +151,29 @@ jobs:
check_gpg $BINARY
- name: Fetch rc commit and tag
+ working-directory: release-artifacts
if: ${{ env.IMAGE_TYPE == 'rc' }}
id: fetch_rc_refs
+ shell: bash
run: |
- . ./.github/scripts/common/lib.sh
-
- echo "release=${{ needs.validate-inputs.outputs.stable_tag }}" >> $GITHUB_OUTPUT
+ . ../.github/scripts/common/lib.sh
commit=$(git rev-parse --short HEAD) && \
echo "commit=${commit}" >> $GITHUB_OUTPUT
-
- echo "tag=${{ needs.validate-inputs.outputs.version }}" >> $GITHUB_OUTPUT
+ echo "release=$(echo ${{ needs.validate-inputs.outputs.version }})" >> $GITHUB_OUTPUT
+ echo "tag=$(prepare_docker_stable_tag ${{ needs.validate-inputs.outputs.stable_tag }})" >> $GITHUB_OUTPUT
- name: Fetch release tags
working-directory: release-artifacts
if: ${{ env.IMAGE_TYPE == 'release'}}
id: fetch_release_refs
+ shell: bash
run: |
- chmod a+rx $BINARY
-
- if [[ $BINARY != 'chain-spec-builder' ]]; then
- VERSION=$(./$BINARY --version | awk '{ print $2 }' )
- release=$( echo $VERSION | cut -f1 -d- )
- else
- release=$(echo ${{ needs.validate-inputs.outputs.VERSION }} | sed 's/^v//')
- fi
+ . ../.github/scripts/common/lib.sh
echo "tag=latest" >> $GITHUB_OUTPUT
- echo "release=${release}" >> $GITHUB_OUTPUT
- echo "stable=${{ needs.validate-inputs.outputs.stable_tag }}" >> $GITHUB_OUTPUT
+ echo "release=$(echo ${{ needs.validate-inputs.outputs.version }})" >> $GITHUB_OUTPUT
+ echo "stable=$(prepare_docker_stable_tag ${{ needs.validate-inputs.outputs.stable_tag }})" >> $GITHUB_OUTPUT
- name: Build Injected Container image for polkadot rc
if: ${{ env.BINARY == 'polkadot' }}
@@ -342,8 +297,10 @@ jobs:
- name: Fetch values
id: fetch-data
run: |
+ . ./.github/scripts/common/lib.sh
date=$(date -u '+%Y-%m-%dT%H:%M:%SZ')
echo "date=$date" >> $GITHUB_OUTPUT
+ echo "stable=$(prepare_docker_stable_tag ${{ needs.validate-inputs.outputs.stable_tag }})" >> $GITHUB_OUTPUT
- name: Build and push
id: docker_build
@@ -354,9 +311,9 @@ jobs:
# TODO: The owner should be used below but buildx does not resolve the VARs
# TODO: It would be good to get rid of this GHA that we don't really need.
tags: |
- parity/polkadot:${{ needs.validate-inputs.outputs.stable_tag }}
- parity/polkadot:latest
- parity/polkadot:${{ needs.fetch-latest-debian-package-version.outputs.polkadot_container_tag }}
+ egorpop/polkadot:${{ steps.fetch-data.outputs.stable }}
+ egorpop/polkadot:latest
+ egorpop/polkadot:${{ needs.fetch-latest-debian-package-version.outputs.polkadot_container_tag }}
build-args: |
VCS_REF=${{ github.ref }}
POLKADOT_VERSION=${{ needs.fetch-latest-debian-package-version.outputs.polkadot_apt_version }}
diff --git a/.github/workflows/release-reusable-promote-to-final.yml b/.github/workflows/release-reusable-promote-to-final.yml
new file mode 100644
index 000000000000..ed4a80a01e82
--- /dev/null
+++ b/.github/workflows/release-reusable-promote-to-final.yml
@@ -0,0 +1,83 @@
+name: Promote rc to final
+
+on:
+ workflow_call:
+ inputs:
+ package:
+ description: Package to be promoted
+ required: true
+ type: string
+
+ release_tag:
+ description: Tag matching the actual release candidate with the format polkadot-stableYYMM(-X)-rcX that will be changed to final in the form polkadot-stableYYMM(-X)
+ required: true
+ type: string
+
+ target:
+ description: Target triple for which the artifacts are being uploaded (e.g. aarch64-apple-darwin)
+ required: true
+ type: string
+
+ secrets:
+ AWS_DEFAULT_REGION:
+ required: true
+ AWS_RELEASE_ACCESS_KEY_ID:
+ required: true
+ AWS_RELEASE_SECRET_ACCESS_KEY:
+ required: true
+
+jobs:
+
+ promote-release-artifacts:
+ environment: release
+ runs-on: ubuntu-latest
+ env:
+ AWS_ACCESS_KEY_ID: ${{ secrets.AWS_RELEASE_ACCESS_KEY_ID }}
+ AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_RELEASE_SECRET_ACCESS_KEY }}
+ AWS_REGION: ${{ secrets.AWS_DEFAULT_REGION }}
+
+ steps:
+ - name: Checkout sources
+ uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+
+ - name: Prepare final tag
+ id: prepare_final_tag
+ shell: bash
+ run: |
+ tag="$(echo ${{ inputs.release_tag }} | sed 's/-rc[0-9]*$//')"
+ echo $tag
+ echo "FINAL_TAG=${tag}" >> $GITHUB_OUTPUT
+
+ - name: Fetch binaries from s3 based on version
+ run: |
+ . ./.github/scripts/common/lib.sh
+
+ VERSION="${{ inputs.release_tag }}"
+ if [[ ${{ inputs.package }} == 'polkadot' ]]; then
+ packages=(polkadot polkadot-prepare-worker polkadot-execute-worker)
+ for package in "${packages[@]}"; do
+ fetch_release_artifacts_from_s3 $package ${{ inputs.target }}
+ done
+ else
+ fetch_release_artifacts_from_s3 ${{ inputs.package }} ${{ inputs.target }}
+ fi
+
+ - name: Configure AWS Credentials
+ uses: aws-actions/configure-aws-credentials@e3dd6a429d7300a6a4c196c26e071d42e0343502 # v4.0.2
+ with:
+ aws-access-key-id: ${{ env.AWS_ACCESS_KEY_ID }}
+ aws-secret-access-key: ${{ env.AWS_SECRET_ACCESS_KEY }}
+ aws-region: ${{ env.AWS_REGION }}
+
+ - name: Upload ${{ inputs.package }} ${{ inputs.target }} artifacts to s3
+ run: |
+ . ./.github/scripts/release/release_lib.sh
+
+ if [[ ${{ inputs.package }} == 'polkadot' ]]; then
+ packages=(polkadot polkadot-prepare-worker polkadot-execute-worker)
+ for package in "${packages[@]}"; do
+ upload_s3_release $package ${{ steps.prepare_final_tag.outputs.final_tag }} ${{ inputs.target }}
+ done
+ else
+ upload_s3_release ${{ inputs.package }} ${{ steps.prepare_final_tag.outputs.final_tag }} ${{ inputs.target }}
+ fi
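
The job above promotes an rc build to final purely by re-tagging artifacts: the `Prepare final tag` step strips the `-rcX` suffix with `sed 's/-rc[0-9]*$//'`, the binaries are fetched from S3 under the rc tag, then re-uploaded under the final tag. For illustration, the same tag rewrite expressed as a small Rust helper (the function name and sample tags are hypothetical; the workflow itself only runs the sed one-liner):

fn strip_rc_suffix(release_tag: &str) -> String {
    // Mirror `sed 's/-rc[0-9]*$//'`: drop a trailing "-rc<digits>" (the
    // regex also matches a bare trailing "-rc", so zero digits is allowed).
    if let Some(idx) = release_tag.rfind("-rc") {
        let suffix = &release_tag[idx + 3..];
        if suffix.chars().all(|c| c.is_ascii_digit()) {
            return release_tag[..idx].to_string();
        }
    }
    release_tag.to_string()
}

fn main() {
    assert_eq!(strip_rc_suffix("polkadot-stable2412-rc3"), "polkadot-stable2412");
    assert_eq!(strip_rc_suffix("polkadot-stable2412-1-rc1"), "polkadot-stable2412-1");
    // Already-final tags pass through unchanged.
    assert_eq!(strip_rc_suffix("polkadot-stable2412"), "polkadot-stable2412");
}
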
diff --git a/.github/workflows/release-reusable-rc-buid.yml b/.github/workflows/release-reusable-rc-buid.yml
index f5240878cba2..0222b2aa91e2 100644
--- a/.github/workflows/release-reusable-rc-buid.yml
+++ b/.github/workflows/release-reusable-rc-buid.yml
@@ -133,7 +133,7 @@ jobs:
- name: Upload ${{ matrix.binaries }} artifacts
uses: actions/upload-artifact@5d5d22a31266ced268874388b861e4b58bb5c2f3 # v4.3.1
with:
- name: ${{ matrix.binaries }}
+ name: ${{ matrix.binaries }}_${{ inputs.target }}
path: /artifacts/${{ matrix.binaries }}
build-macos-rc:
@@ -285,7 +285,7 @@ jobs:
- name: Upload ${{inputs.package }} artifacts
uses: actions/upload-artifact@5d5d22a31266ced268874388b861e4b58bb5c2f3 # v4.3.1
with:
- name: ${{ inputs.package }}
+ name: ${{ inputs.package }}_${{ inputs.target }}
path: target/production
overwrite: true
@@ -302,7 +302,6 @@ jobs:
AWS_RELEASE_ACCESS_KEY_ID: ${{ secrets.AWS_RELEASE_ACCESS_KEY_ID }}
AWS_RELEASE_SECRET_ACCESS_KEY: ${{ secrets.AWS_RELEASE_SECRET_ACCESS_KEY }}
-
upload-polkadot-parachain-artifacts-to-s3:
if: ${{ inputs.package == 'polkadot-parachain-bin' && inputs.target == 'x86_64-unknown-linux-gnu' }}
needs: [build-rc]
@@ -329,6 +328,32 @@ jobs:
AWS_RELEASE_ACCESS_KEY_ID: ${{ secrets.AWS_RELEASE_ACCESS_KEY_ID }}
AWS_RELEASE_SECRET_ACCESS_KEY: ${{ secrets.AWS_RELEASE_SECRET_ACCESS_KEY }}
+ upload-frame-omni-bencher-artifacts-to-s3:
+ if: ${{ inputs.package == 'frame-omni-bencher' && inputs.target == 'x86_64-unknown-linux-gnu' }}
+ needs: [build-rc]
+ uses: ./.github/workflows/release-reusable-s3-upload.yml
+ with:
+ package: ${{ inputs.package }}
+ release_tag: ${{ inputs.release_tag }}
+ target: ${{ inputs.target }}
+ secrets:
+ AWS_DEFAULT_REGION: ${{ secrets.AWS_DEFAULT_REGION }}
+ AWS_RELEASE_ACCESS_KEY_ID: ${{ secrets.AWS_RELEASE_ACCESS_KEY_ID }}
+ AWS_RELEASE_SECRET_ACCESS_KEY: ${{ secrets.AWS_RELEASE_SECRET_ACCESS_KEY }}
+
+ upload-chain-spec-builder-artifacts-to-s3:
+ if: ${{ inputs.package == 'staging-chain-spec-builder' && inputs.target == 'x86_64-unknown-linux-gnu' }}
+ needs: [build-rc]
+ uses: ./.github/workflows/release-reusable-s3-upload.yml
+ with:
+ package: chain-spec-builder
+ release_tag: ${{ inputs.release_tag }}
+ target: ${{ inputs.target }}
+ secrets:
+ AWS_DEFAULT_REGION: ${{ secrets.AWS_DEFAULT_REGION }}
+ AWS_RELEASE_ACCESS_KEY_ID: ${{ secrets.AWS_RELEASE_ACCESS_KEY_ID }}
+ AWS_RELEASE_SECRET_ACCESS_KEY: ${{ secrets.AWS_RELEASE_SECRET_ACCESS_KEY }}
+
upload-polkadot-macos-artifacts-to-s3:
if: ${{ inputs.package == 'polkadot' && inputs.target == 'aarch64-apple-darwin' }}
# TODO: add and use a `build-polkadot-homebrew-package` which packs all `polkadot` binaries:
@@ -395,3 +420,29 @@ jobs:
AWS_DEFAULT_REGION: ${{ secrets.AWS_DEFAULT_REGION }}
AWS_RELEASE_ACCESS_KEY_ID: ${{ secrets.AWS_RELEASE_ACCESS_KEY_ID }}
AWS_RELEASE_SECRET_ACCESS_KEY: ${{ secrets.AWS_RELEASE_SECRET_ACCESS_KEY }}
+
+ upload-frame-omni-bencher-macos-artifacts-to-s3:
+ if: ${{ inputs.package == 'frame-omni-bencher' && inputs.target == 'aarch64-apple-darwin' }}
+ needs: [build-macos-rc]
+ uses: ./.github/workflows/release-reusable-s3-upload.yml
+ with:
+ package: ${{ inputs.package }}
+ release_tag: ${{ inputs.release_tag }}
+ target: ${{ inputs.target }}
+ secrets:
+ AWS_DEFAULT_REGION: ${{ secrets.AWS_DEFAULT_REGION }}
+ AWS_RELEASE_ACCESS_KEY_ID: ${{ secrets.AWS_RELEASE_ACCESS_KEY_ID }}
+ AWS_RELEASE_SECRET_ACCESS_KEY: ${{ secrets.AWS_RELEASE_SECRET_ACCESS_KEY }}
+
+ upload-chain-spec-builder-macos-artifacts-to-s3:
+ if: ${{ inputs.package == 'staging-chain-spec-builder' && inputs.target == 'aarch64-apple-darwin' }}
+ needs: [build-macos-rc]
+ uses: ./.github/workflows/release-reusable-s3-upload.yml
+ with:
+ package: chain-spec-builder
+ release_tag: ${{ inputs.release_tag }}
+ target: ${{ inputs.target }}
+ secrets:
+ AWS_DEFAULT_REGION: ${{ secrets.AWS_DEFAULT_REGION }}
+ AWS_RELEASE_ACCESS_KEY_ID: ${{ secrets.AWS_RELEASE_ACCESS_KEY_ID }}
+ AWS_RELEASE_SECRET_ACCESS_KEY: ${{ secrets.AWS_RELEASE_SECRET_ACCESS_KEY }}
diff --git a/.github/workflows/release-reusable-s3-upload.yml b/.github/workflows/release-reusable-s3-upload.yml
index f85466bc8c07..48c7e53c6c8f 100644
--- a/.github/workflows/release-reusable-s3-upload.yml
+++ b/.github/workflows/release-reusable-s3-upload.yml
@@ -9,7 +9,7 @@ on:
type: string
release_tag:
- description: Tag matching the actual release candidate with the format stableYYMM-rcX or stableYYMM-rcX
+ description: Tag matching the actual release candidate with the format polkadot-stableYYMM(-X)-rcX or polkadot-stableYYMM-rcX
required: true
type: string
@@ -40,18 +40,10 @@ jobs:
uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0
- name: Download amd64 artifacts
- if: ${{ inputs.target == 'x86_64-unknown-linux-gnu' }}
uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8
with:
- name: ${{ inputs.package }}
- path: artifacts/${{ inputs.package }}
-
- - name: Download arm artifacts
- if: ${{ inputs.target == 'aarch64-apple-darwin' }}
- uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8
- with:
- name: ${{ inputs.package }}_aarch64-apple-darwin
- path: artifacts/${{ inputs.package }}
+ name: ${{ inputs.package }}_${{ inputs.target }}
+ path: release-artifacts/${{ inputs.target }}/${{ inputs.package }}
- name: Configure AWS Credentials
uses: aws-actions/configure-aws-credentials@e3dd6a429d7300a6a4c196c26e071d42e0343502 # v4.0.2
diff --git a/.github/workflows/release-srtool.yml b/.github/workflows/release-srtool.yml
index 9a29b46d2fc3..fc10496d481b 100644
--- a/.github/workflows/release-srtool.yml
+++ b/.github/workflows/release-srtool.yml
@@ -1,7 +1,7 @@
name: Srtool build
env:
- SUBWASM_VERSION: 0.20.0
+ SUBWASM_VERSION: 0.21.0
TOML_CLI_VERSION: 0.2.4
on:
@@ -11,14 +11,16 @@ on:
type: string
build_opts:
type: string
+ profile:
+ type: string
outputs:
published_runtimes:
value: ${{ jobs.find-runtimes.outputs.runtime }}
- schedule:
- - cron: "00 02 * * 1" # 2AM weekly on monday
-
- workflow_dispatch:
+permissions:
+ id-token: write
+ attestations: write
+ contents: read
jobs:
find-runtimes:
@@ -75,6 +77,7 @@ jobs:
with:
chain: ${{ matrix.chain }}
runtime_dir: ${{ matrix.runtime_dir }}
+ profile: ${{ inputs.profile }}
- name: Summary
run: |
@@ -83,6 +86,11 @@ jobs:
echo "Compact Runtime: ${{ steps.srtool_build.outputs.wasm }}"
echo "Compressed Runtime: ${{ steps.srtool_build.outputs.wasm_compressed }}"
+ - name: Generate artifact attestation
+ uses: actions/attest-build-provenance@1c608d11d69870c2092266b3f9a6f3abbf17002c # v1.4.3
+ with:
+ subject-path: ${{ steps.srtool_build.outputs.wasm }}
+
# We now get extra information thanks to subwasm
- name: Install subwasm
run: |
diff --git a/.github/workflows/runtimes-matrix.json b/.github/workflows/runtimes-matrix.json
index f991db55b86d..104e73521331 100644
--- a/.github/workflows/runtimes-matrix.json
+++ b/.github/workflows/runtimes-matrix.json
@@ -8,6 +8,8 @@
"bench_features": "runtime-benchmarks",
"bench_flags": "--genesis-builder-policy=none --exclude-pallets=pallet_xcm,pallet_xcm_benchmarks::fungible,pallet_xcm_benchmarks::generic,pallet_nomination_pools,pallet_remark,pallet_transaction_storage",
"uri": null,
+ "old_package": "staging-node-cli",
+ "old_bin": "substrate-node",
"is_relay": false
},
{
@@ -19,6 +21,8 @@
"bench_flags": "",
"bench_features": "runtime-benchmarks",
"uri": "wss://try-runtime-westend.polkadot.io:443",
+ "old_package": "polkadot",
+ "old_bin": "polkadot",
"is_relay": true
},
{
@@ -27,9 +31,11 @@
"path": "polkadot/runtime/rococo",
"header": "polkadot/file_header.txt",
"template": "polkadot/xcm/pallet-xcm-benchmarks/template.hbs",
- "uri": "wss://try-runtime-rococo.polkadot.io:443",
"bench_features": "runtime-benchmarks",
"bench_flags": "",
+ "uri": "wss://try-runtime-rococo.polkadot.io:443",
+ "old_package": "polkadot",
+ "old_bin": "polkadot",
"is_relay": true
},
{
@@ -41,6 +47,8 @@
"bench_features": "runtime-benchmarks",
"bench_flags": "",
"uri": "wss://westend-asset-hub-rpc.polkadot.io:443",
+ "old_package": "polkadot-parachain-bin",
+ "old_bin": "polkadot-parachain",
"is_relay": false
},
{
@@ -52,6 +60,8 @@
"bench_features": "runtime-benchmarks",
"bench_flags": "",
"uri": "wss://rococo-asset-hub-rpc.polkadot.io:443",
+ "old_package": "polkadot-parachain-bin",
+ "old_bin": "polkadot-parachain",
"is_relay": false
},
{
@@ -63,6 +73,8 @@
"bench_features": "runtime-benchmarks",
"bench_flags": "",
"uri": "wss://rococo-bridge-hub-rpc.polkadot.io:443",
+ "old_package": "polkadot-parachain-bin",
+ "old_bin": "polkadot-parachain",
"is_relay": false
},
{
@@ -74,6 +86,8 @@
"bench_features": "runtime-benchmarks",
"bench_flags": "",
"uri": "wss://westend-bridge-hub-rpc.polkadot.io:443",
+ "old_package": "polkadot-parachain-bin",
+ "old_bin": "polkadot-parachain",
"is_relay": false
},
{
@@ -84,7 +98,10 @@
"template": "cumulus/templates/xcm-bench-template.hbs",
"bench_features": "runtime-benchmarks",
"bench_flags": "",
- "uri": "wss://westend-collectives-rpc.polkadot.io:443"
+ "uri": "wss://westend-collectives-rpc.polkadot.io:443",
+ "old_package": "polkadot-parachain-bin",
+ "old_bin": "polkadot-parachain",
+ "is_relay": false
},
{
"name": "contracts-rococo",
@@ -95,6 +112,8 @@
"bench_features": "runtime-benchmarks",
"bench_flags": "--genesis-builder-policy=none --exclude-pallets=pallet_xcm",
"uri": "wss://rococo-contracts-rpc.polkadot.io:443",
+ "old_package": "polkadot-parachain-bin",
+ "old_bin": "polkadot-parachain",
"is_relay": false
},
{
@@ -106,6 +125,8 @@
"bench_features": "runtime-benchmarks",
"bench_flags": "--genesis-builder-policy=none --exclude-pallets=pallet_xcm,pallet_xcm_benchmarks::fungible,pallet_xcm_benchmarks::generic",
"uri": "wss://rococo-coretime-rpc.polkadot.io:443",
+ "old_package": "polkadot-parachain-bin",
+ "old_bin": "polkadot-parachain",
"is_relay": false
},
{
@@ -117,6 +138,8 @@
"bench_features": "runtime-benchmarks",
"bench_flags": "--genesis-builder-policy=none --exclude-pallets=pallet_xcm,pallet_xcm_benchmarks::fungible,pallet_xcm_benchmarks::generic",
"uri": "wss://westend-coretime-rpc.polkadot.io:443",
+ "old_package": "polkadot-parachain-bin",
+ "old_bin": "polkadot-parachain",
"is_relay": false
},
{
@@ -128,6 +151,8 @@
"bench_features": "runtime-benchmarks",
"bench_flags": "--genesis-builder-policy=none",
"uri": null,
+ "old_package": "polkadot-parachain-bin",
+ "old_bin": "polkadot-parachain",
"is_relay": false
},
{
@@ -139,6 +164,8 @@
"bench_features": "runtime-benchmarks",
"bench_flags": "--genesis-builder-policy=none --exclude-pallets=pallet_xcm,pallet_xcm_benchmarks::fungible,pallet_xcm_benchmarks::generic",
"uri": "wss://rococo-people-rpc.polkadot.io:443",
+ "old_package": "polkadot-parachain-bin",
+ "old_bin": "polkadot-parachain",
"is_relay": false
},
{
@@ -150,6 +177,8 @@
"bench_features": "runtime-benchmarks",
"bench_flags": "--genesis-builder-policy=none --exclude-pallets=pallet_xcm,pallet_xcm_benchmarks::fungible,pallet_xcm_benchmarks::generic",
"uri": "wss://westend-people-rpc.polkadot.io:443",
+ "old_package": "polkadot-parachain-bin",
+ "old_bin": "polkadot-parachain",
"is_relay": false
}
]
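
Every runtime entry now carries `old_package`/`old_bin`, telling the legacy `bench` flow which cargo package to build and which node binary hosts the runtime's benchmarks (relay chains map to `polkadot`, system parachains to `polkadot-parachain-bin`/`polkadot-parachain`). A hedged Rust mirror of one entry's shape; the struct and `main` are illustrative, the real consumer is the Python command script, and unknown JSON fields are simply ignored by serde:

use serde::Deserialize;

#[derive(Debug, Deserialize)]
struct RuntimeEntry {
    name: String,
    path: String,
    header: String,
    template: String,
    bench_features: String,
    bench_flags: String,
    uri: Option<String>, // null for runtimes without a public RPC endpoint
    old_package: String, // new: legacy cargo package to build
    old_bin: String,     // new: legacy binary that runs the benchmarks
    is_relay: bool,
}

fn main() -> Result<(), serde_json::Error> {
    let raw = r#"{
        "name": "westend",
        "path": "polkadot/runtime/westend",
        "header": "polkadot/file_header.txt",
        "template": "polkadot/xcm/pallet-xcm-benchmarks/template.hbs",
        "bench_features": "runtime-benchmarks",
        "bench_flags": "",
        "uri": "wss://try-runtime-westend.polkadot.io:443",
        "old_package": "polkadot",
        "old_bin": "polkadot",
        "is_relay": true
    }"#;
    let entry: RuntimeEntry = serde_json::from_str(raw)?;
    println!("{}: build {} / run {}", entry.name, entry.old_package, entry.old_bin);
    Ok(())
}
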
diff --git a/.gitlab/pipeline/zombienet/parachain-template.yml b/.gitlab/pipeline/zombienet/parachain-template.yml
index 896ba7913be7..d5c1b6558b39 100644
--- a/.gitlab/pipeline/zombienet/parachain-template.yml
+++ b/.gitlab/pipeline/zombienet/parachain-template.yml
@@ -43,4 +43,4 @@ zombienet-parachain-template-smoke:
- ls -ltr $(pwd)/artifacts
- cargo test -p template-zombienet-tests --features zombienet --tests minimal_template_block_production_test
- cargo test -p template-zombienet-tests --features zombienet --tests parachain_template_block_production_test
- # - cargo test -p template-zombienet-tests --features zombienet --tests solochain_template_block_production_test
+ - cargo test -p template-zombienet-tests --features zombienet --tests solochain_template_block_production_test
diff --git a/.gitlab/pipeline/zombienet/polkadot.yml b/.gitlab/pipeline/zombienet/polkadot.yml
index 3dab49a118e5..ac4bdac7ad15 100644
--- a/.gitlab/pipeline/zombienet/polkadot.yml
+++ b/.gitlab/pipeline/zombienet/polkadot.yml
@@ -179,7 +179,7 @@ zombienet-polkadot-elastic-scaling-0001-basic-3cores-6s-blocks:
--local-dir="${LOCAL_DIR}/elastic_scaling"
--test="0001-basic-3cores-6s-blocks.zndsl"
-zombienet-polkadot-elastic-scaling-0002-elastic-scaling-doesnt-break-parachains:
+.zombienet-polkadot-elastic-scaling-0002-elastic-scaling-doesnt-break-parachains:
extends:
- .zombienet-polkadot-common
before_script:
@@ -233,7 +233,7 @@ zombienet-polkadot-functional-0015-coretime-shared-core:
--local-dir="${LOCAL_DIR}/functional"
--test="0016-approval-voting-parallel.zndsl"
-zombienet-polkadot-functional-0017-sync-backing:
+.zombienet-polkadot-functional-0017-sync-backing:
extends:
- .zombienet-polkadot-common
script:
diff --git a/Cargo.lock b/Cargo.lock
index 0fd416f17f2d..469b30476f7c 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -125,6 +125,48 @@ version = "0.2.16"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0942ffc6dcaadf03badf6e6a2d0228460359d5e34b57ccdc720b7382dfbd5ec5"
+[[package]]
+name = "alloy-core"
+version = "0.8.15"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c618bd382f0bc2ac26a7e4bfae01c9b015ca8f21b37ca40059ae35a7e62b3dc6"
+dependencies = [
+ "alloy-dyn-abi",
+ "alloy-json-abi",
+ "alloy-primitives 0.8.15",
+ "alloy-rlp",
+ "alloy-sol-types 0.8.15",
+]
+
+[[package]]
+name = "alloy-dyn-abi"
+version = "0.8.15"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "41056bde53ae10ffbbf11618efbe1e0290859e5eab0fe9ef82ebdb62f12a866f"
+dependencies = [
+ "alloy-json-abi",
+ "alloy-primitives 0.8.15",
+ "alloy-sol-type-parser",
+ "alloy-sol-types 0.8.15",
+ "const-hex",
+ "itoa",
+ "serde",
+ "serde_json",
+ "winnow 0.6.18",
+]
+
+[[package]]
+name = "alloy-json-abi"
+version = "0.8.15"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c357da577dfb56998d01f574d81ad7a1958d248740a7981b205d69d65a7da404"
+dependencies = [
+ "alloy-primitives 0.8.15",
+ "alloy-sol-type-parser",
+ "serde",
+ "serde_json",
+]
+
[[package]]
name = "alloy-primitives"
version = "0.4.2"
@@ -145,6 +187,34 @@ dependencies = [
"tiny-keccak",
]
+[[package]]
+name = "alloy-primitives"
+version = "0.8.15"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6259a506ab13e1d658796c31e6e39d2e2ee89243bcc505ddc613b35732e0a430"
+dependencies = [
+ "alloy-rlp",
+ "bytes",
+ "cfg-if",
+ "const-hex",
+ "derive_more 1.0.0",
+ "foldhash",
+ "hashbrown 0.15.2",
+ "hex-literal",
+ "indexmap 2.7.0",
+ "itoa",
+ "k256",
+ "keccak-asm",
+ "paste",
+ "proptest",
+ "rand",
+ "ruint",
+ "rustc-hash 2.0.0",
+ "serde",
+ "sha3 0.10.8",
+ "tiny-keccak",
+]
+
[[package]]
name = "alloy-rlp"
version = "0.3.3"
@@ -169,18 +239,88 @@ dependencies = [
"proc-macro2 1.0.86",
"quote 1.0.37",
"syn 2.0.87",
- "syn-solidity",
+ "syn-solidity 0.4.2",
"tiny-keccak",
]
+[[package]]
+name = "alloy-sol-macro"
+version = "0.8.15"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d9d64f851d95619233f74b310f12bcf16e0cbc27ee3762b6115c14a84809280a"
+dependencies = [
+ "alloy-sol-macro-expander",
+ "alloy-sol-macro-input",
+ "proc-macro-error2",
+ "proc-macro2 1.0.86",
+ "quote 1.0.37",
+ "syn 2.0.87",
+]
+
+[[package]]
+name = "alloy-sol-macro-expander"
+version = "0.8.15"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6bf7ed1574b699f48bf17caab4e6e54c6d12bc3c006ab33d58b1e227c1c3559f"
+dependencies = [
+ "alloy-sol-macro-input",
+ "const-hex",
+ "heck 0.5.0",
+ "indexmap 2.7.0",
+ "proc-macro-error2",
+ "proc-macro2 1.0.86",
+ "quote 1.0.37",
+ "syn 2.0.87",
+ "syn-solidity 0.8.15",
+ "tiny-keccak",
+]
+
+[[package]]
+name = "alloy-sol-macro-input"
+version = "0.8.15"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8c02997ccef5f34f9c099277d4145f183b422938ed5322dc57a089fe9b9ad9ee"
+dependencies = [
+ "const-hex",
+ "dunce",
+ "heck 0.5.0",
+ "proc-macro2 1.0.86",
+ "quote 1.0.37",
+ "syn 2.0.87",
+ "syn-solidity 0.8.15",
+]
+
+[[package]]
+name = "alloy-sol-type-parser"
+version = "0.8.15"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ce13ff37285b0870d0a0746992a4ae48efaf34b766ae4c2640fa15e5305f8e73"
+dependencies = [
+ "serde",
+ "winnow 0.6.18",
+]
+
[[package]]
name = "alloy-sol-types"
version = "0.4.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "98d7107bed88e8f09f0ddcc3335622d87bfb6821f3e0c7473329fb1cfad5e015"
dependencies = [
- "alloy-primitives",
- "alloy-sol-macro",
+ "alloy-primitives 0.4.2",
+ "alloy-sol-macro 0.4.2",
+ "const-hex",
+ "serde",
+]
+
+[[package]]
+name = "alloy-sol-types"
+version = "0.8.15"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1174cafd6c6d810711b4e00383037bdb458efc4fe3dbafafa16567e0320c54d8"
+dependencies = [
+ "alloy-json-abi",
+ "alloy-primitives 0.8.15",
+ "alloy-sol-macro 0.8.15",
"const-hex",
"serde",
]
@@ -1938,6 +2078,8 @@ dependencies = [
"frame-support 28.0.0",
"parity-scale-codec",
"scale-info",
+ "sp-core 28.0.0",
+ "staging-xcm 7.0.0",
]
[[package]]
@@ -1948,6 +2090,8 @@ dependencies = [
"frame-support 28.0.0",
"parity-scale-codec",
"scale-info",
+ "sp-core 28.0.0",
+ "staging-xcm 7.0.0",
]
[[package]]
@@ -2541,6 +2685,7 @@ dependencies = [
"bp-rococo",
"bp-runtime 0.7.0",
"bp-westend",
+ "bp-xcm-bridge-hub-router 0.6.0",
"bridge-hub-common 0.1.0",
"bridge-hub-test-utils 0.7.0",
"bridge-runtime-common 0.7.0",
@@ -2779,6 +2924,7 @@ dependencies = [
"bp-rococo",
"bp-runtime 0.7.0",
"bp-westend",
+ "bp-xcm-bridge-hub-router 0.6.0",
"bridge-hub-common 0.1.0",
"bridge-hub-test-utils 0.7.0",
"bridge-runtime-common 0.7.0",
@@ -2995,6 +3141,9 @@ name = "bytes"
version = "1.7.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "428d9aa8fbc0670b7b8d6030a7fadd0f86151cae55e4dbbece15f3780a3dfaf3"
+dependencies = [
+ "serde",
+]
[[package]]
name = "bzip2-sys"
@@ -3165,6 +3314,7 @@ dependencies = [
name = "chain-spec-guide-runtime"
version = "0.0.0"
dependencies = [
+ "cmd_lib",
"docify",
"frame-support 28.0.0",
"pallet-balances 28.0.0",
@@ -4805,7 +4955,6 @@ dependencies = [
"pallet-message-queue 31.0.0",
"parity-scale-codec",
"polkadot-parachain-primitives 6.0.0",
- "polkadot-runtime-common 7.0.0",
"polkadot-runtime-parachains 7.0.0",
"rand",
"sc-client-api",
@@ -6872,6 +7021,12 @@ version = "1.0.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1"
+[[package]]
+name = "foldhash"
+version = "0.1.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f81ec6369c545a7d40e4589b5597581fa1c441fe1cce96dd1de43159910a36a2"
+
[[package]]
name = "foreign-types"
version = "0.3.2"
@@ -7016,6 +7171,7 @@ dependencies = [
"sc-client-db",
"sc-executor 0.32.0",
"sc-executor-common 0.29.0",
+ "sc-runtime-utilities",
"sc-service",
"sc-sysinfo",
"serde",
@@ -8225,6 +8381,10 @@ name = "hashbrown"
version = "0.15.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bf151400ff0baff5465007dd2f3e717f3fe502074ca563069ce3a6629d07b289"
+dependencies = [
+ "foldhash",
+ "serde",
+]
[[package]]
name = "hashlink"
@@ -8276,6 +8436,9 @@ name = "hex"
version = "0.4.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70"
+dependencies = [
+ "serde",
+]
[[package]]
name = "hex-conservative"
@@ -8859,6 +9022,7 @@ checksum = "62f822373a4fe84d4bb149bf54e584a7f4abec90e072ed49cda0edea5b95471f"
dependencies = [
"equivalent",
"hashbrown 0.15.2",
+ "serde",
]
[[package]]
@@ -9358,6 +9522,16 @@ dependencies = [
"cpufeatures",
]
+[[package]]
+name = "keccak-asm"
+version = "0.1.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "505d1856a39b200489082f90d897c3f07c455563880bc5952e38eabf731c83b6"
+dependencies = [
+ "digest 0.10.7",
+ "sha3-asm",
+]
+
[[package]]
name = "keccak-hash"
version = "0.11.0"
@@ -14635,7 +14809,6 @@ dependencies = [
"assert_matches",
"bitflags 1.3.2",
"derive_more 0.99.17",
- "env_logger 0.11.3",
"environmental",
"ethereum-types 0.15.1",
"frame-benchmarking 28.0.0",
@@ -14644,11 +14817,8 @@ dependencies = [
"hex",
"hex-literal",
"impl-trait-for-tuples",
- "jsonrpsee",
"log",
- "pallet-assets 29.1.0",
"pallet-balances 28.0.0",
- "pallet-message-queue 31.0.0",
"pallet-proxy 28.0.0",
"pallet-revive-fixtures 0.1.0",
"pallet-revive-proc-macro 0.1.0",
@@ -15988,6 +16158,7 @@ dependencies = [
"bp-messages 0.7.0",
"bp-runtime 0.7.0",
"bp-xcm-bridge-hub 0.2.0",
+ "bp-xcm-bridge-hub-router 0.6.0",
"frame-support 28.0.0",
"frame-system 28.0.0",
"log",
@@ -17920,6 +18091,7 @@ dependencies = [
"cumulus-primitives-aura 0.7.0",
"cumulus-primitives-core 0.7.0",
"cumulus-relay-chain-interface",
+ "cumulus-test-runtime",
"docify",
"frame-benchmarking 28.0.0",
"frame-benchmarking-cli",
@@ -17948,29 +18120,36 @@ dependencies = [
"sc-executor 0.32.0",
"sc-network",
"sc-rpc",
+ "sc-runtime-utilities",
"sc-service",
"sc-sysinfo",
"sc-telemetry",
"sc-tracing",
"sc-transaction-pool",
+ "scale-info",
"serde",
"serde_json",
"sp-api 26.0.0",
"sp-block-builder 26.0.0",
"sp-consensus-aura 0.32.0",
"sp-core 28.0.0",
+ "sp-crypto-hashing 0.1.0",
"sp-genesis-builder 0.8.0",
"sp-inherents 26.0.0",
+ "sp-io 30.0.0",
"sp-keystore 0.34.0",
"sp-runtime 31.0.1",
"sp-session 27.0.0",
+ "sp-storage 19.0.0",
"sp-timestamp 26.0.0",
"sp-transaction-pool 26.0.0",
"sp-version 29.0.0",
+ "sp-wasm-interface 20.0.0",
"sp-weights 27.0.0",
"substrate-frame-rpc-system",
"substrate-prometheus-endpoint",
"substrate-state-trie-migration-rpc",
+ "subxt-metadata",
"tokio",
"wait-timeout",
]
@@ -18579,7 +18758,6 @@ dependencies = [
"pallet-remark 28.0.0",
"pallet-revive 0.1.0",
"pallet-revive-eth-rpc",
- "pallet-revive-fixtures 0.1.0",
"pallet-revive-mock-network 0.1.0",
"pallet-revive-proc-macro 0.1.0",
"pallet-revive-uapi 0.1.0",
@@ -18709,6 +18887,7 @@ dependencies = [
"sc-rpc-api",
"sc-rpc-server",
"sc-rpc-spec-v2",
+ "sc-runtime-utilities",
"sc-service",
"sc-state-db",
"sc-statement-store",
@@ -19054,6 +19233,7 @@ version = "0.0.1"
dependencies = [
"assert_cmd",
"chain-spec-guide-runtime",
+ "cmd_lib",
"cumulus-client-service",
"cumulus-pallet-aura-ext 0.7.0",
"cumulus-pallet-parachain-system 0.7.0",
@@ -20740,6 +20920,7 @@ dependencies = [
"libc",
"rand_chacha",
"rand_core 0.6.4",
+ "serde",
]
[[package]]
@@ -23329,6 +23510,25 @@ dependencies = [
"substrate-wasm-builder 17.0.0",
]
+[[package]]
+name = "sc-runtime-utilities"
+version = "0.1.0"
+dependencies = [
+ "cumulus-primitives-proof-size-hostfunction 0.2.0",
+ "cumulus-test-runtime",
+ "parity-scale-codec",
+ "sc-executor 0.32.0",
+ "sc-executor-common 0.29.0",
+ "sp-core 28.0.0",
+ "sp-crypto-hashing 0.1.0",
+ "sp-io 30.0.0",
+ "sp-state-machine 0.35.0",
+ "sp-version 29.0.0",
+ "sp-wasm-interface 20.0.0",
+ "subxt",
+ "thiserror",
+]
+
[[package]]
name = "sc-service"
version = "0.35.0"
@@ -24274,6 +24474,16 @@ dependencies = [
"keccak",
]
+[[package]]
+name = "sha3-asm"
+version = "0.1.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c28efc5e327c837aa837c59eae585fc250715ef939ac32881bcc11677cd02d46"
+dependencies = [
+ "cc",
+ "cfg-if",
+]
+
[[package]]
name = "sharded-slab"
version = "0.1.4"
@@ -24966,8 +25176,7 @@ dependencies = [
name = "snowbridge-pallet-inbound-queue"
version = "0.2.0"
dependencies = [
- "alloy-primitives",
- "alloy-sol-types",
+ "alloy-core",
"frame-benchmarking 28.0.0",
"frame-support 28.0.0",
"frame-system 28.0.0",
@@ -24997,8 +25206,8 @@ version = "0.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f2e6a9d00e60e3744e6b6f0c21fea6694b9c6401ac40e41340a96e561dcf1935"
dependencies = [
- "alloy-primitives",
- "alloy-sol-types",
+ "alloy-primitives 0.4.2",
+ "alloy-sol-types 0.4.2",
"frame-benchmarking 38.0.0",
"frame-support 38.0.0",
"frame-system 38.0.0",
@@ -26498,7 +26707,7 @@ dependencies = [
"libsecp256k1",
"log",
"parity-scale-codec",
- "polkavm-derive 0.9.1",
+ "polkavm-derive 0.17.0",
"rustversion",
"secp256k1 0.28.2",
"sp-core 28.0.0",
@@ -26982,7 +27191,7 @@ dependencies = [
"bytes",
"impl-trait-for-tuples",
"parity-scale-codec",
- "polkavm-derive 0.9.1",
+ "polkavm-derive 0.17.0",
"primitive-types 0.13.1",
"rustversion",
"sp-core 28.0.0",
@@ -28626,7 +28835,7 @@ dependencies = [
"merkleized-metadata",
"parity-scale-codec",
"parity-wasm",
- "polkavm-linker 0.9.2",
+ "polkavm-linker 0.17.1",
"sc-executor 0.32.0",
"shlex",
"sp-core 28.0.0",
@@ -29013,6 +29222,18 @@ dependencies = [
"syn 2.0.87",
]
+[[package]]
+name = "syn-solidity"
+version = "0.8.15"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "219389c1ebe89f8333df8bdfb871f6631c552ff399c23cac02480b6088aad8f0"
+dependencies = [
+ "paste",
+ "proc-macro2 1.0.86",
+ "quote 1.0.37",
+ "syn 2.0.87",
+]
+
[[package]]
name = "sync_wrapper"
version = "1.0.1"
@@ -31656,10 +31877,13 @@ name = "xcm-executor-integration-tests"
version = "1.0.0"
dependencies = [
"frame-support 28.0.0",
+ "frame-system 28.0.0",
"futures",
+ "pallet-sudo 28.0.0",
"pallet-transaction-payment 28.0.0",
"pallet-xcm 7.0.0",
"parity-scale-codec",
+ "polkadot-runtime-parachains 7.0.0",
"polkadot-test-client",
"polkadot-test-runtime",
"polkadot-test-service",
@@ -31678,6 +31902,7 @@ name = "xcm-procedural"
version = "7.0.0"
dependencies = [
"Inflector",
+ "frame-support 28.0.0",
"proc-macro2 1.0.86",
"quote 1.0.37",
"staging-xcm 7.0.0",
diff --git a/Cargo.toml b/Cargo.toml
index ecc385504181..98ab6551c802 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -293,6 +293,7 @@ members = [
"substrate/client/rpc-api",
"substrate/client/rpc-servers",
"substrate/client/rpc-spec-v2",
+ "substrate/client/runtime-utilities",
"substrate/client/service",
"substrate/client/service/test",
"substrate/client/state-db",
@@ -594,8 +595,7 @@ zero-prefixed-literal = { level = "allow", priority = 2 } # 00_1000_0
Inflector = { version = "0.11.4" }
aes-gcm = { version = "0.10" }
ahash = { version = "0.8.2" }
-alloy-primitives = { version = "0.4.2", default-features = false }
-alloy-sol-types = { version = "0.4.2", default-features = false }
+alloy-core = { version = "0.8.15", default-features = false }
always-assert = { version = "0.1" }
anyhow = { version = "1.0.81", default-features = false }
approx = { version = "0.5.1" }
@@ -848,7 +848,7 @@ linked-hash-map = { version = "0.5.4" }
linked_hash_set = { version = "0.1.4" }
linregress = { version = "0.5.1" }
lite-json = { version = "0.2.0", default-features = false }
-litep2p = { version = "0.8.2", features = ["websocket"] }
+litep2p = { version = "0.8.4", features = ["websocket"] }
log = { version = "0.4.22", default-features = false }
macro_magic = { version = "0.5.1" }
maplit = { version = "1.0.2" }
@@ -1090,8 +1090,8 @@ polkadot-test-client = { path = "polkadot/node/test/client" }
polkadot-test-runtime = { path = "polkadot/runtime/test-runtime" }
polkadot-test-service = { path = "polkadot/node/test/service" }
polkavm = { version = "0.9.3", default-features = false }
-polkavm-derive = "0.9.1"
-polkavm-linker = "0.9.2"
+polkavm-derive = "0.17.0"
+polkavm-linker = "0.17.1"
portpicker = { version = "0.1.1" }
pretty_assertions = { version = "1.3.0" }
primitive-types = { version = "0.13.1", default-features = false, features = [
@@ -1184,6 +1184,7 @@ sc-rpc-api = { path = "substrate/client/rpc-api", default-features = false }
sc-rpc-server = { path = "substrate/client/rpc-servers", default-features = false }
sc-rpc-spec-v2 = { path = "substrate/client/rpc-spec-v2", default-features = false }
sc-runtime-test = { path = "substrate/client/executor/runtime-test" }
+sc-runtime-utilities = { path = "substrate/client/runtime-utilities", default-features = true }
sc-service = { path = "substrate/client/service", default-features = false }
sc-service-test = { path = "substrate/client/service/test" }
sc-state-db = { path = "substrate/client/state-db", default-features = false }
@@ -1317,6 +1318,7 @@ substrate-test-runtime-transaction-pool = { path = "substrate/test-utils/runtime
substrate-test-utils = { path = "substrate/test-utils" }
substrate-wasm-builder = { path = "substrate/utils/wasm-builder", default-features = false }
subxt = { version = "0.38", default-features = false }
+subxt-metadata = { version = "0.38.0", default-features = false }
subxt-signer = { version = "0.38" }
syn = { version = "2.0.87" }
sysinfo = { version = "0.30" }
diff --git a/bridges/bin/runtime-common/Cargo.toml b/bridges/bin/runtime-common/Cargo.toml
index 37b56140c289..49cd086fd3eb 100644
--- a/bridges/bin/runtime-common/Cargo.toml
+++ b/bridges/bin/runtime-common/Cargo.toml
@@ -99,6 +99,7 @@ runtime-benchmarks = [
"pallet-utility/runtime-benchmarks",
"sp-runtime/runtime-benchmarks",
"sp-trie",
+ "xcm/runtime-benchmarks",
]
integrity-test = ["static_assertions"]
test-helpers = ["bp-runtime/test-helpers", "sp-trie"]
diff --git a/bridges/chains/chain-asset-hub-rococo/Cargo.toml b/bridges/chains/chain-asset-hub-rococo/Cargo.toml
index 363a869048aa..4eb93ab52bc9 100644
--- a/bridges/chains/chain-asset-hub-rococo/Cargo.toml
+++ b/bridges/chains/chain-asset-hub-rococo/Cargo.toml
@@ -19,10 +19,14 @@ scale-info = { features = ["derive"], workspace = true }
# Substrate Dependencies
frame-support = { workspace = true }
+sp-core = { workspace = true }
# Bridge Dependencies
bp-xcm-bridge-hub-router = { workspace = true }
+# Polkadot dependencies
+xcm = { workspace = true }
+
[features]
default = ["std"]
std = [
@@ -30,4 +34,6 @@ std = [
"codec/std",
"frame-support/std",
"scale-info/std",
+ "sp-core/std",
+ "xcm/std",
]
diff --git a/bridges/chains/chain-asset-hub-rococo/src/lib.rs b/bridges/chains/chain-asset-hub-rococo/src/lib.rs
index de2e9ae856d1..4ff7b391acd0 100644
--- a/bridges/chains/chain-asset-hub-rococo/src/lib.rs
+++ b/bridges/chains/chain-asset-hub-rococo/src/lib.rs
@@ -18,10 +18,13 @@
#![cfg_attr(not(feature = "std"), no_std)]
+extern crate alloc;
+
use codec::{Decode, Encode};
use scale_info::TypeInfo;
pub use bp_xcm_bridge_hub_router::XcmBridgeHubRouterCall;
+use xcm::latest::prelude::*;
/// `AssetHubRococo` Runtime `Call` enum.
///
@@ -44,5 +47,27 @@ frame_support::parameter_types! {
pub const XcmBridgeHubRouterTransactCallMaxWeight: frame_support::weights::Weight = frame_support::weights::Weight::from_parts(200_000_000, 6144);
}
+/// Builds an (un)congestion XCM program with the `report_bridge_status` call for
+/// `ToWestendXcmRouter`.
+pub fn build_congestion_message<RuntimeCall>(
+ bridge_id: sp_core::H256,
+ is_congested: bool,
+) -> alloc::vec::Vec<Instruction<RuntimeCall>> {
+ alloc::vec![
+ UnpaidExecution { weight_limit: Unlimited, check_origin: None },
+ Transact {
+ origin_kind: OriginKind::Xcm,
+ fallback_max_weight: Some(XcmBridgeHubRouterTransactCallMaxWeight::get()),
+ call: Call::ToWestendXcmRouter(XcmBridgeHubRouterCall::report_bridge_status {
+ bridge_id,
+ is_congested,
+ })
+ .encode()
+ .into(),
+ },
+ ExpectTransactStatus(MaybeErrorCode::Success),
+ ]
+}
+
/// Identifier of AssetHubRococo in the Rococo relay chain.
pub const ASSET_HUB_ROCOCO_PARACHAIN_ID: u32 = 1000;
diff --git a/bridges/chains/chain-asset-hub-westend/Cargo.toml b/bridges/chains/chain-asset-hub-westend/Cargo.toml
index 430d9b6116cf..22071399f4d1 100644
--- a/bridges/chains/chain-asset-hub-westend/Cargo.toml
+++ b/bridges/chains/chain-asset-hub-westend/Cargo.toml
@@ -19,10 +19,14 @@ scale-info = { features = ["derive"], workspace = true }
# Substrate Dependencies
frame-support = { workspace = true }
+sp-core = { workspace = true }
# Bridge Dependencies
bp-xcm-bridge-hub-router = { workspace = true }
+# Polkadot dependencies
+xcm = { workspace = true }
+
[features]
default = ["std"]
std = [
@@ -30,4 +34,6 @@ std = [
"codec/std",
"frame-support/std",
"scale-info/std",
+ "sp-core/std",
+ "xcm/std",
]
diff --git a/bridges/chains/chain-asset-hub-westend/src/lib.rs b/bridges/chains/chain-asset-hub-westend/src/lib.rs
index 9de1c8809894..9d245e08f7cc 100644
--- a/bridges/chains/chain-asset-hub-westend/src/lib.rs
+++ b/bridges/chains/chain-asset-hub-westend/src/lib.rs
@@ -18,10 +18,13 @@
#![cfg_attr(not(feature = "std"), no_std)]
+extern crate alloc;
+
use codec::{Decode, Encode};
use scale_info::TypeInfo;
pub use bp_xcm_bridge_hub_router::XcmBridgeHubRouterCall;
+use xcm::latest::prelude::*;
/// `AssetHubWestend` Runtime `Call` enum.
///
@@ -44,5 +47,27 @@ frame_support::parameter_types! {
pub const XcmBridgeHubRouterTransactCallMaxWeight: frame_support::weights::Weight = frame_support::weights::Weight::from_parts(200_000_000, 6144);
}
+/// Builds an (un)congestion XCM program with the `report_bridge_status` call for
+/// `ToRococoXcmRouter`.
+pub fn build_congestion_message<RuntimeCall>(
+ bridge_id: sp_core::H256,
+ is_congested: bool,
+) -> alloc::vec::Vec<Instruction<RuntimeCall>> {
+ alloc::vec![
+ UnpaidExecution { weight_limit: Unlimited, check_origin: None },
+ Transact {
+ origin_kind: OriginKind::Xcm,
+ fallback_max_weight: Some(XcmBridgeHubRouterTransactCallMaxWeight::get()),
+ call: Call::ToRococoXcmRouter(XcmBridgeHubRouterCall::report_bridge_status {
+ bridge_id,
+ is_congested,
+ })
+ .encode()
+ .into(),
+ },
+ ExpectTransactStatus(MaybeErrorCode::Success),
+ ]
+}
+
/// Identifier of AssetHubWestend in the Westend relay chain.
pub const ASSET_HUB_WESTEND_PARACHAIN_ID: u32 = 1000;
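
The twin `build_congestion_message` helpers above (Rococo and Westend Asset Hub) each assemble the same three-instruction program: free execution, a `Transact` carrying the SCALE-encoded `report_bridge_status` call, and a transact-status check. A sketch of wrapping that instruction list into a sendable program, using the Rococo variant; the wrapper function is illustrative, bridge-hub runtimes wire this into their congestion hooks:

use sp_core::H256;
use xcm::latest::prelude::*;

// The call bytes inside `Transact` are already encoded, so `RuntimeCall` can
// stay generic; `Xcm` is just a newtype over the instruction vector.
fn congestion_program<RuntimeCall>(bridge_id: H256, is_congested: bool) -> Xcm<RuntimeCall> {
    Xcm(bp_asset_hub_rococo::build_congestion_message::<RuntimeCall>(
        bridge_id,
        is_congested,
    ))
}
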
diff --git a/bridges/chains/chain-polkadot-bulletin/src/lib.rs b/bridges/chains/chain-polkadot-bulletin/src/lib.rs
index c5c18beb2cad..070bc7b0ba3d 100644
--- a/bridges/chains/chain-polkadot-bulletin/src/lib.rs
+++ b/bridges/chains/chain-polkadot-bulletin/src/lib.rs
@@ -225,4 +225,4 @@ impl ChainWithMessages for PolkadotBulletin {
}
decl_bridge_finality_runtime_apis!(polkadot_bulletin, grandpa);
-decl_bridge_messages_runtime_apis!(polkadot_bulletin, bp_messages::HashedLaneId);
+decl_bridge_messages_runtime_apis!(polkadot_bulletin, bp_messages::LegacyLaneId);
diff --git a/bridges/modules/xcm-bridge-hub-router/Cargo.toml b/bridges/modules/xcm-bridge-hub-router/Cargo.toml
index 55824f6a7fe7..b0286938f36d 100644
--- a/bridges/modules/xcm-bridge-hub-router/Cargo.toml
+++ b/bridges/modules/xcm-bridge-hub-router/Cargo.toml
@@ -56,6 +56,7 @@ runtime-benchmarks = [
"frame-system/runtime-benchmarks",
"sp-runtime/runtime-benchmarks",
"xcm-builder/runtime-benchmarks",
+ "xcm/runtime-benchmarks",
]
try-runtime = [
"frame-support/try-runtime",
diff --git a/bridges/modules/xcm-bridge-hub-router/src/benchmarking.rs b/bridges/modules/xcm-bridge-hub-router/src/benchmarking.rs
index 3c4a10f82e7d..ff06a1e3c8c5 100644
--- a/bridges/modules/xcm-bridge-hub-router/src/benchmarking.rs
+++ b/bridges/modules/xcm-bridge-hub-router/src/benchmarking.rs
@@ -18,9 +18,9 @@
#![cfg(feature = "runtime-benchmarks")]
-use crate::{DeliveryFeeFactor, MINIMAL_DELIVERY_FEE_FACTOR};
+use crate::{Bridge, BridgeState, Call, MINIMAL_DELIVERY_FEE_FACTOR};
use frame_benchmarking::{benchmarks_instance_pallet, BenchmarkError};
-use frame_support::traits::{Get, Hooks};
+use frame_support::traits::{EnsureOrigin, Get, Hooks, UnfilteredDispatchable};
use sp_runtime::traits::Zero;
use xcm::prelude::*;
@@ -45,16 +45,35 @@ pub trait Config<I: 'static>: crate::Config<I> {
benchmarks_instance_pallet! {
on_initialize_when_non_congested {
- DeliveryFeeFactor::<T, I>::put(MINIMAL_DELIVERY_FEE_FACTOR + MINIMAL_DELIVERY_FEE_FACTOR);
+ Bridge::<T, I>::put(BridgeState {
+ is_congested: false,
+ delivery_fee_factor: MINIMAL_DELIVERY_FEE_FACTOR + MINIMAL_DELIVERY_FEE_FACTOR,
+ });
}: {
crate::Pallet::<T, I>::on_initialize(Zero::zero())
}
on_initialize_when_congested {
- DeliveryFeeFactor::<T, I>::put(MINIMAL_DELIVERY_FEE_FACTOR + MINIMAL_DELIVERY_FEE_FACTOR);
+ Bridge::<T, I>::put(BridgeState {
+ is_congested: false,
+ delivery_fee_factor: MINIMAL_DELIVERY_FEE_FACTOR + MINIMAL_DELIVERY_FEE_FACTOR,
+ });
let _ = T::ensure_bridged_target_destination()?;
T::make_congested();
}: {
crate::Pallet::<T, I>::on_initialize(Zero::zero())
}
+
+ report_bridge_status {
+ Bridge::<T, I>::put(BridgeState::default());
+
+ let origin: T::RuntimeOrigin = T::BridgeHubOrigin::try_successful_origin().expect("expected valid BridgeHubOrigin");
+ let bridge_id = Default::default();
+ let is_congested = true;
+
+ let call = Call::<T, I>::report_bridge_status { bridge_id, is_congested };
+ }: { call.dispatch_bypass_filter(origin)? }
+ verify {
+ assert!(Bridge::<T, I>::get().is_congested);
+ }
}
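
The new `report_bridge_status` benchmark seeds the `Bridge` storage item with `BridgeState::default()`, dispatches through whatever origin `BridgeHubOrigin::try_successful_origin()` yields, and verifies that the congestion flag flipped. A toy mirror of that state transition; the struct is illustrative, the real `BridgeState` lives in bp-xcm-bridge-hub-router and also carries `delivery_fee_factor`:

#[derive(Debug, Default, PartialEq)]
struct BridgeStateToy {
    is_congested: bool,
}

fn main() {
    let mut bridge = BridgeStateToy::default();
    assert!(!bridge.is_congested); // the seeded state, as in the benchmark setup
    // `report_bridge_status(origin, bridge_id, true)` only mutates this flag.
    bridge.is_congested = true;
    assert!(bridge.is_congested); // what the benchmark's `verify` block checks
}
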
diff --git a/bridges/modules/xcm-bridge-hub-router/src/lib.rs b/bridges/modules/xcm-bridge-hub-router/src/lib.rs
index fe8f5a2efdfb..7361696faba7 100644
--- a/bridges/modules/xcm-bridge-hub-router/src/lib.rs
+++ b/bridges/modules/xcm-bridge-hub-router/src/lib.rs
@@ -30,9 +30,10 @@
#![cfg_attr(not(feature = "std"), no_std)]
-pub use bp_xcm_bridge_hub_router::XcmChannelStatusProvider;
+pub use bp_xcm_bridge_hub_router::{BridgeState, XcmChannelStatusProvider};
use codec::Encode;
use frame_support::traits::Get;
+use sp_core::H256;
use sp_runtime::{FixedPointNumber, FixedU128, Saturating};
use sp_std::vec::Vec;
use xcm::prelude::*;
@@ -98,6 +99,8 @@ pub mod pallet {
/// Checks the XCM version for the destination.
type DestinationVersion: GetVersion;
+ /// Origin of the sibling bridge hub that is allowed to report bridge status.
+ type BridgeHubOrigin: EnsureOrigin<Self::RuntimeOrigin>;
/// Actual message sender (`HRMP` or `DMP`) to the sibling bridge hub location.
type ToBridgeHubSender: SendXcm;
/// Local XCM channel manager.
@@ -120,95 +123,112 @@ pub mod pallet {
return T::WeightInfo::on_initialize_when_congested()
}
+ // if bridge has reported congestion, we don't change anything
+ let mut bridge = Self::bridge();
+ if bridge.is_congested {
+ return T::WeightInfo::on_initialize_when_congested()
+ }
+
// if we can't decrease the delivery fee factor anymore, we don't change anything
- let mut delivery_fee_factor = Self::delivery_fee_factor();
- if delivery_fee_factor == MINIMAL_DELIVERY_FEE_FACTOR {
+ if bridge.delivery_fee_factor == MINIMAL_DELIVERY_FEE_FACTOR {
return T::WeightInfo::on_initialize_when_congested()
}
- let previous_factor = delivery_fee_factor;
- delivery_fee_factor =
- MINIMAL_DELIVERY_FEE_FACTOR.max(delivery_fee_factor / EXPONENTIAL_FEE_BASE);
+ let previous_factor = bridge.delivery_fee_factor;
+ bridge.delivery_fee_factor =
+ MINIMAL_DELIVERY_FEE_FACTOR.max(bridge.delivery_fee_factor / EXPONENTIAL_FEE_BASE);
+
log::info!(
target: LOG_TARGET,
"Bridge channel is uncongested. Decreased fee factor from {} to {}",
previous_factor,
- delivery_fee_factor,
+ bridge.delivery_fee_factor,
);
Self::deposit_event(Event::DeliveryFeeFactorDecreased {
- new_value: delivery_fee_factor,
+ new_value: bridge.delivery_fee_factor,
});
- DeliveryFeeFactor::<T, I>::put(delivery_fee_factor);
+ Bridge::<T, I>::put(bridge);
T::WeightInfo::on_initialize_when_non_congested()
}
}
- /// Initialization value for the delivery fee factor.
- #[pallet::type_value]
- pub fn InitialFactor() -> FixedU128 {
- MINIMAL_DELIVERY_FEE_FACTOR
+ #[pallet::call]
+ impl<T: Config<I>, I: 'static> Pallet<T, I> {
+ /// Notification about congested bridge queue.
+ #[pallet::call_index(0)]
+ #[pallet::weight(T::WeightInfo::report_bridge_status())]
+ pub fn report_bridge_status(
+ origin: OriginFor<T>,
+ // this argument is not currently used, but to ease future migration, we'll keep it
+ // here
+ bridge_id: H256,
+ is_congested: bool,
+ ) -> DispatchResult {
+ let _ = T::BridgeHubOrigin::ensure_origin(origin)?;
+
+ log::info!(
+ target: LOG_TARGET,
+ "Received bridge status from {:?}: congested = {}",
+ bridge_id,
+ is_congested,
+ );
+
+ Bridge::<T, I>::mutate(|bridge| {
+ bridge.is_congested = is_congested;
+ });
+ Ok(())
+ }
}
- /// The number to multiply the base delivery fee by.
+ /// Bridge that we are using.
///
- /// This factor is shared by all bridges, served by this pallet. For example, if this
- /// chain (`Config::UniversalLocation`) opens two bridges (
- /// `X2(GlobalConsensus(Config::BridgedNetworkId::get()), Parachain(1000))` and
- /// `X2(GlobalConsensus(Config::BridgedNetworkId::get()), Parachain(2000))`), then they
- /// both will be sharing the same fee factor. This is because both bridges are sharing
- /// the same local XCM channel with the child/sibling bridge hub, which we are using
- /// to detect congestion:
- ///
- /// ```nocompile
- /// ThisChain --- Local XCM channel --> Sibling Bridge Hub ------
- /// | |
- /// | |
- /// | |
- /// Lane1 Lane2
- /// | |
- /// | |
- /// | |
- /// \ / |
- /// Parachain1 <-- Local XCM channel --- Remote Bridge Hub <------
- /// |
- /// |
- /// Parachain1 <-- Local XCM channel ---------
- /// ```
- ///
- /// If at least one of other channels is congested, the local XCM channel with sibling
- /// bridge hub eventually becomes congested too. And we have no means to detect - which
- /// bridge exactly causes the congestion. So the best solution here is not to make
- /// any differences between all bridges, started by this chain.
+ /// **bridges-v1** assumptions: all outbound messages through this router use a single lane
+ /// to a single remote consensus. If some other remote consensus uses the same bridge hub,
+ /// a separate pallet instance shall be used. In `v2` we'll have all required primitives
+ /// (lane-id aka bridge-id, derived from XCM locations) to support multiple bridges within
+ /// the same pallet instance.
#[pallet::storage]
- #[pallet::getter(fn delivery_fee_factor)]
- pub type DeliveryFeeFactor<T: Config<I>, I: 'static = ()> =
- StorageValue<_, FixedU128, ValueQuery, InitialFactor>;
+ #[pallet::getter(fn bridge)]
+ pub type Bridge<T: Config<I>, I: 'static = ()> = StorageValue<_, BridgeState, ValueQuery>;
impl<T: Config<I>, I: 'static> Pallet<T, I> {
/// Called when new message is sent (queued to local outbound XCM queue) over the bridge.
pub(crate) fn on_message_sent_to_bridge(message_size: u32) {
- // if outbound channel is not congested, do nothing
- if !T::LocalXcmChannelManager::is_congested(&T::SiblingBridgeHubLocation::get()) {
- return
- }
+ log::trace!(
+ target: LOG_TARGET,
+ "on_message_sent_to_bridge - message_size: {message_size:?}",
+ );
+ let _ = Bridge::<T, I>::try_mutate(|bridge| {
+ let is_channel_with_bridge_hub_congested =
+ T::LocalXcmChannelManager::is_congested(&T::SiblingBridgeHubLocation::get());
+ let is_bridge_congested = bridge.is_congested;
+
+ // if outbound queue is not congested AND bridge has not reported congestion, do
+ // nothing
+ if !is_channel_with_bridge_hub_congested && !is_bridge_congested {
+ return Err(())
+ }
+
+ // ok - we need to increase the fee factor, let's do that
+ let message_size_factor = FixedU128::from_u32(message_size.saturating_div(1024))
+ .saturating_mul(MESSAGE_SIZE_FEE_BASE);
+ let total_factor = EXPONENTIAL_FEE_BASE.saturating_add(message_size_factor);
+ let previous_factor = bridge.delivery_fee_factor;
+ bridge.delivery_fee_factor =
+ bridge.delivery_fee_factor.saturating_mul(total_factor);
- // ok - we need to increase the fee factor, let's do that
- let message_size_factor = FixedU128::from_u32(message_size.saturating_div(1024))
- .saturating_mul(MESSAGE_SIZE_FEE_BASE);
- let total_factor = EXPONENTIAL_FEE_BASE.saturating_add(message_size_factor);
- DeliveryFeeFactor::<T, I>::mutate(|f| {
- let previous_factor = *f;
- *f = f.saturating_mul(total_factor);
log::info!(
target: LOG_TARGET,
"Bridge channel is congested. Increased fee factor from {} to {}",
previous_factor,
- f,
+ bridge.delivery_fee_factor,
);
- Self::deposit_event(Event::DeliveryFeeFactorIncreased { new_value: *f });
- *f
+ Self::deposit_event(Event::DeliveryFeeFactorIncreased {
+ new_value: bridge.delivery_fee_factor,
+ });
+ Ok(())
});
}
}
@@ -310,9 +330,9 @@ impl<T: Config<I>, I: 'static> ExporterFor for Pallet<T, I> {
let message_size = message.encoded_size();
let message_fee = (message_size as u128).saturating_mul(T::ByteFee::get());
let fee_sum = base_fee.saturating_add(message_fee);
-
- let fee_factor = Self::delivery_fee_factor();
+ let fee_factor = Self::bridge().delivery_fee_factor;
let fee = fee_factor.saturating_mul_int(fee_sum);
+
let fee = if fee > 0 { Some((T::FeeAsset::get(), fee).into()) } else { None };
log::info!(
@@ -427,24 +447,47 @@ mod tests {
use frame_system::{EventRecord, Phase};
use sp_runtime::traits::One;
+ fn congested_bridge(delivery_fee_factor: FixedU128) -> BridgeState {
+ BridgeState { is_congested: true, delivery_fee_factor }
+ }
+
+ fn uncongested_bridge(delivery_fee_factor: FixedU128) -> BridgeState {
+ BridgeState { is_congested: false, delivery_fee_factor }
+ }
+
#[test]
fn initial_fee_factor_is_one() {
run_test(|| {
- assert_eq!(DeliveryFeeFactor::<TestRuntime, ()>::get(), MINIMAL_DELIVERY_FEE_FACTOR);
+ assert_eq!(
+ Bridge::<TestRuntime, ()>::get(),
+ uncongested_bridge(MINIMAL_DELIVERY_FEE_FACTOR),
+ );
})
}
#[test]
fn fee_factor_is_not_decreased_from_on_initialize_when_xcm_channel_is_congested() {
run_test(|| {
- DeliveryFeeFactor::<TestRuntime, ()>::put(FixedU128::from_rational(125, 100));
+ Bridge::<TestRuntime, ()>::put(uncongested_bridge(FixedU128::from_rational(125, 100)));
TestLocalXcmChannelManager::make_congested(&SiblingBridgeHubLocation::get());
// it should not decrease, because queue is congested
- let old_delivery_fee_factor = XcmBridgeHubRouter::delivery_fee_factor();
+ let old_delivery = XcmBridgeHubRouter::bridge();
XcmBridgeHubRouter::on_initialize(One::one());
- assert_eq!(XcmBridgeHubRouter::delivery_fee_factor(), old_delivery_fee_factor);
+ assert_eq!(XcmBridgeHubRouter::bridge(), old_delivery);
+ assert_eq!(System::events(), vec![]);
+ })
+ }
+
+ #[test]
+ fn fee_factor_is_not_decreased_from_on_initialize_when_bridge_has_reported_congestion() {
+ run_test(|| {
+ Bridge::::put(congested_bridge(FixedU128::from_rational(125, 100)));
+ // it should not decrease, because the bridge is congested
+ let old_bridge = XcmBridgeHubRouter::bridge();
+ XcmBridgeHubRouter::on_initialize(One::one());
+ assert_eq!(XcmBridgeHubRouter::bridge(), old_bridge);
assert_eq!(System::events(), vec![]);
})
}
@@ -453,16 +496,19 @@ mod tests {
fn fee_factor_is_decreased_from_on_initialize_when_xcm_channel_is_uncongested() {
run_test(|| {
let initial_fee_factor = FixedU128::from_rational(125, 100);
- DeliveryFeeFactor::<TestRuntime, ()>::put(initial_fee_factor);
+ Bridge::<TestRuntime, ()>::put(uncongested_bridge(initial_fee_factor));
- // it shold eventually decreased to one
- while XcmBridgeHubRouter::delivery_fee_factor() > MINIMAL_DELIVERY_FEE_FACTOR {
+ // it should eventually decrease to one
+ while XcmBridgeHubRouter::bridge().delivery_fee_factor > MINIMAL_DELIVERY_FEE_FACTOR {
XcmBridgeHubRouter::on_initialize(One::one());
}
- // verify that it doesn't decreases anymore
+ // verify that it doesn't decrease anymore
XcmBridgeHubRouter::on_initialize(One::one());
- assert_eq!(XcmBridgeHubRouter::delivery_fee_factor(), MINIMAL_DELIVERY_FEE_FACTOR);
+ assert_eq!(
+ XcmBridgeHubRouter::bridge(),
+ uncongested_bridge(MINIMAL_DELIVERY_FEE_FACTOR)
+ );
// check emitted event
let first_system_event = System::events().first().cloned();
@@ -582,7 +628,7 @@ mod tests {
// but when factor is larger than one, it increases the fee, so it becomes:
// `(BASE_FEE + BYTE_FEE * msg_size) * F + HRMP_FEE`
let factor = FixedU128::from_rational(125, 100);
- DeliveryFeeFactor::<TestRuntime, ()>::put(factor);
+ Bridge::<TestRuntime, ()>::put(uncongested_bridge(factor));
let expected_fee =
(FixedU128::saturating_from_integer(BASE_FEE + BYTE_FEE * (msg_size as u128)) *
factor)
@@ -598,7 +644,7 @@ mod tests {
#[test]
fn sent_message_doesnt_increase_factor_if_queue_is_uncongested() {
run_test(|| {
- let old_delivery_fee_factor = XcmBridgeHubRouter::delivery_fee_factor();
+ let old_bridge = XcmBridgeHubRouter::bridge();
assert_eq!(
send_xcm::<XcmBridgeHubRouter>(
Location::new(2, [GlobalConsensus(BridgedNetworkId::get()), Parachain(1000)]),
@@ -609,7 +655,7 @@ mod tests {
);
assert!(TestToBridgeHubSender::is_message_sent());
- assert_eq!(old_delivery_fee_factor, XcmBridgeHubRouter::delivery_fee_factor());
+ assert_eq!(old_bridge, XcmBridgeHubRouter::bridge());
assert_eq!(System::events(), vec![]);
});
@@ -620,7 +666,39 @@ mod tests {
run_test(|| {
TestLocalXcmChannelManager::make_congested(&SiblingBridgeHubLocation::get());
- let old_delivery_fee_factor = XcmBridgeHubRouter::delivery_fee_factor();
+ let old_bridge = XcmBridgeHubRouter::bridge();
+ assert_ok!(send_xcm::<XcmBridgeHubRouter>(
+ Location::new(2, [GlobalConsensus(BridgedNetworkId::get()), Parachain(1000)]),
+ vec![ClearOrigin].into(),
+ )
+ .map(drop));
+
+ assert!(TestToBridgeHubSender::is_message_sent());
+ assert!(
+ old_bridge.delivery_fee_factor < XcmBridgeHubRouter::bridge().delivery_fee_factor
+ );
+
+ // check emitted event
+ let first_system_event = System::events().first().cloned();
+ assert!(matches!(
+ first_system_event,
+ Some(EventRecord {
+ phase: Phase::Initialization,
+ event: RuntimeEvent::XcmBridgeHubRouter(
+ Event::DeliveryFeeFactorIncreased { .. }
+ ),
+ ..
+ })
+ ));
+ });
+ }
+
+ #[test]
+ fn sent_message_increases_factor_if_bridge_has_reported_congestion() {
+ run_test(|| {
+ Bridge::<TestRuntime, ()>::put(congested_bridge(MINIMAL_DELIVERY_FEE_FACTOR));
+
+ let old_bridge = XcmBridgeHubRouter::bridge();
assert_ok!(send_xcm::<XcmBridgeHubRouter>(
Location::new(2, [GlobalConsensus(BridgedNetworkId::get()), Parachain(1000)]),
vec![ClearOrigin].into(),
@@ -628,7 +706,9 @@ mod tests {
.map(drop));
assert!(TestToBridgeHubSender::is_message_sent());
- assert!(old_delivery_fee_factor < XcmBridgeHubRouter::delivery_fee_factor());
+ assert!(
+ old_bridge.delivery_fee_factor < XcmBridgeHubRouter::bridge().delivery_fee_factor
+ );
// check emitted event
let first_system_event = System::events().first().cloned();
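
Taken together, the hunks above keep the old fee dynamics while folding the congestion flag into the single `Bridge` storage item: a congested send multiplies `delivery_fee_factor` by `EXPONENTIAL_FEE_BASE` plus a per-KiB size component, and every uncongested block divides it back down until it floors at the minimum. A standalone sketch of those dynamics, assuming `EXPONENTIAL_FEE_BASE = 1.05`, `MESSAGE_SIZE_FEE_BASE = 0.001` and a minimal factor of 1 (the pallet's constants are authoritative):

use sp_runtime::{FixedU128, Saturating};

fn main() {
    // Assumed values; see the pallet constants for the real ones.
    let exponential_fee_base = FixedU128::from_rational(105, 100); // +5% per send
    let message_size_fee_base = FixedU128::from_rational(1, 1000); // per KiB
    let minimal = FixedU128::from_u32(1);

    // Congested send of a 4096-byte message: factor *= (1.05 + 4 * 0.001).
    let mut factor = minimal;
    let size_factor = FixedU128::from_u32(4096 / 1024).saturating_mul(message_size_fee_base);
    factor = factor.saturating_mul(exponential_fee_base.saturating_add(size_factor));
    println!("factor after one congested send: {factor:?}");

    // Uncongested blocks decay the factor by /1.05, mirroring `on_initialize`.
    let mut blocks = 0u32;
    while factor > minimal {
        factor = minimal.max(factor / exponential_fee_base);
        blocks += 1;
    }
    println!("back to the minimal factor after {blocks} block(s)");
}
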
diff --git a/bridges/modules/xcm-bridge-hub-router/src/mock.rs b/bridges/modules/xcm-bridge-hub-router/src/mock.rs
index 095572883920..ac642e108c2a 100644
--- a/bridges/modules/xcm-bridge-hub-router/src/mock.rs
+++ b/bridges/modules/xcm-bridge-hub-router/src/mock.rs
@@ -80,6 +80,7 @@ impl pallet_xcm_bridge_hub_router::Config<()> for TestRuntime {
type DestinationVersion =
LatestOrNoneForLocationVersionChecker<Equals<SiblingBridgeHubLocation>>;
+ type BridgeHubOrigin = frame_system::EnsureRoot<AccountId>;
type ToBridgeHubSender = TestToBridgeHubSender;
type LocalXcmChannelManager = TestLocalXcmChannelManager;
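
With `BridgeHubOrigin = EnsureRoot<AccountId>` in the mock, the new call can be exercised straight from a test. A hedged sketch in the pallet's test environment; names are the mock's, and a production runtime would gate the call behind the sibling bridge hub's origin rather than root:

#[test]
fn root_can_report_bridge_status() {
    run_test(|| {
        assert_ok!(XcmBridgeHubRouter::report_bridge_status(
            RuntimeOrigin::root(),
            Default::default(), // bridge_id is accepted but currently unused
            true,
        ));
        assert!(XcmBridgeHubRouter::bridge().is_congested);
    });
}
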
diff --git a/bridges/modules/xcm-bridge-hub-router/src/weights.rs b/bridges/modules/xcm-bridge-hub-router/src/weights.rs
index d9a0426fecaf..8f5012c9de26 100644
--- a/bridges/modules/xcm-bridge-hub-router/src/weights.rs
+++ b/bridges/modules/xcm-bridge-hub-router/src/weights.rs
@@ -52,6 +52,7 @@ use sp_std::marker::PhantomData;
pub trait WeightInfo {
fn on_initialize_when_non_congested() -> Weight;
fn on_initialize_when_congested() -> Weight;
+ fn report_bridge_status() -> Weight;
}
/// Weights for `pallet_xcm_bridge_hub_router` that are generated using one of the Bridge testnets.
@@ -85,6 +86,19 @@ impl<T: frame_system::Config> WeightInfo for BridgeWeight<T> {
// Minimum execution time: 4_239 nanoseconds.
Weight::from_parts(4_383_000, 3547).saturating_add(T::DbWeight::get().reads(1_u64))
}
+ /// Storage: `XcmBridgeHubRouter::Bridge` (r:1 w:1)
+ ///
+ /// Proof: `XcmBridgeHubRouter::Bridge` (`max_values`: Some(1), `max_size`: Some(17), added:
+ /// 512, mode: `MaxEncodedLen`)
+ fn report_bridge_status() -> Weight {
+ // Proof Size summary in bytes:
+ // Measured: `53`
+ // Estimated: `1502`
+ // Minimum execution time: 10_427 nanoseconds.
+ Weight::from_parts(10_682_000, 1502)
+ .saturating_add(T::DbWeight::get().reads(1_u64))
+ .saturating_add(T::DbWeight::get().writes(1_u64))
+ }
}
// For backwards compatibility and tests
@@ -120,4 +134,17 @@ impl WeightInfo for () {
// Minimum execution time: 4_239 nanoseconds.
Weight::from_parts(4_383_000, 3547).saturating_add(RocksDbWeight::get().reads(1_u64))
}
+ /// Storage: `XcmBridgeHubRouter::Bridge` (r:1 w:1)
+ ///
+ /// Proof: `XcmBridgeHubRouter::Bridge` (`max_values`: Some(1), `max_size`: Some(17), added:
+ /// 512, mode: `MaxEncodedLen`)
+ fn report_bridge_status() -> Weight {
+ // Proof Size summary in bytes:
+ // Measured: `53`
+ // Estimated: `1502`
+ // Minimum execution time: 10_427 nanoseconds.
+ Weight::from_parts(10_682_000, 1502)
+ .saturating_add(RocksDbWeight::get().reads(1_u64))
+ .saturating_add(RocksDbWeight::get().writes(1_u64))
+ }
}
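
Both impls report the same figures, one storage read plus one write, but `BridgeWeight<T>` prices the database operations with the runtime's configured `DbWeight`, while the `()` fallback uses `RocksDbWeight` so tests can run without a runtime. A sketch of consuming the new entry, assuming the trait is reachable at `pallet_xcm_bridge_hub_router::weights::WeightInfo` (the module path is an assumption):

use frame_support::weights::Weight;
use pallet_xcm_bridge_hub_router::weights::WeightInfo; // assumed path

fn main() {
    // The fallback impl bakes RocksDbWeight's read + write into the total.
    let w: Weight = <() as WeightInfo>::report_bridge_status();
    println!("ref_time = {}, proof_size = {}", w.ref_time(), w.proof_size());
}
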
diff --git a/bridges/modules/xcm-bridge-hub/Cargo.toml b/bridges/modules/xcm-bridge-hub/Cargo.toml
index fe58b910a94e..ef49b3396b5c 100644
--- a/bridges/modules/xcm-bridge-hub/Cargo.toml
+++ b/bridges/modules/xcm-bridge-hub/Cargo.toml
@@ -39,6 +39,7 @@ sp-io = { workspace = true }
bp-runtime = { workspace = true }
bp-header-chain = { workspace = true }
pallet-xcm-bridge-hub-router = { workspace = true }
+bp-xcm-bridge-hub-router = { workspace = true }
polkadot-parachain-primitives = { workspace = true }
[features]
@@ -47,6 +48,7 @@ std = [
"bp-header-chain/std",
"bp-messages/std",
"bp-runtime/std",
+ "bp-xcm-bridge-hub-router/std",
"bp-xcm-bridge-hub/std",
"codec/std",
"frame-support/std",
@@ -75,6 +77,7 @@ runtime-benchmarks = [
"sp-runtime/runtime-benchmarks",
"xcm-builder/runtime-benchmarks",
"xcm-executor/runtime-benchmarks",
+ "xcm/runtime-benchmarks",
]
try-runtime = [
"frame-support/try-runtime",
diff --git a/bridges/modules/xcm-bridge-hub/src/exporter.rs b/bridges/modules/xcm-bridge-hub/src/exporter.rs
index 5afb9f36bc94..93b6093b42af 100644
--- a/bridges/modules/xcm-bridge-hub/src/exporter.rs
+++ b/bridges/modules/xcm-bridge-hub/src/exporter.rs
@@ -364,7 +364,7 @@ mod tests {
use bp_runtime::RangeInclusiveExt;
use bp_xcm_bridge_hub::{Bridge, BridgeLocations, BridgeState};
- use frame_support::assert_ok;
+ use frame_support::{assert_ok, traits::EnsureOrigin};
use pallet_bridge_messages::InboundLaneStorage;
use xcm_builder::{NetworkExportTable, UnpaidRemoteExporter};
use xcm_executor::traits::{export_xcm, ConvertLocation};
@@ -381,9 +381,8 @@ mod tests {
BridgedUniversalDestination::get()
}
- fn open_lane() -> (BridgeLocations, TestLaneIdType) {
+ fn open_lane(origin: RuntimeOrigin) -> (BridgeLocations, TestLaneIdType) {
// open expected outbound lane
- let origin = OpenBridgeOrigin::sibling_parachain_origin();
let with = bridged_asset_hub_universal_location();
let locations =
XcmOverBridge::bridge_locations_from_origin(origin, Box::new(with.into())).unwrap();
@@ -439,7 +438,7 @@ mod tests {
}
fn open_lane_and_send_regular_message() -> (BridgeId, TestLaneIdType) {
- let (locations, lane_id) = open_lane();
+ let (locations, lane_id) = open_lane(OpenBridgeOrigin::sibling_parachain_origin());
// now let's try to enqueue message using our `ExportXcm` implementation
export_xcm::<XcmOverBridge>(
@@ -473,7 +472,7 @@ mod tests {
fn exporter_does_not_suspend_the_bridge_if_outbound_bridge_queue_is_not_congested() {
run_test(|| {
let (bridge_id, _) = open_lane_and_send_regular_message();
- assert!(!TestLocalXcmChannelManager::is_bridge_suspened());
+ assert!(!TestLocalXcmChannelManager::is_bridge_suspended(&bridge_id));
assert_eq!(XcmOverBridge::bridge(&bridge_id).unwrap().state, BridgeState::Opened);
});
}
@@ -490,7 +489,7 @@ mod tests {
}
open_lane_and_send_regular_message();
- assert!(!TestLocalXcmChannelManager::is_bridge_suspened());
+ assert!(!TestLocalXcmChannelManager::is_bridge_suspended(&bridge_id));
});
}
@@ -502,11 +501,11 @@ mod tests {
open_lane_and_send_regular_message();
}
- assert!(!TestLocalXcmChannelManager::is_bridge_suspened());
+ assert!(!TestLocalXcmChannelManager::is_bridge_suspended(&bridge_id));
assert_eq!(XcmOverBridge::bridge(&bridge_id).unwrap().state, BridgeState::Opened);
open_lane_and_send_regular_message();
- assert!(TestLocalXcmChannelManager::is_bridge_suspened());
+ assert!(TestLocalXcmChannelManager::is_bridge_suspended(&bridge_id));
assert_eq!(XcmOverBridge::bridge(&bridge_id).unwrap().state, BridgeState::Suspended);
});
}
@@ -523,7 +522,7 @@ mod tests {
OUTBOUND_LANE_UNCONGESTED_THRESHOLD + 1,
);
- assert!(!TestLocalXcmChannelManager::is_bridge_resumed());
+ assert!(!TestLocalXcmChannelManager::is_bridge_resumed(&bridge_id));
assert_eq!(XcmOverBridge::bridge(&bridge_id).unwrap().state, BridgeState::Suspended);
});
}
@@ -537,7 +536,7 @@ mod tests {
OUTBOUND_LANE_UNCONGESTED_THRESHOLD,
);
- assert!(!TestLocalXcmChannelManager::is_bridge_resumed());
+ assert!(!TestLocalXcmChannelManager::is_bridge_resumed(&bridge_id));
assert_eq!(XcmOverBridge::bridge(&bridge_id).unwrap().state, BridgeState::Opened);
});
}
@@ -554,7 +553,7 @@ mod tests {
OUTBOUND_LANE_UNCONGESTED_THRESHOLD,
);
- assert!(TestLocalXcmChannelManager::is_bridge_resumed());
+ assert!(TestLocalXcmChannelManager::is_bridge_resumed(&bridge_id));
assert_eq!(XcmOverBridge::bridge(&bridge_id).unwrap().state, BridgeState::Opened);
});
}
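The three tests above pin down a hysteresis: the exporter suspends the bridge once the outbound queue grows past the congested threshold and resumes it only after the queue drains to the (lower) uncongested threshold. A condensed model of that rule; the threshold values are assumptions, the tests only require `CONGESTED > UNCONGESTED`:

```rust
// Assumed values; only the ordering of the two thresholds matters here.
const OUTBOUND_LANE_CONGESTED_THRESHOLD: u64 = 8_192;
const OUTBOUND_LANE_UNCONGESTED_THRESHOLD: u64 = 1_024;

#[derive(Clone, Copy, PartialEq, Debug)]
enum BridgeState { Opened, Suspended }

fn on_message_enqueued(state: BridgeState, enqueued: u64) -> BridgeState {
	match state {
		// suspend only once the queue exceeds the *congested* threshold
		BridgeState::Opened if enqueued > OUTBOUND_LANE_CONGESTED_THRESHOLD => BridgeState::Suspended,
		s => s,
	}
}

fn on_messages_delivered(state: BridgeState, enqueued: u64) -> BridgeState {
	match state {
		// resume only after draining to the *uncongested* threshold
		BridgeState::Suspended if enqueued <= OUTBOUND_LANE_UNCONGESTED_THRESHOLD => BridgeState::Opened,
		s => s,
	}
}

fn main() {
	let s = on_message_enqueued(BridgeState::Opened, OUTBOUND_LANE_CONGESTED_THRESHOLD + 1);
	assert_eq!(s, BridgeState::Suspended);
	// one message above the uncongested threshold: still suspended
	assert_eq!(on_messages_delivered(s, OUTBOUND_LANE_UNCONGESTED_THRESHOLD + 1), BridgeState::Suspended);
	// at the threshold: resumed
	assert_eq!(on_messages_delivered(s, OUTBOUND_LANE_UNCONGESTED_THRESHOLD), BridgeState::Opened);
}
```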
@@ -648,7 +647,10 @@ mod tests {
let dest = Location::new(2, BridgedUniversalDestination::get());
// open bridge
- let (_, expected_lane_id) = open_lane();
+ let origin = OpenBridgeOrigin::sibling_parachain_origin();
+ let origin_as_location =
+ OpenBridgeOriginOf::<TestRuntime>::try_origin(origin.clone()).unwrap();
+ let (_, expected_lane_id) = open_lane(origin);
// check before - no messages
assert_eq!(
@@ -662,18 +664,24 @@ mod tests {
);
// send `ExportMessage(message)` by `UnpaidRemoteExporter`.
- TestExportXcmWithXcmOverBridge::set_origin_for_execute(SiblingLocation::get());
+ ExecuteXcmOverSendXcm::set_origin_for_execute(origin_as_location);
assert_ok!(send_xcm::<
UnpaidRemoteExporter<
NetworkExportTable<BridgeTable>,
- TestExportXcmWithXcmOverBridge,
+ ExecuteXcmOverSendXcm,
UniversalLocation,
>,
>(dest.clone(), Xcm::<()>::default()));
+ // we need to set the `UniversalLocation` of `sibling_parachain_origin` for the
+ // `XcmOverBridgeWrappedWithExportMessageRouterInstance`.
+ ExportMessageOriginUniversalLocation::set(Some(SiblingUniversalLocation::get()));
// send `ExportMessage(message)` by `pallet_xcm_bridge_hub_router`.
- TestExportXcmWithXcmOverBridge::set_origin_for_execute(SiblingLocation::get());
- assert_ok!(send_xcm::<XcmOverBridgeRouter>(dest.clone(), Xcm::<()>::default()));
+ ExecuteXcmOverSendXcm::set_origin_for_execute(SiblingLocation::get());
+ assert_ok!(send_xcm::<XcmOverBridgeWrappedWithExportMessageRouter>(
+ dest.clone(),
+ Xcm::<()>::default()
+ ));
// check after - a message ready to be relayed
assert_eq!(
@@ -765,7 +773,7 @@ mod tests {
);
// ok
- let _ = open_lane();
+ let _ = open_lane(OpenBridgeOrigin::sibling_parachain_origin());
let mut dest_wrapper = Some(bridged_relative_destination());
assert_ok!(XcmOverBridge::validate(
BridgedRelayNetwork::get(),
@@ -780,4 +788,77 @@ mod tests {
assert_eq!(None, dest_wrapper);
});
}
+
+ #[test]
+ fn congestion_with_pallet_xcm_bridge_hub_router_works() {
+ run_test(|| {
+ // valid routable destination
+ let dest = Location::new(2, BridgedUniversalDestination::get());
+
+ fn router_bridge_state() -> pallet_xcm_bridge_hub_router::BridgeState {
+ pallet_xcm_bridge_hub_router::Bridge::<
+ TestRuntime,
+ XcmOverBridgeWrappedWithExportMessageRouterInstance,
+ >::get()
+ }
+
+ // open the bridge (a single bridge is enough for this test)
+ let origin = OpenBridgeOrigin::sibling_parachain_origin();
+ let origin_as_location =
+ OpenBridgeOriginOf::<TestRuntime>::try_origin(origin.clone()).unwrap();
+ let (bridge_1, expected_lane_id_1) = open_lane(origin);
+
+ // we need to set the `UniversalLocation` of `sibling_parachain_origin` for the
+ // `XcmOverBridgeWrappedWithExportMessageRouterInstance`.
+ ExportMessageOriginUniversalLocation::set(Some(SiblingUniversalLocation::get()));
+
+ // check before: the bridge is opened
+ assert_eq!(
+ XcmOverBridge::bridge(bridge_1.bridge_id()).unwrap().state,
+ BridgeState::Opened
+ );
+
+ // the router is uncongested
+ assert!(!router_bridge_state().is_congested);
+ assert!(!TestLocalXcmChannelManager::is_bridge_suspended(bridge_1.bridge_id()));
+ assert!(!TestLocalXcmChannelManager::is_bridge_resumed(bridge_1.bridge_id()));
+
+ // make the bridge congested by sending too many messages
+ for _ in 1..(OUTBOUND_LANE_CONGESTED_THRESHOLD + 2) {
+ // send `ExportMessage(message)` by `pallet_xcm_bridge_hub_router`.
+ ExecuteXcmOverSendXcm::set_origin_for_execute(origin_as_location.clone());
+ assert_ok!(send_xcm::<XcmOverBridgeWrappedWithExportMessageRouter>(
+ dest.clone(),
+ Xcm::<()>::default()
+ ));
+ }
+
+ // check after: the bridge is suspended
+ assert_eq!(
+ XcmOverBridge::bridge(bridge_1.bridge_id()).unwrap().state,
+ BridgeState::Suspended,
+ );
+ // the router is congested
+ assert!(router_bridge_state().is_congested);
+ assert!(TestLocalXcmChannelManager::is_bridge_suspended(bridge_1.bridge_id()));
+ assert!(!TestLocalXcmChannelManager::is_bridge_resumed(bridge_1.bridge_id()));
+
+ // make the bridge uncongested to trigger the resume signal
+ XcmOverBridge::on_bridge_messages_delivered(
+ expected_lane_id_1,
+ OUTBOUND_LANE_UNCONGESTED_THRESHOLD,
+ );
+
+ // the bridge is opened again
+ assert_eq!(
+ XcmOverBridge::bridge(bridge_1.bridge_id()).unwrap().state,
+ BridgeState::Opened
+ );
+ // the router is uncongested
+ assert!(!router_bridge_state().is_congested);
+ assert!(TestLocalXcmChannelManager::is_bridge_resumed(bridge_1.bridge_id()));
+ })
+ }
}
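The new `congestion_with_pallet_xcm_bridge_hub_router_works` test closes the loop: exporter-side congestion now reaches the router, which sets `is_congested` and, in the production pallet, inflates its delivery fee factor until the resume signal clears the flag. A sketch of that escalation, assuming the router's exponential base of 1.05 (its `EXPONENTIAL_FEE_BASE` constant):

```rust
use sp_arithmetic::{traits::Saturating, FixedU128};

fn main() {
	// assumed to match the router pallet's `EXPONENTIAL_FEE_BASE`
	let base = FixedU128::from_rational(105, 100);
	let mut fee_factor = FixedU128::from_rational(1, 1);
	for sent in 1..=20u32 {
		// while congested, every additional queued message multiplies the factor
		fee_factor = fee_factor.saturating_mul(base);
		if sent % 10 == 0 {
			println!("fee factor after {sent} congested messages: {fee_factor:?}");
		}
	}
}
```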
diff --git a/bridges/modules/xcm-bridge-hub/src/lib.rs b/bridges/modules/xcm-bridge-hub/src/lib.rs
index 1b2536598a20..682db811efa7 100644
--- a/bridges/modules/xcm-bridge-hub/src/lib.rs
+++ b/bridges/modules/xcm-bridge-hub/src/lib.rs
@@ -145,8 +145,8 @@
use bp_messages::{LaneState, MessageNonce};
use bp_runtime::{AccountIdOf, BalanceOf, RangeInclusiveExt};
-pub use bp_xcm_bridge_hub::{Bridge, BridgeId, BridgeState};
-use bp_xcm_bridge_hub::{BridgeLocations, BridgeLocationsError, LocalXcmChannelManager};
+pub use bp_xcm_bridge_hub::{Bridge, BridgeId, BridgeState, LocalXcmChannelManager};
+use bp_xcm_bridge_hub::{BridgeLocations, BridgeLocationsError};
use frame_support::{traits::fungible::MutateHold, DefaultNoBound};
use frame_system::Config as SystemConfig;
use pallet_bridge_messages::{Config as BridgeMessagesConfig, LanesManagerError};
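With `LocalXcmChannelManager` now re-exported from the pallet, downstream crates no longer need a direct `bp-xcm-bridge-hub` dependency to name it. A minimal no-op implementation, assuming the trait keeps the `is_congested`/`suspend_bridge`/`resume_bridge` shape exercised by the tests in this patch:

```rust
use pallet_xcm_bridge_hub::{BridgeId, LocalXcmChannelManager};
use xcm::latest::Location;

/// A manager for chains that never back-pressure the local XCM channel.
pub struct NoopLocalXcmChannelManager;
impl LocalXcmChannelManager for NoopLocalXcmChannelManager {
	type Error = ();

	fn is_congested(_with: &Location) -> bool {
		false // never report congestion
	}
	fn suspend_bridge(_local_origin: &Location, _bridge: BridgeId) -> Result<(), Self::Error> {
		Ok(()) // accept the suspend signal, do nothing
	}
	fn resume_bridge(_local_origin: &Location, _bridge: BridgeId) -> Result<(), Self::Error> {
		Ok(()) // accept the resume signal, do nothing
	}
}
```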
diff --git a/bridges/modules/xcm-bridge-hub/src/mock.rs b/bridges/modules/xcm-bridge-hub/src/mock.rs
index 9f06b99ef6d5..d186507dab17 100644
--- a/bridges/modules/xcm-bridge-hub/src/mock.rs
+++ b/bridges/modules/xcm-bridge-hub/src/mock.rs
@@ -24,10 +24,10 @@ use bp_messages::{
};
use bp_runtime::{messages::MessageDispatchResult, Chain, ChainId, HashOf};
use bp_xcm_bridge_hub::{BridgeId, LocalXcmChannelManager};
-use codec::Encode;
+use codec::{Decode, Encode};
use frame_support::{
assert_ok, derive_impl, parameter_types,
- traits::{EnsureOrigin, Equals, Everything, OriginTrait},
+ traits::{EnsureOrigin, Equals, Everything, Get, OriginTrait},
weights::RuntimeDbWeight,
};
use polkadot_parachain_primitives::primitives::Sibling;
@@ -44,7 +44,7 @@ use xcm_builder::{
InspectMessageQueues, NetworkExportTable, NetworkExportTableItem, ParentIsPreset,
SiblingParachainConvertsVia,
};
-use xcm_executor::XcmExecutor;
+use xcm_executor::{traits::ConvertOrigin, XcmExecutor};
pub type AccountId = AccountId32;
pub type Balance = u64;
@@ -63,7 +63,7 @@ frame_support::construct_runtime! {
Balances: pallet_balances::{Pallet, Event<T>},
Messages: pallet_bridge_messages::{Pallet, Call, Event<T>},
XcmOverBridge: pallet_xcm_bridge_hub::{Pallet, Call, HoldReason, Event<T>},
- XcmOverBridgeRouter: pallet_xcm_bridge_hub_router,
+ XcmOverBridgeWrappedWithExportMessageRouter: pallet_xcm_bridge_hub_router = 57,
}
}
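Renaming the router instance while pinning it to index 57 keeps the change from silently reshuffling pallet indices: the index is the first byte of every encoded call and event. A hypothetical illustration, with a hand-rolled enum standing in for the generated `RuntimeCall`:

```rust
use codec::Encode; // parity-scale-codec

// Stand-in: in the real runtime, `construct_runtime!` generates this enum.
#[derive(Encode)]
enum RuntimeCall {
	#[codec(index = 57)]
	XcmOverBridgeWrappedWithExportMessageRouter(u8), // placeholder for the pallet's calls
}

fn main() {
	let call = RuntimeCall::XcmOverBridgeWrappedWithExportMessageRouter(0);
	// the pinned pallet index is the first byte of every encoded call
	assert_eq!(call.encode()[0], 57);
}
```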
@@ -208,17 +208,27 @@ impl pallet_xcm_bridge_hub::Config for TestRuntime {
type BlobDispatcher = TestBlobDispatcher;
}
-impl pallet_xcm_bridge_hub_router::Config<()> for TestRuntime {
+/// A router instance simulates a scenario where the router is deployed on a different chain than
+/// the `MessageExporter`. This means that the router sends an `ExportMessage`.
+pub type XcmOverBridgeWrappedWithExportMessageRouterInstance = ();
+impl pallet_xcm_bridge_hub_router::Config<XcmOverBridgeWrappedWithExportMessageRouterInstance>
+ for TestRuntime
+{
type RuntimeEvent = RuntimeEvent;
type WeightInfo = ();
- type UniversalLocation = UniversalLocation;
+ type UniversalLocation = ExportMessageOriginUniversalLocation;
type SiblingBridgeHubLocation = BridgeHubLocation;
type BridgedNetworkId = BridgedRelayNetwork;
type Bridges = NetworkExportTable<BridgeTable>;
type DestinationVersion = AlwaysLatest;
- type ToBridgeHubSender = TestExportXcmWithXcmOverBridge;
+ // `BridgeHubLocationXcmOriginAsRoot` converts the bridge hub's `here` location to a
+ // root origin, so `EnsureRoot` can serve as the `BridgeHubOrigin` in this mock.
+ type BridgeHubOrigin = frame_system::EnsureRoot<AccountId>;
+ // **Note**: The crucial part is that `ExportMessage` is processed by `XcmExecutor`, which
+ // calls the `ExportXcm` implementation of `pallet_xcm_bridge_hub` as the
+ // `MessageExporter`.
+ type ToBridgeHubSender = ExecuteXcmOverSendXcm;
type LocalXcmChannelManager = TestLocalXcmChannelManager;
type ByteFee = ConstU128<0>;
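`BridgeHubLocationXcmOriginAsRoot` is referenced above and plugged into the executor below; a sketch of what such a converter can look like, assuming it maps the configured bridge hub location (arriving with `OriginKind::Xcm`) to the root origin. The `parameter_types!` value is a stand-in, not the mock's actual location:

```rust
use core::marker::PhantomData;
use frame_support::{parameter_types, traits::OriginTrait};
use xcm::latest::prelude::*;
use xcm_executor::traits::ConvertOrigin;

parameter_types! {
	// assumed stand-in for the mock's bridge hub location
	pub BridgeHubLocation: Location = Location::new(1, [Parachain(1002)]);
}

pub struct BridgeHubLocationXcmOriginAsRoot<RuntimeOrigin>(PhantomData<RuntimeOrigin>);
impl<RuntimeOrigin: OriginTrait> ConvertOrigin<RuntimeOrigin>
	for BridgeHubLocationXcmOriginAsRoot<RuntimeOrigin>
{
	fn convert_origin(
		origin: impl Into<Location>,
		kind: OriginKind,
	) -> Result<RuntimeOrigin, Location> {
		let origin = origin.into();
		if kind == OriginKind::Xcm && origin == BridgeHubLocation::get() {
			// messages arriving from the bridge hub location execute as root
			Ok(RuntimeOrigin::root())
		} else {
			// everything else is refused and the origin is handed back
			Err(origin)
		}
	}
}
```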
@@ -230,7 +240,7 @@ impl xcm_executor::Config for XcmConfig {
type RuntimeCall = RuntimeCall;
type XcmSender = ();
type AssetTransactor = ();
- type OriginConverter = ();
+ type OriginConverter = BridgeHubLocationXcmOriginAsRoot<RuntimeOrigin>;
type IsReserve = ();
type IsTeleporter = ();
type UniversalLocation = UniversalLocation;
@@ -270,8 +280,8 @@ thread_local! {
///
/// Note: The crucial part is that `ExportMessage` is processed by `XcmExecutor`, which calls the
/// `ExportXcm` implementation of `pallet_xcm_bridge_hub` as `MessageExporter`.
-pub struct TestExportXcmWithXcmOverBridge;
-impl SendXcm for TestExportXcmWithXcmOverBridge {
+pub struct ExecuteXcmOverSendXcm;
+impl SendXcm for ExecuteXcmOverSendXcm {
type Ticket = Xcm<()>;
fn validate(
@@ -298,7 +308,7 @@ impl SendXcm for TestExportXcmWithXcmOverBridge {
Ok(hash)
}
}
-impl InspectMessageQueues for TestExportXcmWithXcmOverBridge {
+impl InspectMessageQueues for ExecuteXcmOverSendXcm {
fn clear_messages() {
todo!()
}
@@ -307,12 +317,51 @@ impl InspectMessageQueues for TestExportXcmWithXcmOverBridge {
todo!()
}
}
-impl TestExportXcmWithXcmOverBridge {
+impl ExecuteXcmOverSendXcm {
pub fn set_origin_for_execute(origin: Location) {
EXECUTE_XCM_ORIGIN.with(|o| *o.borrow_mut() = Some(origin));
}
}
+/// A dynamic way to set a different universal location for the origin which sends `ExportMessage`.
+pub struct ExportMessageOriginUniversalLocation;
+impl ExportMessageOriginUniversalLocation {
+ pub(crate) fn set(universal_location: Option<InteriorLocation>) {
+ EXPORT_MESSAGE_ORIGIN_UNIVERSAL_LOCATION.with(|o| *o.borrow_mut() = universal_location);
+ }
+}
+impl Get<InteriorLocation> for ExportMessageOriginUniversalLocation {
+ fn get() -> InteriorLocation {
+ EXPORT_MESSAGE_ORIGIN_UNIVERSAL_LOCATION.with(|o| {
+ o.borrow()
+ .clone()
+ .expect("`EXPORT_MESSAGE_ORIGIN_UNIVERSAL_LOCATION` is not set!")
+ })
+ }
+}
+thread_local! {
+ pub static EXPORT_MESSAGE_ORIGIN_UNIVERSAL_LOCATION: RefCell<Option<InteriorLocation>> =