diff --git a/.config/nextest.toml b/.config/nextest.toml
index 912bf2514a77..b4bdec4aea92 100644
--- a/.config/nextest.toml
+++ b/.config/nextest.toml
@@ -21,7 +21,6 @@ retries = 5
# The number of threads to run tests with. Supported values are either an integer or
# the string "num-cpus". Can be overridden through the `--test-threads` option.
# test-threads = "num-cpus"
-
test-threads = 20
# The number of threads required for each test. This is generally used in overrides to
diff --git a/.github/env b/.github/env
index bb61e1f4cd99..730c37f1db80 100644
--- a/.github/env
+++ b/.github/env
@@ -1 +1 @@
-IMAGE="docker.io/paritytech/ci-unified:bullseye-1.81.0-2024-09-11-v202409111034"
+IMAGE="docker.io/paritytech/ci-unified:bullseye-1.81.0-2024-11-19-v202411281558"
diff --git a/.github/scripts/cmd/cmd.py b/.github/scripts/cmd/cmd.py
index 9da05cac17b9..2c017b7d0c3e 100755
--- a/.github/scripts/cmd/cmd.py
+++ b/.github/scripts/cmd/cmd.py
@@ -58,7 +58,7 @@ def setup_logging():
%(prog)s --runtime westend rococo --pallet pallet_balances pallet_multisig --quiet --clean
'''
-parser_bench = subparsers.add_parser('bench', help='Runs benchmarks', epilog=bench_example, formatter_class=argparse.RawDescriptionHelpFormatter)
+parser_bench = subparsers.add_parser('bench', help='Runs benchmarks (old CLI)', epilog=bench_example, formatter_class=argparse.RawDescriptionHelpFormatter)
for arg, config in common_args.items():
parser_bench.add_argument(arg, **config)
@@ -67,6 +67,35 @@ def setup_logging():
parser_bench.add_argument('--pallet', help='Pallet(s) space separated', nargs='*', default=[])
parser_bench.add_argument('--fail-fast', help='Fail fast on first failed benchmark', action='store_true')
+
+"""
+BENCH OMNI
+"""
+
+bench_example = '''**Examples**:
+ Runs all benchmarks
+ %(prog)s
+
+  Runs benchmarks for pallet_balances and pallet_multisig for all runtimes that have these pallets. **--quiet** makes it output nothing to the PR except reactions
+ %(prog)s --pallet pallet_balances pallet_xcm_benchmarks::generic --quiet
+
+  Runs benchmarks for all pallets of the westend runtime and fails fast on the first failed benchmark
+ %(prog)s --runtime westend --fail-fast
+
+  Outputs nothing and cleans up the bot's previous comments and the author's triggering command comment in the PR
+ %(prog)s --runtime westend rococo --pallet pallet_balances pallet_multisig --quiet --clean
+'''
+
+parser_bench_omni = subparsers.add_parser('bench-omni', help='Runs benchmarks (frame omni bencher)', epilog=bench_example, formatter_class=argparse.RawDescriptionHelpFormatter)
+
+for arg, config in common_args.items():
+    parser_bench_omni.add_argument(arg, **config)
+
+parser_bench_omni.add_argument('--runtime', help='Runtime(s) space separated', choices=runtimeNames, nargs='*', default=runtimeNames)
+parser_bench_omni.add_argument('--pallet', help='Pallet(s) space separated', nargs='*', default=[])
+parser_bench_omni.add_argument('--fail-fast', help='Fail fast on first failed benchmark', action='store_true')
+
+
"""
FMT
"""
@@ -98,12 +127,12 @@ def main():
print(f'args: {args}')
- if args.command == 'bench':
+ if args.command == 'bench-omni':
runtime_pallets_map = {}
failed_benchmarks = {}
successful_benchmarks = {}
- profile = "release"
+ profile = "production"
print(f'Provided runtimes: {args.runtime}')
# convert to mapped dict
@@ -113,11 +142,22 @@ def main():
# loop over remaining runtimes to collect available pallets
for runtime in runtimesMatrix.values():
- os.system(f"forklift cargo build -p {runtime['package']} --profile {profile} --features={runtime['bench_features']}")
+ build_command = f"forklift cargo build -p {runtime['package']} --profile {profile} --features={runtime['bench_features']}"
+ print(f'-- building "{runtime["name"]}" with `{build_command}`')
+ os.system(build_command)
print(f'-- listing pallets for benchmark for {runtime["name"]}')
wasm_file = f"target/{profile}/wbuild/{runtime['package']}/{runtime['package'].replace('-', '_')}.wasm"
- output = os.popen(
- f"frame-omni-bencher v1 benchmark pallet --no-csv-header --no-storage-info --no-min-squares --no-median-slopes --all --list --runtime={wasm_file} {runtime['bench_flags']}").read()
+ list_command = f"frame-omni-bencher v1 benchmark pallet " \
+ f"--no-csv-header " \
+ f"--no-storage-info " \
+ f"--no-min-squares " \
+ f"--no-median-slopes " \
+ f"--all " \
+ f"--list " \
+ f"--runtime={wasm_file} " \
+ f"{runtime['bench_flags']}"
+ print(f'-- running: {list_command}')
+ output = os.popen(list_command).read()
raw_pallets = output.strip().split('\n')
all_pallets = set()
@@ -230,6 +270,149 @@ def main():
print_and_log('✅ Successful benchmarks of runtimes/pallets:')
for runtime, pallets in successful_benchmarks.items():
print_and_log(f'-- {runtime}: {pallets}')
+
+ if args.command == 'bench':
+ runtime_pallets_map = {}
+ failed_benchmarks = {}
+ successful_benchmarks = {}
+
+ profile = "production"
+
+ print(f'Provided runtimes: {args.runtime}')
+ # convert to mapped dict
+ runtimesMatrix = list(filter(lambda x: x['name'] in args.runtime, runtimesMatrix))
+ runtimesMatrix = {x['name']: x for x in runtimesMatrix}
+        print(f'Filtered runtimes: {runtimesMatrix}')
+
+ # loop over remaining runtimes to collect available pallets
+ for runtime in runtimesMatrix.values():
+ build_command = f"forklift cargo build -p {runtime['old_package']} --profile {profile} --features={runtime['bench_features']} --locked"
+ print(f'-- building {runtime["name"]} with `{build_command}`')
+ os.system(build_command)
+
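+            # The node expects a chain spec id: the dev (kitchensink) runtime uses plain "dev", all others use "<name>-dev".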
+ chain = runtime['name'] if runtime['name'] == 'dev' else f"{runtime['name']}-dev"
+
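+            # Sanity-check the runner hardware against the reference benchmark before running pallets.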
+ machine_test = f"target/{profile}/{runtime['old_bin']} benchmark machine --chain={chain}"
+            print(f"Running machine test: `{machine_test}`")
+ os.system(machine_test)
+
+ print(f'-- listing pallets for benchmark for {chain}')
+ list_command = f"target/{profile}/{runtime['old_bin']} " \
+ f"benchmark pallet " \
+ f"--no-csv-header " \
+ f"--no-storage-info " \
+ f"--no-min-squares " \
+ f"--no-median-slopes " \
+ f"--all " \
+ f"--list " \
+ f"--chain={chain}"
+ print(f'-- running: {list_command}')
+ output = os.popen(list_command).read()
+ raw_pallets = output.strip().split('\n')
+
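+            # Each line of the list output is comma-separated ("pallet, extrinsic"); keep only the unique pallet names.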
+ all_pallets = set()
+ for pallet in raw_pallets:
+ if pallet:
+ all_pallets.add(pallet.split(',')[0].strip())
+
+ pallets = list(all_pallets)
+ print(f'Pallets in {runtime["name"]}: {pallets}')
+ runtime_pallets_map[runtime['name']] = pallets
+
+ print(f'\n')
+
+ # filter out only the specified pallets from collected runtimes/pallets
+ if args.pallet:
+ print(f'Pallets: {args.pallet}')
+ new_pallets_map = {}
+ # keep only specified pallets if they exist in the runtime
+ for runtime in runtime_pallets_map:
+ if set(args.pallet).issubset(set(runtime_pallets_map[runtime])):
+ new_pallets_map[runtime] = args.pallet
+
+ runtime_pallets_map = new_pallets_map
+
+        print(f'Filtered runtimes & pallets: {runtime_pallets_map}\n')
+
+ if not runtime_pallets_map:
+ if args.pallet and not args.runtime:
+ print(f"No pallets {args.pallet} found in any runtime")
+ elif args.runtime and not args.pallet:
+ print(f"{args.runtime} runtime does not have any pallets")
+ elif args.runtime and args.pallet:
+ print(f"No pallets {args.pallet} found in {args.runtime}")
+ else:
+ print('No runtimes found')
+ sys.exit(1)
+
+ for runtime in runtime_pallets_map:
+ for pallet in runtime_pallets_map[runtime]:
+ config = runtimesMatrix[runtime]
+ header_path = os.path.abspath(config['header'])
+ template = None
+
+ chain = config['name'] if runtime == 'dev' else f"{config['name']}-dev"
+
+ print(f'-- config: {config}')
+ if runtime == 'dev':
+ # to support sub-modules (https://github.com/paritytech/command-bot/issues/275)
+ search_manifest_path = f"cargo metadata --locked --format-version 1 --no-deps | jq -r '.packages[] | select(.name == \"{pallet.replace('_', '-')}\") | .manifest_path'"
+ print(f'-- running: {search_manifest_path}')
+ manifest_path = os.popen(search_manifest_path).read()
+ if not manifest_path:
+ print(f'-- pallet {pallet} not found in dev runtime')
+ if args.fail_fast:
+ print_and_log(f'Error: {pallet} not found in dev runtime')
+ sys.exit(1)
+ package_dir = os.path.dirname(manifest_path)
+ print(f'-- package_dir: {package_dir}')
+ print(f'-- manifest_path: {manifest_path}')
+ output_path = os.path.join(package_dir, "src", "weights.rs")
+ template = config['template']
+ else:
+ default_path = f"./{config['path']}/src/weights"
+ xcm_path = f"./{config['path']}/src/weights/xcm"
+ output_path = default_path
+ if pallet.startswith("pallet_xcm_benchmarks"):
+ template = config['template']
+ output_path = xcm_path
+
+ print(f'-- benchmarking {pallet} in {runtime} into {output_path}')
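+                # --template is only set for the dev runtime and for pallet_xcm_benchmarks pallets; other pallets use the default weights template.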
+ cmd = f"target/{profile}/{config['old_bin']} benchmark pallet " \
+ f"--extrinsic=* " \
+ f"--chain={chain} " \
+ f"--pallet={pallet} " \
+ f"--header={header_path} " \
+ f"--output={output_path} " \
+ f"--wasm-execution=compiled " \
+ f"--steps=50 " \
+ f"--repeat=20 " \
+ f"--heap-pages=4096 " \
+ f"{f'--template={template} ' if template else ''}" \
+ f"--no-storage-info --no-min-squares --no-median-slopes "
+ print(f'-- Running: {cmd} \n')
+ status = os.system(cmd)
+
+ if status != 0 and args.fail_fast:
+ print_and_log(f'❌ Failed to benchmark {pallet} in {runtime}')
+ sys.exit(1)
+
+ # Otherwise collect failed benchmarks and print them at the end
+ # push failed pallets to failed_benchmarks
+ if status != 0:
+ failed_benchmarks[f'{runtime}'] = failed_benchmarks.get(f'{runtime}', []) + [pallet]
+ else:
+ successful_benchmarks[f'{runtime}'] = successful_benchmarks.get(f'{runtime}', []) + [pallet]
+
+ if failed_benchmarks:
+ print_and_log('❌ Failed benchmarks of runtimes/pallets:')
+ for runtime, pallets in failed_benchmarks.items():
+ print_and_log(f'-- {runtime}: {pallets}')
+
+ if successful_benchmarks:
+ print_and_log('✅ Successful benchmarks of runtimes/pallets:')
+ for runtime, pallets in successful_benchmarks.items():
+ print_and_log(f'-- {runtime}: {pallets}')
elif args.command == 'fmt':
command = f"cargo +nightly fmt"
diff --git a/.github/scripts/cmd/test_cmd.py b/.github/scripts/cmd/test_cmd.py
index 7b29fbfe90d8..68998b989909 100644
--- a/.github/scripts/cmd/test_cmd.py
+++ b/.github/scripts/cmd/test_cmd.py
@@ -47,7 +47,7 @@
def get_mock_bench_output(runtime, pallets, output_path, header, bench_flags, template = None):
return f"frame-omni-bencher v1 benchmark pallet --extrinsic=* " \
- f"--runtime=target/release/wbuild/{runtime}-runtime/{runtime.replace('-', '_')}_runtime.wasm " \
+ f"--runtime=target/production/wbuild/{runtime}-runtime/{runtime.replace('-', '_')}_runtime.wasm " \
f"--pallet={pallets} --header={header} " \
f"--output={output_path} " \
f"--wasm-execution=compiled " \
@@ -93,7 +93,7 @@ def tearDown(self):
def test_bench_command_normal_execution_all_runtimes(self):
self.mock_parse_args.return_value = (argparse.Namespace(
- command='bench',
+ command='bench-omni',
runtime=list(map(lambda x: x['name'], mock_runtimes_matrix)),
pallet=['pallet_balances'],
fail_fast=True,
@@ -117,10 +117,10 @@ def test_bench_command_normal_execution_all_runtimes(self):
expected_calls = [
# Build calls
- call("forklift cargo build -p kitchensink-runtime --profile release --features=runtime-benchmarks"),
- call("forklift cargo build -p westend-runtime --profile release --features=runtime-benchmarks"),
- call("forklift cargo build -p rococo-runtime --profile release --features=runtime-benchmarks"),
- call("forklift cargo build -p asset-hub-westend-runtime --profile release --features=runtime-benchmarks"),
+ call("forklift cargo build -p kitchensink-runtime --profile production --features=runtime-benchmarks"),
+ call("forklift cargo build -p westend-runtime --profile production --features=runtime-benchmarks"),
+ call("forklift cargo build -p rococo-runtime --profile production --features=runtime-benchmarks"),
+ call("forklift cargo build -p asset-hub-westend-runtime --profile production --features=runtime-benchmarks"),
call(get_mock_bench_output(
runtime='kitchensink',
@@ -150,7 +150,7 @@ def test_bench_command_normal_execution_all_runtimes(self):
def test_bench_command_normal_execution(self):
self.mock_parse_args.return_value = (argparse.Namespace(
- command='bench',
+ command='bench-omni',
runtime=['westend'],
pallet=['pallet_balances', 'pallet_staking'],
fail_fast=True,
@@ -170,7 +170,7 @@ def test_bench_command_normal_execution(self):
expected_calls = [
# Build calls
- call("forklift cargo build -p westend-runtime --profile release --features=runtime-benchmarks"),
+ call("forklift cargo build -p westend-runtime --profile production --features=runtime-benchmarks"),
# Westend runtime calls
call(get_mock_bench_output(
@@ -193,7 +193,7 @@ def test_bench_command_normal_execution(self):
def test_bench_command_normal_execution_xcm(self):
self.mock_parse_args.return_value = (argparse.Namespace(
- command='bench',
+ command='bench-omni',
runtime=['westend'],
pallet=['pallet_xcm_benchmarks::generic'],
fail_fast=True,
@@ -213,7 +213,7 @@ def test_bench_command_normal_execution_xcm(self):
expected_calls = [
# Build calls
- call("forklift cargo build -p westend-runtime --profile release --features=runtime-benchmarks"),
+ call("forklift cargo build -p westend-runtime --profile production --features=runtime-benchmarks"),
# Westend runtime calls
call(get_mock_bench_output(
@@ -229,7 +229,7 @@ def test_bench_command_normal_execution_xcm(self):
def test_bench_command_two_runtimes_two_pallets(self):
self.mock_parse_args.return_value = (argparse.Namespace(
- command='bench',
+ command='bench-omni',
runtime=['westend', 'rococo'],
pallet=['pallet_balances', 'pallet_staking'],
fail_fast=True,
@@ -250,8 +250,8 @@ def test_bench_command_two_runtimes_two_pallets(self):
expected_calls = [
# Build calls
- call("forklift cargo build -p westend-runtime --profile release --features=runtime-benchmarks"),
- call("forklift cargo build -p rococo-runtime --profile release --features=runtime-benchmarks"),
+ call("forklift cargo build -p westend-runtime --profile production --features=runtime-benchmarks"),
+ call("forklift cargo build -p rococo-runtime --profile production --features=runtime-benchmarks"),
# Westend runtime calls
call(get_mock_bench_output(
runtime='westend',
@@ -287,7 +287,7 @@ def test_bench_command_two_runtimes_two_pallets(self):
def test_bench_command_one_dev_runtime(self):
self.mock_parse_args.return_value = (argparse.Namespace(
- command='bench',
+ command='bench-omni',
runtime=['dev'],
pallet=['pallet_balances'],
fail_fast=True,
@@ -309,7 +309,7 @@ def test_bench_command_one_dev_runtime(self):
expected_calls = [
# Build calls
- call("forklift cargo build -p kitchensink-runtime --profile release --features=runtime-benchmarks"),
+ call("forklift cargo build -p kitchensink-runtime --profile production --features=runtime-benchmarks"),
# Westend runtime calls
call(get_mock_bench_output(
runtime='kitchensink',
@@ -324,7 +324,7 @@ def test_bench_command_one_dev_runtime(self):
def test_bench_command_one_cumulus_runtime(self):
self.mock_parse_args.return_value = (argparse.Namespace(
- command='bench',
+ command='bench-omni',
runtime=['asset-hub-westend'],
pallet=['pallet_assets'],
fail_fast=True,
@@ -344,7 +344,7 @@ def test_bench_command_one_cumulus_runtime(self):
expected_calls = [
# Build calls
- call("forklift cargo build -p asset-hub-westend-runtime --profile release --features=runtime-benchmarks"),
+ call("forklift cargo build -p asset-hub-westend-runtime --profile production --features=runtime-benchmarks"),
# Asset-hub-westend runtime calls
call(get_mock_bench_output(
runtime='asset-hub-westend',
@@ -359,7 +359,7 @@ def test_bench_command_one_cumulus_runtime(self):
def test_bench_command_one_cumulus_runtime_xcm(self):
self.mock_parse_args.return_value = (argparse.Namespace(
- command='bench',
+ command='bench-omni',
runtime=['asset-hub-westend'],
pallet=['pallet_xcm_benchmarks::generic', 'pallet_assets'],
fail_fast=True,
@@ -379,7 +379,7 @@ def test_bench_command_one_cumulus_runtime_xcm(self):
expected_calls = [
# Build calls
- call("forklift cargo build -p asset-hub-westend-runtime --profile release --features=runtime-benchmarks"),
+ call("forklift cargo build -p asset-hub-westend-runtime --profile production --features=runtime-benchmarks"),
# Asset-hub-westend runtime calls
call(get_mock_bench_output(
runtime='asset-hub-westend',
diff --git a/.github/scripts/common/lib.sh b/.github/scripts/common/lib.sh
index e3dd6224f29b..00f8c089831e 100755
--- a/.github/scripts/common/lib.sh
+++ b/.github/scripts/common/lib.sh
@@ -237,24 +237,52 @@ fetch_release_artifacts() {
popd > /dev/null
}
-# Fetch the release artifacts like binary and signatures from S3. Assumes the ENV are set:
+# Fetch deb package from S3. Assumes the ENV are set:
# - RELEASE_ID
# - GITHUB_TOKEN
# - REPO in the form paritytech/polkadot
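+# - VERSION and RELEASE_TAG of the deb package to fetch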
-fetch_release_artifacts_from_s3() {
+fetch_debian_package_from_s3() {
BINARY=$1
echo "Version : $VERSION"
echo "Repo : $REPO"
echo "Binary : $BINARY"
+ echo "Tag : $RELEASE_TAG"
OUTPUT_DIR=${OUTPUT_DIR:-"./release-artifacts/${BINARY}"}
echo "OUTPUT_DIR : $OUTPUT_DIR"
URL_BASE=$(get_s3_url_base $BINARY)
echo "URL_BASE=$URL_BASE"
- URL_BINARY=$URL_BASE/$VERSION/$BINARY
- URL_SHA=$URL_BASE/$VERSION/$BINARY.sha256
- URL_ASC=$URL_BASE/$VERSION/$BINARY.asc
+ URL=$URL_BASE/$RELEASE_TAG/x86_64-unknown-linux-gnu/${BINARY}_${VERSION}_amd64.deb
+
+ mkdir -p "$OUTPUT_DIR"
+ pushd "$OUTPUT_DIR" > /dev/null
+
+ echo "Fetching deb package..."
+
+  echo "Fetching $URL"
+ curl --progress-bar -LO "$URL" || echo "Missing $URL"
+
+ pwd
+ ls -al --color
+ popd > /dev/null
+
+}
+
+# Fetch the release artifacts like binary and signatures from S3. Assumes the ENV are set:
+# - VERSION
+# inputs: binary (e.g. polkadot), target (e.g. aarch64-apple-darwin)
+fetch_release_artifacts_from_s3() {
+ BINARY=$1
+ TARGET=$2
+ OUTPUT_DIR=${OUTPUT_DIR:-"./release-artifacts/${TARGET}/${BINARY}"}
+ echo "OUTPUT_DIR : $OUTPUT_DIR"
+
+ URL_BASE=$(get_s3_url_base $BINARY)
+ echo "URL_BASE=$URL_BASE"
+
+ URL_BINARY=$URL_BASE/$VERSION/$TARGET/$BINARY
+ URL_SHA=$URL_BASE/$VERSION/$TARGET/$BINARY.sha256
+ URL_ASC=$URL_BASE/$VERSION/$TARGET/$BINARY.asc
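+  # Artifacts are laid out as <base>/<version>/<target>/<binary>[.sha256|.asc]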
# Fetch artifacts
mkdir -p "$OUTPUT_DIR"
@@ -269,7 +297,7 @@ fetch_release_artifacts_from_s3() {
pwd
ls -al --color
popd > /dev/null
-
+ unset OUTPUT_DIR
}
# Pass the name of the binary as input, it will
@@ -277,15 +305,26 @@ fetch_release_artifacts_from_s3() {
function get_s3_url_base() {
name=$1
case $name in
- polkadot | polkadot-execute-worker | polkadot-prepare-worker | staking-miner)
+ polkadot | polkadot-execute-worker | polkadot-prepare-worker )
printf "https://releases.parity.io/polkadot"
;;
- polkadot-parachain)
- printf "https://releases.parity.io/cumulus"
+ polkadot-parachain)
+ printf "https://releases.parity.io/polkadot-parachain"
+ ;;
+
+ polkadot-omni-node)
+ printf "https://releases.parity.io/polkadot-omni-node"
;;
- *)
+ chain-spec-builder)
+ printf "https://releases.parity.io/chain-spec-builder"
+ ;;
+
+ frame-omni-bencher)
+ printf "https://releases.parity.io/frame-omni-bencher"
+ ;;
+ *)
printf "UNSUPPORTED BINARY $name"
exit 1
;;
@@ -468,3 +507,16 @@ validate_stable_tag() {
exit 1
fi
}
+
+# Prepare the docker stable tag from the polkadot stable tag
+# input: tag (polkadot-stableYYMM(-X) or polkadot-stableYYMM(-X)-rcX)
+# output: stableYYMM(-X) or stableYYMM(-X)-rcX
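+# Example (illustrative): prepare_docker_stable_tag polkadot-stable2412-rc1 prints "stable2412-rc1"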
+prepare_docker_stable_tag() {
+ tag="$1"
+ if [[ "$tag" =~ stable[0-9]{4}(-[0-9]+)?(-rc[0-9]+)? ]]; then
+ echo "${BASH_REMATCH[0]}"
+ else
+ echo "Tag is invalid: $tag"
+ exit 1
+ fi
+}
diff --git a/.github/scripts/release/build-linux-release.sh b/.github/scripts/release/build-linux-release.sh
index a6bd658d292a..874c9b44788b 100755
--- a/.github/scripts/release/build-linux-release.sh
+++ b/.github/scripts/release/build-linux-release.sh
@@ -3,6 +3,8 @@
# This is used to build our binaries:
# - polkadot
# - polkadot-parachain
+# - polkadot-omni-node
+#
# set -e
BIN=$1
@@ -21,7 +23,7 @@ time cargo build --profile $PROFILE --locked --verbose --bin $BIN --package $PAC
echo "Artifact target: $ARTIFACTS"
cp ./target/$PROFILE/$BIN "$ARTIFACTS"
-pushd "$ARTIFACTS" > /dev/nul
+pushd "$ARTIFACTS" > /dev/null
sha256sum "$BIN" | tee "$BIN.sha256"
EXTRATAG="$($ARTIFACTS/$BIN --version |
diff --git a/.github/scripts/release/build-macos-release.sh b/.github/scripts/release/build-macos-release.sh
new file mode 100755
index 000000000000..ba6dcc65d650
--- /dev/null
+++ b/.github/scripts/release/build-macos-release.sh
@@ -0,0 +1,37 @@
+#!/usr/bin/env bash
+
+# This is used to build our binaries:
+# - polkadot
+# - polkadot-parachain
+# - polkadot-omni-node
+# set -e
+
+BIN=$1
+PACKAGE=${2:-$BIN}
+
+PROFILE=${PROFILE:-production}
+# parity-macos runner needs a path where it can
+# write, so make it relative to github workspace.
+ARTIFACTS=$GITHUB_WORKSPACE/artifacts/$BIN
+VERSION=$(git tag -l --contains HEAD | grep -E "^v.*")
+
+echo "Artifacts will be copied into $ARTIFACTS"
+mkdir -p "$ARTIFACTS"
+
+git log --pretty=oneline -n 1
+time cargo build --profile $PROFILE --locked --verbose --bin $BIN --package $PACKAGE
+
+echo "Artifact target: $ARTIFACTS"
+
+cp ./target/$PROFILE/$BIN "$ARTIFACTS"
+pushd "$ARTIFACTS" > /dev/null
+sha256sum "$BIN" | tee "$BIN.sha256"
+
+EXTRATAG="$($ARTIFACTS/$BIN --version |
+ sed -n -r 's/^'$BIN' ([0-9.]+.*-[0-9a-f]{7,13})-.*$/\1/p')"
+
+EXTRATAG="${VERSION}-${EXTRATAG}-$(cut -c 1-8 $ARTIFACTS/$BIN.sha256)"
+
+echo "$BIN version = ${VERSION} (EXTRATAG = ${EXTRATAG})"
+echo -n ${VERSION} > "$ARTIFACTS/VERSION"
+echo -n ${EXTRATAG} > "$ARTIFACTS/EXTRATAG"
diff --git a/.github/scripts/release/distributions b/.github/scripts/release/distributions
new file mode 100644
index 000000000000..a430ec76c6ba
--- /dev/null
+++ b/.github/scripts/release/distributions
@@ -0,0 +1,39 @@
+Origin: Parity
+Label: Parity
+Codename: release
+Architectures: amd64
+Components: main
+Description: Apt repository for software made by Parity Technologies Ltd.
+SignWith: 90BD75EBBB8E95CB3DA6078F94A4029AB4B35DAE
+
+Origin: Parity
+Label: Parity Staging
+Codename: staging
+Architectures: amd64
+Components: main
+Description: Staging distribution for Parity Technologies Ltd. packages
+SignWith: 90BD75EBBB8E95CB3DA6078F94A4029AB4B35DAE
+
+Origin: Parity
+Label: Parity stable2407
+Codename: stable2407
+Architectures: amd64
+Components: main
+Description: Apt repository for software made by Parity Technologies Ltd.
+SignWith: 90BD75EBBB8E95CB3DA6078F94A4029AB4B35DAE
+
+Origin: Parity
+Label: Parity stable2409
+Codename: stable2409
+Architectures: amd64
+Components: main
+Description: Apt repository for software made by Parity Technologies Ltd.
+SignWith: 90BD75EBBB8E95CB3DA6078F94A4029AB4B35DAE
+
+Origin: Parity
+Label: Parity stable2412
+Codename: stable2412
+Architectures: amd64
+Components: main
+Description: Apt repository for software made by Parity Technologies Ltd.
+SignWith: 90BD75EBBB8E95CB3DA6078F94A4029AB4B35DAE
diff --git a/.github/scripts/release/release_lib.sh b/.github/scripts/release/release_lib.sh
index f5032073b617..984709f2ea03 100644
--- a/.github/scripts/release/release_lib.sh
+++ b/.github/scripts/release/release_lib.sh
@@ -1,6 +1,6 @@
#!/usr/bin/env bash
-# Set the new version by replacing the value of the constant given as patetrn
+# Set the new version by replacing the value of the constant given as pattern
# in the file.
#
# input: pattern, version, file
@@ -119,21 +119,79 @@ set_polkadot_parachain_binary_version() {
upload_s3_release() {
+ alias aws='podman run --rm -it docker.io/paritytech/awscli -e AWS_ACCESS_KEY_ID -e AWS_SECRET_ACCESS_KEY -e AWS_BUCKET aws'
+
+ product=$1
+ version=$2
+ target=$3
+
+ echo "Working on product: $product "
+ echo "Working on version: $version "
+ echo "Working on platform: $target "
+
+ URL_BASE=$(get_s3_url_base $product)
+
+ echo "Current content, should be empty on new uploads:"
+ aws s3 ls "s3://${URL_BASE}/${version}/${target}" --recursive --human-readable --summarize || true
+ echo "Content to be uploaded:"
+ artifacts="release-artifacts/$target/$product/"
+ ls "$artifacts"
+ aws s3 sync --acl public-read "$artifacts" "s3://${URL_BASE}/${version}/${target}"
+ echo "Uploaded files:"
+ aws s3 ls "s3://${URL_BASE}/${version}/${target}" --recursive --human-readable --summarize
+ echo "✅ The release should be at https://${URL_BASE}/${version}/${target}"
+}
+
+# Upload runtimes artifacts to s3 release bucket
+#
+# input: version (stable release tag, e.g. polkadot-stable2412 or polkadot-stable2412-rc1)
+# output: none
+upload_s3_runtimes_release_artifacts() {
alias aws='podman run --rm -it docker.io/paritytech/awscli -e AWS_ACCESS_KEY_ID -e AWS_SECRET_ACCESS_KEY -e AWS_BUCKET aws'
- product=$1
- version=$2
+ version=$1
- echo "Working on product: $product "
echo "Working on version: $version "
echo "Current content, should be empty on new uploads:"
- aws s3 ls "s3://releases.parity.io/polkadot/${version}/" --recursive --human-readable --summarize || true
+ aws s3 ls "s3://releases.parity.io/polkadot/runtimes/${version}/" --recursive --human-readable --summarize || true
echo "Content to be uploaded:"
- artifacts="artifacts/$product/"
+ artifacts="artifacts/runtimes/"
ls "$artifacts"
- aws s3 sync --acl public-read "$artifacts" "s3://releases.parity.io/polkadot/${version}/"
+ aws s3 sync --acl public-read "$artifacts" "s3://releases.parity.io/polkadot/runtimes/${version}/"
echo "Uploaded files:"
- aws s3 ls "s3://releases.parity.io/polkadot/${version}/" --recursive --human-readable --summarize
- echo "✅ The release should be at https://releases.parity.io/polkadot/${version}"
+ aws s3 ls "s3://releases.parity.io/polkadot/runtimes/${version}/" --recursive --human-readable --summarize
+ echo "✅ The release should be at https://releases.parity.io/polkadot/runtimes/${version}"
+}
+
+
+# Pass the name of the binary as input, it will
+# return the s3 base url
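+# Note: unlike get_s3_url_base in common/lib.sh, these are bare paths without a scheme,
+# so callers can prefix them with s3:// or https:// as needed.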
+function get_s3_url_base() {
+ name=$1
+ case $name in
+ polkadot | polkadot-execute-worker | polkadot-prepare-worker )
+ printf "releases.parity.io/polkadot"
+ ;;
+
+ polkadot-parachain)
+ printf "releases.parity.io/polkadot-parachain"
+ ;;
+
+ polkadot-omni-node)
+ printf "releases.parity.io/polkadot-omni-node"
+ ;;
+
+ chain-spec-builder)
+ printf "releases.parity.io/chain-spec-builder"
+ ;;
+
+ frame-omni-bencher)
+ printf "releases.parity.io/frame-omni-bencher"
+ ;;
+ *)
+ printf "UNSUPPORTED BINARY $name"
+ exit 1
+ ;;
+ esac
}
diff --git a/.github/workflows/build-misc.yml b/.github/workflows/build-misc.yml
index a9b433a94b64..c4a7281b9ebc 100644
--- a/.github/workflows/build-misc.yml
+++ b/.github/workflows/build-misc.yml
@@ -44,7 +44,7 @@ jobs:
forklift cargo check -p rococo-runtime
forklift cargo check -p polkadot-test-runtime
- name: Stop all workflows if failed
- if: ${{ failure() && steps.required.conclusion == 'failure' }}
+ if: ${{ failure() && steps.required.conclusion == 'failure' && !github.event.pull_request.head.repo.fork }}
uses: ./.github/actions/workflow-stopper
with:
app-id: ${{ secrets.WORKFLOW_STOPPER_RUNNER_APP_ID }}
@@ -73,7 +73,7 @@ jobs:
cd ./substrate/bin/utils/subkey
forklift cargo build --locked --release
- name: Stop all workflows if failed
- if: ${{ failure() && steps.required.conclusion == 'failure' }}
+ if: ${{ failure() && steps.required.conclusion == 'failure' && !github.event.pull_request.head.repo.fork }}
uses: ./.github/actions/workflow-stopper
with:
app-id: ${{ secrets.WORKFLOW_STOPPER_RUNNER_APP_ID }}
diff --git a/.github/workflows/check-frame-omni-bencher.yml b/.github/workflows/check-frame-omni-bencher.yml
index b47c9d49feaf..bc0ff82b6774 100644
--- a/.github/workflows/check-frame-omni-bencher.yml
+++ b/.github/workflows/check-frame-omni-bencher.yml
@@ -41,7 +41,7 @@ jobs:
forklift cargo build --locked --quiet --release -p asset-hub-westend-runtime --features runtime-benchmarks
forklift cargo run --locked --release -p frame-omni-bencher --quiet -- v1 benchmark pallet --runtime target/release/wbuild/asset-hub-westend-runtime/asset_hub_westend_runtime.compact.compressed.wasm --all --steps 2 --repeat 1 --quiet
- name: Stop all workflows if failed
- if: ${{ failure() && steps.required.conclusion == 'failure' }}
+ if: ${{ failure() && steps.required.conclusion == 'failure' && !github.event.pull_request.head.repo.fork }}
uses: ./.github/actions/workflow-stopper
with:
app-id: ${{ secrets.WORKFLOW_STOPPER_RUNNER_APP_ID }}
@@ -99,7 +99,7 @@ jobs:
echo "Running command: $cmd"
eval "$cmd"
- name: Stop all workflows if failed
- if: ${{ failure() && steps.required.conclusion == 'failure' }}
+ if: ${{ failure() && steps.required.conclusion == 'failure' && !github.event.pull_request.head.repo.fork }}
uses: ./.github/actions/workflow-stopper
with:
app-id: ${{ secrets.WORKFLOW_STOPPER_RUNNER_APP_ID }}
diff --git a/.github/workflows/check-links.yml b/.github/workflows/check-links.yml
index dd9d3eaf824f..cea6b9a8636a 100644
--- a/.github/workflows/check-links.yml
+++ b/.github/workflows/check-links.yml
@@ -33,7 +33,7 @@ jobs:
- uses: actions/checkout@6d193bf28034eafb982f37bd894289fe649468fc # v4.1.0 (22. Sep 2023)
- name: Lychee link checker
- uses: lycheeverse/lychee-action@7cd0af4c74a61395d455af97419279d86aafaede # for v1.9.1 (10. Jan 2024)
+ uses: lycheeverse/lychee-action@f81112d0d2814ded911bd23e3beaa9dda9093915 # for v1.9.1 (10. Jan 2024)
with:
args: >-
--config .config/lychee.toml
diff --git a/.github/workflows/check-semver.yml b/.github/workflows/check-semver.yml
index 78602410cdf6..11b386da21e9 100644
--- a/.github/workflows/check-semver.yml
+++ b/.github/workflows/check-semver.yml
@@ -11,7 +11,7 @@ concurrency:
cancel-in-progress: true
env:
- TOOLCHAIN: nightly-2024-06-01
+ TOOLCHAIN: nightly-2024-11-19
jobs:
preflight:
@@ -74,10 +74,15 @@ jobs:
- name: install parity-publish
# Set the target dir to cache the build.
- run: CARGO_TARGET_DIR=./target/ cargo install parity-publish@0.8.0 --locked -q
+ run: CARGO_TARGET_DIR=./target/ cargo install parity-publish@0.10.2 --locked -q
- name: check semver
run: |
+ if [ -z "$PR" ]; then
+ echo "Skipping master/merge queue"
+ exit 0
+ fi
+
export CARGO_TARGET_DIR=target
export RUSTFLAGS='-A warnings -A missing_docs'
export SKIP_WASM_BUILD=1
diff --git a/.github/workflows/checks-quick.yml b/.github/workflows/checks-quick.yml
index 4fcaf80c83fc..4c26b85a6303 100644
--- a/.github/workflows/checks-quick.yml
+++ b/.github/workflows/checks-quick.yml
@@ -30,7 +30,7 @@ jobs:
id: required
run: cargo +nightly fmt --all -- --check
- name: Stop all workflows if failed
- if: ${{ failure() && steps.required.conclusion == 'failure' }}
+ if: ${{ failure() && steps.required.conclusion == 'failure' && !github.event.pull_request.head.repo.fork }}
uses: ./.github/actions/workflow-stopper
with:
app-id: ${{ secrets.WORKFLOW_STOPPER_RUNNER_APP_ID }}
@@ -97,7 +97,6 @@ jobs:
--exclude
"substrate/frame/contracts/fixtures/build"
"substrate/frame/contracts/fixtures/contracts/common"
- "substrate/frame/revive/fixtures/build"
"substrate/frame/revive/fixtures/contracts/common"
- name: deny git deps
run: python3 .github/scripts/deny-git-deps.py .
diff --git a/.github/workflows/checks.yml b/.github/workflows/checks.yml
index c240504fa1e7..02428711811f 100644
--- a/.github/workflows/checks.yml
+++ b/.github/workflows/checks.yml
@@ -36,7 +36,7 @@ jobs:
cargo clippy --all-targets --locked --workspace --quiet
cargo clippy --all-targets --all-features --locked --workspace --quiet
- name: Stop all workflows if failed
- if: ${{ failure() && steps.required.conclusion == 'failure' }}
+ if: ${{ failure() && steps.required.conclusion == 'failure' && !github.event.pull_request.head.repo.fork }}
uses: ./.github/actions/workflow-stopper
with:
app-id: ${{ secrets.WORKFLOW_STOPPER_RUNNER_APP_ID }}
@@ -62,7 +62,7 @@ jobs:
# experimental code may rely on try-runtime and vice-versa
forklift cargo check --locked --all --features try-runtime,experimental --quiet
- name: Stop all workflows if failed
- if: ${{ failure() && steps.required.conclusion == 'failure' }}
+ if: ${{ failure() && steps.required.conclusion == 'failure' && !github.event.pull_request.head.repo.fork }}
uses: ./.github/actions/workflow-stopper
with:
app-id: ${{ secrets.WORKFLOW_STOPPER_RUNNER_APP_ID }}
@@ -91,7 +91,7 @@ jobs:
./check-features-variants.sh
cd -
- name: Stop all workflows if failed
- if: ${{ failure() && steps.required.conclusion == 'failure' }}
+ if: ${{ failure() && steps.required.conclusion == 'failure' && !github.event.pull_request.head.repo.fork }}
uses: ./.github/actions/workflow-stopper
with:
app-id: ${{ secrets.WORKFLOW_STOPPER_RUNNER_APP_ID }}
diff --git a/.github/workflows/cmd.yml b/.github/workflows/cmd.yml
index 525ab0c0fc23..b6a50ea0d15e 100644
--- a/.github/workflows/cmd.yml
+++ b/.github/workflows/cmd.yml
@@ -19,10 +19,10 @@ jobs:
steps:
- name: Generate token
id: generate_token
- uses: tibdex/github-app-token@v2.1.0
+ uses: actions/create-github-app-token@v1
with:
- app_id: ${{ secrets.CMD_BOT_APP_ID }}
- private_key: ${{ secrets.CMD_BOT_APP_KEY }}
+ app-id: ${{ secrets.CMD_BOT_APP_ID }}
+ private-key: ${{ secrets.CMD_BOT_APP_KEY }}
- name: Check if user is a member of the organization
id: is-member
@@ -227,7 +227,8 @@ jobs:
cat .github/env >> $GITHUB_OUTPUT
if [ -n "$IMAGE_OVERRIDE" ]; then
- echo "IMAGE=$IMAGE_OVERRIDE" >> $GITHUB_OUTPUT
+ IMAGE=$IMAGE_OVERRIDE
+ echo "IMAGE=$IMAGE" >> $GITHUB_OUTPUT
fi
if [[ $BODY == "/cmd bench"* ]]; then
@@ -237,6 +238,10 @@ jobs:
else
echo "RUNNER=ubuntu-latest" >> $GITHUB_OUTPUT
fi
+ - name: Print outputs
+ run: |
+ echo "RUNNER=${{ steps.set-image.outputs.RUNNER }}"
+ echo "IMAGE=${{ steps.set-image.outputs.IMAGE }}"
# Get PR branch name, because the issue_comment event does not contain the PR branch name
get-pr-branch:
@@ -283,10 +288,24 @@ jobs:
env:
JOB_NAME: "cmd"
runs-on: ${{ needs.set-image.outputs.RUNNER }}
- timeout-minutes: 4320 # 72 hours -> 3 days; as it could take a long time to run all the runtimes/pallets
container:
image: ${{ needs.set-image.outputs.IMAGE }}
+ timeout-minutes: 1440 # 24 hours per runtime
steps:
+ - name: Generate token
+ uses: actions/create-github-app-token@v1
+ id: generate_token
+ with:
+ app-id: ${{ secrets.CMD_BOT_APP_ID }}
+ private-key: ${{ secrets.CMD_BOT_APP_KEY }}
+
+ - name: Checkout
+ uses: actions/checkout@v4
+ with:
+ token: ${{ steps.generate_token.outputs.token }}
+ repository: ${{ needs.get-pr-branch.outputs.repo }}
+ ref: ${{ needs.get-pr-branch.outputs.pr-branch }}
+
- name: Get command
uses: actions-ecosystem/action-regex-match@v2
id: get-pr-comment
@@ -340,13 +359,7 @@ jobs:
repo: context.repo.repo,
body: `Command "${{ steps.get-pr-comment.outputs.group2 }}" has started 🚀 [See logs here](${job_url})`
})
-
- - name: Checkout
- uses: actions/checkout@v4
- with:
- repository: ${{ needs.get-pr-branch.outputs.repo }}
- ref: ${{ needs.get-pr-branch.outputs.pr-branch }}
-
+
- name: Install dependencies for bench
if: startsWith(steps.get-pr-comment.outputs.group2, 'bench')
run: |
@@ -364,6 +377,7 @@ jobs:
# Fixes "detected dubious ownership" error in the ci
git config --global --add safe.directory '*'
git remote -v
+ cat /proc/cpuinfo
python3 -m pip install -r .github/scripts/generate-prdoc.requirements.txt
python3 .github/scripts/cmd/cmd.py $CMD $PR_ARG
git status
@@ -389,16 +403,30 @@ jobs:
- name: Commit changes
run: |
if [ -n "$(git status --porcelain)" ]; then
- git config --local user.email "action@github.com"
- git config --local user.name "GitHub Action"
+ git config --global user.name command-bot
+ git config --global user.email "<>"
+ git config --global pull.rebase false
+
+ # Push the results to the target branch
+ git remote add \
+ github \
+ "https://token:${{ steps.generate_token.outputs.token }}@github.com/${{ github.event.repository.owner.login }}/${{ github.event.repository.name }}.git" || :
+
+ push_changes() {
+ git push github "HEAD:${{ needs.get-pr-branch.outputs.pr-branch }}"
+ }
git add .
git restore --staged Cargo.lock # ignore changes in Cargo.lock
git commit -m "Update from ${{ github.actor }} running command '${{ steps.get-pr-comment.outputs.group2 }}'" || true
- git pull --rebase origin ${{ needs.get-pr-branch.outputs.pr-branch }}
-
- git push origin ${{ needs.get-pr-branch.outputs.pr-branch }}
+ # Attempt to push changes
+ if ! push_changes; then
+ echo "Push failed, trying to rebase..."
+ git pull --rebase github "${{ needs.get-pr-branch.outputs.pr-branch }}"
+ # After successful rebase, try pushing again
+ push_changes
+ fi
else
echo "Nothing to commit";
fi
diff --git a/.github/workflows/command-backport.yml b/.github/workflows/command-backport.yml
index 8f23bcd75f01..8a017a434525 100644
--- a/.github/workflows/command-backport.yml
+++ b/.github/workflows/command-backport.yml
@@ -40,7 +40,7 @@ jobs:
uses: korthout/backport-action@v3
id: backport
with:
- target_branches: stable2407 stable2409
+ target_branches: stable2407 stable2409 stable2412
merge_commits: skip
github_token: ${{ steps.generate_token.outputs.token }}
pull_description: |
@@ -86,7 +86,7 @@ jobs:
const reviewer = '${{ github.event.pull_request.user.login }}';
for (const pullNumber of pullNumbers) {
- await github.pulls.createReviewRequest({
+ await github.pulls.requestReviewers({
owner: context.repo.owner,
repo: context.repo.repo,
pull_number: parseInt(pullNumber),
diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml
index cc84e7f9ad3b..b7c70c9e6d66 100644
--- a/.github/workflows/docs.yml
+++ b/.github/workflows/docs.yml
@@ -29,7 +29,7 @@ jobs:
env:
RUSTFLAGS: "-Cdebug-assertions=y -Dwarnings"
- name: Stop all workflows if failed
- if: ${{ failure() && steps.required.conclusion == 'failure' }}
+ if: ${{ failure() && steps.required.conclusion == 'failure' && !github.event.pull_request.head.repo.fork }}
uses: ./.github/actions/workflow-stopper
with:
app-id: ${{ secrets.WORKFLOW_STOPPER_RUNNER_APP_ID }}
@@ -69,7 +69,7 @@ jobs:
retention-days: 1
if-no-files-found: error
- name: Stop all workflows if failed
- if: ${{ failure() && steps.required.conclusion == 'failure' }}
+ if: ${{ failure() && steps.required.conclusion == 'failure' && !github.event.pull_request.head.repo.fork }}
uses: ./.github/actions/workflow-stopper
with:
app-id: ${{ secrets.WORKFLOW_STOPPER_RUNNER_APP_ID }}
diff --git a/.github/workflows/publish-check-compile.yml b/.github/workflows/publish-check-compile.yml
new file mode 100644
index 000000000000..83cd3ff8fa90
--- /dev/null
+++ b/.github/workflows/publish-check-compile.yml
@@ -0,0 +1,48 @@
+name: Check publish build
+
+on:
+ push:
+ branches:
+ - master
+ pull_request:
+ types: [opened, synchronize, reopened, ready_for_review]
+ merge_group:
+
+concurrency:
+ group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
+ cancel-in-progress: true
+
+jobs:
+ preflight:
+ uses: ./.github/workflows/reusable-preflight.yml
+
+ check-publish:
+ timeout-minutes: 90
+ needs: [preflight]
+ runs-on: ${{ needs.preflight.outputs.RUNNER }}
+ container:
+ image: ${{ needs.preflight.outputs.IMAGE }}
+ steps:
+ - uses: actions/checkout@6d193bf28034eafb982f37bd894289fe649468fc # v4.1.7
+
+ - name: Rust Cache
+ uses: Swatinem/rust-cache@82a92a6e8fbeee089604da2575dc567ae9ddeaab # v2.7.5
+ with:
+ cache-on-failure: true
+
+ - name: install parity-publish
+ run: cargo install parity-publish@0.10.2 --locked -q
+
+ - name: parity-publish update plan
+ run: parity-publish --color always plan --skip-check --prdoc prdoc/
+
+ - name: parity-publish apply plan
+ run: parity-publish --color always apply --registry
+
+ - name: parity-publish check compile
+ run: |
+ packages="$(parity-publish apply --print)"
+
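+          # Only check the crates affected by the publish plan.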
+ if [ -n "$packages" ]; then
+ cargo --color always check $(printf -- '-p %s ' $packages)
+ fi
diff --git a/.github/workflows/publish-check-crates.yml b/.github/workflows/publish-check-crates.yml
index 3fad3b641474..1e5a8054e2c7 100644
--- a/.github/workflows/publish-check-crates.yml
+++ b/.github/workflows/publish-check-crates.yml
@@ -24,7 +24,7 @@ jobs:
cache-on-failure: true
- name: install parity-publish
- run: cargo install parity-publish@0.8.0 --locked -q
+ run: cargo install parity-publish@0.10.2 --locked -q
- name: parity-publish check
run: parity-publish --color always check --allow-unpublished
diff --git a/.github/workflows/publish-claim-crates.yml b/.github/workflows/publish-claim-crates.yml
index 37bf06bb82d8..845b57a61b96 100644
--- a/.github/workflows/publish-claim-crates.yml
+++ b/.github/workflows/publish-claim-crates.yml
@@ -18,7 +18,7 @@ jobs:
cache-on-failure: true
- name: install parity-publish
- run: cargo install parity-publish@0.8.0 --locked -q
+ run: cargo install parity-publish@0.10.2 --locked -q
- name: parity-publish claim
env:
diff --git a/.github/workflows/release-branchoff-stable.yml b/.github/workflows/release-10_branchoff-stable.yml
similarity index 100%
rename from .github/workflows/release-branchoff-stable.yml
rename to .github/workflows/release-10_branchoff-stable.yml
diff --git a/.github/workflows/release-10_rc-automation.yml b/.github/workflows/release-11_rc-automation.yml
similarity index 100%
rename from .github/workflows/release-10_rc-automation.yml
rename to .github/workflows/release-11_rc-automation.yml
diff --git a/.github/workflows/release-20_build-rc.yml b/.github/workflows/release-20_build-rc.yml
new file mode 100644
index 000000000000..d4c7055c37c5
--- /dev/null
+++ b/.github/workflows/release-20_build-rc.yml
@@ -0,0 +1,263 @@
+name: Release - Build node release candidate
+
+on:
+ workflow_dispatch:
+ inputs:
+ binary:
+        description: Binary to be built for the release
+ default: all
+ type: choice
+ options:
+ - polkadot
+ - polkadot-parachain
+ - polkadot-omni-node
+ - frame-omni-bencher
+ - chain-spec-builder
+ - all
+
+ release_tag:
+ description: Tag matching the actual release candidate with the format polkadot-stableYYMM(-X)-rcX or polkadot-stableYYMM(-X)
+ type: string
+
+jobs:
+ check-synchronization:
+ uses: paritytech-release/sync-workflows/.github/workflows/check-syncronization.yml@main
+
+ validate-inputs:
+ needs: [check-synchronization]
+    if: ${{ needs.check-synchronization.outputs.checks_passed == 'true' }}
+ runs-on: ubuntu-latest
+ outputs:
+ release_tag: ${{ steps.validate_inputs.outputs.release_tag }}
+
+ steps:
+ - name: Checkout sources
+ uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0
+
+ - name: Validate inputs
+ id: validate_inputs
+ run: |
+ . ./.github/scripts/common/lib.sh
+
+ RELEASE_TAG=$(validate_stable_tag ${{ inputs.release_tag }})
+ echo "release_tag=${RELEASE_TAG}" >> $GITHUB_OUTPUT
+
+ build-polkadot-binary:
+ needs: [validate-inputs]
+ if: ${{ inputs.binary == 'polkadot' || inputs.binary == 'all' }}
+ uses: "./.github/workflows/release-reusable-rc-buid.yml"
+ with:
+ binary: '["polkadot", "polkadot-prepare-worker", "polkadot-execute-worker"]'
+ package: polkadot
+ release_tag: ${{ needs.validate-inputs.outputs.release_tag }}
+ target: x86_64-unknown-linux-gnu
+ secrets:
+ PGP_KMS_KEY: ${{ secrets.PGP_KMS_KEY }}
+ PGP_KMS_HASH: ${{ secrets.PGP_KMS_HASH }}
+ AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+ AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+ AWS_DEFAULT_REGION: ${{ secrets.AWS_DEFAULT_REGION }}
+ AWS_RELEASE_ACCESS_KEY_ID: ${{ secrets.AWS_RELEASE_ACCESS_KEY_ID }}
+ AWS_RELEASE_SECRET_ACCESS_KEY: ${{ secrets.AWS_RELEASE_SECRET_ACCESS_KEY }}
+ permissions:
+ id-token: write
+ attestations: write
+ contents: read
+
+ build-polkadot-parachain-binary:
+ needs: [validate-inputs]
+ if: ${{ inputs.binary == 'polkadot-parachain' || inputs.binary == 'all' }}
+ uses: "./.github/workflows/release-reusable-rc-buid.yml"
+ with:
+ binary: '["polkadot-parachain"]'
+ package: "polkadot-parachain-bin"
+ release_tag: ${{ needs.validate-inputs.outputs.release_tag }}
+ target: x86_64-unknown-linux-gnu
+ secrets:
+ PGP_KMS_KEY: ${{ secrets.PGP_KMS_KEY }}
+ PGP_KMS_HASH: ${{ secrets.PGP_KMS_HASH }}
+ AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+ AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+ AWS_DEFAULT_REGION: ${{ secrets.AWS_DEFAULT_REGION }}
+ AWS_RELEASE_ACCESS_KEY_ID: ${{ secrets.AWS_RELEASE_ACCESS_KEY_ID }}
+ AWS_RELEASE_SECRET_ACCESS_KEY: ${{ secrets.AWS_RELEASE_SECRET_ACCESS_KEY }}
+ permissions:
+ id-token: write
+ attestations: write
+ contents: read
+
+ build-polkadot-omni-node-binary:
+ needs: [validate-inputs]
+ if: ${{ inputs.binary == 'polkadot-omni-node' || inputs.binary == 'all' }}
+ uses: "./.github/workflows/release-reusable-rc-buid.yml"
+ with:
+ binary: '["polkadot-omni-node"]'
+ package: "polkadot-omni-node"
+ release_tag: ${{ needs.validate-inputs.outputs.release_tag }}
+ target: x86_64-unknown-linux-gnu
+ secrets:
+ PGP_KMS_KEY: ${{ secrets.PGP_KMS_KEY }}
+ PGP_KMS_HASH: ${{ secrets.PGP_KMS_HASH }}
+ AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+ AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+ AWS_DEFAULT_REGION: ${{ secrets.AWS_DEFAULT_REGION }}
+ AWS_RELEASE_ACCESS_KEY_ID: ${{ secrets.AWS_RELEASE_ACCESS_KEY_ID }}
+ AWS_RELEASE_SECRET_ACCESS_KEY: ${{ secrets.AWS_RELEASE_SECRET_ACCESS_KEY }}
+ permissions:
+ id-token: write
+ attestations: write
+ contents: read
+
+ build-frame-omni-bencher-binary:
+ needs: [validate-inputs]
+ if: ${{ inputs.binary == 'frame-omni-bencher' || inputs.binary == 'all' }}
+ uses: "./.github/workflows/release-reusable-rc-buid.yml"
+ with:
+ binary: '["frame-omni-bencher"]'
+ package: "frame-omni-bencher"
+ release_tag: ${{ needs.validate-inputs.outputs.release_tag }}
+ target: x86_64-unknown-linux-gnu
+ secrets:
+ PGP_KMS_KEY: ${{ secrets.PGP_KMS_KEY }}
+ PGP_KMS_HASH: ${{ secrets.PGP_KMS_HASH }}
+ AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+ AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+ AWS_DEFAULT_REGION: ${{ secrets.AWS_DEFAULT_REGION }}
+ AWS_RELEASE_ACCESS_KEY_ID: ${{ secrets.AWS_RELEASE_ACCESS_KEY_ID }}
+ AWS_RELEASE_SECRET_ACCESS_KEY: ${{ secrets.AWS_RELEASE_SECRET_ACCESS_KEY }}
+ permissions:
+ id-token: write
+ attestations: write
+ contents: read
+
+ build-chain-spec-builder-binary:
+ needs: [validate-inputs]
+ if: ${{ inputs.binary == 'chain-spec-builder' || inputs.binary == 'all' }}
+ uses: "./.github/workflows/release-reusable-rc-buid.yml"
+ with:
+ binary: '["chain-spec-builder"]'
+ package: staging-chain-spec-builder
+ release_tag: ${{ needs.validate-inputs.outputs.release_tag }}
+ target: x86_64-unknown-linux-gnu
+ secrets:
+ PGP_KMS_KEY: ${{ secrets.PGP_KMS_KEY }}
+ PGP_KMS_HASH: ${{ secrets.PGP_KMS_HASH }}
+ AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+ AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+ AWS_DEFAULT_REGION: ${{ secrets.AWS_DEFAULT_REGION }}
+ AWS_RELEASE_ACCESS_KEY_ID: ${{ secrets.AWS_RELEASE_ACCESS_KEY_ID }}
+ AWS_RELEASE_SECRET_ACCESS_KEY: ${{ secrets.AWS_RELEASE_SECRET_ACCESS_KEY }}
+ permissions:
+ id-token: write
+ attestations: write
+ contents: read
+
+ build-polkadot-macos-binary:
+ needs: [validate-inputs]
+ if: ${{ inputs.binary == 'polkadot' || inputs.binary == 'all' }}
+ uses: "./.github/workflows/release-reusable-rc-buid.yml"
+ with:
+ binary: '["polkadot", "polkadot-prepare-worker", "polkadot-execute-worker"]'
+ package: polkadot
+ release_tag: ${{ needs.validate-inputs.outputs.release_tag }}
+ target: aarch64-apple-darwin
+ secrets:
+ PGP_KMS_KEY: ${{ secrets.PGP_KMS_KEY }}
+ PGP_KMS_HASH: ${{ secrets.PGP_KMS_HASH }}
+ AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+ AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+ AWS_DEFAULT_REGION: ${{ secrets.AWS_DEFAULT_REGION }}
+ AWS_RELEASE_ACCESS_KEY_ID: ${{ secrets.AWS_RELEASE_ACCESS_KEY_ID }}
+ AWS_RELEASE_SECRET_ACCESS_KEY: ${{ secrets.AWS_RELEASE_SECRET_ACCESS_KEY }}
+ permissions:
+ id-token: write
+ attestations: write
+ contents: read
+
+ build-polkadot-parachain-macos-binary:
+ needs: [validate-inputs]
+ if: ${{ inputs.binary == 'polkadot-parachain' || inputs.binary == 'all' }}
+ uses: "./.github/workflows/release-reusable-rc-buid.yml"
+ with:
+ binary: '["polkadot-parachain"]'
+ package: polkadot-parachain-bin
+ release_tag: ${{ needs.validate-inputs.outputs.release_tag }}
+ target: aarch64-apple-darwin
+ secrets:
+ PGP_KMS_KEY: ${{ secrets.PGP_KMS_KEY }}
+ PGP_KMS_HASH: ${{ secrets.PGP_KMS_HASH }}
+ AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+ AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+ AWS_DEFAULT_REGION: ${{ secrets.AWS_DEFAULT_REGION }}
+ AWS_RELEASE_ACCESS_KEY_ID: ${{ secrets.AWS_RELEASE_ACCESS_KEY_ID }}
+ AWS_RELEASE_SECRET_ACCESS_KEY: ${{ secrets.AWS_RELEASE_SECRET_ACCESS_KEY }}
+ permissions:
+ id-token: write
+ attestations: write
+ contents: read
+
+ build-polkadot-omni-node-macos-binary:
+ needs: [validate-inputs]
+ if: ${{ inputs.binary == 'polkadot-omni-node' || inputs.binary == 'all' }}
+ uses: "./.github/workflows/release-reusable-rc-buid.yml"
+ with:
+ binary: '["polkadot-omni-node"]'
+ package: polkadot-omni-node
+ release_tag: ${{ needs.validate-inputs.outputs.release_tag }}
+ target: aarch64-apple-darwin
+ secrets:
+ PGP_KMS_KEY: ${{ secrets.PGP_KMS_KEY }}
+ PGP_KMS_HASH: ${{ secrets.PGP_KMS_HASH }}
+ AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+ AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+ AWS_DEFAULT_REGION: ${{ secrets.AWS_DEFAULT_REGION }}
+ AWS_RELEASE_ACCESS_KEY_ID: ${{ secrets.AWS_RELEASE_ACCESS_KEY_ID }}
+ AWS_RELEASE_SECRET_ACCESS_KEY: ${{ secrets.AWS_RELEASE_SECRET_ACCESS_KEY }}
+ permissions:
+ id-token: write
+ attestations: write
+ contents: read
+
+ build-frame-omni-bencher-macos-binary:
+ needs: [validate-inputs]
+ if: ${{ inputs.binary == 'frame-omni-bencher' || inputs.binary == 'all' }}
+ uses: "./.github/workflows/release-reusable-rc-buid.yml"
+ with:
+ binary: '["frame-omni-bencher"]'
+ package: frame-omni-bencher
+ release_tag: ${{ needs.validate-inputs.outputs.release_tag }}
+ target: aarch64-apple-darwin
+ secrets:
+ PGP_KMS_KEY: ${{ secrets.PGP_KMS_KEY }}
+ PGP_KMS_HASH: ${{ secrets.PGP_KMS_HASH }}
+ AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+ AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+ AWS_DEFAULT_REGION: ${{ secrets.AWS_DEFAULT_REGION }}
+ AWS_RELEASE_ACCESS_KEY_ID: ${{ secrets.AWS_RELEASE_ACCESS_KEY_ID }}
+ AWS_RELEASE_SECRET_ACCESS_KEY: ${{ secrets.AWS_RELEASE_SECRET_ACCESS_KEY }}
+ permissions:
+ id-token: write
+ attestations: write
+ contents: read
+
+ build-chain-spec-builder-macos-binary:
+ needs: [validate-inputs]
+ if: ${{ inputs.binary == 'chain-spec-builder' || inputs.binary == 'all' }}
+ uses: "./.github/workflows/release-reusable-rc-buid.yml"
+ with:
+ binary: '["chain-spec-builder"]'
+ package: staging-chain-spec-builder
+ release_tag: ${{ needs.validate-inputs.outputs.release_tag }}
+ target: aarch64-apple-darwin
+ secrets:
+ PGP_KMS_KEY: ${{ secrets.PGP_KMS_KEY }}
+ PGP_KMS_HASH: ${{ secrets.PGP_KMS_HASH }}
+ AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+ AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+ AWS_DEFAULT_REGION: ${{ secrets.AWS_DEFAULT_REGION }}
+ AWS_RELEASE_ACCESS_KEY_ID: ${{ secrets.AWS_RELEASE_ACCESS_KEY_ID }}
+ AWS_RELEASE_SECRET_ACCESS_KEY: ${{ secrets.AWS_RELEASE_SECRET_ACCESS_KEY }}
+ permissions:
+ id-token: write
+ attestations: write
+ contents: read
diff --git a/.github/workflows/release-30_publish_release_draft.yml b/.github/workflows/release-30_publish_release_draft.yml
index 376f5fbce909..78ceea91f100 100644
--- a/.github/workflows/release-30_publish_release_draft.yml
+++ b/.github/workflows/release-30_publish_release_draft.yml
@@ -1,19 +1,46 @@
name: Release - Publish draft
-on:
- push:
- tags:
- # Catches v1.2.3 and v1.2.3-rc1
- - v[0-9]+.[0-9]+.[0-9]+*
- # - polkadot-stable[0-9]+* Activate when the release process from release org is setteled
+# This workflow runs in paritytech-release and creates full release draft with:
+# - release notes
+# - info about the runtimes
+# - attached artifacts:
+# - runtimes
+# - binaries
+# - signatures
+on:
workflow_dispatch:
inputs:
- version:
- description: Current release/rc version
+ release_tag:
+ description: Tag matching the actual release candidate with the format polkadot-stableYYMM(-X)-rcX or polkadot-stableYYMM(-X)
+ required: true
+ type: string
jobs:
+ check-synchronization:
+ uses: paritytech-release/sync-workflows/.github/workflows/check-syncronization.yml@main
+
+ validate-inputs:
+ needs: [ check-synchronization ]
+    if: ${{ needs.check-synchronization.outputs.checks_passed == 'true' }}
+ runs-on: ubuntu-latest
+ outputs:
+ release_tag: ${{ steps.validate_inputs.outputs.release_tag }}
+
+ steps:
+ - name: Checkout sources
+ uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+
+ - name: Validate inputs
+ id: validate_inputs
+ run: |
+ . ./.github/scripts/common/lib.sh
+
+ RELEASE_TAG=$(validate_stable_tag ${{ inputs.release_tag }})
+ echo "release_tag=${RELEASE_TAG}" >> $GITHUB_OUTPUT
+
get-rust-versions:
+ needs: [ validate-inputs ]
runs-on: ubuntu-latest
outputs:
rustc-stable: ${{ steps.get-rust-versions.outputs.stable }}
@@ -24,47 +51,28 @@ jobs:
echo "stable=$RUST_STABLE_VERSION" >> $GITHUB_OUTPUT
build-runtimes:
+ needs: [ validate-inputs ]
uses: "./.github/workflows/release-srtool.yml"
with:
excluded_runtimes: "asset-hub-rococo bridge-hub-rococo contracts-rococo coretime-rococo people-rococo rococo rococo-parachain substrate-test bp cumulus-test kitchensink minimal-template parachain-template penpal polkadot-test seedling shell frame-try sp solochain-template polkadot-sdk-docs-first"
build_opts: "--features on-chain-release-build"
-
- build-binaries:
- runs-on: ubuntu-latest
- strategy:
- matrix:
- # Tuples of [package, binary-name]
- binary: [ [frame-omni-bencher, frame-omni-bencher], [staging-chain-spec-builder, chain-spec-builder], [polkadot-omni-node, polkadot-omni-node] ]
- steps:
- - name: Checkout sources
- uses: actions/checkout@6d193bf28034eafb982f37bd894289fe649468fc # v4.0.0
-
- - name: Install protobuf-compiler
- run: |
- sudo apt update
- sudo apt install -y protobuf-compiler
-
- - name: Build ${{ matrix.binary[1] }} binary
- run: |
- cargo build --locked --profile=production -p ${{ matrix.binary[0] }} --bin ${{ matrix.binary[1] }}
- target/production/${{ matrix.binary[1] }} --version
-
- - name: Upload ${{ matrix.binary[1] }} binary
- uses: actions/upload-artifact@5d5d22a31266ced268874388b861e4b58bb5c2f3 # v4.3.1
- with:
- name: ${{ matrix.binary[1] }}
- path: target/production/${{ matrix.binary[1] }}
-
+ profile: production
+ permissions:
+ id-token: write
+ attestations: write
+ contents: read
publish-release-draft:
runs-on: ubuntu-latest
- needs: [ get-rust-versions, build-runtimes ]
+ environment: release
+ needs: [ validate-inputs, get-rust-versions, build-runtimes ]
outputs:
release_url: ${{ steps.create-release.outputs.html_url }}
asset_upload_url: ${{ steps.create-release.outputs.upload_url }}
+
steps:
- name: Checkout
- uses: actions/checkout@6d193bf28034eafb982f37bd894289fe649468fc # v4.0.0
+ uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- name: Download artifacts
uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8
@@ -87,20 +95,21 @@ jobs:
GLUTTON_WESTEND_DIGEST: ${{ github.workspace}}/glutton-westend-runtime/glutton-westend-srtool-digest.json
PEOPLE_WESTEND_DIGEST: ${{ github.workspace}}/people-westend-runtime/people-westend-srtool-digest.json
WESTEND_DIGEST: ${{ github.workspace}}/westend-runtime/westend-srtool-digest.json
+ RELEASE_TAG: ${{ needs.validate-inputs.outputs.release_tag }}
shell: bash
run: |
. ./.github/scripts/common/lib.sh
export REF1=$(get_latest_release_tag)
- if [[ -z "${{ inputs.version }}" ]]; then
+ if [[ -z "$RELEASE_TAG" ]]; then
export REF2="${{ github.ref_name }}"
echo "REF2: ${REF2}"
else
- export REF2="${{ inputs.version }}"
+ export REF2="$RELEASE_TAG"
echo "REF2: ${REF2}"
fi
echo "REL_TAG=$REF2" >> $GITHUB_ENV
- export VERSION=$(echo "$REF2" | sed -E 's/.*(stable[0-9]+).*$/\1/')
+ export VERSION=$(echo "$REF2" | sed -E 's/.*(stable[0-9]{4}(-[0-9]+)?).*$/\1/')
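+          # e.g. REF2="polkadot-stable2412-1-rc2" yields VERSION="stable2412-1"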
./scripts/release/build-changelogs.sh
@@ -112,19 +121,29 @@ jobs:
scripts/release/context.json
**/*-srtool-digest.json
+ - name: Generate content write token for the release automation
+ id: generate_write_token
+ uses: actions/create-github-app-token@v1
+ with:
+ app-id: ${{ vars.POLKADOT_SDK_RELEASE_RW_APP_ID }}
+ private-key: ${{ secrets.POLKADOT_SDK_RELEASE_RW_APP_KEY }}
+ owner: paritytech
+ repositories: polkadot-sdk
+
- name: Create draft release
id: create-release
- uses: actions/create-release@0cb9c9b65d5d1901c1f53e5e66eaf4afd303e70e # v1.1.4
env:
- GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- with:
- tag_name: ${{ env.REL_TAG }}
- release_name: Polkadot ${{ env.REL_TAG }}
- body_path: ${{ github.workspace}}/scripts/release/RELEASE_DRAFT.md
- draft: true
+ GITHUB_TOKEN: ${{ steps.generate_write_token.outputs.token }}
+ run: |
+ gh release create ${{ env.REL_TAG }} \
+ --repo paritytech/polkadot-sdk \
+ --draft \
+ --title "Polkadot ${{ env.REL_TAG }}" \
+ --notes-file ${{ github.workspace}}/scripts/release/RELEASE_DRAFT.md
publish-runtimes:
- needs: [ build-runtimes, publish-release-draft ]
+ needs: [ validate-inputs, build-runtimes, publish-release-draft ]
+ environment: release
continue-on-error: true
runs-on: ubuntu-latest
strategy:
@@ -132,7 +151,7 @@ jobs:
steps:
- name: Checkout sources
- uses: actions/checkout@6d193bf28034eafb982f37bd894289fe649468fc # v4.0.0
+ uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- name: Download artifacts
uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8
@@ -144,44 +163,83 @@ jobs:
>>$GITHUB_ENV echo ASSET=$(find ${{ matrix.chain }}-runtime -name '*.compact.compressed.wasm')
>>$GITHUB_ENV echo SPEC=$(<${JSON} jq -r .runtimes.compact.subwasm.core_version.specVersion)
+ - name: Generate content write token for the release automation
+ id: generate_write_token
+ uses: actions/create-github-app-token@v1
+ with:
+ app-id: ${{ vars.POLKADOT_SDK_RELEASE_RW_APP_ID }}
+ private-key: ${{ secrets.POLKADOT_SDK_RELEASE_RW_APP_KEY }}
+ owner: paritytech
+ repositories: polkadot-sdk
+
- name: Upload compressed ${{ matrix.chain }} v${{ env.SPEC }} wasm
- if: ${{ matrix.chain != 'rococo-parachain' }}
- uses: actions/upload-release-asset@e8f9f06c4b078e705bd2ea027f0926603fc9b4d5 #v1.0.2
env:
- GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- with:
- upload_url: ${{ needs.publish-release-draft.outputs.asset_upload_url }}
- asset_path: ${{ env.ASSET }}
- asset_name: ${{ matrix.chain }}_runtime-v${{ env.SPEC }}.compact.compressed.wasm
- asset_content_type: application/wasm
+ GITHUB_TOKEN: ${{ steps.generate_write_token.outputs.token }}
+ run: |
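+          # gh's "<file>#<label>" syntax uploads the asset under the given display name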
+ gh release upload ${{ needs.validate-inputs.outputs.release_tag }} \
+ --repo paritytech/polkadot-sdk \
+ '${{ env.ASSET }}#${{ matrix.chain }}_runtime-v${{ env.SPEC }}.compact.compressed.wasm'
- publish-binaries:
- needs: [ publish-release-draft, build-binaries ]
+ publish-release-artifacts:
+ needs: [ validate-inputs, publish-release-draft ]
+ environment: release
continue-on-error: true
runs-on: ubuntu-latest
strategy:
matrix:
- binary: [frame-omni-bencher, chain-spec-builder, polkadot-omni-node]
+ binary: [ polkadot, polkadot-execute-worker, polkadot-prepare-worker, polkadot-parachain, polkadot-omni-node, frame-omni-bencher, chain-spec-builder ]
+ target: [ x86_64-unknown-linux-gnu, aarch64-apple-darwin ]
steps:
- - name: Download artifacts
- uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8
+ - name: Checkout sources
+ uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+
+ - name: Fetch binaries from s3 based on version
+ run: |
+ . ./.github/scripts/common/lib.sh
+
+ VERSION="${{ needs.validate-inputs.outputs.release_tag }}"
+ fetch_release_artifacts_from_s3 ${{ matrix.binary }} ${{ matrix.target }}
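+          # fetched files land under release-artifacts/<target>/<binary>, which later steps use as working-directory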
+
+ - name: Rename aarch64-apple-darwin binaries
+ if: ${{ matrix.target == 'aarch64-apple-darwin' }}
+ working-directory: ${{ github.workspace}}/release-artifacts/${{ matrix.target }}/${{ matrix.binary }}
+ run: |
+ mv ${{ matrix.binary }} ${{ matrix.binary }}-aarch64-apple-darwin
+ mv ${{ matrix.binary }}.asc ${{ matrix.binary }}-aarch64-apple-darwin.asc
+ mv ${{ matrix.binary }}.sha256 ${{ matrix.binary }}-aarch64-apple-darwin.sha256
+
+ - name: Generate content write token for the release automation
+ id: generate_write_token
+ uses: actions/create-github-app-token@v1
with:
- name: ${{ matrix.binary }}
+ app-id: ${{ vars.POLKADOT_SDK_RELEASE_RW_APP_ID }}
+ private-key: ${{ secrets.POLKADOT_SDK_RELEASE_RW_APP_KEY }}
+ owner: paritytech
+ repositories: polkadot-sdk
- - name: Upload ${{ matrix.binary }} binary
- uses: actions/upload-release-asset@e8f9f06c4b078e705bd2ea027f0926603fc9b4d5 #v1.0.2
+ - name: Upload ${{ matrix.binary }} binary to release draft
env:
- GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- with:
- upload_url: ${{ needs.publish-release-draft.outputs.asset_upload_url }}
- asset_path: ${{ github.workspace}}/${{ matrix.binary }}
- asset_name: ${{ matrix.binary }}
- asset_content_type: application/octet-stream
+ GITHUB_TOKEN: ${{ steps.generate_write_token.outputs.token }}
+ working-directory: ${{ github.workspace}}/release-artifacts/${{ matrix.target }}/${{ matrix.binary }}
+ run: |
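+          # aarch64-apple-darwin binaries carry the target suffix added by the rename step above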
+ if [[ ${{ matrix.target }} == "aarch64-apple-darwin" ]]; then
+ gh release upload ${{ needs.validate-inputs.outputs.release_tag }} \
+ --repo paritytech/polkadot-sdk \
+ ${{ matrix.binary }}-aarch64-apple-darwin \
+ ${{ matrix.binary }}-aarch64-apple-darwin.asc \
+ ${{ matrix.binary }}-aarch64-apple-darwin.sha256
+ else
+ gh release upload ${{ needs.validate-inputs.outputs.release_tag }} \
+ --repo paritytech/polkadot-sdk \
+ ${{ matrix.binary }} \
+ ${{ matrix.binary }}.asc \
+ ${{ matrix.binary }}.sha256
+ fi
post_to_matrix:
runs-on: ubuntu-latest
- needs: publish-release-draft
+ needs: [ validate-inputs, publish-release-draft ]
environment: release
strategy:
matrix:
@@ -197,5 +255,5 @@ jobs:
access_token: ${{ secrets.RELEASENOTES_MATRIX_V2_ACCESS_TOKEN }}
server: m.parity.io
message: |
- **New version of polkadot tagged**: ${{ github.ref_name }}
- Draft release created: ${{ needs.publish-release-draft.outputs.release_url }}
+ **New version of polkadot tagged**: ${{ needs.validate-inputs.outputs.release_tag }}
+          Draft release created in the [polkadot-sdk repo](https://github.com/paritytech/polkadot-sdk/releases)
diff --git a/.github/workflows/release-31_promote-rc-to-final.yml b/.github/workflows/release-31_promote-rc-to-final.yml
new file mode 100644
index 000000000000..6aa9d4bddd1d
--- /dev/null
+++ b/.github/workflows/release-31_promote-rc-to-final.yml
@@ -0,0 +1,125 @@
+name: Release - Promote RC to final candidate on S3
+
+on:
+ workflow_dispatch:
+ inputs:
+ binary:
+        description: Binary to be built for the release
+ default: all
+ type: choice
+ options:
+ - polkadot
+ - polkadot-parachain
+ - polkadot-omni-node
+ - frame-omni-bencher
+ - chain-spec-builder
+ - all
+ release_tag:
+ description: Tag matching the actual release candidate with the format polkadot-stableYYMM(-X)-rcX
+ type: string
+
+
+jobs:
+
+ check-synchronization:
+ uses: paritytech-release/sync-workflows/.github/workflows/check-syncronization.yml@main
+
+ validate-inputs:
+ needs: [ check-synchronization ]
+    if: ${{ needs.check-synchronization.outputs.checks_passed == 'true' }}
+ runs-on: ubuntu-latest
+ outputs:
+ release_tag: ${{ steps.validate_inputs.outputs.release_tag }}
+ final_tag: ${{ steps.validate_inputs.outputs.final_tag }}
+
+ steps:
+ - name: Checkout sources
+ uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+
+ - name: Validate inputs
+ id: validate_inputs
+ run: |
+ . ./.github/scripts/common/lib.sh
+
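+          # validate_stable_tag (from common/lib.sh) is assumed to check the polkadot-stableYYMM(-X)-rcX format and echo the tag back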
+ RELEASE_TAG=$(validate_stable_tag ${{ inputs.release_tag }})
+ echo "release_tag=${RELEASE_TAG}" >> $GITHUB_OUTPUT
+
+ promote-polkadot-rc-to-final:
+ if: ${{ inputs.binary == 'polkadot' || inputs.binary == 'all' }}
+ needs: [ validate-inputs ]
+ uses: ./.github/workflows/release-reusable-promote-to-final.yml
+ strategy:
+ matrix:
+ target: [ x86_64-unknown-linux-gnu, aarch64-apple-darwin ]
+ with:
+ package: polkadot
+ release_tag: ${{ needs.validate-inputs.outputs.release_tag }}
+ target: ${{ matrix.target }}
+ secrets:
+ AWS_DEFAULT_REGION: ${{ secrets.AWS_DEFAULT_REGION }}
+ AWS_RELEASE_ACCESS_KEY_ID: ${{ secrets.AWS_RELEASE_ACCESS_KEY_ID }}
+ AWS_RELEASE_SECRET_ACCESS_KEY: ${{ secrets.AWS_RELEASE_SECRET_ACCESS_KEY }}
+
+ promote-polkadot-parachain-rc-to-final:
+ if: ${{ inputs.binary == 'polkadot-parachain' || inputs.binary == 'all' }}
+ needs: [ validate-inputs ]
+ uses: ./.github/workflows/release-reusable-promote-to-final.yml
+ strategy:
+ matrix:
+ target: [ x86_64-unknown-linux-gnu, aarch64-apple-darwin ]
+ with:
+ package: polkadot-parachain
+ release_tag: ${{ needs.validate-inputs.outputs.release_tag }}
+ target: ${{ matrix.target }}
+ secrets:
+ AWS_DEFAULT_REGION: ${{ secrets.AWS_DEFAULT_REGION }}
+ AWS_RELEASE_ACCESS_KEY_ID: ${{ secrets.AWS_RELEASE_ACCESS_KEY_ID }}
+ AWS_RELEASE_SECRET_ACCESS_KEY: ${{ secrets.AWS_RELEASE_SECRET_ACCESS_KEY }}
+
+ promote-polkadot-omni-node-rc-to-final:
+ if: ${{ inputs.binary == 'polkadot-omni-node' || inputs.binary == 'all' }}
+ needs: [ validate-inputs ]
+ uses: ./.github/workflows/release-reusable-promote-to-final.yml
+ strategy:
+ matrix:
+ target: [ x86_64-unknown-linux-gnu, aarch64-apple-darwin ]
+ with:
+ package: polkadot-omni-node
+ release_tag: ${{ needs.validate-inputs.outputs.release_tag }}
+ target: ${{ matrix.target }}
+ secrets:
+ AWS_DEFAULT_REGION: ${{ secrets.AWS_DEFAULT_REGION }}
+ AWS_RELEASE_ACCESS_KEY_ID: ${{ secrets.AWS_RELEASE_ACCESS_KEY_ID }}
+ AWS_RELEASE_SECRET_ACCESS_KEY: ${{ secrets.AWS_RELEASE_SECRET_ACCESS_KEY }}
+
+ promote-frame-omni-bencher-rc-to-final:
+ if: ${{ inputs.binary == 'frame-omni-bencher' || inputs.binary == 'all' }}
+ needs: [ validate-inputs ]
+ uses: ./.github/workflows/release-reusable-promote-to-final.yml
+ strategy:
+ matrix:
+ target: [ x86_64-unknown-linux-gnu, aarch64-apple-darwin ]
+ with:
+ package: frame-omni-bencher
+ release_tag: ${{ needs.validate-inputs.outputs.release_tag }}
+ target: ${{ matrix.target }}
+ secrets:
+ AWS_DEFAULT_REGION: ${{ secrets.AWS_DEFAULT_REGION }}
+ AWS_RELEASE_ACCESS_KEY_ID: ${{ secrets.AWS_RELEASE_ACCESS_KEY_ID }}
+ AWS_RELEASE_SECRET_ACCESS_KEY: ${{ secrets.AWS_RELEASE_SECRET_ACCESS_KEY }}
+
+ promote-chain-spec-builder-rc-to-final:
+ if: ${{ inputs.binary == 'chain-spec-builder' || inputs.binary == 'all' }}
+ needs: [ validate-inputs ]
+ uses: ./.github/workflows/release-reusable-promote-to-final.yml
+ strategy:
+ matrix:
+ target: [ x86_64-unknown-linux-gnu, aarch64-apple-darwin ]
+ with:
+ package: chain-spec-builder
+ release_tag: ${{ needs.validate-inputs.outputs.release_tag }}
+ target: ${{ matrix.target }}
+ secrets:
+ AWS_DEFAULT_REGION: ${{ secrets.AWS_DEFAULT_REGION }}
+ AWS_RELEASE_ACCESS_KEY_ID: ${{ secrets.AWS_RELEASE_ACCESS_KEY_ID }}
+ AWS_RELEASE_SECRET_ACCESS_KEY: ${{ secrets.AWS_RELEASE_SECRET_ACCESS_KEY }}
diff --git a/.github/workflows/release-40_publish-deb-package.yml b/.github/workflows/release-40_publish-deb-package.yml
new file mode 100644
index 000000000000..3c5411ab16f0
--- /dev/null
+++ b/.github/workflows/release-40_publish-deb-package.yml
@@ -0,0 +1,152 @@
+name: Release - Publish polkadot deb package
+
+on:
+ workflow_dispatch:
+ inputs:
+ tag:
+        description: Current final release tag in the format polkadot-stableYYMM or polkadot-stableYYMM-X
+ default: polkadot-stable2412
+ required: true
+ type: string
+
+ distribution:
+        description: Distribution to publish the deb package to (release, staging, stable2407, etc.)
+ default: staging
+ required: true
+ type: string
+
+jobs:
+ check-synchronization:
+ uses: paritytech-release/sync-workflows/.github/workflows/check-syncronization.yml@main
+
+ validate-inputs:
+ needs: [check-synchronization]
+    if: ${{ needs.check-synchronization.outputs.checks_passed == 'true' }}
+ runs-on: ubuntu-latest
+ outputs:
+ release_tag: ${{ steps.validate_inputs.outputs.release_tag }}
+
+ steps:
+ - name: Checkout sources
+ uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0
+
+ - name: Validate inputs
+ id: validate_inputs
+ run: |
+ . ./.github/scripts/common/lib.sh
+
+ RELEASE_TAG=$(validate_stable_tag ${{ inputs.tag }})
+ echo "release_tag=${RELEASE_TAG}" >> $GITHUB_OUTPUT
+
+
+ fetch-artifacts-from-s3:
+ runs-on: ubuntu-latest
+ needs: [validate-inputs]
+ env:
+ REPO: ${{ github.repository }}
+ RELEASE_TAG: ${{ needs.validate-inputs.outputs.release_tag }}
+ outputs:
+ VERSION: ${{ steps.fetch_artifacts_from_s3.outputs.VERSION }}
+
+ steps:
+ - name: Checkout sources
+ uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0
+
+ - name: Fetch rc artifacts or release artifacts from s3 based on version
+ id: fetch_artifacts_from_s3
+ run: |
+ . ./.github/scripts/common/lib.sh
+
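+          # get_polkadot_node_version_from_code (common/lib.sh) is assumed to read the node version (e.g. 1.16.0) from the sources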
+ VERSION="$(get_polkadot_node_version_from_code)"
+ echo "VERSION=${VERSION}" >> $GITHUB_OUTPUT
+
+ fetch_debian_package_from_s3 polkadot
+
+ - name: Upload artifacts
+ uses: actions/upload-artifact@5d5d22a31266ced268874388b861e4b58bb5c2f3 # v4.3.1
+ with:
+ name: release-artifacts
+ path: release-artifacts/polkadot/*.deb
+
+ publish-deb-package:
+ runs-on: ubuntu-latest
+ needs: [fetch-artifacts-from-s3]
+ environment: release
+ env:
+ AWS_DEFAULT_REGION: ${{ secrets.AWS_DEFAULT_REGION }}
+ AWS_DEB_PATH: "s3://releases-package-repos/deb"
+ LOCAL_DEB_REPO_PATH: ${{ github.workspace }}/deb
+ VERSION: ${{ needs.fetch-artifacts-from-s3.outputs.VERSION }}
+
+ steps:
+      - name: Install pgpkms
+ run: |
+ # Install pgpkms that is used to sign built artifacts
+ python3 -m pip install "pgpkms @ git+https://github.com/paritytech-release/pgpkms.git@1f8555426662ac93a3849480a35449f683b1c89f"
+ echo "PGPKMS_REPREPRO_PATH=$(which pgpkms-reprepro)" >> $GITHUB_ENV
+
+ - name: Install awscli
+ run: |
+ python3 -m pip install awscli
+ which aws
+
+ - name: Checkout sources
+ uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0
+
+ - name: Import gpg keys
+ shell: bash
+ run: |
+ . ./.github/scripts/common/lib.sh
+
+ import_gpg_keys
+
+ - name: Download artifacts
+ uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8
+ with:
+ name: release-artifacts
+ path: release-artifacts
+
+ - name: Setup local deb repo
+ run: |
+ sudo apt-get install -y reprepro
+ which reprepro
+
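+          # point reprepro at the external pgpkms signing hook ("SignWith: !<script>" runs an external signer)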
+ sed -i "s|^SignWith:.*|SignWith: ! ${PGPKMS_REPREPRO_PATH}|" ${{ github.workspace }}/.github/scripts/release/distributions
+
+ mkdir -p ${{ github.workspace }}/deb/conf
+ cp ${{ github.workspace }}/.github/scripts/release/distributions ${{ github.workspace }}/deb/conf/distributions
+ cat ${{ github.workspace }}/deb/conf/distributions
+
+ - name: Sync local deb repo
+ env:
+ AWS_ACCESS_KEY_ID: ${{ secrets.AWS_RELEASE_ACCESS_KEY_ID }}
+ AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_RELEASE_SECRET_ACCESS_KEY }}
+ run: |
+ # Download the current state of the deb repo
+ aws s3 sync "$AWS_DEB_PATH/db" "$LOCAL_DEB_REPO_PATH/db"
+ aws s3 sync "$AWS_DEB_PATH/pool" "$LOCAL_DEB_REPO_PATH/pool"
+ aws s3 sync "$AWS_DEB_PATH/dists" "$LOCAL_DEB_REPO_PATH/dists"
+
+ - name: Add deb package to local repo
+ env:
+ PGP_KMS_KEY: ${{ secrets.PGP_KMS_KEY }}
+ PGP_KMS_HASH: ${{ secrets.PGP_KMS_HASH }}
+ AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+ AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+ run: |
+ # Add the new deb to the repo
+ reprepro -b "$LOCAL_DEB_REPO_PATH" includedeb "${{ inputs.distribution }}" "release-artifacts/polkadot_${VERSION}_amd64.deb"
+
+ - name: Upload updated deb repo
+ env:
+ AWS_ACCESS_KEY_ID: ${{ secrets.AWS_RELEASE_ACCESS_KEY_ID }}
+ AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_RELEASE_SECRET_ACCESS_KEY }}
+ run: |
+ # Upload the updated repo - dists and pool should be publicly readable
+ aws s3 sync "$LOCAL_DEB_REPO_PATH/pool" "$AWS_DEB_PATH/pool" --acl public-read
+ aws s3 sync "$LOCAL_DEB_REPO_PATH/dists" "$AWS_DEB_PATH/dists" --acl public-read
+ aws s3 sync "$LOCAL_DEB_REPO_PATH/db" "$AWS_DEB_PATH/db"
+ aws s3 sync "$LOCAL_DEB_REPO_PATH/conf" "$AWS_DEB_PATH/conf"
+
+ # Invalidate caches to make sure latest files are served
+ aws cloudfront create-invalidation --distribution-id E36FKEYWDXAZYJ --paths '/deb/*'
diff --git a/.github/workflows/release-50_publish-docker.yml b/.github/workflows/release-50_publish-docker.yml
index 627e53bacd88..5c3c3a6e854d 100644
--- a/.github/workflows/release-50_publish-docker.yml
+++ b/.github/workflows/release-50_publish-docker.yml
@@ -4,10 +4,6 @@ name: Release - Publish Docker Image
# It builds and publishes releases and release candidates.
on:
- #TODO: activate automated run later
- # release:
- # types:
- # - published
workflow_dispatch:
inputs:
image_type:
@@ -30,16 +26,6 @@ on:
- polkadot-parachain
- chain-spec-builder
- release_id:
- description: |
- Release ID.
- You can find it using the command:
- curl -s \
- -H "Authorization: Bearer ${GITHUB_TOKEN}" https://api.github.com/repos/$OWNER/$REPO/releases | \
- jq '.[] | { name: .name, id: .id }'
- required: true
- type: number
-
registry:
description: Container registry
required: true
@@ -55,7 +41,7 @@ on:
default: parity
version:
- description: version to build/release
+        description: Version of the polkadot node release in the format v1.16.0 or v1.16.0-rc1
default: v0.9.18
required: true
@@ -78,11 +64,15 @@ env:
IMAGE_TYPE: ${{ inputs.image_type }}
jobs:
+ check-synchronization:
+ uses: paritytech-release/sync-workflows/.github/workflows/check-syncronization.yml@main
+
validate-inputs:
+ needs: [check-synchronization]
+    if: ${{ needs.check-synchronization.outputs.checks_passed == 'true' }}
runs-on: ubuntu-latest
outputs:
version: ${{ steps.validate_inputs.outputs.VERSION }}
- release_id: ${{ steps.validate_inputs.outputs.RELEASE_ID }}
stable_tag: ${{ steps.validate_inputs.outputs.stable_tag }}
steps:
@@ -97,11 +87,6 @@ jobs:
VERSION=$(filter_version_from_input "${{ inputs.version }}")
echo "VERSION=${VERSION}" >> $GITHUB_OUTPUT
- RELEASE_ID=$(check_release_id "${{ inputs.release_id }}")
- echo "RELEASE_ID=${RELEASE_ID}" >> $GITHUB_OUTPUT
-
- echo "Release ID: $RELEASE_ID"
-
STABLE_TAG=$(validate_stable_tag ${{ inputs.stable_tag }})
echo "stable_tag=${STABLE_TAG}" >> $GITHUB_OUTPUT
@@ -114,50 +99,26 @@ jobs:
- name: Checkout sources
uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0
- #TODO: this step will be needed when automated triggering will work
- #this step runs only if the workflow is triggered automatically when new release is published
- # if: ${{ env.EVENT_NAME == 'release' && env.EVENT_ACTION != '' && env.EVENT_ACTION == 'published' }}
- # run: |
- # mkdir -p release-artifacts && cd release-artifacts
-
- # for f in $BINARY $BINARY.asc $BINARY.sha256; do
- # URL="https://github.com/${{ github.event.repository.full_name }}/releases/download/${{ github.event.release.tag_name }}/$f"
- # echo " - Fetching $f from $URL"
- # wget "$URL" -O "$f"
- # done
- # chmod a+x $BINARY
- # ls -al
-
- name: Fetch rc artifacts or release artifacts from s3 based on version
- #this step runs only if the workflow is triggered manually
- if: ${{ env.EVENT_NAME == 'workflow_dispatch' && inputs.binary != 'polkadot-omni-node' && inputs.binary != 'chain-spec-builder'}}
+ # if: ${{ env.EVENT_NAME == 'workflow_dispatch' && inputs.binary != 'polkadot-omni-node' && inputs.binary != 'chain-spec-builder'}}
run: |
. ./.github/scripts/common/lib.sh
- VERSION="${{ needs.validate-inputs.outputs.VERSION }}"
+ VERSION="${{ needs.validate-inputs.outputs.stable_tag }}"
if [[ ${{ inputs.binary }} == 'polkadot' ]]; then
bins=(polkadot polkadot-prepare-worker polkadot-execute-worker)
for bin in "${bins[@]}"; do
- fetch_release_artifacts_from_s3 $bin
+ fetch_release_artifacts_from_s3 $bin x86_64-unknown-linux-gnu
done
else
- fetch_release_artifacts_from_s3 $BINARY
+ fetch_release_artifacts_from_s3 $BINARY x86_64-unknown-linux-gnu
fi
- - name: Fetch polkadot-omni-node/chain-spec-builder rc artifacts or release artifacts based on release id
- #this step runs only if the workflow is triggered manually and only for chain-spec-builder
- if: ${{ env.EVENT_NAME == 'workflow_dispatch' && (inputs.binary == 'polkadot-omni-node' || inputs.binary == 'chain-spec-builder') }}
- run: |
- . ./.github/scripts/common/lib.sh
-
- RELEASE_ID="${{ needs.validate-inputs.outputs.RELEASE_ID }}"
- fetch_release_artifacts
-
- name: Upload artifacts
uses: actions/upload-artifact@5d5d22a31266ced268874388b861e4b58bb5c2f3 # v4.3.1
with:
name: release-artifacts
- path: release-artifacts/${{ env.BINARY }}/**/*
+ path: release-artifacts/x86_64-unknown-linux-gnu/${{ env.BINARY }}/**/*
build-container: # this job will be triggered for the polkadot-parachain rc and release or polkadot rc image build
if: ${{ inputs.binary == 'polkadot-omni-node' || inputs.binary == 'polkadot-parachain' || inputs.binary == 'chain-spec-builder' || inputs.image_type == 'rc' }}
@@ -173,7 +134,7 @@ jobs:
uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8
- name: Check sha256 ${{ env.BINARY }}
- if: ${{ inputs.binary == 'polkadot-parachain' || inputs.binary == 'polkadot' }}
+ # if: ${{ inputs.binary == 'polkadot-parachain' || inputs.binary == 'polkadot' }}
working-directory: release-artifacts
run: |
. ../.github/scripts/common/lib.sh
@@ -182,7 +143,7 @@ jobs:
check_sha256 $BINARY && echo "OK" || echo "ERR"
- name: Check GPG ${{ env.BINARY }}
- if: ${{ inputs.binary == 'polkadot-parachain' || inputs.binary == 'polkadot' }}
+ # if: ${{ inputs.binary == 'polkadot-parachain' || inputs.binary == 'polkadot' }}
working-directory: release-artifacts
run: |
. ../.github/scripts/common/lib.sh
@@ -190,35 +151,29 @@ jobs:
check_gpg $BINARY
- name: Fetch rc commit and tag
+ working-directory: release-artifacts
if: ${{ env.IMAGE_TYPE == 'rc' }}
id: fetch_rc_refs
+ shell: bash
run: |
- . ./.github/scripts/common/lib.sh
-
- echo "release=${{ needs.validate-inputs.outputs.stable_tag }}" >> $GITHUB_OUTPUT
+ . ../.github/scripts/common/lib.sh
commit=$(git rev-parse --short HEAD) && \
echo "commit=${commit}" >> $GITHUB_OUTPUT
-
- echo "tag=${{ needs.validate-inputs.outputs.version }}" >> $GITHUB_OUTPUT
+ echo "release=$(echo ${{ needs.validate-inputs.outputs.version }})" >> $GITHUB_OUTPUT
+ echo "tag=$(prepare_docker_stable_tag ${{ needs.validate-inputs.outputs.stable_tag }})" >> $GITHUB_OUTPUT
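+          # prepare_docker_stable_tag (common/lib.sh) is assumed to turn the stable tag into a docker-friendly tag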
- name: Fetch release tags
working-directory: release-artifacts
if: ${{ env.IMAGE_TYPE == 'release'}}
id: fetch_release_refs
+ shell: bash
run: |
- chmod a+rx $BINARY
-
- if [[ $BINARY != 'chain-spec-builder' ]]; then
- VERSION=$(./$BINARY --version | awk '{ print $2 }' )
- release=$( echo $VERSION | cut -f1 -d- )
- else
- release=$(echo ${{ needs.validate-inputs.outputs.VERSION }} | sed 's/^v//')
- fi
+ . ../.github/scripts/common/lib.sh
echo "tag=latest" >> $GITHUB_OUTPUT
- echo "release=${release}" >> $GITHUB_OUTPUT
- echo "stable=${{ needs.validate-inputs.outputs.stable_tag }}" >> $GITHUB_OUTPUT
+ echo "release=$(echo ${{ needs.validate-inputs.outputs.version }})" >> $GITHUB_OUTPUT
+ echo "stable=$(prepare_docker_stable_tag ${{ needs.validate-inputs.outputs.stable_tag }})" >> $GITHUB_OUTPUT
- name: Build Injected Container image for polkadot rc
if: ${{ env.BINARY == 'polkadot' }}
@@ -342,8 +297,10 @@ jobs:
- name: Fetch values
id: fetch-data
run: |
+ . ./.github/scripts/common/lib.sh
date=$(date -u '+%Y-%m-%dT%H:%M:%SZ')
echo "date=$date" >> $GITHUB_OUTPUT
+ echo "stable=$(prepare_docker_stable_tag ${{ needs.validate-inputs.outputs.stable_tag }})" >> $GITHUB_OUTPUT
- name: Build and push
id: docker_build
@@ -354,9 +311,9 @@ jobs:
# TODO: The owner should be used below but buildx does not resolve the VARs
# TODO: It would be good to get rid of this GHA that we don't really need.
tags: |
- parity/polkadot:${{ needs.validate-inputs.outputs.stable_tag }}
- parity/polkadot:latest
- parity/polkadot:${{ needs.fetch-latest-debian-package-version.outputs.polkadot_container_tag }}
+          parity/polkadot:${{ steps.fetch-data.outputs.stable }}
+          parity/polkadot:latest
+          parity/polkadot:${{ needs.fetch-latest-debian-package-version.outputs.polkadot_container_tag }}
build-args: |
VCS_REF=${{ github.ref }}
POLKADOT_VERSION=${{ needs.fetch-latest-debian-package-version.outputs.polkadot_apt_version }}
diff --git a/.github/workflows/release-build-rc.yml b/.github/workflows/release-build-rc.yml
deleted file mode 100644
index 94bacf320898..000000000000
--- a/.github/workflows/release-build-rc.yml
+++ /dev/null
@@ -1,82 +0,0 @@
-name: Release - Build node release candidate
-
-on:
- workflow_dispatch:
- inputs:
- binary:
- description: Binary to be build for the release
- default: all
- type: choice
- options:
- - polkadot
- - polkadot-parachain
- - all
-
- release_tag:
- description: Tag matching the actual release candidate with the format stableYYMM-rcX or stableYYMM
- type: string
-
-jobs:
- check-synchronization:
- uses: paritytech-release/sync-workflows/.github/workflows/check-syncronization.yml@main
-
- validate-inputs:
- needs: [check-synchronization]
- if: ${{ needs.check-synchronization.outputs.checks_passed }} == 'true'
- runs-on: ubuntu-latest
- outputs:
- release_tag: ${{ steps.validate_inputs.outputs.release_tag }}
-
- steps:
- - name: Checkout sources
- uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0
-
- - name: Validate inputs
- id: validate_inputs
- run: |
- . ./.github/scripts/common/lib.sh
-
- RELEASE_TAG=$(validate_stable_tag ${{ inputs.release_tag }})
- echo "release_tag=${RELEASE_TAG}" >> $GITHUB_OUTPUT
-
- build-polkadot-binary:
- needs: [validate-inputs]
- if: ${{ inputs.binary == 'polkadot' || inputs.binary == 'all' }}
- uses: "./.github/workflows/release-reusable-rc-buid.yml"
- with:
- binary: '["polkadot", "polkadot-prepare-worker", "polkadot-execute-worker"]'
- package: polkadot
- release_tag: ${{ needs.validate-inputs.outputs.release_tag }}
- secrets:
- PGP_KMS_KEY: ${{ secrets.PGP_KMS_KEY }}
- PGP_KMS_HASH: ${{ secrets.PGP_KMS_HASH }}
- AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
- AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
- AWS_DEFAULT_REGION: ${{ secrets.AWS_DEFAULT_REGION }}
- AWS_RELEASE_ACCESS_KEY_ID: ${{ secrets.AWS_RELEASE_ACCESS_KEY_ID }}
- AWS_RELEASE_SECRET_ACCESS_KEY: ${{ secrets.AWS_RELEASE_SECRET_ACCESS_KEY }}
- permissions:
- id-token: write
- attestations: write
- contents: read
-
- build-polkadot-parachain-binary:
- needs: [validate-inputs]
- if: ${{ inputs.binary == 'polkadot-parachain' || inputs.binary == 'all' }}
- uses: "./.github/workflows/release-reusable-rc-buid.yml"
- with:
- binary: '["polkadot-parachain"]'
- package: "polkadot-parachain-bin"
- release_tag: ${{ needs.validate-inputs.outputs.release_tag }}
- secrets:
- PGP_KMS_KEY: ${{ secrets.PGP_KMS_KEY }}
- PGP_KMS_HASH: ${{ secrets.PGP_KMS_HASH }}
- AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
- AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
- AWS_DEFAULT_REGION: ${{ secrets.AWS_DEFAULT_REGION }}
- AWS_RELEASE_ACCESS_KEY_ID: ${{ secrets.AWS_RELEASE_ACCESS_KEY_ID }}
- AWS_RELEASE_SECRET_ACCESS_KEY: ${{ secrets.AWS_RELEASE_SECRET_ACCESS_KEY }}
- permissions:
- id-token: write
- attestations: write
- contents: read
diff --git a/.github/workflows/release-reusable-promote-to-final.yml b/.github/workflows/release-reusable-promote-to-final.yml
new file mode 100644
index 000000000000..ed4a80a01e82
--- /dev/null
+++ b/.github/workflows/release-reusable-promote-to-final.yml
@@ -0,0 +1,83 @@
+name: Promote rc to final
+
+on:
+ workflow_call:
+ inputs:
+ package:
+ description: Package to be promoted
+ required: true
+ type: string
+
+ release_tag:
+        description: Tag matching the actual release candidate with the format polkadot-stableYYMM(-X)-rcX that will be changed to the final form polkadot-stableYYMM(-X)
+ required: true
+ type: string
+
+ target:
+        description: Target triple for which the artifacts are being uploaded (e.g. aarch64-apple-darwin)
+ required: true
+ type: string
+
+ secrets:
+ AWS_DEFAULT_REGION:
+ required: true
+ AWS_RELEASE_ACCESS_KEY_ID:
+ required: true
+ AWS_RELEASE_SECRET_ACCESS_KEY:
+ required: true
+
+jobs:
+
+ promote-release-artifacts:
+ environment: release
+ runs-on: ubuntu-latest
+ env:
+ AWS_ACCESS_KEY_ID: ${{ secrets.AWS_RELEASE_ACCESS_KEY_ID }}
+ AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_RELEASE_SECRET_ACCESS_KEY }}
+ AWS_REGION: ${{ secrets.AWS_DEFAULT_REGION }}
+
+ steps:
+ - name: Checkout sources
+ uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+
+ - name: Prepare final tag
+ id: prepare_final_tag
+ shell: bash
+ run: |
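+          # strip the -rcX suffix, e.g. "polkadot-stable2412-rc3" -> "polkadot-stable2412"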
+ tag="$(echo ${{ inputs.release_tag }} | sed 's/-rc[0-9]*$//')"
+ echo $tag
+ echo "FINAL_TAG=${tag}" >> $GITHUB_OUTPUT
+
+ - name: Fetch binaries from s3 based on version
+ run: |
+ . ./.github/scripts/common/lib.sh
+
+ VERSION="${{ inputs.release_tag }}"
+ if [[ ${{ inputs.package }} == 'polkadot' ]]; then
+ packages=(polkadot polkadot-prepare-worker polkadot-execute-worker)
+ for package in "${packages[@]}"; do
+ fetch_release_artifacts_from_s3 $package ${{ inputs.target }}
+ done
+ else
+ fetch_release_artifacts_from_s3 ${{ inputs.package }} ${{ inputs.target }}
+ fi
+
+ - name: Configure AWS Credentials
+ uses: aws-actions/configure-aws-credentials@e3dd6a429d7300a6a4c196c26e071d42e0343502 # v4.0.2
+ with:
+ aws-access-key-id: ${{ env.AWS_ACCESS_KEY_ID }}
+ aws-secret-access-key: ${{ env.AWS_SECRET_ACCESS_KEY }}
+ aws-region: ${{ env.AWS_REGION }}
+
+ - name: Upload ${{ inputs.package }} ${{ inputs.target }} artifacts to s3
+ run: |
+ . ./.github/scripts/release/release_lib.sh
+
+ if [[ ${{ inputs.package }} == 'polkadot' ]]; then
+ packages=(polkadot polkadot-prepare-worker polkadot-execute-worker)
+ for package in "${packages[@]}"; do
+ upload_s3_release $package ${{ steps.prepare_final_tag.outputs.final_tag }} ${{ inputs.target }}
+ done
+ else
+ upload_s3_release ${{ inputs.package }} ${{ steps.prepare_final_tag.outputs.final_tag }} ${{ inputs.target }}
+ fi
diff --git a/.github/workflows/release-reusable-rc-buid.yml b/.github/workflows/release-reusable-rc-buid.yml
index d925839fb84a..0222b2aa91e2 100644
--- a/.github/workflows/release-reusable-rc-buid.yml
+++ b/.github/workflows/release-reusable-rc-buid.yml
@@ -10,7 +10,7 @@ on:
type: string
package:
- description: Package to be built, for now is either polkadot or polkadot-parachain-bin
+      description: Package to be built; for now one of polkadot, polkadot-parachain-bin, or polkadot-omni-node
required: true
type: string
@@ -19,6 +19,11 @@ on:
required: true
type: string
+ target:
+ description: Target triple for which the artifacts are being built (e.g. x86_64-unknown-linux-gnu)
+ required: true
+ type: string
+
secrets:
PGP_KMS_KEY:
required: true
@@ -57,6 +62,7 @@ jobs:
run: cat .github/env >> $GITHUB_OUTPUT
build-rc:
+ if: ${{ inputs.target == 'x86_64-unknown-linux-gnu' }}
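+    # linux builds run in this job; aarch64-apple-darwin is covered by build-macos-rc below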
needs: [set-image]
runs-on: ubuntu-latest-m
environment: release
@@ -98,7 +104,7 @@ jobs:
./.github/scripts/release/build-linux-release.sh ${{ matrix.binaries }} ${{ inputs.package }}
- name: Generate artifact attestation
- uses: actions/attest-build-provenance@1c608d11d69870c2092266b3f9a6f3abbf17002c # v1.4.3
+ uses: actions/attest-build-provenance@ef244123eb79f2f7a7e75d99086184180e6d0018 # v1.4.4
with:
subject-path: /artifacts/${{ matrix.binaries }}/${{ matrix.binaries }}
@@ -127,11 +133,127 @@ jobs:
- name: Upload ${{ matrix.binaries }} artifacts
uses: actions/upload-artifact@5d5d22a31266ced268874388b861e4b58bb5c2f3 # v4.3.1
with:
- name: ${{ matrix.binaries }}
+ name: ${{ matrix.binaries }}_${{ inputs.target }}
path: /artifacts/${{ matrix.binaries }}
+ build-macos-rc:
+ if: ${{ inputs.target == 'aarch64-apple-darwin' }}
+ runs-on: parity-macos
+ environment: release
+ strategy:
+ matrix:
+ binaries: ${{ fromJSON(inputs.binary) }}
+ env:
+ PGP_KMS_KEY: ${{ secrets.PGP_KMS_KEY }}
+ PGP_KMS_HASH: ${{ secrets.PGP_KMS_HASH }}
+ AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+ AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+ AWS_DEFAULT_REGION: ${{ secrets.AWS_DEFAULT_REGION }}
+ SKIP_WASM_BUILD: 1
+ steps:
+ - name: Checkout sources
+ uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0
+ with:
+ ref: ${{ inputs.release_tag }}
+ fetch-depth: 0
+
+ - name: Set rust version from env file
+ run: |
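+          # extract the rust version from the ci-unified image tag, e.g. "bullseye-1.81.0-2024-11-19-..." -> "1.81.0"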
+ RUST_VERSION=$(cat .github/env | sed -E 's/.*ci-unified:([^-]+)-([^-]+).*/\2/')
+ echo $RUST_VERSION
+ echo "RUST_VERSION=${RUST_VERSION}" >> $GITHUB_ENV
+ - name: Set workspace environment variable
+        # relevant for artifacts upload, which cannot interpolate GitHub Actions variable syntax when
+        # used within valid paths. We cannot use root-based paths either, since it is set as read-only
+        # on the `parity-macos` runner.
+ run: echo "ARTIFACTS_PATH=${GITHUB_WORKSPACE}/artifacts/${{ matrix.binaries }}" >> $GITHUB_ENV
+
+ - name: Set up Homebrew
+ uses: Homebrew/actions/setup-homebrew@1ccc07ccd54b6048295516a3eb89b192c35057dc # master from 12.09.2024
+ - name: Set homebrew binaries location on path
+ run: echo "/opt/homebrew/bin" >> $GITHUB_PATH
+
+ - name: Install rust ${{ env.RUST_VERSION }}
+ uses: actions-rust-lang/setup-rust-toolchain@11df97af8e8102fd60b60a77dfbf58d40cd843b8 # v1.10.1
+ with:
+ cache: false
+ toolchain: ${{ env.RUST_VERSION }}
+ target: wasm32-unknown-unknown
+ components: cargo, clippy, rust-docs, rust-src, rustfmt, rustc, rust-std
+
+ - name: cargo info
+ run: |
+ echo "######## rustup show ########"
+ rustup show
+ echo "######## cargo --version ########"
+ cargo --version
+
+ - name: Install protobuf
+ run: brew install protobuf
+ - name: Install gpg
+ run: |
+ brew install gnupg
+ # Setup for being able to resolve: keyserver.ubuntu.com.
+ # See: https://github.com/actions/runner-images/issues/9777
+ mkdir -p ~/.gnupg/
+ touch ~/.gnupg/dirmngr.conf
+ echo "standard-resolver" > ~/.gnupg/dirmngr.conf
+ - name: Install sha256sum
+ run: |
+ brew install coreutils
+
+      - name: Install pgpkms
+ run: |
+ # Install pgpkms that is used to sign built artifacts
+ python3 -m pip install "pgpkms @ git+https://github.com/paritytech-release/pgpkms.git@5a8f82fbb607ea102d8c178e761659de54c7af69" --break-system-packages
+
+ - name: Import gpg keys
+ shell: bash
+ run: |
+ . ./.github/scripts/common/lib.sh
+
+ import_gpg_keys
+
+ - name: Build binary
+ run: |
+ git config --global --add safe.directory "${GITHUB_WORKSPACE}" #avoid "detected dubious ownership" error
+ ./.github/scripts/release/build-macos-release.sh ${{ matrix.binaries }} ${{ inputs.package }}
+
+ - name: Generate artifact attestation
+ uses: actions/attest-build-provenance@ef244123eb79f2f7a7e75d99086184180e6d0018 # v1.4.4
+ with:
+ subject-path: ${{ env.ARTIFACTS_PATH }}/${{ matrix.binaries }}
+
+ - name: Sign artifacts
+ working-directory: ${{ env.ARTIFACTS_PATH }}
+ run: |
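+          # assumed: pgpkms writes a detached signature next to the binary as <binary>.asc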
+          python3 -m pgpkms sign --input ${{ matrix.binaries }} -o ${{ matrix.binaries }}.asc
+
+ - name: Check sha256 ${{ matrix.binaries }}
+ working-directory: ${{ env.ARTIFACTS_PATH }}
+ shell: bash
+ run: |
+ . "${GITHUB_WORKSPACE}"/.github/scripts/common/lib.sh
+
+ echo "Checking binary ${{ matrix.binaries }}"
+ check_sha256 ${{ matrix.binaries }}
+
+ - name: Check GPG ${{ matrix.binaries }}
+ working-directory: ${{ env.ARTIFACTS_PATH }}
+ shell: bash
+ run: |
+ . "${GITHUB_WORKSPACE}"/.github/scripts/common/lib.sh
+
+ check_gpg ${{ matrix.binaries }}
+
+ - name: Upload ${{ matrix.binaries }} artifacts
+ uses: actions/upload-artifact@5d5d22a31266ced268874388b861e4b58bb5c2f3 # v4.3.1
+ with:
+ name: ${{ matrix.binaries }}_${{ inputs.target }}
+ path: ${{ env.ARTIFACTS_PATH }}
+
build-polkadot-deb-package:
- if: ${{ inputs.package == 'polkadot' }}
+ if: ${{ inputs.package == 'polkadot' && inputs.target == 'x86_64-unknown-linux-gnu' }}
needs: [build-rc]
runs-on: ubuntu-latest
@@ -156,37 +278,170 @@ jobs:
. "${GITHUB_WORKSPACE}"/.github/scripts/release/build-deb.sh ${{ inputs.package }} ${VERSION}
- name: Generate artifact attestation
- uses: actions/attest-build-provenance@1c608d11d69870c2092266b3f9a6f3abbf17002c # v1.4.3
+ uses: actions/attest-build-provenance@ef244123eb79f2f7a7e75d99086184180e6d0018 # v1.4.4
with:
subject-path: target/production/*.deb
- name: Upload ${{inputs.package }} artifacts
uses: actions/upload-artifact@5d5d22a31266ced268874388b861e4b58bb5c2f3 # v4.3.1
with:
- name: ${{ inputs.package }}
+ name: ${{ inputs.package }}_${{ inputs.target }}
path: target/production
overwrite: true
upload-polkadot-artifacts-to-s3:
- if: ${{ inputs.package == 'polkadot' }}
+ if: ${{ inputs.package == 'polkadot' && inputs.target == 'x86_64-unknown-linux-gnu' }}
needs: [build-polkadot-deb-package]
uses: ./.github/workflows/release-reusable-s3-upload.yml
with:
package: ${{ inputs.package }}
release_tag: ${{ inputs.release_tag }}
+ target: ${{ inputs.target }}
secrets:
AWS_DEFAULT_REGION: ${{ secrets.AWS_DEFAULT_REGION }}
AWS_RELEASE_ACCESS_KEY_ID: ${{ secrets.AWS_RELEASE_ACCESS_KEY_ID }}
AWS_RELEASE_SECRET_ACCESS_KEY: ${{ secrets.AWS_RELEASE_SECRET_ACCESS_KEY }}
-
upload-polkadot-parachain-artifacts-to-s3:
- if: ${{ inputs.package == 'polkadot-parachain-bin' }}
+ if: ${{ inputs.package == 'polkadot-parachain-bin' && inputs.target == 'x86_64-unknown-linux-gnu' }}
needs: [build-rc]
uses: ./.github/workflows/release-reusable-s3-upload.yml
with:
package: polkadot-parachain
release_tag: ${{ inputs.release_tag }}
+ target: ${{ inputs.target }}
+ secrets:
+ AWS_DEFAULT_REGION: ${{ secrets.AWS_DEFAULT_REGION }}
+ AWS_RELEASE_ACCESS_KEY_ID: ${{ secrets.AWS_RELEASE_ACCESS_KEY_ID }}
+ AWS_RELEASE_SECRET_ACCESS_KEY: ${{ secrets.AWS_RELEASE_SECRET_ACCESS_KEY }}
+
+ upload-polkadot-omni-node-artifacts-to-s3:
+ if: ${{ inputs.package == 'polkadot-omni-node' && inputs.target == 'x86_64-unknown-linux-gnu' }}
+ needs: [build-rc]
+ uses: ./.github/workflows/release-reusable-s3-upload.yml
+ with:
+ package: ${{ inputs.package }}
+ release_tag: ${{ inputs.release_tag }}
+ target: ${{ inputs.target }}
+ secrets:
+ AWS_DEFAULT_REGION: ${{ secrets.AWS_DEFAULT_REGION }}
+ AWS_RELEASE_ACCESS_KEY_ID: ${{ secrets.AWS_RELEASE_ACCESS_KEY_ID }}
+ AWS_RELEASE_SECRET_ACCESS_KEY: ${{ secrets.AWS_RELEASE_SECRET_ACCESS_KEY }}
+
+ upload-frame-omni-bencher-artifacts-to-s3:
+ if: ${{ inputs.package == 'frame-omni-bencher' && inputs.target == 'x86_64-unknown-linux-gnu' }}
+ needs: [build-rc]
+ uses: ./.github/workflows/release-reusable-s3-upload.yml
+ with:
+ package: ${{ inputs.package }}
+ release_tag: ${{ inputs.release_tag }}
+ target: ${{ inputs.target }}
+ secrets:
+ AWS_DEFAULT_REGION: ${{ secrets.AWS_DEFAULT_REGION }}
+ AWS_RELEASE_ACCESS_KEY_ID: ${{ secrets.AWS_RELEASE_ACCESS_KEY_ID }}
+ AWS_RELEASE_SECRET_ACCESS_KEY: ${{ secrets.AWS_RELEASE_SECRET_ACCESS_KEY }}
+
+ upload-chain-spec-builder-artifacts-to-s3:
+ if: ${{ inputs.package == 'staging-chain-spec-builder' && inputs.target == 'x86_64-unknown-linux-gnu' }}
+ needs: [build-rc]
+ uses: ./.github/workflows/release-reusable-s3-upload.yml
+ with:
+ package: chain-spec-builder
+ release_tag: ${{ inputs.release_tag }}
+ target: ${{ inputs.target }}
+ secrets:
+ AWS_DEFAULT_REGION: ${{ secrets.AWS_DEFAULT_REGION }}
+ AWS_RELEASE_ACCESS_KEY_ID: ${{ secrets.AWS_RELEASE_ACCESS_KEY_ID }}
+ AWS_RELEASE_SECRET_ACCESS_KEY: ${{ secrets.AWS_RELEASE_SECRET_ACCESS_KEY }}
+
+ upload-polkadot-macos-artifacts-to-s3:
+ if: ${{ inputs.package == 'polkadot' && inputs.target == 'aarch64-apple-darwin' }}
+ # TODO: add and use a `build-polkadot-homebrew-package` which packs all `polkadot` binaries:
+ # `polkadot`, `polkadot-prepare-worker` and `polkadot-execute-worker`.
+ needs: [build-macos-rc]
+ uses: ./.github/workflows/release-reusable-s3-upload.yml
+ with:
+ package: ${{ inputs.package }}
+ release_tag: ${{ inputs.release_tag }}
+ target: ${{ inputs.target }}
+ secrets:
+ AWS_DEFAULT_REGION: ${{ secrets.AWS_DEFAULT_REGION }}
+ AWS_RELEASE_ACCESS_KEY_ID: ${{ secrets.AWS_RELEASE_ACCESS_KEY_ID }}
+ AWS_RELEASE_SECRET_ACCESS_KEY: ${{ secrets.AWS_RELEASE_SECRET_ACCESS_KEY }}
+
+ upload-polkadot-prepare-worker-macos-artifacts-to-s3:
+ if: ${{ inputs.package == 'polkadot' && inputs.target == 'aarch64-apple-darwin' }}
+ needs: [build-macos-rc]
+ uses: ./.github/workflows/release-reusable-s3-upload.yml
+ with:
+ package: polkadot-prepare-worker
+ release_tag: ${{ inputs.release_tag }}
+ target: ${{ inputs.target }}
+ secrets:
+ AWS_DEFAULT_REGION: ${{ secrets.AWS_DEFAULT_REGION }}
+ AWS_RELEASE_ACCESS_KEY_ID: ${{ secrets.AWS_RELEASE_ACCESS_KEY_ID }}
+ AWS_RELEASE_SECRET_ACCESS_KEY: ${{ secrets.AWS_RELEASE_SECRET_ACCESS_KEY }}
+
+ upload-polkadot-execute-worker-macos-artifacts-to-s3:
+ if: ${{ inputs.package == 'polkadot' && inputs.target == 'aarch64-apple-darwin' }}
+ needs: [build-macos-rc]
+ uses: ./.github/workflows/release-reusable-s3-upload.yml
+ with:
+ package: polkadot-execute-worker
+ release_tag: ${{ inputs.release_tag }}
+ target: ${{ inputs.target }}
+ secrets:
+ AWS_DEFAULT_REGION: ${{ secrets.AWS_DEFAULT_REGION }}
+ AWS_RELEASE_ACCESS_KEY_ID: ${{ secrets.AWS_RELEASE_ACCESS_KEY_ID }}
+ AWS_RELEASE_SECRET_ACCESS_KEY: ${{ secrets.AWS_RELEASE_SECRET_ACCESS_KEY }}
+
+ upload-polkadot-omni-node-macos-artifacts-to-s3:
+ if: ${{ inputs.package == 'polkadot-omni-node' && inputs.target == 'aarch64-apple-darwin' }}
+ needs: [build-macos-rc]
+ uses: ./.github/workflows/release-reusable-s3-upload.yml
+ with:
+ package: ${{ inputs.package }}
+ release_tag: ${{ inputs.release_tag }}
+ target: ${{ inputs.target }}
+ secrets:
+ AWS_DEFAULT_REGION: ${{ secrets.AWS_DEFAULT_REGION }}
+ AWS_RELEASE_ACCESS_KEY_ID: ${{ secrets.AWS_RELEASE_ACCESS_KEY_ID }}
+ AWS_RELEASE_SECRET_ACCESS_KEY: ${{ secrets.AWS_RELEASE_SECRET_ACCESS_KEY }}
+
+ upload-polkadot-parachain-macos-artifacts-to-s3:
+ if: ${{ inputs.package == 'polkadot-parachain-bin' && inputs.target == 'aarch64-apple-darwin' }}
+ needs: [build-macos-rc]
+ uses: ./.github/workflows/release-reusable-s3-upload.yml
+ with:
+ package: polkadot-parachain
+ release_tag: ${{ inputs.release_tag }}
+ target: ${{ inputs.target }}
+ secrets:
+ AWS_DEFAULT_REGION: ${{ secrets.AWS_DEFAULT_REGION }}
+ AWS_RELEASE_ACCESS_KEY_ID: ${{ secrets.AWS_RELEASE_ACCESS_KEY_ID }}
+ AWS_RELEASE_SECRET_ACCESS_KEY: ${{ secrets.AWS_RELEASE_SECRET_ACCESS_KEY }}
+
+ upload-frame-omni-bencher-macos-artifacts-to-s3:
+ if: ${{ inputs.package == 'frame-omni-bencher' && inputs.target == 'aarch64-apple-darwin' }}
+ needs: [build-macos-rc]
+ uses: ./.github/workflows/release-reusable-s3-upload.yml
+ with:
+ package: ${{ inputs.package }}
+ release_tag: ${{ inputs.release_tag }}
+ target: ${{ inputs.target }}
+ secrets:
+ AWS_DEFAULT_REGION: ${{ secrets.AWS_DEFAULT_REGION }}
+ AWS_RELEASE_ACCESS_KEY_ID: ${{ secrets.AWS_RELEASE_ACCESS_KEY_ID }}
+ AWS_RELEASE_SECRET_ACCESS_KEY: ${{ secrets.AWS_RELEASE_SECRET_ACCESS_KEY }}
+
+ upload-chain-spec-builder-macos-artifacts-to-s3:
+ if: ${{ inputs.package == 'staging-chain-spec-builder' && inputs.target == 'aarch64-apple-darwin' }}
+ needs: [build-macos-rc]
+ uses: ./.github/workflows/release-reusable-s3-upload.yml
+ with:
+ package: chain-spec-builder
+ release_tag: ${{ inputs.release_tag }}
+ target: ${{ inputs.target }}
secrets:
AWS_DEFAULT_REGION: ${{ secrets.AWS_DEFAULT_REGION }}
AWS_RELEASE_ACCESS_KEY_ID: ${{ secrets.AWS_RELEASE_ACCESS_KEY_ID }}
diff --git a/.github/workflows/release-reusable-s3-upload.yml b/.github/workflows/release-reusable-s3-upload.yml
index 6776b78da8e6..48c7e53c6c8f 100644
--- a/.github/workflows/release-reusable-s3-upload.yml
+++ b/.github/workflows/release-reusable-s3-upload.yml
@@ -9,7 +9,12 @@ on:
type: string
release_tag:
- description: Tag matching the actual release candidate with the format stableYYMM-rcX or stableYYMM-rcX
+      description: Tag matching the actual release candidate with the format polkadot-stableYYMM(-X)-rcX
+ required: true
+ type: string
+
+ target:
+      description: Target triple for which the artifacts are being uploaded (e.g. aarch64-apple-darwin)
required: true
type: string
@@ -34,11 +39,11 @@ jobs:
- name: Checkout
uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0
- - name: Download artifacts
+ - name: Download amd64 artifacts
uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8
with:
- name: ${{ inputs.package }}
- path: artifacts/${{ inputs.package }}
+ name: ${{ inputs.package }}_${{ inputs.target }}
+ path: release-artifacts/${{ inputs.target }}/${{ inputs.package }}
- name: Configure AWS Credentials
uses: aws-actions/configure-aws-credentials@e3dd6a429d7300a6a4c196c26e071d42e0343502 # v4.0.2
@@ -50,4 +55,4 @@ jobs:
- name: Upload ${{ inputs.package }} artifacts to s3
run: |
. ./.github/scripts/release/release_lib.sh
- upload_s3_release ${{ inputs.package }} ${{ inputs.release_tag }}
+ upload_s3_release ${{ inputs.package }} ${{ inputs.release_tag }} ${{ inputs.target }}
diff --git a/.github/workflows/release-srtool.yml b/.github/workflows/release-srtool.yml
index 9a29b46d2fc3..fc10496d481b 100644
--- a/.github/workflows/release-srtool.yml
+++ b/.github/workflows/release-srtool.yml
@@ -1,7 +1,7 @@
name: Srtool build
env:
- SUBWASM_VERSION: 0.20.0
+ SUBWASM_VERSION: 0.21.0
TOML_CLI_VERSION: 0.2.4
on:
@@ -11,14 +11,16 @@ on:
type: string
build_opts:
type: string
+ profile:
+ type: string
outputs:
published_runtimes:
value: ${{ jobs.find-runtimes.outputs.runtime }}
- schedule:
- - cron: "00 02 * * 1" # 2AM weekly on monday
-
- workflow_dispatch:
+permissions:
+ id-token: write
+ attestations: write
+ contents: read
jobs:
find-runtimes:
@@ -75,6 +77,7 @@ jobs:
with:
chain: ${{ matrix.chain }}
runtime_dir: ${{ matrix.runtime_dir }}
+ profile: ${{ inputs.profile }}
- name: Summary
run: |
@@ -83,6 +86,11 @@ jobs:
echo "Compact Runtime: ${{ steps.srtool_build.outputs.wasm }}"
echo "Compressed Runtime: ${{ steps.srtool_build.outputs.wasm_compressed }}"
+ - name: Generate artifact attestation
+ uses: actions/attest-build-provenance@1c608d11d69870c2092266b3f9a6f3abbf17002c # v1.4.3
+ with:
+ subject-path: ${{ steps.srtool_build.outputs.wasm }}
+
# We now get extra information thanks to subwasm
- name: Install subwasm
run: |
diff --git a/.github/workflows/runtimes-matrix.json b/.github/workflows/runtimes-matrix.json
index f991db55b86d..104e73521331 100644
--- a/.github/workflows/runtimes-matrix.json
+++ b/.github/workflows/runtimes-matrix.json
@@ -8,6 +8,8 @@
"bench_features": "runtime-benchmarks",
"bench_flags": "--genesis-builder-policy=none --exclude-pallets=pallet_xcm,pallet_xcm_benchmarks::fungible,pallet_xcm_benchmarks::generic,pallet_nomination_pools,pallet_remark,pallet_transaction_storage",
"uri": null,
+ "old_package": "staging-node-cli",
+ "old_bin": "substrate-node",
"is_relay": false
},
{
@@ -19,6 +21,8 @@
"bench_flags": "",
"bench_features": "runtime-benchmarks",
"uri": "wss://try-runtime-westend.polkadot.io:443",
+ "old_package": "polkadot",
+ "old_bin": "polkadot",
"is_relay": true
},
{
@@ -27,9 +31,11 @@
"path": "polkadot/runtime/rococo",
"header": "polkadot/file_header.txt",
"template": "polkadot/xcm/pallet-xcm-benchmarks/template.hbs",
- "uri": "wss://try-runtime-rococo.polkadot.io:443",
"bench_features": "runtime-benchmarks",
"bench_flags": "",
+ "uri": "wss://try-runtime-rococo.polkadot.io:443",
+ "old_package": "polkadot",
+ "old_bin": "polkadot",
"is_relay": true
},
{
@@ -41,6 +47,8 @@
"bench_features": "runtime-benchmarks",
"bench_flags": "",
"uri": "wss://westend-asset-hub-rpc.polkadot.io:443",
+ "old_package": "polkadot-parachain-bin",
+ "old_bin": "polkadot-parachain",
"is_relay": false
},
{
@@ -52,6 +60,8 @@
"bench_features": "runtime-benchmarks",
"bench_flags": "",
"uri": "wss://rococo-asset-hub-rpc.polkadot.io:443",
+ "old_package": "polkadot-parachain-bin",
+ "old_bin": "polkadot-parachain",
"is_relay": false
},
{
@@ -63,6 +73,8 @@
"bench_features": "runtime-benchmarks",
"bench_flags": "",
"uri": "wss://rococo-bridge-hub-rpc.polkadot.io:443",
+ "old_package": "polkadot-parachain-bin",
+ "old_bin": "polkadot-parachain",
"is_relay": false
},
{
@@ -74,6 +86,8 @@
"bench_features": "runtime-benchmarks",
"bench_flags": "",
"uri": "wss://westend-bridge-hub-rpc.polkadot.io:443",
+ "old_package": "polkadot-parachain-bin",
+ "old_bin": "polkadot-parachain",
"is_relay": false
},
{
@@ -84,7 +98,10 @@
"template": "cumulus/templates/xcm-bench-template.hbs",
"bench_features": "runtime-benchmarks",
"bench_flags": "",
- "uri": "wss://westend-collectives-rpc.polkadot.io:443"
+ "uri": "wss://westend-collectives-rpc.polkadot.io:443",
+ "old_package": "polkadot-parachain-bin",
+ "old_bin": "polkadot-parachain",
+ "is_relay": false
},
{
"name": "contracts-rococo",
@@ -95,6 +112,8 @@
"bench_features": "runtime-benchmarks",
"bench_flags": "--genesis-builder-policy=none --exclude-pallets=pallet_xcm",
"uri": "wss://rococo-contracts-rpc.polkadot.io:443",
+ "old_package": "polkadot-parachain-bin",
+ "old_bin": "polkadot-parachain",
"is_relay": false
},
{
@@ -106,6 +125,8 @@
"bench_features": "runtime-benchmarks",
"bench_flags": "--genesis-builder-policy=none --exclude-pallets=pallet_xcm,pallet_xcm_benchmarks::fungible,pallet_xcm_benchmarks::generic",
"uri": "wss://rococo-coretime-rpc.polkadot.io:443",
+ "old_package": "polkadot-parachain-bin",
+ "old_bin": "polkadot-parachain",
"is_relay": false
},
{
@@ -117,6 +138,8 @@
"bench_features": "runtime-benchmarks",
"bench_flags": "--genesis-builder-policy=none --exclude-pallets=pallet_xcm,pallet_xcm_benchmarks::fungible,pallet_xcm_benchmarks::generic",
"uri": "wss://westend-coretime-rpc.polkadot.io:443",
+ "old_package": "polkadot-parachain-bin",
+ "old_bin": "polkadot-parachain",
"is_relay": false
},
{
@@ -128,6 +151,8 @@
"bench_features": "runtime-benchmarks",
"bench_flags": "--genesis-builder-policy=none",
"uri": null,
+ "old_package": "polkadot-parachain-bin",
+ "old_bin": "polkadot-parachain",
"is_relay": false
},
{
@@ -139,6 +164,8 @@
"bench_features": "runtime-benchmarks",
"bench_flags": "--genesis-builder-policy=none --exclude-pallets=pallet_xcm,pallet_xcm_benchmarks::fungible,pallet_xcm_benchmarks::generic",
"uri": "wss://rococo-people-rpc.polkadot.io:443",
+ "old_package": "polkadot-parachain-bin",
+ "old_bin": "polkadot-parachain",
"is_relay": false
},
{
@@ -150,6 +177,8 @@
"bench_features": "runtime-benchmarks",
"bench_flags": "--genesis-builder-policy=none --exclude-pallets=pallet_xcm,pallet_xcm_benchmarks::fungible,pallet_xcm_benchmarks::generic",
"uri": "wss://westend-people-rpc.polkadot.io:443",
+ "old_package": "polkadot-parachain-bin",
+ "old_bin": "polkadot-parachain",
"is_relay": false
}
]
diff --git a/.github/workflows/tests-linux-stable-coverage.yml b/.github/workflows/tests-linux-stable-coverage.yml
index c5af6bcae77f..61e01cda4428 100644
--- a/.github/workflows/tests-linux-stable-coverage.yml
+++ b/.github/workflows/tests-linux-stable-coverage.yml
@@ -102,7 +102,7 @@ jobs:
merge-multiple: true
- run: ls -al reports/
- name: Upload to Codecov
- uses: codecov/codecov-action@v4
+ uses: codecov/codecov-action@v5
with:
token: ${{ secrets.CODECOV_TOKEN }}
verbose: true
diff --git a/.github/workflows/tests-linux-stable.yml b/.github/workflows/tests-linux-stable.yml
index b9d0605b2495..3f8dc4fe1240 100644
--- a/.github/workflows/tests-linux-stable.yml
+++ b/.github/workflows/tests-linux-stable.yml
@@ -37,7 +37,7 @@ jobs:
id: required
run: WASM_BUILD_NO_COLOR=1 forklift cargo test -p staging-node-cli --release --locked -- --ignored
- name: Stop all workflows if failed
- if: ${{ failure() && steps.required.conclusion == 'failure' }}
+ if: ${{ failure() && steps.required.conclusion == 'failure' && !github.event.pull_request.head.repo.fork }}
uses: ./.github/actions/workflow-stopper
with:
app-id: ${{ secrets.WORKFLOW_STOPPER_RUNNER_APP_ID }}
@@ -63,7 +63,7 @@ jobs:
id: required
run: forklift cargo nextest run --workspace --features runtime-benchmarks benchmark --locked --cargo-profile testnet --cargo-quiet
- name: Stop all workflows if failed
- if: ${{ failure() && steps.required.conclusion == 'failure' }}
+ if: ${{ failure() && steps.required.conclusion == 'failure' && !github.event.pull_request.head.repo.fork }}
uses: ./.github/actions/workflow-stopper
with:
app-id: ${{ secrets.WORKFLOW_STOPPER_RUNNER_APP_ID }}
@@ -113,7 +113,7 @@ jobs:
if: ${{ matrix.partition == '1/3' }}
run: forklift cargo nextest run -p sp-api-test --features enable-staging-api --cargo-quiet
- name: Stop all workflows if failed
- if: ${{ failure() && steps.required.conclusion == 'failure' }}
+ if: ${{ failure() && steps.required.conclusion == 'failure' && !github.event.pull_request.head.repo.fork }}
uses: ./.github/actions/workflow-stopper
with:
app-id: ${{ secrets.WORKFLOW_STOPPER_RUNNER_APP_ID }}
@@ -155,7 +155,7 @@ jobs:
--filter-expr " !test(/all_security_features_work/) - test(/nonexistent_cache_dir/)" \
--partition count:${{ matrix.partition }} \
- name: Stop all workflows if failed
- if: ${{ failure() && steps.required.conclusion == 'failure' }}
+ if: ${{ failure() && steps.required.conclusion == 'failure' && !github.event.pull_request.head.repo.fork }}
uses: ./.github/actions/workflow-stopper
with:
app-id: ${{ secrets.WORKFLOW_STOPPER_RUNNER_APP_ID }}
diff --git a/.github/workflows/tests-misc.yml b/.github/workflows/tests-misc.yml
index cca32650b106..decd88f2e84c 100644
--- a/.github/workflows/tests-misc.yml
+++ b/.github/workflows/tests-misc.yml
@@ -165,12 +165,14 @@ jobs:
- name: Download artifact (master run)
uses: actions/download-artifact@v4.1.8
+ continue-on-error: true
with:
name: cargo-check-benches-master-${{ github.sha }}
path: ./artifacts/master
- name: Download artifact (current run)
uses: actions/download-artifact@v4.1.8
+ continue-on-error: true
with:
name: cargo-check-benches-current-${{ github.sha }}
path: ./artifacts/current
@@ -183,6 +185,12 @@ jobs:
exit 0
fi
+ # fail if no artifacts
+ if [ ! -d ./artifacts/master ] || [ ! -d ./artifacts/current ]; then
+ echo "No artifacts found"
+ exit 1
+ fi
+
docker run --rm \
-v $PWD/artifacts/master:/artifacts/master \
-v $PWD/artifacts/current:/artifacts/current \
diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index f508404f1efa..42a7e87bda43 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -22,7 +22,7 @@ workflow:
variables:
# CI_IMAGE: !reference [ .ci-unified, variables, CI_IMAGE ]
- CI_IMAGE: "docker.io/paritytech/ci-unified:bullseye-1.81.0-2024-09-11-v202409111034"
+ CI_IMAGE: "docker.io/paritytech/ci-unified:bullseye-1.81.0-2024-11-19-v202411281558"
# BUILDAH_IMAGE is defined in group variables
BUILDAH_COMMAND: "buildah --storage-driver overlay2"
RELENG_SCRIPTS_BRANCH: "master"
diff --git a/.gitlab/pipeline/zombienet/parachain-template.yml b/.gitlab/pipeline/zombienet/parachain-template.yml
index 896ba7913be7..d5c1b6558b39 100644
--- a/.gitlab/pipeline/zombienet/parachain-template.yml
+++ b/.gitlab/pipeline/zombienet/parachain-template.yml
@@ -43,4 +43,4 @@ zombienet-parachain-template-smoke:
- ls -ltr $(pwd)/artifacts
- cargo test -p template-zombienet-tests --features zombienet --tests minimal_template_block_production_test
- cargo test -p template-zombienet-tests --features zombienet --tests parachain_template_block_production_test
- # - cargo test -p template-zombienet-tests --features zombienet --tests solochain_template_block_production_test
+ - cargo test -p template-zombienet-tests --features zombienet --tests solochain_template_block_production_test
diff --git a/.gitlab/pipeline/zombienet/polkadot.yml b/.gitlab/pipeline/zombienet/polkadot.yml
index 3dab49a118e5..ac4bdac7ad15 100644
--- a/.gitlab/pipeline/zombienet/polkadot.yml
+++ b/.gitlab/pipeline/zombienet/polkadot.yml
@@ -179,7 +179,7 @@ zombienet-polkadot-elastic-scaling-0001-basic-3cores-6s-blocks:
--local-dir="${LOCAL_DIR}/elastic_scaling"
--test="0001-basic-3cores-6s-blocks.zndsl"
-zombienet-polkadot-elastic-scaling-0002-elastic-scaling-doesnt-break-parachains:
+.zombienet-polkadot-elastic-scaling-0002-elastic-scaling-doesnt-break-parachains:
extends:
- .zombienet-polkadot-common
before_script:
@@ -233,7 +233,7 @@ zombienet-polkadot-functional-0015-coretime-shared-core:
--local-dir="${LOCAL_DIR}/functional"
--test="0016-approval-voting-parallel.zndsl"
-zombienet-polkadot-functional-0017-sync-backing:
+.zombienet-polkadot-functional-0017-sync-backing:
extends:
- .zombienet-polkadot-common
script:
diff --git a/Cargo.lock b/Cargo.lock
index 02d7da8f7657..989430fdfe29 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -899,6 +899,7 @@ dependencies = [
"pallet-xcm-benchmarks 7.0.0",
"pallet-xcm-bridge-hub-router 0.5.0",
"parachains-common 7.0.0",
+ "parachains-runtimes-test-utils 7.0.0",
"parity-scale-codec",
"polkadot-parachain-primitives 6.0.0",
"polkadot-runtime-common 7.0.0",
@@ -1036,6 +1037,7 @@ dependencies = [
"pallet-xcm-benchmarks 7.0.0",
"pallet-xcm-bridge-hub-router 0.5.0",
"parachains-common 7.0.0",
+ "parachains-runtimes-test-utils 7.0.0",
"parity-scale-codec",
"polkadot-parachain-primitives 6.0.0",
"polkadot-runtime-common 7.0.0",
@@ -1077,6 +1079,7 @@ dependencies = [
"frame-support 28.0.0",
"frame-system 28.0.0",
"hex-literal",
+ "pallet-asset-conversion 10.0.0",
"pallet-assets 29.1.0",
"pallet-balances 28.0.0",
"pallet-collator-selection 9.0.0",
@@ -1094,6 +1097,7 @@ dependencies = [
"staging-xcm-builder 7.0.0",
"staging-xcm-executor 7.0.0",
"substrate-wasm-builder 17.0.0",
+ "xcm-runtime-apis 0.1.0",
]
[[package]]
@@ -1291,7 +1295,7 @@ dependencies = [
"futures-lite 2.3.0",
"parking",
"polling 3.4.0",
- "rustix 0.38.25",
+ "rustix 0.38.21",
"slab",
"tracing",
"windows-sys 0.52.0",
@@ -1373,7 +1377,7 @@ dependencies = [
"cfg-if",
"event-listener 5.3.1",
"futures-lite 2.3.0",
- "rustix 0.38.25",
+ "rustix 0.38.21",
"tracing",
]
@@ -1389,7 +1393,7 @@ dependencies = [
"cfg-if",
"futures-core",
"futures-io",
- "rustix 0.38.25",
+ "rustix 0.38.21",
"signal-hook-registry",
"slab",
"windows-sys 0.52.0",
@@ -1934,6 +1938,8 @@ dependencies = [
"frame-support 28.0.0",
"parity-scale-codec",
"scale-info",
+ "sp-core 28.0.0",
+ "staging-xcm 7.0.0",
]
[[package]]
@@ -1944,6 +1950,8 @@ dependencies = [
"frame-support 28.0.0",
"parity-scale-codec",
"scale-info",
+ "sp-core 28.0.0",
+ "staging-xcm 7.0.0",
]
[[package]]
@@ -2537,6 +2545,7 @@ dependencies = [
"bp-rococo",
"bp-runtime 0.7.0",
"bp-westend",
+ "bp-xcm-bridge-hub-router 0.6.0",
"bridge-hub-common 0.1.0",
"bridge-hub-test-utils 0.7.0",
"bridge-runtime-common 0.7.0",
@@ -2578,6 +2587,7 @@ dependencies = [
"pallet-xcm-benchmarks 7.0.0",
"pallet-xcm-bridge-hub 0.2.0",
"parachains-common 7.0.0",
+ "parachains-runtimes-test-utils 7.0.0",
"parity-scale-codec",
"polkadot-parachain-primitives 6.0.0",
"polkadot-runtime-common 7.0.0",
@@ -2774,6 +2784,7 @@ dependencies = [
"bp-rococo",
"bp-runtime 0.7.0",
"bp-westend",
+ "bp-xcm-bridge-hub-router 0.6.0",
"bridge-hub-common 0.1.0",
"bridge-hub-test-utils 0.7.0",
"bridge-runtime-common 0.7.0",
@@ -2815,6 +2826,7 @@ dependencies = [
"pallet-xcm-benchmarks 7.0.0",
"pallet-xcm-bridge-hub 0.2.0",
"parachains-common 7.0.0",
+ "parachains-runtimes-test-utils 7.0.0",
"parity-scale-codec",
"polkadot-parachain-primitives 6.0.0",
"polkadot-runtime-common 7.0.0",
@@ -3159,6 +3171,7 @@ dependencies = [
name = "chain-spec-guide-runtime"
version = "0.0.0"
dependencies = [
+ "cmd_lib",
"docify",
"frame-support 28.0.0",
"pallet-balances 28.0.0",
@@ -3550,6 +3563,7 @@ dependencies = [
"pallet-utility 28.0.0",
"pallet-xcm 7.0.0",
"parachains-common 7.0.0",
+ "parachains-runtimes-test-utils 7.0.0",
"parity-scale-codec",
"polkadot-parachain-primitives 6.0.0",
"polkadot-runtime-common 7.0.0",
@@ -3991,6 +4005,7 @@ dependencies = [
"pallet-xcm 7.0.0",
"pallet-xcm-benchmarks 7.0.0",
"parachains-common 7.0.0",
+ "parachains-runtimes-test-utils 7.0.0",
"parity-scale-codec",
"polkadot-parachain-primitives 6.0.0",
"polkadot-runtime-common 7.0.0",
@@ -4090,6 +4105,7 @@ dependencies = [
"pallet-xcm 7.0.0",
"pallet-xcm-benchmarks 7.0.0",
"parachains-common 7.0.0",
+ "parachains-runtimes-test-utils 7.0.0",
"parity-scale-codec",
"polkadot-parachain-primitives 6.0.0",
"polkadot-runtime-common 7.0.0",
@@ -4680,6 +4696,7 @@ dependencies = [
"cumulus-relay-chain-interface",
"cumulus-relay-chain-minimal-node",
"futures",
+ "futures-timer",
"polkadot-primitives 7.0.0",
"sc-client-api",
"sc-consensus",
@@ -5965,6 +5982,15 @@ dependencies = [
"dirs-sys-next",
]
+[[package]]
+name = "dirs"
+version = "5.0.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "44c45a9d03d6676652bcb5e724c7e988de1acad23a711b5217ab9cbecbec2225"
+dependencies = [
+ "dirs-sys",
+]
+
[[package]]
name = "dirs-sys"
version = "0.4.1"
@@ -7214,6 +7240,18 @@ dependencies = [
"serde",
]
+[[package]]
+name = "frame-metadata"
+version = "18.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "daaf440c68eb2c3d88e5760fe8c7af3f9fee9181fab6c2f2c4e7cc48dcc40bb8"
+dependencies = [
+ "cfg-if",
+ "parity-scale-codec",
+ "scale-info",
+ "serde",
+]
+
[[package]]
name = "frame-metadata-hash-extension"
version = "0.1.0"
@@ -7221,7 +7259,7 @@ dependencies = [
"array-bytes",
"const-hex",
"docify",
- "frame-metadata 16.0.0",
+ "frame-metadata 18.0.0",
"frame-support 28.0.0",
"frame-system 28.0.0",
"log",
@@ -7306,7 +7344,7 @@ dependencies = [
"bitflags 1.3.2",
"docify",
"environmental",
- "frame-metadata 16.0.0",
+ "frame-metadata 18.0.0",
"frame-support-procedural 23.0.0",
"frame-system 28.0.0",
"impl-trait-for-tuples",
@@ -7484,7 +7522,7 @@ version = "3.0.0"
dependencies = [
"frame-benchmarking 28.0.0",
"frame-executive 28.0.0",
- "frame-metadata 16.0.0",
+ "frame-metadata 18.0.0",
"frame-support 28.0.0",
"frame-support-test-pallet",
"frame-system 28.0.0",
@@ -7677,7 +7715,7 @@ version = "0.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "29f9df8a11882c4e3335eb2d18a0137c505d9ca927470b0cac9c6f0ae07d28f7"
dependencies = [
- "rustix 0.38.25",
+ "rustix 0.38.21",
"windows-sys 0.48.0",
]
@@ -8529,7 +8567,7 @@ dependencies = [
"hyper 1.3.1",
"hyper-util",
"log",
- "rustls 0.23.14",
+ "rustls 0.23.18",
"rustls-native-certs 0.8.0",
"rustls-pki-types",
"tokio",
@@ -8935,7 +8973,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cb0889898416213fab133e1d33a0e5858a48177452750691bde3666d0fdbaf8b"
dependencies = [
"hermit-abi 0.3.9",
- "rustix 0.38.25",
+ "rustix 0.38.21",
"windows-sys 0.48.0",
]
@@ -9144,7 +9182,7 @@ dependencies = [
"http 1.1.0",
"jsonrpsee-core",
"pin-project",
- "rustls 0.23.14",
+ "rustls 0.23.18",
"rustls-pki-types",
"rustls-platform-verifier",
"soketto 0.8.0",
@@ -9197,7 +9235,7 @@ dependencies = [
"hyper-util",
"jsonrpsee-core",
"jsonrpsee-types",
- "rustls 0.23.14",
+ "rustls 0.23.18",
"rustls-platform-verifier",
"serde",
"serde_json",
@@ -10158,9 +10196,9 @@ checksum = "ef53942eb7bf7ff43a617b3e2c1c4a5ecf5944a7c1bc12d7ee39bbb15e5c1519"
[[package]]
name = "linux-raw-sys"
-version = "0.4.11"
+version = "0.4.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "969488b55f8ac402214f3f5fd243ebb7206cf82de60d3172994707a4bcc2b829"
+checksum = "da2479e8c062e40bf0066ffa0bc823de0a9368974af99c9f6df941d2c231e03f"
[[package]]
name = "lioness"
@@ -10194,9 +10232,9 @@ dependencies = [
[[package]]
name = "litep2p"
-version = "0.8.1"
+version = "0.8.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5b67484b8ac41e1cfdf012f65fa81e88c2ef5f8a7d6dec0e2678c2d06dc04530"
+checksum = "14e490b5a6d486711fd0284bd30e607a287343f2935a59a9192bd7109e85f443"
dependencies = [
"async-trait",
"bs58",
@@ -10510,13 +10548,13 @@ dependencies = [
[[package]]
name = "merkleized-metadata"
-version = "0.1.0"
+version = "0.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f313fcff1d2a4bcaa2deeaa00bf7530d77d5f7bd0467a117dde2e29a75a7a17a"
+checksum = "943f6d92804ed0100803d51fa9b21fd9432b5d122ba4c713dc26fe6d2f619cf6"
dependencies = [
"array-bytes",
"blake3",
- "frame-metadata 16.0.0",
+ "frame-metadata 18.0.0",
"parity-scale-codec",
"scale-decode 0.13.1",
"scale-info",
@@ -14610,11 +14648,8 @@ dependencies = [
"hex",
"hex-literal",
"impl-trait-for-tuples",
- "jsonrpsee",
"log",
- "pallet-assets 29.1.0",
"pallet-balances 28.0.0",
- "pallet-message-queue 31.0.0",
"pallet-proxy 28.0.0",
"pallet-revive-fixtures 0.1.0",
"pallet-revive-proc-macro 0.1.0",
@@ -14624,7 +14659,7 @@ dependencies = [
"pallet-utility 28.0.0",
"parity-scale-codec",
"paste",
- "polkavm 0.13.0",
+ "polkavm 0.17.0",
"pretty_assertions",
"rlp 0.6.1",
"scale-info",
@@ -14720,12 +14755,10 @@ dependencies = [
"anyhow",
"frame-system 28.0.0",
"log",
- "parity-wasm",
- "polkavm-linker 0.14.0",
+ "polkavm-linker 0.17.1",
"sp-core 28.0.0",
"sp-io 30.0.0",
"sp-runtime 31.0.1",
- "tempfile",
"toml 0.8.12",
]
@@ -14842,7 +14875,7 @@ dependencies = [
"bitflags 1.3.2",
"parity-scale-codec",
"paste",
- "polkavm-derive 0.14.0",
+ "polkavm-derive 0.17.0",
"scale-info",
]
@@ -15956,6 +15989,7 @@ dependencies = [
"bp-messages 0.7.0",
"bp-runtime 0.7.0",
"bp-xcm-bridge-hub 0.2.0",
+ "bp-xcm-bridge-hub-router 0.6.0",
"frame-support 28.0.0",
"frame-system 28.0.0",
"log",
@@ -16164,6 +16198,7 @@ dependencies = [
"pallet-session 28.0.0",
"pallet-timestamp 27.0.0",
"pallet-xcm 7.0.0",
+ "parachains-common 7.0.0",
"parity-scale-codec",
"polkadot-parachain-primitives 6.0.0",
"sp-consensus-aura 0.32.0",
@@ -16175,6 +16210,7 @@ dependencies = [
"staging-xcm 7.0.0",
"staging-xcm-executor 7.0.0",
"substrate-wasm-builder 17.0.0",
+ "xcm-runtime-apis 0.1.0",
]
[[package]]
@@ -16573,6 +16609,7 @@ dependencies = [
"pallet-xcm 7.0.0",
"pallet-xcm-benchmarks 7.0.0",
"parachains-common 7.0.0",
+ "parachains-runtimes-test-utils 7.0.0",
"parity-scale-codec",
"polkadot-parachain-primitives 6.0.0",
"polkadot-runtime-common 7.0.0",
@@ -16630,6 +16667,7 @@ dependencies = [
"sp-runtime 31.0.1",
"staging-xcm 7.0.0",
"staging-xcm-executor 7.0.0",
+ "westend-runtime",
"westend-runtime-constants 7.0.0",
"westend-system-emulated-network",
]
@@ -16674,6 +16712,7 @@ dependencies = [
"pallet-xcm 7.0.0",
"pallet-xcm-benchmarks 7.0.0",
"parachains-common 7.0.0",
+ "parachains-runtimes-test-utils 7.0.0",
"parity-scale-codec",
"polkadot-parachain-primitives 6.0.0",
"polkadot-runtime-common 7.0.0",
@@ -17553,6 +17592,7 @@ dependencies = [
"rococo-runtime",
"rusty-fork",
"sc-sysinfo",
+ "sc-tracing",
"slotmap",
"sp-core 28.0.0",
"sp-maybe-compressed-blob 11.0.0",
@@ -18541,7 +18581,6 @@ dependencies = [
"pallet-remark 28.0.0",
"pallet-revive 0.1.0",
"pallet-revive-eth-rpc",
- "pallet-revive-fixtures 0.1.0",
"pallet-revive-mock-network 0.1.0",
"pallet-revive-proc-macro 0.1.0",
"pallet-revive-uapi 0.1.0",
@@ -19016,6 +19055,7 @@ version = "0.0.1"
dependencies = [
"assert_cmd",
"chain-spec-guide-runtime",
+ "cmd_lib",
"cumulus-client-service",
"cumulus-pallet-aura-ext 0.7.0",
"cumulus-pallet-parachain-system 0.7.0",
@@ -19672,15 +19712,15 @@ dependencies = [
[[package]]
name = "polkavm"
-version = "0.13.0"
+version = "0.17.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "57e79a14b15ed38cb5b9a1e38d02e933f19e3d180ae5b325fed606c5e5b9177e"
+checksum = "84979be196ba2855f73616413e7b1d18258128aa396b3dc23f520a00a807720e"
dependencies = [
"libc",
"log",
- "polkavm-assembler 0.13.0",
- "polkavm-common 0.13.0",
- "polkavm-linux-raw 0.13.0",
+ "polkavm-assembler 0.17.0",
+ "polkavm-common 0.17.0",
+ "polkavm-linux-raw 0.17.0",
]
[[package]]
@@ -19703,9 +19743,9 @@ dependencies = [
[[package]]
name = "polkavm-assembler"
-version = "0.13.0"
+version = "0.17.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4e8da55465000feb0a61bbf556ed03024db58f3420eca37721fc726b3b2136bf"
+checksum = "0ba7b434ff630b0f73a1560e8baea807246ca22098abe49f97821e0e2d2accc4"
dependencies = [
"log",
]
@@ -19737,20 +19777,14 @@ dependencies = [
[[package]]
name = "polkavm-common"
-version = "0.13.0"
+version = "0.17.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "084b4339aae7dfdaaa5aa7d634110afd95970e0737b6fb2a0cb10db8b56b753c"
+checksum = "8f0dbafef4ab6ceecb4982ac3b550df430ef4f9fdbf07c108b7d4f91a0682fce"
dependencies = [
"log",
- "polkavm-assembler 0.13.0",
+ "polkavm-assembler 0.17.0",
]
-[[package]]
-name = "polkavm-common"
-version = "0.14.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "711952a783e9c5ad407cdacb1ed147f36d37c5d43417c1091d86456d2999417b"
-
[[package]]
name = "polkavm-derive"
version = "0.8.0"
@@ -19780,11 +19814,11 @@ dependencies = [
[[package]]
name = "polkavm-derive"
-version = "0.14.0"
+version = "0.17.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b4832a0aebf6cefc988bb7b2d74ea8c86c983164672e2fc96300f356a1babfc1"
+checksum = "c0c3dbb6c8c7bd3e5f5b05aa7fc9355acf14df7ce5d392911e77d01090a38d0d"
dependencies = [
- "polkavm-derive-impl-macro 0.14.0",
+ "polkavm-derive-impl-macro 0.17.0",
]
[[package]]
@@ -19825,11 +19859,11 @@ dependencies = [
[[package]]
name = "polkavm-derive-impl"
-version = "0.14.0"
+version = "0.17.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e339fc7c11310fe5adf711d9342278ac44a75c9784947937cce12bd4f30842f2"
+checksum = "42565aed4adbc4034612d0b17dea8db3681fb1bd1aed040d6edc5455a9f478a1"
dependencies = [
- "polkavm-common 0.14.0",
+ "polkavm-common 0.17.0",
"proc-macro2 1.0.86",
"quote 1.0.37",
"syn 2.0.87",
@@ -19867,11 +19901,11 @@ dependencies = [
[[package]]
name = "polkavm-derive-impl-macro"
-version = "0.14.0"
+version = "0.17.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b569754b15060d03000c09e3bf11509d527f60b75d79b4c30c3625b5071d9702"
+checksum = "86d9838e95241b0bce4fe269cdd4af96464160505840ed5a8ac8536119ba19e2"
dependencies = [
- "polkavm-derive-impl 0.14.0",
+ "polkavm-derive-impl 0.17.0",
"syn 2.0.87",
]
@@ -19907,15 +19941,16 @@ dependencies = [
[[package]]
name = "polkavm-linker"
-version = "0.14.0"
+version = "0.17.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0959ac3b0f4fd5caf5c245c637705f19493efe83dba31a83bbba928b93b0116a"
+checksum = "0422ead3030d5cde69e2206dbc7d65da872b121876507cd5363f6c6e6aa45157"
dependencies = [
+ "dirs",
"gimli 0.31.1",
"hashbrown 0.14.5",
"log",
"object 0.36.1",
- "polkavm-common 0.14.0",
+ "polkavm-common 0.17.0",
"regalloc2 0.9.3",
"rustc-demangle",
]
@@ -19934,9 +19969,9 @@ checksum = "26e45fa59c7e1bb12ef5289080601e9ec9b31435f6e32800a5c90c132453d126"
[[package]]
name = "polkavm-linux-raw"
-version = "0.13.0"
+version = "0.17.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "686c4dd9c9c16cc22565b51bdbb269792318d0fd2e6b966b5f6c788534cad0e9"
+checksum = "e64c3d93a58ffbc3099d1227f0da9675a025a9ea6c917038f266920c1de1e568"
[[package]]
name = "polling"
@@ -19963,7 +19998,7 @@ dependencies = [
"cfg-if",
"concurrent-queue",
"pin-project-lite",
- "rustix 0.38.25",
+ "rustix 0.38.21",
"tracing",
"windows-sys 0.52.0",
]
@@ -20282,7 +20317,7 @@ dependencies = [
"hex",
"lazy_static",
"procfs-core",
- "rustix 0.38.25",
+ "rustix 0.38.21",
]
[[package]]
@@ -20607,7 +20642,7 @@ dependencies = [
"quinn-proto 0.11.8",
"quinn-udp 0.5.4",
"rustc-hash 2.0.0",
- "rustls 0.23.14",
+ "rustls 0.23.18",
"socket2 0.5.7",
"thiserror",
"tokio",
@@ -20641,7 +20676,7 @@ dependencies = [
"rand",
"ring 0.17.7",
"rustc-hash 2.0.0",
- "rustls 0.23.14",
+ "rustls 0.23.18",
"slab",
"thiserror",
"tinyvec",
@@ -20932,7 +20967,7 @@ checksum = "b544ef1b4eac5dc2db33ea63606ae9ffcfac26c1416a2806ae0bf5f56b201191"
dependencies = [
"aho-corasick",
"memchr",
- "regex-automata 0.4.9",
+ "regex-automata 0.4.8",
"regex-syntax 0.8.5",
]
@@ -20953,9 +20988,9 @@ checksum = "fed1ceff11a1dddaee50c9dc8e4938bd106e9d89ae372f192311e7da498e3b69"
[[package]]
name = "regex-automata"
-version = "0.4.9"
+version = "0.4.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "809e8dc61f6de73b46c85f4c96486310fe304c434cfa43669d7b40f711150908"
+checksum = "368758f23274712b504848e9d5a6f010445cc8b87a7cdb4d7cbee666c1288da3"
dependencies = [
"aho-corasick",
"memchr",
@@ -21128,7 +21163,7 @@ dependencies = [
"percent-encoding",
"pin-project-lite",
"quinn 0.11.5",
- "rustls 0.23.14",
+ "rustls 0.23.18",
"rustls-pemfile 2.0.0",
"rustls-pki-types",
"serde",
@@ -21682,14 +21717,14 @@ dependencies = [
[[package]]
name = "rustix"
-version = "0.38.25"
+version = "0.38.21"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "dc99bc2d4f1fed22595588a013687477aedf3cdcfb26558c559edb67b4d9b22e"
+checksum = "2b426b0506e5d50a7d8dafcf2e81471400deb602392c7dd110815afb4eaf02a3"
dependencies = [
"bitflags 2.6.0",
"errno",
"libc",
- "linux-raw-sys 0.4.11",
+ "linux-raw-sys 0.4.10",
"windows-sys 0.48.0",
]
@@ -21732,9 +21767,9 @@ dependencies = [
[[package]]
name = "rustls"
-version = "0.23.14"
+version = "0.23.18"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "415d9944693cb90382053259f89fbb077ea730ad7273047ec63b19bc9b160ba8"
+checksum = "9c9cc1d47e243d655ace55ed38201c19ae02c148ae56412ab8750e8f0166ab7f"
dependencies = [
"log",
"once_cell",
@@ -21804,9 +21839,9 @@ dependencies = [
[[package]]
name = "rustls-pki-types"
-version = "1.9.0"
+version = "1.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0e696e35370c65c9c541198af4543ccd580cf17fc25d8e05c5a242b202488c55"
+checksum = "16f1201b3c9a7ee8039bcadc17b7e605e2945b27eee7631788c1bd2b0643674b"
[[package]]
name = "rustls-platform-verifier"
@@ -21819,7 +21854,7 @@ dependencies = [
"jni",
"log",
"once_cell",
- "rustls 0.23.14",
+ "rustls 0.23.18",
"rustls-native-certs 0.7.0",
"rustls-platform-verifier-android",
"rustls-webpki 0.102.8",
@@ -23126,7 +23161,7 @@ dependencies = [
"parity-scale-codec",
"parking_lot 0.12.3",
"rand",
- "rustls 0.23.14",
+ "rustls 0.23.18",
"sc-block-builder",
"sc-client-api",
"sc-client-db",
@@ -23250,6 +23285,7 @@ dependencies = [
"futures",
"futures-util",
"hex",
+ "itertools 0.11.0",
"jsonrpsee",
"log",
"parity-scale-codec",
@@ -23685,9 +23721,9 @@ dependencies = [
[[package]]
name = "scale-info"
-version = "2.11.5"
+version = "2.11.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1aa7ffc1c0ef49b0452c6e2986abf2b07743320641ffd5fc63d552458e3b779b"
+checksum = "346a3b32eba2640d17a9cb5927056b08f3de90f65b72fe09402c2ad07d684d0b"
dependencies = [
"bitvec",
"cfg-if",
@@ -23699,9 +23735,9 @@ dependencies = [
[[package]]
name = "scale-info-derive"
-version = "2.11.5"
+version = "2.11.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "46385cc24172cf615450267463f937c10072516359b3ff1cb24228a4a08bf951"
+checksum = "c6630024bf739e2179b91fb424b28898baf819414262c5d376677dbff1fe7ebf"
dependencies = [
"proc-macro-crate 3.1.0",
"proc-macro2 1.0.86",
@@ -25518,6 +25554,7 @@ dependencies = [
"sp-api 26.0.0",
"sp-consensus",
"sp-core 28.0.0",
+ "sp-metadata-ir 0.6.0",
"sp-runtime 31.0.1",
"sp-state-machine 0.35.0",
"sp-tracing 16.0.0",
@@ -26204,7 +26241,7 @@ dependencies = [
[[package]]
name = "sp-crypto-ec-utils"
version = "0.4.1"
-source = "git+https://github.com/paritytech/polkadot-sdk#838a534da874cf6071fba1df07643c6c5b033ae0"
+source = "git+https://github.com/paritytech/polkadot-sdk#82912acb33a9030c0ef3bf590a34fca09b72dc5f"
dependencies = [
"ark-bls12-377",
"ark-bls12-377-ext",
@@ -26463,7 +26500,7 @@ dependencies = [
"libsecp256k1",
"log",
"parity-scale-codec",
- "polkavm-derive 0.9.1",
+ "polkavm-derive 0.17.0",
"rustversion",
"secp256k1 0.28.2",
"sp-core 28.0.0",
@@ -26649,7 +26686,7 @@ dependencies = [
name = "sp-metadata-ir"
version = "0.6.0"
dependencies = [
- "frame-metadata 16.0.0",
+ "frame-metadata 18.0.0",
"parity-scale-codec",
"scale-info",
]
@@ -26947,7 +26984,7 @@ dependencies = [
"bytes",
"impl-trait-for-tuples",
"parity-scale-codec",
- "polkavm-derive 0.9.1",
+ "polkavm-derive 0.17.0",
"primitive-types 0.13.1",
"rustversion",
"sp-core 28.0.0",
@@ -27027,7 +27064,7 @@ dependencies = [
[[package]]
name = "sp-runtime-interface-proc-macro"
version = "11.0.0"
-source = "git+https://github.com/paritytech/polkadot-sdk#838a534da874cf6071fba1df07643c6c5b033ae0"
+source = "git+https://github.com/paritytech/polkadot-sdk#82912acb33a9030c0ef3bf590a34fca09b72dc5f"
dependencies = [
"Inflector",
"proc-macro-crate 1.3.1",
@@ -28586,12 +28623,12 @@ dependencies = [
"cargo_metadata",
"console",
"filetime",
- "frame-metadata 16.0.0",
+ "frame-metadata 18.0.0",
"jobserver",
"merkleized-metadata",
"parity-scale-codec",
"parity-wasm",
- "polkavm-linker 0.9.2",
+ "polkavm-linker 0.17.1",
"sc-executor 0.32.0",
"shlex",
"sp-core 28.0.0",
@@ -29075,7 +29112,7 @@ dependencies = [
"cfg-if",
"fastrand 2.1.0",
"redox_syscall 0.4.1",
- "rustix 0.38.25",
+ "rustix 0.38.21",
"windows-sys 0.48.0",
]
@@ -29105,7 +29142,7 @@ version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "21bebf2b7c9e0a515f6e0f8c51dc0f8e4696391e6f1ff30379559f8365fb0df7"
dependencies = [
- "rustix 0.38.25",
+ "rustix 0.38.21",
"windows-sys 0.48.0",
]
@@ -29527,7 +29564,7 @@ version = "0.26.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0c7bc40d0e5a97695bb96e27995cd3a08538541b0a846f65bba7a359f36700d4"
dependencies = [
- "rustls 0.23.14",
+ "rustls 0.23.18",
"rustls-pki-types",
"tokio",
]
@@ -30238,7 +30275,7 @@ dependencies = [
"flate2",
"log",
"once_cell",
- "rustls 0.23.14",
+ "rustls 0.23.18",
"rustls-pki-types",
"serde",
"serde_json",
@@ -31643,6 +31680,7 @@ name = "xcm-procedural"
version = "7.0.0"
dependencies = [
"Inflector",
+ "frame-support 28.0.0",
"proc-macro2 1.0.86",
"quote 1.0.37",
"staging-xcm 7.0.0",
@@ -31908,9 +31946,9 @@ dependencies = [
[[package]]
name = "zombienet-configuration"
-version = "0.2.15"
+version = "0.2.16"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7d7a8cc4f8e8bb3f40757b62d3b054da5c95f43321c775eb321edc89d431583e"
+checksum = "8ad4fc5b0f1aa54de6bf2d6771c449b41cad47e1cf30559af0a71452686b47ab"
dependencies = [
"anyhow",
"lazy_static",
@@ -31928,9 +31966,9 @@ dependencies = [
[[package]]
name = "zombienet-orchestrator"
-version = "0.2.15"
+version = "0.2.16"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3d32fa87851f41443a78971bd7110274f9a66d139ac834de159adc08f90cf8e3"
+checksum = "e4a7dd25842ded75c7f4dc4f38f05fef567bd0b37fd3057c223d4ee34d8fa817"
dependencies = [
"anyhow",
"async-trait",
@@ -31961,9 +31999,9 @@ dependencies = [
[[package]]
name = "zombienet-prom-metrics-parser"
-version = "0.2.15"
+version = "0.2.16"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9acb9c94bc7c2c83f8eb8e26ed403f757af1632f22b89394d8876412ede990ca"
+checksum = "a63e0c6024dd19b0f8b28afa94f78c211e5c163350ecda4a48084532d74d7cfe"
dependencies = [
"pest",
"pest_derive",
@@ -31972,9 +32010,9 @@ dependencies = [
[[package]]
name = "zombienet-provider"
-version = "0.2.15"
+version = "0.2.16"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "dc8f3f71d4d974fc4a2262fa9293c2eedc423540378bd7c1dc1b66cc95d1d1af"
+checksum = "8d87c29390a342d0f4f62b6796861fb82e0e56c49929a272b689e8dbf24eaab9"
dependencies = [
"anyhow",
"async-trait",
@@ -32003,9 +32041,9 @@ dependencies = [
[[package]]
name = "zombienet-sdk"
-version = "0.2.15"
+version = "0.2.16"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5dbfddce7a6100cdc930b93301f1b6381e6577ecc013d6802258ea6902a2bebd"
+checksum = "829e5111182caf00ba57cd63656cf0bde6ce6add7f6a9747d15821c202a3f27e"
dependencies = [
"async-trait",
"futures",
@@ -32020,9 +32058,9 @@ dependencies = [
[[package]]
name = "zombienet-support"
-version = "0.2.15"
+version = "0.2.16"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d20567c52b4fd46b600cda254dedb6a6dc30cabf512de91e4f6f78f0f7f4644b"
+checksum = "99568384a1d9645458ab9de377b3517cb543a1ece5aba905aeb58d269139df4e"
dependencies = [
"anyhow",
"async-trait",
diff --git a/Cargo.toml b/Cargo.toml
index 533ea4c9e878..383fc46c4e76 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -779,7 +779,7 @@ frame-benchmarking-pallet-pov = { default-features = false, path = "substrate/fr
frame-election-provider-solution-type = { path = "substrate/frame/election-provider-support/solution-type", default-features = false }
frame-election-provider-support = { path = "substrate/frame/election-provider-support", default-features = false }
frame-executive = { path = "substrate/frame/executive", default-features = false }
-frame-metadata = { version = "16.0.0", default-features = false }
+frame-metadata = { version = "18.0.0", default-features = false }
frame-metadata-hash-extension = { path = "substrate/frame/metadata-hash-extension", default-features = false }
frame-support = { path = "substrate/frame/support", default-features = false }
frame-support-procedural = { path = "substrate/frame/support/procedural", default-features = false }
@@ -848,13 +848,13 @@ linked-hash-map = { version = "0.5.4" }
linked_hash_set = { version = "0.1.4" }
linregress = { version = "0.5.1" }
lite-json = { version = "0.2.0", default-features = false }
-litep2p = { version = "0.8.1", features = ["websocket"] }
+litep2p = { version = "0.8.3", features = ["websocket"] }
log = { version = "0.4.22", default-features = false }
macro_magic = { version = "0.5.1" }
maplit = { version = "1.0.2" }
memmap2 = { version = "0.9.3" }
memory-db = { version = "0.32.0", default-features = false }
-merkleized-metadata = { version = "0.1.0" }
+merkleized-metadata = { version = "0.1.2" }
merlin = { version = "3.0", default-features = false }
messages-relay = { path = "bridges/relays/messages" }
metered = { version = "0.6.1", default-features = false, package = "prioritized-metered-channel" }
@@ -1090,8 +1090,8 @@ polkadot-test-client = { path = "polkadot/node/test/client" }
polkadot-test-runtime = { path = "polkadot/runtime/test-runtime" }
polkadot-test-service = { path = "polkadot/node/test/service" }
polkavm = { version = "0.9.3", default-features = false }
-polkavm-derive = "0.9.1"
-polkavm-linker = "0.9.2"
+polkavm-derive = "0.17.0"
+polkavm-linker = "0.17.1"
portpicker = { version = "0.1.1" }
pretty_assertions = { version = "1.3.0" }
primitive-types = { version = "0.13.1", default-features = false, features = [
@@ -1136,7 +1136,7 @@ rstest = { version = "0.18.2" }
rustc-hash = { version = "1.1.0" }
rustc-hex = { version = "2.1.0", default-features = false }
rustix = { version = "0.36.7", default-features = false }
-rustls = { version = "0.23.14", default-features = false, features = ["logging", "ring", "std", "tls12"] }
+rustls = { version = "0.23.18", default-features = false, features = ["logging", "ring", "std", "tls12"] }
rustversion = { version = "1.0.17" }
rusty-fork = { version = "0.3.0", default-features = false }
safe-mix = { version = "1.0", default-features = false }
@@ -1197,7 +1197,7 @@ sc-tracing-proc-macro = { path = "substrate/client/tracing/proc-macro", default-
sc-transaction-pool = { path = "substrate/client/transaction-pool", default-features = false }
sc-transaction-pool-api = { path = "substrate/client/transaction-pool/api", default-features = false }
sc-utils = { path = "substrate/client/utils", default-features = false }
-scale-info = { version = "2.11.1", default-features = false }
+scale-info = { version = "2.11.6", default-features = false }
schemars = { version = "0.8.13", default-features = false }
schnellru = { version = "0.2.3" }
schnorrkel = { version = "0.11.4", default-features = false }
@@ -1387,7 +1387,7 @@ xcm-procedural = { path = "polkadot/xcm/procedural", default-features = false }
xcm-runtime-apis = { path = "polkadot/xcm/xcm-runtime-apis", default-features = false }
xcm-simulator = { path = "polkadot/xcm/xcm-simulator", default-features = false }
zeroize = { version = "1.7.0", default-features = false }
-zombienet-sdk = { version = "0.2.15" }
+zombienet-sdk = { version = "0.2.16" }
zstd = { version = "0.12.4", default-features = false }
[profile.release]
diff --git a/bridges/bin/runtime-common/src/integrity.rs b/bridges/bin/runtime-common/src/integrity.rs
index 2ff6c4c9165a..535f1a26e5e8 100644
--- a/bridges/bin/runtime-common/src/integrity.rs
+++ b/bridges/bin/runtime-common/src/integrity.rs
@@ -89,13 +89,11 @@ macro_rules! assert_bridge_messages_pallet_types(
/// Macro that combines three other macro calls - `assert_chain_types`, `assert_bridge_types`,
/// and `assert_bridge_messages_pallet_types`. It may be used
-/// at the chain that is implementing complete standard messages bridge (i.e. with bridge GRANDPA
-/// and messages pallets deployed).
+/// at the chain that implements the standard messages bridge (i.e. with the messages pallet deployed).
#[macro_export]
macro_rules! assert_complete_bridge_types(
(
runtime: $r:path,
- with_bridged_chain_grandpa_instance: $gi:path,
with_bridged_chain_messages_instance: $mi:path,
this_chain: $this:path,
bridged_chain: $bridged:path,
@@ -186,34 +184,55 @@ where
);
}
-/// Parameters for asserting bridge pallet names.
+/// Parameters for asserting bridge GRANDPA pallet names.
#[derive(Debug)]
-pub struct AssertBridgePalletNames<'a> {
+struct AssertBridgeGrandpaPalletNames<'a> {
/// Name of the GRANDPA pallet, deployed at this chain and used to bridge with the bridged
/// chain.
pub with_bridged_chain_grandpa_pallet_name: &'a str,
- /// Name of the messages pallet, deployed at this chain and used to bridge with the bridged
- /// chain.
- pub with_bridged_chain_messages_pallet_name: &'a str,
}
/// Tests that bridge pallet names used in `construct_runtime!()` macro call are matching constants
/// from chain primitives crates.
-fn assert_bridge_pallet_names<R, GI, MI>(params: AssertBridgePalletNames)
+fn assert_bridge_grandpa_pallet_names<R, GI>(params: AssertBridgeGrandpaPalletNames)
where
- R: pallet_bridge_grandpa::Config<GI> + pallet_bridge_messages::Config<MI>,
+ R: pallet_bridge_grandpa::Config<GI>,
GI: 'static,
- MI: 'static,
{
// check that the bridge GRANDPA pallet has required name
assert_eq!(
- pallet_bridge_grandpa::PalletOwner::<R, GI>::storage_value_final_key().to_vec(),
+ pallet_bridge_grandpa::PalletOwner::<R, GI>::storage_value_final_key().to_vec(),
+ bp_runtime::storage_value_key(
+ params.with_bridged_chain_grandpa_pallet_name,
+ "PalletOwner",
+ )
+ .0,
+ );
+ assert_eq!(
+ pallet_bridge_grandpa::PalletOperatingMode::<R, GI>::storage_value_final_key().to_vec(),
bp_runtime::storage_value_key(
params.with_bridged_chain_grandpa_pallet_name,
- "PalletOwner",
- ).0,
+ "PalletOperatingMode",
+ )
+ .0,
);
+}
+/// Parameters for asserting bridge messages pallet names.
+#[derive(Debug)]
+struct AssertBridgeMessagesPalletNames<'a> {
+ /// Name of the messages pallet, deployed at this chain and used to bridge with the bridged
+ /// chain.
+ pub with_bridged_chain_messages_pallet_name: &'a str,
+}
+
+/// Tests that bridge pallet names used in `construct_runtime!()` macro call are matching constants
+/// from chain primitives crates.
+fn assert_bridge_messages_pallet_names<R, MI>(params: AssertBridgeMessagesPalletNames)
+where
+ R: pallet_bridge_messages::Config<MI>,
+ MI: 'static,
+{
// check that the bridge messages pallet has required name
assert_eq!(
pallet_bridge_messages::PalletOwner::<R, MI>::storage_value_final_key().to_vec(),
@@ -223,6 +242,14 @@ where
)
.0,
);
+ assert_eq!(
+ pallet_bridge_messages::PalletOperatingMode::<R, MI>::storage_value_final_key().to_vec(),
+ bp_runtime::storage_value_key(
+ params.with_bridged_chain_messages_pallet_name,
+ "PalletOperatingMode",
+ )
+ .0,
+ );
}
/// Parameters for asserting complete standard messages bridge.
@@ -246,9 +273,11 @@ pub fn assert_complete_with_relay_chain_bridge_constants<R, GI, MI>(
assert_chain_constants::<R>(params.this_chain_constants);
assert_bridge_grandpa_pallet_constants::<R, GI>();
assert_bridge_messages_pallet_constants::<R, MI>();
- assert_bridge_pallet_names::<R, GI, MI>(AssertBridgePalletNames {
+ assert_bridge_grandpa_pallet_names::<R, GI>(AssertBridgeGrandpaPalletNames {
with_bridged_chain_grandpa_pallet_name:
<R as pallet_bridge_grandpa::Config<GI>>::BridgedChain::WITH_CHAIN_GRANDPA_PALLET_NAME,
+ });
+ assert_bridge_messages_pallet_names::<R, MI>(AssertBridgeMessagesPalletNames {
with_bridged_chain_messages_pallet_name:
<R as pallet_bridge_messages::Config<MI>>::BridgedChain::WITH_CHAIN_MESSAGES_PALLET_NAME,
});
@@ -256,21 +285,43 @@ pub fn assert_complete_with_relay_chain_bridge_constants<R, GI, MI>(
/// All bridge-related constants tests for the complete standard parachain messages bridge
/// (i.e. with bridge GRANDPA, parachains and messages pallets deployed).
-pub fn assert_complete_with_parachain_bridge_constants<R, GI, MI, RelayChain>(
+pub fn assert_complete_with_parachain_bridge_constants<R, PI, MI>(
params: AssertCompleteBridgeConstants,
) where
R: frame_system::Config
- + pallet_bridge_grandpa::Config<GI>
+ + pallet_bridge_parachains::Config<PI>
+ pallet_bridge_messages::Config<MI>,
- GI: 'static,
+ <R as pallet_bridge_parachains::Config<PI>>::BridgedRelayChain: ChainWithGrandpa,
+ PI: 'static,
+ MI: 'static,
+{
+ assert_chain_constants::<R>(params.this_chain_constants);
+ assert_bridge_grandpa_pallet_constants::<R, R::BridgesGrandpaPalletInstance>();
+ assert_bridge_messages_pallet_constants::<R, MI>();
+ assert_bridge_grandpa_pallet_names::<R, R::BridgesGrandpaPalletInstance>(
+ AssertBridgeGrandpaPalletNames {
+ with_bridged_chain_grandpa_pallet_name:
+ <<R as pallet_bridge_parachains::Config<PI>>::BridgedRelayChain>::WITH_CHAIN_GRANDPA_PALLET_NAME,
+ },
+ );
+ assert_bridge_messages_pallet_names::<R, MI>(AssertBridgeMessagesPalletNames {
+ with_bridged_chain_messages_pallet_name:
+ <R as pallet_bridge_messages::Config<MI>>::BridgedChain::WITH_CHAIN_MESSAGES_PALLET_NAME,
+ });
+}
+
+/// All bridge-related constants tests for the standalone messages bridge deployment (only with
+/// messages pallets deployed).
+pub fn assert_standalone_messages_bridge_constants<R, MI>(params: AssertCompleteBridgeConstants)
+where
+ R: frame_system::Config + pallet_bridge_messages::Config<MI>,
MI: 'static,
- RelayChain: ChainWithGrandpa,
{
assert_chain_constants::<R>(params.this_chain_constants);
- assert_bridge_grandpa_pallet_constants::<R, GI>();
assert_bridge_messages_pallet_constants::<R, MI>();
- assert_bridge_pallet_names::<R, GI, MI>(AssertBridgePalletNames {
- with_bridged_chain_grandpa_pallet_name: RelayChain::WITH_CHAIN_GRANDPA_PALLET_NAME,
+ assert_bridge_messages_pallet_names::<R, MI>(AssertBridgeMessagesPalletNames {
with_bridged_chain_messages_pallet_name:
<R as pallet_bridge_messages::Config<MI>>::BridgedChain::WITH_CHAIN_MESSAGES_PALLET_NAME,
});
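As a usage sketch for the new standalone helper (not from this diff: `Runtime`, `WithRemoteChainMessagesInstance` and `bp_this_chain` are illustrative placeholders, and the `AssertChainConstants` shape is assumed from the same module):

```rust
// Hypothetical integrity test for a runtime that deploys only the messages
// pallet (no GRANDPA/parachains pallets on this side of the bridge).
#[test]
fn bridge_constants_are_consistent() {
	assert_standalone_messages_bridge_constants::<Runtime, WithRemoteChainMessagesInstance>(
		AssertCompleteBridgeConstants {
			this_chain_constants: AssertChainConstants {
				block_length: bp_this_chain::BlockLength::get(),
				block_weights: bp_this_chain::BlockWeights::get(),
			},
		},
	);
}
```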
diff --git a/bridges/bin/runtime-common/src/mock.rs b/bridges/bin/runtime-common/src/mock.rs
index 6cf04b452da7..88037d9deff5 100644
--- a/bridges/bin/runtime-common/src/mock.rs
+++ b/bridges/bin/runtime-common/src/mock.rs
@@ -196,6 +196,7 @@ impl pallet_bridge_messages::Config for TestRuntime {
type DeliveryConfirmationPayments = pallet_bridge_relayers::DeliveryConfirmationPaymentsAdapter<
TestRuntime,
(),
+ (),
ConstU64<100_000>,
>;
type OnMessagesDelivered = ();
diff --git a/bridges/chains/chain-asset-hub-rococo/Cargo.toml b/bridges/chains/chain-asset-hub-rococo/Cargo.toml
index 363a869048aa..4eb93ab52bc9 100644
--- a/bridges/chains/chain-asset-hub-rococo/Cargo.toml
+++ b/bridges/chains/chain-asset-hub-rococo/Cargo.toml
@@ -19,10 +19,14 @@ scale-info = { features = ["derive"], workspace = true }
# Substrate Dependencies
frame-support = { workspace = true }
+sp-core = { workspace = true }
# Bridge Dependencies
bp-xcm-bridge-hub-router = { workspace = true }
+# Polkadot dependencies
+xcm = { workspace = true }
+
[features]
default = ["std"]
std = [
@@ -30,4 +34,6 @@ std = [
"codec/std",
"frame-support/std",
"scale-info/std",
+ "sp-core/std",
+ "xcm/std",
]
diff --git a/bridges/chains/chain-asset-hub-rococo/src/lib.rs b/bridges/chains/chain-asset-hub-rococo/src/lib.rs
index de2e9ae856d1..4ff7b391acd0 100644
--- a/bridges/chains/chain-asset-hub-rococo/src/lib.rs
+++ b/bridges/chains/chain-asset-hub-rococo/src/lib.rs
@@ -18,10 +18,13 @@
#![cfg_attr(not(feature = "std"), no_std)]
+extern crate alloc;
+
use codec::{Decode, Encode};
use scale_info::TypeInfo;
pub use bp_xcm_bridge_hub_router::XcmBridgeHubRouterCall;
+use xcm::latest::prelude::*;
/// `AssetHubRococo` Runtime `Call` enum.
///
@@ -44,5 +47,27 @@ frame_support::parameter_types! {
pub const XcmBridgeHubRouterTransactCallMaxWeight: frame_support::weights::Weight = frame_support::weights::Weight::from_parts(200_000_000, 6144);
}
+/// Builds an (un)congestion XCM program with the `report_bridge_status` call for
+/// `ToWestendXcmRouter`.
+pub fn build_congestion_message<RuntimeCall>(
+ bridge_id: sp_core::H256,
+ is_congested: bool,
+) -> alloc::vec::Vec<Instruction<RuntimeCall>> {
+ alloc::vec![
+ UnpaidExecution { weight_limit: Unlimited, check_origin: None },
+ Transact {
+ origin_kind: OriginKind::Xcm,
+ fallback_max_weight: Some(XcmBridgeHubRouterTransactCallMaxWeight::get()),
+ call: Call::ToWestendXcmRouter(XcmBridgeHubRouterCall::report_bridge_status {
+ bridge_id,
+ is_congested,
+ })
+ .encode()
+ .into(),
+ },
+ ExpectTransactStatus(MaybeErrorCode::Success),
+ ]
+}
+
/// Identifier of AssetHubRococo in the Rococo relay chain.
pub const ASSET_HUB_ROCOCO_PARACHAIN_ID: u32 = 1000;
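For orientation, a sketch of how the bridge-hub side could deliver this program to Asset Hub; the helper below and its `SendXcm` wiring are illustrative assumptions, not code from this diff:

```rust
use bp_asset_hub_rococo::{build_congestion_message, ASSET_HUB_ROCOCO_PARACHAIN_ID};
use sp_core::H256;
use xcm::latest::prelude::*;

// Illustrative only: wrap the (un)congestion instructions into an XCM and send
// it to AssetHubRococo through any `SendXcm` router available on the bridge hub.
fn report_congestion_to_asset_hub<Router: SendXcm>(
	bridge_id: H256,
	is_congested: bool,
) -> Result<(XcmHash, Assets), SendError> {
	let destination = Location::new(1, [Parachain(ASSET_HUB_ROCOCO_PARACHAIN_ID)]);
	let message = Xcm(build_congestion_message::<()>(bridge_id, is_congested));
	send_xcm::<Router>(destination, message)
}
```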
diff --git a/bridges/chains/chain-asset-hub-westend/Cargo.toml b/bridges/chains/chain-asset-hub-westend/Cargo.toml
index 430d9b6116cf..22071399f4d1 100644
--- a/bridges/chains/chain-asset-hub-westend/Cargo.toml
+++ b/bridges/chains/chain-asset-hub-westend/Cargo.toml
@@ -19,10 +19,14 @@ scale-info = { features = ["derive"], workspace = true }
# Substrate Dependencies
frame-support = { workspace = true }
+sp-core = { workspace = true }
# Bridge Dependencies
bp-xcm-bridge-hub-router = { workspace = true }
+# Polkadot dependencies
+xcm = { workspace = true }
+
[features]
default = ["std"]
std = [
@@ -30,4 +34,6 @@ std = [
"codec/std",
"frame-support/std",
"scale-info/std",
+ "sp-core/std",
+ "xcm/std",
]
diff --git a/bridges/chains/chain-asset-hub-westend/src/lib.rs b/bridges/chains/chain-asset-hub-westend/src/lib.rs
index 9de1c8809894..9d245e08f7cc 100644
--- a/bridges/chains/chain-asset-hub-westend/src/lib.rs
+++ b/bridges/chains/chain-asset-hub-westend/src/lib.rs
@@ -18,10 +18,13 @@
#![cfg_attr(not(feature = "std"), no_std)]
+extern crate alloc;
+
use codec::{Decode, Encode};
use scale_info::TypeInfo;
pub use bp_xcm_bridge_hub_router::XcmBridgeHubRouterCall;
+use xcm::latest::prelude::*;
/// `AssetHubWestend` Runtime `Call` enum.
///
@@ -44,5 +47,27 @@ frame_support::parameter_types! {
pub const XcmBridgeHubRouterTransactCallMaxWeight: frame_support::weights::Weight = frame_support::weights::Weight::from_parts(200_000_000, 6144);
}
+/// Builds an (un)congestion XCM program with the `report_bridge_status` call for
+/// `ToRococoXcmRouter`.
+pub fn build_congestion_message<RuntimeCall>(
+ bridge_id: sp_core::H256,
+ is_congested: bool,
+) -> alloc::vec::Vec<Instruction<RuntimeCall>> {
+ alloc::vec![
+ UnpaidExecution { weight_limit: Unlimited, check_origin: None },
+ Transact {
+ origin_kind: OriginKind::Xcm,
+ fallback_max_weight: Some(XcmBridgeHubRouterTransactCallMaxWeight::get()),
+ call: Call::ToRococoXcmRouter(XcmBridgeHubRouterCall::report_bridge_status {
+ bridge_id,
+ is_congested,
+ })
+ .encode()
+ .into(),
+ },
+ ExpectTransactStatus(MaybeErrorCode::Success),
+ ]
+}
+
/// Identifier of AssetHubWestend in the Westend relay chain.
pub const ASSET_HUB_WESTEND_PARACHAIN_ID: u32 = 1000;
diff --git a/bridges/chains/chain-polkadot-bulletin/src/lib.rs b/bridges/chains/chain-polkadot-bulletin/src/lib.rs
index c5c18beb2cad..070bc7b0ba3d 100644
--- a/bridges/chains/chain-polkadot-bulletin/src/lib.rs
+++ b/bridges/chains/chain-polkadot-bulletin/src/lib.rs
@@ -225,4 +225,4 @@ impl ChainWithMessages for PolkadotBulletin {
}
decl_bridge_finality_runtime_apis!(polkadot_bulletin, grandpa);
-decl_bridge_messages_runtime_apis!(polkadot_bulletin, bp_messages::HashedLaneId);
+decl_bridge_messages_runtime_apis!(polkadot_bulletin, bp_messages::LegacyLaneId);
diff --git a/bridges/modules/relayers/src/extension/mod.rs b/bridges/modules/relayers/src/extension/mod.rs
index 34d280d26d6e..d562ed9bcd0e 100644
--- a/bridges/modules/relayers/src/extension/mod.rs
+++ b/bridges/modules/relayers/src/extension/mod.rs
@@ -129,7 +129,7 @@ pub struct BridgeRelayersTransactionExtension<R, C>(
impl<R, C> BridgeRelayersTransactionExtension<R, C>
where
Self: 'static + Send + Sync,
- R: RelayersConfig
+ R: RelayersConfig<C::BridgeRelayersPalletInstance>
+ BridgeMessagesConfig<C::BridgeMessagesPalletInstance>
+ TransactionPaymentConfig,
C: ExtensionConfig,
@@ -250,7 +250,7 @@ where
// let's also replace the weight of slashing relayer with the weight of rewarding relayer
if call_info.is_receive_messages_proof_call() {
post_info_weight = post_info_weight.saturating_sub(
- <R as RelayersConfig>::WeightInfo::extra_weight_of_successful_receive_messages_proof_call(),
+ <R as RelayersConfig<C::BridgeRelayersPalletInstance>>::WeightInfo::extra_weight_of_successful_receive_messages_proof_call(),
);
}
@@ -278,7 +278,7 @@ impl<R, C> TransactionExtension<R::RuntimeCall>
for BridgeRelayersTransactionExtension<R, C>
where
Self: 'static + Send + Sync,
- R: RelayersConfig
+ R: RelayersConfig<C::BridgeRelayersPalletInstance>
+ BridgeMessagesConfig<C::BridgeMessagesPalletInstance>
+ TransactionPaymentConfig,
C: ExtensionConfig,
@@ -326,7 +326,9 @@ where
};
// we only boost priority if relayer has staked required balance
- if !RelayersPallet::<R>::is_registration_active(&data.relayer) {
+ if !RelayersPallet::<R, C::BridgeRelayersPalletInstance>::is_registration_active(
+ &data.relayer,
+ ) {
return Ok((Default::default(), Some(data), origin))
}
@@ -382,7 +384,11 @@ where
match call_result {
RelayerAccountAction::None => (),
RelayerAccountAction::Reward(relayer, reward_account, reward) => {
- RelayersPallet::<R>::register_relayer_reward(reward_account, &relayer, reward);
+ RelayersPallet::<R, C::BridgeRelayersPalletInstance>::register_relayer_reward(
+ reward_account,
+ &relayer,
+ reward,
+ );
log::trace!(
target: LOG_TARGET,
@@ -394,7 +400,7 @@ where
);
},
RelayerAccountAction::Slash(relayer, slash_account) =>
- RelayersPallet::<R>::slash_and_deregister(
+ RelayersPallet::<R, C::BridgeRelayersPalletInstance>::slash_and_deregister(
&relayer,
ExplicitOrAccountParams::Params(slash_account),
),
diff --git a/bridges/modules/relayers/src/lib.rs b/bridges/modules/relayers/src/lib.rs
index f06c2e16ac24..d1c71b6d3051 100644
--- a/bridges/modules/relayers/src/lib.rs
+++ b/bridges/modules/relayers/src/lib.rs
@@ -22,8 +22,9 @@
use bp_relayers::{
ExplicitOrAccountParams, PaymentProcedure, Registration, RelayerRewardsKeyProvider,
- RewardsAccountParams, StakeAndSlash,
+ StakeAndSlash,
};
+pub use bp_relayers::{RewardsAccountOwner, RewardsAccountParams};
use bp_runtime::StorageDoubleMapKeyProvider;
use frame_support::fail;
use sp_arithmetic::traits::{AtLeast32BitUnsigned, Zero};
@@ -31,7 +32,7 @@ use sp_runtime::{traits::CheckedSub, Saturating};
use sp_std::marker::PhantomData;
pub use pallet::*;
-pub use payment_adapter::DeliveryConfirmationPaymentsAdapter;
+pub use payment_adapter::{DeliveryConfirmationPaymentsAdapter, PayRewardFromAccount};
pub use stake_adapter::StakeAndSlashNamed;
pub use weights::WeightInfo;
pub use weights_ext::WeightInfoExt;
diff --git a/bridges/modules/relayers/src/mock.rs b/bridges/modules/relayers/src/mock.rs
index d186e968e648..7dc213249379 100644
--- a/bridges/modules/relayers/src/mock.rs
+++ b/bridges/modules/relayers/src/mock.rs
@@ -171,14 +171,14 @@ pub type TestStakeAndSlash = pallet_bridge_relayers::StakeAndSlashNamed<
frame_support::construct_runtime! {
pub enum TestRuntime
{
- System: frame_system::{Pallet, Call, Config<T>, Storage, Event<T>},
+ System: frame_system,
Utility: pallet_utility,
- Balances: pallet_balances::{Pallet, Call, Storage, Config<T>, Event<T>},
- TransactionPayment: pallet_transaction_payment::{Pallet, Storage, Event<T>},
- BridgeRelayers: pallet_bridge_relayers::{Pallet, Call, Storage, Event<T>},
- BridgeGrandpa: pallet_bridge_grandpa::{Pallet, Call, Storage, Event<T>},
- BridgeParachains: pallet_bridge_parachains::{Pallet, Call, Storage, Event<T>},
- BridgeMessages: pallet_bridge_messages::{Pallet, Call, Storage, Event<T>, Config<T>},
+ Balances: pallet_balances,
+ TransactionPayment: pallet_transaction_payment,
+ BridgeRelayers: pallet_bridge_relayers,
+ BridgeGrandpa: pallet_bridge_grandpa,
+ BridgeParachains: pallet_bridge_parachains,
+ BridgeMessages: pallet_bridge_messages,
}
}
@@ -267,6 +267,7 @@ impl pallet_bridge_messages::Config for TestRuntime {
type DeliveryConfirmationPayments = pallet_bridge_relayers::DeliveryConfirmationPaymentsAdapter<
TestRuntime,
(),
+ (),
ConstU64<100_000>,
>;
type OnMessagesDelivered = ();
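The mock above shows the net effect for runtimes: `DeliveryConfirmationPaymentsAdapter` now takes the relayers pallet instance between the messages instance and the per-message reward. A wiring sketch (`Runtime` and `WithRemoteChainMessagesInstance` are illustrative names, not from this diff):

```rust
// DeliveryConfirmationPaymentsAdapter<T, MI, RI, DeliveryReward>:
//   MI - messages pallet instance whose deliveries are rewarded,
//   RI - relayers pallet instance that accumulates the rewards,
//   DeliveryReward - flat reward registered per delivered message.
type DeliveryConfirmationPayments = pallet_bridge_relayers::DeliveryConfirmationPaymentsAdapter<
	Runtime,
	WithRemoteChainMessagesInstance,
	(), // default relayers pallet instance
	ConstU64<100_000>,
>;
```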
diff --git a/bridges/modules/relayers/src/payment_adapter.rs b/bridges/modules/relayers/src/payment_adapter.rs
index 5383cba5ecbd..5af0d8f9dfbf 100644
--- a/bridges/modules/relayers/src/payment_adapter.rs
+++ b/bridges/modules/relayers/src/payment_adapter.rs
@@ -22,6 +22,7 @@ use bp_messages::{
source_chain::{DeliveryConfirmationPayments, RelayersRewards},
MessageNonce,
};
+pub use bp_relayers::PayRewardFromAccount;
use bp_relayers::{RewardsAccountOwner, RewardsAccountParams};
use bp_runtime::Chain;
use frame_support::{sp_runtime::SaturatedConversion, traits::Get};
@@ -31,15 +32,16 @@ use sp_std::{collections::vec_deque::VecDeque, marker::PhantomData, ops::RangeIn
/// Adapter that allows relayers pallet to be used as a delivery+dispatch payment mechanism
/// for the messages pallet.
-pub struct DeliveryConfirmationPaymentsAdapter<T, MI, DeliveryReward>(
- PhantomData<(T, MI, DeliveryReward)>,
+pub struct DeliveryConfirmationPaymentsAdapter<T, MI, RI, DeliveryReward>(
+ PhantomData<(T, MI, RI, DeliveryReward)>,
);
-impl<T, MI, DeliveryReward> DeliveryConfirmationPayments<T::AccountId, LaneIdOf<T, MI>>
- for DeliveryConfirmationPaymentsAdapter<T, MI, DeliveryReward>
+impl<T, MI, RI, DeliveryReward> DeliveryConfirmationPayments<T::AccountId, LaneIdOf<T, MI>>
+ for DeliveryConfirmationPaymentsAdapter<T, MI, RI, DeliveryReward>
where
- T: Config + pallet_bridge_messages::Config<MI, LaneId = <T as Config>::LaneId>,
+ T: Config<RI> + pallet_bridge_messages::Config<MI, LaneId = <T as Config<RI>>::LaneId>,
MI: 'static,
+ RI: 'static,
DeliveryReward: Get<T::Reward>,
{
type Error = &'static str;
@@ -54,7 +56,7 @@ where
bp_messages::calc_relayers_rewards::<T::AccountId>(messages_relayers, received_range);
let rewarded_relayers = relayers_rewards.len();
- register_relayers_rewards::<T>(
+ register_relayers_rewards::<T, RI>(
confirmation_relayer,
relayers_rewards,
RewardsAccountParams::new(
@@ -70,7 +72,7 @@ where
}
// Update rewards to given relayers, optionally rewarding confirmation relayer.
-fn register_relayers_rewards<T: Config>(
+fn register_relayers_rewards<T: Config<I>, I: 'static>(
confirmation_relayer: &T::AccountId,
relayers_rewards: RelayersRewards<T::AccountId>,
lane_id: RewardsAccountParams<T::LaneId>,
@@ -84,7 +86,7 @@ fn register_relayers_rewards(
let relayer_reward = T::Reward::saturated_from(messages).saturating_mul(delivery_fee);
if relayer != *confirmation_relayer {
- Pallet::<T>::register_relayer_reward(lane_id, &relayer, relayer_reward);
+ Pallet::<T, I>::register_relayer_reward(lane_id, &relayer, relayer_reward);
} else {
confirmation_relayer_reward =
confirmation_relayer_reward.saturating_add(relayer_reward);
@@ -92,7 +94,7 @@ fn register_relayers_rewards(
}
// finally - pay reward to confirmation relayer
- Pallet::<T>::register_relayer_reward(
+ Pallet::<T, I>::register_relayer_reward(
lane_id,
confirmation_relayer,
confirmation_relayer_reward,
@@ -115,7 +117,7 @@ mod tests {
#[test]
fn confirmation_relayer_is_rewarded_if_it_has_also_delivered_messages() {
run_test(|| {
- register_relayers_rewards::<TestRuntime>(
+ register_relayers_rewards::<TestRuntime, ()>(
&RELAYER_2,
relayers_rewards(),
test_reward_account_param(),
@@ -136,7 +138,7 @@ mod tests {
#[test]
fn confirmation_relayer_is_not_rewarded_if_it_has_not_delivered_any_messages() {
run_test(|| {
- register_relayers_rewards::<TestRuntime>(
+ register_relayers_rewards::<TestRuntime, ()>(
&RELAYER_3,
relayers_rewards(),
test_reward_account_param(),
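To make the accounting concrete, here is a small self-contained sketch of the same split using plain integers (the zero check mirrors `register_relayer_reward` ignoring empty rewards; this is an illustration, not code from this diff):

```rust
use std::collections::BTreeMap;

/// Sketch of `register_relayers_rewards`: each relayer earns
/// `delivered_messages * delivery_fee`; the confirmation relayer's share is
/// accumulated and registered once at the end, and a zero share is skipped.
fn split_rewards(
	confirmation_relayer: u64,
	relayers_rewards: BTreeMap<u64, u128>, // relayer -> delivered messages
	delivery_fee: u128,
) -> BTreeMap<u64, u128> {
	let mut paid = BTreeMap::new();
	let mut confirmation_reward = 0u128;
	for (relayer, messages) in relayers_rewards {
		let reward = messages.saturating_mul(delivery_fee);
		if relayer != confirmation_relayer {
			paid.insert(relayer, reward);
		} else {
			confirmation_reward = confirmation_reward.saturating_add(reward);
		}
	}
	if confirmation_reward > 0 {
		paid.insert(confirmation_relayer, confirmation_reward);
	}
	paid
}
```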
diff --git a/bridges/modules/xcm-bridge-hub-router/src/benchmarking.rs b/bridges/modules/xcm-bridge-hub-router/src/benchmarking.rs
index 3c4a10f82e7d..ff06a1e3c8c5 100644
--- a/bridges/modules/xcm-bridge-hub-router/src/benchmarking.rs
+++ b/bridges/modules/xcm-bridge-hub-router/src/benchmarking.rs
@@ -18,9 +18,9 @@
#![cfg(feature = "runtime-benchmarks")]
-use crate::{DeliveryFeeFactor, MINIMAL_DELIVERY_FEE_FACTOR};
+use crate::{Bridge, BridgeState, Call, MINIMAL_DELIVERY_FEE_FACTOR};
use frame_benchmarking::{benchmarks_instance_pallet, BenchmarkError};
-use frame_support::traits::{Get, Hooks};
+use frame_support::traits::{EnsureOrigin, Get, Hooks, UnfilteredDispatchable};
use sp_runtime::traits::Zero;
use xcm::prelude::*;
@@ -45,16 +45,35 @@ pub trait Config<I: 'static>: crate::Config<I> {
benchmarks_instance_pallet! {
on_initialize_when_non_congested {
- DeliveryFeeFactor::<T, I>::put(MINIMAL_DELIVERY_FEE_FACTOR + MINIMAL_DELIVERY_FEE_FACTOR);
+ Bridge::<T, I>::put(BridgeState {
+ is_congested: false,
+ delivery_fee_factor: MINIMAL_DELIVERY_FEE_FACTOR + MINIMAL_DELIVERY_FEE_FACTOR,
+ });
}: {
crate::Pallet::<T, I>::on_initialize(Zero::zero())
}
on_initialize_when_congested {
- DeliveryFeeFactor::<T, I>::put(MINIMAL_DELIVERY_FEE_FACTOR + MINIMAL_DELIVERY_FEE_FACTOR);
+ Bridge::<T, I>::put(BridgeState {
+ is_congested: false,
+ delivery_fee_factor: MINIMAL_DELIVERY_FEE_FACTOR + MINIMAL_DELIVERY_FEE_FACTOR,
+ });
let _ = T::ensure_bridged_target_destination()?;
T::make_congested();
}: {
crate::Pallet::<T, I>::on_initialize(Zero::zero())
}
+
+ report_bridge_status {
+ Bridge::<T, I>::put(BridgeState::default());
+
+ let origin: T::RuntimeOrigin = T::BridgeHubOrigin::try_successful_origin().expect("expected valid BridgeHubOrigin");
+ let bridge_id = Default::default();
+ let is_congested = true;
+
+ let call = Call::<T, I>::report_bridge_status { bridge_id, is_congested };
+ }: { call.dispatch_bypass_filter(origin)? }
+ verify {
+ assert!(Bridge::<T, I>::get().is_congested);
+ }
}
diff --git a/bridges/modules/xcm-bridge-hub-router/src/lib.rs b/bridges/modules/xcm-bridge-hub-router/src/lib.rs
index fe8f5a2efdfb..7361696faba7 100644
--- a/bridges/modules/xcm-bridge-hub-router/src/lib.rs
+++ b/bridges/modules/xcm-bridge-hub-router/src/lib.rs
@@ -30,9 +30,10 @@
#![cfg_attr(not(feature = "std"), no_std)]
-pub use bp_xcm_bridge_hub_router::XcmChannelStatusProvider;
+pub use bp_xcm_bridge_hub_router::{BridgeState, XcmChannelStatusProvider};
use codec::Encode;
use frame_support::traits::Get;
+use sp_core::H256;
use sp_runtime::{FixedPointNumber, FixedU128, Saturating};
use sp_std::vec::Vec;
use xcm::prelude::*;
@@ -98,6 +99,8 @@ pub mod pallet {
/// Checks the XCM version for the destination.
type DestinationVersion: GetVersion;
+ /// Origin of the sibling bridge hub that is allowed to report bridge status.
+ type BridgeHubOrigin: EnsureOrigin<Self::RuntimeOrigin>;
/// Actual message sender (`HRMP` or `DMP`) to the sibling bridge hub location.
type ToBridgeHubSender: SendXcm;
/// Local XCM channel manager.
@@ -120,95 +123,112 @@ pub mod pallet {
return T::WeightInfo::on_initialize_when_congested()
}
+ // if bridge has reported congestion, we don't change anything
+ let mut bridge = Self::bridge();
+ if bridge.is_congested {
+ return T::WeightInfo::on_initialize_when_congested()
+ }
+
// if we can't decrease the delivery fee factor anymore, we don't change anything
- let mut delivery_fee_factor = Self::delivery_fee_factor();
- if delivery_fee_factor == MINIMAL_DELIVERY_FEE_FACTOR {
+ if bridge.delivery_fee_factor == MINIMAL_DELIVERY_FEE_FACTOR {
return T::WeightInfo::on_initialize_when_congested()
}
- let previous_factor = delivery_fee_factor;
- delivery_fee_factor =
- MINIMAL_DELIVERY_FEE_FACTOR.max(delivery_fee_factor / EXPONENTIAL_FEE_BASE);
+ let previous_factor = bridge.delivery_fee_factor;
+ bridge.delivery_fee_factor =
+ MINIMAL_DELIVERY_FEE_FACTOR.max(bridge.delivery_fee_factor / EXPONENTIAL_FEE_BASE);
+
log::info!(
target: LOG_TARGET,
"Bridge channel is uncongested. Decreased fee factor from {} to {}",
previous_factor,
- delivery_fee_factor,
+ bridge.delivery_fee_factor,
);
Self::deposit_event(Event::DeliveryFeeFactorDecreased {
- new_value: delivery_fee_factor,
+ new_value: bridge.delivery_fee_factor,
});
- DeliveryFeeFactor::::put(delivery_fee_factor);
+ Bridge::::put(bridge);
T::WeightInfo::on_initialize_when_non_congested()
}
}
- /// Initialization value for the delivery fee factor.
- #[pallet::type_value]
- pub fn InitialFactor() -> FixedU128 {
- MINIMAL_DELIVERY_FEE_FACTOR
+ #[pallet::call]
+ impl<T: Config<I>, I: 'static> Pallet<T, I> {
+ /// Notification about congested bridge queue.
+ #[pallet::call_index(0)]
+ #[pallet::weight(T::WeightInfo::report_bridge_status())]
+ pub fn report_bridge_status(
+ origin: OriginFor<T>,
+ // this argument is not currently used, but to ease future migration, we'll keep it
+ // here
+ bridge_id: H256,
+ is_congested: bool,
+ ) -> DispatchResult {
+ let _ = T::BridgeHubOrigin::ensure_origin(origin)?;
+
+ log::info!(
+ target: LOG_TARGET,
+ "Received bridge status from {:?}: congested = {}",
+ bridge_id,
+ is_congested,
+ );
+
+ Bridge::<T, I>::mutate(|bridge| {
+ bridge.is_congested = is_congested;
+ });
+ Ok(())
+ }
}
- /// The number to multiply the base delivery fee by.
+ /// Bridge that we are using.
///
- /// This factor is shared by all bridges, served by this pallet. For example, if this
- /// chain (`Config::UniversalLocation`) opens two bridges (
- /// `X2(GlobalConsensus(Config::BridgedNetworkId::get()), Parachain(1000))` and
- /// `X2(GlobalConsensus(Config::BridgedNetworkId::get()), Parachain(2000))`), then they
- /// both will be sharing the same fee factor. This is because both bridges are sharing
- /// the same local XCM channel with the child/sibling bridge hub, which we are using
- /// to detect congestion:
- ///
- /// ```nocompile
- /// ThisChain --- Local XCM channel --> Sibling Bridge Hub ------
- /// | |
- /// | |
- /// | |
- /// Lane1 Lane2
- /// | |
- /// | |
- /// | |
- /// \ / |
- /// Parachain1 <-- Local XCM channel --- Remote Bridge Hub <------
- /// |
- /// |
- /// Parachain1 <-- Local XCM channel ---------
- /// ```
- ///
- /// If at least one of other channels is congested, the local XCM channel with sibling
- /// bridge hub eventually becomes congested too. And we have no means to detect - which
- /// bridge exactly causes the congestion. So the best solution here is not to make
- /// any differences between all bridges, started by this chain.
+ /// **bridges-v1** assumptions: all outbound messages through this router use a single lane
+ /// and go to a single remote consensus. If some other remote consensus uses the same
+ /// bridge hub, a separate pallet instance shall be used. In `v2` we'll have all required
+ /// primitives (lane-id aka bridge-id, derived from XCM locations) to support multiple bridges
+ /// by the same pallet instance.
#[pallet::storage]
- #[pallet::getter(fn delivery_fee_factor)]
- pub type DeliveryFeeFactor<T: Config<I>, I: 'static = ()> =
- StorageValue<_, FixedU128, ValueQuery, InitialFactor>;
+ #[pallet::getter(fn bridge)]
+ pub type Bridge<T: Config<I>, I: 'static = ()> = StorageValue<_, BridgeState, ValueQuery>;
impl<T: Config<I>, I: 'static> Pallet<T, I> {
/// Called when new message is sent (queued to local outbound XCM queue) over the bridge.
pub(crate) fn on_message_sent_to_bridge(message_size: u32) {
- // if outbound channel is not congested, do nothing
- if !T::LocalXcmChannelManager::is_congested(&T::SiblingBridgeHubLocation::get()) {
- return
- }
+ log::trace!(
+ target: LOG_TARGET,
+ "on_message_sent_to_bridge - message_size: {message_size:?}",
+ );
+ let _ = Bridge::<T, I>::try_mutate(|bridge| {
+ let is_channel_with_bridge_hub_congested =
+ T::LocalXcmChannelManager::is_congested(&T::SiblingBridgeHubLocation::get());
+ let is_bridge_congested = bridge.is_congested;
+
+ // if outbound queue is not congested AND bridge has not reported congestion, do
+ // nothing
+ if !is_channel_with_bridge_hub_congested && !is_bridge_congested {
+ return Err(())
+ }
+
+ // ok - we need to increase the fee factor, let's do that
+ let message_size_factor = FixedU128::from_u32(message_size.saturating_div(1024))
+ .saturating_mul(MESSAGE_SIZE_FEE_BASE);
+ let total_factor = EXPONENTIAL_FEE_BASE.saturating_add(message_size_factor);
+ let previous_factor = bridge.delivery_fee_factor;
+ bridge.delivery_fee_factor =
+ bridge.delivery_fee_factor.saturating_mul(total_factor);
- // ok - we need to increase the fee factor, let's do that
- let message_size_factor = FixedU128::from_u32(message_size.saturating_div(1024))
- .saturating_mul(MESSAGE_SIZE_FEE_BASE);
- let total_factor = EXPONENTIAL_FEE_BASE.saturating_add(message_size_factor);
- DeliveryFeeFactor::<T, I>::mutate(|f| {
- let previous_factor = *f;
- *f = f.saturating_mul(total_factor);
log::info!(
target: LOG_TARGET,
"Bridge channel is congested. Increased fee factor from {} to {}",
previous_factor,
- f,
+ bridge.delivery_fee_factor,
);
- Self::deposit_event(Event::DeliveryFeeFactorIncreased { new_value: *f });
- *f
+ Self::deposit_event(Event::DeliveryFeeFactorIncreased {
+ new_value: bridge.delivery_fee_factor,
+ });
+ Ok(())
});
}
}
@@ -310,9 +330,9 @@ impl<T: Config<I>, I: 'static> ExporterFor for Pallet<T, I> {
let message_size = message.encoded_size();
let message_fee = (message_size as u128).saturating_mul(T::ByteFee::get());
let fee_sum = base_fee.saturating_add(message_fee);
-
- let fee_factor = Self::delivery_fee_factor();
+ let fee_factor = Self::bridge().delivery_fee_factor;
let fee = fee_factor.saturating_mul_int(fee_sum);
+
let fee = if fee > 0 { Some((T::FeeAsset::get(), fee).into()) } else { None };
log::info!(
@@ -427,24 +447,47 @@ mod tests {
use frame_system::{EventRecord, Phase};
use sp_runtime::traits::One;
+ fn congested_bridge(delivery_fee_factor: FixedU128) -> BridgeState {
+ BridgeState { is_congested: true, delivery_fee_factor }
+ }
+
+ fn uncongested_bridge(delivery_fee_factor: FixedU128) -> BridgeState {
+ BridgeState { is_congested: false, delivery_fee_factor }
+ }
+
#[test]
fn initial_fee_factor_is_one() {
run_test(|| {
- assert_eq!(DeliveryFeeFactor::<TestRuntime, ()>::get(), MINIMAL_DELIVERY_FEE_FACTOR);
+ assert_eq!(
+ Bridge::<TestRuntime, ()>::get(),
+ uncongested_bridge(MINIMAL_DELIVERY_FEE_FACTOR),
+ );
})
}
#[test]
fn fee_factor_is_not_decreased_from_on_initialize_when_xcm_channel_is_congested() {
run_test(|| {
- DeliveryFeeFactor::<TestRuntime, ()>::put(FixedU128::from_rational(125, 100));
+ Bridge::<TestRuntime, ()>::put(uncongested_bridge(FixedU128::from_rational(125, 100)));
TestLocalXcmChannelManager::make_congested(&SiblingBridgeHubLocation::get());
// it should not decrease, because the queue is congested
- let old_delivery_fee_factor = XcmBridgeHubRouter::delivery_fee_factor();
+ let old_delivery = XcmBridgeHubRouter::bridge();
XcmBridgeHubRouter::on_initialize(One::one());
- assert_eq!(XcmBridgeHubRouter::delivery_fee_factor(), old_delivery_fee_factor);
+ assert_eq!(XcmBridgeHubRouter::bridge(), old_delivery);
+ assert_eq!(System::events(), vec![]);
+ })
+ }
+
+ #[test]
+ fn fee_factor_is_not_decreased_from_on_initialize_when_bridge_has_reported_congestion() {
+ run_test(|| {
+ Bridge::<TestRuntime, ()>::put(congested_bridge(FixedU128::from_rational(125, 100)));
+ // it should not decrease, because the bridge has reported congestion
+ let old_bridge = XcmBridgeHubRouter::bridge();
+ XcmBridgeHubRouter::on_initialize(One::one());
+ assert_eq!(XcmBridgeHubRouter::bridge(), old_bridge);
assert_eq!(System::events(), vec![]);
})
}
@@ -453,16 +496,19 @@ mod tests {
fn fee_factor_is_decreased_from_on_initialize_when_xcm_channel_is_uncongested() {
run_test(|| {
let initial_fee_factor = FixedU128::from_rational(125, 100);
- DeliveryFeeFactor::<TestRuntime, ()>::put(initial_fee_factor);
+ Bridge::<TestRuntime, ()>::put(uncongested_bridge(initial_fee_factor));
- // it shold eventually decreased to one
- while XcmBridgeHubRouter::delivery_fee_factor() > MINIMAL_DELIVERY_FEE_FACTOR {
+ // it should eventually decrease to one
+ while XcmBridgeHubRouter::bridge().delivery_fee_factor > MINIMAL_DELIVERY_FEE_FACTOR {
XcmBridgeHubRouter::on_initialize(One::one());
}
- // verify that it doesn't decreases anymore
+ // verify that it doesn't decrease anymore
XcmBridgeHubRouter::on_initialize(One::one());
- assert_eq!(XcmBridgeHubRouter::delivery_fee_factor(), MINIMAL_DELIVERY_FEE_FACTOR);
+ assert_eq!(
+ XcmBridgeHubRouter::bridge(),
+ uncongested_bridge(MINIMAL_DELIVERY_FEE_FACTOR)
+ );
// check emitted event
let first_system_event = System::events().first().cloned();
@@ -582,7 +628,7 @@ mod tests {
// but when factor is larger than one, it increases the fee, so it becomes:
// `(BASE_FEE + BYTE_FEE * msg_size) * F + HRMP_FEE`
let factor = FixedU128::from_rational(125, 100);
- DeliveryFeeFactor::<TestRuntime, ()>::put(factor);
+ Bridge::<TestRuntime, ()>::put(uncongested_bridge(factor));
let expected_fee =
(FixedU128::saturating_from_integer(BASE_FEE + BYTE_FEE * (msg_size as u128)) *
factor)
@@ -598,7 +644,7 @@ mod tests {
#[test]
fn sent_message_doesnt_increase_factor_if_queue_is_uncongested() {
run_test(|| {
- let old_delivery_fee_factor = XcmBridgeHubRouter::delivery_fee_factor();
+ let old_bridge = XcmBridgeHubRouter::bridge();
assert_eq!(
send_xcm::<XcmBridgeHubRouter>(
Location::new(2, [GlobalConsensus(BridgedNetworkId::get()), Parachain(1000)]),
@@ -609,7 +655,7 @@ mod tests {
);
assert!(TestToBridgeHubSender::is_message_sent());
- assert_eq!(old_delivery_fee_factor, XcmBridgeHubRouter::delivery_fee_factor());
+ assert_eq!(old_bridge, XcmBridgeHubRouter::bridge());
assert_eq!(System::events(), vec![]);
});
@@ -620,7 +666,39 @@ mod tests {
run_test(|| {
TestLocalXcmChannelManager::make_congested(&SiblingBridgeHubLocation::get());
- let old_delivery_fee_factor = XcmBridgeHubRouter::delivery_fee_factor();
+ let old_bridge = XcmBridgeHubRouter::bridge();
+ assert_ok!(send_xcm::<XcmBridgeHubRouter>(
+ Location::new(2, [GlobalConsensus(BridgedNetworkId::get()), Parachain(1000)]),
+ vec![ClearOrigin].into(),
+ )
+ .map(drop));
+
+ assert!(TestToBridgeHubSender::is_message_sent());
+ assert!(
+ old_bridge.delivery_fee_factor < XcmBridgeHubRouter::bridge().delivery_fee_factor
+ );
+
+ // check emitted event
+ let first_system_event = System::events().first().cloned();
+ assert!(matches!(
+ first_system_event,
+ Some(EventRecord {
+ phase: Phase::Initialization,
+ event: RuntimeEvent::XcmBridgeHubRouter(
+ Event::DeliveryFeeFactorIncreased { .. }
+ ),
+ ..
+ })
+ ));
+ });
+ }
+
+ #[test]
+ fn sent_message_increases_factor_if_bridge_has_reported_congestion() {
+ run_test(|| {
+ Bridge::<TestRuntime, ()>::put(congested_bridge(MINIMAL_DELIVERY_FEE_FACTOR));
+
+ let old_bridge = XcmBridgeHubRouter::bridge();
assert_ok!(send_xcm::<XcmBridgeHubRouter>(
Location::new(2, [GlobalConsensus(BridgedNetworkId::get()), Parachain(1000)]),
vec![ClearOrigin].into(),
@@ -628,7 +706,9 @@ mod tests {
.map(drop));
assert!(TestToBridgeHubSender::is_message_sent());
- assert!(old_delivery_fee_factor < XcmBridgeHubRouter::delivery_fee_factor());
+ assert!(
+ old_bridge.delivery_fee_factor < XcmBridgeHubRouter::bridge().delivery_fee_factor
+ );
// check emitted event
let first_system_event = System::events().first().cloned();
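
Before the mock changes below, a quick orientation on the hunks above: the router now keeps a single `Bridge` storage value holding a `BridgeState` (a congestion flag plus the delivery fee factor) instead of the bare `DeliveryFeeFactor` value. The following is a minimal, self-contained sketch of the fee-increase arithmetic, not part of the patch; the constant values are assumptions matching this pallet's usual defaults (`EXPONENTIAL_FEE_BASE` = 1.05, `MESSAGE_SIZE_FEE_BASE` = 0.001, `MINIMAL_DELIVERY_FEE_FACTOR` = 1).

```rust
// Illustrative sketch of the `DeliveryFeeFactorIncreased` arithmetic.
use sp_runtime::{FixedU128, Saturating};

#[derive(Clone, Copy, Debug, PartialEq)]
pub struct BridgeState {
    pub is_congested: bool,
    pub delivery_fee_factor: FixedU128,
}

pub const MINIMAL_DELIVERY_FEE_FACTOR: FixedU128 = FixedU128::from_u32(1);
// Assumed defaults: +5% per congested send, +0.1% per KiB of message.
const EXPONENTIAL_FEE_BASE: FixedU128 = FixedU128::from_rational(105, 100);
const MESSAGE_SIZE_FEE_BASE: FixedU128 = FixedU128::from_rational(1, 1000);

/// Mirrors the mutation in the hunk above: multiply the factor by
/// `EXPONENTIAL_FEE_BASE + MESSAGE_SIZE_FEE_BASE * (message_size / 1024)`.
fn on_congested_send(bridge: &mut BridgeState, message_size: u32) {
    let message_size_factor = FixedU128::from_u32(message_size.saturating_div(1024))
        .saturating_mul(MESSAGE_SIZE_FEE_BASE);
    let total_factor = EXPONENTIAL_FEE_BASE.saturating_add(message_size_factor);
    bridge.delivery_fee_factor = bridge.delivery_fee_factor.saturating_mul(total_factor);
}

fn main() {
    let mut bridge =
        BridgeState { is_congested: true, delivery_fee_factor: MINIMAL_DELIVERY_FEE_FACTOR };
    // A 4 KiB message bumps the factor by 1.05 + 4 * 0.001 = 1.054.
    on_congested_send(&mut bridge, 4096);
    assert_eq!(bridge.delivery_fee_factor, FixedU128::from_rational(1054, 1000));
}
```

Each congested send multiplies the factor by at least 1.05, so fees grow exponentially until `on_initialize` starts decreasing the factor again once the channel reports itself uncongested.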
diff --git a/bridges/modules/xcm-bridge-hub-router/src/mock.rs b/bridges/modules/xcm-bridge-hub-router/src/mock.rs
index 095572883920..ac642e108c2a 100644
--- a/bridges/modules/xcm-bridge-hub-router/src/mock.rs
+++ b/bridges/modules/xcm-bridge-hub-router/src/mock.rs
@@ -80,6 +80,7 @@ impl pallet_xcm_bridge_hub_router::Config<()> for TestRuntime {
type DestinationVersion =
LatestOrNoneForLocationVersionChecker<Equals<UnknownXcmVersionForRoutableLocation>>;
+ type BridgeHubOrigin = frame_system::EnsureRoot<AccountId>;
type ToBridgeHubSender = TestToBridgeHubSender;
type LocalXcmChannelManager = TestLocalXcmChannelManager;
diff --git a/bridges/modules/xcm-bridge-hub-router/src/weights.rs b/bridges/modules/xcm-bridge-hub-router/src/weights.rs
index d9a0426fecaf..8f5012c9de26 100644
--- a/bridges/modules/xcm-bridge-hub-router/src/weights.rs
+++ b/bridges/modules/xcm-bridge-hub-router/src/weights.rs
@@ -52,6 +52,7 @@ use sp_std::marker::PhantomData;
pub trait WeightInfo {
fn on_initialize_when_non_congested() -> Weight;
fn on_initialize_when_congested() -> Weight;
+ fn report_bridge_status() -> Weight;
}
/// Weights for `pallet_xcm_bridge_hub_router` that are generated using one of the Bridge testnets.
@@ -85,6 +86,19 @@ impl<T: frame_system::Config> WeightInfo for BridgeWeight<T> {
// Minimum execution time: 4_239 nanoseconds.
Weight::from_parts(4_383_000, 3547).saturating_add(T::DbWeight::get().reads(1_u64))
}
+ /// Storage: `XcmBridgeHubRouter::Bridge` (r:1 w:1)
+ ///
+ /// Proof: `XcmBridgeHubRouter::Bridge` (`max_values`: Some(1), `max_size`: Some(17), added:
+ /// 512, mode: `MaxEncodedLen`)
+ fn report_bridge_status() -> Weight {
+ // Proof Size summary in bytes:
+ // Measured: `53`
+ // Estimated: `1502`
+ // Minimum execution time: 10_427 nanoseconds.
+ Weight::from_parts(10_682_000, 1502)
+ .saturating_add(T::DbWeight::get().reads(1_u64))
+ .saturating_add(T::DbWeight::get().writes(1_u64))
+ }
}
// For backwards compatibility and tests
@@ -120,4 +134,17 @@ impl WeightInfo for () {
// Minimum execution time: 4_239 nanoseconds.
Weight::from_parts(4_383_000, 3547).saturating_add(RocksDbWeight::get().reads(1_u64))
}
+ /// Storage: `XcmBridgeHubRouter::Bridge` (r:1 w:1)
+ ///
+ /// Proof: `XcmBridgeHubRouter::Bridge` (`max_values`: Some(1), `max_size`: Some(17), added:
+ /// 512, mode: `MaxEncodedLen`)
+ fn report_bridge_status() -> Weight {
+ // Proof Size summary in bytes:
+ // Measured: `53`
+ // Estimated: `1502`
+ // Minimum execution time: 10_427 nanoseconds.
+ Weight::from_parts(10_682_000, 1502)
+ .saturating_add(RocksDbWeight::get().reads(1_u64))
+ .saturating_add(RocksDbWeight::get().writes(1_u64))
+ }
}
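
The two `report_bridge_status` implementations above account for exactly one read and one write of the `XcmBridgeHubRouter::Bridge` value on top of the benchmarked base weight. A hedged sketch of how a runtime could supply its own implementation of the extended trait follows; the trait shape is taken verbatim from the hunk above, while `MyRuntimeWeights` and its numbers are placeholders, not real benchmark results.

```rust
// Sketch: plugging custom weights into the extended `WeightInfo` trait.
use frame_support::{
    traits::Get,
    weights::{constants::RocksDbWeight, Weight},
};

pub trait WeightInfo {
    fn on_initialize_when_non_congested() -> Weight;
    fn on_initialize_when_congested() -> Weight;
    fn report_bridge_status() -> Weight;
}

/// Placeholder weights for a hypothetical runtime.
pub struct MyRuntimeWeights;

impl WeightInfo for MyRuntimeWeights {
    fn on_initialize_when_non_congested() -> Weight {
        Weight::from_parts(10_000_000, 3547).saturating_add(RocksDbWeight::get().reads(2))
    }
    fn on_initialize_when_congested() -> Weight {
        Weight::from_parts(4_500_000, 3547).saturating_add(RocksDbWeight::get().reads(1))
    }
    fn report_bridge_status() -> Weight {
        // One read + one write of the `XcmBridgeHubRouter::Bridge` value,
        // matching the storage comment in the hunk above.
        Weight::from_parts(11_000_000, 1502)
            .saturating_add(RocksDbWeight::get().reads(1))
            .saturating_add(RocksDbWeight::get().writes(1))
    }
}

fn main() {
    println!("{:?}", MyRuntimeWeights::report_bridge_status());
}
```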
diff --git a/bridges/modules/xcm-bridge-hub/Cargo.toml b/bridges/modules/xcm-bridge-hub/Cargo.toml
index fe58b910a94e..251dcfb45bcb 100644
--- a/bridges/modules/xcm-bridge-hub/Cargo.toml
+++ b/bridges/modules/xcm-bridge-hub/Cargo.toml
@@ -39,6 +39,7 @@ sp-io = { workspace = true }
bp-runtime = { workspace = true }
bp-header-chain = { workspace = true }
pallet-xcm-bridge-hub-router = { workspace = true }
+bp-xcm-bridge-hub-router = { workspace = true }
polkadot-parachain-primitives = { workspace = true }
[features]
@@ -47,6 +48,7 @@ std = [
"bp-header-chain/std",
"bp-messages/std",
"bp-runtime/std",
+ "bp-xcm-bridge-hub-router/std",
"bp-xcm-bridge-hub/std",
"codec/std",
"frame-support/std",
diff --git a/bridges/modules/xcm-bridge-hub/src/exporter.rs b/bridges/modules/xcm-bridge-hub/src/exporter.rs
index 5afb9f36bc94..93b6093b42af 100644
--- a/bridges/modules/xcm-bridge-hub/src/exporter.rs
+++ b/bridges/modules/xcm-bridge-hub/src/exporter.rs
@@ -364,7 +364,7 @@ mod tests {
use bp_runtime::RangeInclusiveExt;
use bp_xcm_bridge_hub::{Bridge, BridgeLocations, BridgeState};
- use frame_support::assert_ok;
+ use frame_support::{assert_ok, traits::EnsureOrigin};
use pallet_bridge_messages::InboundLaneStorage;
use xcm_builder::{NetworkExportTable, UnpaidRemoteExporter};
use xcm_executor::traits::{export_xcm, ConvertLocation};
@@ -381,9 +381,8 @@ mod tests {
BridgedUniversalDestination::get()
}
- fn open_lane() -> (BridgeLocations, TestLaneIdType) {
+ fn open_lane(origin: RuntimeOrigin) -> (BridgeLocations, TestLaneIdType) {
// open expected outbound lane
- let origin = OpenBridgeOrigin::sibling_parachain_origin();
let with = bridged_asset_hub_universal_location();
let locations =
XcmOverBridge::bridge_locations_from_origin(origin, Box::new(with.into())).unwrap();
@@ -439,7 +438,7 @@ mod tests {
}
fn open_lane_and_send_regular_message() -> (BridgeId, TestLaneIdType) {
- let (locations, lane_id) = open_lane();
+ let (locations, lane_id) = open_lane(OpenBridgeOrigin::sibling_parachain_origin());
// now let's try to enqueue message using our `ExportXcm` implementation
export_xcm::<XcmOverBridge>(
@@ -473,7 +472,7 @@ mod tests {
fn exporter_does_not_suspend_the_bridge_if_outbound_bridge_queue_is_not_congested() {
run_test(|| {
let (bridge_id, _) = open_lane_and_send_regular_message();
- assert!(!TestLocalXcmChannelManager::is_bridge_suspened());
+ assert!(!TestLocalXcmChannelManager::is_bridge_suspended(&bridge_id));
assert_eq!(XcmOverBridge::bridge(&bridge_id).unwrap().state, BridgeState::Opened);
});
}
@@ -490,7 +489,7 @@ mod tests {
}
open_lane_and_send_regular_message();
- assert!(!TestLocalXcmChannelManager::is_bridge_suspened());
+ assert!(!TestLocalXcmChannelManager::is_bridge_suspended(&bridge_id));
});
}
@@ -502,11 +501,11 @@ mod tests {
open_lane_and_send_regular_message();
}
- assert!(!TestLocalXcmChannelManager::is_bridge_suspened());
+ assert!(!TestLocalXcmChannelManager::is_bridge_suspended(&bridge_id));
assert_eq!(XcmOverBridge::bridge(&bridge_id).unwrap().state, BridgeState::Opened);
open_lane_and_send_regular_message();
- assert!(TestLocalXcmChannelManager::is_bridge_suspened());
+ assert!(TestLocalXcmChannelManager::is_bridge_suspended(&bridge_id));
assert_eq!(XcmOverBridge::bridge(&bridge_id).unwrap().state, BridgeState::Suspended);
});
}
@@ -523,7 +522,7 @@ mod tests {
OUTBOUND_LANE_UNCONGESTED_THRESHOLD + 1,
);
- assert!(!TestLocalXcmChannelManager::is_bridge_resumed());
+ assert!(!TestLocalXcmChannelManager::is_bridge_resumed(&bridge_id));
assert_eq!(XcmOverBridge::bridge(&bridge_id).unwrap().state, BridgeState::Suspended);
});
}
@@ -537,7 +536,7 @@ mod tests {
OUTBOUND_LANE_UNCONGESTED_THRESHOLD,
);
- assert!(!TestLocalXcmChannelManager::is_bridge_resumed());
+ assert!(!TestLocalXcmChannelManager::is_bridge_resumed(&bridge_id));
assert_eq!(XcmOverBridge::bridge(&bridge_id).unwrap().state, BridgeState::Opened);
});
}
@@ -554,7 +553,7 @@ mod tests {
OUTBOUND_LANE_UNCONGESTED_THRESHOLD,
);
- assert!(TestLocalXcmChannelManager::is_bridge_resumed());
+ assert!(TestLocalXcmChannelManager::is_bridge_resumed(&bridge_id));
assert_eq!(XcmOverBridge::bridge(&bridge_id).unwrap().state, BridgeState::Opened);
});
}
@@ -648,7 +647,10 @@ mod tests {
let dest = Location::new(2, BridgedUniversalDestination::get());
// open bridge
- let (_, expected_lane_id) = open_lane();
+ let origin = OpenBridgeOrigin::sibling_parachain_origin();
+ let origin_as_location =
OpenBridgeOriginOf::<TestRuntime>::try_origin(origin.clone()).unwrap();
+ let (_, expected_lane_id) = open_lane(origin);
// check before - no messages
assert_eq!(
@@ -662,18 +664,24 @@ mod tests {
);
// send `ExportMessage(message)` by `UnpaidRemoteExporter`.
- TestExportXcmWithXcmOverBridge::set_origin_for_execute(SiblingLocation::get());
+ ExecuteXcmOverSendXcm::set_origin_for_execute(origin_as_location);
assert_ok!(send_xcm::<
UnpaidRemoteExporter<
NetworkExportTable<BridgeTable>,
- TestExportXcmWithXcmOverBridge,
+ ExecuteXcmOverSendXcm,
UniversalLocation,
>,
>(dest.clone(), Xcm::<()>::default()));
+ // we need to set `UniversalLocation` for `sibling_parachain_origin` for
+ // `XcmOverBridgeWrappedWithExportMessageRouterInstance`.
+ ExportMessageOriginUniversalLocation::set(Some(SiblingUniversalLocation::get()));
// send `ExportMessage(message)` by `pallet_xcm_bridge_hub_router`.
- TestExportXcmWithXcmOverBridge::set_origin_for_execute(SiblingLocation::get());
- assert_ok!(send_xcm::<XcmOverBridgeRouter>(dest.clone(), Xcm::<()>::default()));
+ ExecuteXcmOverSendXcm::set_origin_for_execute(SiblingLocation::get());
+ assert_ok!(send_xcm::<XcmOverBridgeWrappedWithExportMessageRouter>(
+ dest.clone(),
+ Xcm::<()>::default()
+ ));
// check after - a message ready to be relayed
assert_eq!(
@@ -765,7 +773,7 @@ mod tests {
);
// ok
- let _ = open_lane();
+ let _ = open_lane(OpenBridgeOrigin::sibling_parachain_origin());
let mut dest_wrapper = Some(bridged_relative_destination());
assert_ok!(XcmOverBridge::validate(
BridgedRelayNetwork::get(),
@@ -780,4 +788,77 @@ mod tests {
assert_eq!(None, dest_wrapper);
});
}
+
+ #[test]
+ fn congestion_with_pallet_xcm_bridge_hub_router_works() {
+ run_test(|| {
+ // valid routable destination
+ let dest = Location::new(2, BridgedUniversalDestination::get());
+
+ fn router_bridge_state() -> pallet_xcm_bridge_hub_router::BridgeState {
+ pallet_xcm_bridge_hub_router::Bridge::<
+ TestRuntime,
+ XcmOverBridgeWrappedWithExportMessageRouterInstance,
+ >::get()
+ }
+
+ // open two bridges
+ let origin = OpenBridgeOrigin::sibling_parachain_origin();
+ let origin_as_location =
+ OpenBridgeOriginOf::::try_origin(origin.clone()).unwrap();
+ let (bridge_1, expected_lane_id_1) = open_lane(origin);
+
+ // we need to set `UniversalLocation` for `sibling_parachain_origin` for
+ // `XcmOverBridgeWrappedWithExportMessageRouterInstance`.
+ ExportMessageOriginUniversalLocation::set(Some(SiblingUniversalLocation::get()));
+
+ // check before
+ // bridges are opened
+ assert_eq!(
+ XcmOverBridge::bridge(bridge_1.bridge_id()).unwrap().state,
+ BridgeState::Opened
+ );
+
+ // the router is uncongested
+ assert!(!router_bridge_state().is_congested);
+ assert!(!TestLocalXcmChannelManager::is_bridge_suspended(bridge_1.bridge_id()));
+ assert!(!TestLocalXcmChannelManager::is_bridge_resumed(bridge_1.bridge_id()));
+
+ // make bridges congested by sending too many messages
+ for _ in 1..(OUTBOUND_LANE_CONGESTED_THRESHOLD + 2) {
+ // send `ExportMessage(message)` by `pallet_xcm_bridge_hub_router`.
+ ExecuteXcmOverSendXcm::set_origin_for_execute(origin_as_location.clone());
+ assert_ok!(send_xcm::<XcmOverBridgeWrappedWithExportMessageRouter>(
+ dest.clone(),
+ Xcm::<()>::default()
+ ));
+ }
+
+ // checks after
+ // bridges are suspended
+ assert_eq!(
+ XcmOverBridge::bridge(bridge_1.bridge_id()).unwrap().state,
+ BridgeState::Suspended,
+ );
+ // the router is congested
+ assert!(router_bridge_state().is_congested);
+ assert!(TestLocalXcmChannelManager::is_bridge_suspended(bridge_1.bridge_id()));
+ assert!(!TestLocalXcmChannelManager::is_bridge_resumed(bridge_1.bridge_id()));
+
+ // make bridges uncongested to trigger resume signal
+ XcmOverBridge::on_bridge_messages_delivered(
+ expected_lane_id_1,
+ OUTBOUND_LANE_UNCONGESTED_THRESHOLD,
+ );
+
+ // bridge is again opened
+ assert_eq!(
+ XcmOverBridge::bridge(bridge_1.bridge_id()).unwrap().state,
+ BridgeState::Opened
+ );
+ // the router is uncongested
+ assert!(!router_bridge_state().is_congested);
+ assert!(TestLocalXcmChannelManager::is_bridge_resumed(bridge_1.bridge_id()));
+ })
+ }
}
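
The test changes in this file replace the mock's old global `is_bridge_suspened()` flag (note the typo the patch also fixes) with per-bridge queries. A self-contained sketch of that bookkeeping follows: a thread-local set keyed by bridge id, in the spirit of the mock's `TestLocalXcmChannelManager`. The real `bp_xcm_bridge_hub::LocalXcmChannelManager` trait also takes the local origin `Location` and returns `Result`s, and `BridgeId` is an `H256`-based type; both are simplified here.

```rust
// Sketch of per-bridge suspend/resume tracking for a test channel manager.
use std::{cell::RefCell, collections::HashSet};

/// Stand-in for the real `BridgeId` (an `H256`-based type in `bp_xcm_bridge_hub`).
type BridgeId = u64;

thread_local! {
    static SUSPENDED: RefCell<HashSet<BridgeId>> = RefCell::new(HashSet::new());
}

pub struct TestLocalXcmChannelManager;

impl TestLocalXcmChannelManager {
    pub fn suspend_bridge(bridge: BridgeId) {
        SUSPENDED.with(|s| {
            s.borrow_mut().insert(bridge);
        });
    }
    pub fn resume_bridge(bridge: BridgeId) {
        SUSPENDED.with(|s| {
            s.borrow_mut().remove(&bridge);
        });
    }
    pub fn is_bridge_suspended(bridge: &BridgeId) -> bool {
        SUSPENDED.with(|s| s.borrow().contains(bridge))
    }
}

fn main() {
    let bridge_id = 1;
    assert!(!TestLocalXcmChannelManager::is_bridge_suspended(&bridge_id));
    TestLocalXcmChannelManager::suspend_bridge(bridge_id);
    assert!(TestLocalXcmChannelManager::is_bridge_suspended(&bridge_id));
    TestLocalXcmChannelManager::resume_bridge(bridge_id);
    assert!(!TestLocalXcmChannelManager::is_bridge_suspended(&bridge_id));
}
```

Tracking per bridge is what lets the `congestion_with_pallet_xcm_bridge_hub_router_works` test above assert suspend and resume signals for one bridge independently of any others.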
diff --git a/bridges/modules/xcm-bridge-hub/src/lib.rs b/bridges/modules/xcm-bridge-hub/src/lib.rs
index 1b2536598a20..682db811efa7 100644
--- a/bridges/modules/xcm-bridge-hub/src/lib.rs
+++ b/bridges/modules/xcm-bridge-hub/src/lib.rs
@@ -145,8 +145,8 @@
use bp_messages::{LaneState, MessageNonce};
use bp_runtime::{AccountIdOf, BalanceOf, RangeInclusiveExt};
-pub use bp_xcm_bridge_hub::{Bridge, BridgeId, BridgeState};
-use bp_xcm_bridge_hub::{BridgeLocations, BridgeLocationsError, LocalXcmChannelManager};
+pub use bp_xcm_bridge_hub::{Bridge, BridgeId, BridgeState, LocalXcmChannelManager};
+use bp_xcm_bridge_hub::{BridgeLocations, BridgeLocationsError};
use frame_support::{traits::fungible::MutateHold, DefaultNoBound};
use frame_system::Config as SystemConfig;
use pallet_bridge_messages::{Config as BridgeMessagesConfig, LanesManagerError};
diff --git a/bridges/modules/xcm-bridge-hub/src/mock.rs b/bridges/modules/xcm-bridge-hub/src/mock.rs
index 9f06b99ef6d5..d186507dab17 100644
--- a/bridges/modules/xcm-bridge-hub/src/mock.rs
+++ b/bridges/modules/xcm-bridge-hub/src/mock.rs
@@ -24,10 +24,10 @@ use bp_messages::{
};
use bp_runtime::{messages::MessageDispatchResult, Chain, ChainId, HashOf};
use bp_xcm_bridge_hub::{BridgeId, LocalXcmChannelManager};
-use codec::Encode;
+use codec::{Decode, Encode};
use frame_support::{
assert_ok, derive_impl, parameter_types,
- traits::{EnsureOrigin, Equals, Everything, OriginTrait},
+ traits::{EnsureOrigin, Equals, Everything, Get, OriginTrait},
weights::RuntimeDbWeight,
};
use polkadot_parachain_primitives::primitives::Sibling;
@@ -44,7 +44,7 @@ use xcm_builder::{
InspectMessageQueues, NetworkExportTable, NetworkExportTableItem, ParentIsPreset,
SiblingParachainConvertsVia,
};
-use xcm_executor::XcmExecutor;
+use xcm_executor::{traits::ConvertOrigin, XcmExecutor};
pub type AccountId = AccountId32;
pub type Balance = u64;
@@ -63,7 +63,7 @@ frame_support::construct_runtime! {
Balances: pallet_balances::{Pallet, Event<T>},
Messages: pallet_bridge_messages::{Pallet, Call, Event<T>},
XcmOverBridge: pallet_xcm_bridge_hub::{Pallet, Call, HoldReason, Event<T>},
- XcmOverBridgeRouter: pallet_xcm_bridge_hub_router,
+ XcmOverBridgeWrappedWithExportMessageRouter: pallet_xcm_bridge_hub_router = 57,
}
}
@@ -208,17 +208,27 @@ impl pallet_xcm_bridge_hub::Config for TestRuntime {
type BlobDispatcher = TestBlobDispatcher;
}
-impl pallet_xcm_bridge_hub_router::Config<()> for TestRuntime {
+/// A router instance that simulates a scenario where the router is deployed on a different chain
+/// than the `MessageExporter`. This means that the router sends an `ExportMessage`.
+pub type XcmOverBridgeWrappedWithExportMessageRouterInstance = ();
+impl pallet_xcm_bridge_hub_router::Config<XcmOverBridgeWrappedWithExportMessageRouterInstance>
+ for TestRuntime
+{
type RuntimeEvent = RuntimeEvent;
type WeightInfo = ();
- type UniversalLocation = UniversalLocation;
+ type UniversalLocation = ExportMessageOriginUniversalLocation;
type SiblingBridgeHubLocation = BridgeHubLocation;
type BridgedNetworkId = BridgedRelayNetwork;
type Bridges = NetworkExportTable<BridgeTable>;
type DestinationVersion = AlwaysLatest;
- type ToBridgeHubSender = TestExportXcmWithXcmOverBridge;
+ // We convert to root `here` location with `BridgeHubLocationXcmOriginAsRoot`
+ type BridgeHubOrigin = frame_system::EnsureRoot<AccountId>;
+ // **Note**: The crucial part is that `ExportMessage` is processed by `XcmExecutor`, which
+ // calls the `ExportXcm` implementation of `pallet_xcm_bridge_hub` as the
+ // `MessageExporter`.
+ type ToBridgeHubSender = ExecuteXcmOverSendXcm;
type LocalXcmChannelManager = TestLocalXcmChannelManager;
type ByteFee = ConstU128<0>;
@@ -230,7 +240,7 @@ impl xcm_executor::Config for XcmConfig {
type RuntimeCall = RuntimeCall;
type XcmSender = ();
type AssetTransactor = ();
- type OriginConverter = ();
+ type OriginConverter = BridgeHubLocationXcmOriginAsRoot;
type IsReserve = ();
type IsTeleporter = ();
type UniversalLocation = UniversalLocation;
@@ -270,8 +280,8 @@ thread_local! {
///
/// Note: The crucial part is that `ExportMessage` is processed by `XcmExecutor`, which calls the
/// `ExportXcm` implementation of `pallet_xcm_bridge_hub` as `MessageExporter`.
-pub struct TestExportXcmWithXcmOverBridge;
-impl SendXcm for TestExportXcmWithXcmOverBridge {
+pub struct ExecuteXcmOverSendXcm;
+impl SendXcm for ExecuteXcmOverSendXcm {
type Ticket = Xcm<()>;
fn validate(
@@ -298,7 +308,7 @@ impl SendXcm for TestExportXcmWithXcmOverBridge {
Ok(hash)
}
}
-impl InspectMessageQueues for TestExportXcmWithXcmOverBridge {
+impl InspectMessageQueues for ExecuteXcmOverSendXcm {
fn clear_messages() {
todo!()
}
@@ -307,12 +317,51 @@ impl InspectMessageQueues for TestExportXcmWithXcmOverBridge {
todo!()
}
}
-impl TestExportXcmWithXcmOverBridge {
+impl ExecuteXcmOverSendXcm {
pub fn set_origin_for_execute(origin: Location) {
EXECUTE_XCM_ORIGIN.with(|o| *o.borrow_mut() = Some(origin));
}
}
+/// A dynamic way to set a different universal location for the origin that sends `ExportMessage`.
+pub struct ExportMessageOriginUniversalLocation;
+impl ExportMessageOriginUniversalLocation {
+ pub(crate) fn set(universal_location: Option<InteriorLocation>) {
+ EXPORT_MESSAGE_ORIGIN_UNIVERSAL_LOCATION.with(|o| *o.borrow_mut() = universal_location);
+ }
+}
+impl Get<InteriorLocation> for ExportMessageOriginUniversalLocation {
+ fn get() -> InteriorLocation {
+ EXPORT_MESSAGE_ORIGIN_UNIVERSAL_LOCATION.with(|o| {
+ o.borrow()
+ .clone()
+ .expect("`EXPORT_MESSAGE_ORIGIN_UNIVERSAL_LOCATION` is not set!")
+ })
+ }
+}
+thread_local! {
+ pub static EXPORT_MESSAGE_ORIGIN_UNIVERSAL_LOCATION: RefCell<Option<InteriorLocation>> = RefCell::new(None);