// Small script used to calculate the matrix of tests that are going to be
// performed for a CI run.
//
// This is invoked by the `determine` step and is written in JS because I
// couldn't figure out how to write it in bash.

const fs = require('fs');
const { spawn } = require('node:child_process');

// Number of generic buckets to shard crates into. Note that we additionally add
// single-crate buckets for our biggest crates.
const GENERIC_BUCKETS = 3;

// Crates which are their own buckets. These are the very slowest to
// compile-and-test crates.
const SINGLE_CRATE_BUCKETS = ["wasmtime", "wasmtime-cli", "wasmtime-wasi"];

const ubuntu = 'ubuntu-24.04';
const windows = 'windows-2025';
const macos = 'macos-15';

// This is the small, fast-to-execute matrix we use for PRs before they enter
// the merge queue. Same schema as `FULL_MATRIX`.
const FAST_MATRIX = [
  {
    "os": ubuntu,
    "name": "Test Linux x86_64",
    "filter": "linux-x64",
    "isa": "x64",
  },
];

// This is the full, unsharded, and unfiltered matrix of what we test on
// CI. This includes a number of platforms and a number of cross-compiled
// targets that are emulated with QEMU. This must be kept tightly in sync with
// the `test` step in `main.yml`.
//
// The supported keys here are:
//
// * `os` - the GitHub Actions name of the runner OS.
//
// * `name` - the human-readable name of the job.
//
// * `filter` - a string which, if `prtest:$filter` appears in the commit
//   messages, forces this test suite to run on PR CI.
//
// * `isa` - changes to `cranelift/codegen/src/$isa` will automatically run this
//   test suite.
//
// * `target` - used for cross-compiles if present. Effectively Cargo's
//   `--target` option for all its operations.
//
// * `gcc_package`, `gcc`, `qemu`, `qemu_target` - configuration for building
//   QEMU and installing cross compilers to execute a cross-compiled test suite
//   on CI.
//
// * `rust` - the Rust version to install; if unset this will be set to
//   `default`.
const FULL_MATRIX = [
  ...FAST_MATRIX,
  {
    "os": ubuntu,
    "name": "Test MSRV on Linux x86_64",
    "filter": "linux-x64",
    "isa": "x64",
    "rust": "msrv",
  },
  {
    "os": ubuntu,
    "name": "Test Linux x86_64 with MPK",
    "filter": "linux-x64",
    "isa": "x64"
  },
  {
    "os": ubuntu,
    "name": "Test Linux x86_64 with ASAN",
    "filter": "asan",
    "rust": "wasmtime-ci-pinned-nightly",
    "target": "x86_64-unknown-linux-gnu",
  },
  {
    "os": macos,
    "name": "Test macOS x86_64",
    "filter": "macos-x64",
    "target": "x86_64-apple-darwin",
  },
  {
    "os": macos,
    "name": "Test macOS arm64",
    "filter": "macos-arm64",
    "target": "aarch64-apple-darwin",
  },
  {
    "os": windows,
    "name": "Test Windows MSVC x86_64",
    "filter": "windows-x64",
  },
  {
    "os": windows,
    "target": "x86_64-pc-windows-gnu",
    "name": "Test Windows MinGW x86_64",
    "filter": "mingw-x64"
  },
  {
    "os": ubuntu + '-arm',
    "target": "aarch64-unknown-linux-gnu",
    "name": "Test Linux arm64",
    "filter": "linux-arm64",
    "isa": "aarch64",
  },
  {
    "os": ubuntu,
    "target": "s390x-unknown-linux-gnu",
    "gcc_package": "gcc-s390x-linux-gnu",
    "gcc": "s390x-linux-gnu-gcc",
    "qemu": "qemu-s390x -L /usr/s390x-linux-gnu",
    "qemu_target": "s390x-linux-user",
    "name": "Test Linux s390x",
    "filter": "linux-s390x",
    "isa": "s390x"
  },
  {
    "os": ubuntu,
    "target": "riscv64gc-unknown-linux-gnu",
    "gcc_package": "gcc-riscv64-linux-gnu",
    "gcc": "riscv64-linux-gnu-gcc",
    "qemu": "qemu-riscv64 -cpu rv64,v=true,vlen=256,vext_spec=v1.0,zfa=true,zfh=true,zba=true,zbb=true,zbc=true,zbs=true,zbkb=true,zcb=true,zicond=true,zvfh=true -L /usr/riscv64-linux-gnu",
    "qemu_target": "riscv64-linux-user",
    "name": "Test Linux riscv64",
    "filter": "linux-riscv64",
    "isa": "riscv64",
  },
  {
    "name": "Tests on i686-unknown-linux-gnu",
    "os": ubuntu,
    "target": "i686-unknown-linux-gnu",
    "gcc_package": "gcc-i686-linux-gnu",
    "gcc": "i686-linux-gnu-gcc",
  },
  {
    "name": "Tests on armv7-unknown-linux-gnueabihf",
    "os": ubuntu,
    "target": "armv7-unknown-linux-gnueabihf",
    "gcc_package": "gcc-arm-linux-gnueabihf",
    "gcc": "arm-linux-gnueabihf-gcc",
    "qemu": "qemu-arm -L /usr/arm-linux-gnueabihf -E LD_LIBRARY_PATH=/usr/arm-linux-gnueabihf/lib",
    "qemu_target": "arm-linux-user",
  },
];

/// Get the workspace's full list of member crates.
async function getWorkspaceMembers() {
  // Spawn a `cargo metadata` subprocess, accumulate its JSON output from
  // `stdout`, and wait for it to exit.
  const child = spawn("cargo", ["metadata"], { encoding: "utf8" });
  let data = "";
  child.stdout.on("data", chunk => data += chunk);
  await new Promise((resolve, reject) => {
    child.on("close", resolve);
    child.on("error", reject);
  });

  // Get the names of the crates in the workspace from the JSON metadata by
  // building a package-id to name map and then translating the package-ids
  // listed as workspace members.
  const metadata = JSON.parse(data);
  const id_to_name = {};
  for (const pkg of metadata.packages) {
    id_to_name[pkg.id] = pkg.name;
  }
  return metadata.workspace_members.map(m => id_to_name[m]);
}
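
// For reference, a rough sketch of the `cargo metadata` JSON consumed above,
// abridged to just the two fields used here (package ids are opaque strings
// whose exact format depends on the Cargo version):
//
//   {
//     "packages": [ { "name": "wasmtime", "id": "<opaque package id>", ... }, ... ],
//     "workspace_members": [ "<opaque package id>", ... ],
//     ...
//   }
//
// `workspace_members` lists the ids of crates in this workspace, while
// `packages` covers every crate in the resolved dependency graph.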

/// For each given target configuration, shard the workspace's crates into
/// buckets across that config.
///
/// This is essentially a `flat_map` where each config that logically tests all
/// crates in the workspace is mapped to N sharded configs that each test only a
/// subset of crates in the workspace. Each sharded config's subset of crates to
/// test is disjoint from all its siblings, and the union of all these siblings'
/// crates to test is the full workspace members set.
///
/// With some poetic license around a `crates` key that doesn't actually exist,
/// logically each element of the input `configs` list gets transformed like
/// this:
///
///     { os: "ubuntu-latest", isa: "x64", ..., crates: "all" }
///
///         ==>
///
///     [
///       { os: "ubuntu-latest", isa: "x64", ..., crates: ["wasmtime"] },
///       { os: "ubuntu-latest", isa: "x64", ..., crates: ["wasmtime-cli"] },
///       { os: "ubuntu-latest", isa: "x64", ..., crates: ["wasmtime-wasi"] },
///       { os: "ubuntu-latest", isa: "x64", ..., crates: ["cranelift", "cranelift-codegen", ...] },
///       { os: "ubuntu-latest", isa: "x64", ..., crates: ["wasmtime-slab", "cranelift-entity", ...] },
///       { os: "ubuntu-latest", isa: "x64", ..., crates: ["cranelift-environ", "wasmtime-cli-flags", ...] },
///       ...
///     ]
///
/// Note that `crates: "all"` is implicit in the input and omitted. Similarly,
/// `crates: [...]` in each output config is actually implemented via adding a
/// `bucket` key, which contains the CLI flags we must pass to `cargo` to run
/// tests for just this config's subset of crates.
async function shard(configs) {
  const members = await getWorkspaceMembers();

  // Divide the workspace crates into N disjoint subsets. Crates that are
  // particularly expensive to compile and test form their own singleton subset.
  const buckets = Array.from({ length: GENERIC_BUCKETS }, _ => new Set());
  let i = 0;
  for (const crate of members) {
    if (SINGLE_CRATE_BUCKETS.indexOf(crate) != -1) continue;
    buckets[i].add(crate);
    i = (i + 1) % GENERIC_BUCKETS;
  }
  for (const crate of SINGLE_CRATE_BUCKETS) {
    buckets.push(new Set([crate]));
  }

  // For each config, expand it into N configs, one for each disjoint set we
  // created above.
  const sharded = [];
  for (const config of configs) {
    for (const bucket of buckets) {
      sharded.push(Object.assign(
        {},
        config,
        {
          name: `${config.name} (${Array.from(bucket).join(', ')})`,
          // We run tests via `cargo test --workspace`, so exclude crates that
          // aren't in this bucket, rather than naming only the crates that are
          // in this bucket.
          bucket: members
            .map(c => bucket.has(c) ? `--package ${c}` : `--exclude ${c}`)
            .join(" "),
        }
      ));
    }
  }
  return sharded;
}
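
// For illustration only: with the bucketing above, the sharded config for the
// `wasmtime` singleton bucket comes out roughly like the following, where the
// `bucket` string holds the flags that CI is expected to append to its
// `cargo test --workspace` invocation (the exact crate list depends on the
// current workspace members):
//
//   {
//     "os": "ubuntu-24.04",
//     "name": "Test Linux x86_64 (wasmtime)",
//     ...
//     "bucket": "--package wasmtime --exclude wasmtime-cli --exclude wasmtime-wasi --exclude ..."
//   }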

async function main() {
  // Our first argument is a file that is a giant JSON blob which contains at
  // least all the messages for all of the commits that were a part of this PR.
  // This is used to test if any commit message includes a string.
  const commits = fs.readFileSync(process.argv[2]).toString();

  // The second argument is a file that contains the names of all files modified
  // for a PR, used for file-based filters.
  const names = fs.readFileSync(process.argv[3]).toString();

  // Apply the default Rust toolchain to any config that didn't specify one.
  for (let config of FULL_MATRIX) {
    if (config.rust === undefined) {
      config.rust = 'default';
    }
  }

  // If the optional third argument to this script is `true` then that means all
  // tests are being run and no filtering should happen.
  if (process.argv[4] == 'true') {
    console.log(JSON.stringify(await shard(FULL_MATRIX), undefined, 2));
    return;
  }

  // When we aren't running the full CI matrix, filter configs down to just the
  // relevant bits based on files changed in this commit or if the commit asks
  // for a certain config to run.
  const filtered = FULL_MATRIX.filter(config => {
    // If an ISA-specific test was modified, then include that ISA config.
    if (config.isa && names.includes(`cranelift/codegen/src/isa/${config.isa}`)) {
      return true;
    }

    // If any runtest was modified, include all ISA configs as runtests can
    // target any backend.
    if (names.includes(`cranelift/filetests/filetests/runtests`)) {
      if (config.isa !== undefined)
        return true;
    }

    // If the commit explicitly asks for this test config, then include it.
    if (config.filter && commits.includes(`prtest:${config.filter}`)) {
      return true;
    }

    return false;
  });

  // If at least one test is being run via our filters then run those tests.
  if (filtered.length > 0) {
    console.log(JSON.stringify(await shard(filtered), undefined, 2));
    return;
  }

  // Otherwise if nothing else is being run, run the fast subset of the matrix.
  console.log(JSON.stringify(await shard(FAST_MATRIX), undefined, 2));
}

main()
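
// Illustrative invocation (the script path and file names here are made up;
// the real wiring lives in the `determine` step of CI):
//
//   node build-test-matrix.js commit-messages.json changed-files.txt [true]
//
// where the optional trailing `true` skips filtering and shards the full
// matrix. The selected, sharded matrix is printed to stdout as a JSON array.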