|
|
@ -5,43 +5,55 @@ |
|
|
|
// couldn't figure out how to write it in bash.
|
|
|
|
|
|
|
|
const fs = require('fs'); |
|
|
|
const { spawn } = require('node:child_process'); |
|
|
|
|
|
|
|
// Our first argument is a file that is a giant json blob which contains at
// least all the messages for all of the commits that were a part of this PR.
// This is used to test if any commit message includes a string.
//
// NOTE(review): this top-level read looks like leftover from an older
// revision of this script — `main()` below performs the identical read into
// its own local `commits`; confirm this copy can be deleted.
const commits = fs.readFileSync(process.argv[2]).toString();
|
|
|
// Number of generic buckets to shard crates into. Note that we additionally add
// single-crate buckets for our biggest crates.
//
// `shard()` below appends one extra singleton bucket per entry of
// `SINGLE_CRATE_BUCKETS` on top of these generic buckets.
const GENERIC_BUCKETS = 3;
|
|
|
|
|
|
|
// The second argument is a file that contains the names of all files modified
// for a PR, used for file-based filters.
//
// NOTE(review): this top-level read looks like leftover from an older
// revision of this script — `main()` below performs the identical read into
// its own local `names`; confirm this copy can be deleted.
const names = fs.readFileSync(process.argv[3]).toString();
|
|
|
// Crates which are their own buckets. These are the very slowest to
// compile-and-test crates.
//
// `shard()` skips these when round-robin filling the generic buckets and
// gives each one its own singleton bucket instead.
const SINGLE_CRATE_BUCKETS = ["wasmtime", "wasmtime-cli", "wasmtime-wasi"];
|
|
|
|
|
|
|
// This is the small, fast-to-execute matrix we use for PRs before they enter
|
|
|
|
// the merge queue. Same schema as `FULL_MATRIX`.
|
|
|
|
// A single fast Linux job; the same object shape as the full-matrix entries.
const FAST_MATRIX = [
  {
    os: "ubuntu-latest",
    name: "Test Linux x86_64",
    filter: "linux-x64",
    isa: "x64",
  },
];
|
|
|
|
|
|
|
// This is the full matrix of what we test on CI. This includes a number of
|
|
|
|
// platforms and a number of cross-compiled targets that are emulated with QEMU.
|
|
|
|
// This must be kept tightly in sync with the `test` step in `main.yml`.
|
|
|
|
// This is the full, unsharded, and unfiltered matrix of what we test on
|
|
|
|
// CI. This includes a number of platforms and a number of cross-compiled
|
|
|
|
// targets that are emulated with QEMU. This must be kept tightly in sync with
|
|
|
|
// the `test` step in `main.yml`.
|
|
|
|
//
|
|
|
|
// The supported keys here are:
|
|
|
|
//
|
|
|
|
// * `os` - the github-actions name of the runner os
|
|
|
|
//
|
|
|
|
// * `name` - the human-readable name of the job
|
|
|
|
//
|
|
|
|
// * `filter` - a string which if `prtest:$filter` is in the commit messages
|
|
|
|
// it'll force running this test suite on PR CI.
|
|
|
|
//
|
|
|
|
// * `isa` - changes to `cranelift/codegen/src/$isa` will automatically run this
|
|
|
|
// test suite.
|
|
|
|
//
|
|
|
|
// * `target` - used for cross-compiles if present. Effectively Cargo's
|
|
|
|
// `--target` option for all its operations.
|
|
|
|
//
|
|
|
|
// * `gcc_package`, `gcc`, `qemu`, `qemu_target` - configuration for building
|
|
|
|
// QEMU and installing cross compilers to execute a cross-compiled test suite
|
|
|
|
// on CI.
|
|
|
|
// * `isa` - changes to `cranelift/codegen/src/$isa` will automatically run this
|
|
|
|
// test suite.
|
|
|
|
//
|
|
|
|
// * `rust` - the Rust version to install, and if unset this'll be set to
|
|
|
|
// `default`
|
|
|
|
const array = [ |
|
|
|
{ |
|
|
|
"os": "ubuntu-latest", |
|
|
|
"name": "Test Linux x86_64", |
|
|
|
"filter": "linux-x64", |
|
|
|
"isa": "x64", |
|
|
|
"extra_features": "--features wasmtime-wasi-nn/onnx" |
|
|
|
}, |
|
|
|
const FULL_MATRIX = [ |
|
|
|
...FAST_MATRIX, |
|
|
|
{ |
|
|
|
"os": "ubuntu-latest", |
|
|
|
"name": "Test MSRV on Linux x86_64", |
|
|
@ -59,20 +71,17 @@ const array = [ |
|
|
|
"os": "macos-13", |
|
|
|
"name": "Test macOS x86_64", |
|
|
|
"filter": "macos-x64", |
|
|
|
"extra_features": "--features wasmtime-wasi-nn/onnx" |
|
|
|
}, |
|
|
|
{ |
|
|
|
"os": "macos-14", |
|
|
|
"name": "Test macOS arm64", |
|
|
|
"filter": "macos-arm64", |
|
|
|
"target": "aarch64-apple-darwin", |
|
|
|
"extra_features": "--features wasmtime-wasi-nn/onnx" |
|
|
|
}, |
|
|
|
{ |
|
|
|
"os": "windows-latest", |
|
|
|
"name": "Test Windows MSVC x86_64", |
|
|
|
"filter": "windows-x64", |
|
|
|
"extra_features": "--features wasmtime-wasi-nn/onnx" |
|
|
|
}, |
|
|
|
{ |
|
|
|
"os": "windows-latest", |
|
|
@ -115,44 +124,153 @@ const array = [ |
|
|
|
} |
|
|
|
]; |
|
|
|
|
|
|
|
for (let config of array) { |
|
|
|
if (config.rust === undefined) { |
|
|
|
config.rust = 'default'; |
|
|
|
/// Get the workspace's full list of member crate names via `cargo metadata`.
///
/// Returns a promise of an array of crate-name strings; rejects if the
/// subprocess fails to spawn or exits with a nonzero status.
async function getWorkspaceMembers() {
  // Spawn a `cargo metadata` subprocess, accumulate its JSON output from
  // `stdout`, and wait for it to exit.
  //
  // Note: `spawn` has no `encoding` option (that belongs to `exec` and
  // `spawnSync`), so set the encoding on the stdout stream itself. That way
  // chunks arrive as strings and multi-byte UTF-8 sequences are never split
  // across chunk boundaries.
  const child = spawn("cargo", ["metadata"]);
  child.stdout.setEncoding("utf8");
  let data = "";
  child.stdout.on("data", chunk => data += chunk);
  await new Promise((resolve, reject) => {
    // Previously any exit status resolved this promise, which would make the
    // `JSON.parse` below fail confusingly on empty output; fail loudly instead.
    child.on("close", code => {
      if (code === 0) {
        resolve();
      } else {
        reject(new Error(`\`cargo metadata\` exited with status ${code}`));
      }
    });
    child.on("error", reject);
  });

  // Get the names of the crates in the workspace from the JSON metadata by
  // building a package-id to name map and then translating the package-ids
  // listed as workspace members.
  const metadata = JSON.parse(data);
  const id_to_name = {};
  for (const pkg of metadata.packages) {
    id_to_name[pkg.id] = pkg.name;
  }
  return metadata.workspace_members.map(m => id_to_name[m]);
}
|
|
|
|
|
|
|
function myFilter(item) { |
|
|
|
if (item.isa && names.includes(`cranelift/codegen/src/isa/${item.isa}`)) { |
|
|
|
return true; |
|
|
|
/// For each given target configuration, shard the workspace's crates into
|
|
|
|
/// buckets across that config.
|
|
|
|
///
|
|
|
|
/// This is essentially a `flat_map` where each config that logically tests all
|
|
|
|
/// crates int he workspace is mapped to N sharded configs that each test only a
|
|
|
|
/// subset of crates in the workspace. Each sharded config's subset of crates to
|
|
|
|
/// test are disjoint from all its siblings, and the union of all thes siblings'
|
|
|
|
/// crates to test is the full workspace members set.
|
|
|
|
///
|
|
|
|
/// With some poetic license around a `crates_to_test` key that doesn't actually
|
|
|
|
/// exist, logically each element of the input `configs` list gets transformed
|
|
|
|
/// like this:
|
|
|
|
///
|
|
|
|
/// { os: "ubuntu-latest", isa: "x64", ..., crates: "all" }
|
|
|
|
///
|
|
|
|
/// ==>
|
|
|
|
///
|
|
|
|
/// [
|
|
|
|
/// { os: "ubuntu-latest", isa: "x64", ..., crates: ["wasmtime"] },
|
|
|
|
/// { os: "ubuntu-latest", isa: "x64", ..., crates: ["wasmtime-cli"] },
|
|
|
|
/// { os: "ubuntu-latest", isa: "x64", ..., crates: ["wasmtime-wasi"] },
|
|
|
|
/// { os: "ubuntu-latest", isa: "x64", ..., crates: ["cranelift", "cranelift-codegen", ...] },
|
|
|
|
/// { os: "ubuntu-latest", isa: "x64", ..., crates: ["wasmtime-slab", "cranelift-entity", ...] },
|
|
|
|
/// { os: "ubuntu-latest", isa: "x64", ..., crates: ["cranelift-environ", "wasmtime-cli-flags", ...] },
|
|
|
|
/// ...
|
|
|
|
/// ]
|
|
|
|
///
|
|
|
|
/// Note that `crates: "all"` is implicit in the input and omitted. Similarly,
|
|
|
|
/// `crates: [...]` in each output config is actually implemented via adding a
|
|
|
|
/// `bucket` key, which contains the CLI flags we must pass to `cargo` to run
|
|
|
|
/// tests for just this config's subset of crates.
|
|
|
|
/// See the doc comment above: expands each config into one config per crate
/// bucket, attaching a `bucket` key of `--package`/`--exclude` cargo flags.
async function shard(configs) {
  const members = await getWorkspaceMembers();

  // Divide the workspace crates into N disjoint subsets. Crates that are
  // particularly expensive to compile and test form their own singleton
  // subset, so skip them during the round-robin fill here.
  const buckets = Array.from({ length: GENERIC_BUCKETS }, _ => new Set());
  let i = 0;
  for (const crate of members) {
    if (SINGLE_CRATE_BUCKETS.includes(crate)) continue;
    buckets[i].add(crate);
    i = (i + 1) % GENERIC_BUCKETS;
  }
  // `const` here: the original `for (crate of ...)` leaked an implicit global.
  for (const crate of SINGLE_CRATE_BUCKETS) {
    buckets.push(new Set([crate]));
  }

  // For each config, expand it into N configs, one for each disjoint set we
  // created above.
  const sharded = [];
  for (const config of configs) {
    for (const bucket of buckets) {
      sharded.push({
        ...config,
        name: `${config.name} (${Array.from(bucket).join(', ')})`,
        // We run tests via `cargo test --workspace`, so exclude crates that
        // aren't in this bucket, rather than naming only the crates that are
        // in this bucket.
        bucket: members
          .map(c => bucket.has(c) ? `--package ${c}` : `--exclude ${c}`)
          .join(" "),
      });
    }
  }
  return sharded;
}
|
|
|
|
|
|
|
// NOTE(review): dead leftover from the pre-refactor revision — `array` and
// `myFilter` are not defined in the current version of this script, and
// `main()` computes its own `filtered`. Confirm this line can be deleted.
const filtered = array.filter(myFilter);
|
|
|
/// Entry point: read the commit messages and changed-file list, pick the
/// matrix subset to run, shard it across crate buckets, and print it as JSON.
async function main() {
  // Our first argument is a file that is a giant json blob which contains at
  // least all the messages for all of the commits that were a part of this PR.
  // This is used to test if any commit message includes a string.
  const commits = fs.readFileSync(process.argv[2]).toString();

  // The second argument is a file that contains the names of all files modified
  // for a PR, used for file-based filters.
  const names = fs.readFileSync(process.argv[3]).toString();

  // Default any config without an explicit Rust version to 'default'.
  for (const config of FULL_MATRIX) {
    if (config.rust === undefined) {
      config.rust = 'default';
    }
  }

  // If the optional third argument to this script is `true` then that means all
  // tests are being run and no filtering should happen.
  if (process.argv[4] === 'true') {
    console.log(JSON.stringify(await shard(FULL_MATRIX), undefined, 2));
    return;
  }

  // When we aren't running the full CI matrix, filter configs down to just the
  // relevant bits based on files changed in this commit or if the commit asks
  // for a certain config to run.
  const filtered = FULL_MATRIX.filter(config => {
    // If an ISA-specific test was modified, then include that ISA config.
    if (config.isa && names.includes(`cranelift/codegen/src/isa/${config.isa}`)) {
      return true;
    }

    // If any runtest was modified, include all ISA configs as runtests can
    // target any backend.
    if (names.includes(`cranelift/filetests/filetests/runtests`)) {
      return config.isa !== undefined;
    }

    // If the commit explicitly asks for this test config, then include it.
    if (config.filter && commits.includes(`prtest:${config.filter}`)) {
      return true;
    }

    return false;
  });

  // If at least one test is being run via our filters then run those tests.
  if (filtered.length > 0) {
    console.log(JSON.stringify(await shard(filtered), undefined, 2));
    return;
  }

  // Otherwise if nothing else is being run, run the fast subset of the matrix.
  console.log(JSON.stringify(await shard(FAST_MATRIX), undefined, 2));
}
|
|
|
|
|
|
|
// Don't leave the top-level promise floating: surface any failure (e.g. a
// `cargo metadata` error) on stderr and exit nonzero so CI notices.
main().catch(err => {
  console.error(err);
  process.exit(1);
});
|
|
|