
Reformat some long lines and macros.

Dan Gohman committed 5 years ago (pull/534/head)
commit e8f08193fc
Changed files:

  1. build.rs (5 lines changed)
  2. crates/environ/src/cache/worker.rs (57 lines changed)
  3. crates/jit/src/link.rs (22 lines changed)
  4. crates/lightbeam/src/function_body.rs (28 lines changed)
  5. crates/wasi-common/src/memory.rs (2 lines changed)
  6. src/bin/wasm2obj.rs (3 lines changed)
  7. src/bin/wasmtime.rs (13 lines changed)
  8. src/bin/wast.rs (3 lines changed)
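
The recurring pattern in the diffs below is splitting an over-long string literal with a trailing backslash. As a minimal sketch (not part of the commit; the message text is borrowed from the build.rs hunk), this shows why the rewrapped form is equivalent: in a Rust string literal, a `\` at the end of a line tells the compiler to skip the newline and the leading whitespace of the next line, so the resulting string value is unchanged.

    fn main() {
        // Single-line form, as it appeared before the reformat.
        let one_line =
            "cargo:warning=The spec testsuite is disabled. To enable, run `git submodule update --remote`.";

        // Wrapped form: the trailing `\` consumes the newline and the leading
        // whitespace of the continuation line, so the value is identical.
        let wrapped =
            "cargo:warning=The spec testsuite is disabled. To enable, run `git submodule \
             update --remote`.";

        assert_eq!(one_line, wrapped);
    }

The same escape is what keeps the wrapped warn!/debug! messages and usage strings in the later hunks equivalent to their single-line originals.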

build.rs (5 lines changed)

@@ -58,7 +58,10 @@ fn main() -> anyhow::Result<()> {
         )
         .expect("generating tests");
     } else {
-        println!("cargo:warning=The spec testsuite is disabled. To enable, run `git submodule update --remote`.");
+        println!(
+            "cargo:warning=The spec testsuite is disabled. To enable, run `git submodule \
+             update --remote`."
+        );
     }
     writeln!(out, "}}")?;

crates/environ/src/cache/worker.rs (57 lines changed)

@@ -265,7 +265,11 @@ impl WorkerThread {
     let errno_val = errno::errno().0;
     if errno_val != 0 {
-        warn!("Failed to lower worker thread priority. It might affect application performance. errno: {}", errno_val);
+        warn!(
+            "Failed to lower worker thread priority. It might affect application performance. \
+             errno: {}",
+            errno_val
+        );
     } else {
         debug!("New nice value of worker thread: {}", current_nice);
     }

@@ -333,12 +337,9 @@ impl WorkerThread {
                 .ok()
         })
         .and_then(|cache_bytes| {
-            zstd::encode_all(
-                &cache_bytes[..],
-                opt_compr_lvl,
-            )
-            .map_err(|err| warn!("Failed to compress cached code: {}", err))
-            .ok()
+            zstd::encode_all(&cache_bytes[..], opt_compr_lvl)
+                .map_err(|err| warn!("Failed to compress cached code: {}", err))
+                .ok()
         })
         .and_then(|recompressed_cache_bytes| {
             fs::write(&lock_path, &recompressed_cache_bytes)

@@ -379,21 +380,31 @@ impl WorkerThread {
                 // the cache file and the stats file (they are not updated together atomically)
                 // Possible solution is to use directories per cache entry, but it complicates the system
                 // and is not worth it.
-                debug!("DETECTED task did more than once (or race with new file): recompression of {}. \
-                    Note: if optimized compression level setting has changed in the meantine, \
-                    the stats file might contain inconsistent compression level due to race.", path.display());
-            }
-            else {
+                debug!(
+                    "DETECTED task did more than once (or race with new file): \
+                     recompression of {}. Note: if optimized compression level setting \
+                     has changed in the meantine, the stats file might contain \
+                     inconsistent compression level due to race.",
+                    path.display()
+                );
+            } else {
                 new_stats.compression_level = opt_compr_lvl;
                 let _ = write_stats_file(stats_path.as_ref(), &new_stats);
             }
             if new_stats.usages < stats.usages {
-                debug!("DETECTED lower usage count (new file or race with counter increasing): file {}", path.display());
+                debug!(
+                    "DETECTED lower usage count (new file or race with counter \
+                     increasing): file {}",
+                    path.display()
+                );
             }
         }
-        }
-        else {
-            debug!("Can't read stats file again to update compression level (it might got cleaned up): file {}", stats_path.display());
+        } else {
+            debug!(
+                "Can't read stats file again to update compression level (it might got \
+                 cleaned up): file {}",
+                stats_path.display()
+            );
         }
     });

@@ -690,11 +701,15 @@ impl WorkerThread {
             add_unrecognized_and!(
                 [file: stats_path],
                 unwrap_or!(
-                    mod_metadata.modified(),
-                    add_unrecognized_and!([file: stats_path, file: mod_path], continue),
-                    "Failed to get mtime, deleting BOTH module cache and stats files",
-                    mod_path
-                )
+                    mod_metadata.modified(),
+                    add_unrecognized_and!(
+                        [file: stats_path, file: mod_path],
+                        continue
+                    ),
+                    "Failed to get mtime, deleting BOTH module cache and stats \
+                     files",
+                    mod_path
+                )
             ),
             "Failed to get metadata/mtime, deleting the file",
             stats_path

crates/jit/src/link.rs (22 lines changed)

@@ -41,11 +41,11 @@ pub fn link_module(
     if signature != *import_signature {
         // TODO: If the difference is in the calling convention,
         // we could emit a wrapper function to fix it up.
-        return Err(LinkError(
-            format!("{}/{}: incompatible import type: exported function with signature {} incompatible with function import with signature {}",
-            module_name, field,
-            signature, import_signature)
-        ));
+        return Err(LinkError(format!(
+            "{}/{}: incompatible import type: exported function with signature {} \
+             incompatible with function import with signature {}",
+            module_name, field, signature, import_signature
+        )));
     }
     dependencies.insert(unsafe { InstanceHandle::from_vmctx(vmctx) });
     function_imports.push(VMFunctionImport {

@@ -81,7 +81,8 @@ pub fn link_module(
     let import_table = &module.table_plans[index];
     if !is_table_compatible(&table, import_table) {
         return Err(LinkError(format!(
-            "{}/{}: incompatible import type: exported table incompatible with table import",
+            "{}/{}: incompatible import type: exported table incompatible with \
+             table import",
             module_name, field,
         )));
     }

@@ -119,7 +120,8 @@ pub fn link_module(
     let import_memory = &module.memory_plans[index];
     if !is_memory_compatible(&memory, import_memory) {
         return Err(LinkError(format!(
-            "{}/{}: incompatible import type: exported memory incompatible with memory import",
+            "{}/{}: incompatible import type: exported memory incompatible with \
+             memory import",
             module_name, field
         )));
     }

@@ -167,7 +169,8 @@ pub fn link_module(
     Some(export_value) => match export_value {
         Export::Table { .. } | Export::Memory { .. } | Export::Function { .. } => {
             return Err(LinkError(format!(
-                "{}/{}: incompatible import type: exported global incompatible with global import",
+                "{}/{}: incompatible import type: exported global incompatible with \
+                 global import",
                 module_name, field
            )));
         }

@@ -179,7 +182,8 @@ pub fn link_module(
     let imported_global = module.globals[index];
     if !is_global_compatible(&global, &imported_global) {
         return Err(LinkError(format!(
-            "{}/{}: incompatible import type: exported global incompatible with global import",
+            "{}/{}: incompatible import type: exported global incompatible with \
+             global import",
             module_name, field
         )));
     }

crates/lightbeam/src/function_body.rs (28 lines changed)

@@ -486,7 +486,11 @@ where
     if block.calling_convention.is_some() {
         let new_cc = block.calling_convention.clone();
-        assert!(cc.is_none() || cc == new_cc, "Can't pass different params to different elements of `br_table` yet");
+        assert!(
+            cc.is_none() || cc == new_cc,
+            "Can't pass different params to different elements of `br_table` \
+             yet"
+        );
         cc = new_cc;
     }

@@ -500,22 +504,22 @@
                 .to_drop
                 .as_ref()
                 .map(|t| t.clone().count())
-                .unwrap_or_default() as u32
+                .unwrap_or_default() as u32,
         );
     }
-    let cc = cc.map(|cc| {
-        match cc {
+    let cc = cc
+        .map(|cc| match cc {
             Left(cc) => Left(ctx.serialize_block_args(&cc, max_params)),
             Right(cc) => Right(cc),
-        }
-    }).unwrap_or_else(||
-        if max_num_callers.map(|callers| callers <= 1).unwrap_or(false) {
-            Right(ctx.virtual_calling_convention())
-        } else {
-            Left(ctx.serialize_args(max_params))
-        }
-    );
+        })
+        .unwrap_or_else(|| {
+            if max_num_callers.map(|callers| callers <= 1).unwrap_or(false) {
+                Right(ctx.virtual_calling_convention())
+            } else {
+                Left(ctx.serialize_args(max_params))
+            }
+        });
     for target in targets.iter().chain(std::iter::once(&default)).unique() {
         let block = blocks.get_mut(&target.target).unwrap();

crates/wasi-common/src/memory.rs (2 lines changed)

@@ -181,7 +181,7 @@ pub(crate) fn enc_slice_of_wasi32_uintptr(
 }
 macro_rules! dec_enc_scalar {
-    ( $ty:ident, $dec_byref:ident, $enc_byref:ident) => {
+    ($ty:ident, $dec_byref:ident, $enc_byref:ident) => {
         pub(crate) fn $dec_byref(memory: &mut [u8], ptr: wasi32::uintptr_t) -> Result<wasi::$ty> {
             dec_int_byref::<wasi::$ty>(memory, ptr)
         }

src/bin/wasm2obj.rs (3 lines changed)

@@ -67,7 +67,8 @@ The translation is dependent on the environment chosen.
 The default is a dummy environment that produces placeholder values.
 Usage:
-    wasm2obj [--target TARGET] [-Odg] [--disable-cache | --cache-config=<cache_config_file>] [--enable-simd] [--lightbeam | --cranelift] <file> -o <output>
+    wasm2obj [--target TARGET] [-Odg] [--disable-cache | --cache-config=<cache_config_file>] \
+             [--enable-simd] [--lightbeam | --cranelift] <file> -o <output>
     wasm2obj --create-cache-config [--cache-config=<cache_config_file>]
     wasm2obj --help | --version

src/bin/wasmtime.rs (13 lines changed)

@@ -62,8 +62,12 @@ including calling the start function if one is present. Additional functions
 given with --invoke are then called.
 Usage:
-    wasmtime [-odg] [--enable-simd] [--wasi-c] [--disable-cache | --cache-config=<cache_config_file>] [--preload=<wasm>...] [--env=<env>...] [--dir=<dir>...] [--mapdir=<mapping>...] [--lightbeam | --cranelift] <file> [<arg>...]
-    wasmtime [-odg] [--enable-simd] [--wasi-c] [--disable-cache | --cache-config=<cache_config_file>] [--env=<env>...] [--dir=<dir>...] [--mapdir=<mapping>...] --invoke=<fn> [--lightbeam | --cranelift] <file> [<arg>...]
+    wasmtime [-odg] [--enable-simd] [--wasi-c] [--disable-cache | \
+             --cache-config=<cache_config_file>] [--preload=<wasm>...] [--env=<env>...] [--dir=<dir>...] \
+             [--mapdir=<mapping>...] [--lightbeam | --cranelift] <file> [<arg>...]
+    wasmtime [-odg] [--enable-simd] [--wasi-c] [--disable-cache | \
+             --cache-config=<cache_config_file>] [--env=<env>...] [--dir=<dir>...] \
+             [--mapdir=<mapping>...] --invoke=<fn> [--lightbeam | --cranelift] <file> [<arg>...]
     wasmtime --create-cache-config [--cache-config=<cache_config_file>]
     wasmtime --help | --version

@@ -128,7 +132,10 @@ fn compute_preopen_dirs(flag_dir: &[String], flag_mapdir: &[String]) -> Vec<(Str
     for mapdir in flag_mapdir {
         let parts: Vec<&str> = mapdir.split("::").collect();
         if parts.len() != 2 {
-            println!("--mapdir argument must contain exactly one double colon ('::'), separating a guest directory name and a host directory name");
+            println!(
+                "--mapdir argument must contain exactly one double colon ('::'), separating a \
+                 guest directory name and a host directory name"
+            );
             exit(1);
         }
         let (key, value) = (parts[0], parts[1]);

src/bin/wast.rs (3 lines changed)

@@ -42,7 +42,8 @@ const USAGE: &str = "
 Wast test runner.
 Usage:
-    wast [-do] [--enable-simd] [--disable-cache | --cache-config=<cache_config_file>] [--lightbeam | --cranelift] <file>...
+    wast [-do] [--enable-simd] [--disable-cache | --cache-config=<cache_config_file>] [--lightbeam \
+         | --cranelift] <file>...
     wast --create-cache-config [--cache-config=<cache_config_file>]
     wast --help | --version
