rollup merge of #21413: ahmedcharles/remove-test-features

This isn't the entire set of changes; more are coming. #19145
Alex Crichton 2015-01-21 09:15:57 -08:00
commit 81504f211c
4 changed files with 6 additions and 148 deletions

configure

@@ -509,7 +509,6 @@ opt optimize-tests 1 "build tests with optimizations"
opt libcpp 1 "build with llvm with libc++ instead of libstdc++ when using clang"
opt llvm-assertions 1 "build LLVM with assertions"
opt debug 1 "build with extra debug fun"
opt ratchet-bench 0 "ratchet benchmarks"
opt fast-make 0 "use .gitmodules as timestamp for submodule deps"
opt ccache 0 "invoke gcc/clang via ccache to reuse object files between builds"
opt local-rust 0 "use an installed rustc rather than downloading a snapshot"


@@ -115,20 +115,6 @@ pub struct Config {
// Write out a parseable log of tests that were run
pub logfile: Option<Path>,
// Write out a json file containing any metrics of the run
pub save_metrics: Option<Path>,
// Write and ratchet a metrics file
pub ratchet_metrics: Option<Path>,
// Percent change in metrics to consider noise
pub ratchet_noise_percent: Option<f64>,
// "Shard" of the testsuite to pub run: this has the form of
// two numbers (a,b), and causes only those tests with
// positional order equal to a mod b to run.
pub test_shard: Option<(uint,uint)>,
// A command line to prefix program execution with,
// for running under valgrind
pub runtool: Option<String>,

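Note: per the field comments above and the option help further down, the removed "ratchet" machinery recorded benchmark metrics to a file and failed a later run if a benchmark regressed beyond the configured noise percentage. A minimal sketch of that check in modern Rust — `ratchet_pass` is an illustrative name, not the historical libtest API, which operated on a saved metrics map:

```rust
/// Illustrative ratchet check: does `current` stay within the allowed
/// noise band above the recorded `baseline`? (Lower is better, e.g. ns/iter.)
fn ratchet_pass(baseline: f64, current: f64, noise_percent: Option<f64>) -> bool {
    // Regressions within `noise_percent` of the baseline count as noise;
    // with no noise percent configured, any regression fails.
    let allowed = noise_percent.unwrap_or(0.0) / 100.0 * baseline;
    current <= baseline + allowed
}

fn main() {
    let baseline = 1000.0; // value recorded in the ratchet file
    assert!(ratchet_pass(baseline, 1010.0, Some(2.0)));  // +1%: within noise
    assert!(!ratchet_pass(baseline, 1100.0, Some(2.0))); // +10%: regression
}
```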

@@ -77,10 +77,6 @@ pub fn parse_config(args: Vec<String> ) -> Config {
optopt("", "target-rustcflags", "flags to pass to rustc for target", "FLAGS"),
optflag("", "verbose", "run tests verbosely, showing all output"),
optopt("", "logfile", "file to log test execution to", "FILE"),
optopt("", "save-metrics", "file to save metrics to", "FILE"),
optopt("", "ratchet-metrics", "file to ratchet metrics against", "FILE"),
optopt("", "ratchet-noise-percent",
"percent change in metrics to consider noise", "N"),
optflag("", "jit", "run tests under the JIT"),
optopt("", "target", "the target to build for", "TARGET"),
optopt("", "host", "the host to build for", "HOST"),
@@ -90,7 +86,6 @@ pub fn parse_config(args: Vec<String> ) -> Config {
optopt("", "adb-path", "path to the android debugger", "PATH"),
optopt("", "adb-test-dir", "path to tests for the android debugger", "PATH"),
optopt("", "lldb-python-dir", "directory containing LLDB's python module", "PATH"),
optopt("", "test-shard", "run shard A, of B shards, worth of the testsuite", "A.B"),
optflag("h", "help", "show this message"));
assert!(!args.is_empty());
@@ -152,12 +147,6 @@ pub fn parse_config(args: Vec<String> ) -> Config {
filter: filter,
cfail_regex: Regex::new(errors::EXPECTED_PATTERN).unwrap(),
logfile: matches.opt_str("logfile").map(|s| Path::new(s)),
save_metrics: matches.opt_str("save-metrics").map(|s| Path::new(s)),
ratchet_metrics:
matches.opt_str("ratchet-metrics").map(|s| Path::new(s)),
ratchet_noise_percent:
matches.opt_str("ratchet-noise-percent")
.and_then(|s| s.as_slice().parse::<f64>()),
runtool: matches.opt_str("runtool"),
host_rustcflags: matches.opt_str("host-rustcflags"),
target_rustcflags: matches.opt_str("target-rustcflags"),
@@ -176,7 +165,6 @@ pub fn parse_config(args: Vec<String> ) -> Config {
opt_str2(matches.opt_str("adb-test-dir")).as_slice() &&
!opt_str2(matches.opt_str("adb-test-dir")).is_empty(),
lldb_python_dir: matches.opt_str("lldb-python-dir"),
test_shard: test::opt_shard(matches.opt_str("test-shard")),
verbose: matches.opt_present("verbose"),
}
}
@@ -210,10 +198,6 @@ pub fn log_config(config: &Config) {
logv(c, format!("adb_test_dir: {:?}", config.adb_test_dir));
logv(c, format!("adb_device_status: {}",
config.adb_device_status));
match config.test_shard {
None => logv(c, "test_shard: (all)".to_string()),
Some((a,b)) => logv(c, format!("test_shard: {}.{}", a, b))
}
logv(c, format!("verbose: {}", config.verbose));
logv(c, format!("\n"));
}
@@ -284,15 +268,8 @@ pub fn test_opts(config: &Config) -> test::TestOpts {
logfile: config.logfile.clone(),
run_tests: true,
run_benchmarks: true,
ratchet_metrics: config.ratchet_metrics.clone(),
ratchet_noise_percent: config.ratchet_noise_percent.clone(),
save_metrics: config.save_metrics.clone(),
test_shard: config.test_shard.clone(),
nocapture: false,
color: test::AutoColor,
show_boxplot: false,
boxplot_width: 50,
show_all_stats: false,
}
}

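Note: the removed `--test-shard` option took a spec of the form `A.B`, "run shard A, of B shards". A sketch of that parse in modern Rust (`usize` standing in for the old `uint`; `parse_shard` is an illustrative name, and unlike the `opt_shard` shown below, which panicked on an out-of-range shard, this version returns `None`):

```rust
/// Parse a shard spec "A.B" into (a, b), requiring 1 <= a <= b and
/// rejecting extra components, mirroring libtest's opt_shard.
fn parse_shard(spec: &str) -> Option<(usize, usize)> {
    let mut it = spec.split('.');
    match (it.next()?.parse::<usize>().ok(),
           it.next()?.parse::<usize>().ok(),
           it.next()) {
        (Some(a), Some(b), None) if a >= 1 && a <= b => Some((a, b)),
        _ => None,
    }
}

fn main() {
    assert_eq!(parse_shard("2.8"), Some((2, 8)));
    assert_eq!(parse_shard("0.8"), None);   // shards are 1-indexed
    assert_eq!(parse_shard("2.8.1"), None); // trailing component rejected
}
```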

@@ -300,16 +300,9 @@ pub struct TestOpts {
pub run_ignored: bool,
pub run_tests: bool,
pub run_benchmarks: bool,
pub ratchet_metrics: Option<Path>,
pub ratchet_noise_percent: Option<f64>,
pub save_metrics: Option<Path>,
pub test_shard: Option<(uint,uint)>,
pub logfile: Option<Path>,
pub nocapture: bool,
pub color: ColorConfig,
pub show_boxplot: bool,
pub boxplot_width: uint,
pub show_all_stats: bool,
}
impl TestOpts {
@@ -320,16 +313,9 @@ impl TestOpts {
run_ignored: false,
run_tests: false,
run_benchmarks: false,
ratchet_metrics: None,
ratchet_noise_percent: None,
save_metrics: None,
test_shard: None,
logfile: None,
nocapture: false,
color: AutoColor,
show_boxplot: false,
boxplot_width: 50,
show_all_stats: false,
}
}
}
@@ -342,28 +328,14 @@ fn optgroups() -> Vec<getopts::OptGroup> {
getopts::optflag("", "test", "Run tests and not benchmarks"),
getopts::optflag("", "bench", "Run benchmarks instead of tests"),
getopts::optflag("h", "help", "Display this message (longer with --help)"),
getopts::optopt("", "save-metrics", "Location to save bench metrics",
"PATH"),
getopts::optopt("", "ratchet-metrics",
"Location to load and save metrics from. The metrics \
loaded are cause benchmarks to fail if they run too \
slowly", "PATH"),
getopts::optopt("", "ratchet-noise-percent",
"Tests within N% of the recorded metrics will be \
considered as passing", "PERCENTAGE"),
getopts::optopt("", "logfile", "Write logs to the specified file instead \
of stdout", "PATH"),
getopts::optopt("", "test-shard", "run shard A, of B shards, worth of the testsuite",
"A.B"),
getopts::optflag("", "nocapture", "don't capture stdout/stderr of each \
task, allow printing directly"),
getopts::optopt("", "color", "Configure coloring of output:
auto = colorize if stdout is a tty and tests are run on serially (default);
always = always colorize output;
never = never colorize output;", "auto|always|never"),
getopts::optflag("", "boxplot", "Display a boxplot of the benchmark statistics"),
getopts::optopt("", "boxplot-width", "Set the boxplot width (default 50)", "WIDTH"),
getopts::optflag("", "stats", "Display the benchmark min, max, and quartiles"))
never = never colorize output;", "auto|always|never"))
}
fn usage(binary: &str) {
@@ -428,19 +400,6 @@ pub fn parse_opts(args: &[String]) -> Option<OptRes> {
let run_tests = ! run_benchmarks ||
matches.opt_present("test");
let ratchet_metrics = matches.opt_str("ratchet-metrics");
let ratchet_metrics = ratchet_metrics.map(|s| Path::new(s));
let ratchet_noise_percent = matches.opt_str("ratchet-noise-percent");
let ratchet_noise_percent =
ratchet_noise_percent.map(|s| s.as_slice().parse::<f64>().unwrap());
let save_metrics = matches.opt_str("save-metrics");
let save_metrics = save_metrics.map(|s| Path::new(s));
let test_shard = matches.opt_str("test-shard");
let test_shard = opt_shard(test_shard);
let mut nocapture = matches.opt_present("nocapture");
if !nocapture {
nocapture = os::getenv("RUST_TEST_NOCAPTURE").is_some();
@@ -456,63 +415,19 @@ pub fn parse_opts(args: &[String]) -> Option<OptRes> {
v))),
};
let show_boxplot = matches.opt_present("boxplot");
let boxplot_width = match matches.opt_str("boxplot-width") {
Some(width) => {
match FromStr::from_str(width.as_slice()) {
Some(width) => width,
None => {
return Some(Err(format!("argument for --boxplot-width must be a uint")));
}
}
}
None => 50,
};
let show_all_stats = matches.opt_present("stats");
let test_opts = TestOpts {
filter: filter,
run_ignored: run_ignored,
run_tests: run_tests,
run_benchmarks: run_benchmarks,
ratchet_metrics: ratchet_metrics,
ratchet_noise_percent: ratchet_noise_percent,
save_metrics: save_metrics,
test_shard: test_shard,
logfile: logfile,
nocapture: nocapture,
color: color,
show_boxplot: show_boxplot,
boxplot_width: boxplot_width,
show_all_stats: show_all_stats,
};
Some(Ok(test_opts))
}
pub fn opt_shard(maybestr: Option<String>) -> Option<(uint,uint)> {
match maybestr {
None => None,
Some(s) => {
let mut it = s.split('.');
match (it.next().and_then(|s| s.parse::<uint>()),
it.next().and_then(|s| s.parse::<uint>()),
it.next()) {
(Some(a), Some(b), None) => {
if a <= 0 || a > b {
panic!("tried to run shard {a}.{b}, but {a} is out of bounds \
(should be between 1 and {b}", a=a, b=b)
}
Some((a, b))
}
_ => None,
}
}
}
}
#[derive(Clone, PartialEq)]
pub struct BenchSamples {
ns_iter_summ: stats::Summary<f64>,
@@ -568,9 +483,9 @@ impl<T: Writer> ConsoleTestState<T> {
out: out,
log_out: log_out,
use_color: use_color(opts),
show_boxplot: opts.show_boxplot,
boxplot_width: opts.boxplot_width,
show_all_stats: opts.show_all_stats,
show_boxplot: false,
boxplot_width: 50,
show_all_stats: false,
total: 0u,
passed: 0u,
failed: 0u,
@@ -913,15 +828,7 @@ pub fn run_tests_console(opts: &TestOpts, tests: Vec<TestDescAndFn> ) -> io::IoResult<bool> {
None => {}
}
try!(run_tests(opts, tests, |x| callback(&x, &mut st)));
match opts.save_metrics {
None => (),
Some(ref pth) => {
try!(st.metrics.save(pth));
try!(st.write_plain(format!("\nmetrics saved to: {:?}",
pth.display()).as_slice()));
}
}
return st.write_run_finish(&opts.ratchet_metrics, opts.ratchet_noise_percent);
return st.write_run_finish(&None, None);
}
#[test]
@@ -1095,18 +1002,7 @@ pub fn filter_tests(opts: &TestOpts, tests: Vec<TestDescAndFn>) -> Vec<TestDescAndFn> {
// Sort the tests alphabetically
filtered.sort_by(|t1, t2| t1.desc.name.as_slice().cmp(t2.desc.name.as_slice()));
// Shard the remaining tests, if sharding requested.
match opts.test_shard {
None => filtered,
Some((a,b)) => {
filtered.into_iter().enumerate()
// note: using a - 1 so that the valid shards, for example, are
// 1.2 and 2.2 instead of 0.2 and 1.2
.filter(|&(i,_)| i % b == (a - 1))
.map(|(_,t)| t)
.collect()
}
}
filtered
}
pub fn run_test(opts: &TestOpts,
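Note: the sharding filter deleted from `filter_tests` above kept, for shard `a` of `b`, exactly the tests whose 0-based index `i` in the sorted list satisfies `i % b == a - 1`; the `a - 1` offset makes the valid shards run from `1.b` to `b.b`, as the deleted comment explains. A standalone sketch of the same partition (modern Rust, illustrative `shard` helper):

```rust
/// Keep the elements belonging to shard `a` of `b` (1-indexed). The input
/// must already be sorted so that every shard sees the same ordering.
fn shard<T>(items: Vec<T>, a: usize, b: usize) -> Vec<T> {
    items.into_iter()
        .enumerate()
        .filter(|&(i, _)| i % b == a - 1)
        .map(|(_, t)| t)
        .collect()
}

fn main() {
    let tests = vec!["a", "b", "c", "d", "e"];
    // Shards 1.2 and 2.2 together cover every test exactly once.
    assert_eq!(shard(tests.clone(), 1, 2), vec!["a", "c", "e"]);
    assert_eq!(shard(tests, 2, 2), vec!["b", "d"]);
}
```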