#
# CDDL HEADER START
#
# The contents of this file are subject to the terms of the
# Common Development and Distribution License (the "License").
# You may not use this file except in compliance with the License.
#
# You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
# See the License for the specific language governing permissions
# and limitations under the License.
#
# When distributing Covered Code, include this CDDL HEADER in each
# file and include the License file at usr/src/OPENSOLARIS.LICENSE.
# If applicable, add the following below this CDDL HEADER, with the
# fields enclosed by brackets "[]" replaced with your own identifying
# information: Portions Copyright [yyyy] [name of copyright owner]
#
# CDDL HEADER END
#
#
# Copyright 2008 Sun Microsystems, Inc. All rights reserved.
# Use is subject to license terms.
#
#
# Copyright (c) 2011, Joyent, Inc. All rights reserved.
# Copyright (c) 2012 by Delphix. All rights reserved.
#
# Minimum Perl version this harness relies on.  The script predates
# "use strict"; everything below is a package global shared by the subs
# that follow.
require 5.8.4;
use Cwd;
use Cwd 'abs_path';
# Program name (basename of $0), used in diagnostics and usage output.
$PNAME = $0;
$PNAME =~ s:.*/::;
# getopts() option string; see usage() for the meaning of each flag.
$OPTSTR = 'abd:fFghi:jlnqsx:';
$USAGE = "Usage: $PNAME [-abfFghjlnqs] [-d dir] [-i isa] "
. "[-x opt[=arg]] [file | dir ...]\n";
@dtrace_argv = ();	# extra arguments passed through to the dtrace command
@files = ();		# test files to be run
%exceptions = ();	# tests to skip, keyed by path relative to the test root
%results = ();		# per-command summaries -- presumably filled from
			# run_tests()'s return value; confirm against caller
$errs = 0;		# running error count, bumped by errmsg(); drives exit status
#
# If no test files are specified on the command-line, execute a find on "."
# and append any tst.*.d, tst.*.ksh, err.*.d or drp.*.d files found within
# the directory tree.
#
#
# NOTE(review): body lost in truncation -- per the comment above, this
# was presumably the find-style callback that collected tst.*.d,
# tst.*.ksh, err.*.d and drp.*.d files into @files; confirm against the
# original dtest.pl before use.
#
sub wanted
{
}
#
# Return the directory portion of a pathname, in the manner of
# dirname(1): the text before the final '/', or '.' when the path has
# no slash at all, or '/' when the only slash is the leading character.
# The caller's string is not modified.
#
sub dirname {
	my ($path) = @_;
	my $slash = rindex($path, '/');

	return '.' if ($slash == -1);
	return '/' if ($slash == 0);
	return substr($path, 0, $slash);
}
#
# Determine whether the named executable can be located.
# NOTE(review): truncated -- the loop (presumably over $ENV{PATH}
# components) that enclosed the 'return 1' has been lost, leaving the
# extra closing braces and the 'return 0' orphaned; as written this is
# a syntax error.  Restore from the original dtest.pl.
#
sub inpath
{
my ($exec) = (@_);
return 1;
}
# --- orphaned fragment below: formerly the tail of inpath() ---
}
return 0;
}
#
# Print the one-line usage synopsis followed by a summary of every
# option in $OPTSTR, then terminate with exit status 2 (the
# conventional usage-error status).  Never returns.
#
sub usage
{
print $USAGE;
print "\t -a  execute test suite using anonymous enablings\n";
print "\t -b  execute bad ioctl test program\n";
print "\t -d  specify directory for test results files and cores\n";
print "\t -g  enable libumem debugging when running tests\n";
print "\t -f  force bypassed tests to run\n";
print "\t -F  force tests to be run, even if missing dependencies\n";
print "\t -h  display verbose usage message\n";
print "\t -i  specify ISA to test instead of isaexec(3C) default\n";
print "\t -j  execute test suite using jdtrace (Java API) only\n";
print "\t -l  save log file of results and PIDs used by tests\n";
print "\t -n  execute test suite using dtrace(1m) only\n";
print "\t -q  set quiet mode (only report errors and summary)\n";
print "\t -s  save results files even for tests that pass\n";
print "\t -x  pass corresponding -x argument to dtrace(1M)\n";
exit(2);
}
#
# Note an error by bumping the global error counter that determines the
# harness's final exit status.
# NOTE(review): in this copy the message text is accepted but never
# emitted -- the reporting code appears to have been truncated.
#
sub errmsg
{
	my ($text) = @_;

	$errs++;
}
#
# Record a test failure.
# NOTE(review): this copy is truncated -- the sub's body is cut off at
# the first '}' below, leaving the exit(125), the README reporting
# logic, and the trailing conditionals orphaned outside any sub (a
# syntax error as written).  $errfile, $opt_d and $pid are globals
# presumably set by the lost portion; confirm against the original.
#
sub fail
{
my(@parms) = @_;
my($n) = 0;
$n++;
}
# --- orphaned fragment below: formerly the remainder of fail() ---
exit(125);
}
if (scalar @parms > 1) {
print README "; see $errfile\n";
} else {
if (-f "$opt_d/$pid.core") {
print README "; see $pid.core\n";
} else {
print README "\n";
}
}
close(README);
if (-f "$opt_d/$pid.out") {
}
if (-f "$opt_d/$pid.err") {
}
if (-f "$opt_d/$pid.core") {
}
if (scalar @parms > 1) {
} else {
}
}
#
# Log a progress message.
# NOTE(review): as found, the body only unpacks its argument and
# discards it -- the code that echoed $msg to stdout and/or the log
# file appears to have been lost in truncation; confirm against the
# original dtest.pl.
#
sub logmsg
{
my($msg) = @_;
}
# Trim leading and trailing whitespace
#
# Return a copy of the given string with all leading and trailing
# whitespace removed; the caller's string is left untouched.
#
sub trim {
	my ($str) = @_;

	$str =~ s/\A\s+//;
	$str =~ s/\s+\z//;
	return $str;
}
# Load exception set of skipped tests from the file at the given
# pathname. The test names are assumed to be paths relative to $dt_tst,
# for example: common/aggs/tst.neglquant.d, and specify tests to be
# skipped.
sub load_exceptions {
my($listfile) = @_;
my($line) = "";
# Start from an empty exception set on every (re)load.
%exceptions = ();
if (length($listfile) > 0) {
# NOTE(review): reads STDIN even though $listfile names the file to
# load -- the open() of $listfile appears lost in truncation.
while (<STDIN>) {
chomp;
$line = $_;
# line is non-empty and not a comment
# NOTE(review): the comment-skip test and the insertion of $line
# into %exceptions are missing here.
}
}
}
# NOTE(review): orphaned closing brace (truncation artifact); as
# written this is a syntax error.
}
# Return 1 if the test is found in the exception set, 0 otherwise.
sub is_exception {
my($file) = @_;
my($i) = -1;
# Nothing can be excepted when the exception set is empty.
if (scalar(keys(%exceptions)) == 0) {
return 0;
}
# hash absolute pathname after $dt_tst/
# NOTE(review): truncated -- $i is never reassigned after its -1
# initialization (the index()/substr() computation against the test
# root is missing), so the branch below is unreachable and the sub
# always returns 0 once the emptiness check passes.  Restore from the
# original before relying on exception handling.
if ($i == 0) {
return $exceptions{$file};
}
return 0;
}
#
# Iterate over the set of test files specified on the command-line or located
# by a find. If the test file is executable, we fork and exec it. If the test is a
# .ksh file, we run it with $ksh_path. Otherwise we run dtrace -s on it. If
# the file is named tst.* we assume it should return exit status 0. If the
# file is named err.* we assume it should return exit status 1. If the file is
# named err.D_[A-Z0-9]+[.*].d we use dtrace -xerrtags and examine stderr to
# ensure that a matching error tag was produced. If the file is named
# drp.[A-Z0-9]+[.*].d we use dtrace -xdroptags and examine stderr to ensure
# that a matching drop tag was produced. If any *.out or *.err files are found
# we perform output comparisons.
#
# run_tests takes two arguments: The first is the pathname of the dtrace
# command to invoke when running the tests. The second is the pathname
# of a file (may be the empty string) listing tests that ought to be
# skipped (skipped tests are listed as paths relative to $dt_tst, for
# example: common/aggs/tst.neglquant.d).
#
#
# NOTE(review): this copy of run_tests() is severely truncated.  The
# per-file foreach loop header, the waitpid()/timeout handling, the
# errtag scanning loop, the 'return {' of the summary hashref, and
# several conditionals are missing, leaving orphaned braces and
# unreachable statements below; as written this does not parse.  The
# comments added here record apparent intent only -- restore the body
# from the original dtest.pl before use.
#
sub run_tests {
my($dtrace, $exceptions_path) = @_;
my($passed) = 0;
my($bypassed) = 0;
my($total) = 0;
die "$PNAME: $dtrace not found; aborting\n" unless (-x "$dtrace");
logmsg("executing tests using $dtrace ...\n");
# Decompose the test pathname into name, base and extension.
# NOTE(review): the foreach over @files that defined $file is missing.
$file =~ m:.*/((.*)\.(\w+)):;
$name = $1;
$base = $2;
$ext = $3;
$isksh = 0;
$tag = 0;
$droptag = 0;
# Classify the test by filename (see the block comment above): tst.*
# expects exit status 0, err.* expects 1, drp.* sets a drop tag.
# NOTE(review): the if/elsif chain around these assignments is
# partially missing.
$status = 0;
$status = 1;
$tag = $1;
} elsif ($name =~ /^err\./) {
$status = 1;
$status = 0;
$droptag = $1;
} else {
errmsg("ERROR: $file is not a valid test file name\n");
next;
}
$fullname = "$dir/$name";
$exe = "$dir/$base.exe";
$exe_pid = -1;
# Bypass cases -- the guarding conditions were lost in truncation.
$bypassed++;
next;
}
$bypassed++;
next;
}
# If a companion .exe exists, fork and run it first.
# NOTE(review): L209 below is the tail of an errmsg() call whose
# opening has been lost.
if (($exe_pid = fork()) == -1) {
"ERROR: failed to fork to run $exe: $!\n");
next;
}
if ($exe_pid == 0) {
exec($exe);
warn "ERROR: failed to exec $exe: $!\n";
}
}
logmsg("testing $file ... ");
if (($pid = fork()) == -1) {
errmsg("ERROR: failed to fork to run test $file: $!\n");
next;
}
if ($pid == 0) {
# Child: run the test from its own directory.
unless (chdir($dir)) {
warn "ERROR: failed to chdir for $file: $!\n";
exit(126);
}
if ($isksh) {
} elsif (-x $name) {
warn "ERROR: $name is executable\n";
exit(1);
} else {
# -A: anonymous-enabling mode -- presumably only under $opt_a;
# the guarding condition is missing here.
push(@dtrace_argv, '-A');
}
push(@dtrace_argv, '-C');
push(@dtrace_argv, '-s');
push(@dtrace_argv, $name);
exec($dtrace, @dtrace_argv);
}
warn "ERROR: failed to exec for $file: $!\n";
exit(127);
}
# Parent: give up on the test if it runs too long.
errmsg("ERROR: timed out waiting for $file\n");
kill(9, $pid);
next;
}
#
# We can chuck the earlier output.
#
unlink($pid . '.out');
unlink($pid . '.err');
#
# This is an anonymous enabling. We need to get
# the module unloaded.
#
# NOTE(review): the svcadm disable argument and its closing paren are
# missing from the statement below.
system("svcadm disable -s " .
system("modunload -i 0 ; modunload -i 0 ; " .
"modunload -i 0");
if (!system("modinfo | grep dtrace")) {
warn "ERROR: couldn't unload dtrace\n";
system("svcadm enable " .
exit(124);
}
#
# DTrace is gone. Now update_drv(1M), and rip
# everything out again.
#
system("update_drv dtrace");
system("modunload -i 0 ; modunload -i 0 ; " .
"modunload -i 0");
if (!system("modinfo | grep dtrace")) {
warn "ERROR: couldn't unload dtrace\n";
system("svcadm enable " .
exit(124);
}
#
# Now bring DTrace back in.
#
system("sync ; sync");
system("svcadm enable -s " .
#
# That should have caused DTrace to reload with
# the new configuration file. Now we can try to
# snag our anonymous state.
#
if (($pid = fork()) == -1) {
errmsg("ERROR: failed to fork to run " .
"test $file: $!\n");
next;
}
if ($pid == 0) {
push(@dtrace_argv, '-a');
unless (chdir($dir)) {
warn "ERROR: failed to chdir " .
"for $file: $!\n";
exit(126);
}
exec($dtrace, @dtrace_argv);
warn "ERROR: failed to exec for $file: $!\n";
exit(127);
}
errmsg("ERROR: timed out waiting for $file\n");
kill(9, $pid);
next;
}
}
logmsg("[$pid]\n");
# Examine the child's wait status.
# NOTE(review): the waitpid() call and the derivation of $wifexited,
# $wtermsig and $wexitstat from $wstat are missing.
$wstat = $?;
if (!$wifexited) {
fail("died from signal $wtermsig");
next;
}
if ($wexitstat == 125) {
# 125 is fail()'s own sentinel exit status (see fail()).
die "$PNAME: failed to create output file in $opt_d " .
"(cd elsewhere or use -d)\n";
}
fail("returned $wexitstat instead of $status");
next;
}
# Compare stdout/stderr against golden .out/.err files if present.
if (-f "$file.out" &&
system("cmp -s $file.out $opt_d/$pid.out") != 0) {
next;
}
if (-f "$file.err" &&
system("cmp -s $file.err $opt_d/$pid.err") != 0) {
fail("stderr mismatch: see $pid.err");
next;
}
if ($tag) {
# NOTE(review): the loop scanning TSTERR for the expected error tag
# is missing between the open and close below.
open(TSTERR, "<$opt_d/$pid.err");
close(TSTERR);
fail("errtag mismatch: see $pid.err");
next;
}
}
if ($droptag) {
# Verify that stderr contains the expected drop tag.
$found = 0;
open(TSTERR, "<$opt_d/$pid.err");
while (<TSTERR>) {
if (/\[$droptag\] /) {
$found = 1;
last;
}
}
close (TSTERR);
unless ($found) {
fail("droptag mismatch: see $pid.err");
next;
}
}
# Unless -s was given, discard results files for passing tests.
unless ($opt_s) {
unlink($pid . '.out');
unlink($pid . '.err');
}
}
if ($opt_a) {
#
# If we're running with anonymous enablings, we need to
# restore the .conf file.
#
system("modunload -i 0 ; modunload -i 0 ; modunload -i 0");
system("update_drv dtrace");
}
# Summary hashref for this dtrace command.
# NOTE(review): the 'return {' opening this hashref is missing, and
# $failed is never assigned in the surviving code.
"passed" => $passed,
"bypassed" => $bypassed,
"failed" => $failed,
"total" => $total
};
}
# NOTE(review): orphaned fragment -- formerly the argument-processing
# loop that appended files/directories to @files; its loop header and
# the bodies of the -f/-d branches are missing.
if (-f $arg) {
} elsif (-d $arg) {
} else {
die "$PNAME: $arg is not a valid file or directory\n";
}
}
# Unless -F was given, refuse to run when a dependency is missing.
# NOTE(review): the existence/inpath() check inside the loop is
# missing, so as written this would die on the first dependency.
if (!$opt_F) {
for my $dep (@dependencies) {
die "$PNAME: '$dep' not found (use -F to force run)\n";
}
}
}
# Paths of the dtrace implementations that can be exercised.
$dtrace_path = '/usr/sbin/dtrace';
$jdtrace_path = "$bindir/jdtrace";
# NOTE(review): the if-branch that this '} else {' pairs with
# (presumably selecting which commands go into @dtrace_cmds based on
# -j/-n) is missing; as written this is a syntax error.
@dtrace_cmds = ();
} else {
@dtrace_cmds = ($dtrace_path);
}
if ($opt_d) {
# -d: results directory must be an existing absolute path; point
# coreadm(1M) core dumps there as well.
die "$PNAME: -d arg must be absolute path\n" unless ($opt_d =~ /^\//);
die "$PNAME: -d arg $opt_d is not a directory\n" unless (-d "$opt_d");
system("coreadm -p $opt_d/%p.core");
} else {
# Default to the current directory for results files and cores.
system("coreadm -p $dir/%p.core");
$opt_d = '.';
}
# Pass any -x opt[=arg] straight through to the dtrace command.
if ($opt_x) {
push(@dtrace_argv, '-x');
push(@dtrace_argv, $opt_x);
}
# NOTE(review): this die is missing its trailing condition -- it was
# presumably '... unless open(...)' guarding the log file; as written
# the statement is incomplete and the following empty if is malformed.
die "$PNAME: failed to open $PNAME.$$.log: $!\n"
if ($opt_g) {
}
#
# -b: stress the driver with bad ioctls/DOF while iterating the tests.
#
if ($opt_b) {
logmsg("badioctl'ing ... ");
if (($badioctl = fork()) == -1) {
errmsg("ERROR: failed to fork to run badioctl: $!\n");
next;
}
if ($badioctl == 0) {
# Child: exec the badioctl helper from $bindir.
exec($bindir . "/badioctl");
warn "ERROR: failed to exec badioctl: $!\n";
exit(127);
}
logmsg("[$badioctl]\n");
#
# If we're going to be bad, we're just going to iterate over each
# test file.
#
# NOTE(review): the foreach over @files enclosing the baddof section
# below is missing.
next;
}
logmsg("baddof'ing $file ... ");
if (($pid = fork()) == -1) {
errmsg("ERROR: failed to fork to run baddof: $!\n");
next;
}
if ($pid == 0) {
unless (chdir($dir)) {
warn "ERROR: failed to chdir for $file: $!\n";
exit(126);
}
# NOTE(review): the exec of the baddof helper is missing here, so
# only the failure path survives.
warn "ERROR: failed to exec for $file: $!\n";
exit(127);
}
# Let the child chew on the file for a minute, then kill and reap it.
sleep 60;
kill(9, $pid);
waitpid($pid, 0);
logmsg("[$pid]\n");
unless ($opt_s) {
unlink($pid . '.out');
unlink($pid . '.err');
}
}
# Reap the badioctl child and clean up its results files.
kill(9, $badioctl);
waitpid($badioctl, 0);
unless ($opt_s) {
unlink($badioctl . '.out');
unlink($badioctl . '.err');
}
exit(0);
}
#
# Run all the tests specified on the command-line (the entire test suite
# by default) once for each dtrace command tested, skipping any tests
# not valid for that command.
#
# NOTE(review): loop body lost in truncation -- presumably called
# run_tests($dtrace_cmd, ...) for each command and recorded the
# returned summary.
foreach $dtrace_cmd (@dtrace_cmds) {
}
logmsg("\n==== TEST RESULTS ====\n");
# NOTE(review): the per-command results reporting that used $bypassed
# is truncated; the stray brace below closed a missing enclosing block.
if ($bypassed) {
}
}
# Exit 0 only if no errors were recorded via errmsg().
exit($errs != 0);