[OPTIONS] [TESTDIRS ...]\n" );
	// NOTE(review): this chunk begins mid-statement — the opening of the usage/help
	// printer (and its enclosing function/conditional) is outside the visible source.

	// ---- usage / help text ----
	print ( "\nModes are:\n" );
	print ( "g, gen\t\t\tgenerate reference ('model') test results\n" );
	print ( "t, test\t\t\trun tests and compare results to reference\n" );
	print ( "qt\t\t\tsame as test, but skips user-configured slow tests\n" );
	print ( "\nOptions are:\n" );
	print ( "-u, --user \tuse 'USER' as MySQL user\n" );
	print ( "-p, --password \tuse 'PASS' as MySQL password\n" );
	print ( "-i, --indexer \tpath to indexer\n" );
	print ( "-s, --searchd \tpath to searchd\n" );
	print ( "--strict\t\tterminate on the first failure (for automatic runs)\n" );
	print ( "--strict-verbose\tterminate on the first failure and copy the last report to report.txt (for automatic runs)\n" );
	print ( "--managed\t\tdon't run searchd during test (for debugging)\n" );
	print ( "--skip-indexer\t\tskip DB creation and indexer stages and go directly to queries/custom tests\n");
	print ( "--rt\t\t\ttest RT backend (auto-convert all local indexes)\n" );
	print ( "--no-drop-db\t\tKeep test db tables after the test (for debugging)\n");
	print ( "--no-demo\t\tJust skip all tests without models. 
Else - run them, but never fail (for debugging)\n");
	print ( "--no-marks\t\tDon't mark the output of every test in the logs.\n");
	print ( "\nEnvironment variables are:\n" );
	print ( "DBUSER\t\t\tuse 'USER' as MySQL user\n" );
	print ( "DBPASS\t\t\tuse 'PASS' as MySQL password\n" );
	print ( "\nTests can be specified by full name, or list of IDs, or range of IDs.\n" );
	print ( "\nUsage examples:\n" );
	print ( "php ubertest.php gen\n" );
	print ( "php ubertest.php t --user test --password test\n" );
	print ( "php ubertest.php t test_015\n" );
	print ( "php ubertest.php t 31 37 41 53-64\n" );
	print ( "DBPASS=test make check\n" );
	exit ( 0 );
}

// ---- pick up DB credentials from the environment ----
// $locals carries per-run settings consumed by the test framework elsewhere in this file.
$locals = array();
$locals['rt_mode'] = false;
if ( array_key_exists ( "DBUSER", $_ENV ) && $_ENV["DBUSER"] ) $locals['db-user'] = $_ENV["DBUSER"];
if ( array_key_exists ( "DBPASS", $_ENV ) && $_ENV["DBPASS"] ) $locals['db-password'] = $_ENV["DBPASS"];

// ---- parse command line and enumerate test directories ----
$run = false;
$test_dirs = array();
$test_range = array();
$user_skip = false;
// NOTE(review): the source appears truncated/garbled here — the body of the argv
// parsing loop and the directory-scan that defines $entry/$test_id/$tests is missing
// from this chunk. Preserved byte-identical; restore from upstream ubertest.php.
for ( $i=0; $i=$test_range[0] && $test_id<=$test_range[1] ) ) { $tests[] = $entry; } }
sort ( $tests );

// full name to short alias
// Strips the conventional "test_" prefix from a test directory name, so that
// "test_015" becomes "015"; names without the prefix are returned unchanged.
function ShortTestName ( $full )
{
	if ( substr ( $full,0,5 )=="test_" )
		return substr ( $full, 5 );
	return $full;
}

// ---- run tests ----
$total_tests = 0;
$total_tests_failed = 0;
$total_subtests = 0;
$total_subtests_failed = 0;
$total_skipped = $user_skipped;	// presumably set in the truncated argv-parsing section above (cf. $user_skip) — TODO confirm
$failed_tests = array();

foreach ( $tests as $test )
{
	// $windows, $sd_managed_searchd, ports/addresses and $agents are defined
	// elsewhere in this file (outside the visible chunk).
	if ( $windows && !$sd_managed_searchd )
	{
		// avoid an issue with daemons stuck in exit(0) for some seconds:
		// bump every port by 10 per test so the next searchd never collides
		// with a previous instance that has not fully exited yet
		$sd_port += 10;
		$agent_port += 10;
		$agent_port_sql += 10;
		$agents = array (
			array ( "address" => $sd_address, "port" => $sd_port, "sqlport" => $sd_sphinxql_port ),
			array ( "address" => $agent_address, "port" => $agent_port, "sqlport" => $agent_port_sql ),
			array ( "address" => $agent_address, "port" => $agent_port+1, "sqlport" => $agent_port_sql+1 ) );
	}

	if ( file_exists ( $test."/test.xml" ) )
	{
		// XML-driven test: delegate to RunTest() (defined elsewhere in this file),
		// which returns an array of counters on a completed run, or a non-array on
		// a hard failure to run the test at all.
		$total_tests++;
		$res = RunTest ( $test, $g_skipdemo, $g_usemarks );
		if ( !is_array($res) )
		{
			// failed to run that test at all
			$total_tests_failed++;
			$failed_tests[] = ShortTestName ( $test );
			continue;
		}
		$total_subtests += $res["tests_total"];
		$total_skipped += $res["tests_skipped"];
		if ( $res["tests_failed"] )
		{
			$total_tests_failed++;
			$total_subtests_failed += $res["tests_failed"];
			$failed_tests[] = ShortTestName ( $test );
			if ( $g_strict )
			{
				// --strict: stop at the first failing test; with --strict-verbose
				// also copy its report into the top-level report.txt for CI pickup
				if ( $g_strictverbose )
				{
					$report = file_get_contents ( "$test/report.txt" );
					$report.= "\n Test $test failed\n";
					file_put_contents("report.txt",$report);
					$report = "";
				}
				break;
			}
		}
	} elseif ( file_exists ( $test."/test.inc" ) )
	{
		// Code-driven test: test.inc contains a PHP function body taking $test_path
		// and returning truthiness for pass/fail. create_function() was deprecated
		// in PHP 7.2 and removed in PHP 8.0, so build the equivalent callable as a
		// closure that eval()s the body. (test.inc files are local, trusted test
		// fixtures shipped with the suite — not external input.)
		$inc_body = file_get_contents ( $test."/test.inc" );
		$run_func = function ( $test_path ) use ( $inc_body ) { return eval ( $inc_body ); };
		$total_tests++;
		$total_subtests++;
		if ( !$run_func ( $test ) )
		{
			$total_tests_failed++;
			$total_subtests_failed++;
			$failed_tests[] = ShortTestName ( $test );
		}
	}
}

// ---- cleanup: remove generated per-run config and error files ----
@unlink ( "config.conf" );
@unlink ( "error.txt" );
$nfile = 1;
while ( file_exists ( "config_$nfile.conf" ) )
{
	@unlink ( "config_$nfile.conf" );
	$nfile++;
}
$nfile = 1;
while ( file_exists ( "error_$nfile.txt" ) )
{
	@unlink ( "error_$nfile.txt" );
	$nfile++;
}

// ---- summarize and set the process exit code (1 = failures, 0 = all OK) ----
// $t is the run start time, MyMicrotime() is defined elsewhere in this file.
if ( $total_tests_failed )
{
	printf ( "\nTo re-run failed tests only:\nphp ubertest.php t %s\n", join ( " ", $failed_tests ) );
	printf ( "\n%d of %d tests and %d of %d subtests failed, %d tests skipped, %.2f sec elapsed\nTHERE WERE FAILURES!\n",
		$total_tests_failed, $total_tests, $total_subtests_failed, $total_subtests,$total_skipped, MyMicrotime()-$t );
	exit ( 1 );
} else
{
	// fixed typo in user-facing output: "succesful" -> "successful"
	printf ( "\n%d tests and %d subtests successful, %d tests skipped, %.2f sec elapsed\nALL OK\n",
		$total_tests, $total_subtests, $total_skipped, MyMicrotime()-$t );
	exit ( 0 );
}

//
// $Id$
//
?>