Commit ab3ec788 authored by Andrey Kamaev

Perf tests: improved reporting when a test fails because of an exception

parent 5ab6d5a0
@@ -187,6 +187,7 @@ typedef struct CV_EXPORTS performance_metrics
         TERM_ITERATIONS = 0,
         TERM_TIME = 1,
         TERM_INTERRUPT = 2,
+        TERM_EXCEPTION = 3,
         TERM_UNKNOWN = -1
     };
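The new TERM_EXCEPTION value is consumed further down: calcMetrics() skips its usual termination-reason inference when it is set, and reportMetrics() logs it as an unhandled exception.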
-import testlog_parser, sys, os, xml, glob
+import testlog_parser, sys, os, xml, glob, re
 from table_formatter import *
 from optparse import OptionParser
@@ -14,6 +14,8 @@ if __name__ == "__main__":
     parser.add_option("-f", "--filter", dest="filter", help="regex to filter tests", metavar="REGEX", default=None)
     parser.add_option("", "--no-relatives", action="store_false", dest="calc_relatives", default=True, help="do not output relative values")
     parser.add_option("", "--show-all", action="store_true", dest="showall", default=False, help="also include empty and \"notrun\" lines")
+    parser.add_option("", "--match", dest="match", default=None)
+    parser.add_option("", "--match-replace", dest="match_replace", default="")
     (options, args) = parser.parse_args()
     options.generateHtml = detectHtmlOutputType(options.format)
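The two new options act as a pair: --match takes a regex and --match-replace the string substituted for each match in every test name before results are grouped, so runs whose test names differ only in the matched fragment land in the same comparison row (a sketch of the renaming follows the grouping hunk below).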
@@ -41,7 +43,9 @@ if __name__ == "__main__":
             tests = testlog_parser.parseLogFile(arg)
             if options.filter:
                 expr = re.compile(options.filter)
-                tests = [t for t in tests if expr.search(str(t))]
+                tests = [t for t in tests if expr.search(str(t))]
+            if options.match:
+                tests = [t for t in tests if t.get("status") != "notrun"]
             if tests:
                 test_sets.append((os.path.basename(arg), tests))
         except IOError as err:
@@ -57,9 +61,14 @@ if __name__ == "__main__":
     setsCount = len(test_sets)
     test_cases = {}
 
+    name_extractor = lambda name: str(name)
+    if options.match:
+        reg = re.compile(options.match)
+        name_extractor = lambda name: reg.sub(options.match_replace, str(name))
+
     for i in range(setsCount):
         for case in test_sets[i][1]:
-            name = str(case)
+            name = name_extractor(case)
             if name not in test_cases:
                 test_cases[name] = [None] * setsCount
             test_cases[name][i] = case
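A minimal standalone sketch of the renaming name_extractor now performs; the regex, replacement, and test names below are illustrative, not values from this commit:

    import re

    # hypothetical option values: --match "::(GPU|CPU)$" --match-replace ""
    match, match_replace = "::(GPU|CPU)$", ""

    reg = re.compile(match)
    name_extractor = lambda name: reg.sub(match_replace, str(name))

    # both runs now land in the same comparison row
    print(name_extractor("Size_MatType::resize::GPU"))  # Size_MatType::resize
    print(name_extractor("Size_MatType::resize::CPU"))  # Size_MatType::resize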
@@ -570,7 +570,7 @@ performance_metrics& TestBase::calcMetrics()
     metrics.samples = (unsigned int)times.size();
     metrics.outliers = 0;
 
-    if (metrics.terminationReason != performance_metrics::TERM_INTERRUPT)
+    if (metrics.terminationReason != performance_metrics::TERM_INTERRUPT && metrics.terminationReason != performance_metrics::TERM_EXCEPTION)
     {
         if (currentIter == nIters)
             metrics.terminationReason = performance_metrics::TERM_ITERATIONS;
@@ -716,6 +716,9 @@ void TestBase::reportMetrics(bool toJUnitXML)
     case performance_metrics::TERM_INTERRUPT:
         LOGD("termination reason: aborted by the performance testing framework");
         break;
+    case performance_metrics::TERM_EXCEPTION:
+        LOGD("termination reason: unhandled exception");
+        break;
     case performance_metrics::TERM_UNKNOWN:
     default:
         LOGD("termination reason: unknown");
@@ -823,10 +826,12 @@ void TestBase::RunPerfTestBody()
     }
     catch(cv::Exception e)
     {
+        metrics.terminationReason = performance_metrics::TERM_EXCEPTION;
         FAIL() << "Expected: PerfTestBody() doesn't throw an exception.\n  Actual: it throws:\n  " << e.what();
     }
     catch(...)
     {
+        metrics.terminationReason = performance_metrics::TERM_EXCEPTION;
         FAIL() << "Expected: PerfTestBody() doesn't throw an exception.\n  Actual: it throws.";
     }
 }
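Both handlers above follow the same pattern: record why the test stopped before reporting the failure, so calcMetrics() and reportMetrics() still see a meaningful termination reason when the test body throws. An illustrative analogue of the pattern in Python (not OpenCV code; all names here are made up):

    TERM_EXCEPTION = 3  # mirrors performance_metrics::TERM_EXCEPTION

    class Metrics:
        termination_reason = None

    def run_perf_test_body(metrics, body):
        try:
            body()
        except Exception as e:
            # tag the metrics first, then fail, as both catch blocks do
            metrics.termination_reason = TERM_EXCEPTION
            raise AssertionError(
                "Expected: PerfTestBody() doesn't throw an exception.\n"
                "  Actual: it throws:\n  %s" % e)

    m = Metrics()
    try:
        run_perf_test_body(m, lambda: 1 / 0)
    except AssertionError:
        pass
    print(m.termination_reason)  # 3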