| field | value | date |
|---|---|---|
| author | Markus Lehtonen <markus.lehtonen@linux.intel.com> | 2016-06-27 15:16:34 +0300 |
| committer | Richard Purdie <richard.purdie@linuxfoundation.org> | 2016-08-17 10:35:22 +0100 |
| commit | 249d99cd7ec00b3227c194eb4b9b21ea4dcb7315 (patch) | |
| tree | 8d046d830cda9c77814b89ee0253c2c63b7cb2cc /scripts/oe-build-perf-test | |
| parent | 4e81967131863df7ee6c8356cb41be51f1b8c260 (diff) | |
oe-build-perf-test: use new unittest based framework
Convert scripts/oe-build-perf-test to be compatible with the new Python
unittest-based buildperf test framework.
Signed-off-by: Markus Lehtonen <markus.lehtonen@linux.intel.com>
Signed-off-by: Ross Burton <ross.burton@intel.com>
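
The conversion follows the standard Python unittest flow: a loader discovers test cases under a package directory, a runner executes the resulting suite, and the returned result object reports overall success, which the script maps to its exit code. A minimal sketch of that generic pattern using only the standard library (the `tests` directory name below is a placeholder, not something defined by this commit):

```python
import os
import sys
import unittest

# Discover unittest-style test cases below a start directory
# (placeholder path; the converted script discovers the oeqa.buildperf package).
loader = unittest.TestLoader()
suite = loader.discover(start_dir=os.path.join(os.path.dirname(__file__), 'tests'))

# Run the suite and translate the overall outcome into an exit code,
# mirroring what the converted script does with its custom runner and result.
runner = unittest.TextTestRunner(verbosity=2)
result = runner.run(suite)
sys.exit(0 if result.wasSuccessful() else 1)
```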
Diffstat (limited to 'scripts/oe-build-perf-test')
-rwxr-xr-x | scripts/oe-build-perf-test | 19 |
1 file changed, 13 insertions(+), 6 deletions(-)
diff --git a/scripts/oe-build-perf-test b/scripts/oe-build-perf-test
index 8142b0332b..786c715dfc 100755
--- a/scripts/oe-build-perf-test
+++ b/scripts/oe-build-perf-test
@@ -21,12 +21,15 @@
 import logging
 import os
 import shutil
 import sys
+import unittest
 from datetime import datetime
 
 sys.path.insert(0, os.path.dirname(os.path.realpath(__file__)) + '/lib')
 import scriptpath
 scriptpath.add_oe_lib_path()
-from oeqa.buildperf import BuildPerfTestRunner, KernelDropCaches
+import oeqa.buildperf
+from oeqa.buildperf import (BuildPerfTestLoader, BuildPerfTestResult,
+                            BuildPerfTestRunner, KernelDropCaches)
 from oeqa.utils.commands import runCmd
 
@@ -123,19 +126,23 @@ def main(argv=None):
     # Check our capability to drop caches and ask pass if needed
     KernelDropCaches.check()
 
+    # Load build perf tests
+    loader = BuildPerfTestLoader()
+    suite = loader.discover(start_dir=os.path.dirname(oeqa.buildperf.__file__))
     # Set-up log file
     out_dir = args.out_dir.format(date=datetime.now().strftime('%Y%m%d%H%M%S'))
     setup_file_logging(os.path.join(out_dir, 'output.log'))
 
     # Run actual tests
-    runner = BuildPerfTestRunner(out_dir)
     archive_build_conf(out_dir)
-    ret = runner.run_tests()
-    if not ret:
+    runner = BuildPerfTestRunner(out_dir, verbosity=2)
+    result = runner.run(suite)
+    if result.wasSuccessful():
         if args.globalres_file:
-            runner.update_globalres_file(args.globalres_file)
+            result.update_globalres_file(args.globalres_file)
+        return 0
 
-    return ret
+    return 1
 
 
 if __name__ == '__main__':
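
After this change, the custom BuildPerfTestRunner returns a result object whose wasSuccessful() drives the script's exit code (0 on success, 1 otherwise), following the usual Unix convention. The tests that BuildPerfTestLoader discovers are ordinary unittest-style test cases; the sketch below is purely illustrative, and the class name, the timed command, and the assertion are assumptions rather than anything defined by oeqa.buildperf or this commit (it also assumes a POSIX `true` binary is available):

```python
# Hypothetical shape of a test module that a unittest-style loader could
# discover; none of these names come from oeqa.buildperf.
import subprocess
import time
import unittest


class ExampleTimedTest(unittest.TestCase):
    """Times a trivial command, mimicking the shape of a build perf test."""

    def test_trivial_command_time(self):
        start = time.time()
        subprocess.check_call(['true'])  # stand-in for a real build step
        elapsed = time.time() - start
        self.assertGreaterEqual(elapsed, 0.0)


if __name__ == '__main__':
    unittest.main()
```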