#!/usr/bin/env python
#
# Copyright 2018, The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""
Atest Argument Parser class for atest.
"""

# pylint: disable=line-too-long

import argparse
import pydoc

import constants

# Constants used for AtestArgParser and EPILOG_TEMPLATE
HELP_DESC = ('A command line tool that allows users to build, install, and run '
             'Android tests locally, greatly speeding test re-runs without '
             'requiring knowledge of Trade Federation test harness command line'
             ' options.')

# Constants used for arg help messages (sorted alphabetically)
ALL_ABI = 'Set to run tests for all ABIs.'
BUILD = 'Run a build.'
CLEAR_CACHE = 'Wipe out the test_infos cache of the test.'
COLLECT_TESTS_ONLY = ('Collect the list of test cases of the instrumentation '
                      'tests without actually running them.')
DISABLE_TEARDOWN = 'Disable test teardown and cleanup.'
DRY_RUN = 'Dry-run atest without actually building, installing or running tests.'
ENABLE_FILE_PATTERNS = 'Enable FILE_PATTERNS in TEST_MAPPING.'
HISTORY = ('Show test results in chronological order (with the specified '
           'number of results, or all by default).')
HOST = ('Run the test completely on the host without a device. '
        '(Note: running a host test that requires a device without '
        '--host will fail.)')
INCLUDE_SUBDIRS = 'Search TEST_MAPPING files in subdirs as well.'
INFO = 'Show module information.'
INSTALL = 'Install an APK.'
INSTANT = ('Run the instant_app version of the module if the module supports it. '
           'Note: nothing will run if the test is not an Instant App test and '
           '"--instant" is passed.')
ITERATION = 'Loop-run tests until the max iteration is reached. (10 by default)'
LATEST_RESULT = 'Print the latest test result.'
LIST_MODULES = 'List testable modules for the given suite.'
NO_METRICS = 'Do not send metrics.'
REBUILD_MODULE_INFO = ('Forces a rebuild of the module-info.json file. '
                       'This may be necessary following a repo sync or '
                       'when writing a new test.')
RERUN_UNTIL_FAILURE = ('Rerun all tests until a failure occurs or the max '
                       'iteration is reached. (10 by default)')
RETRY_ANY_FAILURE = ('Rerun failed tests until they pass or the max iteration '
                     'is reached. (10 by default)')
SERIAL = 'The device to run the test on.'
TEST = ('Run the tests. WARNING: Many test configs force cleanup of the device '
        'after the test run. In this case, "-d" must be used in the previous '
        'test run to disable cleanup for "-t" to work. Otherwise, the device '
        'will need to be set up again with "-i".')
TEST_MAPPING = 'Run tests defined in TEST_MAPPING files.'
TF_TEMPLATE = ('Add an extra Tradefed template for the ATest suite, '
               'e.g. atest <test> --tf-template <template_key>=<template_path>')
TF_DEBUG = 'Enable Tradefed debug mode with a specified port. Default value is 10888.'
SHARDING = 'Option to specify the sharding count. The default value is 2.'
UPDATE_CMD_MAPPING = ('Update the test command of input tests. Warning: the '
                      'result will be saved under '
                      'tools/tradefederation/core/atest/test_data.')
USER_TYPE = 'Run test with a specific user type, e.g. atest <test> --user-type secondary_user'
VERBOSE = 'Display DEBUG level logging.'
VERIFY_CMD_MAPPING = 'Verify the test command of input tests.'
VERSION = 'Display version string.'
WAIT_FOR_DEBUGGER = 'Wait for debugger prior to execution (Instrumentation tests only).'

def _positive_int(value):
    """Verify that the value is a positive integer.

    Args:
        value: A string of a command-line argument.

    Returns:
        int of value, if it is a positive integer.
        Otherwise, raise argparse.ArgumentTypeError.
    """
    err_msg = "invalid positive int value: '%s'" % value
    try:
        converted_value = int(value)
        if converted_value < 1:
            raise argparse.ArgumentTypeError(err_msg)
        return converted_value
    except ValueError:
        raise argparse.ArgumentTypeError(err_msg)
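

# Usage note: _positive_int is meant to be passed as the `type=` callable of
# an argparse option (as done below for --sharding, --tf-debug, --iterations,
# --rerun-until-failure and --retry-any-failure), e.g.
#
#     parser.add_argument('--iterations', type=_positive_int)
#
# so that values such as `--iterations 0` or `--iterations foo` are rejected
# with "invalid positive int value" before atest does any work.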


class AtestArgParser(argparse.ArgumentParser):
    """Atest wrapper of ArgumentParser."""

    def __init__(self):
        """Initialise an ArgumentParser instance."""
        super(AtestArgParser, self).__init__(
            description=HELP_DESC, add_help=False)

    def add_atest_args(self):
        """Add all atest arguments via ArgumentParser.add_argument()."""
        self.add_argument('tests', nargs='*', help='Tests to build and/or run.')
        # Options related to testing.
        self.add_argument('-a', '--all-abi', action='store_true', help=ALL_ABI)
        self.add_argument('-b', '--build', action='append_const', dest='steps',
                          const=constants.BUILD_STEP, help=BUILD)
        self.add_argument('-d', '--disable-teardown', action='store_true',
                          help=DISABLE_TEARDOWN)
        self.add_argument('--host', action='store_true', help=HOST)
        self.add_argument('-i', '--install', action='append_const',
                          dest='steps', const=constants.INSTALL_STEP,
                          help=INSTALL)
        self.add_argument('-m', constants.REBUILD_MODULE_INFO_FLAG,
                          action='store_true', help=REBUILD_MODULE_INFO)
        self.add_argument('-s', '--serial', help=SERIAL)
        self.add_argument('--sharding', nargs='?', const=2,
                          type=_positive_int, default=0,
                          help=SHARDING)
        self.add_argument('-t', '--test', action='append_const', dest='steps',
                          const=constants.TEST_STEP, help=TEST)
        self.add_argument('-w', '--wait-for-debugger', action='store_true',
                          help=WAIT_FOR_DEBUGGER)
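
        # Note on the shared dest='steps': -b, -i and -t all append their
        # constant to the same list, so e.g. `atest -bt <test>` yields
        # steps == [constants.BUILD_STEP, constants.TEST_STEP]. When none of
        # them is passed, steps stays None and all steps are expected to run
        # (see the OPTIONS/EXAMPLES sections of EPILOG_TEMPLATE below).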

        # Options related to Test Mapping.
        self.add_argument('-p', '--test-mapping', action='store_true',
                          help=TEST_MAPPING)
        self.add_argument('--include-subdirs', action='store_true',
                          help=INCLUDE_SUBDIRS)
        # TODO(146980564): Remove enable-file-patterns when file-patterns
        # are supported in TEST_MAPPING by default.
        self.add_argument('--enable-file-patterns', action='store_true',
                          help=ENABLE_FILE_PATTERNS)

        # Options for information queries and dry-runs:
        # A group of options for dry-runs. They are mutually exclusive
        # on a command line.
        group = self.add_mutually_exclusive_group()
        group.add_argument('--collect-tests-only', action='store_true',
                           help=COLLECT_TESTS_ONLY)
        group.add_argument('--dry-run', action='store_true', help=DRY_RUN)
        self.add_argument('-h', '--help', action='store_true',
                          help='Print this help message.')
        self.add_argument('--info', action='store_true', help=INFO)
        self.add_argument('-L', '--list-modules', help=LIST_MODULES)
        self.add_argument('-v', '--verbose', action='store_true', help=VERBOSE)
        self.add_argument('-V', '--version', action='store_true', help=VERSION)

        # Obsolete options that will be removed soon.
        self.add_argument('--generate-baseline', nargs='?',
                          type=int, const=5, default=0,
                          help='Generate baseline metrics, run 5 iterations '
                               'by default. Provide an int argument to '
                               'specify # iterations.')
        self.add_argument('--generate-new-metrics', nargs='?',
                          type=int, const=5, default=0,
                          help='Generate new metrics, run 5 iterations by '
                               'default. Provide an int argument to specify '
                               '# iterations.')
        self.add_argument('--detect-regression', nargs='*',
                          help='Run regression detection algorithm. Supply '
                               'path to baseline and/or new metrics folders.')

        # Options related to module parameterization.
        self.add_argument('--instant', action='store_true', help=INSTANT)
        self.add_argument('--user-type', help=USER_TYPE)

        # Options for dry-run command mapping results and cleaning the cache.
        self.add_argument('-c', '--clear-cache', action='store_true',
                          help=CLEAR_CACHE)
        self.add_argument('-u', '--update-cmd-mapping', action='store_true',
                          help=UPDATE_CMD_MAPPING)
        self.add_argument('-y', '--verify-cmd-mapping', action='store_true',
                          help=VERIFY_CMD_MAPPING)
        # Options for Tradefed debug mode.
        self.add_argument('-D', '--tf-debug', nargs='?', const=10888,
                          type=_positive_int, default=0,
                          help=TF_DEBUG)
        # Options related to Tradefed customization.
        self.add_argument('--tf-template', action='append',
                          help=TF_TEMPLATE)

        # A group of options for the rerun strategy. They are mutually
        # exclusive on a command line.
        group = self.add_mutually_exclusive_group()
        # Option to rerun tests for the specified number of iterations.
        group.add_argument('--iterations', nargs='?',
                           type=_positive_int, const=10, default=0,
                           metavar='MAX_ITERATIONS', help=ITERATION)
        group.add_argument('--rerun-until-failure', nargs='?',
                           type=_positive_int, const=10, default=0,
                           metavar='MAX_ITERATIONS', help=RERUN_UNTIL_FAILURE)
        group.add_argument('--retry-any-failure', nargs='?',
                           type=_positive_int, const=10, default=0,
                           metavar='MAX_ITERATIONS', help=RETRY_ANY_FAILURE)

        # A group of options for history. They are mutually exclusive
        # on a command line.
        history_group = self.add_mutually_exclusive_group()
        # History related options.
        history_group.add_argument('--latest-result', action='store_true',
                                   help=LATEST_RESULT)
        history_group.add_argument('--history', nargs='?', const='99999',
                                   help=HISTORY)

        # Option for disabling data collection for metrics.
        self.add_argument(constants.NO_METRICS_ARG, action='store_true',
                          help=NO_METRICS)

        # This arg actually doesn't consume anything; it's primarily used for
        # the help description and for creating custom_args in the Namespace
        # object.
        self.add_argument('--', dest='custom_args', nargs='*',
                          help='Specify custom args for the test runners. '
                               'Everything after -- will be consumed as '
                               'custom args.')
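
        # Illustrative sketch (an assumption about the caller, not enforced
        # here): argparse already treats a bare '--' as an end-of-options
        # marker, so the atest entry point is expected to split argv itself
        # before parsing, e.g.
        #
        #     if '--' in argv:
        #         pivot = argv.index('--')
        #         argv, custom_args = argv[:pivot], argv[pivot + 1:]
        #
        # and attach custom_args to the parsed Namespace for the test runners.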

    def get_args(self):
        """Get the option strings of all optional actions.

        Returns:
            A list of optional arguments.
        """
        argument_list = []
        # Flatten the option strings of _get_optional_actions(), e.g.
        # [['-t', '--test'], ['--info']] -> ['-t', '--test', '--info'].
        for arg in self._get_optional_actions():
            argument_list.extend(arg.option_strings)
        return argument_list
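
    # Minimal usage sketch (illustrative; assumes the atest entry point
    # drives this class roughly as follows):
    #
    #     parser = AtestArgParser()
    #     parser.add_atest_args()
    #     args = parser.parse_args(['-v', 'FrameworksServicesTests'])
    #     known_flags = parser.get_args()   # e.g. ['-a', '--all-abi', ...]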


def print_epilog_text():
    """Print EPILOG_TEXT with pagination.

    Returns:
        The return value of pydoc.pager(), which pages the text to STDOUT.
    """
    epilog_text = EPILOG_TEMPLATE.format(ALL_ABI=ALL_ABI,
                                         BUILD=BUILD,
                                         CLEAR_CACHE=CLEAR_CACHE,
                                         COLLECT_TESTS_ONLY=COLLECT_TESTS_ONLY,
                                         DISABLE_TEARDOWN=DISABLE_TEARDOWN,
                                         DRY_RUN=DRY_RUN,
                                         ENABLE_FILE_PATTERNS=ENABLE_FILE_PATTERNS,
                                         HELP_DESC=HELP_DESC,
                                         HISTORY=HISTORY,
                                         HOST=HOST,
                                         INCLUDE_SUBDIRS=INCLUDE_SUBDIRS,
                                         INFO=INFO,
                                         INSTALL=INSTALL,
                                         INSTANT=INSTANT,
                                         ITERATION=ITERATION,
                                         LATEST_RESULT=LATEST_RESULT,
                                         LIST_MODULES=LIST_MODULES,
                                         NO_METRICS=NO_METRICS,
                                         REBUILD_MODULE_INFO=REBUILD_MODULE_INFO,
                                         RERUN_UNTIL_FAILURE=RERUN_UNTIL_FAILURE,
                                         RETRY_ANY_FAILURE=RETRY_ANY_FAILURE,
                                         SERIAL=SERIAL,
                                         SHARDING=SHARDING,
                                         TEST=TEST,
                                         TEST_MAPPING=TEST_MAPPING,
                                         TF_DEBUG=TF_DEBUG,
                                         TF_TEMPLATE=TF_TEMPLATE,
                                         USER_TYPE=USER_TYPE,
                                         UPDATE_CMD_MAPPING=UPDATE_CMD_MAPPING,
                                         VERBOSE=VERBOSE,
                                         VERSION=VERSION,
                                         VERIFY_CMD_MAPPING=VERIFY_CMD_MAPPING,
                                         WAIT_FOR_DEBUGGER=WAIT_FOR_DEBUGGER)
    return pydoc.pager(epilog_text)
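
# Note: pydoc.pager() pipes the text through the pager named by the PAGER
# environment variable (falling back to a plain dump when no pager or
# terminal is available). A caller is assumed (illustrative, not defined
# here) to invoke this when '-h'/'--help' is parsed, e.g.:
#
#     if args.help:
#         print_epilog_text()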


EPILOG_TEMPLATE = r'''ATEST(1)                                     ASuite/ATest

NAME
    atest - {HELP_DESC}


SYNOPSIS
    atest [OPTION]... [TEST_TARGET]... -- [CUSTOM_ARGS]...


OPTIONS
    The arguments below are categorised by feature and purpose. Arguments marked with (default) apply even when the user does not pass them explicitly.

    [ Testing ]
    -a, --all-abi
        {ALL_ABI}

    -b, --build
        {BUILD} (default)

    -d, --disable-teardown
        {DISABLE_TEARDOWN}

    -D, --tf-debug
        {TF_DEBUG}

    --history
        {HISTORY}

    --host
        {HOST}

    -i, --install
        {INSTALL} (default)

    -m, --rebuild-module-info
        {REBUILD_MODULE_INFO} (default)

    -s, --serial
        {SERIAL}

    --sharding
        {SHARDING}

    -t, --test
        {TEST} (default)

    --tf-template
        {TF_TEMPLATE}

    -w, --wait-for-debugger
        {WAIT_FOR_DEBUGGER}


    [ Test Mapping ]
    -p, --test-mapping
        {TEST_MAPPING}

    --include-subdirs
        {INCLUDE_SUBDIRS}

    --enable-file-patterns
        {ENABLE_FILE_PATTERNS}


    [ Information/Queries ]
    --collect-tests-only
        {COLLECT_TESTS_ONLY}

    --info
        {INFO}

    -L, --list-modules
        {LIST_MODULES}

    --latest-result
        {LATEST_RESULT}

    -v, --verbose
        {VERBOSE}

    -V, --version
        {VERSION}


    [ Dry-Run and Caching ]
    --dry-run
        {DRY_RUN}

    -c, --clear-cache
        {CLEAR_CACHE}

    -u, --update-cmd-mapping
        {UPDATE_CMD_MAPPING}

    -y, --verify-cmd-mapping
        {VERIFY_CMD_MAPPING}


    [ Module Parameterization ]
    --instant
        {INSTANT}

    --user-type
        {USER_TYPE}


    [ Iteration Testing ]
    --iterations
        {ITERATION}

    --rerun-until-failure
        {RERUN_UNTIL_FAILURE}

    --retry-any-failure
        {RETRY_ANY_FAILURE}

    [ Metrics ]
    --no-metrics
        {NO_METRICS}


EXAMPLES
    - - - - - - - - -
    IDENTIFYING TESTS
    - - - - - - - - -

    The positional argument <tests> should be a reference to one or more of the tests you'd like to run. Multiple tests can be run in one command by separating test references with spaces.

    Usage template: atest <reference_to_test_1> <reference_to_test_2>

    A <reference_to_test> can be satisfied by the test's MODULE NAME, MODULE:CLASS, CLASS NAME, TF INTEGRATION TEST, FILE PATH or PACKAGE NAME. Explanations and examples of each follow.


    < MODULE NAME >

        Identifying a test by its module name will run the entire module. Input the name as it appears in the LOCAL_MODULE or LOCAL_PACKAGE_NAME variables in that test's Android.mk or Android.bp file.

        Note: Use < TF INTEGRATION TEST > to run non-module tests integrated directly into TradeFed.

        Examples:
            atest FrameworksServicesTests
            atest CtsJankDeviceTestCases


    < MODULE:CLASS >

        Identifying a test by its class name will run just the tests in that class and not the whole module. MODULE:CLASS is the preferred way to run a single class. MODULE is the same as described above. CLASS is the name of the test class in the .java file. It can either be the fully qualified class name or just the basic name.

        Examples:
            atest FrameworksServicesTests:ScreenDecorWindowTests
            atest FrameworksServicesTests:com.android.server.wm.ScreenDecorWindowTests
            atest CtsJankDeviceTestCases:CtsDeviceJankUi


    < CLASS NAME >

        A single class can also be run by referencing the class name without the module name.

        Examples:
            atest ScreenDecorWindowTests
            atest CtsDeviceJankUi

        However, this will take more time than the equivalent MODULE:CLASS reference, so we suggest using a MODULE:CLASS reference whenever possible. The examples below are ordered by performance, from fastest to slowest:

        Examples:
            atest FrameworksServicesTests:com.android.server.wm.ScreenDecorWindowTests
            atest FrameworksServicesTests:ScreenDecorWindowTests
            atest ScreenDecorWindowTests

    < TF INTEGRATION TEST >

        To run tests that are integrated directly into TradeFed (non-modules), input the name as it appears in the output of the "tradefed.sh list configs" cmd.

        Examples:
            atest example/reboot
            atest native-benchmark


    < FILE PATH >

        Both module-based tests and integration-based tests can be run by inputting the path to their test file or dir as appropriate. A single class can also be run by inputting the path to the class's java file.

        Both relative and absolute paths are supported.

        Example - 2 ways to run the `CtsJankDeviceTestCases` module via path:
        1. run module from android <repo root>:
            atest cts/tests/jank/jank

        2. from <android root>/cts/tests/jank:
            atest .

        Example - run a specific class within the CtsJankDeviceTestCases module from <android repo> root via path:
            atest cts/tests/jank/src/android/jank/cts/ui/CtsDeviceJankUi.java

        Example - run an integration test from <android repo> root via path:
            atest tools/tradefederation/contrib/res/config/example/reboot.xml


    < PACKAGE NAME >

        Atest supports searching for tests by package name as well.

        Examples:
            atest com.android.server.wm
            atest android.jank.cts


    - - - - - - - - - - - - - - - - - - - - - - - - - -
    SPECIFYING INDIVIDUAL STEPS: BUILD, INSTALL OR RUN
    - - - - - - - - - - - - - - - - - - - - - - - - - -

    The -b, -i and -t options allow you to specify which steps you want to run. If none of those options are given, then all steps are run. If any of these options are provided then only the listed steps are run.

    Note: -i alone is not currently supported and can only be included with -t.
          Both -b and -t can be run alone.

    Examples:
        atest -b <test>    (just build targets)
        atest -t <test>    (run tests only)
        atest -it <test>   (install apk and run tests)
        atest -bt <test>   (build targets, run tests, but skip installing apk)


    Atest now has the ability to force a test to skip its cleanup/teardown step. Many tests, e.g. CTS, clean up the device after the test is run, so trying to rerun your test with -t will fail without the --disable-teardown parameter. Use -d before -t to skip the test clean-up step and test iteratively.

        atest -d <test>    (disable installing apk and cleaning up device)
        atest -t <test>

    Note that -t disables both setup/install and teardown/cleanup of the device. So you can continue to rerun your test with just

        atest -t <test>

    as many times as you want.


    - - - - - - - - - - - - -
    RUNNING SPECIFIC METHODS
    - - - - - - - - - - - - -

    It is possible to run only specific methods within a test class. To run only specific methods, identify the class in any of the ways supported for identifying a class (MODULE:CLASS, FILE PATH, etc) and then append the name of the method or methods using the following template:

        <reference_to_class>#<method1>

    Multiple methods can be specified with commas:

        <reference_to_class>#<method1>,<method2>,<method3>...

    Examples:
        atest com.android.server.wm.ScreenDecorWindowTests#testMultipleDecors

        atest FrameworksServicesTests:ScreenDecorWindowTests#testFlagChange,testRemoval


    - - - - - - - - - - - - -
    RUNNING MULTIPLE CLASSES
    - - - - - - - - - - - - -

    To run multiple classes, delimit them with spaces just like you would when running multiple tests. Atest will handle building and running classes in the most efficient way possible, so specifying a subset of classes in a module will improve performance over running the whole module.


    Examples:
        - two classes in the same module:
            atest FrameworksServicesTests:ScreenDecorWindowTests FrameworksServicesTests:DimmerTests

        - two classes, different modules:
            atest FrameworksServicesTests:ScreenDecorWindowTests CtsJankDeviceTestCases:CtsDeviceJankUi


    - - - - - - - - - - -
    RUNNING NATIVE TESTS
    - - - - - - - - - - -

    Atest can run native tests.

    Example:
        - Input tests:
            atest -a libinput_tests inputflinger_tests

    Use -a|--all-abi to run the tests for all available device architectures, which in this example are armeabi-v7a (ARM 32-bit) and arm64-v8a (ARM 64-bit).

    To select a specific native test to run, use a colon (:) to specify the test name and a hashtag (#) to further specify an individual method. For example, for the following test definition:

        TEST_F(InputDispatcherTest, InjectInputEvent_ValidatesKeyEvents)

    You can run the entire test using:

        atest inputflinger_tests:InputDispatcherTest

    or an individual test method using:

        atest inputflinger_tests:InputDispatcherTest#InjectInputEvent_ValidatesKeyEvents


    - - - - - - - - - - - - - -
    RUNNING TESTS IN ITERATION
    - - - - - - - - - - - - - -

    To run tests in iterations, simply pass the --iterations argument. Whether the tests pass or fail, atest won't stop testing until the max iteration is reached.

    Example:
        atest <test> --iterations      (10 iterations by default)
        atest <test> --iterations 5    (run <test> 5 times)

    Two approaches that assist users to detect flaky tests:

    1) Run all tests until a failure occurs or the max iteration is reached.

    Example:
        - 10 iterations (by default):
            atest <test> --rerun-until-failure
        - stop when a failure occurs or the 20th run is reached:
            atest <test> --rerun-until-failure 20

    2) Run failed tests until they pass or the max iteration is reached.

    Example:
        - 10 iterations (by default):
            atest <test> --retry-any-failure
        - stop when the tests pass or the 20th run is reached:
            atest <test> --retry-any-failure 20

    - - - - - - - - - - - - - - - -
    REGRESSION DETECTION (obsolete)
    - - - - - - - - - - - - - - - -

    ********************** Warning **********************
    Please STOP using the arguments below -- they are obsolete and will be removed in the near future:
        --detect-regression
        --generate-baseline
        --generate-new-metrics

    Please check out RUNNING TESTS IN ITERATION for alternatives.
    ******************************************************

    Generate pre-patch or post-patch metrics without running regression detection:

    Example:
        atest <test> --generate-baseline <optional iter>
        atest <test> --generate-new-metrics <optional iter>

    Local regression detection can be run in three options:

    1) Provide a folder containing baseline (pre-patch) metrics (generated previously). Atest will run the tests n (default 5) iterations, generate a new set of post-patch metrics, and compare those against the existing metrics.

    Example:
        atest <test> --detect-regression </path/to/baseline> --generate-new-metrics <optional iter>

    2) Provide a folder containing post-patch metrics (generated previously). Atest will run the tests n (default 5) iterations, generate a new set of pre-patch metrics, and compare those against those provided. Note: the developer needs to revert the device/tests to the pre-patch state to generate baseline metrics.

    Example:
        atest <test> --detect-regression </path/to/new> --generate-baseline <optional iter>

    3) Provide 2 folders containing both pre-patch and post-patch metrics. Atest will run no tests, only the regression detection algorithm.

    Example:
        atest --detect-regression </path/to/baseline> </path/to/new>


    - - - - - - - - - - - -
    TESTS IN TEST MAPPING
    - - - - - - - - - - - -

    Atest can run tests in TEST_MAPPING files:

    1) Run presubmit tests in TEST_MAPPING files in current and parent
       directories. You can also specify a target directory.

    Example:
        atest
        (run presubmit tests in TEST_MAPPING files in current and parent directories)
        atest --test-mapping </path/to/project>
        (run presubmit tests in TEST_MAPPING files in </path/to/project> and its parent directories)

    2) Run a specified test group in TEST_MAPPING files.

    Example:
        atest :postsubmit
        (run postsubmit tests in TEST_MAPPING files in current and parent directories)
        atest :all
        (run tests from all groups in TEST_MAPPING files)
        atest --test-mapping </path/to/project>:postsubmit
        (run postsubmit tests in TEST_MAPPING files in </path/to/project> and its parent directories)

    3) Run tests in TEST_MAPPING files including subdirectories.

    By default, atest will only search for tests in TEST_MAPPING files in the current (or given) directory and its parent directories. If you want to run tests in TEST_MAPPING files in the subdirectories, you can use the --include-subdirs option to force atest to include those tests too.

    Example:
        atest --include-subdirs [optional </path/to/project>:<test_group_name>]
        (run presubmit tests in TEST_MAPPING files in current, sub and parent directories)
    A path can be provided optionally if you want to search for tests in a given directory, with an optional test group name. By default, the test group is presubmit.


    - - - - - - - - - - - - - -
    ADDITIONAL ARGS TO TRADEFED
    - - - - - - - - - - - - - -

    When trying to pass custom arguments for the test runners, everything after '--'
    will be consumed as custom args.

    Example:
        atest -v <test> -- <custom_args1> <custom_args2>


                                                                     2019-12-19
'''