def testCantRestoreInvalidSnapshot(self):
    """Restoring a snapshot that was never created must fail harmlessly."""
    with sudo.SudoKeepAlive():
        marker = os.path.join(self.chroot, 'etc', 'test_file')
        osutils.Touch(marker)

        returncode, _ = self._crosSdk(['--snapshot-restore', 'test'])
        self.assertNotEqual(returncode, 0)

        # A failed restore leaves the existing chroot contents untouched.
        self.assertExists(marker)
def main(argv):
    """Script entry point: generate a sysroot inside a scoped temp dir."""
    opts = ParseCommandLine(argv)
    FinishParsing(opts)

    # Sysroot generation only makes sense from within the chroot.
    cros_build_lib.AssertInsideChroot()

    with sudo.SudoKeepAlive(ttyless_sudo=False):
        with osutils.TempDir(set_global=True, sudo_rm=True) as tempdir:
            sysroot_path = os.path.join(tempdir, SYSROOT)
            os.mkdir(sysroot_path)
            GenerateSysroot(sysroot_path, opts).Perform()
def testCreateSnapshotMountsAsNeeded(self):
    """--snapshot-create should remount an unmounted chroot on its own."""
    with sudo.SudoKeepAlive():
        # Start from an unmounted chroot.
        cros_sdk_lib.CleanupChrootMount(self.chroot)

        returncode, _ = self._crosSdk(['--snapshot-create', 'test'])
        self.assertEqual(returncode, 0)
        # Creating the snapshot should have remounted the chroot.
        self.assertExists(self.chroot_version_file)

        returncode, listing = self._crosSdk(['--snapshot-list'])
        self.assertEqual(returncode, 0)
        self.assertEqual(listing.strip(), 'test')
def main(argv):
    """Script entry point: emit tidy warnings into a scoped temp dir."""
    opts = ParseCommandLine(argv)
    FinishParsing(opts)

    # Warning generation only makes sense from within the chroot.
    cros_build_lib.AssertInsideChroot()

    with sudo.SudoKeepAlive(ttyless_sudo=False):
        with osutils.TempDir(set_global=True, sudo_rm=True) as workdir:
            out_dir = os.path.join(workdir, TIDY_WARNINGS)
            os.mkdir(out_dir)
            GenerateTidyWarnings(out_dir, opts).Perform()
def setUp(self):
    """Build the minimal mounted chroot layout that cros_sdk will accept."""
    with sudo.SudoKeepAlive():
        # Create just enough of a chroot to fool cros_sdk into accepting it.
        self.chroot = os.path.join(self.tempdir, 'chroot')
        cros_sdk_lib.MountChroot(self.chroot, create=True)
        logging.debug('Chroot mounted on %s', self.chroot)

        etc_dir = os.path.join(self.chroot, 'etc')
        osutils.SafeMakedirsNonRoot(etc_dir)
        self.chroot_version_file = os.path.join(etc_dir, 'cros_chroot_version')
        osutils.Touch(self.chroot_version_file, makedirs=True)
def testRestoreSnapshot(self):
    """Restoring a snapshot brings back its files and consumes the snapshot."""
    with sudo.SudoKeepAlive():
        marker = os.path.join(self.chroot, 'etc', 'test_file')
        osutils.Touch(marker)

        returncode, _ = self._crosSdk(['--snapshot-create', 'test'])
        self.assertEqual(returncode, 0)

        # Delete the file, then restore the snapshot that still has it.
        osutils.SafeUnlink(marker)
        returncode, _ = self._crosSdk(['--snapshot-restore', 'test'])
        self.assertEqual(returncode, 0)

        self.assertTrue(cros_sdk_lib.MountChroot(self.chroot, create=False))
        self.assertExists(marker)

        # A restore consumes the snapshot, so the list is now empty.
        returncode, listing = self._crosSdk(['--snapshot-list'])
        self.assertEqual(returncode, 0)
        self.assertEqual(listing, '')
def testCreateGroups(self):
    """Run many cros_sdk processes in parallel to test for race conditions."""
    with sudo.SudoKeepAlive(), \
         cgroups.SimpleContainChildren('example', sigterm_timeout=5):
        # 20 identical no-arg invocations, at most 10 at a time.
        parallel.RunTasksInProcessPool(self._CrosSdk, [[]] * 20, processes=10)
def tearDown(self):
    """Unmount the test chroot and remove its backing image."""
    with sudo.SudoKeepAlive():
        cros_sdk_lib.CleanupChrootMount(self.chroot, delete_image=True)
def testLosetupCommandPresent(self):
    """Check for commands from the mount package."""
    with sudo.SudoKeepAlive():
        proc = cros_build_lib.RunCommand(['losetup', '--help'],
                                         error_code_ok=True)
        self.assertEqual(proc.returncode, 0)
def testThinProvisioningToolsPresent(self):
    """Check for commands from the thin-provisioning-tools package."""
    with sudo.SudoKeepAlive():
        proc = cros_build_lib.RunCommand(['thin_check', '-V'],
                                         error_code_ok=True)
        self.assertEqual(proc.returncode, 0)
def testLvmCommandsPresent(self):
    """Check for commands from the lvm2 package."""
    with sudo.SudoKeepAlive():
        proc = cros_build_lib.RunCommand(['lvs', '--version'],
                                         error_code_ok=True)
        self.assertEqual(proc.returncode, 0)
def main():
    """Entry point for the autoupdate test harness.

    Parses command line options, primes the payload update cache and the
    download folder, optionally starts a devserver (only needed when an
    update cache exists), then runs the AU test suite either in parallel
    or serially and dies on failure.
    """
    test_helper.SetupCommonLoggingFormat()
    parser = optparse.OptionParser()
    parser.add_option('-b', '--base_image', help='path to the base image.')
    parser.add_option('-r', '--board', help='board for the images.')
    parser.add_option('--no_delta', action='store_false', default=True,
                      dest='delta', help='Disable using delta updates.')
    parser.add_option('--no_graphics', action='store_true',
                      help='Disable graphics for the vm test.')
    parser.add_option('-j', '--jobs',
                      default=test_helper.CalculateDefaultJobs(), type=int,
                      help='Number of simultaneous jobs')
    parser.add_option('--payload_signing_key', default=None,
                      help='Path to the private key used to sign payloads '
                           'with.')
    parser.add_option('-q', '--quick_test', default=False, action='store_true',
                      help='Use a basic test to verify image.')
    parser.add_option('-m', '--remote', help='Remote address for real test.')
    parser.add_option('-t', '--target_image', help='path to the target image.')
    parser.add_option('--test_results_root', default=None,
                      help='Root directory to store test results. Should '
                           'be defined relative to chroot root.')
    parser.add_option('--test_prefix', default='test',
                      help='Only runs tests with specific prefix i.e. '
                           'testFullUpdateWipeStateful.')
    parser.add_option('-p', '--type', default='vm',
                      help='type of test to run: [vm, real, gce]. Default: vm.')
    parser.add_option('--verbose', default=True, action='store_true',
                      help='Print out rather than capture output as much as '
                           'possible.')
    parser.add_option('--whitelist_chrome_crashes', default=False,
                      dest='whitelist_chrome_crashes', action='store_true',
                      help='Treat Chrome crashes as non-fatal.')
    parser.add_option('--verify_suite_name', default=None,
                      help='Specify the verify suite to run.')
    parser.add_option('--parallel', default=False, dest='parallel',
                      action='store_true',
                      help='Run multiple test stages in parallel (applies only '
                           'to vm tests). Default: False')
    parser.add_option('--ssh_private_key', default=None,
                      help='Path to the private key to use to ssh into the '
                           'image as the root user.')
    (options, leftover_args) = parser.parse_args()
    CheckOptions(parser, options, leftover_args)

    # Generate cache of updates to use during test harness.
    update_cache = _ReadUpdateCache(options.type, options.target_image)
    if not update_cache:
        msg = ('No update cache found. Update testing will not work. Run '
               ' cros_generate_update_payloads if this was not intended.')
        logging.info(msg)

    # Create download folder for payloads for testing.
    download_folder = os.path.join(os.path.realpath(os.path.curdir),
                                   'latest_download')
    if not os.path.exists(download_folder):
        os.makedirs(download_folder)

    with sudo.SudoKeepAlive():
        au_worker.AUWorker.SetUpdateCache(update_cache)
        my_server = None
        try:
            # Only start a devserver if we'll need it.
            if update_cache:
                my_server = dev_server_wrapper.DevServerWrapper(
                    port=dev_server_wrapper.DEFAULT_PORT,
                    log_dir=options.test_results_root)
                my_server.Start()

            # BUG FIX: the original condition was
            #   options.type == 'vm' or options.type == 'gce' and options.parallel
            # Since `and` binds tighter than `or`, that parsed as
            #   vm or (gce and parallel)
            # which ran the parallel path for EVERY vm test, ignoring the
            # opt-in --parallel flag. Parallel is only meant for vm/gce runs
            # when --parallel is given.
            if options.type in ('vm', 'gce') and options.parallel:
                _RunTestsInParallel(options)
            else:
                # TODO(sosa) - Take in a machine pool for a real test.
                # Can't run in parallel with only one remote device.
                test_suite = _PrepareTestSuite(options)
                test_result = unittest.TextTestRunner().run(test_suite)
                if not test_result.wasSuccessful():
                    cros_build_lib.Die('Test harness failed.')
        finally:
            # Always shut the devserver down, even if the tests died.
            if my_server:
                my_server.Stop()
def main():
    """Entry point for the test-payload generation script.

    Parses options selecting which payloads to produce and which images to
    use, then — under a file lock so only one instance runs at a time —
    generates test images, computes payload requirements, produces the
    payloads, and dumps the resulting update cache to disk.
    """
    test_helper.SetupCommonLoggingFormat()
    parser = optparse.OptionParser()

    # Options related to which payloads to generate.
    parser.add_option('--basic_suite', default=False, action='store_true',
                      help='Prepare to run the basic au test suite.')
    parser.add_option('--full_suite', default=False, action='store_true',
                      help='Prepare to run the full au test suite.')
    parser.add_option('--full_payload', default=False, action='store_true',
                      help='Generate the full update payload and store it in '
                      'the nplus1 archive dir.')
    parser.add_option('--nplus1', default=False, action='store_true',
                      help='Produce nplus1 updates for testing in lab and store '
                      'them in the nplus1 archive dir.')
    parser.add_option('--nplus1_archive_dir', default=None,
                      help='Archive nplus1 updates into this directory.')

    # Options related to how to generate test payloads for the test harness.
    # NOTE: --novm stores False into `vm`, i.e. the default (no flag) is
    # vm=True.
    parser.add_option('--novm', default=True, action='store_false', dest='vm',
                      help='Test Harness payloads will not be tested in a VM.')
    parser.add_option('--private_key',
                      help='Private key to sign payloads for test harness.')
    parser.add_option('--public_key',
                      help='Public key to verify payloads for test harness.')

    # Options related to the images to test.
    parser.add_option('--board', help='Board used for the images.')
    parser.add_option('--base', help='Image we want to test updates from.')
    parser.add_option('--base_latest_from_dir', help='Ignore the base '
                      'option and use the latest image from the specified '
                      'directory as the base image. If none exists, default to '
                      'target image.')
    parser.add_option('--target', help='Image we want to test updates to.')

    # Miscellaneous options.
    parser.add_option('--jobs', default=test_helper.CalculateDefaultJobs(),
                      type=int,
                      help='Number of payloads to generate in parallel.')

    # Positional arguments are ignored; only the parsed options are used.
    options = parser.parse_args()[0]
    CheckOptions(parser, options)
    if options.nplus1_archive_dir and not os.path.exists(
        options.nplus1_archive_dir):
        os.makedirs(options.nplus1_archive_dir)

    # Don't allow this code to be run more than once at a time.
    lock_path = os.path.join(os.path.dirname(__file__), '.lock_file')
    with locking.FileLock(lock_path, 'generate payloads lock') as lock:
        lock.write_lock()
        with sudo.SudoKeepAlive():
            generator = UpdatePayloadGenerator(options)
            generator.GenerateImagesForTesting()
            generator.GeneratePayloadRequirements()
            cache = generator.GeneratePayloads()
            generator.DumpCacheToDisk(cache)