def test_worker_doesnt_leak_fds(self):
    """Run several tests on one worker and verify the open-fd count is stable."""
    repo, repoName, commitHash, worker = self.get_worker("simple_project")

    self.assertTrue(
        self.runWorkerTest(worker, "testId", repoName, commitHash,
                           "build/linux", WorkerState.DummyWorkerCallbacks(),
                           False)[0])

    def runGoodTests(count, startIndex):
        # Run 'count' instances of good/linux, asserting each passes.
        # Returns the next unused test index.
        for offset in xrange(count):
            ix = startIndex + offset
            self.assertTrue(
                self.runWorkerTest(worker, "testId%s" % ix, repoName,
                                   commitHash, "good/linux",
                                   WorkerState.DummyWorkerCallbacks(),
                                   False)[0])
        return startIndex + count

    #need to use the connection pools because they can leave some sockets open
    nextIx = runGoodTests(3, 1)
    fdsBefore = len(self.get_fds())

    #but want to verify we're not actually leaking FDs once we're in a steadystate
    runGoodTests(3, nextIx)
    fdsAfter = len(self.get_fds())

    self.assertEqual(fdsBefore, fdsAfter)
def test_worker_basic(self):
    """Build, run a passing and a failing test, and check stored artifacts."""
    repo, repoName, commitHash, worker = self.get_worker("simple_project")

    # the build must succeed before dependent tests can run
    buildPassed = self.runWorkerTest(
        worker, "testId", repoName, commitHash, "build/linux",
        WorkerState.DummyWorkerCallbacks(), False)[0]
    self.assertTrue(buildPassed)

    goodPassed = self.runWorkerTest(
        worker, "testId2", repoName, commitHash, "good/linux",
        WorkerState.DummyWorkerCallbacks(), False)[0]
    self.assertTrue(goodPassed)

    badPassed = self.runWorkerTest(
        worker, "testId3", repoName, commitHash, "bad/linux",
        WorkerState.DummyWorkerCallbacks(), False)[0]
    self.assertFalse(badPassed)

    testHash = self.get_fully_resolved_definition(
        worker, repoName, commitHash, "bad/linux").hash

    # the failing test should still have uploaded its result artifacts
    keys = worker.artifactStorage.testResultKeysFor(testHash, "testId3")
    self.assertEqual(
        sorted(keys),
        ["bad_s_linux.tar.gz", "test_looper_log.txt", "test_result.json"])

    # and the stored contents should be nonempty
    data = worker.artifactStorage.testContents(testHash, "testId3", keys[0])
    self.assertTrue(len(data) > 0)
def test_worker_build_artifacts_go_to_correct_place(self):
    """A downstream test should be able to see the build's output artifacts."""
    repo, repoName, commitHash, worker = self.get_worker("simple_project")

    buildPassed = self.runWorkerTest(
        worker, "testId", repoName, commitHash, "build/linux",
        WorkerState.DummyWorkerCallbacks(), False)[0]
    self.assertTrue(buildPassed)

    checkPassed = self.runWorkerTest(
        worker, "testId2", repoName, commitHash, "check_build_output/linux",
        WorkerState.DummyWorkerCallbacks(), False)[0]
    self.assertTrue(
        checkPassed,
        self.get_failure_log(worker, repoName, commitHash, "testId2"))
def test_subdocker_retains_network(self):
    """A test that launches docker-in-docker should retain network access.

    NOTE(review): the original captured len(docker_client.containers.list())
    into an unused local, presumably intending a never-written assertion that
    no containers leak; the dead capture has been removed. If container-leak
    checking is wanted, re-capture the count and assert it after the runs.
    """
    repo, repoName, commitHash, worker = self.get_worker("simple_project")

    self.assertTrue(
        self.runWorkerTest(worker, "testId", repoName, commitHash,
                           "build/linux", WorkerState.DummyWorkerCallbacks(),
                           False)[0])

    self.assertTrue(
        self.runWorkerTest(worker, "testId2", repoName, commitHash,
                           "docker/linux", WorkerState.DummyWorkerCallbacks(),
                           False)[0],
        self.get_failure_log(worker, repoName, commitHash, "testId2"))
def test_individual_test_results(self):
    """Per-test-case logs should be stored and retrievable as html content."""
    repo, repoName, commitHash, worker = self.get_worker("simple_project")

    success, results = self.runWorkerTest(
        worker, "testId1", repoName, commitHash,
        "test_with_individual_failures_1/linux",
        WorkerState.DummyWorkerCallbacks(), isDeploy=False)
    self.assertTrue(success)

    # results maps test name -> (passed, hasLogs); keep only those with logs
    testsWithLogs = [name for name in results if results[name][1]]
    self.assertTrue(
        testsWithLogs,
        self.get_failure_log(worker, repoName, commitHash, "testId1"))

    test_def = self.get_fully_resolved_definition(
        worker, repoName, commitHash, "test_with_individual_failures_1/linux")

    for testName in testsWithLogs:
        keysAndSizes = worker.artifactStorage.testResultKeysAndSizesForIndividualTest(
            test_def.hash, "testId1", testName)
        self.assertTrue(keysAndSizes)

        # every stored key must render as nonempty html
        for key, _size in keysAndSizes:
            self.assertTrue(
                worker.artifactStorage.testContentsHtml(
                    test_def.hash, "testId1", key))
def test_worker_cant_run_tests_without_build(self):
    """Running a test whose build dependency never ran must fail."""
    repo, repoName, commitHash, worker = self.get_worker("simple_project")

    succeeded = self.runWorkerTest(
        worker, "testId", repoName, commitHash, "good/linux",
        WorkerState.DummyWorkerCallbacks(), False)[0]

    # no build/linux was run first, so this cannot succeed
    self.assertFalse(succeeded)
def test_worker_stage_flow(self):
    """Staged builds publish artifacts in order and are consumable downstream."""
    repo, repoName, commitHash, worker = self.get_worker("project_with_stages")

    stagedBuildCallbacks = WorkerState.DummyWorkerCallbacks()
    consumerOneCallbacks = WorkerState.DummyWorkerCallbacks()
    consumerTwoCallbacks = WorkerState.DummyWorkerCallbacks()

    self.assertTrue(
        self.runWorkerTest(worker, "test0", repoName, commitHash,
                           "build_with_stages", stagedBuildCallbacks,
                           isDeploy=False)[0],
        self.get_failure_log(worker, repoName, commitHash, "test0"))

    # the staged build must have uploaded its artifacts in stage order
    self.assertEqual(stagedBuildCallbacks.artifacts,
                     ["first_stage", "second_stage"])

    self.assertTrue(
        self.runWorkerTest(worker, "test2", repoName, commitHash,
                           "build_consuming_stage_1", consumerOneCallbacks,
                           isDeploy=False)[0])
    self.assertTrue(
        self.runWorkerTest(worker, "test3", repoName, commitHash,
                           "build_consuming_stage_2", consumerTwoCallbacks,
                           isDeploy=False)[0])
def test_commit_messages(self):
    """Tests can read the commit message of their own commit and of deps."""
    repo, repoName, commitHash, worker = self.get_worker("simple_project")
    repo2, _, commit2 = self.get_repo("simple_project_2")
    commit2Name, commit2Hash = commit2[0]

    # each (testId, testName) pair runs against the second project's commit
    for testId, testName in [
            ("test0", "test_commit_message/linux"),
            ("test2", "test_commit_message_in_dependencies/linux")]:
        callbacks = WorkerState.DummyWorkerCallbacks()
        self.assertTrue(
            self.runWorkerTest(worker, testId, commit2Name, commit2Hash,
                               testName, callbacks, isDeploy=False)[0])
def get_worker(self, repo_name):
    """Create a WorkerState backed by local artifact storage for repo_name.

    Returns (source_repo, repoName, commitHash, worker).
    """
    source_repo, source_control, commits = self.get_repo(repo_name)
    repoName, commitHash = commits[0]

    # store build and test artifacts on local disk under the test directory
    artifacts_config = Config.ArtifactsConfig.LocalDisk(
        path_to_build_artifacts=os.path.join(self.testdir, "build_artifacts"),
        path_to_test_artifacts=os.path.join(self.testdir, "test_artifacts"))

    worker = WorkerState.WorkerState(
        "test_looper_testing",
        os.path.join(self.testdir, "worker"),
        source_control,
        ArtifactStorage.LocalArtifactStorage(artifacts_config),
        "worker",
        Config.HardwareConfig(cores=1, ram_gb=4))

    return source_repo, repoName, commitHash, worker
def test_summary(self):
    """The per-test summary maps test names to (succeeded, hasLogs) pairs."""
    repo, repoName, commitHash, worker = self.get_worker("simple_project")
    repo2, _, commit2 = self.get_repo("simple_project_2")
    commit2Name, commit2Hash = commit2[0]

    summary = self.runWorkerTest(
        worker, "testId1", commit2Name, commit2Hash,
        "test_with_individual_failures/linux",
        WorkerState.DummyWorkerCallbacks(), isDeploy=False)[1]

    self.assertEqual(summary, {
        "Test1": (True, False),
        "Test2": (False, False)
    })
def createTestWorker(config, worker_path, machineId):
    """Build a TestLooperWorker for machineId from a json-encoded config.

    The worker is sized to the host machine's cpu count and physical memory.
    """
    worker_config = algebraic_to_json.Encoder().from_json(
        config, Config.WorkerConfig)

    source_control = SourceControlFromConfig.getFromConfig(
        os.path.join(worker_path, "worker_repo_cache"),
        worker_config.source_control)

    artifact_storage = ArtifactStorage.storageFromConfig(
        worker_config.artifacts)

    # round total memory to whole gigabytes (+.1 guards against float underestimation)
    ram_gb = int(psutil.virtual_memory().total / 1024.0 / 1024.0 / 1024.0 + .1)
    hardware = Config.HardwareConfig(
        cores=multiprocessing.cpu_count(), ram_gb=ram_gb)

    workerState = WorkerState.WorkerState(
        name_prefix="test_looper_worker",
        worker_directory=worker_path,
        source_control=source_control,
        artifactStorage=artifact_storage,
        machineId=machineId,
        hardwareConfig=hardware,
        docker_image_repo=worker_config.server_ports.docker_image_repo)

    return TestLooperWorker.TestLooperWorker(
        workerState, machineId, worker_config.server_ports, True, 2.0)
def boot_worker(self, hardware_config, os_config):
    """Boot a fake worker for the given hardware/os combination.

    Returns the new machineId. Raises UnbootableWorkerCombination for any
    os_config that is neither WindowsVM nor LinuxWithDocker.
    """
    with self._lock:
        assert hardware_config in self.all_hardware_configs()

        logging.info("Trying to boot %s / %s", hardware_config, os_config)

        if os_config.matches.WindowsVM:
            # windows workers are modeled as numbered one-shot machines
            machineId = "worker_" + str(self.windows_oneshots)
            logging.info("Booted fake windows one-shot worker %s" % machineId)
            self.windows_oneshots += 1
            self._machineBooted(machineId, hardware_config, os_config, True)
            return machineId

        # only linux-with-docker remains bootable
        if not os_config.matches.LinuxWithDocker:
            raise UnbootableWorkerCombination(hardware_config, os_config)

        machineId = "worker_" + str(uuid.uuid4()).replace("-", "")[:10]

        worker = TestLooperWorker.TestLooperWorker(
            WorkerState.WorkerState(
                self.config.machine_management.docker_scope + "_" + machineId,
                os.path.join(
                    self.config.machine_management.local_storage_path,
                    machineId),
                self.source_control,
                self.artifactStorage,
                machineId,
                hardware_config),
            machineId,
            self.config.server_ports,
            False,
            2.0)

        self._machineBooted(machineId, hardware_config, os_config, worker)
        worker.start()

        return machineId
def test_cross_project_dependencies(self):
    """Tests in simple_project_2 can consume builds from simple_project."""
    repo, repoName, commitHash, worker = self.get_worker("simple_project")
    repo2, _, commit2 = self.get_repo("simple_project_2")
    commit2Name, commit2Hash = commit2[0]

    # the base build comes from the first project
    self.assertTrue(
        self.runWorkerTest(worker, "testId", repoName, commitHash,
                           "build/linux", WorkerState.DummyWorkerCallbacks(),
                           False)[0])

    def checkSecondProjectTest(testId, testName, shouldPass):
        # run one test against the second project's commit and assert outcome
        passed = self.runWorkerTest(worker, testId, commit2Name, commit2Hash,
                                    testName,
                                    WorkerState.DummyWorkerCallbacks(),
                                    False)[0]
        if shouldPass:
            self.assertTrue(
                passed,
                self.get_failure_log(worker, commit2Name, commit2Hash, testId))
        else:
            self.assertFalse(passed)

    checkSecondProjectTest("testId2", "build2/linux", True)
    checkSecondProjectTest("testId3", "test2/linux", True)
    checkSecondProjectTest("testId6", "test2/linux_dependent", True)
    checkSecondProjectTest("testId7", "test3/linux_dependent", True)
    checkSecondProjectTest("testId4", "test2_fails/linux", False)
    checkSecondProjectTest("testId5", "test2_dep_from_env/linux2", True)
def runTest(name):
    # Run one named test (asserting success) and return the stripped,
    # nonempty lines of results.txt from its uploaded tarball.
    # Builds produce no results tarball, so they return None.
    testId[0] += 1
    testName = "test_" + str(testId[0])
    callbacks = WorkerState.DummyWorkerCallbacks()

    succeeded = self.runWorkerTest(worker, testName, repoName, commitHash,
                                   name, callbacks, False)[0]
    self.assertTrue(succeeded, "".join(callbacks.logMessages))

    resolver = TestDefinitionResolver.TestDefinitionResolver(
        worker.getRepoCacheByName)
    testHash = resolver.testDefinitionsFor(repoName, commitHash)[name].hash

    if name.startswith("build/"):
        return

    contents = worker.artifactStorage.testContents(
        testHash, testName,
        worker.artifactStorage.sanitizeName(name + ".tar.gz"))

    with tarfile.open(fileobj=StringIO.StringIO(contents)) as tf:
        lines = tf.extractfile("./results.txt").read().split("\n")

    return [line.strip() for line in lines if line.strip()]
def runBuildOrTest(self,
                   all_tests,
                   reponame,
                   testDef,
                   cores,
                   nologcapture,
                   nodeps,
                   interactive,
                   seen_already,
                   explicit_cmd=None,
                   artifactSubsetByBuildName=None):
    """Run a single build or test definition locally, building Build deps first.

    Args:
        all_tests: mapping whose values are (testDefinition, repoName) pairs,
            searched by hash to resolve Build dependencies.
        reponame: name of the repo containing testDef; only the last
            ":"-separated segment is shown to the user.
        testDef: the build/test definition to run.
        cores: core count handed to the WorkerStateOverride.
        nologcapture: if True, stream log output to stdout instead of a file.
        nodeps: if True, skip building dependency builds.
        interactive: if True, run with a local interactive terminal.
        seen_already: set of build paths already processed (recursion guard,
            mutated in place).
        explicit_cmd: optional command overriding the definition's own command.
        artifactSubsetByBuildName: map of build name -> artifacts the caller
            needs; enables stopping a build early once they are produced.

    Returns:
        True on success (or if this path was already processed); False if this
        run or any dependency build failed.
    """
    #walk all the repo definitions and make sure everything is up-to-date
    path = self.build_path(testDef.name)

    # recursion guard: each build path is attempted at most once per session
    if path in seen_already:
        return True
    seen_already.add(path)

    if not nodeps:
        # recursively build any Build-type dependencies before this one
        for depname, dep in testDef.dependencies.iteritems():
            if dep.matches.Build:
                # find the definition whose hash matches this dependency
                test_and_repo = None
                for t in all_tests:
                    if all_tests[t][0].hash == dep.buildHash:
                        test_and_repo = all_tests[t]

                if test_and_repo:
                    subdef, subrepo = test_and_repo

                    if not self.runBuildOrTest(
                            all_tests,
                            subrepo,
                            subdef,
                            cores,
                            nologcapture,
                            nodeps,
                            interactive,
                            seen_already,
                            artifactSubsetByBuildName=artifactSubsetByBuildName):
                        print "Dependent build ", self.repoShortname(subrepo.split(":")[-1]), subdef.name, " failed"
                        return False

    print "Building", self.repoShortname(reponame.split(":")[-1]), testDef.name

    # when set, holds the artifacts still outstanding; the build stops early
    # once this list empties (see onStageFinished below)
    artifactsNeeded = None

    if testDef.matches.Build:
        artifactsDefined = self.artifactsInTestDef(testDef)
        artifactsRequested = artifactSubsetByBuildName[testDef.name]

        #determine if we just want to run a subset of the stages in the build.
        # only stop early when the requested set is a proper subset that does
        # not include the final artifact (which needs the whole build anyway)
        if artifactsDefined and set(artifactsRequested) != set(artifactsDefined) and artifactsDefined[-1] not in artifactsRequested:
            print "\tOnly building until we've produced the following: ", artifactSubsetByBuildName[testDef.name]
            artifactsNeeded = artifactSubsetByBuildName[testDef.name]

    worker_state = WorkerStateOverride("test_looper_interactive_", path, self, cores)

    if nologcapture:
        logfile = sys.stdout
    else:
        # log to a timestamped file under <build path>/logs
        logfile_dir = os.path.join(path, "logs")
        worker_state.ensureDirectoryExists(logfile_dir)

        t = time.gmtime()
        log_path = os.path.join(
            logfile_dir,
            "Log-%s-%s-%s-%s-%s-%s.txt" % (t.tm_year, t.tm_mon, t.tm_mday, t.tm_hour, t.tm_min, t.tm_sec))

        logfile = open(log_path, "w")
        print "\tlogging output to ", log_path

    if not interactive:
        class Callbacks:
            # Minimal worker-callback object: forwards log output to the
            # chosen logfile and periodically echoes progress to stdout.
            def __init__(self):
                self.t0 = time.time()
                self.total_lines = 0

            def recordArtifactUploaded(self, artifact):
                # replaced below with onStageFinished
                pass

            def heartbeat(self, logMessage=None):
                if logMessage:
                    logfile.write(logMessage)
                    self.total_lines += logMessage.count("\n")
                    # echo a progress line at most every 10 seconds
                    if time.time() - self.t0 > 10 and not nologcapture:
                        print "\t", time.asctime(), " - ", self.total_lines, " logged"
                        self.t0 = time.time()
                    logfile.flush()

            def terminalOutput(self, output):
                pass

            def subscribeToTerminalInput(self, callback):
                pass

        callbacks = Callbacks()
    else:
        callbacks = WorkerState.DummyWorkerCallbacks(localTerminal=True)

    def onStageFinished(artifact):
        # Returning True from this callback signals the worker to stop the
        # build early; that happens once everything requested is produced.
        print "\tFinished producing artifact", artifact
        if artifactsNeeded is not None:
            if artifact in artifactsNeeded:
                artifactsNeeded.remove(artifact)
                if not artifactsNeeded:
                    #condition for early stopping
                    print "Stopping build early after artifact", artifact, "completed."
                    return True

    # monkey-patch the callback object so stage completions drive early stop
    callbacks.recordArtifactUploaded = onStageFinished

    if not worker_state.runTest("interactive", callbacks, testDef, interactive, command_override=explicit_cmd)[0]:
        print "Build failed. Exiting."
        return False

    return True
def test_cached_source_builds(self):
    """Source caches are built once, reused locally, and re-downloaded after a purge."""
    repo, repoName, commitHash, worker = self.get_worker("simple_project")
    repo2, _, commit2 = self.get_repo("simple_project_2")
    commit2Name, commit2Hash = commit2[0]

    def runCachedSourceTest(testId, callbacks):
        # run the cached-source test against the second project's commit
        return self.runWorkerTest(worker, testId, commit2Name, commit2Hash,
                                  "test3_dep_on_cached_source/linux",
                                  callbacks, isDeploy=False)[0]

    def cacheActions(callbacks):
        # (built, downloaded, extracted) flags scraped from the worker log
        logs = "\n".join(callbacks.logMessages)
        return ("Building source cache" in logs,
                "Downloading source cache" in logs,
                "Extracting source cache" in logs)

    callbacks1 = WorkerState.DummyWorkerCallbacks()
    callbacks2 = WorkerState.DummyWorkerCallbacks()
    callbacks3 = WorkerState.DummyWorkerCallbacks()

    self.assertTrue(runCachedSourceTest("testId1", callbacks1),
                    "\n".join(callbacks1.logMessages))
    self.assertTrue(runCachedSourceTest("testId2", callbacks2))

    uploaded1, downloaded1, extracted1 = cacheActions(callbacks1)
    uploaded2, downloaded2, extracted2 = cacheActions(callbacks2)

    # first run builds the cache from scratch
    self.assertTrue(uploaded1 and not downloaded1 and not extracted1)

    # second run reuses the locally cached copy without re-uploading
    self.assertTrue(extracted2 and not downloaded2 and not uploaded2,
                    (extracted2, downloaded2, uploaded2))

    self.assertTrue(
        worker.artifactStorage.build_exists(commitHash, "source-linux.tar.gz"))

    #after purging, we should have to download the build
    worker.purge_build_cache(0)

    self.assertTrue(runCachedSourceTest("testId3", callbacks3))

    uploaded3, downloaded3, extracted3 = cacheActions(callbacks3)
    self.assertTrue(downloaded3 and extracted3 and not uploaded3)