def test_file_path(default_repo, default_job, default_testcase):
    # A freshly constructed Artifact should have its file storage rooted
    # at the 'artifacts' path, before anything is saved to it.
    instance = Artifact(
        repository=default_repo,
        job=default_job,
        testcase=default_testcase,
    )
    expected_root = 'artifacts'
    assert instance.file.path == expected_root
def save(self, test_list):
    """Persist a batch of parsed test results for this manager's job.

    Creates a ``TestCase`` row per entry in *test_list* (plus an
    ``Artifact`` row for each attached artifact), commits the session,
    then best-effort records aggregate statistics (counts, failures,
    duration) — failures there are logged but never raised.
    """
    # Nothing to do for an empty/None batch.
    if not test_list:
        return

    job = self.job
    organization_id = job.organization_id
    project_id = job.project_id

    # create all test cases
    for test in test_list:
        testcase = TestCase(
            job=job,
            organization_id=organization_id,
            project_id=project_id,
            hash=test.hash,
            name=test.name,
            duration=test.duration,
            message=test.message,
            result=test.result,
        )
        db.session.add(testcase)
        if test.artifacts:
            for ta in test.artifacts:
                # NOTE(review): testcase.id is read before any flush/commit —
                # presumably the model generates its primary key client-side;
                # verify against the TestCase model definition.
                testartifact = Artifact(
                    organization_id=organization_id,
                    project_id=project_id,
                    testcase_id=testcase.id,
                    job_id=job.id,
                    name=ta['name'],
                    # TODO(dcramer): mimetype detection?
                    # type=ta['type'],
                )
                # Decodes and stores the artifact payload on the model's
                # file field before the row is added to the session.
                testartifact.save_base64_content(ta['base64'])
                db.session.add(testartifact)
    db.session.commit()

    # Aggregate statistics are best-effort: a failure here must not undo
    # the already-committed test rows, so we only log the exception.
    try:
        self._record_test_counts(test_list)
        self._record_test_failures(test_list)
        self._record_test_duration(test_list)
    except Exception:
        current_app.logger.exception(
            'Failed to record aggregate test statistics')
def save(self, test_list):
    """Persist a batch of parsed test results for this manager's job.

    Creates a ``TestCase`` row per entry in *test_list* (plus an
    ``Artifact`` row for each attached artifact), flushes the session
    (the surrounding unit of work owns the commit), then best-effort
    records aggregate statistics — failures there are logged, not raised.
    """
    # Nothing to do for an empty/None batch.
    if not test_list:
        return

    job = self.job
    repository_id = job.repository_id

    # create all test cases
    for test in test_list:
        testcase = TestCase(
            job=job,
            repository_id=repository_id,
            hash=test.hash,
            name=test.name,
            duration=test.duration,
            message=test.message,
            result=test.result,
        )
        db.session.add(testcase)
        if test.artifacts:
            for ta in test.artifacts:
                # NOTE(review): testcase.id is read before any flush —
                # presumably the model generates its primary key client-side;
                # verify against the TestCase model definition.
                testartifact = Artifact(
                    repository_id=repository_id,
                    testcase_id=testcase.id,
                    job_id=job.id,
                    name=ta["name"],
                    # TODO(dcramer): mimetype detection?
                    # type=ta['type'],
                )
                # Decodes and stores the artifact payload on the model's
                # file field before the row is added to the session.
                testartifact.save_base64_content(ta["base64"])
                db.session.add(testartifact)
    # Flush (not commit): rows become visible to subsequent queries in
    # this transaction, while the caller controls the final commit.
    db.session.flush()

    # Aggregate statistics are best-effort; log and continue on failure.
    try:
        self._record_test_counts(test_list)
        self._record_test_failures(test_list)
        self._record_test_duration(test_list)
    except Exception:
        current_app.logger.exception(
            "Failed to record aggregate test statistics")
def build_instance(self, data, **kwargs):
    """Post-load hook: materialize the deserialized mapping as an Artifact."""
    instance = Artifact(**data)
    return instance
def build_instance(self, data):
    """Post-load hook: materialize the deserialized mapping as an Artifact."""
    instance = Artifact(**data)
    return instance
def process_pending_artifact(pending_artifact_id, **kwargs):
    """Promote a ``PendingArtifact`` into a real ``Artifact`` on its job.

    Looks up the pending row (unrestricted — this runs outside a tenant),
    resolves the build and job it belongs to, creates the Artifact record,
    copies the stored file reference over, and queues downstream
    processing.

    Raises:
        UnknownBuild: no build matches the pending artifact's identifiers.
        UnknownJob: no job matches the pending artifact's identifiers.
    """
    # Bypass tenant restrictions: no tenant is set yet at this point.
    pending_artifact = PendingArtifact.query.unrestricted_unsafe().get(
        pending_artifact_id)
    if pending_artifact is None:
        current_app.logger.error("PendingArtifact %s not found",
                                 pending_artifact_id)
        return

    # Mark the pending row for deletion up front; the flush makes the
    # delete part of the current transaction without committing it, so a
    # later rollback (see IntegrityError below) can still undo it.
    db.session.delete(pending_artifact)
    db.session.flush()

    # Scope all subsequent queries to the artifact's repository.
    auth.set_current_tenant(
        auth.RepositoryTenant(repository_id=pending_artifact.repository_id))

    build = Build.query.filter(
        Build.repository_id == pending_artifact.repository_id,
        Build.provider == pending_artifact.provider,
        Build.external_id == pending_artifact.external_build_id,
    ).first()
    if not build:
        raise UnknownBuild

    job = Job.query.filter(
        Job.repository_id == pending_artifact.repository_id,
        Job.build_id == build.id,
        Job.provider == pending_artifact.provider,
        Job.external_id == pending_artifact.external_job_id,
    ).first()
    if not job:
        raise UnknownJob

    artifact = Artifact(
        job_id=job.id,
        repository_id=pending_artifact.repository_id,
        name=pending_artifact.name,
        status=Status.queued,
    )

    try:
        db.session.add(artifact)
        # Flush so a duplicate-key violation surfaces here rather than at
        # the final commit.
        db.session.flush()
    except IntegrityError:
        current_app.logger.error(
            "Skipping pending artifact processing (%s) - duplicate key",
            pending_artifact_id,
        )
        # XXX(dcramer): this is more of an error but we make an assumption
        # that this happens because it was already sent
        # The rollback undid the earlier pending-row delete, so delete and
        # commit it again before bailing out.
        db.session.rollback()
        db.session.delete(pending_artifact)
        db.session.commit()
        return

    artifact.file.save(
        pending_artifact.file,
        # XXX(dcramer): we reference the same file, so it lives in the old path
        # "{0}/{1}/{2}_{3}".format(
        #     job.id.hex[:4], job.id.hex[4:], artifact.id.hex, artifact.name
        # ),
    )
    db.session.add(artifact)
    # A finished (non-aborted) job gains a late artifact, so move it back
    # into the result-collection state.
    if job.status == Status.finished and job.result != Result.aborted:
        job.status = Status.collecting_results
        db.session.add(job)
    db.session.commit()

    # Hand off content processing asynchronously.
    process_artifact.delay(artifact_id=artifact.id)