def analyses(app):
    e1 = Ecosystem(name='npm', backend=EcosystemBackend.npm)
    p1 = Package(ecosystem=e1, name='arrify')
    v1 = Version(package=p1, identifier='1.0.1')
    model1 = Analysis(version=v1, started_at=now, finished_at=later)
    app.rdb.session.add(model1)

    e2 = Ecosystem(name='pypi', backend=EcosystemBackend.pypi)
    p2 = Package(ecosystem=e2, name='flexmock')
    v2 = Version(package=p2, identifier='0.10.1')
    model2 = Analysis(version=v2, started_at=later, access_count=1)
    app.rdb.session.add(model2)
    app.rdb.session.commit()

    worker_results2 = {'a': 'b', 'c': 'd', 'e': 'f', 'g': 'h', 'i': 'j',
                       'digests': {'details': [{'artifact': True,
                                                'sha1': '6be7ae55bae2372c7be490321bbe5ead278bb51b'}]}}
    for w, tr in worker_results2.items():
        app.rdb.session.add(WorkerResult(analysis_id=model2.id, worker=w, task_result=tr))

    model3 = Analysis(version=v2, started_at=later, access_count=1,
                      audit={'audit': {'audit': 'audit', 'e': 'f', 'g': 'h'}, 'a': 'b', 'c': 'd'})
    app.rdb.session.add(model3)
    app.rdb.session.commit()

    worker_results3 = {'digests': {'details': [{'artifact': True,
                                                'sha1': '6be7ae55bae2372c7be490321bbe5ead278bb51b'}]}}
    for w, tr in worker_results3.items():
        app.rdb.session.add(WorkerResult(analysis_id=model3.id, worker=w, task_result=tr))

    app.rdb.session.commit()
    return (model1, model2, model3)
def fill_analyses(app):
    ecosystems = [
        Ecosystem(name='pypi', backend=EcosystemBackend.pypi,
                  url='https://pypi.python.org/', fetch_url='https://pypi.python.org/pypi'),
        Ecosystem(name='npm', backend=EcosystemBackend.npm,
                  url='https://www.npmjs.com/', fetch_url='https://registry.npmjs.org/'),
        Ecosystem(name='go', backend=EcosystemBackend.scm),
    ]

    packages = [
        Package(name='flexmock', ecosystem=ecosystems[0]),
        Package(name='requests', ecosystem=ecosystems[0]),
        Package(name='sequence', ecosystem=ecosystems[1]),
        Package(name='arrify', ecosystem=ecosystems[1]),
        Package(name='serve-static', ecosystem=ecosystems[1]),
    ]

    versions = [
        Version(identifier='0.10.1', package=packages[0]),
        Version(identifier='0.9.1', package=packages[0]),
        Version(identifier='2.0.0', package=packages[1]),
        Version(identifier='2.2.1', package=packages[2]),
        Version(identifier='1.0.1', package=packages[3]),
        Version(identifier='1.7.1', package=packages[4]),
    ]

    analyses = [
        Analysis(version=versions[0], started_at=now),                     # pypi/flexmock/0.10.1
        Analysis(version=versions[0], started_at=later, access_count=1),   # pypi/flexmock/0.10.1
        Analysis(version=versions[1], started_at=even_later),              # pypi/flexmock/0.9.1
        Analysis(version=versions[2], started_at=now),                     # pypi/requests/2.0.0
        Analysis(version=versions[3], started_at=later),                   # npm/sequence/2.2.1
        Analysis(version=versions[4], started_at=now, finished_at=later),  # npm/arrify/1.0.1
        Analysis(version=versions[5], started_at=now, finished_at=later,
                 release='npm:serve-static:1.7.1'),                        # npm/serve-static/1.7.1
    ]

    # worker results that correspond to analyses above
    worker_results = [
        WorkerResult(worker='digests', analysis=analyses[1],
                     task_result={'details': [{'artifact': True,
                                               'sha1': '6be7ae55bae2372c7be490321bbe5ead278bb51b'}]}),
        WorkerResult(worker='static_analysis', task_result={'details': []}, analysis=analyses[1]),
        WorkerResult(worker='source_licenses',
                     task_result={'schema': {'name': 'source_licenses', 'version': '1-0-0'}},
                     analysis=analyses[1]),
    ]

    package_gh_usage = [
        PackageGHUsage(name='arrify', count=100, ecosystem_backend='npm'),
    ]

    for a in ecosystems + packages + versions + analyses + worker_results + package_gh_usage:
        app.rdb.session.add(a)
    app.rdb.session.commit()

    return (ecosystems, packages, versions, analyses, worker_results, package_gh_usage)
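# Hypothetical usage sketch, not part of the fixtures above: assuming analyses() and
# fill_analyses() are registered as pytest fixtures built on top of the `app` fixture,
# a test could consume the committed objects like this. The test name and the exact
# assertions are illustrative only.
def test_fill_analyses_commits(app, fill_analyses):
    ecosystems, packages, versions, analyses, worker_results, package_gh_usage = fill_analyses
    # fill_analyses() commits everything, so primary keys should already be assigned
    assert all(e.id is not None for e in ecosystems)
    assert len(analyses) == 7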
def test_retrieve_normal(self):
    wid = 'x'
    w = 'y'
    tr = {'1': '2'}
    wr = WorkerResult(analysis=self.a, worker_id=wid, worker=w, task_result=tr)
    self.s.add(wr)
    self.s.commit()
    assert self.bp.retrieve('whatever', w, wid) == tr
def store_error(self, node_args, flow_name, task_name, task_id, exc_info):
    if not self.is_connected():
        self.connect()

    # record the failed task with no result payload and the error flag set
    res = WorkerResult(worker=task_name,
                       worker_id=task_id,
                       analysis_id=node_args.get('document_id'),
                       task_result=None,
                       error=True,
                       external_request_id=node_args.get('external_request_id'))
    try:
        self.session.add(res)
        self.session.commit()
    except:
        # roll back the failed transaction before re-raising
        self.session.rollback()
        raise
def store(self, node_args, flow_name, task_name, task_id, result):
    if not self.is_connected():
        self.connect()

    # persist the task result; flag it as an error if the worker reported one
    res = WorkerResult(worker=task_name,
                       worker_id=task_id,
                       analysis_id=node_args.get('document_id'),
                       task_result=result,
                       error=result.get('status') == 'error',
                       external_request_id=node_args.get('external_request_id'))
    try:
        self.session.add(res)
        self.session.commit()
    except:
        # roll back the failed transaction before re-raising
        self.session.rollback()
        raise
def _create_result_entry(self, node_args, flow_name, task_name, task_id, result, error=False):
    # node_args and result are not guaranteed to be dicts, so guard the lookups;
    # parentheses make the existing operator precedence explicit
    return WorkerResult(
        worker=task_name,
        worker_id=task_id,
        analysis_id=node_args.get('document_id') if isinstance(node_args, dict) else None,
        task_result=result,
        error=(error or result.get('status') == 'error') if isinstance(result, dict) else None,
        external_request_id=node_args.get('external_request_id') if isinstance(node_args, dict) else None)
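# A possible refactoring sketch (an assumption, not the project's verbatim code): since
# store() and store_error() above build nearly identical WorkerResult rows,
# _create_result_entry() could back both of them. The method name store_refactored is
# hypothetical and used only to avoid clashing with the existing store().
def store_refactored(self, node_args, flow_name, task_name, task_id, result):
    if not self.is_connected():
        self.connect()

    res = self._create_result_entry(node_args, flow_name, task_name, task_id, result)
    try:
        self.session.add(res)
        self.session.commit()
    except:
        self.session.rollback()
        raise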
def test_retrieve_s3(self):
    wid = 'x'
    w = 'y'
    tr = {'version_id': 123}
    res = {'real': 'result'}
    wr = WorkerResult(analysis=self.a, worker_id=wid, worker=w, task_result=tr)
    self.s.add(wr)
    self.s.commit()

    s3_storage = flexmock()
    s3_storage.\
        should_receive('retrieve_task_result').\
        with_args(self.en, self.pn, self.vi, w).\
        and_return(res)
    flexmock(selinon.StoragePool).\
        should_receive('get_connected_storage').\
        with_args('S3Data').\
        and_return(s3_storage)

    assert self.bp.retrieve('blahblah', w, wid) == res
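# Illustrative sketch of the retrieve() behaviour the two tests above exercise (an
# assumption about the implementation, not the project's actual code): an inline
# task_result is returned as-is, while a result carrying only a 'version_id' pointer
# is fetched from the 'S3Data' storage via selinon.StoragePool. The method name
# retrieve_sketch and the query by worker_id are hypothetical.
def retrieve_sketch(self, flow_name, task_name, task_id):
    record = self.session.query(WorkerResult).filter_by(worker_id=task_id).one()
    result = record.task_result

    if isinstance(result, dict) and 'version_id' in result:
        # the real payload lives in S3; look it up by ecosystem/package/version/worker
        s3 = selinon.StoragePool.get_connected_storage('S3Data')
        version = record.analysis.version
        package = version.package
        return s3.retrieve_task_result(package.ecosystem.name, package.name,
                                       version.identifier, task_name)

    # result was stored inline in the database
    return result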