def server_create_analysis(ecosystem, package, version, api_flow=True, force=False, force_graph_sync=False):
    """Create bayesianApiFlow handling analyses for specified EPV.

    :param ecosystem: ecosystem for which the flow should be run
    :param package: package for which should be flow run
    :param version: package version
    :param api_flow: if True schedule 'bayesianApiFlow', otherwise 'bayesianFlow'
    :param force: force run flow even specified EPV exists
    :param force_graph_sync: force synchronization to graph
    :return: dispatcher ID handling flow
    """
    args = {
        'ecosystem': ecosystem,
        # Maven package names must be normalized to canonical coordinates
        'name': MavenCoordinates.normalize_str(package) if ecosystem == 'maven' else package,
        'version': version,
        'force': force,
        'force_graph_sync': force_graph_sync
    }

    if api_flow:
        return server_run_flow('bayesianApiFlow', args)
    else:
        return server_run_flow('bayesianFlow', args)
def get(self, ecosystem, package, version):
    """Return graph analyses for an EPV, scheduling an analysis when unknown.

    :param ecosystem: str, ecosystem name
    :param package: str, package name
    :param version: str, package version
    :return: analyses fetched from the graph for a known component
    :raises HTTPError: 202 when an API-worker analysis was just scheduled,
        404 when no data exists and no API workers are available
    """
    if ecosystem == 'maven':
        package = MavenCoordinates.normalize_str(package)
    result = get_analyses_from_graph(ecosystem, package, version)
    # logger.warn is a deprecated alias of logger.warning; use lazy %-args
    current_app.logger.warning("%r", result)

    if result is not None:
        # Known component for Bayesian
        return result

    if os.environ.get("INVOKE_API_WORKERS", "") == "1":
        # Enter the unknown path: schedule analysis via the API flow and
        # tell the caller to retry once the package has been processed.
        server_create_analysis(ecosystem, package, version, api_flow=True, force=False, force_graph_sync=True)
        msg = "{ecosystem} Package {package}/{version} is unavailable. The package will be available shortly,"\
            " please retry after some time.".format(ecosystem=ecosystem, package=package, version=version)
        raise HTTPError(202, msg)
    else:
        # No API workers: schedule the regular flow but report not-found now.
        server_create_analysis(ecosystem, package, version, api_flow=False, force=False, force_graph_sync=True)
        msg = "No data found for {ecosystem} Package {package}/{version}".format(
            ecosystem=ecosystem, package=package, version=version)
        raise HTTPError(404, msg)
def get(self, ecosystem, package):
    """Return a paginated list of tracked versions for an ecosystem/package.

    :param ecosystem: str, ecosystem name
    :param package: str, (URL-quoted) package name
    :return: dict with the total version count and the page of version items
    :raises HTTPError: 404 when the package is not tracked at all
    """
    args = pagination_parser.parse_args()
    package = urllib.parse.unquote(package)
    if ecosystem == 'maven':
        package = MavenCoordinates.normalize_str(package)

    package_found = rdb.session.query(Package).\
        join(Ecosystem).\
        filter(Ecosystem.name == ecosystem,
               Package.name == package).\
        count()

    if package_found == 0:
        raise HTTPError(404, error="Package '{e}/{p}' not tracked".format(
            p=package, e=ecosystem))

    query = rdb.session.query(Version).\
        join(Package).join(Ecosystem).\
        filter(Ecosystem.name == ecosystem,
               Package.name == package)
    count = query.count()
    # `query` already constrains ecosystem/package, so only ordering and
    # pagination are applied here (the filter used to be duplicated).
    versions = query.\
        order_by(Version.identifier.asc()).\
        offset(get_item_skip(args['page'], args['per_page'])).\
        limit(get_item_relative_limit(args['page'], args['per_page']))

    items = [{
        'ecosystem': ecosystem,
        'package': package,
        'version': v.identifier
    } for v in versions]

    return {TOTAL_COUNT_KEY: count, 'items': items}
def get_latest_analysis_for(ecosystem, package, version):
    """Return the most recent Analysis row for the given EPV, or None.

    Note: has to be called inside flask request context.
    """
    try:
        if ecosystem == 'maven':
            package = MavenCoordinates.normalize_str(package)
        latest = (rdb.session.query(Analysis)
                  .join(Version).join(Package).join(Ecosystem)
                  .filter(Ecosystem.name == ecosystem)
                  .filter(Package.name == package)
                  .filter(Version.identifier == version)
                  .order_by(Analysis.started_at.desc())
                  .first())
        return latest
    except NoResultFound:
        # NOTE(review): .first() returns None instead of raising, so this
        # guard looks redundant — kept to preserve existing behavior.
        return None
def get_analysis_count(self, ecosystem, package):
    """Get count of previously scheduled analyses for given ecosystem-package.

    :param ecosystem: str, Ecosystem name
    :param package: str, Package name
    :return: analysis count
    """
    if ecosystem == 'maven':
        package = MavenCoordinates.normalize_str(package)
    # Use self.session like the sibling query methods do, instead of
    # reaching for the PostgresBase class attribute directly.
    count = self.session.query(PackageAnalysis).\
        join(Package).join(Ecosystem).\
        filter(Ecosystem.name == ecosystem).\
        filter(Package.name == package).\
        count()

    return count
def get_analysis_by_id(self, ecosystem, package, analysis_id):
    """Get result of previously scheduled analysis for given ecosystem-package
    triplet by analysis ID.

    :param ecosystem: str, Ecosystem name
    :param package: str, Package name
    :param analysis_id: str, ID of analysis
    :return: analysis result
    """
    if ecosystem == 'maven':
        package = MavenCoordinates.normalize_str(package)
    # BUG FIX: the Package/Ecosystem joins were missing, so the ecosystem
    # and package filters ran over an implicit cartesian product and did
    # not actually constrain which PackageAnalysis row was returned.
    found = self.session.query(PackageAnalysis).\
        join(Package).join(Ecosystem).\
        filter(Ecosystem.name == ecosystem).\
        filter(Package.name == package).\
        filter(PackageAnalysis.id == analysis_id).\
        one()

    return found
def get_analysis_count(self, ecosystem, package, version):
    """Get count of previously scheduled analysis for given EPV triplet.

    :param ecosystem: str, Ecosystem name
    :param package: str, Package name
    :param version: str, Package version
    :return: analysis count
    """
    if ecosystem == 'maven':
        package = MavenCoordinates.normalize_str(package)
    epv_query = (self.session.query(Analysis)
                 .join(Version).join(Package).join(Ecosystem)
                 .filter(Ecosystem.name == ecosystem)
                 .filter(Package.name == package)
                 .filter(Version.identifier == version))
    return epv_query.count()
def _create_analysis_arguments(ecosystem, name, version): return { 'ecosystem': ecosystem, 'name': MavenCoordinates.normalize_str(name) if ecosystem == 'maven' else name, 'version': version }