def license_analysis(self, file_id):
    '''
    Analyse a file for licenses
    @param file_id: a file id of a file that needs to be analysed
    @return: list of all licenses found
    '''
    ret = ServiceResult()

    self.tmpfile_path = self.get_tmp_filename()
    with self.get_system() as system:
        f = system.download(file_id, self.tmpfile_path)

    self.extracted1_path = self.get_tmp_dirname()
    d = f.unpack(self.extracted1_path)

    if isinstance(d, ExtractedRpmFile):
        src_path = d.get_content_path()
    elif isinstance(d, ExtractedTarballFile):
        src_path = d.get_path()
    elif isinstance(d, ExtractedSrpmFile):
        # we have to unpack the embedded tarball first
        t = d.get_tarball()
        self.extracted2_path = self.get_tmp_dirname()
        # fix: unpack the tarball (t), not the original file (f) again;
        # the previous code left t unused and re-unpacked the srpm
        d = t.unpack(self.extracted2_path)
        src_path = d.get_path()
    else:
        raise ValueError("Filetype %s cannot be processed" % (d.get_type(),))

    stdout, stderr, _ = runcmd(["licenselib/cucos_license_check.py", src_path])

    ret.result = json.loads(stdout)
    ret.meta['stderr'] = stderr
    ret.meta['tool'] = "cucos_license_check"

    return ret
def spec_patch_listing(self, package_name, branch=None, commit=None):
    """
    Get listing of downstream patches for package packaged in Fedora
    @param package_name: package name
    @param branch: branch (e.g. "f23", ...); if omitted, "rawhide" is used
    @param commit: fedpkg git commit; if omitted, the latest commit is used
    @return: list of downstream patches
    """
    ret = ServiceResult()
    # basename() guards against path traversal in the supplied name
    package_name = os.path.basename(package_name)
    if branch in (None, "rawhide"):
        branch = "master"
    with self.get_lock(package_name):
        self._git_tree_prepare(package_name, branch, commit)
        pkg_path = os.path.join(self.pkg_dir, package_name)
        ret.result = [entry for entry in os.listdir(pkg_path)
                      if entry.endswith(".patch")]
    return ret
def tarball_get(self, upstream_url, commit):
    '''
    Retrieve a tarball from upstream, try to detect upstream provider
    @param upstream_url: an upstream url
    @param commit: commit of upstream file
    @return: tarball file id
    '''
    res = ServiceResult()

    if upstream_url.startswith('https://github.com'):
        m = re.search('https://github.com/([a-z]+)/([a-z-_]+)/?', upstream_url)
        if m is None:
            # fix: interpolate with %; previously the args were passed as a
            # second ValueError argument, leaving a literal "%s" in the message
            raise ValueError("Expected URL in form 'https://github.com/<USER>/<REPO>/', got %s"
                             % (upstream_url,))
        tarball_url = self._get_github_tarball_url(m.group(1), m.group(2), commit)
        filename = self._get_github_file_name(m.group(1), m.group(2), commit)
    elif upstream_url.startswith('https://bitbucket.org'):
        m = re.search('https://bitbucket.org/([a-z]+)/([a-z-_]+)/?', upstream_url)
        if m is None:
            # fix: same %-interpolation bug as above
            raise ValueError("Expected URL in form 'https://bitbucket.org/<USER>/<REPO>/', got %s"
                             % (upstream_url,))
        tarball_url = self._get_bitbucket_tarball_url(m.group(1), m.group(2), commit)
        filename = self._get_bitbucket_file_name(m.group(1), m.group(2), commit)
    else:
        raise NotImplementedError("Unknown upstream provider %s" % (upstream_url,))

    with self.get_lock(filename):
        if self.dircache.is_available(filename):
            # cache hit - serve the already downloaded tarball
            res.result = FileId.construct(self, self.dircache.get_file_path(filename))
        else:
            res.result = self._download_tarball(tarball_url, filename)

    return res
def spec_get(self, package_name, branch=None, commit=None):
    '''
    Get specfile of a package packaged in Fedora
    @param package_name: package name
    @param branch: branch (e.g. "f23", ...); if omitted "rawhide" is used
    @param commit: fedpkg git commit; if omitted, the latest commit is used
    @return: specfile file id
    '''
    ret = ServiceResult()
    # basename() guards against path traversal in the package name
    package_name = os.path.basename(package_name)
    if branch in (None, "rawhide"):
        branch = "master"
    # we have to ensure that such package/branch/commit exist
    with self.get_lock(package_name):
        tree_path = self._git_tree_prepare(package_name, branch, commit)
        specfile = "%s.spec" % package_name
        ident = "%s/%s/%s/%s" % (package_name, branch,
                                 self._commit2ident(commit), specfile)
        ret.result = FileId.construct(self, ident,
                                      path=os.path.join(tree_path, specfile))
    return ret
def spec_patch_listing(self, package_name, branch=None, commit=None):
    '''
    Get listing of downstream patches for package packaged in Fedora
    @param package_name: package name
    @param branch: branch (e.g. "f23", ...); if omitted, "rawhide" is used
    @param commit: fedpkg git commit; if omitted, the latest commit is used
    @return: list of downstream patches
    '''
    ret = ServiceResult()
    ret.result = []
    # strip any directory components to avoid accessing suspicious files
    package_name = os.path.basename(package_name)
    if branch is None or branch == "rawhide":
        branch = "master"
    with self.get_lock(package_name):
        self._git_tree_prepare(package_name, branch, commit)
        listing_dir = os.path.join(self.pkg_dir, package_name)
        for entry in os.listdir(listing_dir):
            if not entry.endswith('.patch'):
                continue
            ret.result.append(entry)
    return ret
def spec_patch_get(self, package_name, patch_name, branch=None, commit=None):
    """
    Get file id of a downstream patch of a package packaged in Fedora
    @param package_name: package name
    @param patch_name: name of the patch file
    @param branch: branch (e.g. "f23", ...); if omitted, "rawhide" is used
    @param commit: fedpkg git commit; if omitted, the latest commit is used
    @return: file id of the patch
    """
    ret = ServiceResult()
    ret.result = []
    # prevent from accessing suspicious files
    package_name = os.path.basename(package_name)
    patch_name = os.path.basename(patch_name)
    if branch is None or branch == "rawhide":
        branch = "master"
    with self.get_lock(package_name):
        path = self._git_tree_prepare(package_name, branch, commit)
        patch_path = os.path.join(path, patch_name)
        if not os.path.isfile(patch_path):
            raise ValueError(
                "There is not patch %s for package %s, branch %s and commit %s"
                % (patch_name, package_name, branch, commit)
            )
        # fix: a stray trailing comma wrapped ident in a 1-tuple; the
        # identifier must be the plain string (cf. spec_get)
        ident = "%s/%s/%s/%s" % (package_name, branch,
                                 self._commit2ident(commit), patch_name)
        ret.result = FileId.construct(self, ident, path=patch_path)
    return ret
def scm_log(self, repo_url, max_depth=None, since_date=None, branch=None):
    '''
    Get SCM log of a repo
    @param repo_url: Git repo URL
    @param max_depth: log depth
    @param since_date: since date
    @param branch: repo branch
    @return: list of scm commits (abbreviated hash, author, author email, author time, subject)
    '''
    ret = ServiceResult()

    if branch is not None and branch != "master":
        # fix: corrected grammar of the error message ("implement" -> "implemented")
        raise NotImplementedError("Handling different branch than master is not implemented")

    dirname = self._get_clone_dir_name(repo_url)
    dst_path = self.dircache.get_location(dirname)

    with self.get_lock(dirname):
        if self.dircache.is_available(dirname):
            # refresh an already cloned repo and keep its cache entry hot
            repo_type = self._scm_pull(dst_path)
            self.dircache.mark_used(dirname)
        else:
            repo_type = self._scm_clone(repo_url, dst_path)
            self.dircache.register(dirname)

        if repo_type == REPO_TYPE_GIT:
            ret.result = GitCmd.git_log_repo(dst_path, max_depth, since_date)
        elif repo_type == REPO_TYPE_MERCURIAL:
            ret.result = self._hg_log(dst_path, max_depth, since_date)
        else:
            raise ValueError("Internal Error: Unhandled repo type")

    return ret
def api_analysis(self, file_id, opts=None):
    '''
    Get API of a file
    @param file_id: file to be analysed
    @param opts: additional analysis options
    @return: list of exported API
    '''
    default_opts = {'language': 'detect', 'tool': 'default'}
    ret = ServiceResult()

    if opts is None:
        opts = default_opts
    else:
        # caller options override the defaults
        default_opts.update(opts)
        opts = default_opts

    self.tmpfile_path = self.get_tmp_filename()
    with self.get_system() as system:
        f = system.download(file_id, self.tmpfile_path)

    # idiom: single isinstance() with a tuple replaces the chained checks
    if not isinstance(f, (RpmFile, SrpmFile, TarballFile)):
        raise ValueError("Unable to process filetype %s" % (f.get_type(),))

    self.extracted1_path = self.get_tmp_dirname()
    d = f.unpack(self.extracted1_path)

    if isinstance(d, ExtractedRpmFile):
        src_path = d.get_content_path()
    elif isinstance(d, ExtractedTarballFile):
        src_path = d.get_path()
    elif isinstance(d, ExtractedSrpmFile):
        # we have to unpack the embedded tarball first
        t = d.get_tarball()
        self.extracted2_path = self.get_tmp_dirname()
        # fix: unpack the tarball (t); the original unpacked f a second time
        # and left t unused
        d = t.unpack(self.extracted2_path)
        src_path = d.get_path()
    else:
        raise ValueError("Filetype %s cannot be processed" % (d.get_type(),))

    # TODO: handle detect
    if (opts['language'] == 'golang' and opts['tool'] == 'gofedlib') or opts['language'] == 'detect':
        try:
            ret.result = gofedlib.api(src_path)
        except Exception:
            # fix: narrowed bare except so SystemExit/KeyboardInterrupt propagate
            exc_info = sys.exc_info()
            ret.meta['error'] = [str(exc_info[0]), str(exc_info[1]), str(exc_info[2])]
        finally:
            ret.meta['language'] = 'golang'
            ret.meta['tool'] = 'gofedlib'
    else:
        raise NotImplementedError()

    return ret
def golang_package2upstream(self, package_name):
    '''
    Convert a package name packaged in Fedora to upstream URL
    @param package_name: name of the Fedora package
    @return: URL of package upstream
    '''
    ret = ServiceResult()
    with self.get_lock(self.mappings_json):
        translator = GoTranslator(self.mappings_json)
        ret.result = translator.pkgname2upstream(package_name)
    return ret
def golang_upstream2package(self, upstream_url):
    '''
    Convert an upstream URL to a package name packaged in Fedora
    @param upstream_url: URL of a project
    @return: package name in Fedora
    '''
    ret = ServiceResult()
    # the mappings file is shared, so guard the translation with its lock
    with self.get_lock(self.mappings_json):
        translator = GoTranslator(self.mappings_json)
        ret.result = translator.upstream2pkgname(upstream_url)
    return ret
def api_diff(self, api1, api2):
    '''
    Make a diff of APIs
    @param api1: the first API
    @param api2: the second API
    @return: list of API differences
    '''
    ret = ServiceResult()
    difference = apidiff(api1, api2)
    ret.result = difference
    ret.meta = {'language': 'golang', 'tool': 'gofedlib'}
    return ret
def scm_store(self, repo_url, commit=None, branch=None):
    '''
    Store a SCM repo
    @param repo_url: repo URL
    @param commit: commit hash; if None, the latest is used
    @param branch: branch; if None, "master" is used
    @return: file id of the stored (packed) repository
    '''
    ret = ServiceResult()

    if not branch:
        branch = "master"
    if commit:
        # only the abbreviated (7-char) hash is used in cache names
        commit = commit[:7]

    dirname = self._get_dirname(repo_url, commit, branch)
    filename = self._get_filename(dirname)
    dst_path = self.dircache.get_location(dirname)

    with self.get_lock(dirname):
        if not self.dircache.is_available(filename):
            repo = GitCmd.git_clone_repo(repo_url, dst_path)
            repo.git_checkout(branch)
            if commit:
                repo.git_checkout(commit)
            else:
                commit = repo.git_rev_parse_head(dst_path)[:7]
                # if user did not supplied commit, we have to check it explicitly
                filename_old = filename
                filename = self._get_filename(
                    self._get_dirname(repo_url, commit, branch))
                # we have to move it so it will be available with specified commit and branch
                # NOTE(review): moving before the repo is packed looks
                # suspicious - verify filename_old exists at this point
                if filename_old != filename:
                    shutil.move(filename_old, filename)
            if not self.dircache.is_available(filename):
                # if user did not supplied commit, we have to pack the repo
                self._pack_repo(dirname, filename)
                # the raw clone is no longer needed once packed
                shutil.rmtree(dst_path)
        if not self.dircache.is_available(filename):
            self.dircache.register(filename)

        ret.result = FileId.construct(self, self.dircache.get_file_path(filename))
        ret.meta = {'origin': repo_url}

    return ret
def spec_buildrequires(self, file_id):
    '''
    Get all buildrequires for a package
    @param file_id: a file id of a specfile/src.rpm stored in the system
    @return: list of buildrequires per package
    '''
    ret = ServiceResult()
    specfile_path = self._prepare_file(file_id)
    raw_output = self._specker_call(SpecFileRenderer.buildrequires_show,
                                    specfile_path)
    ret.result = self._parse_specker_output(raw_output)
    ret.meta = {'tool': 'specker'}
    return ret
def scm_store(self, repo_url, commit=None, branch=None):
    '''
    Store a SCM repo
    @param repo_url: repo URL
    @param commit: commit hash; if None, the latest is used
    @param branch: branch; if None, "master" is used
    @return: file id of the stored (packed) repository
    '''
    ret = ServiceResult()

    if not branch:
        branch = "master"
    if commit:
        # cache names use the abbreviated (7-char) hash
        commit = commit[:7]

    dirname = self._get_dirname(repo_url, commit, branch)
    filename = self._get_filename(dirname)
    dst_path = self.dircache.get_location(dirname)

    with self.get_lock(dirname):
        if not self.dircache.is_available(filename):
            repo = GitCmd.git_clone_repo(repo_url, dst_path)
            repo.git_checkout(branch)
            if commit:
                repo.git_checkout(commit)
            else:
                commit = repo.git_rev_parse_head(dst_path)[:7]
                # if user did not supplied commit, we have to check it explicitly
                filename_old = filename
                filename = self._get_filename(self._get_dirname(repo_url, commit, branch))
                # we have to move it so it will be available with specified commit and branch
                # NOTE(review): the move precedes _pack_repo - confirm
                # filename_old actually exists on disk here
                if filename_old != filename:
                    shutil.move(filename_old, filename)
            if not self.dircache.is_available(filename):
                # if user did not supplied commit, we have to pack the repo
                self._pack_repo(dirname, filename)
                # drop the working clone once the pack exists
                shutil.rmtree(dst_path)
        if not self.dircache.is_available(filename):
            self.dircache.register(filename)

        ret.result = FileId.construct(self, self.dircache.get_file_path(filename))
        ret.meta = {'origin': repo_url}

    return ret
def deps_package_listing(self):
    '''
    Listing of all available packages with analyzed dependencies
    @return: list of all available packages
    '''
    ret = ServiceResult()
    ret.result = []
    # projection: keep only the fields needed for the listing
    filtering = {'version': 0, '_id': 0, 'deps': 0, 'meta': 0}
    cursor = self.deps_package.find({}, filtering)
    for item in cursor:
        # fix: membership was tested on the ServiceResult object
        # ("not in ret") instead of the result list
        if item['package'] not in ret.result:
            ret.result.append(item['package'])
    return ret
def deps_project_listing(self):
    '''
    Listing of all available projects with analyzed dependencies
    @return: list of all available projects with analyzed dependencies
    '''
    ret = ServiceResult()
    # projection: drop everything except the project name
    projection = {'commit': 0, '_id': 0, 'deps': 0, 'meta': 0, 'commit-date': 0}
    seen = []
    for record in self.deps_project.find({}, projection):
        name = record['project']
        if name not in seen:
            seen.append(name)
    ret.result = seen
    return ret
def deps_analysis(self, file_id, opts=None):
    '''
    Get deps of a file
    @param file_id: file to be analysed
    @param opts: additional analysis opts
    @return: list of dependencies
    '''
    ret = ServiceResult()

    default_opts = {'language': 'detect', 'tool': 'default'}
    if opts is None:
        opts = default_opts
    else:
        # caller options override the defaults
        default_opts.update(opts)
        opts = default_opts

    self.tmpfile_path = self.get_tmp_filename()
    with self.get_system() as system:
        f = system.download(file_id, self.tmpfile_path)

    self.extracted1_path = self.get_tmp_dirname()
    d = f.unpack(self.extracted1_path)

    if isinstance(d, ExtractedRpmFile):
        src_path = d.get_content_path()
    elif isinstance(d, ExtractedTarballFile):
        src_path = d.get_path()
    elif isinstance(d, ExtractedSrpmFile):
        # we have to unpack the embedded tarball first
        t = d.get_tarball()
        self.extracted2_path = self.get_tmp_dirname()
        # fix: unpack the tarball (t); the original re-unpacked f and left
        # t unused
        d = t.unpack(self.extracted2_path)
        src_path = d.get_path()
    else:
        raise ValueError("Filetype %s cannot be processed" % (d.get_type(),))

    # TODO: handle opts
    try:
        ret.result = gofedlib.project_packages(src_path)
    except Exception:
        # fix: narrowed bare except so SystemExit/KeyboardInterrupt propagate
        exc_info = sys.exc_info()
        ret.meta['error'] = [str(exc_info[0]), str(exc_info[1]), str(exc_info[2])]
    finally:
        ret.meta['language'] = 'golang'
        ret.meta['tool'] = 'gofedlib'

    return ret
def deps_project_commit_listing(self, project):
    '''
    Get all available commits of a project with analyzed dependencies
    @param project: project name
    @return: list of all available commits with analyzed dependencies
    '''
    ret = ServiceResult()
    projection = {'_id': 0, 'deps': 0, 'project': 0, 'meta': 0, 'commit-date': 0}
    commits = []
    for record in self.deps_project.find({'project': project}, projection):
        if record['commit'] not in commits:
            commits.append(record['commit'])
    ret.result = commits
    return ret
def deps_project(self, project, commit):
    '''
    Dependencies of the given project in specified commit
    @param project: project name
    @param commit: commit hash
    @return: list of deps of the project with analysis metadata
    '''
    ret = ServiceResult()
    ret.result = []
    # fix: 'commit-date' must NOT be excluded by the projection - it is read
    # from every matching document below (old projection caused a KeyError)
    filtering = {'commit': 0, '_id': 0, 'project': 0}
    cursor = self.deps_project.find({'project': project, 'commit': commit}, filtering)
    for item in cursor:
        ret.result.append({'deps': item['deps'],
                           'meta': item['meta'],
                           'commit-date': item['commit-date']})
    return ret
def api_package(self, package, version):
    '''
    API of the given package in the specified version
    @param package: package name
    @param version: package version
    @return: list of APIs of the package with analysis metadata
    '''
    ret = ServiceResult()
    query = {'package': package, 'version': version}
    projection = {'version': 0, '_id': 0, 'package': 0}
    # NOTE(review): self.api_package appears to be a DB collection attribute
    # shadowing this method name on instances - verify against __init__
    ret.result = [{'api': doc['api'], 'meta': doc['meta']}
                  for doc in self.api_package.find(query, projection)]
    return ret
def deps_package_distro_listing(self, package, distro):
    '''
    Get all available versions of a package within distro
    @param package: package name
    @param distro: distribution
    @return: list of all available versions in distribution
    '''
    ret = ServiceResult()
    ret.result = []
    filtering = {'_id': 0, 'deps': 0, 'package': 0, 'meta': 0}
    cursor = self.deps_package.find({'package': package, 'distro': distro}, filtering)
    for item in cursor:
        # fix: membership was tested on the ServiceResult object ("not in
        # ret"), and the queried distro was collected instead of the
        # versions the docstring promises
        if item['version'] not in ret.result:
            ret.result.append(item['version'])
    return ret
def deps_package(self, package, version, distro):
    '''
    Dependencies of the given package in specified version and distro
    @param package: package name
    @param version: package version
    @param distro: distribution
    @return: list of dependencies of package with analysis metadata
    '''
    ret = ServiceResult()
    query = {'package': package, 'version': version, 'distro': distro}
    projection = {'version': 0, '_id': 0, 'package': 0}
    ret.result = [{'deps': doc['deps'], 'meta': doc['meta']}
                  for doc in self.deps_package.find(query, projection)]
    return ret
def api_package_distro_listing(self, package, distro):
    '''
    Get all available versions of a package within distro
    @param package: package name
    @param distro: distribution
    @return: list of all available versions based on distro
    '''
    ret = ServiceResult()
    ret.result = []
    filtering = {'_id': 0, 'api': 0, 'package': 0, 'meta': 0}
    cursor = self.api_package.find({'package': package, 'distro': distro}, filtering)
    for item in cursor:
        # fix: collect versions - the old code appended the queried distro
        # back, so the listing never contained any version
        if item['version'] not in ret.result:
            ret.result.append(item['version'])
    return ret
def goland_package_listing(self):
    '''
    List of all available golang packages packaged in fedora
    @return: packages packaged in fedora
    '''
    # NOTE(review): method name typo ("goland") kept for API compatibility
    ret = ServiceResult()

    def cache_is_fresh():
        # cached listing is valid only within the configured update interval
        if self.packages['packages'] is None:
            return False
        return (time() - self.packages['updated']) < self.update_interval

    if not cache_is_fresh():
        # double-checked under the lock so only one caller refreshes
        with self.get_lock(self._fedora_pkgdb_packages_list):
            if not cache_is_fresh():
                self.packages['packages'] = self._fedora_pkgdb_packages_list()
                self.packages['updated'] = time()

    ret.result = self.packages['packages']
    return ret
def goland_package_listing(self):
    '''
    List of all available golang packages packaged in fedora
    @return: packages packaged in fedora
    '''
    ret = ServiceResult()

    def _cached():
        # a non-empty cache younger than update_interval is considered valid
        return self.packages['packages'] is not None \
            and time() - self.packages['updated'] < self.update_interval

    if not _cached():
        with self.get_lock(self._fedora_pkgdb_packages_list):
            # re-check once the lock is held; another caller may have
            # refreshed the listing in the meantime
            if not _cached():
                self.packages['packages'] = self._fedora_pkgdb_packages_list()
                self.packages['updated'] = time()

    ret.result = self.packages['packages']
    return ret
def deps_package_version_listing(self, package):
    '''
    Get all available versions of a package
    @param package: package name
    @return: dict mapping each distro to its list of available versions
    '''
    ret = ServiceResult()
    ret.result = {}
    filtering = {'_id': 0, 'deps': 0, 'package': 0, 'meta': 0}
    cursor = self.deps_package.find({'package': package}, filtering)
    for item in cursor:
        # fix: all three container accesses went through the ServiceResult
        # object ("ret[...]"/"in ret") instead of ret.result, which raised
        # TypeError at runtime
        if item['distro'] not in ret.result:
            ret.result[item['distro']] = []
        if item['version'] not in ret.result[item['distro']]:
            ret.result[item['distro']].append(item['version'])
    return ret
def api_package(self, package, version):
    '''
    API of the given package in the specified version
    @param package: package name
    @param version: package version
    @return: list of APIs of the package with analysis metadata
    '''
    ret = ServiceResult()
    ret.result = []
    projection = {'version': 0, '_id': 0, 'package': 0}
    for document in self.api_package.find({'package': package,
                                           'version': version}, projection):
        entry = {'api': document['api'], 'meta': document['meta']}
        ret.result.append(entry)
    return ret
def _common_get(self, url, filename):
    '''
    Serve a file from the dircache, downloading it from url on a cache miss
    @param url: remote location to fetch the file from when not cached
    @param filename: name of the file within the dircache
    @return: file id of the cached or freshly downloaded file
    '''
    ret = ServiceResult()

    with self.get_lock(filename):
        if self.dircache.is_available(filename):
            # cache hit - no download needed
            ret.result = FileId.construct(self, self.dircache.get_file_path(filename))
        else:
            if not remote_exists(url):
                raise KeyError("Desired file '%s' does not exist ( %s )" % (filename, url))
            log.debug("Downloading from %s" % (url,))
            blob = urllib2.urlopen(url).read()
            self.dircache.store(blob, filename)
            ret.result = FileId.construct(self,
                                          self.dircache.get_file_path(filename),
                                          hash_=blob_hash(blob))

    ret.meta['origin'] = url
    return ret
def api_package_distro_listing(self, package, distro):
    '''
    Get all available versions of a package within distro
    @param package: package name
    @param distro: distribution
    @return: list of all available versions based on distro
    '''
    ret = ServiceResult()
    ret.result = []
    filtering = {'_id': 0, 'api': 0, 'package': 0, 'meta': 0}
    cursor = self.api_package.find({
        'package': package,
        'distro': distro
    }, filtering)
    for item in cursor:
        # fix: the old code re-appended the distro that was queried, so no
        # version ever appeared in the listing; collect versions instead
        if item['version'] not in ret.result:
            ret.result.append(item['version'])
    return ret
def upload(self, blob):
    '''
    Upload file to the system
    @param blob: a file content to be stored
    @return: file id
    '''
    res = ServiceResult()
    log.info("uploading")
    # content-addressed storage: the blob hash doubles as the file name
    h = blob_hash(blob)
    dst = os.path.join(self.upload_dir, h)
    with self.get_lock():
        with open(dst, 'wb') as f:
            f.write(blob)
    # fix: removed the dead creation_time/valid_until computation - the
    # values were never used (lifetime tracking is a TODO to be handled by
    # dircache, per the original inline note)
    res.result = FileId.construct(self, dst, h)
    return res
def deps_package_distro_listing(self, package, distro):
    '''
    Get all available versions of a package within distro
    @param package: package name
    @param distro: distribution
    @return: list of all available versions in distribution
    '''
    ret = ServiceResult()
    ret.result = []
    filtering = {'_id': 0, 'deps': 0, 'package': 0, 'meta': 0}
    cursor = self.deps_package.find({
        'package': package,
        'distro': distro
    }, filtering)
    for item in cursor:
        # fix: membership was tested on the ServiceResult object ("not in
        # ret") and the queried distro was appended instead of the versions
        # this listing is documented to return
        if item['version'] not in ret.result:
            ret.result.append(item['version'])
    return ret
def deps_store_project(self, project, commit, commit_date, deps, meta):
    '''
    Store dependencies of a project
    @param project: project name
    @param commit: commit
    @param commit_date: commit date
    @param deps: project deps
    @param meta: metadata from analysis
    '''
    ret = ServiceResult()
    self.deps_project.insert({
        'project': project,
        'commit': commit,
        'commit-date': commit_date,
        'deps': deps,
        'meta': meta,
    })
    ret.result = True
    return ret
def deps_project_listing(self):
    '''
    Listing of all available projects with analyzed dependencies
    @return: list of all available projects with analyzed dependencies
    '''
    ret = ServiceResult()
    projection = {
        'commit': 0,
        '_id': 0,
        'deps': 0,
        'meta': 0,
        'commit-date': 0
    }
    names = []
    for document in self.deps_project.find({}, projection):
        project_name = document['project']
        if project_name not in names:
            names.append(project_name)
    ret.result = names
    return ret
def deps_store_package(self, package, version, distro, deps, meta):
    '''
    Store dependencies of a package
    @param package: package name to store
    @param version: version of package
    @param distro: distribution
    @param deps: package dependencies
    @param meta: metadata from analysis
    '''
    ret = ServiceResult()
    document = {
        'package': package,
        'version': version,
        'distro': distro,
        'deps': deps,
        'meta': meta,
    }
    self.deps_package.insert(document)
    ret.result = True
    return ret
def spec_get(self, package_name, branch=None, commit=None):
    """
    Get specfile of a package packaged in Fedora
    @param package_name: package name
    @param branch: branch (e.g. "f23", ...); if omitted "rawhide" is used
    @param commit: fedpkg git commit; if omitted, the latest commit is used
    @return: specfile file id
    """
    ret = ServiceResult()
    # prevent from accessing suspicious files
    package_name = os.path.basename(package_name)
    if branch is None or branch == "rawhide":
        branch = "master"
    spec_name = "%s.spec" % package_name
    # we have to ensure that such package/branch/commit exist
    with self.get_lock(package_name):
        checkout = self._git_tree_prepare(package_name, branch, commit)
        ident = "/".join([package_name, branch,
                          self._commit2ident(commit), spec_name])
        ret.result = FileId.construct(self, ident,
                                      path=os.path.join(checkout, spec_name))
    return ret
def api_store_project(self, project, commit, commit_date, api, meta):
    '''
    Store API of a project
    @param project: project name
    @param commit: commit
    @param commit_date: commit date
    @param api: exported API
    @param meta: metadata from analysis
    '''
    ret = ServiceResult()
    self.api_project.insert({
        'project': project,
        'commit': commit,
        'commit-date': commit_date,
        'api': api,
        'meta': meta,
    })
    ret.result = True
    return ret
def deps_project_commit_listing(self, project):
    '''
    Get all available commits of a project with analyzed dependencies
    @param project: project name
    @return: list of all available commits with analyzed dependencies
    '''
    ret = ServiceResult()
    projection = {
        '_id': 0,
        'deps': 0,
        'project': 0,
        'meta': 0,
        'commit-date': 0
    }
    seen = set()
    commits = []
    for doc in self.deps_project.find({'project': project}, projection):
        commit_hash = doc['commit']
        if commit_hash in seen:
            continue
        seen.add(commit_hash)
        commits.append(commit_hash)
    ret.result = commits
    return ret
def deps_package(self, package, version, distro):
    '''
    Dependencies of the given package in specified version and distro
    @param package: package name
    @param version: package version
    @param distro: distribution
    @return: list of dependencies of package with analysis metadata
    '''
    ret = ServiceResult()
    ret.result = []
    query = {'package': package, 'version': version, 'distro': distro}
    projection = {'version': 0, '_id': 0, 'package': 0}
    for document in self.deps_package.find(query, projection):
        ret.result.append({'deps': document['deps'],
                           'meta': document['meta']})
    return ret
def license_analysis(self, file_id):
    '''
    Analyse a file for licenses
    @param file_id: a file id of a file that needs to be analysed
    @return: list of all licenses found
    '''
    ret = ServiceResult()

    self.tmpfile_path = self.get_tmp_filename()
    with self.get_system() as system:
        f = system.download(file_id, self.tmpfile_path)

    self.extracted1_path = self.get_tmp_dirname()
    d = f.unpack(self.extracted1_path)

    if isinstance(d, ExtractedRpmFile):
        src_path = d.get_content_path()
    elif isinstance(d, ExtractedTarballFile):
        src_path = d.get_path()
    elif isinstance(d, ExtractedSrpmFile):
        # we have to unpack the embedded tarball first
        t = d.get_tarball()
        self.extracted2_path = self.get_tmp_dirname()
        # fix: unpack the tarball (t); previously f was unpacked a second
        # time and t was never used
        d = t.unpack(self.extracted2_path)
        src_path = d.get_path()
    else:
        raise ValueError("Filetype %s cannot be processed" % (d.get_type(),))

    stdout, stderr, _ = runcmd(["licenselib/cucos_license_check.py", src_path])

    ret.result = json.loads(stdout)
    ret.meta['stderr'] = stderr
    ret.meta['tool'] = "cucos_license_check"

    return ret
def spec_patch_get(self, package_name, patch_name, branch=None, commit=None):
    '''
    Get file id of a downstream patch of a package packaged in Fedora
    @param package_name: package name
    @param patch_name: name of the patch file
    @param branch: branch (e.g. "f23", ...); if omitted, "rawhide" is used
    @param commit: fedpkg git commit; if omitted, the latest commit is used
    @return: file id of the patch
    '''
    ret = ServiceResult()
    ret.result = []
    # prevent from accessing suspicious files
    package_name = os.path.basename(package_name)
    patch_name = os.path.basename(patch_name)
    if branch is None or branch == "rawhide":
        branch = "master"
    with self.get_lock(package_name):
        path = self._git_tree_prepare(package_name, branch, commit)
        patch_path = os.path.join(path, patch_name)
        if not os.path.isfile(patch_path):
            raise ValueError(
                "There is not patch %s for package %s, branch %s and commit %s"
                % (patch_name, package_name, branch, commit))
        # fix: a trailing comma turned ident into a 1-tuple; the identifier
        # must be the plain string (cf. spec_get)
        ident = "%s/%s/%s/%s" % (package_name, branch,
                                 self._commit2ident(commit), patch_name)
        ret.result = FileId.construct(self, ident, path=patch_path)
    return ret
def deps_project(self, project, commit):
    '''
    Dependencies of the given project in specified commit
    @param project: project name
    @param commit: commit hash
    @return: list of deps of the project with analysis metadata
    '''
    ret = ServiceResult()
    ret.result = []
    # fix: do not exclude 'commit-date' in the projection - it is read from
    # every matching document below; excluding it raised KeyError
    filtering = {'commit': 0, '_id': 0, 'project': 0}
    cursor = self.deps_project.find({
        'project': project,
        'commit': commit
    }, filtering)
    for item in cursor:
        ret.result.append({
            'deps': item['deps'],
            'meta': item['meta'],
            'commit-date': item['commit-date']
        })
    return ret
def api_store_package(self, package, version, release, distro, api, meta):
    '''
    Store API of a package
    @param package: package name to store
    @param version: version of package
    @param release: a package release
    @param distro: distribution
    @param api: exported api
    @param meta: metadata from analysis
    '''
    ret = ServiceResult()
    document = {
        'package': package,
        'version': version,
        'release': release,
        'distro': distro,
        'api': api,
        'meta': meta,
    }
    self.api_package.insert(document)
    ret.result = True
    return ret
def deps_analysis(self, file_id, opts=None):
    '''
    Get deps of a file
    @param file_id: file to be analysed
    @param opts: additional analysis opts
    @return: list of dependencies
    '''
    ret = ServiceResult()

    default_opts = {'language': 'detect', 'tool': 'default'}
    if opts is None:
        opts = default_opts
    else:
        # caller-supplied options override the defaults
        default_opts.update(opts)
        opts = default_opts

    self.tmpfile_path = self.get_tmp_filename()
    with self.get_system() as system:
        f = system.download(file_id, self.tmpfile_path)

    self.extracted1_path = self.get_tmp_dirname()
    d = f.unpack(self.extracted1_path)

    if isinstance(d, ExtractedRpmFile):
        src_path = d.get_content_path()
    elif isinstance(d, ExtractedTarballFile):
        src_path = d.get_path()
    elif isinstance(d, ExtractedSrpmFile):
        # we have to unpack the embedded tarball first
        t = d.get_tarball()
        self.extracted2_path = self.get_tmp_dirname()
        # fix: unpack the tarball (t); the original unpacked f again and
        # never used t
        d = t.unpack(self.extracted2_path)
        src_path = d.get_path()
    else:
        raise ValueError("Filetype %s cannot be processed" % (d.get_type(),))

    # TODO: handle opts
    try:
        ret.result = gofedlib.project_packages(src_path)
    except Exception:
        # fix: narrowed bare except so SystemExit/KeyboardInterrupt propagate
        exc_info = sys.exc_info()
        ret.meta['error'] = [str(exc_info[0]), str(exc_info[1]), str(exc_info[2])]
    finally:
        ret.meta['language'] = 'golang'
        ret.meta['tool'] = 'gofedlib'

    return ret
def deps_diff(self, deps1, deps2, opts=None):
    '''
    Make a diff of dependencies
    @param deps1: the first dependency list
    @param deps2: the second dependency list
    @param opts: additional analysis opts
    @return: list of dependency differences
    @raise NotImplementedError: always - the diff is not implemented yet
    '''
    default_opts = {'language': 'detect', 'tool': 'default'}
    if opts is None:
        opts = default_opts
    else:
        # caller options override the defaults (kept so the option contract
        # matches the other *_diff/*_analysis services)
        default_opts.update(opts)
        opts = default_opts
    # TODO: implement deps difference
    # fix: dropped the unreachable "return ret" (and the dead ServiceResult)
    # that followed this unconditional raise
    raise NotImplementedError("Currently not implemented")
def api_analysis(self, file_id, opts=None):
    '''
    Get API of a file
    @param file_id: file to be analysed
    @param opts: additional analysis options
    @return: list of exported API
    '''
    default_opts = {'language': 'detect', 'tool': 'default'}
    ret = ServiceResult()

    if opts is None:
        opts = default_opts
    else:
        # caller options override the defaults
        default_opts.update(opts)
        opts = default_opts

    self.tmpfile_path = self.get_tmp_filename()
    with self.get_system() as system:
        f = system.download(file_id, self.tmpfile_path)

    # idiom: one isinstance() with a tuple instead of three chained checks
    if not isinstance(f, (RpmFile, SrpmFile, TarballFile)):
        raise ValueError("Unable to process filetype %s" % (f.get_type(),))

    self.extracted1_path = self.get_tmp_dirname()
    d = f.unpack(self.extracted1_path)

    if isinstance(d, ExtractedRpmFile):
        src_path = d.get_content_path()
    elif isinstance(d, ExtractedTarballFile):
        src_path = d.get_path()
    elif isinstance(d, ExtractedSrpmFile):
        # we have to unpack the embedded tarball first
        t = d.get_tarball()
        self.extracted2_path = self.get_tmp_dirname()
        # fix: unpack the tarball (t); the original unpacked f a second time
        # and left t unused
        d = t.unpack(self.extracted2_path)
        src_path = d.get_path()
    else:
        raise ValueError("Filetype %s cannot be processed" % (d.get_type(),))

    # TODO: handle detect
    if (opts['language'] == 'golang' and opts['tool'] == 'gofedlib') or opts['language'] == 'detect':
        try:
            ret.result = gofedlib.api(src_path)
        except Exception:
            # fix: narrowed bare except so SystemExit/KeyboardInterrupt propagate
            exc_info = sys.exc_info()
            ret.meta['error'] = [str(exc_info[0]), str(exc_info[1]), str(exc_info[2])]
        finally:
            ret.meta['language'] = 'golang'
            ret.meta['tool'] = 'gofedlib'
    else:
        raise NotImplementedError()

    return ret