def download(self, pkg_name, pkg_version=None, outdir=None, binary=False, with_dep=False):
    """Download a pypi package into outdir with pip2/pip3.

    By default fetches the source distribution (--no-binary) without
    dependencies (--no-deps). Returns the path to the downloaded file,
    or None if the download failed.
    """
    # choose pip2 vs pip3 based on which python the package targets
    pip_cmd = 'pip2' if self._get_py_version(
        pkg_name=pkg_name, pkg_version=pkg_version) == 'python2' else 'pip3'
    target = '%s==%s' % (pkg_name, pkg_version) if pkg_version else pkg_name
    download_cmd = [pip_cmd, 'download', target]
    if not binary:
        download_cmd += ['--no-binary', ':all:']
    if not with_dep:
        download_cmd += ['--no-deps']
    exec_command('pip download', download_cmd, cwd=outdir)
    download_path = join(
        outdir,
        self._get_pkg_fname(pkg_name=pkg_name, pkg_version=pkg_version))
    if pkg_version is None:
        # without a pinned version the expected filename is a glob pattern
        matches = glob.glob(download_path)
        if len(matches) == 1:
            return matches[0]
    elif exists(download_path):
        return download_path
    logging.error("failed to download pkg %s ver %s", pkg_name, pkg_version)
    return None
def uninstall(self, pkg_name, pkg_version=None, trace=False, trace_string_size=1024, sudo=False, install_dir=None, outdir=None):
    """Uninstall an npm package, optionally under strace.

    With sudo the package is removed globally (-g); otherwise npm runs
    inside install_dir.
    """
    spec = '%s@%s' % (pkg_name, pkg_version) if pkg_version else pkg_name
    uninstall_cmd = ['npm', 'uninstall', spec]
    if sudo:
        # global uninstall requires elevated privileges
        uninstall_cmd = ['sudo'] + uninstall_cmd + ['-g']
    uninstall_cmd = self.decorate_strace(
        pkg_name=pkg_name, pkg_version=pkg_version, trace=trace,
        trace_string_size=trace_string_size, sudo=sudo, outdir=outdir,
        command=uninstall_cmd)
    exec_command('npm uninstall', uninstall_cmd, cwd=install_dir)
def install(self, pkg_name, pkg_version=None, trace=False, trace_string_size=1024, install_dir=None, outdir=None, sudo=False):
    """Install a maven artifact into the local repository.

    e.g. mvn dependency:get -Dartifact=com.google.protobuf:protobuf-java:3.5.1
    The artifact lands under
    ~/.m2/repository/com/google/protobuf/protobuf-java/3.5.1/protobuf-java-3.5.1.jar
    """
    artifact = '%s:%s' % (
        pkg_name.replace('/', ':'),
        self._get_sanitized_version(pkg_name=pkg_name, pkg_version=pkg_version))
    install_cmd = ['mvn', 'dependency:get', '-Dartifact=%s' % artifact]
    install_cmd = self.decorate_strace(
        pkg_name=pkg_name, pkg_version=pkg_version, trace=trace,
        trace_string_size=trace_string_size, sudo=sudo, outdir=outdir,
        command=install_cmd)
    exec_command('mvn install', install_cmd)
def install(self, pkg_name, pkg_version=None, trace=False, trace_string_size=1024, install_dir=None, outdir=None, sudo=False):
    """Install a rubygems package, optionally pinned to pkg_version.

    NOTE: --install-dir and --user-install are conflicting gem options,
    so --user-install is only added when neither install_dir nor sudo
    is in effect.
    """
    install_cmd = ['gem', 'install', pkg_name]
    if pkg_version:
        install_cmd += ['-v', pkg_version]
    if install_dir:
        install_cmd += ['--install-dir', install_dir]
    elif not sudo:
        # default to a per-user install when not root and no target dir
        install_cmd += ['--user-install']
    if sudo:
        install_cmd = ['sudo'] + install_cmd
    install_cmd = self.decorate_strace(
        pkg_name=pkg_name, pkg_version=pkg_version, trace=trace,
        trace_string_size=trace_string_size, sudo=sudo, outdir=outdir,
        command=install_cmd)
    exec_command('gem install', install_cmd)
def download(self, pkg_name, pkg_version=None, outdir=None, binary=False, with_dep=False):
    """Download an npm package tarball into outdir via `npm pack`.

    Returns the path to the tarball, or None on failure.
    """
    spec = '%s@%s' % (pkg_name, pkg_version) if pkg_version else pkg_name
    download_cmd = ['npm', 'pack', spec]
    # Node.js tool for easy binary deployment of C++ addons
    # https://github.com/mapbox/node-pre-gyp/
    if binary:
        logging.warning("support for binary downloading is not added yet!")
    # npm pack with dependencies
    # https://github.com/npm/npm/issues/4210
    if with_dep:
        logging.warning(
            "support for packing dependencies is not added yet!")
    exec_command('npm pack', download_cmd, cwd=outdir)
    download_path = join(
        outdir,
        self._get_pkg_fname(pkg_name=pkg_name, pkg_version=pkg_version))
    if pkg_version is None:
        # unpinned version: expected filename is a glob pattern
        matches = glob.glob(download_path)
        if len(matches) == 1:
            return matches[0]
    elif exists(download_path):
        return download_path
    logging.error("failed to download pkg %s ver %s", pkg_name, pkg_version)
    return None
def exercise(self, pkg_name, pkg_version=None, trace=False, trace_string_size=1024, sudo=False, install_dir=None, outdir=None, timeout=None):
    """Exercise an installed npm package by require()-ing it in exercise.js.

    Copies pm_proxy/scripts/exercise.js into the package's npm root,
    installs the helper script's own dependencies there, then runs it
    with node. pkg_version, trace, trace_string_size and outdir are
    accepted for callback-signature compatibility but not used here.
    """
    # assume that the package is installed, and get the root directory for the installed package.
    npm_root = self._get_npm_root(sudo=sudo, install_dir=install_dir)
    exercise_cmd = ['node', 'exercise.js', pkg_name]
    # require(pkg_name) and trigger the events or initialize its global classes or objects
    exercise_src_location = 'pm_proxy/scripts/exercise.js'
    exercise_tgt_location = join(npm_root, 'exercise.js')
    if sudo:
        # FIXME: /usr/bin/node_modules is generated by sudo user and requires sudo privilege to write to it
        copyfile_cmd = [
            'sudo', 'cp', exercise_src_location, exercise_tgt_location
        ]
        exec_command('copy exercise.js', copyfile_cmd)
    else:
        shutil.copyfile(exercise_src_location, exercise_tgt_location)
    # parse the dependencies and install them
    dep_names = json.load(open('pm_proxy/scripts/package.json',
                               'r'))['dependencies']
    for dep_name in dep_names:
        self.install(pkg_name=dep_name, install_dir=npm_root, sudo=sudo)
    exec_command('node exercise.js', exercise_cmd, cwd=npm_root, timeout=timeout)
def install(self, pkg_name, pkg_version=None, trace=False, trace_string_size=1024, install_dir=None, outdir=None, sudo=False):
    """Install a packagist package via `composer require`.

    With sudo the package is required globally; otherwise composer runs
    inside install_dir. A composer.json is initialized first.
    """
    # how the require command is implemented internally:
    # https://github.com/composer/composer/blob/master/src/Composer/Command/RequireCommand.php
    # extension supporting installs to arbitrary paths:
    # https://github.com/composer/installers#current-supported-package-types
    self._install_init(install_dir=install_dir, sudo=sudo)
    if sudo:
        # global CLI utilities: add COMPOSER_HOME/vendor/bin to PATH
        # https://github.com/consolidation/cgr/issues/2
        install_cmd = ['sudo', 'composer', 'global', 'require', pkg_name]
    else:
        install_cmd = ['composer', 'require', pkg_name]
    if pkg_version:
        install_cmd.append(pkg_version)
    install_cmd = self.decorate_strace(
        pkg_name=pkg_name, pkg_version=pkg_version, trace=trace,
        trace_string_size=trace_string_size, sudo=sudo, outdir=outdir,
        command=install_cmd)
    exec_command('composer require', install_cmd, cwd=install_dir)
def main(self, pkg_name, pkg_version=None, trace=False, trace_string_size=1024, sudo=False, install_dir=None, outdir=None, timeout=None):
    """Run main.py against an installed packagist package's executables.

    Looks up the binaries declared in the package metadata for this
    version and passes each one to the driver script with -b.
    """
    # assume that the package is installed; locate the composer root
    composer_root = self._get_composer_root(sudo=sudo, install_dir=install_dir)
    main_cmd = [
        'python', 'main.py', pkg_name, '-m', 'packagist', '-r', composer_root
    ]
    # collect the binaries to run from the version metadata
    pkg_info = self.get_metadata(pkg_name=pkg_name, pkg_version=pkg_version)
    versions = pkg_info['package']['versions']
    version_info = self._get_version_info(versions=versions, pkg_version=pkg_version)
    for binary in version_info['bin']:
        main_cmd += ['-b', binary]
    exec_command('python main.py', main_cmd, cwd="pm_proxy/scripts", timeout=timeout)
def install_dep(self, pkg_name, pkg_version=None, trace=False, trace_string_size=1024, sudo=False, install_dir=None, outdir=None):
    """Install a gem's resolved dependencies, each pinned to its version.

    https://stackoverflow.com/questions/23213849/what-is-the-syntax-for-gem-install-multiple-gems-specifying-versions-for-each
    """
    dep_pkgs = self.get_dep(pkg_name=pkg_name, pkg_version=pkg_version)
    # NOTE: name:version doesn't work for all gem commands, particularly gem uninstall doesn't support this
    pinned = ['%s:%s' % pair for pair in dep_pkgs.items()]
    install_dep_cmd = ['gem', 'install'] + pinned + ['--ignore-dependencies']
    if sudo:
        install_dep_cmd = ['sudo'] + install_dep_cmd
    else:
        install_dep_cmd += ['--user-install']
    install_dep_cmd = self.decorate_strace(
        pkg_name=pkg_name, pkg_version=pkg_version, trace=trace,
        trace_string_size=trace_string_size, sudo=sudo, outdir=outdir,
        command=install_dep_cmd, is_dep=True)
    exec_command('gem install dependency', install_dep_cmd, cwd=install_dir)
def install_dep(self, pkg_name, pkg_version=None, trace=False, trace_string_size=1024, sudo=False, install_dir=None, outdir=None):
    """Install a pypi package's dependencies from its requirements file."""
    dep_file = self.get_dep_file(pkg_name=pkg_name, pkg_version=pkg_version)
    # pick pip2/pip3 to match the package's python version
    py_ver = self._get_py_version(pkg_name=pkg_name, pkg_version=pkg_version)
    pip_cmd = 'pip2' if py_ver == 'python2' else 'pip3'
    install_dep_cmd = [pip_cmd, 'install', '-r', dep_file]
    if sudo:
        install_dep_cmd = ['sudo'] + install_dep_cmd
    else:
        install_dep_cmd = install_dep_cmd + ['--user']
    install_dep_cmd = self.decorate_strace(
        pkg_name=pkg_name, pkg_version=pkg_version, trace=trace,
        trace_string_size=trace_string_size, sudo=sudo, outdir=outdir,
        command=install_dep_cmd, is_dep=True)
    exec_command('pip install dependency', install_dep_cmd)
def uninstall(self, pkg_name, pkg_version=None, trace=False, trace_string_size=1024, sudo=False, install_dir=None, outdir=None):
    """Uninstall a pypi package with the matching pip2/pip3.

    install_dir is unused and kept only for the common callback
    signature; trace/trace_string_size/outdir feed decorate_strace.
    """
    pip_cmd = 'pip2' if self._get_py_version(
        pkg_name=pkg_name, pkg_version=pkg_version) == 'python2' else 'pip3'
    if pkg_version:
        uninstall_cmd = [
            pip_cmd, 'uninstall', '%s==%s' % (pkg_name, pkg_version)
        ]
    else:
        uninstall_cmd = [pip_cmd, 'uninstall', pkg_name]
    if sudo:
        uninstall_cmd = ['sudo'] + uninstall_cmd
    # FIX: unlike `pip install`, `pip uninstall` has no --user option;
    # passing it made every non-sudo uninstall exit with a usage error,
    # so the flag is simply dropped here.
    uninstall_cmd = self.decorate_strace(
        pkg_name=pkg_name, pkg_version=pkg_version, trace=trace,
        trace_string_size=trace_string_size, sudo=sudo, outdir=outdir,
        command=uninstall_cmd)
    exec_command('pip uninstall', uninstall_cmd)
def download(self, pkg_name, pkg_version=None, outdir=None, binary=False, with_dep=False):
    """Download a gem into outdir: `gem fetch` first, wget fallback second.

    The fallback ignores the platform and fetches the gem_uri from the
    rubygems metadata. Returns the downloaded file path, or None if
    both attempts fail.
    """
    # download using gem fetch
    logging.warning("consider platform to download %s ver %s", pkg_name,
                    pkg_version)
    download_cmd = ['gem', 'fetch', pkg_name]
    if pkg_version:
        download_cmd += ['-v', pkg_version]
    if binary:
        logging.warning("support for binary downloading is not added yet!")
    # ruby download with dependencies
    # https://gist.github.com/Milly/909564
    if with_dep:
        logging.warning(
            "support for downloading dependencies is not added yet!")
    exec_command('gem fetch', download_cmd, cwd=outdir)
    download_path = join(
        outdir,
        self._get_pkg_fname(pkg_name=pkg_name, pkg_version=pkg_version))
    download_paths = glob.glob(download_path)
    if len(download_paths) == 1:
        return download_paths[0]
    logging.error("failed to download pkg %s ver %s using gem fetch",
                  pkg_name, pkg_version)
    # download using wget, ignore the platform
    # FIX: this log call had two %s placeholders but no arguments, which
    # made the logging module emit a formatting error instead of the message.
    logging.warning("fallback to ignore platform to download pkg %s ver %s",
                    pkg_name, pkg_version)
    # ignore platform
    # https://rubygems.org/api/v1/gems/json-jruby.json
    # https://rubygems.org/gems/json-jruby-1.5.0-java.gem
    pkg_info = self.get_metadata(pkg_name=pkg_name, pkg_version=pkg_version)
    if pkg_info is None:
        return None
    gem_uri = pkg_info['gem_uri']
    if gem_uri:
        # FIXME: use wget, rather than curl, to follow redirects
        download_fname = gem_uri.rsplit('/', 1)[-1]
        download_cmd = ['wget', gem_uri, '-O', download_fname]
        if binary:
            logging.warning(
                "support for binary downloading is not added yet!")
        if with_dep:
            logging.warning(
                "support for downloading dependencies is not added yet!")
        exec_command('gem fetch (wget)', download_cmd, cwd=outdir)
        download_path = join(outdir, download_fname)
        if exists(download_path):
            return download_path
    logging.error("failed to download pkg %s ver %s", pkg_name, pkg_version)
    return None
def astgen(self, inpath, outfile, root=None, configpath=None, pkg_name=None, pkg_version=None, evaluate_smt=False):
    """Generate AST results for a Java/Android artifact via astgen-java.

    Dispatches on the input suffix (.apk/.dex/.java/.class/.jar/.aar/.war)
    to select the Soot input type, runs the astgen jar, optionally
    evaluates the smt formula over the results, and cleans up any
    decompressed temp path.
    """
    analyze_path, is_decompress_path, outfile, root, configpath = self._sanitize_astgen_args(
        inpath=inpath, outfile=outfile, root=root, configpath=configpath,
        language=self.language)
    astgen_cmd = [
        'java', '-jar', 'target/astgen-java-1.0.0-jar-with-dependencies.jar',
        '-inpath', analyze_path, '-outfile', outfile, '-config', configpath
    ]
    if isdir(analyze_path):
        # FIX: Exception() does not interpolate logging-style arguments;
        # build the message with %-formatting.
        raise Exception("Soot doesn't take a directory as input: %s" %
                        analyze_path)
    if analyze_path.endswith((".apk", ".dex")):
        # processing android apps requires android.jar
        astgen_cmd.extend(['-android_jar_dir', 'platforms/'])
    if analyze_path.endswith(".apk"):
        # FIX: a missing comma made 'APK' '-process_dir' concatenate into
        # the single bogus argument 'APK-process_dir', silently dropping
        # the -process_dir flag for APK inputs.
        astgen_cmd.extend(['-intype', 'APK', '-process_dir', analyze_path])
    elif analyze_path.endswith(".dex"):
        astgen_cmd.extend(['-intype', 'DEX', '-process_dir', analyze_path])
    elif analyze_path.endswith((".java",)):
        astgen_cmd.extend(['-intype', 'SOURCE', '-process_dir', dirname(analyze_path)])
    elif analyze_path.endswith((".class",)):
        astgen_cmd.extend(['-intype', 'CLASS', '-process_dir', dirname(analyze_path)])
    elif analyze_path.endswith((".jar",)):
        # this is the default input type
        astgen_cmd.extend(['-intype', 'JAR', '-process_dir', analyze_path])
    elif analyze_path.endswith((".aar",)):
        # aar contains /classes.jar
        # https://developer.android.com/studio/projects/android-library
        astgen_cmd.extend(['-android_jar_dir', 'platforms/'])
        aar_file = get_file_with_meta(analyze_path)
        class_jar_content = aar_file.accessor.read('classes.jar')
        analyze_path_jar = join(dirname(analyze_path),
                                splitext(basename(analyze_path))[0] + '.jar')
        # FIX: close the file handle deterministically instead of
        # relying on garbage collection of the anonymous file object
        with open(analyze_path_jar, 'wb') as jar_out:
            jar_out.write(class_jar_content)
        astgen_cmd.extend(['-intype', 'JAR', '-process_dir', analyze_path_jar])
    elif analyze_path.endswith((".war",)):
        # war contains lots of jar files in /WEB-INF/lib/
        # http://one-jar.sourceforge.net/
        logging.error("Not handling .war file yet: %s", analyze_path)
    else:
        logging.error("Input path has unexpected suffix: %s", analyze_path)
    # root is not used here
    if pkg_name is not None:
        astgen_cmd.extend(['-package_name', pkg_name])
    if pkg_version is not None:
        astgen_cmd.extend(['-package_version', pkg_version])
    exec_command("java astgen", astgen_cmd, cwd="static_proxy/astgen-java")
    # optionally evaluate smt formula
    if evaluate_smt:
        resultpb = PkgAstResults()
        read_proto_from_file(resultpb, filename=outfile, binary=False)
        satisfied = self._check_smt(astgen_results=[resultpb],
                                    configpath=configpath)
        resultpb.pkgs[0].config.smt_satisfied = satisfied
        write_proto_to_file(resultpb, filename=outfile, binary=False)
    # clean up residues
    self._cleanup_astgen(analyze_path=analyze_path,
                         is_decompress_path=is_decompress_path)
def _install_init(self, install_dir, sudo=False):
    """Initialize a composer project (global under sudo) with dev stability,
    non-interactively, inside install_dir."""
    base = ['composer', 'global', 'init'] if sudo else ['composer', 'init']
    prefix = ['sudo'] if sudo else []
    init_cmd = prefix + base + ['--stability', 'dev', '--no-interaction']
    exec_command('composer init', init_cmd, cwd=install_dir)
def astgen(self, inpath, outfile, root=None, configpath=None, pkg_name=None, pkg_version=None, evaluate_smt=False): analyze_path, is_decompress_path, outfile, root, configpath = self._sanitize_astgen_args( inpath=inpath, outfile=outfile, root=root, configpath=configpath, language=self.language) # ./vendor/nikic/php-parser/bin/php-parse -d ../testdata/test-eval-exec.php configpb = AstLookupConfig() configpath_bin = configpath + '.bin' # create binary config from text format self._pb_text_to_bin(proto=configpb, infile=configpath, outfile=configpath_bin) astgen_cmd = [ 'php', 'astgen.php', '-c', configpath_bin, '-i', analyze_path, '-o', outfile ] if root is not None: astgen_cmd.extend(['-b', root]) if pkg_name is not None: astgen_cmd.extend(['-n', pkg_name]) if pkg_version is not None: astgen_cmd.extend(['-v', pkg_version]) exec_command("php astgen", astgen_cmd, cwd="static_proxy") # convert binary output to text format resultpb = PkgAstResults() read_proto_from_file(resultpb, filename=outfile, binary=True) # optionally evaluate smt formula if evaluate_smt: satisfied = self._check_smt(astgen_results=[resultpb], configpath=configpath) resultpb.pkgs[0].config.smt_satisfied = satisfied # save resultpb write_proto_to_file(resultpb, filename=outfile, binary=False) # clean up residues self._cleanup_astgen(analyze_path=analyze_path, is_decompress_path=is_decompress_path)
def download(self, pkg_name, pkg_version=None, outdir=None, binary=False, with_dep=False):
    """Download an artifact from jitpack into outdir via mvn dependency:get.

    Tries jar/aar/war packaging in turn and returns the first download
    path that exists, or None if all fail.

    Example:
    mvn dependency:get -DremoteRepositories=https://jitpack.io -Dartifact=com.github.dubasdey:coinbase-pro-client:0.0.4 -Dtransitive=false -Ddest=/tmp/
    mvn dependency:get -DremoteRepositories=https://jitpack.io -Dartifact=com.google.protobuf:protobuf-java:3.5.1 -Dtransitive=false -Ddest=/tmp/
    https://coderwall.com/p/qbozzq/download-an-artifact-from-jitpack-io-using-maven
    """
    # FIXME: assumes that pkg_version is always specified
    if binary:
        logging.warning("support for binary downloading is not added yet!")
    if with_dep:
        logging.warning(
            "support for packing dependencies is not added yet!")
    possible_extensions = ('jar', 'aar', 'war')
    for extension in possible_extensions:
        # /tmp/protobuf-java-3.5.1.jar
        if extension != 'jar':
            # non-jar artifacts need an explicit packaging suffix
            download_artifact = '%s:%s:%s' % (pkg_name.replace(
                '/', ':'), pkg_version, extension)
        else:
            download_artifact = '%s:%s' % (pkg_name.replace(
                '/', ':'), pkg_version)
        download_cmd = [
            'mvn', 'dependency:get',
            '-DremoteRepositories=https://jitpack.io',
            '-Dartifact=%s' % download_artifact, '-Dtransitive=false',
            '-Ddest=%s' % outdir
        ]
        exec_command('mvn dependency:get', download_cmd)
        # cleanup intermediate folders
        # NOTE(review): rmtree raises if this cache path was never created —
        # presumably dependency:get always creates it; confirm.
        temp_install_path = expanduser(
            join(
                '~/.m2/repository',
                self._get_pkg_dir(pkg_name=pkg_name, pkg_version=pkg_version)))
        shutil.rmtree(temp_install_path)
        # check if download path exists to see if the download is successful or not
        download_path = join(
            outdir,
            self._get_pkg_fname(pkg_name=pkg_name,
                                pkg_version=pkg_version,
                                suffix=extension))
        if exists(download_path):
            return download_path
    logging.error("failed to download pkg %s ver %s", pkg_name, pkg_version)
    return None
def _get_npm_root(self, sudo, install_dir):
    """Return the npm module root: global root under sudo, otherwise the
    root relative to install_dir. Returns None (with an error logged)
    when neither sudo nor install_dir is given."""
    if not sudo and install_dir is None:
        logging.error(
            "for npmjs nonsudo, install_dir in main is None, doesn't make sense!"
        )
        return
    if sudo:
        raw_root = exec_command('npm root', ['npm', 'root', '-g'],
                                ret_stdout=True)
    else:
        raw_root = exec_command('npm root', ['npm', 'root'],
                                cwd=install_dir, ret_stdout=True)
    return raw_root.strip()
def _sanitize_astgen_args(inpath, outfile, root, configpath, language):
    """Normalize astgen arguments and resolve the path to analyze.

    Converts all paths to absolute, then determines what to analyze:
    a directory or a single source file is used as-is; a .gem archive
    is unpacked with `gem unpack`; any other recognized archive is
    decompressed. Returns (analyze_path, is_decompress_path, outfile,
    root, configpath), where is_decompress_path flags a temporary
    directory the caller is expected to clean up.

    NOTE(review): no `self` parameter — presumably declared as a
    staticmethod; confirm the decorator at the definition site.
    """
    # get the absolute path
    inpath = abspath(inpath)
    outfile = abspath(outfile)
    if root is not None:
        root = abspath(root)
    if configpath is not None:
        configpath = abspath(configpath)
    # handle the input path
    analyze_path = None
    is_decompress_path = False
    if not exists(inpath):
        raise Exception("inpath %s doesn't exist!" % inpath)
    if isdir(inpath):
        logging.debug("inpath %s is a directory!", inpath)
        analyze_path = inpath
    else:
        logging.debug(
            "inpath %s is a file, checking whether it is a compressed file!",
            inpath)
        if inpath.endswith(Language2Extensions[language]):
            logging.debug(
                "inpath %s is a single file, directly analyze it!", inpath)
            analyze_path = inpath
        elif inpath.endswith(".gem"):
            # Handle gem file using `gem unpack`
            logging.debug(
                "inpath %s is a gem file, decompress using gem unpack and analyze it!",
                inpath)
            import tempfile
            analyze_path = tempfile.mkdtemp(prefix='gem-')
            gem_unpack_cmd = [
                'gem', 'unpack', inpath, '--target', analyze_path
            ]
            exec_command("gem unpack", gem_unpack_cmd)
            is_decompress_path = True
        elif get_file_with_meta(inpath) is not None:
            logging.debug(
                "inpath %s is a compressed file, decompress and analyze it!",
                inpath)
            analyze_path = decompress_file(inpath)
            is_decompress_path = True
        else:
            raise Exception(
                "inpath %s is unhandled type for language %s!" %
                (inpath, language))
    return analyze_path, is_decompress_path, outfile, root, configpath
def main(self, pkg_name, pkg_version=None, trace=False, trace_string_size=1024, sudo=False, install_dir=None, outdir=None, timeout=None):
    """Run the package-main driver script for a rubygems package.

    The trace/sudo/dir arguments are placeholders for the common
    callback signature and are not used here.
    """
    exec_command('python main.py',
                 ['python', 'main.py', pkg_name, '-m', 'rubygems'],
                 cwd='pm_proxy/scripts', timeout=timeout)
def exercise(self, pkg_name, pkg_version=None, trace=False, trace_string_size=1024, sudo=False, install_dir=None, outdir=None, timeout=None):
    """Run the ruby exercise script against an installed gem.

    The trace/sudo/dir arguments are placeholders for the common
    callback signature and are not used here.
    """
    exec_command('ruby exercise.rb',
                 ['ruby', 'exercise.rb', pkg_name],
                 cwd='pm_proxy/scripts', timeout=timeout)
def download(self, pkg_name, pkg_version=None, outdir=None, binary=False, with_dep=False):
    """Download a maven artifact from jcenter into outdir.

    Tries jar/aar/war packaging in turn via mvn dependency:get and
    returns the first resulting file path, or None if all fail.

    Example:
    mvn dependency:get -DremoteRepositories=http://jcenter.bintray.com/ -Dartifact=com.google.protobuf:protobuf-java:3.5.1 -Dtransitive=false -Ddest=/tmp/
    """
    pkg_version = self._get_sanitized_version(pkg_name=pkg_name,
                                              pkg_version=pkg_version)
    if binary:
        logging.warning("support for binary downloading is not added yet!")
    if with_dep:
        logging.warning(
            "support for packing dependencies is not added yet!")
    possible_extensions = ('jar', 'aar', 'war')
    for extension in possible_extensions:
        # /tmp/protobuf-java-3.5.1.jar
        if extension != 'jar':
            # non-jar artifacts need an explicit packaging suffix
            download_artifact = '%s:%s:%s' % (pkg_name.replace(
                '/', ':'), pkg_version, extension)
        else:
            download_artifact = '%s:%s' % (pkg_name.replace(
                '/', ':'), pkg_version)
        download_cmd = [
            'mvn', 'dependency:get',
            '-DremoteRepositories=http://jcenter.bintray.com/',
            '-Dartifact=%s' % download_artifact, '-Dtransitive=false',
            '-Ddest=%s' % outdir
        ]
        exec_command('mvn dependency:get', download_cmd)
        # cleanup intermediate folders
        # NOTE(review): rmtree raises if this cache path was never created —
        # presumably dependency:get always creates it; confirm.
        temp_install_path = expanduser(
            join(
                '~/.m2/repository',
                self._get_pkg_dir(pkg_name=pkg_name, pkg_version=pkg_version)))
        shutil.rmtree(temp_install_path)
        # check if download path exists to see if the download is successful or not
        download_path = join(
            outdir,
            self._get_pkg_fname(pkg_name=pkg_name,
                                pkg_version=pkg_version,
                                suffix=extension))
        if exists(download_path):
            return download_path
    logging.error("failed to download pkg %s ver %s", pkg_name, pkg_version)
    return None
def get_version_hash(self, pkg_name, pkg_version, algorithm='sha1'):
    """Return the hex digest of the downloaded maven artifact, or None.

    Downloads the artifact into a scratch dir, hashes the first of the
    jar/aar/war variants that exists and is non-empty, and removes the
    scratch dir. Raises Exception for unsupported algorithms.
    """
    if algorithm not in ('sha1', 'md5'):
        # FIX: Exception() does not interpolate logging-style arguments;
        # build the message with %-formatting so callers see the algorithm.
        raise Exception("algorithm %s is not supported!" % algorithm)
    temp_repo_dir = tempfile.mkdtemp(prefix='get_version_hash-')
    version_hash = None
    try:
        self.download(pkg_name=pkg_name, pkg_version=pkg_version,
                      outdir=temp_repo_dir)
        possible_extensions = ('jar', 'aar', 'war')
        for extension in possible_extensions:
            temp_repo_filepath = join(
                temp_repo_dir,
                self._get_pkg_fname(pkg_name=pkg_name,
                                    pkg_version=pkg_version,
                                    suffix=extension))
            if not exists(temp_repo_filepath) or getsize(
                    temp_repo_filepath) == 0:
                continue
            # shell out to sha1sum/md5sum; output is "<digest>  <path>"
            hash_command = '%ssum' % algorithm
            version_hash = exec_command(hash_command,
                                        [hash_command, temp_repo_filepath],
                                        ret_stdout=True)
            version_hash = version_hash.split(' ')[0]
            break
    finally:
        # FIX: remove the scratch dir even if download/hashing raises,
        # so failed lookups no longer leak temp directories.
        shutil.rmtree(temp_repo_dir)
    if version_hash is None:
        logging.error(
            "fail in get_version_hash for pkg %s ver %s, ignoring!",
            pkg_name, pkg_version)
        return None
    return version_hash
def _get_pip_freeze_pkgs(self, pkg_name, pkg_version=None, install_env=None):
    """Map this package's dependency names to installed versions.

    Runs `pip freeze` and keeps only name==version lines whose
    lowercased name appears in the dependency list.
    """
    # NOTE: pkg_version is a placeholder for callbacks and is not used here.
    dep_pkg_names = self._get_pip_dep_pkgs(pkg_name=pkg_name,
                                           pkg_version=pkg_version,
                                           install_env=install_env)
    # run pip freeze (with the matching pip) to see what is installed
    pip_cmd = 'pip2' if self._get_py_version(
        pkg_name=pkg_name, pkg_version=pkg_version) == 'python2' else 'pip3'
    installed_pkgs_str = exec_command('pip freeze', [pip_cmd, 'freeze'],
                                      ret_stdout=True, env=install_env)
    dep_pkgs = {}
    for line in installed_pkgs_str.split('\n'):
        if not line:
            continue
        pieces = line.split('==')
        # skip editable installs and anything else not name==version
        if len(pieces) != 2:
            continue
        name, version = pieces
        if name.lower() in dep_pkg_names:
            dep_pkgs[name] = version
    return dep_pkgs
def _get_gem_list_pkgs(self, pkg_name, pkg_version=None, install_env=None):
    """Map this gem's dependency names to their installed versions.

    Runs `gem list` and parses lines of the shapes shown below, keeping
    only packages that appear in the dependency list and taking the
    first listed version when several are installed.
    """
    # NOTE: pkg_version is a placeholder for callbacks and is not used here.
    dep_pkg_names = self._get_gem_dep_pkgs(pkg_name=pkg_name,
                                           pkg_version=pkg_version)
    # run gem list to get the dependencies
    list_cmd = ['gem', 'list']
    installed_pkgs_str = exec_command('gem list', list_cmd, ret_stdout=True,
                                      env=install_env)
    # e.g. google-protobuf (3.6.1 x86_64-linux)
    # e.g. couchbase (1.3.15)
    # e.g. csv (default: 1.0.0)
    # e.g. parser (2.5.1.2, 2.5.1.0)
    # parse "name (versions)": strip an optional "default: " prefix, take
    # the first comma-separated version, and drop any platform suffix
    installed_pkgs = [
        (dep_name,
         dep_version.split(': ')[-1].split(', ')[0].split(' ')[0])
        for dep_name, dep_version in [
            installed_pkg.strip(')').split(' (')
            for installed_pkg in filter(bool, installed_pkgs_str.split('\n'))
        ]
    ]
    dep_pkgs = {
        dep_name: dep_version
        for dep_name, dep_version in installed_pkgs
        if dep_name in dep_pkg_names
    }
    return dep_pkgs
def get_metadata(self, pkg_name, pkg_version=None):
    """Return npm metadata for a package as a dict, or None on failure.

    Uses a cached metadata file under the package info dir when present
    and valid; otherwise runs `npm view --json`, caches the result
    (json format only), and returns it.
    """
    # load cached metadata information
    pkg_info_dir = self.get_pkg_info_dir(pkg_name=pkg_name)
    if pkg_info_dir is not None:
        metadata_fname = self.get_metadata_fname(pkg_name=pkg_name,
                                                 pkg_version=pkg_version,
                                                 fmt=self.metadata_format)
        metadata_file = join(pkg_info_dir, metadata_fname)
        if exists(metadata_file):
            logging.warning("get_metadata: using cached metadata_file %s!",
                            metadata_file)
            if self.metadata_format == 'json':
                try:
                    pkg_info = json.load(open(metadata_file, 'r'))
                    # a previously cached DNS failure is stored as a
                    # one-key error dict; detect it and refetch instead
                    if (len(pkg_info) == 1 and "error" in pkg_info
                            and pkg_info["error"]["summary"] ==
                            "getaddrinfo ENOTFOUND registry.npmjs.us registry.npmjs.us:443"
                        ):
                        logging.error(
                            "previous fetch of metadata failed, regenerating!"
                        )
                    else:
                        return pkg_info
                except:
                    # deliberately broad: any parse failure falls through
                    # to a fresh `npm view` below
                    logging.debug(
                        "fail to load metadata_file: %s, regenerating!",
                        metadata_file)
            else:
                logging.error(
                    "get_metadata: output format %s is not supported!",
                    self.metadata_format)
                return None
    # run npm view to get the package info, show/info/v are aliases of view
    view_cmd = ['npm', 'view', pkg_name, '--json']
    try:
        pkg_info_str = exec_command('npm view', view_cmd, ret_stdout=True)
        pkg_info = json.loads(pkg_info_str)
    except:
        # deliberately broad: treat any subprocess/parse failure as "no metadata"
        logging.error("fail in get_metadata for pkg %s, ignoring!", pkg_name)
        return None
    # optionally cache metadata
    if pkg_info_dir is not None:
        if not exists(pkg_info_dir):
            os.makedirs(pkg_info_dir)
        metadata_fname = self.get_metadata_fname(pkg_name=pkg_name,
                                                 pkg_version=pkg_version,
                                                 fmt=self.metadata_format)
        metadata_file = join(pkg_info_dir, metadata_fname)
        if self.metadata_format == 'json':
            json.dump(pkg_info, open(metadata_file, 'w'), indent=2)
        else:
            logging.error(
                "get_metadata: output format %s is not supported!",
                self.metadata_format)
    return pkg_info
def download(self, pkg_name, pkg_version=None, outdir=None, binary=False, with_dep=False):
    """Download a packagist package's dist archive via wget into outdir.

    The dist url (e.g. a github zipball reference) comes from the
    packagist metadata for the requested version. Returns the download
    path, or None on failure.
    """
    # metadata from packagist, for the whole package:
    # http://repo.packagist.org/p/google/protobuf%2433e4a753c56e2dfb44962115259d93682cf3d2f8b33e4a7af972bcd8a0513ef2.json
    # reference id from the metadata can be used to build the download URL:
    # https://api.github.com/repos/google/protobuf/zipball/48cb18e5c419ddd23d9badcfe4e9df7bde1979b2
    pkg_info = self.get_metadata(pkg_name=pkg_name, pkg_version=pkg_version)
    if not ('package' in pkg_info and 'versions' in pkg_info['package']):
        logging.error("download: cannot find a download link for %s",
                      pkg_name)
        return
    version_info = self._get_version_info(
        versions=pkg_info['package']['versions'], pkg_version=pkg_version)
    if not version_info:
        logging.error("download: cannot get version info for %s ver %s",
                      pkg_name, pkg_version)
        return
    dist = version_info['dist']
    download_fname = self._get_pkg_fname(
        pkg_name=self.get_sanitized_pkgname(pkg_name=pkg_name),
        pkg_version=version_info['version'],
        suffix=dist['type'])
    download_cmd = ['wget', dist['url'], '-O', download_fname]
    if binary:
        logging.warning("support for binary downloading is not added yet!")
    if with_dep:
        logging.warning(
            "support for packing dependencies is not added yet!")
    exec_command('composer download (wget)', download_cmd, cwd=outdir)
    download_path = join(outdir, download_fname)
    if exists(download_path):
        return download_path
    logging.error("failed to download pkg %s ver %s", pkg_name, pkg_version)
    return None
def install_file(self, infile, trace=False, trace_string_size=1024, sudo=False, install_dir=None, outdir=None):
    """Install an npm package from a local file (globally when sudo)."""
    # FIXME: install prebuilt C++ addons to avoid building dependencies
    cmd = ['npm', 'install', infile]
    if sudo:
        cmd = ['sudo'] + cmd + ['-g']
    cmd = self.decorate_strace_file(
        infile=infile, trace=trace, trace_string_size=trace_string_size,
        sudo=sudo, outdir=outdir, command=cmd)
    exec_command('npm install file', cmd, cwd=install_dir)
def install_file(self, infile, trace=False, trace_string_size=1024, sudo=False, install_dir=None, outdir=None):
    """Install a gem from a local .gem file (per-user unless sudo)."""
    cmd = ['gem', 'install', infile]
    cmd = ['sudo'] + cmd if sudo else cmd + ['--user-install']
    cmd = self.decorate_strace_file(
        infile=infile, trace=trace, trace_string_size=trace_string_size,
        sudo=sudo, outdir=outdir, command=cmd)
    exec_command('gem install file', cmd)
def has_main(self, pkg_name, pkg_version=None, binary=False, with_dep=False):
    """Heuristic: does the installed gem ship executables in its bin/ dir?

    Assumes the package is installed; `gem path` expects slash-separated
    names, so dashes are converted.
    """
    gem_path_out = exec_command('gem path',
                                ['gem', 'path', pkg_name.replace('-', '/')],
                                ret_stdout=True)
    # FIXME: this assumes that the executables are placed inside bin folder. This may not hold all the time.
    return exists(join(gem_path_out.strip(), 'bin'))
def progpilot_run(pkg_path, config_path, out_path): """ Run progpilot on customized config. Example commands: php progpilot/builds/progpilot.phar --configuration progpilot/projects/example_config/configuration.yml ../../testdata/test-eval-exec.php php progpilot/builds/progpilot.phar --configuration ../../config/static_php_progpilot.yml ../../testdata/test-eval-exec.php """ # Convert astgen_php_smt.config to progpilot sources/sinks/progpilot.yml file logging.warning("Generating progpilot config file from input config file") temp_sources_path = tempfile.NamedTemporaryFile(suffix=".json") temp_sinks_path = tempfile.NamedTemporaryFile(suffix=".json") temp_configuration_path = tempfile.NamedTemporaryFile(suffix=".yml") temp_result_path = tempfile.NamedTemporaryFile(suffix=".json") all_sources, all_sinks = ast_to_progpilot( config_path=config_path, out_path=temp_result_path.name, new_sources_path=temp_sources_path.name, new_sinks_path=temp_sinks_path.name, new_configuration_path=temp_configuration_path.name) # Run progpilot on given package, output JSON-formatted results logging.warning( "Running progpilot analysis on %s with sources %s sinks %s and config %s", pkg_path, temp_sources_path.name, temp_sinks_path.name, temp_configuration_path.name) progpilot_cmd = [ 'php', 'progpilot/builds/progpilot.phar', '--configuration', temp_configuration_path.name, pkg_path ] exec_command('progpilot', progpilot_cmd, cwd=package_directory) # Format progpilot (.json) results into proper protobuf outputs logging.warning("Converting results in %s to protobuf format", temp_result_path.name) reformat(apis_file=config_path, all_sources=all_sources, all_sinks=all_sinks, json_result_file=temp_result_path.name, outfile=out_path)