def extract_forms(url, follow="false", cookie_jar=None, filename="forms.json"):
    utils.remove_file(os.path.join(os.path.dirname(__file__), filename))
    if cookie_jar is None:
        try:
            out = utils.run_command('{} && {}'.format(
                utils.cd(os.path.dirname(os.path.abspath(__file__))),
                'scrapy crawl form -o {} -a start_url="{}" -a follow={} -a proxy={}'.format(
                    filename, url, follow, HTTP_PROXY)),
                EXTRACT_WAIT_TIME)
        except Exception:
            # fall back to crawling without the proxy
            out = utils.run_command('{} && {}'.format(
                utils.cd(os.path.dirname(os.path.abspath(__file__))),
                'scrapy crawl form -o {} -a start_url="{}" -a follow={}'.format(
                    filename, url, follow)),
                EXTRACT_WAIT_TIME)
    else:
        cookie_jar_path = os.path.join(
            os.path.dirname(os.path.abspath(__file__)),
            filename.replace('.json', '.txt'))
        cookie_jar.save(cookie_jar_path)
        out = utils.run_command('{} && {}'.format(
            utils.cd(os.path.dirname(os.path.abspath(__file__))),
            'scrapy crawl form_with_cookie -o {} -a start_url="{}" -a cookie_jar={}'.format(
                filename, url, cookie_jar_path)),
            EXTRACT_WAIT_TIME)
    with open(os.path.join(os.path.dirname(__file__), filename)) as json_forms:
        forms = json.load(json_forms)
    utils.remove_file(os.path.join(os.path.dirname(__file__), filename))
    return forms

def sync_server(self, path):
    LOG.info('Syncing server ...')
    command = '{} && {} && unset DJANGO_SETTINGS_MODULE && python manage.py syncdb --noinput'.format(
        utils.to_env(self.base_path), utils.cd(path))
    output = utils.run_command(command)
    # newer Django versions removed 'syncdb'; fall back to 'migrate'
    if 'Unknown command' in output[2]:
        command = '{} && {} && unset DJANGO_SETTINGS_MODULE && python manage.py migrate --noinput'.format(
            utils.to_env(self.base_path), utils.cd(path))
    return utils.run_command(command)

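# NOTE: in extract_forms and sync_server above (and in several Django helpers
# below), utils.cd does not change the interpreter's working directory; it
# returns a shell 'cd' fragment that is joined with '&&' into one command
# string. A hedged sketch of what these helpers plausibly look like -- the
# virtualenv 'activate' detail in to_env is an assumption, not confirmed by
# the source:
def cd(path):
    # return a shell fragment, not a Python chdir
    return 'cd {}'.format(path)

def to_env(base_path):
    # assumption: base_path points at a virtualenv whose activate script we source
    return 'source {}/bin/activate'.format(base_path)
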
def test_single_relative(self):
    """
    Simple auto-gen key; relative paths; deletes source file
    """
    # use only the first test file
    test_file_path = self.file_contents.keys()[0]

    with cd(self.working_dir):
        # encrypt the test file
        encrypt(
            inputfiles=[test_file_path],
            outputfile='sesame.encrypted',
            keys=[self.key],
        )

        # delete the source file
        os.remove(test_file_path)

        # decrypt the test file
        decrypt(
            inputfile='sesame.encrypted',
            keys=[self.key],
            output_dir=os.getcwd(),  # default in argparse
        )

        # ensure file has been created
        assert os.path.exists(test_file_path)

        # verify decrypted contents
        with open(test_file_path, 'r') as f:
            assert self.file_contents[test_file_path] == f.read()

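# Most snippets in this collection use `cd` as a context manager that switches
# the working directory and restores it on exit; the enaml tests below also
# pass add_to_sys_path=True. A minimal sketch under those assumptions:
import os
import sys
from contextlib import contextmanager

@contextmanager
def cd(path, add_to_sys_path=False):
    """Temporarily chdir into `path`, optionally prepending it to sys.path."""
    prev = os.getcwd()
    os.chdir(path)
    if add_to_sys_path:
        sys.path.insert(0, path)
    try:
        yield
    finally:
        if add_to_sys_path and path in sys.path:
            sys.path.remove(path)
        os.chdir(prev)
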
def apply_compute_changes():
    # Files needed on the compute nodes, paired with their target directories.
    # A list of tuples is used instead of a dict because the same file name
    # (driver.py, __init__.py) appears under more than one directory, and
    # duplicate dict keys would silently overwrite each other.
    paths = [('virt_driver.py', 'compute/monitors/membw'),
             ('__init__.py', 'compute/monitors/membw'),
             ('driver.py', 'virt/libvirt'),
             ('pcp_utils.py', 'virt/libvirt'),
             ('driver.py', 'virt'),
             ('__init__.py', 'compute/monitors'),
             ('base.py', 'compute/monitors'),
             ('claims.py', 'compute')]
    git_repo = "https://github.com/sudswas/nova.git"
    utils.helper.git_clone(git_repo, 'nova', "stable/liberty")
    # Copy the changes now, assuming all the files have been
    # copied into the present directory.
    dir_to_create = py_path + '/compute/monitors/membw'
    utils.helper.execute_command("mkdir " + dir_to_create)
    with utils.cd('nova/nova'):
        for file_name, dir in paths:
            rel_path = dir + "/" + file_name
            sys_file_path = py_path + '/' + rel_path
            utils.helper.execute_command("mv " + rel_path + " " + sys_file_path)
    utils.helper.execute_command("openstack-config --set /etc/nova/nova.conf "
                                 "DEFAULT compute_monitors membw.virt_driver")
    print "Please restart nova-compute"

def clone(self):
    if exists(self.name):
        with cd(self.name):
            shell('git fetch origin --tags')
    else:
        shell('git clone --depth=1 --branch {} {}'.format(self.branch, self.url))

def test_tutorials(enaml_run, tmpdir, tutorial):
    # Run normally to generate cache files
    dir_path = os.path.abspath(os.path.split(os.path.dirname(__file__))[0])
    source = os.path.join(dir_path, 'examples', 'tutorial', tutorial)
    example = os.path.join(tmpdir.strpath, tutorial)

    # Copy to a tmp dir
    shutil.copytree(source, example)
    clean_cache(example)  # To be safe

    # Run compileall
    compileall.compile_dir(example)

    # Remove source files
    clean_source(example)

    # Add the example folder to the sys path or we get an import error
    with cd(example, add_to_sys_path=True):
        # Python only uses pyc files if copied from the __pycache__ folder
        for f in os.listdir('__pycache__'):
            cf = ".".join(f.split(".")[:-2]) + ".pyc"
            shutil.copy(os.path.join('__pycache__', f), cf)

        # Verify it's clean
        assert not os.path.exists(tutorial + ".py")
        assert not os.path.exists(tutorial + "_view.enaml")

        # Now run from cache
        mod = importlib.import_module(tutorial)
        mod.main()

def test_multiple_absolute(self):
    """
    Test a directory hierarchy with absolute paths
    """
    # convert the files list to absolute paths
    test_input_files = [
        os.path.join(self.working_dir, path)
        for path in self.file_contents.keys()
    ]

    with cd(self.working_dir):
        # encrypt all the test files
        encrypt(
            inputfiles=test_input_files,
            outputfile='sesame.encrypted',
            keys=[self.key],
        )

        # delete the source files
        for path in self.file_contents.keys():
            delete_path(path)

        # decrypt the test files
        decrypt(
            inputfile='sesame.encrypted',
            keys=[self.key],
            output_dir=os.getcwd(),  # default in argparse
        )

        for test_file_path in self.file_contents.keys():
            # the file will be extracted on the absolute path
            test_file_path_abs = os.path.join(self.working_dir, test_file_path)[1:]

            # verify decrypted contents at the absolute extracted path
            with open(test_file_path_abs, 'r') as f:
                assert self.file_contents[test_file_path] == f.read()

def test_single_relative_force(self):
    """
    Simple auto-gen key; relative paths; with force flag to overwrite source file
    """
    # use only the first test file
    test_file_path = self.file_contents.keys()[0]

    with cd(self.working_dir):
        # encrypt the test file
        encrypt(
            inputfiles=[test_file_path],
            outputfile='sesame.encrypted',
            keys=[self.key],
        )

        # sleep before decrypt to ensure file ctime is different
        time.sleep(1)

        # decrypt the test file
        decrypt(
            inputfile='sesame.encrypted',
            keys=[self.key],
            output_dir=os.getcwd(),  # default in argparse
            force=True,
        )

        # ensure file has been overwritten
        assert self.file_timestamps[test_file_path] < os.stat(test_file_path).st_ctime

        # verify decrypted contents
        with open(test_file_path, 'r') as f:
            assert self.file_contents[test_file_path] == f.read()

def test_single_relative_overwrite_false(self):
    """
    Simple auto-gen key; relative paths; answer no to overwrite the source file
    """
    # use only the first test file
    test_file_path = self.file_contents.keys()[0]

    with cd(self.working_dir):
        # encrypt the test file
        encrypt(
            inputfiles=[test_file_path],
            outputfile='sesame.encrypted',
            keys=[self.key],
        )

        # sleep before decrypt to ensure file ctime is different
        time.sleep(1)

        # decrypt the test file; mock responds no to overwrite the existing file
        with mock.patch('__builtin__.raw_input', return_value='n'):
            decrypt(
                inputfile='sesame.encrypted',
                keys=[self.key],
                output_dir=os.getcwd(),  # default in argparse
            )

        # ensure no file has been decrypted
        assert self.file_timestamps[test_file_path] == os.stat(test_file_path).st_ctime

def run_json(json_name):
    runjson = retrieve_and_validate_run_json('tests', json_name=json_name)
    with ut.cd('tests'):
        setup_sindarins(runjson)
        whizard = Whizard(runjson, False)
        runs = fill_all_runs(runjson)
        return map(whizard.run_process, runs)

def compile_nnfusion_rt(rt_dir):
    with cd(rt_dir):
        command = "cmake ."
        assert os.system(command) == 0
        command = "make -j"
        assert os.system(command) == 0

def umount(self):
    """
    Unmounts the fs. If commit() was executed, makes the working
    subvolume the default (saving the changes on disk); otherwise
    deletes the working subvolume (restoring the previous state).
    """
    if self.save_changes:
        tdir = self.mpoint
    else:
        tdir = "/"
    with cd(tdir):
        if self.save_changes:
            shell_exec('btrfs subvolume set-default "{0}" "{1}"'.format(self.snap, self.mpoint))
            self.save_changes = False
        else:
            shell_exec('umount "{0}"'.format(self.mpoint))
            shell_exec('mount "{0}" "{1}"'.format(self.device, self.mpoint))
            os.chdir(self.mpoint)
            shell_exec('btrfs subvolume delete "{0}"'.format(self.snap))
            os.chdir("/")
        shell_exec('umount "{0}"'.format(self.mpoint))
    if self.image_file is not None:
        shell_exec('losetup -d "{0}"'.format(self.device))

def install_libpfm():
    git_path = "git://git.code.sf.net/u/hkshaw1990/perfmon2 perfmon2-libpfm4"
    utils.helper.git_clone(git_path, "perfmon2-libpfm4")
    commands = ['make', 'make install PREFIX=']
    with utils.cd("~/perfmon2-libpfm4"):
        utils.helper.execute_command(commands)
    with utils.cd("~/perfmon2-libpfm4/examples"):
        out, err = utils.helper.execute_command('./check_events')
    if out:
        if "POWERPC_NEST_MEM_BW" in out:
            print "Libpfm is ready for Memory BW measurement"
    else:
        print "There was an error during make of libpfm", err

def test_tutorials(enaml_run, tmpdir, tutorial):
    # Run normally to generate cache files
    dir_path = os.path.abspath(os.path.split(os.path.dirname(__file__))[0])
    source = os.path.join(dir_path, 'examples', 'tutorial', tutorial)
    example = os.path.join(tmpdir.strpath, tutorial)

    # Copy to a tmp dir
    shutil.copytree(source, example)
    clean_cache(example)  # To be safe

    # Run compileall
    compileall.compile_dir(example)

    # Remove source files
    clean_source(example)

    # Add the example folder to the sys path or we get an import error
    with cd(example, add_to_sys_path=True):
        if IS_PY3:
            # PY3 only uses pyc files if copied from the __pycache__ folder
            for f in os.listdir('__pycache__'):
                cf = ".".join(f.split(".")[:-2]) + ".pyc"
                shutil.copy(os.path.join('__pycache__', f), cf)

        # Verify it's clean
        assert not os.path.exists(tutorial + ".py")
        assert not os.path.exists(tutorial + "_view.enaml")

        # Now run from cache
        mod = importlib.import_module(tutorial)
        mod.main()

def test_multiple_relative(self):
    """
    Test a directory hierarchy with relative paths
    """
    with cd(self.working_dir):
        # encrypt all the test files
        encrypt(
            inputfiles=self.file_contents.keys(),
            outputfile='sesame.encrypted',
            keys=[self.key],
        )

        # delete the source files
        for path in self.file_contents.keys():
            delete_path(path)

        # decrypt the test files
        decrypt(
            inputfile='sesame.encrypted',
            keys=[self.key],
            output_dir=os.getcwd(),  # default in argparse
        )

        for test_file_path in self.file_contents.keys():
            # ensure files have been created
            assert os.path.exists(test_file_path)

            # verify decrypted contents
            with open(test_file_path, 'r') as f:
                assert self.file_contents[test_file_path] == f.read()

def clone(self):
    if exists(self.name):
        with cd(self.name):
            shell('git fetch origin --tags')
    else:
        shell('git clone --depth=1 --branch {} {}'.format(
            self.branch(), self.url))

def __call__(self, project, source_file):
    src = basename(project.dir)
    logger.info('instrumenting printfs of {} source'.format(src))
    with cd(project.dir):
        return_code = subprocess.call(['instrument-printf', source_file],
                                      stderr=self.subproc_output,
                                      stdout=self.subproc_output)
        try:
            # 'with' closes the file on exit; no explicit close() is needed
            with open(source_file, 'r+') as f:
                content = f.read()
                f.seek(0, 0)
                f.write('#ifndef ANGELIX_OUTPUT\n'
                        '#define ANGELIX_OUTPUT(type, expr, id) expr\n'
                        '#endif\n' + content)
        except IOError:
            raise Exception("Error when instrumenting %s!" % source_file)
        if return_code != 0:
            if self.config['ignore_trans_errors']:
                logger.error("transformation of {} failed".format(relpath(project.dir)))
            else:
                logger.error("transformation of {} failed".format(relpath(project.dir)))
                raise TransformationError()

def __call__(self, project, patch):
    src = basename(project.dir)
    logger.info('applying patch to {} source'.format(src))
    environment = dict(os.environ)
    dirpath = tempfile.mkdtemp()
    patch_file = join(dirpath, 'patch')
    with open(patch_file, 'w') as file:
        for e, p in patch.items():
            file.write('{} {} {} {}\n'.format(*e))
            file.write(p + "\n")
    if self.config['semfix']:
        environment['ANGELIX_SEMFIX_MODE'] = 'YES'
    environment['ANGELIX_PATCH'] = patch_file
    with cd(project.dir):
        return_code = subprocess.call(['apply-patch', project.buggy],
                                      stderr=self.subproc_output,
                                      stdout=self.subproc_output,
                                      env=environment)
    if return_code != 0:
        if self.config['ignore_trans_errors']:
            logger.error("transformation of {} failed".format(relpath(project.dir)))
        else:
            logger.error("transformation of {} failed".format(relpath(project.dir)))
            raise TransformationError()
    shutil.rmtree(dirpath)

def run_curve(curve):
    run_folder = curve[0] + '/' + curve[0]
    sindarin = run_folder + '/run.sin'
    runfolder = run_folder + '-' + str(curve[2])
    mkdirs(runfolder)
    shutil.copyfile(sindarin, os.path.join(runfolder, 'run.sin'))
    with cd(runfolder):
        replace_file('run.sin', curve[1], curve[2])
        call('rm -f *grid', shell=True)
        if not dryrun and not os.path.isfile('done'):
            print 'Running ' + runfolder
            ret = whizard_run('whizard -r', 'run.sin')
            # ret = whizard_run('whizard', 'run.sin')
            if ret != 0:
                print runfolder + ' Whizard return code ' + str(ret)
                return runfolder + ' Whizard return code ' + str(ret)
                # raise Exception(runfolder, 'Whizards return code', ret)
            else:
                with open('done', 'a'):
                    os.utime('done', None)
                print 'done with ' + runfolder
                return 'done with ' + runfolder
        else:
            # print 'Skipping ' + runfolder
            return 'skipping ' + runfolder

def test_workbench(enaml_run, qtbot):
    from enaml.qt.QtCore import Qt

    # Run normally to generate cache files
    dir_path = os.path.abspath(os.path.split(os.path.dirname(__file__))[0])
    example = os.path.join(dir_path, 'examples', 'workbench')

    def handler(app, window):
        widget = window.proxy.widget
        qtbot.wait(1000)
        for i in range(1, 4):
            qtbot.keyClick(widget, str(i), Qt.ControlModifier)
            qtbot.wait(100)
            # TODO: Verify each screen somehow
        qtbot.keyClick(widget, 'q', Qt.ControlModifier)
        # Wait for exit, otherwise it unregisters the commands
        qtbot.wait(100)

    enaml_run.run = handler

    # Add the example folder to the sys path or we get an import error
    with cd(example, add_to_sys_path=True):
        # Now run from cache
        mod = importlib.import_module('sample')
        mod.main()

def test_single_relative_output_dir(self):
    """
    Simple auto-gen key; relative paths; deletes source file; change output directory
    """
    # use only the first test file
    test_file_path = self.file_contents.keys()[0]

    with cd(self.working_dir):
        # encrypt the test file
        encrypt(
            inputfiles=[test_file_path],
            outputfile='sesame.encrypted',
            keys=[self.key],
        )

        # create a new temporary directory to extract into
        with make_secure_temp_directory() as output_dir:
            # decrypt the test file
            decrypt(
                inputfile='sesame.encrypted',
                keys=[self.key],
                output_dir=output_dir,
            )

            # ensure file has been created in the output_dir
            assert os.path.exists(os.path.join(output_dir, test_file_path))

            # verify decrypted contents
            with open(os.path.join(output_dir, test_file_path), 'r') as f:
                assert self.file_contents[test_file_path] == f.read()

def apply_scheduler_changes():
    # checkout the files needed for the scheduler nodes
    path_dict = {'virt': 'hardware.py',
                 'scheduler/filters': 'numa_topology_filter.py'}
    git_repo = "https://github.com/openstack/nova.git"
    utils.helper.git_clone(git_repo, 'nova', "stable/liberty")
    # Copy the changes now, assuming all the files have been
    # copied into the present directory.
    with utils.cd('nova/nova'):
        for dir, file_name in path_dict.iteritems():
            rel_path = dir + "/" + file_name
            sys_file_path = py_path + '/' + rel_path
            utils.helper.execute_command("mv " + rel_path + " " + sys_file_path)
    filters = utils.helper.execute_command(
        "openstack-config --get /etc/nova/nova.conf "
        "DEFAULT default_scheduler_filters")
    if "NUMA" not in filters:
        utils.helper.execute_command(
            "openstack-config --set /etc/nova/nova.conf "
            "DEFAULT default_scheduler_filters " +
            filters + ",NUMATopologyFilter")
    print "please restart openstack-nova-scheduler"

def test_single_absolute(self):
    """
    Simple auto-gen key; absolute paths
    """
    # use only the first test file
    test_file_path = self.file_contents.keys()[0]

    with cd(self.working_dir):
        # encrypt the test file
        encrypt(
            inputfiles=[os.path.join(self.working_dir, test_file_path)],
            outputfile=os.path.join(self.working_dir, 'sesame.encrypted'),
            keys=[self.key],
        )

        # delete the source file
        os.remove(test_file_path)

        # sleep before decrypt to ensure file ctime is different
        time.sleep(1)

        # decrypt the test file
        decrypt(
            inputfile=os.path.join(self.working_dir, 'sesame.encrypted'),
            keys=[self.key],
            output_dir=os.getcwd(),  # default in argparse
        )

        # the file will be extracted on the absolute path
        test_file_path_abs = os.path.join(self.working_dir, test_file_path)[1:]

        # verify decrypted contents at the absolute extracted path
        with open(test_file_path_abs, 'r') as f:
            assert self.file_contents[test_file_path] == f.read()

def __call__(self, project):
    src = basename(project.dir)
    logger.info('instrumenting repairable of {} source'.format(src))
    environment = dict(os.environ)
    if 'if-conditions' in self.config['defect']:
        environment['ANGELIX_IF_CONDITIONS_DEFECT_CLASS'] = 'YES'
    if 'assignments' in self.config['defect']:
        environment['ANGELIX_ASSIGNMENTS_DEFECT_CLASS'] = 'YES'
    if 'loop-conditions' in self.config['defect']:
        environment['ANGELIX_LOOP_CONDITIONS_DEFECT_CLASS'] = 'YES'
    if 'deletions' in self.config['defect']:
        environment['ANGELIX_DELETIONS_DEFECT_CLASS'] = 'YES'
    if 'guards' in self.config['defect']:
        environment['ANGELIX_GUARDS_DEFECT_CLASS'] = 'YES'
    if self.config['ignore_trivial']:
        environment['ANGELIX_IGNORE_TRIVIAL'] = 'YES'
    if self.config['semfix']:
        environment['ANGELIX_SEMFIX_MODE'] = 'YES'
    if self.config['use_semfix_syn']:
        environment['ANGELIX_USE_SEMFIX_SYN'] = 'YES'
    with cd(project.dir):
        return_code = subprocess.call(['instrument-repairable', project.buggy],
                                      stderr=self.subproc_output,
                                      stdout=self.subproc_output,
                                      env=environment)
    if return_code != 0:
        if self.config['ignore_trans_errors']:
            logger.warning("transformation of {} failed".format(relpath(project.dir)))
        else:
            logger.error("transformation of {} failed".format(relpath(project.dir)))
            raise TransformationError()

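# The Angelix transformation wrappers in this collection all read the same
# self.config mapping. A hypothetical minimal config dict -- every key below
# is consumed somewhere in these __call__ methods, but the values are only
# illustrative:
config = {
    'defect': ['if-conditions', 'assignments'],  # enabled defect classes
    'ignore_trivial': True,
    'semfix': False,
    'use_semfix_syn': False,
    'synthesis_global_vars': False,
    'synthesis_func_params': False,
    'synthesis_used_vars': True,
    'synthesis_ptr_vars': False,
    'init_uninit_vars': False,
    'ignore_trans_errors': False,
    'verbose': False,
    'test_timeout': 60,  # seconds; None means rely on KLEE's own timeout
}
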
def __call__(self, project, patch):
    src = basename(project.dir)
    logger.info('applying patch to {} source'.format(src))
    environment = dict(os.environ)
    dirpath = tempfile.mkdtemp()
    patch_file = join(dirpath, 'patch')
    with open(patch_file, 'w') as file:
        for e, p in patch.items():
            file.write('{} {} {} {}\n'.format(*e))
            file.write(p + "\n")
    if self.config['semfix']:
        environment['ANGELIX_SEMFIX_MODE'] = 'YES'
    environment['ANGELIX_PATCH'] = patch_file
    with cd(project.dir):
        return_code = subprocess.call(['apply-patch', project.buggy],
                                      stderr=self.subproc_output,
                                      stdout=self.subproc_output,
                                      env=environment)
    if return_code != 0:
        if self.config['ignore_trans_errors']:
            logger.error("transformation of {} failed".format(relpath(project.dir)))
        else:
            logger.error("transformation of {} failed".format(relpath(project.dir)))
            raise TransformationError()
    shutil.rmtree(dirpath)

def prepare_new_app(config):
    """Init a new app, build it, and launch it on a connected device.

    :param config:
    :return:
    """
    app_dir = "tmp/test_benchmarks/"
    config["app_dir"] = app_dir

    #: Create an app to test with
    if exists(app_dir):
        #: If using an emulator enable forwarding
        if "emulator-" in sh.adb("devices"):
            sh.adb("forward", "tcp:8888", "tcp:8888")
        return  # App already made
    # if config['app_built']:
    #     return  # App already made
    # else:
    #     #: Cleanup the old app
    #     cleanup_app(config)

    enamlnative = sh.Command("./enaml-native")
    print(enamlnative(
        "init",
        "Benchmarks",
        "com.codelv.enamlnative.benchmarks",
        "tmp/test_benchmarks/",
    ))
    config["app_built"] = True

    with cd(join(app_dir, "Benchmarks")):
        with source_activated("venv", "enaml-native") as enamlnative:
            #: Now build python
            print(enamlnative("build-python"))

            #: Build and do a gradle sync; this will NOT include jni and native libs!
            print(enamlnative("build-android"))

            #: Now build python (again) to put them in the correct spot
            print(enamlnative("build-python"))

            #: Now try to run it and see if it crashes
            #: Requires an emulator or device
            assert len(sh.adb("devices").strip().split("\n")) > 0, (
                "No device is connected, can't test the build!")

            #: Flush logcat
            sh.adb("logcat", "--clear")

            #: Do a build and run
            print(enamlnative("run-android"))

            #: If using an emulator enable forwarding
            if "emulator-" in sh.adb("devices"):
                sh.adb("forward", "tcp:8888", "tcp:8888")

def start_and_test_with_db(db):
    info('Setting up seafile server with %s database', db)
    server = ServerCtl(INSTALLDIR, db)
    server.setup()
    with server.run():
        info('Testing with %s database', db)
        with cd(SeafileServer().projectdir):
            shell('py.test', env=server.get_seaserv_envs())

def run_server(self, path, port):
    self.configure_network()
    LOG.info('Running server ...')
    command = '{} && {} && unset DJANGO_SETTINGS_MODULE && python manage.py runserver 0.0.0.0:{}'.format(
        utils.to_env(self.base_path), utils.cd(path), port)
    return utils.run_command_async(command)

def load_fixtures(self, path):
    LOG.info('Loading fixtures ...')
    for file in os.listdir(os.path.join(path, 'fixtures')):
        LOG.info('Loading fixtures: {}'.format(file))
        command = '{} && {} && unset DJANGO_SETTINGS_MODULE && {}'.format(
            utils.to_env(self.base_path), utils.cd(path),
            "python manage.py loaddata {}".format(os.path.join(path, 'fixtures', file)))
        utils.run_command(command)

def create_superuser(self, path):
    LOG.info('Creating superuser ...')
    command = '{} && {} && unset DJANGO_SETTINGS_MODULE && {}'.format(
        utils.to_env(self.base_path), utils.cd(path),
        """ echo "from django.contrib.auth.models import User; User.objects.create_superuser('admin', '*****@*****.**', 'admin')" | python manage.py shell """)
    return utils.run_command(command)

def install_requirements(self, path):
    if path:
        command = '{} && npm install'.format(utils.cd(path))
        out = utils.run_command(command)
        if out[1] == '':
            return out[2]
        else:
            return out[1]
    return ''

def predict(self, target_word, test_xml):
    if target_word in self.target_words:
        curr_dir = os.getcwd()
        target_model_dir = os.path.join(curr_dir, self.model_dir, target_word)
        test_xml = os.path.join(curr_dir, test_xml)
        with cd(self.ims_lib_path):
            out = "/tmp"
            command = "{} {} {} {}".format(self.test_sh, target_model_dir, test_xml, out)
            check_output(command.split())

def install_requirements(self, path):
    if path:
        command = ('{} && export JAVA_HOME=/usr/lib/jvm/java-7-openjdk-amd64 && '
                   'chmod 777 grailsw && ./grailsw compile'.format(utils.cd(path)))
        out = utils.run_command(command)
        if out[1] == '':
            return out[2]
        else:
            return out[1]
    return ''

def clean_source(path):
    """ Clean the source files in the path """
    with cd(path):
        for f in os.listdir('.'):
            if os.path.splitext(f)[-1] in ('.py', '.enaml'):
                os.remove(f)

def install_pcp():
    with utils.su("root"):
        utils.helper.execute_command("groupadd -r pcp")
        commands = ['useradd -c "Performance Co-Pilot"',
                    ' -g pcp -d /var/lib/pcp',
                    ' -M -r -s /usr/sbin/nologin pcp']
        utils.helper.execute_command(commands)
        configure = ['./configure --prefix=/usr --libexecdir=/usr/lib',
                     ' --sysconfdir=/etc --localstatedir=/var',
                     ' --libdir=/lib64/ --with-rcdir=/etc/init.d']
        with utils.cd("~/pcp"):
            utils.helper.execute_command(configure)
            utils.helper.execute_command("make")
            utils.helper.execute_command("make install")
            with utils.cd("src/pmdas/perfevent"):
                utils.helper.execute_command("sh install")
    print "please restart pcp to let the changes take effect"

def __call__(self, project, test, dump=None, trace=None, klee=False, env=os.environ):
    src = basename(project.dir)
    if klee:
        logger.info('running test \'{}\' of {} source with KLEE'.format(test, src))
    else:
        logger.info('running test \'{}\' of {} source'.format(test, src))

    environment = dict(env)
    if dump is not None:
        environment['ANGELIX_WITH_DUMPING'] = dump
        reachable_dir = join(dump, 'reachable')  # maybe it should be done in other place?
        os.mkdir(reachable_dir)
    if trace is not None:
        environment['ANGELIX_WITH_TRACING'] = trace
    if (trace is not None) or (dump is not None):
        environment['ANGELIX_RUN'] = 'angelix-run-test'
    if klee:
        environment['ANGELIX_RUN'] = 'angelix-run-klee'
        # using stub library to make lli work
        environment['LLVMINTERP'] = 'lli -load {}/libkleeRuntest.so'.format(
            os.environ['KLEE_LIBRARY_PATH'])
    environment['ANGELIX_WORKDIR'] = self.workdir
    environment['ANGELIX_TEST_ID'] = test
    dirpath = tempfile.mkdtemp()
    executions = join(dirpath, 'executions')
    environment['ANGELIX_RUN_EXECUTIONS'] = executions

    if self.config['verbose']:
        subproc_output = sys.stderr
    else:
        subproc_output = subprocess.DEVNULL

    with cd(project.dir):
        proc = subprocess.Popen(self.oracle + " " + test,
                                env=environment,
                                stdout=subproc_output,
                                stderr=subproc_output,
                                shell=True)
        if klee or self.config['test_timeout'] is None:  # KLEE has its own timeout
            code = proc.wait()
        else:
            code = proc.wait(timeout=self.config['test_timeout'])

    if dump is not None or trace is not None or klee:
        if exists(executions):
            with open(executions) as file:
                content = file.read()
                if len(content) > 1:
                    logger.warning("ANGELIX_RUN is executed multiple times by test {}".format(test))
        else:
            logger.warning("ANGELIX_RUN is not executed by test {}".format(test))

    return code == 0

def run_python_seafile_tests():
    python_seafile = Project('python-seafile')
    if not exists(python_seafile.projectdir):
        python_seafile.clone()
    shell('pip install -r {}/requirements.txt'.format(python_seafile.projectdir))
    with cd(python_seafile.projectdir):
        # install python-seafile because the seafdav tests need it
        shell('python setup.py install')
        shell('py.test')

def clean_cache(path):
    """ Clean the cache files in the path """
    with cd(path):
        if os.path.exists('__enamlcache__'):
            shutil.rmtree('__enamlcache__')
        if os.path.exists('__pycache__'):
            shutil.rmtree('__pycache__')
        for f in os.listdir('.'):
            if f.endswith('.pyc'):
                os.remove(f)

def start_server(cfg):
    with cd(cfg.installdir):
        shell('find . -maxdepth 2 | sort | xargs ls -lhd')
    seafile_sh = get_script(cfg, 'seafile.sh')
    shell('{} start'.format(seafile_sh))

    info('starting seahub')
    seahub_sh = get_script(cfg, 'seahub.sh')
    answers = [
        # admin email/pass
        ('admin email', ADMIN_USERNAME),
        ('admin password', ADMIN_PASSWORD),
        ('admin password again', ADMIN_PASSWORD),
    ]
    _answer_questions('{} start'.format(abspath(seahub_sh)), answers)

    with cd(cfg.installdir):
        shell('find . -maxdepth 2 | sort | xargs ls -lhd')
    # shell('sqlite3 ccnet/PeerMgr/usermgr.db "select * from EmailUser"', cwd=INSTALLDIR)
    shell('http -v localhost:8000/api2/server-info/ || true')
    # shell('http -v -f POST localhost:8000/api2/auth-token/ [email protected] password=adminadmin || true')
    shell('netstat -nltp')

def start_and_test_with_db(db):
    info('Setting up seafile server with %s database', db)
    server = ServerCtl(
        INSTALLDIR,
        db=db,
        # Use the newly built seaf-server (to avoid "make install" each
        # time when developing locally)
        seaf_server_bin=join(SeafileServer().projectdir, 'server/seaf-server'))
    server.setup()
    with server.run():
        info('Testing with %s database', db)
        with cd(SeafileServer().projectdir):
            shell('py.test', env=server.get_seaserv_envs())

def hg(self, local_dir, destination, args):
    self.info('# hg')
    with utils.cd(local_dir):
        try:
            ssh = ' '.join(self._ssh_options)
            args = ('hg', 'push', '--ssh', ssh, '-f', destination)
            ret = subprocess.call(args, close_fds=True)
            if ret != 0:
                self.warning_ssh()
            return ret
        except OSError:
            self.die('hg')

def copy_remote_files(path_to_vm, local, remote):
    '''
    copy files from the current machine to the remote one
    Before copy - rm remote tree
    '''
    if path_to_vm.startswith('ssh://'):
        user_passwd, ip = path_to_vm[len('ssh://'):].split('@', 1)
        if '+' in ip:
            ip, port = ip.split('+')
            port = int(port)  # split() yields a string; paramiko needs an int
        else:
            port = 22
        user, passwd = user_passwd.split(':', 1)

        fname = str(uuid.uuid1()) + '.tgz'
        out_fl = "/tmp/" + fname
        with cd(os.path.dirname(local)):
            subprocess.check_call(
                "tar cvzf {0} {1}".format(out_fl, os.path.basename(local)),
                shell=True)

        t = paramiko.Transport((ip, port))
        t.connect(username=user, password=passwd, hostkey=None)
        sftp = paramiko.SFTPClient.from_transport(t)
        try:
            rfl = os.path.join(remote, fname)
            logger.debug("Write {0} => {1}".format(out_fl, rfl))
            fl = sftp.open(rfl, "wb")
            fl.write(open(out_fl, 'rb').read())
            fl.close()
        finally:
            t.close()

        ssh = SSHCMDExecutor(ip, user, passwd, port=port)
        ssh.exec_simple_check('cd {0} ; tar xfz {1}'.format(remote, rfl))
        ssh.exec_simple_check('cd {0} ; rm {1}'.format(remote, rfl))
    else:
        rpath = os.path.join(path_to_vm, remote[1:])
        try:
            shutil.rmtree(rpath)
        except OSError:
            pass
        shutil.copytree(local, rpath)
        fake_dec_path = os.path.dirname(sys.modules[ExecTest.__module__].__file__)
        fake_dec_path = os.path.join(fake_dec_path, "..")
        fake_dec_rpath = os.path.join(rpath, "UnitTests")
        shutil.copytree(fake_dec_path, fake_dec_rpath)
    logger.debug("file tree copied to remote machine")

def configure(self):
    src = basename(self.dir)
    logger.info('configuring {} source'.format(src))
    if self.configure_cmd is None:
        return
    with cd(self.dir):
        return_code = subprocess.call(self.configure_cmd,
                                      shell=True,
                                      stderr=self.subproc_output,
                                      stdout=self.subproc_output)
    if return_code != 0:
        logger.warning("configuration of {} returned non-zero code".format(relpath(self.dir)))

def modify_nnfusion_rt(rt_dir):
    with cd(rt_dir):
        # static -> shared library
        command = ("sed -i '/cuda_add_library(${TARGET_NAME} ${SRC})/"
                   "s/(${TARGET_NAME} ${SRC})/(${TARGET_NAME} SHARED ${SRC})/g' "
                   "CMakeLists.txt")
        assert os.system(command) == 0
        # remove culibos from the link line
        command = ("sed -i '/target_link_libraries(${TARGET_NAME} cudnn culibos cublas)/"
                   "s/culibos//g' CMakeLists.txt")
        assert os.system(command) == 0
        # comment out the cudaDeviceReset() call in cuda_init()
        command = "sed -i '/cudaDeviceReset()/s:^://:' nnfusion_rt.cu"
        assert os.system(command) == 0

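# Taken together with compile_nnfusion_rt above, the intended order is
# presumably patch-then-build. A hedged usage sketch; the rt_dir value is a
# hypothetical nnfusion codegen output directory, not taken from the source:
rt_dir = "nnfusion_rt/cuda_codegen"
modify_nnfusion_rt(rt_dir)    # rewrite CMakeLists.txt and nnfusion_rt.cu
compile_nnfusion_rt(rt_dir)   # cmake . && make -j
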
def predict(self, target_word, test_xml):
    # LOGGER.info("{} - {}".format(target_word, "\n".join(self.target_words)))
    out = "/tmp"
    model_name = get_model_name(target_word)
    if model_name in self.models:
        curr_dir = os.getcwd()
        target_model_dir = os.path.join(curr_dir, self.model_dir, model_name)
        test_xml = os.path.join(curr_dir, test_xml)
        with cd(self.ims_lib_path):
            command = "{} {} {} {}".format(self.test_sh, target_model_dir, test_xml, out)
            check_output(command.split())
        shutil.move(os.path.join(out, "%s.result" % model_name),
                    os.path.join(out, "%s.result" % target_word))

def generate_citus_tarballs(citus_version):
    tmp_dir = 'tmp_citus_tarballs'
    citus_old_tarpath = os.path.abspath(
        os.path.join(tmp_dir, 'install-citus{}.tar'.format(citus_version)))
    citus_new_tarpath = os.path.abspath(
        os.path.join(tmp_dir, 'install-citusmaster.tar'))

    common.initialize_temp_dir_if_not_exists(tmp_dir)
    local_script_path = os.path.abspath('upgrade/generate_citus_tarballs.sh')
    with utils.cd(tmp_dir):
        subprocess.check_call([local_script_path, citus_version])

    return [citus_old_tarpath, citus_new_tarpath]

def perform_postgres_upgrade(old_bindir, new_bindir, old_datadir, new_datadir):
    for node_name in NODE_NAMES:
        base_new_data_path = os.path.abspath(new_datadir)
        base_old_data_path = os.path.abspath(old_datadir)
        with utils.cd(base_new_data_path):
            abs_new_data_path = os.path.join(base_new_data_path, node_name)
            abs_old_data_path = os.path.join(base_old_data_path, node_name)
            command = [
                os.path.join(new_bindir, 'pg_upgrade'),
                '--username', USER,
                '--old-bindir', old_bindir,
                '--new-bindir', new_bindir,
                '--old-datadir', abs_old_data_path,
                '--new-datadir', abs_new_data_path,
            ]
            subprocess.call(command)

def process_all_feeds(self):
    '''Note to self...possible problems with entries getting overwritten?
    '''
    abbr = self.abbr
    STATE_DATA = join(DATA, abbr, 'feeds')
    STATE_DATA_RAW = join(STATE_DATA, 'raw')
    _process_feed = self.process_feed
    with cd(STATE_DATA_RAW):
        for fn in os.listdir('.'):
            with open(fn) as f:
                entries = json.load(f)
            _process_feed(entries)

def start_fileserver(self):
    cmd = [
        "./fileserver",
        "-F", self.central_conf_dir,
        "-d", self.seafile_conf_dir,
        "-l", self.fileserver_log,
    ]
    fileserver_path = join(self.projectdir, 'fileserver')
    with cd(fileserver_path):
        shell("go build")
        self.fileserver_proc = shell(cmd, wait=False)

def download_files(self, netapp_id: str, uris: dict):
    cd(self.home + '/netapps')
    mkdir(netapp_id)
    for uri in uris:
        cd(self.home + '/netapps/' + netapp_id)
        mkdir(uri)
        cd(uri)
        download(url=uris[uri])

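# Unlike the context-manager `cd` used elsewhere in this collection,
# download_files above calls cd/mkdir/download as plain functions. Minimal
# sketches under that assumption -- names and semantics are guesses, not
# confirmed by the source:
import os
import urllib.request

def cd(path):
    # assumption: a bare chdir with ~ expansion and no restore on exit
    os.chdir(os.path.expanduser(path))

def mkdir(name):
    # assumption: create-if-missing semantics
    os.makedirs(name, exist_ok=True)

def download(url):
    # assumption: saves into the current directory under the URL's basename
    filename = url.rsplit('/', 1)[-1] or 'download'
    urllib.request.urlretrieve(url, filename)
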
def setup_sindarin(proc_dict):
    if not proc_dict.get('disabled', False):
        ut.logger.info('Setting up sindarins of ' + str(proc_dict))
        whizard_folder = 'whizard'
        with ut.cd(whizard_folder):
            base_sindarin = proc_dict['process'] + '.sin'
            template_sindarin = base_sindarin.replace('.sin', '-template.sin')
            check_for_valid_wizard_sindarin(proc_dict, template_sindarin)
            integration_sindarin = base_sindarin.replace('.sin', '-integrate.sin')
            template_present = os.path.isfile(template_sindarin)
            scan = proc_dict['purpose'] == 'scan'
            test_soft = proc_dict['purpose'] == 'test_soft'
            if template_present:
                if proc_dict['purpose'] == 'integration' or scan or test_soft:
                    create_integration_sindarin(
                        integration_sindarin, template_sindarin,
                        proc_dict['process'],
                        proc_dict['adaption_iterations'],
                        proc_dict.get('integration_iterations', ' '))
                    multiply_sindarins(integration_sindarin, proc_dict,
                                       proc_dict.get('scale_variation', False),
                                       proc_dict['nlo_type'])
                elif proc_dict['purpose'] == 'histograms' or proc_dict['purpose'] == 'events':
                    create_simulation_sindarin(
                        base_sindarin, template_sindarin,
                        proc_dict['process'],
                        proc_dict['adaption_iterations'],
                        proc_dict.get('integration_iterations', ' '),
                        proc_dict['events_per_batch'])
                    multiply_sindarins(base_sindarin, proc_dict,
                                       proc_dict.get('scale_variation', False),
                                       proc_dict['nlo_type'])
            else:
                if scan:
                    ut.fatal('You have to supply ' + template_sindarin + ' for a scan')
                else:
                    fallback = integration_sindarin + ' and ' + base_sindarin
                    if os.path.isfile(integration_sindarin) and os.path.isfile(base_sindarin):
                        ut.logger.info('Didnt find ' + template_sindarin +
                                       ', will use ' + fallback)
                        return
                    else:
                        ut.fatal('Didnt find ' + template_sindarin + ' nor ' + fallback)
    else:
        ut.logger.info('Skipping ' + proc_dict['process'] + ' because it is disabled')

def __call__(self, project, expressions):
    src = basename(project.dir)
    logger.info('instrumenting suspicious of {} source'.format(src))
    environment = dict(os.environ)
    dirpath = tempfile.mkdtemp()
    suspicious_file = join(dirpath, 'suspicious')
    try:
        # 'with' closes the file on exit; no explicit close() is needed
        with open(suspicious_file, 'w') as file:
            for e in expressions:
                file.write('{} {} {} {}\n'.format(*e))
    except IOError:
        raise Exception("Error when writing to suspicious file %s!\n" % suspicious_file)
    if self.config['semfix']:
        environment['ANGELIX_SEMFIX_MODE'] = 'YES'
    if self.config['synthesis_global_vars']:
        environment['ANGELIX_GLOBAL_VARIABLES'] = 'YES'
    if self.config['synthesis_func_params']:
        environment['ANGELIX_FUNCTION_PARAMETERS'] = 'YES'
    if self.config['synthesis_used_vars']:
        environment['ANGELIX_USED_VARIABLES'] = 'YES'
    if self.config['synthesis_ptr_vars']:
        environment['ANGELIX_POINTER_VARIABLES'] = 'YES'
    if self.config['init_uninit_vars']:
        environment['ANGELIX_INIT_UNINIT_VARS'] = 'YES'
    environment['ANGELIX_EXTRACTED'] = self.extracted  # dir for extracted angelix forest?
    environment['ANGELIX_SUSPICIOUS'] = suspicious_file
    with cd(project.dir):
        return_code = subprocess.call(['instrument-suspicious', project.buggy],
                                      stderr=self.subproc_output,
                                      stdout=self.subproc_output,
                                      env=environment)
    if return_code != 0:
        if self.config['ignore_trans_errors']:
            logger.warning("transformation of {} failed".format(relpath(project.dir)))
        else:
            logger.error("transformation of {} failed".format(relpath(project.dir)))
            raise TransformationError()
    shutil.rmtree(dirpath)
