def build_extension(self, extension):
    """Configure, compile and pseudo-install one CMake-based extension module.

    :param extension: module name; also the subdirectory name under both
        ``self.build_dir`` and ``self.sources_dir``.
    :raises DistutilsSetupError: if the configure, compile or install step
        exits non-zero.
    """
    log.info("Building module %s..." % extension)
    # Prepare folders: recreate a clean per-module build directory.
    os.chdir(self.build_dir)
    module_build_dir = os.path.join(self.build_dir, extension)
    if os.path.exists(module_build_dir):
        log.info("Deleting module build folder %s..." % module_build_dir)
        rmtree(module_build_dir)
    log.info("Creating module build folder %s..." % module_build_dir)
    os.mkdir(module_build_dir)
    os.chdir(module_build_dir)
    module_src_dir = os.path.join(self.sources_dir, extension)
    # Build module
    cmake_cmd = [
        "cmake",
        "-G", self.make_generator,
        "-DQT_QMAKE_EXECUTABLE=%s" % self.qmake_path,
        "-DBUILD_TESTS=False",
        "-DDISABLE_DOCSTRINGS=True",
        "-DCMAKE_BUILD_TYPE=%s" % self.build_type,
        "-DCMAKE_INSTALL_PREFIX=%s" % self.install_dir,
        module_src_dir
    ]
    # CMake spells the Python variables differently for Python 3 builds.
    if sys.version_info[0] > 2:
        cmake_cmd.append("-DPYTHON3_EXECUTABLE=%s" % self.py_executable)
        cmake_cmd.append("-DPYTHON3_INCLUDE_DIR=%s" % self.py_include_dir)
        cmake_cmd.append("-DPYTHON3_LIBRARY=%s" % self.py_library)
        if self.build_type.lower() == 'debug':
            cmake_cmd.append("-DPYTHON3_DBG_EXECUTABLE=%s" % self.py_executable)
            cmake_cmd.append("-DPYTHON3_DEBUG_LIBRARY=%s" % self.py_library)
    else:
        cmake_cmd.append("-DPYTHON_EXECUTABLE=%s" % self.py_executable)
        cmake_cmd.append("-DPYTHON_INCLUDE_DIR=%s" % self.py_include_dir)
        cmake_cmd.append("-DPYTHON_LIBRARY=%s" % self.py_library)
        if self.build_type.lower() == 'debug':
            cmake_cmd.append("-DPYTHON_DEBUG_LIBRARY=%s" % self.py_library)
    # Shiboken-specific configuration flags.
    if extension.lower() == "shiboken":
        cmake_cmd.append("-DCMAKE_INSTALL_RPATH_USE_LINK_PATH=yes")
        if sys.version_info[0] > 2:
            cmake_cmd.append("-DUSE_PYTHON3=ON")
    log.info("Configuring module %s (%s)..." % (extension, module_src_dir))
    if run_process(cmake_cmd, log) != 0:
        raise DistutilsSetupError("Error configuring " + extension)
    log.info("Compiling module %s..." % extension)
    if run_process([self.make_path], log) != 0:
        raise DistutilsSetupError("Error compiling " + extension)
    log.info("Installing module %s..." % extension)
    # "install/fast" skips the dependency re-check of a plain "install".
    if run_process([self.make_path, "install/fast"], log) != 0:
        raise DistutilsSetupError("Error pseudo installing " + extension)
    # Restore the working directory we changed at the top of the method.
    os.chdir(self.script_dir)
def write(file_path, tag_list, attr_name='kMDItemUserTags'):
    """Writes the list of tags to xattr field of ``file_path``

    :param file_path: full path to file
    :type file_path: ``unicode``
    :param tag_list: values to write to attributes
    :type tag_list: ``list``
    :param attr_name: full name of OS X file metadata attribute
    :type attr_name: ``unicode``
    """
    tag_data = ['<string>{}</string>'.format(tag) for tag in tag_list]
    # BUG FIX: the adjacent string literals previously concatenated with no
    # separating spaces, producing `PUBLIC"-//Apple..."` which is not
    # well-formed XML (whitespace is required around the PUBLIC literals).
    tag_data.insert(0, ('<!DOCTYPE plist PUBLIC '
                        '"-//Apple//DTD PLIST 1.0//EN" '
                        '"http://www.apple.com/DTDs/PropertyList-1.0.dtd">'
                        '<plist version="1.0"><array>'))
    tag_data.append('</array></plist>')
    tag_text = ''.join(tag_data)
    xattr = "com.apple.metadata:{}".format(attr_name)
    # Other attributes you might want to try:
    # ['kMDItemOMUserTags', 'kOMUserTags',
    #  'kMDItemkeywords', 'kMDItemFinderComment']
    cmd = ['xattr', '-w', xattr, tag_text.encode("utf8"), file_path]
    return utils.run_process(cmd)
def build(path):
    """Run ``make`` in *path*.

    :param path: directory containing the Makefile
    :returns: captured stderr text when it mentions "error", else None
    """
    p = subprocess.Popen(["make"], cwd=path,
                         stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    # BUG FIX: do not call p.wait() before draining the pipes -- with both
    # stdout and stderr redirected to PIPE, wait() can deadlock once the OS
    # pipe buffer fills.  utils.run_process() both reads the output and
    # reaps the child.
    stdout, stderr = utils.run_process(p)
    if stderr and stderr.lower().find("error") != -1:
        return stderr
    return None
def build_patchelf(self):
    """Compile the bundled patchelf tool; no-op on non-Linux platforms."""
    if not sys.platform.startswith("linux"):
        return
    log.info("Building patchelf...")
    src = os.path.join(self.sources_dir, "patchelf")
    compile_cmd = ["g++", "%s/patchelf.cc" % (src), "-o", "patchelf"]
    if run_process(compile_cmd) != 0:
        raise DistutilsSetupError("Error building patchelf")
def run(self):
    """Perform the standard install, then run the PySide post-install script."""
    _install.run(self)
    # Custom script we run at the end of installing.  When self.root is
    # set we are being "installed" into a staging directory (e.g. a temp
    # directory for bdist_wininst), so the post-install step must be
    # skipped; likewise on a dry run.
    if self.dry_run or self.root:
        return
    filename = os.path.join(self.install_scripts, "pyside_postinstall.py")
    if not os.path.isfile(filename):
        raise RuntimeError("Can't find '%s'" % (filename,))
    print("Executing post install script '%s'..." % filename)
    run_process([os.path.abspath(sys.executable), filename])
def run(path, option):
    """Launch ./dpc_info with *option* and validate its report.

    Returns None on success, otherwise the captured stdout.
    """
    proc = subprocess.Popen(
        ["./dpc_info", option], cwd=path,
        stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    out, err = utils.run_process(proc)
    # NOTE(review): on any stderr output the *stdout* text is returned,
    # presumably so the caller sees the tool's report -- confirm intended.
    if err:
        return out
    if parse(out, option):
        return None
    return out
def run(path):
    """Execute ./ze_sysman in *path*; None on success, else its stdout."""
    proc = subprocess.Popen(
        ["./ze_sysman"], cwd=path,
        stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    out, err = utils.run_process(proc)
    # NOTE(review): stdout (not stderr) is returned when stderr is
    # non-empty -- confirm intended.
    if err or not parse(out):
        return out
    return None
def run(path):
    """Run ./gpu_perfmon_set with "4 -t"; None when the tool reports success."""
    proc = subprocess.Popen(
        ["./gpu_perfmon_set", "4", "-t"], cwd=path,
        stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    out, err = utils.run_process(proc)
    if err:
        return err
    # "PefMon" (sic) -- presumably matches the tool's own spelling of its
    # success banner; confirm against gpu_perfmon_set before "fixing".
    if "GPU PefMon configuration is completed" not in out:
        return out
    return None
def config(path):
    """Run cmake configuration in *path* (build dir, sources in ``..``).

    :returns: CMake's stderr when it contains "CMake Error", else None
    """
    p = subprocess.Popen(
        ["cmake", "-DCMAKE_BUILD_TYPE=" + utils.get_build_flag(), ".."],
        cwd=path, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    # BUG FIX: p.wait() before reading the pipes can deadlock once the OS
    # pipe buffer fills; utils.run_process() drains both pipes and reaps
    # the child in a single step.
    stdout, stderr = utils.run_process(p)
    if stderr and stderr.find("CMake Error") != -1:
        return stderr
    return None
def run(path, option):
    """Run ./cl_gemm_itt with *option*; None when the GEMM validates as CORRECT."""
    proc = subprocess.Popen(
        ["./cl_gemm_itt", option, "1024", "1"], cwd=path,
        stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    out, err = utils.run_process(proc)
    if err:
        return err
    return None if " CORRECT" in out else out
def run(self):
    """Standard install step followed by the PySide post-install hook."""
    _install.run(self)
    # Custom script we run at the end of installing.  A non-empty
    # self.root means we are being "installed" into some other directory
    # than Python itself (e.g. a temp dir for bdist_wininst), in which
    # case the installer must *not* run; same for dry runs.
    if not self.dry_run and not self.root:
        filename = os.path.join(self.install_scripts,
                                "pyside_postinstall.py")
        if not os.path.isfile(filename):
            raise RuntimeError("Can't find '%s'" % (filename, ))
        print("Executing post install script '%s'..." % filename)
        interpreter = os.path.abspath(sys.executable)
        run_process([interpreter, filename])
def prepare_sources():
    """Refresh every submodule under ``sources/``: re-clone all of them
    except the CI-tested one (which is replaced by the Coin storage copy),
    then check out the requested branch, falling back to 'dev'.
    """
    # Pass 1: move aside and re-clone each existing submodule that is not
    # the one currently under CI test.
    for module_name, repo_name in submodules.items():
        module_dir = os.path.join("sources", module_name)
        if os.path.exists(module_dir) and repo_name != QT_CI_TESTED_SUBMODULE:
            sub_repo = git_server + "/pyside/" + repo_name + ".git"
            try:
                shutil.move(module_dir, module_dir + "_removed")
            except Exception as e:
                raise Exception("!!!!!!!!!!!!! Failed to rename %s " % module_dir)
            git_checkout_cmd = ["git", "clone", sub_repo, module_dir]
            if run_process(git_checkout_cmd) != 0:
                raise Exception("!!!!!!!!!!!!! Failed to clone the git submodule %s" % sub_repo)
    print("************************* CLONED **********************")
    # Pass 2: for the tested module, swap in the Coin storage sources; for
    # all others, check out the requested branch inside the clone.
    for module_name, repo_name in submodules.items():
        print("***** Preparing %s" % module_name)
        if repo_name == QT_CI_TESTED_SUBMODULE:
            print("Skipping tested module %s and using sources from Coin storage instead" % module_name)
            module_dir = os.path.join("sources", module_name)
            storage_dir = os.path.join("..", QT_CI_TESTED_SUBMODULE)
            try:
                shutil.move(module_dir, module_dir + "_replaced_as_tested")
            except Exception as e:
                raise Exception("!!!!!!!!!!!!! Failed to rename %s " % module_dir)
            shutil.move(storage_dir, module_dir)
        else:
            module_dir = os.path.join("sources", module_name)
            os.chdir(module_dir)
            # Make sure the branch exists, if not use dev
            _branch = SUBMODULE_BRANCH
            git_list_branch_cmd = ["git", "ls-remote", "origin", "refs/heads/" + _branch]
            # git on win32 needs shell=True to be found via PATHEXT.
            shell = (sys.platform == "win32")
            result = Popen(git_list_branch_cmd, stdout=PIPE, shell=shell)
            # Empty ls-remote output means the branch does not exist upstream.
            if len(result.communicate()[0].split()) == 0:
                print("Warning: Requested %s branch doesn't exist so we'll fall back to 'dev' branch instead"
                      % SUBMODULE_BRANCH)
                _branch = "dev"
            print("Checking out submodule %s to branch %s" % (module_name, _branch))
            git_checkout_cmd = ["git", "checkout", _branch]
            if run_process(git_checkout_cmd) != 0:
                print("Failed to initialize the git submodule %s" % module_name)
            else:
                print("Submodule %s has branch %s checked out" % (module_name, _branch))
            # Return to the script directory after working inside the module.
            os.chdir(script_dir)
def open_urls():
    """Open each URL listed in urls.txt on the device via adb, pausing for
    user confirmation between URLs."""
    with open("urls.txt", 'r') as file:
        for link in file:
            # BUG FIX: file iteration leaves a trailing newline on each
            # line, which was previously embedded inside the quoted -d
            # argument of the adb command; strip it (and skip blank lines).
            link = link.strip()
            if not link:
                continue
            for line in run_process(
                "adb shell am start -a android.intent.action.VIEW "
                "-c android.intent.category.BROWSABLE -d \"{}\"".format(link)
            ):
                if line:
                    logger.info(line)
            input("Next...")
def execute(self):
    """Build the site with Hugo, exporting branch/version metadata through
    the environment and routing non-current versions into a ``vX.Y``
    subdirectory of the destination and base URL."""
    env = {}
    env['CONTENT_DIR'] = self._content_dir
    if self.get_current_branch():
        env['CURRENT_BRANCH'] = self.get_current_branch()
    if self.get_current_version():
        env['CURRENT_VERSION'] = self.get_current_version()
    if self._versions:
        env['VERSIONS'] = ','.join(['v' + v for v in self._versions])
    ver_dir = ''
    ver_title = self.get_title()
    if self._versions:
        if not self.get_current_version():
            raise Exception(
                "Current version not set when versions are %s" % self._versions)
        # The first (newest) version is served from the root; older
        # versions get a "v<version>" subdirectory and a suffixed title.
        if self.get_current_version() != self._versions[0]:
            ver_dir = 'v' + self.get_current_version()
            ver_title = ver_title + ' ' + self.get_current_version()
    env['HUGO_TITLE'] = ver_title
    dest = self.get_dest_dir()
    base_url = self.get_host()
    base_url = os.path.join(base_url, self.get_base_url_prefix())
    if ver_dir:
        dest = os.path.join(dest, ver_dir)
        base_url = base_url + '/' + ver_dir
    temp_config_file = self.generate_config_toml()
    cmd = [
        'hugo',
        '--minify',
        '--config=%s,%s' % (self._base_config, temp_config_file),
        '--contentDir=%s' % self._content_dir,
        '--destination=%s' % dest,
        '--baseURL=%s' % base_url
    ]
    # Clean the destination first so stale pages never survive a rebuild.
    run_process(['rm', '-rf', dest])
    run_process(cmd, env)
    print("=== Build result: %s" % dest)
def extract():
    """Unpack the NISMOD scenario/model archives into their data folders,
    link the socio-economic files into place, and install the ET module."""
    copy_lads()
    print("Extracting")
    # Each entry maps a source archive to its extraction destination.
    datasets = [
        {
            "src": "/data/scenarios/climate_v1.zip",
            "dest": str(NISMOD_SCENARIOS_PATH.joinpath("climate/")),
        },
        {
            "src": "/data/scenarios/population_v1.zip",
            "dest": str(NISMOD_SCENARIOS_PATH.joinpath("population/")),
        },
        {
            "src": "/data/scenarios/prices_v2.zip",
            "dest": str(NISMOD_SCENARIOS_PATH.joinpath("prices/")),
        },
        {
            "src": "/data/scenarios/socio-economic-1.0.1.zip",
            "dest": str(NISMOD_SOCIO_ECONOMIC_PATH),
        },
        {
            "src": "/data/scenarios/ev_transport_trips_v0.1.zip",
            "dest": str(NISMOD_SCENARIOS_PATH.joinpath("ev_transport_trips/")),
        },
        {
            "src": "/data/et_module/et_module_v0.5.zip",
            "dest": str(NISMOD_DATA_PATH.joinpath("et_module/")),
        },
    ]
    for data in datasets:
        print("Extracting - " + data["src"] + " - to - " + data["dest"])
        unpack_archive(data["src"], data["dest"], "zip")
    link_files(
        Path.joinpath(NISMOD_SOCIO_ECONOMIC_PATH, "socio-economic-1.0.1/"),
        NISMOD_SOCIO_ECONOMIC_PATH,
    )
    print("Installing ET Module")
    # NOTE(review): run_process is handed a shell string here ("cd ... &&
    # ..."); presumably it executes with shell semantics -- confirm.
    run_process("cd " + str(NISMOD_PATH) + " && ./provision/install_et_module.sh " + str(NISMOD_PATH))
def run(path, option):
    """Run ./sysmon with *option* and parse its report; None on success."""
    proc = subprocess.Popen(
        ["./sysmon", option], cwd=path,
        stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    out, err = utils.run_process(proc)
    if err:
        return err
    if not out:
        return "stdout is empty"
    return None if parse(out, option) else out
def run(path):
    """Run ./ze_metric_info and parse its listing; None on success."""
    proc = subprocess.Popen(
        ["./ze_metric_info"], cwd=path,
        stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    out, err = utils.run_process(proc)
    if err:
        return err
    if not out:
        return "stdout is empty"
    return None if parse(out) else out
def run(path):
    """Run ./cl_gemm_inst; None when the GEMM validates as CORRECT."""
    proc = subprocess.Popen(
        ["./cl_gemm_inst", "1024", "1"], cwd=path,
        stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    out, err = utils.run_process(proc)
    if err:
        return err
    if not out:
        return "stdout is empty"
    return None if " CORRECT" in out else out
def run(path):
    """Run ./ze_metric_info; None when it completes and its output parses."""
    proc = subprocess.Popen(
        ["./ze_metric_info"], cwd=path,
        stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    out, err = utils.run_process(proc)
    # NOTE(review): stdout (not stderr) is returned when stderr is
    # non-empty -- confirm intended.
    if err:
        return out
    if "Job is successfully completed" not in out:
        return out
    return None if parse(out) else out
def run(path):
    """Profile the cl_gemm sample via ./cl_gpu_metrics; None on success."""
    gemm_dir = utils.get_sample_build_path("cl_gemm")
    gemm_bin = os.path.join(gemm_dir, "cl_gemm")
    proc = subprocess.Popen(
        ["./cl_gpu_metrics", gemm_bin, "gpu", "1024", "1"], cwd=path,
        stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    out, err = utils.run_process(proc)
    if not out:
        return err
    if " CORRECT" not in out:
        return out
    # parse() validates the profiler report captured on stderr.
    return None if parse(err) else err
def build_patchelf(self):
    """Build the bundled patchelf executable; only meaningful on Linux."""
    if not sys.platform.startswith('linux'):
        return
    log.info("Building patchelf...")
    patchelf_src = os.path.join(self.sources_dir, "patchelf")
    compile_cmd = ["g++", "%s/patchelf.cc" % (patchelf_src), "-o", "patchelf"]
    if run_process(compile_cmd) != 0:
        raise DistutilsSetupError("Error building patchelf")
def pre_process(self, content_dir, args, version=None):
    """Filter *content_dir* once per configured scope into a fresh
    ``_output`` directory and return [out_dir, base_url_prefix, dest_dir]
    triples for each scope."""
    results = []
    for scope_pair in self.get_scopes():
        scope = scope_pair[0]
        base_url_prefix = scope_pair[1]
        dest_dir = scope_pair[2]
        out_dir = os.path.join('./_output',
                               'content_' + args.edition + "_" + scope)
        if version:
            out_dir = out_dir + "_" + version
        # clean out_dir first
        run_process('rm -rf %s' % out_dir)
        pipeline = processor.DirProcess(content_dir, out_dir)
        (pipeline
            .include_by_scope(scope)
            .include_by_oem(False)
            .include_by_edition(args.edition)
            .start())
        results.append([out_dir, base_url_prefix, dest_dir])
    return results
def compile_classes_and_tests(student):
    """Compile the student's classes and tests with javac into ClassFiles,
    reporting failures as 'Compilation Error'."""
    output_dir = 'StudentHomework/ClassFiles'
    additional_cps = 'Libraries/junit-platform-console-standalone-1.3.1.jar'
    # Classes first, then tests, on one javac invocation.
    sources = (get_full_path_files('StudentHomework/SourceFiles/Classes')
               + get_full_path_files('StudentHomework/SourceFiles/Tests'))
    command = 'javac -d %s -cp %s %s' % (output_dir, additional_cps,
                                         ' '.join(sources))
    return run_process(command, student, 'Compilation Error')
def run(path, option):
    """Run ./ze_intercept over the ze_gemm sample; None on success."""
    gemm_dir = utils.get_sample_build_path("ze_gemm")
    gemm_bin = os.path.join(gemm_dir, "ze_gemm")
    proc = subprocess.Popen(
        ["./ze_intercept", option, gemm_bin, "1024", "1"], cwd=path,
        stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    out, err = utils.run_process(proc)
    # An empty stderr means the interceptor produced no trace at all.
    if not err:
        return out
    if " CORRECT" not in out:
        return out
    if "Job is successfully completed" not in err:
        return err
    return None
def update_branch(self, branch):
    """Check out *branch* and merge from upstream when the local revision
    differs from the upstream one."""
    run_process('git checkout -q %s' % branch)
    upstream_rev = run_process('git rev-parse @{u}')
    local_rev = run_process('git rev-parse @')
    if upstream_rev != local_rev:
        run_process('git merge -q upstream/%s' % branch)
def run_tests(student, report_name):
    """Run the student's JUnit 5 tests with the JaCoCo agent attached, then
    render the coverage CSV report.

    Return codes: 0 success, -1 tests failed, -2 jacoco failed, -3 both.
    (JUnit 4 used to be driven per-test-class via org.junit.runner.JUnitCore
    with junit-4.13 + hamcrest on the classpath.)
    """
    error_code = 0
    run_cmd = 'java -javaagent:Libraries/jacocoagent.jar -jar Libraries/junit-platform-console-standalone-1.3.1.jar --cp StudentHomework/ClassFiles --scan-class-path --reports-dir Results/%s/%s' % (student, report_name)
    if not run_process(run_cmd, student, 'Runtime Error'):
        error_code = -1
    report_cmd = 'java -jar Libraries/jacococli.jar report jacoco.exec --classfiles StudentHomework/ClassFiles --sourcefiles StudentHomework/SourceFiles/Classes --csv Results/%s/%s/%s' % (student, report_name, report_name)
    if not run_process(report_cmd, student, 'Runtime Error'):
        error_code += -2
    return error_code
def __exit__(self, type, value, traceback):
    """Tear down the execution environment: stop the monitoring agent,
    emit JUnit reports for performance and (optionally) comparison runs,
    and terminate the process with an aggregate exit status.

    Note: always calls sys.exit() -- 0 when both summaries pass, 3 otherwise.
    """
    if self.use:
        # Ask the monitoring agent (started elsewhere) to shut down.
        stop_monitoring_server = "{} {} --stop".format(
            sys.executable, self.monitoring_agent)
        run_process(stop_monitoring_server)
    junit_reporter = JunitConverter(self.reporter, self.artifacts_dir,
                                    'performance_results')
    junit_reporter.generate_junit_report()
    junit_compare = self.compare_reporter_generator.gen()
    junit_compare_reporter = None
    if junit_compare:
        junit_compare_reporter = JunitConverter(junit_compare,
                                                self.artifacts_dir,
                                                'comparison_results')
        junit_compare_reporter.generate_junit_report()
    # report_summary is also fed the (possibly None) comparison reporter.
    compare_exit_code = ExecutionEnv.report_summary(
        junit_compare_reporter, "Comparison Test suite")
    exit_code = ExecutionEnv.report_summary(
        junit_reporter, "Performance Regression Test suite")
    sys.exit(0 if 0 == exit_code == compare_exit_code else 3)
def run(path):
    """Run ./ze_metric_streamer over the ze_gemm sample; None on success."""
    gemm_dir = utils.get_sample_build_path("ze_gemm")
    gemm_bin = os.path.join(gemm_dir, "ze_gemm")
    proc = subprocess.Popen(
        ["./ze_metric_streamer", gemm_bin, "1024", "1"], cwd=path,
        stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    out, err = utils.run_process(proc)
    if err:
        return err
    # Both the correctness marker and the completion banner must appear.
    for marker in (" CORRECT", "Job is successfully completed"):
        if marker not in out:
            return out
    return None if parse(out) else out
def info(self):
    """Dictionary of metadata attribute information.

    :returns: `description`, `aliases`, `id`, and `name` of instance.
    :rtype: ``dict``
    """
    keys = ('id', 'name', 'description', 'aliases')
    item_cmd = "mdimport -A | grep '{}'".format(self.name)
    item_data = utils.run_process(item_cmd)
    if not item_data:
        return item_data
    # First matching line holds the four tab-tab separated columns.
    columns = [col.replace("'", "") for col in item_data[0].split('\t\t')]
    return dict(zip(keys, columns))
def attributes_generator(): """Generate dictionaries with data for all OS X metadata attributes :returns: data on all OS X metadata attributes :rtype: ``generator`` of ``dict``s """ # get all OS X metadata attributes attributes = utils.run_process(['mdimport', '-A']) # prepare key names for the four columns keys = ('id', 'name', 'description', 'aliases') # create dicts, mapping ``keys`` to an item's columns for attribute in attributes: attribute_data = [item.replace("'", "") for item in attribute.split('\t\t')] keyed_data = itertools.izip(keys, attribute_data) yield dict(keyed_data)
def get_selected_device():
    """List attached adb devices and let the user pick one by index.

    :returns: the chosen device id, falling back to the first device when
        the entered index is invalid.
    """
    devices = []
    for line in run_process("adb devices".split()):
        line = line.replace("\r", "").replace("\n", "")
        # Skip the "List of devices attached" header and blank lines;
        # strip the trailing "\t<state>" column from each entry.
        if "List" not in line and line:
            devices.append(re.sub(r"\t\S*", "", line))
    logger.info("Available device IDs:")
    for i, device in enumerate(devices):
        logger.info("%d\t\t%s" % (i, device))
    dev_id = int(input("Input device ID: "))
    # BUG FIX: the original indexed devices[dev_id] before any validation,
    # so an out-of-range ID raised IndexError instead of falling back to
    # the first device.
    if 0 <= dev_id < len(devices) and devices[dev_id]:
        return devices[dev_id]
    return devices[0]
def list(file_path):
    """Wrapper for OS X `mdls` command.

    :param file_path: full path to file
    :type file_path: ``unicode``
    :returns: dictionary of metadata attributes and values
    :rtype: ``dict``

    .. note:: shadows the ``list`` builtin within this module.
    """
    output = utils.run_process(['mdls', file_path])
    # get metadata into list, allowing for nested attributes
    md = [[y.strip() for y in line.split('=')] for line in output]
    # iterate over list to deal with nested attributes
    # then build dictionary
    listed_item, md_dict = [], {}
    for item in md:
        # item is pair
        if len(item) == 2:
            k, v = item
            # if second item is parens, then first is key
            # (values collected into listed_item until the closing paren)
            if v == '(':
                listed_key = utils.clean_attribute(k)
            # else, it's a simple `key: value` pair
            else:
                # attempt to convert to `int`
                try:
                    val = int(v)
                except (ValueError, TypeError):
                    val = v.replace('"', '')
                # convert shell nulls to Python `None`
                if val in ('""', '(null)'):
                    val = None
                key = utils.clean_attribute(k)
                md_dict[key] = val
        # single item is part of a nested attribute
        elif len(item) == 1 and item[0] != ')':
            value = item[0].replace('"', '')
            listed_item.append(value)
        # single item marks end of a nested attribute
        # NOTE(review): assumes a '(' line always precedes the ')' line;
        # otherwise ``listed_key`` would be unbound here.
        elif len(item) == 1 and item[0] == ')':
            md_dict[listed_key] = listed_item
            listed_item = []
    return md_dict
def run(path):
    """Run ./cl_debug_info against the cl_gemm sample; None on success."""
    gemm_dir = utils.get_sample_build_path("cl_gemm")
    gemm_bin = os.path.join(gemm_dir, "cl_gemm")
    proc = subprocess.Popen(
        ["./cl_debug_info", gemm_bin, "gpu", "1024", "1"],
        cwd=path, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    out, err = utils.run_process(proc)
    # An empty stderr means no debug listing was produced at all.
    if not err:
        return out
    if " CORRECT" not in out:
        return out
    # The listing (on stderr) must contain both source-level constructs
    # and assembly mnemonics.
    for token in ("__kernel", "for"):
        if token not in err:
            return err
    for token in ("add", "mov", "send"):
        if token not in err:
            return err
    return None
def run(path, option):
    """Run the omp_gemm sample with the hot-regions OMPT tool preloaded.

    :param option: target selector passed to the sample ("gpu" or "cpu")
    :returns: None on success, else the offending stdout/stderr text
    """
    app_folder = utils.get_sample_build_path("omp_gemm")
    app_file = os.path.join(app_folder, "omp_gemm")
    e = utils.add_env(None, "OMP_TOOL_LIBRARIES", "./libomp_hot_regions.so")
    p = subprocess.Popen([app_file, option, "1024", "1"], env=e,
                         cwd=path, stdout=subprocess.PIPE,
                         stderr=subprocess.PIPE)
    stdout, stderr = utils.run_process(p)
    # An empty stderr means the tool emitted no region report.
    if not stderr:
        return stdout
    if stdout.find(" CORRECT") == -1:
        return stdout
    # BUG FIX: the original compared *stderr* (the tool's output) against
    # "gpu"/"cpu"; the target device is carried by *option*, so those
    # checks could never fire.
    if option == "gpu" and stderr.find("Target") == -1:
        return stderr
    if option == "cpu" and stderr.find("Parallel") == -1:
        return stderr
    if not parse(stderr):
        return stderr
    return None
def run(path, option):
    """Run ./ze_metric_query over dpc_gemm or ze_gemm; None on success."""
    if option == "dpc":
        sample_dir = utils.get_sample_build_path("dpc_gemm")
        sample_bin = os.path.join(sample_dir, "dpc_gemm")
        cmd = ["./ze_metric_query", sample_bin, "gpu", "1024", "1"]
    else:
        sample_dir = utils.get_sample_build_path("ze_gemm")
        sample_bin = os.path.join(sample_dir, "ze_gemm")
        cmd = ["./ze_metric_query", sample_bin, "1024", "1"]
    proc = subprocess.Popen(cmd, cwd=path,
                            stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    out, err = utils.run_process(proc)
    if not out:
        return err
    if " CORRECT" not in out:
        return out
    # parse() validates the query report captured on stderr.
    return None if parse(err) else err
def run(path, option):
    """Run ./ze_hot_functions over dpc_gemm or ze_gemm; None on success."""
    if option == "dpc":
        sample_dir = utils.get_sample_build_path("dpc_gemm")
        sample_bin = os.path.join(sample_dir, "dpc_gemm")
        cmd = ["./ze_hot_functions", sample_bin, "gpu", "1024", "1"]
    else:
        sample_dir = utils.get_sample_build_path("ze_gemm")
        sample_bin = os.path.join(sample_dir, "ze_gemm")
        cmd = ["./ze_hot_functions", sample_bin, "1024", "1"]
    proc = subprocess.Popen(cmd, cwd=path,
                            stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    out, err = utils.run_process(proc)
    if err:
        return err
    for marker in (" CORRECT", "Job is successfully completed"):
        if marker not in out:
            return out
    return None if parse(out) else out
def run_test_suite(artifacts_dir, test_dir, pattern, exclude_pattern,
                   jmeter_path, env_name, monit, compare_local):
    """Collect test suites, run them and generate reports

    Each suite directory under *test_dir* matching *pattern* (and not
    *exclude_pattern*) is executed through Taurus (``bzt``), with results
    converted to JUnit via the X2Junit wrapper.  Exits with code 3 when no
    suites are collected.
    """
    logger.info("Artifacts will be stored in directory %s", artifacts_dir)
    test_dirs = get_sub_dirs(test_dir, exclude_list=[],
                             include_pattern=pattern,
                             exclude_pattern=exclude_pattern)
    if not test_dirs:
        logger.info("No test cases are collected...Exiting.")
        sys.exit(3)
    else:
        logger.info("Collected tests %s", test_dirs)
    with ExecutionEnv(MONITORING_AGENT, artifacts_dir, env_name,
                      compare_local, monit) as prt:
        # Make the local "agents" package importable inside the spawned shell.
        pre_command = 'export PYTHONPATH={}:$PYTHONPATH;'.format(
            os.path.join(str(ROOT_PATH), "agents"))
        for suite_name in tqdm(test_dirs, desc="Test Suites"):
            with Timer("Test suite {} execution time".format(suite_name)) as t:
                suite_artifacts_dir = os.path.join(artifacts_dir, suite_name)
                options_str = get_taurus_options(suite_artifacts_dir,
                                                 jmeter_path)
                env_yaml_path = os.path.join(test_dir, suite_name,
                                             "environments",
                                             "{}.yaml".format(env_name))
                # Fall back to no per-environment config file when this
                # suite does not provide one.
                env_yaml_path = "" if not os.path.exists(
                    env_yaml_path) else env_yaml_path
                test_file = os.path.join(test_dir, suite_name,
                                         "{}.yaml".format(suite_name))
                with x2junit.X2Junit(suite_name, suite_artifacts_dir,
                                     prt.reporter, t, env_name) as s:
                    s.code, s.err = run_process("{} bzt {} {} {} {}".format(
                        pre_command, options_str, test_file, env_yaml_path,
                        GLOBAL_CONFIG_PATH))
                    update_taurus_metric_files(suite_artifacts_dir, test_file)
def find(query_expression, only_in=None):
    """Wrapper for OS X `mdfind` command.

    :param query_expression: file metadata query expression
    :type query_expression: :class:`MDExpression` object or
        :class:`MDComparison` object.
    :param only_in: limit search scope to directory tree path
    :type only_in: ``unicode``
    :returns: full paths to files of any results
    :rtype: ``list``

    .. note:: uses the Python 2-only ``unicode`` builtin.
    """
    cmd = ['mdfind']
    # add option to limit search scope
    if only_in:
        cmd.append('-onlyin')
        cmd.append(only_in)
    # convert `query_expression` into file metadata query expression syntax,
    # single-quoted for the shell
    query = "'" + unicode(query_expression) + "'"
    cmd.append(query)
    # run `mdfind` command as shell string, since otherwise it breaks
    #print(' '.join(cmd))
    return utils.run_process(' '.join(cmd))
elif format == "sff": if frg == "": infile = f1 else: infile = frg readpaths = [] filtreadpaths = [] for lib in readlibs: for read in lib.reads: readpaths.append("%s/Preprocess/in/"%(settings.rundir)+read.fname) filtreadpaths.append("%s/Preprocess/out/"%(settings.rundir)+read.fname) if "Preprocess" in forcesteps: for path in readpaths: utils.run_process(settings, "touch %s"%(path),"RunPipeline") os.system("rm %s%sLogs%s*.ok"%(settings.rundir,os.sep,os.sep)) utils.Settings.readpaths = readpaths asmfiles = [] for lib in readlibs: if "Assemble" in forcesteps: utils.run_process(settings, \ "touch %s/Preprocess/out/lib%d.seq"%(settings.rundir,lib.id),\ "RunPipeline") asmfiles.append("%s/Preprocess/out/lib%d.seq"%(settings.rundir,lib.id)) if "MapReads" in forcesteps: utils.run_process(settings, "rm %s/Assemble/out/*.contig.cvg"%(settings.rundir), "RunPipeline") utils.run_process(settings, "rm %s/Logs/mapreads.ok"%(settings.rundir), "RunPipeline")
def build_extension(self, extension):
    """Configure, compile, document and pseudo-install one CMake-based
    extension module.

    :param extension: module name; also the subdirectory name under both
        ``self.build_dir`` and ``self.sources_dir``.
    :raises DistutilsSetupError: on a failing configure, compile, doc or
        install step.
    """
    log.info("Building module %s..." % extension)
    # Prepare folders: recreate a clean per-module build directory.
    os.chdir(self.build_dir)
    module_build_dir = os.path.join(self.build_dir, extension)
    if os.path.exists(module_build_dir):
        log.info("Deleting module build folder %s..." % module_build_dir)
        rmtree(module_build_dir)
    log.info("Creating module build folder %s..." % module_build_dir)
    os.makedirs(module_build_dir)
    os.chdir(module_build_dir)
    module_src_dir = os.path.join(self.sources_dir, extension)
    # Build module
    cmake_cmd = [
        OPTION_CMAKE,
        "-G", self.make_generator,
        "-DQT_QMAKE_EXECUTABLE=%s" % self.qmake_path,
        "-DBUILD_TESTS=%s" % self.build_tests,
        "-DDISABLE_DOCSTRINGS=True",
        "-DCMAKE_BUILD_TYPE=%s" % self.build_type,
        "-DCMAKE_INSTALL_PREFIX=%s" % self.install_dir,
        module_src_dir
    ]
    # CMake spells the Python variables differently for Python 3 builds.
    if sys.version_info[0] > 2:
        cmake_cmd.append("-DPYTHON3_EXECUTABLE=%s" % self.py_executable)
        cmake_cmd.append("-DPYTHON3_INCLUDE_DIR=%s" % self.py_include_dir)
        cmake_cmd.append("-DPYTHON3_LIBRARY=%s" % self.py_library)
        if self.build_type.lower() == 'debug':
            cmake_cmd.append("-DPYTHON3_DBG_EXECUTABLE=%s" % self.py_executable)
            cmake_cmd.append("-DPYTHON3_DEBUG_LIBRARY=%s" % self.py_library)
    else:
        cmake_cmd.append("-DPYTHON_EXECUTABLE=%s" % self.py_executable)
        cmake_cmd.append("-DPYTHON_INCLUDE_DIR=%s" % self.py_include_dir)
        cmake_cmd.append("-DPYTHON_LIBRARY=%s" % self.py_library)
        if self.build_type.lower() == 'debug':
            cmake_cmd.append("-DPYTHON_DEBUG_LIBRARY=%s" % self.py_library)
    if sys.platform == 'win32':
        cmake_cmd.append("-DCMAKE_DEBUG_POSTFIX=_d")
    cmake_cmd.append("-DCMAKE_INSTALL_RPATH_USE_LINK_PATH=yes")
    if sys.version_info[0] > 2:
        cmake_cmd.append("-DUSE_PYTHON3=ON")
    if sys.platform == 'darwin':
        cmake_cmd.append('-DALTERNATIVE_QT_INCLUDE_DIR=' + self.qtinfo.headers_dir)
        if OPTION_OSXARCH:
            # also tell cmake which architecture to use
            cmake_cmd.append("-DCMAKE_OSX_ARCHITECTURES:STRING={}".format(OPTION_OSXARCH))
    log.info("Configuring module %s (%s)..." % (extension, module_src_dir))
    if run_process(cmake_cmd) != 0:
        raise DistutilsSetupError("Error configuring " + extension)
    log.info("Compiling module %s..." % extension)
    cmd_make = [self.make_path]
    if OPTION_JOBS:
        cmd_make.append(OPTION_JOBS)
    if run_process(cmd_make) != 0:
        raise DistutilsSetupError("Error compiling " + extension)
    # NOTE(review): unlike the sibling variants, the "doc" target is built
    # unconditionally here rather than only for shiboken -- confirm intended.
    log.info("Generating Shiboken documentation %s..." % extension)
    if run_process([self.make_path, "doc"]) != 0:
        raise DistutilsSetupError("Error generating documentation " + extension)
    log.info("Installing module %s..." % extension)
    # "install/fast" skips the dependency re-check of a plain "install".
    if run_process([self.make_path, "install/fast"]) != 0:
        raise DistutilsSetupError("Error pseudo installing " + extension)
    # Restore the working directory we changed at the top of the method.
    os.chdir(self.script_dir)
# NOTE(review): fragment of a config-file parsing loop; ``inf``, ``format``,
# ``mated``, ``mmin``, ``mmax``, ``interleaved``, ``innie``, ``linkerType``,
# ``readobjs``, ``readlibs``, ``selected_programs`` and ``settings`` are
# expected to be defined earlier in the file.
f1 = ""
f2 = ""
currlibno = 0
newlib = ""
usecontigs = False
libadded = False
for line in inf:
    line = line.replace("\n","")
    # Skip comment lines entirely.
    if "#" in line:
        continue
    elif "asmcontigs:" in line:
        # Pre-assembled contigs supplied: copy them in and disable assembly.
        asmc = line.replace("\n","").split("\t")[-1]
        if len(asmc) <= 2:
            continue
        utils.run_process(settings, "cp %s %s/Assemble/out/%s"%(asmc,settings.rundir,"proba.asm.contig"),"RunPipeline")
        usecontigs = True
        selected_programs["assemble"] = "none"
        bowtie_mapping = 1
    elif "format:" in line:
        # A new "format:" line closes out the previous library, if any.
        if f1 and not libadded:
            nread1 = utils.Read(format,f1,mated,interleaved)
            readobjs.append(nread1)
            nread2 = ""
            nlib = utils.readLib(format,mmin,mmax,nread1,nread2,mated,interleaved,innie,linkerType)
            readlibs.append(nlib)
        libadded = False
        format = line.replace("\n","").split("\t")[-1]
    elif "mated:" in line:
        mated = utils.str2bool(line.replace("\n","").split("\t")[-1])
def build_extension(self, extension):
    """Configure, compile, document (shiboken only) and pseudo-install one
    CMake-based extension module.

    :param extension: module name; also the subdirectory name under both
        ``self.build_dir`` and ``self.sources_dir``.
    :raises DistutilsSetupError: on a failing configure, compile, doc or
        install step.
    """
    log.info("Building module %s..." % extension)
    # Prepare folders: recreate a clean per-module build directory.
    os.chdir(self.build_dir)
    module_build_dir = os.path.join(self.build_dir, extension)
    if os.path.exists(module_build_dir):
        log.info("Deleting module build folder %s..." % module_build_dir)
        rmtree(module_build_dir)
    log.info("Creating module build folder %s..." % module_build_dir)
    os.makedirs(module_build_dir)
    os.chdir(module_build_dir)
    module_src_dir = os.path.join(self.sources_dir, extension)
    # Build module
    cmake_cmd = [
        OPTION_CMAKE,
        "-G", self.make_generator,
        "-DQT_QMAKE_EXECUTABLE=%s" % self.qmake_path,
        "-DBUILD_TESTS=%s" % self.build_tests,
        "-DDISABLE_DOCSTRINGS=True",
        "-DCMAKE_BUILD_TYPE=%s" % self.build_type,
        "-DCMAKE_INSTALL_PREFIX=%s" % self.install_dir,
        module_src_dir
    ]
    # CMake spells the Python variables differently for Python 3 builds.
    if sys.version_info[0] > 2:
        cmake_cmd.append("-DPYTHON3_EXECUTABLE=%s" % self.py_executable)
        cmake_cmd.append("-DPYTHON3_INCLUDE_DIR=%s" % self.py_include_dir)
        cmake_cmd.append("-DPYTHON3_LIBRARY=%s" % self.py_library)
        if self.build_type.lower() == 'debug':
            cmake_cmd.append("-DPYTHON3_DBG_EXECUTABLE=%s" % self.py_executable)
            cmake_cmd.append("-DPYTHON3_DEBUG_LIBRARY=%s" % self.py_library)
    else:
        cmake_cmd.append("-DPYTHON_EXECUTABLE=%s" % self.py_executable)
        cmake_cmd.append("-DPYTHON_INCLUDE_DIR=%s" % self.py_include_dir)
        cmake_cmd.append("-DPYTHON_LIBRARY=%s" % self.py_library)
        if self.build_type.lower() == 'debug':
            cmake_cmd.append("-DPYTHON_DEBUG_LIBRARY=%s" % self.py_library)
    if sys.platform == "win32" and self.build_type.lower() == 'debug':
        cmake_cmd.append("-DCMAKE_DEBUG_POSTFIX=_d")
    # Shiboken-specific configuration flags.
    if extension.lower() == "shiboken":
        cmake_cmd.append("-DCMAKE_INSTALL_RPATH_USE_LINK_PATH=yes")
        if sys.version_info[0] > 2:
            cmake_cmd.append("-DUSE_PYTHON3=ON")
    if sys.platform == 'darwin':
        if 'QTDIR' in os.environ:
            # If the user has QTDIR set, then use it as a prefix for an extra include path
            cmake_cmd.append('-DALTERNATIVE_QT_INCLUDE_DIR={0}/include'.format(os.environ['QTDIR']))
            #:{0}/lib I had problems specifying both dirs. Is it needed? Is there some other way to do it? --Robin
        else:
            # Otherwise assume it is a standard install and add the
            # Frameworks folder as a workaround for a cmake include problem
            # http://neilweisenfeld.com/wp/120/building-pyside-on-the-mac
            # https://groups.google.com/forum/#!msg/pyside/xciZZ4Hm2j8/CUmqfJptOwoJ
            cmake_cmd.append('-DALTERNATIVE_QT_INCLUDE_DIR=/Library/Frameworks')
        if OPTION_OSXARCH:
            # also tell cmake which architecture to use
            cmake_cmd.append("-DCMAKE_OSX_ARCHITECTURES:STRING={}".format(OPTION_OSXARCH))
    log.info("Configuring module %s (%s)..." % (extension, module_src_dir))
    if run_process(cmake_cmd, log) != 0:
        raise DistutilsSetupError("Error configuring " + extension)
    log.info("Compiling module %s..." % extension)
    cmd_make = [self.make_path]
    if OPTION_JOBS:
        cmd_make.append(OPTION_JOBS)
    if run_process(cmd_make, log) != 0:
        raise DistutilsSetupError("Error compiling " + extension)
    # Documentation is only generated for the shiboken module.
    if extension.lower() == "shiboken":
        log.info("Generating Shiboken documentation %s..." % extension)
        if run_process([self.make_path, "doc"], log) != 0:
            raise DistutilsSetupError("Error generating documentation " + extension)
    log.info("Installing module %s..." % extension)
    # "install/fast" skips the dependency re-check of a plain "install".
    if run_process([self.make_path, "install/fast"], log) != 0:
        raise DistutilsSetupError("Error pseudo installing " + extension)
    # Restore the working directory we changed at the top of the method.
    os.chdir(self.script_dir)
def build_extension(self, extension):
    """Configure, compile, and pseudo-install one extension module with CMake.

    The module is built out-of-source in a fresh folder named after the
    extension under ``self.build_dir``, then installed with
    ``make install/fast`` into ``self.install_dir``.

    Raises DistutilsSetupError if any configure/compile/install step fails.

    Fixes applied (review):
    - The macOS ``-DALTERNATIVE_QT_INCLUDE_DIR`` workaround was attached via
      ``elif`` to the Python-3 check inside the shiboken-only branch, so it
      was silently skipped on macOS with Python 3 and for non-shiboken
      modules. It is now an independent platform check at function level,
      matching the sibling build_extension variant in this file.
    - ``os.mkdir`` replaced with ``os.makedirs`` (as in the sibling variant)
      so missing intermediate directories do not abort the build.
    """
    log.info("Building module %s..." % extension)
    # Prepare folders: start from a clean per-module build directory.
    os.chdir(self.build_dir)
    module_build_dir = os.path.join(self.build_dir, extension)
    if os.path.exists(module_build_dir):
        log.info("Deleting module build folder %s..." % module_build_dir)
        rmtree(module_build_dir)
    log.info("Creating module build folder %s..." % module_build_dir)
    os.makedirs(module_build_dir)
    os.chdir(module_build_dir)
    module_src_dir = os.path.join(self.sources_dir, extension)
    # Build module: assemble the cmake configure command line.
    cmake_cmd = [
        OPTION_CMAKE,
        "-G", self.make_generator,
        "-DQT_QMAKE_EXECUTABLE=%s" % self.qmake_path,
        "-DBUILD_TESTS=False",
        "-DDISABLE_DOCSTRINGS=True",
        "-DCMAKE_BUILD_TYPE=%s" % self.build_type,
        "-DCMAKE_INSTALL_PREFIX=%s" % self.install_dir,
        module_src_dir
    ]
    # The CMake scripts use PYTHON3_* variable names for Python 3 and
    # PYTHON_* for Python 2.
    if sys.version_info[0] > 2:
        cmake_cmd.append("-DPYTHON3_EXECUTABLE=%s" % self.py_executable)
        cmake_cmd.append("-DPYTHON3_INCLUDE_DIR=%s" % self.py_include_dir)
        cmake_cmd.append("-DPYTHON3_LIBRARY=%s" % self.py_library)
        if self.build_type.lower() == 'debug':
            cmake_cmd.append("-DPYTHON3_DBG_EXECUTABLE=%s" % self.py_executable)
            cmake_cmd.append("-DPYTHON3_DEBUG_LIBRARY=%s" % self.py_library)
    else:
        cmake_cmd.append("-DPYTHON_EXECUTABLE=%s" % self.py_executable)
        cmake_cmd.append("-DPYTHON_INCLUDE_DIR=%s" % self.py_include_dir)
        cmake_cmd.append("-DPYTHON_LIBRARY=%s" % self.py_library)
        if self.build_type.lower() == 'debug':
            cmake_cmd.append("-DPYTHON_DEBUG_LIBRARY=%s" % self.py_library)
    # Windows debug builds use the conventional "_d" library suffix.
    if sys.platform == "win32" and self.build_type.lower() == 'debug':
        cmake_cmd.append("-DCMAKE_DEBUG_POSTFIX=_d")
    if extension.lower() == "shiboken":
        cmake_cmd.append("-DCMAKE_INSTALL_RPATH_USE_LINK_PATH=yes")
        if sys.version_info[0] > 2:
            cmake_cmd.append("-DUSE_PYTHON3=ON")
    if sys.platform == 'darwin':
        # Work round cmake include problem
        # http://neilweisenfeld.com/wp/120/building-pyside-on-the-mac
        # https://groups.google.com/forum/#!msg/pyside/xciZZ4Hm2j8/CUmqfJptOwoJ
        cmake_cmd.append('-DALTERNATIVE_QT_INCLUDE_DIR=/Library/Frameworks')
    log.info("Configuring module %s (%s)..." % (extension, module_src_dir))
    if run_process(cmake_cmd, log) != 0:
        raise DistutilsSetupError("Error configuring " + extension)
    log.info("Compiling module %s..." % extension)
    if run_process([self.make_path], log) != 0:
        raise DistutilsSetupError("Error compiling " + extension)
    # Shiboken additionally builds its documentation target.
    if extension.lower() == "shiboken":
        log.info("Generating Shiboken documentation %s..." % extension)
        if run_process([self.make_path, "doc"], log) != 0:
            raise DistutilsSetupError("Error generating documentation " + extension)
    log.info("Installing module %s..." % extension)
    # "install/fast" skips the dependency re-check of a plain "install".
    if run_process([self.make_path, "install/fast"], log) != 0:
        raise DistutilsSetupError("Error pseudo installing " + extension)
    # Restore the working directory for the caller.
    os.chdir(self.script_dir)
print("Option --version can not be used together with option --ignore-git") sys.exit(1) if not os.path.isdir(".git"): print("Option --version is available only when pyside-setup was cloned from git repository") sys.exit(1) if not OPTION_VERSION in submodules: print("""Invalid version specified %s Use --list-versions option to get list of available versions""" % OPTION_VERSION) sys.exit(1) __version__ = OPTION_VERSION # Initialize, pull and checkout submodules if os.path.isdir(".git") and not OPTION_IGNOREGIT and not OPTION_ONLYPACKAGE: print("Initializing submodules for PySide version %s" % __version__) git_update_cmd = ["git", "submodule", "update", "--init"] if run_process(git_update_cmd) != 0: raise DistutilsSetupError("Failed to initialize the git submodules") git_pull_cmd = ["git", "submodule", "foreach", "git", "fetch", "origin"] if run_process(git_pull_cmd) != 0: raise DistutilsSetupError("Failed to initialize the git submodules") git_pull_cmd = ["git", "submodule", "foreach", "git", "pull", "origin", "master"] if run_process(git_pull_cmd) != 0: raise DistutilsSetupError("Failed to initialize the git submodules") submodules_dir = os.path.join(script_dir, "sources") for m in submodules[__version__]: module_name = m[0] module_version = m[1] print("Checking out submodule %s to branch %s" % (module_name, module_version)) module_dir = os.path.join(submodules_dir, module_name) os.chdir(module_dir) git_checkout_cmd = ["git", "checkout", module_version]
def rpath_cmd(srcpath):
    """Rewrite the rpath of *srcpath* to '$ORIGIN/' using patchelf.

    Raises RuntimeError if the patchelf invocation returns non-zero.
    """
    patch_args = [patchelf_path, '--set-rpath', '$ORIGIN/', srcpath]
    exit_code = run_process(patch_args)
    if exit_code != 0:
        raise RuntimeError("Error patching rpath in " + srcpath)
def build_extension(self, extension):
    """Configure, compile, and pseudo-install one extension module with CMake.

    Supports a per-module skip flag: if ``<build_dir>/<extension>-skip``
    exists the build is bypassed entirely. The module is built
    out-of-source under ``self.build_dir`` and installed with
    ``make install/fast`` into ``self.install_dir``.

    Raises DistutilsSetupError if any configure/compile/install step fails.

    NOTE(review): indentation below was reconstructed from a collapsed
    source line -- confirm the nesting against the original file.
    """
    # calculate the subrepos folder name
    folder = get_extension_folder(extension)
    log.info("Building module %s..." % extension)
    # Prepare folders: start from a clean per-module build directory.
    os.chdir(self.build_dir)
    module_build_dir = os.path.join(self.build_dir, extension)
    # Presence of "<module_build_dir>-skip" means: do not build this module.
    skipflag_file = module_build_dir + '-skip'
    if os.path.exists(skipflag_file):
        log.info("Skipping %s because %s exists" % (extension, skipflag_file))
        return
    if os.path.exists(module_build_dir):
        log.info("Deleting module build folder %s..." % module_build_dir)
        try:
            rmtree(module_build_dir)
        except Exception as e:
            # Best-effort cleanup: a failed delete is reported but ignored.
            print('***** problem removing "{}"'.format(module_build_dir))
            print('ignored error: {}'.format(e))
    log.info("Creating module build folder %s..." % module_build_dir)
    if not os.path.exists(module_build_dir):
        os.makedirs(module_build_dir)
    os.chdir(module_build_dir)
    module_src_dir = os.path.join(self.sources_dir, folder)
    # Build module: assemble the cmake configure command line.
    cmake_cmd = [
        OPTION_CMAKE,
        "-G", self.make_generator,
        "-DQT_QMAKE_EXECUTABLE=%s" % self.qmake_path,
        "-DBUILD_TESTS=%s" % self.build_tests,
        "-DDISABLE_DOCSTRINGS=True",
        "-DQt5Help_DIR=%s" % self.qtinfo.docs_dir,
        "-DCMAKE_BUILD_TYPE=%s" % self.build_type,
        "-DCMAKE_INSTALL_PREFIX=%s" % self.install_dir,
        module_src_dir
    ]
    cmake_cmd.append("-DPYTHON_EXECUTABLE=%s" % self.py_executable)
    cmake_cmd.append("-DPYTHON_INCLUDE_DIR=%s" % self.py_include_dir)
    cmake_cmd.append("-DPYTHON_LIBRARY=%s" % self.py_library)
    if self.build_type.lower() == 'debug':
        cmake_cmd.append("-DPYTHON_DEBUG_LIBRARY=%s" % self.py_library)
    if extension.lower() == "shiboken2":
        cmake_cmd.append("-DCMAKE_INSTALL_RPATH_USE_LINK_PATH=yes")
        if sys.version_info[0] > 2:
            cmake_cmd.append("-DUSE_PYTHON_VERSION=3.4")
    if sys.platform == 'darwin':
        # Point cmake at the Qt headers reported by qtinfo on macOS.
        cmake_cmd.append('-DALTERNATIVE_QT_INCLUDE_DIR=' + self.qtinfo.headers_dir)
        if OPTION_OSXARCH:
            # also tell cmake which architecture to use
            cmake_cmd.append("-DCMAKE_OSX_ARCHITECTURES:STRING={}".format(OPTION_OSXARCH))
    log.info("Configuring module %s (%s)..." % (extension, module_src_dir))
    if run_process(cmake_cmd) != 0:
        raise DistutilsSetupError("Error configuring " + extension)
    log.info("Compiling module %s..." % extension)
    # OPTION_JOBS carries the parallel-build flag (e.g. "-j4") when set.
    cmd_make = [self.make_path]
    if OPTION_JOBS:
        cmd_make.append(OPTION_JOBS)
    if run_process(cmd_make) != 0:
        raise DistutilsSetupError("Error compiling " + extension)
    # Shiboken2 additionally builds its documentation target.
    if extension.lower() == "shiboken2":
        log.info("Generating Shiboken documentation %s..." % extension)
        if run_process([self.make_path, "doc"]) != 0:
            raise DistutilsSetupError("Error generating documentation " + extension)
    log.info("Installing module %s..." % extension)
    # "install/fast" skips the dependency re-check of a plain "install".
    if run_process([self.make_path, "install/fast"]) != 0:
        raise DistutilsSetupError("Error pseudo installing " + extension)
    # Restore the working directory for the caller.
    os.chdir(self.script_dir)