def start(d): tomcatBin = os.path.join(d['tomcatHome'], "bin") tomcatHome = os.path.normpath(d['tomcatHome']) tomcatBin = os.path.normpath(tomcatBin) print "Set CATALINA_HOME=%s" % tomcatHome os.environ["CATALINA_HOME"] = tomcatHome tomcatJavaOpts = d['tomcatJavaOpts'] print "Set JAVA_OPTS=%s" % tomcatJavaOpts os.environ["JAVA_OPTS"] = tomcatJavaOpts if d['platform'] == "windows": actions = [ '%s\\startup.bat -Xmx1024m -XX:PermSize=256m -XX:MaxPermSize=512' % (tomcatBin) # starts tomcat ] else: actions = [ '%s/startup.sh -Xmx1024m -XX:PermSize=256m -XX:MaxPermSize=512' % (tomcatBin) # starts tomcat ] util.call(actions) #start(config.getConfigDir())
def compile_shader(shader):
    """Compile one GLSL *shader* from shader_source_directory into SPIR-V
    using the glslangValidator built under glslang_path_build."""
    validator = os.path.join(glslang_path_build, 'StandAlone', 'glslangValidator')
    source = os.path.join(shader_source_directory, shader)
    util.call([validator, '-V', source, '-o', get_spirv_path(shader)])
def radix_sort(collection, key=None, base=10):
    """Stable LSD radix sort of *collection*, returning a new sorted list.

    :param collection: iterable of items with non-negative integer keys
    :param key: optional key extractor (applied via ``call``); defaults to
        ``identity``
    :param base: radix used for digit extraction
    :return: new list, sorted ascending by key

    Fixes over the previous version:
    - empty input returned [] instead of raising ValueError from max()
    - digit count is computed in *base* (the old np.log10 was only correct
      for base 10 and failed for max_key == 0)
    - each pass now consumes the previous pass's output; before, every pass
      re-read the original order, so multi-digit keys sorted incorrectly
    """
    if key is None:
        key = identity
    collection = list(collection)
    if not collection:
        return []
    max_key = call(key, max(collection, key=key))
    # Number of digits of max_key in the given base (at least 1, so the
    # pass loop below always runs and `result` is always defined).
    positions_count = 1
    k = max_key
    while k >= base:
        k //= base
        positions_count += 1
    result = collection
    for position in range(positions_count):
        # Counting sort on the digit at `position` (stable).
        counts = [0] * base
        for item in collection:
            counts[_get_digit(call(key, item), base, position)] += 1
        # Prefix sums turn counts into exclusive end positions per digit.
        for i in range(1, base):
            counts[i] += counts[i - 1]
        result = [None] * len(collection)
        # Reverse iteration + decrement keeps equal digits in input order.
        for item in reversed(collection):
            digit = _get_digit(call(key, item), base, position)
            counts[digit] -= 1
            result[counts[digit]] = item
        # Feed this pass's output into the next pass.
        collection = result
    return result
def download_tarball(target_dir):
    """Download tarball at url (global) to target_dir.

    priority for target directory:
    - target_dir set from args
    - current directory if options.conf['package'] exists and any of the
      options match what has been detected.
    - curdir/name

    Fix: the docstring previously appeared *after* the ``global`` statement,
    so it was a discarded string expression rather than the function's
    docstring; it now sits first.
    """
    global giturl
    tarfile = os.path.basename(url)
    target = os.path.join(os.getcwd(), name)
    if os.path.exists(os.path.join(os.getcwd(), 'options.conf')):
        config_f = configparser.ConfigParser(interpolation=None)
        config_f.read('options.conf')
        if "package" in config_f.sections():
            # Any matching identity field means we were re-run in a package
            # directory, so download into the current directory.
            if (config_f["package"].get("name") == name
                    or config_f["package"].get("url") == url
                    or config_f["package"].get("archives") == " ".join(archives)):
                target = os.getcwd()
            if "giturl" in config_f["package"]:
                giturl = config_f["package"].get("giturl")
    if target_dir:
        target = target_dir
    build.download_path = target
    call("mkdir -p {}".format(build.download_path))
    # locate the tarball locally or download
    return check_or_get_file(url, tarfile)
def run_test(self, name, path, num_cores, args=[]):
    """Compile, simulate and check the output of a single test."""
    source = path + '/' + name + '.sire'
    expected = path + '/' + name + '.output'
    cores = '{}'.format(num_cores)
    try:
        # Compile the program for an MPI target on num_cores cores.
        (exit, output) = call([COMPILE, source, '-t', 'mpi', '-n', cores] + args)
        self.assertTrue(exit)
        # Simulate execution across num_cores processes.
        (exit, output) = call([SIMULATE, '-np', cores, 'a.out'] + SIM_FLAGS)
        self.assertTrue(exit)
        # Compare the simulator output with the expected .output file.
        self.assertEqual(output.strip(), read_file(expected).strip())
    except Exception as e:
        sys.stderr.write('Error: {}\n'.format(e))
        raise
    except:
        # Non-Exception raises (e.g. KeyboardInterrupt) still get reported.
        sys.stderr.write("Unexpected error: {}\n".format(sys.exc_info()[0]))
        raise
def makeimages(equations, make_svg=False, make_png=True, png_dpi=96):
    """Typeset *equations* with LaTeX and attach image paths/geometry to them.

    Writes a single .tex document, runs latex once, then converts the
    resulting .dvi with dvisvgm and/or dvipng.  Each equation object gains
    svg_path/svg_geometry and/or png_path/png_geometry attributes.
    Returns early (silently) if there is nothing to do or latex fails.
    """
    N = len(equations)
    if N == 0 or not (make_svg or make_png):
        return
    remove_old_files()
    basename = 'equations'
    filename = basename + '.tex'
    fp = os.path.join(latex_dir, filename)
    with open(fp, 'w') as f:
        f.write(latex_document(equations))
    try:
        util.call(['latex', '-halt-on-error', filename], cwd=latex_dir)
    except subprocess.CalledProcessError:
        # LaTeX failed; leave equations untouched.
        return
    if make_svg:
        # One SVG per DVI page; %f-%4p names them equations-0001.svg, ...
        dvisvgm_cmd = [
            'dvisvgm', '--no-fonts', '--exact-bbox', '--page=-',
            '--bbox=preview', '--output=%f-%4p.svg', basename + '.dvi'
        ]
        dvisvgm_result = util.call(dvisvgm_cmd, cwd=latex_dir)
        geometry = parse_dvisvgm_stderr(dvisvgm_result.stderr)
        # N + 1: the document apparently emits one extra leading page
        # before the N equation pages (index 0 is skipped below).
        assert len(geometry) == N + 1
        images = []
        for filename in os.listdir(latex_dir):
            if filename.endswith('.svg'):
                images.append(filename)
        assert len(images) == N + 1
        images.sort()
        for i in range(N):
            # Skip image/geometry 0; equation i maps to page i + 1.
            equations[i].svg_path = os.path.join(latex_dir, images[i + 1])
            equations[i].svg_geometry = geometry[
                i + 1]  # Tuple of width, height, depth
    if make_png:
        # Mirror of the SVG branch using dvipng; geometry comes from stdout.
        dvipng_cmd = [
            'dvipng', '--width', '--height', '--depth', '-D',
            str(png_dpi), '-T', 'tight', '-z', '9', '--gamma', '3', '-q',
            '-o', basename + '-%04d.png', basename + '.dvi'
        ]
        dvipng_result = util.call(dvipng_cmd, cwd=latex_dir)
        geometry = parse_dvipng_stdout(dvipng_result.stdout)
        assert len(geometry) == N + 1
        images = []
        for filename in os.listdir(latex_dir):
            if filename.endswith('.png'):
                images.append(filename)
        assert len(images) == N + 1
        images.sort()
        for i in range(N):
            equations[i].png_path = os.path.join(latex_dir, images[i + 1])
            equations[i].png_geometry = geometry[
                i + 1]  # Tuple of width, height, depth
def clone(self, name, **kwargs):
    """Clone this ZFS dataset to *name*.

    :param name: target dataset name
    :param kwargs: ZFS properties passed to ``zfs clone`` as ``-o key=value``
    :return: a FileSystem wrapping the new dataset

    Fix: iterate ``kwargs.items()`` — iterating the dict itself yields only
    keys, so the ``k, v`` unpacking raised ValueError whenever a property
    was supplied.
    """
    args = []
    for k, v in kwargs.items():
        args += ['-o', '{0}={1}'.format(k, v)]
    call(['/sbin/zfs', 'clone'] + args + [self.name, name])
    return FileSystem(name)
def configureOpenDJ(d):
    """Configure an OpenDJ/OpenDS directory server from config dict *d*.

    Stops the server, installs the custom schema files, restarts it, then
    applies dsconfig settings and loads the generated data via ldapmodify.
    All steps are run as shell commands through util.call.
    """
    cwd = os.getcwd()
    print 'Configure openDJ (openDS), working directory: %s' % cwd
    # tup[:-3] below yields (dsHome, ldapHost, ldapDN, ldapPW) for the
    # 4-placeholder dsconfig commands; the full tup feeds ldapmodify.
    tup = (d['dsHome'], d['ldapHost'], d['ldapDN'], d['ldapPW'],
           d['ldapPort'], cwd, d['dataGeneratedFile'])
    if d['platform'] == "windows":
        actions = ['%s\\bat\\stop-ds.bat' % d['dsHome'],
                   'copy %s\\%s %s\\config\\schema' % (cwd, d['schemaFN'], d['dsHome']),
                   'copy %s\\%s %s\\config\\schema' % (cwd, d['userSchemaFN'], d['dsHome']),
                   '%s\\bat\\start-ds.bat' % d['dsHome'],
                   '%s\\bat\\dsconfig.bat set-global-configuration-prop -h %s -p 4444 --trustAll --no-prompt -D "%s" -w %s --set single-structural-objectclass-behavior:accept' % tup[:-3],
                   '%s\\bat\\dsconfig.bat -h %s -p 4444 --trustAll --no-prompt -D "%s" -w %s set-password-policy-prop --policy-name "Default Password Policy" --set allow-pre-encoded-passwords:true' % tup[:-3],
                   '%s\\bat\\dsconfig.bat set-backend-prop --backend-name userRoot --add base-dn:%s -h %s -p 4444 -D "%s" -w %s --trustAll --noPropertiesFile --no-prompt' % (d['dsHome'], d['suffix'], d['ldapHost'], d['ldapDN'], d['ldapPW']),
                   '%s\\bat\\ldapmodify.bat -h %s -D "%s" -w %s -a -p %s -f %s\\%s' % tup]
    else:
        # Same sequence using POSIX paths and `cp` instead of `copy`.
        actions = ['%s/bin/stop-ds' % d['dsHome'],
                   'cp %s/%s %s/config/schema' % (cwd, d['schemaFN'], d['dsHome']),
                   'cp %s/%s %s/config/schema' % (cwd, d['userSchemaFN'], d['dsHome']),
                   '%s/bin/start-ds' % d['dsHome'],
                   '%s/bin/dsconfig set-global-configuration-prop -h %s -p 4444 --trustAll --no-prompt -D "%s" -w %s --set single-structural-objectclass-behavior:accept' % tup[:-3],
                   '%s/bin/dsconfig -h %s -p 4444 --trustAll --no-prompt -D "%s" -w %s set-password-policy-prop --policy-name "Default Password Policy" --set allow-pre-encoded-passwords:true' % tup[:-3],
                   '%s/bin/dsconfig set-backend-prop --backend-name userRoot --add base-dn:%s -h %s -p 4444 -D "%s" -w %s --trustAll --noPropertiesFile --no-prompt' % (d['dsHome'], d['suffix'], d['ldapHost'], d['ldapDN'], d['ldapPW']),
                   '%s/bin/ldapmodify -h %s -D "%s" -w %s -a -p %s -f %s/%s' % tup]
    util.call(actions)
def examine_abi_host(download_path, results_dir):
    """Make use of the hostside abireport tool."""
    cmd = "abireport scan-packages \"{}\"".format(results_dir)
    try:
        util.call(cmd, cwd=download_path)
    except Exception as e:
        # Any failure is fatal; report and bail out.
        util.print_fatal("Error invoking abireport: {}".format(e))
def package(filemanager):
    """Build the current tarball in mock: create the SRPM, then build it,
    and feed the resulting build log to parse_build_results.

    Increments the global build `round` counter on each invocation.
    """
    global round
    round = round + 1
    set_mock()
    print("Building package " + tarball.name + " round", round)
    # call(mock_cmd + " -q -r clear --scrub=cache")
    # call(mock_cmd + " -q -r clear --scrub=all")
    # Start from a clean results directory.
    shutil.rmtree('{}/results'.format(download_path), ignore_errors=True)
    os.makedirs('{}/results'.format(download_path))
    # Step 1: build the source RPM from the spec + sources.
    util.call(
        mock_cmd +
        " -r clear --buildsrpm --sources=./ --spec={0}.spec --uniqueext={0} --result=results/ --no-cleanup-after"
        .format(tarball.name),
        logfile="%s/mock_srpm.log" % download_path,
        cwd=download_path)
    # Remove the SRPM-stage build.log so the binary build writes a fresh one.
    util.call("rm -f results/build.log", cwd=download_path)
    srcrpm = "results/%s-%s-%s.src.rpm" % (tarball.name, tarball.version,
                                           tarball.release)
    # Step 2: build the binary RPMs; check=False so a failed build still
    # returns here and the log can be parsed for the failure cause.
    returncode = util.call(
        mock_cmd +
        " -r clear --result=results/ %s --enable-plugin=ccache --uniqueext=%s --no-cleanup-after"
        % (srcrpm, tarball.name),
        logfile="%s/mock_build.log" % download_path,
        check=False,
        cwd=download_path)
    # sanity check the build log
    if not os.path.exists(download_path + "/results/build.log"):
        util.print_fatal(
            "Mock command failed, results log does not exist. User may not have correct permissions."
        )
        exit(1)
    parse_build_results(download_path + "/results/build.log", returncode,
                        filemanager)
def replace_images(show_calls, v):
    """Build the final two-core executable and splice per-core images into it.

    Compiles the container program, then uses xobjdump to split the master
    and slave executables and replace (-r) the images at node 0, cores 0
    and 1 of FINAL_XE with the extracted image_n0c0.elf from each split.
    """
    vmsg(v, 'Creating new executable')
    # NOTE(review): this first call does not pass v=show_calls like the
    # others — presumably an oversight; confirm before changing.
    util.call([XCC, target_2core(), config.XS1_RUNTIME_PATH+'/container.xc',
               '-o', FINAL_XE])
    # Split master, then install its node-0/core-0 image at position 0,0.
    util.call([XOBJDUMP, '--split', MASTER_XE], v=show_calls)
    util.call([XOBJDUMP, FINAL_XE, '-r', '0,0,image_n0c0.elf'], v=show_calls)
    # Split slave (overwrites image_n0c0.elf), install it at position 0,1.
    util.call([XOBJDUMP, '--split', SLAVE_XE], v=show_calls)
    util.call([XOBJDUMP, FINAL_XE, '-r', '0,1,image_n0c0.elf'], v=show_calls)
def process_markdown(doc, relative=True):
    """Render *doc*'s markdown to HTML with pandoc, typesetting math as images.

    Parses the source to a pandoc JSON AST, replaces equations with rendered
    images, optionally rewrites links relative to doc.relroot, and stores the
    final HTML in doc.target_data.
    """
    parse_cmd = ['pandoc', '-t', 'json', '-f', 'markdown']
    if doc.source_data is None:
        # Read markdown from the file on disk.
        parse_result = util.call(parse_cmd + [str(doc.source_path)])
    else:
        # Feed in-memory markdown via stdin.
        parse_result = util.call(parse_cmd, input=doc.source_data)
    ast = json.loads(parse_result.stdout)
    equations = gather_equations(ast)
    texify.makeimages(equations,
                      make_png=True,
                      make_svg=True,
                      png_dpi=int(96 * png_zoom))
    ast = update_math(ast, equations)
    if relative:
        ast = make_links_relative(ast, doc.relroot)
    render_cmd = ['pandoc', '-s', '-f', 'json', '-t', 'html',
                  '--template', doc.template]
    render_cmd += doc.pandoc_variable_arguments()
    render_result = util.call(render_cmd, input=json.dumps(ast))
    doc.target_data = render_result.stdout
def unmount(self):
    """Unmount self.mountdir, falling back to a lazy umount, then optionally
    remove the mount directory (when self.rmdir is set)."""
    if self.mounted:
        logging.info("Unmounting directory %s" % self.mountdir)
        rc = call(["/bin/umount", self.mountdir])
        if rc == 0:
            self.mounted = False
        else:
            # Normal umount failed (e.g. busy); retry with -l (lazy),
            # which detaches now and cleans up when no longer busy.
            logging.warn(
                "Unmounting directory %s failed, using lazy umount" %
                self.mountdir)
            print >> sys.stdout, "Unmounting directory %s failed, using lazy umount" % self.mountdir
            rc = call(["/bin/umount", "-l", self.mountdir])
            if rc != 0:
                raise MountError("Unable to unmount filesystem at %s" %
                                 self.mountdir)
            else:
                logging.info("lazy umount succeeded on %s" % self.mountdir)
                print >> sys.stdout, "lazy umount succeeded on %s" % self.mountdir
                self.mounted = False
    if self.rmdir and not self.mounted:
        # Best-effort directory removal; ignore failures (e.g. not empty).
        try:
            os.rmdir(self.mountdir)
        except OSError, e:
            pass
        self.rmdir = False
def parse_build_results(filename, returncode, filemanager, config, requirements, content):
    """Handle build log contents.

    Scans the mock build log line by line: applies the configured
    pkgconfig/simple/failed patterns to collect requirements, gathers
    unpackaged files for the %files section, removes files RPM reported
    missing, and records overall success in the module-global `success`.
    """
    global must_restart
    global success
    requirements.verbose = 1
    must_restart = 0
    infiles = 0
    # Flush the build-log to disk, before reading it
    util.call("sync")
    with util.open_auto(filename, "r") as buildlog:
        loglines = buildlog.readlines()
    for line in loglines:
        for pat in config.pkgconfig_pats:
            simple_pattern_pkgconfig(line, *pat,
                                     config.config_opts.get('32bit'),
                                     requirements)
        for pat in config.simple_pats:
            simple_pattern(line, *pat, requirements)
        for pat in config.failed_pats:
            failed_pattern(line, config, requirements, *pat)
        check_for_warning_pattern(line)
        # Search for files to add to the %files section.
        # * infiles == 0 before we reach the files listing
        # * infiles == 1 for the "Installed (but unpackaged) file(s) found" header
        #   and for the entirety of the files listing
        # * infiles == 2 after the files listing has ended
        if infiles == 1:
            for search in ["RPM build errors", "Childreturncodewas",
                           "Child returncode", "Empty %files file"]:
                if search in line:
                    infiles = 2
            for start in ["Building", "Child return code was"]:
                if line.startswith(start):
                    infiles = 2
        if infiles == 0 and "Installed (but unpackaged) file(s) found:" in line:
            infiles = 1
        elif infiles == 1 and "not matching the package arch" not in line:
            # exclude blank lines from consideration...
            file = line.strip()
            if file and file[0] == "/":
                filemanager.push_file(file, content.name)
        if line.startswith("Sorry: TabError: inconsistent use of tabs and spaces in indentation"):
            print(line)
            # Force a distinctive non-zero code so the build is not
            # treated as successful below.
            returncode = 99
        nvr = f"{content.name}-{content.version}-{content.release}"
        match = f"File not found: /builddir/build/BUILDROOT/{nvr}.x86_64/"
        if match in line:
            # RPM names the buildroot-relative path after the marker.
            missing_file = "/" + line.split(match)[1].strip()
            filemanager.remove_file(missing_file)
        if line.startswith("Executing(%clean") and returncode == 0:
            print("RPM build successful")
            success = 1
def create_download_path(target_dir):
    """Create download path.

    priority for target directory:
    - target_dir set from args
    - current directory if options.conf['package'] exists and any of the
      options match what has been detected.
    - curdir/name

    Also set giturl from the config (needs config refactor).
    """
    global giturl
    global domain
    target = os.path.join(os.getcwd(), name)
    if os.path.exists(os.path.join(os.getcwd(), 'options.conf')):
        parser = configparser.ConfigParser(interpolation=None)
        parser.read('options.conf')
        if "package" in parser.sections():
            pkg = parser["package"]
            # A match on any identity field means we are re-running inside
            # an existing package directory.
            matched = (pkg.get("name") == name
                       or pkg.get("url") == url
                       or pkg.get("archives") == " ".join(archives))
            if matched:
                target = os.getcwd()
            if "giturl" in pkg:
                giturl = pkg.get("giturl")
            if "domain" in pkg:
                domain = pkg.get("domain")
    if target_dir:
        target = target_dir
    build.download_path = target
    call("mkdir -p {}".format(build.download_path))
    return target
def prepare_and_extract(extract_cmd):
    """Prepare the directory and extract the tarball."""
    # Drop leftovers from any previous extraction.
    for stale in (name, tarball_prefix):
        shutil.rmtree(os.path.join(build.base_path, stale), ignore_errors=True)
    os.makedirs("{}".format(build.base_path), exist_ok=True)
    call("mkdir -p %s" % build.download_path)
    call(extract_cmd)
def remove_clone_archive(path, clone_path, is_fatal):
    """Remove temporary clone_archive git folder.

    Failures are only reported (fatally) when is_fatal is set; otherwise
    they are ignored as best-effort cleanup.
    """
    try:
        call(f"rm -rf {clone_path}", cwd=path)
    except subprocess.CalledProcessError as err:
        if not is_fatal:
            return
        print_fatal("Unable to remove {}: {}".format(clone_path, err))
def run_test(self, name, path, num_cores, args=[]):
    """Run one test: compile the .sire program, simulate it, and verify the
    output matches the stored .output file."""
    cores = '{}'.format(num_cores)
    try:
        # Compilation must succeed before anything else.
        compile_cmd = [COMPILE, path + '/' + name + '.sire',
                       '-t', 'mpi', '-n', cores] + args
        (exit, output) = call(compile_cmd)
        self.assertTrue(exit)
        # Run the compiled binary under the simulator.
        (exit, output) = call([SIMULATE, '-np', cores, 'a.out'] + SIM_FLAGS)
        self.assertTrue(exit)
        # The trimmed output must match the expected file exactly.
        expected = read_file(path + '/' + name + '.output')
        self.assertEqual(output.strip(), expected.strip())
    except Exception as e:
        sys.stderr.write('Error: {}\n'.format(e))
        raise
    except:
        sys.stderr.write("Unexpected error: {}\n".format(sys.exc_info()[0]))
        raise
def assemble_runtime(show_calls, v):
    """Compile each runtime source file into its .o object with MPICC."""
    vmsg(v, 'Compiling runtime:')
    for src in RUNTIME_FILES:
        obj = src + '.o'
        vmsg(v, ' '+src+' -> '+obj)
        cmd = [MPICC, config.MPI_RUNTIME_PATH+'/'+src, '-o', obj]
        util.call(cmd + ASSEMBLE_FLAGS, show_calls)
def assemble_builtins(show_calls, v):
    """Compile each builtin source file into its .o object with CC."""
    vmsg(v, 'Compiling builtins:')
    for src in BUILTIN_FILES:
        obj = src + '.o'
        vmsg(v, ' '+src+' -> '+obj)
        cmd = [CC, config.MPI_SYSTEM_PATH+'/'+src, '-o', obj]
        util.call(cmd + ASSEMBLE_FLAGS, show_calls)
def parse_build_results(filename, returncode, filemanager):
    """Handle build log contents.

    Scans the build log line by line, applies the configured pattern
    matchers to collect build requirements, records unpackaged files into
    filemanager, and sets the module-global `success` flag when the build
    reached %clean with a zero return code.
    """
    global must_restart
    global success
    buildreq.verbose = 1
    must_restart = 0
    infiles = 0
    # Flush the build-log to disk, before reading it
    util.call("sync")
    with open(filename, "r", encoding="latin-1") as buildlog:
        loglines = buildlog.readlines()
    for line in loglines:
        for pat in config.pkgconfig_pats:
            simple_pattern_pkgconfig(line, *pat)
        for pat in config.simple_pats:
            simple_pattern(line, *pat)
        for pat in config.failed_pats:
            failed_pattern(line, *pat)
        # search for files to add to the %files section
        # track with infiles. If infiles == 1 we found the header
        # "Installed (but unpackaged) file(s) found" in the build log
        # This tells us to look in the next line. Increment infiles if we don't
        # find a file in the next line.
        if infiles == 1:
            for search in [
                    "RPM build errors", "Childreturncodewas",
                    "Child returncode", "Empty %files file"
            ]:
                if search in line:
                    infiles = 2
            for start in ["Building", "Child return code was"]:
                if line.startswith(start):
                    infiles = 2
        if "Installed (but unpackaged) file(s) found:" in line:
            infiles = 1
        elif infiles == 1 and "not matching the package arch" not in line:
            filemanager.push_file(line.strip())
        if line.startswith(
                "Sorry: TabError: inconsistent use of tabs and spaces in indentation"
        ):
            print(line)
            # Force a distinctive non-zero code so success is not recorded.
            returncode = 99
        if "File not found: /builddir/build/BUILDROOT/" in line:
            # RPM prints the buildroot-relative path after this marker.
            left = "File not found: /builddir/build/BUILDROOT/%s-%s-%s.x86_64/" % (
                tarball.name, tarball.version, tarball.release)
            missing_file = "/" + line.split(left)[1].strip()
            filemanager.remove_file(missing_file)
        if line.startswith("Executing(%clean") and returncode == 0:
            print("RPM build successful")
            success = 1
def create(cls, name, **kwargs):
    """Create a new ZFS filesystem *name*.

    :param name: dataset name to create
    :param kwargs: ZFS properties passed to ``zfs create`` as ``-o key=value``
    :raises Exception: if a filesystem with that name already exists
    :return: an instance of *cls* wrapping the new dataset

    Fix: iterate ``kwargs.items()`` — iterating the dict itself yields only
    keys, so the ``k, v`` unpacking raised ValueError whenever a property
    was supplied.
    """
    if cls.check(name):
        raise Exception("FileSystem already exists")
    args = []
    for k, v in kwargs.items():
        args += ['-o', '{0}={1}'.format(k, v)]
    call(['/sbin/zfs', 'create'] + args + [name])
    return cls(name)
def dump_memory_use():
    """Print a memory-use report for the master and slave executables.

    Parses `xobjdump --size` output for MASTER_XE and SLAVE_XE and prints
    section sizes, stack budget, and remaining RAM for each image.
    """
    # Column indices in xobjdump --size output.
    TEXT = 0
    DATA = 1
    BSS = 2
    TOTAL = 3
    def size(v):
        # Right-aligned bytes plus a KB rendering for readability.
        return '{:>6} {:>8}'.format(v, '({:,.2f}KB)'.format(v/1000))
    s = util.call([XOBJDUMP, '--size', MASTER_XE])
    m = re.findall(r' *([0-9]+) *([0-9]+) *([0-9]+) *([0-9]+)', s)
    master_sizes = [int(x) for x in m[0]]
    # Exactly one size row expected per executable.
    assert len(m) == 1
    s = util.call([XOBJDUMP, '--size', SLAVE_XE])
    m = re.findall(r' *([0-9]+) *([0-9]+) *([0-9]+) *([0-9]+)', s)
    slave_sizes = [int(x) for x in m[0]]
    assert len(m) == 1
    print('Total memory: '+size(defs.RAM_SIZE))
    print()
    print('Kernel stack space: '+size(defs.KERNEL_SPACE))
    print('Thread stack space: '+size(defs.THREAD_STACK_SPACE))
    print('Number of threads: {:>6}'.format(defs.MAX_THREADS))
    # Stack budget: one stack per thread plus the kernel's reserved space.
    thread_stack_use = defs.MAX_THREADS*defs.THREAD_STACK_SPACE
    total_stack_use = thread_stack_use+defs.KERNEL_SPACE
    print('Total thread stack use: '+size(thread_stack_use))
    print('Total stack use: '+size(total_stack_use))
    print()
    # The slave image is pure runtime; master minus slave approximates the
    # program's own footprint.
    runtime_size = slave_sizes[TEXT]+slave_sizes[DATA]
    program_size = master_sizes[TEXT]+master_sizes[DATA]-runtime_size
    print('Runtime size: '+size(runtime_size))
    print('Program size: '+size(program_size))
    print()
    print('Master memory use: ')
    print('  text: '+size(master_sizes[TEXT]))
    print('  data: '+size(master_sizes[DATA]))
    print('  bss: '+size(master_sizes[BSS]))
    print('  stack: '+size(total_stack_use))
    print('  '+('-'*39))
    master_total = master_sizes[TOTAL]+total_stack_use
    master_remaining = defs.RAM_SIZE - master_total
    print('  Total: '+size(master_total))
    print('  Remaining: '+size(master_remaining))
    print()
    print('Slave memory use: ')
    print('  text: '+size(slave_sizes[TEXT]))
    print('  data: '+size(slave_sizes[DATA]))
    print('  bss: '+size(slave_sizes[BSS]))
    print('  stack: '+size(total_stack_use))
    print('  '+('-'*39))
    slave_total = slave_sizes[TOTAL]+total_stack_use
    slave_remaining = defs.RAM_SIZE - slave_total
    print('  Total: '+size(slave_total))
    print('  Remaining: '+size(slave_remaining))
def update(self):
    """Push all pending property changes to ZFS in one `zfs set` call,
    then mark every property as clean."""
    changes = ['{0.name}={0.value}'.format(prop)
               for prop in self.properties.values()
               if prop._needsupdate()]
    if not changes:
        # Nothing dirty; avoid a pointless zfs invocation.
        return
    call(['/sbin/zfs', 'set'] + changes + [self.parent])
    for prop in self.properties.values():
        prop.reset()
def configure(source_path, build_path, arguments=None):
    """ Configures the build based on the supplied arguments. """
    try:
        makedirs(build_path)
    except OSError:
        # Build directory already exists — that is fine for cmake.
        pass
    cmake_cmd = ['cmake', '-B' + build_path, '-H' + source_path]
    util.call(cmake_cmd + create_options(arguments))
def randomHourMinutes(): """ Generates random trips for an hour in Eichstaett, then simulates and outputs minutely. """ calls = randomHourMinutes_calls() print 'Generating trips...' call(calls[0]) print 'Running SUMO simulation...' return call(calls[1])
def send_mail(subject, to, content='', tmpfile=None):
    """Send an email through the local `mail` command.

    When *tmpfile* is given its contents are appended after *content*.
    NOTE(review): subject/content/to are interpolated into a shell command
    unescaped — shell metacharacters in them will break or be executed;
    confirm callers only pass trusted values.
    """
    if tmpfile:
        cmd = '(echo "%s"; cat %s) | mail -s "%s" %s' % (content, tmpfile, subject, to)
    else:
        cmd = 'echo "%s" | mail -s "%s" %s' % (content, subject, to)
    try:
        call(cmd)
    except RunCommandError as err:
        logger.error('send mail failed, err: %s' % err)
def run_clean(arguments):
    """ Cleans the build directory. """
    build_path = util.get_build_path(arguments)
    try:
        check_configured(arguments)
    except Error:
        # Not configured yet, so there is nothing to clean.
        return
    clean_cmd = ['make', '-s', '-C', build_path, 'clean']
    util.call(clean_cmd)
def configure(arguments):
    """ Configures the build based on the supplied arguments. """
    build_path = util.get_build_path(arguments)
    try:
        makedirs(build_path)
    except OSError:
        # Build directory already exists — that is fine for cmake.
        pass
    cmake_cmd = ['cmake', '-B' + build_path, '-H' + util.get_base_path()]
    util.call(cmake_cmd + create_options(arguments))
def link(show_calls, v):
    """ Link the complete executable. """
    vmsg(v, 'Linking executable -> '+BINARY)
    # Link order: program object, then runtime, then builtins.
    objects = ['program.c.o']
    objects += [f + '.o' for f in RUNTIME_FILES]
    objects += [f + '.o' for f in BUILTIN_FILES]
    util.call([MPICC] + objects + ['-o', BINARY] + LINK_FLAGS, show_calls)
def compile_str(name, string, show_calls, v, save_temps=True):
    """ Compile a buffer containing an XC program. """
    srcfile = name + '.xc'
    outfile = name + '.S'
    vmsg(v, 'Compiling '+srcfile+' -> '+outfile)
    # Materialise the buffer on disk for the compiler.
    util.write_file(srcfile, string)
    util.call([XCC, srcfile, '-o', outfile] + COMPILE_FLAGS, v=show_calls)
    if not save_temps:
        os.remove(srcfile)
def test_call_check(self):
    """
    Test call with check=True (default) and a bad returncode. Should
    raise a CalledProcessError

    Fix: restore the real subprocess.call in a finally block — previously a
    failing assertion skipped the restore and leaked the mock into every
    subsequent test.
    """
    call_backup = subprocess.call
    util.subprocess.call = mock_gen(rv=1)
    try:
        with self.assertRaises(subprocess.CalledProcessError):
            util.call('some command')
    finally:
        util.subprocess.call = call_backup
def _umount(self):
    """Unmount the jail's filesystems: the configured mount, then devfs,
    procfs, and the ports trees (distfiles before the ports mount itself,
    since it nests inside it)."""
    if self.mount:
        # check if exists
        call(['/sbin/umount','-a','-F',self.mount])
    if self.devfs:
        call(['/sbin/umount',self.rootdir+'/dev'])
    if self.procfs:
        call(['/sbin/umount',self.rootdir+'/proc'])
    if self.ports:
        # Inner mount first, then the enclosing ports mount.
        call(['/sbin/umount',self.rootdir+'/usr/ports/distfiles'])
        call(['/sbin/umount',self.rootdir+'/usr/ports'])
def assemble_str(name, string, show_calls, v, cleanup=True):
    """ Assemble a buffer containing a c program. """
    srcfile = name + '.c'
    outfile = name + '.o'
    vmsg(v, 'Assembling '+srcfile+' -> '+outfile)
    # Materialise the buffer on disk for the compiler.
    util.write_file(srcfile, string)
    util.call([MPICC, srcfile, '-o', outfile] + ASSEMBLE_FLAGS, show_calls)
    if cleanup:
        os.remove(srcfile)
def git_commit(self):
    """Commit staged changes with self.commit_msg as the editable message.

    Writes the message to a temp file, runs `git commit -F <file> -e` in the
    hadoop repo directory, and always removes the temp file afterwards.

    :return: self (fluent)

    Fix: close the OS-level file descriptor returned by tempfile.mkstemp —
    the previous version reopened the path with open() and leaked the fd.
    """
    fd, msg_path = tempfile.mkstemp(suffix='.txt')
    try:
        # os.fdopen takes ownership of fd; the with-block closes it.
        with os.fdopen(fd, 'w') as f:
            f.write(self.commit_msg)
        util.call([self.conf['git_binary'], 'commit', '-F', msg_path, '-e'],
                  cwd=self.conf['hadoop_repo_directory'])
        return self
    finally:
        os.remove(msg_path)
def store(self, force=False):
    """Install this devfs ruleset into the kernel.

    If the ruleset id is already loaded, do nothing unless *force* is set,
    in which case the existing set is deleted and re-created line by line.
    """
    ident = str(self.id)
    if self.id in self._loaded:
        if not force:
            return
        # Drop the existing ruleset before re-adding it.
        call(['/sbin/devfs', 'rule', '-s', ident, 'delset'])
        self._loaded.remove(self.id)
    for rule in self:
        call(['/sbin/devfs', 'rule', '-s', ident] + rule.split())
    self._loaded.append(self.id)
def mktx(input_addrs, output_addrs, amount=AMOUNT):
    """Build a raw transaction via `sx mktx` into a temp file and return
    its contents, or None when no unspent outputs are available."""
    tf = tempfile.NamedTemporaryFile()
    inputs = query_unspent_outputs(input_addrs, amount)
    if inputs is None:
        return None
    # Assemble the sx command: output file, then inputs, then outputs.
    parts = ["sx mktx %s" % tf.name]
    for tx_input in inputs:
        parts.append(" --input %s" % tx_input)
    for address in output_addrs:
        parts.append(" --output %s:%s" % (address, amount))
    call("".join(parts))
    return tf.read()
def run(params, xac_files):
    """Write an XSCALE.INP for the given XDS_ASCII files and run xscale.

    Collects resolution range and cell parameters from each input file,
    builds the XSCALE input header (space group, cell, shells, output
    file), lists every input file, and invokes xscale in params.workdir.
    """
    if len(xac_files) == 0:
        print "No XDS_ASCII.HKL files provided."
        return
    xscale_inp_head = "!MINIMUM_I/SIGMA= 3\n\n"
    infos = {}
    # Track the widest resolution range over all inputs.
    d_max, d_min = 0, 100
    cells = []
    for xds_ascii in xac_files:
        info = get_xac_info(xds_ascii)
        infos[xds_ascii] = info
        resrng = map(float, info["resol_range"].split())
        d_max = max(d_max, resrng[0])
        d_min = min(d_min, resrng[1])
        cells.append(map(float, info["cell"].split()))
    if params.d_min is not None:
        d_min = max(params.d_min, d_min)
    if params.cell == "average":
        # Element-wise sum of all cells, then divide by the count.
        cell_sum = reduce(lambda x, y: map(lambda a: a[0] + a[1], zip(x, y)), cells)
        cell_mean = map(lambda x: x / float(len(cells)), cell_sum)
    # NOTE(review): cell_mean is only defined when params.cell == "average",
    # but it is used unconditionally below — any other value raises
    # NameError. Confirm whether "average" is the only supported setting.
    if params.sgnum is not None:
        sgnum = str(params.sgnum)
    else:
        # Fall back to the space group of the first input file.
        sgnum = infos[xac_files[0]]["spgr_num"]
    xscale_inp_head += " SPACE_GROUP_NUMBER= %s\n" % sgnum
    xscale_inp_head += " UNIT_CELL_CONSTANTS= %s\n" % " ".join(
        map(lambda x: "%.3f" % x, cell_mean))
    #if anomalous_flag is not None:
    #    xscale_inp_head += " FRIEDEL'S_LAW= %s\n" % ("FALSE" if anomalous_flag else "TRUE")
    xscale_inp_head += make_shells(d_max, d_min, params.nbins) + "\n"
    xscale_inp_head += " OUTPUT_FILE= %s\n\n" % params.output
    xscale_inp = os.path.join(params.workdir, "XSCALE.INP")
    inp_out = open(xscale_inp, "w")
    inp_out.write(xscale_inp_head)
    for xds_ascii in xac_files:
        inp_out.write(" INPUT_FILE= %s\n" % os.path.relpath(xds_ascii, params.workdir))
        inp_out.write(" ! INCLUDE_RESOLUTION_RANGE= %s\n\n" % infos[xds_ascii]["resol_range"])
    inp_out.close()
    util.call(xscale_comm, wdir=params.workdir, stdout=sys.stdout)
def download_tarball(target_dir):
    """ Download tarball at url (global) to target_dir """
    global gcov_file
    tarfile = os.path.basename(url)
    # Default to curdir/name when no explicit target was given.
    if target_dir:
        build.download_path = target_dir
    else:
        build.download_path = os.path.join(os.getcwd(), name)
    call("mkdir -p {}".format(build.download_path))
    # locate the tarball locally or download
    return check_or_get_file(url, tarfile)
def cleanUpPreviousRun(d):
    """Delete the generated data and schema files left by a previous run,
    using the platform-appropriate delete command."""
    if d['platform'] == "windows":
        remove = 'del /f /q %s'
    else:
        remove = 'rm -f %s'
    util.call([
        remove % d['dataGeneratedFile'],  # remove dataGeneratedFile.ldif
        remove % d['schemaFN'],           # remove schemaFN.ldif
    ])
def __format_filesystem(self):
    """Create the filesystem on self.disk.device with mkfs, then tune it
    (no forced/interval fsck, dir_index, user_xattr + acl).

    :raises MountError: if mkfs exits non-zero
    """
    logging.info("Formating %s filesystem on %s" % (self.fstype,
                                                    self.disk.device))
    rc = call(["/sbin/mkfs." + self.fstype,
               "-F", "-L", self.fslabel,
               "-m", "1", "-b", str(self.blocksize),
               self.disk.device])
    #                      str(self.disk.size / self.blocksize)])
    if rc != 0:
        raise MountError("Error creating %s filesystem" % (self.fstype,))
    logging.info("Tuning filesystem on %s" % self.disk.device)
    # -c0/-i0 disable mount-count and time-based checks for the image.
    call(["/sbin/tune2fs", "-c0", "-i0", "-Odir_index",
          "-ouser_xattr,acl", self.disk.device])
def _ifprestart(self):
    """Prepare network interfaces before the jail starts.

    In vlan mode: find an unused epair (one whose name appears twice in
    ifconfig output, i.e. both ends present) or create a new one, then add
    its 'a' end to bridge0 and bring it up.  Otherwise: add each configured
    IPv4/IPv6 address as an alias on self.iface.
    """
    if self.vlan:
        # search for existing usable vlan
        ifc = Popen(['/sbin/ifconfig'], stdout=-1)
        ifc.wait()
        pairs = {}
        for line in ifc.stdout:
            if not line.startswith('epair'):
                continue
            # "epairNa:" -> "epairN" (strip trailing a/b and the colon).
            pair = line.split(':')[0][:-1]
            if pair in pairs:
                pairs[pair] += 1
            else:
                pairs[pair] = 1
        for k,v in sorted(pairs.items()):
            if v == 2:
                self._ifpair = k
                break
        else:
            # create a new vlan
            ifc = Popen(['/sbin/ifconfig','epair','create'], stdout=-1)
            ifc.wait()
            # ifconfig prints e.g. "epairNa"; drop the trailing 'a'.
            self._ifpair = ifc.stdout.readline().rstrip()[:-1]
        call(['/sbin/ifconfig','bridge0','addm',self._ifpair+'a'])
        call(['/sbin/ifconfig',self._ifpair+'a','up'])
    else:
        for ip in self.ip4:
            call(['/sbin/ifconfig',self.iface,'alias',ip,'netmask','255.255.255.255'])
        for ip in self.ip6:
            call(['/sbin/ifconfig',self.iface,'inet6',ip,'prefixlen','128'])
def enter_final_stage(self):
    """ Enter the final stage

    Orders the collected signatures by participant index, mixes them into
    the transaction, prints the final tx, and broadcasts it via both
    Obelisk and blockchain.info.
    """
    # signatures is a list of (participant_index, signature) pairs.
    signed = [""]*self._nparticipants
    for sig in self.signatures:
        signed[sig[0]] = str(sig[1]).strip()
    res = mix_inputs.mix(signed, self._tx)
    self._final_tx = res.strip()
    print "--- FINAL TX -----"
    print self._final_tx
    print "------------------"
    # NOTE(review): the tx hex is interpolated into a shell pipeline
    # unescaped; safe only because it is locally produced hex.
    util.call('echo %s | sx ob-broadcast-tx -' % self._final_tx)
    util.call('echo %s | sx bci-pushtx -' % self._final_tx)
def stop(self):
    """Stop the running jail: run its stop command, remove the jail,
    unmount filesystems and tear down interfaces.

    :return: True on success, False if the jail was not running
    """
    if not self.running:
        print self.name+' is not running'
        return False
#        if not self.enable:
#            return False
    print 'stopping '+self.name
    self._ifprestop()
    # Run the jail's configured stop command inside the jail, then
    # remove (-r) the jail itself.
    call(['/usr/sbin/jexec',self.jid]+self.exec_stop.split())
    call(['/usr/sbin/jail','-r',self.jid])
    self._umount()
    self._ifpoststop()
    self.running = False
    return True
def unmount(self):
    """Unmount self.dest, falling back to a lazy (-l) umount when the
    normal unmount fails.

    :raises MountError: when even the lazy unmount fails
    """
    if not self.mounted:
        return
    rc = call(["/bin/umount", self.dest])
    if rc != 0:
        logging.info("Unable to unmount %s normally, using lazy unmount" % self.dest)
        # Lazy umount detaches now and finishes when no longer busy.
        rc = call(["/bin/umount", "-l", self.dest])
        if rc != 0:
            raise MountError("Unable to unmount fs at %s" % self.dest)
        else:
            logging.info("lazy umount succeeded on %s" % self.dest)
            print >> sys.stdout, "lazy umount succeeded on %s" % self.dest
    self.mounted = False
def urls(self):
    """Get links to the artist's official site, MusicBrainz site, MySpace site,
    Wikipedia article, Amazon list, and iTunes page."""
    if self._urls is None or not CACHE:
        # The API returns <artist> children whose tags end in "_url";
        # strip that suffix to get the site name.
        children = util.call('get_urls',
                             {'id': self.identifier}).find('artist').getchildren()
        self._urls = dict((child.tag[:-4], child.text)
                          for child in children
                          if child.tag[-4:] == '_url')
    return self._urls
def create(self):
    """Create a device-mapper snapshot combining the image loop device
    (origin) and the COW loop device; idempotent once created.

    :raises SnapshotError: if dmsetup fails (loop devices are cleaned up)
    """
    if self.__created:
        return
    self.imgloop.create()
    self.cowloop.create()
    # Unique dm name per process invocation.
    self.__name = "imgcreate-%d-%d" % (os.getpid(),
                                       random.randint(0, 2**16))
    size = os.stat(self.imgloop.lofile)[stat.ST_SIZE]
    # dm table: <start> <sectors> snapshot <origin> <cow> p <chunksize>
    table = "0 %d snapshot %s %s p 8" % (size / 512,
                                         self.imgloop.device,
                                         self.cowloop.device)
    args = ["/sbin/dmsetup", "create", self.__name,
            "-vv", "--verifyudev",
            "--uuid", "LIVECD-%s" % self.__name,
            "--table", table]
    if call(args) != 0:
        # Roll back the loop devices before reporting failure.
        self.cowloop.cleanup()
        self.imgloop.cleanup()
        raise SnapshotError("Could not create snapshot device using: " +
                            string.join(args, " "))
    self.__created = True
def search(**args):
    """
    TRACK.SEARCH - PARAMETERS

    q                  Search within track titles,artists,lyrics
    q_lyrics           Any word in the lyrics
    page               Define the page number for paginated results
    page_size          Define the page size for paginated results. Range is 1 to 100.
    f_has_lyrics       When set, filter only contents with lyrics
    f_artist_id        When set, filter by this artist id
    f_music_genre_id   When set, filter by this music category id
    f_artist_mbid      When set, filter by this artist musicbrainz id
    f_lyrics_language  Filter by the lyrics language (en,it,..)
    s_track_rating     Sort by our popularity index for tracks (asc|desc)
    s_artist_rating    Sort by our popularity index for artists (asc|desc)
    quorum_factor      Search only a part of the given query string.
                       Allowed range is (0.1 - 0.9), default is 1
    format             Decide the output type (json or xml)

    Fixes: the previous validation tested ``(not k) in valid_params``
    (always False), so invalid parameters were never rejected, and the
    filter ``(not v) is None`` (also always False) discarded every
    parameter, sending an empty query to the API.
    """
    valid_params = ('q', 'q_track', 'q_artist', 'q_track_artist', 'q_lyrics',
                    'page', 'page_size', 'f_has_lyrics', 'f_artist_id',
                    'f_artist_mbid', 'quorum_factor', 'apikey')
    for k in args.keys():
        if k not in valid_params:
            raise apiError.MusixMatchAPIError(
                -1, """Invalid track search param: """ + str(k))
    # Keep only parameters that were actually supplied (non-None).
    params = dict((k, v) for k, v in args.items() if v is not None)
    body = util.call('track.search', params)
    track_list_dict = body["track_list"]
    print(track_list_dict)
    return track_list_dict
def run_cppcheck(throw=True):
    """ Runs cppcheck. """
    basedir = util.get_base_path()
    cppcheck_cmd = [
        'cppcheck',
        '--enable=all',
        # '--error-exitcode=2',  # Uncomment when cppcheck issues are fixed
        '-UGD_LOG_LEVEL',
        '-UGD_DISABLE_LOG_COLORS',
        '--suppressions-list=%s' % (path.join('tools', 'cppcheck-suppr-list')),
        '--includes-file=%s' % (path.join('tools', 'cppcheck-incl-list')),
        'src',
        'examples',
    ]
    print('')
    print("Running cppcheck...")
    try:
        chdir(basedir)
        return util.call(cppcheck_cmd, throw)
    except OSError as e:
        # ENOENT here means the cppcheck binary itself was not found.
        if e.errno == errno.ENOENT:
            raise OSError("Cppcheck is not installed.")
        else:
            raise