def identify(package, product, base, ascii):
    """List products that include specified package."""
    rpm = RPM.from_name(package)
    if not rpm:
        error_exit(
            f"{package} does not appear to be in valid <name>-<version>-<release>.<arch>[.rpm]"
        )

    # Query parameters: the parsed RPM fields plus the caller's filters.
    query_values = rpm.to_dict()
    query_values["base_product"] = base
    query_values["product"] = product

    products = None
    conn = sqlite3.connect(db)
    with conn:
        cur = conn.cursor()
        # Choose the narrowest query the wildcard filters allow.
        if product == "*" and base == "*":
            cur.execute(sccpdb.search_products_by_rpm, query_values)
        elif base != "*":
            # Restrict the search to the given base's product family.
            cur.execute(sccpdb.create_product_family_temp_table, query_values)
            cur.execute(sccpdb.search_product_family_by_rpm, query_values)
        else:
            cur.execute(sccpdb.search_product_by_rpm, query_values)
        products = cur.fetchall()
        cur.close()
    conn.close()

    table = pretty_table(
        products,
        colnames=["id", "product", "type", "arch", "description"],
        fmt="ascii" if ascii else "csv",
    )
    print("\n".join(table))
def setup_eal_opts(args, file_prefix, proc_type='auto', hugedir=None):
    """Assemble the DPDK EAL option list for a container invocation.

    The '\\' entries are line-continuation delimiters consumed later by
    the pretty-printing helpers.
    """
    core_opt = get_core_opt(args)
    mem_opt = get_mem_opt(args)

    eal_opts = [
        core_opt['attr'], core_opt['val'], '\\',
        '-n', str(args.nof_memchan), '\\',
        mem_opt['attr'], mem_opt['val'], '\\',
        '--proc-type', proc_type, '\\']

    # --dev-ids is mandatory; error_exit() aborts if it is absent.
    if args.dev_ids is None:
        common.error_exit('--dev-ids')
    else:
        dev_ids = dev_ids_to_list(args.dev_ids)

    socks = [
        {'host': '/tmp/sock%d' % dev_id,
         'guest': '/var/run/usvhost%d' % dev_id}
        for dev_id in dev_ids]

    # One virtio-user vdev per device, wired to its guest-side socket.
    for dev_id, sock in zip(dev_ids, socks):
        eal_opts += [
            '--vdev',
            'virtio_user%d,queues=%d,path=%s' % (
                dev_id, args.nof_queues, sock['guest']),
            '\\']

    eal_opts += [
        '--file-prefix', file_prefix, '\\',
        '--', '\\']
    return eal_opts
def create_quality_option(args: argparse.Namespace,
                          codec_props: CodecProps) -> List[str]:
    """Derive the encoder quality option from the parsed CLI arguments.

    Precedence: explicit bitrate, then explicit quality, then a named
    preset; anything else falls back to the codec's transparent preset.
    Out-of-range bitrate/quality values abort via error_exit().
    """
    if args.bitrate:
        lo, hi = codec_props["bitrate_min"], codec_props["bitrate_max"]
        if not lo <= args.bitrate <= hi:
            error_exit("Bitrate must be between {} and {}.".format(lo, hi))
        return codec_props["bitrate_arg"] + args.bitrate

    if args.quality:
        lo, hi = codec_props["quality_min"], codec_props["quality_max"]
        if not lo <= args.quality <= hi:
            error_exit("Quality must be between {} and {}.".format(lo, hi))
        return codec_props["quality_arg"] + args.quality

    if args.preset == "high":
        return codec_props["preset_high"]
    if args.preset == "low":
        return codec_props["preset_low"]
    # Unrecognized preset, or no preset given at all: transparent default.
    return codec_props["preset_transparent"]
def check_upload_submit_consistency(self):
    """Check that the files being submitted match the ones uploaded for review.

    Loads the issue bookkeeping dict, diffs the working copy, and compares
    the resulting file set with the set recorded at upload time.  On any
    mismatch the user is warned and asked to confirm before continuing;
    confirming refreshes the recorded file set.
    """
    issue_id = self.issue
    self.issue_dict = common.load_issue_info(self.blade_root_dir)
    if not self.issue_dict.has_key(issue_id):
        warning('issue %s not found in issue_info_dict' % issue_id)
        return
    # TODO(naicaisun): submit.py support --files like upload.py
    relative_path = os.path.relpath(common.get_cwd(), self.blade_root_dir)
    file_list = common.get_all_diff_files()
    # Re-anchor diffed paths at the workspace root so they are comparable
    # with the paths stored at upload time.
    file_list = set(
        [os.path.join(relative_path, filename) for filename in file_list])
    # TODO(naicaisun): delete backward compatibility after running several weeks
    difference_files = []
    if isinstance(self.issue_dict[issue_id], set):
        # Legacy record format: the issue maps directly to a set of paths.
        difference_files = file_list.symmetric_difference(
            self.issue_dict[issue_id])
    else:
        # Current record format: dict with "upload_path" and "filelist".
        issue_info = self.issue_dict[issue_id]
        if issue_info["upload_path"] != relative_path:
            error_exit('the submit path: %s is not the same as upload path'
                       ' : %s' % (relative_path, issue_info["upload_path"]))
        difference_files = file_list.symmetric_difference(
            issue_info["filelist"])
    if difference_files:
        warning('the following file[s] you submit are not consistent with '
                'the ones you uploaded\n%s' % "\n".join(difference_files))
        answer = raw_input('Continue?(y/N) ').strip()
        if answer != 'y':
            error_exit('Exit')
        else:
            # User confirmed: refresh the stored file set so it matches
            # what is actually being submitted.
            if isinstance(self.issue_dict[issue_id], set):
                self.issue_dict[issue_id] = file_list
            else:
                self.issue_dict[issue_id]["filelist"] = file_list
def evaluate_substitution(subs: str) -> SubsPair:
    """Parse an ‘old/new’ substitution spec into an (old, new) tuple.

    Aborts via error_exit() when the spec does not contain exactly one
    '/' separator.
    """
    parts = subs.split("/")
    if len(parts) != 2:
        error_exit("‘{}’: invalid substitution format. "
                   "Expected ‘old/new’.".format(subs))
    old, new = parts[0], parts[1]
    return (old, new)
def get_core_opt(args):
    """Return the EAL core option as {'attr': flag, 'val': value}.

    Prefers --core-mask (-c) over --core-list (-l); aborts via
    common.error_exit() when neither was given.
    """
    if args.core_mask is not None:
        return {'attr': '-c', 'val': args.core_mask}
    if args.core_list is not None:
        return {'attr': '-l', 'val': args.core_list}
    common.error_exit('core_mask or core_list')
def run_presubmit(self):
    """Run the workspace's executable 'presubmit' hook, if one exists.

    Skipped entirely under --no-build.  Without a runnable hook, falls
    back to the default build-and-test cycle; a failing hook aborts.
    """
    if self.options.no_build:
        return
    script = find_file_bottom_up('presubmit')
    if not (script and os.access(script, os.X_OK)):
        # No executable hook found: do the standard build and tests.
        self.build_and_runtests()
        return
    proc = subprocess.Popen(script, shell=True, cwd=os.path.dirname(script))
    if proc.wait() != 0:
        error_exit('presubmit error')
def check_dependency_between_issues(self):
    """Warn when the issue being submitted may depend on another open issue.

    Collects the BUILD directories touched by this issue's files, asks
    blade for their transitive deps, and checks the other recorded issues
    for overlap.  Closed/submitted issues encountered along the way are
    pruned from the bookkeeping dict.
    """
    # Nothing to cross-check with fewer than two recorded issues.
    if not self.issue_dict.has_key(self.issue) or len(self.issue_dict) < 2:
        return
    build_path = set()
    current_dir = get_cwd()
    file_list = []
    # Legacy records are a bare set of paths; current ones are dicts.
    if isinstance(self.issue_dict[self.issue], set):
        file_list = self.issue_dict[self.issue]
    else:
        file_list = self.issue_dict[self.issue]["filelist"]
    for f in file_list:
        ext = os.path.splitext(f)[1]
        # BUILD has no .h dependency specification, so .h is not checked.
        if ext in (".c", ".cpp", ".hpp", ".C", ".cxx", ".cc"):
            find_path = self._find_build_path_contain_filename_bottom_up(f)
            if find_path:
                build_path.add(os.path.relpath(find_path, current_dir))
    if not build_path:
        # Some issues have no associated BUILD file (e.g. script-only changes).
        return
    blade = find_file_bottom_up('blade')
    cmd = "%s query --deps %s" % (blade, " ".join(build_path))
    (output, ret) = run_shell(cmd.split())
    if ret:
        warning('failed to run %s' % cmd)
        return
    submit_issue_deps = set(output.splitlines())
    issues_to_pop = []
    for issue in self.issue_dict.keys():
        if issue == self.issue:
            continue
        issue_info_url = "%s%s" % (DEFAULT_REVIEW_INFO_URL, issue)
        issue_info = json.load(urllib.urlopen(issue_info_url))
        if not issue_info['successfully']:
            warning('failed to get issue_info for issue %s' % issue)
            continue
        issue_info_detail = issue_info["requestsWithPagerInfo"][
            "requests"][0]
        issue_state = issue_info_detail['state']
        if (issue_state == _ISSUE_STATE.CLOSED or
                issue_state == _ISSUE_STATE.SUBMITED):
            # Issue already closed or submitted: schedule its removal.
            issues_to_pop.append(issue)
            continue
        if self._has_dependency_relation(submit_issue_deps, issue):
            warning('the submit issue may depends on the issue %s with'
                    ' title \"%s\"' % (issue, issue_info_detail['name']))
            answer = raw_input('Continue?(y/N) ').strip()
            if answer != 'y':
                error_exit('Exit')
    if issues_to_pop:
        # Python 2: map() runs eagerly, popping each stale issue.
        map(self.issue_dict.pop, issues_to_pop)
def cpplint_diff(blade_root_dir, cc_files):
    """partition cpplint result into two parts: the part not imported by
    your change and the yes part

    Runs cpplint over *cc_files* and, using the svn diff line ranges in
    diff_details, classifies each cpplint message as pre-existing
    (old_err_msgs) or introduced by this change (new_err_msgs).
    Returns (old_err_msgs, new_err_msgs).
    """
    # Maps filename -> list of (start, end) line ranges added by the diff.
    diff_details = common.svn_diff_added(cc_files)
    p = subprocess.Popen("%s/app/qzap/tools/cpplint.py %s" %
                         (blade_root_dir, " ".join(cc_files)),
                         stderr=subprocess.PIPE, shell=True)
    stdout, stderr = p.communicate()
    if p.returncode and p.returncode != 1:
        # returncode is 0 when no error found, 1 when error found
        common.error_exit('Failed to run cpplint, returncode=%d' %
                          p.returncode)
        return [], []
    # Keep only "file:line:" error lines and the per-file terminators.
    match_pattern = re.compile(r":\d+:|^Done processing")
    all_err_msgs = filter(lambda err_msg: match_pattern.search(err_msg),
                          stderr.splitlines())
    # (index of "Done processing" line, filename) pairs delimit each
    # file's slice of all_err_msgs.
    err_stat_info = [(line_num, err_msg.split()[2])
                     for line_num, err_msg in enumerate(all_err_msgs)
                     if err_msg.startswith('Done processing')]
    i = 0
    old_err_msgs = []
    new_err_msgs = []
    for err_end_line, filename in err_stat_info:
        if not filename in diff_details:
            # Ignore file marked as A+ in svn which has no diff_detail.
            i = err_end_line + 1
            continue
        current_diff_info = diff_details[filename]
        j = 0
        # Walk the file's error messages and its diff ranges in lockstep;
        # both are ordered by line number.
        while i < err_end_line:
            err_msg = all_err_msgs[i]
            line_num = int(err_msg.split(':')[1])
            while j < len(current_diff_info):
                if line_num < current_diff_info[j][0]:
                    # Before the next added range: pre-existing error.
                    old_err_msgs.append(err_msg)
                    break
                else:
                    if (line_num < current_diff_info[j][1]):
                        # Inside an added range: introduced by this change.
                        new_err_msgs.append(err_msg)
                        break
                    else:
                        # Past this range; advance to the next one.
                        j += 1
            if j >= len(current_diff_info):
                # Remaining errors lie after every added range: all old.
                old_err_msgs.extend(all_err_msgs[i:err_end_line])
                break
            i += 1
        i = err_end_line + 1
    return old_err_msgs, new_err_msgs
def check_svn_missing(self):
    """Warn about unversioned files ('?' status) that look like they
    should have been svn add-ed, and ask the user to confirm."""
    def maybe_missing(status):
        # '?' marks an unversioned entry; skip ignorable extensions.
        if '?' not in status:
            return False
        ext = os.path.splitext(status[1])[1]
        return ext not in _EXT_IGNORES

    missings = [entry[1] for entry in self.svn_status if maybe_missing(entry)]
    if not missings:
        return
    warning('The following files has not been added to svn:\n'
            '%s' % '\n'.join(missings))
    if raw_input('Continue?(y/N) ').strip() != 'y':
        error_exit('Exit')
def convert_file(target: dict) -> None:
    """Convert a single file described by *target*.

    Expected keys: "report" (progress line to print), "new" (output file
    path whose directories are created as needed) and "command" (encoder
    argv list run via subprocess).  Aborts via error_exit() on failure.
    """
    # Notify about the processed file
    print(target["report"])

    # Create the output directories if necessary
    out_dir = os.path.dirname(target["new"])
    os.makedirs(out_dir, exist_ok=True)

    try:
        sp.run(target["command"], stdout=sp.DEVNULL, stderr=sp.PIPE,
               check=True)
    except Exception as e:
        # Conversion failed somehow. Show the most recent encoder output
        # and the exception that happened.
        # BUG FIX: the old code read `process.stderr` here, but when
        # sp.run() itself raises (CalledProcessError, FileNotFoundError,
        # ...) `process` was never bound, so the real error was masked by
        # an UnboundLocalError.  CalledProcessError carries the captured
        # stderr on the exception instead.
        stderr = getattr(e, "stderr", None) or b""
        error_exit("{}\n\n{}".format(stderr.decode("utf-8"), e))
def base(ascii):
    """List base products."""
    conn = sqlite3.connect(db)
    with conn:
        cur = conn.cursor()
        cur.execute(sccpdb.list_base_products)
        products = cur.fetchall()
        cur.close()
    conn.close()

    # An empty table usually means the database was never populated.
    if not products:
        error_exit("No base products found, suggest an sccpsync.")

    rows = pretty_table(
        products,
        colnames=["id", "product", "type", "arch", "description"],
        fmt="ascii" if ascii else "csv",
    )
    print("\n".join(rows))
def __init__(self, options):
    """Gather everything needed to commit *options.issue*: review-server
    issue info, local/remote svn state and the blade workspace root.

    Aborts early when the review server cannot return the issue.
    """
    self.options = options
    self.issue = options.issue
    # REST endpoints for the issue's metadata and its human-facing page.
    self.issue_info_url = "%s%s" % (DEFAULT_REVIEW_INFO_URL, options.issue)
    self.issue_url = "%s%s" % (DEFAULT_REVIEW_URL, options.issue)
    # USERNAME is the Windows fallback for USER.
    self.user = os.environ.get('USER') or os.environ.get('USERNAME')
    self.issue_info = json.load(urllib.urlopen(self.issue_info_url))
    if not self.issue_info['successfully']:
        print self.issue_info['successfully']
        error_exit("Invalid issue or server down")
    # Only the first request entry of the paged result is relevant.
    self.issue_info_detail = self.issue_info["requestsWithPagerInfo"][
        "requests"][0]
    self.local_svn_info = SvnInfo('.')
    self.remote_svn_info = SvnInfo(self.local_svn_info.get_remote_path())
    self.svn_status = svn_status('.')
    self.blade_root_dir = find_blade_root_dir()
def main(base, rpmlist):
    """ Identify product(s) associated with a base containing an RPM.

    \b
    Legend:
    ?   Doesn't appear to be "<name>-<version>-<release>.<arch>[.rpm]".
    -   Not found in base product.
    =   Found in repo of base product.
    +   From a module or extension that can be enabled on base.
    """
    db = f"{config['DEFAULT']['data_dir']}/{config['SCCP']['db_name']}"
    if not sccpdb.checkdb(db):
        error_exit("Please initialise SCCP database with sccpsync.py.")
    conn = sqlite3.connect(db)
    with conn:
        cur = conn.cursor()
        try:
            rpm_list = read_rpm_list(rpmlist)
        except FileNotFoundError:
            # BUG FIX: message previously read "does not appear to exit."
            error_exit(f"{rpmlist} does not appear to exist.")
        # Restrict all searches to the given base's product family.
        cur.execute(sccpdb.create_product_family_temp_table,
                    {"base_product": base})
        for line in rpm_list:
            rpm = RPM.from_name(line)
            prods = []
            if not rpm:
                key = "?"
            else:
                cur.execute(sccpdb.search_product_family_by_rpm,
                            rpm.to_dict())
                prods = [p[1] for p in cur.fetchall()]
                if prods:
                    # '=' when found in the base itself, '+' when it comes
                    # from a module/extension enabled on the base.
                    key = "=" if base in prods else "+"
                else:
                    key = "-"
            print(f"{key} {rpm}")
            for prod in prodsort(prods, base):
                print(f"    {str(prod)}")
            print()
        cur.close()
    conn.close()
def setup_eal_opts(args, file_prefix, proc_type='auto', hugedir=None):
    """Assemble the full DPDK EAL option list, including vhost vdevs,
    PCI black/white lists and single-file-segments support.

    The '\\' entries are delimiters consumed by the pretty-printers.
    """
    core_opt = get_core_opt(args)
    mem_opt = get_mem_opt(args)

    eal_opts = [
        core_opt['attr'], core_opt['val'], '\\',
        '-n', str(args.nof_memchan), '\\',
        mem_opt['attr'], mem_opt['val'], '\\',
        '--proc-type', proc_type, '\\']

    # --dev-ids is mandatory; error_exit() aborts if it is absent.
    if args.dev_ids is None:
        common.error_exit('--dev-ids')
    else:
        dev_ids = dev_ids_to_list(args.dev_ids)

    socks = [
        {'host': '/tmp/sock%d' % did,
         'guest': '/var/run/usvhost%d' % did}
        for did in dev_ids]

    for did, sock in zip(dev_ids, socks):
        eal_opts += [
            '--vdev',
            'virtio_user%d,queues=%d,path=%s' % (
                did, args.nof_queues, sock['guest']),
            '\\']

    # PCI blacklist (-b) and whitelist (-w) are mutually exclusive.
    if (args.pci_blacklist is not None) and (args.pci_whitelist is not None):
        common.error_exit("Cannot use both of '-b' and '-w' at once")
    elif args.pci_blacklist is not None:
        for bd in args.pci_blacklist:
            eal_opts += ['-b', bd, '\\']
    elif args.pci_whitelist is not None:
        for wd in args.pci_whitelist:
            eal_opts += ['-w', wd, '\\']

    if args.single_file_segments is True:
        eal_opts += ['--single-file-segments', '\\']

    eal_opts += [
        '--file-prefix', file_prefix, '\\',
        '--', '\\']
    return eal_opts
def _check_upload_path_change(self):
    """Warn when this upload is made from a different directory than the
    issue's previous upload, and ask the user to confirm.

    Confirming records the new path as the issue's upload path.
    """
    if not self.options.issue:
        return
    issue_id = str(self.options.issue)
    relative_path = os.path.relpath(common.get_cwd(),
                                    self.current_source_dir)
    # backward compatible: only new-style dict records carry "upload_path".
    if (issue_id in self.issue_dict
            and isinstance(self.issue_dict[issue_id], dict)
            and "upload_path" in self.issue_dict[issue_id]):
        old_upload_path = self.issue_dict[issue_id]["upload_path"]
        if old_upload_path != relative_path:
            common.warning("the upload path: %s is not the same as "
                           "the last one: %s" % (relative_path,
                                                 old_upload_path))
            answer = raw_input('Continue?(y/N) ').strip()
            if answer != 'y':
                common.error_exit('Exit')
            else:
                # User accepted the move: remember the new upload path.
                self.issue_dict[issue_id]["upload_path"] = relative_path
def main():
    """Entry point: parse commit options, run all presubmit checks and,
    unless --dry-run was given, commit the issue to svn."""
    parser = OptionParser()
    parser.add_option("-i", "--issue", dest="issue",
                      help="Codereview issue number.")
    parser.add_option("--no-test", dest="no_test", action="store_true",
                      help="Don't test before commit, just build.")
    parser.add_option("-n", "--no-build", dest="no_build",
                      action="store_true",
                      help="Don't build and test before commit.")
    parser.add_option("-m", dest="m", help="Target bits")
    parser.add_option("--build-targets", dest="build_targets",
                      default="...",
                      help="Targets to build, blade target format, "
                           "separated by ','")
    parser.add_option("--build-dependeds", dest="build_dependeds",
                      action="store_true",
                      help="Also build depended targets to make more safe")
    parser.add_option("--dry-run", dest="dry_run", action="store_true",
                      help="Just run presubmit checks, not commit.")
    (options, args) = parser.parse_args()
    # The issue number is mandatory; bail out with usage otherwise.
    if not options.issue:
        parser.print_help()
        error_exit("")
    committer = IssueCommitter(options)
    committer.check()
    if options.dry_run:
        print "All presubmit checks success"
    else:
        committer.commit_to_svn()
def products(base, ascii):
    """List products associated with specified base."""
    conn = sqlite3.connect(db)
    with conn:
        cur = conn.cursor()
        # '*' means "no base filter": list every product.
        if base == "*":
            cur.execute(sccpdb.list_all_products)
        else:
            cur.execute(sccpdb.list_products_by_base,
                        {"base_product": base})
        products = cur.fetchall()
        cur.close()
    conn.close()

    if not products:
        error_exit("No base products found, suggest an sccpsync.")

    rows = pretty_table(
        products,
        colnames=["id", "product", "type", "arch", "description"],
        fmt="ascii" if ascii else "csv",
    )
    print("\n".join(rows))
def commit_to_svn(self):
    """Commit the working copy with a generated commit message.

    The message carries the review title, the issue URL, the review id
    (--crid) and an MD5 digest of title+URL; on success the issue is
    removed from the local bookkeeping file.
    """
    title = self.issue_info_detail["name"]
    # mkstemp() gives a unique path; the file is re-opened by name and the
    # original descriptor closed separately below.
    (fd, path) = tempfile.mkstemp("svn_commit")
    f = open(path, "w")
    print >> f, title.encode('utf-8')
    print >> f, "Issue: %s" % self.issue_url
    print >> f, "--crid=%d" % self.issue_info_detail["id"]
    m = hashlib.md5()
    m.update(title.encode('UTF-8'))
    m.update(self.issue_url)
    print >> f, "Digest: %s" % m.hexdigest()
    f.close()
    os.close(fd)
    # NOTE(review): 'filenames' is never used afterwards — dead local?
    filenames = '.'
    cmd = "svn commit -F %s ." % path
    p = subprocess.Popen(cmd, shell=True)
    p.wait()
    os.remove(path)
    if p.returncode != 0:
        error_exit("Not committed")
    else:
        # Successful commit: drop the issue from the bookkeeping dict and
        # persist the updated state.
        if self.issue_dict.has_key(self.issue):
            self.issue_dict.pop(self.issue)
        common.save_issue_info(self.blade_root_dir, self.issue_dict)
def _error_exit(msg):
    """Abort with the module's 'Upload Error:' prefix prepended to *msg*."""
    common.error_exit('Upload Error: %s' % msg)
def svn_up_to_date(self):
    """Run 'svn up' over the whole blade workspace, aborting on failure."""
    if subprocess.call('svn up %s' % self.blade_root_dir, shell=True) != 0:
        # BUG FIX: message previously read "faile."
        error_exit('svn up workspace failed.')
def _error_exit(msg):
    """Abort with the module's 'Secure Scan Error:' prefix prepended to *msg*."""
    common.error_exit('Secure Scan Error: %s' % msg)
def main():
    """Launch DPDK's load_balancer example inside an SPP container.

    Builds a 'sudo docker run' command line, the EAL options and the
    load_balancer application options, pretty-prints them, and executes
    unless --dry-run was given.
    """
    args = parse_args()

    # Check core_mask or core_list is defined.
    if args.core_mask is not None:
        core_opt = {'attr': '-c', 'val': args.core_mask}
    elif args.core_list is not None:
        core_opt = {'attr': '-l', 'val': args.core_list}
    else:
        common.error_exit('--core-mask or --core-list')

    # Check memory option is defined (per-socket memory wins over -m).
    if args.socket_mem is not None:
        mem_opt = {'attr': '--socket-mem', 'val': args.socket_mem}
    else:
        mem_opt = {'attr': '-m', 'val': str(args.mem)}

    # Check for other mandatory options.
    if args.dev_ids is None:
        common.error_exit('--dev-ids')
    if args.rx_ports is None:
        common.error_exit('--rx-ports')
    if args.tx_ports is None:
        common.error_exit('--tx-ports')
    if args.worker_lcores is None:
        common.error_exit('--worker-lcores')
    if args.lpm is None:
        common.error_exit('--lpm')

    # The container runs in the background by default; --foreground
    # switches to an interactive tty.
    if args.foreground is not True:
        docker_run_opt = '-d'
    else:
        docker_run_opt = '-it'

    # Setup for vhost devices with given device IDs.
    dev_ids = common.dev_ids_to_list(args.dev_ids)
    socks = []
    for dev_id in dev_ids:
        socks.append({
            'host': '/tmp/sock%d' % dev_id,
            'guest': '/var/run/usvhost%d' % dev_id})

    # Setup docker command ('\\' entries are pretty-print delimiters).
    docker_cmd = ['sudo', 'docker', 'run', docker_run_opt, '\\']
    for sock in socks:
        docker_cmd += ['-v', '%s:%s' % (sock['host'], sock['guest']), '\\']
    docker_cmd += [
        '-v', '/dev/hugepages:/dev/hugepages', '\\',
        conf.spp_container, '\\']

    app_name = 'load_balancer'
    cmd_path = '%s/examples/%s/%s/%s' % (conf.RTE_SDK, app_name,
                                         conf.RTE_TARGET, app_name)

    # Setup load_balancer command (previous comment wrongly said testpmd).
    lb_cmd = [cmd_path, '\\']

    eal_opts = [
        core_opt['attr'], core_opt['val'], '\\',
        '-n', str(args.nof_memchan), '\\',
        mem_opt['attr'], mem_opt['val'], '\\',
        '--proc-type', 'auto', '\\']
    for i in range(len(dev_ids)):
        eal_opts += [
            '--vdev',
            'virtio_user%d,path=%s' % (dev_ids[i], socks[i]['guest']),
            '\\']
    eal_opts += [
        '--file-prefix', '%s%d' % (app_name, dev_ids[0]), '\\',
        '--', '\\']

    # Application-level load_balancer options.
    lb_opts = []
    if args.ring_sizes is not None:
        lb_opts += ['--ring-sizes', args.ring_sizes, '\\']
    if args.burst_sizes is not None:
        lb_opts += ['--burst-sizes', args.burst_sizes, '\\']
    if args.pos_lb is not None:
        lb_opts += ['--pos-lb', str(args.pos_lb)]
    # These values are quoted because the application parses them as
    # single bracketed arguments.
    rx_ports = '"%s"' % args.rx_ports
    tx_ports = '"%s"' % args.tx_ports
    worker_lcores = '"%s"' % args.worker_lcores
    lpm = '"%s"' % args.lpm
    lb_opts += [
        '--rx', rx_ports, '\\',
        '--tx', tx_ports, '\\',
        '--w', worker_lcores, '\\',
        '--lpm', lpm]

    cmds = docker_cmd + lb_cmd + eal_opts + lb_opts
    common.print_pretty_commands(cmds)

    if args.dry_run is True:
        exit()

    # The '\\' entries exist only for print_pretty_commands(); strip them
    # before actually executing.
    while '\\' in cmds:
        cmds.remove('\\')
    subprocess.call(cmds)
def main() -> None:
    """Entry point: discover music files, convert them with the selected
    codec and optionally copy non-music files over unchanged."""
    parser = create_parser()
    args = parser.parse_args()

    # Check whether encoders are present on the system
    ex_v: VersionList = check_executables(CODECS)
    # Update codecs dict with encoder versions
    for c, v in ex_v:
        CODECS[c]["version"] = v

    if args.info:
        print(codecs_info(CODECS))
        sys.exit()

    # This will fail if the output directory cannot be created or it
    # already exists and is not a directory
    try:
        os.makedirs(args.output, exist_ok=True)
    except FileExistsError:
        error_exit("‘{}’ exists and is not a directory.".format(
            args.output))
    except PermissionError:
        error_exit("Cannot create output directory ‘{}’ "
                   "(insufficient permission).".format(args.output))

    # Check whether the output dir is accessible for writes
    if not check_access(args.output, write=True):
        error_exit("Cannot write to output directory ‘{}’".format(
            args.output))

    # The selected codec to convert to, and its CodecProps entry
    sel_codec = args.codec
    codec_props = CODECS[sel_codec]
    if codec_props["version"] == "MISSING":
        # CONSISTENCY FIX: every other fatal path in this function goes
        # through error_exit(); this one used sys.exit() directly.
        error_exit("Couldn't find the ‘{}’ encoder. You need to install it "
                   "in order to use the ‘{}’ codec.".format(
                       codec_props["encoder"], sel_codec))

    # If the -s/-S options were given, prepare the substitution pairs
    subsf = evaluate_substitution(args.substitutef) if args.substitutef else None
    subsd = evaluate_substitution(args.substituted) if args.substituted else None

    music_map = find_music(args.dirs)
    in_out_list = create_in_out_paths(music_map, args.output, subsf, subsd)

    convert_all_files(in_out_list, args, codec_props)

    # If the --copy option has been selected, process files to copy
    if args.copy:
        print("Copying unmodified files…")
        in_out_list_copy = create_in_out_paths(music_map, args.output,
                                               subsf, subsd, copy=True,
                                               c_template=args.copy)
        for infile, outfile in in_out_list_copy:
            copyfile(infile, outfile)
def check_approve(self):
    """Abort unless at least one reviewer has approved the issue."""
    approvers = self.issue_info_detail['usersPassRequest']
    if not approvers:
        error_exit("Not approved.")
conn = sqlite3.connect(db) packages = [] with conn: cur = conn.cursor() if product == "*": cur.execute(sccpdb.search_for_all_versions, {"name": name}) else: cur.execute(sccpdb.search_product_for_all_versions, { "name": name, "product": product }) packages = cur.fetchall() cur.close() conn.close() rpmlist = [] for rpm in packages: rpmlist.append(RPM(rpm[0], rpm[1], rpm[2], rpm[3])) rpmlist.sort(reverse=True) print("\n".join(map(str, rpmlist))) if __name__ == "__main__": logging.basicConfig(level=logging.INFO) if not sccpdb.checkdb(db): error_exit("Please initialise SCCP database with sccpsync.py.") app.add_command(identify, "id") app.add_command(base, "base") app.add_command(products, "products") app.add_command(search, "search") app()
def main():
    """Launch pktgen-dpdk inside an SPP container as a secondary process.

    Builds the docker and pktgen command lines, pretty-prints them, then
    executes.
    """
    args = parse_args()

    # Exactly one of core_mask/core_list must be given.
    if args.core_mask is not None:
        core_opt = {'attr': '-c', 'val': args.core_mask}
    elif args.core_list is not None:
        core_opt = {'attr': '-l', 'val': args.core_list}
    else:
        common.error_exit('core_mask or core_list')

    # Per-socket memory wins over plain -m.
    if args.socket_mem is not None:
        mem_opt = {'attr': '--socket-mem', 'val': args.socket_mem}
    else:
        mem_opt = {'attr': '-m', 'val': str(args.mem)}

    if args.dev_ids is None:
        common.error_exit('--dev-ids')

    # Port matrix: use the user's, or derive one from the core range.
    if args.matrix is not None:
        matrix = args.matrix
    else:
        port_id = 0
        core_range = cores_to_range(core_opt)
        if len(core_range) < 2:
            print("Error: Two or more cores required!")
            exit()
        elif len(core_range) == 2:
            matrix = '%d.%d' % (core_range[1], port_id)
        else:
            matrix = '[%d:%d].%d' % (core_range[1], core_range[2], port_id)

    # pktgen theme
    theme_file = 'themes/white-black.theme'

    # '\\' entries are pretty-print delimiters, stripped before exec.
    docker_cmd = [
        'sudo', 'docker', 'run', '-it', '\\',
        '--privileged', '\\',
        '--workdir', '%s/../pktgen-dpdk' % conf.RTE_SDK, '\\',
        '-v', '/dev/hugepages:/dev/hugepages', '\\',
        '-v', '/var/run/:/var/run/', '\\',
        '-v', '/tmp/:/tmp/', '\\',
        conf.spp_container, '\\']

    cmd_path = '%s/../pktgen-dpdk/app/%s/pktgen' % (conf.RTE_SDK,
                                                    conf.RTE_TARGET)
    pktgen_cmd = [
        cmd_path, '\\',
        core_opt['attr'], core_opt['val'], '\\',
        '-n', str(args.nof_memchan), '\\',
        mem_opt['attr'], mem_opt['val'], '\\',
        '--proc-type', 'secondary', '\\']

    for dev_id in common.dev_ids_to_list(args.dev_ids):
        pktgen_cmd += ['--vdev', 'net_ring%d' % dev_id, '\\']

    # BUG FIX: unlike every other option here, blacklists was used without
    # a None guard — args.blacklists.split(',') raised AttributeError when
    # the option was omitted.
    if args.blacklists is not None:
        for bl in args.blacklists.split(','):
            pktgen_cmd += ['-b', bl, '\\']

    pktgen_cmd += [
        '--log-level', str(args.log_level), '\\',
        '--', '\\',
        '-T', '\\',
        '-P', '\\',
        '-m', matrix, '\\',
        '-f', theme_file]

    cmds = docker_cmd + pktgen_cmd
    common.print_pretty_commands(cmds)

    # Strip the pretty-print delimiters before executing.
    while '\\' in cmds:
        cmds.remove('\\')
    subprocess.call(cmds)