def parseArgs(docstr, arguments, config):
    """Parse *arguments* with docopt and resolve '.grapeconfig.' references.

    Any string-valued entry of the form '.grapeconfig.<section>.<option>'
    is replaced with the matching value from *config* (when one is given).

    Args:
        docstr:    docopt usage string.
        arguments: argv list to parse.
        config:    object exposing get(section, option), or None to skip
                   substitution.

    Returns:
        The docopt dictionary with config references resolved.
    """
    args = docopt(docstr, argv=arguments)
    for key in args:
        value = args[key]
        # isinstance() instead of 'type(...) is str'; only string values can
        # carry a '.grapeconfig.' reference.
        if isinstance(value, str) and ".grapeconfig." in value and config is not None:
            # Layout: '.grapeconfig.<section>.<option>' -> tokens[2]/tokens[3]
            tokens = value.split('.')
            args[key] = config.get(tokens[2].strip(), tokens[3].strip())
    return args
def run(common_args, cmd_argv):
    """Update a previously mounted subtree package via 'git subtree pull'."""
    args = docopt(__doc__, argv=cmd_argv)

    # -b option is not supported/needed ('is not None' instead of '!= None')
    if args['-b'] is not None:
        sys.exit("The '-b' option is not supported/needed. Use a 'remote-ref' as the <id> argument")

    # Default Package name (overridden by -p)
    pkg = args['<repo>']
    if args['-p']:
        pkg = args['-p']

    # Set directory for the subtree directory
    dst = os.path.join(args['<dst>'], pkg)
    dst = utils.force_unix_dir_sep(dst)
    utils.print_verbose(f"Location of the copy being updated: {dst}")

    # Update the 'subtree'
    cmd = f'git subtree pull --prefix {dst} {args["<origin>"]}/_git/{args["<repo>"]} {args["<id>"]} --squash'
    t = utils.run_shell(cmd, common_args['-v'])
    utils.check_results(t, "ERROR: Failed the update a subtree for the specified package/repository.")
def run( common_args, cmd_argv ):
    """Remove ('unmount') a previously mounted package directory."""
    args = docopt(scm.umount.USAGE, argv=cmd_argv)

    # Success Msg
    if args['get-success-msg']:
        print( "Repo unmount. You will need to perform a 'git add/rm' to remove the deleted files" )
        return

    # Error Msg
    if args['get-error-msg']:
        print( "" )   # No addition info
        return

    # -b option is not supported/needed ('is not None' instead of '!= None')
    if args['-b'] is not None:
        sys.exit( "The '-b' option is not supported/needed. Use a 'remote-ref' as the <id> argument" )

    # Default Package name (overridden by -p)
    pkg = args['<repo>']
    if args['-p']:
        pkg = args['-p']

    # Set the foreign package directory to be deleted
    dst = os.path.join( args['<dst>'], pkg )
    if not os.path.isdir(dst):
        sys.exit( f"ERROR: The Package/Directory - {dst} - does not exist." )
    utils.print_verbose( f"Package/directory being removed: {dst}" )

    # There is no 'git subtree rm' command --> we just simply delete the
    # package directory (clearing read-only bits first).
    utils.set_tree_readonly( dst, False )
    utils.remove_tree( dst )
def initCli(self):
    """ Apply CLI arguments and print the greeting. """
    # Get CLI options and drop argv[0] (the program name).
    # BUGFIX: map() returns an iterator on Python 3, so 'del argv[0]' would
    # raise TypeError -- materialize the list before slicing.
    argv = QtCore.QCoreApplication.instance().arguments()
    argv = [str(a) for a in argv][1:]
    args = docopt(__docs__, argv=argv, version='Boids ' + VERSION)

    # CLI options
    if args['run']:
        self.cliArgsApply(args)

    # Presets: each flag triggers the matching canned configuration.
    if args['preset']:
        if args['wonky']:
            self.preset_wonky()
        if args['wacky']:
            self.preset_wacky()
        if args['racers']:
            self.preset_racers()
        if args['testing']:
            self.preset_testing()
        elif args['normal']:
            pass

    # Print the greeting
    print(__greeting__)
    print("    Version " + VERSION + "\n")
def run(common_args, cmd_argv):
    """Set or update the package-info fields of the package file, then
    print the resulting info section as JSON."""
    args = docopt(__doc__, argv=cmd_argv)

    # Load the package file
    json_dict = utils.load_package_file()

    # Setting all fields at once (positional form)
    if args['<desc>']:
        utils.json_update_package_file_info(json_dict, args['<pkgname>'], args['<desc>'],
                                            args['<owner>'], args['<email>'], args['<url>'],
                                            args['<rname>'], args['<rtype>'], args['<rorigin>'])

    # Individual field updates: one (CLI option -> keyword argument) pair per
    # field, replacing eight copy-pasted if-blocks (each ending in a useless
    # 'pass').
    for opt, kw in (('--pkgname', 'pkgname'), ('--desc', 'desc'),
                    ('--owner', 'owner'), ('--email', 'email'),
                    ('--url', 'url'), ('--rname', 'rname'),
                    ('--rtype', 'rtype'), ('--rorigin', 'rorigin')):
        if args[opt] is not None:
            utils.json_update_package_file_info(json_dict, **{kw: args[opt]})

    # save updates
    utils.write_package_file(json_dict)
    info_dict = utils.json_copy_info(json_dict)
    print(json.dumps(info_dict, indent=2))
def __init__(self):
    # Launch a Mininet topology described by a YAML file (Python 2 module:
    # note the print statements below).
    arguments = docopt(__doc__, version='Mininet Topology Utility 1.0')
    setLogLevel('info')
    # Topology description file; --topology overrides the default.
    file = 'mn-topo.yml'
    if arguments['--topology']:
        file = arguments['--topology']
    props = None
    if (os.path.isfile(file)):
        with open(file, 'r') as f:
            # SECURITY NOTE(review): yaml.load without an explicit Loader is
            # unsafe on untrusted input (and deprecated in PyYAML >= 5.1);
            # switch to yaml.safe_load if the file is not fully trusted.
            props = yaml.load(f)
    if props is None:
        print "ERROR: yml topology file not found"
        sys.exit()
    if arguments['--controller']:
        # --controller replaces the file's controller list entirely.
        props['controller'] = []
        i = 0
        for ip in arguments['--controller']:
            props['controller'].append({'name': "c{}".format(i), 'ip': ip})
            i = i + 1
    # Build the topology, start it, drop into the Mininet CLI, stop on exit.
    topo = mntopo.topo.Topo(props)
    topo.start()
    topo.cli()
    topo.stop()
def run(common_args, cmd_argv):
    """Mount a remote repository as a git subtree under <dst>/<pkg>."""
    args = docopt(scm.mount.USAGE, argv=cmd_argv)

    # Success Msg
    if args['get-success-msg']:
        print("Repo mounted and committed to your repo")
        return

    # Error Msg
    if args['get-error-msg']:
        print("")  # No message
        return

    # Check if there are pending repo changes (working tree, then index).
    # These were f-strings with no placeholders -- plain strings now.
    cmd = 'git diff-index HEAD --exit-code --quiet'
    t = utils.run_shell(cmd, False)
    cmd = 'git diff-index --cached HEAD --exit-code --quiet'
    t2 = utils.run_shell(cmd, False)
    utils.check_results(t, "ERROR: Your local repo has pending tree modification (i.e. need to do a commit/revert).")
    utils.check_results(t2, "ERROR: Your local repo has pending index modification (i.e. need to do a commit/revert).")

    # -b option is not supported/needed ('is not None' instead of '!= None')
    if args['-b'] is not None:
        sys.exit("The '-b' option is not supported/needed. Use a 'remote-ref' as the <id> argument")

    # Default Package name (overridden by -p)
    pkg = args['<repo>']
    if args['-p']:
        pkg = args['-p']

    # Make sure the Parent destination directory exists
    dst = args['<dst>']
    utils.mkdirs(dst)

    # Set directory for the subtree directory
    dst = os.path.join(dst, pkg)
    dst = utils.force_unix_dir_sep(dst)
    utils.print_verbose(f"Destination for the copy: {dst}")

    # Create a 'subtree'
    cmd = f'git subtree add --prefix {dst} {args["<origin>"]}/{args["<repo>"]}.git {args["<id>"]} --squash'
    t = utils.run_shell(cmd, common_args['-v'])
    if utils.is_error(t):
        # Clean-up dst dir if there was failure
        utils.remove_tree(dst)
    utils.check_results(t, "ERROR: Failed to create a subtree for the specified package/repository.")
def run(common_args, cmd_argv):
    """Print the root directory of the local repository."""
    args = docopt(scm.findroot.USAGE, argv=cmd_argv)

    # Ask git for the top-level directory of the working tree.
    # (Plain string -- the original f-string had no placeholders.)
    cmd = 'git rev-parse --show-toplevel'
    t = utils.run_shell(cmd, common_args['-v'])
    # BUGFIX: message typo was "Failed find to find".
    utils.check_results(t, "ERROR: Failed to find the root directory of local repository.")
    print(utils.standardize_dir_sep(t[1]))
def main() -> None:
    """main entry point (hopefully)"""
    # Driver: with no SCRIPT arguments run interactively; otherwise stat and
    # execute each named file with the mouse interpreter.
    global _FROMFILE, _FILENAME, _DRYRUN, _TRACERT, _SILENT, _VERBOSE
    args = docopt.docopt(__doc__, version=__file__ + " " + __version__)  # type: Dict[str, Any]
    # Copy flag options into the module-level globals declared above.
    _DRYRUN = args["-n"]  # type: bool
    _TRACERT = args["-t"]  # type: bool
    _SILENT = args["-s"]  # type: bool
    _VERBOSE = args["-v"]  # type: bool
    fnames = args["SCRIPT"]  # type: str
    if len(fnames) == 0:
        interpret(args)
    elif len(fnames) == 1:
        try:
            os.stat(fnames[0])
        except IOError as error:
            # Missing file: fall back to interactive mode, then exit 2.
            print(error, "\nstat: cannot stat '" + fnames[0] + "': no such file or directory, interpreting using stdio instead\n")
            interpret(args)
            exit(2)
        else:
            _FROMFILE = True
            _FILENAME = fnames[0]  # type: str
            try:
                filio = open(_FILENAME, 'r')  # type: _io.TextIOWrapper
                prog = list(filio.read())  # type: List[str]
            finally:
                filio.close()
            mouse.execute(prog)
            exit(0)
    # open multiple files at once
    elif len(fnames) > 1:
        for fname in fnames:
            try:
                os.stat(fname)
            except IOError as error:
                print(error, "\nstat: cannot stat '" + fname + "': no such file or directory")
            else:
                _FROMFILE = True  # type: bool
                _FILENAME = fname  # type: str
                try:
                    filio = open(_FILENAME, 'r')  # type: _io.TextIOWrapper
                    prog = list(filio.read())  # type: List[str]
                finally:
                    filio.close()
                mouse.execute(prog)
        # NOTE(review): the original nesting is ambiguous in the mangled
        # source; exit(0) is placed after the loop so every listed file runs.
        # If it belonged inside the loop body, only the first readable file
        # would execute -- confirm against the original layout.
        exit(0)
def main() -> None:
    """main entry point (hopefully)"""
    # Untyped duplicate of the annotated main() above in this paste: with no
    # SCRIPT arguments run interactively; otherwise stat and execute each
    # named file with the mouse interpreter.
    global _FROMFILE, _FILENAME, _DRYRUN, _TRACERT, _SILENT, _VERBOSE
    args = docopt.docopt(__doc__, version=__file__ + " " + __version__)
    # Copy flag options into the module-level globals declared above.
    _DRYRUN = args["-n"]
    _TRACERT = args["-t"]
    _SILENT = args["-s"]
    _VERBOSE = args["-v"]
    fnames = args["SCRIPT"]  # type: str
    if len(fnames) == 0:
        interpret(args)
    elif len(fnames) == 1:
        try:
            os.stat(fnames[0])
        except IOError as error:
            # Missing file: fall back to interactive mode, then exit 2.
            print(error, "\nstat: cannot stat '" + fnames[0] + "': no such file or directory, interpreting using stdio instead\n")
            interpret(args)
            exit(2)
        else:
            _FROMFILE = True
            _FILENAME = fnames[0]
            try:
                filio = open(_FILENAME, 'r')
                prog = list(filio.read())
            finally:
                filio.close()
            mouse.execute(prog)
            exit(0)
    # open multiple files at once
    elif len(fnames) > 1:
        for fname in fnames:
            try:
                os.stat(fname)
            except IOError as error:
                print(error, "\nstat: cannot stat '" + fname + "': no such file or directory")
            else:
                _FROMFILE = True
                _FILENAME = fname
                try:
                    filio = open(_FILENAME, 'r')
                    prog = list(filio.read())
                finally:
                    filio.close()
                mouse.execute(prog)
        # NOTE(review): nesting ambiguous in the mangled source; exit(0) is
        # placed after the loop so every listed file runs -- confirm.
        exit(0)
def main():
    """main entry point: run each SCRIPT file, or interpret interactively."""
    args = docopt.docopt(__doc__, version=__file__ + " " + __version__)
    fs = args["SCRIPT"]
    if fs:
        # The original used enumerate() but discarded the index --
        # iterate the filenames directly.
        for e in fs:
            runfile(e, args)
    else:
        interpret(args)
def main():
    """Entry point: run the self-test, or compare two version strings."""
    arguments = docopt(__doc__, version="version_chk.py v0.9")
    verbose = arguments["--verbose"]

    # Diagnostic mode: dump the parsed arguments and self-test, then stop.
    if arguments["--test"]:
        print(arguments)
        self_test()
        exit(0)

    # Normal mode: both version strings must be present.
    required = arguments["<required_version_str>"]
    actual = arguments["<version_str>"]
    if required and actual:
        return compare_versions(actual, required, verbose)
def main():
    """Parse CLI arguments, then create and update the study stack."""
    try:
        # parse arguments, use file docstring as a parameter definition
        arguments = docopt.docopt(__doc__)
        study = arguments['<study_name>']

    # handle invalid options
    except docopt.DocoptExit:
        # BUGFIX: the original printed the message and then fell through to
        # create/update cloud resources with no parsed arguments; abort
        # instead. (SystemExit needs no import.)
        print("Invalid Command")
        raise SystemExit(1)

    create_stack()
    wait()
    update_stack()
    create_instance()
def run(argv):
    """Generate FSM code from a Cadifra diagram via the Sinelabore code
    generator, then post-process the generated sources."""
    print(argv)

    # Process command line args...
    args = docopt(usage, version="0.0.1")

    # Check the environment variables ('is None' instead of '== None')
    sinpath = os.environ.get("SINELABORE_PATH")
    if sinpath is None:
        exit("ERROR: The SINELABORE_PATH environment variable is not set.")

    # Filenames
    fsmdiag = args['<basename>'] + ".cdd"
    fsm = args['<basename>']
    cfg = 'codegen.cfg'
    context = fsm + '_ctx'
    trace = fsm + '_trace'

    # Create the config file for Sinelabore
    ctx_t = fsm + "_context_t"
    geneatedCodegenConfig(cfg, fsm, ctx_t)

    # Build Sinelabore command
    cmd = 'java -jar -Djava.ext.dirs={} {}/codegen.jar -p CADIFRA -o {} -l cx -Trace {}'.format(sinpath, sinpath, fsm, fsmdiag)
    cmd = standardize_dir_sep(cmd)

    # Invoke Sinelabore command.
    # SECURITY NOTE: shell=True with an interpolated command string -- inputs
    # come from the local CLI/environment, but avoid untrusted basenames.
    print(cmd)
    p = subprocess.Popen(cmd, shell=True)
    p.communicate()  # unused result variable removed
    if p.returncode != 0:
        # BUGFIX: message typo was "encounterd".
        exit("ERROR: Sinelabore encountered an error or failed to run.")

    # Clean-up auto generated sources files
    cleanup_context_include(fsm + ".c", fsm)
    add_include(fsm + ".c", trace + '.h')
    cleanup_trace(trace + '.h', fsm, 'MY_ENABLE_TRACE_SWITCH')

    # Clean-up unwanted files
    os.remove(cfg)
    delete_file(fsm + "_trace.java")

    # Generate Context Header file
    actions, guards = getContextMethods(fsmdiag)
    generatedContextClass(fsm, ctx_t, context, getHeader(), actions, guards, args['-d'].strip())
def main():
    """Dispatch to patch application or patch generation based on CLI args."""
    global args, verbose
    args = docopt(__doc__)
    print(args)

    # Conditional expression instead of the original if/else assignment.
    verbose = 1 if args["--verbose"] else 0

    if args["patch"]:
        # Apply mode: patch the sources, then stage the results.
        apply_patches()
        stage_patched_files()
    elif args["generate"]:
        # Generate mode: include the patches folder, then build the patches.
        folder_list.append("patches")
        generate_patches()
def __init__(self):
    # Build scapy packets from CLI expressions and send them on an interface.
    arguments = docopt(__doc__, version='Mininet Topology Utility 1.0')
    # Optional interface to transmit on (scapy default when None).
    iface = None
    if arguments['--iface']:
        iface = arguments['--iface']
    # Number of copies of each packet to send (default 1).
    count = 1
    if arguments['--count']:
        count = int(arguments['--count'])
    packets = []
    for packet in arguments['<packet>']:
        # SECURITY NOTE(review): eval() of a CLI-supplied string executes
        # arbitrary Python; acceptable only for a trusted local test tool.
        packets.append(eval(packet))
    sendp(packets, iface=iface, count=count)
def __init__(self):
    # Sniff packets matching <filter> and validate the received count
    # (Python 2 module: note the print statements and integer '/').
    arguments = docopt(__doc__, version='Mininet Topology Utility 2.0')
    pkt_callback = None
    # Expected number of packets (default 1).
    count = 1
    if arguments['--count']:
        count = int(arguments['--count'])
    # Allowed loss percentage: reduce the expected count accordingly.
    percentage = 0
    if arguments['--percentage']:
        percentage = int(arguments['--percentage'])
    if count > 0 and percentage > 0 and percentage <= 100:
        count = count - ((percentage * count) / 100)
        if count <= 0:
            count = 1
    iface = None
    if arguments['--iface']:
        iface = arguments['--iface']
    # Sniff timeout in seconds (default 60).
    timeout = 60
    if arguments['--timeout']:
        timeout = int(arguments['--timeout'])
    if arguments['--seq']:
        # Sequence checking only makes sense for TCP traffic.
        if arguments['<filter>'].lower().find('tcp') < 0:
            print "ERROR: tcp must be provided in the filter"
            sys.exit(1)
        self.sequence = []
        pkt_callback = self.pkt_seq_callback
    if arguments['--timestamp']:
        self.timestamp = []
        pkt_callback = self.check_packet_timestamp
    # sniff() needs a positive count even when we expect zero packets.
    wait_packets = count
    if count <= 0:
        wait_packets = 1
    recv_packets = len(sniff(count=wait_packets, iface=iface, filter=arguments['<filter>'], timeout=timeout, prn=pkt_callback))
    # Fail when packets arrive despite an expected count of zero, or when
    # fewer packets than expected were captured.
    if (count == 0 and recv_packets > 0) or recv_packets < count:
        print "ERROR: received packets '{}' is different from expected '{}'".format(recv_packets, count)
        sys.exit(1)
    print "SUCCESS: received '{}' packets successfully. Expected '{}'".format(recv_packets, count)
def __apply__(args, CLI):
    # Apply global CLI flags (verbosity / progress display / process count).
    # Accepts either an already-parsed docopt dict or a raw argv list.
    if type(args) is docoptDict:
        if args["-v"]:
            setVerbosity(2)
        elif args["-q"]:
            setVerbosity(0)
        else:
            setVerbosity(1)
        if args["--noProgress"]:
            setShowProgress(False)
            # NOTE(review): the nesting of the --np handling is ambiguous in
            # the mangled source; it is reconstructed inside the --noProgress
            # branch with the else attached to the outer if -- confirm.
            if args["--np"]:
                MultiRepoCommandLauncher.numProcs = int(args["--np"])
        else:
            setShowProgress(True)
    # NOTE(review): types.ListType exists only on Python 2.
    if type(args) is types.ListType:
        # assume the list has yet to be parsed by docopt into the dict __apply__ expects.
        return __apply__(docopt(CLI, args, options_first=True), CLI)
def run(common_args, cmd_argv):
    """Copy a remote repository into <dst>/<pkg>: via the mount command by
    default, or as a brute-force non-tracked clone with --force."""
    args = docopt(scm.copy.USAGE, argv=cmd_argv)

    # Use the mount command so as to have consistent pre/post GIT behavior with adopting non-integrated packages
    if not args['--force']:
        cmd_argv[0] = 'mount'
        cmd_argv.insert(1, '--noro')
        scm.git.mount.run(common_args, cmd_argv)

    # Do a brute force copy
    else:
        # -b option is not supported/needed ('is not None' instead of '!= None')
        if args['-b'] is not None:
            sys.exit("The '-b' option is not supported/needed. Use a 'remote-ref' as the <id> argument")

        # Default Package name (overridden by -p)
        pkg = args['<repo>']
        if args['-p']:
            pkg = args['-p']

        # Make sure the destination directory exists
        dst = os.path.join(os.getcwd(), args['<dst>'])
        utils.print_verbose(f"Destination for the copy: {dst}")
        utils.mkdirs(dst)

        # Create a clone of the repo
        # NOTE: I hate cloning the entire repo - but I have not found a way to get JUST a snapshot by a remote-ref
        cmd = f'git clone --branch {args["<id>"]} --depth=1 {args["<origin>"]}/_git/{args["<repo>"]} {pkg}'
        utils.push_dir(dst)
        t = utils.run_shell(cmd, common_args['-v'])
        utils.pop_dir()
        if utils.is_error(t):
            # Clean-up dst dir if there was failure
            utils.remove_tree(dst)
        # BUGFIX: message typo was "Failed the retreive/clone".
        utils.check_results(t, f"ERROR: Failed to retrieve/clone the specified package/repository. Note: the <id> ({args['<id>']}) MUST be a git TAG.")

        # Remove the .git directory since this is a non-tracked copy
        gitdir = os.path.join(dst, pkg, ".git")
        utils.remove_tree(gitdir, warn_msg="Not able to remove the .git directory for local copy")
def startup():
    """Top-level GRAPE entry: verify the git version, parse CLI args, and
    dispatch to the grape menu (interactive or one-shot)."""
    # `git --version` prints e.g. "git version 1.8.3"; walk tokens backwards
    # until the dotted version token is found.
    versionOutput = git.version().split()
    versionString = versionOutput.pop()
    while '.' not in versionString:
        versionString = versionOutput.pop()
    versions = versionString.split('.')
    if int(versions[0]) == 1 and int(versions[1]) < 8:
        print('Grape requires at least git version 1.8, currently using %s' % versionString)
        return False
    #TODO - allow addition grape config file to be specified at command line
    #additionalConfigFiles = []
    #grapeConfig.read(additionalConfigFiles)
    # Grape's own version is the third whitespace-separated token of the
    # VERSION file next to this module.
    with open(os.path.join(vinePath, "VERSION"), 'r') as f:
        grapeVersion = f.read().split()[2]
    args = docopt(CLI, version=grapeVersion, options_first=True)
    myMenu = grapeMenu.menu()
    utility.applyGlobalArgs(args)
    retval = True
    try:
        if (args["<command>"] is None):
            # Interactive mode: present the text menu until a choice
            # reports completion.
            done = 0
            while not done:
                myMenu.presentTextMenu()
                choice = utility.userInput(
                    "Please select an option from the above menu", None).split()
                done = myMenu.applyMenuChoice(choice[0], choice)
        # If they specified a command line argument, then assume that it's
        # a menu option, and bypass the menu
        elif (len(sys.argv) > 1):
            retval = myMenu.applyMenuChoice(args["<command>"], args["<args>"])
    except KeyboardInterrupt:
        print("GRAPE ERROR: Operation interrupted by user, exiting...")
        retval = False
    # Exit the script
    print("Thank you - good bye")
    return retval
def load_config(self, doc_str):
    """Parse CLI args with docopt and overlay defaults from a --json file.

    Options explicitly given on the command line win over values from the
    JSON config; config keys map to their '--<key>' options.
    """
    args = docopt.docopt(doc_str)
    # Option names the user explicitly set on the command line.
    # BUGFIX: the original also tested len(x.split('=')) > 0, which is
    # vacuous (split() always yields at least one element) -- dropped.
    is_set = [x.split('=')[0] for x in sys.argv[1:] if x.split('=')[0].startswith('-')]
    config = None
    if '--json' in args and args['--json']:
        with open(args['--json']) as json_config:
            config = json.load(json_config)
    if config:
        # items() instead of the Python-2-only iteritems(); iteration
        # behavior is identical on both interpreters.
        for key, value in config.items():
            if '--%s' % (key) not in is_set:
                args['--%s' % (key)] = value
    return args
def startup():
    """Verify the git version, parse CLI args, and run the grape menu."""
    # Extract the dotted version token from `git --version` output.
    tokens = git.version().split()
    version_token = tokens.pop()
    while '.' not in version_token:
        version_token = tokens.pop()
    parts = version_token.split('.')
    if int(parts[0]) == 1 and int(parts[1]) < 8:
        print('Grape requires at least git version 1.8, currently using %s' % version_token)
        return False

    #TODO - allow addition grape config file to be specified at command line
    #additionalConfigFiles = []
    #grapeConfig.read(additionalConfigFiles)

    # Grape's version: third whitespace-separated token of the VERSION file.
    version_path = os.path.join(vinePath, "VERSION")
    with open(version_path, 'r') as fh:
        grapeVersion = fh.read().split()[2]

    args = docopt(CLI, version=grapeVersion, options_first=True)
    menu = grapeMenu.menu()
    utility.applyGlobalArgs(args)

    retval = True
    try:
        if args["<command>"] is not None:
            # A command was given on the command line -- bypass the menu.
            if len(sys.argv) > 1:
                retval = menu.applyMenuChoice(args["<command>"], args["<args>"])
        else:
            # Interactive mode: loop until a menu choice reports done.
            finished = 0
            while not finished:
                menu.presentTextMenu()
                selection = utility.userInput("Please select an option from the above menu", None).split()
                finished = menu.applyMenuChoice(selection[0], selection)
    except KeyboardInterrupt:
        print("GRAPE ERROR: Operation interrupted by user, exiting...")
        retval = False

    # Exit the script
    print("Thank you - good bye")
    return retval
def run(common_args, cmd_argv):
    """Display package information for this package or an adopted package."""
    args = docopt(__doc__, argv=cmd_argv)

    # Display my package info
    if not args['<adoptedpkg>']:
        utils.cat_package_file(int(args['--indent']))

    # Display an adopted package
    else:
        # Check if the adopted package is actually adopted
        # ('is None' instead of '== None' throughout)
        json_dict = utils.load_package_file()
        pkgobj, deptype, pkgidx = utils.json_find_dependency(json_dict, args['<adoptedpkg>'])
        if pkgobj is None:
            sys.exit(f"ERROR: The package - {args['<adoptedpkg>']} is NOT an adopted package")

        # OVERLAY package
        if pkgobj['pkgtype'] == 'overlay':
            utils.cat_package_file(int(args['--indent']),
                                   path=os.path.join(OVERLAY_PKGS_DIR(), args['<adoptedpkg>'], PACKAGE_INFO_DIR()))

        # Readonly/Foreign Packages
        else:
            if pkgobj['parentDir'] is None:
                sys.exit(f"ERROR: the {PACKAGE_FILE()} file is corrupt. There is no parent directory for the package: {args['<adoptedpkg>']}")
            json_dict = utils.cat_package_file(int(args['--indent']),
                                               path=os.path.join(pkgobj['parentDir'], args['<adoptedpkg>'], PACKAGE_INFO_DIR()))
            if json_dict is None:
                sys.exit(f"ERROR: No package information is available for the Readonly/Foreign package: {args['<adoptedpkg>']}")
def run(common_args, cmd_argv):
    """List adopted packages, optionally filtered by a wildcard pattern."""
    args = docopt(__doc__, argv=cmd_argv)

    # Default the filter to match everything ('is None' instead of '== None')
    if args['<wildcard>'] is None:
        args['<wildcard>'] = '*'

    # Get the list of adopted packages
    json_dict = utils.load_package_file()

    # Get Dependencies (-w excludes immediate deps, -s excludes weak deps)
    pkgs = utils.get_dependency_list(json_dict,
                                     include_immediate=not args['-w'],
                                     include_weak=not args['-s'])

    # Sort the list by package name
    pkgs = sorted(pkgs, key=lambda i: i['pkgname'])

    # Optional column header (suppressed by -x).
    # NOTE(review): the original header strings almost certainly contained
    # column padding that was lost when this source was re-flowed -- verify
    # the spacing against the f-string field widths below.
    if not args['-x']:
        header = "PkgName D PkType AdoptedDate ParentDir "
        rheader = "RepoName RepoType RepoOrigin "
        vheader = "SemVer Branch Tag"
        if args['-l'] or args['-r']:
            header = header + rheader
        if args['-l'] or args['-v']:
            header = header + vheader
        print(header)

    # display the list
    for p in pkgs:
        if fnmatch.fnmatch(p['pkgname'], args['<wildcard>']):
            info = f"{p['pkgname']:<16} {p['depType']} {p['pkgtype']:<8} {p['adoptedDate']} {utils.json_get_dep_parentdir(p):<16}"

            # Repo info
            if args['-l'] or args['-r']:
                info = info + f" {utils.json_get_dep_repo_name(p):<16} {utils.json_get_dep_repo_type(p):<8} {utils.json_get_dep_repo_origin(p):40}"

            # Version info
            if args['-l'] or args['-v']:
                info = info + f" {utils.json_get_dep_semver(p):<8} {utils.json_get_dep_branch(p):<16} {utils.json_get_dep_tag(p)}"

            # display output
            print(info)
def run( common_args, cmd_argv ):
    """Move an adopted (non-overlay) package to a new parent directory and
    update the package file accordingly."""
    args = docopt(__doc__, argv=cmd_argv)

    # Get the list of adopted packages
    json_dict = utils.load_package_file()

    # Make sure the package is adopted and that it is 'moveable'
    pkg = args["<pkg>"]
    if json_dict is None:
        sys.exit( 'ERROR: No packages have been adopted' )
    pkgobj, deptype, pkgidx = utils.json_find_dependency( json_dict, pkg )
    if pkgobj is None:
        sys.exit( f"ERROR: Package ({args['<pkg>']}) not adopted." )
    if pkgobj['pkgtype'] == 'overlay':
        sys.exit( "ERROR: The mv command cannot move an overlay package" )

    # Make sure that the '<dst>' parent directory exists
    if not os.path.isdir( args['<dst>'] ):
        sys.exit( f"ERROR: The parent directory ({args['<dst>']}) does NOT exist" )

    # Make sure that '<pkg>' does NOT already exist under '<dst>'
    dst = os.path.join( args['<dst>'], args["<pkg>"] )
    if os.path.exists( dst ):
        # BUGFIX: the original message interpolated <dst> twice; it should
        # name the package that already exists under the destination.
        sys.exit( f"ERROR: {args['<pkg>']} already exists under {args['<dst>']}" )

    # Physically move the package
    src = os.path.join( pkgobj["parentDir"], args["<pkg>"] )
    try:
        shutil.move( src, dst )
    except Exception as e:
        sys.exit( f"ERROR: Unable to move the package ({e})" )

    # Update the package entry with its new parent directory
    pkgobj['parentDir'] = args['<dst>']
    json_dict['dependencies'][deptype].pop(pkgidx)
    utils.json_update_package_file_with_new_dep_entry( json_dict, pkgobj, is_weak_dep = True if deptype=='weak' else False )
    utils.write_package_file( json_dict )
    print( f"Package {src} moved to {dst}" )
def main():
    """Ratt entry point: configure the UUT (unit-under-test) connection,
    then either run a test script in batch mode or drop into the
    interactive REPL."""
    # Parse command line
    args = docopt(__doc__, version=VERSION, options_first=True)

    # Add the ratt directory to the system path (so module can access the
    # 'utils' package
    sys.path.append(__file__)

    # Enumrate Windoze COM Ports
    if (args['--serialports'] == True):
        ports = utils.get_available_serial_ports(platform="Windows")
        for p in ports:
            print(p)
        sys.exit()

    # Get Newline option
    config.newline = '\r\n' if args['--crlf'] else '\n'

    # Open log file (when not disabled)
    logfile = None
    if (args['--log'] == True or args['--vlog'] or args['--dlog']):
        if (args['--vlog']):
            config.g_verbose_logs = True
        if (args['--dlog']):
            config.g_debug_logs = True
        logfile = open(utils.append_current_time(args['--logfile']), "wb")

    ## Created 'Expected' object for a: Windoze executable UUT
    if (args['--win']):
        config.g_uut = rexpect.ExpectWindowsConsole(" ".join(args['<executable>']), logfile)
    # Created 'Expected' object for a: Linux/Wsl executable UUT
    elif (args['--linux']):
        config.g_uut = rexpect.ExpectLinuxConsole(" ".join(args['<executable>']), logfile)
    # Created 'Expected' object for a: UUT via a Windoze COM Port
    elif (args['--comport']):
        serial = utils.open_serial_port('com' + args['<comnum>'], timeout=0, baudrate=int(args['--baud']), parity=args['--parity'], stopbits=int(args['--stopbits']), bytesize=int(args['--databits']))
        config.g_uut = rexpect.ExpectSerial(serial, logfile)
    # Create 'Expected' object for a: NO UUT
    elif (args['--nouut']):
        config.g_uut = rexpect.ExpectNullConsole(logfile)

    # Enable output
    output.set_verbose_mode(args['-v'])
    output.set_debug_mode(args['--debug'])
    output.set_output_fd(sys.stdout, logfile)

    # Get script paths (CWD plus up to three extra search paths)
    config.g_script_paths.append('.')
    if (args['--path1'] != None):
        config.g_script_paths.append(args['--path1'])
    if (args['--path2'] != None):
        config.g_script_paths.append(args['--path2'])
    if (args['--path3'] != None):
        config.g_script_paths.append(args['--path3'])

    # Check for batch mode
    if (args['--input'] != None):
        # NOTE(review): 'input' shadows the Python builtin of the same name
        # for the rest of this branch.
        input, result = utils.import_file(args['--input'], config.g_script_paths)
        if (input == None):
            sys.exit(result)
        start_time = time()
        output.writeline("------------ START: Ratt, ver={}. Start time={}".format(VERSION, strftime("%Y-%m-%d_%H.%M.%S", localtime(start_time))))
        output.writeline("------------ RUNNING SUITE CASE: {}".format(result))
        # The imported script's main() returns the suite pass/fail code,
        # which becomes this process's exit status.
        passcode = input.main()
        end_time = time()
        if (passcode != 0):
            output.writeline("------------ TEST SUITE FAILED ({}).".format(passcode))
        else:
            output.writeline("------------ TEST SUITE PASSED.")
        output.writeline("------------ END: End time={}, delta={:.2f} mins".format(strftime("%Y-%m-%d_%H.%M.%S", localtime(end_time)), (end_time - start_time) / 60.0))
        sys.exit(passcode)

    # interactive mode
    else:
        output.writeline("")
        output.writeline("------------ Welcome to Ratt, this is my Kung-Fu and it is strong! ------------")
        output.writeline(" ver={}. Start time={}".format(VERSION, utils.append_current_time("", "")))
        output.writeline("")
        # NOTE(review): exec of raw REPL input -- intentional for an
        # interactive test tool, but it executes arbitrary code.
        exec('from rattlib import *')
        exec('import config')
        while (True):
            output.write(">")
            line = sys.stdin.readline().rstrip("\r\n")
            output.writeline(line, log_only=True)
            try:
                exec(line)
            except Exception as e:
                output.writeline(str(e))
--moored Moored (anchored) mine. --drifting Drifting mine. --boludo Boludo Mode. """ from docopt import docopt if __name__ == '__main__': arguments = docopt(__doc__, version='Naval Fate 2.0') print(arguments) try: # Parse arguments, use file docstring as a parameter definition arguments = docopt.docopt(__doc__) # Count is a mandatory option, caps is optional caps = arguments['--boludo'] # In the definition, we expect one or more TEXT parameters # Each parameter is a word, or a text in quotes: "something like this" # If the user forgets about the quote, the program would print only "something" # Thus, we merge all the specified parameters with space text = ' '.join(arguments['TEXT']) if(caps): #print text.upper() print("Boludo!") else: print text
Some commands are better documented than others, but our use of the docopt.py module guarantees that all available options are at least listed below. """ def __init__(self): self._key = "Tutorial" self._text = Tutorial.__doc__ super(Tutorial, self).__init__(self) def main(fname): """ dumps documentation to a file. Usage: gendocs.py <fname> Arguments: <fname> The file to write documentation to. """ doc = Documentation(grapeMenu.menu()) with open(fname, 'w') as f: doc.write(f) if __name__ == "__main__": args = docopt(main.__doc__, argv=sys.argv[1:]) main(args["<fname>"]) sys.exit(0)
def __init__(self):
    """Generate a rows x columns grid topology and write it as YAML."""
    arguments = docopt(__doc__, version='Topology Table Generator 1.0')
    file_name = arguments['--file'] if arguments['--file'] else 'mn-topo.yml'
    rows = int(arguments['--rows']) if arguments['--rows'] else 3
    columns = int(arguments['--columns']) if arguments['--columns'] else 3
    links_per_rows = int(arguments['--links-per-rows']) if arguments['--links-per-rows'] else 1
    links_per_columns = int(arguments['--links-per-columns']) if arguments['--links-per-columns'] else 1

    data = dict()
    data['controller'] = []
    if arguments['--controller']:
        # enumerate() instead of a manual index counter.
        for index, ctrl in enumerate(arguments['--controller']):
            data['controller'].append({'name': 'c' + str(index), 'ip': ctrl})
    else:
        data['controller'].append({'name': 'c0', 'ip': '127.0.0.1'})

    data['host'] = []
    data['switch'] = []
    data['link'] = []

    # we first calculate the host to ensure they are created in port 1 on all switches
    for row in range(0, rows):
        for column in range(0, columns):
            # Only perimeter cells get hosts (chained comparison idiom).
            if 0 < row < rows - 1 and 0 < column < columns - 1:
                continue
            host = get_host(row, column)
            switch_name = get_switch_name(row, column)
            # Reuse the host dict already built above -- the original called
            # get_host() twice for the same cell.
            data['host'].append(host)
            data['link'].append({'source': host['name'], 'destination': switch_name})

    for row in range(0, rows):
        for column in range(0, columns):
            switch = get_switch(row, column)
            right = get_switch_name(row, column + 1)
            bottom = get_switch_name(row + 1, column)
            data['switch'].append(switch)
            # Horizontal links to the right-hand neighbor.
            if column < columns - 1:
                for repeat in range(0, links_per_rows):
                    data['link'].append({'source': switch['name'], 'destination': right})
            # Vertical links to the neighbor below.
            if row < rows - 1:
                for repeat in range(0, links_per_columns):
                    data['link'].append({'source': switch['name'], 'destination': bottom})

    # Optional physical interfaces, given as '<name>:<switch>' pairs.
    data['interface'] = []
    if arguments['--interfaces']:
        for inter in arguments['--interfaces']:
            name, switch = inter.split(':')
            data['interface'].append({'name': name, 'switch': switch})

    with open(file_name, 'w') as outfile:
        outfile.write(yaml.dump(data, default_flow_style=False))
elif args["--host"] == "None": args["--host"] = None # Checking Flags if args["--longitudinal"] in ["True", "true", True]: args["--longitudinal"] = True elif args["--longitudinal"] in ["False", "false", False]: args["--longitudinal"] = False else: args["--longitudinal"] = False if args["--name"] in ["None", None]: args["--name"] = None else: args["--name"] = str(args["--name"]) # Setup project = Project(name=args["--name"], data_dir=clean_path(args["--data_dir"]), code_dir=clean_path(args["--code_dir"]), freesurfer_home=clean_path(args["--freesurfer_home"]), is_longitudinal=args["--longitudinal"], host=args["--host"]) project.create_directories() project.write_scripts() project.create_monitor() print("Setup Complete!") #------------------------------------ # Main #------------------------------------ if __name__ == '__main__': args = docopt(doc, version='Setup FreeSurfer v{0}'.format(Version)) run(args)
def run(common_args, cmd_argv):
    """Stub 'umount' handler: validates the arguments, then always fails."""
    # Parse for usage-error reporting only; the result is not needed.
    docopt(scm.umount.USAGE, argv=cmd_argv)
    # Not implemented for this SCM type -- report failure to the caller.
    exit(1)
at a particular commands documentation using grape <cmd> --help. Some commands are better documented than others, but our use of the docopt.py module guarantees that all available options are at least listed below. """ def __init__(self): self._key = "Tutorial" self._text = Tutorial.__doc__ super(Tutorial, self).__init__(self) def main(fname): """ dumps documentation to a file. Usage: gendocs.py <fname> Arguments: <fname> The file to write documentation to. """ doc = Documentation(grapeMenu.menu()) with open(fname, 'w') as f: doc.write(f) if __name__ == "__main__": args = docopt(main.__doc__, argv=sys.argv[1:]) main(args["<fname>"]) sys.exit(0)
print( "-------------------------------------------------------------------------------" ) bpath = os.path.join(os.path.dirname(__file__), 'scm') if (os.path.exists(bpath)): files = os.listdir(bpath) for f in files: if (os.path.isdir(os.path.join(bpath, f))): print(f) print(' ') #------------------------------------------------------------------------------ # Parse command line args = docopt(__doc__, version=EVIE_VERSION(), options_first=True) # Display list of build engines supported if (args['--qry']): display_scm_types_list() else: # Determine which SCM tool to use scm = os.environ.get(OUTCAST_SCM_ADOPTED_TOOL()) if (scm == None): scm = 'git' if (args['--scm']): scm = args['--scm'] args['--scm'] = scm # Trap help on a specific command
def __init__(self):
    """Parse fmcheck CLI arguments and run the selected sub-command.

    Reads options via docopt, configures logging verbosity/colors,
    locates a topology YAML file, builds a ``Topology`` from it, runs
    exactly one validate / delete / get-* action, and exits the process
    with status 1 when the action reports a falsy result (or when no
    command matched).
    """
    arguments = docopt.docopt(__doc__, version='fmcheck 1.0')

    # Reduce urllib3 logging messages
    logging.getLogger("urllib3").setLevel(logging.WARNING)

    # Colored logging
    if arguments['--debug']:
        logging.getLogger().setLevel(logging.DEBUG)
        coloredlogs.install(level='DEBUG')
        # print(arguments)
    else:
        logging.getLogger().setLevel(logging.INFO)
        coloredlogs.install(level='INFO')

    # Topology file: either given explicitly (must exist), or the first
    # of the well-known defaults present on disk (prod -> mn -> fm).
    if arguments['--topology']:
        topo_file = arguments['--topology']
        if not (os.path.isfile(topo_file)):
            raise Exception(
                "given topology file {} not found".format(topo_file))
    else:
        topo_file = 'prod-topo.yml' if os.path.isfile('prod-topo.yml') else None
        topo_file = 'mn-topo.yml' if not topo_file and os.path.isfile(
            'mn-topo.yml') else topo_file
        topo_file = 'fm-topo.yml' if not topo_file and os.path.isfile(
            'fm-topo.yml') else topo_file
        if not topo_file:
            raise Exception('default topology file not found')

    props = None
    if os.path.isfile(topo_file):
        with open(topo_file, 'r') as f:
            # NOTE(review): yaml.load() without an explicit Loader can
            # construct arbitrary objects from the file; consider
            # yaml.safe_load() if topology files are plain data.
            props = yaml.load(f)
    if props is None:
        logging.error("yml topology file %s not loaded", topo_file)
        sys.exit(1)

    # --controller (repeatable) replaces the controller list from the
    # YAML file with auto-named entries c0, c1, ...
    if arguments['--controller']:
        props['controller'] = []
        i = 0
        for ip in arguments['--controller']:
            props['controller'].append(
                {'name': "c{}".format(i), 'ip': ip})
            i = i + 1

    result = None
    topology = Topology(props)

    # Validation commands
    if arguments['links']:
        # --stopped inverts the expectation; --segmentrouting widens scope
        should_be_up = True if not arguments['--stopped'] else False
        include_sr = True if arguments['--segmentrouting'] else False
        result = topology.validate_links(
            should_be_up=should_be_up, include_sr=include_sr)
    elif arguments['nodes']:
        should_be_up = True if not arguments['--stopped'] else False
        include_sr = True if arguments['--segmentrouting'] else False
        result = topology.validate_nodes(
            should_be_up=should_be_up, include_sr=include_sr)
    elif arguments['roles']:
        result = topology.validate_nodes_roles()
    elif arguments['flows']:
        result = topology.validate_openflow_elements()
    elif arguments['sync-status']:
        result = topology.validate_cluster()

    # Delete commands
    elif arguments['delete-groups']:
        switch = topology.get_switch(arguments['<name>'])
        if switch:
            result = switch.delete_groups()
        else:
            # result stays None here, so the method exits 1 below
            logging.error("switch %s not found", arguments['<name>'])
    elif arguments['delete-flows']:
        result = topology.get_switch(arguments['<name>']).delete_flows()

    # Get flow stats
    elif arguments['get-flow-stats-all']:
        result = topology.get_random_controller().get_flow_stats()
    elif arguments['get-flow-stats']:
        result = topology.get_random_controller().get_flow_stats(
            filters=arguments['<filter>'])
    elif arguments['get-flow-node-stats-all']:
        result = topology.get_node_cluster_owner(
            arguments['<node>']).get_flow_stats(node_name=arguments['<node>'])
    elif arguments['get-flow-node-stats']:
        result = topology.get_node_cluster_owner(
            arguments['<node>']).get_flow_stats(node_name=arguments['<node>'],
                                                filters=arguments['<filter>'])

    # Get group stats
    elif arguments['get-group-stats-all']:
        result = topology.get_random_controller().get_group_stats()
    elif arguments['get-group-stats']:
        result = topology.get_random_controller().get_group_stats(
            filters=arguments['<filter>'])
    elif arguments['get-group-node-stats-all']:
        result = topology.get_node_cluster_owner(
            openflow_name=arguments['<node>']).get_group_stats(
                node_name=arguments['<node>'])
    elif arguments['get-group-node-stats']:
        result = topology.get_node_cluster_owner(
            openflow_name=arguments['<node>']).get_group_stats(
                filters=arguments['<filter>'], node_name=arguments['<node>'])

    # Get Eline stats
    elif arguments['get-eline-stats-all']:
        result = topology.get_random_controller().get_eline_stats()
    elif arguments['get-eline-stats']:
        result = topology.get_random_controller().get_eline_stats(
            filters=arguments['<filter>'])
    elif arguments['get-eline-summary-all']:
        result = topology.get_random_controller().get_eline_summary()
    elif arguments['get-eline-summary']:
        result = topology.get_random_controller().get_eline_summary(
            filters=arguments['<filter>'])

    # Get Etree stats
    elif arguments['get-etree-stats-all']:
        # NOTE(review): this first assignment is immediately overwritten
        # by the next statement; only get_etree_stats() decides 'result'.
        # Confirm whether get_etrees() is called for a side effect.
        result = fmcheck.openflow.get_etrees(
            topology.get_random_controller())
        result = topology.get_random_controller().get_etree_stats()
    elif arguments['get-etree-stats']:
        result = topology.get_random_controller().get_etree_stats(
            filters=arguments['<filter>'])
    elif arguments['get-etree-summary-all']:
        result = topology.get_random_controller().get_etree_summary()
    elif arguments['get-etree-summary']:
        result = topology.get_random_controller().get_etree_summary(
            filters=arguments['<filter>'])

    # Get Segment Routing info
    elif arguments['get-sr-summary-all']:
        result = topology.get_random_controller().get_sr_summary_all(
            topology.switches_by_openflow_name)
    elif arguments['get-sr-summary']:
        result = topology.get_random_controller().get_sr_summary(
            topology.switches_by_openflow_name,
            source=arguments['<source>'],
            destination=arguments['<destination>'])

    # Get Node Summary
    elif arguments['get-node-summary']:
        result = topology.get_random_controller().get_node_summary(
            topology.switches_by_openflow_name)

    # A falsy result (failed validation, missing switch, or unmatched
    # command) exits non-zero so shell scripts can detect failure.
    if not result:
        sys.exit(1)
def __init__(self):
    """Parse Mininet-topology-tester CLI arguments and run one check.

    Loads the topology YAML (default ``mn-topo.yml``), optionally
    overrides the controller list from ``--controller``, configures a
    ``mntopo.checker.Checker`` from the CLI flags, dispatches exactly
    one check/action, and exits the process with status 1 on a falsy
    result.
    """
    arguments = docopt(__doc__, version='Mininet Topology Tester 1.0')
    setLogLevel('info')

    # Resolve the topology file; renamed from 'file' to avoid shadowing
    # the builtin.
    topo_file = 'mn-topo.yml'
    if arguments['--topology']:
        topo_file = arguments['--topology']

    props = None
    if os.path.isfile(topo_file):
        with open(topo_file, 'r') as f:
            # SECURITY NOTE: yaml.load() without an explicit Loader can
            # construct arbitrary objects; prefer yaml.safe_load() if
            # topology files are plain data (left as-is to preserve
            # behavior for files that rely on YAML tags).
            props = yaml.load(f)
    if props is None:
        # Bug fix: the original used the Python-2-only 'print' statement,
        # a syntax error under Python 3. The call form works on both.
        print("ERROR: yml topology file {} not found".format(topo_file))
        sys.exit(1)

    # --controller (repeatable) replaces the YAML controller list with
    # auto-named entries c0, c1, ...
    if arguments['--controller']:
        props['controller'] = [
            {'name': "c{}".format(i), 'ip': ip}
            for i, ip in enumerate(arguments['--controller'])
        ]

    # Configure the checker from CLI flags; unset flags keep the
    # checker's own defaults.
    checker = mntopo.checker.Checker(props)
    if arguments['--no-loop']:
        checker.loop = False
    if arguments['--no-links']:
        checker.check_links = False
    if arguments['--delay']:
        checker.delay = int(arguments['--delay'])
    if arguments['--loops']:
        checker.loop_max = int(arguments['--loops'])
    if arguments['--dir']:
        checker.servicesdir = arguments['--dir']
    if arguments['--retries']:
        checker.retries = int(arguments['--retries'])
    if arguments['--interval']:
        checker.retry_interval = int(arguments['--interval'])
    if arguments['--force-pings']:
        checker.force_pings = True
    if arguments['--ask-for-retry']:
        checker.ask_for_retry = True

    # Dispatch exactly one action; default (no command) runs the full
    # test cycle.
    if arguments['links'] and arguments['--stopped']:
        result = checker._check_links(False)
    elif arguments['links']:
        result = checker._check_links()
    elif arguments['flows']:
        result = checker._check_flows()
    elif arguments['nodes'] and arguments['--stopped']:
        result = checker._check_nodes(False)
    elif arguments['nodes']:
        result = checker._check_nodes()
    elif arguments['stats']:
        result = checker._check_stats()
    elif arguments['put-flows']:
        result = checker.put()
    elif arguments['save-flows']:
        result = checker.save()
    elif arguments['delete-flows']:
        result = checker.delete()
    else:
        result = checker.test()

    # Exit non-zero on failure so callers/scripts can detect it.
    if not result:
        sys.exit(1)
add_image: string, path. remove_image: int (index of image to remove) add_note: string """ root = cleaned_path(dirpath) update_json(root+"/data/{0}-{1}.json".format(row_id, column_id), callback=cell_updater, root=root, row_id=row_id, column_id=column_id, text=text, background_color=background_color, text_color=text_color, boolean=boolean, animation=animation, add_image=add_image, remove_image=remove_image, add_note=add_note) def query(dirpath, row_id=None, column_id=None, field=None): print() #============================================================================ # Main #============================================================================ if __name__ == '__main__': arguments = docopt(doc, version='Dash v{0}'.format(Version)) if arguments["create"] == True: create(arguments["<dir>"], arguments["--name"]) elif arguments["update"] == True: update(arguments["<dir>"], add_columns=arguments["--addcol"], remove_columns=arguments["--rmcol"], add_rows=arguments["--addrow"], remove_rows=arguments["--rmrow"]) elif arguments["cell"] == True: cell(arguments["<dir>"], row_id=arguments["--row"], column_id=arguments["--col"], text=arguments["--settext"], background_color=arguments["--setbgcolor"], text_color=arguments["--settxtcolor"], boolean=arguments["--setbool"], animation=arguments["--setanimate"],
config = Config(arguments) if config.update: # update issues to add comments and issue state issues = get_issues(config.src_url) existing_issues = get_issues(config.dest_url) update_issues(issues, existing_issues) else: # get milestones and labels milestones = get_milestones(config.src_url) labels = get_labels(config.dest_url) # import milestones and labels, skipping existing labels import_milestones(milestones) existing_labels = get_labels(config.dest_url) import_labels(labels, existing_labels) # get imported milestones and labels milestones = get_milestones(config.dest_url) labels = get_labels(config.dest_url) # create issues issues = get_issues(config.src_url) existing_issues = get_issues(config.dest_url) import_issues(issues, existing_issues, milestones, labels) if __name__ == '__main__': arguments = docopt.docopt(__doc__, version='0.1') main(arguments)
def run( common_args, cmd_argv ):
    """Entry point for the dependency sub-command: move, check, or show deps.

    Builds the package dependency tree from the package file, then runs
    exactly one of:
      * ``mv``      -- toggle an adopted package between strong and weak,
      * ``check``   -- run missing/compatibility/cyclical checks,
      * (default)   -- print the tree, optionally filtered to one package.

    Args:
        common_args: parsed global options (unused in this handler).
        cmd_argv:    argument list for this sub-command, parsed via docopt.

    Note: every branch terminates the process via sys.exit().
    """
    args = docopt(__doc__, argv=cmd_argv)

    # Housekeeping
    exit_code = 0;

    # Load my package info
    json_dict = utils.load_package_file()

    # Build dependency tree.
    root = utils.Node( (json_dict, "ROOT") )
    build_tree( root, json_dict )

    # Get generation lists: direct dependencies and their dependencies
    children = root.get_children().copy()
    grand_children = []
    for c in children:
        grand_children.extend( c.get_children() )

    # MOVE a dependency (flips it between the 'strong' and 'weak' lists)
    if ( args['mv'] ):
        # Look up the details of the package to be moved
        pkgobj, deptype, pkgidx = utils.json_find_dependency( json_dict, args['<adoptedpkg>'] )
        if ( pkgobj == None ):
            sys.exit( f"Cannot find the package - {args['<adoptedpkg>'] } - in the list of adopted packages" );

        # Remove then add the package from the deps list
        now_is_weak = True if deptype == 'strong' else False
        json_dict['dependencies'][deptype].pop(pkgidx)
        utils.json_update_package_file_with_new_dep_entry( json_dict, pkgobj, now_is_weak )
        print( f"Package - {args['<adoptedpkg>']} is now a {'weak' if now_is_weak else 'strong'} dependency " )
        sys.exit( 0 )

    # CHECK dependencies
    if ( args['check'] ):
        print( f"Checking dependencies for package: {utils.json_get_package_name(json_dict)}" )

        # Perform checks
        # NOTE(review): 'check_cylical' (sic, defined elsewhere) takes
        # missing_list as input -- confirm that is intended.
        missing_list   = check_missing( root, children, grand_children )
        noncompat_list = check_compatibility( root, children, grand_children )
        cyclical_list  = check_cylical( missing_list, root.get_pkgname() )

        # Cyclical deps: weak ones are warnings, strong ones are errors.
        # Only strong results influence the exit code.
        if ( not args['--noweak'] ):
            cyclical = filter_for_weak_dependency( cyclical_list )
            print_cyclical_list( cyclical, "Warning", "weak" )
        if ( not args['--nostrong'] ):
            cyclical = filter_for_strong_dependency( cyclical_list )
            print_cyclical_list( cyclical, "ERROR", "strong" )
            if ( len(cyclical) > 0 ):
                exit_code = 1

        # Missing deps
        if ( not args['--noweak'] ):
            missing = filter_for_weak_dependency( missing_list )
            print_missing_list( missing, "Warning", "weak" )
        if ( not args['--nostrong'] ):
            missing = filter_for_strong_dependency( missing_list )
            print_missing_list( missing, "ERROR", "strong" )
            if ( len(missing) > 0 ):
                exit_code = 1

        # Compatible check
        if ( not args['--noweak'] ):
            noncompat = filter_for_weak_dependency( noncompat_list )
            print_noncompat_list( noncompat, "Warning", "weak", children )
        if ( not args['--nostrong'] ):
            noncompat = filter_for_strong_dependency( noncompat_list )
            print_noncompat_list( noncompat, "ERROR", "strong", children )
            if ( len(noncompat) > 0 ):
                exit_code = 1

        print("All dependency checks completed." )
        sys.exit( exit_code )

    # SHOW dependencies: with no package named, dump the whole tree
    if ( not args['<adoptedpkg>'] ):
        print( root )
        sys.exit( 0 )

    # Filter the tree for a specific adopted package
    for c in children:
        if ( c.get_pkgname() != args['<adoptedpkg>'] ):
            root.remove_child_node( c )
    print( root )
    sys.exit( 0 )
def run(common_args, cmd_argv):
    """Entry point for the version/publish sub-command.

    Depending on the parsed options it either:
      * prints the publish help text (``help``),
      * edits the current version entry in place (``--edit``),
      * edits a specific history entry (``--edithist``),
      * or appends a brand-new version entry (``<semver>``),
    then always prints the currently-published info as JSON and, unless
    ``-w`` was given, warns about missing package metadata files.

    Args:
        common_args: parsed global options (unused in this handler).
        cmd_argv:    argument list for this sub-command, parsed via docopt.
    """
    args = docopt(__doc__, argv=cmd_argv)

    # Help on the publish sequence
    if (args['help']):
        print(help_text)
        sys.exit(0)

    # Get the package data
    json_dict = utils.load_package_file()

    # Edit in place (re-uses the previous entry's date)
    if (args['--edit']):
        prev = utils.json_get_current_version(json_dict)
        v = utils.json_create_version_entry(args['<comments>'], args['<semver>'], prev['date'])
        utils.json_update_current_version(json_dict, v)

    # Edit a history entry
    elif (args['--edithist']):
        utils.json_update_history_version(json_dict, args['--edithist'], args['<comments>'], args['<semver>'])

    # Create new entry
    elif (args['<semver>']):
        v = utils.json_create_version_entry(args['<comments>'], args['<semver>'])
        utils.json_add_new_version(json_dict, v)

    # Important files...
    dirs_file = os.path.join(PACKAGE_INFO_DIR(), PKG_DIRS_FILE())
    ignore_file = os.path.join(PACKAGE_INFO_DIR(), IGNORE_DIRS_FILE())

    # Ensure the 'dirs list' is up to date -- only when creating a new
    # version entry and the user did not opt out with --nodirs.
    if (not args['--nodirs'] and args['<semver>']):
        if (os.path.isfile(ignore_file)):
            owndirs = utils.get_owned_dirs(PACKAGE_ROOT())
            utils.save_dirs_list_file(owndirs)

    # Display publish info
    p = utils.json_get_published(json_dict)
    print(json.dumps(p, indent=2))

    # Display warnings (suppressed by -w)
    if (not args['-w']):
        warning = False
        if (not os.path.isfile(dirs_file)):
            print(
                f"Warning: NO {dirs_file} file has been created for the package. See the 'orc dirs' command"
            )
            warning = True
        if (not os.path.isfile(ignore_file)):
            print(
                f"Warning: NO {ignore_file} file has been created for the package. Create using a text editor. The file has same semantics as a .gitignore file."
            )
            warning = True
        if (warning):
            print()
            # Fixed the duplicated words ("to be to be") in the original
            # message and dropped the needless f-prefix (no placeholders).
            print(
                "The above warning(s) can be ignored if the package is NOT intended to be adopted as an 'overlay' package."
            )