"""Wrap a raw x86 shellcode blob in a minimal PE executable.

The shellcode is placed in a section at RVA 0x1000 and the PE entry point
is set to that section, so launching the PE runs the shellcode directly.
"""
import sys
from argparse import ArgumentParser

from miasm.loader import pe_init

parser = ArgumentParser(description="Create a PE from a shellcode")
parser.add_argument("filename", help="x86 shellcode filename")
parser.add_argument("-p", "--pename",
                    help="new PE filename (default is 'sc_pe.exe')",
                    default="sc_pe.exe")
parser.add_argument("-w", "--word-size",
                    help="word size (default is 32 bits)",
                    choices=[32, 64], type=int, default=32)
args = parser.parse_args()

# FIX: the original opened both files without closing them; use context
# managers so the handles are released deterministically.
with open(args.filename, 'rb') as shellcode_file:
    data = shellcode_file.read()

pe = pe_init.PE(wsize=args.word_size)
# Place the shellcode in its own section and point the entry there.
s_text = pe.SHList.add_section(name="text", addr=0x1000, data=data)
pe.Opthdr.AddressOfEntryPoint = s_text.addr

with open(args.pename, 'wb') as out_file:
    out_file.write(bytes(pe))
def _str2bool(value):
    """Convert a command line string to a bool.

    argparse's ``type=bool`` is a trap: ``bool("False")`` is True, so a
    flag declared that way can never be switched off.  This converter
    accepts the usual spellings; anything else raises ValueError, which
    argparse reports as a usage error.
    """
    if isinstance(value, bool):
        return value
    lowered = str(value).strip().lower()
    if lowered in ("true", "t", "yes", "y", "1"):
        return True
    if lowered in ("false", "f", "no", "n", "0"):
        return False
    raise ValueError("expected a boolean value, got %r" % value)


def cmd_args():
    """Build the command line parser for the model's train/eval settings.

    Returns:
        The configured ``ArgumentParser`` (callers run ``parse_args``).
    """
    parser = ArgumentParser()
    # file paths
    parser.add_argument(
        "--dict-path",
        help="path to dict file containing pickled subtoken, node and target counts",
    )
    # vocabulary settings
    parser.add_argument("--use-bpe", action="store_true")
    parser.add_argument("--subtoken-vocab", help="path to subtoken vocab file")
    parser.add_argument("--subtoken-merges", help="path to subtoken merges")
    parser.add_argument("--target-vocab", help="path to target vocab")
    parser.add_argument("--target-merges", help="path to target merges")
    parser.add_argument("--node-dict", help="path to node dict")
    parser.add_argument("--train-path", required=True)
    parser.add_argument("--test-path", required=True)
    parser.add_argument("--val-path", required=True)
    # line cache
    parser.add_argument("--train-line-cache", required=False)
    parser.add_argument("--test-line-cache", required=False)
    parser.add_argument("--val-line-cache", required=False)
    # sequence lengths for path, subtoken and target
    parser.add_argument("--ast-len", default=9, type=int)
    parser.add_argument("--subtoken-len", default=7, type=int)
    parser.add_argument("--target-len", default=4, type=int)
    parser.add_argument("--max-contexts", default=100, type=int)
    # embeddings
    parser.add_argument("--subtoken-embedding-dims", default=128, type=int)
    parser.add_argument("--ast-embedding-dims", default=128, type=int)
    parser.add_argument("--target-embedding-dims", default=128, type=int)
    # some model settings
    parser.add_argument("--context-encoder-dropout", default=0.5, type=float)
    # BUG FIX: previously untyped with default=True, so any supplied value
    # (a non-empty string) parsed as truthy and the option could not be
    # turned off from the command line.
    parser.add_argument(
        "--ast-bidirectional",
        default=True,
        type=_str2bool,
        help="whether or not to use bidirectional LSTM to encode AST nodes",
    )
    # BUG FIX: missing type=int — a user-supplied value arrived as str.
    parser.add_argument("--ast-hidden-size", default=256, type=int,
                        help="number of hidden units in the AST LSTM")
    # training settings
    # BUG FIX: was type=bool, for which "False" parses as True.
    parser.add_argument("--shuffle-contexts", default=True, type=_str2bool)
    parser.add_argument("--shuffle-batches", action="store_true")
    parser.add_argument("--batch-size", default=128, type=int)
    parser.add_argument("--predict-variables", action="store_true")
    # data loader settings
    parser.add_argument("--dataloader-num-workers", default=4, type=int)
    # model checkpointing
    parser.add_argument("--save-top-k", default=1, type=int)
    parser.add_argument("--noam-warmup-steps", default=4000, type=int)
    return parser
# Chunk of an Advent-of-Code-style solver.  The leading print(...) calls
# reference ex1/ex2/ex2_rules and belong to a unit-test function whose
# `def` line is outside this view, so they are kept untouched.
# partOne: counts distinct molecules reachable by one substitution.
# partTwo: searches backwards from the molecule to 'e' (comment in the
# code notes the reverse direction is much faster) via AStar.
# NOTE(review): getAllRules, genAllSubstitutions, getAllReverseRules,
# AStar and UnitTest are defined elsewhere in the file.
print("") print("Unit test for Part Two.") print("Test {inp} gives {res}".format(inp=ex1, res=partTwo(ex1, ex2_rules))) print("Test {inp} gives {res}".format(inp=ex2, res=partTwo(ex2, ex2_rules))) def partOne(molecule, rules_str): rules = getAllRules(rules_str) return len(set(genAllSubstitutions(molecule, rules))) def partTwo(molecule, rules_str): # It goes much faster to go in reverse, for this. rules = getAllReverseRules(rules_str) return AStar(molecule, 'e', rules) if __name__ == '__main__': from argparse import ArgumentParser, FileType args = ArgumentParser() args.add_argument("-t", "--test", help='Unit tests', action='store_true') args.add_argument("-i", "--input", help='Your input file (for rules only)', type=FileType('r')) args.add_argument("-m", "--molecule", help='Your input molecule', type=str) options = args.parse_args() if options.test: UnitTest() if options.input: inp = options.input.read().strip() print("Answer for part one is : {res}".format(res=partOne(options.molecule, inp))) print("Answer for part two is : {res}".format(res=partTwo(options.molecule, inp)))
def _parse_cli_args() -> Namespace:
    """Parse the XMLTV grabber's command line.

    Returns:
        The parsed ``Namespace`` (standard xmltv grabber options plus
        ``--quiet``/``--debug`` as a mutually exclusive pair).
    """
    parser = ArgumentParser(
        description="get French television listings using Télé Loisirs mobile "
        "API in XMLTV format")
    parser.add_argument(
        "--description",
        action="store_true",
        help="print the description for this grabber",
    )
    parser.add_argument(
        "--version",
        action="store_true",
        help="show the version of this grabber",
    )
    parser.add_argument(
        "--capabilities",
        action="store_true",
        help="show the capabilities this grabber supports",
    )
    parser.add_argument(
        "--configure",
        action="store_true",
        help="generate the configuration file by asking the users which "
        "channels to grab",
    )
    parser.add_argument(
        "--days",
        type=int,
        default=_DEFAULT_DAYS,
        help="grab DAYS days of TV data (default: %(default)s)",
    )
    parser.add_argument(
        "--offset",
        type=int,
        default=_DEFAULT_OFFSET,
        help="grab TV data starting at OFFSET days in the future (default: "
        "%(default)s)",
    )
    parser.add_argument(
        "--output",
        type=Path,
        default=Path("/dev/stdout"),
        help="write the XML data to OUTPUT instead of the standard output",
    )
    parser.add_argument(
        "--config-file",
        type=Path,
        default=_DEFAULT_CONFIG_FILE,
        help="file name to write/load the configuration to/from (default: "
        "%(default)s)",
    )
    log_level_group = parser.add_mutually_exclusive_group()
    log_level_group.add_argument(
        "--quiet",
        action="store_true",
        help="only print error-messages on STDERR",
    )
    # BUG FIX: the two adjacent string literals used to concatenate to
    # "...to help indebugging" — a space was missing at the join.
    log_level_group.add_argument(
        "--debug",
        action="store_true",
        help="provide more information on progress to stderr to help in "
        "debugging",
    )
    return parser.parse_args()
def make() -> ArgumentParser:
    """Construct the bare "foo" parser with automatic -h/--help disabled."""
    parser = ArgumentParser(prog="foo", add_help=False)
    return parser
#!/usr/bin/env python
"""Generate random test files for HTTP benchmarking."""
import os
import random
import string
from argparse import ArgumentParser

parser = ArgumentParser(description="Generate HTTP Test Files")
# FIX: the original declared both required=True and default="results",
# which is contradictory — with required=True the default could never
# apply.  The flag is now optional and falls back to "results".
parser.add_argument('--dir', '-d', dest="dir", action="store",
                    help="Directory to store outputs", default="results")
args = parser.parse_args()

DIR = args.dir
# Presumably the per-packet payload size in bytes — not used in this
# chunk; confirm against the rest of the file.
PACKAGE_SIZE = 1500


# Generate a random string
# SIZE is number of characters to generate
# CHARS is the range of characters to select from
def rand_string(size=6, chars=string.ascii_uppercase + string.digits):
    return ''.join(random.choice(chars) for _ in range(size))


def main():
    # FIX: converted Python-2 print statements to py3 calls, consistent
    # with the rest of this file (which uses f-strings elsewhere).
    print("Generate HTTP Test Files")
    # Create output directory
    if not os.path.exists(DIR):
        os.makedirs(DIR)
def parseCmdLine(self, description, args):
    """Parse the test-runner command line.

    Args:
        description: human-readable program description for --help output.
        args: argument list to parse (e.g. sys.argv[1:]).

    Returns:
        argparse.Namespace with ``out_dir``.
    """
    # BUG FIX: ArgumentParser's first positional parameter is `prog`, not
    # `description` — passing the description positionally replaced the
    # program name in the usage line and left the description empty.
    argParser = ArgumentParser(description=description)
    argParser.add_argument('-o', '--out-dir', required=True, type=str,
                           help="""Directory where test results will be written""")
    return argParser.parse_args(args)
# dm_iinfo: CLI entry point that asks the dm_irods daemon for details of a
# single object and pretty-prints them, grouped into "Transfer", "Local
# File", "Remote Object" and "DMF Data" sections; sections with no
# populated fields are suppressed (see count_groups).  Exits with code 8
# on a non-OK daemon response.
# NOTE(review): `argv=sys.argv[1:]` is evaluated once at import time, and
# `entry={}` in print_value is a mutable default (read-only here) — both
# are known pitfalls; confirm before changing.
# NOTE(review): format_status, format_error, format_bold, Client,
# DmIRodsServer, ReturnCode, print_request_error and
# ensure_daemon_is_running come from elsewhere in this file/package.
def dm_iinfo(argv=sys.argv[1:]): def fmt_time(timestamp): time_fmt = '%Y-%m-%d %H:%M:%S' return datetime.datetime.fromtimestamp(timestamp).strftime(time_fmt) def format_progress(progress): return progress def print_value(maxlen, f, value, entry={}): colorizer = entry.get('colorizer', None) if 'fmt' in entry: value = entry['fmt'](value) if sys.version_info[0] == 2: if not isinstance(value, str) and not isinstance(value, unicode): value = str(value) else: if not isinstance(value, str): value = str(value) f = format_bold(('{0: <%d}' % maxlen).format(f)) sep = ': ' for line in value.split('\n'): if colorizer is not None: line = colorizer(line) print(f + sep + line) f = ('{0: <%d}' % maxlen).format('') sep = ' ' def count_groups(fields, obj): current_group = '' ret = {'': 0} for entry in fields: if 'group' in entry: current_group = entry.get('group') ret[current_group] = 0 elif 'field' in entry: f = entry.get('field') if obj.get(f, None) is not None: ret[current_group] += 1 elif 'fieldre' in entry: expr = re.compile(entry.get('fieldre')) ret[current_group] += sum( [1 if expr.match(k) else 0 for k in obj.keys()]) return ret fields = [{ 'group': 'Transfer' }, { 'field': 'retries' }, { 'field': 'status', 'colorizer': format_status }, { 'field': 'progress', 'colorizer': format_progress }, { 'field': 'errmsg', 'colorizer': format_error }, { 'field': 'time_created', 'fmt': fmt_time }, { 'field': 'transferred' }, { 'field': 'mode' }, { 'group': 'Local File' }, { 'field': 'local_file' }, { 'field': 'local_atime', 'fmt': fmt_time }, { 'field': 'local_ctime', 'fmt': fmt_time }, { 'field': 'local_size' }, { 'field': 'checksum' }, { 'group': 'Remote Object' }, { 'field': 'remote_file' }, { 'field': 'remote_size' }, { 'field': 'remote_create_time', 'fmt': fmt_time }, { 'field': 'remote_modify_time', 'fmt': fmt_time }, { 'field': 'remote_checksum' }, { 'field': 'collection' }, { 'field': 'object' }, { 'field': 'remote_owner_name' }, { 'field': 'remote_owner_zone' }, { 'field': 
'remote_replica_number' }, { 'field': 'remote_replica_status' }, { 'group': 'DMF Data' }, { 'fieldre': 'DMF_.*' }] parser = ArgumentParser(description='Get details for object.') parser.add_argument('file', type=str, help='object') args = parser.parse_args(argv) ensure_daemon_is_running() client = Client(DmIRodsServer.get_socket_file()) code, result = client.request({"info": args.file}) if code != ReturnCode.OK: print_request_error(code, result) sys.exit(8) obj = json.loads(result) if not obj: return maxlen = max([len(v) for v in obj.keys()]) + 2 groups = count_groups(fields, obj) current_group = '' for entry in fields: if 'group' in entry: current_group = entry.get('group') if groups.get(current_group, 0) > 0: print("--------------------------") print(current_group) print("--------------------------") elif 'field' in entry: if groups.get(current_group, 0) > 0: f = entry.get('field') value = obj.get(f, None) if value is not None: print_value(maxlen, f, value, entry) elif 'fieldre' in entry: if groups.get(current_group, 0) > 0: expr = re.compile(entry.get('fieldre')) for f, value in { k: v for k, v in obj.items() if expr.match(k) }.items(): print_value(maxlen, f, value, entry)
def arg_parser(prog=None):
    """Build the lizard command line parser.

    Args:
        prog: program name shown in usage (defaults to sys.argv[0]).

    Returns:
        A fully configured ``ArgumentParser``.
    """
    from argparse import ArgumentParser, Action, ArgumentError

    class DictAction(Action):  # pylint: disable=R0903
        # Collects repeated "field=limit" options into a dict of int limits.

        def __init__(self, option_strings, dest, nargs=None, **kwargs):
            super(DictAction, self).__init__(option_strings, dest, **kwargs)

        def __call__(self, parser, namespace, value, option_string=None):
            if not re.match(r"\s*\w+\s*=\s*\d+", value):
                raise ArgumentError(self, "should be like nloc=20")
            # BUG FIX: maxsplit was 2, so a value containing two '='
            # produced three pieces and crashed the two-name unpacking;
            # splitting once keeps everything after the first '=' in val.
            k, val = value.split("=", 1)
            getattr(namespace, self.dest)[k.strip()] = int(val.strip())

    parser = ArgumentParser(prog=prog)
    parser.add_argument('paths', nargs='*', default=['.'],
                        help='list of the filename/paths.')
    parser.add_argument('--version', action='version', version=version)
    parser.add_argument(
        "-l", "--languages",
        help="List the programming languages you want to analyze. "
             "if left empty, it'll search for all languages it knows. "
             "`lizard -l cpp -l java`searches for C++ and Java code. "
             "The available languages are: "
             + ', '.join(x.language_names[0] for x in languages()),
        action="append", dest="languages", default=[])
    parser.add_argument(
        "-V", "--verbose",
        help="Output in verbose mode (long function name)",
        action="store_true", dest="verbose", default=False)
    parser.add_argument(
        "-C", "--CCN",
        help="Threshold for cyclomatic complexity number warning. "
             "The default value is %d. Functions with CCN bigger than it "
             "will generate warning" % DEFAULT_CCN_THRESHOLD,
        type=int, dest="CCN", default=DEFAULT_CCN_THRESHOLD)
    parser.add_argument(
        "-f", "--input_file",
        help="get a list of filenames from the given file",
        type=str, dest="input_file")
    parser.add_argument(
        "-L", "--length",
        help="Threshold for maximum function length warning. "
             "The default value is %d. Functions length bigger than it "
             "will generate warning" % DEFAULT_MAX_FUNC_LENGTH,
        type=int, dest="length", default=DEFAULT_MAX_FUNC_LENGTH)
    parser.add_argument(
        "-a", "--arguments",
        help="Limit for number of parameters",
        type=int, dest="arguments", default=100)
    parser.add_argument(
        "-w", "--warnings_only",
        help="Show warnings only, using clang/gcc's warning format for "
             "printing warnings. http://clang.llvm.org/docs/UsersManual.html"
             "#cmdoption-fdiagnostics-format",
        action="store_const", const=print_clang_style_warning,
        dest="printer")
    parser.add_argument(
        "--warning-msvs",
        help="Show warnings only, using Visual Studio's warning format "
             "for printing warnings. "
             "https://msdn.microsoft.com/en-us/library/yxkt8b26.aspx",
        action="store_const", const=print_msvs_style_warning,
        dest="printer")
    parser.add_argument(
        "-i", "--ignore_warnings",
        help="If the number of warnings is equal or less than the number, "
             "the tool will exit normally; otherwise, it will generate "
             "error. If the number is negative, the tool exits normally "
             "regardless of the number of warnings. Useful in makefile "
             "for legacy code.",
        type=int, dest="number", default=0)
    parser.add_argument(
        "-x", "--exclude",
        help="Exclude files that match the pattern. * matches everything, "
             "? matches any single character, \"./folder/*\" exclude "
             "everything in the folder recursively. Multiple patterns "
             "can be specified. Don't forget to add \"\" around the "
             "pattern.",
        action="append", dest="exclude", default=[])
    parser.add_argument(
        "-t", "--working_threads",
        help="number of working threads. The default value is 1. Using a "
             "bigger number can fully utilize the CPU and often faster.",
        type=int, dest="working_threads", default=1)
    parser.add_argument(
        "-X", "--xml",
        help="Generate XML in cppncss style instead of the tabular output. "
             "Useful to generate report in Jenkins server",
        action="store_const", const=print_xml, dest="printer")
    parser.add_argument(
        "--csv",
        help="Generate CSV output as a transform of the default output",
        action="store_const", const=print_csv, dest="printer")
    parser.add_argument(
        "-H", "--html",
        help="Output HTML report",
        action="store_const", const=html_output, dest="printer")
    parser.add_argument(
        "-m", "--modified",
        help="Calculate modified cyclomatic complexity number , which "
             "count a switch/case with multiple cases as one CCN.",
        action="append_const", const="modified", dest="extensions",
        default=[])
    _extension_arg(parser)
    # TYPO FIX: the help text previously read "p#arameter_count".
    parser.add_argument(
        "-s", "--sort",
        help="Sort the warning with field. The field can be nloc, "
             "cyclomatic_complexity, token_count, parameter_count, etc. "
             "Or an customized field.",
        action="append", dest="sorting", default=[])
    parser.add_argument(
        "-T", "--Threshold",
        help="Set the limit for a field. The field can be nloc, "
             "cyclomatic_complexity, token_count, parameter_count, etc. "
             "Or an customized file. Lizard will report warning if a "
             "function exceed the limit",
        action=DictAction, dest="thresholds", default={})
    parser.add_argument(
        "-W", "--whitelist",
        help="The path and file name to the whitelist file. It's "
             "'./whitelizard.txt' by default. Find more information in "
             "README.",
        type=str, dest="whitelist", default=DEFAULT_WHITELIST)
    parser.usage = '''lizard [options] [PATH or FILE] [PATH] ...'''
    parser.description = __doc__
    return parser
# main: XstreamDL-CLI entry point.  Builds the (add_help=False) parser
# with all downloader options, handles -h/-v manually via print_version /
# parser.print_help, prompts interactively for a URI when none was given,
# lowers non-file log handlers to the requested --log-level, then hands
# the parsed args to command_handler and runs the Daemon.
# NOTE(review): this block is kept byte-identical — several help-string
# literals are split across the collapsed source lines (e.g. "--key" and
# "--redl-code"), so any reformatting risks corrupting runtime strings.
# NOTE(review): Daemon, command_handler, logger and __version__ are
# defined elsewhere in the file/package.
def main(): def print_version(): print( f'version {__version__}, A downloader that download the HLS/DASH stream.' ) parser = ArgumentParser( prog='XstreamDL-CLI', usage='XstreamDL-CLI [OPTION]... URL/FILE/FOLDER...', description='A downloader that download the HLS/DASH stream', add_help=False) parser.add_argument('-v', '--version', action='store_true', help='print version and exit') parser.add_argument('-h', '--help', action='store_true', help='print help message and exit') parser.add_argument('--speed-up', action='store_true', help='speed up at end') parser.add_argument('--speed-up-left', default='10', help='speed up when left count less than this value') parser.add_argument('--live', action='store_true', help='live mode') parser.add_argument( '--compare-with-url', action='store_true', help='use full url to compare with last segments to get new segments') parser.add_argument( '--dont-split-discontinuity', action='store_true', help='dont take #EXT-X-DISCONTINUITY tag as a new stream') parser.add_argument('--name-from-url', action='store_true', help='get name from segment url') parser.add_argument( '--live-duration', default='', help= 'live record time, format HH:MM:SS, example 00:00:30 will record about 30s' ) parser.add_argument('--live-utc-offset', default='0', help='the value is used to correct utc time') parser.add_argument('--live-refresh-interval', default='3', help='live refresh interval') parser.add_argument('--name', default='', help='specific stream base name') parser.add_argument('--base-url', default='', help='set base url for Stream') parser.add_argument( '--ad-keyword', default='', help='skip #EXT-X-DISCONTINUITY which segment url has this keyword') parser.add_argument( '--resolution', default='', choices=['', '270', '360', '480', '540', '576', '720', '1080', '2160'], help='auto choose target quality') parser.add_argument('--best-quality', action='store_true', help='auto choose best quality for dash streams') parser.add_argument( '--video-only', 
action='store_true', help='only choose video stream when use --best-quality') parser.add_argument( '--audio-only', action='store_true', help='only choose audio stream when use --best-quality') parser.add_argument('--all-videos', action='store_true', help='choose all video stream to download') parser.add_argument('--all-audios', action='store_true', help='choose all audio stream to download') parser.add_argument('--service', default='', help='set serviceLocation for BaseURL choose') parser.add_argument('--save-dir', default='Downloads', help='set save dir for Stream') parser.add_argument( '--select', action='store_true', help='show stream to select and download, default is to download all') parser.add_argument('--multi-s', action='store_true', help='use this option when S tag number > 0') parser.add_argument( '--disable-force-close', action='store_true', help= 'default make all connections closed securely, but it will make DL speed slower' ) parser.add_argument( '--limit-per-host', default=4, help= 'increase the value if your connection to the stream host is poor, suggest >100 for DASH stream' ) parser.add_argument( '--headers', default='headers.json', help='read headers from headers.json, you can also use custom config') parser.add_argument('--url-patch', default='', help='add some custom strings for all segments link') parser.add_argument('--overwrite', action='store_true', help='overwrite output files') parser.add_argument('--raw-concat', action='store_true', help='concat content as raw') parser.add_argument('--disable-auto-concat', action='store_true', help='disable auto-concat') parser.add_argument('--enable-auto-delete', action='store_true', help='enable auto-delete files after concat success') parser.add_argument( '--disable-auto-decrypt', action='store_true', help='disable auto-decrypt segments before dump to disk') parser.add_argument( '--key', default=None, help= '<id>:<k>, <id> is either a track ID in decimal or a 128-bit KID in hex, <k> is a 128-bit key 
in hex' ) parser.add_argument( '--b64key', default=None, help= 'base64 format aes key, only for HLS standard AES-128-CBC encryption') parser.add_argument('--hexiv', default=None, help='hex format aes iv') parser.add_argument( '--proxy', default='', help= 'use socks/http proxy, e.g. socks5://127.0.0.1:10808 or http://127.0.0.1:10809' ) parser.add_argument( '--disable-auto-exit', action='store_true', help='disable auto exit after download end, GUI will use this option') parser.add_argument('--parse-only', action='store_true', help='parse only, not to download') parser.add_argument( '--show-init', action='store_true', help='show initialization to help you identify same name stream') parser.add_argument( '--index-to-name', action='store_true', help= 'some dash live have the same name for different stream, use this option to avoid' ) parser.add_argument('--log-level', default='INFO', choices=['DEBUG', 'INFO', 'WARNING', 'ERROR'], help='set log level, default is INFO') parser.add_argument( '--redl-code', default='', help= 're-download set of response status codes , e.g. 
408,500,502,503,504') parser.add_argument('--hide-load-metadata', action='store_true', help='hide `Load #EXT-X-MEDIA metadata` balabala') parser.add_argument('URI', nargs='*', help='URL/FILE/FOLDER string') args = parser.parse_args() if args.help: print_version() parser.print_help() sys.exit() if args.version: print_version() sys.exit() if len(args.URI) == 0: try: uri = input( 'Paste your URL/FILE/FOLDER string at the end of commands, plz.\nCtrl C to exit or input here:' ) except KeyboardInterrupt: sys.exit() if uri.strip() != '': args.URI.append(uri.strip()) if len(args.URI) == 0: sys.exit('No URL/FILE/FOLDER input') for handler in logger.handlers: # NOTE: cannot test against StreamHandler here, because FileHandler subclasses StreamHandler, so isinstance would also match file handlers (original comment translated from Chinese) if isinstance(handler, logging.FileHandler) is False: handler.setLevel(logging.getLevelName(args.log_level)) command_handler(args) logger.info(f'use {__version__}, set URI to {args.URI}') logger.debug(f'args => {args}') daemon = Daemon(args) daemon.daemon() if args.disable_auto_exit: _ = input('press any key to exit.')
# Tail of the HMAC timing-attack client: the leading statements
# (signature[idx] = b, the verification GET and the assert) belong to a
# function whose `def` line is outside this view, so they are kept as-is.
# The __main__ section launches either the attacking client or the Flask
# server for challenge #31 depending on the `role` argument.
# NOTE(review): s, signature, artificial_timing_attack and app are
# defined elsewhere in the file.
signature[idx] = b # Verify the signature we've found is correct resp = s.get('http://localhost:9000/test', params={ 'file': 'some file contents', 'signature': signature.hex() }) assert resp.status_code == 204, 'Guessed hash should return a 204' if __name__ == '__main__': print( 'Challenge #31 - Implement and break HMAC-SHA1 with an artificial timing leak' ) parser = ArgumentParser( description='Launches either a client or server for challenge #31') parser.add_argument('role', choices=('client', 'server'), help='Which role to launch') args = parser.parse_args() if args.role == 'client': artificial_timing_attack() elif args.role == 'server': app.run(host='localhost', port=9000, debug=True)
# Tail of a schwimmbad pool example: the statements up to `return results`
# belong to a function whose `def` line (and the `pool`/`worker` names it
# uses) are outside this view, so they are kept untouched.  The __main__
# section defines the mutually exclusive --ncores / --mpi options.
# NOTE(review): `math` is used but not imported in this visible chunk —
# presumably imported earlier in the file; confirm.
import random a = [random.uniform(0, 2 * math.pi) for _ in range(100)] b = [random.uniform(0, 2 * math.pi) for _ in range(100)] tasks = list(zip(a, b)) results = pool.map(worker, tasks) pool.close() return results # Now we could save or do something with the results object if __name__ == "__main__": import schwimmbad from argparse import ArgumentParser parser = ArgumentParser(description="Schwimmbad example.") group = parser.add_mutually_exclusive_group() group.add_argument("--ncores", dest="n_cores", default=1, type=int, help="Number of processes (uses " "multiprocessing).") group.add_argument("--mpi", dest="mpi", default=False, action="store_true", help="Run with MPI.") args = parser.parse_args()
"""Play ice-hockey matches between the listed player modules."""
from .utils import Player
from .utils import Tournament
from argparse import ArgumentParser
import importlib

if __name__ == '__main__':
    # BUG FIX: the description was being passed as ArgumentParser's first
    # positional parameter, which is `prog` (the program name shown in
    # usage), not `description`.
    parser = ArgumentParser(description="Play some Ice Hockey. List any number of players, odd players are in team 1, even players team 2.")
    parser.add_argument('-s', '--save_loc', help="Do you want to record?")
    parser.add_argument('-f', '--num_frames', default=500000, type=int,
                        help="How many steps should we play for?")
    parser.add_argument('players', nargs='+',
                        help="Add any number of players. List python module names or `AI` for AI players). Teams alternate.")
    args = parser.parse_args()

    # 'AI' entries become None (built-in AI); everything else is imported
    # as a module providing HockeyPlayer.  Even indices → team 0, odd → 1.
    players = []
    for i, player in enumerate(args.players):
        if player == 'AI':
            players.append(None)
        else:
            players.append(Player(importlib.import_module(player).HockeyPlayer(i), i % 2))

    tournament = Tournament(players)
    score = tournament.play(save=args.save_loc, max_frames=args.num_frames)
    tournament.close()
    print('Final score', score)
# Chunk of an RF-size analysis script.  `spec` is created before this
# view, so the module-loading lines at the top are kept untouched.
# NOTE(review): the `class args` below looks like a notebook-style set of
# default values; it is immediately shadowed by `args = parser.parse_args()`
# at the end of this chunk, so those defaults are dead when run as a
# script — confirm which behavior is intended.
link_libs = importlib.util.module_from_spec(spec) spec.loader.exec_module(link_libs) from argparse import ArgumentParser import pandas as pd import numpy as np import h5py class args: ellipses = 'data/runs/270420/summ_base_ell.csv' margin = 56 image_size = 224 parser = ArgumentParser(description="Calculate average RF size per layer.") parser.add_argument('output_csv', help='Path to csv file where data should be output') parser.add_argument('ellipses', help='Path to csv file containing RF ellipse fits.') parser.add_argument( "margin", type=float, help='Float, number of pixels around border for which RFs are considered' + ' too close to the border.') parser.add_argument('image_size', type=float, help='Pixel size (width and height) of input space.') args = parser.parse_args() ells = pd.read_csv(args.ellipses)
def parse_args():
    # type: () -> Namespace
    """Build and parse the command line for the checker driver."""
    parser = ArgumentParser()
    parser.add_argument('file', type=str, help='Filename to check')
    parser.add_argument("-c", "--checkers", dest="checkers",
                        default=default_checkers,
                        help="Comma-separated list of checkers")
    parser.add_argument("-i", "--ignore-codes", dest="ignore_codes",
                        help="Comma-separated list of error codes to ignore")
    # COPY-PASTE FIX: this help text previously read "codes to ignore",
    # duplicated from the option above; this option enables codes.
    parser.add_argument("-e", "--enable-codes", dest="enable_codes",
                        default='',
                        help="Comma-separated list of error codes to enable")
    parser.add_argument('--max-line-length', dest='max_line_length',
                        default=80, action='store',
                        help='Maximum line length')
    parser.add_argument('--no-merge-configs', dest='merge_configs',
                        action='store_false',
                        help=('Whether to ignore config files found at a '
                              'higher directory than this one'))
    # HELP-TEXT FIX: the old text ("Run checkers sequentially, rather than
    # simultaneously") described the opposite of what the flag's default
    # (True) does.
    parser.add_argument('--multi-thread', type=str2bool, default=True,
                        action='store',
                        help=('Run checkers simultaneously (pass False to '
                              'run them sequentially)'))
    parser.add_argument(
        '--venv-root', dest='venv_root', default='~/.virtualenvs',
        action='store',
        help=('Location of all Python virtual environments. Used with '
              'auto-detecting virtual envs created by virtualenvwrapper'))
    parser.add_argument(
        '--venv-path', dest='venv_path', default=None, action='store',
        help=('The full path to a virtualenv. Used with a directly-created '
              '(not using virtualenvwrapper) virtualenv.'))
    parser.add_argument('--pylint-rcfile', default=None,
                        dest='pylint_rcfile',
                        help='Location of a config file for pylint')
    parser.add_argument('--mypy-config-file', default=None,
                        dest='mypy_config_file',
                        help='Location of a config file for mypy')
    parser.add_argument('--flake8-config-file', default=None,
                        dest='flake8_config_file',
                        help='Location of a config file for flake8')
    parser.add_argument(
        '--report-checker-errors-inline', type=str2bool, default=True,
        action='store',
        help=("Whether to fake failing checkers's STDERR as a reported "
              "error for easier display."))
    parser.add_argument('--mypy-no-implicit-optional', type=str2bool,
                        default=False, action='store')
    parser.add_argument('--debug', action='store_true',
                        help=('Enable output to help debug pycheckers itself'))
    return parser.parse_args()
def handle_leave():
    """Log LINE 'leave' events (the registering decorator is outside this chunk)."""
    app.logger.info("Got leave event")


@handler.add(PostbackEvent)
def handle_postback(event):
    """Reply 'pong' when a postback carries the data payload 'ping'."""
    if event.postback.data == 'ping':
        line_bot_api.reply_message(
            event.reply_token, TextSendMessage(text='pong'))


@handler.add(BeaconEvent)
def handle_beacon(event):
    """Echo the hardware id of the beacon that triggered the event."""
    line_bot_api.reply_message(
        event.reply_token,
        TextSendMessage(text='Got beacon event. hwid=' + event.beacon.hwid))


if __name__ == "__main__":
    arg_parser = ArgumentParser(
        usage='Usage: python ' + __file__ + ' [--port <port>] [--help]'
    )
    # BUG FIX: without type=int a command-line port arrived as a string,
    # while app.run(port=...) expects an integer.
    arg_parser.add_argument('-p', '--port', default=8000, type=int,
                            help='port')
    # NOTE(review): `-d <value>` treats any non-empty string as truthy;
    # kept as-is because switching to store_true would change the accepted
    # command line syntax for existing callers.
    arg_parser.add_argument('-d', '--debug', default=False, help='debug')
    options = arg_parser.parse_args()

    # create tmp dir for download content
    make_static_tmp_dir()

    app.run(debug=options.debug, port=options.port)
# Top of the jieba command line interface module: builds the `python -m
# jieba` option parser (-d word delimiter, -p POS tagging, -D custom
# dictionary).  This chunk is cut off mid-way through the final
# `parser.add_argument(` call, so everything is kept byte-identical.
"""Jieba command line interface.""" import sys import jieba from argparse import ArgumentParser from ._compat import * parser = ArgumentParser(usage="%s -m jieba [options] filename" % sys.executable, description="Jieba command line interface.", epilog="If no filename specified, use STDIN instead.") parser.add_argument( "-d", "--delimiter", metavar="DELIM", default=' / ', nargs='?', const=' ', help= "use DELIM instead of ' / ' for word delimiter; or a space if it is used without DELIM" ) parser.add_argument( "-p", "--pos", metavar="DELIM", nargs='?', const='_', help= "enable POS tagging; if DELIM is specified, use DELIM instead of '_' for POS delimiter" ) parser.add_argument("-D", "--dict", help="use DICT as dictionary") parser.add_argument(
# Chunk of a 3D human-pose-estimation demo.  The leading rows continue a
# matrix literal whose opening call is outside this view, and the chunk
# ends mid-way through the '-d'/'--device' add_argument call, so all code
# is kept byte-identical.
[0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [0.2, 0, 0, 0, 0, 0, 0.4, 0, 0, 0, 0, 0, 0.4, 0, 0, 0, 0, 0, 0], [0, 0, 0, 0.5, 0, 0, 0, 0, 0, 0.5, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.5, 0.5, 0, 0], [0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0]]) if __name__ == '__main__': parser = ArgumentParser( description='Lightweight 3D human pose estimation demo. ' 'Press esc to exit, "p" to (un)pause video or process next image.') parser.add_argument( '-m', '--model', help='Required. Path to checkpoint with a trained model ' '(or an .xml file in case of OpenVINO inference).', type=str, required=True) parser.add_argument('--video', help='Optional. Path to video file or camera id.', type=str, default='') parser.add_argument( '-d', '--device',
# Chunk of testasciidoc.py: the leading reader loop belongs to a method
# (it uses self.pos / self.eol) whose `def` line is outside this view, so
# it is kept untouched.  The __main__ section builds the conformance-test
# CLI with a required subcommand ('list' shown here).
# NOTE(review): the two-part `msg` literal concatenates to
# "...testasciidoc.conf intestasciidoc.py directory" — a space is missing
# at the join; fixing it requires a code change outside a doc-only edit.
if not self.eol(): result.append(self[self.pos]) self.pos += 1 while not self.eol(): if re.match(regexp, self[self.pos]): break result.append(self[self.pos]) self.pos += 1 return result if __name__ == '__main__': # Process command line options. from argparse import ArgumentParser parser = ArgumentParser( description='Run AsciiDoc conformance tests specified in ' 'configuration FILE.') msg = 'Use configuration file CONF_FILE (default configuration file is testasciidoc.conf in' \ 'testasciidoc.py directory)' parser.add_argument('-v', '--version', action='version', version='%(prog)s {}'.format(__version__)) parser.add_argument('-f', '--conf-file', help=msg) subparsers = parser.add_subparsers(metavar='command', dest='command') subparsers.required = True subparsers.add_parser('list', help='List tests') options = ArgumentParser(add_help=False)
# Chunk of a Python-2 manual servo-control script: it opens mid-way
# through a docstring ('outside this range ... """') and uses py2 print
# statements, so everything is kept byte-identical.  The loop reads
# rudder/sails positions from stdin and transmits them over serial until
# Ctrl-C.
# NOTE(review): BoatState, SerialConnection, transmit_serial and main are
# defined elsewhere in the file.
outside this range (although they are allowed for testing purposes """ print intro state = BoatState() connection = SerialConnection(port=port) try: while 1: rudder = int(input("Enter rudder position: ")) sails = int(input("Enter sails position: ")) state.set_pos((rudder, sails)) transmit_serial(state, connection) print state except KeyboardInterrupt: print "\nINTERUPT PROGRAM HALT" if __name__ == "__main__": argparser = ArgumentParser(description="Manual Servo Control") argparser.add_argument('-t', action='store', dest='run_number', help='run number used in logging') argparser.add_argument('-port', action='store', dest='port', default='/dev/ttyACM0', help='port for uploader arduino') r = argparser.parse_args() log = (r.run_number is not None) main(port=r.port, log=log, logfilenum=r.run_number)
def parse_cmdline(description, args, createfunc=None, addfunc=None,
                  removefunc=None, startfunc=None, stopfunc=None,
                  showfunc=None, triagefunc=None, coveragefunc=None,
                  destroyfunc=None, validatefunc=None):
    """Build the Orthrus command-line interface and parse *args*.

    Each ``*func`` callback is attached as the ``func`` default of the
    corresponding subcommand, so the caller can dispatch via
    ``parsed.func(parsed)``.

    :param description: text shown in the top-level ``--help`` output
    :param args: argument list to parse (e.g. ``sys.argv[1:]``)
    :return: the parsed :class:`argparse.Namespace`
    """
    # Fix: ArgumentParser's first positional parameter is `prog`, not
    # `description` — passing it positionally replaced the program name
    # in usage/help output and left the description empty.
    argParser = ArgumentParser(description=description)
    argParser.add_argument('-v', '--verbose', action='store_true', default=False,
                           help="""Verbose mode, print information about the progress""")

    subparsers = argParser.add_subparsers(description="Orthrus subcommands")

    # Command 'create': instrument/setup target binaries for fuzzing
    create_parser = subparsers.add_parser('create', help=CREATE_HELP)
    create_parser.add_argument('-asan', '--afl-asan', action='store_true', default=False,
                               help="""Setup binaries for afl with AddressSanitizer""")
    create_parser.add_argument('-fuzz', '--afl-harden', action='store_true', default=False,
                               help="""Setup binaries for afl in 'harden' mode (stack-protector, fortify)""")
    create_parser.add_argument('-cov', '--coverage', action='store_true', default=False,
                               help="""Setup binaries to collect coverage information""")
    create_parser.add_argument('-d', '--configure-flags', nargs='?', type=str, default="",
                               help='Additional flags for configuring the source')
    create_parser.set_defaults(func=createfunc)

    # Command 'add': register a fuzzing job
    add_parser = subparsers.add_parser('add', help=ADD_HELP)
    add_parser.add_argument('-n', '--job', required=True, type=str,
                            help='Add a job with executable command line invocation string')
    # `import` is a Python keyword, so the parsed value is stored as `_import`.
    add_parser.add_argument('-i', '--import', dest='_import', nargs='?', type=str, default="",
                            help='Import an AFL fuzzing output directory provided as tar.gz')
    add_parser.add_argument('-s', '--sample', nargs='?', type=str, default="",
                            help='A single file or directory of afl testcases for fuzzing')
    add_parser.set_defaults(func=addfunc)

    # Command 'remove'
    remove_parser = subparsers.add_parser('remove', help=REMOVE_HELP)
    remove_parser.add_argument('-j', '--job-id', required=True, type=str,
                               help='Job Id for the job which should be removed')
    remove_parser.set_defaults(func=removefunc)

    # Command 'start'
    start_parser = subparsers.add_parser('start', help=START_HELP)
    start_parser.add_argument('-j', '--job-id', required=True, type=str,
                              help='Job Id for the job which should be started')
    start_parser.add_argument('-c', '--coverage', action='store_true', default=False,
                              help="""Collect coverage information while fuzzing""")
    start_parser.add_argument('-m', '--minimize', action='store_true', default=False,
                              help="""Minimize corpus before start""")
    start_parser.set_defaults(func=startfunc)

    # Command 'stop'
    stop_parser = subparsers.add_parser('stop', help=STOP_HELP)
    stop_parser.add_argument('-c', '--coverage', action='store_true', default=False,
                             help="""Stop afl-cov instances on stop""")
    stop_parser.set_defaults(func=stopfunc)

    # Command 'show'
    show_parser = subparsers.add_parser('show', help=SHOW_HELP)
    show_parser.add_argument('-j', '--jobs', action='store_true', default=False,
                             help="""Show configured jobs""")
    show_parser.add_argument('-cov', '--cov', action='store_true', default=False,
                             help="""Show coverage of job""")
    show_parser.set_defaults(func=showfunc)

    # Command 'triage'
    triage_parser = subparsers.add_parser('triage', help=TRIAGE_HELP)
    triage_parser.add_argument('-j', '--job-id', nargs='?', type=str, default="",
                               help="""Job Id for the job which should be triaged""")
    triage_parser.set_defaults(func=triagefunc)

    # Command 'coverage'
    coverage_parser = subparsers.add_parser('coverage', help=COVERAGE_HELP)
    coverage_parser.add_argument('-j', '--job-id', nargs='?', type=str, default="",
                                 required=True,
                                 help="""Job Id for checking coverage""")
    coverage_parser.set_defaults(func=coveragefunc)

    # Command 'destroy'
    destroy_parser = subparsers.add_parser('destroy', help=DESTROY_HELP)
    destroy_parser.set_defaults(func=destroyfunc)

    # Command 'validate'
    validate_parser = subparsers.add_parser('validate', help=VALIDATE_HELP)
    validate_parser.set_defaults(func=validatefunc)

    return argParser.parse_args(args)
#!/usr/bin/env python3
"""Premix pileup (PU) events: split one input file into mixed output files."""

import uproot
import numpy as np
import os
from argparse import ArgumentParser
from multiprocessing import Pool
from mixing import tonumpy, readPU
from mixing import premixfile as pmf

# Fix: ArgumentParser's first positional parameter is `prog`, not the
# description — pass it by keyword so --help output is rendered correctly.
parser = ArgumentParser(description='premix PU')
parser.add_argument('inputFile')
parser.add_argument('nOutput', help='number of mixed output files', type=int)
parser.add_argument('outputDir')
parser.add_argument('-e', help='number of events per output file', default=800, type=int)
parser.add_argument('--pu', help='pileup', default=200, type=int)
args = parser.parse_args()

nPU = args.pu
nEvents = args.e
outputDir = args.outputDir
nOutput = args.nOutput

# Bail out early when an empty output directory was given.
if not outputDir:
    raise SystemExit()

# Fix: use os.makedirs instead of shelling out to `mkdir -p` — portable and
# safe for paths containing spaces or shell metacharacters.
os.makedirs(outputDir, exist_ok=True)
def get_parser():
    """Return an ArgumentParser exposing the optional ``--model`` flag."""
    cli = ArgumentParser()
    cli.add_argument('--model')
    return cli
pass


def main(args):
    u"""
    Main entry point of this script.

    NOTE(review): this is a stub — ``pd.read_excel()`` is called without an
    io/path argument and will raise TypeError; presumably the meta file path
    from the parsed CLI arguments should be passed in. TODO confirm.

    :param args: parsed command-line arguments (unused so far)
    :return: None
    """
    meta = pd.read_excel()
    pass


if __name__ == '__main__':
    # CLI: combine loom files from different samples, grouped by cell type.
    parser = ArgumentParser(description="Combine loom from different sample by cell type")
    parser.add_argument(
        "-i", "--input", help="Path to CellRanger output directory", type=str, required=True
    )
    parser.add_argument(
        "-m", "--meta", help="Path to meta data, csv file", type=str, required=True
#print('\033[0;0f\033[0J') # Color Palette CP_R = '\033[31m' CP_G = '\033[32m' CP_Y = '\033[33m' CP_C = '\033[36m' CP_0 = '\033[0m' # List of models which can be run by this example script models = [ 'yolov3', 'yolov3_tiny', 'inception', 'resnet34_50', 'resnet34_18', 'superresolution' ] model_names = sorted(name for name in models) parser = ArgumentParser(description="Micron DLA Examples") _ = parser.add_argument _('--model', type=str, default='linknet', help='Model architecture:' + ' | '.join(model_names) + ' (default: linknet)') _('--bitfile', type=str, default='', help='Path to the bitfile') _('--model-path', type=str, default='', help='Path to the NN model') _('--numfpga', type=int, default=1, help='Number of FPGAs to use') _('--numclus', type=int, default=1, help='Number of clusters to use') args = parser.parse_args() def main(): print('{:=<80}'.format('')) print('{}Micron{} DLA Examples{}'.format(CP_C, CP_Y, CP_0))
def main(test=False):
    """Entry point of the WinPython Package Manager (WPPM) CLI.

    When *test* is True, run a hard-coded sandbox smoke test and exit;
    otherwise parse command-line arguments and install or uninstall the
    given package into the target Python distribution.
    """
    if test:
        # Developer smoke test against a fixed installer in the sandbox dir.
        sbdir = osp.join(osp.dirname(__file__), os.pardir, os.pardir,
                         os.pardir, 'sandbox')
        tmpdir = osp.join(sbdir, 'tobedeleted')
        # fname = osp.join(tmpdir, 'scipy-0.10.1.win-amd64-py2.7.exe')
        fname = osp.join(sbdir, 'VTK-5.10.0-Qt-4.7.4.win32-py2.7.exe')
        print(Package(fname))
        sys.exit()
        # NOTE(review): everything below is unreachable because of the
        # sys.exit() call above — apparently leftover manual-test code.
        target = osp.join(utils.BASE_DIR, 'build', 'winpython-2.7.3',
                          'python-2.7.3')
        fname = osp.join(utils.BASE_DIR, 'packages.src',
                         'docutils-0.9.1.tar.gz')
        dist = Distribution(target, verbose=True)
        pack = Package(fname)
        print(pack.description)
        # dist.install(pack)
        # dist.uninstall(pack)
    else:
        parser = ArgumentParser(description="WinPython Package Manager: install, "
                                "uninstall or upgrade Python packages on a Windows "
                                "Python distribution like WinPython.")
        # On Python 2 command-line paths are handled as `unicode` objects.
        parser.add_argument('fname', metavar='package',
                            type=str if py3compat.PY3 else unicode,
                            help='path to a Python package')
        parser.add_argument('-t', '--target', dest='target',
                            default=sys.prefix,
                            help='path to target Python distribution '
                                 '(default: "%s")' % sys.prefix)
        parser.add_argument('-i', '--install', dest='install',
                            action='store_const', const=True, default=False,
                            help='install package (this is the default action)')
        parser.add_argument('-u', '--uninstall', dest='uninstall',
                            action='store_const', const=True, default=False,
                            help='uninstall package')
        args = parser.parse_args()
        if args.install and args.uninstall:
            raise RuntimeError("Incompatible arguments: --install and --uninstall")
        # Installing is the default action when neither flag is given.
        if not args.install and not args.uninstall:
            args.install = True
        if not osp.isfile(args.fname):
            raise IOError("File not found: %s" % args.fname)
        if utils.is_python_distribution(args.target):
            dist = Distribution(args.target)
            try:
                package = Package(args.fname)
                if package.is_compatible_with(dist):
                    if args.install:
                        dist.install(package)
                    else:
                        dist.uninstall(package)
                else:
                    raise RuntimeError("Package is not compatible with Python "
                                       "%s %dbit" % (dist.version, dist.architecture))
            # Presumably Package/Distribution raise NotImplementedError for
            # package formats WPPM cannot handle yet — TODO confirm.
            except NotImplementedError:
                raise RuntimeError("Package is not (yet) supported by WPPM")
        else:
            raise WindowsError("Invalid Python distribution %s" % args.target)
elif ds.type == ds.TYPE_BYTES: val = urlsafe_b64encode(val).decode('ascii').strip('=') elif ds.type == ds.TYPE_BOOL: val = int(val) obj.append('%d%s%s' % (ds.number, types_enc[ds.type], val)) return obj if __name__ == '__main__': from argparse import ArgumentParser from common import load_proto_msgs parser = ArgumentParser(description='Decode a JsProtoUrl text message, providing a .proto.') parser.add_argument('pburl_data') parser.add_argument('proto_file') parser.add_argument('proto_msg_name', nargs='?') args = parser.parse_args() sep = '!' if args.pburl_data[0] == '!' else '&' msg = None for name, cls in load_proto_msgs(args.proto_file): if not args.proto_msg_name or args.proto_msg_name == name: msg = cls() break if not msg: raise ValueError('Provided message name was not found in .proto.')
convert_size(member.size))) tar_f.extractall(directory) remove(new_path) if DEBUG: print("- Deleted %s" % (single.name)) elif single.name.endswith(".img"): # rk30 images if DEBUG: print("> Unpacking %s..." % (single.name)) assert unpack_img(join( directory, single.name)), "Error unpacking system image" if __name__ == "__main__": # create argument parser parser = ArgumentParser( description= "A script to make unpacking and packing Retron 5 updates easier (or actually possible)" ) parser.add_argument("-i", "--in-file", type=str, help="The update file you want to unpack") parser.add_argument("-o", "--out-dir", type=str, default=OUTPUT_DIR, help="The directory you want to extract the update to") parser.add_argument("-l", "--list", action="store_true", help="List files in the update package") parser.add_argument("-e",
# Generated YDK model bindings for the IOS-XR Ethernet LLDP config schema.
from ydk.models.cisco_ios_xr import Cisco_IOS_XR_ethernet_lldp_cfg \
    as xr_ethernet_lldp_cfg
import os
import logging

# Local YDK repository location (not referenced in the visible portion).
YDK_REPO_DIR = os.path.expanduser("~/.ydk/")


def config_lldp(lldp):
    """Add config data to lldp object."""
    # NOTE(review): stub — no configuration is applied yet. Presumably
    # `lldp` is an xr_ethernet_lldp_cfg LLDP config object; TODO confirm.
    pass


if __name__ == "__main__":
    """Execute main program."""
    parser = ArgumentParser()
    parser.add_argument("-v", "--verbose",
                        help="print debugging messages",
                        action="store_true")
    parser.add_argument("device",
                        help="gNMI device (http://user:password@host:port)")
    args = parser.parse_args()
    # Split the device URL into scheme/user/password/host/port components.
    device = urlparse(args.device)

    # log debug messages if verbose argument specified
    if args.verbose:
        logger = logging.getLogger("ydk")
        logger.setLevel(logging.INFO)
        handler = logging.StreamHandler()
        formatter = logging.Formatter(("%(asctime)s - %(name)s - "
                                       "%(levelname)s - %(message)s"))
        handler.setFormatter(formatter)
# Abuse the overloaded operator to figure out the ordering tallyObjs = sorted(tallyObjs) tallyObjs.reverse() n = 1 while tallyObjs: print("BOOK {N}:".format(N=n)) print("\t{TITLE}\n".format(TITLE=tallyObjs.pop().title) .replace("[", "").replace("]", "")) n += 1 if __name__ == "__main__": parser = ArgumentParser(description="This is a script to order book club " "books given a CSV of ranked choice " "votes per participant") parser.add_argument('-i', '--input', help='Input CSV (will be rankings.csv by default)', default="rankings.csv") parser.add_argument('-c', '--columns', help='Column indices to ignore in input file (will be 0 by default)', type=int, nargs='+', default=[0]) parser.add_argument('-q', '--question', help='Question string from the form to delete from titles' ' (will be \'Please rank your choices \' by default)', default="Please rank your choices ")