def die(msg, exception=None):
    """Print a message, then exit. Print the exception too if one is given."""
    print(msg)
    if exception is not None:
        print(exception)
    # Relies on a module-level `parser` being defined before die() is called.
    parser.print_help()
    sys.exit(1)
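# die() above closes over a module-level `parser`; a minimal sketch of the
# scaffold it assumes (hypothetical setup and caller, not the original module):
import argparse
import sys

parser = argparse.ArgumentParser(description="example tool")  # must exist before die() runs

def read_or_die(path):
    try:
        return open(path).read()
    except IOError as exc:
        die("cannot read input file: " + path, exc)  # prints message, exception, help; exits 1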
def get_params_parser():
    """Parse command line arguments"""
    parser = argparse.ArgumentParser(usage=ARTHUR_USAGE_MSG,
                                     description=ARTHUR_DESC_MSG,
                                     epilog=ARTHUR_EPILOG_MSG,
                                     formatter_class=argparse.RawDescriptionHelpFormatter,
                                     add_help=False)

    ElasticOcean.add_params(parser)

    parser.add_argument('-h', '--help', action='help', help=argparse.SUPPRESS)
    parser.add_argument('-g', '--debug', dest='debug', action='store_true', help=argparse.SUPPRESS)
    parser.add_argument("--no_incremental", action='store_true', help="don't use last state for data source")
    parser.add_argument("--fetch_cache", action='store_true', help="Use cache for item retrieval")
    parser.add_argument("--redis", default="redis", help="url for the redis server")
    parser.add_argument("--enrich", action='store_true', help="Enrich items after retrieving")
    parser.add_argument("--enrich_only", action='store_true', help="Only enrich items (DEPRECATED, use --only-enrich)")
    parser.add_argument("--only-enrich", dest='enrich_only', action='store_true', help="Only enrich items")
    parser.add_argument("--events-enrich", dest='events_enrich', action='store_true', help="Enrich events in items")
    parser.add_argument('--index', help="Ocean index name")
    parser.add_argument('--index-enrich', dest="index_enrich", help="Ocean enriched index name")
    parser.add_argument('--db-user', help="User for db connection (default to root)", default="root")
    parser.add_argument('--db-password', help="Password for db connection (default empty)", default="")
    parser.add_argument('--db-host', help="Host for db connection (default to mariadb)", default="mariadb")
    parser.add_argument('--db-projects-map', help="Projects Mapping DB")
    parser.add_argument('--json-projects-map', help="Projects Mapping JSON file")
    parser.add_argument('--project', help="Project for the repository (origin)")
    parser.add_argument('--refresh-projects', action='store_true', help="Refresh projects in enriched items")
    parser.add_argument('--db-sortinghat', help="SortingHat DB")
    parser.add_argument('--only-identities', action='store_true', help="Only add identities to SortingHat DB")
    parser.add_argument('--refresh-identities', action='store_true', help="Refresh identities in enriched items")
    parser.add_argument('--author_id', help="Field author_id to be refreshed")
    parser.add_argument('--author_uuid', help="Field author_uuid to be refreshed")
    parser.add_argument('--github-token', help="If provided, github usernames will be retrieved in git enrich.")
    parser.add_argument('--studies', action='store_true', help="Execute studies after enrichment.")
    parser.add_argument('--only-studies', action='store_true', help="Execute only studies.")
    parser.add_argument('backend', help=argparse.SUPPRESS)
    parser.add_argument('backend_args', nargs=argparse.REMAINDER, help=argparse.SUPPRESS)

    if len(sys.argv) == 1:
        parser.print_help()
        sys.exit(1)

    return parser
def command_dispatcher(self, args=None):
    desc = 'pydl4j, a system to manage your DL4J dependencies from Python.\n'
    parser = argparse.ArgumentParser(description=desc)
    parser.add_argument('-v', '--version', action='version',
                        version=pkg_resources.get_distribution("pydl4j").version,
                        help='Print pydl4j version')

    subparsers = parser.add_subparsers(title='subcommands', dest='command')
    subparsers.add_parser('init', help='Initialize pydl4j')
    subparsers.add_parser('install', help='Install jars for pydl4j')

    argcomplete.autocomplete(parser)
    args = parser.parse_args(args)
    self.var_args = vars(args)

    if not args.command:
        parser.print_help()
        return

    self.command = args.command
    if self.command == 'init':
        self.init()
        return
    if self.command == 'install':
        self.install()
        return
def main():
    if puzzle:
        print >> sys.stderr, 'this is a self decoding file - decoding.'
        _unpack()
        return

    parser = argparse.ArgumentParser()
    parser.add_argument('value', nargs='?', help="Provide a string to encrypt.")
    parser.add_argument('-b', '--benchmark', help="Print number of operations per second",
                        required=False, action="store_true")
    parser.add_argument('-p', '--pack', help="Pack a self decoding python file given a file",
                        required=False, action="store_true")
    parser.add_argument('-f', '--file', help="Provide a file to encrypt.", required=False)
    parser.add_argument('-t', '--time', help="Time to decode", required=False, type=int)
    parser.add_argument('-e', '--encrypt', help="Encrypt a file that can be unencrypted in X seconds",
                        required=False, action="store_true")
    parser.add_argument('-d', '--decode', help="Decode a previously encrypted file", required=False)
    parser.add_argument('-u', '--unit', help="Time unit to use when interpreting time input",
                        required=False, default='seconds',
                        choices=['seconds', 'minutes', 'hours', 'days', 'months', 'years'])
    parser.add_argument('-U', '--until-date', nargs="+", help="Encode until a date", required=False)
    parser.add_argument('--tz', help="Provide a Time Zone. PST/EST or all of the full codes such as US/Eastern",
                        required=False)
    parser.add_argument('--seconds-until-date', nargs="+", help="Get seconds until a date", required=False)

    # show help if no args
    if len(sys.argv) == 1:
        parser.print_help()
        sys.exit(1)

    args = parser.parse_args()
    Main(args).execute()
def main():
    parser = argparse.ArgumentParser('Download a Flickr Set')
    parser.add_argument('-k', '--api_key', type=str, help='Flickr API key')
    parser.add_argument('-s', '--api_secret', type=str, help='Flickr API secret')
    parser.add_argument('-t', '--user_auth', action='store_true', help='Enable user authentication')
    parser.add_argument('-l', '--list', type=str, metavar='USER', help='List photosets for a user')
    parser.add_argument('-d', '--download', type=str, metavar='SET_ID', help='Download the given set')
    parser.add_argument('-q', '--quality', type=str, metavar='SIZE_LABEL', default=None,
                        help='Quality of the picture')
    parser.set_defaults(**_load_defaults())

    args = parser.parse_args()
    if not args.api_key or not args.api_secret:
        print >> sys.stderr, 'You need to pass in both "api_key" and "api_secret" arguments'
        return 1

    ret = _init(args.api_key, args.api_secret, args.user_auth)
    if not ret:
        return 1

    if args.list:
        print_sets(args.list)
    elif args.download:
        download_set(args.download, args.quality)
    else:
        print >> sys.stderr, 'ERROR: Must pass either --list or --download\n'
        parser.print_help()
        return 1
def validate(date_text, parser):
    try:
        datetime.strptime(date_text, '%Y-%m-%d')
    except ValueError:
        print "\nIncorrect date format; it must be in the following format: YYYY-MM-DD\n"
        parser.print_help()
        sys.exit(0)
def main():
    default_days = '12h,1..6d,1..22w'
    parser = OptionParser(usage="usage: %prog [options] vol_id")
    parser.add_option('--description', default='snapman', dest='description',
                      help="prefix for snapshot description")
    parser.add_option('--timeout', type='int', default=0, dest='timeout',
                      help="timeout for creating snapshots (see --days for units)")
    parser.add_option('--no-snapshot', action='store_false', default=True, dest='snapshot',
                      help="don't do the snapshot (only clean up)")
    parser.add_option('--no-clean', '--no-cleanup', action='store_false', default=True, dest='cleanup',
                      help="don't clean up (only do the snapshot)")
    parser.add_option('--logging', default='info')
    parser.add_option('--days', '-d', default=default_days,
                      help="Time spans to keep [default %default]. Units h=hours, d=days (default), "
                           "w=weeks, m=months, y=years. n.b. use --simulate to make sure that your "
                           "setting behaves as you think it will")
    parser.add_option('--simulate', dest='simulate',
                      help="Simulate and print the progression of backups using the given --days "
                           "setting [example: --simulate=1d]")
    parser.add_option('--region', dest='region', default=None,
                      help="Connect to the given EC2 region")
    (options, args) = parser.parse_args()

    logging.basicConfig(level=getattr(logging, options.logging.upper()))

    try:
        days = parse_days(options.days)
    except ValueError as e:
        print e
        parser.print_help()
        sys.exit(1)

    if options.simulate:
        tickspan = parse_days(options.simulate, single=True)
        simulate(days, tickspan)
        sys.exit(0)

    if len(args) != 1:
        parser.print_help()
        sys.exit(1)

    vol_id = args[0]

    timeout = None
    if options.timeout:
        timeout = timedelta(seconds=parse_days(options.timeout, single=True))

    conn = EC2Connection()
    if options.region is not None:
        # this is a bit silly but we're working around a bug in boto
        # where it half-ignores the region set in its own boto.cfg file
        regions = dict((x.name, x) for x in conn.get_all_regions())
        region = regions[options.region]
        conn = EC2Connection(region=region)

    return manage_snapshots(days, conn, vol_id, timeout=timeout,
                            description=options.description,
                            do_snapshot=options.snapshot,
                            do_cleanup=options.cleanup)
def parse_options():
    parser = OptionParser()
    parser.add_option("-i", "--input", help="XML input file")
    parser.add_option("-o", "--output", help="HTML output file", default="report.html")
    parser.add_option("-c", "--config", help="Configuration file", default="config")
    (options, args) = parser.parse_args()

    if options.input is None:
        print("\nERROR: Please specify an input file\n")
        parser.print_help()
        exit()

    return options
def main():
    config = get_configs('octopy.cfg')
    if not config['server'] or not config['api_key']:
        print 'Please, specify Octopus parameters in configuration file!'
        sys.exit(1)

    parser = argparse.ArgumentParser(
        description='Octopy is a small application that prints out information from Octopus in a convenient format.')
    parser.add_argument('--cmd', dest='command',
                        help="Octopy command (try `env`, `proj`, `rel`, `dep` and `mac`).")
    parser.add_argument('--cache', dest='cache', action='store_true',
                        help="Read data from cache if available.")
    parser.add_argument('--headers', dest='headers', action='store_true',
                        help='Display headers in output.')
    parser.add_argument('--crawl', dest='crawl', action='store_true',
                        help='By default only 30 items per page are returned by API. '
                             'This parameter enables link crawl. All resources from the `Link` '
                             'collection will be crawled by Octopy and data will be saved to cache. '
                             'This parameter has no effect on `env` and `proj` commands.')
    args = parser.parse_args()

    octopy = Octopy(config)

    if args.command == 'env':  # environments
        environments = octopy.get_environments(args.cache)
        if args.headers:
            print 'Id,Name'
        for key in environments.keys():
            print '%s,%s' % (key, environments[key])
    elif args.command == 'proj':  # projects
        projects = octopy.get_projects(args.cache)
        if args.headers:
            print 'Id,Name'
        for key in projects.keys():
            print '%s,%s' % (key, projects[key])
    elif args.command == 'rel':  # releases
        releases = octopy.get_releases(args.cache, args.crawl)
        if args.headers:
            print 'Id,Version'
        for key in releases.keys():
            print '%s,%s' % (key, releases[key])
    elif args.command == 'mac':  # machines
        machines = octopy.get_machines(args.cache)
        if args.headers:
            print 'Id,Name'
        for key in machines.keys():
            print '%s,%s' % (key, machines[key])
    elif args.command == 'dep':  # deployments
        deployments = octopy.get_deployments(args.cache, args.crawl)
        if args.headers:
            print 'Date,Time,Environment,Project,Release,SpecificMachines'
        for dep in deployments:
            print '%s,%s,%s,%s,%s,%s' % \
                (dep['Date'], dep['Time'], dep['Environment'],
                 dep['Project'], dep['Release'], dep['SpecificMachines'])
    else:
        print "Unknown command '%s'" % args.command
        parser.print_help()
def run(self, args=None):
    parser = self.create_parser()
    args = parser.parse_args(args)
    if not hasattr(args, "func"):
        parser.print_help()
        return 1
    try:
        exit_code = args.func(args)
        return exit_code
    except KeyboardInterrupt:
        print("*** interrupted by keyboard ***")
        return 1
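# The dispatch above relies on create_parser() attaching a handler to every
# subcommand via set_defaults(func=...); a minimal sketch of that wiring
# (hypothetical subcommand and handler, not the original create_parser):
import argparse

def cmd_greet(args):
    # handlers receive the parsed namespace and return an exit code
    print("hello, " + args.name)
    return 0

def create_parser():
    parser = argparse.ArgumentParser(prog="tool")
    subparsers = parser.add_subparsers(title="subcommands")
    greet = subparsers.add_parser("greet", help="print a greeting")
    greet.add_argument("name")
    greet.set_defaults(func=cmd_greet)  # this is what hasattr(args, "func") detects
    return parser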
def main(): parser = argparse.ArgumentParser(description="Create schedule iCalendar file") parser.add_argument("--input", help="The input HTML file we use to generate the schedule", type=str) parser.add_argument("--output", help="The output ics file we write to", type=str) args = parser.parse_args() if args.input is not None and args.output is not None: try: calendar = build_calendar(args.input) with open(args.output, "wb") as f: f.write(calendar.to_ical()) except: traceback.print_exc() else: parser.print_help()
def main():
    parser = argparse.ArgumentParser('Download a Flickr Set')
    parser.add_argument('-k', '--api_key', type=str, help='Flickr API key')
    parser.add_argument('-s', '--api_secret', type=str, help='Flickr API secret')
    parser.add_argument('-t', '--api_token', action='store_true', help='Use OAuth token')
    parser.add_argument('-l', '--list', type=str, help='List photosets for a user')
    parser.add_argument('-p', '--photos', type=str, help='List photos of a user')
    parser.add_argument('-d', '--download', type=str, help='Download the given set')
    parser.add_argument('-u', '--photostream', type=str, help='Download photostream of user')
    parser.add_argument('-o', '--photosets', type=str, help='Download all photosets of user')
    parser.add_argument('-x', '--all', type=str, help='Download all photosets and photos of user')
    parser.set_defaults(**_load_defaults())

    args = parser.parse_args()
    if not args.api_key or not args.api_secret:
        print >> sys.stderr, 'You need to pass in both "api_key" and "api_secret" arguments'
        return 1

    _init(args.api_key, args.api_secret, args.api_token)

    if args.list:
        print_sets(args.list)
    elif args.download:
        download_set(args.download)
    elif args.photos:
        list_photos(args.photos)
    elif args.photosets:
        download_sets(args.photosets)
    elif args.photostream:
        download_photos(args.photostream)
    elif args.all:
        download_all(args.all)
    else:
        print >> sys.stderr, 'ERROR: Must pass either --list or --download\n'
        parser.print_help()
        return 1
def main():
    parser = OptionParser()
    parser.add_option('-m', '--method', dest='method', default='socket',
                      help='define method of interaction with clamav')
    parser.add_option('-s', '--socket', dest='socket', default='/tmp/clamd.socket',
                      help='clamav unix socket to use')
    opts, args = parser.parse_args()

    if len(args) < 1:
        parser.print_help()
        exit(-1)

    filename = args[0]
    scan = virusscan(filename, method=opts.method, socket=opts.socket)
    for k, v in scan.items():
        print k + ': ' + v
def get_options():
    from argparse import ArgumentParser
    parser = ArgumentParser(
        description="""Get impression and donation info over time for a campaign.
        Note that old impression queries can be slow, so don't do too many at once.""")
    parser.add_argument('-s', '--start', dest='start', required=True, help='Start time (UTC)')
    # note: this default is evaluated once, when the parser is built
    parser.add_argument('-e', '--end', dest='end',
                        default=datetime.utcnow().strftime('%Y%m%d%H%M%S'),
                        help='End time (UTC). If not specified, defaults to now.')
    parser.add_argument('-i', '--interval', dest='interval', default=24,
                        help='Time (in hours) covered per row. Defaults to 24 hours')
    parser.add_argument('--campaign', dest='campaign', help='Regexp filter by campaign name')
    parser.add_argument('--source', dest='source', help='Regexp filter by utm_source / banner name')
    parser.add_argument('--country', dest='country', help='Filter by country code e.g. GB')
    parser.add_argument('--language', dest='language', help='Filter by language code e.g. en')

    if len(sys.argv) == 1:
        # No arguments, show instructions
        parser.print_help()
        sys.exit(1)

    args = vars(parser.parse_args())
    return args
def parse_options():
    parser = OptionParser()
    parser.add_option("-i", "--input", help="XML input file")
    parser.add_option("-o", "--output", help="HTML output file", default="report.html")
    parser.add_option("-c", "--config", help="Configuration file", default="config")
    (options, args) = parser.parse_args()

    if options.input is None:
        print("\nERROR: Please specify an input file\n")
        parser.print_help()
        exit()

    return options
def main():
    parser = argparse.ArgumentParser('Download a Flickr Set')
    parser.add_argument('-k', '--api_key', type=str, help='Flickr API key')
    parser.add_argument('-s', '--api_secret', type=str, help='Flickr API secret')
    parser.add_argument('-t', '--user_auth', action='store_true', help='Enable user authentication')
    parser.add_argument('-l', '--list', type=str, metavar='USER', help='List photosets for a user')
    parser.add_argument('-d', '--download', type=str, metavar='SET_ID', help='Download the given set')
    parser.add_argument('-u', '--download_user', type=str, metavar='USERNAME',
                        help='Download all sets for a given user')
    parser.add_argument('-q', '--quality', type=str, metavar='SIZE_LABEL', default=None,
                        help='Quality of the picture')
    parser.add_argument('-n', '--naming', type=str, metavar='NAMING_MODE', default='title',
                        help='Photo naming mode')
    parser.set_defaults(**_load_defaults())

    args = parser.parse_args()
    if not args.api_key or not args.api_secret:
        print('You need to pass in both "api_key" and "api_secret" arguments', file=sys.stderr)
        return 1

    ret = _init(args.api_key, args.api_secret, args.user_auth)
    if not ret:
        return 1

    if args.list:
        print_sets(args.list)
    elif args.download or args.download_user:
        try:
            get_filename = get_filename_handler(args.naming)
            if args.download:
                download_set(args.download, get_filename, args.quality)
            else:
                download_user(args.download_user, get_filename, args.quality)
        except KeyboardInterrupt:
            print('Forcefully aborting. Last photo download might be partial :(', file=sys.stderr)
    else:
        print('ERROR: Must pass either --list or --download\n', file=sys.stderr)
        parser.print_help()
        return 1
def __init__(self):
    parser = argparse.ArgumentParser(
        description='Minimal Flarum API CLI',
        usage='''%s <command> [<args>]

Active commands are:
   token    Retrieve authentication token
   config   Show config
   users    Manage users
   groups   Manage groups
   tags     Manage tags
''' % prog_name)
    parser.add_argument('command', help='Subcommand to run')
    # Parse only the first argument; each subcommand parses the rest itself.
    args = parser.parse_args(sys.argv[1:2])
    if not hasattr(self, args.command):
        print 'Unrecognized command'
        parser.print_help()
        exit(1)
    getattr(self, args.command)()
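# The git-style dispatcher above parses only argv[1:2], so each subcommand
# method is expected to parse the remainder itself; a minimal sketch of one
# such method (hypothetical `users` handler, not the original class):
import argparse
import sys

class CLI(object):
    def users(self):
        parser = argparse.ArgumentParser(description='Manage users')
        parser.add_argument('--list', action='store_true', help='list all users')
        # skip argv[0] (program name) and argv[1] (the subcommand itself)
        args = parser.parse_args(sys.argv[2:])
        if args.list:
            print('listing users...')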
def run(options, parser):
    keep = options.keep
    days = options.days

    if options.config:
        if len(options.args) != 0:
            parser.print_help()
            sys.exit("no arguments when config file used")
        ok = snapshot_with_config(keep, days, options)
    elif options.directories and len(options.args) >= 2:
        ok = True
        for src in options.args[0:-1]:
            ok = snapshot_directories(src, options.args[-1], keep, days, options) and ok
    elif len(options.args) == 2:
        ok = snapshot(options.args[0], options.args[-1], keep, days, options)
    else:
        parser.print_help()
        sys.exit("bad arguments")

    return 0 if ok else 8
def twistedrun(reactor):
    parser = argparse.ArgumentParser(description='BnW admin-tasks.')
    parser.add_argument('--rerender', dest='action', action='store_const',
                        const=action_rerender, default=None,
                        help='regenerate HTML for all messages and comments')
    parser.add_argument('--since', default='1970-01-01T00:00:00+0000',
                        help='update only messages and comments created after this time')
    args = parser.parse_args()
    if args.action is None:
        parser.print_help()
        reactor.stop()
    else:
        def errdie(*args, **kwargs):
            from twisted.python import log
            log.startLogging(sys.stderr)
            log.err(*args, **kwargs)
            reactor.stop()
        # stop the reactor once the action's Deferred fires
        args.action(reactor, args).addCallbacks(lambda d: reactor.stop(), errdie)
def main(): parser = argparse.ArgumentParser() parser.add_argument("-f", "--forecast", help="7 day forecast", action="store_true") parser.add_argument("-c", "--current", help="Current temperature", action="store_true") parser.add_argument("-p", "--past", help="Temperatures frorm the last 24 hours", action="store_true") args = parser.parse_args() if args.forecast: forecast() elif args.current: current() elif args.past: past() else: parser.print_help() return
def main(argv):
    print(argv)
    parser = argparse.ArgumentParser()
    parser.add_argument('-o', '--org', action="store", nargs="?",
                        help='provide organization value')
    if '-h' in argv or '--help' in argv:
        parser.print_help()
        sys.exit(0)
    args = parser.parse_args(argv)
    repo_name = args.org

    project_name = projectnames(repo_name)
    branch_list_names = getbranchnames(repo_name, project_name)
    print branch_list_names
    branch_commit_info_latest = last_commit_info(branch_list_names)
    print branch_commit_info_latest
    filtered_branchduration = branchduration(branch_commit_info_latest)
    final_branch = filtered_branches(filtered_branchduration)
    print(final_branch)
    deletebranch(repo_name, final_branch)
    csvtoHTML.html_codeToHTML("stalebrancheslist.csv", "reportfile.html")
def main():
    parser = argparse.ArgumentParser('Download a Flickr Set')
    parser.add_argument('-k', '--api_key', type=str, help='Flickr API key')
    parser.add_argument('-s', '--api_secret', type=str, help='Flickr API secret')
    parser.add_argument('-t', '--user_auth', action='store_true', help='Enable user authentication')
    parser.add_argument('-l', '--list', type=str, metavar='USER', help='List photosets for a user')
    parser.add_argument('-d', '--download', type=str, metavar='SET_ID', help='Download the given set')
    parser.add_argument('-a', '--all_download', type=str, metavar='USER', help='Download all sets for a user')
    parser.set_defaults(**_load_defaults())

    args = parser.parse_args()
    if not args.api_key or not args.api_secret:
        print >> sys.stderr, 'You need to pass in both "api_key" and "api_secret" arguments'
        return 1

    ret = _init(args.api_key, args.api_secret, args.user_auth)
    if not ret:
        return 1

    if args.list:
        for photoset in get_photosets(args.list):
            print '{0} - {1}'.format(photoset.id, photoset.title)
    elif args.download:
        download_set(args.download)
    elif args.all_download:
        download_all_sets(args.all_download)
    else:
        print >> sys.stderr, 'ERROR: Must pass either --list or --download\n'
        parser.print_help()
        return 1
def main():
    parser = argparse.ArgumentParser()
    parser.add_argument('-i', '--interface',
                        help='interface to connect to (ex. "tcp://host:1234")', required=True)
    parser.add_argument('-f', '--fields', help='show all detected IPFix fields', action='store_true')
    parser.add_argument('-s', '--status', help='show collector status', action='store_true')
    parser.add_argument('-m', '--memory', help='show collector memory usage', action='store_true')
    parser.add_argument('-d', '--debug', help='debug collector')
    args = parser.parse_args()

    if not args.fields and not args.status and not args.memory and not args.debug:
        print "\nNothing to show...\n\n"
        parser.print_help()
        return

    if args.debug is not None:
        try:
            dreq = zmq.utils.jsonapi.loads(args.debug)
        except Exception:
            print "\ncan not convert '%s' from JSON" % (args.debug)
            return
    else:
        dreq = None

    process(args.interface, args.fields, args.status, args.memory, dreq)
def cli():
    import argparse

    time_end = datetime.now()
    time_start = time_end - timedelta(minutes=1)

    parser = argparse.ArgumentParser(description='Command line interface for the Data API')
    parser.add_argument('action', type=str, default="",
                        help='Action to be performed. Possibilities: search, save')
    parser.add_argument("--regex", type=str, help="String to be searched", default="")
    parser.add_argument("--index-field", type=str, help="field the data is indexed on",
                        default="pulseId", choices=["globalDate", "globalSeconds", "pulseId"])
    parser.add_argument("--from_time", type=str, help="Start time for the data query", default=time_start)
    parser.add_argument("--to_time", type=str, help="End time for the data query", default=time_end)
    parser.add_argument("--from_pulse", type=str, help="Start pulseId for the data query", default=-1)
    parser.add_argument("--to_pulse", type=str, help="End pulseId for the data query", default=-1)
    parser.add_argument("--channels", type=str, help="Channels to be queried, comma-separated list", default="")
    parser.add_argument("--filename", type=str, help="Name of the output file", default="")
    parser.add_argument("--url", type=str, help="Base URL of retrieval API", default=default_base_url)
    parser.add_argument("--overwrite", action="store_true", help="Overwrite the output file", default="")
    # parser.add_argument("--split", action="store_true", help="Split output file", default="")
    parser.add_argument("--split", type=str, help="Number of pulses or duration (ISO8601) per file", default="")
    parser.add_argument("--print", help="Prints out the downloaded data. Output can be cut.",
                        action="store_true")
    parser.add_argument("--binary", help="Download as binary", action="store_true", default=False)
    parser.add_argument("--start_expansion", help="Expand query to next point before start",
                        action="store_true", default=False)
    parser.add_argument("--end_expansion", help="Expand query to next point after end",
                        action="store_true", default=False)

    args = parser.parse_args()

    split = args.split
    filename = args.filename
    api_base_url = args.url
    binary_download = args.binary
    start_expansion = args.start_expansion
    end_expansion = args.end_expansion
    index_field = args.index_field

    # Check if output files already exist
    if not args.overwrite and filename != "":
        import os.path
        if os.path.isfile(filename):
            logger.error("File %s already exists" % filename)
            return
        n_filename = "%s_%03d.h5" % (re.sub(r"\.h5$", "", filename), 0)
        if os.path.isfile(n_filename):
            logger.error("File %s already exists" % n_filename)
            return

    data = None
    if args.action == "search":
        if args.regex == "":
            logger.error("Please specify a regular expression with --regex\n")
            parser.print_help()
            return
        pprint.pprint(search(args.regex,
                             backends=["sf-databuffer", "sf-archiverappliance"],
                             base_url=default_base_url))
    elif args.action == "save":
        if args.filename == "" and not args.print:
            logger.warning("Please select either --print or --filename")
            parser.print_help()
            return

        if args.from_pulse != -1:
            if args.to_pulse == -1:
                logger.error("Please set a range limit with --to_pulse")
                return

            start_pulse = int(args.from_pulse)
            file_counter = 0

            while True:
                end_pulse = int(args.to_pulse)

                if start_pulse == end_pulse:
                    break

                if split != "" and filename != "" and (end_pulse - start_pulse) > int(split):
                    end_pulse = start_pulse + int(split)

                if filename != "":
                    if split != "":
                        new_filename = re.sub(r"\.h5$", "", filename)
                        new_filename = "%s_%03d.h5" % (new_filename, file_counter)
                    else:
                        new_filename = filename

                if binary_download:
                    get_data_iread(args.channels.split(","), start=start_pulse, end=end_pulse,
                                   range_type="pulseId", index_field=index_field,
                                   filename=new_filename, base_url=api_base_url,
                                   start_expansion=start_expansion, end_expansion=end_expansion)
                else:
                    data = get_data(args.channels.split(","), start=start_pulse, end=end_pulse,
                                    range_type="pulseId", index_field=index_field,
                                    base_url=api_base_url,
                                    start_expansion=start_expansion, end_expansion=end_expansion)

                if data is not None:
                    if filename != "":
                        to_hdf5(data, filename=new_filename, overwrite=args.overwrite)
                    elif args.print:
                        print(data)
                    else:
                        logger.warning("Please select either --print or --filename")
                        parser.print_help()

                start_pulse = end_pulse
                file_counter += 1
        else:
            start_time = _convert_date(args.from_time)
            file_counter = 0

            while True:
                end_time = _convert_date(args.to_time)

                if start_time == end_time:
                    break

                if split != "" and filename != "" and (end_time - start_time) > parse_duration(split):
                    end_time = start_time + parse_duration(split)

                if filename != "":
                    if split != "":
                        new_filename = re.sub(r"\.h5$", "", filename)
                        new_filename = "%s_%03d.h5" % (new_filename, file_counter)
                    else:
                        new_filename = filename

                if binary_download:
                    get_data_iread(args.channels.split(","), start=start_time, end=end_time,
                                   range_type="globalDate", index_field=index_field,
                                   filename=new_filename, base_url=api_base_url,
                                   start_expansion=start_expansion, end_expansion=end_expansion)
                else:
                    data = get_data(args.channels.split(","), start=start_time, end=end_time,
                                    range_type="globalDate", index_field=index_field,
                                    base_url=api_base_url,
                                    start_expansion=start_expansion, end_expansion=end_expansion)

                if data is not None:
                    if filename != "":
                        to_hdf5(data, filename=new_filename, overwrite=args.overwrite)
                    elif args.print:
                        print(data)
                    else:
                        logger.warning("Please select either --print or --filename")
                        parser.print_help()

                start_time = end_time
                file_counter += 1
    else:
        parser.print_help()
        return
def command_dispatcher(self, args=None):
    desc = 'Strumpf, Skymind Test Resource Upload Management for Paunchy Files.\n'
    parser = argparse.ArgumentParser(description=desc)
    parser.add_argument('-v', '--version', action='version',
                        version=pkg_resources.get_distribution("strumpf").version,
                        help='Print strumpf version')

    subparsers = parser.add_subparsers(title='subcommands', dest='command')
    subparsers.add_parser('configure', help='Configure strumpf.')
    subparsers.add_parser('status', help='Get strumpf status.')

    file_add_parser = subparsers.add_parser('add', help='Add files to strumpf tracking system.')
    file_add_parser.add_argument('path', type=str, nargs='+', help='Path or file to add to upload.')

    subparsers.add_parser('upload', help='Upload files to remote source.')
    subparsers.add_parser('bulk_download', help='Download all remote files')

    download_parser = subparsers.add_parser('download', help='Download file from remote source.')
    download_parser.add_argument('-f', '--file', help='File to download.')

    subparsers.add_parser('reset', help='Reset previously staged files.')
    subparsers.add_parser('blobs', help='List all relevant Azure blobs.')
    subparsers.add_parser('projects', help='List all projects tracked by Strumpf.')

    project_parser = subparsers.add_parser('set_project',
                                           help='Set a project tracked by Strumpf as default.')
    project_parser.add_argument('project', type=str, nargs='?', help='The project you want to set.')

    argcomplete.autocomplete(parser)
    args = parser.parse_args(args)
    self.var_args = vars(args)

    if not args.command:
        parser.print_help()
        return

    self.command = args.command

    if self.command != 'configure' and 'project_name' not in self.config.keys():
        raise Exception("Can't run this command.\n"
                        "No project name found. Did you run 'strumpf configure' before?")

    if self.command == 'configure':
        self.configure()
        return
    if self.command == 'status':
        self.status()
        return
    if self.command == 'add':
        paths = self.var_args['path']
        self.add(paths)
        return
    if self.command == 'upload':
        self.upload()
        return
    if self.command == 'download':
        self.download(self.var_args['file'])
        return
    if self.command == 'bulk_download':
        self.bulk_download()
    if self.command == 'reset':
        self.reset()
    if self.command == 'blobs':
        self.blobs()
    if self.command == 'projects':
        self.projects()
    if self.command == 'set_project':
        project = self.var_args['project']
        self.set_project(project)
        return
def usage() -> argparse.Namespace:
    """Parse the options provided on the command line.

    Returns:
        argparse.Namespace: The parameters provided on the command line.
    """
    parser = argparse.ArgumentParser(add_help=False)
    parser.add_argument('-h', '--help', action='store_true',
                        help='show this help message and exit')

    group = parser.add_argument_group("General", "Simulation general settings")
    group.add_argument("--first-date",
                       help="The first date to be processed. Default to the current date",
                       type=datetime_type, default=np.datetime64("now"))
    group.add_argument("--last-date",
                       help="The last date to be processed. Default to the last date "
                            "allowing to cover an entire cycle.",
                       type=datetime_type)

    group = parser.add_argument_group("Execution", "Runtime parameters options ")
    group.add_argument("--debug", action="store_true",
                       help="Put swot simulator in debug mode")
    group.add_argument("--log", metavar='PATH', help="Path to the logbook to use",
                       type=argparse.FileType("w"))
    group.add_argument("--scheduler-file",
                       help="Path to a file with scheduler information to launch swot "
                            "simulator on a cluster. By default, use a local cluster.",
                       metavar='PATH', type=argparse.FileType("r"))

    group = parser.add_argument_group("LocalCluster", "Dask local cluster option")
    group.add_argument("--n-workers", help="Number of workers to start (Default to 1)",
                       type=int, metavar='N', default=1)
    group.add_argument("--processes",
                       help="Whether to use processes (True) or threads (False). "
                            "Defaults to False",
                       action="store_true")
    group.add_argument("--threads-per-worker",
                       help="Number of threads per each worker. (Default to 1)",
                       type=int, metavar='N', default=1)

    group = parser.add_argument_group("Configuration")
    group.add_argument("--template",
                       help="Writes the default configuration of the simulator into "
                            "the file and ends the program.",
                       metavar="PATH", type=argparse.FileType("w"))

    namespace = argparse.Namespace()
    # Private argparse API: parses the known arguments without erroring on the
    # missing "settings" positional, and only fills in options actually seen.
    namespace, _ = parser._parse_known_args(sys.argv[1:], namespace)

    def add_settings(parser):
        """Add the argument defining the settings of the simulator."""
        parser.add_argument("settings", type=argparse.FileType('r'),
                            help="Path to the parameters file")

    # Displays help and ends the program.
    if "help" in namespace:
        add_settings(parser)
        parser.print_help()
        parser.exit(0)

    # Checking exclusive options.
    if "scheduler_file" in namespace:
        for item in ["n_workers", "processes", "threads_per_worker"]:
            if item in namespace:
                item = item.replace("_", "-")
                raise RuntimeError(
                    f"--{item}: not allowed with argument --scheduler-file")

    # Write the template configuration file and ends the program
    if "template" in namespace:
        namespace.template.write(settings.template())
        sys.stdout.write(f"""
The template has been written in the file: {namespace.template.name!r}.
""")
        parser.exit(0)

    # The partial analysis of the command line arguments is finished, the last
    # argument is added and parsed one last time.
    add_settings(parser)
    return parser.parse_args()
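# Why the private _parse_known_args rather than the public parse_known_args:
# the public method copies every default into the namespace first, so a check
# like "n_workers" in namespace would always be true and the exclusivity test
# above could never fire. A minimal sketch of the difference (hypothetical
# flag; the private signature may differ on newer CPython releases):
import argparse

p = argparse.ArgumentParser()
p.add_argument("--n-workers", type=int, default=1)

ns, _ = p.parse_known_args([])                          # public: defaults filled in
print("n_workers" in ns)                                # True even without the flag

ns, _ = p._parse_known_args([], argparse.Namespace())   # private: no defaults
print("n_workers" in ns)                                # False -> flag not given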
def get_params_parser(): """Parse command line arguments""" parser = argparse.ArgumentParser( usage=ARTHUR_USAGE_MSG, description=ARTHUR_DESC_MSG, epilog=ARTHUR_EPILOG_MSG, formatter_class=argparse.RawDescriptionHelpFormatter, add_help=False) ElasticOcean.add_params(parser) parser.add_argument('-h', '--help', action='help', help=argparse.SUPPRESS) parser.add_argument('-g', '--debug', dest='debug', action='store_true', help=argparse.SUPPRESS) parser.add_argument("--no_incremental", action='store_true', help="don't use last state for data source") parser.add_argument("--fetch_cache", action='store_true', help="Use cache for item retrieval") parser.add_argument("--enrich", action='store_true', help="Enrich items after retrieving") parser.add_argument( "--enrich_only", action='store_true', help="Only enrich items (DEPRECATED, use --only-enrich)") parser.add_argument("--only-enrich", dest='enrich_only', action='store_true', help="Only enrich items") parser.add_argument("--filter-raw", dest='filter_raw', help="Filter raw items. Format: field:value") parser.add_argument( "--filters-raw-prefix", nargs='*', help= "Filter raw items with prefix filter. Format: field:value field:value ..." ) parser.add_argument("--events-enrich", dest='events_enrich', action='store_true', help="Enrich events in items") parser.add_argument('--index', help="Ocean index name") parser.add_argument('--index-enrich', dest="index_enrich", help="Ocean enriched index name") parser.add_argument('--db-user', help="User for db connection (default to root)", default="root") parser.add_argument('--db-password', help="Password for db connection (default empty)", default="") parser.add_argument('--db-host', help="Host for db connection (default to mariadb)", default="mariadb") parser.add_argument('--db-projects-map', help="Projects Mapping DB") parser.add_argument('--json-projects-map', help="Projects Mapping JSON file") parser.add_argument('--project', help="Project for the repository (origin)") parser.add_argument('--refresh-projects', action='store_true', help="Refresh projects in enriched items") parser.add_argument('--db-sortinghat', help="SortingHat DB") parser.add_argument('--only-identities', action='store_true', help="Only add identities to SortingHat DB") parser.add_argument('--refresh-identities', action='store_true', help="Refresh identities in enriched items") parser.add_argument('--author_id', nargs='*', help="Field author_ids to be refreshed") parser.add_argument('--author_uuid', nargs='*', help="Field author_uuids to be refreshed") parser.add_argument( '--github-token', help="If provided, github usernames will be retrieved in git enrich.") parser.add_argument('--jenkins-rename-file', help="CSV mapping file with nodes renamed schema.") parser.add_argument('--studies', action='store_true', help="Execute studies after enrichment.") parser.add_argument('--only-studies', action='store_true', help="Execute only studies.") parser.add_argument( '--bulk-size', default=1000, type=int, help="Number of items per bulk request to Elasticsearch.") parser.add_argument( '--scroll-size', default=100, type=int, help="Number of items to get from Elasticsearch when scrolling.") parser.add_argument('--arthur', action='store_true', help="Read items from arthur redis queue") parser.add_argument('--pair-programming', action='store_true', help="Do pair programming in git enrich") parser.add_argument('--studies-list', nargs='*', help="List of studies to be executed") parser.add_argument('backend', help=argparse.SUPPRESS) 
parser.add_argument('backend_args', nargs=argparse.REMAINDER, help=argparse.SUPPRESS) if len(sys.argv) == 1: parser.print_help() sys.exit(1) return parser
def get_params_parser():
    """Parse command line arguments"""
    parser = argparse.ArgumentParser(usage=ARTHUR_USAGE_MSG,
                                     description=ARTHUR_DESC_MSG,
                                     epilog=ARTHUR_EPILOG_MSG,
                                     formatter_class=argparse.RawDescriptionHelpFormatter,
                                     add_help=False)

    ElasticOcean.add_params(parser)

    parser.add_argument('-h', '--help', action='help', help=argparse.SUPPRESS)
    parser.add_argument('-g', '--debug', dest='debug', action='store_true', help=argparse.SUPPRESS)
    parser.add_argument("--no_incremental", action='store_true', help="don't use last state for data source")
    parser.add_argument("--fetch_cache", action='store_true', help="Use cache for item retrieval")
    parser.add_argument("--loop", action='store_true', help="loop the ocean update until process termination")
    parser.add_argument("--redis", default="redis", help="url for the redis server")
    parser.add_argument("--enrich", action='store_true', help="Enrich items after retrieving")
    parser.add_argument("--enrich_only", action='store_true', help="Only enrich items (DEPRECATED, use --only-enrich)")
    parser.add_argument("--only-enrich", dest='enrich_only', action='store_true', help="Only enrich items")
    parser.add_argument('--index', help="Ocean index name")
    parser.add_argument('--index-enrich', dest="index_enrich", help="Ocean enriched index name")
    parser.add_argument('--db-projects-map', help="Projects Mapping DB")
    parser.add_argument('--project', help="Project for the repository (origin)")
    parser.add_argument('--db-sortinghat', help="SortingHat DB")
    parser.add_argument('--only-identities', action='store_true', help="Only add identities to SortingHat DB")
    parser.add_argument('--github-token', help="If provided, github usernames will be retrieved in git enrich.")
    parser.add_argument('--studies', action='store_true', help="Execute studies after enrichment.")
    parser.add_argument('--only-studies', action='store_true', help="Execute only studies.")
    parser.add_argument('backend', help=argparse.SUPPRESS)
    parser.add_argument('backend_args', nargs=argparse.REMAINDER, help=argparse.SUPPRESS)

    if len(sys.argv) == 1:
        parser.print_help()
        sys.exit(1)

    return parser
def main(argv): parser = argparse.ArgumentParser(description="Download raw events from the Unity Analytics server.") parser.add_argument('url', nargs='?', default='') parser.add_argument('-v', '--version', action='store_const', const=True, help='Retrieve version info for this file.') parser.add_argument('-b', '--verbose', action='store_const', const=True, help='Output more informative errors.') parser.add_argument('-o', '--output', default='', help='Set an output path for results.') parser.add_argument('-f', '--first', help='UNIX timestamp for trimming input.') parser.add_argument('-l', '--last', help='UNIX timestamp for trimming input.') parser.add_argument('-s', '--appStart', action='store_const', const=True, help='Include appStart events.') parser.add_argument('-r', '--appRunning', action='store_const', const=True, help='Include appRunning events.') parser.add_argument('-c', '--custom', action='store_const', const=True, help='Include custom events.') parser.add_argument('-t', '--transaction', action='store_const', const=True, help='Include transaction events.') parser.add_argument('-u', '--userInfo', action='store_const', const=True, help='Include userInfo events.') parser.add_argument('-d', '--deviceInfo', action='store_const', const=True, help='Include deviceInfo events.') args = vars(parser.parse_args()) if 'help' in args: parser.print_help() sys.exit() elif args['version'] == True: version_info() sys.exit() try: # now by default end_date = datetime.datetime.utcnow() if not args['last'] else dateutil.parser.parse(args['last'], fuzzy=False) except: print 'Provided end date could not be parsed. Format should be YYYY-MM-DD.' if args['verbose'] == True: print sys.exc_info()[0] sys.exit() try: # subtract 5 days by default start_date = end_date - datetime.timedelta(days=5) if not args['first'] else dateutil.parser.parse(args['first'], fuzzy=False) except: print 'Provided start date could not be parsed. Format should be YYYY-MM-DD.' if args['verbose'] == True: print sys.exc_info()[0] sys.exit() url = args['url'] # by default, we'll include all. If a flag(s) was selected, use it flags = [] for e in all_events: if args[e]: flags.append(e) if len(flags) == 0: flags = all_events # if first arg isn't a url if 'http' not in url: parser.print_help() sys.exit(2) elif len(url) > 0: print 'Loading batch manifest' manifest_json = load_and_parse(url, args['verbose']) found_items = 0 for manifest_item in manifest_json: # filter dates outside of range date = dateutil.parser.parse(manifest_item["generated_at"]).replace(tzinfo=None) if date < start_date: continue elif date > end_date: continue found_items += 1 print 'Retrieving manifest item from: ' + manifest_item["url"] batches_json = load_and_parse(manifest_item["url"], args['verbose']) batch_id = batches_json["batchid"] for batch in batches_json["data"]: bUrl = batch["url"] for event_type in flags: if event_type in bUrl: output_file_name = args['output'] + batch_id + "_" + event_type + ".txt" try: # finally, load the actual file from S3 print 'Downloading ' + output_file_name urlretrieve(bUrl, output_file_name) except HTTPError as e: print 'The server couldn\'t download the file.' print 'Error code: ', e.code if (args['verbose']): print e sys.exit() except URLError as e: print 'When downloading, we failed to reach a server.' print 'Reason: ', e.reason if (args['verbose']): print e sys.exit() else: print 'TSV file downloaded successfully' if found_items == 0: print 'No data found within specified dates. 
By default, this script downloads the last five days of data. Use -f (--first) and -l (--last) to specify a date range.' else: print 'get_raw_events.py requires that you specify a URL as the first argument.\nThis URL may be obtained by going to your project settings on the Unity Analytics website.\n\n' parser.print_help() sys.exit(2)
def main(argv): parser = argparse.ArgumentParser(description="Aggregate raw event data into JSON that can be read by the Unity Analytics heatmap system.") parser.add_argument('input', nargs='?', default='', help='The name of an input file, or an array of input files (required).') parser.add_argument('-v', '--version', action='store_const', const=True, help='Retrieve version info for this file.') parser.add_argument('-o', '--output', help='The name of the output file. If omitted, name is auto-generated from first input file.') parser.add_argument('-s', '--space', default=0, help='Numerical scale at which to smooth out spatial data.') parser.add_argument('-t', '--time', default=0, help='Numerical scale at which to smooth out temporal data.') parser.add_argument('-f', '--first', help='UNIX timestamp for trimming input. 365 days before last by default.') parser.add_argument('-l', '--last', help='UNIX timestamp for trimming input. Now by default.') parser.add_argument('-e', '--event-names', help='A string or array of strings, indicating event names to include in the output.') parser.add_argument('-n', '--single-session', action='store_const', const=True, help='Organize the data by individual play sessions. (Unsupported)') parser.add_argument('-d', '--disaggregate-time', action='store_const', const=True, help='Disaggregates events that map to matching x/y/z coordinates, but different moments in time.') args = vars(parser.parse_args()) if 'help' in args: parser.print_help() sys.exit() elif args['version'] == True: version_info() sys.exit() try: # now by default end_date = datetime.datetime.utcnow() if not args['last'] else dateutil.parser.parse(args['last']) except: print 'Provided end date could not be parsed. Format should be YYYY-MM-DD.' sys.exit() try: # allow 'forever' in start_date unspecified start_date = datetime.datetime(2000, 1, 1) if not args['first'] else dateutil.parser.parse(args['first']) except: print 'Provided start date could not be parsed. Format should be YYYY-MM-DD.' sys.exit() space_divisor = args['space'] time_divisor = args['time'] input_files = args['input'].split(',') event_names = args['event_names'].split(',') if args['event_names'] else [] if args['input'] == '' or len(input_files) == 0: print 'heatmap_aggr.py requires that you specify an input file. It\'s not really that much to ask.' parser.print_help() sys.exit(2) else: output_data = {} point_map = {} output_file_name = args['output'] if args['output'] else os.path.splitext(input_files[0])[0] + '.json' #loop and smooth all file data for fname in input_files: with open(fname) as input_file: tsv = csv.reader(input_file, delimiter='\t') for row in tsv: # ignore blank rows if len(row) >= 3: # read the timestamp try: row_date = dateutil.parser.parse(row[0]) except: print 'Process failed while attempting to read a timestamp. Might you have loaded the wrong file?' print 'Couldn\'t parse: ' + str(row[0]) sys.exit() #ignore rows outside any date trimming if row_date <= start_date: continue if row_date >= end_date: continue # read the data try: datum = json.loads(row[3]) except: print 'Process failed while attempting to read a data row. Might you have loaded the wrong file?' print 'Couldn\'t parse: ' + str(row[3]) sys.exit() event = str(datum['unity.name']) # if we're filtering events, pass if not in list if len(event_names) > 0 and event not in event_names: continue # Deal with spatial data # x/y are required try: x = float(datum['x']) y = float(datum['y']) except KeyError: print 'Unable to find x/y in: ' + event + '. 
Skipping...' continue # ensure we have a list for this event if not event in output_data: output_data[event] = [] # z values are optional (Vector2's use only x/y) try: z = float(datum['z']) except KeyError: z = 0 # construct the point point = {} if space_divisor > 0: x = divide(x, space_divisor) y = divide(y, space_divisor) z = divide(z, space_divisor) point['x'] = x point['y'] = y point['z'] = z # Deal with temporal data, which is also optional try: t = float(datum['t']) if 't' in datum else 1.0 if time_divisor > 0: t = divide(t, time_divisor) point['t'] = t except AttributeError: # We allow for the possibility of the dev not including 't' for time. # This is faster than hasattr(datum, 't') t = 0 # Hash the point, so we can aggregate for density timeKey = point["t"] if args['disaggregate_time'] else None sessionKey = datum[1] if args['single_session'] else None tupl = (event, point["x"], point["y"], point["z"], timeKey, sessionKey) pt = get_existing_point(point_map, tupl) if pt == None: create_key(point_map, tupl, point) point['d'] = 1 output_data[event].append(point) else: pt['d'] += 1 # test if any data was generated has_data = False report = [] for generated in output_data: try: has_data = len(output_data[generated]) > 0 report.append(len(output_data[generated])) except KeyError: pass if has_data: print 'Processed ' + str(len(report)) + ' group(s) with the following numbers of data points: ' + str(report) print output_file_name text_file = open(output_file_name, "w") text_file.write(json.dumps(output_data)) zz = text_file.close() else: print 'The process yielded no results. Could you have misspelled the event name?'
def parse_args(): """Parse command-line arguments with optparse.""" usage = "usage: %prog [OPTIONS] " + \ "--project PROJECT [--project PROJECT] " + \ "--version VERSION [--version VERSION2 ...]" parser = OptionParser( usage=usage, epilog= "Markdown-formatted CHANGES and RELEASENOTES files will be stored" " in a directory named after the highest version provided.") parser.add_option("-i", "--index", dest="index", action="store_true", default=False, help="build an index file") parser.add_option("-l", "--license", dest="license", action="store_true", default=False, help="Add an ASF license") parser.add_option("-p", "--project", dest="projects", action="append", type="string", help="projects in JIRA to include in releasenotes", metavar="PROJECT") parser.add_option("-r", "--range", dest="range", action="store_true", default=False, help="Given versions are a range") parser.add_option( "--sortorder", dest="sortorder", metavar="TYPE", default=SORTORDER, # dec is supported for backward compatibility choices=["asc", "dec", "desc", "newer", "older"], help="Sorting order for sort type (default: %s)" % SORTORDER) parser.add_option("--sorttype", dest="sorttype", metavar="TYPE", default=SORTTYPE, choices=["resolutiondate", "issueid"], help="Sorting type for issues (default: %s)" % SORTTYPE) parser.add_option( "-t", "--projecttitle", dest="title", type="string", help="Title to use for the project (default is Apache PROJECT)") parser.add_option("-u", "--usetoday", dest="usetoday", action="store_true", default=False, help="use current date for unreleased versions") parser.add_option("-v", "--version", dest="versions", action="append", type="string", help="versions in JIRA to include in releasenotes", metavar="VERSION") parser.add_option( "-V", dest="release_version", action="store_true", default=False, help="display version information for releasedocmaker and exit.") parser.add_option("-O", "--outputdir", dest="output_directory", action="append", type="string", help="specify output directory to put release docs to.") parser.add_option("-B", "--baseurl", dest="base_url", action="append", type="string", help="specify base URL of the JIRA instance.") parser.add_option( "--retries", dest="retries", action="append", type="int", help="Specify how many times to retry connection for each URL.") parser.add_option("-X", "--incompatiblelabel", dest="incompatible_label", default="backward-incompatible", type="string", help="Specify the label to indicate backward incompatibility.") Linter.add_parser_options(parser) if len(sys.argv) <= 1: parser.print_help() sys.exit(1) (options, _) = parser.parse_args() # Validate options if not options.release_version: if options.versions is None: parser.error("At least one version needs to be supplied") if options.projects is None: parser.error("At least one project needs to be supplied") if options.base_url is not None: if len(options.base_url) > 1: parser.error("Only one base URL should be given") else: options.base_url = options.base_url[0] if options.output_directory is not None: if len(options.output_directory) > 1: parser.error("Only one output directory should be given") else: options.output_directory = options.output_directory[0] return options
parser.add_argument('-n', metavar='name of folder', nargs=1,
                    help='please give a name of folder')
parser.add_argument('-d', metavar='yes or no', nargs=1,
                    help='please choose clear directory or not')
parser.add_argument('-r', metavar='yes or no', nargs=1,
                    help='if an error occurs in the middle, please choose retry or not')

result = parser.parse_args(sys.argv[1:])
if result.v is None:
    parser.print_help(sys.stderr)
    sys.exit(1)
if result.n is None:
    parser.print_help(sys.stderr)
    sys.exit(1)
if result.d is None:
    parser.print_help(sys.stderr)
    sys.exit(1)
if result.r is None:
    parser.print_help(sys.stderr)
    sys.exit(1)

version = result.v[0]
folder_name = result.n[0]
clear = result.d[0]
recover = result.r[0]

if clear == 'yes':
def main():
    qp = {}

    def add_qp(option, opt_str, value, parser):
        if option.dest == 'query':
            try:
                (p, v) = value.split('=', 1)
                qp[p] = v
            except ValueError:
                raise optparse.OptionValueError("%s expects parameter=value" % opt_str)
        else:
            qp[option.dest] = value

    parser = optparse.OptionParser(
        usage="Usage: %prog [-h|--help] [OPTIONS] -o file",
        version="%prog " + VERSION,
        add_help_option=False)

    parser.set_defaults(url="http://geofon.gfz-potsdam.de/eidaws/routing/1/",
                        timeout=600,
                        retries=10,
                        retry_wait=60,
                        threads=5)

    parser.add_option("-h", "--help", action="store_true", default=False,
                      help="show help message and exit")
    parser.add_option("-l", "--longhelp", action="store_true", default=False,
                      help="show extended help message and exit")
    parser.add_option("-v", "--verbose", action="store_true", default=False,
                      help="verbose mode")
    parser.add_option("-u", "--url", type="string",
                      help="URL of routing service (default %default)")
    parser.add_option("-y", "--service", type="string", action="callback",
                      callback=add_qp, help="target service (default dataselect)")
    parser.add_option("-N", "--network", type="string", action="callback",
                      callback=add_qp, help="network code or pattern")
    parser.add_option("-S", "--station", type="string", action="callback",
                      callback=add_qp, help="station code or pattern")
    parser.add_option("-L", "--location", type="string", action="callback",
                      callback=add_qp, help="location code or pattern")
    parser.add_option("-C", "--channel", type="string", action="callback",
                      callback=add_qp, help="channel code or pattern")
    parser.add_option("-s", "--starttime", type="string", action="callback",
                      callback=add_qp, help="start time")
    parser.add_option("-e", "--endtime", type="string", action="callback",
                      callback=add_qp, help="end time")
    parser.add_option("-q", "--query", type="string", action="callback",
                      callback=add_qp, metavar="PARAMETER=VALUE",
                      help="additional query parameter")
    parser.add_option("-t", "--timeout", type="int",
                      help="request timeout in seconds (default %default)")
    parser.add_option("-r", "--retries", type="int",
                      help="number of retries (default %default)")
    parser.add_option("-w", "--retry-wait", type="int",
                      help="seconds to wait before each retry (default %default)")
    parser.add_option("-n", "--threads", type="int",
                      help="maximum number of download threads (default %default)")
    parser.add_option("-c", "--credentials-file", type="string",
                      help="URL,user,password file (CSV format) for queryauth")
    parser.add_option("-a", "--auth-file", type="string",
                      help="file that contains the auth token")
    parser.add_option("-p", "--post-file", type="string",
                      help="request file in FDSNWS POST format")
    parser.add_option("-f", "--arclink-file", type="string",
                      help="request file in ArcLink format")
    parser.add_option("-b", "--breqfast-file", type="string",
                      help="request file in breq_fast format")
    parser.add_option("-o", "--output-file", type="string",
                      help="file where downloaded data is written")
    parser.add_option("-z", "--no-citation", action="store_true", default=False,
                      help="suppress network citation info")

    (options, args) = parser.parse_args()

    if options.help:
        print(__doc__.split("Usage Examples", 1)[0], end="")
        parser.print_help()
        return 0

    if options.longhelp:
        print(__doc__)
        parser.print_help()
        return 0

    if args or not options.output_file:
        parser.print_usage()
        return 1

    if bool(options.post_file) + bool(options.arclink_file) + \
            bool(options.breqfast_file) > 1:
        msg("only one of (--post-file, --arclink-file, --breqfast-file) "
            "can be used")
        return 1

    try:
        cred = {}
        authdata = None
        postdata = None

        if options.credentials_file:
            with open(options.credentials_file) as fd:
                try:
                    for (url, user, passwd) in csv.reader(fd):
                        cred[url] = (user, passwd)
                except (ValueError, csv.Error):
                    raise Error("error parsing %s" % options.credentials_file)

        if options.auth_file:
            with open(options.auth_file) as fd:
                authdata = fd.read()

        if options.post_file:
            with open(options.post_file) as fd:
                postdata = fd.read()
        else:
            parser = None

            if options.arclink_file:
                parser = ArclinkParser()
                parser.parse(options.arclink_file)
            elif options.breqfast_file:
                parser = BreqParser()
                parser.parse(options.breqfast_file)

            if parser is not None:
                if parser.failstr:
                    msg(parser.failstr)
                    return 1
                postdata = parser.postdata

        url = RoutingURL(urlparse.urlparse(options.url), qp)
        dest = open(options.output_file, 'wb')

        nets = route(url, cred, authdata, postdata, dest, options.timeout,
                     options.retries, options.retry_wait, options.threads,
                     options.verbose)

        if nets and not options.no_citation:
            msg("retrieving network citation info", options.verbose)
            get_citation(nets, options)
        else:
            msg("", options.verbose)

        msg("In case of problems with your request, please use the contact "
            "form at\n\n"
            "    http://www.orfeus-eu.org/organization/contact/form/\n",
            options.verbose)

    except (IOError, Error) as e:
        msg(str(e))
        return 1

    return 0
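# The credentials loop above unpacks exactly three CSV fields per row, so a
# --credentials-file line must read URL,user,password; a minimal sketch that
# writes a compatible file (hypothetical values):
import csv

with open("credentials.csv", "w", newline="") as fd:
    csv.writer(fd).writerow(
        ["https://example.org/fdsnws/dataselect/1/query", "user", "secret"])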
def parse_args(): """Parse command-line arguments with optparse.""" usage = "usage: %prog [OPTIONS] " + \ "--project PROJECT [--project PROJECT] " + \ "--version VERSION [--version VERSION2 ...]" parser = OptionParser( usage=usage, epilog="Markdown-formatted CHANGES and RELEASENOTES files will be stored" " in a directory named after the highest version provided.") parser.add_option("-i", "--index", dest="index", action="store_true", default=False, help="build an index file") parser.add_option("-l", "--license", dest="license", action="store_true", default=False, help="Add an ASF license") parser.add_option("-p", "--project", dest="projects", action="append", type="string", help="projects in JIRA to include in releasenotes", metavar="PROJECT") parser.add_option("-r", "--range", dest="range", action="store_true", default=False, help="Given versions are a range") parser.add_option( "--sortorder", dest="sortorder", metavar="TYPE", default=SORTORDER, # dec is supported for backward compatibility choices=["asc", "dec", "desc", "newer", "older"], help="Sorting order for sort type (default: %s)" % SORTORDER) parser.add_option("--sorttype", dest="sorttype", metavar="TYPE", default=SORTTYPE, choices=["resolutiondate", "issueid"], help="Sorting type for issues (default: %s)" % SORTTYPE) parser.add_option( "-t", "--projecttitle", dest="title", type="string", help="Title to use for the project (default is Apache PROJECT)") parser.add_option("-u", "--usetoday", dest="usetoday", action="store_true", default=False, help="use current date for unreleased versions") parser.add_option("-v", "--version", dest="versions", action="append", type="string", help="versions in JIRA to include in releasenotes", metavar="VERSION") parser.add_option( "-V", dest="release_version", action="store_true", default=False, help="display version information for releasedocmaker and exit.") parser.add_option("-O", "--outputdir", dest="output_directory", action="append", type="string", help="specify output directory to put release docs to.") parser.add_option("-B", "--baseurl", dest="base_url", action="append", type="string", help="specify base URL of the JIRA instance.") parser.add_option( "--retries", dest="retries", action="append", type="int", help="Specify how many times to retry connection for each URL.") parser.add_option( "--skip-credits", dest="skip_credits", action="store_true", default=False, help= "While creating release notes skip the 'reporter' and 'contributor' columns" ) parser.add_option( "-X", "--incompatiblelabel", dest="incompatible_label", default="backward-incompatible", type="string", help="Specify the label to indicate backward incompatibility.") Linter.add_parser_options(parser) if len(sys.argv) <= 1: parser.print_help() sys.exit(1) (options, _) = parser.parse_args() # Validate options if not options.release_version: if options.versions is None: parser.error("At least one version needs to be supplied") if options.projects is None: parser.error("At least one project needs to be supplied") if options.base_url is not None: if len(options.base_url) > 1: parser.error("Only one base URL should be given") else: options.base_url = options.base_url[0] if options.output_directory is not None: if len(options.output_directory) > 1: parser.error("Only one output directory should be given") else: options.output_directory = options.output_directory[0] return options
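The base-URL and output-directory handling above repeats a pattern: collect with action="append", then reject anything but exactly one value. A sketch of the same check factored into a helper (single_valued is illustrative, not part of the original script):

import optparse

def single_valued(parser, options, name):
    # action="append" always yields a list (or None); collapse it to one value
    values = getattr(options, name)
    if values is None:
        return
    if len(values) > 1:
        parser.error("Only one %s should be given" % name)
    setattr(options, name, values[0])

parser = optparse.OptionParser()
parser.add_option("-B", "--baseurl", dest="base_url", action="append", type="string")
options, _ = parser.parse_args(["-B", "https://issues.apache.org/jira"])
single_valued(parser, options, "base_url")
print(options.base_url)  # https://issues.apache.org/jira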
print('TOTAL') print(str(total_table)) print() elif args.action == 'market': while True: market(args.give_asset, args.get_asset) elif args.action == 'purge': config.zeromq_publisher = zeromq.ZeroMQPublisher() config.zeromq_publisher.daemon = True config.zeromq_publisher.start() blocks.purge(db) elif args.action == 'help': parser.print_help() elif args.action == 'server': api_server = api.APIServer() api_server.daemon = True api_server.start() # throw a reference into config so that other modules can reference the thread to put messages into the queue config.zeromq_publisher = zeromq.ZeroMQPublisher() config.zeromq_publisher.daemon = True config.zeromq_publisher.start() blocks.follow(db) else: parser.print_help()
def main(): if puzzle: print('this is a self-decoding file - decoding.', file=sys.stderr) _unpack() return parser = argparse.ArgumentParser() parser.add_argument('value', nargs='?', help="Provide a string to encrypt.") parser.add_argument('-b', '--benchmark', help="Print number of operations per second", required=False, action="store_true") parser.add_argument('-p', '--pack', help="Pack a self-decoding python file given a file", required=False, action="store_true") parser.add_argument('-f', '--file', help="Provide a file to encrypt.", required=False) parser.add_argument('-t', '--time', help="Time to decode", required=False, type=int) parser.add_argument( '-e', '--encrypt', help="Encrypt a file that can be unencrypted in X seconds", required=False, action="store_true") parser.add_argument( '-d', '--decode', help="Decode a previously encrypted value", required=False) parser.add_argument('-u', '--unit', help="Time unit to use when interpreting time input", required=False, default='seconds', choices=[ 'seconds', 'minutes', 'hours', 'days', 'months', 'years', ]) parser.add_argument('-U', '--until-date', nargs="+", help="Encode until a date", required=False) parser.add_argument( '--tz', help="Provide a Time Zone. PST/EST or all of the full codes such as US/Eastern", required=False) parser.add_argument('--seconds-until-date', nargs="+", help="Get seconds until a date", required=False) # show help if no args if len(sys.argv) == 1: parser.print_help() sys.exit(1) args = parser.parse_args() Main(args).execute()
def main(argslist=None): parser = argparse.ArgumentParser() parser.add_argument("-D", "--debug", action="store_true", default=False, help="Save debug info [default=%(default)s]") parser.add_argument("-d", "--delay", type=int, default=20, help="Capture x seconds after QSO log entry [default=%(default)s]") parser.add_argument("-i", "--device-index", type=int, default=None, help="Index of the recording input (use -q to list) [default=%(default)s]") parser.add_argument("-k", "--hot-key", type=str, default="O", help="Hotkey for manual recording Ctrl-Alt-<hot_key> [default=%(default)s]") parser.add_argument("-l", "--buffer-length", type=int, default=45, help="Audio buffer length in secs [default=%(default)s]") parser.add_argument("-C", "--continuous", action="store_true", default=False, help="Record continuous audio stream in addition to individual QSOs [default=%(default)s]") parser.add_argument("-P", "--port", type=int, default=12060, help="UDP Port [default=%(default)s]") parser.add_argument("-p", "--path", type=str, default=None, help="Base directory for audio files [default=%(default)s]") parser.add_argument("-q", "--query-inputs", action="store_true", default=False, help="Query and print input devices [default=%(default)s]") parser.add_argument("-S", "--so2r", action="store_true", default=False, help="SO2R mode, downmix to mono: Left Ch - Radio1 QSOs, Right Ch - Radio2 QSOs [default=%(default)s]") parser.add_argument("-s", "--station-nr", type=int, default=None, help="Network Station Number [default=%(default)s]") parser.add_argument("-r", "--radio-nr", type=int, default=None, help="Radio Number [default=%(default)s]") global options # argslist can be passed from another python script or at the command line options = parser.parse_args(argslist) dqlength = int(options.buffer_length * RATE / CHUNK) + 1 DELAY = options.delay MYPORT = options.port if (options.path): os.chdir(options.path) if (len(options.hot_key) == 1): global HOTKEY HOTKEY = options.hot_key.upper() else: print("Hotkey should be a single character") parser.print_help() exit(-1) if (options.debug): logging.basicConfig(filename=DEBUG_FILE, level=logging.DEBUG, format='%(asctime)s %(message)s') logging.debug('debug log started') logging.debug('qsorder options:') logging.debug(options) # start hotkey monitoring thread if not nopyhk: t = threading.Thread(target=hotkey) t.daemon = True t.start() print("-------------------------------------------------------") print("|\tv2.13 QSO Recorder for N1MM, 2018 K3IT\t") print("-------------------------------------------------------") # global p p = pyaudio.PyAudio() if (options.query_inputs): max_devs = p.get_device_count() print("Detected", max_devs, "devices\n") ################################ print("Device index Description") print("------------ -----------") for i in range(max_devs): p = pyaudio.PyAudio() devinfo = p.get_device_info_by_index(i) if devinfo['maxInputChannels'] > 0: try: if p.is_format_supported(int(RATE), input_device=devinfo['index'], input_channels=devinfo['maxInputChannels'], input_format=pyaudio.paInt16): print("\t", i, "\t", devinfo['name']) except ValueError: pass p.terminate() sys.exit(0) if (options.device_index is not None): try: def_index = p.get_device_info_by_index(options.device_index) print("| Input Device :", def_index['name']) DEVINDEX = options.device_index except IOError as e: print("Invalid Input device: %s" % e) p.terminate() os._exit(-1) else: try: def_index = p.get_default_input_device_info() print("| Input Device :", 
def_index['index'], def_index['name']) DEVINDEX = def_index['index'] except IOError as e: print("No Input devices: %s" % e) p.terminate() os._exit(-1) # queue for chunked recording global frames frames = deque('', dqlength) # queue for continuous recording global replay_frames replay_frames = deque('', dqlength) print("| Listening on UDP port", MYPORT) # define callback def callback(in_data, frame_count, time_info, status): frames.append(in_data) # add code for continuous recording here replay_frames.append(in_data) return (None, pyaudio.paContinue) stream = p.open(format=FORMAT, channels=CHANNELS, input_device_index=DEVINDEX, rate=RATE, input=True, frames_per_buffer=CHUNK, stream_callback=callback) # start the stream stream.start_stream() sampwidth = p.get_sample_size(FORMAT) print("| %d ch x %d secs audio buffer\n| Delay: %d secs" % (CHANNELS, dqlength * CHUNK / RATE, DELAY)) print("| Output directory", os.getcwd() + "\\<contest...>") if nopyhk: print("| Hotkey functionality is disabled") else: print("| Hotkey: CTRL+ALT+" + HOTKEY) if (options.station_nr and options.station_nr >= 0): print("| Recording only station", options.station_nr, "QSOs") if (options.continuous): print("| Full contest recording enabled.") print("-------------------------------------------------------\n") print(" QSOrder recordings can be shared with the World at:") print("\thttp://qsorder.hamradiomap.com\n") # start continuous mp3 writer thread if (options.continuous): mp3 = threading.Thread(target=writer) mp3.daemon = True mp3.start() # listen on UDP port # Receive UDP packets transmitted by a broadcasting service s = socket(AF_INET, SOCK_DGRAM) s.setsockopt(SOL_SOCKET, SO_BROADCAST, 1) s.setsockopt(SOL_SOCKET, SO_REUSEADDR, 1) try: s.bind(('', MYPORT)) except OSError: print("Error connecting to the UDP stream.") seen = {} # this is needed to control loop exit from the unit tests def true_func(): return True while stream.is_active() and true_func(): try: udp_data = s.recv(2048) check_sum = binascii.crc32(udp_data) try: dom = parseString(udp_data) except xml.parsers.expat.ExpatError: pass try: if ("qsorder_exit_loop_DEADBEEF" in udp_data.decode()): print("Received magic Exit packet") break except UnicodeDecodeError: pass if (options.debug): logging.debug('UDP Packet Received:') logging.debug(udp_data) # skip packet if duplicate if check_sum in seen: seen[check_sum] += 1 if (options.debug): logging.debug('DUPE packet skipped') else: seen[check_sum] = 1 try: now = datetime.datetime.utcnow() # read UDP fields dom = parseString(udp_data) call = dom.getElementsByTagName("call")[0].firstChild.nodeValue mycall = dom.getElementsByTagName("mycall")[0].firstChild.nodeValue mode = dom.getElementsByTagName("mode")[0].firstChild.nodeValue freq = dom.getElementsByTagName("band")[0].firstChild.nodeValue contest = dom.getElementsByTagName("contestname")[0].firstChild.nodeValue station = dom.getElementsByTagName("NetworkedCompNr")[0].firstChild.nodeValue qso_timestamp = dom.getElementsByTagName("timestamp")[0].firstChild.nodeValue radio_nr = dom.getElementsByTagName("radionr")[0].firstChild.nodeValue # convert qso_timestamp to datetime object timestamp = dateutil.parser.parse(qso_timestamp) # verify that month matches, if not, give DD-MM-YY format precedence if (timestamp.strftime("%m") != now.strftime("%m")): timestamp = dateutil.parser.parse(qso_timestamp, dayfirst=True) # skip packet if not matching network station number specified in the command line if (options.station_nr and options.station_nr >= 0): if (options.station_nr != int(station)): 
print("QSO:", timestamp.strftime("%m-%d %H:%M:%S"), call, freq, "--- ignoring from stn", station) continue # skip packet if not matching radio number specified in the command line if (options.radio_nr and options.radio_nr >= 0): if (options.radio_nr != int(radio_nr)): print("QSO:", timestamp.strftime("%m-%d %H:%M:%S"), call, freq, "--- ignoring from radio/VFO", radio_nr) continue # skip packet if QSO was more than DELAY seconds ago t_delta = (now - timestamp).total_seconds() if (t_delta > DELAY): print("---:", timestamp.strftime("%m-%d %H:%M:%S"), call, freq, "--- ignoring ",\ t_delta, "sec old QSO. Check clock settings?") continue elif (t_delta < -DELAY): print("---:", timestamp.strftime("%m-%d %H:%M:%S"), call, freq, "--- ignoring ",\ -t_delta, "sec QSO in the 'future'. Check clock settings?") continue calls = call + "_de_" + mycall t = threading.Timer(DELAY, dump_audio, [calls, contest, mode, freq, timestamp, radio_nr, sampwidth]) print("QSO:", timestamp.strftime("%m-%d %H:%M:%S"), call, freq) t.start() except: if (options.debug): logging.debug('Could not parse previous packet') logging.debug(sys.exc_info()) pass # ignore, probably some other udp packet except (KeyboardInterrupt): print("73! K3IT") stream.stop_stream() stream.close() p.terminate() sys.exit(0) # stream.close() p.terminate() sys.exit(0)
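The recorder above keeps the last buffer-length seconds of audio in a bounded collections.deque and, when a QSO packet arrives, schedules the dump DELAY seconds later so the capture includes audio from after the log entry as well. A stripped-down sketch of that ring-buffer-plus-timer idea (the dump callback and constants are illustrative, not the recorder's real ones):

import threading
from collections import deque

RATE, CHUNK, BUFFER_SECS, DELAY = 44100, 1024, 45, 2

# maxlen turns the deque into a ring buffer: old chunks fall off the left
frames = deque(maxlen=int(BUFFER_SECS * RATE / CHUNK) + 1)

def on_audio_chunk(chunk):
    frames.append(chunk)  # called from the audio stream callback

def dump(label):
    # fires DELAY seconds after the trigger, so the buffer now holds
    # audio from before *and* after the QSO was logged
    print("dumping %d chunks for %s" % (len(frames), label))

def on_qso_logged(call):
    threading.Timer(DELAY, dump, [call]).start()

on_audio_chunk(b'\x00' * CHUNK)
on_qso_logged("DL1ABC")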
def main(): """main function""" parser = argparse.ArgumentParser( description='Github within the Command Line') group = parser.add_mutually_exclusive_group() group.add_argument('-n', '--url', type=str, help="Get repos from the user profile's URL") group.add_argument('-r', '--recursive', type=str, help="Get the file structure from the repo link") group.add_argument('-R', '--readme', type=str, help="Get the raw version of the repo readme from repo link") group.add_argument('-re', '--releases', type=str, help="Get the list of releases from repo link") group.add_argument('-dt', '--tarball', type=str, help="Download the tarball of the given repo") group.add_argument('-dz', '--zipball', type=str, help="Download the zipball of the given repo") group.add_argument('-op', '--openfile', type=str, help="Show the contents of the given file in a repo") group.add_argument('-f', '--followers', type=str, help="Get followers of the user") group.add_argument('-fo', '--following', type=str, help="Get people following the user") group.add_argument('-c', '--contributors', type=str, help="Get contributors of a repo") if len(sys.argv) == 1: parser.print_help() return args = parser.parse_args() # URL if args.url: name = url_parse(args.url) url = GITHUB_API + 'users/' + name + '/repos' # TREE if args.recursive: name = url_parse(args.recursive) url = GITHUB_API + 'repos/' + name + '/branches/master' response = get_req(url) jsondata = json.loads(response) sha = jsondata['commit']['commit']['tree']['sha'] url = GITHUB_API + 'repos/' + name + '/git/trees/' + sha + '?recursive=1' # README if args.readme: name = url_parse(args.readme) url = GITHUB_API + 'repos/' + name + '/readme' # RELEASES if args.releases: name = url_parse(args.releases) url = GITHUB_API + 'repos/' + name + '/releases' # TARBALL/ZIPBALL if args.tarball or args.zipball: if args.tarball: key = '/tarball/' name = url_parse(args.tarball) if args.zipball: key = '/zipball/' name = url_parse(args.zipball) url = GITHUB_API + 'repos/' + name + key + 'master' # OPEN ONE FILE if args.openfile: name = url_parse(args.openfile) position = name.find('/') user = name[:position+1] rest = name[position+1:] position = rest.find('/') repo = rest[:position+1] rest = rest[position+1:] url = GITHUB_API + 'repos/' + user + repo + 'contents/' + rest # GET RESPONSES # TARBALL/ZIPBALL if args.tarball or args.zipball: response_url = geturl_req(url) position = name.find('/') name = name[position+1:] if args.tarball: name = name+'.tar.gz' if args.zipball: name = name+'.zip' print("\nDownloading " + name + '...\n') urllib.request.urlretrieve(response_url, name) print(name + ' has been saved\n') return # FOLLOWERS if args.followers: name = url_parse(args.followers) url = GITHUB_API + 'users/' + name + '/followers' #FOLLOWING if args.following: name = url_parse(args.following) url = GITHUB_API + 'users/' + name + '/following' #CONTRIBUTORS if args.contributors: name = url_parse(args.contributors) url = GITHUB_API + 'repos/' + name + '/contributors' # OTHER OPTIONS response = get_req(url) jsondata = json.loads(response) # USERNAME and URL if args.url: table = PrettyTable([" Repository ", "★ Star"]) table.align[" Repository "] = "l" for i in jsondata: table.add_row([i['name'], i['stargazers_count']]) print(table) # RECURSIVE TREE if args.recursive: table = PrettyTable([" File/Folder ", " Size (Bytes) "]) table.align[" File/Folder "] = "l" for i in jsondata['tree']: size = '-' path = i['path']+'/' if i['type'] == 'blob': size = i['size'] path = path[:-1] table.add_row([path, size]) 
print(table) # README if args.readme: print(base64.b64decode(jsondata['content']).decode('utf-8')) # RELEASES if args.releases: table = PrettyTable([" Release name ", " Release Date ", " Release Time "]) for i in jsondata: time = str(dateutil.parser.parse(i['published_at'])) date = time[:10] time = time[11:] time = time[:5] time = time + ' UTC' table.add_row([i['tag_name'], date, time]) print(table) # OPEN ONE FILE if args.openfile: try: print(base64.b64decode(jsondata['content']).decode('utf-8')) return except (TypeError, KeyError): print("\nDirectory URL was given, hence its contents will be displayed\n") table = PrettyTable(["Folder Contents"]) for i in jsondata: table.add_row([i['name']]) print(table) # GET FOLLOWERS if args.followers: table = PrettyTable([" FOLLOWERS "]) table.align[" FOLLOWERS "] = "l" for i in jsondata: table.add_row([i['login']]) print("Number of followers: " + str(len(jsondata))) print(table) # GET FOLLOWING if args.following: table = PrettyTable([" FOLLOWING "]) table.align[" FOLLOWING "] = "l" for i in jsondata: table.add_row([i['login']]) print("Number of following: " + str(len(jsondata))) print(table) # GET CONTRIBUTORS if args.contributors: table = PrettyTable([" CONTRIBUTORS "]) table.align[" CONTRIBUTORS "] = "l" for i in jsondata: table.add_row([i['login']]) print("Number of contributors: " + str(len(jsondata))) print(table)
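Both b64decode calls above exist because the GitHub contents and readme endpoints return the file body base64-encoded under the 'content' key. In isolation (the payload here is a hand-made stand-in for a real API response):

import base64
import json

# truncated, hypothetical shape of a GitHub contents API response
payload = json.loads('{"content": "IyBIZWxsbywgd29ybGQh\\n", "encoding": "base64"}')
print(base64.b64decode(payload['content']).decode('utf-8'))  # prints: # Hello, world!

b64decode silently skips the newlines GitHub embeds in long content strings, which is why no stripping is needed before decoding.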
def main(): parser = argparse.ArgumentParser( description='Website metrics script: parses domains, checks SSL certificates and website availability.') parser.add_argument('--discoverydomain', help='Discovery domain name from URL', type=str) parser.add_argument('--discoveryssl', help='Discovery SSL from URL', type=str) parser.add_argument('--testssl', help='URL to check SSL certificate', type=str) parser.add_argument('--testdomain', help='Test domain name', type=str) parser.add_argument('--testsite', help='Test web site', type=str) parser.add_argument('--testphrase', help='Test web site with phrase', type=str) parser.add_argument('--timeout', help='Timeout for web site test, sec', type=str, default='15') parser.add_argument( '--httpproxy', help='Specify a proxy to check SSL. For example: username:password@serverfqdn:port', type=str) parser.add_argument('--useproxy', help='Use proxy to check SSL', type=str, default='False') parser.add_argument('--debug', help='Debug mode with verbose output', type=str, default='False') parser.add_argument('--version', action='version', version='%(prog)s 1.0') args = parser.parse_args() discoverydomain = args.discoverydomain discoveryssl = args.discoveryssl test_domain = args.testdomain test_ssl = args.testssl test_site = args.testsite test_phrase = args.testphrase http_proxy = args.httpproxy timeout = args.timeout use_proxy = args.useproxy.lower() in ['true', '1'] if args.debug.lower() in ['true', '1']: logging.basicConfig(stream=sys.stderr, level=logging.DEBUG, format='%(asctime)s - %(levelname)s - %(message)s') if http_proxy == '' or http_proxy is None: use_proxy = False logging.debug('Input arguments: ' + str(args)) web_checks = WebSiteCheck(http_proxy, use_proxy) # Discovery block # get registered domain from URL if discoverydomain is not None: try: data = [] data.append(web_checks.discovery_domain(discoverydomain)) result = output_json_lld('WEBSITE_METRICS_LLD_DOMAIN_NAME', data) print(result) return 1 except: return 0 # discover ssl if discoveryssl is not None: try: url = web_checks.discovery_ssl(discoveryssl) data = [] if url is not None: data.append(url) result = output_json_lld('WEBSITE_METRICS_LLD_SSL_URL', data) print(result) return 1 except: return 0 # Discovery block end # return JSON for Domain items prototype if test_domain is not None and test_domain != '': try: data = dict() domainname_result = web_checks.domain_get_status(test_domain) if domainname_result['days_to_expire'] > 0: data.update({'domain_status': 1}) data.update({ 'domain_daystoexpire': domainname_result['days_to_expire'] }) data.update( {'domain_registrar': domainname_result['registrar']}) data.update({ 'domain_creationdate': domainname_result['creation_date'] }) data.update({ 'domain_expiredate': domainname_result['expiration_date'] }) else: data.update({'domain_status': 0}) json.dump(data, sys.stdout) return 1 except: return 0 # return JSON for SSL items prototype if test_ssl is not None and test_ssl != '': try: cert_is_valid = web_checks.ssl_verify_cert(test_ssl) data = {'ssl_status': cert_is_valid} if cert_is_valid: ssl_data = web_checks.ssl_get_status(test_ssl) data.update({'ssl_daystoexpire': ssl_data['days_to_expire']}) data.update({'ssl_issuedby': ssl_data['issued_by']}) data.update( {'ssl_expiredate': ssl_data['expiry_date'].strftime("%c")}) data.update( {'ssl_issueddate': ssl_data['issued_date'].strftime("%c")}) data.update({'ssl_serialnumber': ssl_data['serial_number']}) json.dump(data, sys.stdout) return 1 except: return 0 if test_site is not None and 
test_site != '': try: webtest = web_checks.website_get_status(test_site, test_phrase, timeout) data = {'test_status': webtest['status']} data.update({'test_time': webtest['time']}) data.update({'test_code': webtest['code']}) data.update({'test_speed': webtest['speed']}) data.update({'test_message': webtest['message']}) data.update({'test_phrase_status': webtest['phrase_status']}) json.dump(data, sys.stdout) return 1 except: return 0 # print help message if no args passed if len(sys.argv) == 1: parser.print_help(sys.stderr) sys.exit(1)
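--useproxy and --debug above travel as strings and are compared against 'true'/'1' by hand. argparse can centralize that with a type= converter; a sketch (str2bool is illustrative, not part of the script):

import argparse

def str2bool(value):
    # accept the spellings the script above recognizes; reject the rest loudly
    lowered = value.lower()
    if lowered in ('true', '1'):
        return True
    if lowered in ('false', '0'):
        return False
    raise argparse.ArgumentTypeError("expected true/false, got %r" % value)

parser = argparse.ArgumentParser()
parser.add_argument('--useproxy', type=str2bool, default=False)
print(parser.parse_args(['--useproxy', 'True']).useproxy)  # True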
def main(): # prompt for and collect needed parameters parser = optparse.OptionParser(usage='Usage: %prog <options>') parser.add_option('-a', dest='attackFile', type='string', help='Enter the full path to the attack PCAP file.') parser.add_option('-t', dest='dataFile', type='string', help='Enter the full path to the target PCAP file') parser.add_option('-o', dest='outFile', type='string', help='Enter the full path to the output file') (options, args) = parser.parse_args() attackFile = options.attackFile dataFile = options.dataFile outFile = options.outFile if attackFile is None or dataFile is None or outFile is None: parser.print_help() exit(0) #logging.info("Finished processing file. Created {:,.0f}".format(count) + " files") logging.info("CDA Attack Sample Generator") # ensure we have met our prerequisites if not verifyTools(): logging.error("Required tools were not present. Exiting.") exit(0) # get/report/store the data from the attack file logging.info("Selected Attack File: " + attackFile) attackFileData = getPcapDetails(attackFile) # get/report/store the data from the data file logging.info("Selected Data File: " + dataFile) dataFileData = getPcapDetails(dataFile) # ensure that the target data file is longer (time-wise) than the attack file if not ensure_Data_Longer_Than_Attack(attackFileData, dataFileData): logging.error("Attack File is longer than the data file") logging.error("Unable to proceed. Exiting") exit(0) logging.info("Verified: target file is longer than attack file.") # setup some temp file names t1 = outFile + "_001" t2 = outFile + "_002" t3 = outFile + "_003" # calculate the time shift needed for attack file relative to the target offset = calculate_Needed_Timeshift(attackFileData, dataFileData) # adjust the time (editcap) r = shift_pcap_time(attackFile, t1, offset) # prompt the user for the victim IP from the attack file (show IPs?) # - get and store both the IP and the MAC addr for the original victim logging.info("Determining top talkers in attack file (this may take a while)...") talkers = get_top_talkers(attackFile, 10) logging.info("Attack File Top Talkers:") for talker in talkers[:-1]: logging.info(talker.strip()) # prompt the user for the victim IP from the attack file (show IPs?) oldVictimIp = raw_input("Enter the IP of the victim in the attack pcap: ") logging.info("Selected Victim IP: " + oldVictimIp) logging.info("Determining MAC address for old victim...") oldVictimMac = get_mac_for_ip(attackFile, oldVictimIp) logging.info("Old Victim MAC: " + oldVictimMac) # prompt the user for the attacker (source) IP from the attack file sourceIp = raw_input("Enter the IP of the attacker in the attack pcap: ") logging.info("Selected Attacker IP: " + sourceIp) # prompt the user for the attack label label = raw_input("Enter the label of the attack: ") logging.info("Selected label: " + label) # prompt the user for the victim IP from the target data file (show IPs?) 
# - get and store both the IP and the MAC addr for the new victim logging.info("Determining top talkers in target file (this may take a while)...") talkers = get_top_talkers(dataFile, 10) logging.info("Target File Top Talkers:") for talker in talkers[:-1]: logging.info(talker.strip()) newVictimIp = raw_input("Enter the IP of the victim in the target pcap: ") logging.info("Selected Victim IP: " + newVictimIp) logging.info("Determining MAC address for new victim...") newVictimMac = get_mac_for_ip(dataFile, newVictimIp) logging.info("New Victim MAC: " + newVictimMac) # ok, we now have what we need to write out the metadata file logging.info("Writing metadata file...") write_metadata_file(outFile, newVictimIp, sourceIp, label) # change the IP of the victim (bittwiste) logging.info("Swapping victim IPs...") swap_ips(t1, t2, oldVictimIp, newVictimIp) # change the MAC of the victim (bittwiste) logging.info("Swapping victim MACs...") swap_macs(t2, t3, oldVictimMac, newVictimMac) # merge the files logging.info("Merging the modified attack data with the target data...") merge_pcaps(dataFile, t3, outFile) # let's get some summary data finalFileData = getPcapDetails(outFile) listFileDetails(finalFileData) # verify changes were successful # - num of pkts in output should be raw + attack finalPacketCount = long(get_capinfo_pkt_count(finalFileData)) attackPacketCount = long(get_capinfo_pkt_count(attackFileData)) targetPacketCount = long(get_capinfo_pkt_count(dataFileData)) goal = attackPacketCount + targetPacketCount if finalPacketCount == goal: logging.info("Verification: Packet Counts Match!!!") else: logging.error("Packet Count Verification Failed! (" + str(finalPacketCount) + " vs. " + str(goal) + ")") logging.error("Final: " + str(finalPacketCount)) logging.error("Attack: " + str(attackPacketCount)) logging.error("Target: " + str(targetPacketCount)) # - start time of output should == start time of raw finalOffset = calculate_Needed_Timeshift(finalFileData, dataFileData) if finalOffset == 0: logging.info("Verification: Start Times Match!!!") else: logging.error("Start Time Verification Failed! (offset:" + str(finalOffset) + ")") # - end time of output should == end time of raw finalDuration = calculateDurationInSeconds(finalFileData) targetDuration = calculateDurationInSeconds(dataFileData) if finalDuration == targetDuration: logging.info("Verification: Duration Matches!!!") else: logging.error("Capture Duration Verification Failed! (" + str(finalDuration) + " vs. " + str(targetDuration) + ")")
def main(): parser = argparse.ArgumentParser(description='Github within the Command Line') g=parser.add_mutually_exclusive_group() g.add_argument('-n','--username', type=str, help = "Get repos of the given username") g.add_argument('-u','--url', type=str, help = "Get repos from the user profile's URL") g.add_argument('-r','--recursive',type=str, help = "Get the file structure from the repo link") g.add_argument('-R','--readme',type=str, help = "Get the raw version of the repo readme from repo link") g.add_argument('-re','--releases',type=str, help = "Get the list of releases from repo link") if len(sys.argv)==1: parser.print_help() return args = parser.parse_args() #URL if(args.url): name=args.url n=name.find("github.com") if(n>=0): if(n!=0): n1=name.find("www.github.com") n2=name.find("http://github.com") n3=name.find("https://github.com") if(n1*n2*n3!=0): print('-'*150) print("Enter a valid URL. For help, type 'cli-github -h'") print('-'*150) return name=args.url[n+11:] if name.endswith('/'): name = name[:-1] url = GITHUB_API + 'users/' +name + '/repos' else: print('-'*150) print("Enter a valid URL. For help, type 'cli-github -h'") print('-'*150) return #USERNAME if(args.username): name=args.username url = GITHUB_API + 'users/' +name + '/repos' #TREE if(args.recursive): name=args.recursive n=name.find("github.com") if(n>=0): if(n!=0): n1=name.find("www.github.com") n2=name.find("http://github.com") n3=name.find("https://github.com") if(n1*n2*n3!=0): print('-'*150) print("Enter a valid URL. For help, type 'cli-github -h'") print('-'*150) return name=args.recursive[n+11:] if name.endswith('/'): name = name[:-1] url = GITHUB_API + 'repos/' +name + '/branches/master' request = urllib.request.Request(url) request.add_header('Authorization', 'token %s' % API_TOKEN) try: response = urllib.request.urlopen(request).read().decode('utf-8') except urllib.error.HTTPError as err: print('-'*150) print("Invalid Credentials. For help, type 'cli-github -h'") print('-'*150) return else: print('-'*150) print("Enter a valid URL. For help, type 'cli-github -h'") print('-'*150) return jsondata = json.loads(response) sha = jsondata['commit']['commit']['tree']['sha'] url=GITHUB_API+'repos/'+name+'/git/trees/'+sha+'?recursive=1' #README if(args.readme): name=args.readme n=name.find("github.com") if(n>=0): if(n!=0): n1=name.find("www.github.com") n2=name.find("http://github.com") n3=name.find("https://github.com") if(n1*n2*n3!=0): print('-'*150) print("Enter a valid URL. For help, type 'cli-github -h'") print('-'*150) return name=args.readme[n+11:] if name.endswith('/'): name = name[:-1] url = GITHUB_API + 'repos/' +name + '/readme' else: print('-'*150) print("Enter a valid URL. For help, type 'cli-github -h'") print('-'*150) return #RELEASES if(args.releases): name=args.releases n=name.find("github.com") if(n>=0): if(n!=0): n1=name.find("www.github.com") n2=name.find("http://github.com") n3=name.find("https://github.com") if(n1*n2*n3!=0): print('-'*150) print("Enter a valid URL. For help, type 'cli-github -h'") print('-'*150) return name=args.releases[n+11:] if name.endswith('/'): name = name[:-1] url = GITHUB_API + 'repos/' +name + '/releases' else: print('-'*150) print("Enter a valid URL. For help, type 'cli-github -h'") print('-'*150) return request = urllib.request.Request(url) request.add_header('Authorization', 'token %s' % API_TOKEN) try: response = urllib.request.urlopen(request).read().decode('utf-8') except urllib.error.HTTPError as err: print('-'*150) print("Invalid Credentials. 
For help, type 'cli-github -h'") print('-'*150) return jsondata = json.loads(response) if(args.url or args.username): x = PrettyTable([" Repository ", "★ Star"]) x.align[u" Repository "] = u"l" for i in jsondata: x.add_row([i['name'],i['stargazers_count']]) print(x) if(args.recursive): x = PrettyTable([" File/Folder ", " Size (Bytes) "]) x.align[u" File/Folder "] = u"l" for i in jsondata['tree']: size='-' path=i['path']+'/' if(i['type']=='blob'): size=i['size'] path=path[:-1] x.add_row([path,size]) print(x) if(args.readme): print(base64.b64decode(jsondata['content']).decode('utf-8')) if(args.releases): x = PrettyTable([" Release name "," Release Date "," Release Time "]) for i in jsondata: ti = dateutil.parser.parse(i['published_at']) ti = str(ti) date = ti[:10] time = ti[11:] time = time[:5] time = time + ' UTC' x.add_row([i['tag_name'],date,time]) print(x)
def parse_args(): """Parse cli arguments with argparse""" time_end = datetime.now() time_start = time_end - timedelta(minutes=1) parser = argparse.ArgumentParser( description='Command line interface for the Data API 2') subparsers = parser.add_subparsers(help='Action to be performed', metavar='action', dest='action') parser_search = subparsers.add_parser('search') parser_search.add_argument("regex", help="String to be searched") parser_save = subparsers.add_parser('save') parser_save.add_argument( "--from_time", help="Start time for the data query (default: now-1m)", default=time_start, metavar='TIMESTAMP', type=_convert_date) parser_save.add_argument("--to_time", help="End time for the data query (default: now)", default=time_end, metavar='TIMESTAMP', type=_convert_date) parser_save.add_argument("--from_pulse", help="Start pulseId for the data query", default=-1, metavar='PULSE_ID') parser_save.add_argument("--to_pulse", help="End pulseId for the data query", default=-1, metavar='PULSE_ID') parser_save.add_argument("filename", help="Name of the output file", default="") parser_save.add_argument( "channels", help="Channels to be queried, comma-separated list", default="") parser_save.add_argument("--overwrite", action="store_true", help="Overwrite the output file") parser_save.add_argument( "--split", help="Number of pulses or duration (ISO8601) per file", default="") parser_save.add_argument( "--print", help="Prints out the downloaded data. Output can be cut.", action="store_true") #parser_save.add_argument( # "--binary", help="Download as binary", action="store_true", default=False) parser_open = subparsers.add_parser('open') parser_open.add_argument("filename", help="Name of the output file", default="") args = parser.parse_args() if args.action is None: parser.print_help() sys.exit(-1) return args
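This parser guards against a missing subcommand by testing args.action is None after parsing; Python 3.7+ argparse also accepts add_subparsers(required=True), but the manual check lets the script print the full help instead of a terse error. The guard in isolation:

import argparse
import sys

parser = argparse.ArgumentParser(description='Data API-style CLI sketch')
subparsers = parser.add_subparsers(help='Action to be performed',
                                   metavar='action', dest='action')
subparsers.add_parser('search').add_argument('regex')
subparsers.add_parser('save').add_argument('filename')

args = parser.parse_args(['search', 'SARFE.*'])
if args.action is None:  # no subcommand on the command line
    parser.print_help()
    sys.exit(-1)
print(args.action)  # search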
def parse_args(args): class CustomArgparseFormatterClass(argparse.ArgumentDefaultsHelpFormatter, argparse.RawDescriptionHelpFormatter): pass parser = argparse.ArgumentParser( description='### FinTS Transaction Tracker ###\n\nThis tool differentiates between `target` and `actual` transactions.\nTargets are added manually, actuals come from your bank account(s).', epilog="Use `[subcommand] --help` to show help for given command.\n\nSource: https://github.com/phil294/fints-transaction-tracker\nContact <*****@*****.**>\n\nThis script is a fork of https://github.com/jahir/kontify\nand approx. [???]% of the source stems from it.", formatter_class=CustomArgparseFormatterClass) parser.add_argument('-c', '--config-file', help='Config file location', default='kontify.yaml') subparsers = parser.add_subparsers(dest='subcommand', required=True) sync_parser = subparsers.add_parser( 'sync', help='Synchronize the database: Fetch the latest bank transactions from the configured accounts (config file) and save them as actual statements.', formatter_class=CustomArgparseFormatterClass) sync_parser.add_argument('--days', '-d', type=int, default=7, help='The amount of days to query for.') sync_parser.set_defaults(func=sync) target_parser = subparsers.add_parser( 'target', help='Manage the *expected* transactions', formatter_class=CustomArgparseFormatterClass) target_subparsers = target_parser.add_subparsers(dest='target_subcommand', required=True) target_parser.set_defaults(func=target) target_get_parser = target_subparsers.add_parser( 'get', help='Get saved targets. By default, resolved targets are hidden.', formatter_class=CustomArgparseFormatterClass) target_get_parser_group = target_get_parser.add_mutually_exclusive_group() target_get_parser_group.add_argument( '--expired-only', '-e', action='store_true', help='Only show targets whose deadline is expired') target_get_parser_group.add_argument('--include-resolved', '-r', action='store_true', help='Include resolved targets.') target_add_parser = target_subparsers.add_parser( 'add', help='Add a new expected transaction (target)', formatter_class=CustomArgparseFormatterClass) target_add_parser.add_argument( 'amount', type=float, help='The expected money change. Floating point number. Can be negative.') target_add_parser.add_argument( 'comment', nargs='?', default="", help='Comment to save along with the target so you can identify this entry better in the future' ) target_add_parser.add_argument( 'purpose_match', nargs='?', default="", help='Matching criteria for target resolution: If set, the purpose field needs to *include* PURPOSE_MATCH.' ) target_add_parser.add_argument( 'name_match', nargs='?', default="", help='Matching criteria for target resolution: If set, the application_name field needs to *include* NAME_MATCH.' ) target_add_parser.add_argument( '--from', '-f', type=datetime.datetime.fromisoformat, help='Minimum time (ISO-Date) before which the target will not be resolved.' ) target_add_parser.add_argument( '--to', '-t', type=datetime.datetime.fromisoformat, default=date.today() + timedelta(5), help='Deadline (ISO-Date) after which the target will be marked as expired when it has not resolved yet. Defaults to today + 5 days.' ) target_add_parser.add_argument( '--interval', '-i', type=int, default=0, help='The interval in months, after which this target entry will be repeated. Defaults to 0 aka do not repeat. *If* you set this to any integer greater than 0, the deadline specified by `--to` will be recurring. 
`--from` for *recurring* targets will be `--to` minus 8 days. Example: Today is February 10th. `kontify.py target add -i 2 -33.44` will expect a bank statement of -33.44 ¤ from [any time] until Feb 15th, and from Apr 07th until Apr 15th, and from Jun 07th until Jun 15th and so on.' ) target_add_parser.add_argument( '--monthly', '-m', dest='interval', action='store_const', const=1, help= 'Alias for `--interval=1`, that is: Expect this target every month.') target_remove_parser = target_subparsers.add_parser( 'remove', help='Remove a target transaction (target) by id.', formatter_class=CustomArgparseFormatterClass) target_remove_parser.add_argument('id', type=int, help='The ID to remove.') statement_parser = subparsers.add_parser( 'actual', help= 'Manage the *actual* transactions. Currently, you can only retrieve them. Adding happens in the `sync` subcommand.', formatter_class=CustomArgparseFormatterClass) statement_subparsers = statement_parser.add_subparsers( dest='statement_subcommand', required=True) statement_parser.set_defaults(func=actual) statement_get_parser = statement_subparsers.add_parser( 'get', help='Get saved actual statements.', formatter_class=CustomArgparseFormatterClass) statement_get_parser.add_argument( '--days', '-d', type=int, default=7, help= 'Returns last [DAYS] days of statements from the database. If < 0, no limit is applied.' ) statement_get_parser.add_argument( '--unexpected', '-u', action='store_true', help= 'Only show actual statements that do not have a matching target assigned (prior with `reconcile` subcommand)' ) reconcile_parser = subparsers.add_parser( 'reconcile', help= 'Reconcile the target/actual state. Goes through all target transactions and tries to match it to any actual fitting, unassigned actual transaction that matches the criteria.', formatter_class=CustomArgparseFormatterClass) reconcile_parser.set_defaults(func=reconcile) info_parser = subparsers.add_parser( 'info', help= 'Shows the current target/actual state: Prints the output of `kontify.py target --expired` and `kontify.py actual get --unexpected --days=-1`, but in human readable format.', formatter_class=CustomArgparseFormatterClass) info_parser.set_defaults(func=info) help_parser = subparsers.add_parser('help') help_parser.add_argument( 'help_subcommand_name', nargs='?', help='Command to show help for. Alternatively, use [subcommand] --help.' ) batch_parser = subparsers.add_parser( 'batch', help= 'Invoke the script multiple times sequentially by passing multiple argument sets. Config file is only parsed once. The only reason to use this subcommand is more speed (significantly, as Python imports are somehow very slow).' ) batch_parser.add_argument( 'batch_arguments', nargs='+', help= 'The batch commands, passed as combined arguments. Leave out the script name. Example: `kontify.py batch "sync --days 7" "reconcile"` first runs sync, then reconcile.' ) batch_parser.set_defaults(func=batch) args_parsed = parser.parse_args(args) if not initialized: initialize(args_parsed) if args_parsed.subcommand == 'help': if args_parsed.help_subcommand_name and args_parsed.help_subcommand_name in subparsers.choices: subparsers.choices[args_parsed.help_subcommand_name].print_help() else: parser.print_help() else: args_parsed.func(args_parsed)
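--from and --to above pass datetime.datetime.fromisoformat directly as the argparse type=; any callable taking one string works there, and a ValueError from it becomes a clean "invalid ... value" message. A minimal sketch (dest='from_date' is added here only because `from` is a Python keyword, so args.from would not parse; the original relies on getattr instead):

import argparse
import datetime

parser = argparse.ArgumentParser()
parser.add_argument('--from', '-f', dest='from_date',
                    type=datetime.datetime.fromisoformat,
                    help='Minimum time (ISO-Date)')
args = parser.parse_args(['--from', '2024-02-10'])
print(args.from_date)  # 2024-02-10 00:00:00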
def arg_required(args, fields=[]): for field in fields: if not getattr(args, field): parser.print_help() sys.exit()
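arg_required relies on a module-level parser being in scope when it runs. A sketch of how it is presumably wired up after parse_args, with illustrative option names:

import argparse
import sys

parser = argparse.ArgumentParser()
parser.add_argument('--host')
parser.add_argument('--token')

def arg_required(args, fields=[]):
    # bail out with the full help text if any required field is unset
    for field in fields:
        if not getattr(args, field):
            parser.print_help()
            sys.exit()

args = parser.parse_args(['--host', 'example.org', '--token', 'abc'])
arg_required(args, fields=['host', 'token'])

The mutable default fields=[] is harmless here because the list is only read, never mutated, but passing fields explicitly on every call (as above) sidesteps the question entirely.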
def portal_data_builder_entry(cmd_args): ret_code = 0 parser = argparse.ArgumentParser(formatter_class=RawTextHelpFormatter, description='BDKD Portal Data Builder V%s\nTo build the data of a BDKD Portal so that it is synchronized ' 'with the BDKD Data Repository in an object store.' % (__version__)) parser.add_argument('command', help='The command to execute, which can be:\n' ' update - to update the portal using metadata from the datastore\n' ' daemon - to run the portal update as a daemon process\n' ' setup - setup the organizations in the config file\n' ' purge - purge all datasets from this portal\n' ' reprime - purge and rebuild all datasets for this portal\n' ) parser.add_argument('--cycle', type=int, help='Maximum number of build cycles to run when running as daemon') parser.add_argument('-c', '--config', help='Configuration file') parser.add_argument('-b', '--bucket-name', help='Select the bucket to build from (must be in the config)') parser.add_argument('-l', '--log-ini', help='Specify a logging ini file') parser.add_argument('-v', '--verbose', action='store_true', help='Run in verbose mode (INFO)') parser.add_argument('--debug', action='store_true', help='Run in very verbose mode (DEBUG)') if len(cmd_args) <= 1: parser.print_help() sys.exit(1) args = parser.parse_args(args=cmd_args[1:]) if args.command not in ['update', 'daemon', 'setup', 'purge', 'reprime']: sys.exit('Unknown command %s' % (args.command)) if not args.config: sys.exit('Please specify the configuration file to use') if args.command == 'update': _prepare_logging(args) portal_builder = PortalBuilder() portal_builder.load_config(from_file=args.config) portal_builder.build_portal(repo_name=args.bucket_name) elif args.command == 'setup': _prepare_logging(args) portal_builder = PortalBuilder() portal_builder.load_config(from_file=args.config) portal_builder.setup_organizations(repo_name=args.bucket_name) elif args.command == 'purge' or args.command == 'reprime': _prepare_logging(args) portal_builder = PortalBuilder() portal_builder.load_config(from_file=args.config) portal_builder.remove_all_datasets() if args.command == 'reprime': portal_builder.build_portal() elif args.command == 'daemon': # run builder in daemonized mode (note: set up logging after daemonizing) # pidfile = FileLock('/tmp/portal_data_builder.pid') portal_builder = PortalBuilder() try: portal_builder.load_config(from_file=args.config) except FatalError as e: logging.getLogger(__name__).critical( "Portal data building not started due to a critical failure: " + str(e)) return 1 from lockfile.pidlockfile import PIDLockFile context = daemon.DaemonContext( pidfile = PIDLockFile(portal_builder.get_config('pidfile', '/tmp/portal_data_builder.pid')), signal_map = { signal.SIGTERM: sigterm_handler, } ) with context: init_running_state() _prepare_logging(args) nap_duration = portal_builder.get_nap_duration() max_cycle = args.cycle while is_running(): try: portal_builder.build_portal() except FatalError as e: logging.getLogger(__name__).critical( "Portal data building terminating due to a critical failure: " + str(e)) stop_running() ret_code = 1 except Exception as e: logging.getLogger(__name__).error("Portal data building has failed: " + str(e)) # If there is a monitoring system, we will raise an alert here. # Otherwise drop back to sleep; hopefully by the next cycle the failure # will have recovered. # We don't want to re-raise here or the daemon will terminate. # during testing, we can put a cap on the number of build cycles. 
if max_cycle: max_cycle -= 1 if max_cycle <= 0 and is_running(): stop_running() if is_running(): time.sleep(nap_duration) logging.getLogger(__name__).info("Daemon terminated") return ret_code
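The max_cycle counter above exists so tests can bound an otherwise endless daemon loop. The same skeleton distilled (run_cycles is a hypothetical distillation, not a function from the builder):

import time

def run_cycles(build, nap_duration=0.0, max_cycle=None):
    # run build() until stopped; errors are logged and the loop continues,
    # and max_cycle puts a test-friendly cap on the number of iterations
    running = True
    while running:
        try:
            build()
        except Exception as e:
            print("build failed: %s" % e)
        if max_cycle is not None:
            max_cycle -= 1
            if max_cycle <= 0:
                running = False
        if running:
            time.sleep(nap_duration)

run_cycles(lambda: print("building portal"), max_cycle=2)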
def main(): parser = argparse.ArgumentParser() parser.add_argument('-i', metavar='FILE_NAME', help='Input JSON file with appointments', required=True) parser.add_argument('-o', metavar='FILE_NAME', help='Output ICS file') parser.add_argument('--caldav', metavar='CALDAV_URL', help='URL of the CALDAV server') parser.add_argument('-p', '--person_name', metavar='NAME', help='Name to append to calendar entries') args = parser.parse_args() if not args.o and not url: print('Provide -o command line argument for file output or CALDAV_URL environment variable ' + 'for CalDAV output') parser.print_help() sys.exit(1) appointments = None with open(args.i) as f: appointments = json.load(f) # Generate vcal events from appointments icalendars = [] all_events = Calendar() for appointment in appointments: cal = Calendar() location = get_location(appointment['clinicName']) dt = dateutil.parser.parse(appointment['appointmentDate']) tz = pytz.timezone('Europe/Warsaw') local = dt.replace(tzinfo=tz) event = Event() event['uid'] = '{0}@medicover.pl'.format(appointment['id']) event['dtstart'] = vDatetime(local) event['dtend'] = vDatetime(local + datetime.timedelta(minutes=appointment['duration'])) event['dtstamp'] = vDatetime(now) summary = appointment['specializationName'] if args.person_name: summary += u' – {0}'.format(args.person_name) event['summary'] = vText(summary) event['description'] = vText(u'{0}, {1}'.format( appointment['specializationName'], appointment['doctorName'])) event['class'] = 'private' if location: event['location'] = vText(u'{0}, {1}, {2}'.format(appointment['clinicName'], location['address'], location['cityname'])) geocode = location['geocode'] if geocode: event['geo'] = '{0};{1}'.format(*geocode['geo']) else: event['location'] = appointment['clinicName'] cal.add_component(event) icalendars.append(cal) all_events.add_component(event) # Write calendar to file. if args.o: print('Writing ' + args.o) output_file = open(args.o, 'wb') output_file.write(all_events.to_ical()) # Write calendar to CalDAV. if url: client = caldav.DAVClient(url) principal = client.principal() calendars = principal.calendars() for calendar in calendars: name = calendar.get_properties([dav.DisplayName(),])['{DAV:}displayname'] if name == 'Medicover': for cal in icalendars: print(cal.to_ical()) event = calendar.add_event(cal.to_ical()) print('Event', event, 'created')
def parse_args(): """Parse cli arguments with argparse""" time_end = datetime.now() time_start = time_end - timedelta(minutes=1) parser = argparse.ArgumentParser( description='Command line interface for the Data API-3 ' + data_api3.version()) parser.add_argument( "--baseurl", help= "Base url of the service. Example: http://sf-daq-5.psi.ch:8371/api/1.0.1 Old default: http://sf-daq-5.psi.ch:8080/api/v1", required=True, ) parser.add_argument( "--default-backend", help= "Backend to use for channels. Currently only sf-databuffer, sf-imagebuffer, hipa-archive are supported.", required=True, ) subparsers = parser.add_subparsers(help='Action to be performed', metavar='action', dest='action') parser_search = subparsers.add_parser('search') parser_search.add_argument("regex", help="String to be searched", nargs='?', default=".*") parser_save = subparsers.add_parser('save') parser_save.add_argument("filename", help="Name of the output file", metavar="FILE") parser_save.add_argument( "start", help="Start time for the data query (default: now-1m)", default=time_start, metavar='BEGIN_DATE', type=_convert_date) parser_save.add_argument("end", help="End time for the data query (default: now)", default=time_end, metavar='END_DATE', type=_convert_date) parser_save.add_argument( "channels", help="Channels to be queried, space-separated list", nargs='+', metavar="CHANNELS") args = parser.parse_args() if args.action is None: parser.print_help() sys.exit(-1) return args
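_convert_date is referenced by both Data API parsers but not shown in this collection; a plausible stand-in, assuming it maps ISO-ish timestamp strings to datetime objects (a guess at its contract, not the real implementation):

import argparse
from datetime import datetime

def _convert_date(value):
    # hypothetical: try a couple of common timestamp layouts in order
    for fmt in ('%Y-%m-%d %H:%M:%S', '%Y-%m-%d'):
        try:
            return datetime.strptime(value, fmt)
        except ValueError:
            pass
    raise argparse.ArgumentTypeError("unsupported date format: %r" % value)

print(_convert_date('2021-03-01 12:00:00'))  # 2021-03-01 12:00:00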
def main(): qp = {} def add_qp(option, opt_str, value, parser): if option.dest == 'query': try: (p, v) = value.split('=', 1) qp[p] = v except ValueError: raise optparse.OptionValueError("%s expects parameter=value" % opt_str) else: qp[option.dest] = value parser = optparse.OptionParser( usage="Usage: %prog [-h|--help] [OPTIONS] -o file", version="%prog " + VERSION, add_help_option=False) parser.set_defaults(url="http://geofon.gfz-potsdam.de/eidaws/routing/1/", timeout=600, retries=10, retry_wait=60, threads=5) parser.add_option("-h", "--help", action="store_true", default=False, help="show help message and exit") parser.add_option("-l", "--longhelp", action="store_true", default=False, help="show extended help message and exit") parser.add_option("-v", "--verbose", action="store_true", default=False, help="verbose mode") parser.add_option("-u", "--url", type="string", help="URL of routing service (default %default)") parser.add_option("-y", "--service", type="string", action="callback", callback=add_qp, help="target service (default dataselect)") parser.add_option("-N", "--network", type="string", action="callback", callback=add_qp, help="network code or pattern") parser.add_option("-S", "--station", type="string", action="callback", callback=add_qp, help="station code or pattern") parser.add_option("-L", "--location", type="string", action="callback", callback=add_qp, help="location code or pattern") parser.add_option("-C", "--channel", type="string", action="callback", callback=add_qp, help="channel code or pattern") parser.add_option("-s", "--starttime", type="string", action="callback", callback=add_qp, help="start time") parser.add_option("-e", "--endtime", type="string", action="callback", callback=add_qp, help="end time") parser.add_option("-q", "--query", type="string", action="callback", callback=add_qp, metavar="PARAMETER=VALUE", help="additional query parameter") parser.add_option("-t", "--timeout", type="int", help="request timeout in seconds (default %default)") parser.add_option("-r", "--retries", type="int", help="number of retries (default %default)") parser.add_option("-w", "--retry-wait", type="int", help="seconds to wait before each retry " "(default %default)") parser.add_option("-n", "--threads", type="int", help="maximum number of download threads " "(default %default)") parser.add_option("-c", "--credentials-file", type="string", help="URL,user,password file (CSV format) for queryauth") parser.add_option("-a", "--auth-file", type="string", help="file that contains the auth token") parser.add_option("-p", "--post-file", type="string", help="request file in FDSNWS POST format") parser.add_option("-f", "--arclink-file", type="string", help="request file in ArcLink format") parser.add_option("-b", "--breqfast-file", type="string", help="request file in breq_fast format") parser.add_option("-o", "--output-file", type="string", help="file where downloaded data is written") parser.add_option("-z", "--no-citation", action="store_true", default=False, help="suppress network citation info") parser.add_option("-Z", "--no-check", action="store_true", default=False, help="suppress checking received routes and data") (options, args) = parser.parse_args() if options.help: print(__doc__.split("Usage Examples", 1)[0], end="") parser.print_help() return 0 if options.longhelp: print(__doc__) parser.print_help() return 0 if args or not options.output_file: parser.print_usage(sys.stderr) return 1 if bool(options.post_file) + bool(options.arclink_file) + \ bool(options.breqfast_file) > 1: 
msg("only one of (--post-file, --arclink-file, --breqfast-file) " "can be used") return 1 try: cred = {} authdata = None postdata = None chans_to_check = set() if options.credentials_file: with open(options.credentials_file) as fd: try: for (url, user, passwd) in csv.reader(fd): cred[url] = (user, passwd) except (ValueError, csv.Error): raise Error("error parsing %s" % options.credentials_file) except UnicodeDecodeError: raise Error("invalid unicode character found in %s" % options.credentials_file) if options.auth_file: with open(options.auth_file, 'rb') as fd: authdata = fd.read() else: try: with open(DEFAULT_TOKEN_LOCATION, 'rb') as fd: authdata = fd.read() options.auth_file = DEFAULT_TOKEN_LOCATION except IOError: pass if authdata: msg("using token in %s:" % options.auth_file, options.verbose) try: proc = subprocess.Popen(['gpg', '--decrypt'], stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE) out, err = proc.communicate(authdata) if not out: if isinstance(err, bytes): err = err.decode('utf-8') msg(err) return 1 if isinstance(out, bytes): out = out.decode('utf-8') msg(out, options.verbose) except OSError as e: msg(str(e)) if options.post_file: try: with open(options.post_file) as fd: postdata = fd.read() except UnicodeDecodeError: raise Error("invalid unicode character found in %s" % options.post_file) else: parser = None if options.arclink_file: parser = ArclinkParser() try: parser.parse(options.arclink_file) except UnicodeDecodeError: raise Error("invalid unicode character found in %s" % options.arclink_file) elif options.breqfast_file: parser = BreqParser() try: parser.parse(options.breqfast_file) except UnicodeDecodeError: raise Error("invalid unicode character found in %s" % options.breqfast_file) if parser is not None: if parser.failstr: msg(parser.failstr) return 1 postdata = parser.postdata if not options.no_check: if postdata: for line in postdata.splitlines(): nslc = line.split()[:4] if nslc[2] == '--': nslc[2] = '' chans_to_check.add('.'.join(nslc)) else: net = qp.get('network', '*') sta = qp.get('station', '*') loc = qp.get('location', '*') cha = qp.get('channel', '*') for n in net.split(','): for s in sta.split(','): for l in loc.split(','): for c in cha.split(','): if l == '--': l = '' chans_to_check.add('.'.join((n, s, l, c))) url = RoutingURL(urlparse.urlparse(options.url), qp) dest = open(options.output_file, 'wb') nets = route(url, cred, authdata, postdata, dest, chans_to_check, options.timeout, options.retries, options.retry_wait, options.threads, options.verbose) if nets and not options.no_citation: msg("retrieving network citation info", options.verbose) get_citation(nets, options) else: msg("", options.verbose) msg( "In case of problems with your request, please use the contact " "form at\n\n" " http://www.orfeus-eu.org/organization/contact/form/" "?recipient=EIDA\n", options.verbose) except (IOError, Error) as e: msg(str(e)) return 1 return 0
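The four nested loops that expand comma-separated network/station/location/channel patterns into chans_to_check are equivalent to a cartesian product; the same expansion via itertools.product, including the FDSN '--' convention for an empty location code:

from itertools import product

def expand_nslc(qp):
    chans = set()
    fields = [qp.get(k, '*').split(',')
              for k in ('network', 'station', 'location', 'channel')]
    for n, s, l, c in product(*fields):
        if l == '--':  # FDSN spelling of an empty location code
            l = ''
        chans.add('.'.join((n, s, l, c)))
    return chans

print(expand_nslc({'network': 'GE,CH', 'channel': 'BHZ'}))
# {'GE.*.*.BHZ', 'CH.*.*.BHZ'}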
def main(): parser = argparse.ArgumentParser( formatter_class=argparse.RawTextHelpFormatter, description='Downloads one or more Flickr photo sets.\n' '\n' 'To use it you need to get your own Flickr API key here:\n' 'https://www.flickr.com/services/api/misc.api_keys.html\n' '\n' 'For more information see:\n' 'https://github.com/beaufour/flickr-download', epilog='examples:\n' ' list all sets for a user:\n' ' > {app} -k <api_key> -s <api_secret> -l beaufour\n' '\n' ' download a given set:\n' ' > {app} -k <api_key> -s <api_secret> -d 72157622764287329\n' '\n' ' download a given set, keeping duplicate names:\n' ' > {app} -k <api_key> -s <api_secret> -d 72157622764287329 -n title_increment\n' .format(app=sys.argv[0]) ) parser.add_argument('-k', '--api_key', type=str, help='Flickr API key') parser.add_argument('-s', '--api_secret', type=str, help='Flickr API secret') parser.add_argument('-t', '--user_auth', action='store_true', help='Enable user authentication') parser.add_argument('-l', '--list', type=str, metavar='USER', help='List photosets for a user') parser.add_argument('-d', '--download', type=str, metavar='SET_ID', help='Download the given set') parser.add_argument('-u', '--download_user', type=str, metavar='USERNAME', help='Download all sets for a given user') parser.add_argument('-q', '--quality', type=str, metavar='SIZE_LABEL', default=None, help='Quality of the picture') parser.add_argument('-n', '--naming', type=str, metavar='NAMING_MODE', help='Photo naming mode') parser.add_argument('-m', '--list_naming', action='store_true', help='List naming modes') parser.set_defaults(**_load_defaults()) args = parser.parse_args() if args.list_naming: print(get_filename_handler_help()) return 1 if not args.api_key or not args.api_secret: print ('You need to pass in both "api_key" and "api_secret" arguments', file=sys.stderr) return 1 ret = _init(args.api_key, args.api_secret, args.user_auth) if not ret: return 1 if args.list: print_sets(args.list) return 0 if args.download or args.download_user: try: get_filename = get_filename_handler(args.naming) if args.download: download_set(args.download, get_filename, args.quality) else: download_user(args.download_user, get_filename, args.quality) except KeyboardInterrupt: print('Forcefully aborting. Last photo download might be partial :(', file=sys.stderr) return 0 print('ERROR: Must pass either --list or --download\n', file=sys.stderr) parser.print_help() return 1
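parser.set_defaults(**_load_defaults()) is how the downloader lets a config file pre-fill options such as api_key while explicit command-line values still win. A sketch with a stand-in loader (_load_defaults here returns a hard-coded dict; the real one presumably reads a dotfile):

import argparse

def _load_defaults():
    # hypothetical stand-in for reading ~/.flickr_download or similar
    return {'api_key': 'key-from-config'}

parser = argparse.ArgumentParser()
parser.add_argument('-k', '--api_key')
parser.set_defaults(**_load_defaults())

print(parser.parse_args([]).api_key)                 # key-from-config
print(parser.parse_args(['-k', 'cli-key']).api_key)  # cli-key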
def main():
    # parse args
    parser = argparse.ArgumentParser(
        description='Gets the top songs of each unique artist in the source '
                    'playlist and adds them to the dest playlist.')
    # TODO: add option to select whether to put songs at top or bottom of playlist...
    parser.add_argument(
        'username', type=str,
        help='(string) username of the spotify account owning the playlist')
    parser.add_argument(
        'source_playlist_uri', type=str,
        help='(string) uri of the spotify playlist to source artist names '
             'from (e.g. "spotify:playlist:i0dcGw2CvUDFwx833UdaLf")')
    # (args starting with '--' are made optional)
    parser.add_argument(
        'dest_playlist_uri', type=str,
        help='(string) uri of the spotify playlist to add the top songs of '
             'the artists found in the source playlist, OR "" to create a '
             'new playlist')
    parser.add_argument(
        '--delete_after', type=str, default='false',
        help='(optional bool) set "true" to also delete the songs in the '
             'source playlist (default "false")')
    parser.add_argument(
        '--copy_num', type=int, default=3,
        help='(optional int) max number of top songs to add from each artist '
             'in addition to the initial source song (default 3)')

    if len(sys.argv) == 1:
        parser.print_help(sys.stderr)
        exit(1)
    args = parser.parse_args()

    args.delete_after = args.delete_after.lower()
    if args.delete_after not in ('true', 'false'):
        print("ERROR: argument for --delete_after must be 'true' or 'false'")
        exit(1)
    args.delete_after = args.delete_after == 'true'

    # setup API
    scope = 'playlist-modify-public'
    # TODO: note that this^ won't work for modifying a user's private playlists
    # https://developer.spotify.com/documentation/general/guides/scopes/#playlist-modify-private
    token = util.prompt_for_user_token(args.username, scope)
    if not token:
        print("Can't get Spotify API token for", args.username)
        exit(1)
    sp = spotipy.Spotify(auth=token)
    sp.trace = False

    # modify playlist
    print("\nRunning cool_artists: " + str(datetime.today()) + "\n")
    cool_artists(sp, args.username, args.source_playlist_uri,
                 args.dest_playlist_uri, args.copy_num, args.delete_after)
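# --- Hedged sketch (assumption): cool_artists() is not defined in this
# snippet. Its core loop would look roughly like this with spotipy: collect
# the unique artists in the source playlist, then add each artist's top
# tracks to the destination. Function name is hypothetical; pagination and
# the delete_after path are omitted for brevity (only the first page of the
# source playlist is read).
def cool_artists_sketch(sp, username, source_uri, dest_uri, copy_num):
    artist_ids = set()
    results = sp.user_playlist_tracks(username, source_uri)
    for item in results['items']:
        artist_ids.add(item['track']['artists'][0]['id'])
    for artist_id in artist_ids:
        top = sp.artist_top_tracks(artist_id)['tracks'][:copy_num]
        track_ids = [t['id'] for t in top]
        if track_ids:
            sp.user_playlist_add_tracks(username, dest_uri, track_ids)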
default="none", help= "Get a current list of feeds available on cymon. Provide filename to save to", metavar="listname") parser.add_option("--max", "--max", dest="s_max", default="none", help="Max number of IPs to be returned", metavar="max") (options, args) = parser.parse_args() if len(sys.argv[1:]) == 0: parser.print_help() if (options.s_category is not "none"): if (options.s_max is not "none"): scanurl = str(options.s_max) category = str( options.s_category ) #Categories that can be queried: malware, botnet,spam,phishing,malicious activity, blacklist, and dnsbl apiurl = url + "/api/nexus/v2/blacklist/ip/" + category + "/?days=1" + "&limit=" + scanurl all_json = send_request(apiurl, category + scanurl, headers) see_ips(all_json) else: scanurl = options.s_category apiurl = url + "/api/nexus/v2/blacklist/ip/" + scanurl all_json = send_request(apiurl, scanurl, headers) see_ips(all_json)
def main():
    qp = {}

    def add_qp(option, opt_str, value, parser):
        if option.dest == 'query':
            try:
                (p, v) = value.split('=', 1)
                qp[p] = v

            except ValueError:
                raise optparse.OptionValueError("%s expects parameter=value"
                                                % opt_str)

        else:
            qp[option.dest] = value

    parser = optparse.OptionParser(
        usage="Usage: %prog [-h|--help] [OPTIONS] -o file",
        version="%prog " + VERSION,
        add_help_option=False)

    parser.set_defaults(
        url="http://geofon.gfz-potsdam.de/eidaws/routing/1/",
        timeout=600,
        retries=10,
        retry_wait=60,
        threads=5)

    parser.add_option("-h", "--help", action="store_true", default=False,
                      help="show help message and exit")

    parser.add_option("-l", "--longhelp", action="store_true", default=False,
                      help="show extended help message and exit")

    parser.add_option("-v", "--verbose", action="store_true", default=False,
                      help="verbose mode")

    parser.add_option("-u", "--url", type="string",
                      help="URL of routing service (default %default)")

    parser.add_option("-y", "--service", type="string", action="callback",
                      callback=add_qp,
                      help="target service (default dataselect)")

    parser.add_option("-N", "--network", type="string", action="callback",
                      callback=add_qp,
                      help="network code or pattern")

    parser.add_option("-S", "--station", type="string", action="callback",
                      callback=add_qp,
                      help="station code or pattern")

    parser.add_option("-L", "--location", type="string", action="callback",
                      callback=add_qp,
                      help="location code or pattern")

    parser.add_option("-C", "--channel", type="string", action="callback",
                      callback=add_qp,
                      help="channel code or pattern")

    parser.add_option("-s", "--starttime", type="string", action="callback",
                      callback=add_qp,
                      help="start time")

    parser.add_option("-e", "--endtime", type="string", action="callback",
                      callback=add_qp,
                      help="end time")

    parser.add_option("-q", "--query", type="string", action="callback",
                      callback=add_qp, metavar="PARAMETER=VALUE",
                      help="additional query parameter")

    parser.add_option("-t", "--timeout", type="int",
                      help="request timeout in seconds (default %default)")

    parser.add_option("-r", "--retries", type="int",
                      help="number of retries (default %default)")

    parser.add_option("-w", "--retry-wait", type="int",
                      help="seconds to wait before each retry "
                           "(default %default)")

    parser.add_option("-n", "--threads", type="int",
                      help="maximum number of download threads "
                           "(default %default)")

    parser.add_option("-c", "--credentials-file", type="string",
                      help="URL,user,password file (CSV format) for queryauth")

    parser.add_option("-a", "--auth-file", type="string",
                      help="file that contains the auth token")

    parser.add_option("-p", "--post-file", type="string",
                      help="request file in FDSNWS POST format")

    parser.add_option("-f", "--arclink-file", type="string",
                      help="request file in ArcLink format")

    parser.add_option("-b", "--breqfast-file", type="string",
                      help="request file in breq_fast format")

    parser.add_option("-o", "--output-file", type="string",
                      help="file where downloaded data is written")

    parser.add_option("-z", "--no-citation", action="store_true",
                      default=False,
                      help="suppress network citation info")

    (options, args) = parser.parse_args()

    if options.help:
        print(__doc__.split("Usage Examples", 1)[0], end="")
        parser.print_help()
        return 0

    if options.longhelp:
        print(__doc__)
        parser.print_help()
        return 0

    if args or not options.output_file:
        parser.print_usage()
        return 1

    if bool(options.post_file) + bool(options.arclink_file) + \
            bool(options.breqfast_file) > 1:
        msg("only one of (--post-file, --arclink-file, --breqfast-file) "
            "can be used")
        return 1

    try:
        cred = {}
        authdata = None
        postdata = None

        if options.credentials_file:
            with open(options.credentials_file) as fd:
                try:
                    for (url, user, passwd) in csv.reader(fd):
                        cred[url] = (user, passwd)

                except (ValueError, csv.Error):
                    raise Error("error parsing %s" % options.credentials_file)

        if options.auth_file:
            with open(options.auth_file) as fd:
                authdata = fd.read()

        if options.post_file:
            with open(options.post_file) as fd:
                postdata = fd.read()

        else:
            parser = None

            if options.arclink_file:
                parser = ArclinkParser()
                parser.parse(options.arclink_file)

            elif options.breqfast_file:
                parser = BreqParser()
                parser.parse(options.breqfast_file)

            if parser is not None:
                if parser.failstr:
                    msg(parser.failstr)
                    return 1

                postdata = parser.postdata

        url = RoutingURL(urlparse.urlparse(options.url), qp)
        dest = open(options.output_file, 'wb')

        nets = route(url, cred, authdata, postdata, dest, options.timeout,
                     options.retries, options.retry_wait, options.threads,
                     options.verbose)

        if nets and not options.no_citation:
            msg("retrieving network citation info", options.verbose)
            get_citation(nets, options)

        else:
            msg("", options.verbose)

        msg("In case of problems with your request, please use the contact "
            "form at\n\n"
            "    http://www.orfeus-eu.org/organization/contact/form/\n",
            options.verbose)

    except (IOError, Error) as e:
        msg(str(e))
        return 1

    return 0
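# For reference: the postdata produced by ArclinkParser/BreqParser above is in
# FDSNWS POST format, one channel request per line --
#
#   NET STA LOC CHA STARTTIME ENDTIME
#   GE APE -- BHZ 2015-01-01T00:00:00Z 2015-01-02T00:00:00Z
#
# where '--' denotes an empty location code. (Example values are illustrative
# only.)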
def main():
    parser = argparse.ArgumentParser(
        formatter_class=argparse.RawTextHelpFormatter,
        description='Downloads one or more Flickr photo sets.\n'
        '\n'
        'To use it you need to get your own Flickr API key here:\n'
        'https://www.flickr.com/services/api/misc.api_keys.html\n'
        '\n'
        'For more information see:\n'
        'https://github.com/beaufour/flickr-download\n'
        '\n'
        'You can store argument defaults in ' + CONFIG_FILE + '. API keys for example:\n'
        '  api_key: .....\n'
        '  api_secret: ...\n',
        epilog='examples:\n'
        '  list all sets for a user:\n'
        '  > {app} -k <api_key> -s <api_secret> -l beaufour\n'
        '\n'
        '  download a given set:\n'
        '  > {app} -k <api_key> -s <api_secret> -d 72157622764287329\n'
        '\n'
        '  download a given set, keeping duplicate names:\n'
        '  > {app} -k <api_key> -s <api_secret> -d 72157622764287329 -n title_increment\n'
        .format(app=sys.argv[0])
    )
    parser.add_argument('-k', '--api_key', type=str,
                        help='Flickr API key')
    parser.add_argument('-s', '--api_secret', type=str,
                        help='Flickr API secret')
    parser.add_argument('-t', '--user_auth', action='store_true',
                        help='Enable user authentication')
    parser.add_argument('-l', '--list', type=str, metavar='USER',
                        help='List photosets for a user')
    parser.add_argument('-d', '--download', type=str, metavar='SET_ID',
                        help='Download the given set')
    parser.add_argument('-p', '--download_user_photos', type=str,
                        metavar='USERNAME',
                        help='Download all photos for a given user')
    parser.add_argument('-u', '--download_user', type=str, metavar='USERNAME',
                        help='Download all sets for a given user')
    parser.add_argument('-i', '--download_photo', type=str, metavar='PHOTO_ID',
                        help='Download one specific photo')
    parser.add_argument('-q', '--quality', type=str, metavar='SIZE_LABEL',
                        default=None, help='Quality of the picture')
    parser.add_argument('-n', '--naming', type=str, metavar='NAMING_MODE',
                        help='Photo naming mode')
    parser.add_argument('-m', '--list_naming', action='store_true',
                        help='List naming modes')
    parser.add_argument('-o', '--skip_download', action='store_true',
                        help='Skip the actual download of the photo')
    parser.add_argument('-j', '--save_json', action='store_true',
                        help='Save photo info like description and tags, '
                             'one .json file per photo')
    parser.set_defaults(**_load_defaults())

    args = parser.parse_args()

    if args.list_naming:
        print(get_filename_handler_help())
        return 1

    if not args.api_key or not args.api_secret:
        print('You need to pass in both "api_key" and "api_secret" arguments',
              file=sys.stderr)
        return 1

    ret = _init(args.api_key, args.api_secret, args.user_auth)
    if not ret:
        return 1

    # Replace stdout with a non-strict writer that replaces unknown characters
    # instead of throwing an exception. This "fixes" print issues on the
    # standard Windows terminal, and when there is no terminal at all.
    if sys.stdout.isatty():
        default_encoding = sys.stdout.encoding
    else:
        default_encoding = locale.getpreferredencoding()

    if default_encoding != 'utf-8':
        sys.stdout = codecs.getwriter(default_encoding)(sys.stdout, 'replace')

    if args.list:
        print_sets(args.list)
        return 0

    if args.skip_download:
        print('Will skip actual downloading of files')

    if args.save_json:
        print('Will save photo info in .json file with same basename as photo')

    if args.download or args.download_user or args.download_user_photos \
            or args.download_photo:
        try:
            with Timer('total run'):
                get_filename = get_filename_handler(args.naming)
                if args.download:
                    download_set(args.download, get_filename, args.quality,
                                 args.skip_download, args.save_json)
                elif args.download_user:
                    download_user(args.download_user, get_filename,
                                  args.quality, args.skip_download,
                                  args.save_json)
                elif args.download_photo:
                    download_photo(args.download_photo, get_filename,
                                   args.quality, args.skip_download,
                                   args.save_json)
                else:
                    download_user_photos(args.download_user_photos,
                                         get_filename, args.quality,
                                         args.skip_download, args.save_json)
        except KeyboardInterrupt:
            print('Forcefully aborting. Last photo download might be partial :(',
                  file=sys.stderr)
        return 0

    print('ERROR: Must pass either --list or --download\n', file=sys.stderr)
    parser.print_help()
    return 1
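# --- Hedged sketch (assumption): Timer is used above as a context manager
# ("with Timer('total run'):") but is not defined in this snippet. A minimal
# version that reports elapsed wall-clock time on exit; the project's real
# Timer may format its output differently.
import time

class TimerSketch(object):
    def __init__(self, label):
        self.label = label

    def __enter__(self):
        self.start = time.time()
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        print('{0} took {1:.2f}s'.format(self.label, time.time() - self.start))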
    ignore_unknown_config_file_keys=True)
parser.add("-c", "--config", required=False, is_config_file=True,
           help="config file path")
parser.add("-f", "--file", env_var="BMRK_FILE", default="~/bookmarks",
           help="bookmarks file path (default: \"%(default)s\")")
parser.add("--date-format", env_var="BMRK_DATE_FORMAT",
           default="%Y-%m-%d %H:%M:%S %z",
           help="date format to employ (default: \"%(default)s\")")
parser.set_defaults(func=lambda *a: parser.print_help())

subparsers = parser.add_subparsers()

cmd_add = subparsers.add_parser("add", aliases=["a"],
                                help="add a new bookmark")
cmd_add.add_argument("url", nargs="?", type=str, help="URL to add")
cmd_add.add_argument("title", nargs="*", help="title of the bookmark")
cmd_add.add_argument("-t", "--tags", nargs="+", help="optional tags")
cmd_add.add_argument("-e", "--no-edit", action="store_true",
                     help="add the bookmark directly, without editing")
cmd_add.add_argument(
    "-n",
def main():
    signal.signal(signal.SIGINT, signal_handler)

    parser = OptionParser(usage="usage: %prog [options]")
    parser.add_option("--setup", action="store_true", dest="setup",
                      default=False, help="Setup script with Drive App")
    parser.add_option("-c", action="store", type=int, dest="creds", default=0,
                      help="Index of creds in credentials array to use (default: 0)")
    parser.add_option("-d", action="store_true", dest="d", default=False,
                      help="Delete files with names provided to -s and -r")
    parser.add_option("-l", action="store_true", dest="listen", default=False,
                      help="Enable Socket Mode: Listener")
    parser.add_option("-i", action="store", dest="ip", default="",
                      help="Enable Socket Mode: Connect")
    parser.add_option("-p", action="store", type=int, dest="port",
                      default=8000,
                      help="Port number for socket mode (default: 8000)")
    parser.add_option("-s", action="store", dest="send", default='0',
                      help="Sending channel (default: 0)")
    parser.add_option("-r", action="store", dest="recv", default='1',
                      help="Receiving channel (default: 1)")
    parser.add_option("-P", action="store", type=float, dest="poll",
                      default=0.0, help="Poll every x seconds (default: 0)")
    parser.add_option("-j", action="store", type=float, dest="jitter",
                      default=1.0,
                      help="Amount of randomness in polling (default: 1.0)")
    parser.add_option("-v", action="store_true", dest="verbose",
                      default=False, help="Enable verbose output")
    parser.add_option("--debug", action="store_true", dest="debug",
                      default=False, help="Enable debug output")

    global opts
    (opts, args) = parser.parse_args()

    if len(sys.argv[1:]) == 0:
        parser.print_help()
        sys.exit()

    verbose(opts)
    verbose(args)

    if opts.setup:
        print "Launching Setup..."
        setup()
    else:
        try:
            credentials = get_credential(opts.creds)
        except Exception:
            print "Failed to get credentials at index %s!" % str(opts.creds)
            print "Run --setup to obtain credentials to add to this script."
            exit()

        try:
            verbose("Authenticating...")
            global service
            service = discovery.build('drive', 'v3', credentials=credentials)
        except Exception:
            sys.exit("Auth failure!")
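# --- Hedged sketch (assumption): once `service` is built above, the tool
# presumably reads and writes Drive files on its send/receive channels. A
# minimal sanity check that authentication worked is to list a few files; the
# fields string follows the Drive v3 API, and the function name here is
# illustrative, not part of the original script.
def list_drive_files_sketch(service, page_size=10):
    results = service.files().list(
        pageSize=page_size, fields="files(id, name)").execute()
    return results.get('files', [])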
def main(): """main function""" parser = argparse.ArgumentParser( description='Github within the Command Line') group = parser.add_mutually_exclusive_group() group.add_argument('-n', '--url', type=str, help="Get repos from the user profile's URL") group.add_argument('-r', '--recursive', type=str, help="Get the file structure from the repo link") group.add_argument( '-R', '--readme', type=str, help="Get the raw version of the repo readme from repo link") group.add_argument('-re', '--releases', type=str, help="Get the list of releases from repo link") group.add_argument('-dt', '--tarball', type=str, help="Download the tarball of the given repo") group.add_argument('-dz', '--zipball', type=str, help="Download the zipball of the given repo") group.add_argument('-op', '--openfile', type=str, help="Show the contents of the given file in a repo") group.add_argument('-f', '--followers', type=str, help="Get followers of the user") group.add_argument('-fo', '--following', type=str, help="Get people following the user") group.add_argument('-c', '--contributors', type=str, help="Get contributors of a repo") if len(sys.argv) == 1: parser.print_help() return args = parser.parse_args() # URL if args.url: name = url_parse(args.url) url = GITHUB_API + 'users/' + name + '/repos' # TREE if args.recursive: name = url_parse(args.recursive) url = GITHUB_API + 'repos/' + name + '/branches/master' response = get_req(url) jsondata = json.loads(response) sha = jsondata['commit']['commit']['tree']['sha'] url = GITHUB_API + 'repos/' + name + '/git/trees/' + sha + '?recursive=1' # README if args.readme: name = url_parse(args.readme) url = GITHUB_API + 'repos/' + name + '/readme' # RELEASES if args.releases: name = url_parse(args.releases) url = GITHUB_API + 'repos/' + name + '/releases' # TARBALL/ZIPBALL if args.tarball or args.zipball: if args.tarball: key = '/tarball/' name = url_parse(args.tarball) if args.zipball: key = '/zipball/' name = url_parse(args.zipball) url = GITHUB_API + 'repos/' + name + key + 'master' # OPEN ONE FILE if args.openfile: name = url_parse(args.openfile) position = name.find('/') user = name[:position + 1] rest = name[position + 1:] position = rest.find('/') repo = rest[:position + 1] rest = rest[position + 1:] url = GITHUB_API + 'repos/' + user + repo + 'contents/' + rest # GET RESPONSES # TARBALL/ZIPBALL if args.tarball or args.zipball: response_url = geturl_req(url) position = name.find('/') name = name[position + 1:] if args.tarball: name = name + '.tar.gz' if args.zipball: name = name + '.zip' print("\nDownloading " + name + '...\n') urllib.request.urlretrieve(response_url, name) print(name + ' has been saved\n') return # FOLLOWERS if args.followers: name = url_parse(args.followers) url = GITHUB_API + 'users/' + name + '/followers' #FOLLOWING if args.following: name = url_parse(args.following) url = GITHUB_API + 'users/' + name + '/following' #CONTRIBUTORS if args.contributors: name = url_parse(args.contributors) url = GITHUB_API + 'repos/' + name + '/contributors' # OTHER OPTIONS response = get_req(url) jsondata = json.loads(response) # USERNAME and URL if args.url: table = PrettyTable([" Repository ", "★ Star"]) table.align[" Repository "] = "l" for i in jsondata: table.add_row([i['name'], i['stargazers_count']]) print(table) # RECURSIVE TREE if args.recursive: table = PrettyTable([" File/Folder ", " Size (Bytes) "]) table.align[" File/Folder "] = "l" for i in jsondata['tree']: size = '-' path = i['path'] + '/' if i['type'] == 'blob': size = i['size'] path = path[:-1] 
table.add_row([path, size]) print(table) # README if args.readme: print(base64.b64decode(jsondata['content']).decode('utf-8')) # RELEASES if args.releases: table = PrettyTable( [" Release name ", " Release Date ", " Release Time "]) for i in jsondata: time = str(dateutil.parser.parse(i['published_at'])) date = time[:10] time = time[11:] time = time[:5] time = time + ' UTC' table.add_row([i['tag_name'], date, time]) print(table) # OPEN ONE FILE if args.openfile: try: print(base64.b64decode(jsondata['content']).decode('utf-8')) return except: print( "\nDirectory URL was given, hence its contents will be displayed\n" ) table = PrettyTable(["Folder Contents"]) for i in jsondata: table.add_row([i['name']]) print(table) # GET FOLLOWERS if args.followers: table = PrettyTable([" FOLLOWERS "]) table.align[" FOLLOWERS "] = "l" for i in jsondata: table.add_row([i['login']]) print("Number of followers:" + str(len(jsondata))) print(table) # GET FOLLOWING if args.following: table = PrettyTable([" FOLLOWING "]) table.align[" FOLLOWING "] = "l" for i in jsondata: table.add_row([i['login']]) print("Number of following:" + str(len(jsondata))) print(table) # GET CONTRIBUTORS if args.contributors: table = PrettyTable([" CONTRIBUTORS "]) table.align[" CONTRIBUTORS "] = "l" for i in jsondata: table.add_row([i['login']]) print("Number of contributors:" + str(len(jsondata))) print(table)
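# --- Hedged sketch (assumption): get_req() and geturl_req() are helpers
# defined elsewhere in this tool. Plausible minimal versions using
# urllib.request, which the tool already uses for urlretrieve; the real
# helpers may add GitHub auth headers or error handling.
import urllib.request

def get_req_sketch(url):
    """Return the body of a GET request as text."""
    with urllib.request.urlopen(url) as resp:
        return resp.read().decode('utf-8')

def geturl_req_sketch(url):
    """Follow redirects and return the final URL (used for tarball links)."""
    with urllib.request.urlopen(url) as resp:
        return resp.geturl()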
def main_help(args):
    parser.print_help(sys.stderr)
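# --- Hedged sketch (assumption): main_help() reads like the fallback handler
# of a subcommand-style CLI in which each subparser registers its handler via
# set_defaults(func=...). Illustrative wiring only; the real parser setup is
# not shown in this snippet, and the names below are hypothetical.
import argparse
import sys

def _build_parser_sketch():
    p = argparse.ArgumentParser(prog='tool')  # hypothetical program name
    p.set_defaults(func=lambda args: p.print_help(sys.stderr))
    sub = p.add_subparsers()
    cmd = sub.add_parser('run', help='hypothetical subcommand')
    cmd.set_defaults(func=lambda args: print('running'))
    return p

# usage: args = _build_parser_sketch().parse_args(); args.func(args)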
def main():
    parser = argparse.ArgumentParser()
    parser.add_argument('-L', '--list', help='list all flow fields',
                        action='store_true')
    parser.add_argument('-i', '--interface',
                        help='interface to connect to (ex. "tcp://host:1234")')
    parser.add_argument('-f', '--field', nargs="+",
                        help='IPFix field names to capture from "--list"')
    parser.add_argument('-p', '--period', type=int,
                        help='reporting period in seconds')
    parser.add_argument('-m', '--method', choices=['max', 'min'],
                        help='sorting methods')
    parser.add_argument('-s', '--sortby', choices=['bytes', 'packets'],
                        help='sorting field; considered only when "--method" '
                             'is provided')
    parser.add_argument('-c', '--count', type=int,
                        help='max number of entries to report')
    parser.add_argument('-b', '--heartbeat', default=Query.heartbeat,
                        type=int,
                        help='heartbeat interval in seconds (default: %(default)s)')
    parser.add_argument('--oldest',
                        help='ignore any records older than this in seconds from now')
    parser.add_argument('--newest',
                        help='ignore any records newer than this in seconds from now')
    parser.add_argument('--step', type=int, help='time step in seconds')

    args = parser.parse_args()

    header = " {0:>6s} {1:>24s} {2:<28s} {3:s}"

    if args.list:
        print "Field types (%d):" % (len(names.nameset))
        if len(names.nameset) > 0:
            print header.format('ID', 'NetFlow', 'IPFix', 'Description')
            print '-' * 80
            for fid, nf, fix, desc in names.nameset:
                print header.format(fid, nf, fix, desc)
        return

    if args.field or args.oldest or args.newest:
        fids = {}
        if args.field:
            print "Capturing fields:"
            for fl in args.field:
                idx = fl.find('=')
                if idx > 0:
                    nm = fl[:idx]
                    val = fl[idx + 1:]
                    vals = val.split(',')
                    if len(vals) > 1:
                        val = vals
                    fset = names.namesmap.get(nm, None)
                else:
                    fset = names.namesmap.get(fl, None)
                    val = '*'
                if not fset:
                    print "don't know what to do with '%s' field" % (fl)
                    return
                print header.format(*fset)
                fids[fset[0]] = val
            print

        if not args.interface:
            print "interface is not provided"
            return

        if args.method and not args.sortby:
            print "don't know how to sort with '%s'; --sortby is not provided" % (args.method)
            return

        if args.oldest or args.newest:
            tm = (args.oldest, args.newest, args.step if args.step else None)
        else:
            tm = None

        if args.period:
            if args.oldest or args.newest:
                print "both period and range specified; ignoring period"
            else:
                tm = args.period

        process(args.interface, tm, args.method, args.sortby, args.count,
                args.heartbeat, fids)
        return

    parser.print_help()
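# Illustrative invocations (script name, endpoint, and field name are
# hypothetical; any valid IPFix field from "--list" would do):
#
#   list all known flow fields:
#     python flowtool.py --list
#
#   report the top 10 talkers by bytes every 30 seconds:
#     python flowtool.py -i tcp://collector:1234 -f octetDeltaCount \
#         -p 30 -m max -s bytes -c 10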