def run():
    """Command-line entry point: scan a newsgroup on the selected server.

    Parses CLI options, configures logging, resolves the target server
    (a numeric -s value is treated as a Server primary key), then runs
    the Scanner over the group named by the first positional argument.
    """
    import optparse
    parser = optparse.OptionParser()
    # Bug fix: the default must be a string — the value is later probed
    # with .isdigit(), which raised AttributeError on the old int default 1.
    parser.add_option("-s", "--server", type="str", default="1")
    parser.add_option("-b", "--batch", type="int", default=10)
    parser.add_option("-c", "--max-connections", type="int", default=3)
    parser.add_option("-d", "--days", type="int", default=7)
    parser.add_option("-l", "--loglevel", default="info")
    options, args = parser.parse_args()
    if not args:
        # parser.error() raises SystemExit, so no return is needed here
        # (the old "return 1" after it was unreachable).
        parser.error("specify a group")
    logging.basicConfig(level=getattr(logging, options.loglevel.upper()))
    if options.server.isdigit():
        # Numeric -s selects a Server row by primary key.
        server = Server.objects.get(pk=int(options.server))
    else:
        server = options.server
    logging.info("[+] using server %s" % (server,))
    scanner = Scanner(server, options.max_connections)
    scanner.start()
    scanner.index(args[0], days=options.days, batch=options.batch)
    scanner.run()
def main():
    """Entry point: load the config file and listen for pulse build messages.

    Parses -c/--config, loads it with ConfigParser, then subscribes a
    BuildConsumer to 'build.#.finished' and blocks in listen().
    """
    parser = OptionParser()
    parser.add_option("-c", "--config", dest="config",
                      help="Configuration file")
    options = parser.parse_args()[0]
    # Bug fix: validate the option before reading the file; previously
    # config.read(None) ran first, so a missing -c failed confusingly.
    if not options.config:
        parser.error('Configuration file is required')
    config = ConfigParser()
    try:
        config.read(options.config)
    except Exception:  # narrowed from a bare except
        parser.error("Could not open configuration file")

    def got_message(*args, **kwargs):
        # Bind the parsed config into the pulse callback.
        receive_message(config, *args, **kwargs)

    verbosity = {True: log.DEBUG, False: log.WARN}
    log.basicConfig(
        format='%(asctime)s %(message)s',
        level=verbosity[config.getboolean('shipit-notifier', 'verbose')]
    )
    # Adjust applabel when wanting to run shipit on multiple machines
    pulse = consumers.BuildConsumer(applabel='shipit-notifier', ssl=False)
    pulse.configure(topic='build.#.finished', durable=True,
                    callback=got_message)
    log.info('listening for pulse messages')
    pulse.listen()
def main():
    """CLI entry point for displaying aaptos voltage/current.

    In --live mode, streams from the SOAP server; otherwise --from/--to
    select a period to read back from the database.
    """
    # options handling
    usage = """%prog [options]"""
    description = """A simple script to display voltage/current from aaptos devices. Support for both live stream (from the SOAP server) or database inspection."""
    parser = OptionParser(usage=usage, add_help_option=True,
                          description=description)
    parser.add_option("-l", "--live", action="store_true", dest="live",
                      default=False,
                      help="use the live stream from the SOAP server")
    parser.add_option("-f", "--from", action="store", type="string",
                      dest="beginning",
                      help="beginning of the period to plot, in ISO 8601 format, YYYY-MM-DDTHH:MM:SS[.mmmmmm][+HH:MM]")
    parser.add_option("-t", "--to", action="store", type="string", dest="end",
                      help="end of the period to plot, in ISO 8601 format, YYYY-MM-DDTHH:MM:SS[.mmmmmm][+HH:MM]")
    parser.add_option("-b", "--buffer", action="store", type="int",
                      dest="bufferdepth", default=500,
                      help="in live mode, depth of the value buffer. When exceeded, first values will be dropped from the display")
    parser.add_option("-p", "--poll", action="store", type="int",
                      dest="pollingTime", default=AaptosSettings.PoolDelay,
                      help="polling time in seconds")
    (options, args) = parser.parse_args()
    if options.live:
        if options.beginning is not None or options.end is not None:
            parser.error("options --from and --to are incompatible with --live")
        main_live(options.bufferdepth, options.pollingTime)
    else:
        if options.beginning is None or options.end is None:
            parser.error("options --from and --to are both mandatory to access the database")
        try:
            initialTime = dateutil.parser.parse(options.beginning)
        except ValueError:
            parser.error("--from: unknown string format")
        try:
            finalTime = dateutil.parser.parse(options.end)
        except ValueError:
            # Bug fix: this message previously blamed --from.
            parser.error("--to: unknown string format")
        main_db(initialTime, finalTime)
def add_options(cls, parser):
    """Extend the inherited option set with the token-decryption key file."""
    super(OTPTokenImport, cls).add_options(parser)
    parser.add_option(
        "-k",
        "--keyfile",
        dest="keyfile",
        help="File containing the key used to decrypt token secrets",
    )
def options(self, parser, env):
    """Register commandline options.
    """
    help_text = "Enable collect-query: %s [COLLECT_QUERY]" % (self.help())
    parser.add_option(
        '--collect-query',
        action='store_true',
        dest=self.enableOpt,
        default=env.get('NOSE_COLLECT_QUERY'),
        help=help_text,
    )
def main():
    """Parse -n/--resourceNames and update the video recorder status.

    The option value is a comma-separated list; embedded spaces are
    stripped before splitting.
    """
    import optparse
    arg_parser = optparse.OptionParser('usage: %prog')
    arg_parser.add_option('-n', '--resourceNames', default="",
                          help='name of the subsystem to ping')
    parsed, _unused = arg_parser.parse_args()
    names = parsed.resourceNames.replace(' ', '').split(',')
    setVideoRecorderStatus(names)
def start(cls): print "Running Watch Folder ..." # Check whether the user define the drop folder path. # Default dop folder path: /tmp/drop parser = OptionParser() parser.add_option("-p", "--project", dest="project", help="Define the project_name.") parser.add_option("-d", "--drop_path", dest="drop_path", help="Define drop folder path") parser.add_option("-s", "--search_type", dest="search_type", help="Define search_type.") parser.add_option("-P", "--process", dest="process", help="Define process.") parser.add_option("-S", "--script_path",dest="script_path", help="Define script_path.") (options, args) = parser.parse_args() if options.project != None : project_code= options.project else: project_code= 'jobs' if options.drop_path!=None : drop_path= options.drop_path else: tmp_dir = Environment.get_tmp_dir() drop_path = "%s/drop" % tmp_dir print " using [%s]" % drop_path if not os.path.exists(drop_path): os.makedirs(drop_path) if options.search_type!=None : search_type = options.search_type else: search_type = 'jobs/media' if options.process!=None : process = options.process else: process= 'publish' if options.script_path!=None : script_path = options.script_path else: script_path="None" task = WatchDropFolderTask(base_dir=drop_path, project_code=project_code,search_type=search_type, process=process,script_path=script_path) scheduler = Scheduler.get() scheduler.add_single_task(task, delay=1) scheduler.start_thread() return scheduler
def main():
    """Parse -n/--subsystemName and report the subsystem status.

    Note: the whole parsed-options object is handed to
    setSubsystemStatus, not just the subsystemName value.
    """
    import optparse
    cli = optparse.OptionParser('usage: %prog')
    cli.add_option('-n', '--subsystemName', default="",
                   help='name of the subsystem to ping')
    parsed_opts, _unused = cli.parse_args()
    setSubsystemStatus(parsed_opts)
def add_options(self, parser, env=os.environ):
    '''Add command-line options for plugin.

    The default for --paste-setup-first is taken from the
    NOSE_PASTE_SETUP_FIRST environment variable when present.
    '''
    env_opt = 'NOSE_PASTE_SETUP_FIRST'
    parser.add_option('--paste-setup-first',
                      action='store_true',
                      # Bug fix: look up the env_opt *variable*, not the
                      # literal string 'env_opt', so the environment
                      # variable actually supplies the default.
                      default=env.get(env_opt),
                      dest='paste_setup',
                      help='Run setup-app in Paste before running tests. [%s]'
                           % env_opt)
def add_parser_options(parser):
    """Add Linter options to passed optparse parser."""
    quoted = ("'%s'" % f for f in Linter._valid_filters)
    filter_string = ", ".join(quoted)
    help_text = (
        "Specify lint filters. Valid filters are " + filter_string + ". " +
        "'all' enables all lint filters. " +
        "Multiple filters can be specified comma-delimited and " +
        "filters can be negated, e.g. 'all,-component'.")
    parser.add_option("-n", "--lint", dest="lint", action="append",
                      type="string", help=help_text)
def createParser():
    """Build the stock-quote CLI parser (database/symbol/offset/range)."""
    parser = OptionParser(version=VERSION, usage=USAGE)
    # (flag, keyword arguments) — registration order matters for --help.
    specs = (
        ('--database', dict(dest='database', default="stock_quotes.db",
                            help='Database path')),
        ('--symbol', dict(dest='symbol', help='Stock symbol')),
        ('--offset', dict(type='int', help='offset', dest='offset')),
        ('--range', dict(type='string',
                         help='time range as iso YYYY-MM-DD:YYYY-MM-DD',
                         dest='range')),
    )
    for flag, kwargs in specs:
        parser.add_option(flag, **kwargs)
    return parser
def main(argv):
    """Fetch a YouTube user's uploads and show and/or plot statistics.

    argv is the full argument vector (argv[0] is the program name;
    argv[1] must be the YouTube username).
    """
    parser = OptionParser(usage='%prog [options] <youtube_username>')
    parser.add_option("--show", dest="show", action="store_true",
                      default=False, help="show the feed entries")
    parser.add_option("--output", dest="output", default="-",
                      help="save output to the FILE")
    parser.add_option("--plot", dest="plot", action="store_true",
                      default=False, help="plot chart with statistics")
    parser.add_option("--plot-file", dest="plot_file",
                      help="plot chart to the FILE")
    (opts, args) = parser.parse_args(argv)
    # Bug fix: args[0] is the program name and args[1] the username, so
    # exactly two entries are required; the old check (len(args) == 1)
    # silently accepted extra arguments.
    if len(args) != 2:
        parser.error("incorrect number of arguments")
    if not opts.show and not opts.plot:
        parser.error("no action asked (--show or --plot)")
    username = args[1]
    data = get_user_uploads(username)
    if opts.show:
        print_video_feed(username, data, opts.output)
    if opts.plot:
        if opts.plot_file:
            outfile = opts.plot_file
        else:
            outfile = "%s.png" % username
        plot_video_stat(username, data, outfile)
def main(): from optparse import OptionParser parser = OptionParser() parser.add_option('-c', '--connnection', dest='connection_string', type='string', help='SQL connection URI such as sqlite:///full/path/to/database.db') parser.add_option('-s', '--search', dest='search', type='string', help='Archive search results such as #foo') parser.add_option('-u', '--user', dest='user', type='string', help='Archive user timeline') parser.add_option('--sql', action='store_true', help='Print backend-specific SQL commands to create database tables') (options, args) = parser.parse_args() if not options.connection_string: parser.print_usage() print 'Try the --help option' sys.exit(1) connect_sql(options.connection_string) if options.sql: print_sql() sys.exit(0) twitter_search = connect_twitter() # process command line if options.search: print '*** Archiving search: %s' % options.search sa = SearchArchiver(options.search, twitter_search) archive_loop(sa) if options.user: for user in options.user.split(','): print '*** Archiving user timelime: %s' % user ta = TimelineArchiver(user, twitter_search) archive_loop(ta) print ''
def main(): parser = OptionParser() parser.add_option('-m', '--method', dest='method', default='socket', help='define method of interaction with clamav') parser.add_option('-s', '--socket', dest='socket', default='/tmp/clamd.socket', help='clamav unix socket to use') opts, args = parser.parse_args() if len(args) < 1: parser.print_help() exit(-1) filename = args[0] scan = virusscan(filename, method=opts.method, socket=opts.socket) for k, v in scan.items(): print k + ': ' + v
def main():
    """Validate --storage/--output and generate busy plots."""
    parser = OptionParser()
    parser.add_option("--storage",
                      help="Local directory containing YAML snapshots.")
    parser.add_option("--output",
                      help="Local directory to store produced plots.")
    options, _args = parser.parse_args()
    # Both switches are mandatory; bail out with a hint if one is missing.
    for required in ('output', 'storage'):
        if getattr(options, required) is None:
            print('Please provide "--%s" switch on the command line.'
                  % required)
            sys.exit(1)
    busyplot(options.storage, options.output)
def parse_opts(sys_args):
    """Build the OPC CLI parser and parse *sys_args*.

    Returns the (options, args) pair produced by optparse.
    """
    usage_text = "\n%prog [options]\n%prog --help\n"
    option_parser = OptionParser(usage_text)
    option_parser.add_option(
        "--opcFile",
        dest="opcSummaryFile",
        default="",
        help="Path to data file. (default: %default)")
    option_parser.add_option(
        "--opcDir",
        dest="opcDataDir",
        default="../rawdata2/cfeis01nas41_analytics",
        help="Path to OPC Data Collection Directory for only One Storage. (default: %default)")
    return option_parser.parse_args(sys_args)
def main():
    """Process a time-series JSON dump and write the result to a file.

    Usage: prog [-d DATASET] INPUT_PATH OUTPUT_FILE
    """
    parser = OptionParser()
    parser.add_option("-d", "--dataset", dest="dataset", type="string",
                      help="dataset name", default="")
    (c_options, args) = parser.parse_args()
    # Robustness fix: fail with a usage message instead of an IndexError
    # when the positional arguments are missing.
    if len(args) < 2:
        parser.error("expected arguments: <input_path> <output_file>")
    input_path = args[0]
    output_file = args[1]
    dataset_name = c_options.dataset
    test = ProcessTimeSeries()
    tables = test.load_json(input_path)
    test.write_result_to_file(output_file, test.processs(tables, dataset_name))
def get_parser():
    """
    Creates and returns Parser object

    :return: optparse.OptionParser
    """
    parser = OptionParser(
        description="Situation Data App Example",
        conflict_handler="resolve")
    parser.add_option('-?', action='callback', callback=print_help,
                      help=SUPPRESS_HELP)

    req_group = OptionGroup(parser, "Required Parameters")
    req_group.add_option('-t', '--type',
                         help='Request Type Query - qry, Subscription = sub )',
                         metavar='type', dest='REQUEST_TYPE', default='sub')
    req_group.add_option('-d', '--data',
                         help='Data Type (Vehicle, Intersection, Aggregate, Advisory)',
                         metavar='data', dest='DATA', default='veh')
    req_group.add_option('-h', '--host', help='Hostname', metavar='host',
                         dest='HOST', default='localhost:8080/ode')
    parser.add_option_group(req_group)

    group = OptionGroup(parser, "Optional Parameters")
    # Bug fix: the implicit string concatenation previously ran the words
    # together ("...used tovalidaate...") and misspelled "validate".
    group.add_option('-f', '--file',
                     help='Full Path to a File containing JSON output that will be used to '
                          'validate ODE output instead of default JSON files',
                     metavar='file', dest='VALIDATION_FILE', default=None)
    # Bug fix: same missing-space problem ("...Settings.Config file...").
    group.add_option('-c', '--config',
                     help='Full path to config file File that can be used to override all Settings. '
                          'Config file will override all command line parameters',
                     metavar='config_file', dest='CONFIG_FILE', default=None)
    parser.add_option_group(group)
    return parser
def main():
    """Construct an SVNBlog from the blog/config/repository/revision flags."""
    parser = OptionParser()
    # TODO: mandatory options
    option_specs = (
        ('-b', '--blog', 'blog', None, 'blog name'),
        ('-c', '--config', 'config', 'salmari.cfg', 'configuration file'),
        ('-p', '--path', 'repos', None, 'path to the svn repository'),
        ('-r', '--rev', 'rev', None, 'revision of svn commit'),
    )
    for short_flag, long_flag, dest, default, text in option_specs:
        parser.add_option(short_flag, long_flag, dest=dest,
                          default=default, help=text)
    opts, args = parser.parse_args()
    blog = SVNBlog(opts.blog, opts.config, opts.repos, opts.rev)
def main():
    """Entry point: validate config, then consume pulse build messages.

    Requires -c/--config pointing at an ini file with a [pulse] section
    containing at least 'user' and 'password'.
    """
    parser = OptionParser()
    parser.add_option("-c", "--config", dest="config",
                      help="Configuration file")
    options = parser.parse_args()[0]
    # Bug fix: validate the option before reading the file; previously
    # config.read(None) ran first, so a missing -c failed confusingly.
    if not options.config:
        parser.error('Configuration file is required')
    config = ConfigParser()
    try:
        config.read(options.config)
    except Exception:  # narrowed from a bare except
        parser.error("Could not open configuration file")

    def got_message(data, message):
        # Always ack, even when processing raises, so the message is not
        # redelivered forever.
        try:
            receive_message(config, data, message)
        finally:
            message.ack()

    if not all([config.has_section('pulse'),
                config.has_option('pulse', 'user'),
                config.has_option('pulse', 'password')]):
        log.critical('Config file must have a [pulse] section containing at '
                     'least "user" and "password" options.')
        exit(1)

    verbosity = {True: log.DEBUG, False: log.WARN}
    log.basicConfig(
        format='%(asctime)s %(message)s',
        level=verbosity[config.getboolean('shipit-notifier', 'verbose')]
    )

    pulse_cfg = pconf.PulseConfiguration.read_from_config(config)

    # Adjust applabel when wanting to run shipit on multiple machines
    pulse = consumers.BuildConsumer(applabel='shipit-notifier', connect=False)
    pulse.config = pulse_cfg
    pulse.configure(topic='build.#.finished', durable=True,
                    callback=got_message)
    log.info('listening for pulse messages')
    pulse.listen()
def cmdparameter(argv):
    """Parse command-line arguments; show usage and exit when none given.

    Returns the (options, args) pair from optparse.
    """
    global desc
    if len(argv) == 1:
        # No arguments: print the module description, then re-invoke the
        # script with -h so optparse emits the generated help text.
        print >> sys.stderr, desc
        os.system('python ' + argv[0] + ' -h')
        sys.exit(1)
    usages = "%prog -i file"
    parser = OP(usage=usages)
    parser.add_option("-i", "--input-file", dest="filein",
                      metavar="FILEIN", help="Data matrix")
    parser.add_option("-n", "--data-table-name", dest="data_table_name",
                      metavar="FILEIN", help="Name for data table")
    parser.add_option("-m", "--max-string_length", dest="max_str_len",
                      type="int", default=21840,
                      help="If length of string larger than given value (default 21840), this column will be treated as <text>.")
    parser.add_option("-v", "--verbose", dest="verbose",
                      action="store_true", help="Show process information")
    parser.add_option("-D", "--debug", dest="debug", default=False,
                      action="store_true", help="Debug the program")
    options, args = parser.parse_args(argv[1:])
    assert options.filein != None, "A filename needed for -i"
    return (options, args)
def main(argv=None):
    """Scrape Columbia restaurant inspections.

    *argv* defaults to sys.argv[1:]; returns 0 on completion.
    """
    from optparse import OptionParser
    if argv is None:
        argv = sys.argv[1:]
    parser = OptionParser(usage="usage: %prog [options]")
    parser.add_option(
        "--days-prior",
        type="int",
        default=90,
        help='how many days ago to start scraping',
    )
    from ebpub.utils.script_utils import add_verbosity_options, setup_logging_from_opts
    add_verbosity_options(parser)
    options, args = parser.parse_args(argv)
    setup_logging_from_opts(options, logger)
    ColumbiaRestaurantInspScraper().update()
    return 0
def parse_options(): from optparse import OptionParser parser = OptionParser() parser.add_option("-i", "--import", dest="import_opml", help="import opml FILE", metavar="FILE") parser.add_option("-d", "--dump", dest="dump_planet", help="dump planet", action="store_true", default=False) parser.add_option( "-c", "--cache", dest="dump_planet_cache", help="dump planet's cache", action="store_true", default=False ) (options, args) = parser.parse_args() if len(args) >= 1: global planets planets.extend(args) if options.dump_planet_cache: for p in planets: curr = Planet(direc=p) print curr.dump_cache() if options.dump_planet: for p in planets: curr = Planet(direc=p) print curr.dump() if options.import_opml: for p in planets: curr = Planet(direc=p) curr.import_opml(options.import_opml)
def getParser(cls, parser=accept.Test.getParser(), usage=None):
    """Append slice-creation options to the shared acceptance-test parser."""
    # (flag, type, default, dest, help) — keeps registration order stable.
    specs = (
        ("--max-createsliver-time", 'int', MAX_TIME_TO_CREATESLIVER,
         'max_time',
         "Max number of seconds will attempt to check status of a sliver before failing [default: %default]"),
        ("--num-slices", 'int', NUM_SLICES, 'num_slices',
         "Number of slices to create [default: %default]"),
        ("--slice-name", 'string', DEFAULT_SLICE_NAME, 'slice_name',
         "Use slice name as base of slice name [default: %default]"),
    )
    for flag, opt_type, default, dest, text in specs:
        parser.add_option(flag, action="store", type=opt_type,
                          default=default, dest=dest, help=text)
    return parser
def get_arguments():
    """Parse and validate -i/-l/-u; exits via parser.error when one is missing."""
    parser = optparse.OptionParser()
    parser.add_option("-i", "--index", dest="index",
                      help="Index to be monitored.")
    parser.add_option("-l", "--lookup", dest="lookup",
                      help="Lookup index for the last x minutes document timestamp.")
    parser.add_option("-u", "--url", dest="url",
                      help="Elasticsearch server URL")
    options, _args = parser.parse_args()
    # Each option is mandatory; report the first one that is absent.
    checks = (
        ('index',
         "[-] Please specify the index name --index | -i , use --help for more info."),
        ('lookup',
         "[-] Lookup threshold in minutes | -l , use --help for more info."),
        ('url',
         "[-] Please specify the elasticsearch cluster URL --url | -u , use --help for more info."),
    )
    for attr, message in checks:
        if not getattr(options, attr):
            parser.error(message)
    return options
def parse_args():
    """
    Parse command line options into variables
    """
    parser = optparse.OptionParser(usage="Usage: %prog [options]")
    parser.add_option("--film-urls", dest="urls", type="string",
                      help="Film URLs to pick from, separated by commas")
    parser.add_option("--film-urls-file-path", dest="filepath",
                      type="string",
                      help="File that contains film URLs to pick from")
    parser.add_option("--optimize-for", dest="optimize_for", type="choice",
                      choices=["day", "evening"], default="evening",
                      help="Option for optimizing for day or evening times")
    parsed_options, _leftover = parser.parse_args()
    return parsed_options
def main(args):
    """Daemon control entry point: stop|start|restart|foreground.

    Returns 0 on success, 2 on usage errors.
    """
    global CONF, SECT
    parser = OptionParser(usage=usage)
    parser.add_option("-c", "--config", dest="config",
                      default='config/config.ini',
                      help="Location of config file")
    parser.add_option("-s", "--section", dest="section", default='ipyno',
                      help="Config section to use")
    # Bug fix: this option previously reused the long flag "--section",
    # making optparse raise OptionConflictError on every invocation.
    parser.add_option("-t", "--test", dest="test", default='status',
                      help="test a daemon function: delete|cleanup|boot|status")
    (opts, args) = parser.parse_args()
    if not ((len(args) == 1) and
            (args[0] in ['stop', 'start', 'restart', 'foreground'])):
        sys.stderr.write(usage)
        return 2
    config = get_config(opts.config)
    daemon = IpyDaemon(config=config, section=opts.section)
    if 'start' == args[0]:
        daemon.start()
    elif 'stop' == args[0]:
        daemon.stop()
    elif 'restart' == args[0]:
        daemon.restart()
    elif 'foreground' == args[0]:
        daemon.foreground(now=opts.test)
    else:
        sys.stderr.write("[error] unknown command '%s'\n" % args[0])
        return 2
    return 0
def parse_options():
    """Populate module-level connection settings from the command line.

    Sets config_path (defaulting to ./sync.yaml), username, password and
    enableFileLog as globals.
    """
    global config_path, username, password, enableFileLog
    enableFileLog = False
    parser = OptionParser()
    parser.add_option('-c', '--config_path', dest='config_path', type=str,
                      help="Configuration files path")
    parser.add_option('-u', '--username', dest='username', type=str,
                      help="username")
    parser.add_option('-p', '--password', dest='password', type=str,
                      help="password")
    parser.add_option('-l', '--log', dest='log', type=int,
                      help="enable log")
    opts, _args = parser.parse_args()
    # Fall back to the bundled sync.yaml when no config path is given.
    config_path = opts.config_path if opts.config_path else "./sync.yaml"
    if opts.username:
        username = opts.username
    if opts.password:
        password = opts.password
    if opts.log:
        enableFileLog = True
def main():
    """Command-line tool.

    Rebuilds merged directories when --force is given, when no previous
    build timestamp exists, or when raw data is newer than the last build.
    """
    import sys, itertools
    from optparse import OptionParser
    parser = OptionParser(usage=__usage__, version="%prog " + __version__)
    parser.add_option("-f", "--force", action="store_true", dest="force",
                      default=False,
                      help="Force overwrite of merged directories ignoring last build timestamp")
    parser.add_option("-v", "--verbose", action="store_true", dest="verbose",
                      default=False,
                      help="Print INFO messages to stdout, default=%default")
    (options, args) = parser.parse_args()
    if options.verbose:
        mylogger.setLevel(logging.INFO)
    # if --force, then rebuild no matter what
    if options.force:
        do_merge_all()
        return
    # if not --force, then check last build time
    try:
        last_build = dateutil.parser.parse(open(TIMESTAMP_FILE).read())
    except (IOError, OSError, ValueError):
        # Narrowed from a bare except: only a missing/unreadable file or
        # an unparsable timestamp means "no previous build".
        mylogger.info("No last build timestamp found, rebuilding")
        do_merge_all()
        return
    raw_dirs = [os.listdir(x[0]) for x in TOMERGE]
    raw_dirs = list(itertools.chain(*raw_dirs))
    raw_dates = [datetime.datetime.strptime(s, "%y%m%d") for s in raw_dirs]
    if any(last_build < d for d in raw_dates):
        mylogger.info("Last build timestamp older than newest raw data, rebuilding")
        do_merge_all()
        return
    # last build was recent, just return
    mylogger.info("Last build timestamp newer than newest raw data, not rebuilding")
    return
def getopts():
    """Parse the logshipper CLI flags; returns (options, args).

    The --config value is normalised to an absolute path before returning.
    """
    description = 'A Logfile Parser and data shipper for logstash.'
    parser = OptionParser(version='%prog ' + __version__,
                          description=description)
    parser.add_option(
        '-f', '--foreground',
        default=False,
        action='store_true',
        help='Set this option to not Fork in Background. Default: %default')
    parser.add_option(
        '-c', '--config',
        default='/etc/logshipper/logshipper.cfg',
        help='The path to the logshipper config itself. Default: %default')
    options, args = parser.parse_args()
    options.config = os.path.abspath(options.config)
    return options, args
def get_options():
    """Generates command-line options."""
    global OPTIONS, FILENAMES
    parser = optparse.OptionParser()
    # Standard options
    parser.add_option("-b", "--use_base", action="store_true",
                      default=False, dest="use_base",
                      help="Update the Base index. Can be used with --use_solr.")
    parser.add_option("-s", "--use_solr", action="store_true",
                      default=False, dest="use_solr",
                      help="Update the Solr index. Can be used with --use_base.")
    parser.add_option("-t", "--test_mode", action="store_true",
                      default=False, dest="test_mode",
                      help="Don't process or upload the data files")
    # Google Base credentials
    gbase = parser.add_option_group("Google Base options")
    gbase.add_option("--base_ftp_user", default=pipeline_keys.BASE_FTP_USER,
                     dest="base_ftp_user", help="GBase username")
    gbase.add_option("--base_ftp_pass", default=pipeline_keys.BASE_FTP_PASS,
                     dest="base_ftp_pass", help="GBase password")
    gbase.add_option("--base_cust_id", default=pipeline_keys.BASE_CUSTOMER_ID,
                     dest="base_cust_id", help="GBase customer ID.")
    # Solr connection settings
    solr = parser.add_option_group("Solr options")
    solr.add_option("--solr_url", default=pipeline_keys.SOLR_URLS,
                    dest="solr_urls", action="append",
                    help="URL of a Solr instance to be updated. "
                         "This option may be used multiple times.")
    solr.add_option("--solr_user", default=pipeline_keys.SOLR_USER,
                    dest="solr_user", help="Solr username.")
    solr.add_option("--solr_pass", default=pipeline_keys.SOLR_PASS,
                    dest="solr_pass", help="Solr password")
    solr.add_option("--feed_providername", default=None,
                    dest="feed_providername")
    OPTIONS, FILENAMES = parser.parse_args()
def start(cls): print "Running Watch Folder ..." # Check whether the user define the drop folder path. # Default dop folder path: /tmp/drop parser = OptionParser() parser.add_option("-p", "--project", dest="project", help="Define the project_name.") parser.add_option("-d", "--drop_path", dest="drop_path", help="Define drop folder path") parser.add_option("-s", "--search_type", dest="search_type", help="Define search_type.") parser.add_option("-P", "--process", dest="process", help="Define process.") (options, args) = parser.parse_args() if options.project != None : project_code= options.project else: project_code= 'jobs' if options.drop_path!=None : drop_path= options.drop_path else: tmp_dir = Environment.get_tmp_dir() drop_path = "%s/drop" % tmp_dir print " using [%s]" % drop_path if not os.path.exists(drop_path): os.makedirs(drop_path) if options.search_type!=None : search_type = options.search_type else: search_type = 'jobs/media' if options.process!=None : process = options.process else: process= 'publish' task = WatchDropFolderTask(base_dir=drop_path, project_code=project_code,search_type=search_type, process=process) scheduler = Scheduler.get() scheduler.add_single_task(task, delay=1) scheduler.start_thread() return scheduler
def parse_options():
    """Parse -i/-o/-c; the XML input file is mandatory.

    Returns the optparse options object; prints help and exits when the
    input file is missing.
    """
    parser = OptionParser()
    parser.add_option("-i", "--input", help="XML input file")
    parser.add_option("-o", "--output", help="HTML output file",
                      default="report.html")
    parser.add_option("-c", "--config", help="Configuration file",
                      default="config")
    (options, args) = parser.parse_args()
    # Idiom fix: compare against None with `is`, not `==`.
    if options.input is None:
        print("\nERROR: Please specify an input file\n")
        parser.print_help()
        exit()
    return options
def addopts(self, parser):
    """Register the clone-source and placement options on *parser*."""
    # All three options share the same store/string/None configuration.
    common = dict(action="store", type="string", default=None)
    parser.add_option('-f', '--from-tag', dest="cluster_tag",
                      help="The name of the cluster to be cloned", **common)
    parser.add_option('-t', '--from-template', dest="template",
                      help="The name of a template to clone", **common)
    parser.add_option('-a', '--availability-zone', dest="zone",
                      help=("The availability zone into which this cluster"
                            " should be cloned"), **common)
def parse_arguments():
    """Read Harvest API credentials from the CLI into module-level globals."""
    global client_secret, client_id, tokens_file_name, harvest_host
    parser = OptionParser()
    parser.add_option("-s", "--client_secret", dest="client_secret",
                      help="client_secret")
    parser.add_option("-i", "--client_id", dest="client_id",
                      help="client_id")
    parser.add_option("-t", "--tokens_file_name", dest="tokens_file_name",
                      help="tokens_file_name")
    parser.add_option("-d", "--harvest_host", dest="harvest_host",
                      help="harvest_host")
    opts, _args = parser.parse_args()
    client_secret = opts.client_secret
    client_id = opts.client_id
    tokens_file_name = opts.tokens_file_name
    harvest_host = opts.harvest_host
def parse_options(argv):
    """Parse args and return options, start, end.

    args can be from sys.argv. start and end are datetimes.
    """
    parser = optparse.OptionParser(usage="python %prog START [END]")
    parser.add_option('-p', '--port', type=int, default=4321)
    parser.add_option('-m', '--movie', default=False, action='store_true')
    parser.add_option('-v', '--verbose', default=False, action='store_true')
    parser.add_option('--profile', default=False, action='store_true',
                      help='Run with Yappi')
    opts, positional = parser.parse_args(argv)

    # positional[0] is the program name, so 1 or 2 real arguments are OK.
    if len(positional) not in (2, 3):
        parser.error("incorrect number of arguments")

    try:
        start = hour(dateutil.parser.parse(positional[1], ignoretz=True))
    except Exception:
        parser.error("Couldn't parse start date")
    start = start.replace(tzinfo=pytz.UTC)

    if len(positional) == 3:
        try:
            end = hour(dateutil.parser.parse(positional[2], ignoretz=True))
        except Exception:
            parser.error("Couldn't parse end date")
        end = end.replace(tzinfo=pytz.UTC)
    else:
        # No END given: default to one hour after START.
        end = next_hour(start)

    if end - start < datetime.timedelta(hours=1):
        parser.error("END must be at least an hour after START")
    return opts, start, end
def main():
    """Parse CLI flags into the global Opt holder and run Category.

    Exits with status 1 when --source-directory is missing.
    """
    prog_name = os.path.basename(sys.argv[0])
    usage = prog_name + " [ <OPTIONS> ] <ARGS>"
    #
    # parse command line, result is stored to 'opts'
    #
    parser = OptionParser(usage, version="%s version 0.1" % prog_name)
    parser.add_option("-s", "--source-directory", action="store",
                      dest="source_dir", default=None,
                      help="specify input directory")
    parser.add_option("-t", "--stats", action="store_false",
                      dest="download", default=True,
                      help="specify if fits should be updated")
    parser.add_option("-u", "--user", action="store", dest="user",
                      default=None, help="user name for secure server")
    parser.add_option("-p", "--pass", action="store", dest="password",
                      default=None, help="password")
    opts, args = parser.parse_args()

    source_dir = opts.source_dir
    Opt.download = opts.download
    Opt.user = opts.user
    Opt.password = opts.password

    # check input parameters
    if not source_dir:
        sys.stderr.write("Wrong --source-directory '%s'\n" % opts.source_dir)
        sys.exit(1)

    # main logic
    Category(source_dir)
def main():
    """CLI entry point: dump aaptos voltage/current between two timestamps.

    Requires --from and --to (ISO 8601); writes the result to --output
    (default aaptos.root).
    """
    # options handling
    usage = """%prog [options]"""
    description = """A simple script to display voltage/current from aaptos devices. Support for both live stream (from the SOAP server) or database inspection."""
    parser = OptionParser(usage=usage, add_help_option=True,
                          description=description)
    parser.add_option(
        "-f", "--from", action="store", type="string", dest="beginning",
        help="beginning of the period to plot, in ISO 8601 format, YYYY-MM-DDTHH:MM:SS[.mmmmmm][+HH:MM]")
    parser.add_option(
        "-t", "--to", action="store", type="string", dest="end",
        help="end of the period to plot, in ISO 8601 format, YYYY-MM-DDTHH:MM:SS[.mmmmmm][+HH:MM]")
    parser.add_option("-o", "--output", action="store", type="string",
                      dest="filename", help="output file name",
                      default="aaptos.root")
    (options, args) = parser.parse_args()
    if options.beginning is None or options.end is None:
        parser.error(
            "options --from and --to are both mandatory to access the database"
        )
    try:
        initialTime = dateutil.parser.parse(options.beginning)
    except ValueError:
        parser.error("--from: unknown string format")
    try:
        finalTime = dateutil.parser.parse(options.end)
    except ValueError:
        # Bug fix: this message previously blamed --from.
        parser.error("--to: unknown string format")
    main_db(initialTime, finalTime, options.filename)
def main():
    """Prepare pickled datasets from CCTV images for one camera."""
    parser = OptionParser()
    parser.add_option(
        '-i', '--image_path', dest='img_path', default="./cctv_imgs",
        help='this path stores all cctv images, default="./cctv_imgs"')
    parser.add_option(
        '-d', '--datasets_path', dest='datasets_path', default="./datasets",
        help='destination path for pickled(dill) datasets, default="./datasets"')
    parser.add_option(
        '-c', '--cctvid', dest='cctvid', default="nfbCCTV-N1-N-90.01-M",
        help='preparing datasets for "nfbCCTV-N1-N-90.01-M", default="nfbCCTV-N1-N-90.01-M"')
    parser.add_option(
        '-t', '--test', dest='isTest', default=False, action="store_true",
        help='test run under 10 speed xml.gz files, default=False')
    opts, _args = parser.parse_args()
    logging.basicConfig(level=logging.INFO)
    preparing(opts.img_path, opts.cctvid, opts.datasets_path, opts.isTest)
def parse_options():
    """Read the report-generation switches from the command line.

    Prints help and exits when the mandatory XML input file is missing.
    """
    parser = OptionParser()
    for flags, kwargs in (
            (("-i", "--input"), dict(help="XML input file")),
            (("-o", "--output"), dict(help="HTML output file",
                                      default="report.html")),
            (("-c", "--config"), dict(help="Configuration file",
                                      default="config")),
    ):
        parser.add_option(*flags, **kwargs)
    options, _args = parser.parse_args()
    if options.input == None:
        print("\nERROR: Please specify an input file\n")
        parser.print_help()
        exit()
    return options
def getParser(cls, parser=accept.Test.getParser(), usage=None):
    """Add the slice-creation knobs to the shared acceptance-test parser."""
    option_table = {
        "--max-createsliver-time": dict(
            type='int', default=MAX_TIME_TO_CREATESLIVER, dest='max_time',
            help="Max number of seconds will attempt to check status of a sliver before failing [default: %default]"),
        "--num-slices": dict(
            type='int', default=NUM_SLICES, dest='num_slices',
            help="Number of slices to create [default: %default]"),
        "--slice-name": dict(
            type='string', default=DEFAULT_SLICE_NAME, dest='slice_name',
            help="Use slice name as base of slice name [default: %default]"),
    }
    for flag in ("--max-createsliver-time", "--num-slices", "--slice-name"):
        parser.add_option(flag, action="store", **option_table[flag])
    return parser
def main(args):
    """Daemon control entry point: stop|start|restart|foreground.

    Returns 0 on success, 2 on usage errors.
    """
    global CONF, SECT
    parser = OptionParser(usage=usage)
    parser.add_option("-c", "--config", dest="config",
                      default='config/config.ini',
                      help="Location of config file")
    parser.add_option("-s", "--section", dest="section", default='ipyno',
                      help="Config section to use")
    # Bug fix: this option previously reused the long flag "--section",
    # making optparse raise OptionConflictError on every invocation.
    parser.add_option(
        "-t", "--test", dest="test", default='status',
        help="test a daemon function: delete|cleanup|boot|status")
    (opts, args) = parser.parse_args()
    if not ((len(args) == 1)
            and (args[0] in ['stop', 'start', 'restart', 'foreground'])):
        sys.stderr.write(usage)
        return 2
    config = get_config(opts.config)
    daemon = IpyDaemon(config=config, section=opts.section)
    if 'start' == args[0]:
        daemon.start()
    elif 'stop' == args[0]:
        daemon.stop()
    elif 'restart' == args[0]:
        daemon.restart()
    elif 'foreground' == args[0]:
        daemon.foreground(now=opts.test)
    else:
        sys.stderr.write("[error] unknown command '%s'\n" % args[0])
        return 2
    return 0
def addopts(self, parser):
    """Register the clone-cluster command-line options on *parser*."""
    string_options = (
        ('-f', '--from-tag', 'cluster_tag',
         "The name of the cluster to be cloned"),
        ('-t', '--from-template', 'template',
         "The name of a template to clone"),
        ('-a', '--availability-zone', 'zone',
         "The availability zone into which this cluster"
         " should be cloned"),
    )
    # All three are plain optional string options with no default.
    for short_flag, long_flag, destination, help_text in string_options:
        parser.add_option(short_flag, long_flag, dest=destination,
                          action="store", type="string", default=None,
                          help=help_text)
def parse_options():
    """Read the csv-filter options from the command line.

    Returns a ``(where, columns, file)`` tuple of strings; each value
    defaults to the empty string when its option is not supplied.
    """
    opt_parser = OptionParser()
    opt_parser.add_option("-f", "--file", dest="file",
                          help="csv file to process", default="")
    opt_parser.add_option("-w", "--where", dest="where",
                          help="where cluase to filter out some fields, use simple python code",
                          default="")
    opt_parser.add_option("-c", "--columns", dest="columns",
                          help="comma separated list of column numbers, that you want to see in output",
                          default="")
    parsed, _unused_args = opt_parser.parse_args()
    return parsed.where, parsed.columns, parsed.file
def main():
    """Plot voltage/current recorded by aaptos devices over a time window.

    Both --from and --to are mandatory ISO 8601 timestamps; they are parsed
    with dateutil and handed to main_db() together with the output filename.
    """
    # options handling
    usage = """%prog [options]"""
    description = """A simple script to display voltage/current from aaptos devices. Support for both live stream (from the SOAP server) or database inspection."""
    parser = OptionParser(usage=usage, add_help_option=True,
                          description=description)
    parser.add_option("-f", "--from", action="store", type="string",
                      dest="beginning",
                      help="beginning of the period to plot, in ISO 8601 format, YYYY-MM-DDTHH:MM:SS[.mmmmmm][+HH:MM]")
    parser.add_option("-t", "--to", action="store", type="string", dest="end",
                      help="end of the period to plot, in ISO 8601 format, YYYY-MM-DDTHH:MM:SS[.mmmmmm][+HH:MM]")
    parser.add_option("-o", "--output", action="store", type="string",
                      dest="filename", help="output file name",
                      default="aaptos.root")
    (options, args) = parser.parse_args()
    if options.beginning is None or options.end is None:
        parser.error("options --from and --to are both mandatory to access the database")
    try:
        initialTime = dateutil.parser.parse(options.beginning)
    except ValueError:
        parser.error("--from: unknown string format")
    try:
        finalTime = dateutil.parser.parse(options.end)
    except ValueError:
        # BUG FIX: this branch validates --to, but the error message used to
        # blame --from (copy-paste from the branch above).
        parser.error("--to: unknown string format")
    main_db(initialTime, finalTime, options.filename)
def main():
    """Snapshot an EBS volume and prune old snapshots on a retention ladder.

    Usage: %prog [options] vol_id.  Parses the retention spec (--days),
    optionally runs a dry simulation, then delegates the real work to
    manage_snapshots().
    """
    default_days = '12h,1..6d,1..22w'
    parser = OptionParser(usage="usage: %prog [options] vol_id")
    parser.add_option('--description', default='snapman', dest='description',
                      help="prefix for snapshot description")
    parser.add_option('--timeout', type='int', default=0, dest='timeout',
                      help="timeout for creating snapshots (see --days for units)")
    # store_false with default=True: passing the flag *disables* the behaviour.
    parser.add_option('--no-snapshot', action='store_false', default=True,
                      dest='snapshot',
                      help="don't do the snapshot (only clean up)")
    parser.add_option('--no-clean', '--no-cleanup', action='store_false',
                      default=True, dest='cleanup',
                      help="don't clean up (only do the snapshot)")
    parser.add_option('--logging', default='info')
    parser.add_option('--days', '-d', default=default_days,
                      help="Time spans to keep [default %default]. Units h=hours, d=days (default), w=weeks, m=months, y=years. n.b. use --simulate to make sure that your setting behaves as you think it will")
    parser.add_option('--simulate', dest='simulate',
                      help="Simulate and print the progression of backups using the given --days setting [example: --simulate=1d]")
    parser.add_option('--region', dest='region', default=None,
                      help="Connect to the given EC2 region")
    (options, args) = parser.parse_args()

    # Log level comes straight from the --logging string ('info' -> logging.INFO).
    logging.basicConfig(level=getattr(logging, options.logging.upper()))

    try:
        days = parse_days(options.days)
    except ValueError as e:
        print e  # NOTE: Python 2 print statement; file is Python 2
        parser.print_help()
        sys.exit(1)

    if options.simulate:
        # Dry run: show how the retention ladder evolves over time, then exit.
        tickspan = parse_days(options.simulate, single=True)
        simulate(days, tickspan)
        sys.exit(0)

    if len(args) != 1:
        parser.print_help()
        sys.exit(1)
    vol_id = args[0]

    timeout = None
    if options.timeout:
        # --timeout shares the span units of --days; converted to a timedelta.
        timeout = timedelta(seconds=parse_days(options.timeout, single=True))

    conn = EC2Connection()
    if options.region is not None:
        # this is a bit silly but we're working around a bug in boto
        # where it half-ignores the region set in its own boto.cfg
        # file
        regions = dict((x.name, x) for x in conn.get_all_regions())
        region = regions[options.region]
        conn = EC2Connection(region=region)

    return manage_snapshots(days, conn, vol_id, timeout=timeout,
                            description=options.description,
                            do_snapshot=options.snapshot,
                            do_cleanup=options.cleanup)
#coding:utf-8 # Author: Marco Mescalchin -- <*****@*****.**> # Purpose: Sum hours from google calendar # Created: 02/06/2010 import re,urllib,locale,datetime,calendar import dateutil.parser import gdata.calendar.service from optparse import OptionParser locale.setlocale(locale.LC_ALL,'') def_start = datetime.datetime.now().strftime('%Y-%m-01') def_end = datetime.datetime.now().strftime('%Y-%m-' + str(calendar.monthrange(int(datetime.datetime.now().strftime("%Y")),int(datetime.datetime.now().strftime("%m")))[1])) parser = OptionParser() parser.add_option("-u", "--username", dest="username") parser.add_option("-p", "--password",dest="password") parser.add_option("-c", "--calendar",dest="calendar") parser.add_option("-q", "--query",dest="query") parser.add_option("-s", "--start",dest="start",default=def_start) parser.add_option("-e", "--end",dest="end",default=def_end) (options, args) = parser.parse_args() if not options.username or not options.password or not options.calendar or not options.query: parser.error("incorrect number of arguments") def elapsed_time(seconds, suffixes=['y','w','d','h','m','s'], add_s=False, separator=' '): """ Takes an amount of seconds and turns it into a human-readable amount of time. From: http://snipplr.com/view/5713/python-elapsedtime-human-readable-time-span-given-total-seconds/
def main():
    """Command-line driver: collect and print a user's dev activity.

    Builds the (large) option set, normalises the parsed options into a
    date range and user/email pair, then uses a MyActivity instance to
    authenticate, fetch changes/reviews/issues, and print them in the
    selected format.  Returns a process exit code (0 ok, 1 on bad dates).
    """
    parser = optparse.OptionParser(description=sys.modules[__name__].__doc__)
    parser.add_option(
        '-u', '--user', metavar='<email>',
        # Look for USER and USERNAME (Windows) environment variables.
        default=os.environ.get('USER', os.environ.get('USERNAME')),
        help='Filter on user, default=%default')
    parser.add_option('-b', '--begin', metavar='<date>',
                      help='Filter issues created after the date (mm/dd/yy)')
    parser.add_option('-e', '--end', metavar='<date>',
                      help='Filter issues created before the date (mm/dd/yy)')
    quarter_begin, quarter_end = get_quarter_of(
        datetime.today() - relativedelta(months=2))
    parser.add_option(
        '-Q', '--last_quarter', action='store_true',
        help='Use last quarter\'s dates, i.e. %s to %s' %
        (quarter_begin.strftime('%Y-%m-%d'), quarter_end.strftime('%Y-%m-%d')))
    parser.add_option('-Y', '--this_year', action='store_true',
                      help='Use this year\'s dates')
    parser.add_option('-w', '--week_of', metavar='<date>',
                      help='Show issues for week of the date (mm/dd/yy)')
    parser.add_option(
        '-W', '--last_week', action='count',
        help='Show last week\'s issues. Use more times for more weeks.')
    parser.add_option(
        '-a', '--auth', action='store_true',
        help='Ask to authenticate for instances with no auth cookie')
    parser.add_option('-d', '--deltas', action='store_true',
                      help='Fetch deltas for changes.')
    parser.add_option(
        '--no-referenced-issues', action='store_true',
        help='Do not fetch issues referenced by owned changes. Useful in '
        'combination with --changes-by-issue when you only want to list '
        'issues that have also been modified in the same time period.')
    parser.add_option(
        '--skip-own-issues-without-changes', action='store_true',
        help='Skips listing own issues without changes when showing changes '
        'grouped by referenced issue(s). See --changes-by-issue for more '
        'details.')

    # Options limiting which kinds of activity are looked up.
    activity_types_group = optparse.OptionGroup(
        parser, 'Activity Types',
        'By default, all activity will be looked up and '
        'printed. If any of these are specified, only '
        'those specified will be searched.')
    activity_types_group.add_option('-c', '--changes', action='store_true',
                                    help='Show changes.')
    activity_types_group.add_option('-i', '--issues', action='store_true',
                                    help='Show issues.')
    activity_types_group.add_option('-r', '--reviews', action='store_true',
                                    help='Show reviews.')
    activity_types_group.add_option(
        '--changes-by-issue', action='store_true',
        help='Show changes grouped by referenced issue(s).')
    parser.add_option_group(activity_types_group)

    # Options controlling how each activity type is rendered.
    output_format_group = optparse.OptionGroup(
        parser, 'Output Format',
        'By default, all activity will be printed in the '
        'following format: {url} {title}. This can be '
        'changed for either all activity types or '
        'individually for each activity type. The format '
        'is defined as documented for '
        'string.format(...). The variables available for '
        'all activity types are url, title, author, '
        'created and modified. Format options for '
        'specific activity types will override the '
        'generic format.')
    output_format_group.add_option(
        '-f', '--output-format', metavar='<format>', default=u'{url} {title}',
        help='Specifies the format to use when printing all your activity.')
    output_format_group.add_option(
        '--output-format-changes', metavar='<format>', default=None,
        help='Specifies the format to use when printing changes. Supports the '
        'additional variable {reviewers}')
    output_format_group.add_option(
        '--output-format-issues', metavar='<format>', default=None,
        help='Specifies the format to use when printing issues. Supports the '
        'additional variable {owner}.')
    output_format_group.add_option(
        '--output-format-reviews', metavar='<format>', default=None,
        help='Specifies the format to use when printing reviews.')
    output_format_group.add_option(
        '--output-format-heading', metavar='<format>', default=u'{heading}:',
        help='Specifies the format to use when printing headings.')
    output_format_group.add_option(
        '--output-format-no-url', default='{title}',
        help='Specifies the format to use when printing activity without url.')
    output_format_group.add_option(
        '-m', '--markdown', action='store_true',
        help='Use markdown-friendly output (overrides --output-format '
        'and --output-format-heading)')
    output_format_group.add_option(
        '-j', '--json', action='store_true',
        help='Output json data (overrides other format options)')
    parser.add_option_group(output_format_group)
    auth.add_auth_options(parser)
    parser.add_option('-v', '--verbose', action='store_const',
                      dest='verbosity', default=logging.WARN,
                      const=logging.INFO,
                      help='Output extra informational messages.')
    parser.add_option('-q', '--quiet', action='store_const',
                      dest='verbosity', const=logging.ERROR,
                      help='Suppress non-error messages.')
    parser.add_option('-M', '--merged-only', action='store_true',
                      dest='merged_only', default=False,
                      help='Shows only changes that have been merged.')
    parser.add_option(
        '-C', '--completed-issues', action='store_true',
        dest='completed_issues', default=False,
        help='Shows only monorail issues that have completed (Fixed|Verified) '
        'by the user.')
    parser.add_option(
        '-o', '--output', metavar='<file>',
        help='Where to output the results. By default prints to stdout.')

    # Remove description formatting
    parser.format_description = (
        lambda _: parser.description)  # pylint: disable=no-member

    options, args = parser.parse_args()
    options.local_user = os.environ.get('USER')
    if args:
        parser.error('Args unsupported')
    if not options.user:
        parser.error('USER/USERNAME is not set, please use -u')
    # Retains the original -u option as the email address.
    options.email = options.user
    options.user = username(options.email)

    logging.basicConfig(level=options.verbosity)

    # python-keyring provides easy access to the system keyring.
    try:
        import keyring  # pylint: disable=unused-import,unused-variable,F0401
    except ImportError:
        logging.warning('Consider installing python-keyring')

    # Resolve the (begin, end) window: explicit -b/-e wins; otherwise one of
    # the convenience flags (-Q/-Y/-w/-W) selects it, defaulting to last week.
    if not options.begin:
        if options.last_quarter:
            begin, end = quarter_begin, quarter_end
        elif options.this_year:
            begin, end = get_year_of(datetime.today())
        elif options.week_of:
            begin, end = (get_week_of(
                datetime.strptime(options.week_of, '%m/%d/%y')))
        elif options.last_week:
            begin, end = (
                get_week_of(datetime.today() -
                            timedelta(days=1 + 7 * options.last_week)))
        else:
            begin, end = (get_week_of(datetime.today() - timedelta(days=1)))
    else:
        begin = dateutil.parser.parse(options.begin)
        if options.end:
            end = dateutil.parser.parse(options.end)
        else:
            end = datetime.today()
    options.begin, options.end = begin, end
    if begin >= end:
        # The queries fail in peculiar ways when the begin date is in the future.
        # Give a descriptive error message instead.
        logging.error(
            'Start date (%s) is the same or later than end date (%s)' %
            (begin, end))
        return 1

    if options.markdown:
        options.output_format_heading = '### {heading}\n'
        options.output_format = ' * [{title}]({url})'
        options.output_format_no_url = ' * {title}'
    logging.info('Searching for activity by %s', options.user)
    logging.info('Using range %s to %s', options.begin, options.end)

    my_activity = MyActivity(options)
    my_activity.show_progress('Loading data')

    # No explicit activity type selected means "show everything".
    if not (options.changes or options.reviews or options.issues or
            options.changes_by_issue):
        options.changes = True
        options.issues = True
        options.reviews = True

    # First do any required authentication so none of the user interaction has to
    # wait for actual work.
    if options.changes or options.changes_by_issue:
        my_activity.auth_for_changes()
    if options.reviews:
        my_activity.auth_for_reviews()

    logging.info('Looking up activity.....')
    try:
        if options.changes or options.changes_by_issue:
            my_activity.get_changes()
        if options.reviews:
            my_activity.get_reviews()
        if options.issues or options.changes_by_issue:
            my_activity.get_issues()
        if not options.no_referenced_issues:
            my_activity.get_referenced_issues()
    except auth.AuthenticationError as e:
        logging.error('auth.AuthenticationError: %s', e)

    my_activity.show_progress('\n')
    my_activity.print_access_errors()

    # When -o/--output is given, temporarily redirect stdout to that file so
    # the print_* helpers below need no changes; restored in the finally.
    output_file = None
    try:
        if options.output:
            output_file = open(options.output, 'w')
            logging.info('Printing output to "%s"', options.output)
            sys.stdout = output_file
    except (IOError, OSError) as e:
        logging.error('Unable to write output: %s', e)
    else:
        if options.json:
            my_activity.dump_json()
        else:
            if options.changes:
                my_activity.print_changes()
            if options.reviews:
                my_activity.print_reviews()
            if options.issues:
                my_activity.print_issues()
            if options.changes_by_issue:
                my_activity.print_changes_by_issue(
                    options.skip_own_issues_without_changes)
    finally:
        if output_file:
            logging.info('Done printing to file.')
            sys.stdout = sys.__stdout__
            output_file.close()

    return 0
def main(): signal.signal(signal.SIGINT, signal_handler) parser = OptionParser(usage="usage: %prog [options]") parser.add_option( "--setup", action="store_true", dest="setup", default=False, help="Setup script with Drive App") parser.add_option( "-c", action="store", type=int, dest="creds", default=0, help="Index of creds in credentials array to use (default: 0)") parser.add_option( "-d", action="store_true", dest="d", default=False, help="Delete files with names provided to -s and -r") parser.add_option( "-l", action="store_true", dest="listen", default=False, help="Enable Socket Mode: Listener") parser.add_option( "-i", action="store", dest="ip", default="", help="Enable Socket Mode: Connect") parser.add_option( "-p", action="store", type=int, dest="port", default=8000, help="Port number for socket mode (default: 8000)") parser.add_option( "-s", action="store", dest="send", default='0', help="Sending channel (default: 0)") parser.add_option( "-r", action="store", dest="recv", default='1', help="Receiving channel (default: 1)") parser.add_option( "-P", action="store", type=float, dest="poll", default=0.0, help="Poll every x seconds (default: 0)") parser.add_option( "-j", action="store", type=float, dest="jitter", default=1.0, help="Amount of randomness in polling (default: 1.0)") parser.add_option( "-v", action="store_true", dest="verbose", default=False, help="Enable verbose output") parser.add_option( "--debug", action="store_true", dest="debug", default=False, help="Enable debug output") global opts (opts, args) = parser.parse_args() if len(sys.argv[1:]) == 0: parser.print_help() sys.exit() verbose(opts) verbose(args) if opts.setup: print "Launching Setup..." setup() else: try: credentials = get_credential(opts.creds) except: print "Failed to get credentials at index %s!" % str(opts.creds) print "Run --setup to obtain credentials to add to this script." 
exit() try: verbose("Authenticating...") global service service = discovery.build('drive', 'v3', credentials=credentials) except Exception, e: sys.exit("Auth failure!")
to process the graph, use visualization/graphfigures.py ''' import sys import optparse import dateutil.parser from datetime import datetime from collections import Counter from progress.bar import Bar from twkit.utils import * if __name__ == '__main__': parser = optparse.OptionParser() parser.add_option("-v", "--verbose", action="store_true", dest="verbose", default=False, help="Make noise.") parser.add_option("-g", "--greek", action="store_true", dest="greek", default=False, help="Only get the part of the graph that is followed or marked greek.") parser.add_option("-o", "--output", action="store", dest="filename", default='quote.txt', help="Output file") parser.add_option("-b", "--before", action="store", dest="before", default=False, help="Before given date.") parser.add_option("-a", "--after", action="store", dest="after", default=False, help="After given date.") parser.add_option("--deleted", action="store_true", dest="deleted", default=False, help="Report quotes even if the quoted has been deleted.") (options, args) = parser.parse_args() verbose(options.verbose) db, _ = init_state(use_cache=False, ignore_api=True) query = { 'quoted_status_id': {'$gt': 1}, 'retweeted_status': None } criteria = {} if options.before:
def start(cls):
    """Parse command-line options and launch the drop-folder watcher.

    Reads project, drop path, search type, process, script path, site,
    handler and watch-folder code from the command line, ensures the drop
    directory exists, optionally records this process's pid for a managed
    watch folder, then schedules a WatchDropFolderTask.

    Returns:
        The Scheduler driving the watch task.

    Raises:
        Exception: when no -p/--project is supplied.
    """
    # Single-argument print(...) keeps this line valid on both Python 2 and 3.
    print("Running Watch Folder ...")

    # Check whether the user defined the drop folder path.
    # Default drop folder path: /tmp/drop
    parser = OptionParser()
    parser.add_option("-p", "--project", dest="project",
                      help="Define the project_name.")
    parser.add_option("-d", "--drop_path", dest="drop_path",
                      help="Define drop folder path")
    parser.add_option("-s", "--search_type", dest="search_type",
                      help="Define search_type.")
    parser.add_option("-P", "--process", dest="process",
                      help="Define process.")
    parser.add_option("-S", "--script_path", dest="script_path",
                      help="Define script_path.")
    parser.add_option("-w", "--watch_folder_code", dest="watch_folder_code",
                      help="Define watch folder code. If no code is used, then it assumed "
                           "that this process is managed in a standalone script.")
    parser.add_option("-x", "--site", dest="site", help="Define site.")
    parser.add_option("-c", "--handler", dest="handler",
                      help="Define Custom Handler Class.")
    (options, args) = parser.parse_args()

    # optparse defaults unset options to None, so `is not None` (instead of
    # the old `!= None`) is the PEP 8 way to detect a supplied value.
    if options.project is not None:
        project_code = options.project
    else:
        raise Exception("No project specified")

    if options.drop_path is not None:
        drop_path = options.drop_path
    else:
        tmp_dir = Environment.get_tmp_dir()
        drop_path = "%s/drop" % tmp_dir
        print(" using [%s]" % drop_path)

    if not os.path.exists(drop_path):
        os.makedirs(drop_path)

    # These options fall back to None when absent, so the original
    # `if x != None: y = x else: y = None` ladders collapse to plain reads.
    search_type = options.search_type
    script_path = options.script_path
    site = options.site
    handler = options.handler
    watch_folder_code = options.watch_folder_code
    # 'publish' is the documented default process when none is given.
    process = options.process if options.process is not None else 'publish'

    if watch_folder_code:
        # record pid in watch folder pid file; `with` guarantees the handle
        # is closed even if the write fails (the old open/write/close didn't)
        pid = os.getpid()
        pid_file = "%s/log/watch_folder.%s" % (Environment.get_tmp_dir(),
                                               watch_folder_code)
        with open(pid_file, "w") as f:
            f.write(str(pid))

    Batch(project_code=project_code, site=site)
    task = WatchDropFolderTask(
        base_dir=drop_path, site=site, project_code=project_code,
        search_type=search_type, process=process, script_path=script_path,
        handler=handler, watch_folder_code=watch_folder_code)

    scheduler = Scheduler.get()
    scheduler.add_single_task(task, delay=1)
    scheduler.start_thread()
    return scheduler
def main():
    """Fetch per-site SAM availability from the WLCG dashboard and write
    a site_avail_sum report plus a corrections (POST_REQUEST) file.

    Requires --date (parsed with dateutil, timezone ignored) and
    --outputDir (an existing directory).  Python 2 script.
    """
    parser = OptionParser(usage="usage: %prog [options] filename",
                          version="%prog 1.0")
    parser.add_option(
        "-d", "--date", dest="inputDate",
        help="date from which to fetch the results for HC in format %Y-%m-%dT%H:%M:%SZ ")
    parser.add_option("-o", "--outputDir", dest="outputDir",
                      help="Directory in which to save the output")
    (options, args) = parser.parse_args()

    if options.inputDate is None:
        print "Please input a date with the --date option"
        exit(-1)
    else:
        try:
            datetmp = dateutil.parser.parse(options.inputDate, ignoretz=True)
        except:  # NOTE(review): bare except also hides KeyboardInterrupt
            print "I couldn't recognize the date, please give me one like 2015-12-31T23:59:59Z"
            exit(-1)

    if options.outputDir is None:
        print "Please add a directory with option --outputDir"
        exit(-1)
    else:
        if os.path.isdir(options.outputDir) == False:
            print options.outputDir + " is not a valid directory or you don't have read permissions"
            exit(-1)

    # Constants
    # 1439 minutes = 23h59m, i.e. one day minus a minute.
    interval = 1439
    # NOTE(review): minute % interval is a no-op for minute < 60 — presumably
    # this was meant to truncate to the start of the window; confirm intent.
    dateFrom = datetmp - timedelta(minutes=datetmp.minute % interval,
                                   seconds=datetmp.second,
                                   microseconds=datetmp.microsecond)
    dateTo = dateFrom + timedelta(minutes=interval)
    dateFormat = "%Y-%m-%dT%H:%M:%SZ"
    dateFromStr = datetime.strftime(dateFrom, dateFormat)
    print dateFromStr
    dateToStr = datetime.strftime(dateTo, dateFormat)

    OUTPUT_FILE_NAME = os.path.join(options.outputDir, "site_avail_sum.txt")
    OUTPUT_FILE_CORRECTIONS = os.path.join(options.outputDir,
                                           "site_avail_sum_POST_REQUEST.txt")
    SAM_COLUMN_NUMBER = "126"

    print "Getting SAM Score from " + str(dateFrom) + " to " + str(dateTo)
    samUrl = "http://wlcg-sam-cms.cern.ch/dashboard/request.py/getstatsresultsmin?profile_name=CMS_CRITICAL_FULL&plot_type=quality&start_time=%s&end_time=%s&granularity=single&view=siteavl" % (
        dateFromStr, dateToStr)
    print samUrl

    # Download the url or die
    try:
        print "Fetching url : " + samUrl
        jsonStr = url.read(samUrl)
        samInfo = json.loads(jsonStr)
    except:  # NOTE(review): bare except; any failure exits with status 100
        exit(100)
    print "Data retrieved!"

    sitesfromDashboard = []
    for samSummary in samInfo['data']:
        sitesfromDashboard.append(samSummary['name'])
    print sitesfromDashboard

    samScoreSites = []
    print "Getting SAM for all sites"
    for site in sitesfromDashboard:
        for samSummary in samInfo['data']:
            if samSummary['name'] == site:
                try:
                    siteOK = float(samSummary['data'][0]['OK'])
                    siteCritical = float(samSummary['data'][0]['CRIT'])
                    siteSched = float(samSummary['data'][0]['SCHED'])
                    if (siteOK + siteCritical + siteSched) > 0.0:
                        # availability = OK / (OK + CRIT + SCHED), in percent
                        siteAvailabilityNum = (
                            float(siteOK) /
                            (float(siteOK + siteCritical + siteSched))) * 100.0
                        siteAvailability = int(siteAvailabilityNum)
                        # Tier 2/3 sites pass with a lower (80%) threshold.
                        if siteAvailabilityNum > 89.9:
                            siteColor = "green"
                        elif (sites.getTier(site) == 2 or
                              sites.getTier(site) == 3) and siteAvailabilityNum > 79.9:
                            siteColor = "green"
                        else:
                            siteColor = "red"
                    else:
                        siteAvailability = "n/a"
                        siteAvailabilityNum = None
                        siteColor = "white"
                except:
                    siteAvailability = "Error"
                    siteAvailabilityNum = None
                    siteColor = "white"
                # NOTE(review): if the except branch above was taken before
                # siteOK/siteCritical/siteSched were assigned, this print can
                # raise NameError (or reuse a previous site's values).
                print site + " OK " + str(siteOK) + " CRIT " + str(
                    siteCritical) + " SCHED " + str(
                        siteSched) + " SCORE : " + str(siteAvailability)
                samScoreSites.append(
                    dashboard.entry(date=dateTo.strftime("%Y-%m-%d %H:%M:%S"),
                                    name=site,
                                    value=siteAvailability,
                                    color=siteColor,
                                    url=getSuccessrateUrl(
                                        site, dateFrom, dateTo),
                                    nvalue=siteAvailabilityNum))
    print str(samScoreSites)

    if len(samScoreSites) > 1:
        OutputFile = open(OUTPUT_FILE_NAME, 'w')
        # corrections file is appended to, not truncated
        correctionOutputFile = open(OUTPUT_FILE_CORRECTIONS, 'a')
        # Corrections rows cover the following day, 00:00:01 .. 23:59:59.
        startDateStr = (dateFrom + timedelta(days=1)).replace(
            hour=0, minute=0, second=1,
            microsecond=0).strftime("%Y-%m-%d %H:%M:%S")
        endDateStr = (dateFrom + timedelta(days=1)).replace(
            hour=23, minute=59, second=59,
            microsecond=0).strftime("%Y-%m-%d %H:%M:%S")
        for site in samScoreSites:
            if site.name != "unknown":
                OutputFile.write(str(site) + '\n')
                correctionOutputFile.write(("\t".join([
                    startDateStr, endDateStr, str(SAM_COLUMN_NUMBER),
                    site.name, str(site.value), site.color, site.url,
                    "nvalue=0"
                ])) + "\n")
        print "\n--SAM Score output written to %s" % OUTPUT_FILE_NAME
        OutputFile.close()
        correctionOutputFile.close()
    else:
        print "There's no data, I quit!"
def parse_args():
    """Parse command-line arguments with optparse.

    Prints help and exits when invoked with no arguments.  After parsing,
    validates that (unless -V was given) at least one --version and one
    --project were supplied, and unwraps the single-valued append options
    --baseurl and --outputdir from their one-element lists.

    Returns the validated optparse options object.
    """
    usage = "usage: %prog [OPTIONS] " + \
            "--project PROJECT [--project PROJECT] " + \
            "--version VERSION [--version VERSION2 ...]"
    parser = OptionParser(
        usage=usage,
        epilog="Markdown-formatted CHANGES and RELEASENOTES files will be stored"
        " in a directory named after the highest version provided.")
    parser.add_option("-i", "--index", dest="index", action="store_true",
                      default=False, help="build an index file")
    parser.add_option("-l", "--license", dest="license", action="store_true",
                      default=False, help="Add an ASF license")
    parser.add_option("-p", "--project", dest="projects", action="append",
                      type="string",
                      help="projects in JIRA to include in releasenotes",
                      metavar="PROJECT")
    parser.add_option("-r", "--range", dest="range", action="store_true",
                      default=False, help="Given versions are a range")
    parser.add_option(
        "--sortorder", dest="sortorder", metavar="TYPE", default=SORTORDER,
        # dec is supported for backward compatibility
        choices=["asc", "dec", "desc", "newer", "older"],
        help="Sorting order for sort type (default: %s)" % SORTORDER)
    parser.add_option("--sorttype", dest="sorttype", metavar="TYPE",
                      default=SORTTYPE,
                      choices=["resolutiondate", "issueid"],
                      help="Sorting type for issues (default: %s)" % SORTTYPE)
    parser.add_option(
        "-t", "--projecttitle", dest="title", type="string",
        help="Title to use for the project (default is Apache PROJECT)")
    parser.add_option("-u", "--usetoday", dest="usetoday",
                      action="store_true", default=False,
                      help="use current date for unreleased versions")
    parser.add_option("-v", "--version", dest="versions", action="append",
                      type="string",
                      help="versions in JIRA to include in releasenotes",
                      metavar="VERSION")
    parser.add_option(
        "-V", dest="release_version", action="store_true", default=False,
        help="display version information for releasedocmaker and exit.")
    parser.add_option("-O", "--outputdir", dest="output_directory",
                      action="append", type="string",
                      help="specify output directory to put release docs to.")
    parser.add_option("-B", "--baseurl", dest="base_url",
                      action="append", type="string",
                      help="specify base URL of the JIRA instance.")
    parser.add_option(
        "--retries", dest="retries", action="append", type="int",
        help="Specify how many times to retry connection for each URL.")
    parser.add_option(
        "--skip-credits", dest="skip_credits", action="store_true",
        default=False,
        help="While creating release notes skip the 'reporter' and 'contributor' columns"
    )
    parser.add_option(
        "-X", "--incompatiblelabel", dest="incompatible_label",
        default="backward-incompatible", type="string",
        help="Specify the label to indicate backward incompatibility.")

    # Let the lint helper contribute its own options to the same parser.
    Linter.add_parser_options(parser)

    if len(sys.argv) <= 1:
        parser.print_help()
        sys.exit(1)

    (options, _) = parser.parse_args()

    # Validate options
    if not options.release_version:
        if options.versions is None:
            parser.error("At least one version needs to be supplied")
        if options.projects is None:
            parser.error("At least one project needs to be supplied")
        # --baseurl/--outputdir use action="append" but accept one value only;
        # unwrap the single element so callers get a plain string.
        if options.base_url is not None:
            if len(options.base_url) > 1:
                parser.error("Only one base URL should be given")
            else:
                options.base_url = options.base_url[0]
        if options.output_directory is not None:
            if len(options.output_directory) > 1:
                parser.error("Only one output directory should be given")
            else:
                options.output_directory = options.output_directory[0]

    return options
# Top-level script code: build the option parser for the NuPIC anomaly
# detection runner and parse the command line at import time.
helpString = (
    "\n%prog [options] [uid]"
    "\n%prog --help"
    "\n"
    "\nRuns NuPIC anomaly detection on a csv file."
    "\nWe assume the data files have a timestamp field called 'dttm' and"
    "\na value field called 'value'. All other fields are ignored."
    "\nNote: it is important to set min and max properly according to data.")

# Default location for the results csv (relative to the working directory).
resultsPathDefault = os.path.join("results", "anomaly_scores.csv");

# All the command line options
parser = OptionParser(helpString)
parser.add_option("--inputFile",
                  help="Path to data file. (default: %default)",
                  dest="inputFile", default="data/hotgym.csv")
parser.add_option("--outputFile",
                  help="Output file. Results will be written to this file."
                  " By default 'anomaly_scores_' will be prepended to the "
                  "input file name.",
                  dest="outputFile", default=None)
parser.add_option("--outputDir",
                  help="Output Directory. Results files will be place here.",
                  dest="outputDir", default="results")
# min/max bound the value field for the encoder; see helpString note above.
parser.add_option("--max", default=100.0, type=float,
                  help="Maximum number for the value field. [default: %default]")
parser.add_option("--min", default=0.0, type=float,
                  help="Minimum number for the value field. [default: %default]")

options, args = parser.parse_args(sys.argv[1:])
def add_options(cls, parser):
    """Register this importer's options after the inherited ones.

    Adds -k/--keyfile: the file holding the key used to decrypt token
    secrets.
    """
    # Base-class options are registered first so they precede ours in --help.
    super(OTPTokenImport, cls).add_options(parser)
    parser.add_option(
        "-k", "--keyfile",
        dest="keyfile",
        help="File containing the key used to decrypt token secrets")
def main():
    """Audit a PayPal transaction-history CSV.

    Parses options, opens the audit file (gzip or plain text), loads the
    config, then dispatches each CSV row by its 'Type' column to the refund
    or payment handler, skipping known-uninteresting types.  Populates the
    module-level config/messaging/options/civi globals used by the handlers.
    """
    global config, messaging, options, civi
    parser = OptionParser(usage="usage: %prog [options]")
    parser.add_option("-c", "--config", dest='configFile',
                      default=["paypal-audit.cfg"], action='append',
                      help='Path to configuration file')
    parser.add_option("-f", "--auditFile", dest='auditFile', default=None,
                      help='CSV of transaction history')
    parser.add_option('-l', "--logFile", dest='logFile', default="audit.log",
                      help='Destination logfile. New messages will be appended.')
    parser.add_option("-n", "--no-effect", dest='noEffect', default=False,
                      action="store_true", help="Dummy no-effect mode")
    (options, args) = parser.parse_args()

    # NOTE(review): auditFile defaults to None, so omitting -f makes the
    # re.search below raise; there is no explicit "file required" check.
    path = options.auditFile
    if re.search(r'[.]gz$', path):
        f = gzip.open(path, "rb")
    else:
        f = open(path, "rU")  # "rU" is the Python 2 universal-newline mode
    infile = csv.DictReader(f)

    config = SafeConfigParser()
    config.read(options.configFile)

    if options.noEffect:
        log("*** Dummy mode! Not injecting stomp messages ***")

    messaging = Stomp(config)
    civi = Civicrm(config.items('Db'))
    locale.setlocale(locale.LC_NUMERIC, "")

    # fix spurious whitespace around column header names
    infile.fieldnames = [name.strip() for name in infile.fieldnames]

    # Transaction types we deliberately do not process.
    ignore_types = [
        "Authorization",
        "Cancelled Fee",
        # currency conversion is an explanation of amounts which appear elsewhere
        "Currency Conversion",
        # TODO: handle in IPN
        "Temporary Hold",
        # seems to be the cancellation of a temporary hold
        "Update to Reversal",
        "Website Payments Pro API Solution",
    ]
    # Maps a transaction 'Type' to its row handler.
    audit_dispatch = {
        "Reversal": handle_refund,
        "Chargeback Settlement": handle_refund,
        "Refund": handle_refund,
        "Subscription Payment Received": handle_payment,
        "Web Accept Payment Received": handle_payment,
        "Shopping Cart Payment Received": handle_payment,
        "Virtual Debt Card Credit Received": handle_payment,
        "Payment Received": handle_payment,
        "Update to eCheck Received": handle_payment,
    }

    for line in infile:
        if line['Type'] in ignore_types:
            log("Ignoring %s of type %s" % (line['Transaction ID'], line['Type']))
            continue
        if line['Type'] in audit_dispatch:
            audit_dispatch[line['Type']](line)
        else:
            # Anything not ignored and not dispatchable is flagged.
            handle_unknown(line)