コード例 #1
0
ファイル: nextaction.py プロジェクト: kylemazur/NextAction
def main():
  """Entry point: fetch the Todoist dataset once, then sync forever."""
  parser = argparse.ArgumentParser(description='Add NextAction labels to Todoist.')
  parser.add_argument('--api_token', required=True, help='Your API key')
  args = parser.parse_args()
  logging.basicConfig(level=logging.INFO)
  # Pull the full initial dataset and build the in-memory model from it.
  raw = GetResponse(args.api_token).read()
  logging.debug("Got initial data: %s", raw)
  model = TodoistData(json.loads(raw))
  # Poll forever: push pending modifications, then fold the server's
  # response back into the model.
  while True:
    pending = model.GetProjectMods()
    if pending:
      logging.info("* Modifications necessary - skipping sleep cycle.")
    else:
      time.sleep(5)
    logging.info("** Beginning sync")
    state = model.GetSyncState()
    payload = DoSyncAndGetUpdated(args.api_token, pending, state).read()
    logging.debug("Got sync data %s", payload)
    logging.info("* Updating model after receiving sync data")
    model.UpdateChangedData(json.loads(payload))
    logging.info("* Finished updating model")
    logging.info("** Finished sync")
コード例 #2
0
ファイル: resolve.py プロジェクト: XaF/TraktForVLC
 def add_arguments(self, parser):
     """Register the media-resolution options on the given parser."""
     # (flag, keyword arguments) pairs: data-driven registration keeps the
     # option list easy to scan and extend.
     options = (
         ('--meta', dict(
             help='The metadata provided by VLC')),
         ('--hash', dict(
             dest='oshash',
             help='The hash of the media for OpenSubtitles resolution')),
         ('--size', dict(
             type=float,
             help='The size of the media, in bytes')),
         ('--duration', dict(
             type=float,
             help='The duration of the media, in seconds')),
         ('--trakt-api-key', dict(
             help='The Trakt API key to be used to resolve series from '
                  'Trakt.tv')),
     )
     for flag, kwargs in options:
         parser.add_argument(flag, **kwargs)
コード例 #3
0
ファイル: timeuntil.py プロジェクト: N6UDP/cslbot
def cmd(send, msg, args):
    """Reports the difference between now and some specified time.
    Syntax: {command} <time>
    """
    parser = arguments.ArgParser(args['config'])
    parser.add_argument('date', nargs='*', action=arguments.DateParser)
    try:
        cmdargs = parser.parse_args(msg)
    except arguments.ArgumentException as e:
        send(str(e))
        return
    if not cmdargs.date:
        send("Time until when?")
        return
    delta = dateutil.relativedelta.relativedelta(cmdargs.date, datetime.datetime.now())
    # Assemble the non-zero components, largest unit first; the attribute
    # names on relativedelta double as the display words.
    pieces = ["%s is " % cmdargs.date.strftime("%x")]
    for unit in ('years', 'months', 'days', 'hours', 'minutes', 'seconds'):
        amount = getattr(delta, unit)
        if amount:
            pieces.append("%d %s " % (amount, unit))
    pieces.append("away")
    send("".join(pieces))
コード例 #4
0
def parse_args(description=__doc__):
    """Build the CLI parser and return the parsed arguments.

    description: text shown in --help; defaults to the module docstring.
    """
    cli = ArgumentParser(description=description)
    cli.add_argument('-d', metavar='DIR', required=True, help='Log dir')
    return cli.parse_args()
コード例 #5
0
def main():
  """Entry point: sync NextAction labels with Todoist in an endless loop.

  Network failures during a sync cycle are logged with a traceback and the
  loop retries on the next iteration.
  """
  parser = argparse.ArgumentParser(description='Add NextAction labels to Todoist.')
  parser.add_argument('--api_token', required=True, help='Your API key')
  parser.add_argument('--use_priority', required=False,
      action="store_true", help='Use priority 1 rather than a label to indicate the next actions.')
  # Several helpers read the parsed options through this module-level name.
  global args
  args = parser.parse_args()
  logging.basicConfig(level=logging.DEBUG)
  # Pull the full initial dataset once and build the in-memory model.
  response = GetResponse(args.api_token)
  initial_data = response.read()
  logging.debug("Got initial data: %s", initial_data)
  initial_data = json.loads(initial_data)
  a = TodoistData(initial_data)
  while True:
    try:
      mods = a.GetProjectMods()
      if len(mods) == 0:
        time.sleep(5)
      else:
        logging.info("* Modifications necessary - skipping sleep cycle.")
      logging.info("** Beginning sync")
      sync_state = a.GetSyncState()
      changed_data = DoSyncAndGetUpdated(args.api_token, mods, sync_state).read()
      logging.debug("Got sync data %s", changed_data)
      changed_data = json.loads(changed_data)
      logging.info("* Updating model after receiving sync data")
      a.UpdateChangedData(changed_data)
      logging.info("* Finished updating model")
      logging.info("** Finished sync")
    except Exception:
      # Fix: the original used a bare `except:` (which also swallows
      # KeyboardInterrupt/SystemExit, making the loop unstoppable) and a
      # Python 2 print statement.  Catch Exception and log the traceback.
      logging.exception("Network error, try again..")
コード例 #6
0
def parse_args():
    """Build the CLI for the stale-chronos-jobs cleanup and parse sys.argv."""
    cleanup_parser = argparse.ArgumentParser(
        description='Cleans up stale chronos jobs.')
    cleanup_parser.add_argument(
        '-d', '--soa-dir',
        dest="soa_dir",
        metavar="SOA_DIR",
        default=chronos_tools.DEFAULT_SOA_DIR,
        help="define a different soa config directory")
    return cleanup_parser.parse_args()
コード例 #7
0
def make_default_options():
  """Helper function for creating default options for runner."""
  # Parse an empty argv so only defaults (plus the pid-scoped output dir)
  # end up in the returned namespace.
  default_output = os.path.join('/tmp', 'gittest.%d' % os.getpid())
  parser = argparse.ArgumentParser()
  GitRunner.add_parser_args(parser, {'github_disable_upstream_push': True})
  parser.add_argument('--output_dir', default=default_output)
  return parser.parse_args([])
コード例 #8
0
ファイル: common.py プロジェクト: bzero/JARR
 def reqparse_args(self, req=None, strict=False, default=True, args=None):
     """Build a reqparse parser from the attribute specs and parse a request.

     req: request to parse; when None the current flask request is used
     strict: bool
         if True will throw 400 error if args are defined and not in request
     default: bool
         if True, won't return defaults
         (NOTE(review): the elif below only *skips* attrs when default is
         False and the attr is absent from the JSON body, so the wording
         above looks inverted — confirm against callers)
     args: dict
         the args to parse, if None, self.attrs will be used
     """
     parser = reqparse.RequestParser()
     for attr_name, attrs in (args or self.attrs).items():
         # pop() keeps 'force_default' out of the kwargs forwarded to
         # add_argument; note this mutates the attr spec dict in place.
         if attrs.pop('force_default', False):
             parser.add_argument(attr_name, location='json', **attrs)
         elif not default and (not request.json
                 or request.json and attr_name not in request.json):
             # defaults disabled and the caller did not supply this attr
             continue
         else:
             parser.add_argument(attr_name, location='json', **attrs)
     parsed = parser.parse_args(strict=strict) if req is None \
             else parser.parse_args(req, strict=strict)
     # Post-process: coerce declared date fields from string to datetime.
     for field in self.to_date:
         if parsed.get(field):
             try:
                 parsed[field] = dateutil.parser.parse(parsed[field])
             except Exception:
                 # keep the raw string on parse failure; just log it
                 logger.exception('failed to parse %r', parsed[field])
     return parsed
def get_parser():
    """Build the argument parser for the posts-file tool."""
    argp = ArgumentParser()
    # Single required option naming the input data file.
    argp.add_argument('--file', '-f', required=True,
                      help='The .jsonl file with all the posts')
    return argp
コード例 #10
0
ファイル: exterminatus.py プロジェクト: jcline/exterminatus
def main():
    """CLI entry point: mass-delete comments from the given Reddit thread(s)."""
    parser = argparse.ArgumentParser(description='Declare Exterminatus on a Reddit thread.')
    parser.add_argument('thread', nargs='+', help='The thread(s) to declare Exterminatus on')
    args = parser.parse_args()

    threads = args.thread

    # Load saved credentials, or create them if the config does not exist yet.
    config = credentials.load_or_create_credentials('config')

    if not config:
        print('Could not load or generate config')
        sys.exit(1)

    print('Before declaring Exterminatus, thou shalt update AutoModerator to delete all new comments.')
    # First confirmation gate: only enumerate comments after explicit consent.
    char = input('Begin reconnaissance? y/n ')
    if char != 'y':
        return

    client = api(config)

    # Collect the ids of every comment in the target threads.
    ids = load_all_comments(client, threads)

    print('Blasphemers:\n %s' % ids)
    # Second confirmation gate before anything destructive happens.
    char = input('Begin Exterminatus? y/n ')
    if char != 'y':
        return

    print('It is now that we perform our charge.')
    print('In fealty of the God-Emperor (our undying lord) and by the grace of the Golden Throne I declare Exterminatus upon those who would deny our faith. The Emperor protects.')

    # Perform the actual deletion pass over the collected comment ids.
    exterminatus(client, ids)
コード例 #11
0
ファイル: __init__.py プロジェクト: simmel/mixedmartialtail
def add_argument(parser):
    """Attach the tool's two boolean flags to *parser*."""
    flag_specs = (
        (("-i", "--replace-line"),
         "Force to replace the whole line, not just the part that's JSON."),
        (("-f", "--force"),
         "Force to continue even if the plugins parsers are failing."),
    )
    for flags, help_text in flag_specs:
        parser.add_argument(*flags, action="store_true", help=help_text)
コード例 #12
0
ファイル: main.py プロジェクト: nijel/odorik
 def add_list_option(parser):
     """Add argparse argument --list."""
     list_kwargs = {
         'action': 'store_true',
         'help': 'List all records (instead of printing summary)',
     }
     parser.add_argument('--list', **list_kwargs)
コード例 #13
0
ファイル: configuration.py プロジェクト: theg5prank/backupmgr
    def parse_args(self):
        """Build the backupmgr CLI (one subcommand per verb) and parse self.argv."""
        parser = argparse.ArgumentParser(prog=self.prog)
        parser.add_argument("-q", "--quiet", action="store_true",
                            help="Be quiet on logging to stdout/stderr")
        subparsers = parser.add_subparsers()

        def verb_parser(name):
            # Every subcommand records its verb so dispatch can switch on it.
            sub = subparsers.add_parser(name)
            sub.set_defaults(verb=name)
            return sub

        verb_parser("backup")

        lister = verb_parser("list")
        lister.add_argument("--before", dest="before", default=None,
                            type=parse_simple_date)
        lister.add_argument("--after", dest="after", default=None,
                            type=parse_simple_date)

        restorer = verb_parser("restore")
        # Four required positionals identify what to restore and where to.
        for arg_name, metavar in (("backup", "BACKUPNAME"),
                                  ("backend", "BACKENDNAME"),
                                  ("archive_spec", "SPEC"),
                                  ("destination", "DEST")):
            restorer.add_argument(arg_name, metavar=metavar, type=str)

        verb_parser("list-configured-backups")
        verb_parser("list-backends")
        verb_parser("prune")

        return parser.parse_args(self.argv)
コード例 #14
0
 def add_arguments(self, parser):
     """Register the article-fetching options on *parser*.

     The flags control whether existing articles/images are refreshed and
     which date range the JSON API is queried for.
     """
     parser.add_argument(
         '--update',
         action='store_true',
         default=False,
         help='Update existing articles data',
     )
     parser.add_argument(
         '--update-images',
         action='store_true',
         default=False,
         help='Update existing articles images',
     )
     parser.add_argument(
         '--date',
         help='Fetch json API for specific date instead of RSS',
     )
     parser.add_argument(
         '--days',
         # Fix: help text had a typo ('Fetch json API fro N days').
         help='Fetch json API for N days',
     )
     parser.add_argument(
         '--till-date',
         help='Fetch json API till specific date via API',
     )
コード例 #15
0
ファイル: daemon.py プロジェクト: rfinnie/dsari
def parse_args():
    """Parse the dsari scheduler daemon's command line."""
    parser = argparse.ArgumentParser(
        description='Do Something and Record It - scheduler daemon ({})'.format(__version__),
        formatter_class=argparse.ArgumentDefaultsHelpFormatter,
    )
    parser.add_argument(
        '--version', action='version',
        version=__version__,
        help='report the program version',
    )
    parser.add_argument(
        '--config-dir', '-c', type=str, default=dsari.config.DEFAULT_CONFIG_DIR,
        help='configuration directory for dsari.json',
    )
    # The remaining options are plain boolean switches with the same shape;
    # register them data-driven.
    for flag, help_text in (
            ('--fork', 'fork into the background after starting'),
            ('--debug', 'output additional debugging information'),
            ('--no-timestamp', 'do not show timestamps in logging output'),
    ):
        parser.add_argument(flag, action='store_true', help=help_text)
    return parser.parse_args()
コード例 #16
0
ファイル: __init__.py プロジェクト: loverdos/kamaki
 def update_parser(self, parser, name):
     """Update argument parser with self info"""
     # arity < 0: repeatable value option; nonzero arity: single value;
     # arity == 0: boolean flag.
     if self.arity < 0:
         action = 'append'
     elif self.arity:
         action = 'store'
     else:
         action = 'store_true'
     parser.add_argument(
         *self.parsed_name,
         dest=name, action=action, default=self.default, help=self.help)
コード例 #17
0
def cli():
    """Parse the job code and document path from the command line.

    Returns a (job, doc) tuple.
    """
    parser = argparse.ArgumentParser(
        description="This script creates, edits, or deletes venues in bulk.")
    # job: create/edit/delete, accepted in either case.
    parser.add_argument('job', type=str, default=None,
                        choices=['c', 'C', 'e', 'E', 'd', 'D'])
    parser.add_argument('doc', type=str, default=None)
    parsed = parser.parse_args()
    return parsed.job, parsed.doc
コード例 #18
0
ファイル: tweeprune.py プロジェクト: z3r0fox/tweeprune
def prep_argparse():
    '''
    (None) -> None
    Example command line arguments for argparse. Note that argparse provides
    a default -h | --help option.
    See https://docs.python.org/2/howto/argparse.html for more.
    '''
    parser = Parser(description='A script to remove inactive users from ' +
                                'your Twitter following list. ' +
                                'Requirements: Set up your API access ' +
                                'tokens by following the instructions at ' +
                                'https://dev.twitter.com/oauth/overview/' +
                                'application-owner-access-tokens. ' +
                                'Any dependencies that your Python ' +
                                'installation says it requires when you ' +
                                'try running this script are best installed ' +
                                'with pip, eg; sudo pip install twitter')
    # optional arguments to specify cut off date for accounts
    parser.add_argument('-y', '--years', help='specify idle account cutoff ' +
                        'in years')
    # Fix: the concatenated help text was missing a trailing space and
    # rendered as "cutoffin months".
    parser.add_argument('-m', '--months', help='specify idle account cutoff ' +
                        'in months')
    # twitter API tokens are collected in an argument group
    group = parser.add_argument_group()
    group.add_argument('-t', '--token',
                       help='twitter API access token', required=True)
    group.add_argument('-tk', '--tokenkey',
                       help='twitter API token secret', required=True)
    group.add_argument('-ck', '--conkey',
                       help='twitter API consumer key', required=True)
    group.add_argument('-cs', '--consecret',
                       help='twitter API consumer secret', required=True)
    args = parser.parse_args()
    return args
コード例 #19
0
ファイル: jogger_to_hugo.py プロジェクト: lrem/blog
def main():
  """Convert a Jogger XML export into Hugo-style markdown posts.

  Reads the XML file named by the first CLI argument and writes one
  front-matter-prefixed .md file per <entry> into
  <output_directory>/pl/<permalink>.md.
  """
  parser = argparse.ArgumentParser(__doc__)
  parser.add_argument(
      "input_file",
      help="Input XML file. Must be uncompressed.")
  parser.add_argument(
      "output_directory",
      help="Output directory, where .md files are written.")
  args = parser.parse_args()
  tree = ET.parse(args.input_file)
  root = tree.getroot()
  abs_out_dir = os.path.abspath(args.output_directory)
  for entry in root.findall('entry'):
    # Pull the fields of a single blog entry out of the XML.
    date = dateutil.parser.parse(entry.find('date').text)
    permalink = entry.find('permalink').text
    subject = entry.find('subject').text.strip()
    body = entry.find('body').text
    print(date, permalink, "|", subject)
    # Output goes under the "pl" subdirectory (language code, presumably).
    out_dir = os.path.join(abs_out_dir, "pl")
    os.makedirs(out_dir, exist_ok=True)
    out_file = os.path.join(out_dir, permalink + '.md')
    with open(out_file, 'w') as out_fd:
      # TOML front matter block consumed by Hugo.
      out_fd.write('+++\n')
      out_fd.write('# vim:set nosmartindent nocindent ft=markdown:\n')
      out_fd.write('date = "%s"\n' % date.isoformat())
      out_fd.write('draft = false\n')
      #out_fd.write('type = "post"\n')
      out_fd.write('title = "%s"\n' % subject)
      out_fd.write('+++\n')
      # Body is HTML in the export; convert it to markdown.
      out_fd.write(html2text.html2text(body))
      comments = entry.findall('comment')
      if comments:
        out_fd.write(FormatComments(comments))
コード例 #20
0
ファイル: agrp1.py プロジェクト: fzhurd/fzwork
def main():
    """Parse -m (repeatable) and -n options and forward them to agrp2.py.

    Fixes over the original:
      * Python 3 print() calls instead of Python 2 print statements.
      * subprocess is invoked with an argument list (shell=False), so the
        option values can no longer be interpreted by a shell (injection).
      * Guards against -m not being supplied (it is optional, and iterating
        None raised a TypeError).
    """
    parser = argparse.ArgumentParser()
    parser.add_argument("-m", action='append', dest='mdest', required=False,
                        type=str,
                        help="Input the created database name in Postgres")
    parser.add_argument("-n", dest='ndest', required=False, type=str,
                        help="Input the extension name")

    args = parser.parse_args()

    p1 = args.mdest or []  # append action leaves None when -m never given
    p2 = args.ndest

    print(p1)
    print(p2)

    for item in p1:
        print(item)

    s1 = " ".join(p1)
    print('s1:', s1)

    # Each -m value becomes its own argv entry; no shell involved, matching
    # what the old shell=True word-splitting produced for space-free values.
    subprocess.call(['./agrp2.py', '-x'] + p1 + ['-y', str(p2)])
コード例 #21
0
ファイル: piwik.py プロジェクト: VizGrimoire/VizGrimoireUtils
def parse_args():
    """Build and parse the piwik importer command line.

    Options are organised into a database group, a Piwik group, and two
    positional arguments (server URL and site id).
    """
    parser = ArgumentParser(usage="Usage: '%(prog)s [options] <url> <site_id>")

    # Database connection options
    db_group = parser.add_argument_group('Database options')
    db_group.add_argument('-u', '--user', dest='db_user', default='root',
                          help='Database user name')
    db_group.add_argument('-p', '--password', dest='db_password', default='',
                          help='Database user password')
    db_group.add_argument('-d', dest='db_name', required=True,
                          help='Name of the database where data will be stored')
    db_group.add_argument('--host', dest='db_hostname', default='localhost',
                          help='Name of the host where the database server is running')
    db_group.add_argument('--port', dest='db_port', default='3306',
                          help='Port of the host where the database server is running')

    # Piwik API options
    piwik_group = parser.add_argument_group('Piwik options')
    piwik_group.add_argument('--start-date', dest='start_date', required=True)
    piwik_group.add_argument('--end-date', dest='end_date', default='today')
    piwik_group.add_argument('--key', dest='key', required=True,
                             help='Piwik auth key')

    # Positional arguments
    parser.add_argument('url', help='Piwik server URL')
    parser.add_argument('site_id', help='Identifier of the site')

    return parser.parse_args()
コード例 #22
0
ファイル: cli.py プロジェクト: akhodakivskiy/deeplearning4j
    def command_dispatcher(self, args=None):
        """Parse the pydl4j command line and run the selected subcommand."""
        description = ('pydl4j,  a system to manage your DL4J dependencies from Python.\n')
        parser = argparse.ArgumentParser(description=description)
        parser.add_argument(
            '-v', '--version', action='version',
            version=pkg_resources.get_distribution("pydl4j").version,
            help='Print pydl4j version'
        )

        subparsers = parser.add_subparsers(title='subcommands', dest='command')
        subparsers.add_parser('init', help='Initialize pydl4j')
        subparsers.add_parser('install', help='Install jars for pydl4j')

        argcomplete.autocomplete(parser)
        parsed = parser.parse_args(args)
        self.var_args = vars(parsed)

        # No subcommand: show usage and bail out.
        if not parsed.command:
            parser.print_help()
            return

        self.command = parsed.command

        # Dispatch table keeps the verb -> handler mapping in one place.
        handlers = {'init': self.init, 'install': self.install}
        handler = handlers.get(self.command)
        if handler is not None:
            handler()
コード例 #23
0
def parse_args():
    """Parse the geowiki aggregation script's command line.

    Returns the parsed namespace after logging it at INFO level.
    """
    parser = argparse.ArgumentParser(description='Format a collection of json files output by editor-geocoding and creates a single csv in digraph format.')
    parser.add_argument(
        '--geo_files', 
        metavar='GEOCODING_FILE.json', 
        nargs='+',
        help='any number of appropriately named json files')
    parser.add_argument(
        '-d','--basedir',
        default='/home/erosen/src/dashboard/geowiki/data',
        help='directory in which to find or create the datafiles and datasources directories for the *.csv and *.yaml files')
    parser.add_argument(
        '-b', '--basename',
        default='geo_editors',
        help='base file name for csv and yaml files.  for example: BASEDIR/datasources/BAS_FILENAME_en.yaml')
    parser.add_argument(
        '-k', 
        type=int, 
        default=10, 
        help='the number of countries to include in the selected project datasource')
    # NOTE(review): action='store_true' combined with default=True means
    # passing -p/--parallel can never change the value -- the flag is a
    # no-op.  It probably wants default=False or a --no-parallel companion;
    # not changed here because callers may rely on parallel-by-default.
    parser.add_argument(
        '-p', '--parallel',
        action='store_true',
        default=True,
        help='use a multiprocessing pool to execute per-language analysis in parallel'
        )

    args = parser.parse_args()
    logger.info(pprint.pformat(vars(args), indent=2))
    return args
コード例 #24
0
ファイル: server.py プロジェクト: ricardodani/desafiohu1
 def parse_args(self):
     """Parse the place/date query parameters into self.args.

     NOTE(review): `parser` is not defined in this method -- it relies on a
     module-level reqparse parser shared by the resource; confirm that is
     intentional.
     """
     parser.add_argument('placeId')
     parser.add_argument('placeType')
     parser.add_argument('enterDate')
     parser.add_argument('exitDate')
     parser.add_argument('undefinedDate')
     # Store the parsed arguments on the instance for later handlers.
     self.args = parser.parse_args()
コード例 #25
0
def main():
    """CLI entry point for the timesheet tool (upload/download/lookups)."""
    # Mirror log output to stderr for both our logger and the API logger.
    for lg in (log, api_log):
        lg.addHandler(logging.StreamHandler(sys.stderr))

    parser = argparse.ArgumentParser()
    parser.add_argument('-v', '--verbose', action='store_true',
                        help='Display debug messages')
    subparsers = parser.add_subparsers(help='sub-command help')

    # Each subcommand records its name in `cmd` so run() can dispatch.
    upload = subparsers.add_parser('upload')
    upload.add_argument('filename', help='Timesheet csv')
    upload.add_argument('--dry-run', action='store_true',
                        help='Preview changes')
    upload.set_defaults(cmd='upload')

    download = subparsers.add_parser('download')
    download.add_argument('date', help='List entries for specified week')
    download.set_defaults(cmd='download')

    lookups = subparsers.add_parser('lookups')
    lookups.add_argument('kind', choices=['customer', 'activity'],
                         help='Download specified lookups')
    lookups.set_defaults(cmd='lookups')

    args = parser.parse_args()

    # The verbosity switch applies to both loggers.
    level = logging.DEBUG if args.verbose else logging.INFO
    log.setLevel(level)
    api_log.setLevel(level)

    run(args)
コード例 #26
0
 def add_arguments(self, parser):
     """Register the --dry-run flag (stored under the 'dry-run' key)."""
     # dest contains a hyphen, so the value must be reached via
     # options['dry-run'] rather than attribute access.
     parser.add_argument('--dry-run',
                         action='store_true',
                         dest='dry-run',
                         default=False,
                         help='Perform a dry-run')
コード例 #27
0
ファイル: pytimer.py プロジェクト: GjjvdBurg/PyTimer
def parse_options():
    """Parse the PyTimer command line (-n new timer / -l load timer)."""
    parser = argparse.ArgumentParser(
        description="A timing application for Python")
    for flag, text in (('-n', "Start a new timer"),
                       ('-l', "Load an existing timer")):
        parser.add_argument(flag, action='store_true', help=text)
    return parser.parse_args()
コード例 #28
0
ファイル: important_network.py プロジェクト: csrhau/sandpit
def process_arguments():
    """ Process command line arguments """
    # FileType opens the named files at parse time, so the returned
    # namespace carries ready-to-use file objects.
    parser = argparse.ArgumentParser(description="Enron Corpus Parser")
    parser.add_argument('-i', '--infile', required=True,
                        type=argparse.FileType('r'),
                        help='Path to data file to process')
    parser.add_argument('-o', '--outfile', required=True,
                        type=argparse.FileType('w'),
                        help='Path to output data to')
    return parser.parse_args()
コード例 #29
0
ファイル: processing.py プロジェクト: rodney757/mediagoblin
    def generate_parser(cls):
        """Return an argparse parser for this processor's resize command."""
        resize_parser = argparse.ArgumentParser(
            description=cls.description, prog=cls.name)
        # Optional bounding box: two ints, max width then max height.
        resize_parser.add_argument(
            "--size", nargs=2, metavar=("max_width", "max_height"), type=int)
        # Which stored file variant to operate on.
        resize_parser.add_argument("file", choices=["medium", "thumb"])
        return resize_parser
コード例 #30
0
ファイル: resource.py プロジェクト: interphx/mvs
 def post(self):
     """Handle a file-upload POST and return the stored file's URL.

     Expects a multipart form field named 'fileToUpload'; responds with a
     payload shaped for the editor's upload widget.
     """
     # reqparse pulls the werkzeug FileStorage out of the request's files.
     locs = ['files']
     parser = reqparse.RequestParser()
     parser.add_argument('fileToUpload', required=True, location=locs, type=werkzeug.FileStorage)
     args = parser.parse_args()
     # The description literal is Russian for "Image for a site page".
     file = upload_file(args['fileToUpload'], description='Изображение для страницы сайта', role='other', author_id=None)

     return {'file': file.url, 'location': file.url, 'success': True}
コード例 #31
0
 def setup_argparser(cls, parser):
     """Declare the positional arguments for creating a merge request."""
     for arg_name, description in (("repo", "repository"),
                                   ("branch", "branch name"),
                                   ("assignee", "assignee (username in gitlab)"),
                                   ("title", "title for the MR")):
         parser.add_argument(arg_name, help=description)
コード例 #32
0
 def setup_argparser(cls, parser):
     """Declare the positionals needed to fetch a file at a given ref."""
     positionals = [("repo", "repository"),
                    ("ref", "The name of branch, tag or commit"),
                    ("path", "Full path to the file")]
     for arg_name, description in positionals:
         parser.add_argument(arg_name, help=description)
コード例 #33
0
 def setup_argparser(cls, parser):
     """Declare the repo and branch positional arguments."""
     for arg_name, description in (("repo", "repository"),
                                   ("branch", "The name of branch")):
         parser.add_argument(arg_name, help=description)
コード例 #34
0
 def get_parser(self, prog_name):
     """Extend the base command parser with tenant and date-range positionals."""
     parser = super().get_parser(prog_name)
     for arg_name, description in (('tenant_id', 'Tenant ID'),
                                   ('start', 'Start Date'),
                                   ('end', 'End Date')):
         parser.add_argument(arg_name, help=description)
     return parser
コード例 #35
0
if __name__ == '__main__':

    rc = 3

    #parser = argparse.ArgumentParser(
    parser = rgmbeat.RGMArgumentParser(
        description="""
        Nagios plugin used to return interfaces output queue length from ElasticSearch for Windows machines.
        This plugin return the value of latest metricbeat document pushed by client.
        """,
        usage="""
        """,
        epilog="version {}, copyright {}".format(__version__, __copyright__)
    )
    parser.add_argument('-H', '--hostname', type=str, help='hostname or IP address', required=True)
    parser.add_argument('-w', '--warning', type=int, nargs='?', help='warning trigger', default=90)
    parser.add_argument('-c', '--critical', type=int, nargs='?', help='critical trigger', default=180)
    parser.add_argument('-t', '--timeout', type=int, help='data validity timeout (in minutes)', default=5)
    parser.add_argument(
        '-E', '--elastichost', type=str, help='connection URL of ElasticSearch server',
        default="localhost:9200"
    )
    args = parser.parse_args()

    escnx = elasticsearch_dsl.connections.create_connection(hosts=[args.elastichost], timeout=20)
    # construct an Elasticsearch DSL Search() object, using Q() shortcuts to build the query
    request = elasticsearch_dsl.Search(using=escnx, index="metricbeat-*", doc_type='_doc')
    request = request.query(
        'bool', must=[
            'match_all',
コード例 #36
0
        return {_decode_value(typ['type'], v) for v in value}
    if kind == 'map':
        key_type, val_type = typ['key_type'], typ['val_type']
        return {
            _decode_value(key_type, v[0]): _decode_value(val_type, v[1])
            for v in value
        }
    if kind == 'union':
        type_index, val = value
        return _decode_value(typ['types'][int(type_index)], val)
    if kind == 'enum':
        return typ['symbols'][int(value)]
    if kind in ['error', 'named']:
        return _decode_value(typ['type'], value)
    raise Exception(f'unknown type kind {kind}')


if __name__ == '__main__':
    import argparse
    import pprint

    # Minimal CLI: a single positional Zed query string.
    parser = argparse.ArgumentParser(
        description='Query default Zed lake service and print results.',
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    parser.add_argument('query')
    args = parser.parse_args()

    # Run the query against the default client and pretty-print each record.
    c = Client()
    for record in c.search(args.query):
        pprint.pprint(record)
コード例 #37
0
import hashlib
import pytz

# Python 2 only: force UTF-8 as the process default encoding; reload() is
# required to get setdefaultencoding back after site.py removes it.
if sys.version_info[0] < 3:
    reload(sys)
    sys.setdefaultencoding('utf-8')

days = []
# Timezone used to render all event times.
de_tz = pytz.timezone('Europe/Amsterdam')
locale.setlocale(locale.LC_ALL, 'de_DE.UTF-8')

# some functions used in multiple files of this script collection
import voc.tools

# Command line: the event acronym plus optional source/output tweaks.
parser = argparse.ArgumentParser()
parser.add_argument('acronym', help='the event acronym')
parser.add_argument('--offline', action='store_true')
parser.add_argument('-v', action='store_true', dest='verbose')
parser.add_argument('--url', action='store')
parser.add_argument('--output-folder',
                    '-o',
                    action='store',
                    dest='output_folder')
parser.add_argument('--default-language',
                    '-lang',
                    action='store',
                    dest='default_language',
                    default='de')

# output file name (prefix)?
# output dir (base) as config option?
コード例 #38
0
def search():
    """Render the main page for a search query.

    Reads the ?q= parameter, ranks papers against it, and renders the
    template with the top args.num_results matches encoded as JSON.
    """
    q = request.args.get('q', '')  # get the search request
    papers = papers_search(q)  # perform the query and get sorted documents
    ret = encode_json(papers, args.num_results)  # encode the top few to json
    return render_template('main.html',
                           papers=ret,
                           numpapers=len(db),
                           collapsed=0)  # weeee


# Python 2 entry point: parse server options, then load the paper database
# before serving (the load continues past this excerpt).
if __name__ == "__main__":

    parser = argparse.ArgumentParser()
    parser.add_argument('-p',
                        '--prod',
                        dest='prod',
                        action='store_true',
                        help='run in prod?')
    parser.add_argument('-r',
                        '--num_results',
                        dest='num_results',
                        type=int,
                        default=20,
                        help='number of results to return per query')
    args = parser.parse_args()
    print args

    # NOTE(review): pickle.load of a local data file; acceptable only for
    # trusted, locally generated db.p.
    print 'loading db.p...'
    db = pickle.load(open('db.p', 'rb'))

    print 'loading tfidf.p...'
コード例 #39
0
def parse_args():
    """Parse the three positional grading-queue arguments from the command line."""
    parser = argparse.ArgumentParser()
    for positional in ("next_directory", "next_to_grade", "which_untrusted"):
        parser.add_argument(positional)
    return parser.parse_args()
コード例 #40
0
# It does not have a concept of a "kidney" or "patient": these are defined in config.yml
# It *does* have a concept of state variables, state lists (e.g. waitlist), and event lists
# It *does* have a concept of dates and an effective fixed time granularity of one day

import argparse
import yaml
import json
import pandas as pd
import datetime as dt
import dateutil.parser

from utils import load_module

# Command line: a run specification file plus an optional master config.
parser = argparse.ArgumentParser()
parser.add_argument('run_spec',
                    metavar='run-spec',
                    help='run specification file')
parser.add_argument('--master-config',
                    default="config.yml",
                    help='top-level config file')
args = parser.parse_args()
#print("Arguments",args)

# Read master config
# NOTE(review): yaml.load without an explicit Loader is deprecated and can
# construct arbitrary Python objects; yaml.safe_load is preferable for
# config files — confirm no custom tags are relied upon before changing.
with open(args.master_config) as f:
    config = yaml.load(f)
#print(json.dumps(config, indent=2))

# Get modular functions
# Note that event lists have parsers and handlers; state lists only parsers
event_parser = dict()
コード例 #41
0
def parse_command_line_arguments(logger):
    """
    Parse command line arguments received, if any
    Print example if invalid arguments are passed

    :param logger:  the logger
    :return:        config_filename passed as argument if any, else DEFAULT_CONFIG_FILENAME
                    export_formats passed as argument if any, else 'pdf'
                    list_export_profiles if passed as argument, else None
                    do_loop False if passed as argument, else True
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('--config',
                        help='config file to use, defaults to ' +
                        DEFAULT_CONFIG_FILENAME)
    parser.add_argument('--format',
                        nargs='*',
                        help='formats to download, valid options are pdf, '
                        'json, docx, csv, media, web-report-link, actions')
    parser.add_argument(
        '--list_export_profiles',
        nargs='*',
        help='display all export profiles, or restrict to specific'
        ' template_id if supplied as additional argument')
    parser.add_argument('--loop',
                        nargs='*',
                        help='execute continuously until interrupted')
    parser.add_argument(
        '--setup',
        action='store_true',
        help='Automatically create new directory containing the '
        'necessary config file.'
        'Directory will be named iAuditor Audit Exports, and will be placed in your current directory'
    )
    args = parser.parse_args()

    config_filename = DEFAULT_CONFIG_FILENAME

    if args.setup:
        # --setup is a one-shot action: scaffold the directory and stop.
        initial_setup(logger)
        sys.exit()

    if args.config is not None:
        if os.path.isfile(args.config):
            config_filename = args.config
            logger.debug(config_filename + ' passed as config argument')
        else:
            # BUG FIX: the original logged config_filename, which at this
            # point still holds the default, instead of the invalid path
            # the user actually supplied.
            logger.error(args.config + ' is not a valid config file')
            sys.exit(1)

    export_formats = ['pdf']
    if args.format is not None and len(args.format) > 0:
        valid_export_formats = [
            'json', 'docx', 'pdf', 'csv', 'media', 'web-report-link', 'actions'
        ]
        export_formats = []
        for option in args.format:
            if option not in valid_export_formats:
                print(
                    '{0} is not a valid export format.  Valid options are pdf, json, docx, csv, web-report-link, '
                    'media, or actions'.format(option))
                logger.info(
                    'invalid export format argument: {0}'.format(option))
            else:
                export_formats.append(option)

    # --loop takes no values; its mere presence on the command line
    # enables looping.
    loop_enabled = args.loop is not None

    return config_filename, export_formats, args.list_export_profiles, loop_enabled
def main():
    """Parse CLI options, then delete old docker images until the requested
    amount of disk space is free on the given mount point."""
    parser = argparse.ArgumentParser(description='Free up disk space from docker images')
    parser.add_argument(
        '--minimum-free-space', type=int, default=50,
        help='Number of GB miniumum free required')
    parser.add_argument(
        '--minimum-free-percent', type=int, default=50,
        help='Number of percent free required')
    parser.add_argument(
        '--path', type=str, default='/',
        help='What mount point to introspect')
    parser.add_argument(
        '--logfile', type=str,
        default='/var/log/jenkins-slave/cleanup_docker_images.log',
        help='Where to log output')
    parser.add_argument(
        '--min-days', type=int, default=0,
        help='The minimum age of items to clean up in days.')
    parser.add_argument(
        '--min-hours', type=int, default=10,
        help='The minimum age of items to clean up in hours, added to days.')
    parser.add_argument(
        '--docker-api-version', type=str, default='1.30',
        help='The docker server API level.')
    parser.add_argument(
        '--dry-run', '-n', default=False, action='store_true',
        help='Do not actually clean up, just print to log.')
    opts = parser.parse_args()

    docker_client = docker.DockerClient(
        base_url='unix://var/run/docker.sock', version=opts.docker_api_version)
    min_age = datetime.timedelta(days=opts.min_days, hours=opts.min_hours)

    # Initialize logging before any work so every decision is recorded.
    logging.basicConfig(filename=opts.logfile, format='%(asctime)s %(message)s',
                        level=logging.INFO)
    logging.info(">>>>>> Starting run of cleanup_docker_images.py arguments %s" % opts)

    if check_done(opts):
        logging.info("Disk space satisfied before running, no need to run.")
        return
    print_progress(opts)

    # The marker file is used purely as a flock target so only one
    # instance of the script runs at a time.
    marker = '/tmp/cleanup_docker_images.py.marker'
    with open(marker, 'w') as lock_fh:
        try:
            with flocked(lock_fh):
                run_image_cleanup(opts, min_age, docker_client)
        except BlockingIOError as ex:
            logging.error("Failed to get lock on %s aborting. Exception[%s]. "
                          "This most likely means an instance of this script"
                          " is already running." %
                          (marker, ex))
            sys.exit(1)
コード例 #43
0
 def setup_argparser(cls, parser):
     """Register the repo/branch/comment positional arguments on *parser*."""
     parser.add_argument("repo", help="repository")
     parser.add_argument("branch", help="branch name")
     # BUG FIX: help text was copy-pasted from "branch" and wrongly
     # described this argument as a branch name.
     parser.add_argument("comment", help="comment")
コード例 #44
0
        **stats
    }

    templateLoader = jinja2.FileSystemLoader(
        searchpath=os.path.dirname(__file__))
    templateEnv = jinja2.Environment(loader=templateLoader,
                                     undefined=jinja2.StrictUndefined)
    template = templateEnv.get_template(TEMPLATE_FILE)
    return template.render(**vars)


if __name__ == "__main__":
    from argparse import ArgumentParser, ArgumentDefaultsHelpFormatter
    import time
    parser = ArgumentParser(formatter_class=ArgumentDefaultsHelpFormatter)
    parser.add_argument("fname", nargs="+", help="filename")
    parser.add_argument("--ps", help="Show the processes", action='store_true')
    parser.add_argument("--plot", help="Create a plot")
    parser.add_argument("--html",
                        help="Render HTML files into a new directory")
    parser.add_argument(
        "--json",
        help="Render JSON file and JavaScript HTML files into a new directory")
    args = parser.parse_args()

    stats = []

    if args.html:
        path = args.html
        if os.path.exists(path):
            raise RuntimeError("{}: exists".format(path))
コード例 #45
0
def add_common_args(*parsers):
  """Register the command-line flags shared by every CrOS test wrapper.

  Every parser in *parsers* receives the same options, so each
  sub-command exposes an identical common interface.
  """
  # (flag names, keyword arguments) for each shared option, in order.
  common_options = [
      (('--verbose', '-v'), dict(action='store_true')),
      (('--board',), dict(type=str, required=True, help='Type of CrOS device.')),
      (('--cros-cache',), dict(
          type=str, default=DEFAULT_CROS_CACHE, help='Path to cros cache.')),
      (('--path-to-outdir',), dict(
          type=str, required=True,
          help='Path to output directory, all of whose contents will be '
               'deployed to the device.')),
      (('--runtime-deps-path',), dict(
          type=str, help='Runtime data dependency file from GN.')),
      (('--vpython-dir',), dict(
          type=str,
          help='Location on host of a directory containing a vpython binary to '
               'deploy to the device before the test starts. The location of '
               'this dir will be added onto PATH in the device. WARNING: The '
               'arch of the device might not match the arch of the host, so '
               'avoid using "${platform}" when downloading vpython via CIPD.')),
      # TODO(bpastene): Switch all uses of "--vm-logs-dir" to "--logs-dir".
      (('--vm-logs-dir', '--logs-dir'), dict(
          type=str, dest='logs_dir',
          help='Will copy everything under /var/log/ from the device after the '
               'test into the specified dir.')),
  ]
  for target in parsers:
    for flags, kwargs in common_options:
      target.add_argument(*flags, **kwargs)
    # --use-vm and --device are mutually exclusive ways to pick a target.
    vm_or_device = target.add_mutually_exclusive_group()
    vm_or_device.add_argument(
        '--use-vm', action='store_true',
        help='Will run the test in the VM instead of a device.')
    vm_or_device.add_argument(
        '--device', type=str,
        help='Hostname (or IP) of device to run the test on. This arg is not '
             'required if --use-vm is set.')
コード例 #46
0
def add_common_args(*parsers):
  """Add the flags every CrOS test sub-command shares, including sharding,
  flashing, and target-selection options."""
  for p in parsers:
    p.add_argument('--verbose', '-v', action='store_true')
    p.add_argument('--board', type=str, required=True,
                   help='Type of CrOS device.')
    p.add_argument('--cros-cache', type=str, default=DEFAULT_CROS_CACHE,
                   help='Path to cros cache.')
    p.add_argument('--path-to-outdir', type=str, required=True,
                   help='Path to output directory, all of whose contents will be '
                        'deployed to the device.')
    p.add_argument('--runtime-deps-path', type=str,
                   help='Runtime data dependency file from GN.')
    p.add_argument('--vpython-dir', type=str,
                   help='Location on host of a directory containing a vpython binary to '
                        'deploy to the device before the test starts. The location of '
                        'this dir will be added onto PATH in the device. WARNING: The '
                        'arch of the device might not match the arch of the host, so '
                        'avoid using "${platform}" when downloading vpython via CIPD.')
    p.add_argument('--logs-dir', type=str, dest='logs_dir',
                   help='Will copy everything under /var/log/ from the device after the '
                        'test into the specified dir.')
    # Shard args are parsed here since we might also specify them via env vars.
    p.add_argument('--test-launcher-shard-index', type=int,
                   default=os.environ.get('GTEST_SHARD_INDEX', 0),
                   help='Index of the external shard to run.')
    p.add_argument('--test-launcher-total-shards', type=int,
                   default=os.environ.get('GTEST_TOTAL_SHARDS', 1),
                   help='Total number of external shards.')
    p.add_argument('--flash', action='store_true',
                   help='Will flash the device to the current SDK version before running '
                        'the test.')
    p.add_argument('--public-image', action='store_true',
                   help='Will flash a public "full" image to the device.')

    # Exactly one way to choose a test target: VM or physical device.
    target_group = p.add_mutually_exclusive_group()
    target_group.add_argument('--use-vm', action='store_true',
                              help='Will run the test in the VM instead of a device.')
    target_group.add_argument('--device', type=str,
                              help='Hostname (or IP) of device to run the test on. This arg is not '
                                   'required if --use-vm is set.')
コード例 #47
0
 def add_arguments(self, parser):
     """Accept one or more directory names as positional arguments."""
     parser.add_argument('dir_name', nargs='+', type=str)
コード例 #48
0
def main():
    """Build a Run Selection list for runs <firstrun>..<lastrun>.

    For every run the RUN and DQLL RATDB tables are inspected and a set of
    pass(1)/fail(0)/unknown(9) flags (run type, duration, crate HV status,
    crate DAC values) is written to a run-list text file, followed by the
    high-level DQ checks.

    Returns 0 on success.
    """
    # Parse the arguments
    parser = argparse.ArgumentParser()
    parser.add_argument("-n", dest="firstrun", help="First run number to process", type=int, required=True)
    parser.add_argument("-i", dest="lastrun", help="Last run number to process", type=int, required=True)
    args = parser.parse_args()

    # Exit if no run numbers supplied.
    # BUG FIX: both arguments are parsed with type=int, so the original
    # comparison against the string "0" could never be true.
    if args.firstrun == 0 or args.lastrun == 0:
        sys.stderr.write("%s - rscheck():ERROR: please supply a start run number using \'-n\'"
                         % (datetime.datetime.now().replace(microsecond=0)))
        sys.exit(1)

    # Run type bit masks
    PHYSICS_RUN_MASK = 0x4  # bit 2

    # Detector State bit masks
    DCR_ACTIVITY_MASK = 0x200000       # bit 21
    COMP_COIL_OFF_MASK = 0x400000      # bit 22
    PMT_OFF_MASK = 0x800000            # bit 23
    SLASSAY_MASK = 0x4000000           # bit 26
    UNUSUAL_ACTIVITY_MASK = 0x8000000  # bit 27

    # Create run list
    runlistname = "runlist_{0}-{1}.txt".format(args.firstrun, args.lastrun)
    runlist = open(runlistname, 'w')

    # Write run list header
    rstools.write_header(runlist)

    # BUG FIX: p counts processed runs so a cosmetic separator can be drawn
    # every 10 runs; the original reset it to 0 on every loop iteration,
    # which made the (p != 0) test always false and the separator
    # unreachable.  Initialise it once, before the loop.
    p = 0

    # Loop over runs from <firstrun> to <lastrun>
    for run in range(args.firstrun, args.lastrun + 1):

        sys.stdout.write("%s - rscheck():INFO: preparing the Run Selection checks for run %s\n"
                         % (datetime.datetime.now().replace(microsecond=0), run))

        # Some cosmetics taken from E. Falk
        if (run % 10 == 0) and (p != 0):
            runlist.write("-------|----------|----------|" +
                          "-----------|-----------|" +
                          "------------------|----"
                          "---------------------|" +
                          "-----------------------------------------|" +
                          "--------------------|---------------------\n")

        # Read detector state database & alarms
        # TO DO

        runtype = 1

        # Read RUN.ratdb
        rundatatuple = ratdbtools.get_table(run, "RUN", settings.RATDB_ADDRESS, settings.RATDB_HOST,
                                            settings.RATDB_READ_USER, settings.RATDB_READ_PASSWORD,
                                            settings.RATDB_NAME, settings.RATDB_PORT)
        rundata = rundatatuple[1]

        if rundatatuple[0]:
            runtypemask = rundata['runtype']

            # NOT a physics run: skip it entirely.
            if runtypemask & PHYSICS_RUN_MASK != PHYSICS_RUN_MASK:
                sys.stdout.write("%s - rscheck():INFO: run %i is not a PHYSICS run\n"
                                 % (datetime.datetime.now().replace(microsecond=0), run))
                continue

            # DCR Activity bit set
            if runtypemask & DCR_ACTIVITY_MASK == DCR_ACTIVITY_MASK:
                sys.stdout.write("%s - rscheck():INFO: run %i has DCR Activity bit set\n"
                                 % (datetime.datetime.now().replace(microsecond=0), run))
                runtype = 0

            # Compensation Coils OFF
            if runtypemask & COMP_COIL_OFF_MASK == COMP_COIL_OFF_MASK:
                sys.stdout.write("%s - rscheck():INFO: run %i has Comp Coils OFF\n"
                                 % (datetime.datetime.now().replace(microsecond=0), run))
                runtype = 0

            # PMTs OFF
            if runtypemask & PMT_OFF_MASK == PMT_OFF_MASK:
                sys.stdout.write("%s - rscheck():INFO: run %i has PMTs OFF\n"
                                 % (datetime.datetime.now().replace(microsecond=0), run))
                runtype = 0

            # SLAssay
            if runtypemask & SLASSAY_MASK == SLASSAY_MASK:
                sys.stdout.write("%s - rscheck():INFO: run %i has SLAssay\n"
                                 % (datetime.datetime.now().replace(microsecond=0), run))
                runtype = 0

            # Unusual Activity
            if runtypemask & UNUSUAL_ACTIVITY_MASK == UNUSUAL_ACTIVITY_MASK:
                sys.stdout.write("%s - rscheck():INFO: run %i has Unusual Activity bit set\n"
                                 % (datetime.datetime.now().replace(microsecond=0), run))
                runtype = 0

        else:
            # RUN table unavailable: flag run type as unknown.
            runtype = 9

        runduration = 1
        cratestatus = 1
        cratedac = 1

        # Read DQLL.ratdb
        dqlldatatuple = ratdbtools.get_table(run, "DQLL", settings.RATDB_ADDRESS, settings.RATDB_HOST,
                                             settings.RATDB_READ_USER, settings.RATDB_READ_PASSWORD,
                                             settings.RATDB_NAME, settings.RATDB_PORT)
        dqlldata = dqlldatatuple[1]

        if dqlldatatuple[0]:
            duration = dqlldata['duration_seconds']

            # Duration < 30 minutes
            if duration < 1800:
                sys.stdout.write("%s - rscheck():INFO: run %i duration is less than 30 minutes\n"
                                 % (datetime.datetime.now().replace(microsecond=0), run))
                runduration = 0

            crates_status_a = dqlldata['crate_hv_status_a']

            # At least one crate HV is OFF (A supply)
            for i in range(len(crates_status_a)):
                if crates_status_a[i] == False:
                    sys.stdout.write("%s - rscheck():INFO: run %i crate %i HV is off\n"
                                     % (datetime.datetime.now().replace(microsecond=0), run, i))
                    cratestatus = 0

            # OWLs are OFF (16B supply)
            crate_16B_status = dqlldata['crate_16_hv_status_b']
            if crate_16B_status == False:
                sys.stdout.write("%s - rscheck():INFO: run %i OWLs HV is off\n"
                                 % (datetime.datetime.now().replace(microsecond=0), run))
                cratestatus = 0

            # At least one DAC value is 0 (power supply A)
            crates_dac_a = dqlldata['crate_hv_dac_a']
            for i in range(len(crates_dac_a)):
                if crates_dac_a[i] == 0:
                    sys.stdout.write("%s - rscheck():INFO: run %i crate %i DAC value is 0\n"
                                     % (datetime.datetime.now().replace(microsecond=0), run, i))
                    cratedac = 0

            # OWLs DAC value is 0 (power supply B)
            crate_16B_dac = dqlldata['crate_16_hv_dac_b']
            if crate_16B_dac == 0:
                # NOTE(review): this message reuses the loop index i from
                # the A-supply scan above, so it reports a stale crate
                # number for the OWL (16B) supply -- kept for output
                # compatibility; confirm intended crate id.
                sys.stdout.write("%s - rscheck():INFO: run %i crate %i DAC value is 0\n"
                                 % (datetime.datetime.now().replace(microsecond=0), run, i))
                cratedac = 0

        else:
            # DQLL table unavailable: flag all DQLL-derived checks as unknown.
            runduration = 9
            cratestatus = 9
            cratedac = 9

        # Write run info in run list
        runlist.write(str(run) + ' | ')
        runlist.write(str(runtype) + '        | ')
        runlist.write(str(runduration) + '        | ')
        runlist.write(str(cratestatus) + '         | ')
        runlist.write(str(cratedac) + '         | ')

        # Perform the HL checks
        dqhltools.dqhlPassFailList(run, runlist)

        # Increment p
        p += 1

    runlist.close()

    return 0  # Success!
コード例 #49
0
ファイル: bidscoiner.py プロジェクト: srikash/bidscoin
def main():
    """Console script usage"""

    # Parse the input arguments and run bidscoiner(args)
    import argparse
    import textwrap

    argparser = argparse.ArgumentParser(
        formatter_class=argparse.RawDescriptionHelpFormatter,
        description=textwrap.dedent(__doc__),
        epilog='examples:\n'
        '  bidscoiner /project/foo/raw /project/foo/bids\n'
        '  bidscoiner -f /project/foo/raw /project/foo/bids -p sub-009 sub-030\n '
    )
    # Positional: source and destination folders.
    argparser.add_argument(
        'sourcefolder',
        help='The study root folder containing the raw data in sub-#/[ses-#/]data subfolders (or specify --subprefix and --sesprefix for different prefixes)')
    argparser.add_argument(
        'bidsfolder',
        help='The destination / output folder with the bids data')
    # Optional subject selection and processing flags.
    argparser.add_argument(
        '-p', '--participant_label', nargs='+',
        help='Space separated list of selected sub-# names / folders to be processed (the sub- prefix can be removed). Otherwise all subjects in the sourcefolder will be selected')
    argparser.add_argument(
        '-f', '--force', action='store_true',
        help='If this flag is given subjects will be processed, regardless of existing folders in the bidsfolder. Otherwise existing folders will be skipped')
    argparser.add_argument(
        '-s', '--skip_participants', action='store_true',
        help='If this flag is given those subjects that are in participants.tsv will not be processed (also when the --force flag is given). Otherwise the participants.tsv table is ignored')
    argparser.add_argument(
        '-b', '--bidsmap', default='bidsmap.yaml',
        help='The bidsmap YAML-file with the study heuristics. If the bidsmap filename is relative (i.e. no "/" in the name) then it is assumed to be located in bidsfolder/code/bidscoin. Default: bidsmap.yaml')
    argparser.add_argument(
        '-n', '--subprefix', default='sub-',
        help="The prefix common for all the source subject-folders. Default: 'sub-'")
    argparser.add_argument(
        '-m', '--sesprefix', default='ses-',
        help="The prefix common for all the source session-folders. Default: 'ses-'")
    argparser.add_argument(
        '-v', '--version', action='version',
        version=f"BIDS-version:\t\t{bids.bidsversion()}\nBIDScoin-version:\t{bids.version()}",
        help='Show the BIDS and BIDScoin version')
    args = argparser.parse_args()

    # Hand the parsed options over to the library entry point.
    bidscoiner(rawfolder=args.sourcefolder,
               bidsfolder=args.bidsfolder,
               subjects=args.participant_label,
               force=args.force,
               participants=args.skip_participants,
               bidsmapfile=args.bidsmap,
               subprefix=args.subprefix,
               sesprefix=args.sesprefix)
コード例 #50
0
        raa = np.abs(saa - vaa_ave)
        raa[raa > 180] -= 180
        ac.output.nc_write(ncfile, 'raa', raa)

    ## return the path to the new file
    return (ncfile)


if __name__ == '__main__':
    import argparse
    import sys, os

    parser = argparse.ArgumentParser(
        description='NBS/OPeNDAP preprocessor for ACOLITE')
    parser.add_argument('--input', help='OPeNDAP URL to L1C Sentinel-2 data')
    parser.add_argument(
        '--output',
        help=
        'Output directory, if not provided will output to current working directory',
        default=None)
    parser.add_argument(
        '--limit',
        help='4 element for cropping ROI in coordinates (default=None)',
        default=None)
    parser.add_argument(
        '--sub',
        help='4 element for cropping ROI in pixels (default=None)',
        default=None)
    parser.add_argument(
        '--geometry',
import os
import time
import datetime
import json
import urllib
import urlparse
import dateutil.parser
from argparse import ArgumentParser
from shutil import copyfile

parser = ArgumentParser()
parser.add_argument("-f", "--file", dest="file",
                    help="media.json file", metavar="FILE")
json_path = parser.parse_args().file

archive_path = os.path.dirname(json_path)

with open(json_path) as f:
    data = json.load(f)

stories = data['stories']
length = len(stories)

download_dir = 'stories'
if not os.path.exists(download_dir):
    os.makedirs(download_dir)

index = 1

for story in stories:
    print("Processing {0}/{1}".format(index, length))
コード例 #52
0
 def add_arguments(self, parser):
     """Register the required 'path' positional argument on *parser*."""
     parser.add_argument('path',
                         type=str,
                         help='path of file to be imported')
コード例 #53
0
    return bug


def get_inconsistencies(bugs):
    """Return the subset of *bugs* whose rollback raises.

    Each failure is reported on stdout (bug id followed by the exception)
    and the offending bug is collected into the returned list.
    """
    broken = []
    for candidate in bugs:
        try:
            rollback(candidate, do_assert=True)
        except Exception as err:
            print(candidate["id"])
            print(err)
            broken.append(candidate)
    return broken


if __name__ == "__main__":
    import argparse
    from tqdm import tqdm

    # Only a verbosity switch is exposed on the command line.
    parser = argparse.ArgumentParser()
    parser.add_argument("--verbose", help="Verbose mode", action="store_true")
    args = parser.parse_args()

    # Roll back every bug, asserting internal consistency along the way.
    for bug in tqdm(bugzilla.get_bugs()):
        if args.verbose:
            print(bug["id"])

        rollback(bug, do_assert=True)
コード例 #54
0
 def add_arguments(self, parser):
     """Register the optional 'since' argument.

     The default is computed by datetime_proxy() when add_arguments runs,
     not when the command executes -- presumably yesterday midnight per
     the help text; confirm against datetime_proxy's definition.
     """
     parser.add_argument('since', type=str, nargs='?', default=datetime_proxy(), help='Run for each month since the date, defaults to yesterday midnight')
コード例 #55
0
def fill_parser(parser: argparse.ArgumentParser) -> argparse.ArgumentParser:
    """Set up sys.argv parser.

    Arguments:
        parser: argparse.ArgumentParser or argparse.subparser

    Returns:
        The same parser with all grid-export arguments registered.
    """
    parser.add_argument(
        "DATAFILE",
        help="Name of Eclipse DATA file. INIT and EGRID file must lie alongside.")
    parser.add_argument(
        "--vectors", nargs="+", default="*",
        help="INIT and/or restart wildcards for vectors to include")
    parser.add_argument(
        "--rstdates", type=str, default="",
        help="Point in time to grab restart data from, "
             "either 'first' or 'last', 'all', or a date in "
             "YYYY-MM-DD format")
    parser.add_argument(
        "-o", "--output", type=str, default="eclgrid.csv",
        help="Name of output csv file. Use '-' for stdout.")
    parser.add_argument(
        "--stackdates", action="store_true",
        help=(
            "If set, the dates from restart data will not be in the column "
            "but instead there will be a DATE column with the dates. Note "
            "that the static data will be repeated for each DATE."))
    parser.add_argument(
        "--dropconstants", action="store_true",
        help="Drop constant columns from the dataset")
    parser.add_argument("-v", "--verbose", action="store_true", help="Be verbose")
    return parser
コード例 #56
0
def main():
    """Entry point for the mlbstreamer TUI: set up logging, load config,
    start an MLB session and run the urwid event loop until 'q' quits."""

    global options
    global logger

    # Only --verbose is consumed here; unknown args are tolerated via
    # parse_known_args() (presumably handled elsewhere -- confirm).
    parser = argparse.ArgumentParser()
    parser.add_argument("-v", "--verbose", action="store_true")
    options, args = parser.parse_known_args()

    log_file = os.path.join(config.CONFIG_DIR, "mlbstreamer.log")

    formatter = logging.Formatter(
        "%(asctime)s [%(module)16s:%(lineno)-4d] [%(levelname)8s] %(message)s",
        datefmt="%Y-%m-%d %H:%M:%S")

    # File handler captures everything (DEBUG and up), regardless of the
    # logger's own level.
    fh = logging.FileHandler(log_file)
    fh.setLevel(logging.DEBUG)
    fh.setFormatter(formatter)

    logger = logging.getLogger("mlbstreamer")
    logger.setLevel(logging.INFO)
    logger.addHandler(fh)

    # Second handler mirrors log records into the on-screen console widget.
    ulh = UrwidLoggingHandler()
    ulh.setLevel(logging.DEBUG)
    ulh.setFormatter(formatter)
    logger.addHandler(ulh)

    logger.debug("mlbstreamer starting")
    config.settings.load()

    state.session = MLBSession.new()

    # Merge the palette entries contributed by each widget type in use.
    entries = Dropdown.get_palette_entries()
    entries.update(ScrollingListBox.get_palette_entries())
    entries.update(DataTable.get_palette_entries())
    # raise Exception(entries)
    palette = Palette("default", **entries)
    screen = urwid.raw_display.Screen()
    screen.set_terminal_properties(256)

    view = ScheduleView()

    # Main layout: schedule view on top, 10-row log console below.
    log_console = widgets.ConsoleWindow()
    # log_box = urwid.BoxAdapter(urwid.LineBox(log_console), 10)
    pile = urwid.Pile([("weight", 1, urwid.LineBox(view)),
                       (10, urwid.LineBox(log_console))])

    def global_input(key):
        # 'q'/'Q' quits; any other key falls through to the focused widget.
        if key in ('q', 'Q'):
            raise urwid.ExitMainLoop()
        else:
            return False

    state.loop = urwid.MainLoop(pile,
                                palette,
                                screen=screen,
                                unhandled_input=global_input,
                                pop_ups=True)
    # Wire the urwid log handler to a pipe feeding the console widget;
    # must happen after the MainLoop exists.
    ulh.connect(state.loop.watch_pipe(log_console.log_message))
    logger.info("mlbstreamer starting")
    if options.verbose:
        logger.setLevel(logging.DEBUG)

    state.loop.run()
コード例 #57
0
if __name__ == "__main__":
    import argparse

    # Known venues: CLI name -> (DigitalPour establishment id, location number).
    locations = {
        "sta": ("5761f0a45e002c13703ed811", 1),
        "wywbmad": ("57b130dd5e002c0388f8b686", 1),
        "wywb805": ("57b130dd5e002c0388f8b686", 2),
        "otbx": ("5502506cb3b70304a8f2e0d2", 1),
        "rccb": ("5aa1a8135e002c0924805971", 1),
        "bufeddies": ("5afe0f3a5e002c0b8060a5b8", 1),
        "rrmad": ("5d657d943527260064257abf", 1),
        "rrdowntown": ("5d657d943527260064257abf", 2),
    }

    cli = argparse.ArgumentParser()
    cli.add_argument("--dump", action="store_true")
    cli.add_argument("--print-logo-url", action="store_true")
    cli.add_argument("location")
    opts = cli.parse_args()

    scraper = DigitalPourParser(locations[opts.location])

    if opts.dump:
        # Raw JSON dump of the upstream payload for debugging.
        print(json.dumps(scraper.fetch(), indent=4))
    else:
        for tap in scraper.taps():
            if opts.print_logo_url:
                print(f'{tap["beer"]["name"]}\t{tap["beer"]["logo_url"]}')
            else:
                print(tap["beer"]["name"])
コード例 #58
0
                        lld_item["{#REGION}"] = Region

                    self.lld_json["data"].append(lld_item)

    def handle_endtag(self, tagname):
        """Stop capturing once the tracked <div> element closes."""
        if tagname.lower() != "div":
            return
        self.check = False


if __name__ == "__main__":
    parser = argparse.ArgumentParser(
        description=
        'Get RSS list or Zabbix LLD format output from AWS Service Health Dashboard page.'
    )
    parser.add_argument('-b',
                        '--block',
                        default="AP",
                        help='set AWS region block(e.g.:NA or SA or EU or AP)')
    parser.add_argument('-i',
                        '--interval',
                        type=int,
                        help='set interval time (seconds)')
    parser.add_argument(
        '-m',
        '--send-mode',
        default='False',
        help=
        'set True if you send AWS Service Health Dashboard status information. set False if you want to get lld format service list. (e.g.: True or False)'
    )

    block_list = ["NA", "SA", "EU", "AP"]
    args = parser.parse_args()
コード例 #59
0
ファイル: parse.py プロジェクト: samknight/parlparse
import os
import re
import urllib
from xml.sax.saxutils import escape

import bs4
import dateutil.parser
import requests
import requests_cache

# Command line arguments
# Default --date is yesterday: the previous sitting day's answers are the
# usual scrape target.
yesterday = datetime.date.today() - datetime.timedelta(days=1)
parser = argparse.ArgumentParser(
    description='Scrape/parse new Written Answers database.')
parser.add_argument('--house',
                    required=True,
                    choices=['commons', 'lords'],
                    help='Which house to fetch')
parser.add_argument('--type',
                    required=True,
                    choices=['answers', 'statements'],
                    help='What sort of thing to fetch')
parser.add_argument('--date',
                    default=yesterday.isoformat(),
                    help='date to fetch')
parser.add_argument('--members',
                    required=True,
                    help='filename of membership JSON')
parser.add_argument('--out',
                    required=True,
                    help='directory in which to place output')
# Parsed once at import time; ARGS is presumably read as a module-level
# constant by the scraping code that follows -- confirm before refactoring.
ARGS = parser.parse_args()
コード例 #60
0
    with open("/Users/hpiwowar/Downloads/perpetual_access_cleaned.csv",
              "w") as csv_file:
        csv_writer = csv.writer(csv_file, encoding="utf-8")
        header = ["username", "issn", "start_date", "end_date"]
        csv_writer.writerow(header)
        for my_dict in results:
            csv_writer.writerow([my_dict[k] for k in header])
    print "/Users/hpiwowar/Downloads/perpetual_access_cleaned.csv"


# python import_accounts.py --filename=~/Downloads/new_accounts.csv
if __name__ == "__main__":
    parser = argparse.ArgumentParser(description="Run stuff.")
    parser.add_argument("--filename",
                        type=str,
                        default=None,
                        help="input file to parse")
    parser.add_argument("--username",
                        type=str,
                        default=None,
                        help="username to input")

    parsed_args = parser.parse_args()
    parsed_vars = vars(parsed_args)

    # create_accounts(parsed_vars["filename"])
    # build_counter_import_file(filename=parsed_vars["filename"], username=parsed_vars["username"])

    crkn_ids = read_csv_file(
        u"/Users/hpiwowar/Documents/Projects/tiv2/jump-api/data/crkn_lookup.csv"
    )