def add_arguments(self, parser):
     parser.add_argument('-k', '--api-key', help='specify the api key to use (You can also set "{0}" in settings.py)'.format(
         _settingsKey), required=False, default=None)
     parser.add_argument('-t', '--throttle-rate', help='Number of seconds to put between requests. The default (non-paid) giantbomb api throttle is supposedly 200 requests per resource per hour.',
                         default=(60.0*60.0)/200.0, required=False)
     selectionGroup = parser.add_mutually_exclusive_group(required=True)
     selectionGroup.add_argument(
         '-e', '--event', help='specify an event to synchronize')
     selectionGroup.add_argument(
         '-r', '--run', help='Specify a specific run to synchronize', type=int)
     selectionGroup.add_argument(
         '-a', '--all', help='Synchronizes _all_ runs in the database (warning: due to giantbomb api throttling, this may take a long, long time.)', action='store_true', default=False)
     parser.add_argument(
         '-f', '--filter', help='A regex for game names to include (uses standard python regex syntax)', required=False, default=None)
     parser.add_argument(
         '-x', '--exclude', help='A regex for game names to exclude (a common one might be ".*setup.*"). Always case-insensitive', required=False, default=None)
     idGroup = parser.add_mutually_exclusive_group(required=False)
     idGroup.add_argument('-s', '--skip-with-id', help='Skip any games which already have a giantbomb id',
                          action='store_true', default=False, required=False)
     idGroup.add_argument('-g', '--ignore-id', help='Ignore the id on runs (helpful if an id was set incorrectly)',
                          action='store_true', default=False, required=False)
     parser.add_argument('-i', '--interactive', help='Run in interactive mode. Should be used with -s to avoid redundant queries',
                         action='store_true', default=False, required=False)
     parser.add_argument('-l', '--limit', help='Specify the maximum number of runs to return in a search query',
                         default=100, type=int, required=False)
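
For context, a minimal standalone sketch (standard library only; the event name below is illustrative) of how a required mutually exclusive group like the selectionGroup above behaves:

import argparse

parser = argparse.ArgumentParser(prog='sync')
selection = parser.add_mutually_exclusive_group(required=True)
selection.add_argument('-e', '--event', help='event to synchronize')
selection.add_argument('-r', '--run', type=int, help='run to synchronize')
selection.add_argument('-a', '--all', action='store_true', help='synchronize all runs')

args = parser.parse_args(['-e', 'AGDQ2016'])   # ok: exactly one option from the group
print(args.event)                              # AGDQ2016
# parser.parse_args([])                        # error: one of -e/-r/-a is required
# parser.parse_args(['-e', 'x', '--all'])      # error: -a/--all not allowed with -e/--event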
Example #3
def main():
    parser = argparse.ArgumentParser()
    parser.add_argument("--ignore_preempted",
                        dest="ignore_preempted",
                        action="store_true",
                        help="ignore preempted tasks")
    parser.add_argument(
        "--only_total",
        dest="only_total_cost",
        action="store_true",
        help=
        "print the total cost of the workflow instead of per-task costs as TSV")
    either = parser.add_mutually_exclusive_group(required=True)
    either.add_argument('-m',
                        '--metadata',
                        dest='metadata',
                        help='metadata file to calculate cost on')
    either.add_argument('--compare',
                        nargs=2,
                        help='compare old to new cost output')

    args = parser.parse_args()

    if args.metadata:
        with open(args.metadata) as data_file:
            metadata = json.load(data_file)
        calculate_cost(metadata, args.ignore_preempted, args.only_total_cost,
                       True)
        if args.only_total_cost:
            print("Total Cost: " + str(TOTAL_WORKFLOW_COST))
            print("Total run time (hours): " + str(TOTAL_RUN_HOURS))
    else:
        old, new = args.compare
        compare(old, new)
Example #4
def parse_args():

    parser = argparse.ArgumentParser(epilog='''
        <first> and <last> can be anything dateutil.parser.parse understands
        ''')

    parser.add_argument(
        'last',
        type=dateutil.parser.parse,
        default=(datetime.now() - ONE_DAY),
        nargs='?',
        help='last (or only) day to download (default yesterday)')
    parser.add_argument(
        '--dest',
        default='vivacity_data',
        help='directory in which to store data (default \'%(default)s\')')
    group = parser.add_mutually_exclusive_group()
    group.add_argument('--first',
                       '-f',
                       type=dateutil.parser.parse,
                       help='first day to download')
    group.add_argument('--days',
                       '-d',
                       type=int,
                       help='number of days to download')

    args = parser.parse_args()

    if args.days:
        args.first = args.last - timedelta(days=args.days - 1)
    elif args.first is None:
        args.first = args.last

    return args
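
A quick check of the date arithmetic above (values are illustrative):

from datetime import datetime, timedelta

last = datetime(2020, 1, 10)
days = 3
first = last - timedelta(days=days - 1)
print(first.date())  # 2020-01-08, so first..last spans 3 days inclusive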
Example #5
def parseArgs():
    parser = argparse.ArgumentParser()
    parser.add_argument('-d', '--debug', action='count', default=0,
                        help='debug output')
    parser.add_argument('-m', '--mailbox', default='[Gmail]/All Mail',
                        help='name of mailbox to use')
    parser.add_argument('-t', '--tokenFile', default=None,
                        help='OAuth token file')
    parser.add_argument('-u', '--username', required=True, help='IMAP username')
    group = parser.add_mutually_exclusive_group()
    group.add_argument('-b', '--before', help='counts before CCYY-MM-DD')
    group.add_argument('-o', '--on', help='counts on CCYY-MM-DD')
    group.add_argument('-s', '--since', help='counts since CCYY-MM-DD')
    group.add_argument('-l', '--list', action='store_true',
                       help='list mailboxes')
    group.add_argument('-c', '--copy', action='store_true',
                       help='copy messages to Maildir compatible filenames')
    group.add_argument('-i', '--interactiveDelete', action='store_true',
                       help='ask to delete messages one by one')
    group.add_argument('-e', '--envelopes', action='store_true',
                       help='print all envelope data')
    group.add_argument('-f', '--flags', action='store_true',
                       help='print all flags')
    group.add_argument('-a', '--append', help='append this file')
    args = parser.parse_args()
    return(args)
Example #6
def main():

    # from .state import *
    from . import utils
    import argparse

    global options

    parser = argparse.ArgumentParser()
    group = parser.add_mutually_exclusive_group()
    group.add_argument("-v",
                       "--verbose",
                       action="count",
                       default=0,
                       help="verbose logging")
    group.add_argument("-q",
                       "--quiet",
                       action="count",
                       default=0,
                       help="quiet logging")
    options, args = parser.parse_known_args()

    utils.setup_logging(options.verbose - options.quiet)

    # state.session = MLBStreamSession.new()
    # raise Exception(state.session.token)
    raise Exception(PROVIDERS)
Example #7
def add_common_args(*parsers):
    for parser in parsers:
        parser.add_argument('--verbose', '-v', action='store_true')
        parser.add_argument('--board',
                            type=str,
                            required=True,
                            help='Type of CrOS device.')
        parser.add_argument('--cros-cache',
                            type=str,
                            default=DEFAULT_CROS_CACHE,
                            help='Path to cros cache.')
        parser.add_argument(
            '--path-to-outdir',
            type=str,
            required=True,
            help='Path to output directory, all of whose contents will be '
            'deployed to the device.')
        parser.add_argument('--runtime-deps-path',
                            type=str,
                            help='Runtime data dependency file from GN.')
        parser.add_argument(
            '--vpython-dir',
            type=str,
            help=
            'Location on host of a directory containing a vpython binary to '
            'deploy to the device before the test starts. The location of '
            'this dir will be added onto PATH in the device. WARNING: The '
            'arch of the device might not match the arch of the host, so '
            'avoid using "${platform}" when downloading vpython via CIPD.')
        # TODO(bpastene): Switch all uses of "--vm-logs-dir" to "--logs-dir".
        parser.add_argument(
            '--vm-logs-dir',
            '--logs-dir',
            type=str,
            dest='logs_dir',
            help=
            'Will copy everything under /var/log/ from the device after the '
            'test into the specified dir.')
        parser.add_argument(
            '--flash',
            action='store_true',
            help=
            'Will flash the device to the current SDK version before running '
            'the test.')
        parser.add_argument(
            '--public-image',
            action='store_true',
            help='Will flash a public "full" image to the device.')

        vm_or_device_group = parser.add_mutually_exclusive_group()
        vm_or_device_group.add_argument(
            '--use-vm',
            action='store_true',
            help='Will run the test in the VM instead of a device.')
        vm_or_device_group.add_argument(
            '--device',
            type=str,
            help=
            'Hostname (or IP) of device to run the test on. This arg is not '
            'required if --use-vm is set.')
Example #8
def build_args():
    parser = argparse.ArgumentParser()
    parser.add_argument("--city",
                        default='ALL',
                        help="A city for which download events (defaults all)")
    parser.add_argument("--board",
                        help="A board ID or shortlink in the Trello",
                        required=True)
    parser.add_argument("--list",
                        help='A list name in the Trello (default:"Events")',
                        default="Wydarzenia")
    parser.add_argument("--antyflood",
                        required=False,
                        type=int,
                        help="A limit of created cards once a run",
                        default=5)

    group = parser.add_mutually_exclusive_group()
    group.add_argument("--archive-only",
                       action="store_true",
                       help="A switch to only archive on run")
    group.add_argument("--add-only",
                       action="store_true",
                       help="A switch to only add cards on run")

    return parser.parse_args()
Example #9
def parse_arguments():
    parser = argparse.ArgumentParser(description='Normalize a json.')
    group = parser.add_mutually_exclusive_group(required=True)
    group.add_argument('-f', '--file',
                       dest='file',
                       action='store',
                       help='file with twitter accounts')
    return parser.parse_args()
Example #10
def cli_main():
    parser = argparse.ArgumentParser(
        'gps_csv',
        formatter_class=argparse.RawDescriptionHelpFormatter,
        description='\n\n'.join((HELP['short'], HELP['full'])))
    parser.add_argument('infile', help='Path to csv')

    exc_group = parser.add_mutually_exclusive_group(required=True)

    exc_group.add_argument('-o', '--outfile', help='Path to output csv')
    parser.add_argument(
        '-c',
        '--columns',
        required=False,
        help='''Output column order, space separated. E.g. C1 C2 BK.
            If a path to a text file is provided instead, columns will be
            searched for on the first line in the same fashion.''',
        nargs='+')
    parser.add_argument('-r',
                        '--retain_empty',
                        action='store_true',
                        default=False,
                        help='Retain rows with empty coordinates.')
    exc_group.add_argument('-i',
                           '--inspect',
                           action='store_true',
                           default=False,
                           help='Show attributes found in CSV.')

    parser.add_argument('-d',
                        '--date_reformat',
                        action='store_true',
                        default=False,
                        help='Reformat dates to EACSD compliant format.')

    args = parser.parse_args()

    if args.columns and os.path.exists(str(args.columns[0])):
        columns = open(args.columns[0]).read().strip().split('\n')[0].split()
        if not columns:
            print('{} does not contain any column data.'.format(args.columns))
            sys.exit(1)
    else:
        columns = args.columns

    result = None
    try:
        result = reformat_csv(args.infile, args.outfile, columns, args.inspect,
                              args.retain_empty, sys.stdout,
                              args.date_reformat)
    except PermissionError:
        print('ERROR: {} is already open.'.format(args.outfile))

    if result:
        print('\nColumns: {}'.format(result))
Example #11
 def add_bool_arg(parser,
                  name,
                  help_text,
                  default=False):  # https://stackoverflow.com/a/31347222
     group = parser.add_mutually_exclusive_group(required=False)
     group.add_argument('--' + name,
                        dest=name,
                        action='store_true',
                        help=help_text)
     group.add_argument('--no-' + name, dest=name, action='store_false')
     parser.set_defaults(**{name: default})
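
A short usage sketch for the helper above, assuming add_bool_arg is in scope (the flag name and help text are illustrative):

import argparse

parser = argparse.ArgumentParser()
add_bool_arg(parser, 'cache', 'enable the on-disk cache', default=True)

print(parser.parse_args([]).cache)              # True (the default)
print(parser.parse_args(['--no-cache']).cache)  # False
# parser.parse_args(['--cache', '--no-cache'])  # error: the two flags are mutually exclusive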
Example #12
def parse_cli_args():
    parser = argparse.ArgumentParser()
    group = parser.add_mutually_exclusive_group(required=True)
    group.add_argument('--import',
                       action='store_true',
                       dest='import_queue',
                       help='import submission queue to worksheet')
    group.add_argument('--sync',
                       action='store_true',
                       help='sync worksheet to database')
    return parser.parse_args()
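
Note that --import sets dest='import_queue' because import is a reserved word in Python: argparse would otherwise derive the destination name 'import', which cannot be accessed as args.import. A minimal sketch of the same idea (standard library only):

import argparse

parser = argparse.ArgumentParser()
group = parser.add_mutually_exclusive_group(required=True)
group.add_argument('--import', action='store_true', dest='import_queue',
                   help='import submission queue to worksheet')
group.add_argument('--sync', action='store_true',
                   help='sync worksheet to database')

args = parser.parse_args(['--import'])
print(args.import_queue)  # True; without dest=, the value would only be reachable via getattr(args, 'import')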
Example #13
def parse_command_line(argv):
    me = os.path.basename(argv[0])
    format_from_filename, from_, to = filename2format(me)

    parser = argparse.ArgumentParser(description='Convert between TOML, YAML '
                                     'and JSON.')

    group = parser.add_mutually_exclusive_group()
    group.add_argument('-i', '--input', dest='input_flag', metavar='INPUT',
                       default=None, help='input file')
    group.add_argument('inputfile', nargs='?', default='-', help='input file')

    parser.add_argument('-o', '--output', dest='output', default='-',
                        help='output file')
    if not format_from_filename:
        parser.add_argument('-if', '--input-format', dest='input_format',
                            required=True, help="input format",
                            choices=FORMATS)
        parser.add_argument('-of', '--output-format', dest='output_format',
                            required=True, help="output format",
                            choices=FORMATS)
    if not format_from_filename or to == 'json':
        parser.add_argument('--indent-json', dest='indent_json',
                            action='store_const', const=2, default=None,
                            help='indent JSON output')
    if not format_from_filename or to == 'yaml':
        parser.add_argument('--yaml-style', dest='yaml_style', default=None,
                            help='YAML formatting style',
                            choices=['', '\'', '"', '|', '>'])
    parser.add_argument('--wrap', dest='wrap', default=None,
                        help='wrap the data in a map type with the given key')
    parser.add_argument('--unwrap', dest='unwrap', default=None,
                        help='only output the data stored under the given key')
    parser.add_argument('-v', '--version', action='version',
                        version=__version__)

    args = parser.parse_args(args=argv[1:])

    if args.input_flag is not None:
        args.input = args.input_flag
    else:
        args.input = args.inputfile
    if format_from_filename:
        args.input_format = from_
        args.output_format = to
        if to != 'json':
            args.__dict__['indent_json'] = None
        if to != 'yaml':
            args.__dict__['yaml_style'] = None
    args.__dict__['yaml_options'] = {'default_style': args.yaml_style}
    del args.__dict__['yaml_style']

    return args
Example #14
def parse_arguments(cmdline=None):
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument("--fields",
                        help="Comma separated list of fields to display")
    parser.add_argument("--bots",
                        action="store_true",
                        help="Short for --filter %s" % FILTER_BOTS)
    parser.add_argument(
        "--format",
        help="Format string to display fields",
        default=os.getenv("PNL_FORMAT", DEFAULT_FORMAT),
    )
    parser.add_argument("--list-fields",
                        action="store_true",
                        help="Display a list of available fields")
    parser.add_argument(
        "--filter",
        action="append",
        default=[],
        help=
        "Filter to apply. Format: <field>=<str> or <field>~<regex> or <field>!~<regex>",
    )
    parser.add_argument("--verbose",
                        "-v",
                        action="store_true",
                        help="Display debug output")
    parser.add_argument("--ncsa",
                        "--apache",
                        action="store_true",
                        help="Output in NCSA format.")

    fileoptions = parser.add_mutually_exclusive_group()
    fileoptions.add_argument("--filename",
                             help="Path of nginx logfile to parse")
    fileoptions.add_argument(
        "--today",
        action="store_true",
        help="Analyze logs and outputs today's log lines",
    )
    fileoptions.add_argument(
        "--yesterday",
        action="store_true",
        help="Analyze logs and outputs yesterday's log lines",
    )
    fileoptions.add_argument(
        "--days-ago",
        type=int,
        help="Analyze logs and outputs for a specific number of days ago",
    )
    fileoptions.add_argument(
        "--date", help="Analyze logs and outputs for a specific date")

    return parser.parse_args(cmdline)
 def add_arguments(self, parser):
     group = parser.add_mutually_exclusive_group()
     group.add_argument(
         '--hourly',
         action='store_true',
         help='Send hourly alerts'
     )
     group.add_argument(
         '--daily',
         action='store_true',
         help='Send daily alerts'
     )
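
Since this group is not required, neither flag may be supplied; a handle() method would typically branch on both options (sketch; the alert helpers are assumed, not part of the original):

def handle(self, *args, **options):
    if options['hourly']:
        send_hourly_alerts()   # assumed helper
    elif options['daily']:
        send_daily_alerts()    # assumed helper
    else:
        self.stdout.write('No alert interval selected')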
 def set_argparser(parser):
     filter_group = parser.add_mutually_exclusive_group()
     filter_group.add_argument(
         '--since', metavar="DATE", default=None,
         type=validate_date,
          help=_('show changelog entries since DATE. To avoid ambiguity, '
                 'YYYY-MM-DD format is recommended.'))
     filter_group.add_argument(
         '--count', default=None, type=int,
         help=_('show given number of changelog entries per package'))
     filter_group.add_argument(
         '--upgrades', default=False, action='store_true',
          help=_('show only new changelog entries for packages that provide an '
                 'upgrade for some of the already installed packages.'))
     parser.add_argument("package", nargs='*', metavar=_('PACKAGE'))
def parse_args(args):
    version = pkg_resources.get_distribution("seaflowpy").version

    parser = argparse.ArgumentParser(
        description=
        'A program to insert SFL file data into a popcycle sqlite3 database (version %s)'
        % version)

    parser.add_argument('-c',
                        '--cruise',
                        required=True,
                        help='cruise name, e.g. CMOP_3')
    parser.add_argument(
        '-g',
        "--gga",
        action='store_true',
        help='lat/lon input is in GGA format. Convert to decimal degree.')
    parser.add_argument(
        '-w',
        '--west',
        action='store_true',
        help="""Some ships don't provide E/W designations for longitude. Use
        this flag if this is the case and all longitudes should be West
        (negative).""")

    group = parser.add_mutually_exclusive_group(required=True)
    group.add_argument(
        '-e',
        '--evt_dir',
        help=
        'EVT data directory if specific SFL file not provided, e.g ~/SeaFlow/datafiles/evt/'
    )
    group.add_argument('-s',
                       '--sfl',
                       help='SFL file if EVT directory is not provided.')

    parser.add_argument(
        '-d',
        '--db',
        required=True,
        help='''sqlite3 database file, e.g. ~/popcycle/sqlite/popcycle.db. Will
             be created with just an sfl table if doesn't exist.''')

    parser.add_argument("--version",
                        action="version",
                        version="%(prog)s " + version)

    return parser.parse_args()
Example #18
def main():  # pragma: no cover
    """Entry point for the `vtes` command"""
    parser = ArgumentParser()
    storage = parser.add_mutually_exclusive_group()
    storage.add_argument("--journal-file",
                         dest="journal",
                         type=PickleStore,
                         default=PickleStore(pathlib.Path.home() /
                                             ".vtes-journal"))
    storage.add_argument("--journal-db", dest="journal", type=DatabaseStore)
    subcommands = parser.add_subparsers()

    add = subcommands.add_parser("add")
    add.add_argument("--date", default=None, type=dateutil.parser.parse)
    add.add_argument("--namespace", default=None, type=parse_namespace)
    add.add_argument("players", action=ParsePlayerAction, nargs='*')
    add.set_defaults(func=add_command)

    games = subcommands.add_parser("games")
    games.add_argument("--namespace", default=None, type=parse_namespace)
    games.set_defaults(func=games_command)

    gamefix = subcommands.add_parser("game-fix")
    gamefix.add_argument("game_index", type=int)
    gamefix.add_argument("--date", default=None, type=dateutil.parser.parse)
    gamefix.add_argument("--namespace", default=None, type=parse_namespace)
    gamefix.add_argument("players", action=ParsePlayerAction, nargs='*')
    gamefix.set_defaults(func=gamefix_command)

    decks = subcommands.add_parser("decks")
    decks.add_argument("player", nargs='?', default=None)
    decks.add_argument("--namespace", default=None, type=parse_namespace)
    decks.set_defaults(func=decks_command)

    stats = subcommands.add_parser("stats")
    stats.add_argument("--namespace", default=None, type=parse_namespace)
    stats.set_defaults(func=stats_command)

    args = parser.parse_args()

    command = args.func
    delattr(args, "func")
    command(**vars(args))
Example #19
 def set_argparser(parser):
     filter_group = parser.add_mutually_exclusive_group()
     filter_group.add_argument(
         '--since',
         metavar="DATE",
         default=None,
         type=validate_date,
          help=_('show changelog entries since DATE. To avoid ambiguity, '
                 'YYYY-MM-DD format is recommended.'))
     filter_group.add_argument(
         '--count',
         default=None,
         type=int,
         help=_('show given number of changelog entries per package'))
     filter_group.add_argument(
         '--upgrades',
         default=False,
         action='store_true',
          help=_(
              'show only new changelog entries for packages that provide an '
              'upgrade for some of the already installed packages.'))
     parser.add_argument("package", nargs='*', metavar=_('PACKAGE'))
Example #20
  def __createParser(self):
    """
    Construct an argparse parser for use with this context to parse command
    line arguments.

    @returns An argparse parser object which can be used to parse command line
             arguments, specific to git-branchhealth.
    """

    parser = argparse.ArgumentParser(description='''
       Show health (time since creation) of git branches, in order.
    ''', add_help=True)
    parser.add_argument('-v', action='count', dest='logLevel',
                        help='Specify how verbose the output should be (-v to -vvv)')
    parser.add_argument('-b', '--bad-only', action='store_true',
                        help='Only show branches that are ready for pruning (i.e. older than numDays * 2)',
                        dest='badOnly')
    parser.add_argument('-d', '--days', action='store', dest='numDays',
                        help='Specify number of days old where a branch is considered to no longer be \'healthy\'',
                        default=14)
    parser.add_argument('-n', '--nocolor', action='store_true', help="Don't use ANSI colors to display branch health",
                        dest='noColor')
    parser.add_argument('-R', '--repository', action='store', metavar=('repository'), nargs='?',
                        default='.', dest='repo', help='Path to git repository where branches should be listed')
    parser.add_argument('-D', '--delete', action='store_true', dest='deleteOld',
                        help='Delete old branches that are considered "unhealthy"')
    parser.add_argument('-i', '--ignore-branches', action='store', dest='ignoredBranches', default='master',
                        help='Ignore a set of branches specified by a comma-separated list of branch names')
    parser.add_argument('-t', '--trunk', action='store', metavar=('trunkBranch'), dest='trunkBranch',
                        default='master', help='Specify the trunk branch name for the given repository')

    # Make sure that only one of -r and --all-remotes is specified
    remoteGroup = parser.add_mutually_exclusive_group()
    remoteGroup.add_argument('-r', '--remote', metavar=('<remote name>'), action='store',
                             help='Operate on specified remote', default=None,
                             dest='remote')
    remoteGroup.add_argument('--all-remotes', action='store_true', dest='allRemotes',
                             help='Run a branch health check for all remotes, including local branches')


    return parser
Example #21
def parse_args(args):
    version = pkg_resources.get_distribution("seaflowpy").version

    parser = argparse.ArgumentParser(
        description='A program to insert SFL file data into a popcycle sqlite3 database (version %s)' % version)

    parser.add_argument(
        '-c', '--cruise',
        required=True,
        help='cruise name, e.g. CMOP_3')
    parser.add_argument(
        '-g', "--gga", action='store_true',
        help='lat/lon input is in GGA format. Convert to decimal degree.')
    parser.add_argument(
        '-w', '--west', action='store_true',
        help="""Some ships don't provide E/W designations for longitude. Use
        this flag if this is the case and all longitudes should be West
        (negative).""")

    group = parser.add_mutually_exclusive_group(required=True)
    group.add_argument(
        '-e', '--evt_dir',
        help='EVT data directory if specific SFL file not provided, e.g ~/SeaFlow/datafiles/evt/')
    group.add_argument(
        '-s', '--sfl',
        help='SFL file if EVT directory is not provided.')

    parser.add_argument(
        '-d', '--db',
        required=True,
        help='''sqlite3 database file, e.g. ~/popcycle/sqlite/popcycle.db. Will
             be created with just an sfl table if doesn't exist.''')

    parser.add_argument("--version", action="version", version="%(prog)s " + version)

    return parser.parse_args()
Example #22
def dateof(tag_name, tags):
    """Given a list of tags, returns the datetime of the tag with the given name; Otherwise None."""
    for tag in tags:
        if tag['name'] == tag_name:
            r = urllib2.urlopen(tag['commit']['url']).read()
            commit = json.loads(r)
            return parse_timestamp(commit['commit']['committer']['date'])
    return None


if __name__ == '__main__':
    parser = argparse.ArgumentParser(
        description=
        'Creates a change log since the given tag using the github API.')
    after_group = parser.add_mutually_exclusive_group(required=True)
    after_group.add_argument(
        '-d',
        metavar='DATE',
        type=str,
        help='select changes that occurred after the given ISO8601 date')
    after_group.add_argument(
        '-t',
        metavar='TAG',
        type=str,
        help='select changes that occurred after the given git tag')
    parser.add_argument(
        '-v',
        metavar='VERSION',
        type=str,
        help="generate header using today's date and the given version")
Example #23
def main():
    """main function"""
    parser = argparse.ArgumentParser(
        description='Github within the Command Line')
    group = parser.add_mutually_exclusive_group()
    group.add_argument('-n', '--url', type=str,
                       help="Get repos from the user profile's URL")
    group.add_argument('-r', '--recursive', type=str,
                       help="Get the file structure from the repo link")
    group.add_argument('-R', '--readme', type=str,
                       help="Get the raw version of the repo readme from repo link")
    group.add_argument('-re', '--releases', type=str,
                       help="Get the list of releases from repo link")
    group.add_argument('-dt', '--tarball', type=str,
                       help="Download the tarball of the given repo")
    group.add_argument('-dz', '--zipball', type=str,
                       help="Download the zipball of the given repo")
    group.add_argument('-op', '--openfile', type=str,
                       help="Show the contents of the given file in a repo")
    group.add_argument('-f', '--followers', type=str,
                       help="Get followers of the user")
    group.add_argument('-fo', '--following', type=str,
                       help="Get people following the user")
    group.add_argument('-c', '--contributors', type=str,
                       help="Get contributors of a repo")

    if len(sys.argv) == 1:
        parser.print_help()
        return
    args = parser.parse_args()

# URL

    if args.url:
        name = url_parse(args.url)
        url = GITHUB_API + 'users/' + name + '/repos'

# TREE

    if args.recursive:
        name = url_parse(args.recursive)
        url = GITHUB_API + 'repos/' + name + '/branches/master'
        response = get_req(url)
        jsondata = json.loads(response)
        sha = jsondata['commit']['commit']['tree']['sha']
        url = GITHUB_API + 'repos/' + name + '/git/trees/' + sha + '?recursive=1'

# README

    if args.readme:
        name = url_parse(args.readme)
        url = GITHUB_API + 'repos/' + name + '/readme'

# RELEASES

    if args.releases:
        name = url_parse(args.releases)
        url = GITHUB_API + 'repos/' + name + '/releases'

# TARBALL/ZIPBALL

    if args.tarball or args.zipball:
        if args.tarball:
            key = '/tarball/'
            name = url_parse(args.tarball)
        if args.zipball:
            key = '/zipball/'
            name = url_parse(args.zipball)
        url = GITHUB_API + 'repos/' + name + key + 'master'

# OPEN ONE FILE

    if args.openfile:
        name = url_parse(args.openfile)
        position = name.find('/')
        user = name[:position+1]
        rest = name[position+1:]
        position = rest.find('/')
        repo = rest[:position+1]
        rest = rest[position+1:]
        url = GITHUB_API + 'repos/' + user + repo + 'contents/' + rest

# GET RESPONSES

# TARBALL/ZIPBALL

    if args.tarball or args.zipball:
        response_url = geturl_req(url)
        position = name.find('/')
        name = name[position+1:]
        if args.tarball:
            name = name+'.tar.gz'
        if args.zipball:
            name = name+'.zip'
        print("\nDownloading " + name + '...\n')
        urllib.request.urlretrieve(response_url, name)
        print(name + ' has been saved\n')
        return

# FOLLOWERS

    if args.followers:
        name = url_parse(args.followers)
        url = GITHUB_API + 'users/' + name + '/followers'

#FOLLOWING
    if args.following:
        name = url_parse(args.following)
        url = GITHUB_API + 'users/' + name + '/following'

#CONTRIBUTORS
    if args.contributors:
        name = url_parse(args.contributors)
        url = GITHUB_API + 'repos/' + name + '/contributors'

# OTHER OPTIONS

    response = get_req(url)
    jsondata = json.loads(response)

# USERNAME and URL

    if args.url:
        table = PrettyTable([" Repository ", "★ Star"])
        table.align[" Repository "] = "l"
        for i in jsondata:
            table.add_row([i['name'], i['stargazers_count']])
        print(table)

# RECURSIVE TREE

    if args.recursive:
        table = PrettyTable([" File/Folder ", " Size (Bytes) "])
        table.align[" File/Folder "] = "l"
        for i in jsondata['tree']:
            size = '-'
            path = i['path']+'/'
            if i['type'] == 'blob':
                size = i['size']
                path = path[:-1]
            table.add_row([path, size])
        print(table)

# README

    if args.readme:
        print(base64.b64decode(jsondata['content']).decode('utf-8'))

# RELEASES
    if args.releases:
        table = PrettyTable([" Release name ", " Release Date ", " Release Time "])
        for i in jsondata:
            time = str(dateutil.parser.parse(i['published_at']))
            date = time[:10]
            time = time[11:]
            time = time[:5]
            time = time + ' UTC'
            table.add_row([i['tag_name'], date, time])
        print(table)

# OPEN ONE FILE

    if args.openfile:
        try:
            print(base64.b64decode(jsondata['content']).decode('utf-8'))
            return
        except:
            print("\nDirectory URL was given, hence its contents will be displayed\n")
            table = PrettyTable(["Folder Contents"])
            for i in jsondata:
                table.add_row([i['name']])
            print(table)

# GET FOLLOWERS
    if args.followers:
        table = PrettyTable([" FOLLOWERS "])
        table.align[" FOLLOWERS "] = "l"
        for i in jsondata:
            table.add_row([i['login']])
        print("Number of followers:"+str(len(jsondata)))
        print(table)

# GET FOLLOWING
    if args.following:
        table = PrettyTable([" FOLLOWING "])
        table.align[" FOLLOWING "] = "l"
        for i in jsondata:
            table.add_row([i['login']])
        print("Number of following:"+str(len(jsondata)))
        print(table)

# GET CONTRIBUTORS
    if args.contributors:
        table = PrettyTable(["	CONTRIBUTORS "])
        table.align[" CONTRIBUTORS "] = "l"
        for i in jsondata:
            table.add_row([i['login']])
        print("Number of contributors:"+str(len(jsondata)))
        print(table)
Example #24
def main():
    parser = argparse.ArgumentParser()
    parser.add_argument("outfile", help="output filename")
    group = parser.add_mutually_exclusive_group(required=True)
    group.add_argument("-u", dest="umtfile",
                       help="User Motion Trajectory input file")
    group.add_argument("-e", dest="eceftraj",
                       help="Simple single-point trajectory: "
                       "ECEF x,y,z[,vx,vy,vz[,ax,ay,az[,jx,jy,jz]]] "
                       "e.g. 6378137,0,0")
    parser.add_argument("--no-repair", dest="repair", action='store_false',
                       help="Do not attempt to repair a discontinuous input trajectory")
    parser.add_argument("-s", dest="t_start", default=0.0, type=float,
                       help="Start time (seconds)")
    parser.add_argument("-l", dest="t_run", type=float,
                       help="Run length (seconds)")
    parser.add_argument("-t", dest="t_step", default=0.004, type=float,
                        help="Time step (seconds, default: %(default).3f)")
    parser.add_argument("-g", dest="gps_time", default="2015-05-25 00:00:00",
                       help="GPS time referred to trajectory zero time "
                       '(default: "%(default)s")')
    parser.add_argument("-n", dest="noise", type=int,
                       help="Add Gaussian noise - bear in mind int8 range; "
                       "signal amplitude is 16. NOISE=60 to 70 is reasonable.")
    parser.add_argument("-f", dest="outformat", default="int8",
                        choices=['piksi', 'int8', '1bit', '1bitrev'],
                        help="output file format (default: %(default)s)")
    parser.add_argument("-i", dest="fi", default=4092000.0, type=float,
                       help="Intermediate frequency (default: %(default).0f)")
    parser.add_argument("-r", dest="fs", default=16368000.0, type=float,
                       help="Sampling rate (default: %(default).0f)")
    parser.add_argument("-p", dest="prns",
                       help="Comma-separated 1-indexed PRNs to simulate (default: autoselect)")
    parser.add_argument("-v", dest="verbose", action='store_true',
                       help="Increase verbosity")
    args = parser.parse_args()

    if args.umtfile:
        traj = load_umt(args.umtfile);
        if len(traj) < 1:
            print "Couldn't load any trajectory points from %s." % args.umtfile
            return 1
        smooth = check_smooth(traj)
        if not smooth:
            print "WARNING: Input trajectory may not be sufficiently smooth for cycle-accurate results."
            if args.repair:
                print "Repairs will be attempted but may result in momentary large accelerations."

    if args.eceftraj:
        traj = np.array([[0.0] + [float(x) for x in args.eceftraj.split(',')]])
        traj.resize(1,13)  # Pad any unspecified values with zeros
        print "Using Taylor trajectory:"
        print pvaj(traj[0])

    if not args.t_run:
        args.t_run = max(traj[-1][0] - args.t_start, 60)
        print "Run length not specified; using %.1f seconds" % args.t_run

    gpst0 = dateutil.parser.parse(args.gps_time)

    settings = peregrine.initSettings.initSettings()
    ephems = eph.obtain_ephemeris(gpst0, settings)

    if args.prns:
        prns = [int(p) - 1 for p in args.prns.split(',')]
    else:
        [x,y,z] = interp_pv(traj, args.t_start)[0]
        [lat,lon,h] = coord.wgsecef2llh_(x, y, z)
        print "Finding satellites visible above %.2f, %.2f, %.0f on %s" % (
            np.degrees(lat), np.degrees(lon), h,
            gpst0 + timedelta(seconds=args.t_start))
        prns = peregrine.warm_start.whatsup(ephems, [x,y,z], gpst0, mask=10)
    print "Using PRNs:", [p + 1 for p in prns]
    print "Generating samples..."
    s = gen_signal(ephems, traj, gpst0, repair_unsmooth=args.repair,
                   t_run=args.t_run, t_skip=args.t_start, t_step=args.t_step,
                   fs=args.fs, fi=args.fi, prns=prns)
    if args.noise:
        print "Adding noise..."
        add_noise(s, args.noise)
    print "Writing output..."
    peregrine.samples.save_samples(args.outfile, s, file_format=args.outformat)
    print "Saved", args.outfile
Example #25
def parse_command_line(argv):
    me = os.path.basename(argv[0])
    format_from_argv0, argv0_from, argv0_to = argv0_to_format(me)

    parser = argparse.ArgumentParser(
        description='Convert between TOML, MessagePack, YAML, JSON, and CBOR.')

    input_group = parser.add_mutually_exclusive_group()
    input_group.add_argument('input',
                             nargs='?',
                             default='-',
                             help='input file')
    input_group.add_argument('-i',
                             '--input',
                             dest='input_flag',
                             metavar='input',
                             default=None,
                             help='input file')

    output_group = parser.add_mutually_exclusive_group()
    output_group.add_argument('output',
                              nargs='?',
                              default='-',
                              help='output file')
    output_group.add_argument('-o',
                              '--output',
                              dest='output_flag',
                              metavar='output',
                              default=None,
                              help='output file')

    if not format_from_argv0:
        parser.add_argument('--if',
                            '-if',
                            '--input-format',
                            dest='input_format',
                            help="input format",
                            choices=FORMATS)
        parser.add_argument('--of',
                            '-of',
                            '--output-format',
                            dest='output_format',
                            help="output format",
                            choices=FORMATS)

    if not format_from_argv0 or argv0_to == 'json':
        parser.add_argument('--indent-json',
                            dest='indent_json',
                            metavar='n',
                            type=int,
                            default=None,
                            help='indent JSON output')

    if not format_from_argv0 or argv0_to == 'yaml':
        parser.add_argument('--yaml-style',
                            dest='yaml_style',
                            default=None,
                            help='YAML formatting style',
                            choices=['', '\'', '"', '|', '>'])

    parser.add_argument('--wrap',
                        dest='wrap',
                        metavar='key',
                        default=None,
                        help='wrap the data in a map type with the given key')
    parser.add_argument('--unwrap',
                        dest='unwrap',
                        metavar='key',
                        default=None,
                        help='only output the data stored under the given key')
    parser.add_argument('-p',
                        '--preserve-key-order',
                        dest='ordered',
                        action='store_true',
                        help='preserve the order of dictionary/mapping keys')
    parser.add_argument('-v',
                        '--version',
                        action='version',
                        version=__version__)

    args = parser.parse_args(args=argv[1:])

    # Use the positional input and output arguments.
    if args.input_flag is not None:
        args.input = args.input_flag

    if args.output_flag is not None:
        args.output = args.output_flag

    # Determine the implicit input and output format if possible.
    if format_from_argv0:
        args.input_format = argv0_from
        args.output_format = argv0_to

        if argv0_to != 'json':
            args.__dict__['indent_json'] = None
        if argv0_to != 'yaml':
            args.__dict__['yaml_style'] = None
    else:
        if args.input_format is None:
            args.input_format = extension_to_format(args.input)
            if args.input_format is None:
                parser.error('Need an explicit input format')

        if args.output_format is None:
            args.output_format = extension_to_format(args.output)
            if args.output_format is None:
                parser.error('Need an explicit output format')

    # Wrap yaml_style.
    args.__dict__['yaml_options'] = {'default_style': args.yaml_style}
    del args.__dict__['yaml_style']

    return args
Example #26
    return json.loads(r)


def dateof(tag_name, tags):
    """Given a list of tags, returns the datetime of the tag with the given name; Otherwise None."""
    for tag in tags:
        if tag['name'] == tag_name:
            r = urllib2.urlopen(tag['commit']['url']).read()
            commit = json.loads(r)
            return parse_timestamp(commit['commit']['committer']['date'])
    return None


if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='Creates a bokeh changelog using the github API.')
    after_group = parser.add_mutually_exclusive_group(required=True)
    after_group.add_argument('-d', metavar='DATE',
                             help='select changes that occurred after the given ISO8601 date')
    after_group.add_argument('-t', metavar='TAG',
                             help='select changes that occurred after the given git tag')
    parser.add_argument('-v', metavar='VERSION',
                        help="generate header using today's date and the given version")
    args = parser.parse_args()

    if args.t:
        tags = query_tags()
        after = dateof(args.t, tags)
        label = 'Since {:>14}:'.format(args.t)
    elif args.d:
        after = dateutil.parser.parse(args.d)
        after = after.replace(tzinfo=dateutil.tz.tzlocal())
Example #27
def add_common_args(*parsers):
    for parser in parsers:
        parser.add_argument('--verbose', '-v', action='store_true')
        parser.add_argument('--board',
                            type=str,
                            required=True,
                            help='Type of CrOS device.')
        parser.add_argument('--cros-cache',
                            type=str,
                            default=DEFAULT_CROS_CACHE,
                            help='Path to cros cache.')
        parser.add_argument(
            '--path-to-outdir',
            type=str,
            required=True,
            help='Path to output directory, all of whose contents will be '
            'deployed to the device.')
        parser.add_argument('--runtime-deps-path',
                            type=str,
                            help='Runtime data dependency file from GN.')
        parser.add_argument(
            '--vpython-dir',
            type=str,
            help=
            'Location on host of a directory containing a vpython binary to '
            'deploy to the device before the test starts. The location of '
            'this dir will be added onto PATH in the device. WARNING: The '
            'arch of the device might not match the arch of the host, so '
            'avoid using "${platform}" when downloading vpython via CIPD.')
        parser.add_argument(
            '--logs-dir',
            type=str,
            dest='logs_dir',
            help=
            'Will copy everything under /var/log/ from the device after the '
            'test into the specified dir.')
        # Shard args are parsed here since we might also specify them via env vars.
        parser.add_argument('--test-launcher-shard-index',
                            type=int,
                            default=os.environ.get('GTEST_SHARD_INDEX', 0),
                            help='Index of the external shard to run.')
        parser.add_argument('--test-launcher-total-shards',
                            type=int,
                            default=os.environ.get('GTEST_TOTAL_SHARDS', 1),
                            help='Total number of external shards.')
        parser.add_argument(
            '--flash',
            action='store_true',
            help=
            'Will flash the device to the current SDK version before running '
            'the test.')
        parser.add_argument(
            '--public-image',
            action='store_true',
            help='Will flash a public "full" image to the device.')

        vm_or_device_group = parser.add_mutually_exclusive_group()
        vm_or_device_group.add_argument(
            '--use-vm',
            action='store_true',
            help='Will run the test in the VM instead of a device.')
        vm_or_device_group.add_argument(
            '--device',
            type=str,
            help=
            'Hostname (or IP) of device to run the test on. This arg is not '
            'required if --use-vm is set.')
Example #28
def generate_changelog(issues, after, heading):
    """Prints out changelog."""
    relevent = relevant_issues(issues, after)
    relevent = sorted(relevent, key=ISSUES_SORT_KEY)

    print(heading + '\n' + '-' * 20)
    for section, issue_group in groupby(relevent, key=ISSUES_BY_SECTION):
        print('  * {}:'.format(section))
        for issue in issue_group:
            print('    - {}'.format(issue_line(issue)))


if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='Creates a bokeh changelog using the github API.')

    limit_group = parser.add_mutually_exclusive_group(required=True)
    limit_group.add_argument('-d', '--since-date', metavar='DATE',
                             help='select issues that occurred after the given ISO8601 date')
    limit_group.add_argument('-t', '--since-tag', metavar='TAG',
                             help='select issues that occurred after the given git tag')

    parser.add_argument('-c', '--check', action='store_true', default=False,
                        help='check closed issues for BEP 1 compliance')

    data_group = parser.add_mutually_exclusive_group()
    data_group.add_argument('-s', '--save-data', action='store_true', default=False,
                            help='save api query result data; useful for testing')
    data_group.add_argument('-l', '--load-data', action='store_true', default=False,
                            help='load api data from previously saved data; useful for testing')

    args = parser.parse_args()
Example #29
    string = b"\xfe\xff"+string.encode("utf-16-be")
    string = string.replace(b'\\', b'\\\\')
    string = string.replace(b'(', b'\\(')
    string = string.replace(b')', b'\\)')
    return string

parser = argparse.ArgumentParser(
    description='Lossless conversion/embedding of images (in)to pdf')
parser.add_argument(
    'images', metavar='infile', type=str,
    nargs='+', help='input file(s)')
parser.add_argument(
    '-o', '--output', metavar='out', type=argparse.FileType('wb'),
    default=getattr(sys.stdout, "buffer", sys.stdout), help='output file (default: stdout)')

sizeopts = parser.add_mutually_exclusive_group()
sizeopts.add_argument(
    '-d', '--dpi', metavar='dpi', type=positive_float,
    help='dpi for pdf output. If input image does not specify dpi the default is 96.0. Must not be specified together with -s/--pagesize.')
sizeopts.add_argument(
    '-s', '--pagesize', metavar='size', type=valid_size,
    default=(None, None),
    help='size of the pages in the pdf output in format AxB with A and B being width and height of the page in points. You can omit either one of them. Must not be specified together with -d/--dpi.')

parser.add_argument(
    '-t', '--title', metavar='title', type=pdf_embedded_string,
    help='title for metadata')
parser.add_argument(
    '-a', '--author', metavar='author', type=pdf_embedded_string,
    help='author for metadata')
parser.add_argument(
Example #30
def parse_command_line(argv):
    me = os.path.basename(argv[0])
    format_from_filename, from_, to = filename2format(me)

    parser = argparse.ArgumentParser(description='Convert between TOML, YAML '
                                     'and JSON.')

    group = parser.add_mutually_exclusive_group()
    group.add_argument('-i',
                       '--input',
                       dest='input_flag',
                       metavar='INPUT',
                       default=None,
                       help='input file')
    group.add_argument('inputfile', nargs='?', default='-', help='input file')

    parser.add_argument('-o',
                        '--output',
                        dest='output',
                        default='-',
                        help='output file')
    if not format_from_filename:
        parser.add_argument('-if',
                            '--input-format',
                            dest='input_format',
                            required=True,
                            help="input format",
                            choices=FORMATS)
        parser.add_argument('-of',
                            '--output-format',
                            dest='output_format',
                            required=True,
                            help="output format",
                            choices=FORMATS)
    if not format_from_filename or to == 'json':
        parser.add_argument('--indent-json',
                            dest='indent_json',
                            action='store_const',
                            const=2,
                            default=None,
                            help='indent JSON output')
    if not format_from_filename or to == 'yaml':
        parser.add_argument('--yaml-style',
                            dest='yaml_style',
                            default=None,
                            help='YAML formatting style',
                            choices=['', '\'', '"', '|', '>'])
    parser.add_argument('--wrap',
                        dest='wrap',
                        default=None,
                        help='wrap the data in a map type with the given key')
    parser.add_argument('--unwrap',
                        dest='unwrap',
                        default=None,
                        help='only output the data stored under the given key')
    parser.add_argument('-v',
                        '--version',
                        action='version',
                        version=__version__)

    args = parser.parse_args(args=argv[1:])

    if args.input_flag is not None:
        args.input = args.input_flag
    else:
        args.input = args.inputfile
    if format_from_filename:
        args.input_format = from_
        args.output_format = to
        if to != 'json':
            args.__dict__['indent_json'] = None
        if to != 'yaml':
            args.__dict__['yaml_style'] = None
    args.__dict__['yaml_options'] = {'default_style': args.yaml_style}
    del args.__dict__['yaml_style']

    return args
def _parse_cli_args() -> Namespace:
    parser = ArgumentParser(
        description="get French television listings using Bouygues Telecom "
        "mobile API in XMLTV format"
    )
    parser.add_argument(
        "--description",
        action="store_true",
        help="print the description for this grabber",
    )
    parser.add_argument(
        "--version",
        action="store_true",
        help="show the version of this grabber",
    )
    parser.add_argument(
        "--capabilities",
        action="store_true",
        help="show the capabilities this grabber supports",
    )
    parser.add_argument(
        "--configure",
        action="store_true",
        help="generate the configuration file by asking the users which "
        "channels to grab",
    )
    parser.add_argument(
        "--days",
        type=int,
        default=_DEFAULT_DAYS,
        help="grab DAYS days of TV data (default: %(default)s)",
    )
    parser.add_argument(
        "--offset",
        type=int,
        default=_DEFAULT_OFFSET,
        help="grab TV data starting at OFFSET days in the future (default: "
        "%(default)s)",
    )
    parser.add_argument(
        "--output",
        type=Path,
        default=Path("/dev/stdout"),
        help="write the XML data to OUTPUT instead of the standard output",
    )
    parser.add_argument(
        "--config-file",
        type=Path,
        default=_DEFAULT_CONFIG_FILE,
        help="file name to write/load the configuration to/from (default: "
        "%(default)s)",
    )

    log_level_group = parser.add_mutually_exclusive_group()
    log_level_group.add_argument(
        "--quiet",
        action="store_true",
        help="only print error-messages on STDERR",
    )
    log_level_group.add_argument(
        "--debug",
        action="store_true",
        help="provide more information on progress to stderr to help in"
        "debugging",
    )

    return parser.parse_args()
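
The grabber above follows the XMLTV baseline convention of --description, --version, --capabilities and --configure flags. A hedged sketch of how a main() might dispatch on them; _DESCRIPTION, __version__, _configure() and _grab() are assumed names, not taken from the original module:

def main() -> None:
    args = _parse_cli_args()
    if args.description:
        print(_DESCRIPTION)           # assumed module-level constant
    elif args.version:
        print(__version__)
    elif args.capabilities:
        print("baseline")
    elif args.configure:
        _configure(args.config_file)  # assumed helper
    else:
        _grab(args)                   # assumed helper doing the actual grabbing
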
Example #32
def main():
    """ Main entry point

        :param args: argparse.ArgumentParser object
    """
    parser = argparse.ArgumentParser(description='Show Phase of the Moon')
    parser.add_argument('-n',
                        '--lines',
                        help='Number of lines to display (size of the moon)',
                        required=False,
                        default=DEFAULTNUMLINES)
    parser.add_argument('-x',
                        '--notext',
                        help='Print no additional information, just the moon',
                        required=False,
                        default=DEFAULTNOTEXT,
                        action="store_true")
    parser.add_argument(
        'date',
        help=
        'Date for which the phase of the Moon must be shown. Today by default',
        nargs='?',
        default=time.strftime("%Y-%m-%d %H:%M:%S"))
    parser.add_argument(
        '-l',
        '--language',
        help=
        'Locale in which the phase of the Moon must be shown. English by default',
        nargs='?',
        default=None)

    hemisphere_group = parser.add_mutually_exclusive_group()
    hemisphere_group.add_argument(
        '-s',
        '--hemisphere',
        help=
        'Earth hemisphere from which to observe the Moon. North by default',
        required=False,
        choices=['north', 'south'])

    hemisphere_group.add_argument(
        '-S',
        '--hemispherewarning',
        help=('The same as -s and --hemisphere, but shows a hemisphere '
              'reminder under the phase text.'),
        required=False,
        choices=['north', 'south'])

    args = vars(parser.parse_args())

    try:
        dateobj = time.mktime(dateutil.parser.parse(args['date']).timetuple())
    except Exception as err:  # pylint: disable=broad-except
        fatal(f"Can't parse date: {args['date']}")

    try:
        numlines = int(args['lines'])
        lang = args['language']
    except Exception as err:  # pylint: disable=broad-except
        print(err)
        fatal("Number of lines must be integer")

    try:
        notext = bool(args['notext'])
    except Exception as err:  # pylint: disable=broad-except
        print(err)

    try:
        hemisphere = str(args['hemisphere'])
    except Exception as err:  # pylint: disable=broad-except
        print(err)

    try:
        hemisphere_warning = str(args['hemispherewarning'])
    except Exception as err:  # pylint: disable=broad-except
        print(err)

    if hemisphere == 'None':
        hemisphere = hemisphere_warning if hemisphere_warning != 'None' else DEFAULTHEMISPHERE

    print(
        putmoon(dateobj, numlines, '@', notext, lang, hemisphere,
                hemisphere_warning))
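
The example calls a fatal() helper that is not shown. A minimal sketch, assuming it simply reports the message on stderr and aborts:

import sys

def fatal(message):
    # Assumed behaviour: print the error and exit with a non-zero status.
    print(message, file=sys.stderr)
    sys.exit(1)
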
Example #33
def parse_command_line(argv):
    me = os.path.basename(argv[0])
    format_from_argv0, argv0_from, argv0_to = argv0_to_format(me)

    parser = argparse.ArgumentParser(description='Convert between TOML, YAML '
                                     'and JSON.')

    input_group = parser.add_mutually_exclusive_group()
    input_group.add_argument('input',
                             nargs='?',
                             default='-',
                             help='input file')
    input_group.add_argument('-i', '--input',
                             dest='input_flag',
                             metavar='input',
                             default=None,
                             help='input file')

    output_group = parser.add_mutually_exclusive_group()
    output_group.add_argument('output',
                              nargs='?',
                              default='-',
                              help='output file')
    output_group.add_argument('-o', '--output',
                              dest='output_flag',
                              metavar='output',
                              default=None,
                              help='output file')

    if not format_from_argv0:
        parser.add_argument('--if', '-if', '--input-format',
                            dest='input_format',
                            help="input format",
                            choices=FORMATS)
        parser.add_argument('--of', '-of', '--output-format',
                            dest='output_format',
                            help="output format",
                            choices=FORMATS)

    if not format_from_argv0 or argv0_to == 'json':
        parser.add_argument('--indent-json',
                            dest='indent_json',
                            metavar='n',
                            type=int,
                            default=None,
                            help='indent JSON output')

    if not format_from_argv0 or argv0_to == 'yaml':
        parser.add_argument('--yaml-style',
                            dest='yaml_style',
                            default=None,
                            help='YAML formatting style',
                            choices=['', '\'', '"', '|', '>'])

    parser.add_argument('--wrap',
                        dest='wrap',
                        metavar='key',
                        default=None,
                        help='wrap the data in a map type with the given key')
    parser.add_argument('--unwrap',
                        dest='unwrap',
                        metavar='key',
                        default=None,
                        help='only output the data stored under the given key')
    parser.add_argument('--preserve-key-order',
                        dest='ordered',
                        action='store_true',
                        help='preserve the order of dictionary/mapping keys')
    parser.add_argument('-v', '--version',
                        action='version',
                        version=__version__)

    args = parser.parse_args(args=argv[1:])

    # Use the positional input and output arguments.
    if args.input_flag is not None:
        args.input = args.input_flag

    if args.output_flag is not None:
        args.output = args.output_flag

    # Determine the implicit input and output format if possible.
    if format_from_argv0:
        args.input_format = argv0_from
        args.output_format = argv0_to

        if argv0_to != 'json':
            args.__dict__['indent_json'] = None
        if argv0_to != 'yaml':
            args.__dict__['yaml_style'] = None
    else:
        if args.input_format is None:
            args.input_format = extension_to_format(args.input)
            if args.input_format is None:
                parser.error('Need an explicit input format')

        if args.output_format is None:
            args.output_format = extension_to_format(args.output)
            if args.output_format is None:
                parser.error('Need an explicit output format')

    # Wrap yaml_style.
    args.__dict__['yaml_options'] = {'default_style': args.yaml_style}
    del args.__dict__['yaml_style']

    return args
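
The implicit-format branch above leans on extension_to_format() (and argv0_to_format()), which are not shown. A hedged sketch of the extension lookup, assuming FORMATS matches the converter's description; the real helper may map more extensions:

import os

FORMATS = ['json', 'toml', 'yaml']  # assumption based on the parser description

def extension_to_format(path):
    # '-' (stdin/stdout) carries no extension information.
    if path == '-':
        return None
    ext = os.path.splitext(path)[1].lstrip('.').lower()
    return ext if ext in FORMATS else None
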
def main():

    parser = argparse.ArgumentParser(description="Generates sentiment analysis data a")
    parser.add_argument("file", type=str, default="", help="The Twitter dump's path.")
    
    parser.add_argument("--output", type=argparse.FileType("w"), default=sys.stdout, help="The file to which the output should be redirected. Default is stdout.")
    
    group = parser.add_mutually_exclusive_group()
    group.add_argument("--sentiwordnet", type=str, help="Generate sentiment analysis with the SentiWordNet lexicon provided")
    group.add_argument("--textblob", action="store_true", help="Generate sentiment analysis with the TextBlob module")
    group.add_argument("--smileys", action="store_true", help="Generate sentiment analysis based on the presence of smileys")
    

    global args
    args = parser.parse_args()


    # Load tweets
    twitterDumpController = TwitterDumpController(args.file)
    tweets = twitterDumpController.load()

    # Analyze tweets
    result = {}

    if args.sentiwordnet is not None:
        print "Loading SentiWordNet lexicon.."
        sentiWordNetController = SentiWordNetController(args.sentiwordnet)
        sentiWordNetController.load()
        print "Loaded SentiWordNet lexicon."

        print "Analyzing tweets.."
        count = 0
        for tweet in tweets:
            tweetAnalysis = sentiWordNetController.analyzeSentence(tweet["text"])

            date = dateutil.parser.parse(tweet["created_at"]).strftime("%Y-%m-%d")

            if date in result:
                result[date].append(tweetAnalysis)
            else:
                result[date] = [tweetAnalysis]

            count += 1
            if count % 500 == 0:
                print "Analyzed", count, "tweets.."

        print "Analyzed", count, "tweets."

    elif args.textblob:
        from textblob import TextBlob

        print "Analyzing tweets.."
        count = 0
        for tweet in tweets:
            blob = TextBlob(tweet["text"])

            tweetAnalysis = {"polarity": 0, "subjectivity": 0}
            for sentence in blob.sentences:
                tweetAnalysis["polarity"] += sentence.sentiment.polarity
                tweetAnalysis["subjectivity"] += sentence.sentiment.subjectivity
            if len(blob.sentences) > 0:
                tweetAnalysis["polarity"] /= len(blob.sentences)
                tweetAnalysis["subjectivity"] /= len(blob.sentences)

            date = dateutil.parser.parse(tweet["created_at"]).strftime("%Y-%m-%d")

            if date in result:
                result[date].append(tweetAnalysis)
            else:
                result[date] = [tweetAnalysis]

            count += 1
            if count % 500 == 0:
                print "Analyzed", count, "tweets.."

        print "Analyzed", count, "tweets."

    elif args.smileys:
        print "Analyzing tweets.."
        count = 0
        for tweet in tweets:
            tweetAnalysis = {"sentiment": 0}
            if any([x in tweet["text"] for x in [":)", ":D", ":d", ";)", "=)", ":>"]]):
                tweetAnalysis = {"sentiment": 1}
            elif any([x in tweet["text"] for x in [":(", ";(", "=(", ":<"]]):
                tweetAnalysis = {"sentiment": -1}

            date = dateutil.parser.parse(tweet["created_at"]).strftime("%Y-%m-%d")

            if date in result:
                result[date].append(tweetAnalysis)
            else:
                result[date] = [tweetAnalysis]

            count += 1
            if count % 500 == 0:
                print "Analyzed", count, "tweets.."

        print "Analyzed", count, "tweets."


    args.output.write(json.dumps(result))
    args.output.close()
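
Each branch above collects one analysis dict per tweet, keyed by day. A hypothetical post-processing step (not part of the original script) that averages the polarity values per day could look like this; entries from the smiley branch, which carry no "polarity" key, are skipped:

def average_polarity(result):
    daily = {}
    for date, analyses in result.items():
        values = [a["polarity"] for a in analyses if "polarity" in a]
        if values:
            daily[date] = sum(values) / len(values)
    return daily
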
Example #35
    def set_config_options(usage):
        """
        Take command-line arguments and options from the configuration file.

        Command-line keyword arguments only, not positional -- breaking
        change November 2013.

        In case of conflict, command-line options are meant to override
        options specified in config file.
        """

        parser = ArgumentParser(description=info.DESCRIPTION)
        parser.add_argument('--version', action='version', version=info.VERSION)

        # General options
        parser.add_argument('-b', '--backend', dest='backend',
                            help='Backend used to fetch issues', default=None)
        parser.add_argument('--backend-user', dest='backend_user',
                            help='Backend user', default=None)
        parser.add_argument('--backend-password', dest='backend_password',
                            help='Backend password', default=None)
        parser.add_argument('--backend-token', dest='backend_token',
                            help='Backend authentication token', default=None)
        parser.add_argument('-c', '--cfg', dest='cfgfile',
                            help='Use a custom configuration file', default=None)
        parser.add_argument('-d', '--delay', type=int, dest='delay',
                            help='Delay in seconds between requests to avoid being banned',
                            default='5')
        parser.add_argument('-g', '--debug', action='store_true', dest='debug',
                            help='Enable debug mode', default=False)
        parser.add_argument('--gerrit-project', dest='gerrit_project',
                            help='Project to be analyzed (gerrit backend)',
                            default=None)
        parser.add_argument('-i', '--input', choices=['url', 'db'],
                            dest='input', help='Input format', default='url')
        parser.add_argument('-o', '--output', choices=['db'],
                            dest='output', help='Output format', default='db')
        parser.add_argument('-p', '--path', dest='path',
                            help='Path where downloaded URLs will be stored',
                            default=None)
        parser.add_argument('-u', '--url', dest='url',
                            help='URL to get issues from using the backend',
                            default=None)
        parser.add_argument('-l', '--logtable', action='store_true',
                            dest='logtable',
                            help='Enable generation of issues log table',
                            default=False)
        parser.add_argument('-n', '--num-issues', type=int, dest='nissues',
                            help='Number of issues requested on each query',
                            default=MAX_ISSUES_PER_QUERY)

        # Options for output database
        group = parser.add_argument_group('Output database specific options')
        group.add_argument('--db-driver-out',
                           choices=['sqlite', 'mysql', 'postgresql'],
                           dest='db_driver_out', help='Output database driver',
                           default='mysql')
        group.add_argument('--db-user-out', dest='db_user_out',
                           help='Database user name', default=None)
        group.add_argument('--db-password-out', dest='db_password_out',
                           help='Database user password', default=None)
        group.add_argument('--db-hostname-out', dest='db_hostname_out',
                           help='Name of the host where database server is running',
                           default='localhost')
        group.add_argument('--db-port-out', dest='db_port_out',
                           help='Port of the host where database server is running',
                           default='3306')
        group.add_argument('--db-database-out', dest='db_database_out',
                           help='Output database name', default=None)

        # Options for input database
        group = parser.add_argument_group('Input database specific options')
        group.add_argument('--db-driver-in',
                           choices=['sqlite', 'mysql', 'postgresql'],
                           dest='db_driver_in', help='Input database driver',
                           default=None)
        group.add_argument('--db-user-in', dest='db_user_in',
                           help='Database user name', default=None)
        group.add_argument('--db-password-in', dest='db_password_in',
                           help='Database user password', default=None)
        group.add_argument('--db-hostname-in', dest='db_hostname_in',
                           help='Name of the host where database server is running',
                           default=None)
        group.add_argument('--db-port-in', dest='db_port_in',
                           help='Port of the host where database server is running',
                           default=None)
        group.add_argument('--db-database-in', dest='db_database_in',
                           help='Input database name', default=None)

        # GitHub options
        group = parser.add_argument_group('GitHub specific options')
        group.add_argument('--newest-first', action='store_true', dest='newest_first',
                           help='Fetch newest issues first', default=False)

        # Maniphest options
        group = parser.add_mutually_exclusive_group()
        group.add_argument('--no-resume', action='store_true', dest='no_resume',
                           help='Disable resume mode (only on maniphest)', default=False)
        group.add_argument('--start-from', dest='start_from',
                           help='Do not retrieve issues after this date (only on maniphest)',
                           default=None)
        group.add_argument('--from-id', dest='from_id',
                           help='Retrieve issues in sequence from the given id (only on maniphest)',
                           default=None)

        args = parser.parse_args()

        if args.cfgfile is not None:  # if a config file was specified on the command line
            Config.load_from_file(args.cfgfile)  # try to load from that file
        else:
            Config.load()  # try to load a config file from default locations

        # Reconciling config file options with command-line options
        Config.__dict__.update(Config.clean_empty_options(args))
        Config.check_config()
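
The config-file reconciliation relies on Config.clean_empty_options(), which is not shown here. A hedged sketch of what it is expected to do, so that unset command-line defaults do not clobber values loaded from the file (the real implementation may differ):

def clean_empty_options(args):
    # Keep only the options the user actually set on the command line;
    # None/False defaults fall through to the config-file values.
    return {name: value for name, value in vars(args).items()
            if value is not None and value is not False}
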
Example #36
def parse_args(argv):
    """Parse the command line.
    """
    parser = argparse.ArgumentParser(
        description='An interface to tarsnap to manage backups.')
    group = parser.add_mutually_exclusive_group()
    group.add_argument('-q', action='store_true', dest='quiet', help='be quiet')
    group.add_argument('-v', action='store_true', dest='verbose', help='be verbose')
    # We really want nargs=(1,2), but since this isn't available, we can
    # just as well support an arbitrary number of values for each -o.
    parser.add_argument('-o', metavar=('name', 'value'), nargs='+',
                        dest='tarsnap_options', default=[], action='append',
                        help='option to pass to tarsnap',)
    parser.add_argument('--config', '-c', help='use the given config file')

    group = parser.add_argument_group(
        description='Instead of using a configuration file, you may define '\
                    'a single job on the command line:')
    group.add_argument('--target', help='target filename for the backup')
    group.add_argument('--sources', nargs='+', help='paths to backup',
                        default=[])
    group.add_argument('--deltas', '-d', metavar='DELTA',
                        type=timedelta_string,
                        help='generation deltas', nargs='+')
    group.add_argument('--dateformat', '-f', help='dateformat')

    for plugin in PLUGINS:
        plugin.setup_arg_parser(parser)

    # This will allow the user to break out of an nargs='*' to start
    # with the subcommand. See http://bugs.python.org/issue9571.
    parser.add_argument('-', dest='__dummy', action="store_true",
                        help=argparse.SUPPRESS)

    subparsers = parser.add_subparsers(
        title="commands", description="commands may offer additional options")
    for cmd_name, cmd_klass in COMMANDS.iteritems():
        subparser = subparsers.add_parser(cmd_name, help=cmd_klass.help,
                                          description=cmd_klass.description,
                                          add_help=False)
        subparser.set_defaults(command=cmd_klass)
        group = subparser.add_argument_group(
            title="optional arguments for this command")
        # We manually add the --help option so that we can have a
        # custom group title, but only show a single group.
        group.add_argument('-h', '--help', action='help',
                           default=argparse.SUPPRESS,
                           help='show this help message and exit')
        cmd_klass.setup_arg_parser(group)

        # Unfortunately, we need to redefine the jobs argument for each
        # command, rather than simply having it once, globally.
        subparser.add_argument(
            'jobs', metavar='job', nargs='*',
            help='only process the given job as defined in the config file')

    # This would be in a group automatically, but it would be shown as
    # the very first thing, while it really should be the last (which
    # explicitly defining the group causes to happen).
    #
    # Also, note that we define this argument for each command as well,
    # and the command specific one will actually be parsed. This is
    # because while argparse allows us to *define* this argument globally,
    # and renders the usage syntax correctly as well, it isn't actually
    # able to parse it correctly (see
    # http://bugs.python.org/issue9540).
    group = parser.add_argument_group(title='positional arguments')
    group.add_argument(
        '__not_used', metavar='job', nargs='*',
        help='only process the given job as defined in the config file')

    args = parser.parse_args(argv)

    # Do some argument validation that would be too much to ask for
    # argparse to handle internally.
    if args.config and (args.target or args.dateformat or args.deltas or
                        args.sources):
        raise ArgumentError('If --config is used, then --target, --deltas, '
                            '--sources and --dateformat are not available')
    if args.jobs and not args.config:
        raise ArgumentError(('Specific jobs (%s) can only be given if a '
                            'config file is used') % ", ".join(args.jobs))
    # The command may want to do some validation regarding its own options.
    args.command.validate_args(args)

    return args
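
--deltas uses a custom type= callable, timedelta_string, which is not shown. A minimal sketch under the assumption that deltas are written as a number plus a unit suffix (e.g. '12h', '7d'); the real converter may accept a richer syntax:

import argparse
from datetime import timedelta

def timedelta_string(value):
    units = {'s': 'seconds', 'm': 'minutes', 'h': 'hours', 'd': 'days'}
    try:
        return timedelta(**{units[value[-1]]: float(value[:-1])})
    except (IndexError, KeyError, ValueError):
        # Raising ArgumentTypeError lets argparse report a clean usage error.
        raise argparse.ArgumentTypeError('invalid delta value: %r' % value)
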
Example #37
def main():
    parser = argparse.ArgumentParser(description='Github within the Command Line')
    g=parser.add_mutually_exclusive_group()
    g.add_argument('-n','--username', type=str, 
            help = "Get repos of the given username")
    g.add_argument('-u','--url', type=str,
            help = "Get repos from the user profile's URL")
    g.add_argument('-r','--recursive',type=str,
            help = "Get the file structure from the repo link")
    g.add_argument('-R','--readme',type=str,
            help = "Get the raw version of the repo readme from repo link")
    g.add_argument('-re','--releases',type=str,
            help = "Get the list of releases from repo link")
    

    if len(sys.argv)==1:
        parser.print_help()
        return

    args = parser.parse_args()
    
#URL

    if(args.url):
        name=args.url
        n=name.find("github.com")
        if(n>=0):
            if(n!=0):
                n1=name.find("www.github.com")
                n2=name.find("http://github.com")
                n3=name.find("https://github.com")
                if(n1*n2*n3!=0):
                    print('-'*150)
                    print("Enter a valid URL. For help, type 'cli-github -h'")
                    print('-'*150)
                    return
            name=args.url[n+11:]
            if name.endswith('/'):
                name = name[:-1]
            url = GITHUB_API + 'users/' +name + '/repos'
        else:
            print('-'*150)
            print("Enter a valid URL. For help, type 'cli-github -h'")
            print('-'*150)
            return

#USERNAME

    if(args.username):
        name=args.username
        url = GITHUB_API + 'users/' +name + '/repos'

#TREE

    if(args.recursive):
        name=args.recursive
        n=name.find("github.com")
        if(n>=0):
            if(n!=0):
                n1=name.find("www.github.com")
                n2=name.find("http://github.com")
                n3=name.find("https://github.com")
                if(n1*n2*n3!=0):
                    print('-'*150)
                    print("Enter a valid URL. For help, type 'cli-github -h'")
                    print('-'*150)
                    return
            name=args.recursive[n+11:]
            if name.endswith('/'):
                name = name[:-1]
            url = GITHUB_API + 'repos/' +name + '/branches/master'
            request = urllib.request.Request(url)
            request.add_header('Authorization', 'token %s' % API_TOKEN)
            try:
                response = urllib.request.urlopen(request).read().decode('utf-8')
            except urllib.error.HTTPError as err:
                print('-'*150)
                print("Invalid Credentials. For help, type 'cli-github -h'")
                print('-'*150)
                return
        else:
            print('-'*150)
            print("Enter a valid URL. For help, type 'cli-github -h'")
            print('-'*150)
            return
 
        jsondata = json.loads(response)
        sha = jsondata['commit']['commit']['tree']['sha']
        url=GITHUB_API+'repos/'+name+'/git/trees/'+sha+'?recursive=1'

#README

    if(args.readme):
        name=args.readme
        n=name.find("github.com")
        if(n>=0):
            if(n!=0):
                n1=name.find("www.github.com")
                n2=name.find("http://github.com")
                n3=name.find("https://github.com")
                if(n1*n2*n3!=0):
                    print('-'*150)
                    print("Enter a valid URL. For help, type 'cli-github -h'")
                    print('-'*150)
                    return
        
            name=args.readme[n+11:]
            if name.endswith('/'):
                name = name[:-1]
            url = GITHUB_API + 'repos/' +name + '/readme'
        else:
            print('-'*150)
            print("Enter a valid URL. For help, type 'cli-github -h'")
            print('-'*150)
            return

#RELEASES
    
    if(args.releases):
        name=args.releases
        n=name.find("github.com")
        if(n>=0):
            if(n!=0):
                n1=name.find("www.github.com")
                n2=name.find("http://github.com")
                n3=name.find("https://github.com")
                if(n1*n2*n3!=0):
                    print('-'*150)
                    print("Enter a valid URL. For help, type 'cli-github -h'")
                    print('-'*150)
                    return
            name=args.releases[n+11:]
            if name.endswith('/'):
                name = name[:-1]
            url = GITHUB_API + 'repos/' +name + '/releases'
        else:
            print('-'*150)
            print("Enter a valid URL. For help, type 'cli-github -h'")
            print('-'*150)
            return

    request = urllib.request.Request(url)
    request.add_header('Authorization', 'token %s' % API_TOKEN)
    try:
        response = urllib.request.urlopen(request).read().decode('utf-8')
    except urllib.error.HTTPError as err:
        print('-'*150)
        print("Invalid Credentials. For help, type 'clipy-github -h'")
        print('-'*150)
        return
        
    jsondata = json.loads(response)
    if(args.url or args.username):
        x = PrettyTable([" Repository ", "★ Star"])
        x.align[u" Repository "] = u"l"
        for i in jsondata:
            x.add_row([i['name'],i['stargazers_count']])
        print(x)

    if(args.recursive):
        x = PrettyTable([" File/Folder ", " Size (Bytes) "])
        x.align[u" File/Folder "] = u"l"
        for i in jsondata['tree']:
            size='-'
            path=i['path']+'/'
            if(i['type']=='blob'):
                size=i['size']
                path=path[:-1]
            x.add_row([path,size])
        print(x)
            
    if(args.readme):
        print(base64.b64decode(jsondata['content']).decode('utf-8'))

    if(args.releases):
        x = PrettyTable([" Release name "," Release Date "," Release Time "])
        
        for i in jsondata:
            ti = dateutil.parser.parse(i['published_at'])
            ti = str(ti)
            date = ti[:10]
            time = ti[11:]
            time = time[:5]
            time = time + ' UTC'
            x.add_row([i['tag_name'],date,time])
        print(x)
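
The same github.com validation and slicing is repeated for --url, --recursive, --readme and --releases above. A hypothetical helper in the spirit of the url_parse() used by the later example could factor it out; this sketch mirrors the checks shown here rather than the real helper:

def url_parse(url):
    # Return the part after 'github.com/' (user or user/repo), or None
    # when the URL does not look like a GitHub URL.
    n = url.find("github.com")
    if n < 0:
        return None
    if n != 0 and not any(url.startswith(prefix) for prefix in
                          ("www.github.com", "http://github.com",
                           "https://github.com")):
        return None
    name = url[n + 11:]
    return name[:-1] if name.endswith('/') else name
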
Example #38
def main():
    parser = argparse.ArgumentParser()
    parser.add_argument('--verbose', '-v', action='store_true')
    # Required args.
    parser.add_argument('--board',
                        type=str,
                        required=True,
                        help='Type of CrOS device.')
    vm_or_device_group = parser.add_mutually_exclusive_group()
    vm_or_device_group.add_argument(
        '--use-vm',
        action='store_true',
        help='Will run the test in the VM instead of a device.')
    vm_or_device_group.add_argument(
        '--device',
        type=str,
        help='Hostname (or IP) of device to run the test on. This arg is not '
        'required if --use-vm is set.')
    subparsers = parser.add_subparsers(dest='test_type')
    # Host-side test args.
    host_cmd_parser = subparsers.add_parser(
        'host-cmd',
        help='Runs a host-side test. Pass the host-side command to run after '
        '"--". If --use-vm is passed, hostname and port for the device '
        'will be 127.0.0.1:9222.')
    host_cmd_parser.set_defaults(func=host_cmd)
    host_cmd_parser.add_argument('--cros-cache',
                                 type=str,
                                 default=DEFAULT_CROS_CACHE,
                                 help='Path to cros cache.')
    host_cmd_parser.add_argument(
        '--path-to-outdir',
        type=os.path.realpath,
        help='Path to output directory, all of whose contents will be deployed '
        'to the device.')
    host_cmd_parser.add_argument(
        '--deploy-chrome',
        action='store_true',
        help='Will deploy a locally built Chrome binary to the device before '
        'running the host-cmd.')
    host_cmd_parser.add_argument('cmd', nargs=argparse.REMAINDER)
    # GTest args.
    # TODO(bpastene): Rename 'vm-test' arg to 'gtest'.
    gtest_parser = subparsers.add_parser('vm-test',
                                         help='Runs a device-side gtest.')
    gtest_parser.set_defaults(func=device_test)
    gtest_parser.add_argument(
        '--test-exe',
        type=str,
        required=True,
        help='Path to test executable to run inside the device. If the value is '
        '%s, the sanity test that ships with the device image runs instead. '
        'This test smoke-checks the system browser (eg: loads a simple '
        'webpage, executes some javascript), so a fully-built Chrome binary '
        'that can get deployed to the device is expected to be available in '
        'the out-dir.' % SANITY_TEST_TARGET)

    # GTest args. Some are passed down to the test binary in the device. Others
    # are parsed here since they might need tweaking or special handling.
    gtest_parser.add_argument(
        '--test-launcher-summary-output',
        type=str,
        help='When set, will pass the same option down to the test and retrieve '
        'its result file at the specified location.')
    # Shard args are parsed here since we might also specify them via env vars.
    gtest_parser.add_argument('--test-launcher-shard-index',
                              type=int,
                              default=os.environ.get('GTEST_SHARD_INDEX', 0),
                              help='Index of the external shard to run.')
    gtest_parser.add_argument('--test-launcher-total-shards',
                              type=int,
                              default=os.environ.get('GTEST_TOTAL_SHARDS', 1),
                              help='Total number of external shards.')

    # Tast test args.
    # pylint: disable=line-too-long
    tast_test_parser = subparsers.add_parser(
        'tast',
        help='Runs a device-side set of Tast tests. For more details, see: '
        'https://chromium.googlesource.com/chromiumos/platform/tast/+/master/docs/running_tests.md'
    )
    tast_test_parser.set_defaults(func=device_test)
    tast_test_parser.add_argument(
        '--suite-name',
        type=str,
        required=True,
        help='Name to apply to the set of Tast tests to run. This has no effect '
        'on what is executed, but is used mainly for test results reporting '
        'and tracking (eg: flakiness dashboard).')
    tast_test_parser.add_argument(
        '--test-launcher-summary-output',
        type=str,
        help='Generates a simple GTest-style JSON result file for the test run.'
    )
    # TODO(bpastene): Change all uses of "--conditional" to use "--attr-expr".
    tast_test_parser.add_argument(
        '--conditional',
        '--attr-expr',
        type=str,
        dest='conditional',
        help='A boolean expression whose matching tests will run '
        '(eg: ("dep:chrome" || "dep:chrome_login")).')
    tast_test_parser.add_argument(
        '--test',
        '-t',
        action='append',
        dest='tests',
        help='A Tast test to run in the device (eg: "ui.ChromeLogin").')
    tast_test_parser.add_argument(
        '--use-host-tast-bin',
        action='store_true',
        help='Use the host-side Tast bin to run the tests instead of the '
        'DUT-side local_test_runner. TODO(bpastene): Make this default.')

    add_common_args(gtest_parser)
    add_common_args(tast_test_parser)
    args, unknown_args = parser.parse_known_args()

    logging.basicConfig(level=logging.DEBUG if args.verbose else logging.WARN)

    if not args.use_vm and not args.device:
        # If we're not running on a VM, but haven't specified a hostname, assume
        # we're on a lab bot and are trying to run a test on a lab DUT. See if the
        # magic lab DUT hostname resolves to anything. (It will in the lab and will
        # not on dev machines.)
        try:
            socket.getaddrinfo(LAB_DUT_HOSTNAME, None)
        except socket.gaierror:
            logging.error('The default DUT hostname of %s is unreachable.',
                          LAB_DUT_HOSTNAME)
            return 1
    if args.use_vm:
        if not os.path.exists('/dev/kvm'):
            logging.error(
                '/dev/kvm is missing. Is KVM installed on this machine?')
            return 1
        elif not os.access('/dev/kvm', os.W_OK):
            logging.error(
                '/dev/kvm is not writable as current user. Perhaps you should be '
                'root?')
            return 1

    args.cros_cache = os.path.abspath(args.cros_cache)
    return args.func(args, unknown_args)
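
The shard arguments above take their defaults from GTEST_SHARD_INDEX and GTEST_TOTAL_SHARDS; argparse also runs string defaults through the type= callable, so an environment-provided value still ends up as an int. A stripped-down illustration of that pattern (not part of the original script):

import argparse
import os

parser = argparse.ArgumentParser()
parser.add_argument('--test-launcher-shard-index',
                    type=int,
                    default=os.environ.get('GTEST_SHARD_INDEX', 0),
                    help='Index of the external shard to run.')
args = parser.parse_args([])  # picks up the env var when the flag is omitted
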
Example #39
def get_options():
    description = "This Python script is a wrapper for Exegol. It can be used to easily manage Exegol on your machine."

    examples = {
        "install (↓ ~8GB max):": "exegol install",
        "check image updates:": "exegol info",
        "get a shell:\t": "exegol start",
        "get a tmux shell:": "exegol --shell tmux start",
        "use wifi/bluetooth:": "exegol --privileged start",
        "use a proxmark:": "exegol --device /dev/ttyACM0 start",
        "use a LOGITacker:": "exegol --device /dev/ttyACM0 start",
        "use an ACR122u:": "exegol --device /dev/bus/usb/ start",
        "use a Crazyradio PA:": "exegol --device /dev/bus/usb/ start",
    }

    epilog = "{}Examples:{}\n".format(GREEN, END)
    for example in examples.keys():
        epilog += "  {}\t{}\n".format(example, examples[example])

    actions = {
        "start":
        "automatically start, resume, create or enter an Exegol container",
        "stop": "stop an Exegol container in a saved state",
        "install":
        "install Exegol image (build or pull depending on the chosen install --mode)",
        "update":
        "update Exegol image (build or pull depending on the chosen update --mode)",
        "remove": "remove Exegol image(s) and/or container(s)",
        "info":
        "print info on containers and local & remote images (name, size, state, ...)",
        "version": "print current version",
    }

    actions_help = ""
    for action in actions.keys():
        actions_help += "{}\t\t{}\n".format(action, actions[action])

    modes = {
        "release":
        "(default) downloads a pre-built image (from DockerHub) (faster)",
        "sources":
        "builds from the local sources in {} (pull from GitHub then docker build, local edits won't be overwritten)"
        .format(EXEGOL_PATH)
    }

    modes_help = ""
    for mode in modes.keys():
        modes_help += "{}\t\t{}\n".format(mode, modes[mode])

    parser = argparse.ArgumentParser(
        description=description,
        epilog=epilog,
        formatter_class=argparse.RawTextHelpFormatter,
    )

    # Required arguments
    parser._positionals.title = "{}Required arguments{}".format(
        "\033[1;32m", END)
    parser.add_argument("action", choices=actions.keys(), help=actions_help)
    parser.add_argument(
        "-k",
        "--insecure",
        dest="verify",
        action="store_false",
        default=True,
        required=False,
        help=
        "Allow insecure server connections for web requests (default: False)",
    )

    # Optional arguments
    parser._optionals.title = "{}Optional arguments{}".format(BLUE, END)
    logging = parser.add_mutually_exclusive_group()
    logging.add_argument(
        "-v",
        "--verbose",
        dest="verbosity",
        action="count",
        default=0,
        help="verbosity level (-v for verbose, -vv for debug)",
    )
    logging.add_argument(
        "-q",
        "--quiet",
        dest="quiet",
        action="store_true",
        default=False,
        help="show no information at all",
    )

    # Install/update options
    install_update = parser.add_argument_group(
        "{}Install/update options{}".format(BLUE, END))
    install_update.add_argument(
        "-m",
        "--mode",
        dest="mode",
        action="store",
        choices=modes.keys(),
        default="release",
        help=modes_help,
    )

    # Default start options
    default_start = parser.add_argument_group(
        "{}Default start options{}".format(BLUE, END),
        description=
        'The following options are enabled by default. They can all be disabled with the advanced option "--no-default" and then re-enabled individually, for example "exegol --no-default --X11 start"',
    )
    default_start.add_argument(
        "-x",
        "--X11",
        dest="X11",
        action="store_true",
        help="enable display sharing to run GUI-based applications",
    )
    default_start.add_argument(
        "--host-network",
        dest="host_network",
        action="store_true",
        help=
        "let the container share the host's networking namespace (the container shares the same interfaces and has the same adresses, needed for mitm6)",
    )
    default_start.add_argument(
        "--bind-resources",
        dest="bind_resources",
        action="store_true",
        help=
        "mount the /opt/resources of the container in a subdirectory of host\'s {}"
        .format(SHARED_RESOURCES))
    default_start.add_argument(
        "-s",
        "--shell",
        dest="shell",
        action="store",
        choices={"zsh", "bash", "tmux"},
        default="zsh",
        help="select shell to start when entering Exegol (Default: zsh)",
    )

    # Advanced start options
    advanced_start = parser.add_argument_group(
        "{}Advanced start/stop/reset options{}".format(BLUE, END))
    advanced_start.add_argument(
        "-t",
        "--container-tag",
        dest="containertag",
        action="store",
        help="tag to use in the container name",
    )
    advanced_start.add_argument(
        "--no-default",
        dest="no_default",
        action="store_true",
        default=False,
        help="disable the default start options (e.g. --X11, --host-network)",
    )
    advanced_start.add_argument(
        "--privileged",
        dest="privileged",
        action="store_true",
        default=False,
        help=
        "(dangerous) give extended privileges at the container creation (e.g. needed to mount things, to use wifi or bluetooth)",
    )
    advanced_start.add_argument(
        "-d",
        "--device",
        dest="device",
        action="store",
        help="add a host device at the container creation",
    )
    advanced_start.add_argument(
        "-c",
        "--custom-options",
        dest="custom_options",
        action="store",
        default="",
        help="specify custom options for the container creation",
    )
    advanced_start.add_argument(
        "-cwd",
        "--cwd-mount",
        dest="mount_current_dir",
        action="store_true",
        help="mount current dir to container's /workspace",
    )

    options = parser.parse_args()

    if not options.no_default:
        options.X11 = True
        options.host_network = True
        options.bind_resources = True
    options.action = options.action.replace("-", "")
    if options.action == "update":
        options.action = "install"
    return options
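
-v counts verbosity while -q silences output. A hypothetical follow-up that maps those flags to a logging level; the mapping and the use of logging.basicConfig are assumptions, not taken from the Exegol wrapper:

import logging

def configure_logging(options):
    if options.quiet:
        level = logging.ERROR
    elif options.verbosity >= 2:
        level = logging.DEBUG
    elif options.verbosity == 1:
        level = logging.INFO
    else:
        level = logging.WARNING
    logging.basicConfig(level=level)
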
Example #40
def parse_args():
    parser = argparse.ArgumentParser(description='Git Tarballs')
    parser.add_argument('-v', '--verbose', action='store_true', default=False,
                        help='Enable verbose output')
    parser.add_argument('--scm',
                        help='Specify SCM',
                        choices=['git', 'hg', 'bzr', 'svn', 'tar'])
    parser.add_argument('--url',
                        help='Specify URL of upstream tarball to download')
    parser.add_argument('--obsinfo',
                        help='Specify .obsinfo file to create a tar ball')
    parser.add_argument('--version', default='_auto_',
                        help='Specify version to be used in tarball. '
                             'Defaults to automatically detected value '
                             'formatted by versionformat parameter.')
    parser.add_argument('--versionformat',
                        help='Auto-generate version from checked out source '
                             'using this format string.  This parameter is '
                             'used if the \'version\' parameter is not '
                             'specified.')
    parser.add_argument('--versionprefix',
                        help='Specify a base version as prefix.')
    parser.add_argument('--parent-tag',
                        help='Override base commit for @TAG_OFFSET@')
    parser.add_argument('--revision',
                        help='Specify revision to package')
    parser.add_argument('--extract', action='append',
                        help='Extract a file directly. Useful for build '
                             'descriptions')
    parser.add_argument('--filename',
                        help='Name of package - used together with version '
                             'to determine tarball name')
    parser.add_argument('--extension', default='tar',
                        help='suffix name of package - used together with '
                             'filename to determine tarball name')
    parser.add_argument('--changesgenerate', choices=['enable', 'disable'],
                        default='disable',
                        help='Specify whether to generate changes file '
                             'entries from SCM commit log since a given '
                             'parent revision (see changesrevision).')
    parser.add_argument('--changesauthor',
                        help='The author of the changes file entry to be '
                             'written, defaults to first email entry in '
                             '~/.oscrc or "%s" '
                             'if there is no ~/.oscrc found.' %
                             DEFAULT_AUTHOR)
    parser.add_argument('--subdir', default='',
                        help='Package just a subdirectory of the sources')
    parser.add_argument('--submodules',
                        choices=['enable', 'master', 'disable'],
                        default='enable',
                        help='Whether or not to include git submodules '
                             'from SCM commit log since a given parent '
                             'revision (see changesrevision). Use '
                             '\'master\' to fetch the latest master.')
    parser.add_argument('--sslverify', choices=['enable', 'disable'],
                        default='enable',
                        help='Whether or not to check server certificate '
                             'against installed CAs.')
    group = parser.add_mutually_exclusive_group()
    group.add_argument('--include', action='append',
                       default=[], metavar='REGEXP',
                       help='Specifies subset of files/subdirectories to '
                            'pack in the tarball (can be repeated)')
    group.add_argument('--exclude', action='append',
                       default=[], metavar='REGEXP',
                       help='Specifies excludes when creating the '
                            'tarball (can be repeated)')
    parser.add_argument('--package-meta', choices=['yes', 'no'], default='no',
                        help='Package the meta data of SCM to allow the user '
                             'or OBS to update after un-tar')
    parser.add_argument('--outdir', required=True,
                        help='osc service parameter for internal use only '
                             '(determines where generated files go before '
                             'collection)')
    parser.add_argument('--history-depth',
                        help='Obsolete osc service parameter that does '
                             'nothing')
    args = parser.parse_args()

    # basic argument validation
    if not os.path.isdir(args.outdir):
        sys.exit("%s: No such directory" % args.outdir)

    args.outdir = os.path.abspath(args.outdir)
    orig_subdir = args.subdir
    args.subdir = os.path.normpath(orig_subdir)
    if args.subdir.startswith('/'):
        sys.exit("Absolute path '%s' is not allowed for --subdir" %
                 orig_subdir)
    if args.subdir == '..' or args.subdir.startswith('../'):
        sys.exit("--subdir path '%s' must stay within repo" % orig_subdir)

    if args.history_depth:
        print "history-depth parameter is obsolete and will be ignored"

    # booleanize non-standard parameters
    if args.changesgenerate == 'enable':
        args.changesgenerate = True
    else:
        args.changesgenerate = False

    if args.package_meta == 'yes':
        args.package_meta = True
    else:
        args.package_meta = False

    args.sslverify = False if args.sslverify == 'disable' else True

    # force verbose mode in test-mode
    if os.getenv('DEBUG_TAR_SCM'):
        args.verbose = True

    return args
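
The enable/disable style parameters are booleanized one at a time at the end of parse_args(). A hypothetical helper (not part of the original service) could centralize that:

def booleanize(args, name, truthy='enable'):
    # Replace an 'enable'/'disable' (or 'yes'/'no') string attribute with a bool.
    setattr(args, name, getattr(args, name) == truthy)

# e.g.:
# booleanize(args, 'changesgenerate')
# booleanize(args, 'sslverify')
# booleanize(args, 'package_meta', truthy='yes')
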
Example #41
def main():

    global options
    global logger

    today = datetime.now(pytz.timezone('US/Eastern')).date()

    init_parser = argparse.ArgumentParser()
    init_parser.add_argument("-p",
                             "--profile",
                             help="use alternate config profile")
    options, args = init_parser.parse_known_args()

    config.load(merge_default=True)
    if options.profile:
        config.settings.set_profile(options.profile)
    player.Player.load()

    parser = argparse.ArgumentParser()
    group = parser.add_mutually_exclusive_group()
    group.add_argument("-v",
                       "--verbose",
                       action="count",
                       default=0,
                       help="verbose logging")
    group.add_argument("-q",
                       "--quiet",
                       action="count",
                       default=0,
                       help="quiet logging")
    parser.add_argument("-d",
                        "--debug-console",
                        help="show logging console (disables task manager UI)",
                        action="store_true")
    parser.add_argument("spec",
                        metavar="SPECIFIER",
                        help="media specifier",
                        nargs="?")

    options, args = parser.parse_known_args(args)

    state.options = AttrDict(vars(options))

    logger = logging.getLogger()

    providers.load()

    model.init()

    sh = logging.StreamHandler()
    state.logger = setup_logging(options.verbose - options.quiet,
                                 quiet_stdout=False)

    providers.load_config()

    with db_session(optimistic=False):
        model.MediaFeed.purge_all(
            min_items=config.settings.profile.cache.min_items,
            max_items=config.settings.profile.cache.max_items,
            max_age=config.settings.profile.cache.max_age)

    spec = None

    logger.debug(f"{PACKAGE_NAME} starting")
    state.asyncio_loop = asyncio.get_event_loop()
    state.task_manager = tasks.TaskManager()

    state.task_manager_task = state.asyncio_loop.create_task(
        state.task_manager.start())

    log_file = os.path.join(config.CONFIG_DIR, f"{PACKAGE_NAME}.log")
    fh = logging.FileHandler(log_file)
    add_log_handler(fh)
    logging.getLogger("panwid").setLevel(logging.INFO)

    action, provider, selection, opts = providers.parse_spec(options.spec)

    if selection:
        run_cli(action, provider, selection, **opts)
    else:
        run_gui(action, provider, **opts)
 parser.add_argument("--dataset_version",
                     help="dataset version",
                     default="v2.0",
                     required=False)
 parser.add_argument("--user",
                     help="SciHub user",
                     default=None,
                     required=False)
 parser.add_argument("--password",
                     help="SciHub password",
                     default=None,
                     required=False)
 parser.add_argument("--browse",
                     help="create browse images",
                     action='store_true')
 group = parser.add_mutually_exclusive_group()
 group.add_argument("--ingest",
                    help="create and ingest missing datasets",
                    action='store_true')
 group.add_argument("--create_only",
                    help="only create missing datasets",
                    action='store_true')
 parser.add_argument("--purpose",
                     help="scrape or validate or aoi_scrape",
                     default="scrape",
                     required=False)
 parser.add_argument("--report",
                     help="create a report",
                     default=False,
                     action='store_true',
                     required=False)
Example #43
def parse_args(argv):
    """Parse the command line.
    """
    parser = argparse.ArgumentParser(
        description='An interface to tarsnap to manage backups.')
    group = parser.add_mutually_exclusive_group()
    group.add_argument('-q',
                       action='store_true',
                       dest='quiet',
                       help='be quiet')
    group.add_argument('-v',
                       action='store_true',
                       dest='verbose',
                       help='be verbose')
    # We really want nargs=(1,2), but since this isn't available, we can
    # just as well support an arbitrary number of values for each -o.
    parser.add_argument(
        '-o',
        metavar=('name', 'value'),
        nargs='+',
        dest='tarsnap_options',
        default=[],
        action='append',
        help='option to pass to tarsnap',
    )
    parser.add_argument('--config', '-c', help='use the given config file')

    group = parser.add_argument_group(
        description='Instead of using a configuration file, you may define '
        'a single job on the command line:')
    group.add_argument('--target', help='target filename for the backup')
    group.add_argument('--sources',
                       nargs='+',
                       help='paths to backup',
                       default=[])
    group.add_argument('--deltas',
                       '-d',
                       metavar='DELTA',
                       type=timedelta_string,
                       help='generation deltas',
                       nargs='+')
    group.add_argument('--dateformat', '-f', help='dateformat')

    for plugin in PLUGINS:
        plugin.setup_arg_parser(parser)

    # This will allow the user to break out of an nargs='*' to start
    # with the subcommand. See http://bugs.python.org/issue9571.
    parser.add_argument('-',
                        dest='__dummy',
                        action="store_true",
                        help=argparse.SUPPRESS)

    subparsers = parser.add_subparsers(
        title="commands", description="commands may offer additional options")
    for cmd_name, cmd_klass in COMMANDS.items():
        subparser = subparsers.add_parser(cmd_name,
                                          help=cmd_klass.help,
                                          description=cmd_klass.description,
                                          add_help=False)
        subparser.set_defaults(command=cmd_klass)
        group = subparser.add_argument_group(
            title="optional arguments for this command")
        # We manually add the --help option so that we can have a
        # custom group title, but only show a single group.
        group.add_argument('-h',
                           '--help',
                           action='help',
                           default=argparse.SUPPRESS,
                           help='show this help message and exit')
        cmd_klass.setup_arg_parser(group)

        # Unfortunately, we need to redefine the jobs argument for each
        # command, rather than simply having it once, globally.
        subparser.add_argument(
            'jobs',
            metavar='job',
            nargs='*',
            help='only process the given job as defined in the config file')

    # This would be in a group automatically, but it would be shown as
    # the very first thing, while it really should be the last (which
    # explicitly defining the group causes to happen).
    #
    # Also, note that we define this argument for each command as well,
    # and the command specific one will actually be parsed. This is
    # because while argparse allows us to *define* this argument globally,
    # and renders the usage syntax correctly as well, it isn't actually
    # able to parse it correctly (see
    # http://bugs.python.org/issue9540).
    group = parser.add_argument_group(title='positional arguments')
    group.add_argument(
        '__not_used',
        metavar='job',
        nargs='*',
        help='only process the given job as defined in the config file')

    args = parser.parse_args(argv)

    # Do some argument validation that would be too much to ask for
    # argparse to handle internally.
    if args.config and (args.target or args.dateformat or args.deltas
                        or args.sources):
        raise ArgumentError('If --config is used, then --target, --deltas, '
                            '--sources and --dateformat are not available')
    if args.jobs and not args.config:
        raise ArgumentError(('Specific jobs (%s) can only be given if a '
                             'config file is used') % ", ".join(args.jobs))
    # The command may want to do some validation regarding its own options.
    args.command.validate_args(args)

    return args
Example #44
            if button_available:
                if not GPIO.input(self.button_pin) or self.now() - start > duration:
                    pygame.mixer.music.fadeout(250)
            else:
                try:
                    if self.now() - start > duration:
                        raise KeyboardInterrupt
                except KeyboardInterrupt:
                    pygame.mixer.music.fadeout(250)
        print "You stopped the music!"

    def exit(self):
        pygame.mixer.quit()

parser = argparse.ArgumentParser("AlarmPy")
group = parser.add_mutually_exclusive_group()
group.add_argument("-s", "--setalarm", nargs=2, metavar=("YYYY-mm-dd","HH:MM"),
        help="Set an alarm. Format as: YYYY-mm-dd HH:MM, unless -p specified.")
group.add_argument("-t", "--today", nargs=1, metavar="MM:SS", 
        help="Only takes a time parameter; assumes alarm is for today.")
group.add_argument("--tomorrow", nargs=1, metavar="HH:MM",
        help="Specifies an alarm to go off at HH:MM tomorrow.")
group.add_argument("--timer", nargs=1, metavar="MM, or HH:MM",
        help="Specifies an alarm to go off <minutes>/<hours:minutes> from now.")
parser.add_argument("-p", "--precise", action="store_true", 
        help="Allows alarm times with seconds. e.g. YYYY-mm-dd HH:MM:SS")
parser.add_argument("-r", "--recurring", metavar="<comma delim list>",
        help="Sets a recurring alarm in the specified interval. e.g: fr, sa, su")
parser.add_argument("-n", "--name", nargs="+", metavar="desired name",
        help="Names an alarm. If not specified, the name will be \"alarm\"")
Example #45
def main():
    """main function"""
    parser = argparse.ArgumentParser(
        description='Github within the Command Line')
    group = parser.add_mutually_exclusive_group()
    group.add_argument('-n',
                       '--url',
                       type=str,
                       help="Get repos from the user profile's URL")
    group.add_argument('-r',
                       '--recursive',
                       type=str,
                       help="Get the file structure from the repo link")
    group.add_argument(
        '-R',
        '--readme',
        type=str,
        help="Get the raw version of the repo readme from repo link")
    group.add_argument('-re',
                       '--releases',
                       type=str,
                       help="Get the list of releases from repo link")
    group.add_argument('-dt',
                       '--tarball',
                       type=str,
                       help="Download the tarball of the given repo")
    group.add_argument('-dz',
                       '--zipball',
                       type=str,
                       help="Download the zipball of the given repo")
    group.add_argument('-op',
                       '--openfile',
                       type=str,
                       help="Show the contents of the given file in a repo")
    group.add_argument('-f',
                       '--followers',
                       type=str,
                       help="Get followers of the user")
    group.add_argument('-fo',
                       '--following',
                       type=str,
                       help="Get people following the user")
    group.add_argument('-c',
                       '--contributors',
                       type=str,
                       help="Get contributors of a repo")

    if len(sys.argv) == 1:
        parser.print_help()
        return
    args = parser.parse_args()

    # URL

    if args.url:
        name = url_parse(args.url)
        url = GITHUB_API + 'users/' + name + '/repos'

# TREE

    if args.recursive:
        name = url_parse(args.recursive)
        url = GITHUB_API + 'repos/' + name + '/branches/master'
        response = get_req(url)
        jsondata = json.loads(response)
        sha = jsondata['commit']['commit']['tree']['sha']
        url = GITHUB_API + 'repos/' + name + '/git/trees/' + sha + '?recursive=1'

# README

    if args.readme:
        name = url_parse(args.readme)
        url = GITHUB_API + 'repos/' + name + '/readme'

# RELEASES

    if args.releases:
        name = url_parse(args.releases)
        url = GITHUB_API + 'repos/' + name + '/releases'

# TARBALL/ZIPBALL

    if args.tarball or args.zipball:
        if args.tarball:
            key = '/tarball/'
            name = url_parse(args.tarball)
        if args.zipball:
            key = '/zipball/'
            name = url_parse(args.zipball)
        url = GITHUB_API + 'repos/' + name + key + 'master'

# OPEN ONE FILE

    if args.openfile:
        name = url_parse(args.openfile)
        position = name.find('/')
        user = name[:position + 1]
        rest = name[position + 1:]
        position = rest.find('/')
        repo = rest[:position + 1]
        rest = rest[position + 1:]
        url = GITHUB_API + 'repos/' + user + repo + 'contents/' + rest

# GET RESPONSES

# TARBALL/ZIPBALL

    if args.tarball or args.zipball:
        response_url = geturl_req(url)
        position = name.find('/')
        name = name[position + 1:]
        if args.tarball:
            name = name + '.tar.gz'
        if args.zipball:
            name = name + '.zip'
        print("\nDownloading " + name + '...\n')
        urllib.request.urlretrieve(response_url, name)
        print(name + ' has been saved\n')
        return

# FOLLOWERS

    if args.followers:
        name = url_parse(args.followers)
        url = GITHUB_API + 'users/' + name + '/followers'

# FOLLOWING
    if args.following:
        name = url_parse(args.following)
        url = GITHUB_API + 'users/' + name + '/following'

# CONTRIBUTORS
    if args.contributors:
        name = url_parse(args.contributors)
        url = GITHUB_API + 'repos/' + name + '/contributors'

# OTHER OPTIONS

    response = get_req(url)
    jsondata = json.loads(response)

    # USERNAME and URL

    if args.url:
        table = PrettyTable([" Repository ", "★ Star"])
        table.align[" Repository "] = "l"
        for i in jsondata:
            table.add_row([i['name'], i['stargazers_count']])
        print(table)

# RECURSIVE TREE

    if args.recursive:
        table = PrettyTable([" File/Folder ", " Size (Bytes) "])
        table.align[" File/Folder "] = "l"
        for i in jsondata['tree']:
            size = '-'
            path = i['path'] + '/'
            if i['type'] == 'blob':
                size = i['size']
                path = path[:-1]
            table.add_row([path, size])
        print(table)

# README

    if args.readme:
        print(base64.b64decode(jsondata['content']).decode('utf-8'))

# RELEASES
    if args.releases:
        table = PrettyTable(
            [" Release name ", " Release Date ", " Release Time "])
        for i in jsondata:
            time = str(dateutil.parser.parse(i['published_at']))
            date = time[:10]
            time = time[11:]
            time = time[:5]
            time = time + ' UTC'
            table.add_row([i['tag_name'], date, time])
        print(table)

# OPEN ONE FILE

    if args.openfile:
        try:
            print(base64.b64decode(jsondata['content']).decode('utf-8'))
            return
        except (KeyError, TypeError):
            print(
                "\nDirectory URL was given, hence its contents will be displayed\n"
            )
            table = PrettyTable(["Folder Contents"])
            for i in jsondata:
                table.add_row([i['name']])
            print(table)

# GET FOLLOWERS
    if args.followers:
        table = PrettyTable([" FOLLOWERS "])
        table.align[" FOLLOWERS "] = "l"
        for i in jsondata:
            table.add_row([i['login']])
        print("Number of followers:" + str(len(jsondata)))
        print(table)

# GET FOLLOWING
    if args.following:
        table = PrettyTable([" FOLLOWING "])
        table.align[" FOLLOWING "] = "l"
        for i in jsondata:
            table.add_row([i['login']])
        print("Number of following:" + str(len(jsondata)))
        print(table)

# GET CONTRIBUTORS
    if args.contributors:
        table = PrettyTable([" CONTRIBUTORS "])
        table.align[" CONTRIBUTORS "] = "l"
        for i in jsondata:
            table.add_row([i['login']])
        print("Number of contributors:" + str(len(jsondata)))
        print(table)
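
The README branch above works because GitHub's contents API returns the file body base64-encoded in the 'content' field. A standalone sketch of that round trip, using urllib directly (the script's `get_req` helper is not shown, and `fetch_readme` is illustrative only):

import base64
import json
import urllib.request

GITHUB_API = 'https://api.github.com/'

def fetch_readme(repo):
    # repo is an 'owner/name' string, e.g. 'octocat/Hello-World'
    url = GITHUB_API + 'repos/' + repo + '/readme'
    with urllib.request.urlopen(url) as response:
        jsondata = json.loads(response.read().decode('utf-8'))
    return base64.b64decode(jsondata['content']).decode('utf-8')

# print(fetch_readme('octocat/Hello-World'))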
Example #46
        func(endofline + append)

    if rtag is not False:
        with open("../CHANGELOG", "r+") as f:
            content = f.read()
            f.seek(0)
            write(f.write, '\n', content)
    else:
        write(print)


if __name__ == '__main__':
    parser = argparse.ArgumentParser(
        description='Creates a bokeh changelog using the github API.')

    limit_group = parser.add_mutually_exclusive_group(required=True)
    limit_group.add_argument(
        '-d',
        '--since-date',
        metavar='DATE',
        help='select issues that occurred after the given ISO8601 date')
    limit_group.add_argument(
        '-p',
        '--since-tag',
        metavar='TAG',
        help='select issues that occurred after the given git tag')

    parser.add_argument('-c',
                        '--check',
                        action='store_true',
                        default=False,
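
The `required=True` mutually exclusive group above means parse_args() refuses to proceed unless exactly one of --since-date / --since-tag is supplied. A minimal sketch of that behaviour:

import argparse

p = argparse.ArgumentParser()
g = p.add_mutually_exclusive_group(required=True)
g.add_argument('-d', '--since-date', metavar='DATE')
g.add_argument('-p', '--since-tag', metavar='TAG')

print(p.parse_args(['--since-tag', '2.4.0']))
# -> Namespace(since_date=None, since_tag='2.4.0')
# p.parse_args([]) would exit with an error stating that one of
# -d/--since-date or -p/--since-tag is required.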
Example #47
def main():
    # Only one option is meant to be used at a time (they are declared in a
    # mutually exclusive group below), though the code could be adapted to,
    # say, add an inbox item and view the lists in the same call.

    start_time = time()
    description = 'A command-line GTD app that integrates with Google Calendar.'
    epilog = (
        'Add & at the end to leave the terminal free while data is being '
        'uploaded/downloaded.')
    parser = argparse.ArgumentParser(description=description, epilog=epilog)
    group = parser.add_mutually_exclusive_group()
    group.add_argument('-i',
                       '--inbox',
                       nargs='+',
                       action='store',
                       help='Add text to new Inbox item.')
    group.add_argument('-o',
                       '--overview',
                       action='store_true',
                       help='Print lists to terminal.')
    group.add_argument('-P',
                       '--paste',
                       action='store_true',
                       help='Adds contents of clipboard as new Inbox item.')
    group.add_argument(
        '-q',
        '--quick-add',
        action='store_true',
        dest='quick',
        help='Create Inbox prompt for task entry. For use with global keyboard '
        + 'shortcut.')
    group.add_argument(
        '-u',
        '--update-list',
        action='store',
        dest='update_list',
        nargs='?',
        help='Displays Next Actions and prompts user to delete/mark complete, '
        + 'etc.')
    group.add_argument('-d',
                       '--process-inbox',
                       action='store_true',
                       dest='process_inbox',
                       help='Process Inbox items.')
    args = parser.parse_args()

    # instantiate object responsible for data
    gtd = GTD()
    # Inbox now adds directly to Firebase before loading data to save time.
    # Item will be in app data next time data is loaded from Firebase.
    if args.inbox:  # -i, --inbox
        # add text entered at CLI to inbox
        text = ' '.join(args.inbox)
        gtd.d['inbox'].add(text)
        print('"{}" added to inbox.'.format(text))
        return True
    if args.paste:  # -P, --paste
        # add clipboard contents to inbox
        gtd.d['inbox'].paste()
        return True
    if args.quick:
        # show Inbox prompt. For use with global keyboard shortcut.
        gtd.d['inbox'].quickadd()
        return True

    # load data
    print('Fetching data...')
    gtd.fetch_all()

    if args.overview:  # -o, --overview
        # print lists
        gtd.print_overview()
    if args.update_list:
        s = args.update_list[0].lower()
        if s[0] == 'i':
            which = 'inbox'
        elif s[0] == 'n':
            which = 'next_actions'
        elif s[0] == 'w':
            which = 'waiting_for'
        elif s[0] == 'p':
            which = 'projects'
        elif s[0] == 'm':
            which = 'maybe_someday'
        elif s[0] == 'c':
            which = 'calendar'
        else:
            # Guard against an unrecognized list name; otherwise `which`
            # would be unbound when gtd.update_list() is called.
            which = None
            print('Unrecognized list: {}'.format(args.update_list))
        if which:
            gtd.update_list(which)
    if args.process_inbox:
        gtd.process_inbox()

    # save data to file
    # gtd.d['waiting_for'].items.append({
    #     "created": 1527184756.2047727,
    #     "text": "affidavit",
    #     "due": "2018-06-08 00:00:00",
    #     "who": "PenFed"
    # })
    gtd.print_todosh()
    savethread = threading.Thread(target=gtd.fb_export)
    savethread.start()
    time_elapsed = round(time() - start_time)
    print(f'Done in {time_elapsed} seconds.')
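
The fire-and-forget save at the end is why the elapsed time printed above does not include the Firebase upload: the export runs on a separate (non-daemon) thread, so the message appears immediately while the interpreter still waits for the thread before exiting. A minimal sketch of the same pattern, with a stand-in `save_fn`:

import threading
import time

def save_fn():
    time.sleep(2)                 # stand-in for the Firebase export
    print('export finished')

start_time = time.time()
savethread = threading.Thread(target=save_fn)
savethread.start()
time_elapsed = round(time.time() - start_time)
print(f'Done in {time_elapsed} seconds.')
# 'Done in 0 seconds.' prints right away; 'export finished' follows ~2s later.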