Code example #1
import sys
from logging import DEBUG, INFO, basicConfig, getLogger
from typing import NoReturn

# get_parser(), subcommand_download(), subcommand_test() and __version__
# are defined elsewhere in the same module.
logger = getLogger(__name__)


def main() -> NoReturn:
    parser = get_parser()
    parsed = parser.parse_args()

    # -v/--verbose switches the log level between DEBUG and INFO.
    if parsed.verbose:
        basicConfig(level=DEBUG)
    else:
        basicConfig(level=INFO)

    if parsed.version:
        print(__file__, __version__)
        sys.exit(0)

    # Dispatch on the subcommand, including its short aliases.
    if parsed.subcommand in ['download', 'd', 'dl']:
        if not subcommand_download(url=parsed.url):
            sys.exit(1)
    elif parsed.subcommand in ['test', 't']:
        if not subcommand_test(command=parsed.command):
            sys.exit(1)
    elif parsed.subcommand is not None:
        logger.error(
            'The subcommand "%s" is not supported in %s. Please use the full version: https://github.com/online-judge-tools/oj',
            parsed.subcommand, __file__)
        sys.exit(1)
    else:
        parser.print_help(file=sys.stderr)
        sys.exit(1)
    sys.exit(0)
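The snippet above relies on a get_parser() defined elsewhere. A minimal sketch of what it might look like, wiring up only the attributes main() actually reads (verbose, version, subcommand, url, command); the exact flags and positionals are assumptions, not the original definitions. Note that invoking an alias such as "d" sets parsed.subcommand to "d", which is why main() tests membership in a list rather than equality.

import argparse

def get_parser() -> argparse.ArgumentParser:
    # Hypothetical reconstruction: only the options main() reads are wired up.
    parser = argparse.ArgumentParser()
    parser.add_argument('-v', '--verbose', action='store_true')
    parser.add_argument('--version', action='store_true')
    subparsers = parser.add_subparsers(dest='subcommand')
    subparser = subparsers.add_parser('download', aliases=['d', 'dl'])
    subparser.add_argument('url')
    subparser = subparsers.add_parser('test', aliases=['t'])
    subparser.add_argument('-c', '--command')
    return parser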
Code example #2
import logging
import shutil
import urllib.parse
import webbrowser
from urllib.error import URLError

# make_argparser(), follow_redirects(), get_loglevel() and to_clipboard()
# are defined elsewhere in the same module.


def main(argv):

    parser = make_argparser()
    args = parser.parse_args(argv[1:])

    logging.basicConfig(stream=args.log,
                        level=args.volume,
                        format='%(message)s')

    clipboard = args.clipboard
    # shutil.which() replaces distutils.spawn.find_executable(); distutils
    # was removed from the standard library in Python 3.12.
    if not shutil.which('xclip'):
        logging.warning(
            'Warning: Could not find `xclip` command. Will not be able to '
            'copy final url to clipboard.'
        )
        clipboard = False

    #TODO: read from stdin
    url = args.url
    if url is None:
        parser.print_help()
        raise URLError(
            'Error: No url argument given and could not find a valid url in clipboard.'
        )

    # Default to http:// when the url was given without a scheme.
    if not urllib.parse.urlsplit(url).scheme:
        url = 'http://' + url
    if get_loglevel() <= logging.WARNING:
        print(url)

    # Do the actual redirect resolution.
    replies = list(
        follow_redirects(url,
                         max_response=args.max_response,
                         user_agent=args.user_agent))
    for reply_num, reply in enumerate(replies):
        if get_loglevel() <= logging.WARNING or reply_num == len(replies) - 1:
            print(reply.location)

    # Remove starting www. from domain, if present. Note that `reply` here
    # is the last element of `replies`, left over from the loop above.
    domain = urllib.parse.urlsplit(reply.location).netloc
    if domain.startswith('www.') and domain.count('.') > 1:
        domain = domain[4:]

    # Print summary info.
    for reply in replies:
        if reply.type == 'refresh':
            logging.info('meta refresh from  ' +
                         reply.url[:args.terminal_width - 19])
        elif reply.type == 'absolute':
            logging.info('absolute path from ' +
                         reply.url[:args.terminal_width - 19])
        elif reply.type == 'relative':
            logging.info('relative path from ' +
                         reply.url[:args.terminal_width - 19])
    logging.info(f'total redirects: {len(replies)}')

    # Copy final data to clipboard, and open reputation checker in browser, if requested.
    if clipboard:
        if args.browser:
            to_clipboard(reply.location)
        else:
            to_clipboard(domain)
    if args.browser:
        webbrowser.open(args.reputation_url + domain)
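follow_redirects() is also defined elsewhere; judging from the attributes used above, it yields reply objects carrying url, location and type fields. A minimal sketch of such a generator, covering only plain HTTP Location redirects; the real helper evidently also detects meta refreshes and distinguishes relative from absolute targets, which this sketch does not (it labels every hop 'absolute').

import urllib.error
import urllib.parse
import urllib.request
from typing import NamedTuple

class Reply(NamedTuple):
    type: str      # the real tool distinguishes 'refresh', 'absolute', 'relative'
    url: str       # the url that was requested
    location: str  # where that request led

class _NoFollow(urllib.request.HTTPRedirectHandler):
    def redirect_request(self, req, fp, code, msg, headers, newurl):
        return None  # surface 3xx responses instead of following them

def follow_redirects(url, max_response=20, user_agent=None):
    opener = urllib.request.build_opener(_NoFollow)
    for _ in range(max_response):
        headers = {'User-Agent': user_agent} if user_agent else {}
        request = urllib.request.Request(url, headers=headers)
        try:
            response = opener.open(request)
        except urllib.error.HTTPError as err:
            response = err  # 3xx raises HTTPError once following is disabled
        location = response.headers.get('Location')
        if location is None:
            yield Reply('absolute', url, url)  # final hop, nowhere further to go
            return
        location = urllib.parse.urljoin(url, location)  # resolve relative targets
        yield Reply('absolute', url, location)
        url = location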
Code example #3
    parser.add_option("-c", "--cache", dest="cache",
                      action='store_true',
                      default=False,
                      help="stores and loads data from cache")
    parser.add_option("-d", "--cacheDir", dest="cacheDir",
                      default='_cache',
                      help="the directory where cache files will be stored")
    parser.add_option("-u", "--userAgent", dest="userAgent",
                      default=None,
                      help="The user-agent to use when requesting URLs.")
    parser.add_option("-f", "--filters", dest="filters",
                      default=None,
                      choices=get_filter_names(),
                      help="a comma-delimited list of pre-processing filters to apply, one of [%s]" % '|'.join(get_filter_names()))
    parser.add_option("-v", "--verbose", dest="verbose",
                      action='store_true',
                      default=False,
                      help="displays status messages")
    parser.add_option('-i', '--ignore-robotstxt', dest="ignore_robotstxt",
                        default=False, action="store_true",
                        help="Ignore robots.txt when fetching the content")

    (options, args) = parser.parse_args()

    if len(args) < 1:
        parser.print_help()
        sys.exit()

    url = args[0]
    print(extractFromURL(url=url, **options.__dict__))
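optparse has been soft-deprecated since Python 3.2. A sketch of the same interface in argparse, which also restores per-name validation of the comma-delimited -f list via a small type function; the names mirror the optparse version above, and build_parser() itself is an illustration, not the original code.

import argparse

def build_parser(filter_names):
    parser = argparse.ArgumentParser()
    parser.add_argument('url', help='the URL to extract content from')
    parser.add_argument('-c', '--cache', action='store_true',
                        help='stores and loads data from cache')
    parser.add_argument('-d', '--cacheDir', default='_cache',
                        help='the directory where cache files will be stored')
    parser.add_argument('-u', '--userAgent',
                        help='the user-agent to use when requesting URLs')

    def filter_list(value):
        # Validate each element of the comma-delimited list individually,
        # which optparse's single-value `choices` cannot do.
        names = value.split(',')
        for name in names:
            if name not in filter_names:
                raise argparse.ArgumentTypeError(
                    '%r is not one of [%s]' % (name, '|'.join(filter_names)))
        return names

    parser.add_argument('-f', '--filters', type=filter_list,
                        help='a comma-delimited list of pre-processing filters '
                             'to apply, one of [%s]' % '|'.join(filter_names))
    parser.add_argument('-v', '--verbose', action='store_true',
                        help='displays status messages')
    parser.add_argument('-i', '--ignore-robotstxt', action='store_true',
                        help='ignore robots.txt when fetching the content')
    return parser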
Code example #4
    # Excerpt from inside the script's entry point; assumes
    #     import argparse, sys
    #     from getpass import getpass
    # a Session class defined elsewhere, and earlier add_argument() calls
    # (not shown) that register -u, -p, -v, -t and -o.
    # The opening of this add_argument() call was cut off in the excerpt
    # and is restored here.
    parser.add_argument(
        '-x',
        action='store_true',
        default=False,
        help='whether to download inheritance and ibdview tables [False]')
    parser.add_argument('-l',
                        metavar='<FILE>',
                        type=argparse.FileType('w', encoding='UTF-8'),
                        default=sys.stderr,
                        help='output log file [stderr]')

    # extract arguments from the command line
    try:
        # Rebinding parser.error to parser.exit suppresses argparse's default
        # usage/error output, so the except clause can print full help instead.
        parser.error = parser.exit
        args = parser.parse_args()
    except SystemExit:
        parser.print_help()
        sys.exit(2)

    # Prompt for any credentials not given on the command line. The password
    # line was masked as ****** in the source; this reconstruction assumes a
    # -p option and a getpass prompt.
    username = args.u if args.u else input("Enter 23andMe username: ")
    password = args.p if args.p else getpass("Enter 23andMe password: ")
    verbose = args.v
    logfile = args.l
    timeout = args.t

    # initialize a session with 23andMe server
    session = Session(username, password, verbose, logfile, timeout)

    # download list of profiles owned by the account
    data = session.get_account()
    out = args.o if args.o else 'out'  # dataLayer[0]['account_id']
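Rebinding parser.error relies on argparse internals keeping that call signature. A more conventional way to get "print the full help on a bad command line" is to override error() in a subclass; this sketch is an alternative pattern, not part of the original script.

import argparse
import sys

class HelpfulParser(argparse.ArgumentParser):
    def error(self, message):
        # Print the specific problem plus the full help, then exit with 2,
        # instead of argparse's default short usage message.
        print('error: %s\n' % message, file=sys.stderr)
        self.print_help(sys.stderr)
        sys.exit(2)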
Code example #5
import asyncio
import os
import sys

# loadconfig(), makeparser(), applyconfig(), dologin(), WebHosting and the
# per-command coroutines (listfiles, downloadfile, uploadfile, ...) are
# defined elsewhere in the same module.


def main():
    loop = asyncio.get_event_loop()
    config = loadconfig()
    parser = makeparser()
    args = parser.parse_args()
    applyconfig(args, config)

    host = WebHosting(loop, args.baseurl)

    tasks = []

    # Map the parsed subcommand onto the coroutine that implements it.
    if args.command == 'ls':
        tasks.append(listfiles(host, args.dirname, args))
    elif args.command == 'cat':
        tasks.append(downloadfile(host, args.filename, "-"))
    elif args.command == 'tee':
        tasks.append(uploadfile(host, "-", args.filename))
    elif args.command == 'get':
        tasks.append(downloadfile(host, args.filename, args.destination))
    elif args.command == 'put':
        tasks.append(
            uploadfile(host, args.filename,
                       os.path.join(args.destination, args.filename)))
    elif args.command == 'edit':
        tasks.append(editfile(host, args.filename, args.contents))
    elif args.command == 'zip':
        tasks.append(makezip(host, args.dirname, args.zipname, args.files))
    elif args.command == 'unzip':
        tasks.append(unzip(host, args.zipname))
    elif args.command == 'rmdir':
        tasks.append(removedir(host, args.dirname))
    elif args.command == 'mkdir':
        tasks.append(createdir(host, args.dirname))
    elif args.command == 'rm':
        tasks.append(delfiles(host, args.files))
    elif args.command == 'empty':
        tasks.append(emptyfile(host, args.filename))
    elif args.command == 'cp':
        tasks.append(
            copyfiles(host, args.dirname, args.files[:-1], args.files[-1]))
    elif args.command == 'mv':
        tasks.append(
            movefiles(host, args.dirname, args.files[:-1], args.files[-1]))
    elif args.command == 'du':
        tasks.append(calcsize(host, args.dirname, args.files))
    elif args.command == 'help':
        # 'help' is handled synchronously, before any login; makeparser() is
        # assumed to stash its subparsers on the parser as a dict.
        if args.subcommand:
            p = parser.subparsers.get(args.subcommand)
            if p:
                p.print_help()
                sys.exit(0)

        parser.print_help()
        print()
        for p in parser.subparsers.values():
            p.print_usage()
        print()
        sys.exit(0)
    else:
        parser.print_usage()
        sys.exit(1)

    loop.run_until_complete(dologin(host, args))

    try:
        if tasks:
            loop.run_until_complete(asyncio.gather(*tasks))
    except Exception as e:
        print("ERROR", e)
        sys.exit(1)
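The long if/elif chain above maps each subcommand to exactly one coroutine call, so the same dispatch can be written as a table, which turns adding a command into a one-line change. A sketch using the same helper names, which are taken on faith from the excerpt:

# Each entry builds the coroutine for its subcommand from the parsed args.
COMMANDS = {
    'ls': lambda host, args: listfiles(host, args.dirname, args),
    'cat': lambda host, args: downloadfile(host, args.filename, '-'),
    'tee': lambda host, args: uploadfile(host, '-', args.filename),
    'get': lambda host, args: downloadfile(host, args.filename, args.destination),
    'rm': lambda host, args: delfiles(host, args.files),
    # ... the remaining subcommands follow the same pattern
}

def build_task(host, args):
    try:
        return COMMANDS[args.command](host, args)
    except KeyError:
        return None  # caller prints usage and exits, as main() does above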