Example #1
0
def main():
    """Entry point: report possible workspace upgrades, or perform one.

    With --upgrade REPOSITORY_NAME, downloads a new archive for that one
    repository and edits its bzl rules to match; otherwise prints a report
    of releases newer than what the workspace currently pins.
    """
    arg_parser = argparse.ArgumentParser(
        prog="new_release", description=__doc__,
        formatter_class=argparse.RawDescriptionHelpFormatter)
    arg_parser.add_argument(
        "--use_password", action="store_true", default=False,
        help="Prompt for the GitHub password, instead of using an API token.")
    arg_parser.add_argument(
        "--token_file", default="~/.config/readonly_github_api_token.txt",
        help="Uses an API token read from this filename, unless "
        "--use_password was given (default: %(default)s)")
    arg_parser.add_argument(
        "--user", metavar="USER", type=str, default=_get_default_username(),
        help="GitHub username (default: %(default)s)")
    arg_parser.add_argument(
        "--verbose", action="store_true", default=False)
    arg_parser.add_argument(
        "--upgrade", metavar="REPOSITORY_NAME", type=str,
        help="(Optional) Instead of reporting on possible upgrades, download"
             " a new archive for the given repository and edit its bzl rules"
             " to match.")
    options = arg_parser.parse_args()
    if options.verbose:
        logging.basicConfig(level=logging.DEBUG)
    if options.use_password and not options.user:
        arg_parser.error(
            "Couldn't guess github username; you must supply --user.")

    # Authenticate against GitHub, interactively or via a token file.
    if not options.use_password:
        with open(os.path.expanduser(options.token_file), "r") as token_file:
            gh = github3.login(token=token_file.read().strip())
    else:
        prompt = "Password for https://{}@github.com: ".format(options.user)
        gh = github3.login(
            username=options.user,
            password=getpass.getpass(prompt))

    # None denotes "all repositories"; otherwise just the one named.
    repositories = [options.upgrade] if options.upgrade else None

    # Grab the workspace metadata.
    print("Collecting bazel repository details...")
    metadata = read_repository_metadata(repositories=repositories)
    if options.verbose:
        print(json.dumps(metadata, sort_keys=True, indent=2))

    if not options.upgrade:
        # Run our report of what's available.
        print("Checking for new releases...")
        _check_for_upgrades(gh, options, metadata)
    else:
        with TemporaryDirectory(prefix='drake_new_release_') as temp_dir:
            _do_upgrade(temp_dir, gh, options.upgrade, metadata)
Example #2
0
def main():
    """Mirrors workspace archives into our S3 bucket.

    For each repository in the workspace metadata, determines the S3 object
    key (from the mirror URL) and the upstream URL, then downloads,
    checksum-verifies, and uploads any archive not already in the bucket.

    Raises:
        Exception: if a metadata entry has duplicate or missing URLs, or if
            a downloaded archive fails its SHA-256 check.
    """
    transformed_metadata = {}
    for key, value in read_repository_metadata().items():
        transformed_metadata[key] = {'sha256': value['sha256']}
        for url in value['urls']:
            if url.startswith(BUCKET_URL):
                transformed_metadata[key]['object_key'] = url[len(BUCKET_URL):]
            elif not url.startswith(CLOUDFRONT_URL):
                if 'url' in transformed_metadata[key]:
                    raise Exception(
                        'Multiple non-mirror urls. Verify BUCKET_URL and '
                        'CLOUDFRONT_URL are correct and check for other '
                        'duplicate entries.')
                transformed_metadata[key]['url'] = url
        # Fail fast with a clear message instead of a bare KeyError later.
        if 'object_key' not in transformed_metadata[key]:
            raise Exception(
                'Could not determine the S3 object key for {}. Verify '
                'BUCKET_URL is correct and check for a missing url with '
                'that prefix.'.format(key))
        if 'url' not in transformed_metadata[key]:
            raise Exception(
                'Missing non-mirror url for {}. Verify BUCKET_URL and '
                'CLOUDFRONT_URL are correct and check for a missing url '
                'value.'.format(key))
    s3_resource = boto3.resource('s3')
    for value in transformed_metadata.values():
        s3_object = s3_resource.Object(BUCKET_NAME, value['object_key'])
        try:
            s3_object.load()
            print('S3 object key {} already exists'.format(
                value['object_key']))
        except botocore.exceptions.ClientError as exception:
            # HEAD returns 403 (not 404) for a missing object when the
            # caller lacks s3:ListBucket permission:
            # https://docs.aws.amazon.com/AmazonS3/latest/API/RESTObjectHEAD.html#rest-object-head-permissions
            if exception.response['Error']['Code'] in ['403', '404']:
                print('S3 object key {} does NOT exist'.format(
                    value['object_key']))
                with tempfile.TemporaryDirectory() as directory:
                    filename = os.path.join(
                        directory, os.path.basename(value['object_key']))
                    print('Downloading from URL {}...'.format(value['url']))
                    with requests.get(value['url'], stream=True) as response:
                        # Surface HTTP errors now, rather than writing an
                        # error page to disk and failing the checksum below.
                        response.raise_for_status()
                        with open(filename, 'wb') as file_object:
                            for chunk in response.iter_content(
                                    chunk_size=CHUNK_SIZE):
                                file_object.write(chunk)
                    print('Computing and verifying SHA-256 checksum...')
                    hash_object = hashlib.sha256()
                    with open(filename, 'rb') as file_object:
                        buffer = file_object.read(CHUNK_SIZE)
                        while buffer:
                            hash_object.update(buffer)
                            buffer = file_object.read(CHUNK_SIZE)
                    if hash_object.hexdigest() != value['sha256']:
                        raise Exception('Checksum mismatch')
                    print('Uploading to S3 object key {}...'.format(
                        value['object_key']))
                    s3_object.upload_file(filename)
            else:
                raise
Example #3
0
def main():
    """Entry point: log in to GitHub and report new upstream releases."""
    token = os.getenv("GITHUB_API_TOKEN", None)
    argparser = argparse.ArgumentParser(
        prog="new_release", description=__doc__)
    if sys.version_info[0] != 3:
        argparser.error(
            "Wrong python version.\n"
            "This script only supports python3.\n"
            "To compile it in that mode, use this command:\n"
            " bazel build --config python3 //tools/workspace:new_release")
    argparser.add_argument(
        "--use_token", action="store_true", default=(token is not None),
        help="When set, uses an API token instead of username + password")
    argparser.add_argument(
        "--user", metavar="USER", type=str, default=_get_default_username(),
        help="GitHub username (default: %(default)s)")
    argparser.add_argument("--verbose", action="store_true", default=False)
    options = argparser.parse_args()
    if options.verbose:
        logging.basicConfig(level=logging.DEBUG)
    if options.use_token and not token:
        argparser.error("Missing environment variable GITHUB_API_TOKEN")
    elif not options.user:
        argparser.error(
            "Couldn't guess github username; you must supply --user.")

    # Log in to github.
    if not options.use_token:
        password_prompt = "Password for https://{}@github.com: ".format(
            options.user)
        gh = github3.login(
            username=options.user,
            password=getpass.getpass(password_prompt))
    else:
        gh = github3.login(token=token)

    # Grab the workspace metadata.
    print("Collecting bazel repository details...")
    metadata = read_repository_metadata()
    if options.verbose:
        print(json.dumps(metadata, sort_keys=True, indent=2))

    # Run our report.
    print("Checking for new releases...")
    run(gh, options, metadata)
def main():
    """Entry point: log in to GitHub and report new upstream releases."""
    argparser = argparse.ArgumentParser(
        prog="new_release", description=__doc__)
    argparser.add_argument(
        "--use_password", action="store_true", default=False,
        help="Prompt for the GitHub password, instead of using an API token.")
    argparser.add_argument(
        "--token_file", default="~/.config/readonly_github_api_token.txt",
        help="Uses an API token read from this filename, unless "
        "--use_password was given (default: %(default)s)")
    argparser.add_argument(
        "--user", metavar="USER", type=str, default=_get_default_username(),
        help="GitHub username (default: %(default)s)")
    argparser.add_argument("--verbose", action="store_true", default=False)
    options = argparser.parse_args()
    if options.verbose:
        logging.basicConfig(level=logging.DEBUG)
    if options.use_password and not options.user:
        argparser.error(
            "Couldn't guess github username; you must supply --user.")

    # Authenticate: interactive password, or a token read from disk.
    if not options.use_password:
        with open(os.path.expanduser(options.token_file), "r") as token_file:
            gh = github3.login(token=token_file.read().strip())
    else:
        password_prompt = "Password for https://{}@github.com: ".format(
            options.user)
        gh = github3.login(
            username=options.user,
            password=getpass.getpass(password_prompt))

    # Grab the workspace metadata.
    print("Collecting bazel repository details...")
    metadata = read_repository_metadata()
    if options.verbose:
        print(json.dumps(metadata, sort_keys=True, indent=2))

    # Run our report.
    print("Checking for new releases...")
    run(gh, options, metadata)
Example #5
0
def main():
    """Entry point: log in to GitHub and report new upstream releases."""
    api_token = os.getenv("GITHUB_API_TOKEN", None)
    cli = argparse.ArgumentParser(prog="new_release", description=__doc__)
    if sys.version_info[0] != 3:
        cli.error(
            "Wrong python version.\n"
            "This script only supports python3.\n"
            "To compile it in that mode, use this command:\n"
            " bazel build --config python3 //tools/workspace:new_release")
    cli.add_argument(
        "--use_token", action="store_true", default=(api_token is not None),
        help="When set, uses an API token instead of username + password")
    cli.add_argument(
        "--user", metavar="USER", type=str, default=_get_default_username(),
        help="GitHub username (default: %(default)s)")
    cli.add_argument("--verbose", action="store_true", default=False)
    options = cli.parse_args()
    if options.verbose:
        logging.basicConfig(level=logging.DEBUG)
    if options.use_token and not api_token:
        cli.error("Missing environment variable GITHUB_API_TOKEN")
    elif not options.user:
        cli.error("Couldn't guess github username; you must supply --user.")

    # Log in to github.
    if not options.use_token:
        prompt = "Password for https://{}@github.com: ".format(options.user)
        gh = github3.login(
            username=options.user,
            password=getpass.getpass(prompt))
    else:
        gh = github3.login(token=api_token)

    # Grab the workspace metadata.
    print("Collecting bazel repository details...")
    metadata = read_repository_metadata()
    if options.verbose:
        print(json.dumps(metadata, sort_keys=True, indent=2))

    # Run our report.
    print("Checking for new releases...")
    run(gh, options, metadata)
Example #6
0
def main(argv):
    """Mirrors workspace archives into our S3 bucket.

    Args:
        argv: command-line flags; recognizes '--no-download' (skip querying
            and downloading entirely) and '--no-upload' (download and verify
            checksums, but do not upload to S3).

    Raises:
        Exception: if a metadata entry has duplicate or missing URLs, or if
            a downloaded archive fails its SHA-256 check.
    """
    transformed_metadata = []
    for value in read_repository_metadata().values():
        if 'downloads' in value:
            downloads = value['downloads']
        else:
            downloads = [value]
        for download in downloads:
            transformed_value = {'sha256': download['sha256']}
            for url in download['urls']:
                if url.startswith(BUCKET_URL):
                    transformed_value['object_key'] = url[len(BUCKET_URL):]
                elif not url.startswith(CLOUDFRONT_URL):
                    if 'url' in transformed_value:
                        raise Exception(
                            'Multiple non-mirror urls. Verify BUCKET_URL and '
                            'CLOUDFRONT_URL are correct and check for other '
                            'duplicate entries.')
                    transformed_value['url'] = url
            # Fail fast with a clear message instead of a bare KeyError in
            # the mirroring loop below.
            if 'object_key' not in transformed_value:
                raise Exception(
                    'Could not determine the S3 object key. Verify '
                    'BUCKET_URL is correct and check for a missing url '
                    'with that prefix.')
            if 'url' not in transformed_value:
                raise Exception(
                    'Missing non-mirror url. Verify BUCKET_URL and '
                    'CLOUDFRONT_URL are correct and check for a missing '
                    'url value.')
            transformed_metadata.append(transformed_value)
    s3_resource = boto3.resource('s3')
    for value in transformed_metadata:
        if '--no-download' in argv:
            print('Not querying S3 object key {} '
                  'because --no-download was specified'.format(
                      value['object_key']))
            continue
        s3_object = s3_resource.Object(BUCKET_NAME, value['object_key'])
        try:
            s3_object.load()
            print('S3 object key {} already exists'.format(
                value['object_key']))
        except botocore.exceptions.ClientError as exception:
            # https://docs.aws.amazon.com/AmazonS3/latest/API/RESTObjectHEAD.html#rest-object-head-permissions
            if exception.response['Error']['Code'] in ['403', '404']:
                print('S3 object key {} does NOT exist'.format(
                    value['object_key']))
                with tempfile.TemporaryDirectory() as directory:
                    filename = os.path.join(
                        directory, os.path.basename(value['object_key']))
                    print('Downloading from URL {}...'.format(value['url']))
                    with requests.get(value['url'], stream=True) as response:
                        # Surface HTTP errors now, rather than writing an
                        # error page to disk and failing the checksum below.
                        response.raise_for_status()
                        with open(filename, 'wb') as file_object:
                            for chunk in response.iter_content(
                                    chunk_size=CHUNK_SIZE):
                                file_object.write(chunk)
                    print('Computing and verifying SHA-256 checksum...')
                    hash_object = hashlib.sha256()
                    with open(filename, 'rb') as file_object:
                        buffer = file_object.read(CHUNK_SIZE)
                        while buffer:
                            hash_object.update(buffer)
                            buffer = file_object.read(CHUNK_SIZE)
                    if hash_object.hexdigest() != value['sha256']:
                        raise Exception('Checksum mismatch')
                    if '--no-upload' in argv:
                        print('Not uploading to S3 object key '
                              '{} because --no-upload was specified...'.format(
                                  value['object_key']))
                    else:
                        print('Uploading to S3 object key {}...'.format(
                            value['object_key']))
                        s3_object.upload_file(filename)
            else:
                raise
Example #7
0
def main(argv):
    """Mirrors workspace archives into our S3 bucket.

    Args:
        argv: command-line flags; recognizes '--no-download' (skip querying
            and downloading entirely) and '--no-upload' (download and verify
            checksums, but do not upload to S3).

    Raises:
        Exception: if a metadata entry has duplicate or missing URLs, or if
            a downloaded archive fails its SHA-256 check.
    """
    transformed_metadata = []
    for key, value in read_repository_metadata().items():
        if 'downloads' in value:
            downloads = value['downloads']
        else:
            downloads = [value]
        for download in downloads:
            transformed_value = {'sha256': download['sha256']}
            for url in download['urls']:
                if url.startswith(BUCKET_URL):
                    transformed_value['object_key'] = url[len(BUCKET_URL):]
                elif not url.startswith(CLOUDFRONT_URL):
                    if 'url' in transformed_value:
                        raise Exception(
                            f'Multiple non-mirror urls for @{key}. Verify '
                            f'BUCKET_URL {BUCKET_URL} and CLOUDFRONT_URL '
                            f'{CLOUDFRONT_URL} are correct and check for '
                            f'duplicate url values.')
                    transformed_value['url'] = url
            if 'object_key' not in transformed_value:
                raise Exception(
                    f'Could NOT determine S3 object key for @{key}. Verify '
                    f'BUCKET_URL {BUCKET_URL} is correct and check for '
                    f'missing url value with prefix {BUCKET_URL}.')
            if 'url' not in transformed_value:
                raise Exception(
                    f'Missing non-mirror url for @{key}. Verify BUCKET_URL '
                    f'{BUCKET_URL} is correct and check for missing url value '
                    f'with prefix {BUCKET_URL}.')
            transformed_metadata.append(transformed_value)
    s3_resource = boto3.resource('s3')
    for value in transformed_metadata:
        object_key = value['object_key']
        sha256 = value['sha256']
        url = value['url']
        if '--no-download' in argv:
            print(f'NOT querying S3 object key {object_key} because '
                  f'--no-download was specified')
            continue
        s3_object = s3_resource.Object(BUCKET_NAME, object_key)
        try:
            s3_object.load()
            print(f'S3 object key {object_key} already exists')
        except botocore.exceptions.ClientError as exception:
            # https://docs.aws.amazon.com/AmazonS3/latest/API/RESTObjectHEAD.html#rest-object-head-permissions
            if exception.response['Error']['Code'] in ['403', '404']:
                print(f'S3 object key {object_key} does NOT exist')
                with tempfile.TemporaryDirectory() as directory:
                    filename = os.path.join(directory,
                                            os.path.basename(object_key))
                    print(f'Downloading from URL {url}...')
                    with requests.get(url, stream=True) as response:
                        # Surface HTTP errors now, rather than writing an
                        # error page to disk and failing the checksum below.
                        response.raise_for_status()
                        with open(filename, 'wb') as file_object:
                            for chunk in response.iter_content(
                                    chunk_size=CHUNK_SIZE):
                                file_object.write(chunk)
                    # These messages previously said "file (unknown)" with a
                    # dangling f-prefix; interpolate the actual filename.
                    print(f'Computing and verifying SHA-256 checksum of '
                          f'file {filename}...')
                    hash_object = hashlib.sha256()
                    with open(filename, 'rb') as file_object:
                        buffer = file_object.read(CHUNK_SIZE)
                        while buffer:
                            hash_object.update(buffer)
                            buffer = file_object.read(CHUNK_SIZE)
                    hexdigest = hash_object.hexdigest()
                    if hexdigest != sha256:
                        raise Exception(
                            f'Expected SHA-256 checksum of file {filename} to '
                            f'be {sha256}, but actual checksum was computed '
                            f'to be {hexdigest}')
                    if '--no-upload' in argv:
                        print(f'NOT uploading file {filename} to S3 object '
                              f'key {object_key} because --no-upload was '
                              f'specified')
                    else:
                        print(f'Uploading file {filename} to S3 object key '
                              f'{object_key}...')
                        s3_object.upload_file(filename)
            else:
                raise