def get_commit_tag(version):
    parts = version.split('.', 3)
    if len(parts) == 3:
        parts[2] = 'x'
        return '.'.join(parts)
    else:
        raise UserWarning("[ERROR] Invalid version name '%s'" % version)
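
# Illustrative only: with a raw version such as "1.2.3" (a made-up value, not
# taken from the scripts), get_commit_tag() swaps the patch component for a
# literal 'x' so the tag names a release line rather than a single build.
#
#     >>> get_commit_tag('1.2.3')
#     '1.2.x'
#     >>> get_commit_tag('1.2.3.4')   # more than three components is rejected
#     Traceback (most recent call last):
#         ...
#     UserWarning: [ERROR] Invalid version name '1.2.3.4'
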
def main():
    repo = GitHub(get_env_var('GITHUB_TOKEN')).repos(BRAVE_REPO)

    release = get_draft(repo, get_brave_version())
    commit_tag = get_commit_tag(get_raw_version())

    print("[INFO] Releasing {}".format(release['tag_name']))
    publish_release(repo, release['id'], get_tag(), commit_tag)
Example #3
def main():

    args = parse_args()

    channel = args.channel
    repo_dir = args.repo_dir
    dist_dir = os.path.join(repo_dir, 'dist')
    gpg_full_key_id = args.gpg_full_key_id
    if channel in ['release']:
        if not args.gpg_passphrase:
            logging.error(
                "Error: --gpg_passphrase required for channel {}".format(
                    channel))
            exit(1)
        else:
            gpg_passphrase = args.gpg_passphrase
    s3_test_buckets = args.s3_test_buckets

    if args.debug:
        logging.basicConfig(stream=sys.stderr, level=logging.DEBUG)
        logging.debug('brave_version: {}'.format(get_raw_version()))
        logging.debug('channel: {}'.format(channel))
        logging.debug('repo_dir: {}'.format(repo_dir))
        logging.debug('dist_dir: {}'.format(dist_dir))
        logging.debug('gpg_full_key_id: {}'.format(gpg_full_key_id))
        logging.debug('gpg_passphrase: {}'.format("NOTAREALPASSWORD"))
        logging.debug('s3_test_buckets: {}'.format(s3_test_buckets))

    # verify we have the GPG key we're expecting in the public keyring
    list_keys_cmd = "/usr/bin/gpg2 --list-keys --with-subkey-fingerprints | grep {}".format(
        gpg_full_key_id)
    logging.info(
        "Verifying the GPG key \'{}\' is in our public keyring...".format(
            gpg_full_key_id))
    logging.debug("Running command: {}".format(list_keys_cmd))
    try:
        output = subprocess.check_output(list_keys_cmd, shell=True)
    except subprocess.CalledProcessError as cpe:
        logging.error("Error: Expected GPG ID not found in keyring!")
        logging.error("Error: {}".format(cpe))
        exit(1)

    saved_path = os.getcwd()
    try:
        os.chdir(dist_dir)
        logging.debug('Changed directory to \"{}\"'.format(dist_dir))
    except OSError as ose:
        message = ('Error: could not change directory to {}: {}'.format(
            dist_dir, ose))
        exit(message)

    logging.info(
        "Downloading RPM/DEB packages to directory: {}".format(dist_dir))

    file_list = download_linux_pkgs_from_github(args, logging)

    # Run rpmsign command for rpm
    for item in file_list:
        if re.match(r'.*\.rpm$', item):
            logging.info("Signing RPM: {}".format(item))

            # Currently only the release channel requires the expect script
            # rpm-resign.exp. Nightly, dev and beta do not, although they
            # eventually will once we use the same signing key for all channels.
            if channel in ['release']:
                rpm_resign_cmd = os.path.join(repo_dir, "rpm-resign.exp")
                cmd = "{} {} {} {}".format(rpm_resign_cmd, gpg_full_key_id,
                                           item, gpg_passphrase)
                log_cmd = "{} {} {} {}".format(rpm_resign_cmd, gpg_full_key_id,
                                               item, 'NOTAREALPASSWORD')
            else:
                cmd = "rpmsign --resign --key-id={} {}".format(
                    gpg_full_key_id, item)
                log_cmd = cmd
            logging.info("Running command: \"{}\"".format(log_cmd))

            try:
                subprocess.check_output(cmd, shell=True)
                logging.info("RPM signing successful!")
            except subprocess.CalledProcessError as cpe:
                logging.error("Error running command: \"{}\"".format(log_cmd))
                exit(1)

    try:
        os.chdir(repo_dir)
        logging.debug('Changed directory to \"{}\"'.format(repo_dir))
    except OSError as ose:
        message = ('Error: could not change directory to {}: {}'.format(
            repo_dir, ose))
        exit(message)

    # remove files older than 120 days from dist_dir
    delete_age = 120 * 86400

    logging.info(
        "Performing removal of files older than 120 days in directory: {}".
        format(dist_dir))

    # act=False is used for testing, instead of actually removing the files
    # we just report what files would be removed
    remove_files_older_x_days(dist_dir, delete_age, act=False)

    # Now upload to aptly and rpm repos

    for item in ['upload_to_aptly', 'upload_to_rpm_repo']:
        bucket = ''
        if re.match(r'.*rpm.*', item):
            bucket = 'brave-browser-rpm-staging-'
        else:
            bucket = 'brave-browser-apt-staging-'

        upload_script = os.path.join(repo_dir, item)

        TESTCHANNEL = 'test'

        if s3_test_buckets:
            upload_cmd = '{} {} {}'.format(
                upload_script, bucket + channel + '-' + TESTCHANNEL,
                gpg_full_key_id)
        else:
            upload_cmd = '{} {} {}'.format(upload_script, bucket + channel,
                                           gpg_full_key_id)
        logging.info("Running command: \"{}\"".format(upload_cmd))
        try:
            subprocess.check_output(upload_cmd, shell=True)
        except subprocess.CalledProcessError as cpe:
            logging.error("Error: {}".format(cpe))
            exit(1)

    # Not sure we need to change back to the original dir here,
    # keeping it for now.
    try:
        os.chdir(saved_path)
        logging.debug('Changed directory to \"{}\"'.format(saved_path))
    except OSError as ose:
        message = ('Error: could not change directory to {}: {}'.format(
            saved_path, ose))
        exit(message)
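
# The helper remove_files_older_x_days() used above is defined elsewhere in
# these scripts; the function below is a minimal sketch of what it plausibly
# does, assuming it compares each file's mtime against a cutoff and deletes
# only when act=True (otherwise it just reports). The name, parameters and
# exclude_patterns handling here are illustrative assumptions, not the real
# implementation.
import os
import re
import time


def remove_files_older_x_days_sketch(path, max_age_seconds, act=False,
                                     exclude_patterns=None):
    cutoff = time.time() - max_age_seconds
    for name in os.listdir(path):
        full = os.path.join(path, name)
        # Only consider regular files; leave directories alone.
        if not os.path.isfile(full):
            continue
        # Honor exclusion patterns such as the ['.*keyring.*'] global used
        # in a later example.
        if exclude_patterns and any(re.match(p, name)
                                    for p in exclude_patterns):
            continue
        if os.path.getmtime(full) < cutoff:
            if act:
                os.remove(full)
            else:
                print("[DRY RUN] Would remove: {}".format(full))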
Example #4
def main():
    args = parse_args()
    print('[INFO] Running upload...')

    # Enable urllib3 debugging output
    if os.environ.get('DEBUG_HTTP_HEADERS') == 'true':
        logging.basicConfig(level=logging.DEBUG)
        logging.getLogger("urllib3").setLevel(logging.DEBUG)
        logging.debug(
            "DEBUG_HTTP_HEADERS env var is enabled, logging HTTP headers")
        debug_requests_on()

    # BRAVE_REPO is defined in lib/helpers.py for now
    repo = GitHub(get_env_var('GITHUB_TOKEN')).repos(BRAVE_REPO)

    tag = get_brave_version()
    release = get_release(repo, tag, allow_published_release_updates=False)

    if not release:
        print("[INFO] No existing release found, creating new "
              "release for this upload")
        release = create_release_draft(repo, tag)

    print('[INFO] Uploading release {}'.format(release['tag_name']))
    # Upload Brave with GitHub Releases API.
    upload_brave(repo,
                 release,
                 os.path.join(dist_dir(), DIST_NAME),
                 force=args.force)
    upload_brave(repo,
                 release,
                 os.path.join(dist_dir(), SYMBOLS_NAME),
                 force=args.force)
    # if PLATFORM == 'darwin':
    #     upload_brave(repo, release, os.path.join(dist_dir(), DSYM_NAME))
    # elif PLATFORM == 'win32':
    #     upload_brave(repo, release, os.path.join(dist_dir(), PDB_NAME))

    pkgs = get_brave_packages(output_dir(), release_channel(),
                              get_raw_version())

    if PLATFORM == 'darwin':
        for pkg in pkgs:
            upload_brave(repo,
                         release,
                         os.path.join(output_dir(), pkg),
                         force=args.force)
    elif PLATFORM == 'win32':
        if get_target_arch() == 'x64':
            upload_brave(repo,
                         release,
                         os.path.join(output_dir(), 'brave_installer.exe'),
                         'brave_installer-x64.exe',
                         force=args.force)
            for pkg in pkgs:
                upload_brave(repo,
                             release,
                             os.path.join(output_dir(), pkg),
                             force=args.force)
        else:
            upload_brave(repo,
                         release,
                         os.path.join(output_dir(), 'brave_installer.exe'),
                         'brave_installer-ia32.exe',
                         force=args.force)
            for pkg in pkgs:
                upload_brave(repo,
                             release,
                             os.path.join(output_dir(), pkg),
                             force=args.force)
    else:
        if get_target_arch() == 'x64':
            for pkg in pkgs:
                upload_brave(repo,
                             release,
                             os.path.join(output_dir(), pkg),
                             force=args.force)
        else:
            upload_brave(repo,
                         release,
                         os.path.join(output_dir(), 'brave-i386.rpm'),
                         force=args.force)
            upload_brave(repo,
                         release,
                         os.path.join(output_dir(), 'brave-i386.deb'),
                         force=args.force)

    # mksnapshot = get_zip_name('mksnapshot', get_brave_version())
    # upload_brave(repo, release, os.path.join(dist_dir(), mksnapshot))

    # if PLATFORM == 'win32' and not tag_exists:
    #     # Upload PDBs to Windows symbol server.
    #     run_python_script('upload-windows-pdb.py')

    if os.environ.get('DEBUG_HTTP_HEADERS') == 'true':
        debug_requests_off()
    print('[INFO] Finished upload')
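
# debug_requests_on()/debug_requests_off() used above are helpers defined
# elsewhere; the functions below are a minimal sketch of the kind of toggle
# they might be, assuming they flip http.client's debuglevel and the urllib3
# logger so request/response headers get printed. The _sketch names are
# hypothetical.
import logging

try:
    from http import client as http_client  # Python 3
except ImportError:
    import httplib as http_client  # Python 2


def debug_requests_on_sketch():
    # Print raw HTTP request/response lines and headers.
    http_client.HTTPConnection.debuglevel = 1
    logging.getLogger("urllib3").setLevel(logging.DEBUG)
    logging.getLogger("urllib3").propagate = True


def debug_requests_off_sketch():
    # Restore quiet HTTP behaviour.
    http_client.HTTPConnection.debuglevel = 0
    logging.getLogger("urllib3").setLevel(logging.WARNING)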
Example #5
def get_upload_version():
    """
    Returns the version of brave-browser
    """
    return get_raw_version()
Example #6
def main():
    args = parse_args()
    print('[INFO] Running upload...')

    # Repo is defined in lib/helpers.py for now
    repo = GitHub(get_env_var('GITHUB_TOKEN')).repos(BRAVE_REPO)

    tag = get_brave_version()
    release = get_draft(repo, tag)

    if not release:
        print(
            "[INFO] No existing release found, creating new release for this upload"
        )
        release = create_release_draft(repo, tag)

    print('[INFO] Uploading release {}'.format(release['tag_name']))
    # Upload Brave with GitHub Releases API.
    upload_brave(repo,
                 release,
                 os.path.join(dist_dir(), DIST_NAME),
                 force=args.force)
    upload_brave(repo,
                 release,
                 os.path.join(dist_dir(), SYMBOLS_NAME),
                 force=args.force)
    # if PLATFORM == 'darwin':
    #   upload_brave(repo, release, os.path.join(dist_dir(), DSYM_NAME))
    # elif PLATFORM == 'win32':
    #   upload_brave(repo, release, os.path.join(dist_dir(), PDB_NAME))

    # Upload chromedriver and mksnapshot.
    chromedriver = get_zip_name('chromedriver', get_chromedriver_version())
    upload_brave(repo,
                 release,
                 os.path.join(dist_dir(), chromedriver),
                 force=args.force)

    pkgs = yield_brave_packages(output_dir(), release_channel(),
                                get_raw_version())

    if PLATFORM == 'darwin':
        for pkg in pkgs:
            upload_brave(repo,
                         release,
                         os.path.join(output_dir(), pkg),
                         force=args.force)
    elif PLATFORM == 'win32':
        if get_target_arch() == 'x64':
            upload_brave(repo,
                         release,
                         os.path.join(output_dir(), 'brave_installer.exe'),
                         'brave_installer-x64.exe',
                         force=args.force)
        else:
            upload_brave(repo,
                         release,
                         os.path.join(output_dir(), 'brave_installer.exe'),
                         'brave_installer-ia32.exe',
                         force=args.force)
    else:
        if get_target_arch() == 'x64':
            for pkg in pkgs:
                upload_brave(repo,
                             release,
                             os.path.join(output_dir(), pkg),
                             force=args.force)
        else:
            upload_brave(repo,
                         release,
                         os.path.join(output_dir(), 'brave-i386.rpm'),
                         force=args.force)
            upload_brave(repo,
                         release,
                         os.path.join(output_dir(), 'brave-i386.deb'),
                         force=args.force)

    # mksnapshot = get_zip_name('mksnapshot', get_brave_version())
    # upload_brave(repo, release, os.path.join(dist_dir(), mksnapshot))

    # if PLATFORM == 'win32' and not tag_exists:
    #   # Upload PDBs to Windows symbol server.
    #   run_python_script('upload-windows-pdb.py')

    # NOTE: the parsed version is computed here but not used further below.
    versions = parse_version(args.version)
    version = '.'.join(versions[:3])
    print('[INFO] Finished upload')
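
# get_zip_name() used above is a repo helper; the function below is a rough
# sketch, assuming it builds a '<name>-<version>-<platform>-<arch>.zip' style
# filename from the PLATFORM constant and get_target_arch() seen elsewhere in
# these examples. The exact naming scheme is an assumption, not the real
# helper's behaviour.
def get_zip_name_sketch(name, version, suffix=''):
    zip_name = '{0}-{1}-{2}-{3}'.format(name, version, PLATFORM,
                                        get_target_arch())
    if suffix:
        zip_name += '-' + suffix
    return zip_name + '.zip'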
Example #7
def main():

    args = parse_args()
    channel = args.channel
    repo_dir = args.repo_dir
    dist_dir = os.path.join(repo_dir, 'dist')
    gpg_full_key_id = args.gpg_full_key_id

    if args.skip_github and args.github_token:
        exit(
            "Error: --skip_github and --github_token are mutually exclusive, only one allowed"
        )

    if args.unmount is not False and channel in ['beta', 'dev', 'nightly']:
        unmount = args.unmount
    if channel in ['release']:
        if not args.gpg_passphrase:
            logging.error(
                "Error: --gpg_passphrase required for channel {}".format(
                    channel))
            exit(1)
        else:
            gpg_passphrase = args.gpg_passphrase
    s3_test_buckets = args.s3_test_buckets

    if os.environ.get('adrbrowsiel_CORE_DIR'):
        adrbrowsiel_core_dir = os.environ.get('adrbrowsiel_CORE_DIR')
    else:
        logging.error(
            "Error: Required environment variable \'adrbrowsiel_CORE_DIR\' not set! Exiting..."
        )
        exit(1)

    if args.debug:
        logging.basicConfig(stream=sys.stderr, level=logging.DEBUG)
        logging.debug('adrbrowsiel_version: {}'.format(get_raw_version()))
        logging.debug('channel: {}'.format(channel))
        logging.debug('repo_dir: {}'.format(repo_dir))
        logging.debug('dist_dir: {}'.format(dist_dir))
        logging.debug('gpg_full_key_id: {}'.format(gpg_full_key_id))
        logging.debug('gpg_passphrase: {}'.format("NOTAREALPASSWORD"))
        logging.debug('s3_test_buckets: {}'.format(s3_test_buckets))
        logging.debug('adrbrowsiel_core_dir: {}'.format(adrbrowsiel_core_dir))

    # verify we have the GPG key we're expecting in the public keyring
    list_keys_cmd = "/usr/bin/gpg2 --list-keys --with-subkey-fingerprints | grep {}".format(
        gpg_full_key_id)
    logging.info(
        "Verifying the GPG key \'{}\' is in our public keyring...".format(
            gpg_full_key_id))
    logging.debug("Running command: {}".format(list_keys_cmd))
    try:
        output = subprocess.check_output(list_keys_cmd, shell=True)
    except subprocess.CalledProcessError as cpe:
        logging.debug("Expected GPG ID not found in keyring!")
        logging.debug("Output from gpg2 --list-keys command: {}".format(cpe))
        if args.unmount is not False and channel in ['beta', 'dev', 'nightly']:
            logging.debug("Unmounting /home/ubuntu/.gnupg")
            list_keys_cmd = "/usr/bin/gpg2 --list-keys --with-subkey-fingerprints | grep {}".format(
                unmount)
            logging.debug("Running command: {}".format(list_keys_cmd))
            try:
                output = subprocess.check_output(list_keys_cmd, shell=True)
            except subprocess.CalledProcessError as cpe:
                logging.error("Error: {}".format(cpe))
                exit(1)
            try:
                unmount_cmd = "sudo umount /home/ubuntu/.gnupg"
                logging.debug("Running command: {}".format(unmount_cmd))
                output = subprocess.check_output(unmount_cmd, shell=True)
            except subprocess.CalledProcessError as cpe:
                logging.error("Error: {}".format(cpe))
                exit(1)

    saved_path = os.getcwd()
    try:
        os.chdir(dist_dir)
        logging.debug('Changed directory to \"{}\"'.format(dist_dir))
    except OSError as ose:
        message = ('Error: could not change directory to {}: {}'.format(
            dist_dir, ose))
        exit(message)

    if not args.skip_github:
        logging.info(
            "Downloading RPM/DEB packages to directory: {}".format(dist_dir))

        file_list = download_linux_pkgs_from_github(args, logging)

    try:
        os.chdir(repo_dir)
        logging.debug('Changed directory to \"{}\"'.format(repo_dir))
    except OSError as ose:
        message = ('Error: could not change directory to {}: {}'.format(
            repo_dir, ose))
        exit(message)

    # remove files older than 60 days from dist_dir
    delete_age = 60 * 86400
    # do not remove files that match this pattern
    global exclude_patterns
    exclude_patterns = ['.*keyring.*']

    logging.info(
        "Performing removal of files older than 60 days in directory: {}".
        format(dist_dir))

    remove_files_older_x_days(dist_dir, delete_age, act=True)

    # If release channel, unlock GPG signing key which has a cache timeout of 30
    # minutes set in the gpg-agent.conf
    if channel in ['release']:
        gpgconf_cmd = ['gpgconf', '--kill', 'gpg-agent']
        logging.info("Running command: \"{}\"".format(gpgconf_cmd))
        try:
            # Pass the argument list directly; combining a list with
            # shell=True would run only 'gpgconf' without its arguments.
            subprocess.check_output(gpgconf_cmd)
            logging.info("\"gpgconf --kill gpg-agent\" succeeded")
        except subprocess.CalledProcessError as cpe:
            logging.error("Error: {}".format(cpe))
            exit(1)
        cmd = [
            'gpg2', '--batch', '--pinentry-mode', 'loopback', '--passphrase',
            gpg_passphrase, '--sign'
        ]
        log_cmd = [
            'gpg2', '--batch', '--pinentry-mode', 'loopback', '--passphrase',
            'NOTAREALPASSWORD', '--sign'
        ]
        logging.info("Running command: \"{}\"".format(log_cmd))
        try:
            p1 = subprocess.Popen(['echo'], stdout=subprocess.PIPE)
            # Capture stderr too so the check below can actually see any
            # diagnostics from gpg2 (it was always None without stderr=PIPE).
            p2 = subprocess.Popen(cmd, stdin=p1.stdout,
                                  stdout=subprocess.PIPE,
                                  stderr=subprocess.PIPE)
            p1.stdout.close()
            (stdoutdata, stderrdata) = p2.communicate()
            if stderrdata:
                logging.error(
                    "subprocess.Popen.communicate() error: {}".format(
                        stderrdata))
            logging.info("gpg2 unlock signing key successful!")
        except Exception as e:
            logging.error("Error running command: \"{}\"".format(log_cmd))
            exit(1)

    # Now upload to aptly and rpm repos

    for item in ['upload_to_aptly', 'upload_to_rpm_repo']:
        bucket = ''
        if re.match(r'.*rpm.*', item):
            bucket = 'adrbrowsiel-browser-rpm-staging-'
        else:
            bucket = 'adrbrowsiel-browser-apt-staging-'

        upload_script = os.path.join(adrbrowsiel_core_dir, 'script', item)

        TESTCHANNEL = 'test'

        if s3_test_buckets:
            upload_cmd = '{} {} {}'.format(
                upload_script, bucket + channel + '-' + TESTCHANNEL,
                gpg_full_key_id)
        else:
            upload_cmd = '{} {} {}'.format(upload_script, bucket + channel,
                                           gpg_full_key_id)
        logging.info("Running command: \"{}\"".format(upload_cmd))
        try:
            subprocess.check_output(upload_cmd, shell=True)
        except subprocess.CalledProcessError as cpe:
            logging.error("Error: {}".format(cpe))
            exit(1)

    # Not sure we need to change back to the original dir here,
    # keeping it for now.
    try:
        os.chdir(saved_path)
        logging.debug('Changed directory to \"{}\"'.format(saved_path))
    except OSError as ose:
        message = ('Error: could not change directory to {}: {}'.format(
            saved_path, ose))
        exit(message)
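
# parse_args() used throughout these examples is defined elsewhere; below is a
# minimal argparse sketch covering only the options the examples actually
# read. Flag names mirror the attribute names in the code; defaults and help
# strings are illustrative assumptions.
import argparse


def parse_args_sketch():
    parser = argparse.ArgumentParser(
        description='Sign and upload Linux packages')
    parser.add_argument('--channel', required=True,
                        help='nightly, dev, beta or release')
    parser.add_argument('--repo_dir', required=True,
                        help='path to the packaging repo checkout')
    parser.add_argument('--gpg_full_key_id', required=True,
                        help='full fingerprint of the signing key')
    parser.add_argument('--gpg_passphrase',
                        help='passphrase for the signing key (release only)')
    parser.add_argument('--s3_test_buckets', action='store_true',
                        help='upload to the "-test" staging buckets')
    parser.add_argument('--debug', action='store_true',
                        help='enable debug logging')
    parser.add_argument('--skip_github', action='store_true',
                        help='skip downloading packages from GitHub')
    parser.add_argument('--github_token',
                        help='GitHub API token (exclusive with --skip_github)')
    parser.add_argument('--unmount', default=False,
                        help='GPG key id to verify before unmounting ~/.gnupg')
    return parser.parse_args()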
Example #8
def get_upload_version():
    """
    Returns the version of brave-browser
    """
    return get_raw_version()
def main():

    args = parse_args()

    channel = args.channel
    repo_dir = args.repo_dir
    dist_dir = os.path.join(repo_dir, 'dist')
    gpg_full_key_id = args.gpg_full_key_id
    if channel in ['release']:
        if not args.gpg_passphrase:
            logging.error(
                "Error: --gpg_passphrase required for channel {}".format(channel))
            exit(1)
        else:
            gpg_passphrase = args.gpg_passphrase
    s3_test_buckets = args.s3_test_buckets

    if args.debug:
        logging.basicConfig(stream=sys.stderr, level=logging.DEBUG)
        logging.debug('brave_version: {}'.format(get_raw_version()))
        logging.debug('channel: {}'.format(channel))
        logging.debug('repo_dir: {}'.format(repo_dir))
        logging.debug('dist_dir: {}'.format(dist_dir))
        logging.debug('gpg_full_key_id: {}'.format(gpg_full_key_id))
        logging.debug('gpg_passphrase: {}'.format("NOTAREALPASSWORD"))
        logging.debug('s3_test_buckets: {}'.format(s3_test_buckets))

    # verify we have the GPG key we're expecting in the public keyring
    list_keys_cmd = "/usr/bin/gpg2 --list-keys --with-subkey-fingerprints | grep {}".format(
        gpg_full_key_id)
    logging.info("Verifying the GPG key \'{}\' is in our public keyring...".format(
        gpg_full_key_id))
    logging.debug("Running command: {}".format(list_keys_cmd))
    try:
        output = subprocess.check_output(list_keys_cmd, shell=True)
    except subprocess.CalledProcessError as cpe:
        logging.error("Error: Expected GPG ID not found in keyring!")
        logging.error("Error: {}".format(cpe))
        exit(1)

    saved_path = os.getcwd()
    try:
        os.chdir(dist_dir)
        logging.debug('Changed directory to \"{}\"'.format(dist_dir))
    except OSError as ose:
        message = (
            'Error: could not change directory to {}: {}'.format(dist_dir, ose))
        exit(message)

    logging.info(
        "Downloading RPM/DEB packages to directory: {}".format(dist_dir))

    file_list = download_linux_pkgs_from_github(args, logging)

    # Run rpmsign command for rpm
    for item in file_list:
        if re.match(r'.*\.rpm$', item):
            logging.info("Signing RPM: {}".format(item))

            # Currently only the release channel requires the expect script
            # rpm-resign.exp. Nightly, dev and beta do not, although they
            # eventually will once we use the same signing key for all channels.
            if channel in ['release']:
                rpm_resign_cmd = os.path.join(repo_dir, "rpm-resign.exp")
                cmd = "{} {} {} {}".format(
                    rpm_resign_cmd, gpg_full_key_id, item, gpg_passphrase)
                log_cmd = "{} {} {} {}".format(
                    rpm_resign_cmd, gpg_full_key_id, item, 'NOTAREALPASSWORD')
            else:
                cmd = "rpmsign --resign --key-id={} {}".format(
                    gpg_full_key_id, item)
                log_cmd = cmd
            logging.info("Running command: \"{}\"".format(log_cmd))

            try:
                subprocess.check_output(cmd, shell=True)
                logging.info("RPM signing successful!")
            except subprocess.CalledProcessError as cpe:
                logging.error("Error running command: \"{}\"".format(log_cmd))
                exit(1)

    try:
        os.chdir(repo_dir)
        logging.debug('Changed directory to \"{}\"'.format(repo_dir))
    except OSError as ose:
        message = (
            'Error: could not change directory to {}: {}'.format(repo_dir, ose))
        exit(message)

    # remove files older than 120 days from dist_dir
    delete_age = 120 * 86400

    logging.info("Performing removal of files older than 120 days in directory: {}".format(dist_dir))

    # act=False is used for testing, instead of actually removing the files
    # we just report what files would be removed
    remove_files_older_x_days(dist_dir, delete_age, act=False)

    # Now upload to aptly and rpm repos

    for item in ['upload_to_aptly', 'upload_to_rpm_repo']:
        bucket = ''
        if re.match(r'.*rpm.*', item):
            bucket = 'brave-browser-rpm-staging-'
        else:
            bucket = 'brave-browser-apt-staging-'

        upload_script = os.path.join(repo_dir, item)

        TESTCHANNEL = 'test'

        if s3_test_buckets:
            upload_cmd = '{} {} {}'.format(upload_script, bucket + channel + '-' +
                                           TESTCHANNEL, gpg_full_key_id)
        else:
            upload_cmd = '{} {} {}'.format(
                upload_script, bucket + channel, gpg_full_key_id)
        logging.info("Running command: \"{}\"".format(upload_cmd))
        try:
            subprocess.check_output(upload_cmd, shell=True)
        except subprocess.CalledProcessError as cpe:
            logging.error("Error: {}".format(cpe))
            exit(1)

    # Not sure we need to change back to the original dir here,
    # keeping it for now.
    try:
        os.chdir(saved_path)
        logging.debug('Changed directory to \"{}\"'.format(saved_path))
    except OSError as ose:
        message = (
            'Error: could not change directory to {}: {}'.format(saved_path, ose))
        exit(message)
Example #10
def get_upload_version():
    '''
    Returns the version of brave-browser
    '''
    return get_raw_version()