def main():
    """Build (if stale) and upload Electron release artifacts to GitHub.

    Creates/reuses a release draft unless --overwrite targets an
    already-published tag. Returns 1 on tag/build-version mismatch.
    """
    args = parse_args()
    # Rebuild dist artifacts when they are older than HEAD.
    if not dist_newer_than_head():
        run_python_script('create-dist.py')

    # Sanity check: the tag being released must match what was built.
    build_version = get_electron_build_version()
    if not ELECTRON_VERSION.startswith(build_version):
        error = 'Tag name ({0}) should match build version ({1})\n'.format(
            ELECTRON_VERSION, build_version)
        sys.stderr.write(error)
        sys.stderr.flush()
        return 1

    github = GitHub(auth_token())
    releases = github.repos(ELECTRON_REPO).releases.get()
    tag_exists = False
    for r in releases:
        # Only a published (non-draft) release counts as an existing tag.
        if not r['draft'] and r['tag_name'] == args.version:
            release = r
            tag_exists = True
            break

    # NOTE(review): validation via assert disappears under `python -O`;
    # an explicit check-and-fail would be safer.
    assert tag_exists == args.overwrite, \
        'You have to pass --overwrite to overwrite a published release'
    if not args.overwrite:
        release = create_or_get_release_draft(github, releases, args.version,
                                              tag_exists)

    # Upload Electron with GitHub Releases API.
    upload_electron(github, release, os.path.join(DIST_DIR, DIST_NAME))
    upload_electron(github, release, os.path.join(DIST_DIR, SYMBOLS_NAME))
    if PLATFORM == 'darwin':
        # macOS additionally ships the API description files and dSYMs.
        upload_electron(github, release,
                        os.path.join(DIST_DIR, 'electron-api.json'))
        upload_electron(github, release,
                        os.path.join(DIST_DIR, 'electron.d.ts'))
        upload_electron(github, release, os.path.join(DIST_DIR, DSYM_NAME))
    elif PLATFORM == 'win32':
        upload_electron(github, release, os.path.join(DIST_DIR, PDB_NAME))

    # Upload free version of ffmpeg.
    ffmpeg = get_zip_name('ffmpeg', ELECTRON_VERSION)
    upload_electron(github, release, os.path.join(DIST_DIR, ffmpeg))

    # Upload chromedriver and mksnapshot for minor version update.
    if parse_version(args.version)[2] == '0':
        chromedriver = get_zip_name('chromedriver', ELECTRON_VERSION)
        upload_electron(github, release, os.path.join(DIST_DIR, chromedriver))
        mksnapshot = get_zip_name('mksnapshot', ELECTRON_VERSION)
        upload_electron(github, release, os.path.join(DIST_DIR, mksnapshot))

    if PLATFORM == 'win32' and not tag_exists:
        # Upload PDBs to Windows symbol server.
        run_python_script('upload-windows-pdb.py')

    # Upload node headers.
    run_python_script('create-node-headers.py', '-v', args.version)
    run_python_script('upload-node-headers.py', '-v', args.version)
def main():
    """Upload Electron artifacts to a GitHub release draft, or (with -p)
    publish checksums for an existing release.

    Returns 1 on tag/build-version mismatch, otherwise None.
    """
    args = parse_args()
    if not args.publish_release:
        # Sanity check: the tag being released must match what was built.
        build_version = get_electron_build_version()
        if not get_electron_version().startswith(build_version):
            error = 'Tag name ({0}) should match build version ({1})\n'.format(
                get_electron_version(), build_version)
            sys.stderr.write(error)
            sys.stderr.flush()
            return 1

    github = GitHub(auth_token())
    releases = github.repos(ELECTRON_REPO).releases.get()
    tag_exists = False
    for release in releases:
        # Only a published (non-draft) release counts as an existing tag.
        if not release['draft'] and release['tag_name'] == args.version:
            tag_exists = True
            break

    release = create_or_get_release_draft(github, releases, args.version,
                                          tag_exists)

    if args.publish_release:
        # Upload the Node SHASUMS*.txt.
        run_python_script('upload-node-checksums.py', '-v',
                          get_electron_version(),
                          '--dist-url', args.dist_url)

        # Upload the index.json.
        # run_python_script('upload-index-json.py')

        # Create and upload the Electron SHASUMS*.txt
        release_electron_checksums(github, release)

        # Press the publish button.
        # publish_release(github, release['id'])

        # Do not upload other files when passed "-p".
        return

    # Upload Electron with GitHub Releases API.
    upload_electron(github, release, os.path.join(dist_dir(), DIST_NAME))
    # upload_electron(github, release, os.path.join(dist_dir(), SYMBOLS_NAME))
    # if PLATFORM == 'darwin':
    #     upload_electron(github, release, os.path.join(dist_dir(), DSYM_NAME))
    # elif PLATFORM == 'win32':
    #     upload_electron(github, release, os.path.join(dist_dir(), PDB_NAME))

    # Upload chromedriver and mksnapshot.
    chromedriver = get_zip_name('chromedriver', get_chromedriver_version())
    upload_electron(github, release, os.path.join(dist_dir(), chromedriver))
    # mksnapshot = get_zip_name('mksnapshot', get_electron_version())
    # upload_electron(github, release, os.path.join(dist_dir(), mksnapshot))

    # if PLATFORM == 'win32' and not tag_exists:
    #     # Upload PDBs to Windows symbol server.
    #     run_python_script('upload-windows-pdb.py')

    # Upload node headers.
    run_python_script('upload-node-headers.py', '-v', args.version)
def main():
    """Upload Electron artifacts (dist + symbols) to a GitHub release draft,
    or (with -p) publish checksums for an existing release.

    Returns 1 on tag/build-version mismatch, otherwise None.
    """
    args = parse_args()
    if not args.publish_release:
        # Sanity check: the tag being released must match what was built.
        build_version = get_electron_build_version()
        if not get_electron_version().startswith(build_version):
            error = 'Tag name ({0}) should match build version ({1})\n'.format(
                get_electron_version(), build_version)
            sys.stderr.write(error)
            sys.stderr.flush()
            return 1

    github = GitHub(auth_token())
    releases = github.repos(ELECTRON_REPO).releases.get()
    tag_exists = False
    for release in releases:
        # Only a published (non-draft) release counts as an existing tag.
        if not release['draft'] and release['tag_name'] == args.version:
            tag_exists = True
            break

    release = create_or_get_release_draft(github, releases, args.version,
                                          tag_exists)

    if args.publish_release:
        # Upload the Node SHASUMS*.txt.
        run_python_script('upload-node-checksums.py', '-v',
                          get_electron_version(),
                          '--dist-url', args.dist_url)

        # Upload the index.json.
        # run_python_script('upload-index-json.py')

        # Create and upload the Electron SHASUMS*.txt
        release_electron_checksums(github, release)

        # Press the publish button.
        # publish_release(github, release['id'])

        # Do not upload other files when passed "-p".
        return

    # Upload Electron with GitHub Releases API.
    upload_electron(github, release, os.path.join(dist_dir(), DIST_NAME))
    upload_electron(github, release, os.path.join(dist_dir(), SYMBOLS_NAME))
    # if PLATFORM == 'darwin':
    #     upload_electron(github, release, os.path.join(dist_dir(), DSYM_NAME))
    # elif PLATFORM == 'win32':
    #     upload_electron(github, release, os.path.join(dist_dir(), PDB_NAME))

    # Upload chromedriver and mksnapshot.
    chromedriver = get_zip_name('chromedriver', get_chromedriver_version())
    upload_electron(github, release, os.path.join(dist_dir(), chromedriver))
    # mksnapshot = get_zip_name('mksnapshot', get_electron_version())
    # upload_electron(github, release, os.path.join(dist_dir(), mksnapshot))

    # if PLATFORM == 'win32' and not tag_exists:
    #     # Upload PDBs to Windows symbol server.
    #     run_python_script('upload-windows-pdb.py')

    # Upload node headers.
    run_python_script('upload-node-headers.py', '-v', args.version)
def main():
    """Build (if stale) and upload atom-shell artifacts to a GitHub release
    draft, or (with -p) publish an existing release.

    Returns 1 on tag/build-version mismatch, otherwise None.
    """
    args = parse_args()
    if not args.publish_release:
        # Rebuild dist artifacts when they are older than HEAD.
        if not dist_newer_than_head():
            create_dist = os.path.join(SOURCE_ROOT, 'script', 'create-dist.py')
            execute([sys.executable, create_dist])

        # Sanity check: the tag being released must match what was built.
        build_version = get_atom_shell_build_version()
        if not ATOM_SHELL_VERSION.startswith(build_version):
            error = 'Tag name ({0}) should match build version ({1})\n'.format(
                ATOM_SHELL_VERSION, build_version)
            sys.stderr.write(error)
            sys.stderr.flush()
            return 1

    github = GitHub(auth_token())
    releases = github.repos(ATOM_SHELL_REPO).releases.get()
    tag_exists = False
    for release in releases:
        # Only a published (non-draft) release counts as an existing tag.
        if not release['draft'] and release['tag_name'] == args.version:
            tag_exists = True
            break

    release = create_or_get_release_draft(github, releases, args.version,
                                          tag_exists)

    if args.publish_release:
        # Upload the SHASUMS.txt.
        execute([sys.executable,
                 os.path.join(SOURCE_ROOT, 'script', 'upload-checksums.py'),
                 '-v', ATOM_SHELL_VERSION])

        # Upload the index.json.
        execute([sys.executable,
                 os.path.join(SOURCE_ROOT, 'script', 'upload-index-json.py')])

        # Press the publish button.
        publish_release(github, release['id'])

        # Do not upload other files when passed "-p".
        return

    # Upload atom-shell with GitHub Releases API.
    upload_atom_shell(github, release, os.path.join(DIST_DIR, DIST_NAME))
    upload_atom_shell(github, release, os.path.join(DIST_DIR, SYMBOLS_NAME))

    # Upload chromedriver and mksnapshot for minor version update.
    if parse_version(args.version)[2] == '0':
        chromedriver = 'chromedriver-{0}-{1}-{2}.zip'.format(
            get_chromedriver_version(), get_platform_key(), get_target_arch())
        upload_atom_shell(github, release,
                          os.path.join(DIST_DIR, chromedriver))
        upload_atom_shell(github, release,
                          os.path.join(DIST_DIR, MKSNAPSHOT_NAME))

    if PLATFORM == 'win32' and not tag_exists:
        # Upload node headers.
        execute([sys.executable,
                 os.path.join(SOURCE_ROOT, 'script', 'upload-node-headers.py'),
                 '-v', args.version])
def main():
    """Upload build artifacts for the current package.json version to a
    (possibly newly created) GitHub release draft on browser-laptop.
    """
    github = GitHub(auth_token())
    releases = github.repos(BROWSER_LAPTOP_REPO).releases.get()
    # Tag is the package version prefixed with "v" (e.g. "v1.2.3").
    # Fixed: json.load(open(...)) leaked the file handle; a with-statement
    # closes it deterministically.
    with open('package.json') as pkg_file:
        tag = 'v' + json.load(pkg_file)['version']
    tag_exists = False
    for release in releases:
        # Only a published (non-draft) release counts as an existing tag.
        if not release['draft'] and release['tag_name'] == tag:
            tag_exists = True
            break
    release = create_or_get_release_draft(github, releases, tag, tag_exists)
    for f in get_files_to_upload():
        upload_browser_laptop(github, release, f)
def main():
    """Upload build artifacts for the current package.json version to a
    (possibly newly created) GitHub release draft on browser-laptop.
    """
    github = GitHub(auth_token())
    releases = github.repos(BROWSER_LAPTOP_REPO).releases.get()
    # Tag is the package version prefixed with "v" (e.g. "v1.2.3").
    # Fixed: json.load(open(...)) leaked the file handle; a with-statement
    # closes it deterministically.
    with open('package.json') as pkg_file:
        tag = 'v' + json.load(pkg_file)['version']
    tag_exists = False
    for release in releases:
        # Only a published (non-draft) release counts as an existing tag.
        if not release['draft'] and release['tag_name'] == tag:
            tag_exists = True
            break
    release = create_or_get_release_draft(github, releases, tag, tag_exists)
    for f in get_files_to_upload():
        # (also normalized the missing space after the comma in the
        # original call's argument list)
        upload_browser_laptop(github, release, f)
def main():
    """Publish the release for the current package.json version (plus
    channel suffix) on browser-laptop.
    """
    github = GitHub(auth_token())
    releases = github.repos(BROWSER_LAPTOP_REPO).releases.get()
    # Tag is "v" + version + channel suffix.
    # Fixed: json.load(open(...)) leaked the file handle; a with-statement
    # closes it deterministically.
    with open('package.json') as pkg_file:
        tag = ('v' + json.load(pkg_file)['version'] + release_channel())
    tag_exists = False
    for release in releases:
        # Only a published (non-draft) release counts as an existing tag.
        if not release['draft'] and release['tag_name'] == tag:
            tag_exists = True
            break
    release = create_or_get_release_draft(github, releases, tag, tag_exists)
    # Press the publish button.
    publish_release(github, release['id'])
def main():
    """Upload the built Meson dist archive to a GitHub release draft."""
    args = parse_args()
    github = GitHub(auth_token())
    releases = github.repos(MESON_REPO).releases.get()
    # A tag "exists" only when a published (non-draft) release carries it.
    tag_exists = any(
        not rel['draft'] and rel['tag_name'] == args.version
        for rel in releases)
    release = create_or_get_release_draft(github, releases, args.version,
                                          tag_exists)
    upload_meson(github, release, os.path.join(DIST_DIR, DIST_NAME))
def main():
    """Build (if stale) atom-shell, upload artifacts to a GitHub release
    draft, push node headers to S3, and optionally publish.

    Returns 1 on tag/build-version mismatch, otherwise None.
    """
    args = parse_args()

    # Rebuild dist artifacts when they are older than HEAD.
    if not dist_newer_than_head():
        create_dist = os.path.join(SOURCE_ROOT, 'script', 'create-dist.py')
        execute([sys.executable, create_dist])

    # Sanity check: the tag being released must match what was built.
    build_version = get_atom_shell_build_version()
    if not ATOM_SHELL_VERSION.startswith(build_version):
        error = 'Tag name ({0}) should match build version ({1})\n'.format(
            ATOM_SHELL_VERSION, build_version)
        sys.stderr.write(error)
        sys.stderr.flush()
        return 1

    # Upload atom-shell with GitHub Releases API.
    github = GitHub(auth_token())
    release_id = create_or_get_release_draft(github, args.version)
    upload_atom_shell(github, release_id, os.path.join(DIST_DIR, DIST_NAME))
    upload_atom_shell(github, release_id, os.path.join(DIST_DIR, SYMBOLS_NAME))

    # Upload node's headers to S3.
    bucket, access_key, secret_key = s3_config()
    upload_node(bucket, access_key, secret_key, NODE_VERSION)

    if args.publish_release:
        # Press the publish button.
        publish_release(github, release_id)

        # Upload the SHASUMS.txt.
        execute([sys.executable,
                 os.path.join(SOURCE_ROOT, 'script', 'upload-checksums.py'),
                 '-v', NODE_VERSION])
def main():
    """Publish the Brave GitHub release matching $npm_config_brave_version.

    With --prerelease the release must still be a draft; a full release may
    already be published. Exits if the version env var is missing.
    """
    args = parse_args()

    if args.debug:
        logging.basicConfig(stream=sys.stderr, level=logging.DEBUG)
        logging.debug('prerelease: {}'.format(args.prerelease))

    if not os.environ.get('npm_config_brave_version'):
        # Fixed: the adjacent string literals previously concatenated
        # without a space ("...versionenvironment variable").
        message = ('Error: Please set the $npm_config_brave_version '
                   'environment variable')
        exit(message)

    BRAVE_VERSION = os.environ.get('npm_config_brave_version')

    repo = GitHub(get_env_var('GITHUB_TOKEN')).repos(BRAVE_REPO)
    tag = BRAVE_VERSION
    logging.debug("Tag: {}".format(tag))

    # If we are publishing a prerelease, the release can only be in draft
    # mode. If we are publishing a full release, it is allowed to already
    # be a published release.
    if args.prerelease:
        release = get_draft(repo, tag)
    else:
        release = get_release(repo, tag, allow_published_release_updates=True)
    tag_name = release['tag_name']
    logging.debug("release[id]: {}".format(release['id']))
    logging.info("Releasing {}".format(tag_name))
    publish_release(repo, release['id'], tag_name, args.prerelease, logging)
def main():
    """
    Download the brave-browser/CHANGELOG.md file, parse it and convert to
    markdown, then update the release notes for the release specified.
    """
    args = parse_args()
    if args.debug:
        logging.basicConfig(stream=sys.stderr, level=logging.DEBUG)

    changelog_url = args.url
    tag = args.tag

    # The tag must arrive as "refs/tags/vX.Y.Z" or "vX.Y.Z".
    if not re.match(r'^refs/tags/', tag) and not re.match(r'^v', tag):
        logging.error(" Tag prefix must contain {} or {}".format(
            "\"refs/tags/\"", "\"v\""))
        exit(1)

    # Strip "refs/tags/" (if present), then the leading "v" to get the
    # bare version string.
    match = re.match(r'^refs/tags/(.*)$', tag)
    if match:
        tag = match.group(1)
    match = re.match(r'^v(.*)$', tag)
    if match:
        version = match.group(1)

    logging.debug("CHANGELOG_URL: {}".format(changelog_url))
    logging.debug("TAG: {}".format(tag))
    logging.debug("VERSION: {}".format(version))

    changelog_txt = download_from_url(args, logging, changelog_url)
    tag_changelog_txt = render_markdown(changelog_txt, version, logging)

    # BRAVE_REPO is defined in lib/helpers.py
    repo = GitHub(get_env_var('GITHUB_TOKEN')).repos(BRAVE_REPO)
    release = get_release(repo, tag, allow_published_release_updates=True)
    logging.debug("Release body before update: \n\'{}\'".format(
        release['body']))

    logging.info("Merging original release body with changelog")
    new_body = release['body'] + '\n\n### Release Notes' + '\n\n' + \
        tag_changelog_txt
    logging.debug("release body is now: \n\'{}\'".format(new_body))

    data = dict(tag_name=tag, name=release['name'], body=new_body)
    # Fixed: was named `id`, shadowing the builtin.
    release_id = release['id']
    logging.debug("Updating release with id: {}".format(release_id))
    # Retry the PATCH on transient connection errors.
    release = retry_func(
        lambda run: repo.releases.__call__(f'{release_id}').patch(data=data),
        catch=requests.exceptions.ConnectionError, retries=3)
    logging.debug("Release body after update: \n\'{}\'".format(
        release['body']))
def main():
    """Press the publish button on the current browser-laptop draft."""
    gh_token = get_env('GITHUB_TOKEN')
    repo = GitHub(gh_token).repos(BROWSER_LAPTOP_REPO)
    draft = get_draft(repo, get_tag_without_channel())
    milestone_tag = get_commit_tag(get_version())
    print("[INFO] Releasing {}".format(draft['tag_name']))
    publish_release(repo, draft['id'], get_tag(), milestone_tag)
def main():
    """Press the publish button on the current Brave draft release."""
    gh_token = get_env_var('GITHUB_TOKEN')
    repo = GitHub(gh_token).repos(BRAVE_REPO)
    draft = get_draft(repo, get_brave_version())
    milestone_tag = get_commit_tag(get_raw_version())
    print("[INFO] Releasing {}".format(draft['tag_name']))
    publish_release(repo, draft['id'], get_tag(), milestone_tag)
def main():
    """Build (if stale) atom-shell and upload artifacts to a GitHub release
    draft, or (with -p) publish checksums/index and press publish.

    Returns 1 on tag/build-version mismatch, otherwise None.
    """
    args = parse_args()

    # Rebuild dist artifacts when they are older than HEAD.
    if not dist_newer_than_head():
        create_dist = os.path.join(SOURCE_ROOT, 'script', 'create-dist.py')
        execute([sys.executable, create_dist])

    # Sanity check: the tag being released must match what was built.
    build_version = get_atom_shell_build_version()
    if not ATOM_SHELL_VERSION.startswith(build_version):
        error = 'Tag name ({0}) should match build version ({1})\n'.format(
            ATOM_SHELL_VERSION, build_version)
        sys.stderr.write(error)
        sys.stderr.flush()
        return 1

    github = GitHub(auth_token())
    release_id = create_or_get_release_draft(github, args.version)

    if args.publish_release:
        # Upload the SHASUMS.txt.
        execute([sys.executable,
                 os.path.join(SOURCE_ROOT, 'script', 'upload-checksums.py'),
                 '-v', ATOM_SHELL_VERSION])

        # Upload the index.json.
        execute([sys.executable,
                 os.path.join(SOURCE_ROOT, 'script', 'upload-index-json.py')])

        # Press the publish button.
        publish_release(github, release_id)

        # Do not upload other files when passed "-p".
        return

    # Upload atom-shell with GitHub Releases API.
    upload_atom_shell(github, release_id, os.path.join(DIST_DIR, DIST_NAME))
    upload_atom_shell(github, release_id, os.path.join(DIST_DIR, SYMBOLS_NAME))

    # Upload chromedriver and mksnapshot for minor version update.
    if parse_version(args.version)[2] == '0':
        upload_atom_shell(github, release_id,
                          os.path.join(DIST_DIR, CHROMEDRIVER_NAME))
        upload_atom_shell(github, release_id,
                          os.path.join(DIST_DIR, MKSNAPSHOT_NAME))

    if PLATFORM == 'win32':
        # Upload node headers.
        execute([sys.executable,
                 os.path.join(SOURCE_ROOT, 'script', 'upload-node-headers.py'),
                 '-v', ATOM_SHELL_VERSION])
def download_linux_pkgs_from_github(args, logging): file_list = [] # BRAVE_REPO defined in helpers.py repo = GitHub(args.github_token).repos(BRAVE_REPO) tag_name = args.tag release = {} releases = get_releases_by_tag(repo, tag_name, include_drafts=True) if releases: if len(releases) > 1: exit("Error: More than 1 release exists with the tag: \'{}\'". format(tag_name)) release = releases[0] if release['assets'] is None: logging.error( 'Error: Could not find GitHub release with tag {}. Exiting...'. format(tag_name)) exit(1) else: logging.info( "Searching for RPM/DEB packages in GitHub release: {}".format( release['url'])) for asset in release['assets']: if re.match(r'.*\.rpm$', asset['name']) \ or re.match(r'.*\.deb$', asset['name']): filename = asset['name'] asset_id = asset['id'] asset_url = asset['url'] if args.debug: logging.debug("GitHub asset_url: {}".format(asset_url + '/' + filename)) # Check if the file exists on disk first, and rename it if so # to prevent a situation where the expect script fails because # the rpm has already been signed rename_file_if_exists(filename, logging) # Instantiate new requests session, versus reusing the repo session above. # Headers was likely being reused in that session, and not allowing us # to set the Accept header to the below. perform_github_download(asset_url, args, logging, filename, file_list) if len(file_list) < 2: logging.error( "Cannot get both RPM and DEB files from Github! " "Removing partially downloaded files from directory: {}".format( dist_dir)) remove_github_downloaded_files(file_list, logging) exit(1) return file_list
def main():
    """Publish the browser-laptop release for the current package version,
    linking it to the matching "x"-suffixed milestone tag.
    """
    github = GitHub(auth_token())
    releases = github.repos(BROWSER_LAPTOP_REPO).releases.get()
    # Fixed: json.load(open(...)) leaked the file handle; a with-statement
    # closes it deterministically.
    with open('package.json') as pkg_file:
        version = json.load(pkg_file)['version']
    tag = ('v' + version + release_channel())
    tag_exists = False
    for release in releases:
        # Only a published (non-draft) release counts as an existing tag.
        if not release['draft'] and release['tag_name'] == tag:
            tag_exists = True
            break
    release = create_or_get_release_draft(github, releases, tag, tag_exists)

    # match version to GitHub milestone: "1.2.3" -> "1.2.x"
    commit_tag = None
    parts = version.split('.', 3)
    if (len(parts) == 3):
        parts[2] = 'x'
        commit_tag = '.'.join(parts)

    # Press the publish button.
    if not tag_exists and commit_tag:
        publish_release(github, release['id'], tag, commit_tag)
def main():
    """Build (if stale) atom-shell, upload artifacts to a GitHub release
    draft, and (with -p) push headers/checksums/PDBs and publish.

    Returns 1 on tag/build-version mismatch, otherwise None.
    """
    args = parse_args()

    # Rebuild dist artifacts when they are older than HEAD.
    if not dist_newer_than_head():
        create_dist = os.path.join(SOURCE_ROOT, 'script', 'create-dist.py')
        execute([sys.executable, create_dist])

    # Sanity check: the tag being released must match what was built.
    build_version = get_atom_shell_build_version()
    if not ATOM_SHELL_VERSION.startswith(build_version):
        error = 'Tag name ({0}) should match build version ({1})\n'.format(
            ATOM_SHELL_VERSION, build_version)
        sys.stderr.write(error)
        sys.stderr.flush()
        return 1

    # Upload atom-shell with GitHub Releases API.
    github = GitHub(auth_token())
    release_id = create_or_get_release_draft(github, args.version)
    upload_atom_shell(github, release_id, os.path.join(DIST_DIR, DIST_NAME))
    upload_atom_shell(github, release_id, os.path.join(DIST_DIR, SYMBOLS_NAME))

    # Upload chromedriver for minor version update.
    if parse_version(args.version)[2] == '0':
        upload_atom_shell(github, release_id,
                          os.path.join(DIST_DIR, CHROMEDRIVER_NAME))

    if args.publish_release:
        # Upload node's headers to S3.
        bucket, access_key, secret_key = s3_config()
        upload_node(bucket, access_key, secret_key, ATOM_SHELL_VERSION)

        # Upload the SHASUMS.txt.
        execute([sys.executable,
                 os.path.join(SOURCE_ROOT, 'script', 'upload-checksums.py'),
                 '-v', ATOM_SHELL_VERSION])

        # Upload PDBs to Windows symbol server.
        if TARGET_PLATFORM == 'win32':
            execute([sys.executable,
                     os.path.join(SOURCE_ROOT, 'script',
                                  'upload-windows-pdb.py')])

        # Press the publish button.
        publish_release(github, release_id)
def main():
    """Bisect published Brave releases to find the first broken version,
    then list the commits introduced by that release (macOS only).
    """
    global tag_names
    supported_platforms = ['Darwin']
    if platform.system() not in supported_platforms:
        print(
            'Error: Platform \'{}\' not supported; acceptable platform(s): {}'.
            format(platform.system(), ", ".join(supported_platforms)))
        exit(1)
    args = parse_args()
    if args.real_profile and args.use_profile:
        print(
            '[ERROR] you can\'t use both `--fresh-profile` AND `--use-profile` at the same time.'
        )
        return 1
    github_token = get_github_token()
    repo = GitHub(github_token).repos(BRAVE_REPO)
    get_releases(repo)
    # Sort tags numerically by their dotted version components.
    # Fixed: the key was `map(int, ...)`, which returns a non-orderable
    # map object on Python 3 (TypeError when sorting); a list of ints
    # behaves identically on Python 2 and correctly on Python 3.
    tag_names.sort(key=lambda s: [int(part) for part in s.split('.')])
    filter_releases(args)
    first_broken_version, attempts = find_first_broken_version(args)
    print('DONE: issue first appeared in "' + str(first_broken_version) +
          '" (found in ' + str(attempts) + ' attempts)')
    try:
        broken_index = tag_names.index(first_broken_version)
        if broken_index > 0:
            previous_release = tag_names[broken_index - 1]
            versions = 'v' + previous_release + '..v' + first_broken_version
            if args.verbose:
                print('[INFO] finding commits using "git log --pretty=oneline '
                      + versions + '"')
            commits = execute(['git', 'log', '--pretty=oneline',
                               versions]).strip()
            commit_lines = commits.split('\n')
            print('Commits specific to tag "v' + first_broken_version +
                  '" (' + str(len(commit_lines)) + ' commit(s)):')
            print(commits)
    except Exception as e:
        print('[ERROR] ' + str(e))
def is_sha(ref):
    """Return True when *ref* looks like a commit SHA known to brave-core.

    github.py deliberately raises when a response body contains `message`,
    so a "found" commit can surface as an exception whose text is the JSON
    payload; we parse that text and compare its 'sha' field to *ref*.
    """
    global config
    repo = GitHub(config.github_token).repos(BRAVE_CORE_REPO)
    try:
        repo.git.commits(str(ref)).get()
    except Exception as e:
        response = str(e)
        try:
            # Unsure of original intention, but github.py (on purpose) throws
            # an exception if the response body contains `message`.
            # This catch has the response (in JSON) in exception
            json_response = json.loads(response)
            if json_response['sha'] == ref:
                return True
        except Exception as e2:
            # Exception text was not JSON (e.g. a genuine network error).
            return False
    # NOTE(review): a .get() that raises nothing also returns False here —
    # confirm that a clean API success is really meant to be "not a SHA".
    return False
def main(args):
    """Upload browser-laptop artifacts to the draft release for the current
    channel-less tag, creating the draft when none exists."""
    print('[INFO] Running upload...')
    repo = GitHub(get_env('GITHUB_TOKEN')).repos(BROWSER_LAPTOP_REPO)
    release_tag = get_tag_without_channel()
    draft = get_draft(repo, release_tag)
    if not draft:
        print(
            "[INFO] No existing release found, creating new release for this upload"
        )
        draft = create_release_draft(repo, release_tag)
    print('[INFO] Uploading release {}'.format(draft['tag_name']))
    for artifact in get_files_to_upload():
        upload_browser_laptop(repo, draft, artifact, force=args.force)
    print('[INFO] Finished upload')
def main():
    """Publish the OneVN draft release matching $npm_config_onevn_version.

    Exits if the version environment variable is missing.
    """
    args = parse_args()

    if args.debug:
        logging.basicConfig(stream=sys.stderr, level=logging.DEBUG)
        logging.debug('prerelease: {}'.format(args.prerelease))

    if not os.environ.get('npm_config_onevn_version'):
        # Fixed: the adjacent string literals previously concatenated
        # without a space ("...versionenvironment variable").
        message = ('Error: Please set the $npm_config_onevn_version '
                   'environment variable')
        exit(message)

    ONEVN_VERSION = os.environ.get('npm_config_onevn_version')

    repo = GitHub(get_env_var('GITHUB_TOKEN')).repos(ONEVN_REPO)
    release = get_draft(repo, ONEVN_VERSION)
    tag_name = release['tag_name']
    logging.info("Releasing {}".format(tag_name))
    publish_release(repo, release['id'], tag_name, args.prerelease, logging)
def main():
    """Uplift a local branch or an existing PR onto each release-channel
    branch of brave-core: cherry-pick per channel, push, and open PRs.

    Returns 0 on success, non-zero on the first failure.
    """
    args = parse_args()
    if args.verbose:
        print('[INFO] args: ' + str(args))

    global config
    result = config.initialize(args)
    if result != 0:
        return result

    result = fetch_origin_check_staged(BRAVE_CORE_ROOT)
    if result != 0:
        return result

    # get all channel branches (starting at master)
    brave_browser_version = get_remote_version('master')
    remote_branches = get_remote_channel_branches(brave_browser_version)
    top_level_base = 'master'

    # if starting point is NOT nightly, remove options which aren't desired
    # also, find the branch which should be used for diffs (for cherry-picking)
    if not is_nightly(args.start_from):
        top_level_base = remote_branches[args.start_from]
        try:
            start_index = config.channel_names.index(args.start_from)
            config.channels_to_process = config.channel_names[start_index:]
        except Exception as e:
            print('[ERROR] specified `start-from` value "' + args.start_from +
                  '" not found in channel list')
            return 1

    # optionally (instead of having a local branch), allow uplifting a
    # specific PR; this pulls down the pr locally (in a special branch)
    if args.uplift_using_pr:
        try:
            pr_number = int(args.uplift_using_pr)
            repo = GitHub(config.github_token).repos(BRAVE_CORE_REPO)
            # get enough details from PR to check out locally
            response = repo.pulls(pr_number).get()
            head = response['head']
            local_branch = 'pr' + str(pr_number) + '_' + head['ref']
            head_sha = head['sha']
            top_level_base = response['base']['ref']
            top_level_sha = response['base']['sha']
            merged_at = str(response['merged_at']).strip()
            config.title = str(response['title']).strip()
        except Exception as e:
            print('[ERROR] Error parsing or error returned from API when '
                  'looking up pull request "' + str(args.uplift_using_pr) +
                  '":\n' + str(e))
            return 1

        # set starting point AHEAD of the PR provided
        config.master_pr_number = pr_number
        if top_level_base == 'master':
            config.channels_to_process = config.channel_names[1:]
        else:
            branch_index = remote_branches.index(top_level_base)
            config.channels_to_process = config.channel_names[branch_index:]

        # if PR was already merged, use the SHA it was PRed against
        if merged_at != 'None' and len(merged_at) > 0:
            print('pr was already merged at ' + merged_at + '; using "' +
                  top_level_sha + '" instead of "' + top_level_base + '"')
            top_level_base = top_level_sha
        else:
            # don't allow uplift of PRs which are not merged
            print('[ERROR] Pull request ' + str(pr_number) +
                  ' has not been merged yet. ' +
                  'Only merged requests can be uplifted.')
            return 1

        # create local branch which matches the contents of the PR
        with scoped_cwd(BRAVE_CORE_ROOT):
            # check if branch exists already
            try:
                branch_sha = execute(['git', 'rev-parse', '-q', '--verify',
                                      local_branch])
            except Exception as e:
                branch_sha = ''
            if len(branch_sha) > 0:
                # branch exists; reset it
                print('branch "' + local_branch +
                      '" exists; resetting to origin/' + head['ref'] +
                      ' (' + head_sha + ')')
                execute(['git', 'checkout', local_branch])
                execute(['git', 'reset', '--hard', head_sha])
            else:
                # create the branch
                print('creating branch "' + local_branch +
                      '" using origin/' + head['ref'] +
                      ' (' + head_sha + ')')
                execute(['git', 'checkout', '-b', local_branch, head_sha])

    # If title isn't set already, generate one from first commit
    local_branch = get_local_branch_name(BRAVE_CORE_ROOT)
    if not config.title and not args.uplift_using_pr:
        config.title = get_title_from_first_commit(BRAVE_CORE_ROOT,
                                                   top_level_base)

    # Create a branch for each channel
    print('\nCreating branches...')
    fancy_print('NOTE: Commits are being detected by diffing "' +
                local_branch + '" against "' + top_level_base + '"')
    local_branches = {}
    branch = ''
    try:
        for channel in config.channels_to_process:
            branch = create_branch(channel, top_level_base,
                                   remote_branches[channel],
                                   local_branch, args)
            local_branches[channel] = branch
            # Stop once the requested target channel has been processed.
            if channel == args.uplift_to:
                break
    except Exception as e:
        print('[ERROR] cherry-pick failed for branch "' + branch +
              '". Please resolve manually:\n' + str(e))
        return 1

    print('\nPushing local branches to remote...')
    push_branches_to_remote(BRAVE_CORE_ROOT, config.branches_to_push,
                            dryrun=config.is_dryrun,
                            token=config.github_token)

    try:
        print('\nCreating the pull requests...')
        for channel in config.channels_to_process:
            submit_pr(channel, top_level_base, remote_branches[channel],
                      local_branches[channel])
            # Stop once the requested target channel has been processed.
            if channel == args.uplift_to:
                break
        print('\nDone!')
    except Exception as e:
        print('\n[ERROR] Unhandled error while creating pull request; ' +
              str(e))
        return 1

    return 0
def main():
    """Build (if stale) and upload atom-shell artifacts to a GitHub release
    draft, or (with -p) upload checksums/index and publish.

    Returns 1 on tag/build-version mismatch, otherwise None.
    """
    args = parse_args()
    if not args.publish_release:
        # Rebuild dist artifacts when they are older than HEAD.
        if not dist_newer_than_head():
            create_dist = os.path.join(SOURCE_ROOT, 'script', 'create-dist.py')
            execute([sys.executable, create_dist])

        # Sanity check: the tag being released must match what was built.
        build_version = get_atom_shell_build_version()
        if not ATOM_SHELL_VERSION.startswith(build_version):
            error = 'Tag name ({0}) should match build version ({1})\n'.format(
                ATOM_SHELL_VERSION, build_version)
            sys.stderr.write(error)
            sys.stderr.flush()
            return 1

    github = GitHub(auth_token())
    releases = github.repos(ATOM_SHELL_REPO).releases.get()
    tag_exists = False
    for release in releases:
        # Only a published (non-draft) release counts as an existing tag.
        if not release['draft'] and release['tag_name'] == args.version:
            tag_exists = True
            break

    release = create_or_get_release_draft(github, releases, args.version,
                                          tag_exists)

    if args.publish_release:
        # Upload the SHASUMS.txt.
        execute([sys.executable,
                 os.path.join(SOURCE_ROOT, 'script', 'upload-checksums.py'),
                 '-v', ATOM_SHELL_VERSION])

        # Upload the index.json.
        execute([sys.executable,
                 os.path.join(SOURCE_ROOT, 'script', 'upload-index-json.py')])

        # Press the publish button.
        publish_release(github, release['id'])

        # Do not upload other files when passed "-p".
        return

    # Upload atom-shell with GitHub Releases API.
    upload_atom_shell(github, release, os.path.join(DIST_DIR, DIST_NAME))
    upload_atom_shell(github, release, os.path.join(DIST_DIR, SYMBOLS_NAME))

    # Upload chromedriver and mksnapshot for minor version update.
    if parse_version(args.version)[2] == '0':
        chromedriver = 'chromedriver-{0}-{1}-{2}.zip'.format(
            get_chromedriver_version(), get_platform_key(), get_target_arch())
        upload_atom_shell(github, release,
                          os.path.join(DIST_DIR, chromedriver))
        upload_atom_shell(github, release,
                          os.path.join(DIST_DIR, MKSNAPSHOT_NAME))

    if PLATFORM == 'win32' and not tag_exists:
        # Upload node headers.
        execute([sys.executable,
                 os.path.join(SOURCE_ROOT, 'script', 'upload-node-headers.py'),
                 '-v', args.version])
def main():
    """Upload Brave build artifacts and per-platform packages/installers to
    the GitHub release draft for the current version.
    """
    args = parse_args()
    print('[INFO] Running upload...')

    # Enable urllib3 debugging output
    if os.environ.get('DEBUG_HTTP_HEADERS') == 'true':
        logging.basicConfig(level=logging.DEBUG)
        logging.getLogger("urllib3").setLevel(logging.DEBUG)
        logging.debug(
            "DEBUG_HTTP_HEADERS env var is enabled, logging HTTP headers")
        debug_requests_on()

    # BRAVE_REPO is defined in lib/helpers.py for now
    repo = GitHub(get_env_var('GITHUB_TOKEN')).repos(BRAVE_REPO)
    tag = get_brave_version()
    release = get_release(repo, tag, allow_published_release_updates=False)
    if not release:
        print("[INFO] No existing release found, creating new "
              "release for this upload")
        release = create_release_draft(repo, tag)

    print('[INFO] Uploading release {}'.format(release['tag_name']))
    # Upload Brave with GitHub Releases API.
    upload_brave(repo, release, os.path.join(dist_dir(), DIST_NAME),
                 force=args.force)
    upload_brave(repo, release, os.path.join(dist_dir(), SYMBOLS_NAME),
                 force=args.force)
    # if PLATFORM == 'darwin':
    #     upload_brave(repo, release, os.path.join(dist_dir(), DSYM_NAME))
    # elif PLATFORM == 'win32':
    #     upload_brave(repo, release, os.path.join(dist_dir(), PDB_NAME))

    pkgs = get_brave_packages(output_dir(), release_channel(),
                              get_raw_version())

    if PLATFORM == 'darwin':
        for pkg in pkgs:
            upload_brave(repo, release, os.path.join(output_dir(), pkg),
                         force=args.force)
    elif PLATFORM == 'win32':
        # The installer is renamed per-architecture on upload.
        if get_target_arch() == 'x64':
            upload_brave(repo, release,
                         os.path.join(output_dir(), 'brave_installer.exe'),
                         'brave_installer-x64.exe', force=args.force)
            for pkg in pkgs:
                upload_brave(repo, release, os.path.join(output_dir(), pkg),
                             force=args.force)
        else:
            upload_brave(repo, release,
                         os.path.join(output_dir(), 'brave_installer.exe'),
                         'brave_installer-ia32.exe', force=args.force)
            for pkg in pkgs:
                upload_brave(repo, release, os.path.join(output_dir(), pkg),
                             force=args.force)
    else:
        # Linux: packages for x64; fixed i386 rpm/deb names otherwise.
        if get_target_arch() == 'x64':
            for pkg in pkgs:
                upload_brave(repo, release, os.path.join(output_dir(), pkg),
                             force=args.force)
        else:
            upload_brave(repo, release,
                         os.path.join(output_dir(), 'brave-i386.rpm'),
                         force=args.force)
            upload_brave(repo, release,
                         os.path.join(output_dir(), 'brave-i386.deb'),
                         force=args.force)

    # mksnapshot = get_zip_name('mksnapshot', get_brave_version())
    # upload_brave(repo, release, os.path.join(dist_dir(), mksnapshot))

    # if PLATFORM == 'win32' and not tag_exists:
    #     # Upload PDBs to Windows symbol server.
    #     run_python_script('upload-windows-pdb.py')

    if os.environ.get('DEBUG_HTTP_HEADERS') == 'true':
        debug_requests_off()
    print('[INFO] Finished upload')
def main():
    """Create or reuse the draft GitHub release for ``args.version`` and
    upload the Brave artifacts to it; with ``--publish-release`` only the
    checksum files are uploaded instead.

    Returns 1 on a version mismatch, otherwise None.
    """
    args = parse_args()
    if not args.publish_release:
        # NOTE(review): this compares get_brave_version() against itself,
        # so the error branch below can never trigger — presumably copied
        # from a script that read the build version out of the dist
        # output. Confirm intent before relying on this check.
        build_version = get_brave_version()
        if not get_brave_version().startswith(build_version):
            error = 'Tag name ({0}) should match build version ({1})\n'.format(
                get_brave_version(), build_version)
            sys.stderr.write(error)
            sys.stderr.flush()
            return 1

    github = GitHub(auth_token())
    releases = github.repos(BRAVE_REPO).releases.get()
    # Detect whether a *published* (non-draft) release already carries
    # this tag; drafts are ignored here.
    tag_exists = False
    for release in releases:
        if not release['draft'] and release['tag_name'] == args.version:
            tag_exists = True
            break

    release = create_or_get_release_draft(github, releases, args.version,
                                          tag_exists)

    if args.publish_release:
        # Create and upload the Brave SHASUMS*.txt
        release_brave_checksums(github, release)

        # Press the publish button.
        # publish_release(github, release['id'])

        # Do not upload other files when passed "-p".
        return

    # Upload Brave with GitHub Releases API.
    upload_brave(github, release, os.path.join(dist_dir(), DIST_NAME))
    upload_brave(github, release, os.path.join(dist_dir(), SYMBOLS_NAME))
    # if PLATFORM == 'darwin':
    #   upload_brave(github, release, os.path.join(dist_dir(), DSYM_NAME))
    # elif PLATFORM == 'win32':
    #   upload_brave(github, release, os.path.join(dist_dir(), PDB_NAME))

    # Upload chromedriver and mksnapshot.
    chromedriver = get_zip_name('chromedriver', get_chromedriver_version())
    upload_brave(github, release, os.path.join(dist_dir(), chromedriver))

    # Platform-specific installer artifacts.
    if PLATFORM == 'darwin':
        upload_brave(github, release, os.path.join(output_dir(), 'Brave.dmg'))
    elif PLATFORM == 'win32':
        if get_target_arch() == 'x64':
            upload_brave(github, release,
                         os.path.join(output_dir(), 'brave_installer.exe'),
                         'brave_installer-x64.exe')
        else:
            upload_brave(github, release,
                         os.path.join(output_dir(), 'brave_installer.exe'),
                         'brave_installer-ia32.exe')
    # TODO: Enable after linux packaging lands
    #else:
        #if get_target_arch() == 'x64':
            #upload_brave(github, release, os.path.join(output_dir(), 'brave-x86_64.rpm'))
            #upload_brave(github, release, os.path.join(output_dir(), 'brave-amd64.deb'))
        #else:
            #upload_brave(github, release, os.path.join(output_dir(), 'brave-i386.rpm'))
            #upload_brave(github, release, os.path.join(output_dir(), 'brave-i386.deb'))

    # mksnapshot = get_zip_name('mksnapshot', get_brave_version())
    # upload_brave(github, release, os.path.join(dist_dir(), mksnapshot))

    # if PLATFORM == 'win32' and not tag_exists:
    #   # Upload PDBs to Windows symbol server.
    #   run_python_script('upload-windows-pdb.py')

    # NOTE(review): computed but never used after this point — looks like
    # a dead tail left over from a refactor; confirm before removing.
    versions = parse_version(args.version)
    version = '.'.join(versions[:3])
def download_from_github(args, logging):
    """Download the installer assets (``*.dmg`` and
    ``brave_installer*.exe``) attached to the release for a tag and
    return the list of local file paths (prefixed with ``./``).

    args: parsed CLI args; uses ``args.tag`` (optional tag override) and
        ``args.debug`` (verbose HTTP logging).
    logging: the logging module, passed in by the caller.

    Exits the process when the release is missing/ambiguous or fewer
    than the 3 expected files could be downloaded.
    """
    file_list = []
    # BRAVE_REPO defined in helpers.py
    repo = GitHub(get_env_var('GITHUB_TOKEN')).repos(BRAVE_REPO)
    tag_name = args.tag if args.tag else get_brave_version()

    releases = get_releases_by_tag(repo, tag_name, include_drafts=True)
    if not releases:
        # Fix: previously `release` stayed None here and the asset loop
        # below crashed with a TypeError; fail with a clear message
        # instead (matches the sibling downloader's behaviour).
        exit("Error: Did not get the release \'{}\' from Github.".format(
            tag_name))
    if len(releases) > 1:
        exit("Error: More than 1 release exists with the tag: \'{}\'".format(
            tag_name))
    release = releases[0]

    for asset in release['assets']:
        if re.match(r'.*\.dmg$', asset['name']) \
                or re.match(r'brave_installer.*\.exe$', asset['name']):
            filename = asset['name']
            asset_url = asset['url']
            if args.debug:
                logging.debug("GitHub asset_url: {}".format(
                    asset_url + '/' + filename))
            # Instantiate new requests session, versus reusing the repo
            # session above. Headers was likely being reused in that
            # session, and not allowing us to set the Accept header to
            # the below.
            # Fix: authenticate via the Authorization header instead of
            # the deprecated `?access_token=` query parameter, which
            # leaked the token into URLs and proxy/server logs.
            headers = {
                'Accept': 'application/octet-stream',
                'Authorization':
                    'token ' + os.environ.get('BRAVE_GITHUB_TOKEN')
            }
            if args.debug:
                # disable urllib3 logging for this request to avoid
                # showing the token in logs
                logging.getLogger("urllib3").setLevel(logging.WARNING)
            r = requests.get(asset_url, headers=headers, stream=True)
            if args.debug:
                logging.getLogger("urllib3").setLevel(logging.DEBUG)
            with open(filename, 'wb') as f:
                for chunk in r.iter_content(chunk_size=1024):
                    if chunk:
                        f.write(chunk)
            logging.debug(
                "Requests Response status_code: {}".format(r.status_code))
            if r.status_code == 200:
                file_list.append('./' + filename)
            else:
                logging.debug(
                    "Requests Response status_code != 200: {}".format(
                        r.status_code))

    if len(file_list) < 3:
        logging.error(
            "Cannot get all 3 install files from Github! (\'*.dmg\', \'brave_installer-x64.exe\',"
            " \'brave-installer-ia32.exe\')")
        remove_github_downloaded_files(file_list, logging)
        exit(1)

    return file_list
def main():
    """Upload Brave artifacts for the current version to its draft
    GitHub release, creating the draft when missing.

    Uploads dist/symbols zips, chromedriver, and the per-platform
    package set; honours ``--force`` for re-uploading existing assets.
    """
    args = parse_args()
    print('[INFO] Running upload...')

    # Repo is defined in lib/helpers.py for now
    repo = GitHub(get_env_var('GITHUB_TOKEN')).repos(BRAVE_REPO)
    tag = get_brave_version()
    release = get_draft(repo, tag)

    if not release:
        print(
            "[INFO] No existing release found, creating new release for this upload"
        )
        release = create_release_draft(repo, tag)

    print('[INFO] Uploading release {}'.format(release['tag_name']))
    # Upload Brave with GitHub Releases API.
    upload_brave(repo, release, os.path.join(dist_dir(), DIST_NAME),
                 force=args.force)
    upload_brave(repo, release, os.path.join(dist_dir(), SYMBOLS_NAME),
                 force=args.force)
    # if PLATFORM == 'darwin':
    #   upload_brave(repo, release, os.path.join(dist_dir(), DSYM_NAME))
    # elif PLATFORM == 'win32':
    #   upload_brave(repo, release, os.path.join(dist_dir(), PDB_NAME))

    # Upload chromedriver and mksnapshot.
    chromedriver = get_zip_name('chromedriver', get_chromedriver_version())
    upload_brave(repo, release, os.path.join(dist_dir(), chromedriver),
                 force=args.force)

    # NOTE(review): the name suggests a generator; it is only consumed in
    # one branch below, so single-pass iteration is safe — confirm if the
    # helper is ever changed to require multiple passes.
    pkgs = yield_brave_packages(output_dir(), release_channel(),
                                get_raw_version())

    if PLATFORM == 'darwin':
        for pkg in pkgs:
            upload_brave(repo, release, os.path.join(output_dir(), pkg),
                         force=args.force)
    elif PLATFORM == 'win32':
        # Windows uploads a single renamed installer per arch.
        if get_target_arch() == 'x64':
            upload_brave(repo, release,
                         os.path.join(output_dir(), 'brave_installer.exe'),
                         'brave_installer-x64.exe', force=args.force)
        else:
            upload_brave(repo, release,
                         os.path.join(output_dir(), 'brave_installer.exe'),
                         'brave_installer-ia32.exe', force=args.force)
    else:
        if get_target_arch() == 'x64':
            for pkg in pkgs:
                upload_brave(repo, release, os.path.join(output_dir(), pkg),
                             force=args.force)
        else:
            # 32-bit Linux ships fixed-name rpm/deb artifacts.
            upload_brave(repo, release,
                         os.path.join(output_dir(), 'brave-i386.rpm'),
                         force=args.force)
            upload_brave(repo, release,
                         os.path.join(output_dir(), 'brave-i386.deb'),
                         force=args.force)

    # mksnapshot = get_zip_name('mksnapshot', get_brave_version())
    # upload_brave(repo, release, os.path.join(dist_dir(), mksnapshot))

    # if PLATFORM == 'win32' and not tag_exists:
    #   # Upload PDBs to Windows symbol server.
    #   run_python_script('upload-windows-pdb.py')

    # NOTE(review): computed but never used — dead tail; confirm before
    # removing.
    versions = parse_version(args.version)
    version = '.'.join(versions[:3])

    print('[INFO] Finished upload')
def main():
    """Build (when stale) and upload all Electron release artifacts for
    ``args.version``, either to the GitHub release or to S3 when
    ``--upload_to_s3`` is set.

    Returns 1 when the tag does not match the built version.
    """
    args = parse_args()

    if not dist_newer_than_head():
        run_python_script('create-dist.py')

    build_version = get_electron_build_version()
    if not ELECTRON_VERSION.startswith(build_version):
        error = 'Tag name ({0}) should match build version ({1})\n'.format(
            ELECTRON_VERSION, build_version)
        sys.stderr.write(error)
        sys.stderr.flush()
        return 1

    github = GitHub(auth_token())
    releases = github.repos(ELECTRON_REPO).releases.get()
    tag_exists = False
    # Fix: initialise `release`. Previously it was unbound when uploading
    # to S3 with no matching published release, and the first
    # upload_electron() call crashed with a NameError.
    release = None
    for r in releases:
        if not r['draft'] and r['tag_name'] == args.version:
            release = r
            tag_exists = True
            break

    if not args.upload_to_s3:
        assert tag_exists == args.overwrite, \
            'You have to pass --overwrite to overwrite a published release'
        if not args.overwrite:
            release = create_or_get_release_draft(github, releases,
                                                  args.version, tag_exists)
    elif release is None:
        # Fix: the S3 upload path only needs the tag name from `release`.
        release = dict(tag_name=args.version)

    # Upload Electron with GitHub Releases API.
    upload_electron(github, release, os.path.join(DIST_DIR, DIST_NAME),
                    args.upload_to_s3)
    if get_target_arch() != 'mips64el':
        upload_electron(github, release, os.path.join(DIST_DIR, SYMBOLS_NAME),
                        args.upload_to_s3)
    if PLATFORM == 'darwin':
        upload_electron(github, release,
                        os.path.join(DIST_DIR, 'electron-api.json'),
                        args.upload_to_s3)
        upload_electron(github, release,
                        os.path.join(DIST_DIR, 'electron.d.ts'),
                        args.upload_to_s3)
        upload_electron(github, release, os.path.join(DIST_DIR, DSYM_NAME),
                        args.upload_to_s3)
    elif PLATFORM == 'win32':
        upload_electron(github, release, os.path.join(DIST_DIR, PDB_NAME),
                        args.upload_to_s3)

    # Upload free version of ffmpeg.
    ffmpeg = get_zip_name('ffmpeg', ELECTRON_VERSION)
    upload_electron(github, release, os.path.join(DIST_DIR, ffmpeg),
                    args.upload_to_s3)

    chromedriver = get_zip_name('chromedriver', ELECTRON_VERSION)
    upload_electron(github, release, os.path.join(DIST_DIR, chromedriver),
                    args.upload_to_s3)
    mksnapshot = get_zip_name('mksnapshot', ELECTRON_VERSION)
    upload_electron(github, release, os.path.join(DIST_DIR, mksnapshot),
                    args.upload_to_s3)
    if get_target_arch().startswith('arm'):
        # Upload the x64 binary for arm/arm64 mksnapshot
        mksnapshot = get_zip_name('mksnapshot', ELECTRON_VERSION, 'x64')
        upload_electron(github, release, os.path.join(DIST_DIR, mksnapshot),
                        args.upload_to_s3)

    if PLATFORM == 'win32' and not tag_exists and not args.upload_to_s3:
        # Upload PDBs to Windows symbol server.
        run_python_script('upload-windows-pdb.py')

    # Upload node headers.
    run_python_script('create-node-headers.py', '-v', args.version)
    run_python_script('upload-node-headers.py', '-v', args.version)
def main():
    """Build (when stale) and upload every Electron release artifact for
    the requested version, targeting either the GitHub release or S3
    (``--upload_to_s3``).

    Returns 1 when the tag does not match the built version.
    """
    args = parse_args()
    if args.upload_to_s3:
        utcnow = datetime.datetime.utcnow()
        args.upload_timestamp = utcnow.strftime('%Y-%m-%d_%H:%M:%S')

    if not dist_newer_than_head():
        run_python_script('create-dist.py')

    build_version = get_electron_build_version()
    if not ELECTRON_VERSION.startswith(build_version):
        sys.stderr.write(
            'Tag name ({0}) should match build version ({1})\n'.format(
                ELECTRON_VERSION, build_version))
        sys.stderr.flush()
        return 1

    github = GitHub(auth_token())
    releases = github.repos(ELECTRON_REPO).releases.get()

    # Look for a published (non-draft) release that already has this tag.
    tag_exists = False
    release = None
    for existing in releases:
        if not existing['draft'] and existing['tag_name'] == args.version:
            release = existing
            tag_exists = True
            break

    if not args.upload_to_s3:
        assert tag_exists == args.overwrite, \
            'You have to pass --overwrite to overwrite a published release'
        if not args.overwrite:
            release = create_or_get_release_draft(github, releases,
                                                  args.version, tag_exists)
    elif release is None:
        # The S3 path only reads the tag name out of `release`.
        release = dict(tag_name=args.version)

    # Collect every artifact name first, then upload them in one pass.
    artifacts = [DIST_NAME]
    if get_target_arch() != 'mips64el':
        artifacts.append(SYMBOLS_NAME)
    if PLATFORM == 'darwin':
        artifacts.extend(['electron-api.json', 'electron.d.ts', DSYM_NAME])
    elif PLATFORM == 'win32':
        artifacts.append(PDB_NAME)
    # Free version of ffmpeg, plus chromedriver and mksnapshot.
    artifacts.append(get_zip_name('ffmpeg', ELECTRON_VERSION))
    artifacts.append(get_zip_name('chromedriver', ELECTRON_VERSION))
    artifacts.append(get_zip_name('mksnapshot', ELECTRON_VERSION))
    if get_target_arch().startswith('arm'):
        # arm/arm64 builds also ship the x64 mksnapshot binary.
        artifacts.append(get_zip_name('mksnapshot', ELECTRON_VERSION, 'x64'))

    # Upload Electron with GitHub Releases API.
    for artifact in artifacts:
        upload_electron(github, release, os.path.join(DIST_DIR, artifact),
                        args)

    if not tag_exists and not args.upload_to_s3:
        # Upload symbols to symbol server.
        run_python_script('upload-symbols.py')
        if PLATFORM == 'win32':
            # Upload node headers.
            run_python_script('create-node-headers.py', '-v', args.version)
            run_python_script('upload-node-headers.py', '-v', args.version)
def main():
    """Uplift a local branch (or a specific merged PR) onto each release
    channel branch by cherry-picking, then push the branches and open
    the corresponding pull requests.

    Returns 0 on success, 1 on any handled failure, or the non-zero
    result of config initialisation / origin fetch.
    """
    args = parse_args()
    if args.verbose:
        print('[INFO] args: ' + str(args))

    # `config` is module-level state shared with the helpers below.
    global config
    result = config.initialize(args)
    if result != 0:
        return result

    result = fetch_origin_check_staged(BRAVE_CORE_ROOT)
    if result != 0:
        return result

    # get all channel branches (starting at master)
    brave_browser_version = get_remote_version('master')
    remote_branches = get_remote_channel_branches(brave_browser_version)
    top_level_base = 'master'

    # if starting point is NOT nightly, remove options which aren't desired
    # also, find the branch which should be used for diffs (for cherry-picking)
    if not is_nightly(args.start_from):
        # NOTE(review): indexed by channel name here but searched with
        # .index() further down — presumably a mapping-like helper object;
        # confirm its type in the helpers module.
        top_level_base = remote_branches[args.start_from]
        try:
            start_index = config.channel_names.index(args.start_from)
            config.channels_to_process = config.channel_names[start_index:]
        except Exception as e:
            print('[ERROR] specified `start-from` value "' + args.start_from +
                  '" not found in channel list')
            return 1

    # optionally (instead of having a local branch), allow uplifting a specific PR
    # this pulls down the pr locally (in a special branch)
    if args.uplift_using_pr:
        try:
            pr_number = int(args.uplift_using_pr)
            repo = GitHub(config.github_token).repos(BRAVE_CORE_REPO)
            # get enough details from PR to check out locally
            response = repo.pulls(pr_number).get()
            head = response['head']
            local_branch = 'pr' + str(pr_number) + '_' + head['ref']
            head_sha = head['sha']
            top_level_base = response['base']['ref']
            top_level_sha = response['base']['sha']
            merged_at = str(response['merged_at']).strip()
            config.title = str(response['title']).strip()
        except Exception as e:
            print(
                '[ERROR] Error parsing or error returned from API when looking up pull request "'
                + str(args.uplift_using_pr) + '":\n' + str(e))
            return 1

        # set starting point AHEAD of the PR provided
        config.master_pr_number = pr_number
        if top_level_base == 'master':
            config.channels_to_process = config.channel_names[1:]
        else:
            branch_index = remote_branches.index(top_level_base)
            config.channels_to_process = config.channel_names[branch_index:]

        # if PR was already merged, use the SHA it was PRed against
        # (merged_at is the stringified API field; 'None' means unmerged)
        if merged_at != 'None' and len(merged_at) > 0:
            print('pr was already merged at ' + merged_at +
                  '; using "' + top_level_sha + '" instead of "' +
                  top_level_base + '"')
            top_level_base = top_level_sha
        else:
            # don't allow uplift of PRs which are not merged
            print('[ERROR] Pull request ' + str(pr_number) +
                  ' has not been merged yet. ' +
                  'Only merged requests can be uplifted.')
            return 1

        # create local branch which matches the contents of the PR
        with scoped_cwd(BRAVE_CORE_ROOT):
            # check if branch exists already
            try:
                branch_sha = execute(
                    ['git', 'rev-parse', '-q', '--verify', local_branch])
            except Exception as e:
                branch_sha = ''
            if len(branch_sha) > 0:
                # branch exists; reset it
                print('branch "' + local_branch +
                      '" exists; resetting to origin/' + head['ref'] +
                      ' (' + head_sha + ')')
                execute(['git', 'checkout', local_branch])
                execute(['git', 'reset', '--hard', head_sha])
            else:
                # create the branch
                print('creating branch "' + local_branch +
                      '" using origin/' + head['ref'] +
                      ' (' + head_sha + ')')
                execute(['git', 'checkout', '-b', local_branch, head_sha])

    # If title isn't set already, generate one from first commit
    local_branch = get_local_branch_name(BRAVE_CORE_ROOT)
    if not config.title and not args.uplift_using_pr:
        config.title = get_title_from_first_commit(BRAVE_CORE_ROOT,
                                                   top_level_base)

    # Create a branch for each channel
    print('\nCreating branches...')
    fancy_print('NOTE: Commits are being detected by diffing "' +
                local_branch + '" against "' + top_level_base + '"')
    local_branches = {}
    branch = ''
    try:
        for channel in config.channels_to_process:
            branch = create_branch(channel, top_level_base,
                                   remote_branches[channel], local_branch)
            local_branches[channel] = branch
            # stop after the requested target channel
            if channel == args.uplift_to:
                break
    except Exception as e:
        print('[ERROR] cherry-pick failed for branch "' + branch +
              '". Please resolve manually:\n' + str(e))
        return 1

    print('\nPushing local branches to remote...')
    push_branches_to_remote(BRAVE_CORE_ROOT, config.branches_to_push,
                            dryrun=config.is_dryrun,
                            token=config.github_token)

    try:
        print('\nCreating the pull requests...')
        for channel in config.channels_to_process:
            submit_pr(channel, top_level_base, remote_branches[channel],
                      local_branches[channel])
            if channel == args.uplift_to:
                break
        print('\nDone!')
    except Exception as e:
        print('\n[ERROR] Unhandled error while creating pull request; ' +
              str(e))
        return 1

    return 0
def download_from_github(args, logging):
    """Download the installer assets for the requested platforms from the
    GitHub release for a tag and return the local file paths.

    args: parsed CLI args; uses ``args.tag``, ``args.debug``,
        ``args.platform`` (list of platform keys) and ``args.uploaded``
        (derive the platform list from what the release actually has).
    logging: the logging module, passed in by the caller.

    Exits the process when the release is missing/ambiguous, a requested
    platform has no matching asset, or any download fails.
    """
    file_list = []
    # BRAVE_REPO defined in helpers.py
    repo = GitHub(get_env_var('GITHUB_TOKEN')).repos(BRAVE_REPO)
    if args.tag:
        tag_name = args.tag
    else:
        tag_name = get_brave_version()
    release = None
    releases = get_releases_by_tag(repo, tag_name, include_drafts=True)
    if releases:
        if len(releases) > 1:
            exit("Error: More than 1 release exists with the tag: \'{}\'".
                 format(tag_name))
        release = releases[0]
    else:
        exit("Error: Did not get the release \'{}\' from Github.".format(
            tag_name))

    # Map each platform key to the asset-name pattern that identifies it;
    # replaces three copy-pasted if/elif bodies.
    asset_patterns = [
        ('darwin', r'.*\.dmg$'),
        ('win32', r'brave_installer-ia32\.exe$'),
        ('win64', r'brave_installer-x64\.exe$'),
    ]
    found_assets_in_github_release = {}
    for asset in release['assets']:
        for platform_key, pattern in asset_patterns:
            if re.match(pattern, asset['name']):
                if args.uploaded:
                    if not args.platform:
                        args.platform = []
                    args.platform.append(platform_key)
                found_assets_in_github_release[platform_key] = {
                    'name': asset['name'],
                    'url': asset['url'],
                }
                break

    logging.debug("Found assets in github release: {}".format(
        found_assets_in_github_release))

    # Fail fast if any requested platform has no matching asset.
    for requested_platform in args.platform:
        logging.debug(
            "Verifying platform \'{}\' exists in GitHub release".format(
                requested_platform))
        if requested_platform not in found_assets_in_github_release.keys():
            logging.error(
                "Platform \'{}\' does not exist in GitHub release".format(
                    requested_platform))
            exit(1)

    for platform in args.platform:
        asset = found_assets_in_github_release[platform]
        if args.debug:
            logging.debug("GitHub asset_url: {}".format(
                asset['url'] + '/' + asset['name']))
        # Instantiate new requests session, versus reusing the repo session
        # above. Headers was likely being reused in that session, and not
        # allowing us to set the Accept header to the below.
        headers = {
            'Accept': 'application/octet-stream',
            'Authorization': 'token ' + get_env_var('GITHUB_TOKEN')
        }
        if args.debug:
            # disable urllib3 logging for this session to avoid showing
            # access_token in logs
            logging.getLogger("urllib3").setLevel(logging.WARNING)
        r = requests.get(asset['url'], headers=headers, stream=True)
        if args.debug:
            logging.getLogger("urllib3").setLevel(logging.DEBUG)
        logging.debug("Requests Response status_code: {}".format(
            r.status_code))
        if r.status_code == 200:
            # Fix: only write the file on success. Previously the response
            # body was written to disk *before* the status check, so a
            # failed download left a junk file containing the error payload.
            logging.debug("Writing GitHub download to file: {}".format(
                asset['name']))
            with open(asset['name'], 'wb') as f:
                for chunk in r.iter_content(chunk_size=1024):
                    if chunk:
                        f.write(chunk)
            file_list.append('./' + asset['name'])
        else:
            logging.debug("Requests Response status_code != 200: {}".format(
                r.status_code))

    if len(file_list) < len(args.platform):
        for item in args.platform:
            logging.error("Cannot get requested file from Github! {}".format(
                found_assets_in_github_release[item]['name']))
        remove_github_downloaded_files(file_list, logging)
        exit(1)

    return file_list
def main():
    """Create/reuse the draft GitHub release for ``args.version`` and
    upload the Electron artifacts to it; with ``--publish-release``
    upload only the checksums/index and publish the release.

    Returns 1 when the tag does not match the built version.
    """
    args = parse_args()
    if not args.publish_release:
        # Rebuild the dist first if HEAD moved past it.
        if not dist_newer_than_head():
            run_python_script('create-dist.py')

        build_version = get_electron_build_version()
        if not ELECTRON_VERSION.startswith(build_version):
            sys.stderr.write(
                'Tag name ({0}) should match build version ({1})\n'.format(
                    ELECTRON_VERSION, build_version))
            sys.stderr.flush()
            return 1

    github = GitHub(auth_token())
    releases = github.repos(ELECTRON_REPO).releases.get()
    # A published (non-draft) release already carrying this tag?
    tag_exists = any(
        not rel['draft'] and rel['tag_name'] == args.version
        for rel in releases)

    release = create_or_get_release_draft(github, releases, args.version,
                                          tag_exists)

    if args.publish_release:
        # Upload the Node SHASUMS*.txt.
        run_python_script('upload-node-checksums.py', '-v', ELECTRON_VERSION)
        # Upload the index.json.
        run_python_script('upload-index-json.py')
        # Create and upload the Electron SHASUMS*.txt
        release_electron_checksums(github, release)
        # Press the publish button.
        publish_release(github, release['id'])
        # Do not upload other files when passed "-p".
        return

    # Upload Electron with GitHub Releases API: collect the asset names,
    # then push them all in one pass.
    dist_assets = [DIST_NAME, SYMBOLS_NAME]
    if PLATFORM == 'darwin':
        dist_assets.extend(['electron-api.json', 'electron.d.ts', DSYM_NAME])
    elif PLATFORM == 'win32':
        dist_assets.append(PDB_NAME)

    # Upload free version of ffmpeg.
    dist_assets.append(get_zip_name('ffmpeg', ELECTRON_VERSION))

    # Upload chromedriver and mksnapshot for minor version update.
    if parse_version(args.version)[2] == '0':
        dist_assets.append(get_zip_name('chromedriver', ELECTRON_VERSION))
        dist_assets.append(get_zip_name('mksnapshot', ELECTRON_VERSION))

    for asset_name in dist_assets:
        upload_electron(github, release, os.path.join(DIST_DIR, asset_name))

    if PLATFORM == 'win32' and not tag_exists:
        # Upload PDBs to Windows symbol server.
        run_python_script('upload-windows-pdb.py')

    # Upload node headers.
    run_python_script('create-node-headers.py', '-v', args.version)
    run_python_script('upload-node-headers.py', '-v', args.version)
def main():
    """Create/reuse the draft GitHub release for ``args.version`` and
    upload the Electron artifacts to it; with ``--publish-release``
    upload only the checksums/index and publish the release.

    Returns 1 when the tag does not match the built version.
    """
    args = parse_args()
    if not args.publish_release:
        if not dist_newer_than_head():
            run_python_script('create-dist.py')

        build_version = get_electron_build_version()
        if not ELECTRON_VERSION.startswith(build_version):
            error = 'Tag name ({0}) should match build version ({1})\n'.format(
                ELECTRON_VERSION, build_version)
            sys.stderr.write(error)
            sys.stderr.flush()
            return 1

    github = GitHub(auth_token())
    releases = github.repos(ELECTRON_REPO).releases.get()
    # Detect whether a *published* (non-draft) release already uses this tag.
    tag_exists = False
    for release in releases:
        if not release['draft'] and release['tag_name'] == args.version:
            tag_exists = True
            break

    release = create_or_get_release_draft(github, releases, args.version,
                                          tag_exists)

    if args.publish_release:
        # Upload the Node SHASUMS*.txt.
        run_python_script('upload-node-checksums.py', '-v', ELECTRON_VERSION)

        # Upload the index.json.
        run_python_script('upload-index-json.py')

        # Create and upload the Electron SHASUMS*.txt
        release_electron_checksums(github, release)

        # Press the publish button.
        publish_release(github, release['id'])

        # Do not upload other files when passed "-p".
        return

    # Upload Electron with GitHub Releases API.
    upload_electron(github, release, os.path.join(DIST_DIR, DIST_NAME))
    upload_electron(github, release, os.path.join(DIST_DIR, SYMBOLS_NAME))
    if PLATFORM == 'darwin':
        upload_electron(github, release,
                        os.path.join(DIST_DIR, 'electron-api.json'))
        upload_electron(github, release,
                        os.path.join(DIST_DIR, 'electron.d.ts'))
        upload_electron(github, release, os.path.join(DIST_DIR, DSYM_NAME))
    elif PLATFORM == 'win32':
        upload_electron(github, release, os.path.join(DIST_DIR, PDB_NAME))

    # Upload free version of ffmpeg.
    ffmpeg = get_zip_name('ffmpeg', ELECTRON_VERSION)
    upload_electron(github, release, os.path.join(DIST_DIR, ffmpeg))

    # Upload chromedriver and mksnapshot for minor version update.
    if parse_version(args.version)[2] == '0':
        chromedriver = get_zip_name('chromedriver', ELECTRON_VERSION)
        upload_electron(github, release, os.path.join(DIST_DIR, chromedriver))
        mksnapshot = get_zip_name('mksnapshot', ELECTRON_VERSION)
        upload_electron(github, release, os.path.join(DIST_DIR, mksnapshot))

    if PLATFORM == 'win32' and not tag_exists:
        # Upload PDBs to Windows symbol server.
        run_python_script('upload-windows-pdb.py')

    # Upload node headers.
    # NOTE(review): unlike the sibling revisions of this script, there is
    # no preceding 'create-node-headers.py' step here — confirm whether
    # upload-node-headers.py generates the headers itself in this revision
    # before adding one.
    run_python_script('upload-node-headers.py', '-v', args.version)