Beispiel #1
0
def upload_electron(github, release, file_path, args):
  """Upload a release artifact to S3 or to the GitHub release.

  When args.upload_to_s3 is set the file (plus its SHA256 checksum)
  goes to the configured S3 bucket and the GitHub upload is skipped.
  """

  # if upload_to_s3 is set, skip github upload.
  if args.upload_to_s3:
    bucket, access_key, secret_key = s3_config()
    key_prefix = 'electron-artifacts/{0}_{1}'.format(release['tag_name'],
                                                     args.upload_timestamp)
    s3put(bucket, access_key, secret_key, os.path.dirname(file_path),
          key_prefix, [file_path])
    upload_sha256_checksum(release['tag_name'], file_path, key_prefix)
    return

  # Delete the original file before uploading in CI.
  filename = os.path.basename(file_path)
  # FIX: dict.has_key() was removed in Python 3; `in` works on 2 and 3.
  if 'CI' in os.environ:
    try:
      for asset in release['assets']:
        if asset['name'] == filename:
          github.repos(ELECTRON_REPO).releases.assets(asset['id']).delete()
    except Exception:
      # Best effort: a failed delete must not abort the upload.
      pass

  # Upload the file.
  upload_io_to_github(release, filename, file_path)

  # Upload the checksum file.
  upload_sha256_checksum(release['tag_name'], file_path)
Beispiel #2
0
def upload_electron(github, release, file_path, upload_to_s3):
    """Upload a release artifact to S3 or to the GitHub release.

    Also re-uploads armv7l artifacts under the legacy 'arm' name for
    backwards compatibility (recursing once on the copied file).
    """

    # if upload_to_s3 is set, skip github upload.
    if upload_to_s3:
        bucket, access_key, secret_key = s3_config()
        key_prefix = 'electron-artifacts/{0}'.format(release['tag_name'])
        s3put(bucket, access_key, secret_key, os.path.dirname(file_path),
              key_prefix, [file_path])
        upload_sha256_checksum(release['tag_name'], file_path, key_prefix)
        return

    # Delete the original file before uploading in CI.
    filename = os.path.basename(file_path)
    # FIX: dict.has_key() was removed in Python 3; `in` works on 2 and 3.
    if 'CI' in os.environ:
        try:
            for asset in release['assets']:
                if asset['name'] == filename:
                    github.repos(ELECTRON_REPO).releases.assets(
                        asset['id']).delete()
        except Exception:
            # Best effort: a failed delete must not abort the upload.
            pass

    # Upload the file.
    upload_io_to_github(release, filename, file_path)

    # Upload the checksum file.
    upload_sha256_checksum(release['tag_name'], file_path)

    # Upload ARM assets without the v7l suffix for backwards compatibility
    # TODO Remove for 2.0
    if 'armv7l' in filename:
        arm_filename = filename.replace('armv7l', 'arm')
        arm_file_path = os.path.join(os.path.dirname(file_path), arm_filename)
        shutil.copy2(file_path, arm_file_path)
        upload_electron(github, release, arm_file_path, upload_to_s3)
Beispiel #3
0
def main():
    """Create and upload the node/iojs header tarballs for this release."""
    safe_mkdir(dist_dir())

    args = parse_args()

    # Both "node-*" and "iojs-*" layouts are produced, each with and
    # without the "-headers" suffix, for downstream compatibility.
    dir_templates = (
        'node-{0}',
        'node-{0}-headers',
        'iojs-{0}',
        'iojs-{0}-headers',
    )
    for template in dir_templates:
        headers_dir = os.path.join(dist_dir(), template.format(args.version))
        copy_headers(headers_dir)
        create_header_tarball(headers_dir)

    # Upload node's headers to S3.
    bucket, access_key, secret_key = s3_config()
    upload_node(bucket, access_key, secret_key, args.version)
Beispiel #4
0
def upload_electron(github, release, file_path, upload_to_s3):
  """Upload a release artifact to S3 or to the GitHub release.

  armv7l artifacts are additionally copied and re-uploaded under the
  legacy 'arm' name for backwards compatibility.
  """

  # if upload_to_s3 is set, skip github upload.
  if upload_to_s3:
    bucket, access_key, secret_key = s3_config()
    key_prefix = 'electron-artifacts/{0}'.format(release['tag_name'])
    s3put(bucket, access_key, secret_key, os.path.dirname(file_path),
          key_prefix, [file_path])
    upload_sha256_checksum(release['tag_name'], file_path, key_prefix)
    return

  # Delete the original file before uploading in CI.
  filename = os.path.basename(file_path)
  # FIX: dict.has_key() was removed in Python 3; `in` works on 2 and 3.
  if 'CI' in os.environ:
    try:
      for asset in release['assets']:
        if asset['name'] == filename:
          github.repos(ELECTRON_REPO).releases.assets(asset['id']).delete()
    except Exception:
      # Best effort: a failed delete must not abort the upload.
      pass

  # Upload the file.
  upload_io_to_github(release, filename, file_path)

  # Upload the checksum file.
  upload_sha256_checksum(release['tag_name'], file_path)

  # Upload ARM assets without the v7l suffix for backwards compatibility
  # TODO Remove for 2.0
  if 'armv7l' in filename:
    arm_filename = filename.replace('armv7l', 'arm')
    arm_file_path = os.path.join(os.path.dirname(file_path), arm_filename)
    shutil.copy2(file_path, arm_file_path)
    upload_electron(github, release, arm_file_path, upload_to_s3)
Beispiel #5
0
def upload_electron(github, release, file_path, args):
    """Upload a release artifact to S3 or to the GitHub release."""

    # if upload_to_s3 is set, skip github upload.
    if args.upload_to_s3:
        bucket, access_key, secret_key = s3_config()
        key_prefix = 'electron-artifacts/{0}_{1}'.format(
            release['tag_name'], args.upload_timestamp)
        s3put(bucket, access_key, secret_key, os.path.dirname(file_path),
              key_prefix, [file_path])
        upload_sha256_checksum(release['tag_name'], file_path, key_prefix)
        return

    # Delete the original file before uploading in CI.
    filename = os.path.basename(file_path)
    # FIX: dict.has_key() was removed in Python 3; `in` works on 2 and 3.
    if 'CI' in os.environ:
        try:
            for asset in release['assets']:
                if asset['name'] == filename:
                    github.repos(ELECTRON_REPO).releases.assets(
                        asset['id']).delete()
        except Exception:
            # Best effort: a failed delete must not abort the upload.
            pass

    # Upload the file.
    upload_io_to_github(release, filename, file_path)

    # Upload the checksum file.
    upload_sha256_checksum(release['tag_name'], file_path)
def main():
    """Collect symbol files, generate Sentry source bundles, upload to S3."""
    os.chdir(ELECTRON_DIR)

    if PLATFORM == 'win32':
        # symstore lays the PDBs out as :name.pdb/:hash/:name.pdb.
        for pdb_path in PDB_LIST:
            run_symstore(pdb_path, SYMBOLS_DIR, PRODUCT_NAME)
        files = glob.glob(SYMBOLS_DIR + '/*.pdb/*/*.pdb')
    else:
        files = glob.glob(SYMBOLS_DIR + '/*/*/*.sym')

    bundle_cmd = [NPX_CMD, '@sentry/[email protected]', 'difutil',
                  'bundle-sources']
    for symbol_file in files:
        print("Generating Sentry src bundle for: " + symbol_file)
        subprocess.check_output(bundle_cmd + [symbol_file])

    files += glob.glob(SYMBOLS_DIR + '/*/*/*.src.zip')

    # The file upload needs to be atom-shell/symbols/:symbol_name/:hash/:symbol
    os.chdir(SYMBOLS_DIR)
    base_dir = os.getcwd()
    relative = [os.path.relpath(f, base_dir) for f in files]

    # The symbol server needs lowercase paths, it will fail otherwise
    # So lowercase all the file paths here
    files = [f.lower() for f in relative]

    bucket, access_key, secret_key = s3_config()
    upload_symbols(bucket, access_key, secret_key, files)
Beispiel #7
0
def upload_electron(release, file_path, args):
    """Upload one artifact (and its SHA256 checksum) to S3 or GitHub."""
    filename = os.path.basename(file_path)

    # Strip zip non determinism before upload, in-place operation
    try:
        zero_zip_date_time(file_path)
    except NonZipFileError:
        pass

    if not args.upload_to_s3:
        # GitHub path: the artifact itself, then its checksum file.
        upload_io_to_github(release, filename, file_path, args.version)
        upload_sha256_checksum(args.version, file_path)
        return

    # if upload_to_s3 is set, skip github upload.
    bucket, access_key, secret_key = s3_config()
    key_prefix = 'electron-artifacts/{0}_{1}'.format(
        args.version, args.upload_timestamp)
    s3put(bucket, access_key, secret_key, os.path.dirname(file_path),
          key_prefix, [file_path])
    upload_sha256_checksum(args.version, file_path, key_prefix)
    s3url = 'https://gh-contractor-zcbenz.s3.amazonaws.com'
    print('{0} uploaded to {1}/{2}/{0}'.format(filename, s3url,
                                               key_prefix))
Beispiel #8
0
def main():
    """Gather platform symbol files and push them to the S3 symbol server."""
    os.chdir(SOURCE_ROOT)

    if PLATFORM == 'win32':
        for pdb_file in PDB_LIST:
            run_symstore(pdb_file, SYMBOLS_DIR, PRODUCT_NAME)
        # Windows symbol paths must be lowercased for the server.
        files = [p.lower()
                 for p in glob.glob(SYMBOLS_DIR + '/*.pdb/*/*.pdb')]
    else:
        files = glob.glob(SYMBOLS_DIR + '/*/*/*.sym')

    bucket, access_key, secret_key = s3_config()
    upload_symbols(bucket, access_key, secret_key, files)
Beispiel #9
0
def main():
  """Copy node headers into DIST_DIR, tar them up and upload to S3."""
  safe_mkdir(DIST_DIR)

  args = parse_args()
  headers_dir = os.path.join(DIST_DIR, 'node-{0}'.format(args.version))

  copy_headers(headers_dir)
  create_header_tarball(headers_dir)

  # Upload node's headers to S3.
  bucket, access_key, secret_key = s3_config()
  upload_node(bucket, access_key, secret_key, args.version)
Beispiel #10
0
def upload_sha256_checksum(version, file_path):
    """Write a `<file>.sha256sum` companion file and upload it to S3.

    The checksum line follows the GNU coreutils format:
    "<hexdigest> *<filename>" (the '*' marks binary mode).
    """
    bucket, access_key, secret_key = s3_config()
    checksum_path = '{}.sha256sum'.format(file_path)
    sha256 = hashlib.sha256()
    with open(file_path, 'rb') as f:
        # Hash in fixed-size chunks instead of slurping the whole artifact
        # into memory at once; release binaries can be hundreds of MB.
        for chunk in iter(lambda: f.read(1024 * 1024), b''):
            sha256.update(chunk)

    filename = os.path.basename(file_path)
    with open(checksum_path, 'w') as checksum:
        checksum.write('{} *{}'.format(sha256.hexdigest(), filename))
    s3put(bucket, access_key, secret_key, os.path.dirname(checksum_path),
          'atom-shell/tmp/{0}'.format(version), [checksum_path])
def main():
  """Rebuild the symbol store from PDB_LIST and upload it to S3."""
  os.chdir(SOURCE_ROOT)

  # Always start from a clean symbol directory.
  rm_rf(SYMBOLS_DIR)
  safe_mkdir(SYMBOLS_DIR)
  for pdb_file in PDB_LIST:
    run_symstore(pdb_file, SYMBOLS_DIR, 'AtomShell')

  bucket, access_key, secret_key = s3_config()
  # The symbol server requires lowercase paths.
  pdbs = [p.lower() for p in glob.glob(SYMBOLS_DIR + '/*.pdb/*/*.pdb')]
  upload_symbols(bucket, access_key, secret_key, pdbs)
Beispiel #12
0
def main():
    """Rebuild the symbol store and upload lowercased PDB paths to S3."""
    os.chdir(SOURCE_ROOT)

    # Always start from an empty symbol directory.
    rm_rf(SYMBOLS_DIR)
    safe_mkdir(SYMBOLS_DIR)
    for pdb_path in PDB_LIST:
        run_symstore(pdb_path, SYMBOLS_DIR, PRODUCT_NAME)

    bucket, access_key, secret_key = s3_config()
    # The symbol server requires lowercase paths.
    files = [p.lower() for p in glob.glob(SYMBOLS_DIR + '/*.pdb/*/*.pdb')]
    upload_symbols(bucket, access_key, secret_key, files)
Beispiel #13
0
def upload_sha256_checksum(version, file_path):
  """Write a `<file>.sha256sum` companion file and upload it to S3.

  The checksum line follows the GNU coreutils format:
  "<hexdigest> *<filename>" (the '*' marks binary mode).
  """
  bucket, access_key, secret_key = s3_config()
  checksum_path = '{}.sha256sum'.format(file_path)
  sha256 = hashlib.sha256()
  with open(file_path, 'rb') as f:
    # Hash in fixed-size chunks instead of slurping the whole artifact
    # into memory at once; release binaries can be hundreds of MB.
    for chunk in iter(lambda: f.read(1024 * 1024), b''):
      sha256.update(chunk)

  filename = os.path.basename(file_path)
  with open(checksum_path, 'w') as checksum:
    checksum.write('{} *{}'.format(sha256.hexdigest(), filename))
  s3put(bucket, access_key, secret_key, os.path.dirname(checksum_path),
        'atom-shell/tmp/{0}'.format(version), [checksum_path])
Beispiel #14
0
def main():
    """Upload symbol files for the current platform to the S3 symbol server."""
    os.chdir(SOURCE_ROOT)
    if PLATFORM != 'win32':
        symbol_paths = glob.glob(SYMBOLS_DIR + '/*/*/*.sym')
    else:
        for pdb in PDB_LIST:
            run_symstore(pdb, SYMBOLS_DIR, PRODUCT_NAME)
        symbol_paths = glob.glob(SYMBOLS_DIR + '/*.pdb/*/*.pdb')
    # The symbol server needs lowercase paths, it will fail otherwise
    # So lowercase all the file paths here
    symbol_paths = [p.lower() for p in symbol_paths]

    bucket, access_key, secret_key = s3_config()
    upload_symbols(bucket, access_key, secret_key, symbol_paths)
Beispiel #15
0
def s3put(prefix, key_prefix, files):
  """Push `files` to the configured S3 bucket via the s3put.js helper."""
  bucket, access_key, secret_key = s3_config()
  # Credentials are handed to the node helper through its environment.
  env = os.environ.copy()
  env['AWS_ACCESS_KEY_ID'] = access_key
  env['AWS_SECRET_ACCESS_KEY'] = secret_key
  script = os.path.join(os.path.dirname(__file__), 's3put.js')
  args = [
    'node', script,
    '--bucket', bucket,
    '--prefix', prefix,
    '--key_prefix', key_prefix,
    '--grant', 'public-read',
  ]
  print(execute(args + files, env))
def main():
  """Dump version info with the built binary and upload index.json to S3."""
  # Upload the index.json.
  with scoped_cwd(SOURCE_ROOT):
    binary = os.path.join(OUT_DIR, 'atom')
    if PLATFORM == 'win32':
      binary += '.exe'
    index_json = os.path.relpath(os.path.join(OUT_DIR, 'index.json'))
    dump_script = os.path.join('tools', 'dump-version-info.js')
    execute([binary, dump_script, index_json])

    bucket, access_key, secret_key = s3_config()
    s3put(bucket, access_key, secret_key, OUT_DIR, 'atom-shell/dist',
          [index_json])
def main():
  """Print the SHA256 checksum files stored on S3 for the given version."""
  args = parse_args()
  bucket_name, access_key, secret_key = s3_config()
  s3 = S3Connection(access_key, secret_key)
  bucket = s3.get_bucket(bucket_name)
  if bucket is None:
    print('S3 bucket "{}" does not exist!'.format(bucket_name), file=sys.stderr)
    return 1

  shasums = []
  # Only fetch the bodies of *.sha256sum objects matching this version.
  for s3_object in bucket.list('atom-shell/tmp/', delimiter='/'):
    if s3_object.key.endswith('.sha256sum') and args.version in s3_object.key:
      shasums.append(s3_object.get_contents_as_string().strip())
  print('\n'.join(shasums))
  return 0
Beispiel #18
0
def main():
  """Collect platform symbol files, lowercase their paths, upload to S3."""
  os.chdir(SOURCE_ROOT)

  if PLATFORM == 'win32':
    for pdb_file in PDB_LIST:
      run_symstore(pdb_file, SYMBOLS_DIR, PRODUCT_NAME)
    pattern = SYMBOLS_DIR + '/*.pdb/*/*.pdb'
  else:
    pattern = SYMBOLS_DIR + '/*/*/*.sym'

  # The symbol server needs lowercase paths, it will fail otherwise
  # So lowercase all the file paths here
  files = [f.lower() for f in glob.glob(pattern)]

  bucket, access_key, secret_key = s3_config()
  upload_symbols(bucket, access_key, secret_key, files)
def main():
    """Run dump-version-info.js with the built app and upload index.json."""
    # Upload the index.json.
    with scoped_cwd(SOURCE_ROOT):
        if sys.platform == "darwin":
            binary = os.path.join(OUT_DIR, "{0}.app".format(PRODUCT_NAME),
                                  "Contents", "MacOS", PRODUCT_NAME)
        elif sys.platform == "win32":
            binary = os.path.join(OUT_DIR, "{0}.exe".format(PROJECT_NAME))
        else:
            binary = os.path.join(OUT_DIR, PROJECT_NAME)
        index_json = os.path.relpath(os.path.join(OUT_DIR, "index.json"))
        execute([binary, os.path.join("tools", "dump-version-info.js"),
                 index_json])

        bucket, access_key, secret_key = s3_config()
        s3put(bucket, access_key, secret_key, OUT_DIR, "atom-shell/dist",
              [index_json])
Beispiel #20
0
def main():
    """Download release files, build SHASUMS manifests, push them to S3."""
    args = parse_args()

    url = DIST_URL + args.version + "/"
    directory, files = download_files(url, get_files_list(args.version))

    manifest_specs = (("sha1", "SHASUMS.txt"), ("sha256", "SHASUMS256.txt"))
    checksums = [create_checksum(algo, directory, name, files)
                 for algo, name in manifest_specs]

    bucket, access_key, secret_key = s3_config()
    s3put(bucket, access_key, secret_key, directory,
          "atom-shell/dist/{0}".format(args.version), checksums)

    # The download directory is only needed while checksumming.
    rm_rf(directory)
def main():
    """Dump version info via the built binary and upload index.json to S3."""
    # Upload the index.json.
    with scoped_cwd(SOURCE_ROOT):
        shell_binary = os.path.join(OUT_DIR, 'atom')
        if PLATFORM == 'win32':
            shell_binary += '.exe'
        index_json = os.path.relpath(os.path.join(OUT_DIR, 'index.json'))
        dump_script = os.path.join('tools', 'dump-version-info.js')
        execute([shell_binary, dump_script, index_json])

        bucket, access_key, secret_key = s3_config()
        s3put(bucket, access_key, secret_key, OUT_DIR, 'atom-shell/dist',
              [index_json])
def main():
    """Download dist files, create SHA checksum manifests, upload to S3."""
    args = parse_args()

    version_url = DIST_URL + args.version + '/'
    directory, files = download_files(version_url,
                                      get_files_list(args.version))
    checksums = [
        create_checksum('sha1', directory, 'SHASUMS.txt', files),
        create_checksum('sha256', directory, 'SHASUMS256.txt', files),
    ]

    bucket, access_key, secret_key = s3_config()
    key_prefix = 'atom-shell/dist/{0}'.format(args.version)
    s3put(bucket, access_key, secret_key, directory, key_prefix, checksums)

    # The download directory is only needed while checksumming.
    rm_rf(directory)
Beispiel #23
0
def main():
    """Regenerate index.json from the metadata dumper and upload it to S3."""
    if not authToken or authToken == "":
        raise Exception("Please set META_DUMPER_AUTH_HEADER")
    # Upload the index.json.
    with scoped_cwd(ELECTRON_DIR):
        safe_mkdir(OUT_DIR)
        index_json = os.path.relpath(os.path.join(OUT_DIR, 'index.json'))

        with open(index_json, "w") as f:
            f.write(get_content())

        bucket, access_key, secret_key = s3_config()
        s3put(bucket, access_key, secret_key, OUT_DIR, 'atom-shell/dist',
              [index_json])
Beispiel #24
0
def main():
  """Write get_content() to OUT_DIR/index.json and upload it to S3."""
  if not authToken or authToken == "":
    raise Exception("Please set META_DUMPER_AUTH_HEADER")
  # Upload the index.json.
  with scoped_cwd(SOURCE_ROOT):
    safe_mkdir(OUT_DIR)
    index_json = os.path.relpath(os.path.join(OUT_DIR, 'index.json'))

    with open(index_json, "w") as f:
      f.write(get_content())

    bucket, access_key, secret_key = s3_config()
    s3put(bucket, access_key, secret_key, OUT_DIR, 'atom-shell/dist',
          [index_json])
def main():
  """Run dump-version-info.js against the built app and upload index.json."""
  # Upload the index.json.
  with scoped_cwd(SOURCE_ROOT):
    if sys.platform == 'win32':
      app_binary = os.path.join(OUT_DIR, '{0}.exe'.format(PROJECT_NAME))
    elif sys.platform == 'darwin':
      app_binary = os.path.join(OUT_DIR, '{0}.app'.format(PRODUCT_NAME),
                                'Contents', 'MacOS', PRODUCT_NAME)
    else:
      app_binary = os.path.join(OUT_DIR, PROJECT_NAME)
    index_json = os.path.relpath(os.path.join(OUT_DIR, 'index.json'))
    execute([app_binary,
             os.path.join('tools', 'dump-version-info.js'),
             index_json])

    bucket, access_key, secret_key = s3_config()
    s3put(bucket, access_key, secret_key, OUT_DIR, 'atom-shell/dist',
          [index_json])
Beispiel #26
0
def main():
    """Generate Sentry source bundles for every symbol file and upload them.

    On Windows the PDBs are first pushed through symstore; on all platforms
    the breakpad .sym files are collected, source-bundled via the Sentry
    CLI, lowercased (copying on case-sensitive filesystems) and uploaded
    to the S3 symbol server.
    """
    os.chdir(ELECTRON_DIR)
    files = []
    if PLATFORM == 'win32':
        for pdb in PDB_LIST:
            run_symstore(pdb, SYMBOLS_DIR, PRODUCT_NAME)
        files = glob.glob(SYMBOLS_DIR + '/*.pdb/*/*.pdb')

    files += glob.glob(SYMBOLS_DIR + '/*/*/*.sym')

    for symbol_file in files:
        print("Generating Sentry src bundle for: " + symbol_file)
        npx_env = os.environ.copy()
        # Suppress npx's interactive install prompt.
        npx_env['npm_config_yes'] = 'true'
        subprocess.check_output([
            NPX_CMD, '@sentry/[email protected]', 'difutil', 'bundle-sources',
            symbol_file
        ],
                                env=npx_env)

    files += glob.glob(SYMBOLS_DIR + '/*/*/*.src.zip')

    # The file upload needs to be atom-shell/symbols/:symbol_name/:hash/:symbol
    os.chdir(SYMBOLS_DIR)
    files = [os.path.relpath(f, os.getcwd()) for f in files]

    # The symbol server needs lowercase paths, it will fail otherwise
    # So lowercase all the file paths here
    if is_fs_case_sensitive():
        for f in files:
            lower_f = f.lower()
            if lower_f != f:
                if os.path.exists(lower_f):
                    # FIX: these entries are files, not directories;
                    # shutil.rmtree() raises on a plain file. Remove
                    # whichever kind actually exists.
                    if os.path.isdir(lower_f):
                        shutil.rmtree(lower_f)
                    else:
                        os.remove(lower_f)
                lower_dir = os.path.dirname(lower_f)
                if not os.path.exists(lower_dir):
                    os.makedirs(lower_dir)
                shutil.copy2(f, lower_f)
    files = [f.lower() for f in files]
    for f in files:
        assert os.path.exists(f)

    bucket, access_key, secret_key = s3_config()
    upload_symbols(bucket, access_key, secret_key, files)
Beispiel #27
0
def main():
    """Fetch version metadata over HTTP and upload it as index.json to S3."""
    if not authToken or authToken == "":
        raise Exception("Please set META_DUMPER_AUTH_HEADER")
    # Upload the index.json.
    with scoped_cwd(SOURCE_ROOT):
        safe_mkdir(OUT_DIR)
        index_json = os.path.relpath(os.path.join(OUT_DIR, 'index.json'))

        # The dumper endpoint requires the auth token in the header.
        request = urllib2.Request(BASE_URL + version,
                                  headers={"Authorization": authToken})
        body = urllib2.urlopen(request).read()

        with open(index_json, "w") as f:
            f.write(body)

        bucket, access_key, secret_key = s3_config()
        s3put(bucket, access_key, secret_key, OUT_DIR, 'atom-shell/dist',
              [index_json])
Beispiel #28
0
def main():
    """Upload relative, lowercased symbol paths from SYMBOLS_DIR to S3."""
    os.chdir(ELECTRON_DIR)
    if PLATFORM == 'win32':
        for pdb_path in PDB_LIST:
            run_symstore(pdb_path, SYMBOLS_DIR, PRODUCT_NAME)
        symbol_glob = SYMBOLS_DIR + '/*.pdb/*/*.pdb'
    else:
        symbol_glob = SYMBOLS_DIR + '/*/*/*.sym'
    found = glob.glob(symbol_glob)

    # The file upload needs to be atom-shell/symbols/:symbol_name/:hash/:symbol
    os.chdir(SYMBOLS_DIR)
    base = os.getcwd()

    # The symbol server needs lowercase paths, it will fail otherwise
    # So lowercase all the file paths here
    files = [os.path.relpath(f, base).lower() for f in found]

    bucket, access_key, secret_key = s3_config()
    upload_symbols(bucket, access_key, secret_key, files)
def main():
    """Print every stored SHA256 checksum for the given release version."""
    args = parse_args()

    bucket_name, access_key, secret_key = s3_config()
    conn = S3Connection(access_key, secret_key)
    bucket = conn.get_bucket(bucket_name)
    if bucket is None:
        print('S3 bucket "{}" does not exist!'.format(bucket_name),
              file=sys.stderr)
        return 1

    prefix = 'atom-shell/tmp/{0}/'.format(args.version)
    shasums = []
    # Only fetch the bodies of *.sha256sum objects under this version.
    for obj in bucket.list(prefix, delimiter='/'):
        if obj.key.endswith('.sha256sum'):
            shasums.append(obj.get_contents_as_string().strip())
    print('\n'.join(shasums))
    return 0
Beispiel #30
0
def upload_electron(release, file_path, args):
    """Upload one artifact (and its SHA256 checksum) to S3 or GitHub."""
    filename = os.path.basename(file_path)

    # if upload_to_s3 is set, skip github upload.
    if args.upload_to_s3:
        bucket, access_key, secret_key = s3_config()
        key_prefix = 'electron-artifacts/{0}_{1}'.format(
            args.version, args.upload_timestamp)
        s3put(bucket, access_key, secret_key, os.path.dirname(file_path),
              key_prefix, [file_path])
        upload_sha256_checksum(args.version, file_path, key_prefix)
        s3url = 'https://gh-contractor-zcbenz.s3.amazonaws.com'
        # FIX: Python-2-only `print` statement; the call form is valid on
        # both Python 2 and 3 (single argument, so no future import needed).
        print('{0} uploaded to {1}/{2}/{0}'.format(filename, s3url,
                                                   key_prefix))
        return

    # Upload the file.
    upload_io_to_github(release, filename, file_path, args.version)

    # Upload the checksum file.
    upload_sha256_checksum(args.version, file_path)
Beispiel #31
0
def upload_electron(release, file_path, args):
  """Upload one artifact (and its SHA256 checksum) to S3 or GitHub."""
  filename = os.path.basename(file_path)

  # if upload_to_s3 is set, skip github upload.
  if args.upload_to_s3:
    bucket, access_key, secret_key = s3_config()
    key_prefix = 'electron-artifacts/{0}_{1}'.format(args.version,
                                                     args.upload_timestamp)
    s3put(bucket, access_key, secret_key, os.path.dirname(file_path),
          key_prefix, [file_path])
    upload_sha256_checksum(args.version, file_path, key_prefix)
    s3url = 'https://gh-contractor-zcbenz.s3.amazonaws.com'
    # FIX: Python-2-only `print` statement; the call form is valid on
    # both Python 2 and 3 (single argument, so no future import needed).
    print('{0} uploaded to {1}/{2}/{0}'.format(filename, s3url, key_prefix))
    return

  # Upload the file.
  upload_io_to_github(release, filename, file_path, args.version)

  # Upload the checksum file.
  upload_sha256_checksum(args.version, file_path)
def main():
    """Build SHASUMS files for a release and upload them (or copy locally)."""
    args = parse_args()
    dist_url = args.dist_url
    if dist_url[-1] != "/":
        dist_url += "/"

    url = dist_url + args.version + '/'
    directory, files = download_files(url, get_files_list(args.version))
    checksums = [create_checksum('sha1', directory, 'SHASUMS.txt', files),
                 create_checksum('sha256', directory, 'SHASUMS256.txt', files)]

    if args.target_dir is not None:
        # A local target directory short-circuits the S3 upload.
        copy_files(checksums, args.target_dir)
    else:
        bucket, access_key, secret_key = s3_config()
        s3put(bucket, access_key, secret_key, directory,
              'atom-shell/dist/{0}'.format(args.version), checksums)

    rm_rf(directory)
def main():
  """Build SHASUMS manifests for a release and upload or copy them out."""
  args = parse_args()
  dist_url = args.dist_url
  if dist_url[-1] != "/":
    dist_url += "/"

  url = dist_url + args.version + '/'
  directory, files = download_files(url, get_files_list(args.version))
  checksums = [create_checksum('sha1', directory, 'SHASUMS.txt', files),
               create_checksum('sha256', directory, 'SHASUMS256.txt', files)]

  if args.target_dir is not None:
    # A local target directory short-circuits the S3 upload.
    copy_files(checksums, args.target_dir)
  else:
    bucket, access_key, secret_key = s3_config()
    s3put(bucket, access_key, secret_key, directory,
          'atom-shell/dist/{0}'.format(args.version), checksums)

  rm_rf(directory)
Beispiel #34
0
def main():
    """Upload node's headers for the parsed version to S3."""
    args = parse_args()

    # Upload node's headers to S3.
    credentials = s3_config()
    bucket, access_key, secret_key = credentials
    upload_node(bucket, access_key, secret_key, args.version)
def main():
  """Push the node headers for this release version up to S3."""
  version = parse_args().version

  # Upload node's headers to S3.
  bucket, access_key, secret_key = s3_config()
  upload_node(bucket, access_key, secret_key, version)