Example 1
def upload_included_files(files, remote_path):
    ''' Upload the local files that are to be included. '''
    for filename in files:
        # Skip the upload if the file doesn't exist locally.
        if not fs.exists(filename, remote=False):
            continue

        fs.upload(filename, remote_path)
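
For context, this helper is invoked from the backend deploy() in Example 3; a minimal usage sketch, reusing the names from that example (get_config, release_path):

included_files = get_config()['deployment']['include_files']
upload_included_files(included_files, release_path)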
Example 2
def run_deploy_script(stage, branch):
    ''' Run the deployment script on the remote host. '''
    script_path = get_deploy_dir() + REMOTE_SCRIPT
    init_script_path = get_deploy_dir() + REMOTE_INIT_SCRIPT
    repo_path = get_repo_path()

    # Upload the init and deploy scripts if the deploy script isn't on the remote yet.
    if not fs.exists(script_path):
        with hide('everything'):
            runner.run('mkdir -p ' + repo_path)
            fs.upload(BASE_PATH + '/misc/scripts/init.sh', init_script_path)
            fs.upload(BASE_PATH + '/misc/scripts/remote-source-deploy.sh',
                      script_path)

    env_vars = dict(
        STAGE=stage,
        BRANCH=branch,
        BASE_DIR=get_deploy_dir(),
        INIT_SCRIPT_PATH=init_script_path,
        REPOSITORY_PATH=repo_path,
        REPOSITORY_URL=get_config()['repository_url'],
        SCRIPT_BUILD=runner.get_script_cmd(known_scripts.BUILD),
        SCRIPT_RELOAD=runner.get_script_cmd(known_scripts.RELOAD),
        SCRIPT_INSTALL=runner.get_script_cmd(known_scripts.INSTALL),
        SCRIPT_STATUS_CHECK=runner.get_script_cmd(known_scripts.STATUS_CHECK),
        SCRIPT_PRE_BUILD=runner.get_script_cmd(known_scripts.PRE_BUILD),
        SCRIPT_POST_BUILD=runner.get_script_cmd(known_scripts.POST_BUILD),
        SCRIPT_PRE_INSTALL=runner.get_script_cmd(known_scripts.PRE_INSTALL),
        SCRIPT_POST_INSTALL=runner.get_script_cmd(known_scripts.POST_INSTALL),
        SCRIPT_PRE_DEPLOY=runner.get_script_cmd(known_scripts.PRE_DEPLOY),
        SCRIPT_POST_DEPLOY=runner.get_script_cmd(known_scripts.POST_DEPLOY))

    # Replace None values with '' before they are exported as environment variables.
    # TODO: extract a small utility that maps a function over a dict's values
    # (see the sketch after this example).
    for k, v in env_vars.items():
        if v is None:
            env_vars[k] = ''

    with hide('running'):
        with shell_env(**env_vars):
            # Run the deploy script on the remote.
            runner.run('sh ' + script_path)
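
The TODO above hints at a generic helper that maps a function over a dictionary's values. A minimal sketch, assuming no such utility exists yet (the name map_dict_values is hypothetical, not part of the codebase):

def map_dict_values(fn, d):
    ''' Return a new dict with fn applied to every value of d. '''
    return dict((k, fn(v)) for k, v in d.items())

# With it, the None-to-'' normalisation above collapses to:
# env_vars = map_dict_values(lambda v: '' if v is None else v, env_vars)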
Example 3
def deploy():
    ''' Zero-downtime deployment for the backend. '''
    config = get_config()
    stage = shell.get_stage()
    is_first_deployment = not buildman.is_remote_setup()

    branch = git.current_branch(remote=False)
    commit = git.last_commit(remote=False, short=True)
    info('Deploying <{branch}:{commit}> to the {stage} server'.format(
        branch=branch, commit=commit, stage=stage))

    tmp_path = fs.get_temp_filename()
    build_dir = buildman.resolve_local_build_dir()
    included_files = config['deployment']['include_files']

    deployer_user = shell.get_user()

    notif.send(notification_types.DEPLOYMENT_STARTED, {
        'user': deployer_user,
        'commit': commit,
        'branch': branch,
        'stage': stage
    })

    (release_dir, current_path) = buildman.setup_remote()

    timestamp = datetime.utcnow()
    build_id = timestamp.strftime('%Y%m%d%H%M%S')
    build_name = buildman.get_build_name(build_id)
    build_compressed = build_name + '.tar.gz'
    release_path = release_dir + '/' + build_name
    dist_path = build_name + '/dist'

    buildman.build(stage, config)

    info('Compressing the build')
    fs.tar_archive(build_compressed, build_dir, remote=False)

    info('Uploading the build {} to {}'.format(build_compressed, tmp_path))
    fs.upload(build_compressed, tmp_path)

    # Remove the compressed build from the local directory.
    fs.rm(build_compressed, remote=False)

    # Once the build has been uploaded, set things up on the remote server.
    with cd(release_dir):
        remote_info('Extracting the build {}'.format(build_compressed))
        # Create a new directory for the build in the remote.
        fs.mkdir(dist_path, nested=True)

        # Extract the build.
        fs.tar_extract(tmp_path, dist_path)

        # Remove the uploaded archive from the temp path.
        fs.rm_rf(tmp_path)

        # Upload the files to be included (e.g. the package.json file)
        # to the remote build location.
        upload_included_files(included_files, release_path)

        remote_info('Pointing the current symlink to the latest build')
        fs.update_symlink(release_path, current_path)

    # Change directory to the new release via the current symlink.
    with cd(current_path):
        install_remote_dependencies()

    # Start or restart the application service.
    start_or_reload_service(is_first_deployment)

    # Save build history
    buildman.record_history({
        'id': build_id,
        'path': release_path,
        'branch': branch,
        'commit': commit,
        'stage': stage,
        'createdBy': deployer_user,
        'timestamp': timestamp.strftime(buildman.TS_FORMAT)
    })

    # Send deployment finished notification.
    notif.send(notification_types.DEPLOYMENT_FINISHED, {
        'user': deployer_user,
        'branch': branch,
        'commit': commit,
        'stage': stage
    })

    remote_info('Deployment Completed')
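
To make the remote layout that deploy() builds easier to follow, here is a sketch of the path construction with hypothetical values; release_dir, current_path and the exact build name come from buildman and the config in the real code:

from datetime import datetime

release_dir = '/srv/app/releases'                 # hypothetical; from buildman.setup_remote()
current_path = '/srv/app/current'                 # hypothetical; the 'current' symlink
timestamp = datetime.utcnow()
build_id = timestamp.strftime('%Y%m%d%H%M%S')     # e.g. '20240115093042'
build_name = 'build-' + build_id                  # stand-in for buildman.get_build_name(build_id)
build_compressed = build_name + '.tar.gz'         # local archive that gets uploaded
release_path = release_dir + '/' + build_name     # new timestamped release directory
dist_path = build_name + '/dist'                  # the build is extracted here, relative to release_dir
# After extraction, current_path is re-pointed at release_path (zero downtime).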
Example 4
def deploy():
    ''' Zero-downtime deployment for the web. '''
    config = get_config()
    stage = shell.get_stage()
    user = get_stage_config(stage)['user']

    # Get the current branch and commit (locally).
    branch = git.current_branch(remote=False)
    commit = git.last_commit(remote=False, short=True)
    info('Deploying <{branch}:{commit}> to the {stage} server'.format(
        branch=branch, commit=commit, stage=stage))

    tmp_path = fs.get_temp_filename()
    build_dir = buildman.resolve_local_build_dir()

    deploy_dir = buildman.get_deploy_dir()
    deployer_user = shell.get_user()

    notif.send(notification_types.DEPLOYMENT_STARTED, {
        'user': deployer_user,
        'branch': branch,
        'commit': commit,
        'stage': stage
    })

    (release_dir, current_path) = buildman.setup_remote()

    timestamp = datetime.utcnow()
    build_id = timestamp.strftime('%Y%m%d%H%M%S')
    build_name = buildman.get_build_name(build_id)
    build_compressed = build_name + '.tar.gz'
    release_path = release_dir + '/' + build_name

    buildman.build(stage, config)

    info('Compressing the build')
    fs.tar_archive(build_compressed, build_dir, remote=False)

    info('Uploading the build {} to {}'.format(build_compressed, tmp_path))
    fs.upload(build_compressed, tmp_path)

    # Remove the compressed build from the local directory.
    fs.rm(build_compressed, remote=False)

    # Once the build has been uploaded, set things up on the remote server.
    with cd(release_dir):
        remote_info('Extracting the build {}'.format(build_compressed))
        # Create a new directory for the build in the remote.
        fs.mkdir(build_name)

        # Extract the build.
        fs.tar_extract(tmp_path, build_name)

        # Remove the uploaded archive from the temp path.
        fs.rm_rf(tmp_path)

        remote_info('Changing ownership of {} to user {}'.format(
            release_path, user))
        fs.chown(release_path, user, user)

        remote_info('Pointing the current symlink to the latest build')
        fs.update_symlink(release_path, current_path)

    # Save build history
    buildman.record_history({
        'id': build_id,
        'path': release_path,
        'branch': branch,
        'commit': commit,
        'stage': stage,
        'createdBy': deployer_user,
        'timestamp': timestamp.strftime(buildman.TS_FORMAT)
    })

    # Send deployment finished notification.
    notif.send(notification_types.DEPLOYMENT_FINISHED, {
        'user': deployer_user,
        'branch': branch,
        'commit': commit,
        'stage': stage
    })

    remote_info('Deployment Completed')
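
In both deploy() variants the zero-downtime switch is the final fs.update_symlink(release_path, current_path) call. Its implementation isn't shown here; the sketch below (with the hypothetical name flip_current_symlink) only illustrates the idea of flipping a 'current' symlink in one rename on a local POSIX filesystem, whereas the real call operates on the remote host:

import os

def flip_current_symlink(release_path, current_path):
    ''' Point current_path at release_path; a rough local stand-in for
        fs.update_symlink, not its actual implementation. '''
    tmp_link = current_path + '.tmp'
    # Build the new symlink under a temporary name first.
    if os.path.lexists(tmp_link):
        os.remove(tmp_link)
    os.symlink(release_path, tmp_link)
    # Renaming over the old link replaces it in one step on POSIX,
    # so readers never see a missing 'current' path.
    os.rename(tmp_link, current_path)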