def extract_notification_params(params):
    ''' Extract parameters for notification. '''
    cfg = get_config()
    stage_cfg = get_stage_config(params['stage'])
    repo_url = cfg.get('repository_url')

    # Fall back to a plain http url built from the host when the stage
    # has no explicit public_url configured.
    site_url = stage_cfg.get('public_url') or ('http://' + stage_cfg.get('host'))

    result = dict(
        public_url=site_url,
        repository_url=repo_url,
        host=stage_cfg['host'],
        server_name=params['stage'],
        project_name=cfg['project_name'],
        project_description=cfg['project_description'],
        **params
    )

    # If commit is provided, send commit_url too.
    commit = params.get('commit')
    if commit:
        result['commit_url'] = git.get_tree_url(commit, repo_url)

    # While deploying from CI (eg: Travis) the branch is sometimes missing
    # or reported as HEAD, which doesn't really make sense.
    # Hide the branch in those cases; otherwise add branch & branch_url.
    branch = params.get('branch')
    if branch and branch != 'HEAD':
        result['branch'] = branch
        result['branch_url'] = git.get_tree_url(branch, repo_url)
    else:
        result['branch'] = None

    return result
def setup_remote(quiet=True):
    '''
    Setup remote environment before we can proceed
    with the deployment process.
    '''
    base_dir = get_deploy_dir()
    release_dir = get_release_dir()
    current_path = get_current_path()
    history_file = get_builds_file()
    preset = get_config()['deployment']['preset']
    stage = shell.get_stage()
    fresh_setup = False

    # First-time deployment: the releases directory won't exist yet,
    # so scaffold everything the deployment process depends upon.
    if not fs.exists(release_dir):
        remote_info('Setting up {} server for {} deployment'.format(
            stage, preset))
        remote_info('Creating the releases directory {}'.format(
            cyan(release_dir)))
        fs.mkdir(release_dir, nested=True)

        # Add build history file.
        remote_info('Creating new build meta file {}'.format(
            cyan(history_file)))
        save_history(merge(INITIAL_BUILD_HISTORY, {'preset': preset}))

        # Setup a default web page for web deployment.
        if preset == presets.WEB:
            setup_default_html(base_dir)

        fresh_setup = True

    if not fresh_setup and not quiet:
        remote_info('Remote already setup for deployment')

    return (release_dir, current_path)
def configure_env():
    ''' Configures the fabric env. '''
    config = get_config()
    stage_config = get_stage_config(get_stage())

    def resolve(key):
        # A truthy stage-level value wins; otherwise use the root config.
        return stage_config.get(key) or config[key]

    env.user = resolve('user')
    env.port = resolve('port')
    env.cwd = resolve('cwd')
    env.key_filename = resolve('key_filename')
    env.hosts = [stage_config['host']]

    forward_agent = resolve('ssh_forward_agent')
    env.forward_agent = (
        forward_agent and str(forward_agent).lower() == 'true'
    )

    # If Verbose logging is turned on show verbose logs.
    if str(resolve('verbose_logging')).lower() == 'true':
        set_verbose_logging()
def get_deploy_dir():
    ''' Get the deployment base directory path. '''
    # NOTE(review): base_dir comes from the stage config here while
    # project_name comes from the root config - looks intentional but
    # worth confirming both configs expose the keys used.
    stage_config = get_stage_config(shell.get_stage())
    base = stage_config['deployment']['base_dir'].rstrip('/')

    return '{}/{}'.format(base, get_config()['project_name'])
def record_history(build_info):
    ''' Record a new build in the history. '''
    keep_builds = int(get_config()['deployment']['keep_builds'])

    history = load_history()
    history['current'] = build_info['id']
    # Newest build goes first; keep at most `keep_builds` entries.
    history['builds'].insert(0, build_info)
    history['builds'] = history['builds'][:keep_builds]

    remote_info('Saving the build history')

    # Update build history json file
    save_history(history)

    # Delete the previous builds more than the value of `keep_builds`.
    delete_old_builds(history)
def run_deploy_script(stage, branch):
    '''
    Run the deployment script on the remote host.

    Uploads the init & deploy shell scripts to the remote on first run,
    then executes the deploy script with the deployment context exposed
    as shell environment variables.
    '''
    # Hoist the base dir - the original computed it three times.
    base_dir = get_deploy_dir()
    script_path = base_dir + REMOTE_SCRIPT
    init_script_path = base_dir + REMOTE_INIT_SCRIPT
    repo_path = get_repo_path()

    # Check if the script exists (with version) on the remote;
    # upload the scripts only on first use.
    if not fs.exists(script_path):
        with hide('everything'):
            runner.run('mkdir -p ' + repo_path)
            fs.upload(BASE_PATH + '/misc/scripts/init.sh', init_script_path)
            fs.upload(BASE_PATH + '/misc/scripts/remote-source-deploy.sh',
                      script_path)

    env_vars = dict(
        STAGE=stage,
        BRANCH=branch,
        BASE_DIR=base_dir,
        INIT_SCRIPT_PATH=init_script_path,
        REPOSITORY_PATH=repo_path,
        REPOSITORY_URL=get_config()['repository_url'],
        SCRIPT_BUILD=runner.get_script_cmd(known_scripts.BUILD),
        SCRIPT_RELOAD=runner.get_script_cmd(known_scripts.RELOAD),
        SCRIPT_INSTALL=runner.get_script_cmd(known_scripts.INSTALL),
        SCRIPT_STATUS_CHECK=runner.get_script_cmd(known_scripts.STATUS_CHECK),
        SCRIPT_PRE_BUILD=runner.get_script_cmd(known_scripts.PRE_BUILD),
        SCRIPT_POST_BUILD=runner.get_script_cmd(known_scripts.POST_BUILD),
        SCRIPT_PRE_INSTALL=runner.get_script_cmd(known_scripts.PRE_INSTALL),
        SCRIPT_POST_INSTALL=runner.get_script_cmd(known_scripts.POST_INSTALL),
        SCRIPT_PRE_DEPLOY=runner.get_script_cmd(known_scripts.PRE_DEPLOY),
        SCRIPT_POST_DEPLOY=runner.get_script_cmd(known_scripts.POST_DEPLOY))

    # Shell environment variables can't be None - map them to ''.
    # Fix: use .items() instead of the Python-2-only .iteritems()
    # (works on both Python 2 and 3).
    env_vars = {k: ('' if v is None else v) for k, v in env_vars.items()}

    with hide('running'):
        with shell_env(**env_vars):
            # Run the sync script on the remote
            runner.run('sh ' + script_path)
def send(notif_type, **params):
    ''' Send slack notifications. '''
    slack_config = config()
    url = slack_config['base_url'] + slack_config['endpoint']

    (text, color) = notification.get(
        notif_type,
        config=get_config(),
        notif_config=slack_config,
        create_link=create_link,
        pre_format=as_is,
        **params
    )

    # Slack message payload: a single colored attachment with
    # markdown enabled for the text.
    requests.post(url, json={
        'attachments': [{
            'color': color,
            'text': text,
            'mrkdwn_in': ['text']
        }]
    })
def deploy():
    '''
    Zero-Downtime deployment for the backend.

    Builds locally, uploads a compressed archive to the remote,
    extracts it into a timestamped release directory, flips the
    `current` symlink to it, installs dependencies and (re)starts the
    service. Notifications are sent at start and finish.
    '''
    config = get_config()
    stage = shell.get_stage()
    # First deployment if the remote hasn't been set up yet - used
    # later to decide between starting and reloading the service.
    is_first_deployment = not buildman.is_remote_setup()

    # Branch/commit are resolved from the LOCAL working copy.
    branch = git.current_branch(remote=False)
    commit = git.last_commit(remote=False, short=True)
    info('Deploying <{branch}:{commit}> to the {stage} server'.format(
        branch=branch, commit=commit, stage=stage))
    tmp_path = fs.get_temp_filename()
    build_dir = buildman.resolve_local_build_dir()
    included_files = config['deployment']['include_files']
    deployer_user = shell.get_user()

    notif.send(notification_types.DEPLOYMENT_STARTED, {
        'user': deployer_user,
        'commit': commit,
        'branch': branch,
        'stage': stage
    })

    (release_dir, current_path) = buildman.setup_remote()

    # Build id doubles as the release directory name (UTC timestamp).
    timestamp = datetime.utcnow()
    build_id = timestamp.strftime('%Y%m%d%H%M%S')
    build_name = buildman.get_build_name(build_id)
    build_compressed = build_name + '.tar.gz'
    release_path = release_dir + '/' + build_name
    dist_path = build_name + '/dist'

    buildman.build(stage, config)

    info('Compressing the build')
    fs.tar_archive(build_compressed, build_dir, remote=False)

    info('Uploading the build {} to {}'.format(build_compressed, tmp_path))
    fs.upload(build_compressed, tmp_path)

    # Remove the compressed build from the local directory.
    fs.rm(build_compressed, remote=False)

    # Once, the build is uploaded to the remote,
    # set things up in the remote server.
    with cd(release_dir):
        remote_info('Extracting the build {}'.format(build_compressed))
        # Create a new directory for the build in the remote.
        fs.mkdir(dist_path, nested=True)
        # Extract the build.
        fs.tar_extract(tmp_path, dist_path)
        # Remove the uploaded archived from the temp path.
        fs.rm_rf(tmp_path)

        # Upload the files to be included eg: package.json file
        # to the remote build location.
        upload_included_files(included_files, release_path)

        remote_info('Pointing the current symlink to the latest build')
        fs.update_symlink(release_path, current_path)

    # Change directory to the release path.
    with cd(current_path):
        install_remote_dependencies()

    # Start or restart the application service.
    start_or_reload_service(is_first_deployment)

    # Save build history
    buildman.record_history({
        'id': build_id,
        'path': release_path,
        'branch': branch,
        'commit': commit,
        'stage': stage,
        'createdBy': deployer_user,
        'timestamp': timestamp.strftime(buildman.TS_FORMAT)
    })

    # Send deployment finished notification.
    notif.send(notification_types.DEPLOYMENT_FINISHED, {
        'user': deployer_user,
        'branch': branch,
        'commit': commit,
        'stage': stage
    })

    remote_info('Deployment Completed')
def deploy():
    '''
    Zero-Downtime deployment for the web.

    Builds locally, uploads and extracts a compressed build into a
    timestamped release directory on the remote, fixes ownership and
    flips the `current` symlink. Notifications are sent at start
    and finish.
    '''
    config = get_config()
    stage = shell.get_stage()
    # Files on the remote are chown'd to this stage-configured user.
    user = get_stage_config(stage)['user']

    # Get the current branch and commit (locally).
    branch = git.current_branch(remote=False)
    commit = git.last_commit(remote=False, short=True)
    info('Deploying <{branch}:{commit}> to the {stage} server'.format(
        branch=branch, commit=commit, stage=stage))
    tmp_path = fs.get_temp_filename()
    build_dir = buildman.resolve_local_build_dir()
    deploy_dir = buildman.get_deploy_dir()
    deployer_user = shell.get_user()

    notif.send(notification_types.DEPLOYMENT_STARTED, {
        'user': deployer_user,
        'branch': branch,
        'commit': commit,
        'stage': stage
    })

    (release_dir, current_path) = buildman.setup_remote()

    # Build id doubles as the release directory name (UTC timestamp).
    timestamp = datetime.utcnow()
    build_id = timestamp.strftime('%Y%m%d%H%M%S')
    build_name = buildman.get_build_name(build_id)
    build_compressed = build_name + '.tar.gz'
    release_path = release_dir + '/' + build_name

    buildman.build(stage, config)

    info('Compressing the build')
    fs.tar_archive(build_compressed, build_dir, remote=False)

    info('Uploading the build {} to {}'.format(build_compressed, tmp_path))
    fs.upload(build_compressed, tmp_path)

    # Remove the compressed build from the local directory.
    fs.rm(build_compressed, remote=False)

    # Once, the build is uploaded to the remote,
    # set things up in the remote server.
    with cd(release_dir):
        remote_info('Extracting the build {}'.format(build_compressed))
        # Create a new directory for the build in the remote.
        fs.mkdir(build_name)
        # Extract the build.
        fs.tar_extract(tmp_path, build_name)
        # Remove the uploaded archived from the temp path.
        fs.rm_rf(tmp_path)

        remote_info('Changing ownership of {} to user {}'.format(
            deploy_dir, user))
        fs.chown(release_path, user, user)

        remote_info('Pointing the current symlink to the latest build')
        fs.update_symlink(release_path, current_path)

    # Save build history
    buildman.record_history({
        'id': build_id,
        'path': release_path,
        'branch': branch,
        'commit': commit,
        'stage': stage,
        'createdBy': deployer_user,
        'timestamp': timestamp.strftime(buildman.TS_FORMAT)
    })

    # Send deployment finished notification.
    notif.send(notification_types.DEPLOYMENT_FINISHED, {
        'user': deployer_user,
        'branch': branch,
        'commit': commit,
        'stage': stage
    })

    remote_info('Deployment Completed')
def build():
    ''' Build the code locally. '''
    stage = shell.get_stage()
    config = get_config()

    buildman.build(stage, config)
def config():
    ''' Get slack configuration. '''
    notifications = get_config()['notifications']

    return notifications['slack']
def deploy():
    '''
    Zero-Downtime deployment for the backend.

    Skips deployment when the remote build is already up to date;
    otherwise builds locally, bulk-uploads the build and included
    files, flips the `current` symlink, installs dependencies and
    (re)starts the service. PRE/POST deploy hook scripts run around
    the process, with notifications at start and finish.
    '''
    config = get_config()
    stage = shell.get_stage()
    is_remote_setup = buildman.is_remote_setup()
    # First deployment if the remote hasn't been set up yet - used
    # later to decide between starting and reloading the service.
    is_first_deployment = not is_remote_setup

    # Nothing to do if the remote already has the latest build.
    if is_remote_setup and buildman.is_remote_up_to_date():
        echo('Remote build is already up to date.')
        return

    # Branch/commit are resolved from the LOCAL working copy.
    branch = git.current_branch(remote=False)
    commit = git.last_commit(remote=False, short=True)
    info('Deploying <{branch}:{commit}> to the {stage} server'.format(
        branch=branch,
        commit=commit,
        stage=stage
    ))

    build_dir = os.path.abspath(buildman.resolve_local_build_dir())
    included_files = config['deployment']['include_files']
    deployer_user = shell.get_user()
    notif_params = dict(
        user=deployer_user,
        commit=commit,
        branch=branch,
        stage=stage
    )

    notif.send(notification_types.DEPLOYMENT_STARTED, notif_params)

    runner.run_script_safely(known_scripts.PRE_DEPLOY)

    (release_dir, current_path) = buildman.setup_remote()

    # Build id doubles as the release directory name (UTC timestamp).
    timestamp = datetime.utcnow()
    build_id = timestamp.strftime('%Y%m%d%H%M%S')
    build_name = buildman.get_build_name(build_id)
    # NOTE(review): single-argument os.path.join is a no-op here;
    # both paths are effectively plain '/'-concatenations.
    release_path = os.path.join(release_dir + '/' + build_name)
    dist_path = os.path.join(release_dir, build_name + '/dist')

    buildman.build(stage, config)

    uploader = BulkUploader()
    uploader.add(build_dir, dist_path)

    # Upload the files to be included eg: package.json file
    # to the remote build location.
    for filename in included_files:
        path = os.path.abspath(filename)

        # Add for upload if the file exist.
        if exists_local(path):
            uploader.add(path, release_path)

    uploader.upload()

    remote_info('Updating the current symlink')
    fs.update_symlink(release_path, current_path)

    # Once, the build is uploaded to the remote,
    # set things up in the remote server.
    # Change directory to the release path.
    install_remote_dependencies(
        commit=commit,
        current_path=current_path,
        smart_install=get_stage_config(stage)['deployment']['smart_install']
    )

    # Start or restart the application service.
    start_or_reload_service(is_first_deployment)

    # Save build history
    buildman.record_history({
        'id': build_id,
        'path': release_path,
        'branch': branch,
        'commit': commit,
        'stage': stage,
        'createdBy': deployer_user,
        'timestamp': timestamp.strftime(buildman.TS_FORMAT)
    })

    runner.run_script_safely(known_scripts.POST_DEPLOY)

    # Send deployment finished notification.
    notif.send(notification_types.DEPLOYMENT_FINISHED, notif_params)

    info('Deployment Completed')