Example #1
def git_archive_and_upload_tar():
    """
    Create an archive from the current git branch and upload it to target machine.
    """
    cmd = 'git branch | grep "*" | sed "s/* //"'
    current_branch = str(subprocess.Popen(cmd, \
            shell=True,\
            stdin=subprocess.PIPE, \
            stdout=subprocess.PIPE).communicate()[0]).rstrip()
    env.git_branch = current_branch
    local('git archive --format=tar %(git_branch)s > %(release)s.tar' % env)
    local('touch `git describe HEAD`-`git config --get user.email`.tag')
    local('tar rvf %(release)s.tar `git describe HEAD`-`git config --get user.email`.tag; \
            rm `git describe HEAD`-`git config --get user.email`.tag' % env)
    local('gzip %(release)s.tar' % env)

    current_owner, current_group = get_ownership(env.path)
    deploying_user = fabsettings.PROJECT_SITES[env.target].get('USER', 'web')
    deploying_group = fabsettings.PROJECT_SITES[env.target].get('GROUP', deploying_user)

    if not current_owner or not current_group:
        usersudo(deploying_user)  # Add deploying user to the sudo group
        sudo('mkdir -p %(path)s/releases/%(release)s' % env)
        sudo('mkdir -p %(path)s/packages/' % env)

    if current_owner != deploying_user:
        print("Problem Houston. Our root path {0} for deployments is not owned by our deploying user {1}.".format(env.path, deploying_user))
        print("Attempting to set the correct ownership permissions before proceeding.")
        usersudo(deploying_user)  # Add deploying user to the sudo group
        sudo("sudo chown -R {0}:{1} {2}".format(deploying_user, deploying_group, env.path))
    run('mkdir -p %(path)s/releases/%(release)s' % env)
    run('mkdir -p %(path)s/packages/' % env)
    rsync_project('%(path)s/packages/' % env, '%(release)s.tar.gz' % env, extra_opts='-avz --progress')
    run('cd %(path)s/releases/%(release)s && tar zxf ../../packages/%(release)s.tar.gz' % env)
    local('rm %(release)s.tar.gz' % env)
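A note on the branch lookup above: the grep/sed pipeline can be replaced by a single git command run through Fabric's local(). A minimal sketch, assuming git 1.6.3+ on the local machine:

# Hedged alternative to the subprocess pipeline: ask git directly for the
# current branch name and capture the output.
from fabric.api import local

current_branch = local('git rev-parse --abbrev-ref HEAD', capture=True).strip()
env.git_branch = current_branch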
Example #2
def deploy():
    freeze()
    project.rsync_project(
        remote_dir=DEST_PATH,
        local_dir=BUILD_PATH.rstrip('/') + '/',
        exclude=['.DS_Store', 'static/scss'],
        delete=True)
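The rstrip('/') + '/' above guarantees exactly one trailing slash, which matters to rsync: with the slash, the contents of local_dir land directly inside remote_dir; without it, the directory itself is recreated under remote_dir. A minimal illustration (paths are hypothetical):

# With the trailing slash, the contents of build/ are copied into /var/www/site;
# without it, you would end up with /var/www/site/build/... instead.
project.rsync_project(remote_dir='/var/www/site', local_dir='build/')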
Example #3
def setMaster():
    if exists('/etc/hosts0'):
        print '/etc/hosts0 exists'
    else:
        sudo('cp /etc/hosts /etc/hosts0')

    sudo('rm /etc/hosts')
    sudo('cp /etc/hosts0 /etc/hosts')
    put('hosts')
    sudo('cat hosts | tee -a /etc/hosts')
    run('rm hosts')

    run('cat /etc/hosts')

    path1 = '/home/{0}'.format(parm['USER'])
    rsync_project(path1, exclude=['result'])

    path2 = join(path1, basename(realpath('.')))
    path3 = join(path2, parm['programdir'])
    for dst in (path2, path3):
        fi = '{0}/{1}'.format(dst, parm['keyfile'])
        if not exists(fi, use_sudo=True):
            put(parm['keyfile'], dst)
            sudo('chmod 400 {0}'.format(fi))
    execute('genkey')
Example #4
def deploy():
    rsync_project(
        remote_dir = "/home/bobuk/V/kittycheck", 
        local_dir = ".", 
        exclude = ["v", "*.pyc", "*.pem", "*.xls", ".v", ".git"],
        delete = True)
    fabtools.supervisor.restart_process('kittycheck')
Example #5
def deploy(delete=True):
    now = datetime.datetime.now()
    if local('git diff', capture=True).strip():
        local('git commit -am "Committing deploy version at %s"' % now.strftime('%Y-%m-%d %H:%M'))
        local('git push')
    build()
    rsync_project(env.deploy_dir, '_build/', delete=delete)
Example #6
def refresh_code(self):
    rsync_project(
        remote_dir='/srv/fabric-demo',
        local_dir='./',
        delete=True,
        exclude=['*.pyc', 'fabfile.py', '.vagrant', 'Vagrantfile'],
    )
Example #7
def _synchronize_node(configfile, node):
    """Performs the Synchronize step of a Chef run:
    Uploads all cookbooks, all roles and all databags to a node and add the
    patch for data bags

    Returns the node object of the node which is about to be configured,
    or None if this node object cannot be found.
    """
    print "Synchronizing node, cookbooks, roles and data bags..."
    # First upload node.json
    remote_file = "/etc/chef/node.json"
    put(configfile, remote_file, use_sudo=True, mode=0400)
    root_user = "******"
    if node.get("platform") in ["freebsd", "mac_os_x"]:
        root_user = "******"
    with hide("stdout"):
        sudo("chown root:{0} {1}".format(root_user, remote_file))
    # Remove local temporary node file
    os.remove(configfile)
    # Synchronize kitchen
    rsync_project(
        env.node_work_path,
        "./cookbooks ./data_bags ./roles ./site-cookbooks",
        exclude=("*.svn", ".bzr*", ".git*", ".hg*"),
        delete=True,
        extra_opts="-q",
    )
    _add_search_patch()
Example #8
def rsync_media(upload=False, delete=False):
    require('PROJECT')

    local_dir = add_slash(Path(env.PROJECT.package, 'media'))
    remote_dir = add_slash(env.PROJECT.media)

    rsync_project(remote_dir, local_dir, delete=delete, upload=upload)
Example #9
def upload_source(gitref, directory):
    """
    Push the new code into a new directory
    """

    with use_tmp_dir() as local_dir:
        local('git archive {ref} | tar x -C {dir}'.format(ref=gitref, dir=local_dir))

        # Add trailing slash for rsync
        if not local_dir.endswith('/'):
            local_dir += '/'

        rsync_project(
            local_dir=local_dir,
            remote_dir=os.path.join(env.cwd, directory),
            delete=True,
            extra_opts=' '.join('--link-dest={}'.format(d) for d in get_src_dir_list()),
            # Fabric defaults to -pthrvz
            # -t preserves modification times; we want to ignore those.
            # -v prints each file being updated
            # We replaced these with:
            # -c uses checksums to compare files
            # -i prints what kind of transfer was done (copy/upload/...)
            default_opts='-pchriz',
        )

    run('cp -l environment {new}/.env'.format(new=directory))
    run('chmod go+rx {}'.format(directory))
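use_tmp_dir is not defined in this snippet; a plausible implementation (an assumption, not the original author's code) is a context manager over a temporary directory:

# Hypothetical helper matching the usage above: yields a scratch directory
# and removes it when the with-block exits.
import contextlib
import shutil
import tempfile

@contextlib.contextmanager
def use_tmp_dir():
    path = tempfile.mkdtemp()
    try:
        yield path
    finally:
        shutil.rmtree(path)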
Example #10
def deploy():
    # TODO: Might be decent to run tests before deploying?
    excludes = [
            '*.pyc', '.git', 'config.py', 'data',
            'syncplug.sh', 'fabfile.py'
            ]
    rsync_project('/home/resumr/app', 'build/', exclude=excludes)
Example #11
def upload_code():
    """upload code from the deploy host"""
    print green("################################################################################")
    print green("### uploading code to deploy hosts...")
    local_dir = "%(checkout_path)s/%(timestamp)s" % env
    remote_dir = "%(releases_path)s/" % env
    rsync_project(local_dir=local_dir, remote_dir=remote_dir, extra_opts="-q")
Example #12
def _deploy_sync_project_source():
    tmp_dir = "/tmp/__autoclave_project_sync"
    rsync_project(local_dir= LOCAL_PROJECT_ROOT + "/",
                  remote_dir=tmp_dir,
                  delete=True, exclude=[".git", "*.pyc"])
    _run_as_app_user("rsync -rv {}/ {}/".format(tmp_dir, config.deployment.src_path))
    _run_as_app_user("find {} -name '*.pyc' -delete".format(config.deployment.src_path))
Example #13
def deploy_rsync(deploy_configs):
    '''for rsync'''
    project.rsync_project(
        local_dir=env.local_output.rstrip("/")+"/",
        remote_dir=env.remote_output.rstrip("/")+"/",
        delete=env.rsync_delete
    )
Example #14
def upload_blobs(prod_user=None, path=None):
    """Upload BLOB part of Zope's data to the server."""
    opts = dict(
        prod_user=prod_user or env.get('prod_user'),
        path=path or env.get('path') or os.getcwd()
    )

    if not env.get('confirm'):
        if not confirm("This will destroy all current BLOB files on the server. "
                       "Are you sure you want to continue?"):
            return

    with cd('/home/%(prod_user)s/niteoweb.%(shortname)s/var' % opts):

        # backup current BLOBs
        if exists('blobstorage'):
            sudo('mv blobstorage blobstorage.bak')

        # remove temporary BLOBs from previous uploads
        if exists('/tmp/blobstorage'):
            sudo('rm -rf /tmp/blobstorage')

        # upload BLOBs to the server and move them to their place
        rsync_project('/tmp', local_dir='%(path)s/var/blobstorage' % opts)
        sudo('mv /tmp/blobstorage ./')
        sudo('chown -R %(prod_user)s:%(prod_user)s blobstorage' % opts)
        sudo('chmod -R 700 blobstorage')
Example #15
def _server_djangoapp_setup():
    _DA = _S.DJANGOAPP

    install_path = join(_E.INSTALLPATH, _DA.ROOT_DIR)  # subfolder of root dir that contains the django project
    link_path = join(
        _E.INSTALLPATH, _DA.LINK_DIR
    )  # should be called project dir (the main project dir, where wsgi.py and settings are)
    link_fqfn = join(
        link_path, _DA.CONF_NAME
    )  # where django expects the config file, put a link to the actual config file location here
    configfile_path = join(_E.INSTALLPATH, _S.SVCCONFIG.DIR)  # this is the actual config file location
    configfile_fqfn = join(configfile_path, _DA.CONF_NAME)  # ... and its fqfn
    source_dir = join(_DA.SRCDIR, "")  # add trailing slash

    # create folders
    run("mkdir -p %s" % install_path)
    for f in _DA.REQ_FOLDERS:
        ff = join(install_path, f)
        run("mkdir -p %s" % ff)

    # upload files
    project.rsync_project(remote_dir=install_path, local_dir=source_dir)

    # link the config file
    if files.exists(link_fqfn):  # this will be False if the link exists but its target does not!
        warnn("Overwriting config file in %s" % link_fqfn)
        run("rm %s" % link_fqfn)
    with cd(link_path):
        run("ln -sf %s %s" % (configfile_fqfn, _DA.CONF_NAME))
Example #16
def pull():
    """ updates development environment """

    x = prompt(blue('Reset local database (r) or flush (f)?'), default="f")

    if x == 'r':
        reset_local_db()
    elif x == 'f':
        print(red(" * flushing database..."))
        local('cd %s '
              '&& . %s '
              '&& python manage.py flush' % (PROJECT_PATH, VIRTUAL_ENVIRONMENT))

    print(red(" * creating database dump..."))
    run('cd %s '
        '&& source venv/bin/activate '
        '&& python manage.py dumpdata --natural-foreign -e contenttypes -e auth.Permission > data.json' % env.path)

    print(red(" * downloading dump..."))
    get('%s/data.json' % env.path, '/tmp/data.json')

    print(red(" * importing the dump locally..."))
    local('cd %s '
          '&& . %s '
          '&& python manage.py loaddata /tmp/data.json' % (PROJECT_PATH, VIRTUAL_ENVIRONMENT), capture=False)

    print(red(" * removing database dump..."))
    run('rm %s/data.json' % env.path)

    print(red(" * syncing media files..."))
    rsync_project('%s/' % env.media_path, settings.MEDIA_ROOT, upload=False, delete=True)
Example #17
def remote_dir(src_dir, dest_dir, use_sudo=False, backup=False, backup_dir=None, tmp_dir="/tmp", **kwargs):
  """
  if dest_dir ends with /, it will put the src basedir in the directory
  if dest_dir does not end with /, it will put the src contents in there
  """
  run_func = sudo if use_sudo else run

  # strip off the trailing slash
  if dest_dir[-1] == "/":
    dest_dir = dest_dir[0:-1]

  tmp_name = dest_dir[1:].replace("/", "_")

  if backup and dir_exists(dest_dir):
    the_backup_dir = backup_dir if backup_dir else "/home/" + env.user + "/fab_bkup"
    if not dir_exists(the_backup_dir):
      mkdir(the_backup_dir)

    dest_parent = dest_dir[0:dest_dir.rfind("/")]
    dest_base = os.path.basename(dest_dir)
    with cd(dest_parent):
      run("tar cfzh %s/%s %s" % (the_backup_dir, \
        tmp_name + "." + str(int(time.time())) + ".tgz", dest_base))

  tmp_remote_dir = tmp_dir + "/" + tmp_name
  rsync.rsync_project(local_dir=src_dir, remote_dir=tmp_remote_dir, delete=True)

  if not dir_exists(dest_dir):
    mkdir(dest_dir, use_sudo=use_sudo, **kwargs)

  with cd(tmp_remote_dir):
    run_func("mv * %s" % dest_dir)

  path_props(dest_dir, use_sudo=use_sudo, recursive=True, **kwargs)
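dir_exists, mkdir and path_props are project helpers not shown here; a minimal sketch of what dir_exists might look like (an assumption, not the original code):

# Hypothetical helper matching the usage above: tests for a remote directory.
from fabric.api import hide, run, settings

def dir_exists(path):
  # True if `path` exists and is a directory on the remote host.
  with settings(hide('everything'), warn_only=True):
    return run('test -d %s' % path).succeeded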
Example #18
def deploy_regression_tests():
    _needs_targetenv()
    target_dir = env.demo_data_root+"/regression"
    if not exists(target_dir):
        sudo("mkdir -p %(target_dir)s" % venv())
        sudo("chown %(user)s %(target_dir)s" % venv())
    rsync_project(target_dir, "%s/testfeeds/regression/" % env.projectroot, exclude=".*")
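venv() here is not Python's virtualenv; it is a project helper used for string interpolation. A plausible sketch (an assumption, not the original code) merges the caller's local variables over env so that '%(target_dir)s' resolves:

# Hypothetical helper matching the usage above: interpolation dict built
# from env plus the calling function's locals (e.g. target_dir).
import inspect
from fabric.api import env

def venv():
    d = dict(env)
    d.update(inspect.currentframe().f_back.f_locals)
    return d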
Example #19
def deploy(package_name, settings, build_id):
    i_path = os.path.join(os.path.dirname(__file__), '..', )
    rsync_project(deployment_temp_dir, local_dir=i_path, exclude=['.git', '.svn', '.idea', 'venv', '*.pyc'],
                  delete=True)

    source_dir = os.path.join(deployments_dir, package_name)
    if exists(source_dir):
        run('rm -rf %s' % source_dir)

    variables.update(
        {
            'deployment_dir': source_dir,
            'settings': settings,
            'build_id': build_id
        }
    )

    run('cp -r %(deployment_temp_dir)s %(deployment_dir)s' % variables)

    run('ln -nsf %(deployment_dir)s %(app_dir)s' % variables)

    run('%(pip_path)s install -r %(app_dir)s/requirements.txt' % variables)

    run(
        'find %(deployments_dir)s -maxdepth 1 -type d -name "release-*" | sort -r -n | tail -n +20 | xargs -I %% rm --recursive --force %%' % variables)

    run('echo "REVISION=\'%(build_id)s\'" > %(app_src_dir)s/revision.py' % variables)
    run('echo "RELEASE_TYPE=\'%(settings)s\'" > %(app_src_dir)s/master_config.py' % variables)

    dev_file = os.path.join(app_src_dir, 'developer.py')
    if exists(dev_file):
        print 'DEV FILE EXISTS'
        run('rm %s' % dev_file)

    run('touch %(app_dir)s/reload' % variables)
Example #20
def _synchronize_node(configfile):
    """Performs the Synchronize step of a Chef run:
    Uploads all cookbooks, all roles and all databags to a node and add the
    patch for data bags

    Returns the node object of the node which is about to be configured,
    or None if this node object cannot be found.
    """
    print "Synchronizing node, cookbooks, roles and data bags..."
    # First upload node.json
    remote_file = '/etc/chef/node.json'
    put(configfile, remote_file, use_sudo=True, mode=0400)
    with hide('stdout'):
        sudo('chown root:root {0}'.format(remote_file))
    # Remove local temporary node file
    os.remove(configfile)
    # Synchronize kitchen
    rsync_project(
        node_work_path, './',
        exclude=(
            '/auth.cfg',  # might contain user credentials
            '*.svn', '.bzr*', '.git*', '.hg*',  # ignore vcs data
            '/cache/',
            '/site-cookbooks/chef_solo_search_lib/'  # ignore data generated
                                                     # by littlechef
        ),
        delete=True,
        extra_opts="-q",
    )
    _add_search_patch()
Example #21
def deploy():
    """ rsync code to remote host """
    require('root', provided_by=('staging', 'production'))
    if env.environment == 'production':
        if not console.confirm('Are you sure you want to deploy production?',
                               default=False):
            utils.abort('Production deployment aborted.')
    # defaults rsync options:
    # -pthrvz
    # -p preserve permissions
    # -t preserve times
    # -h output numbers in a human-readable format
    # -r recurse into directories
    # -v increase verbosity
    # -z compress file data during the transfer
    extra_opts = '--omit-dir-times'
    rsync_project(
        env.root,
        exclude=RSYNC_EXCLUDE,
        delete=True,
        extra_opts=extra_opts,
    )
    # upload django.wsgi file
    source = os.path.join('deploy', 'django.wsgi')
    dest = os.path.join(env.code_root, 'deploy', '%(environment)s.wsgi' % env)
    files.upload_template(source, dest, env)
    # fix permissions
    sudo('chown -R wwwpub %s' % env.home)
    sudo('chmod -R a+rw %s' % env.home)
Example #22
def upload_blobs(prod_user=None, path=None, blob_folders=None):
    """Upload BLOB part of Zope's data to the server."""
    opts = dict(
        prod_user=prod_user or env.get('prod_user'),
        path=path or env.get('path') or os.getcwd(),
    )
    blob_folders = blob_folders or env.get('blob_folders') or ['blobstorage']
    confirmed = env.get('confirm') or confirm("This will destroy all current" \
        " BLOB files on the server. Are you sure you want to continue?")

    if not confirmed:
        return

    with cd('/home/%(prod_user)s/var' % opts):
        for folder in blob_folders:
            opts['folder'] = folder

            # backup current BLOBs
            if exists(folder):
                # remove the previous backup
                sudo('rm -rf %(folder)s.bak' % opts)

                # create a backup
                sudo('mv %(folder)s %(folder)s.bak' % opts)

            # remove temporary BLOBs from previous uploads
            if exists('/tmp/%(folder)s' % opts):
                sudo('rm -rf /tmp/%(folder)s' % opts)

            # upload BLOBs to the server and move them to their place
            rsync_project('/tmp', local_dir='%(path)s/var/%(folder)s' % opts)
            sudo('mv /tmp/%(folder)s ./' % opts)
            sudo('chown -R %(prod_user)s:%(prod_user)s %(folder)s' % opts)
            sudo('chmod -R 700 %(folder)s' % opts)
Example #23
def deploy():
    # Copy files over.
    run('mkdir -p %s' % env.TARGET_DIR)

    rsync_project(remote_dir=env.TARGET_DIR, local_dir='./',
                    delete=True,
                    # extra_opts='--exclude-from=.gitignore',
                    exclude = [
                        'fabfile*',
                        'app/storage/logs/*',
                        'app/storage/sessions/*',
                        'app/storage/views/*',
                        'app/storage/meta/*',
                        'app/storage/cache/*',
                        'public/attached/*',
                        'public/uploads/*',
                        '.DS_Store',
                        '.log',
                        '.git*'
                    ]
    )

    with cd(env.TARGET_DIR):
        run('mkdir -p public/uploads/')
        sudo('''./artisan migrate''')
        sudo('''composer dump -o''')
Example #24
def sync():
    print green("Files synchronization started")
    _check_r_res(env.proj_root_dir, env.proj_root_dir_owner,
        env.proj_root_dir_group, env.proj_root_dir_perms)

    print green("Project files synchronization")
    rsync_project(env.proj_dir, local_dir='%s/' % _project_dir(),
        exclude=env.rsync_exclude, delete=True, extra_opts='-q -L')

    print green("Cleaning files")
    run('find %s -name "*.pyc" -exec rm -f {} \;' % env.proj_dir)

    # project directory
    _fix_r_res(env.proj_dir, env.proj_dir_owner,
        env.proj_dir_group, env.proj_dir_perms)
    _check_r_res(env.proj_dir, env.proj_dir_owner,
        env.proj_dir_group, env.proj_dir_perms)
    # run directory
    run('mkdir -p %s' % env.run_dir)
    _fix_r_res(env.run_dir, env.run_dir_owner,
        env.run_dir_group, env.run_dir_perms)
    _check_r_res(env.run_dir, env.run_dir_owner,
        env.run_dir_group, env.run_dir_perms)
    # settings
    s_f = os.path.join(env.proj_dir, 'src', 'helixauth',
        'conf', 'settings.py')
    _fix_r_res(s_f, env.proj_dir_owner,
        env.proj_dir_group, env.proj_settings_file_perms)
    _check_r_res(s_f, env.proj_dir_owner,
        env.proj_dir_group, env.proj_settings_file_perms)
    print green("Files synchronization complete")
Example #25
def push_skeleton(local_path, remote_path):
    local_path  = local_path.rstrip('/')  + '/'
    remote_path = remote_path.rstrip('/') + '/'
    # rsync_project(remote_dir=remote_path, local_dir=local_path, exclude='*.append')
    rsync_project(remote_dir=remote_path, local_dir=local_path)

    with lcd(local_path):
        append_filenames = local('find -type f -name \*.append', capture=True).split()
        patch_filenames  = local('find -type f -name \*.patch',  capture=True).split()

    if patch_filenames:
        # TODO: make sure "patch" is installed remotely
        pass

    with cd(remote_path):
        for patch_filename in patch_filenames:
            patch_filename = patch_filename[2:]
            filename = sub(r'\.patch$', '', patch_filename)
            # TODO: Make sure 'patch' returns gracefully if file was already patched
            run('patch %s < %s ; rm %s' % (filename, patch_filename, patch_filename))

        for append_filename in append_filenames:
            append_filename = append_filename[2:]
            filename = sub(r'\.append$', '', append_filename)
            # TODO: Find out whether filename already contains append_filename
            run('cat %s >> %s ; rm %s' % (append_filename, filename, append_filename))
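One way to satisfy the first TODO above (a sketch assuming a Debian-style remote; adapt the package manager to your hosts):

# Hypothetical pre-check for "make sure patch is installed remotely":
# probe for the binary and install it only when missing.
if patch_filenames:
    with settings(warn_only=True):
        if run('which patch').failed:
            sudo('apt-get install -y patch')  # assumes an apt-based remote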
Example #26
def sync_project_to_server():
    """Synchronize project with webserver ignoring venv and sqlite db..
    This is a handy way to get your secret key to the server too...

    """
    base_path, code_path, git_url, repo_alias, site_name = get_vars()
    update_venv(code_path)
    #noinspection PyArgumentEqualDefault
    rsync_project(
        base_path,
        delete=False,
        exclude=[
            '*.pyc',
            '.git',
            '*.dmp',
            '.DS_Store',
            'projecta.db',
            'venv',
            'django_project/static'])
    update_migrations()
    with cd(os.path.join(code_path, 'django_project')):
        run('touch core/wsgi.py')
    set_media_permissions(code_path)
    set_db_permissions()
    update_migrations()
    collectstatic()
    fastprint(blue(
        'Your server is now synchronised with your local project\n'))
Example #27
def rsync_update_project():
    rsync_project(
        remote_dir=env.project_path,
        local_dir=env.rsync_local,
        exclude=env.rsync_exclude,
        delete=env.rsync_delete
    )
Example #28
def sync():
    """Rysnc local states and pillar data to the master, and checkout margarita."""
    # Check for missing local secrets so that they don't get deleted
    # project.rsync_project fails if host is not set
    sudo("mkdir -p /srv")
    if not have_secrets():
        get_secrets()
    else:
        # Check for differences in the secrets files
        for environment in [env.environment]:
            remote_file = os.path.join('/srv/pillar/', environment, 'secrets.sls')
            with lcd(os.path.join(CONF_ROOT, 'pillar', environment)):
                if files.exists(remote_file):
                    get(remote_file, 'secrets.sls.remote')
                else:
                    local('touch secrets.sls.remote')
                with settings(warn_only=True):
                    result = local('diff -u secrets.sls.remote secrets.sls')
                    if result.failed and files.exists(remote_file) and not confirm(
                            red("Above changes will be made to secrets.sls. Continue?")):
                        abort("Aborted. File have been copied to secrets.sls.remote. " +
                              "Resolve conflicts, then retry.")
                    else:
                        local("rm secrets.sls.remote")
    salt_root = CONF_ROOT if CONF_ROOT.endswith('/') else CONF_ROOT + '/'
    project.rsync_project(local_dir=salt_root, remote_dir='/tmp/salt', delete=True)
    sudo('rm -rf /srv/salt /srv/pillar')
    sudo('mv /tmp/salt/* /srv/')
    sudo('rm -rf /tmp/salt/')
    execute(margarita)
Example #29
def push_media():
    """
    Copy the media from local to remote
    """
    if not hasattr(env, 'CFG'):
        puts("You need to load an environment")
        return False

    if PRODUCTION and PRODUCTION == env.CFG['env']:
        puts("SKIPPING push_media: Not allowed to push to production")
        return False

    tmp = sudo('mktemp -d', user=env.user)
    sudo('cp -a %s %s' % (env.CFG['basedir'] + '/media/', tmp))
    sudo('chown -R %s %s' % (env.user, tmp))

    project.rsync_project(
        remote_dir=tmp + '/media',
        local_dir=BASEDIR + '/media/',
        upload=True,
        delete=True,
    )
    sudo('chown -R %s:%s %s' % (env.CFG["user"], env.CFG["group"], tmp))
    sudo('rm -rf %s' % env.CFG['basedir'] + '/media/')
    sudo('mv %s %s' % (tmp + '/media/', env.CFG['basedir']))
    sudo('rm -rf %s' % tmp)
Example #30
def sync():
    rsync_project(
        local_dir="./",
        remote_dir="%(deploy)s/" % env,
        exclude=["venv", "*.pyc", ".git"],
        # delete=True,
    )
Example #31
def root_rsync(local_dir, remote_dir, exclude=[], delete=False):
    def _end_with_slash(dir_path):
        if dir_path[-1] == '/':
            return dir_path
        else:
            return dir_path + '/'

    local_dir = _end_with_slash(local_dir)
    remote_dir = _end_with_slash(remote_dir)
    m = hashlib.md5()
    m.update(remote_dir)
    me = local('whoami', capture=True)
    remote_tmp_dir = '/tmp/%s/%s/' % (me, m.hexdigest())
    run('mkdir -p %s' % remote_tmp_dir)
    if is_dir(remote_dir):
        run('rsync -a %s %s' % (remote_dir, remote_tmp_dir))  # already exists
    rsync_project(remote_dir=remote_tmp_dir,
                  local_dir=local_dir,
                  exclude=exclude,
                  delete=delete)
    sudo('rsync -a %s %s' % (remote_tmp_dir, remote_dir))
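Typical usage of the helper above (paths are hypothetical): staging through /tmp lets rsync_project run as the unprivileged SSH user, while the final sudo rsync installs the files into a root-owned destination:

# Sync a local nginx config tree into /etc/nginx without logging in as root.
root_rsync('config/nginx/', '/etc/nginx/', exclude=['*.swp'], delete=False)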
Example #32
def rsync_upload():
    """
    Uploads the project with rsync excluding some files and folders.
    """
    excludes = [
        "*.pyc", "*.pyo", "*.db", ".DS_Store", ".coverage",
        "local_settings.py", "/static", "/.git", "/.hg"
    ]
    local_dir = os.getcwd() + os.sep
    return rsync_project(remote_dir=env.proj_path,
                         local_dir=local_dir,
                         exclude=excludes)
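rsync_project() returns the result of the underlying local() rsync call, so the value returned above can be inspected by the caller. A usage sketch, assuming warn_only so a failed rsync does not abort the whole run:

# Hedged usage of the task above: handle an rsync failure explicitly.
from fabric.api import abort, settings

with settings(warn_only=True):
    result = rsync_upload()
if result.failed:
    abort('rsync upload failed')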
Example #33
def sync():
    RSYNC_EXCLUDES = [
        '.git',
    ]

    RSYNC_OPTS = ['-f', '":- .gitignore"']

    rsync_project(local_dir='./',
                  remote_dir=BACKEND_DIR,
                  exclude=RSYNC_EXCLUDES,
                  extra_opts=' '.join(RSYNC_OPTS))

    with lcd('static/react'):
        local('npm install')
        local('webpack')

        # Upload bundle.js
        key_name = 'build/bundle.js'
        remote_key_path = os.path.join('static/react', key_name)
        remote_path = os.path.join(BACKEND_DIR, remote_key_path)
        put(key_name, remote_path)
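The -f ":- .gitignore" option passed via extra_opts is rsync's per-directory merge filter: each directory's .gitignore is read and its patterns treated as excludes. The quoting matters because the option value contains a space; the list-join above produces this equivalent inline form:

# Equivalent inline form of the filter options above.
rsync_project(local_dir='./', remote_dir=BACKEND_DIR,
              exclude=['.git'], extra_opts='-f ":- .gitignore"')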
Example #34
def init_django_cms():
    """
    """
    setFabricEnv(CONFIGFILE)
    #set vals
    project_name = env.cms_project.split("/")[-1]
    project_zip = project_name + ".zip"
    #copyProject
    if not exists(env.cms_project):
        run("mkdir -p {}".format(env.cms_project))
    Say.action("[+] Syncing project")
    rsync_project(local_dir=env.local_cms_project,
                  remote_dir=env.cms_project,
                  exclude=[".git"])
    Say.action("[+] Updating statics and  database")
    with cd("cd {}".format(env.cms_project)):
        #make_migrations
        run("python manage.py makemigrations")
        run("python manage.py migrate")
        #collect_static
        run("python manage.py collectstatic")
Example #35
def deploy():
    # Everything in local_path (excluding the directory itself) is copied to the remote_path directory
    # mind the trailing slash
    local_path = "src/"
    # mind the missing trailing slash
    remote_path = "~/fabric_tryout"
    # rsync options added by programmer
    opt = '-EgolHDu --out-format="info - %i %B %12b %12l %M %f%L"'

    print "==> Deploying application using the following parameters"
    print "\tremote host\t\t %s" % remote_host
    print "\tremote port\t\t %s" % remote_port
    print "\tremote user\t\t %s" % remote_user
    print "t\tlocal directory\t\t %s" % local_path
    print "\tremote directory\t %s" % remote_path

    answer = raw_input("Continue [y/n]?")
    if (answer == "y"):
        rsync_project(remote_path, local_path, delete=True, extra_opts=opt)
    else:
        print "Aborted"
Example #36
def deploy(deploy_version=None):
    """
      Deploy a new version of code to production or test server.

      Push code to remote server, install requirements, apply migrations,
      collect and compress static assets, export foreman to upstart,
      restart service
    """
    # TODO: replace this with
    # - zip up working directory
    # - upload and unzip into DEPLOY_PATH
    env.deploy_version = deploy_version or 'production'
    dirname = check_output(
        ["echo \"$(date +'%Y-%m-%d')-$(git log --pretty=format:'%h' -n 1)\""],
        shell=True).strip('\n ')
    deploy_path = os.path.join(HOME_DIR, dirname)
    run('mkdir -p {}'.format(deploy_path))
    print 'Uploading project to %s' % deploy_path
    rsync_project(
        remote_dir=deploy_path,
        local_dir='./',
        ssh_opts='-o StrictHostKeyChecking=no',
        exclude=[
            '.git', 'backups', 'venv', 'static/CACHE', '.vagrant', '*.pyc',
            'dev.db'
        ],
    )
    with cd(deploy_path):
        _ensure_dirs()
        _setup_venv()
        create_database()
        install_requirements(deploy_path)
        run_migrations(deploy_path)
        collect_static(deploy_path)
        # This may cause a bit of downtime
        run('ln -sfn {new} {current}'.format(new=deploy_path,
                                             current=DEPLOY_PATH))
        setup_upstart(deploy_path)
    restart()
    print "Done!"
Example #37
def sync():
    '''
    use rsync_project to sync files between local and server
    '''
    # Verifies that we are in the correct directory.
    fabfile_name = local('ls', capture=True)
    if 'fabfile.py' not in fabfile_name:
        print "You must run the Fabric script from the directory with fabfile.py in it!!" + fabfile_name
        sys.exit(1)

    require('code_root', provided_by=('production',))

    rsync_project(env.code_root, LOCAL_DIR, delete=True, extra_opts="-l",
                  exclude=('env',
                           '*.sublime*',
                           '*.pyc', '*.git', '*.gitignore',
                           '.coverage', COVERAGE_REPORT_DIR_NAME,
                           '%s/wsgi.py' % PROJECT_FOLDER,
                           '%s/local_settings.py' % PROJECT_FOLDER,
                           '%s/static' % PROJECT_FOLDER,
                           '%s/migrations' % PROJECT_FOLDER,
                           '%s/build' % PROJECT_FOLDER))
Example #38
def update_core_conf():
    """
    Update Solr core's configuration files on the remote machine(s), 
    syncing them with local ones.
    """
    require('domain_root', 'app_domain', 'local_repo_root',  
            provided_by=('staging', 'production'))   
    # update configuration -- on the remote machine -- for the Solr core  
    # serving this OpenOrdini instance (via rsync)
    # defaults rsync options: -pthrvz
    fastprint("Syncing Solr configuration for %(app_domain)s..." % env, show_prefix=True)
    extra_opts = '--omit-dir-times'
    with hide('commands'):
        rsync_project(
            remote_dir=os.path.join(env.domain_root, 'private'),
            local_dir=os.path.join(env.local_repo_root, 'system', 'solr'),
            exclude=('.*', '*~','context_*.xml', 'solr.xml', 'solr.xml.remote'),
            delete=True,
            extra_opts=extra_opts,
        )
        sudo('chown -R %s:www-data %s' % (env.om_user, os.path.join(env.domain_root, 'private', 'solr')))
    fastprint(" done." % env, end='\n')
Example #39
def deploy():
    """
    Deploys application to production environment.
    """
    require("site_dir")
    git_revision = local('git rev-parse HEAD', capture=True)
    git_branch = local('git rev-parse --abbrev-ref HEAD', capture=True)
    rsync_project(remote_dir=env.site_dir,
                  local_dir='django_site/',
                  exclude="*.pyc",
                  delete=False)
    with cd(env.site_dir), prefix('source env/bin/activate'), prefix(
            'source envvar'):
        run('pip install -r requirements/production.txt')
        run("python manage.py syncdb")
        run("python manage.py migrate")
        run("python manage.py collectstatic --noinput")
        run('touch reload')
        run('opbeat -o 1b8118e6bdb34aeb98078b4d2082f10e -a d3b642e69c -t 6014a391d67d97e1d6c40ba34e03c35c8aac0690 deployment --component path:. vcs:git rev:%s branch:%s remote_url:[email protected]:vinco/fundacion-proninos'
            % (git_revision, git_branch))

    print green('Deploy successful.')
Example #40
def deploy():
    # fix permission
    sudo("mkdir -p /srv/scontrol")
    sudo("chown {0} -R /srv/scontrol".format(env.user))

    # rsync deploy
    params = {
        'remote_dir': "/srv/",
        'local_dir': os.path.dirname(os.path.realpath(__file__)),
        'delete': True,
        'extra_opts': ("--force --progress --delay-updates "
                       "--exclude-from=rsync_exclude.txt"),
    }
    with settings(warn_only=True):
        rsync_project(**params)

    sudo("chown root:root -R /srv/scontrol")
    sudo("service salt-master restart")
    sudo("salt '*' saltutil.refresh_pillar")
Example #41
def copy_elastic():
    # remove all old snapshots
    snapshotids = local("curl -s http://localhost:9200/_snapshot/lagen_backup/_all?pretty=true|jq -r '.snapshots[]|.snapshot'", capture=True)
    if snapshotids.strip():
        for snapshot_id in snapshotids.split("\n"):
            assert snapshot_id
            local("curl -XDELETE http://localhost:9200/_snapshot/lagen_backup/%s" % snapshot_id)

    # compute a new snapshot id, YYMMDD-HHMMSS
    snapshot_id = datetime.now().strftime("%y%m%d-%H%M%S")
    snapshot_url = "http://localhost:9200/_snapshot/lagen_backup/%s?wait_for_completion=true" % snapshot_id

    # calculate doccount
    local_doccount = local("curl -s http://localhost:9200/lagen/_count?pretty=true|grep count", capture=True)

    # create a new snapshot with the computed id
    local('curl -f -XPUT \'%s\' -d \'{ "indices": "lagen"}\'' % snapshot_url)

#    snapshot_id = "180706-232553"
#    local_doccount = '  "count" : 3607700,'
    # rsync /tmp/elasticsearch/snapshots from local to target (must be
    # same locally and on target)
    snapshotdir = "/tmp/elasticsearch/snapshot/lagen/"
    # sudo("chown -R staffan:staffan %s" % snapshotdir)
    rsync_project(local_dir=snapshotdir,
                  remote_dir=snapshotdir,
                  delete=True,
                  default_opts="-aziO --no-perms")
    # sudo("chown -R elasticsearch:elasticsearch %s" % snapshotdir)

    # on target, curl POST to restore snapshot (close index beforehand
    # and open it after)
    run("curl -XPOST http://localhost:9200/lagen/_close")
    run("curl -f -XPOST http://localhost:9200/_snapshot/lagen_backup/%s/_restore?wait_for_completion=true" % snapshot_id)
    run("curl -XPOST http://localhost:9200/lagen/_open")

    # on target, calculate doccount and compare
    remote_doccount = run("curl -s http://localhost:9200/lagen/_count?pretty=true|grep count")
    assert local_doccount == remote_doccount
Example #42
def update_brew_cache(dry_run=None, no_auto_update=None):
    """Rsync's remote brew cache to remote brew cache.

    run:
         brew update && fab -H <host> deploy.update_brew_cache --user=<user>
    so that the local brew cache is updated before the task starts.
    """
    brew_cache = '~/Library/Caches/Homebrew'
    homebrew = '/usr/local/Homebrew'
    if dry_run:
        if not exists(homebrew):
            warn(yellow(f'{env.host}:  homebrew folder missing'))
        else:
            warn(f'{env.host}:  homebrew folder OK')
        if not exists(brew_cache):
            warn(yellow(f'{env.host}: brew_cache folder missing'))
        else:
            warn(f'{env.host}: brew_cache folder OK')
    else:
        rsync_project(local_dir=homebrew + '/',
                      remote_dir=homebrew,
                      delete=True)
        rsync_project(local_dir=brew_cache + '/',
                      remote_dir=brew_cache,
                      delete=True)
        if no_auto_update:
            run('export HOMEBREW_NO_AUTO_UPDATE=1 && brew install wget')
        else:
            result = run('brew update')
            if 'Error' in result:
                if '/usr/local/share/man/man1/brew.1' in result:
                    run('rm -rf /usr/local/share/man/man1/brew.1',
                        warn_only=True)
                if '/usr/local/share/doc/homebrew' in result:
                    run('rm -rf /usr/local/share/doc/homebrew', warn_only=True)
                result = run('brew update')
                if 'Error' in result:
                    abort(result)
            run('brew install wget')
Example #43
def __deploy(environment):
    env.user = "******"
    env.host_string = '*****@*****.**'
    name = 'text.collaborativejs.org' if environment == 'production' else 'text.collaborativejs.stg'

    remote_dir = '/apps/%s' % name

    # sync project files
    rsync_project(remote_dir=remote_dir,
                  local_dir=PROJECT_PATH + '/',
                  exclude=[
                      "bin", ".git", ".gitignore", ".DS_Store", ".idea",
                      "circle.yml", "node_modules"
                  ],
                  delete=True)

    # install project dependencies
    with cd(remote_dir):
        run('npm install')

    # restart supervisor
    run('supervisorctl restart %s' % name)
Example #44
def deploy():
    """ rsync code to remote host """
    require('root', provided_by=PROVIDERS)
    # if env.environment == 'production':
    # 	if not console.confirm('Are you sure you want to deploy production?',
    # 						   default=False):
    # 		utils.abort('Production deployment aborted.')
    # defaults rsync options:
    # -pthrvz
    # -p preserve permissions
    # -t preserve times
    # -h output numbers in a human-readable format
    # -r recurse into directories
    # -v increase verbosity
    # -z compress file data during the transfer
    extra_opts = env.extra_opts

    for src, dest in env.folders.iteritems():
        run('mkdir -p {}'.format(os.path.join(env.code_root, dest)))
        rsync_project(
            os.path.join(env.code_root, dest),
            os.path.join(BASEDIR, src),
            exclude=RSYNC_EXCLUDE,
            delete=True,
            extra_opts=extra_opts,
        )
    httpd_config()

    if (hasattr(env, 'supervisor_config_dir')):
        supervisor_config()

    if (hasattr(env, 'folders') and 'backend/' in env.folders):
        run("touch %s" % os.path.join(env.code_root, env.folders['backend/'],
                                      'archers.log'))
        run("chown -R nobody:nobody %s" % os.path.join(
            env.code_root, env.folders['backend/'], 'archers.log'))

    if (hasattr(env, 'supervisor_config_dir')):
        game_restart()
Example #45
def deploy():
    """ rsync code to remote host """
    require('root', provided_by=('staging',))
    # defaults rsync options:
    # -pthrvz
    # -p preserve permissions
    # -t preserve times
    # -h output numbers in a human-readable format
    # -r recurse into directories
    # -v increase verbosity
    # -z compress file data during the transfer

    extra_opts = '--omit-dir-times'
    rsync_project(
        env.root,
        exclude=RSYNC_EXCLUDE,
        delete=True,
        extra_opts=extra_opts,
    )
    #touch()
    # run_migrations()
    apache_restart()
Example #46
def migratedb():
    setup_paths()

    require("jdbc_url", "jdbc_username", "jdbc_password", "changelog_filename")

    local_tempdir = tempfile.mkdtemp()
    local("tar -C'%s' -xzf '%s'" % (local_tempdir, env.db_script_archive))

    remote_tempdir = run('mktemp -d')
    rsync_project(local_dir="%s/" % local_tempdir,
                  remote_dir="%s/" % remote_tempdir,
                  delete=True)

    with cd(os.path.join(remote_tempdir, 'liquibase', 'changelog')):
        run("sh /usr/bin/liquibase" + " --driver=com.mysql.jdbc.Driver" +
            " --classpath=/usr/share/java/mysql-connector-java.jar" +
            " --url=%s" % (env.jdbc_url) + " --username=%s" %
            (env.jdbc_username) + " --password=%s" % (env.jdbc_password) +
            " --changeLogFile=%s" % (env.changelog_filename) + " update")

    run('rm -rf %s' % (remote_tempdir))
    shutil.rmtree(local_tempdir)
Example #47
def deploy(*args):
    """Deploys frontend and backend code to the server if the checking step
    did not report any problems"""
    execute('check.deploy')

    step('\nCompiling static sources...')
    run_local('yarn run prod')

    step('\nPushing changes...')
    run_local('git push origin %(box_branch)s')

    step('\nDeploying new code on server...')
    with cd('%(box_domain)s'):
        run('git fetch')
        run('git reset --hard origin/%(box_branch)s')
        run('find . -name "*.pyc" -delete')
        run('venv/bin/pip install -r requirements.txt')
        run('venv/bin/python manage.py migrate --noinput')

    step('\nUploading static files...')
    rsync_project(
        local_dir='static/',
        remote_dir='%(box_domain)s/static/' % env,
        delete=('clear' in args),
    )

    step('\nCollecting static files...')
    with cd('%(box_domain)s'):
        run('venv/bin/python manage.py collectstatic --noinput')

    step('\nRunning system checks on server...')
    with cd('%(box_domain)s'):
        run('venv/bin/python manage.py check --deploy')

    step('\nRestarting server process...')
    for line in env['box_restart']:
        run(line)

    execute('git.fetch_remote')
Example #48
def sync_media_to_server():
    """Sync media to server from local filesystem."""
    base_path, code_path, git_url, repo_alias, site_name = get_vars()
    remote_path = os.path.join(code_path, 'django_project', 'media')
    local_path = os.path.join(
        os.path.dirname(__file__), 'django_project', 'media/')
    rsync_project(
        remote_path,
        local_dir=local_path,
        exclude=['*.pyc', '*.py', '.DS_Store'])

    # Now our sqlite db
    remote_path = os.path.join(
        code_path, 'resources', 'sqlite', 'projecta.db')
    local_path = os.path.join(
        os.path.dirname(__file__), 'resources/sqlite/projecta.db')
    rsync_project(
        remote_path,
        local_dir=local_path,
        exclude=['*.pyc', '*.py', '.DS_Store'])
    set_media_permissions(code_path)
    set_db_permissions()
Example #49
def rsync_package_and_config():
    """
    Sync the local source archive and config files to the deployment directory on the target server.
    :return:
    """
    logger.info('=' * 30)
    logger.info('Syncing the local source archive and config files to the deployment directory on the target server')

    if not files.exists(REMOTE_TEMP_PATH):
        logger.info('make directory for save source zip file: %s' %
                    REMOTE_TEMP_PATH)
        sudo('mkdir -p %s' % REMOTE_TEMP_PATH)
        sudo('chown -R %s:%s %s' %
             (api.env.user, api.env.user, REMOTE_TEMP_PATH))
        logger.info('Done.')

    source_and_config = os.path.join(LOCAL_TEMP_PATH, '*')
    project.rsync_project(
        local_dir=source_and_config,
        remote_dir=REMOTE_TEMP_PATH,
        delete=True,
        ssh_opts='-o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null')
Example #50
def rsync():
    require('root', provided_by=('production', ))
    # if env.environment == 'production':
    # 	if not console.confirm('Are you sure you want to deploy production?',
    # 						   default=False):
    # 		utils.abort('Production deployment aborted.')
    # defaults rsync options:
    # -pthrvz
    # -p preserve permissions
    # -t preserve times
    # -h output numbers in a human-readable format
    # -r recurse into directories
    # -v increase verbosity
    # -z compress file data during the transfer
    extra_opts = '-l --omit-dir-times'
    rsync_project(
        env.code_root,
        '.',
        exclude=RSYNC_EXCLUDE,
        delete=True,
        extra_opts=extra_opts,
    )
Example #51
def get_data():
    """
    Copy the data from remote to local
    """

    DATASETS = [
        ('experiments/mouseretina/sparkdatacv', 'experiments/mouseretina/',
         ['predlinks.pickle', 'assign.pickle', 'aggstats.pickle']),
        ('experiments/synthdifferent/sparkdata', 'experiments/synthdifferent/',
         ['aggstats.pickle']),
        ('experiments/celegans/sparkdatacv', 'experiments/celegans/',
         ['predlinks.pickle', 'assign.pickle', 'aggstats.pickle'])
    ]

    for remote_dir, local_dir, globs in DATASETS:

        for g in globs:
            project.rsync_project(
                "/data/connect-disco-paper/" + remote_dir,
                local_dir=local_dir,
                extra_opts="--include '%s' --include='*/' --exclude='*' " % g,
                upload=False)
Example #52
def deploy():
    puts('> Cleaning up previous backup and staging dir')
    run('rm -rf %s %s' % (backup_dir, staging_dir))

    puts('> Preparing staging')
    run('cp -r %s %s' % (target_dir, staging_dir))

    puts('> Uploading changes')
    with cd(staging_dir):
        with hide('stdout'):
            extra_opts = '--omit-dir-times'
            rsync_project(
                env.cwd,
                './',
                delete=True,
                exclude=['.git', '*.pyc'],
                extra_opts=extra_opts,
            )

    puts('> Switching changes to live')
    run('mv %s %s' % (target_dir, backup_dir))
    run('mv %s %s' % (staging_dir, target_dir))
Example #53
def get_checkpoints_to_local(model_filter="", ext='*'):
    if 'lift' in env.host:
        print("USING LIFT REMOTE")
        REMOTE_CHECKPOINT_DIR = "/data/ericj/nmr/respredict/checkpoints/"
    else:
        REMOTE_CHECKPOINT_DIR = "/data/jonas/nmr/respredict/checkpoints/"
    remote_filename = "/tmp/.{}.filelist".format(time.time())
    print("MODEL FITLER IS '{}'".format(model_filter))
    total_glob = f"{model_filter}*.{ext}"
    run(f'find {REMOTE_CHECKPOINT_DIR} -name "{total_glob}" > {remote_filename}',
        quiet=True)
    print('command done')
    get_res = get(remote_filename)
    ls_str = open(get_res[0], 'r').read()

    files = [f.strip() for f in ls_str.split("\n")]
    print("got list of files")
    #pickle.dump(files, open("files.pickle",'wb'))
    # get the latest checkpoint for each and the meta
    traj_files = files  # [f for f in files if f.endswith(f".{ext}")]

    to_get_files = []
    for m in traj_files:
        if model_filter is not "":
            if not fnmatch.fnmatch(m, "*" + total_glob):
                continue
        to_get_files.append(os.path.basename(m))
    print("getting", len(to_get_files), "files")

    list_of_filenames = ".{}.remote.extfile".format(time.time())
    with open(list_of_filenames, 'w') as fid:
        for f in to_get_files:
            fid.write("{}\n".format(f))

    project.rsync_project("/data/jonas/nmr/respredict/checkpoints",
                          local_dir="checkpoints",
                          extra_opts=f'--files-from={list_of_filenames}',
                          upload=False)
Example #54
def deploy():
    """ rsync code to remote host """
    require('root', provided_by=('pro',))
    if env.environment == 'pro':
        if not console.confirm('Are you sure you want to deploy production?', default=False):
            utils.abort('Production deployment aborted.')
    # defaults rsync options:
    # -pthrvz
    # -p preserve permissions
    # -t preserve times
    # -h output numbers in a human-readable format
    # -r recurse into directories
    # -v increase verbosity
    # -z compress file data during the transfer
    extra_opts = '--omit-dir-times'
    rsync_project(
        remote_dir=env.root,
        exclude=RSYNC_EXCLUDE,
        delete=True,
        extra_opts=extra_opts,
    )
    touch()
    update_apache_conf()
Example #55
def xdeploy():
    rsync_project(live_dir,
                  local_dir='%sreturns' % local_dir,
                  exclude=['*.pyc', '.git*'],
                  delete=True)
    with cd('%sreturns' % live_dir):
        print(green("get dependencies if any"))
        run('go get')
        print(green("build"))
        run('go build')
        print(green("install new"))
        run('go install')
    # print(green("populating redis"))
    # run("go run %sreturns/scripts/cache-codes.go" % live_dir)
    with cd(install_dir):
        if exists("returns"):
            print(red("remove old returns"))
            run("rm returns")
        print(green("copy new returns"))
        run("cp /home/focus/go/bin/returns .")
    print(green("start service"))
    restart_returns()
    return
Example #56
def deploy():

    v = conf.version

    # make a backup of the old directory
    run("rm -rf /www/bokeh/en/%s.bak" % v)
    run("mkdir -p /www/bokeh/en/%s" % v)
    run("cp -ar /www/bokeh/en/%s /www/bokeh/en/%s.bak" % (v, v))

    # switch latest symlink to archive docs
    run("rm /www/bokeh/en/latest")
    run("ln -s /www/bokeh/en/%s.bak /www/bokeh/en/latest" % v)

    rsync_project(local_dir="_build/html/",
                  remote_dir="/www/bokeh/en/%s" % v,
                  delete=True)

    # set permissions
    run("chmod -R g+w /www/bokeh/en/%s" % v)

    # switch the current symlink to new docs
    run("rm /www/bokeh/en/latest")
    run("ln -s /www/bokeh/en/%s /www/bokeh/en/latest" % v)
Example #57
def deploy():
    """Performs deployment of previously built binaries to target"""

    #build()
    #run_tests()

    print '------------ DEPLOYING PROJECT ------------'
    require('root_path', 'remote_deploy_path')

    # Upload
    #run('mkdir -p %s/amee' % env.remote_deploy_path)

    # Sync custom bins
    rsync_project(
        local_dir='%s/bin' % (env.root_path),
        remote_dir='%s/' % (env.remote_deploy_path),
    )

    rsync_project(
        local_dir='%s/amee' % (env.root_path),
        remote_dir='%s' % (env.remote_deploy_path),
        exclude=['build', 'nodes', 'CMakeFiles'],
    )
Example #58
def deploy():
    local('git push')
    git_rev = subprocess.check_output(['git', 'describe',
                                       '--always']).decode('utf-8').strip()
    open('frontend/.env', 'w').write(f'REACT_APP_GIT_REV={git_rev}\n')
    rsync_project(remote_dir='~/code/suggestion/models/',
                  local_dir='models/',
                  delete=True)
    with cd('~/code/suggestion'):
        run('git pull')
    with lcd('frontend'):
        local('npm run build')
    rsync_project(remote_dir='~/code/suggestion/frontend/build/',
                  local_dir='frontend/build/',
                  delete=True)
    # rsync -Pax models/ megacomplete-aws:/home/ubuntu/code/suggestion/models/
    with lcd('frontend'):
        local(
            f'sentry-cli releases -o kenneth-arnold -p suggestionfrontend new {git_rev}'
        )
        local(
            f'sentry-cli releases -o kenneth-arnold -p suggestionfrontend files {git_rev} upload-sourcemaps src build'
        )
Example #59
def haproxy_install():
    """Install and configure haproxy.
  HAProxy is not controlled by the prod-requirements file, and not easily versioned. As such, we install it in its
  own directory.
  TODO(marc): replace with yum package once 1.5 is stable and rolled out to AWS.
  """
    # rsync the haproxy source.
    fprint('Rsync thirdparty/haproxy ~/haproxy')
    rsync_project(local_dir='third_party/haproxy/',
                  remote_dir='~/haproxy/',
                  ssh_opts='-o StrictHostKeyChecking=no')

    # build haproxy and install it in ~/bin.
    fprint('Building haproxy')
    run('haproxy/build.sh ~/')

    # Concatenate the certificate and key into a single file (this is expected by haproxy) and push it.
    fprint('Generating viewfinder.pem for haproxy')
    vf_passphrase = load_passphrase_from_file()
    # Staging and prod use the same certs.
    local(
        'scripts/generate_haproxy_certificate.sh viewfinder.co %s viewfinder.pem'
        % vf_passphrase)
    run('mkdir -p ~/conf')
    run('rm -f ~/conf/viewfinder.pem')
    put('viewfinder.pem', '~/conf/viewfinder.pem')
    run('chmod 400 ~/conf/viewfinder.pem')

    # Remove local file.
    local('rm -f viewfinder.pem')

    # Install the config files.
    fprint('Pushing haproxy configs')
    assert env.nodetype, 'no nodetype specified'
    run('ln -f -s ~/viewfinder/scripts/haproxy.conf ~/conf/haproxy.conf')
    run('ln -f -s ~/viewfinder/scripts/haproxy.redirect.%s.conf ~/conf/haproxy.redirect.conf'
        % env.nodetype.lower())
Example #60
def virtualenv_install():
    """Install the latest virtual environment if needed.
  We do nothing if the env is already the latest.
  We do install the new environment even if we are using the old style.
  This does not activate (symlink) the newly installed environment.
  """
    # Installs the latest virtual environment from the local prod-requirements.txt.
    prod_rev = latest_requirements_revision()
    assert re.match(r'[0-9a-f]+', prod_rev)

    active_env_rev = active_env()
    if prod_rev == active_env_rev:
        assert virtualenv_verify(prod_rev), 'Active environment is not valid'
        return

    env_dir = 'env.%s' % prod_rev
    package_dir = 'python-package.%s' % prod_rev
    requirements_file = 'prod-requirements.txt.%s' % prod_rev
    if exists(env_dir):
        fprint(
            'prod-requirements (rev %s) already installed, but not active.' %
            prod_rev)
    else:
        fprint('installing environment from prod-requirements (rev %s)' %
               prod_rev)
        run('rm -rf ~/%s ~/%s ~/%s' %
            (env_dir, package_dir, requirements_file))
        rsync_project(local_dir='third_party/python-package/',
                      remote_dir='~/%s/' % package_dir,
                      ssh_opts='-o StrictHostKeyChecking=no')
        put('scripts/prod-requirements.txt', '~/%s' % requirements_file)
        run('python2.7 ~/%s/virtualenv.py --never-download ~/%s/viewfinder' %
            (package_dir, env_dir))

    # Let fabric surface the failure.
    run('~/%s/viewfinder/bin/pip install -f file://$HOME/%s --no-index -r ~/%s'
        % (env_dir, package_dir, requirements_file))