Example No. 1
def XXXX_deploy ():
    """
    Deploy the packages in the deployment machines
    """
    print(green("Installing packages at %s" % str(env.host_string)))

    if confirm(red('Install the packages at the %s?' % (env.host_string)), default = False):
        print(yellow("... stopping XXXX"))
        if _exists('/etc/init.d/XXXX'):
            sudo('service XXXX stop')
            sudo('rm -f /etc/init.d/XXXX')

        with cd(env.admin.prefix):
            print(yellow("... cleaning up old RPMs"))
            if not _exists('tmp'): run('mkdir tmp')
            run('rm -rf tmp/*')

        directory = os.path.join(env.admin.prefix, 'tmp')
        with cd(directory):
            print(yellow("... uploading RPMs"))
            for f in env.packages.rpms:
                put(os.path.join(directory, f), '.')

            print(yellow("... installing software"))
            sudo('yum install -R 2 -q -y --nogpgcheck  *.rpm')

            print(red("... XXXX is STOPPED at %s!" % env.host_string))
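
This task leans on Fabric 1.x helpers that the snippet does not import. A minimal sketch of the imports it assumes (treating _exists as an alias for fabric.contrib.files.exists is an assumption):

import os
from fabric.api import cd, env, put, run, sudo
from fabric.colors import green, red, yellow
from fabric.contrib.console import confirm
from fabric.contrib.files import exists as _exists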
Example No. 2
def deploy(app_env):
    """Deploy it to server"""
    if not app_env:
        print "fab pack deploy:<ENV>"
        sys.exit(1)

    with open('.newrelic_key') as f:
        newrelic_key = f.read().strip()
    if not newrelic_key:
        print "cannot find newrelic_key in .newrelic_key file"
        sys.exit(1)

    app_path = '/var/www/%s/%s' % (APP_NAME, app_env)
    out = run('ls -t %s | grep -v current | grep daimaduan.com' % app_path)
    versions = [i.strip() for i in out.split("\n")]
    # figure out the release name and version
    dist = local('python setup.py --fullname', capture=True).strip()
    # upload the source tarball to the temporary folder on the server
    put('dist/%s.tar.gz' % dist, '%s/%s.tar.gz' % (app_path, dist))
    with cd(app_path):
        run('tar xzf %s.tar.gz' % dist)
    with cd('%s/%s' % (app_path, dist)):
        run('%s/venv/bin/python setup.py install > /dev/null 2>&1' % app_path)
    run('rm -f %s/current' % app_path)
    run('ln -s %s/%s/daimaduan %s/current' % (app_path, dist, app_path))
    run('cp %s/shared/config.cfg %s/current' % (app_path, app_path))
    run('cp %s/shared/deploy.py %s/current' % (app_path, app_path))

    # touching uwsgi ini file will reload this app
    sudo('touch /etc/uwsgi.d/daimaduan_%s.ini' % app_env)

    run('rm -f %s/%s.tar.gz' % (app_path, dist))

    # after deploying, we need to test if deployment succeed
    is_deploy_succeed = True
    resp = urllib2.urlopen(TEST_WEBSITE[app_env])
    if resp.code != 200:
        is_deploy_succeed = False
        if versions:
            print "Deploy failed, switch back to previous version"
            run('rm -f %s/current' % app_path)
            run('ln -s %s/%s/daimaduan %s/current' % (app_path, versions[0], app_path))
            sudo('touch /etc/uwsgi.d/daimaduan_%s.ini' % app_env)
            sys.exit(1)

    # clean old versions
    if is_deploy_succeed:
        versions.insert(0, dist)
    else:
        versions.append(dist)
    if len(versions) > 4:
        versions = ["%s/%s" % (app_path, i) for i in versions]
        versions_to_delete = " ".join(versions[3:])
        run('rm -rf %s' % versions_to_delete)

    # send deployments notification to newrelic
    version = local('python setup.py --version', capture=True).strip()
    local('curl -H "x-api-key:%s" -d "deployment[application_id]=%s" '
          '-d "deployment[revision]=%s" -d "deployment[user]=David Xie" '
          'https://api.newrelic.com/deployments.xml' % (newrelic_key, NEWRELIC_APPLICATION_ID[app_env], version))
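
Per the task's own usage message it is invoked as "fab pack deploy:<ENV>" after an sdist has been built. A hedged sketch of the module-level configuration it expects (the names come from the code above; the values and environment keys are placeholders):

APP_NAME = 'daimaduan'  # placeholder value
TEST_WEBSITE = {
    'production': 'https://daimaduan.com',       # placeholder URL
    'staging': 'https://staging.daimaduan.com',  # placeholder URL
}
NEWRELIC_APPLICATION_ID = {
    'production': '0000000',  # placeholder ID
    'staging': '0000000',     # placeholder ID
}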
def update_project(project_name, env_type):
    """ Updates the source files, and then consecutively runs syncdb, migrate
    and collectstatic. The env_type argument shows the environment type
    development/production/etc."""
    user_home_path = run('echo $HOME')
    project_path = os.path.join(user_home_path, project_name)
    activate_prefix = '. %s' % os.path.join(project_path, 'bin', 'activate')
    source_path = os.path.join(project_path, SOURCE_DIRECTORY_NAME) 
    with cd(source_path):
        run('git pull origin master')
        with prefix(activate_prefix):
            run('pip install -r required_packages.txt')
            with cd(project_name):
                run('python manage.py syncdb')
                run('python manage.py migrate')
                run('python manage.py collectstatic --noinput')
    uwsgi_conf_name = '%s.%s.uwsgi' % (project_name, env_type)
    sudo('initctl reload-configuration')
    with settings(warn_only=True):
        result = sudo('initctl restart %s' % uwsgi_conf_name)
        if result.failed:
            result = sudo('initctl start %s' % uwsgi_conf_name)
            if result.failed:
                print 'Failed to restart/start job %s' % uwsgi_conf_name
    sudo('/etc/init.d/nginx restart')
def run(aws):
    op.run("echo Downloading Automatic-Server-Setup ...")
    with cm.cd("/var/www"):
        op.run("sudo rm -fr Automatic-Server-Setup")
        op.run("sudo git clone git://github.com/Open-org/Automatic-Server-Setup.git")
        op.run("sudo mv Automatic-Server-Setup/startsandbox.php /var/www/html/")
        op.run("sudo chown apache Automatic-Server-Setup/")

        op.run("echo Copy robots.txt file to main server folder ...")
        op.run("sudo cp /var/www/Automatic-Server-Setup/tools/robots.txt /var/www/html/")

    op.run("echo Downloading PhotoAccounting ...")
    with cm.cd("/var/www/html"):
        op.run("sudo git clone git://github.com/Open-org/PhotoAccounting.git")
        op.run("sudo mv /var/www/Automatic-Server-Setup/tools/index.php /var/www/html/")
        op.run(
            'sudo sed -i "1i host photo_accounting photo_editor 127.0.0.1/32 trust" /var/lib/pgsql9/data/pg_hba.conf'
        )
        op.run("sudo /etc/init.d/postgresql reload")
        op.run("sudo -u root sudo -u postgres psql -c \"CREATE USER photo_editor WITH PASSWORD 'Htbp4SAaxm6K'\"")
        op.run('sudo -u root sudo -u postgres psql -c "CREATE DATABASE photo_accounting OWNER photo_editor"')
        op.run(
            "sudo -u root sudo -u postgres psql -d photo_accounting -U photo_editor -h localhost -f /var/www/html/PhotoAccounting/sql/photo_accounting.sql"
        )
        op.run('sudo sed -i "1d" /var/lib/pgsql9/data/pg_hba.conf')
        op.run(
            'sudo sed -i "1i host    photo_accounting    photo_editor    127.0.0.1/32    md5" /var/lib/pgsql9/data/pg_hba.conf'
        )
        op.run("sudo /etc/init.d/postgresql reload")
Example No. 5
def setup_virtualenv(python=None):
    'Create a virtualenv and install required packages on the remote server.'
    python_opt = '--python=' + python if python else ''

    with cd('%(remote_path)s/%(build_dir)s' % env):
        # TODO: we should be using an http proxy here  (how?)
        # create the virtualenv under the build dir
        sudo('virtualenv --no-site-packages %s env' % (python_opt,),
             user=env.remote_acct)

    with cd('%(remote_path)s/%(build_dir)s' % env):
        # create the virtualenv under the build dir
        sudo('virtualenv --no-site-packages --prompt=\'[%s]\' %s env' \
            % (env['build_dir'], python_opt), user=env.remote_acct)
        # activate the environment and install required packages
        with prefix('source env/bin/activate'):
            pip_cmd = 'pip install -r pip-install-req.txt'
            if env.remote_proxy:
                pip_cmd += ' --proxy=%(remote_proxy)s' % env
            sudo(pip_cmd, user=env.remote_acct)
            if files.exists('../pip-local-req.txt'):
                pip_cmd = 'pip install -r ../pip-local-req.txt'
                if env.remote_proxy:
                    pip_cmd += ' --proxy=%(remote_proxy)s' % env
                sudo(pip_cmd, user=env.remote_acct)
Example No. 6
def cluster_start(slave_type='c4.2xlarge', slaves=2):
    print green('========= Starting Spark cluster =========')
    HADOOP_HOME = '/opt/dev/hadoop-2.6.2'
    SPARK_HOME = '/opt/dev/spark-1.5.1-bin-hadoop2.6'

    print 'Starting master...'
    master_inst = 'i-54c44cd9'
    local('aws ec2 start-instances --instance-ids={}'.format(master_inst))
    sleep(60)

    if slaves > 0:
        run_slave_cmd = ('aws ec2 run-instances --image-id ami-6d2a8a1e --instance-type {} --count {} '
                         '--key-name sm_spark_cluster --security-group-ids sg-921b7ff6').format(slave_type, slaves)
        out = json.loads(local(run_slave_cmd, capture=True))
        slave_hosts = [inst_out['PrivateDnsName'] for inst_out in out['Instances']]

        with cd(HADOOP_HOME):
            run('echo "localhost" > etc/hadoop/slaves')
            for host in slave_hosts:
                run('echo "{}" >> etc/hadoop/slaves'.format(host))

        with cd(SPARK_HOME):
            run('echo "localhost" > conf/slaves')
            for host in slave_hosts:
                run('echo "{}" >> conf/slaves'.format(host))

    with cd(HADOOP_HOME):
        run('sbin/start-dfs.sh')

    with cd(SPARK_HOME):
        run('sbin/start-all.sh')

    run('jps -l')
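
A minimal sketch of the imports this task assumes, plus how it might be invoked with Fabric's task-argument syntax (the instance, AMI and security-group IDs are hard-coded above):

import json
from time import sleep
from fabric.api import cd, local, run
from fabric.colors import green

# e.g.  fab cluster_start:slave_type=c4.4xlarge,slaves=4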
Example No. 7
def deploy(user='******', git_hash=None):
    source = 'https://github.com/rapidpro/data_api.git'
    proc_name = 'api'
    path = '/var/www/data_api'
    workon_home = '/var/www/.virtualenvs/api/bin/'

    print "Starting deployment"
    with settings(warn_only=True):
        if run("test -d %s" % path).failed:
            run("git clone %s %s" % (source, path))
            with cd(path):
                run("git config core.filemode false")
    with cd(path):
        run("git stash")
        if not git_hash:
            run("git pull %s master" % source)
        else:
            run("git fetch")
            run("git checkout %s" % git_hash)
        run('%spip install -r requirements.txt' % workon_home)
        run('%spython manage.py collectstatic --noinput' % workon_home)

        sudo("chown -R %s:%s ." % (user, user))
        sudo("chmod -R ug+rwx .")

    sudo("supervisorctl restart %s" % proc_name)
Example No. 8
def deploy():
    timestamp = datetime.datetime.utcnow().strftime(env.timestamp_format)
    version = local('git rev-parse HEAD', capture=True).stdout.strip()
    run('mkdir -p %s' % env.dest)
    with cd(env.dest):
        run('mkdir %s' % timestamp)
        with cd(timestamp):
            remote_archive = '/tmp/weddingplanner-%s-%s.tar.gz' % (timestamp, version)
            # TODO: use rsync in a '3-way' mode (--link-dest) to minimize files transfered
            #  (do the same for the locally built virtualenv)
            put('build/deploy.tar.gz', remote_archive)
            # TODO: remove --no-same-owner when built with fakeroot
            run('tar xfz %s --no-same-owner' % remote_archive)

            with hide('stdout'):
                run('virtualenv env')

                # NOTE: Temporary solution: install through running pip remotely
                run('env/bin/pip install -r requirements/%s.txt' % env.requirements)
                # NOTE: take it from the settings...
                run('mkdir assets')
                # NOTE: can also be run locally
                run('env/bin/python manage collectstatic -v 0 -l --noinput -c')

            run('env/bin/python manage migrate')

        with settings(warn_only=True):
            result =  run('supervisorctl status | grep "%s\s\+RUNNING"' % env.service_name)

        if not result.failed:
            run('supervisorctl stop %s' % env.service_name)

        run('ln -sfn %s current' % timestamp)
        run('supervisorctl start %s' % env.service_name)
Example No. 9
def update_source_code(path=None, deploy=False):
    """ Update project's source dir from git repo"""
    # If no repo is present, clone from localhost
    path = env.SRC_PATH if path is None else path

    if not exists(path):
        # Make a new deployment key if not already there
        if not exists(env.DEPLOYMENT_KEY, use_sudo=True):
            create_deployment_key()
        if deploy:
            git_cmd = 'git clone [email protected]:codingindonesia/tolong.git {0}'.format(path)
        else:
            git_cmd = 'cd {0} && git pull'.format(path)
        require.files.directory(env.PROJECT_PATH, use_sudo=True)
        sudo('chown -R www-data {0}'.format(env.PROJECT_PATH))
        with cd(env.PROJECT_PATH):
            with settings(warn_only=True):
                result = sudo(git_cmd, user='******')
            if result.failed:
                abort("""Failed to clone git repository. Make sure that root
                    is allowed to clone repository by running this
                    on the server:
                    sudo -u www-data git clone [email protected]:codingindonesia/tolong.git""".format(env.REPO_NAME))

    # Get current git hash on the local machine
    git_hash = local('git rev-parse HEAD', capture=True)
    with cd(path):
        sudo('git fetch', user='******')
        sudo('git checkout {0}'.format(git_hash), user='******')
    if path == env.SRC_PATH:
        with cd(env.PROJECT_PATH):
            sudo('echo "{0}:{1}:{2}" >> revisions.log'.format(env.TIME, git_hash, env.user))
Example No. 10
def rollback():
    """
        Rolls back currently deployed version to its predecessor
    """
    with cd(env.basepath):
        run('mv current/rollback rollback')
        run('mv current undeployed')
        run('mv rollback current')
        version = run('readlink current')
        previous = run('readlink undeployed')
        puts(green('>>> Rolled back from %(previous)s to %(version)s' % { 'previous': previous, 'version': version }))
        run('rm -fr %s' % previous)
        run('rm undeployed')
        sudo('service nginx reload')
    with cd(env.nodejs):
        for n in [1, 2]:
            with settings(warn_only=True):
                sudo('stop nodejs N=%s' % n)
            run('mv instance%s/rollback rollback%s' % (n, n))
            run('mv instance%s undeployed' % n)
            run('mv rollback%s instance%s' % (n, n))
            version = run('readlink instance%s' % n)
            previous = run('readlink undeployed')
            puts(green('>>> Rolled back nodejs %(n)s from %(previous)s to %(version)s' % { 'n': n, 'previous': previous, 'version': version }))
            run('rm -fr %s' % previous)
            run('rm undeployed')
            sudo('start nodejs N=%s' % n)
Example No. 11
def bootstrap():
    with cd(env.path):
        print("\nStep 1: Install required PHP extensions/apps")

        if confirm('Continue installing requirements? Can skip if already installed.'):
            env.run('sudo setup_env.sh')

        print("\nStep 2: Database and basic Wordpress setup")

        with settings(warn_only=True):
            env.run('rm wp-config.php');
        env.run(env.prefix + './manage.sh setup_wp-config')

        create_db()
        env.run(env.prefix + './manage.sh install')
        env.run(env.prefix + './manage.sh install_network')

        with settings(warn_only=True):
            env.run('rm wp-config.php');
        env.run(env.prefix + './manage.sh setup_wp-config --finish')

        print("\nStep 3: Setup plugins")

        env.run(env.prefix + './manage.sh setup_plugins')

        print("\nStep 4: Cleanup, create blogs")

        env.run(env.prefix + './manage.sh set_root_blog_defaults')

    if confirm("Create child blogs?"): create_blogs()

    with cd(env.path):
        env.run(env.prefix + './manage.sh setup_upload_dirs')
Example No. 12
def put_dir_with_sudo(local_path, remote_path):
    # TODO: implement remote_path=None & return remote_path

    with lcd(local_path):
        source_basename = os.path.basename(local_path)

        print('Uploading {0} to {1}:{2}…'.format(local_path, env.host_string,
              remote_path), end='')
        sys.stdout.flush()

        save_file_name = '../{0}-copy-{1}.tar.gz'.format(source_basename,
                                                         generate_random_name())
        while os.path.exists(save_file_name):
            save_file_name = '../{0}-copy-{1}.tar.gz'.format(
                source_basename, generate_random_name())

        with hide('running', 'stdout', 'stderr'):
            local("tar -czf '{0}' . ".format(save_file_name))

        remote_dirname, remote_basename = remote_path.rsplit(os.sep, 1)

        with hide('running', 'stdout', 'stderr'):
            put(save_file_name, remote_dirname, use_sudo=True)
            local('rm -f "{0}"'.format(save_file_name))

        with cd(remote_dirname):
            with hide('running', 'stdout', 'stderr'):
                sudo('mkdir -p "{0}"'.format(remote_basename))

                with cd(remote_basename):
                    sudo('tar -xzf "{0}"'.format(save_file_name))
                    sudo('rm -f "{0}"'.format(save_file_name))

            print(' done.')
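
put_dir_with_sudo() calls a generate_random_name() helper that is not shown; a hypothetical stand-in that would satisfy the calls above:

import random
import string

def generate_random_name(length=8):
    # Hypothetical helper (assumption): random suffix for the temporary tarball name
    chars = string.ascii_lowercase + string.digits
    return ''.join(random.choice(chars) for _ in range(length))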
Example No. 13
    def inner_initialize_1():
        run('mkdir %s ' % dir_name)
        series_dir = os.path.dirname(__file__)
        local_benchmark_script = os.path.join(series_dir, benchmark_script)
        if LOCAL:
            with cd(dir_name):
                run('rm -rf ./*')
                put(local_benchmark_script, benchmark_script, mode=0744)
            #                run('sed -i "s/\/opt\/ycsb\/bin\/ycsb \$\*/python nbody.py \$\*/g" %s' % benchmark_script)
        else:
            # if not LOCAL
            with cd(dir_name):
                put(local_benchmark_script, benchmark_script, mode=0744)

        # continue init
        # clear all the tasks that submitted so far
        with cd(dir_name):
            tasks = run('atq').split('\r\n')
            tid = []
            for task in tasks:
                m = search('^(\d+)\t', task)
                if m:
                    tid.append(m.group(1))
            if tid:
                run('atrm %s' % ' '.join(tid))
            print green('host %s initialized ' % hosts.env.host)
Example No. 14
def _checkout_or_update_git(vcs_root_dir, revision=None):
    # if the .git directory exists, do an update, otherwise do
    # a clone
    if files.exists(path.join(vcs_root_dir, ".git")):
        with cd(vcs_root_dir):
            sudo_or_run('git remote rm origin')
            sudo_or_run('git remote add origin %s' % env.repository)
            # fetch now, merge later (if on branch)
            sudo_or_run('git fetch origin')

        if revision is None:
            revision = env.revision

        with cd(vcs_root_dir):
            stash_result = sudo_or_run('git stash')
            sudo_or_run('git checkout %s' % revision)
            # check if revision is a branch, and do a merge if it is
            with settings(warn_only=True):
                rev_is_branch = sudo_or_run('git branch -r | grep %s' % revision)
            # use old fabric style here to support Ubuntu 10.04
            if not rev_is_branch.failed:
                sudo_or_run('git merge origin/%s' % revision)
            # if we did a stash, now undo it
            if not stash_result.startswith("No local changes"):
                sudo_or_run('git stash pop')
    else:
        with cd(env.server_project_home):
            default_branch = env.default_branch.get(env.environment, 'master')
            sudo_or_run('git clone -b %s %s %s' %
                    (default_branch, env.repository, vcs_root_dir))

    if files.exists(path.join(vcs_root_dir, ".gitmodules")):
        with cd(vcs_root_dir):
            sudo_or_run('git submodule update --init')
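
The checkout goes through a sudo_or_run() wrapper that is not shown; a minimal sketch, assuming a flag on env decides whether commands run under sudo:

from fabric.api import env, run, sudo

def sudo_or_run(cmd):
    # Assumption: env.use_sudo selects between sudo() and run();
    # the result is returned so callers can inspect .failed and the output
    if getattr(env, 'use_sudo', False):
        return sudo(cmd)
    return run(cmd)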
Example No. 15
    def _install_requirements(self):
        # needed for private repos, local keys get forwarded
        env.forward_agent = True
        env.user = config['USER']
        env.remote_home = "/home/" + config['USER']

        virtualenv_dir = '%s/.virtualenvs/%s' % (env.remote_home, env.project_name)

        info("DJANGO: install python modules in virtualenv %s" % virtualenv_dir)
        # from requirements.txt
        with prefix('. %s/bin/activate' % virtualenv_dir):
            with cd("%s/code/%s" % (env.remote_home, env.project_name)):
                cmd = "pip install " \
                      "--download-cache=/var/cache/pip " \
                      "-r requirements.txt"
                debug("PIP: " + cmd)
                out = self.execute(run, cmd)
                for host, value in out.iteritems():
                    debug(value, host=host)

        # from class variable
        if hasattr(self.__class__, "python_packages"):
            for package in self.__class__.python_packages.split(" "):
                with prefix('. %s/.virtualenvs/%s/bin/activate' % (env.remote_home, env.project_name)):
                    with cd("%s/code/%s" % (env.remote_home, env.project_name)):
                        out = self.execute(run, "pip install --download-cache=/var/cache/pip %s"
                                                % (package))
                        for host, value in out.iteritems():
                            debug(value, host=host)
Example No. 16
def deploy(name):
    """
    Pull the latest code from remote
    """
    project_root, env = _app_paths(name)
    with cd(project_root):
        run('git pull', pty=False)

    environ = supervisor._get_environment(name)

    with cd(project_root), prefix('source %s/bin/activate' % env), hide('running'), shell_env(**environ):
        install_requirements(name)

        # initialize the database
        _info("./manage.py syncdb ... \n")
        run('python manage.py syncdb')

        # run south migrations
        _info("./manage.py migrate ... \n")
        run('python manage.py migrate', quiet=True)

        # collect static files
        _info("./manage.py collectstatic --noinput ... \n")
        run('python manage.py collectstatic --noinput')
        supervisor.restart(name)
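
This task relies on project-local helpers (_app_paths, _info, install_requirements and a supervisor module) that are not shown. Hypothetical sketches of the two simplest ones, assuming each app lives under /srv/<name> with its virtualenv in <root>/env:

import os
from fabric.colors import cyan

def _app_paths(name):
    # Assumption: project root and virtualenv layout
    project_root = os.path.join('/srv', name)
    return project_root, os.path.join(project_root, 'env')

def _info(text):
    # Assumption: colored informational output
    print(cyan(text))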
Example No. 17
def _deb_install_extras():
    with cd('/srv'):
        if not exists('gnupg'):
            sudo('mkdir gnupg')
        if not exists('aptcache'):
            sudo('mkdir aptcache ; chown autobuild-ceph:autobuild-ceph aptcache')

        sudo('chown autobuild-ceph:autobuild-ceph gnupg ; chmod 700 gnupg')
        with cd('gnupg'):
            if not exists('pubring.gpg'):
                # put doesn't honor cd() for some reason
                put('gnupg/pubring.gpg')
                put('gnupg/secring.gpg')
                sudo("mv /home/ubuntu/*.gpg ./")
                sudo('chown autobuild-ceph:autobuild-ceph pubring.gpg secring.gpg')
                sudo('chmod 600 pubring.gpg secring.gpg')
        if not exists('ceph-build'):
            sudo('git clone https://github.com/ceph/ceph-build.git')
        with cd('ceph-build'):
            sudo('git pull')
        if not exists('debian-base'):
            sudo('mkdir debian-base')
        with cd('debian-base'):
            for dist in ['squeeze','oneiric']:
                if not exists('%s.tgz' % (dist)):
                    sudo('wget -q http://ceph.newdream.net/qa/%s.tgz' % (dist))
        sudo('grep -q autobuild-ceph /etc/sudoers || echo "autobuild-ceph ALL=(ALL) NOPASSWD:ALL" >> /etc/sudoers')
Example No. 18
def sf_server_restore():
    """Restore seafile from latest backup in /backup/"""
    docker_vars = _sf_server_docker_vars()

    sf_stop()

    fabtools.require.files.directories(['/backup/data', '/backup/databases'])

    # restore library data
    run('rsync -az /backup/data/seafile/* /seafile/')

    # fix permissions / executable flags
    with cd('/seafile/seafile-server-%(seafile_version)s' % docker_vars):
        run('chmod +x *.sh')
        with cd('seafile/bin'):
            run('chmod +x *')

    # add latest link
    run('ln -s /seafile/seafile-server-%(seafile_version)s /seafile/seafile-server-latest' % docker_vars)

    # restore databases
    with cd('/seafile'):
        _make_old_copy(['ccnet/PeerMgr/usermgr.db', 'ccnet/GroupMgr/groupmgr.db',
                        'seafile-data/seafile.db', 'seahub.db'])

        run('sqlite3 ccnet/PeerMgr/usermgr.db < %s' % _latest_file('/backup/databases/usermgr.db.bak*'))
        run('sqlite3 ccnet/GroupMgr/groupmgr.db < %s' % _latest_file('/backup/databases/groupmgr.db.bak*'))
        run('sqlite3 seafile-data/seafile.db < %s' % _latest_file('/backup/databases/seafile.db.bak*'))
        run('sqlite3 seahub.db < %s ' % _latest_file('/backup/databases/seahub.db.bak*'))


    sf_start()
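
The restore depends on _make_old_copy() and _latest_file() helpers that are not shown; hedged sketches of what they might do on the remote host:

from fabric.api import run

def _make_old_copy(paths):
    # Assumption: keep a .old copy of each database file before it is overwritten
    for p in paths:
        run('cp -a %s %s.old' % (p, p))

def _latest_file(pattern):
    # Assumption: newest remote file matching the glob, by modification time
    return run('ls -t %s | head -n 1' % pattern).strip()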
Example No. 19
def build_openresty(version='1.9.15.1',configure_cmd=default_configure_cmd):

    make_cmd = 'make -j4'
    install_cmd = 'make all install DESTDIR=$PWD/buildoutput'

    source_file = 'openresty-%s.tar.gz' % (version,)
    source_url = 'http://openresty.org/download' #'http://10.131.237.143/openresty'

    ensure_local_dir('build-temp')

    with lcd('./build-temp'):
        if not local_file_exists(source_file):
            local('wget -O %s %s/%s' % (source_file,source_url,source_file))
            local('wget https://github.com/pintsized/lua-resty-http/archive/v0.07.tar.gz -O lua-resty-http.tar.gz')
            local('wget https://github.com/openresty/stream-lua-nginx-module/archive/master.tar.gz -O stream-lua-nginx-module.tar.gz')

        #console.confirm('Do you want to continue?', default=True)
        ensure_remote_dir('build-temp')
        put(source_file,'build-temp')
        put('lua-resty-http.tar.gz', 'build-temp')
        put('stream-lua-nginx-module.tar.gz', 'build-temp')
        with cd('build-temp'):
            run('tar xzf %s' % (source_file,))
            run('tar xzf lua-resty-http.tar.gz')
            run('tar xzf stream-lua-nginx-module.tar.gz')
            with cd('openresty-%s' % (version,)):
                # add lua-resty-http
                run('mv ../lua-resty-http-* bundle/lua-resty-http-0.07')
                run("sed -i 's/for my $key (qw(/for my $key (qw(http /g' configure")
                # add external modules
                run('mv ../stream-lua-nginx-module-master bundle/')
                configure_cmd+=' --with-stream --with-stream_ssl_module --add-module=bundle/stream-lua-nginx-module-master'
                run('ls -la bundle/')
                # build
                run('%s && %s && %s' % (configure_cmd,make_cmd,install_cmd))
Example No. 20
def bootstrap_python(extra_args=""):
    version = api.env['python-version']

    versionParsed = '.'.join(version.split('.')[:3])
    
    d = dict(version=versionParsed)
    
    prefix = api.env["python-path"]
    if not prefix:
        raise "No path for python set"
    save_path = api.env.path # the pwd may not yet exist
    api.env.path = "/"
    with cd('/'):
        runescalatable('mkdir -p %s' % prefix)
    #api.run("([-O %s])"%prefix)
    
    with asbuildoutuser():
      with cd('/tmp'):
        get_url('http://python.org/ftp/python/%(version)s/Python-%(version)s.tgz'%d)
        api.run('tar -xzf Python-%(version)s.tgz'%d)
        with cd('Python-%(version)s'%d):
#            api.run("sed 's/#readline/readline/' Modules/Setup.dist > TMPFILE && mv TMPFILE Modules/Setup.dist")
#            api.run("sed 's/#_socket/_socket/' Modules/Setup.dist > TMPFILE && mv TMPFILE Modules/Setup.dist")
            
            api.run('./configure --prefix=%(prefix)s  --enable-unicode=ucs4 --with-threads --with-readline --with-dbm --with-zlib --with-ssl --with-bz2 %(extra_args)s' % locals())
            api.run('make')
            runescalatable('make altinstall')
        api.run("rm -rf /tmp/Python-%(version)s"%d)
    api.env["system-python-use-not"] = True
    api.env.path = save_path
Example No. 21
def install_cairo():
    """
    installs latest version of pixman and cairo backend
    """
    # graphite is not satisfied with versions available through "yum install"
    if exists('/usr/local/lib/libcairo.so'):
        return
    sudo('yum -y -q install pkgconfig valgrind-devel libpng-devel freetype-devel fontconfig-devel')
    with cd('/tmp'):
        # install pixman
        sudo('rm -rf pixman*')
        run('wget http://cairographics.org/releases/pixman-0.20.2.tar.gz')
        run('tar xfz pixman-0.20.2.tar.gz')
        with cd('pixman-0.20.2'):
            with prefix('export PKG_CONFIG_PATH=/usr/lib/pkgconfig:/usr/lib64/pkgconfig:/usr/local/lib/pkgconfig'):
                run('./configure')
            sudo('make install')
            # install cairo
        sudo('rm -rf cairo*')
        run('wget http://cairographics.org/releases/cairo-1.10.2.tar.gz')
        run('tar xfz cairo-1.10.2.tar.gz')
        with cd('cairo-1.10.2'):
            with prefix('export PKG_CONFIG_PATH=/usr/lib/pkgconfig:/usr/lib64/pkgconfig:/usr/local/lib/pkgconfig'):
                run('./configure --enable-xlib=no --disable-gobject')
            sudo('make install')
Example No. 22
def deploy_project(project_name, env_type, repo):
    """ Deploys project to remote server, requires project name, environment
    type(development/production) and the repository of the project """
    import ipdb;ipdb.set_trace()
#     with cd(env.DEV_VIRTUALENV_PATH):
#         create_virtual_env(project_name)
        
    #make project dir
    project_path = os.path.abspath(os.path.join(env.DEV_PROJECT_DIR, project_name))
    run('mkdir -p {}'.format(project_path))
    
    with cd(project_path):
#         run('mkdir %s' % SOURCE_DIRECTORY_NAME)
        
        run('git clone %s .' % repo)
        with cd(SOURCE_DIRECTORY_NAME):
            source_path = run('pwd')
            nginx_uwsgi_conf_name = '%s.production.nginx.uwsgi' % project_name
            uwsgi_conf_name = '%s.production.uwsgi' % project_name
            with settings(warn_only=True):
                sudo('ln -s %s.conf /etc/nginx/sites-enabled/' % os.path.join(source_path,
                                                                         nginx_uwsgi_conf_name))
                sudo('ln -s %s.conf /etc/init/' % os.path.join(source_path,
                                                           uwsgi_conf_name))
            local_settings_path = os.path.join(source_path, project_name,
                                               project_name, 'settings',
                                               'local.py')
            if generate_local_config(project_name, local_settings_path):
                update_project(project_name, env_type)
Example No. 23
def install_packages_in(env, directory, patterns, remote_upload_dir="/tmp/"):
    """
    Install all the packages in the `directory` that match `pattern` in a remote machine.
    """
    SUBDIR = "inst"

    matches = set()
    for root, dirnames, filenames in os.walk(directory):
        for pattern in patterns:
            for filename in fnmatch.filter(filenames, pattern):
                full_filename = os.path.join(root, filename)
                matches.add(full_filename)
                break

    if len(matches) == 0:
        print(red("no packages to install in %s" % directory))
        return

    print(green("... the following packages will be installed"))
    print(green("...... %s" % ", ".join(matches)))

    with cd(remote_upload_dir):
        print(yellow("... cleaning up old packages"))
        if exists(SUBDIR):
            run("rm -rf {subdir}".format(subdir=SUBDIR))
        run("mkdir {subdir}".format(subdir=SUBDIR))

    with cd(os.path.join(remote_upload_dir, SUBDIR)):
        print(yellow("... uploading packages"))
        for f in matches:
            put(f, ".")

        print(yellow("... installing software"))
        sudo("dpkg --install  *.deb")
Example No. 24
def deploy_base():
    """
    Deploys the code and installs the base libraries for all server types
    """

    # update apt-cache library
    with hide('stdout', 'stderr'):
        sudo('apt-get update')

    # ubuntu goodies
    sudo("apt-get install --assume-yes build-essential python-pip python-dev python-mysqldb git-core sqlite3 python-mysqldb fabric python-lxml")

    # Make top-level folder if it doesn't exist
    sudo('mkdir -p /srv')

    with cd('/srv'):
        # Clone git repo if it doesn't exist
        if not exists('directedstudies'):
            sudo('git clone %s directedstudies' % github_repo)

        with cd('/srv/directedstudies/'):
            sudo('git pull')

            # Install newer pip version since Ubuntu ships with broken one
            sudo('pip install --upgrade pip')

            # Install python libs
            sudo('pip install -r requirements.txt')

            sudo('python setup.py develop')

    update_config_file()
Example No. 25
def bundlestrap():
    """
    Bootstrap the uploaded project package on the remote server.
    """
    ## Install bare server requirements
    if silentrun('which pip').failed:
        ops.sudo('easy_install pip')
    if silentrun('which virtualenv').failed:
        ops.sudo('pip install virtualenv')
    if silentrun('which fab').failed:
        ops.sudo('apt-get install python-dev')
        ops.sudo('pip install Fabric')
    deployment_dir = '~/%s/%s' % (settings.deployment_dir, env.role)
    ops.run('mkdir -p ' + deployment_dir)
    # temporarily disable .pydistutils.cfg, see https://github.com/pypa/virtualenv/issues/88
    pydistutils = files.exists('.pydistutils.cfg')
    if pydistutils:
        ops.run("mv ~/.pydistutils.cfg ~/.pydistutils.cfg.disabled")

    with ctx.cd(deployment_dir):
        ops.run('rm -rf %s' % prj.build_name)
        ops.run('tar xmzf ~/builds/%s.tar.gz' % prj.build_name)
        ops.run('tar xmzf project.tar.gz')
        ops.run('virtualenv %s/.ve --python=%s --system-site-packages' % (
            prj.build_name, settings.py_version
        ) + (' --distribute' if settings.use_distribute else ''))
        ops.run('%s/.ve/bin/pip install -I project-deps.zip' % prj.build_name)
        ops.run('rm -rf %s/.ve/build' % prj.build_name)
        ops.run('rm -f project-deps.zip project.tar.gz')

    with ctx.cd("%s/%s" % (deployment_dir, prj.build_name)):
        ops.run('.ve/bin/python setup.py develop')

    if pydistutils:
        ops.run("mv ~/.pydistutils.cfg.disabled ~/.pydistutils.cfg")
def deploy():
	#backup()
	_pack()

	if not dir_exists(REMOTE_PROJECT_PATH):
		sudo('virtualenv --no-site-packages /opt/drbenv')

	with cd('/opt/drbenv'):
		run('mkdir -p ' + PROJECT_NAME)
		run('mkdir -p tmp')
	
	put('latest.zip', REMOTE_PROJECT_PATH + '/' + PROJECT_NAME)

	with cd(REMOTE_PROJECT_PATH + '/' +PROJECT_NAME):
		sudo('unzip latest.zip')
		sudo('rm latest.zip')

	sudo('mkdir -p /var/www/'+ PROJECT_NAME +'/static')
	sudo('mkdir -p /var/www/'+ PROJECT_NAME +'/media')

	_install_deps()
	_migrate()
	_install_gunicorn()
	_gunicorn()
	_supa_gunicorn()
	_rmTemp()
	_collectstatic()
	restart()
Example No. 27
def notebook_gis_install():
    # install basemap
    with settings(warn_only=True):
        basemap = run('/home/%s/notebookenv/bin/pip freeze |grep basemap' % env.user)

    if not basemap:
        if not exists('/usr/lib/libgeos.so'):
            sudo('ln -s /usr/lib/libgeos_c.so /usr/lib/libgeos.so')

        with cd('/tmp/'):
            run('wget http://downloads.sourceforge.net/project/matplotlib/matplotlib-toolkits/basemap-1.0.6/basemap-1.0.6.tar.gz')
            run('tar -xzf basemap-1.0.6.tar.gz')
            with cd('basemap-1.0.6'):
                run('/home/%s/notebookenv/bin/python setup.py install' % env.user)
            run('rm -fR basemap-1.0.6')

    # install shapefile library
    with settings(warn_only=True):
        shapefile = run('/home/%s/notebookenv/bin/pip freeze |grep shapefile' % env.user)

    if not shapefile:
        with cd('/tmp/'):
            run('git clone https://github.com/adamw523/pyshp.git')
            with cd('pyshp'):
                run('/home/%s/notebookenv/bin/python setup.py install' % env.user)
            run('rm -fR pyshp')
Example No. 28
def install_infrastructure_server(publichost, admin_privkey_path, website_pubkey,
                                  leastauth_repo_gitdir, leastauth_commit_hash,
                                  secret_config_repo_gitdir, secret_config_commit_hash,
                                  stdout, stderr):
    """
    This is the code that sets up the infrastructure server.
    This is intended to be idempotent.

    Known sources of non-idempotence:
        - setup_git_deploy
    """
    set_host_and_key(publichost, admin_privkey_path)
    print >>stdout, "Updating server..."
    run_unattended_upgrade(api, UNATTENDED_UPGRADE_REBOOT_SECONDS)
    postfixdebconfstring="""# General type of mail configuration:
# Choices: No configuration, Internet Site, Internet with smarthost, Satellite system, Local only
postfix	postfix/main_mailer_type select	No configuration"""

    print >>stdout, "Installing dependencies..."
    package_list = TAHOE_LAFS_PACKAGE_DEPENDENCIES + EXTRA_INFRASTRUCTURE_PACKAGE_DEPENDENCIES
    apt_install_dependencies(stdout, package_list)
    # From:  https://stripe.com/docs/libraries
    sudo('pip install --index-url https://code.stripe.com --upgrade stripe')
    write(postfixdebconfstring, '/home/ubuntu/postfixdebconfs.txt')
    sudo('debconf-set-selections /home/ubuntu/postfixdebconfs.txt')
    sudo_apt_get('install -y postfix')

    run('wget https://pypi.python.org/packages/source/p/pelican/pelican-3.2.2.tar.gz')
    run('tar zxf pelican-3.2.2.tar.gz')
    with cd('pelican-3.2.2'):
        sudo('python setup.py install')

    create_account('website', website_pubkey, stdout, stderr)

    sudo_apt_get('install -y authbind')
    sudo('touch /etc/authbind/byport/{443,80}')
    sudo('chown website:root /etc/authbind/byport/{443,80}')
    sudo('chmod -f 744 /etc/authbind/byport/{443,80}')

    # patch twisted to send intermediate certs, cf. https://github.com/LeastAuthority/leastauthority.com/issues/6
    sudo("sed --in-place=bak 's/[.]use_certificate_file[(]/.use_certificate_chain_file(/g' $(python -c 'import twisted, os; print os.path.dirname(twisted.__file__)')/internet/ssl.py")

    set_host_and_key(publichost, admin_privkey_path, 'website')
    git_ssh_path = os.path.join(os.path.dirname(leastauth_repo_gitdir), 'git_ssh.sh')
    setup_git_deploy(publichost, admin_privkey_path, git_ssh_path, '/home/website/leastauthority.com',
                     leastauth_repo_gitdir, leastauth_commit_hash)
    setup_git_deploy(publichost, admin_privkey_path, git_ssh_path, '/home/website/secret_config',
                     secret_config_repo_gitdir, secret_config_commit_hash)

    with cd('/home/website/'):
        if not files.exists('signup_logs'):
            run('mkdir signup_logs')
        if not files.exists('secrets'):
            run('mkdir secrets')

    with cd('/home/website/secret_config'):
        run('chmod -f 400 *pem')

    run_flapp_web_servers()
    set_up_crontab(INFRASTRUCTURE_CRONTAB, '/home/website/ctab')
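
Two of the helpers this setup calls are not shown; hedged sketches under the assumption that they are thin wrappers around Fabric's env and sudo:

from fabric.api import env, sudo

def sudo_apt_get(args):
    # Assumption: plain apt-get wrapper
    return sudo('apt-get %s' % args)

def set_host_and_key(publichost, privkey_path, username='ubuntu'):
    # Assumption: point Fabric at the target host and SSH key
    env.host_string = '%s@%s' % (username, publichost)
    env.key_filename = privkey_path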
Example No. 29
def install_package_in(env, deb, remote_upload_dir="/tmp/"):
    """
    Install all the packages in the `directory` that match `pattern` in a remote machine.
    """
    SUBDIR = "inst"

    if os.path.exists(deb):
        print(red("%s packages not found"))
        return

    print(green("... the following packages will be installed"))
    print(green("...... %s" % deb))

    with cd(remote_upload_dir):
        print(yellow("... cleaning up old packages"))
        if exists(SUBDIR):
            run("rm -rf {subdir}".format(subdir=SUBDIR))
        run("mkdir {subdir}".format(subdir=SUBDIR))

    with cd(os.path.join(remote_upload_dir, SUBDIR)):
        print(yellow("... uploading packages"))
        put(deb, ".")

        print(yellow("... installing software"))
        sudo("dpkg --install  *.deb")
Example No. 30
def update_refinery():
    """Perform full update of a Refinery Platform instance"""
    puts("Updating Refinery")
    with cd(env.refinery_project_dir):
        # if in Vagrant update current branch, otherwise checkout custom branch
        if env.project_user != 'vagrant':
            run("git checkout {branch}".format(**env))
        # avoid explaining automatic merge commits with both new and old git
        # versions running on different VMs
        # https://raw.githubusercontent.com/gitster/git/master/Documentation/RelNotes/1.7.10.txt
        with shell_env(GIT_MERGE_AUTOEDIT='no'):
            run("git pull".format(**env))
    with cd(env.refinery_ui_dir):
        run("npm prune --progress false")
        run("npm update --progress false")
        run("rm -rf bower_components")
        run("bower update --config.interactive=false")
        run("grunt make")
    with prefix("workon {refinery_virtualenv_name}".format(**env)):
        run("pip install -r {refinery_project_dir}/requirements.txt"
            .format(**env))
        run("find . -name '*.pyc' -delete")
        run("{refinery_app_dir}/manage.py migrate --noinput --fake-initial"
            .format(**env))
        run("{refinery_app_dir}/manage.py collectstatic --clear --noinput"
            .format(**env))
        run("supervisorctl reload")
    with cd(env.refinery_project_dir):
        run("touch {refinery_app_dir}/config/wsgi_*.py".format(**env))
Example No. 31
def mark_last_release_unsuccessful():
    # Removes last line from RELEASE_RECORD file
    with cd(env.root):
        sudo("sed -i '$d' {}".format(RELEASE_RECORD))
Example No. 32
    def deploy(self,
               service,
               script=None,
               n=10,
               gpus=1,
               token=None,
               pyargs=None):
        _service = service
        gpus = int(gpus)
        n = int(n)
        pyargs = '' if pyargs is None else ' {}'.format(pyargs)

        if len(free_gpu_slots()) == 0:
            puts('No free gpus on {}'.format(env.host_string))
            return

        self.initialize()
        with cd(self.host_docker_dir):
            run('docker-compose build --no-cache --build-arg ssh_prv_key="$(cat ~/.ssh/id_rsa)" --build-arg ssh_pub_key="$(cat ~/.ssh/id_rsa.pub)" {}'
                .format(service))
            bare_run_str = 'docker-compose run -d'
            free_gpus = sorted(free_gpu_slots())
            gpu_i = 0
            container_i = 0
            gpu_groups = []
            while gpu_i < len(free_gpus):
                service = _service
                gpu_j = gpu_i + gpus
                gpu_ids = free_gpus[gpu_i:gpu_j]

                if len(gpu_ids) < gpus or container_i >= n:
                    break

                name = env.user + '_' + service + '_{script}_gpu_' + '_'.join(
                    gpu_ids)
                args = ' -e NVIDIA_VISIBLE_DEVICES={}'.format(
                    ','.join(gpu_ids))
                gpu_run_str = bare_run_str + args

                if script is None:
                    name = name.format(script='notebook')
                    args = '-p 444{}:8888'.format(gpu_ids[0])
                    if token is not None:
                        service += ' --NotebookApp.token={}'.format(token)
                else:
                    name = name.format(script=script)
                    args = ' -v {}:/scripts'.format(join(
                        self.host_scripts_dir))
                    args += ' --entrypoint "python3 /scripts/{}.py{}"'.format(
                        script, pyargs)

                run('(docker ps -a | grep {name}) && docker rm {name}'.format(
                    name=name),
                    warn_only=True)
                run('{} {} --name {} {}'.format(gpu_run_str, args, name,
                                                service))

                gpu_i = gpu_j
                container_i += 1
                gpu_groups.append(','.join(gpu_ids))

        self.finalize()
        puts('started service {} on {} on GPUs {}'.format(
            env.host_string, service, ' '.join(gpu_groups)))
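
The deploy method asks free_gpu_slots() for idle GPUs; that helper is not shown. A hypothetical sketch that treats a GPU with almost no memory in use as free (the nvidia-smi query and the threshold are assumptions):

from fabric.api import run

def free_gpu_slots():
    # Assumption: < 100 MiB used means the GPU is free; IDs are returned as strings,
    # matching the ','.join(gpu_ids) usage above
    out = run('nvidia-smi --query-gpu=index,memory.used --format=csv,noheader,nounits')
    free = []
    for line in out.splitlines():
        idx, mem = [part.strip() for part in line.split(',')]
        if int(mem) < 100:
            free.append(idx)
    return free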
Example No. 33
def remove_duds():
    role = env.roles[0]
    releasedir = get_releasedir(role)
    with cd(releasedir):
        run('rm -rf duds/*')
        broadcast('duds/ directory emptied!')
Example No. 34
    def _add_private_file(self):
        run('rm -f /home/ubuntu/private.json')
        put('~/private_active_data_etl.json', '/home/ubuntu/private.json')
        with cd("/home/ubuntu"):
            run("chmod o-r private.json")
Example No. 35
def restart_celery():
    update_celery = prompt("Did you make any changes to the {{ cookiecutter.project_name }} celery tasks? (y/N)", default='n')
    if update_celery == 'y':
        with cd(env.django_settings.supervisor_path):
            cmd = "sudo supervisorctl restart {}".format(" ".join(env.celery))
            run(cmd)
Example No. 36
def install_swampdragon(*args, **kwargs):
    """ Install swampdragon with an example app (todo) """
    home = run('echo $HOME')
    projectname = kwargs.get('project')
    sd_host = kwargs.get('sd_host')
    sd_port = kwargs.get('sd_port')
    redis_port = kwargs.get('redis_port')
    appname = 'todo'
    errors = []

    if not projectname:
        error = 'Enter the project name'
        projectname = prompt(error)
        if not projectname:
            errors.append(error)
    if not sd_host:
        error = 'Enter the domain name of the website'
        sd_host = prompt(error)
        if not sd_host:
            errors.append(error)
            errors.append(
                'Enter the host name which has been used in the django installer name.'
            )
    if not sd_port:
        error = 'Enter a remote port for the swampdragon websocket server'
        sd_port = prompt(error)
        if not sd_port:
            errors.append(error)
    if not redis_port:
        error = 'Enter a local port for the redis server'
        redis_port = prompt(error)
        if not redis_port:
            errors.append(error)

    projectdir = os.path.join(home, projectname)
    if not exists(projectdir):
        errors.append('The Django project ~/%s must already exist.' %
                      projectname)
        errors.append(
            'Swampdragon will be installed into the directory ~/%s.' %
            projectname)

    init_file = os.path.join(home, 'init', projectname)
    if not exists(init_file):
        errors.append('The gunicorn init file ~/init/%s must exist.' %
                      projectname)

    if errors:
        errors.append(
            'Usage: fab -H localhost install_swampdragon:project=$PROJECT_NAME,sd_host=$SD_HOST,sd_port=$SD_PORT,redis_port=$REDIS_PORT'
        )
        for error in errors:
            print(error)
        return 1

    # Install redis.
    # The redis port is necessary for swampdragon, there is no possibility to connect via unix socket.
    install_redis(port=redis_port)

    # Find already existing virtualenv
    daemon = get_daemon(init_file)
    if not daemon:
        venv_path = prompt(
            'Enter the path to virtualenv (Default: ~/%s/venv/)' % projectname)
        if not venv_path:
            venv_path = os.path.join(projectdir, 'venv')
        if not venv_path.endswith('/'):
            venv_path += '/'
        print('Replace DAEMON variable in init script')
        sed(init_file, 'DAEMON=.*', 'DAEMON=%sbin/gunicorn' % venv_path)
    elif daemon and not daemon.startswith('/usr/bin/'):
        venv_path = os.path.dirname(os.path.dirname(daemon))
        print(
            'Found an existing virtualenv for the project %s in the directory %s'
            % (projectname, venv_path))
    else:
        print('Replace DAEMON variable in init script')
        sed(init_file, 'DAEMON=/usr/bin/',
            'DAEMON=$HOME/%s/venv/bin/' % projectname)
        venv_path = os.path.join(projectdir, 'venv')

    # Replace $HOME and ~, because it doesn't work with supervisord
    venv_path = venv_path.replace('$HOME', home).replace('~', home)
    vpython = os.path.join(venv_path, 'bin', 'python')
    vpip = os.path.join(venv_path, 'bin', 'pip')

    with cd(os.path.join(projectdir)):
        if not exists(os.path.join(venv_path, 'bin', 'python')):
            run('virtualenv %s' % venv_path)
        for packet in [
                'mysqlclient', 'swampdragon', 'gunicorn==18.0',
                'gevent==1.1rc5'
        ]:
            if not run(vpip + ' show ' + packet.split('==')[0]):
                with hide('output'):
                    run(vpip + ' install ' + packet)
        if not run(vpip + ' show swampdragon'):
            with hide('output'):
                run(vpip +
                    ' install mysqlclient swampdragon gunicorn==18.0 gevent==1.1rc5'
                    )
        if not exists(os.path.join(projectdir, 'todo.tar.gz')):
            run('wget https://templates.wservices.ch/swampdragon/todo.tar.gz -O ~/%s/todo.tar.gz'
                % projectname)
        if not exists(os.path.join(projectdir, 'todo')):
            run('tar xzf %s/todo.tar.gz' % projectdir)
        sd_settings_file = os.path.join(projectdir, appname,
                                        'swampdragon_settings.py')
        sed(sd_settings_file, 'X_REDIS_PORT', redis_port)
        sed(sd_settings_file, 'X_SD_HOST', sd_host)
        sed(sd_settings_file, 'X_SD_PORT', sd_port)

        settings_file = os.path.join(projectdir, projectname, 'settings.py')
        append(
            settings_file,
            'INSTALLED_APPS = list(INSTALLED_APPS) + [\'swampdragon\', \'todo\']'
        )
        append(settings_file, 'from todo.swampdragon_settings import *')

        urls_file = os.path.join(projectdir, projectname, 'urls.py')
        append(urls_file, 'from django.views.generic import TemplateView')
        append(
            urls_file,
            'urlpatterns += [url(r\'^todo/$\', TemplateView.as_view(template_name=\'index.html\'), name=\'home\')]'
        )

        run(vpython + ' manage.py migrate')
        run(vpython + ' manage.py collectstatic --noinput')

        try:
            run(init_file + ' restart')
        except SystemExit:
            run(init_file + ' start')

        # Setup supervisord
        setup_supervisord()
        runsd_config_file = os.path.join(home, 'supervisor', 'programs',
                                         projectname)
        if not exists(runsd_config_file):
            configs = [
                '[program:%s-sd]' % projectname,
                'directory=%s' % projectdir,
                'environment=DJANGO_SETTINGS_MODULE=%s.settings' % projectname,
                'command=%s manage.py runsd' % vpython,
                'stdout_logfile=%s/sd.log' % projectdir,
                'stderr_logfile=%s/sd.err' % projectdir,
                'autostart=true',
                'autorestart=true',
                'stopsignal=INT',
            ]

            for config in configs:
                append(runsd_config_file, config)

            run('~/init/supervisord reload')
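
The installer reads the current gunicorn path from the init script through a get_daemon() helper that is not shown; a hedged sketch:

from fabric.api import hide, run, settings

def get_daemon(init_file):
    # Assumption: the init script contains a line like DAEMON=/path/to/gunicorn
    with settings(hide('everything'), warn_only=True):
        line = run("grep '^DAEMON=' %s" % init_file)
    if line.failed or '=' not in line:
        return None
    return line.split('=', 1)[1].strip()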
Example No. 37
    def cd(cls, *args, **kwargs):
        return cd(*args, **kwargs)
Example No. 38
def perform_system_checks(current=False):
    path = env.code_current if current else env.code_root
    venv = env.virtualenv_current if current else env.virtualenv_root
    with cd(path):
        sudo('%s/bin/python manage.py check --deploy' % venv)
Example No. 39
def update_fuzzymap_binary():
    with cd('~/fuzzylog/delos-apps/examples/hashmap'):
        run('git pull')
        run('make clean')
        run('make')
Example No. 40
def start_pillows(current=False):
    code_root = env.code_current if current else env.code_root
    with cd(code_root):
        sudo('scripts/supervisor-group-ctl start pillowtop')
Example No. 41
            cmd += ' -w %s' % maxthreads
        if up:
            cmd += ' -up %s' % up
        if down:
            cmd += ' -dwn %s' % down
        run(cmd)


def clean_fuzzymap():
    with cd('~/fuzzylog/delos-apps/examples/hashmap'):
        run('rm *.txt')


def atomicmap_proc(log_addr, exp_range, exp_duration, client_id, workload,
                   async, window_size, replication):
    with cd('~/fuzzylog/delos-apps/examples/hashmap'):
        args = 'build/atomicmap '
        args += '--log_addr=' + str(log_addr) + ' '
        args += '--expt_range=' + str(exp_range) + ' '
        args += '--expt_duration=' + str(exp_duration) + ' '
        args += '--client_id=' + str(client_id) + ' '
        args += '--workload=' + str(workload) + ' '
        if async == "True":
            args += '--async '
            args += '--window_size=' + str(window_size)
        if replication == "True":
            args += ' --replication'
        run(args)


def capmap_proc(log_addr, exp_range, exp_duration, num_clients, client_id,
Example No. 42
def clean_fuzzymap():
    with cd('~/fuzzylog/delos-apps/examples/hashmap'):
        run('rm *.txt')
Example No. 43
def build():
    with cd(env.control_dir):
        sudo('docker-compose build pipecontrol')
Example No. 44
def start():
    with cd(env.control_dir):
        sudo('docker-compose up -d pipecontrol')
Example No. 45
def set_in_progress_flag(use_current_release=False):
    venv = env.virtualenv_root if not use_current_release else env.virtualenv_current
    with cd(env.code_root if not use_current_release else env.code_current):
        sudo('{}/bin/python manage.py deploy_in_progress'.format(venv))
Example No. 46
def down():
    with cd(env.control_dir):
        sudo('docker-compose down')
Example No. 47
def create_kafka_topics():
    """Create kafka topics if needed.  This is pretty fast."""
    with cd(env.code_root):
        sudo('%(virtualenv_root)s/bin/python manage.py create_kafka_topics' %
             env)
Example No. 48
def flip_es_aliases():
    """Flip elasticsearch aliases to the latest version"""
    with cd(env.code_root):
        sudo(
            '%(virtualenv_root)s/bin/python manage.py ptop_es_manage --flip_all_aliases'
            % env)
Example No. 49
def copy_email_templates():
    with cd(env.SRC_PATH):
        sudo("%s manage.py copy_email_templates" % env.PYTHON_BIN)
Example No. 50
def sync_db():
    with cd(env.project_dir):
        run("./%s/bin/manage-pcp-sync" % PROJECT_DIR)
Example No. 51
def compress_static():
    """ Runs django_compressor's compress command"""
    with cd(env.SRC_PATH):
        sudo('{0} compress --force'.format(env.MANAGE_BIN), user='******')
Example No. 52
def install_front_end_requirements():
    with cd(env.SRC_PATH):
        sudo('bower install --allow-root')
Example No. 53
def upload():
    with cd(PROJECT_DIR):
        put("{}.tar.gz".format(PROJECT_NAME), ".")
Example No. 54
def extract():
    with cd(PROJECT_DIR):
        run("sudo tar xf {}.tar.gz".format(PROJECT_NAME))
Example No. 55
def _get_submodule_list():
    if files.exists(env.code_current):
        with cd(env.code_current):
            return sudo("git submodule | awk '{ print $2 }'").split()
    else:
        return []
Example No. 56
def start_celery_tasks(current=False):
    code_root = env.code_current if current else env.code_root
    with cd(code_root):
        sudo('scripts/supervisor-group-ctl start celery')
Example No. 57
def single_install(with_db):
    """
    Perform the tasks to install the whole BOINC server on a single machine
    """
    if with_db:
        # Activate the DB
        sudo('mysql_install_db')
        sudo('chown -R mysql:mysql /var/lib/mysql/*')
        run('''echo "service { 'mysqld': ensure => running, enable => true }" | sudo puppet apply'''
            )
        sudo('service mysqld start')

        # Wait for it to start up
        time.sleep(5)

    if with_db:
        # Setup the database for recording WU's
        run('mysql --user=root < /home/ec2-user/boinc-magphys/server/src/database/create_database.sql'
            )

        # Make the BOINC project
        with cd('/home/ec2-user/boinc/tools'):
            run('./make_project -v --no_query --url_base http://{0} --db_user root {1}'
                .format(env.hosts[0], env.project_name))

        run('''echo 'databaseUserid = "root"
databasePassword = ""
databaseHostname = "localhost"
databaseName = "magphys"
boincDatabaseName = "{0}"' >> /home/ec2-user/boinc-magphys/server/src/config/database.settings'''
            .format(env.project_name))

    else:
        # Setup the database for recording WU's
        run('mysql --user={0} --host={1} --password={2} < /home/ec2-user/boinc-magphys/server/src/database/create_database.sql'
            .format(env.db_username, env.db_host_name, env.db_password))

        # Make the BOINC project
        with cd('/home/ec2-user/boinc/tools'):
            run('./make_project -v --no_query --drop_db_first --url_base http://{0} --db_user {1} --db_host={2} --db_passwd={3} {4}'
                .format(env.hosts[0], env.db_username, env.db_host_name,
                        env.db_password, env.project_name))

        run('''echo 'databaseUserid = "{0}"
databasePassword = "******"
databaseHostname = "{2}"
databaseName = "magphys"
boincDatabaseName = "{3}"' >> /home/ec2-user/boinc-magphys/server/src/config/database.settings'''
            .format(env.db_username, env.db_password, env.db_host_name,
                    env.project_name))

    # Setup Docmosis files
    run('''echo 'docmosis_key = "{0}"
docmosis_render_url = "https://dws.docmosis.com/services/rs/render"
docmosis_template = "Report.doc"' >> /home/ec2-user/boinc-magphys/server/src/config/docmosis.settings'''
        .format(env.docmosis_key))

    # Setup Work Generation files
    run('''echo 'min_pixels_per_file = "15"
row_height = "6"
threshold = "1000"
high_water_mark = "400"
report_deadline = "7"
project_name = "{0}"
tmp = "/tmp"
boinc_project_root = "/home/ec2-user/projects/{0}"' >> /home/ec2-user/boinc-magphys/server/src/config/work_generation.settings'''
        .format(env.project_name))

    # Copy the config files
    run('cp /home/ec2-user/boinc-magphys/server/config/boinc_files/db_dump_spec.xml /home/ec2-user/projects/{0}/db_dump_spec.xml'
        .format(env.project_name))
    run('cp /home/ec2-user/boinc-magphys/server/config/boinc_files/html/user/* /home/ec2-user/projects/{0}/html/user/'
        .format(env.project_name))
    run('cp /home/ec2-user/boinc-magphys/server/config/boinc_files/hr_info.txt /home/ec2-user/projects/{0}/hr_info.txt'
        .format(env.project_name))
    run('mkdir -p /home/ec2-user/projects/{0}/html/stats_archive'.format(
        env.project_name))
    run('mkdir -p /home/ec2-user/projects/{0}/html/stats_tmp'.format(
        env.project_name))

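    # Comment out the leading 'die' guard in create_forums.php so the script can be run later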
    comment('/home/ec2-user/projects/{0}/html/ops/create_forums.php'.format(
        env.project_name),
            '^die',
            char='// ')

    run('mkdir -p /home/ec2-user/projects/{0}/html/user/logos'.format(
        env.project_name))
    run('cp /home/ec2-user/boinc-magphys/server/logos/* /home/ec2-user/projects/{0}/html/user/logos/'
        .format(env.project_name))

    # Build the validator
    with cd('/home/ec2-user/boinc-magphys/server/src/magphys_validator'):
        run('make')

    # setup_website
    with cd('/home/ec2-user/boinc-magphys/machine-setup/boinc'):
        run('fab --set project_name={0} edit_files'.format(env.project_name))
        sudo('fab --set project_name={0} setup_website'.format(
            env.project_name))

    # This is needed because the files that Apache serves are inside the user's home directory.
    run('chmod 711 /home/ec2-user')
    run('chmod -R oug+r /home/ec2-user/projects/{0}'.format(env.project_name))
    run('chmod -R oug+x /home/ec2-user/projects/{0}/html'.format(
        env.project_name))
    run('chmod ug+w /home/ec2-user/projects/{0}/log_*'.format(
        env.project_name))
    run('chmod ug+wx /home/ec2-user/projects/{0}/upload'.format(
        env.project_name))

    # Setup the forums
    with cd('/home/ec2-user/projects/{0}/html/ops'.format(env.project_name)):
        run('php create_forums.php')

    # Copy files into place
    with cd('/home/ec2-user/boinc-magphys/machine-setup/boinc'):
        run('fab --set project_name={0},gmail_account={1} setup_postfix'.
            format(env.project_name, env.gmail_account))
        run('fab --set project_name={0} create_first_version'.format(
            env.project_name))
        run('fab --set project_name={0} start_daemons'.format(
            env.project_name))

    # Setup the crontab job to keep things ticking
    run('echo "PYTHONPATH=/home/ec2-user/boinc/py:/home/ec2-user/boinc-magphys/server/src" >> /tmp/crontab.txt'
        )
    run('echo "0,5,10,15,20,25,30,35,40,45,50,55 * * * * cd /home/ec2-user/projects/{0} ; /home/ec2-user/projects/{0}/bin/start --cron" >> /tmp/crontab.txt'
        .format(env.project_name))
    run('crontab /tmp/crontab.txt')

    # Setup the ops area password
    with cd('/home/ec2-user/projects/{0}/html/ops'.format(env.project_name)):
        run('htpasswd -bc .htpasswd {0} {1}'.format(env.ops_username,
                                                    env.ops_password))

    # Setup the log rotation
    sudo('''echo "/home/ec2-user/projects/{0}/log_*/*.log
/home/ec2-user/projects/{0}/log_*/*.out
{{
  notifempty
  daily
  compress
  rotate 10
  dateext
  copytruncate
}}" > /etc/logrotate.d/boinc'''.format(env.project_name))

    # Setup the ssh key
    run('ssh-keygen -t rsa -N "" -f /home/ec2-user/.ssh/id_rsa')
    run('cat /home/ec2-user/.ssh/id_rsa.pub >> /home/ec2-user/.ssh/authorized_keys'
        )
Example no. 58
0
def record_successful_release():
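    """Append the deployed code root to the release record file."""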
    with cd(env.root):
        files.append(RELEASE_RECORD, str(env.code_root), use_sudo=True)
Example no. 59
0
def kill_stale_celery_workers(delay=0):
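    """Schedule an 'at' job that kills stale celery workers after the given delay in minutes."""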
    with cd(env.code_current):
        sudo('echo "{}/bin/python manage.py '
             'kill_stale_celery_workers" '
             '| at now + {} minutes'.format(env.virtualenv_current, delay))
Example no. 60
0
def base_install(host0):
    """
    Perform the basic install
    """
    if host0:
        # Clone our code
        if env.branch == '':
            run('git clone git://github.com/ICRAR/boinc-magphys.git')
        else:
            run('git clone -b {0} git://github.com/ICRAR/boinc-magphys.git'.
                format(env.branch))

    # Puppet and git should already be installed by the Python setup step
    with cd('/home/ec2-user/boinc-magphys/machine-setup'):
        sudo('puppet apply boinc-magphys.pp')

    # Setup postfix
    sudo('service sendmail stop')
    sudo('service postfix stop')
    sudo('chkconfig sendmail off')
    sudo('chkconfig sendmail --del')

    sudo('chkconfig postfix --add')
    sudo('chkconfig postfix on')

    sudo('service postfix start')

    sudo('''echo "relayhost = [smtp.gmail.com]:587
smtp_sasl_auth_enable = yes
smtp_sasl_password_maps = hash:/etc/postfix/sasl_passwd
smtp_sasl_security_options = noanonymous
smtp_tls_CAfile = /etc/postfix/cacert.pem
smtp_use_tls = yes

# smtp_generic_maps
smtp_generic_maps = hash:/etc/postfix/generic
default_destination_concurrency_limit = 1" >> /etc/postfix/main.cf''')

    sudo(
        'echo "[smtp.gmail.com]:587 {0}@gmail.com:{1}" > /etc/postfix/sasl_passwd'
        .format(env.gmail_account, env.gmail_password))
    sudo('chmod 400 /etc/postfix/sasl_passwd')
    sudo('postmap /etc/postfix/sasl_passwd')

    # Setup the python
    run('wget http://pypi.python.org/packages/2.7/s/setuptools/setuptools-0.6c11-py2.7.egg'
        )
    sudo('sh setuptools-0.6c11-py2.7.egg')
    run('rm setuptools-0.6c11-py2.7.egg')
    sudo('rm -f /usr/bin/easy_install')
    sudo('easy_install-2.7 pip')
    sudo('rm -f /usr/bin/pip')
    sudo('pip-2.7 install sqlalchemy')
    sudo('pip-2.7 install Numpy')
    sudo('pip-2.7 install pyfits')
    sudo('pip-2.7 install pil')
    sudo('pip-2.7 install fabric')
    sudo('pip-2.7 install configobj')
    sudo('pip-2.7 install MySQL-python')
    sudo('pip-2.7 install boto')

    # Plotting and reporting
    sudo('pip-2.7 install matplotlib')
    sudo('pip-2.7 install astropy')

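    # Create the extra user accounts, install their SSH keys and grant passwordless sudo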
    for user in env.list_of_users:
        sudo('useradd {0}'.format(user))
        sudo('mkdir /home/{0}/.ssh'.format(user))
        sudo('chmod 700 /home/{0}/.ssh'.format(user))
        sudo('chown {0}:{0} /home/{0}/.ssh'.format(user))
        sudo('mv /home/ec2-user/{0}.pub /home/{0}/.ssh/authorized_keys'.format(
            user))
        sudo('chmod 700 /home/{0}/.ssh/authorized_keys'.format(user))
        sudo('chown {0}:{0} /home/{0}/.ssh/authorized_keys'.format(user))

        # Add them to the sudoers
        sudo(
            '''su -l root -c 'echo "{0} ALL = NOPASSWD: ALL" >> /etc/sudoers' '''
            .format(user))

    # Create the .boto file
    if host0:
        file_name = get_aws_keyfile()
        with open(file_name, 'rb') as csv_file:
            reader = csv.reader(csv_file)
            # Skip the header
            reader.next()

            row = reader.next()
            run('''echo "[Credentials]
aws_access_key_id = {0}
aws_secret_access_key = {1}" >> /home/ec2-user/.boto'''.format(row[1], row[2]))

        # Setup the S3 environment
        with cd('/home/ec2-user/boinc-magphys/machine-setup/boinc'):
            run('fab --set project_name={0} create_s3'.format(
                env.project_name))

        # Setup BOINC
        # Grab the latest trunk from GIT
        run('git clone git://boinc.berkeley.edu/boinc-v2.git boinc')

        with cd('/home/ec2-user/boinc'):
            run('./_autosetup')
            run('./configure --disable-client --disable-manager')
            run('make')

        # Setup the pythonpath
        append('/home/ec2-user/.bash_profile', [
            '',
            'PYTHONPATH=/home/ec2-user/boinc/py:/home/ec2-user/boinc-magphys/server/src',
            'export PYTHONPATH'
        ])

    # Setup the HDF5
    with cd('/usr/local/src'):
        sudo(
            'wget http://www.hdfgroup.org/ftp/lib-external/szip/2.1/src/szip-2.1.tar.gz'
        )
        sudo('tar -xvzf szip-2.1.tar.gz')
        sudo(
            'wget http://www.hdfgroup.org/ftp/HDF5/current/src/hdf5-1.8.11.tar.gz'
        )
        sudo('tar -xvzf hdf5-1.8.11.tar.gz')
        sudo('rm *.gz')
    with cd('/usr/local/src/szip-2.1'):
        sudo('./configure --prefix=/usr/local/szip')
        sudo('make')
        sudo('make install')
    with cd('/usr/local/src/hdf5-1.8.11'):
        sudo(
            './configure --prefix=/usr/local/hdf5 --with-szlib=/usr/local/szip --enable-production'
        )
        sudo('make')
        sudo('make install')
    sudo('''echo "/usr/local/hdf5/lib
/usr/local/szip/lib" >> /etc/ld.so.conf.d/hdf5.conf''')
    sudo('ldconfig')

    # Now install the H5py
    with cd('/tmp'):
        run('wget https://h5py.googlecode.com/files/h5py-2.1.0.tar.gz')
        run('tar -xvzf h5py-2.1.0.tar.gz')
    with cd('/tmp/h5py-2.1.0'):
        sudo('python2.7 setup.py build --hdf5=/usr/local/hdf5')
        sudo('python2.7 setup.py install')