Example #1
0
def upload_cookbooks(url="http://github.com/rcbops/chef-cookbooks",
                     branch="grizzly",
                     directory="/opt/rpcs/chef-cookbooks"):
    """Uploads Chef cookbooks from a git repository

    Clones the given branch onto the remote host, uploads all cookbooks
    to the Chef server via knife, and creates any roles shipped in the
    repo's roles/ directory.

    Args:
        url: URL for Git repository
        branch: Branch of Git repo to use
        directory: Path to clone repository into

    """
    puts(green("Installing git"))
    sudo('apt-get -qq update')
    sudo('apt-get install -qy git')

    # We might want to be more careful here: any previous checkout,
    # including local modifications, is discarded wholesale.
    if files.exists(directory):
        sudo('rm -rf %s' % directory)

    puts('Cloning chef-cookbooks repository')
    # Shallow clone (--depth 1) is enough since only the tip is uploaded.
    sudo('git clone -q --recursive --depth 1 -b %s %s %s'
         % (branch, url, directory))

    puts(green("Uploading cookbooks"))
    sudo('knife cookbook upload -c /root/.chef/knife.rb -a')

    # Roles are optional in the repo; create them only when present.
    if files.exists('%s/roles' % directory):
        puts(green("Creating roles"))
        sudo('knife role from file %s/roles/*.rb -c /root/.chef/knife.rb'
             % directory)
Example #2
0
def deploy():
    """Deploy the haxclub project.

    Updates (or clones) the code under the 'remote' virtualenv, runs the
    Django management commands with expected prompts auto-answered, then
    refreshes nginx config/SSL certs and bounces nginx.
    """
    with prefix('source $(which virtualenvwrapper.sh) && workon remote'):
        settings_file = '--settings=haxclub.settings.base'
        env_vars = config.get('env_vars')
        # First-time setup: clone the repository if it is not there yet.
        if not exists('~/haxclub'):
            with cd('~/'):
                run('git clone https://github.com/jsalva/haxclub')
        with cd('~/haxclub/haxclub'):
            if not exists('logs'):
                run('mkdir logs')
            run('git pull origin master')
            with shell_env(**env_vars):
                # Auto-answer the confirmation prompt management commands
                # may raise.
                prompts = []
                prompts += expect("Type 'yes' to continue","yes")
                with expecting(prompts):
                    erun('python manage.py collectstatic %s' % settings_file)
                    erun('python manage.py migrate %s' % settings_file)
                    erun('python manage.py syncdb %s' % settings_file)
                    # Reload supervisor when it is already running,
                    # otherwise start it daemonized.
                    if exists('supervisord.pid'):
                        erun('python manage.py supervisor reload %s' % settings_file)
                    else:
                        erun('python manage.py supervisor --daemonize %s' % settings_file)

    if not exists('/tmp/nginx'):
        run('mkdir /tmp/nginx')

    put('nginx.conf','/etc/nginx/nginx.conf',use_sudo=True)
    # Fix: nginx_haxclub.conf was previously uploaded twice; once suffices.
    put('nginx_haxclub.conf','/etc/nginx/conf.d/nginx_haxclub.conf',use_sudo=True)
    put('ssl/haxclub.key.nopass','/etc/ssl/certs/haxclub.key.nopass',use_sudo=True)
    put('ssl/haxclub.crt','/etc/ssl/certs/haxclub.crt',use_sudo=True)
    sudo('service nginx stop; service nginx start;')
Example #3
0
def version_state(name, prefix=False, no_content=False):
    """
    If the server state exists return parsed json as a python object or True 
    prefix=True returns True if any files exist with ls [prefix]*

    Args:
        name: base name of the state file under /var/local/woven
        prefix: when True, only test whether any file matching *name exists
        no_content: when True, report existence without reading the file

    Returns:
        Parsed JSON object, True, or False when no state was found.
    """
    # State files are namespaced by project when a full name is configured.
    if env.project_fullname:
        full_name = "-".join([env.project_fullname, name])
    else:
        full_name = name
    current_state = False
    state = State(full_name)  # NOTE(review): 'state' appears unused -- confirm
    state_path = "/var/local/woven/%s" % full_name
    if not prefix and not no_content and exists(state_path):
        content = int(sudo("ls -s %s" % state_path).split()[0])  # get size
        if content:
            # Non-empty state file: download it and parse the JSON payload.
            fd, file_path = tempfile.mkstemp()
            os.close(fd)
            get(state_path, file_path)
            with open(file_path, "r") as f:
                content = f.read()
                object = json.loads(content)  # shadows builtin 'object'
                current_state = object
        else:
            # Empty file: its mere existence marks the state as set.
            current_state = True
    elif not prefix and no_content and exists(state_path):
        current_state = True
    elif prefix:
        with settings(warn_only=True):  # find any version
            current_state = sudo("ls /var/local/woven/*%s" % name)
        if not current_state.failed:
            current_state = True

    return current_state
Example #4
0
def build_statics(env, version):
    """Build static assets for every subsite in the checked-out code.

    Compiles LESS stylesheets (stamping a cache-buster token when
    configured), Closure-compiles each subsite's scripts into one
    build/site.js, and records the short version token in VERSION_TOKEN.
    """
    code_path = get_code_path(env, version)
  
    # build player skin
    with cd(code_path):
        for subsite in SUBSITES:
            loc = subsite.location
            # Closure over 'loc' is rebound each iteration and only called
            # within the same iteration, so late binding is safe here.
            def getPath(path):
                return "{project}/{subsite}/static/{path}".format(project=PROJECTNAME, subsite=loc, path=path)

            style = subsite.styles
            if not files.exists(getPath("css")):
                run("mkdir -p {}".format(getPath("css")))

            # Stamp the cache-buster token into the LESS source if used.
            if style.hasBuster:
                files.sed(getPath("less/cachebuster.less"), "CACHEBUSTTOKEN", '"{}"'.format(getShortToken(version)))

            for stylesheet in style.list:
                run("~/node_modules/less/bin/lessc {project}/{subsite}/static/less/{stylesheet} --yui-compress {project}/{subsite}/static/css/{outname}.min.css".format(project=PROJECTNAME, subsite=loc, stylesheet=stylesheet, outname = stylesheet.rsplit(".")[0]))



        for subsite in SUBSITES:
            if subsite.scripts:
                if not files.exists("{project}/{subsite}/static/scripts/build/".format(project=PROJECTNAME, subsite=subsite.location)):
                    run("mkdir -p {project}/{subsite}/static/scripts/build/".format(project=PROJECTNAME, subsite=subsite.location))

                # All subsite scripts go into a single Closure invocation.
                customs = " ".join(["{project}/{subsite}/static/scripts/{script}".format(project=PROJECTNAME, subsite=subsite.location, script = script) for script in subsite.scripts])
                run("java -jar ~/resources/compiler.jar --compilation_level SIMPLE_OPTIMIZATIONS \
                    --js {customs} \
                    --warning_level QUIET --js_output_file {project}/{subsite}/static/scripts/build/site.js".format(project=PROJECTNAME, subsite=subsite.location, customs = customs))
        run("echo {} > ./VERSION_TOKEN".format(getShortToken(version)))
Example #5
0
def setMaster():
    """Prepare the master node.

    Rebuilds /etc/hosts from a pristine backup plus the local 'hosts'
    file, rsyncs the project into the remote home, installs the SSH key
    into the project and program directories, then runs 'genkey'.
    """
    # Keep a pristine copy of /etc/hosts the first time this runs.
    if exists('/etc/hosts0'):
        print 'etc/hosts0 exists'
    else:
        sudo('cp /etc/hosts /etc/hosts0')

    # Restore the pristine hosts file, then append our entries to it.
    sudo('rm /etc/hosts')
    sudo('cp /etc/hosts0 /etc/hosts')
    put('hosts')
    sudo('cat hosts|sudo tee -a /etc/hosts')
    run('rm hosts')

    run('cat /etc/hosts')

    # Mirror the project tree into the remote user's home (skip results).
    path1 = '/home/{0}'.format(parm['USER'])
    rsync_project(path1, exclude=['result'])

    # Install the key file (mode 400) wherever it is missing.
    path2 = join(path1, basename(realpath('.')))
    path3 = join(path2, parm['programdir'])
    for dst in (path2, path3):
        fi = '{0}/{1}'.format(dst, parm['keyfile'])
        if not exists(fi, use_sudo=True):
            put(parm['keyfile'], dst)
            sudo('chmod 400 {0}'.format(fi))
    execute('genkey')
Example #6
0
def _clone_galaxy_repo(env):
    """
    Clone Galaxy source code repository from ``env.galaxy_repository`` to
    ``env.galaxy_home``, setting the directory ownership to ``env.galaxy_user``

    This method cannot be used to update an existing Galaxy installation.
    """
    # Make sure ``env.galaxy_home`` dir exists but without Galaxy in it
    galaxy_exists = False
    if exists(env.galaxy_home):
        # An .hg directory means a Mercurial clone is already in place.
        if exists(os.path.join(env.galaxy_home, '.hg')):
            env.logger.warning("Galaxy install dir {0} exists and seems to have " \
                "a Mercurial repository already there. Galaxy already installed?"\
                .format(env.galaxy_home))
            galaxy_exists = True
    else:
        sudo("mkdir -p '%s'" % env.galaxy_home)
    if not galaxy_exists:
        with cd(env.galaxy_home):
            # Needs to be done as non galaxy user, otherwise we have a
            # permissions problem.
            galaxy_repository = env.get("galaxy_repository", 'https://bitbucket.org/galaxy/galaxy-central/')
            env.safe_sudo('hg clone %s .' % galaxy_repository)
    # Make sure ``env.galaxy_home`` is owned by ``env.galaxy_user``
    _chown_galaxy(env, env.galaxy_home)
    # Make sure env.galaxy_home root dir is also owned by env.galaxy_user so Galaxy
    # process can create necessary dirs (e.g., shed_tools, tmp)
    sudo("chown {0}:{0} {1}".format(env.galaxy_user, os.path.split(env.galaxy_home)[0]))
    # If needed, custom-configure this freshly cloned Galaxy
    preconfigured = _read_boolean(env, "galaxy_preconfigured_repository", False)
    if not preconfigured:
        _configure_galaxy_repository(env)
def get_files_from_file_path(file_path, deployment_path):
  """Copy any of the fixed CSV exports found in file_path to deployment_path.

  Each file is optional: it is copied only when present on the remote host.
  """
  for csv_name in ('agencies_fixed.csv', 'people_fixed.csv', 'sites_fixed.csv'):
    source = '%s/%s' % (file_path, csv_name)
    if exists(source):
      run('cp %s %s' % (source, deployment_path))
Example #8
0
def vcs_upload():
    """
    Uploads the project with the selected VCS tool.

    git: pushes to a bare repo on the server and checks the work tree out
    into ``env.proj_path``. hg: pushes and updates the repo in place.
    """
    if env.deploy_tool == "git":
        remote_path = "ssh://%s@%s%s" % (env.user, env.host_string,
                                         env.repo_path)
        # Create and initialise the bare repository on first deploy.
        if not exists(env.repo_path):
            run("mkdir -p %s" % env.repo_path)
            with cd(env.repo_path):
                run("git init --bare")
        local("git push -f %s master" % remote_path)
        with cd(env.repo_path):
            run("GIT_WORK_TREE=%s git checkout -f master" % env.proj_path)
            run("GIT_WORK_TREE=%s git reset --hard" % env.proj_path)
    elif env.deploy_tool == "hg":
        remote_path = "ssh://%s@%s/%s" % (env.user, env.host_string,
                                          env.repo_path)
        with cd(env.repo_path):
            if not exists("%s/.hg" % env.repo_path):
                run("hg init")
                print(env.repo_path)
            with fab_settings(warn_only=True):
                # hg exits 1 for "nothing to push"; 255 is a real error.
                push = local("hg push -f %s" % remote_path)
                if push.return_code == 255:
                    # Fix: Fabric's abort() requires a message argument;
                    # calling it bare raised TypeError instead of aborting.
                    abort("hg push to %s failed" % remote_path)
            run("hg update")
def update_project_files(update_settings=''):
    """Pull (or clone) the staging checkout, rsync it into the live dir,
    ensure static/admin dirs and symlinks exist for both live and staging,
    and optionally push settings files when update_settings == 'y'.
    """
    if exists(PROJECT_DIR_STAGING):
        run('cd %s && git pull' % PROJECT_DIR_STAGING)
    else:
        run('git clone %s %s' % (PROJECT_REPO_URL, PROJECT_DIR_STAGING))

    # Sync staging -> live, excluding VCS metadata, deploy helpers, local
    # settings overrides and compiled artifacts.
    run('rsync -az --delete-after --exclude=.git --exclude=.gitignore --exclude=deploy --exclude=local_settings*  --exclude=*.pyc --exclude=*.pyo %s/ %s' % (PROJECT_DIR_STAGING, PROJECT_DIR))

    # TODO customize for other Python versions and probably other GNU/Linux distributions

    project_dict = {'PROJECT_USER': PROJECT_USER, 'PROJECT_NAME': PROJECT_NAME, 'PROJECT_NAME_STAGING': PROJECT_NAME_STAGING}

    if not exists('/home/%(PROJECT_USER)s/%(PROJECT_NAME)s/static' % project_dict):
        run('mkdir -p /home/%(PROJECT_USER)s/%(PROJECT_NAME)s/static' % project_dict)

    if not exists('/home/%(PROJECT_USER)s/%(PROJECT_NAME_STAGING)s/static' % project_dict):
        run('mkdir -p /home/%(PROJECT_USER)s/%(PROJECT_NAME_STAGING)s/static' % project_dict)

    # Symlink the Django admin media into each static dir (python2.7 path).
    if not exists('/home/%(PROJECT_USER)s/%(PROJECT_NAME)s/static/admin' % project_dict):
        run('ln -s /home/%(PROJECT_USER)s/.virtualenvs/%(PROJECT_NAME)s/lib/python2.7/site-packages/django/contrib/admin/media/ /home/%(PROJECT_USER)s/%(PROJECT_NAME)s/static/admin' % project_dict)

    if not exists('/home/%(PROJECT_USER)s/%(PROJECT_NAME_STAGING)s/static/admin' % project_dict):
        run('ln -s /home/%(PROJECT_USER)s/.virtualenvs/%(PROJECT_NAME_STAGING)s/lib/python2.7/site-packages/django/contrib/admin/media/ /home/%(PROJECT_USER)s/%(PROJECT_NAME_STAGING)s/static/admin' % project_dict)

    if update_settings == 'y':
        put_settings_files()
Example #10
0
def dump_db(dumpfile="pootle_DB_backup.sql"):
    """Dumps the DB as a SQL script and downloads it

    Prompts before overwriting either the local or the remote copy and
    prints an abort message when the user declines.
    """
    require('environment', provided_by=[production, staging])

    # Proceed only if the local file is absent or the user agrees to
    # overwrite it.
    if ((isfile(dumpfile) and confirm('\n%s already exists locally. Do you '
        'want to overwrite it?' % dumpfile, default=False))
        or not isfile(dumpfile)):

        remote_filename = '%s/%s' % (env['project_path'], dumpfile)

        # Same overwrite confirmation for the remote dump file.
        if ((exists(remote_filename) and confirm('\n%s already exists. Do you '
            'want to overwrite it?' % remote_filename, default=False))
            or not exists(remote_filename)):

            print('\nDumping DB...')

            with settings(hide('stderr')):
                sudo('mysqldump -u %s -p %s > %s' % (env['db_user'],
                                                     env['db_name'],
                                                     remote_filename))
                get(remote_filename, '.')
        else:
            print('\nAborting.')
    else:
        print('\nAborting.')
Example #11
0
def initialise_qgis_plugin_repo():
    """Initialise a QGIS plugin repo where we host test builds.

    Sets up an apache2 + mod_wsgi site serving the plugin repo, copies
    repo assets into place, and registers a local /etc/hosts entry.
    """
    _all()
    fabtools.require.deb.package('libapache2-mod-wsgi')
    code_path = os.path.join(env.repo_path, env.repo_alias)
    local_path = '%s/scripts/test-build-repo' % code_path

    # Create the web root for the plugin repo, owned by the deploy user.
    if not exists(env.plugin_repo_path):
        sudo('mkdir -p %s' % env.plugin_repo_path)
        sudo('chown %s.%s %s' % (env.user, env.user, env.plugin_repo_path))

    run('cp %s/plugin* %s' % (local_path, env.plugin_repo_path))
    run('cp %s/icon* %s' % (code_path, env.plugin_repo_path))
    # Instantiate the apache conf from its template copy.
    run('cp %(local_path)s/inasafe-test.conf.templ '
        '%(local_path)s/inasafe-test.conf' % {'local_path': local_path})

    # Point the conf at this deployment's site name.
    sed('%s/inasafe-test.conf' % local_path,
        'inasafe-test.linfiniti.com',
        env.repo_site_name)

    with cd('/etc/apache2/sites-available/'):
        # Replace any previously installed conf with a fresh symlink.
        if exists('inasafe-test.conf'):
            sudo('a2dissite inasafe-test.conf')
            fastprint('Removing old apache2 conf', False)
            sudo('rm inasafe-test.conf')

        sudo('ln -s %s/inasafe-test.conf .' % local_path)

    # Add a hosts entry for local testing - only really useful for localhost
    hosts = '/etc/hosts'
    if not contains(hosts, 'inasafe-test'):
        append(hosts, '127.0.0.1 %s' % env.repo_site_name, use_sudo=True)

    sudo('a2ensite inasafe-test.conf')
    sudo('service apache2 reload')
Example #12
0
def sync():
    """Rysnc local states and pillar data to the master, and checkout margarita."""
    # Check for missing local secrets so that they don't get deleted
    # project.rsync_project fails if host is not set
    sudo("mkdir -p /srv")
    if not have_secrets():
        get_secrets()
    else:
        # Check for differences in the secrets files
        for environment in [env.environment]:
            remote_file = os.path.join('/srv/pillar/', environment, 'secrets.sls')
            with lcd(os.path.join(CONF_ROOT, 'pillar', environment)):
                # Fetch the remote secrets (or an empty stand-in) so they
                # can be diffed against the local copy before overwriting.
                if files.exists(remote_file):
                    get(remote_file, 'secrets.sls.remote')
                else:
                    local('touch secrets.sls.remote')
                with settings(warn_only=True):
                    result = local('diff -u secrets.sls.remote secrets.sls')
                    # Non-empty diff: require explicit confirmation before
                    # the remote secrets get clobbered below.
                    if result.failed and files.exists(remote_file) and not confirm(
                            red("Above changes will be made to secrets.sls. Continue?")):
                        abort("Aborted. File have been copied to secrets.sls.remote. " +
                              "Resolve conflicts, then retry.")
                    else:
                        local("rm secrets.sls.remote")
    # Stage the salt tree in /tmp, then swap it into /srv.
    salt_root = CONF_ROOT if CONF_ROOT.endswith('/') else CONF_ROOT + '/'
    project.rsync_project(local_dir=salt_root, remote_dir='/tmp/salt', delete=True)
    sudo('rm -rf /srv/salt /srv/pillar')
    sudo('mv /tmp/salt/* /srv/')
    sudo('rm -rf /tmp/salt/')
    execute(margarita)
Example #13
0
def load_db(dumpfile=None):
    """Loads data from a SQL script to Pootle DB

    Uploads *dumpfile* to the project path and pipes it into mysql,
    prompting before overwriting an existing remote copy.
    """
    require('environment', provided_by=[production, staging])

    if dumpfile is not None:
        if isfile(dumpfile):
            remote_filename = '%(project_path)s/DB_backup_to_load.sql' % env

            # Proceed if the remote copy is absent or the user approves
            # overwriting it.
            if (exists(remote_filename) and
                confirm('\n%s already exists. Do you want to overwrite it?'
                        % remote_filename,
                        default=False)) or not exists(remote_filename):

                print('\nLoading data into the DB...')

                with settings(hide('stderr')):
                    put(dumpfile, remote_filename, use_sudo=True)
                    sudo('mysql -u %s -p %s < %s' % (env['db_user'],
                                                     env['db_name'],
                                                     remote_filename))
            else:
                print('\nAborting.')
        else:
            print('\nERROR: The file "%s" does not exist. Aborting.' % dumpfile)
    else:
        print('\nERROR: A dumpfile must be provided. Aborting.')
Example #14
0
def export_settings():
  """Export WordPress options as JSON files and push them to the settings
  repository branch matching this host's environment.
  """
  data = get_settings()
  config = get_config()
  host = get_host()
  servers = get_roles()
  sudoer = servers[host]['sudo_user']
  wp = servers[host]['wordpress']
  # Fix: 'role' was undefined here (NameError at runtime); the lookup key
  # used everywhere else in this function is 'host'.
  wp_cli = check_for_wp_cli(host)
  settings_url = config['Application']['WordPress']['settings']
  environment = servers[host]['environment']
  if (not files.exists('/tmp/wp-settings')):
    with cd('/tmp/'):
      sudo('git clone %s wp-settings' % settings_url)
  with cd('/tmp/wp-settings'):
    try:
      sudo('git pull origin %s' % environment)
    except Exception:  # narrowed from bare except; pull stays best-effort
      puts(red('Could not reach the origin server.'))

  # Dump each option as JSON into the settings working copy.
  with settings(path=wp_cli, behavior='append', sudo_user=sudoer), cd(wp):
    for d in data:
      sudo('wp option get %s --format=json > /tmp/wp-settings/%s.json --allow-root' % (d, d))
  with settings(sudo_user=sudoer), cd('/tmp/wp-settings'):
    sudo('git config core.fileMode 0')
    # Use the environment branch, creating it locally when missing.
    if (not files.exists('.git/refs/heads/%s' % environment) ):
      sudo('git checkout -b %s' % environment)
    else:
      sudo('git checkout %s' % environment)
    sudo('git add .')
    sudo('git commit -a -m "Settings update: %s"' % (datetime.date.today()))
    try:
      sudo('git push origin %s' % environment)
    except Exception:  # narrowed from bare except; push failure is non-fatal
      puts(red('Could not communicate with origin server'))
def deploy():
    """
    Deploy latest version of the project.
    Check out the latest version of the project from version
    control, install new requirements, sync and migrate the database,
    collect any new static assets, and restart gunicorn's work
    processes for the project.
    """
    if not exists(env.venv_path):
        # NOTE(review): on Python 2 this is eval-style input(); raw_input()
        # is presumably intended there -- confirm the target interpreter.
        prompt = input("\nVirtualenv doesn't exist: %s"
                       "\nWould you like to create it? (yes/no) "
                       % env.proj_name)
        if prompt.lower() != "yes":
            print("\nAborting!")
            return False
        create()
    for name in get_templates():
        upload_template_and_reload(name)
    with project():
        # Snapshot DB and static files so the deploy can be rolled back.
        backup("last.db")
        static_dir = static()
        if exists(static_dir):
            run("tar -cf last.tar %s" % static_dir)
        # Record the currently deployed revision before pulling.
        git = env.git
        last_commit = "git rev-parse HEAD" if git else "hg id -i"
        run("%s > last.commit" % last_commit)
        with update_changed_requirements():
            run("git pull origin master -f" if git else "hg pull && hg up -C")
        manage("collectstatic -v 0 --noinput")
        manage("syncdb --noinput")
        manage("migrate --noinput")
    restart()
    return True
Example #16
0
def purge_data_nav(force=False):
    """
    purge temp/data.nav.lz4 files
    the whole process will be skipped as soon as a single condition is encountered:
    - temp data file is more recent than actual data file
    - temp data file exists but actual data file is missing

    Set force=True to skip the safety checks and purge unconditionally.
    """
    if not force:
        print("Checking lz4 temp files purge conditions before proceeding...")
        # Collect every instance that fails a safety check; abort if any do.
        reason = {}
        for instance in env.instances.values():
            plain_target = get_real_instance(instance).target_lz4_file
            temp_target = os.path.join(os.path.dirname(plain_target), 'temp', os.path.basename(plain_target))
            if exists(plain_target):
                # Temp newer than the live data file: purging would lose it.
                if exists(temp_target) and files.getmtime(temp_target) > files.getmtime(plain_target):
                    reason[instance.name] = "{} is more recent than {}".format(temp_target, plain_target)
            elif exists(temp_target):
                # Temp exists but the live data file is missing.
                reason[instance.name] = "{} does not exists".format(plain_target)
        if reason:
            print(yellow("Error: Can't purge lz4 temp files, reasons:"))
            for k, v in reason.iteritems():  # iteritems(): Python 2 only
                print("  {}: {}".format(k, v))
            exit(1)

    # All checks passed (or force=True): remove every temp lz4 file.
    for instance in env.instances.values():
        plain_target = get_real_instance(instance).target_lz4_file
        temp_target = os.path.join(os.path.dirname(plain_target), 'temp', os.path.basename(plain_target))
        if exists(temp_target):
            files.remove(temp_target)
Example #17
0
def install_gatk(env):
    """Install the GATK toolkit under Galaxy's tools directory.

    Downloads ``env.tool_version``, installs the jars, wraps them in a
    ``gatk`` shell script, writes an env.sh PATH file, and links the jars
    into Galaxy's jar directory.
    """
    version = env.tool_version
    url = 'ftp://ftp.broadinstitute.org/pub/gsa/GenomeAnalysisTK/GenomeAnalysisTK-%s.tar.bz2' % version
    pkg_name = 'gatk'
    install_dir = os.path.join(env.galaxy_tools_dir, pkg_name, version)
    # Honour the deployment's privilege model for filesystem operations.
    install_cmd = sudo if env.use_sudo else run
    if not exists(install_dir):
        install_cmd("mkdir -p %s" % install_dir)
        install_cmd("mkdir -p %s/bin" % install_dir)
    with _make_tmp_dir() as work_dir:
        with cd(work_dir):
            run("wget -O gatk.tar.bz2 %s" % url)
            run("tar -xjf gatk.tar.bz2")
            install_cmd("cp GenomeAnalysisTK-%s/*.jar %s/bin" % (version, install_dir))
    # Create shell script to wrap jar
    sudo("echo '#!/bin/sh' > %s/bin/gatk" % (install_dir))
    sudo("echo 'java -jar %s/bin/GenomeAnalysisTK.jar $@' >> %s/bin/gatk" % (install_dir, install_dir))
    sudo("chmod +x %s/bin/gatk" % install_dir)
    # env file
    sudo("echo 'PATH=%s/bin:$PATH' > %s/env.sh" % (install_dir, install_dir))
    _update_default(env, install_dir)
    # Link jar to Galaxy's jar dir
    jar_dir = os.path.join(env.galaxy_jars_dir, pkg_name)
    if not exists(jar_dir):
        install_cmd("mkdir -p %s" % jar_dir)
    tool_dir = os.path.join(env.galaxy_tools_dir, pkg_name, 'default', 'bin')
    install_cmd('ln --force --symbolic %s/*.jar %s/.' % (tool_dir, jar_dir))
    install_cmd('chown --recursive %s:%s %s' % (env.galaxy_user, env.galaxy_user, jar_dir))
Example #18
0
def bootstrap_swan():
    """Build the SWAN toolchain from source under SWAN_HOME.

    Clones llvm/clang and builds them, builds the swan runtime with the
    freshly built clang, then clones and runs the swan test suite.
    """
    # Fix: 'mkdir -pp' was a typo (the duplicated flag happened to be
    # harmless, but '-p' is what is meant and keeps this idempotent).
    run('mkdir -p %s' % SWAN_HOME)

    with cd(SWAN_HOME):
        # Clone sources only when missing so re-runs just rebuild.
        if (not exists(os.path.join(SWAN_HOME, 'llvm'))):
            run("git clone %s llvm" % SWAN_LLVM_REPO)
        if (not exists(os.path.join(SWAN_HOME, 'llvm/tools/clang'))):
            run("git clone %s llvm/tools/clang" % SWAN_CLANG_REPO)
        run('mkdir -p build')
        with cd('build'):
            run('cmake -G "Unix Makefiles" ../llvm')
            run('make clean')
            run('make')
        test_clang()
        if (not exists(os.path.join(SWAN_HOME, 'swan_runtime'))):
            run("git clone %s" % SWAN_RT_REPO)
        # Build the runtime with the clang we just built.
        with cd('swan_runtime'):
            run("libtoolize")
            run("aclocal")
            run("automake --add-missing")
            run("autoconf")
            run("./configure --prefix=%s/swan_runtime/lib CC=../build/bin/clang CXX=../build/bin/clang++" % SWAN_HOME)
            run("make clean")
            run("make")
        run("git clone https://github.com/project-asap/swan_tests.git")
        with cd("swan_tests"):
            run("make CXX=../build/bin/clang++ SWANRTDIR=../swan_runtime test")
Example #19
0
def rm_old_builds(path=None, user=None):
    '''Remove old build directories on the deploy server.

    Preserves the three most recent build directories plus whatever the
    'current' and 'previous' symlinks point at.

    Takes the same path and user options as **deploy**.
    '''
    configure(path=path, user=user)
    with cd(env.remote_path):
        with hide('stdout'):  # suppress ls/readlink output
            # get directory listing sorted by modification time (single-column for splitting)
            dir_listing = sudo('ls -t1', user=env.remote_acct)
            # get current and previous links so we don't remove either of them
            current = sudo('readlink current', user=env.remote_acct) if files.exists('current') else None
            previous = sudo('readlink previous', user=env.remote_acct) if files.exists('previous') else None

        # split dir listing on newlines and strip whitespace
        dir_items = [n.strip() for n in dir_listing.split('\n')]
        # regex based on how we generate the build directory:
        #   project name, numeric version, optional pre/dev suffix, optional revision #
        build_dir_regex = r'^%(project)s-[0-9.]+(-[A-Za-z0-9_-]+)?(-r[0-9]+)?$' % env
        build_dirs = [item for item in dir_items if re.match(build_dir_regex, item)]
        # by default, preserve the 3 most recent build dirs from deletion
        rm_dirs = build_dirs[3:]
        # if current or previous for some reason is not in the 3 most recent,
        # make sure we don't delete it
        for link in [current, previous]:
            if link in rm_dirs:
                rm_dirs.remove(link)

        if rm_dirs:
            for build_dir in rm_dirs:
                sudo('rm -rf %s' % build_dir, user=env.remote_acct)
        else:
            puts('No old build directories to remove')
def create_app_user():
    """Create the 'hadoop' system user with SSH keys for passwordless
    localhost login, and ensure its shell configuration files exist.
    """
    #sudo("sudo locale-gen UTF-8")
    user_exists = run("id -u hadoop", warn_only=True)
    if user_exists.return_code == 1:  # id exits 1 when the user is unknown
        sudo("useradd hadoop --password hadoop -d /home/hadoop -s /bin/bash")
    # Fix: bash_login_content was defined only inside the 'if' below, so the
    # _replace_file_content() call raised NameError whenever ~/.ssh already
    # existed. Define it unconditionally before use.
    bash_login_content = """
    if [ -f ~/.bashrc ]; then
        . ~/.bashrc
    fi
    """
    if not exists("/home/hadoop/.ssh"):
        sudo("mkdir -p /home/hadoop/.ssh")
        sudo("chown -R hadoop /home/hadoop")
    _replace_file_content("/home/hadoop/.bash_login", bash_login_content)
    # NOTE(review): '******' looks like a redacted sudo_user value --
    # confirm the intended account (probably 'hadoop').
    with settings(sudo_user='******'):
        # Generate a passphrase-less keypair and trust localhost once.
        if not exists('/home/hadoop/.ssh/id_rsa'):
            sudo('ssh-keygen -t rsa -P "" -f /home/hadoop/.ssh/id_rsa')
            sudo("cat /home/hadoop/.ssh/id_rsa.pub >> /home/hadoop/.ssh/authorized_keys")
            sudo("chmod 0600 /home/hadoop/.ssh/authorized_keys")
            sudo("ssh-keyscan -H localhost >> /home/hadoop/.ssh/known_hosts")
            sudo("ssh-keyscan -H 0.0.0.0 >> /home/hadoop/.ssh/known_hosts")

        if not exists("/home/hadoop/.bashrc"):
            sudo("touch /home/hadoop/.bashrc")
        if not contains("/home/hadoop/.bashrc", "export HADOOP_HOME=/usr/local/lib/hadoop"):
            append("/home/hadoop/.bashrc", APP_USER_SETTINGS, use_sudo=True)
Example #21
0
def enable(conf, weight, do_restart=True):
    """
    Enable logstash input/output provider

    :param conf: Input or output provider config file
    :param weight: Weight of provider
    :param do_restart: Restart service
    :return: Got enabled?
    """
    enabled = False
    # Accept conf names with or without the .conf extension.
    conf = conf if conf.endswith(".conf") else "{}.conf".format(conf)

    with sudo():
        available_conf = os.path.join(conf_available_path, conf)
        if not files.exists(available_conf):
            warn("Invalid conf: {}".format(conf))
        else:
            with cd(conf_enabled_path):
                # Prefix the link with a zero-padded weight to fix load order.
                weight = str(weight).zfill(2)
                conf = "{}-{}".format(weight, conf)
                if not files.exists(conf):
                    info("Enabling conf: {}", conf)
                    with silent():
                        debian.ln(available_conf, conf)
                        enabled = True
                    if do_restart:
                        restart("server")

    return enabled
def install_hbase():
    '''
    Install HBase 0.98.15 (hadoop2 build) under /usr/local/lib and write a
    distributed-mode hbase-site.xml.

    http://hbase.apache.org/book.html#quickstart
    '''
    # Download and unpack only when not already installed (idempotent).
    if not exists("/usr/local/lib/hbase-0.98.15-hadoop2"):
        with cd('/usr/local/lib'):
            if not exists("hbase-0.98.15-hadoop2-bin.tar.gz"):
                sudo("wget http://www.apache.org/dist/hbase/0.98.15/hbase-0.98.15-hadoop2-bin.tar.gz")
            sudo("tar -xvf hbase-0.98.15-hadoop2-bin.tar.gz")
            sudo("ln -s hbase-0.98.15-hadoop2 hbase")
    with cd("/usr/local/lib/hbase/conf"):
        # HDFS-backed root dir, local zookeeper data dir, distributed mode.
        hbase_site_xml_content= """
        <configuration>
        <property>
          <name>hbase.rootdir</name>
          <value>hdfs://localhost:9000/hbase</value>
        </property>
          <property>
            <name>hbase.zookeeper.property.dataDir</name>
            <value>/home/hadoop/zookeeper</value>
          </property>
          <property>
              <name>hbase.cluster.distributed</name>
              <value>true</value>
          </property>
        </configuration>
        """
        _replace_file_content("hbase-site.xml", hbase_site_xml_content)
    with cd('/usr/local/lib'):
        # Hand ownership to the hadoop service user.
        sudo("chown hadoop -R hbase-0.98.15-hadoop2")
        sudo("chmod -R u+rw hbase-0.98.15-hadoop2")
Example #23
0
def check_war_files():
    with settings(warn_only=True):
        with hide('output','running'):
            for my_war in env.war_list:
                if my_war['wartype'] == 'ACM':
                    for my_env in env.env_list:
                        #print my_war
                        if files.exists(env.jboss_root + "/server/" + my_env + "/deploy/" + my_war['warfile']):
                            print "Found " + env.jboss_root + "/server/" + my_env + "/deploy/" + my_war['warfile']
                            result = run("jar tvf " + env.jboss_root + "/server/" + my_env + "/deploy/" + my_war['warfile'] + " | egrep \"WEB-INF/lib/log4j*.jar WEB-INF/lib/jaxen\-*.jar\"")
                            if result.return_code == 0:
                                print result.stdout
		            elif result.return_code == 1:
                                print 'SUCCESS, no unwanted jars present in the file: ' + env.jboss_root + "/server/" + my_env + "/deploy/" + my_war['warfile']
                if my_war['wartype'] == 'SOA':
                    for my_env in env.env_list:
                        if my_env.find("SOA"):
                            if files.exists(env.jboss_root + "/server/" + my_env + "/deploy/" + my_war['warfile']):
                                print "Found " + env.jboss_root + "/server/" + my_env + "/deploy/" + my_war['warfile']
                                result = run("jar tvf " + env.jboss_root + "/server/" + my_env + "/deploy/" + my_war['warfile'] + " | egrep \"WEB-INF/lib/jaxp-ri*\.jar WEB-INF/lib/jaxp-api*\.jar WEB-INF/lib/xercesImpl*\.jar WEB-INF/lib/log4j*\.jar\"")
                                if result.return_code == 0:
                                    print result.stdout
			        elif result.return_code == 1:
                                    print 'SUCCESS, no unwanted jars present in the file: ' + env.jboss_root + "/server/" + my_env + "/deploy/" + my_war['warfile']
                if my_war['wartype'] == 'CL':
                    for my_env in env.env_list:
                        if my_env.find("CL"):
                            if files.exists(env.jboss_root + "/server/" + my_env + "/deploy/" + my_war['warfile']):
                                print "Found " + env.jboss_root + "/server/" + my_env + "/deploy/" + my_war['warfile']
                                result = run("jar tvf " + env.jboss_root + "/server/" + my_env + "/deploy/" + my_war['warfile'] + " | egrep \"WEB-INF/lib/jaxp-ri*\.jar WEB-INF/lib/jaxp-api*\.jar WEB-INF/lib/xercesImpl*\.jar WEB-INF/lib/log4j-*\.jar\"")
                                if result.return_code == 0:
                                    print result.stdout
			        elif result.return_code == 1:
                                    print 'SUCCESS, no unwanted jars present in the file: ' + env.jboss_root + "/server/" + my_env + "/deploy/" + my_war['warfile']
def setup_paths():
    """Ensure the site root and its standard sub-directories exist remotely."""
    root = env.site_root_path
    if not exists(root):
        run("mkdir -p %s" % root)
    required_dirs = (VIRTUALENV_FOLDER, INITIAL_DATA_DIR, USER_DATA_DIR,
                     SHARED_CONFIG_DIR, SUPERVISORD_DIR, SUPERVISORD_CONF_DIR)
    with cd(root):
        for directory in required_dirs:
            if not exists(directory):
                run("mkdir -p %s" % directory)
Example #25
0
def config_nginx(regen_dhparm=False):
    """Install nginx configuration, enabled sites, and SSL material, then restart.

    Args:
        regen_dhparm: when True, regenerate the 4096-bit DH parameters first
            (this is slow).
    """
    if regen_dhparm:
        with cd('/etc/ssl/certs'):
            sudo('openssl dhparam -out dhparam.pem 4096')

    template_dir = 'templates/'
    # Main nginx config, then the gunicorn reverse-proxy site rendered with
    # jinja against the Fabric env.
    upload_template('nginx.conf',
                    '/etc/nginx/nginx.conf',
                    template_dir=template_dir,
                    use_sudo=True)
    if not exists('/etc/nginx/sites-enabled/'):
        sudo('mkdir /etc/nginx/sites-enabled/')
    upload_template('docker_gunicorn.conf',
                    '/etc/nginx/sites-enabled/docker_gunicorn.conf',
                    template_dir=template_dir,
                    use_jinja=True,
                    context=env,
                    backup=False,
                    use_sudo=True)

    # NOTE(review): the cert/key sources below are absolute paths on one
    # developer's machine -- consider parameterizing; confirm before changing.
    if not exists('/etc/nginx/ssl/'):
        sudo('mkdir /etc/nginx/ssl/')
    upload_template(
        '/Users/csxds/Documents/iodicus-certs/2nd_iodicus_signing_request/prepare_for_deployment/iodicus_net.bundle.crt',
        '/etc/nginx/ssl/%(ssl_cert_bundle_target_filename)s' % env,
        use_sudo=True)
    upload_template(
        '/Users/csxds/Documents/iodicus-certs/2nd_iodicus_signing_request/prepare_for_deployment/iodicus_net.key',
        '/etc/nginx/ssl/%(ssl_cert_key_target_filename)s' % env,
        use_sudo=True)

    sudo('service nginx restart')
Example #26
0
def _setup_suite():
    """Provision a fresh host: base packages, nginx site config, rollback dir.

    Steps: clear stale supervisor sockets, install base packages, back up the
    stock nginx default site (once), install our site config, append the
    reverse-proxy server block, and create the ~/.previous rollback directory.
    """
    sudo('apt-get update')
    # Stale sockets left by a crashed supervisord prevent it from restarting.
    if files.exists('/tmp/supervisor.sock'):
        sudo('unlink /tmp/supervisor.sock')
    if files.exists('/var/run/supervisor.sock'):
        sudo('unlink /var/run/supervisor.sock')
    sudo('apt-get install -y '
         'nginx git python-pip postgresql '
         'postgresql-contrib libpq-dev python-dev')
    # BUG FIX: the original checked the *relative* path
    # 'etc/nginx/sites-available/original-default', which never exists, so the
    # backup cp ran on every invocation and clobbered the pristine backup with
    # the already-replaced default. Use the absolute path so the backup is
    # made exactly once.
    if not files.exists(
        '/etc/nginx/sites-available/original-default',
        use_sudo=True
    ):
        sudo(
            'cp /etc/nginx/sites-available/default '
            '/etc/nginx/sites-available/original-default'
        )

    put(local_path="~/projects/t-buddies/simple_nginx_conf",
        remote_path="/etc/nginx/sites-available/default",
        use_sudo=True)

    # Append a reverse-proxy server block pointing at the app on port 8080.
    append('/etc/nginx/sites-available/default',
           "server {listen 80;server_name " +
           env.active_instance.public_dns_name + "/;"
           "access_log /var/log/nginx/test.log;location /"
           "{proxy_pass http://127.0.0.1:8080;proxy_set_header Host $host;"
           "proxy_set_header X-Real-IP $remote_addr;"
           "proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;"
           "}}")

    # Holds timestamped copies of previous deployments for manual rollback.
    if not files.exists("~/.previous/"):
        run('mkdir ~/.previous')

    sudo('service nginx start')
Example #27
0
def _deploy():
    """Deploy the project: archive the old checkout, clone fresh, install, reboot.

    The previous checkout is moved to ~/.previous/<timestamp> so a bad deploy
    can be rolled back by hand.
    """
    sudo('service nginx stop')

    from datetime import datetime
    now = datetime.now()
    # BUG FIX: '%e' is a platform-dependent strftime directive that pads the
    # day of month with a space, producing directory names with embedded
    # spaces for days 1-9 (and it is unsupported on some platforms). '%d' is
    # the portable zero-padded equivalent.
    d = now.strftime("%Y_%m_%d__%H_%M_%S")

    if files.exists('~/{p}'.format(p=projname)):
        run('mv ~/{p} ~/.previous/{d}'.format(p=projname, d=d))

    run(
        'git clone -b master'
        ' http://github.com/{gp}'.format(gp=git_path,)
    )

    # NOTE(review): this checks/removes '/{p}/supervisord.conf' (absolute, at
    # the filesystem root) while the mv below reads '{p}/supervisord.conf'
    # (relative to the cwd) -- the intended path is unclear from here; confirm
    # with the repo layout before changing.
    if files.exists('/{p}/supervisord.conf'.format(p=projname),
                    use_sudo=True,):
        sudo('rm -f /{p}/supervisord.conf'.format(p=projname))
    sudo('mv {p}/supervisord.conf /etc/supervisord.conf'.format(p=projname))

    sudo('pip install -r ~/{p}/requirements.txt'.format(p=projname))
    sudo('cd taste-buddies ; python setup.py develop')
    sudo('reboot')
    # Give the box time to come back up before poking services over ssh.
    time.sleep(90)
    sudo('service nginx start')
    sudo('supervisord')
Example #28
0
def _configure_nfs(env):
    """Configure NFS exports and compatibility symlinks for CloudMan."""
    nfs_dir = "/export/data"
    cloudman_dir = "/mnt/galaxyData/export"
    if not exists(nfs_dir):
        # On a rerun the path may be a dangling symlink, which exists() does
        # not report as present -- remove any stale entry before relinking.
        with settings(warn_only=True):
            sudo('rm -rf {0}'.format(nfs_dir))
        parent = os.path.dirname(nfs_dir)
        sudo("mkdir -p %s" % parent)
        sudo("ln -s %s %s" % (cloudman_dir, nfs_dir))
    sudo("chown -R %s %s" % (env.user, os.path.dirname(nfs_dir)))
    # Paths published in /etc/exports, to be used as NFS mount points.
    exports = [
        '/opt/sge           *(rw,sync,no_root_squash,no_subtree_check)',
        '/mnt/galaxyData    *(rw,sync,no_root_squash,subtree_check,no_wdelay)',
        '/mnt/galaxyIndices *(rw,sync,no_root_squash,no_subtree_check)',
        '/mnt/galaxyTools   *(rw,sync,no_root_squash,no_subtree_check)',
        '%s       *(rw,sync,no_root_squash,no_subtree_check)' % nfs_dir,
        '%s/openmpi         *(rw,sync,no_root_squash,no_subtree_check)' % env.install_dir,
    ]
    append('/etc/exports', exports, use_sudo=True)
    # CloudMan historically expects everything under /opt/galaxy; since stow
    # is used here, the parent of install_dir is the equivalent location, so
    # link it for backward compatibility.
    old_dir = '/opt/galaxy'
    new_dir = os.path.dirname(env.install_dir)
    if exists(new_dir) and not exists(old_dir):
        sudo('ln -s {0} {1}'.format(new_dir, old_dir))
    env.logger.debug("Done configuring CloudMan NFS")
Example #29
0
def build_deps():
    """Build and install native dependencies (folly, wangle, fbthrift,
    rocksdb, MITIE), skipping any whose headers already sit under
    /usr/local/include.
    """
    build_vm_prereqs()
    pull_fb_libs()

    # folly
    if not files.exists('/usr/local/include/folly'):
        with cd(env.build + '/folly/folly'):
            run('git checkout v0.57.0')
            _autotools_build()

    # wangle
    if not files.exists('/usr/local/include/wangle'):
        with cd(env.build + '/wangle/wangle'):
            run('git checkout v0.13.0')
            _cmake_build()

    # fbthrift -- also installs the sasl stubs static library
    if not files.exists('/usr/local/include/thrift'):
        with cd(env.build + '/fbthrift/thrift'):
            run('git checkout v0.31.0')
            _autotools_build()
            sudo('cp lib/cpp2/libsaslstubs.a /usr/local/lib')

    # rocksdb -- shared and static libs; headers are copied by hand
    if not files.exists('/usr/local/include/rocksdb'):
        with cd(env.build + '/rocksdb'):
            run('git checkout v3.13.1')
            run('make librocksdb.so')
            run('make static_lib')
            sudo('cp -r include/rocksdb /usr/local/include')
            sudo('cp librocks* /usr/local/lib')

    # MITIE
    if not files.exists('/usr/local/include/mitie'):
        with cd(env.build + '/MITIE'):
            run('git checkout v0.4')
            run('make mitielib -j4')
            sudo('cp mitielib/libmitie.* /usr/local/lib')
            sudo('cp -r mitielib/include/* /usr/local/include')
def _download_hadoop():
    """Fetch and unpack Hadoop 2.6.0 under /usr/local/lib, idempotently."""
    if exists("/usr/local/lib/hadoop-2.6.0"):
        return
    with cd('/usr/local/lib'):
        # Reuse a previously downloaded tarball if one is lying around.
        if not exists("hadoop-2.6.0.tar.gz"):
            sudo("wget http://apache.claz.org/hadoop/common/hadoop-2.6.0/hadoop-2.6.0.tar.gz")
        sudo("tar -xvf hadoop-2.6.0.tar.gz")
        sudo("ln -s hadoop-2.6.0 hadoop")
Example #31
0
def put_task():
    """Upload the demo tarball into the remote demo directory."""
    remote_dir = '/home/python/Desktop/demo'
    if not exists(remote_dir, use_sudo=True):
        sudo('mkdir -p %s' % remote_dir)
    with cd(remote_dir):
        put('/home/python/Desktop/te.tar.gz',
            '/home/python/Desktop/demo/de.tar.gz',
            use_sudo=True)
def create():
    """
    Creates the environment needed to host the project.
    The environment consists of: system locales, virtualenv, database, project
    files, SSL certificate, and project-specific Python requirements.
    """
    # Generate project locale
    locale = env.locale.replace("UTF-8", "utf8")
    with hide("stdout"):
        if locale not in run("locale -a"):
            sudo("locale-gen %s" % env.locale)
            sudo("update-locale %s" % env.locale)
            sudo("service postgresql restart")
            run("exit")

    # Create project path
    run("mkdir -p %s" % env.proj_path)

    # Set up virtual env
    run("mkdir -p %s" % env.venv_home)
    with cd(env.venv_home):
        if exists(env.proj_name):
            if confirm("Virtualenv already exists in host server: %s"
                       "\nWould you like to replace it?" % env.proj_name):
                run("rm -rf %s" % env.proj_name)
            else:
                abort()
        run("virtualenv %s" % env.proj_name)

    # Upload project files
    if env.deploy_tool in env.vcs_tools:
        vcs_upload()
    else:
        rsync_upload()

    # Create DB and DB user
    pw = db_pass()
    # BUG FIX: the original used pw.replace("'", "\'"), but "\'" is just "'"
    # so the replacement was a no-op and a single quote in the password broke
    # the SQL statement. Standard SQL escapes a quote by doubling it.
    escaped_pw = pw.replace("'", "''")
    user_sql_args = (env.proj_name, escaped_pw)
    user_sql = "CREATE USER %s WITH ENCRYPTED PASSWORD '%s';" % user_sql_args
    psql(user_sql, show=False)
    # Echo the statement with the password masked out.
    shadowed = "*" * len(pw)
    print_command(user_sql.replace("'%s'" % escaped_pw, "'%s'" % shadowed))
    psql("CREATE DATABASE %s WITH OWNER %s ENCODING = 'UTF8' "
         "LC_CTYPE = '%s' LC_COLLATE = '%s' TEMPLATE template0;" %
         (env.proj_name, env.proj_name, env.locale, env.locale))

    # Set up SSL certificate
    if not env.ssl_disabled:
        conf_path = "/etc/nginx/conf"
        if not exists(conf_path):
            sudo("mkdir %s" % conf_path)
        with cd(conf_path):
            crt_file = env.proj_name + ".crt"
            key_file = env.proj_name + ".key"
            if not exists(crt_file) and not exists(key_file):
                try:
                    crt_local, = glob(join("deploy", "*.crt"))
                    key_local, = glob(join("deploy", "*.key"))
                except ValueError:
                    # No local cert/key pair shipped with the project:
                    # generate a self-signed certificate instead.
                    parts = (crt_file, key_file, env.domains[0])
                    sudo("openssl req -new -x509 -nodes -out %s -keyout %s "
                         "-subj '/CN=%s' -days 3650" % parts)
                else:
                    upload_template(crt_local, crt_file, use_sudo=True)
                    upload_template(key_local, key_file, use_sudo=True)

    # Install project-specific requirements
    upload_template_and_reload("settings")
    with project():
        if env.reqs_path:
            pip("-r %s/%s" % (env.proj_path, env.reqs_path))
        pip("gunicorn setproctitle psycopg2 "
            "django-compressor python-memcached")
        # Bootstrap the DB
        manage("createdb --noinput --nodata")
        python("from django.conf import settings;"
               "from django.contrib.sites.models import Site;"
               "Site.objects.filter(id=settings.SITE_ID).update(domain='%s');"
               % env.domains[0])
        for domain in env.domains:
            python("from django.contrib.sites.models import Site;"
                   "Site.objects.get_or_create(domain='%s');" % domain)
        if env.admin_pass:
            pw = env.admin_pass
            user_py = ("from django.contrib.auth import get_user_model;"
                       "User = get_user_model();"
                       "u, _ = User.objects.get_or_create(username='******');"
                       "u.is_staff = u.is_superuser = True;"
                       "u.set_password('%s');"
                       "u.save();" % pw)
            python(user_py, show=False)
            # Echo the bootstrap snippet with the admin password masked out.
            shadowed = "*" * len(pw)
            print_command(user_py.replace("'%s'" % pw, "'%s'" % shadowed))

    return True
Example #33
0
def _get_latest_source():
    """Clone the repo on first deploy; afterwards just refresh its objects."""
    if not exists('.git'):
        run(f'git clone {REPO_URL} .')
    else:
        run('git fetch')
Example #34
0
def installation():
    """Install nginx from the stable PPA and drop the stock default site."""
    core.package_repository("ppa:nginx/stable")
    core.package("nginx")
    default_site = "/etc/nginx/sites-enabled/default"
    if files.exists(default_site):
        sudo("rm " + default_site)
Example #35
0
def make_directory():
    """Create env.path owned by www-data with group write, if absent."""
    if exists(env.path):
        return
    sudo("mkdir -p %s" % env.path)
    sudo("chown -R www-data:www-data %s" % env.path)
    sudo("chmod  g+w %s" % env.path)
Example #36
0
def _update_virtualenv():
    """Create ./virtualenv via ``python3 -m venv`` if missing, then install
    the pinned requirements into it."""
    if not exists('virtualenv/bin/pip'):
        # FIX: the original used an f-string with no placeholders; a plain
        # string literal is equivalent and clearer.
        run('python3 -m venv virtualenv')
    run('./virtualenv/bin/pip install -r requirements.txt')
Example #37
0
def prepare_remote_dirs():
    """Ensure the remote doc directory exists and is owned by the deploy user."""
    _set_user_dir()
    if not exists(doc_dir):
        sudo('mkdir -p ' + doc_dir)
    # chown unconditionally so ownership is corrected even when the
    # directory already existed.
    sudo('chown %s %s' % (user, doc_dir))
Example #38
0
def change_pkg_repos():
    """Swap in the prepared APT sources list once per host.

    A marker file in /tmp guards against repeating the work on re-runs.
    """
    marker = '/tmp/pkg_china_ensured'
    if exists(marker):
        return
    put('/root/files/sources.list', '/tmp/sources.list')
    sudo('mv /tmp/sources.list /etc/apt/sources.list')
    sudo('apt-get update')
    sudo('touch /tmp/pkg_china_ensured')
Example #39
0
def shuke_is_running(pidfile):
    """Report whether the process recorded in *pidfile* is alive.

    Probes the stored pid with ``kill -0``; a missing pidfile means the
    service is treated as not running.
    """
    if not files.exists(pidfile, use_sudo=True):
        return False
    probe = sudo("kill -0 `cat %s`" % pidfile)
    return probe.return_code == 0
Example #40
0
def _update_virtualenv(source_folder):
    """Create the sibling virtualenv if needed, then sync requirements."""
    venv_dir = source_folder + '/../virtualenv'
    if not exists(venv_dir + '/bin/pip'):
        run('virtualenv --python=python3 ' + venv_dir)
    run('{0}/bin/pip install -r {1}/requirements.txt'.format(
        venv_dir, source_folder))
Example #41
0
def deploy():
    """Deploy the project to the remote host for the selected environment.

    Creates the code/log/run directories and a python3 virtualenv if missing,
    ships the current git branch as a tarball, installs requirements inside
    the virtualenv, runs collectstatic/compilemessages/migrations plus the
    project's management commands, installs the gunicorn entry script and
    supervisor config, then restarts the app through supervisor.
    Requires ``env.environment`` to be set (via the ``environment`` task).
    """
    require('environment')

    yell(magenta("Create a directory on a remote server, if it doesn't already exists"))
    if not exists(env.code_root):
        sudo('mkdir -p %(code_root)s' % env)

    if not exists(env.logs_root):
        sudo('mkdir -p %(logs_root)s' % env)

    if not exists(env.run_root):
        sudo('mkdir -p %(run_root)s' % env)

    yell(magenta("Create a virtualenv, if it doesn't already exists..."))
    if not exists(env.virtualenv_root):
        with cd(env.root):
            sudo('mkdir env')
            sudo('virtualenv -p python3 env')

    # Package the selected branch locally and push it to the code root.
    local('git archive --format=tar %(branch)s | gzip > release.tar.gz' % env)
    put('release.tar.gz', env.code_root, use_sudo=True)

    with cd(env.code_root):
        sudo('tar zxf release.tar.gz', pty=True)
        # local() runs on the workstation; cd() only affects remote commands.
        local('rm release.tar.gz')

        yell(magenta("Activate the environment and install requirements..."))
        # run('source %(remote_env_path)s/bin/activate' % env)
        # Activation must happen in the same shell invocation as pip.
        sudo('source %(virtualenv_root)s/bin/activate && pip install --upgrade -r requirements.txt' % env)

        # Django settings/secrets are injected via the process environment
        # rather than written to disk.
        with shell_env(DJANGO_SETTINGS_MODULE='config.settings.production',
                       DATABASE_URL='postgres://%(db_user)s:%(db_pass)s@localhost:5432/%(db_name)s' % env,
                       DJANGO_SECRET_KEY=env.django_secret_key,
                       DJANGO_ADMIN_URL='admin',
                       PYTHONPATH='.'):
            yell(magenta("Collect all the static files..."))
            sudo('%(virtualenv_root)s/bin/python manage.py collectstatic --noinput' % env)

            yell(magenta("Compiling translations..."))
            sudo('%(virtualenv_root)s/bin/python manage.py compilemessages' % env)

            yell(magenta("Give deploy access to logs and run directories..."))
            sudo('chown -R deploy:deploy %(logs_root)s' % env)
            sudo('chown -R deploy:deploy %(run_root)s' % env)

            yell(magenta("Migrate and Update the database..."))
            run('%(virtualenv_root)s/bin/python manage.py migrate --noinput' % env)
            run('%(virtualenv_root)s/bin/python manage.py pycon_start' % env)
            run('%(virtualenv_root)s/bin/python manage.py create_review_permissions' % env)

        yell(magenta("gunicorn entry script..."))
        put(get_and_render_template('gunicorn_run.sh', env),
            os.path.join(env.run_root, 'gunicorn_run.sh'), use_sudo=True)
        sudo('chmod u+x %(run_root)s/gunicorn_run.sh' % env)

        yell(magenta("put supervisor conf..."))
        put(get_and_render_template('pycon2017.conf', env),
            '/etc/supervisor/conf.d/pycon2017_%(environment)s.conf' % env,
            use_sudo=True)

        yell(magenta("restart supervisor..."))
        sudo('supervisorctl reread && supervisorctl update')
        sudo('supervisorctl restart pycon2017_%(environment)s' % env)

    # Celebratory ASCII art on success.
    yell(magenta("Draw a ship..."))
    yell(  white("               |    |    |               "))
    yell(  white("              )_)  )_)  )_)              "))
    yell(  white("             )___))___))___)\            "))
    yell(  white("            )____)____)_____)\\          "))
    yell(magenta("          _____|____|____|____\\\__      "))
    yell(magenta(" ---------\                   /--------- "))
    yell(   blue("   ^^^^^ ^^^^^^^^^^^^^^^^^^^^^           "))
    yell(   blue("     ^^^^      ^^^^     ^^^    ^^        "))
    yell(   blue("          ^^^^      ^^^                  "))
def put_config_files(*args):
    """
    Call with the names of the enviroments where you want to put the config files, for example:
    fab -H user@host put_config_files:production,staging,development

    For each named environment this copies the template config files, rewrites
    them in place with sed (gunicorn runner script, nginx site, upstart job),
    installs them into their final locations, and enables the nginx site and
    init.d symlinks. Finally the stock nginx default site and the /tmp staging
    copy are removed.
    """
    # fix for nginx: Starting nginx: nginx: [emerg] could not build the types_hash, you should increase either types_hash_max_size: 1024 or types_hash_bucket_size: 32
    sed('/etc/nginx/nginx.conf', '# types_hash_max_size.*', 'types_hash_max_size 2048;', use_sudo=True)
    # fix for nginx: [emerg] could not build the server_names_hash, you should increase server_names_hash_bucket_size: 32
    sed('/etc/nginx/nginx.conf', '# server_names_hash_bucket_size.*', 'server_names_hash_bucket_size 64;', use_sudo=True)
    # Stage the template tree on the remote host; removed at the end.
    put('deploy', '/tmp/')
    projects = build_projects_vars()

    for key in args:
        """
        Copy basic configuration files, this has to be done first for all environments to avoid changing the original contents
        required by sed on the next loop.
        """
        with cd('/tmp/deploy/'):
            print "COPYING CONFIGURATION FILES FOR  %s..." % key
            if key != 'production':
                run('cp run-project %(run-project)s' % projects[key])
                run('cp etc/nginx/sites-available/django-project etc/nginx/sites-available/%(django-project)s' % projects[key])
                run('cp etc/init/django-project.conf etc/init/%(django-project)s.conf' % projects[key])

    for key in args:
        """
        Loop over the original configuration files, make changes with sed and then copy to final locations.
        """
        with cd('/tmp/deploy/'):
            print "SETTING UP CONFIGURATION FILES FOR %s..." % key
            # Gunicorn runner script: log file, workers, bind address, user.
            sed(projects[key]['run-project'], '^LOGFILE.*', 'LOGFILE=%(logdir)s/%(log_gunicorn)s' % projects[key])
            sed(projects[key]['run-project'], '^LOGLEVEL.*', 'LOGLEVEL=%(gunicorn_loglevel)s' % projects[key])
            sed(projects[key]['run-project'], '^NUM_WORKERS.*', 'NUM_WORKERS=%(gunicorn_num_workers)s' % projects[key])
            sed(projects[key]['run-project'], '^BIND_ADDRESS.*', 'BIND_ADDRESS=%(gunicorn_bind_ip)s:%(gunicorn_bind_port)s' % projects[key])
            sed(projects[key]['run-project'], '^USER.*', 'USER=%(user)s' % projects[key])
            sed(projects[key]['run-project'], '^GROUP.*', 'GROUP=%(user)s' % projects[key])
            sed(projects[key]['run-project'], '^PROJECTDIR.*', 'PROJECTDIR=%(dir)s' % projects[key])
            sed(projects[key]['run-project'], '^PROJECTENV.*', 'PROJECTENV=/home/%(user)s/.virtualenvs/%(name)s' % projects[key])

            # TODO figure out how to handle redirection from non-www to www versions passing the port, if needed.
            # nginx site: listen address, upstream proxy, domain, docroot, logs.
            sed('etc/nginx/sites-available/%(django-project)s' % projects[key], 'listen.*', 'listen %(ip)s:%(port)s;' % projects[key])
            sed('etc/nginx/sites-available/%(django-project)s' % projects[key], 'proxy_pass http.*', 'proxy_pass http://%(gunicorn_bind_ip)s:%(gunicorn_bind_port)s/;' % projects[key])
            sed('etc/nginx/sites-available/%(django-project)s' % projects[key], 'example\.com', '%(domain)s' % projects[key])
            sed('etc/nginx/sites-available/%(django-project)s' % projects[key], 'root.*', 'root %(dir)s;' % projects[key])
            sed('etc/nginx/sites-available/%(django-project)s' % projects[key], 'access_log.*', 'access_log %(logdir)s/%(log_nginx_access)s;' % projects[key])
            sed('etc/nginx/sites-available/%(django-project)s' % projects[key], 'error_log.*', 'error_log %(logdir)s/%(log_nginx_error)s;' % projects[key])

            # Upstart job: description and exec line.
            sed('etc/init/%(django-project)s.conf' % projects[key], '^description.*', 'description "%(descriptive_name)s"' % projects[key])
            sed('etc/init/%(django-project)s.conf' % projects[key], '^exec.*', 'exec /home/%(user)s/%(script_name)s' % projects[key])

            # Install the rewritten files to their final locations.
            fix_venv_permission()
            run('cp %(run-project)s /home/%(user)s/%(script_name)s' % projects[key])
            run('chmod u+x /home/%(user)s/%(script_name)s' % projects[key])
            sudo('cp etc/nginx/sites-available/%(django-project)s /etc/nginx/sites-available/%(name)s' % projects[key])
            sudo('cp etc/init/%(django-project)s.conf /etc/init/%(name)s.conf' % projects[key])

            # NOTE(review): the two lines below are tab-indented in the
            # original file; this is legal in Python 2 but a TabError under
            # Python 3 -- worth normalizing when this file is next touched.
            if not exists('/etc/nginx/sites-enabled/%(name)s' % projects[key]):
            	sudo('ln -s /etc/nginx/sites-available/%(name)s /etc/nginx/sites-enabled/%(name)s' % projects[key])

            if not exists('/etc/init.d/%(name)s' % projects[key]):
            	sudo('ln -s /lib/init/upstart-job /etc/init.d/%(name)s' % projects[key])

    # Best-effort cleanup: the default site may already be gone.
    with settings(hide('warnings'), warn_only=True):
        fix_venv_permission()
        sudo('rm /etc/nginx/sites-enabled/default')
        run('rm -rf /tmp/deploy')
Example #43
0
def ensure_src_dir():
    """Create the code directory and clone the repo into it on first run."""
    code_dir = env.code_dir
    if not exists(code_dir):
        run("mkdir -p %s" % code_dir)
    git_marker = posixpath.join(code_dir, '.git')
    with cd(code_dir):
        if not exists(git_marker):
            run('git clone {0} .'.format(env.code_repo))
Example #44
0
def integ_test(gateway_host=None,
               test_host=None,
               trf_host=None,
               gateway_vm="cwag",
               gateway_ansible_file="cwag_dev.yml",
               transfer_images=False,
               destroy_vm=False,
               no_build=False,
               tests_to_run="all",
               skip_unit_tests=False,
               test_re=None,
               test_result_xml=None,
               run_tests=True,
               count="1"):
    """
    Run the integration tests. This defaults to running on local vagrant
    machines, but can also be pointed to an arbitrary host (e.g. amazon) by
    passing "address:port" as arguments

    gateway_host: The ssh address string of the machine to run the gateway
        services on. Formatted as "host:port". If not specified, defaults to
        the `cwag` vagrant box.

    test_host: The ssh address string of the machine to run the tests on
        on. Formatted as "host:port". If not specified, defaults to the
        `cwag_test` vagrant box.

    trf_host: The ssh address string of the machine to run the tests on
        on. Formatted as "host:port". If not specified, defaults to the
        `magma_trfserver` vagrant box.

    no_build: When set to true, this script will NOT rebuild all docker images.

    transfer_images: When set to true, transfer local cwf_* docker images to
        the gateway host instead of building there.

    tests_to_run: Which subset of tests to run; must be a valid SubTests value.

    run_tests: When the string "False", set up everything but skip running the
        tests (exits after setup).
    """
    try:
        tests_to_run = SubTests(tests_to_run)
    except ValueError:
        print("{} is not a valid value. We support {}".format(
            tests_to_run, SubTests.list()))
        return

    # Setup the gateway: use the provided gateway if given, else default to the
    # vagrant machine
    _switch_to_vm(gateway_host, gateway_vm, gateway_ansible_file, destroy_vm)

    # We will direct coredumps to be placed in this directory
    # Clean up before every run
    if files.exists("/var/opt/magma/cores/"):
        run("sudo rm /var/opt/magma/cores/*", warn_only=True)
    else:
        run("sudo mkdir -p /var/opt/magma/cores", warn_only=True)

    if not skip_unit_tests:
        execute(_run_unit_tests)

    execute(_set_cwag_configs, "gateway.mconfig")
    execute(_add_networkhost_docker)
    # Record the gateway bridge MAC for the static-arp setup later on.
    cwag_host_to_mac = execute(_get_br_mac, CWAG_BR_NAME)
    host = env.hosts[0]
    cwag_br_mac = cwag_host_to_mac[host]

    # Transfer built images from local machine to CWAG host
    if gateway_host or transfer_images:
        execute(_transfer_docker_images)
    else:
        execute(_stop_gateway)
        if not no_build:
            execute(_build_gateway)

    execute(_run_gateway)

    # Setup the trfserver: use the provided trfserver if given, else default to
    # the vagrant machine
    with lcd(LTE_AGW_ROOT):
        _switch_to_vm(gateway_host, "magma_trfserver", "magma_trfserver.yml",
                      destroy_vm)

    execute(_start_trfserver)

    # Run the tests: use the provided test machine if given, else default to
    # the vagrant machine
    _switch_to_vm(gateway_host, "cwag_test", "cwag_test.yml", destroy_vm)

    cwag_test_host_to_mac = execute(_get_br_mac, CWAG_TEST_BR_NAME)
    host = env.hosts[0]
    cwag_test_br_mac = cwag_test_host_to_mac[host]
    execute(_set_cwag_test_configs)
    execute(_start_ipfix_controller)

    # Get back to the gateway vm to setup static arp
    _switch_to_vm_no_destroy(gateway_host, gateway_vm, gateway_ansible_file)
    execute(_set_cwag_networking, cwag_test_br_mac)

    # check if docker services are alive except for OCS2 and PCRF2
    ignore_list = ["ocs2", "pcrf2"]
    execute(_check_docker_services, ignore_list)

    _switch_to_vm_no_destroy(gateway_host, "cwag_test", "cwag_test.yml")
    execute(_start_ue_simulator)
    execute(_set_cwag_test_networking, cwag_br_mac)

    # NOTE: run_tests arrives as a string from the fab CLI, hence the
    # comparison against the literal "False".
    if run_tests == "False":
        execute(_add_docker_host_remote_network_envvar)
        print("run_test was set to false. Test will not be run\n"
              "You can now run the tests manually from cwag_test")
        sys.exit(0)

    # HSSLESS tests are to be executed from gateway_host VM
    if tests_to_run.value == SubTests.HSSLESS.value:
        _switch_to_vm_no_destroy(gateway_host, gateway_vm,
                                 gateway_ansible_file)
        execute(_run_integ_tests, gateway_host, trf_host, tests_to_run,
                test_re, count, test_result_xml)
    else:
        execute(_run_integ_tests, test_host, trf_host, tests_to_run, test_re,
                count, test_result_xml)

    # If we got here means everything work well!!
    if not test_host and not trf_host:
        # Clean up only for now when running locally
        execute(_clean_up)
    print('Integration Test Passed for "{}"!'.format(tests_to_run.value))
    sys.exit(0)
Example #45
0
def _check_secret_key(source_folder, python):
    """Generate passwords.py with the given interpreter if it is missing."""
    settings_folder = '{0}/{1}'.format(source_folder, APP_NAME)
    if not exists(settings_folder + '/passwords.py'):
        run('%s %s/generate_passwords.py %s/passwords.py'
            % (python, settings_folder, settings_folder))
Example #46
0
def _update_virtualenv(source_folder):
    """Create the sibling venv via ``python3 -m venv`` if needed, then sync
    requirements into it."""
    venv_dir = source_folder + '/../virtualenv'
    if not exists(venv_dir + '/bin/pip'):
        run(f'python3 -m venv {venv_dir}')
    run(f'{venv_dir}/bin/pip install -r {source_folder}/requirements.txt')
Example #47
0
def is_running():
    """Return True when the nginx pidfile is present (i.e. nginx appears up)."""
    return bool(files.exists('/var/run/nginx.pid', verbose=True))
Example #48
0
def setup_server4(hostname=None,
                  domain=None,
                  pc="1",
                  forge_modules=[
                      "puppetlabs/stdlib", "puppetlabs/concat",
                      "puppetlabs/firewall", "puppetlabs/apt"
                  ]):
    """Setup Puppet 4 server

    hostname/domain: override autodetection from env.host.
    pc: Puppet Collection release to install (passed through to
        install_puppetlabs_release_package).
    forge_modules: Puppet Forge modules to install.

    NOTE(review): the mutable default for forge_modules is shared across
    calls; safe here only because the list is never mutated.
    """
    import package, util, git, service

    # Local files to copy over
    basedir = "/etc/puppetlabs"
    local_master_conf = "files/puppet-master.conf"
    remote_master_conf = basedir + "/puppet/puppet.conf"
    local_hiera_yaml = "files/hiera.yaml"
    remote_hiera_yaml = basedir + "/code/hiera.yaml"
    local_fileserver_conf = "files/fileserver.conf"
    remote_fileserver_conf = basedir + "/puppet/fileserver.conf"
    local_environments = "files/environments"
    remote_codedir = basedir + "/code"
    local_gitignore = "files/gitignore"
    remote_gitignore = basedir + "/.gitignore"
    modules_dir = basedir + "/code/environments/production/modules"

    # Verify that all the local files are in place
    try:
        open(local_master_conf)
        open(local_hiera_yaml)
    except IOError:
        print "ERROR: some local config files were missing!"
        sys.exit(1)

    # Autodetect hostname and domain from env.host, if they're not overridden
    # with method parameters
    if not hostname:
        hostname = util.get_hostname()
    if not domain:
        domain = util.get_domain()

    # Ensure that clock is correct before doing anything else, like creating SSL
    # certificates.
    util.set_clock()

    # Start the install
    install_puppetlabs_release_package(pc)
    package.install("puppetserver")
    util.put_and_chown(local_master_conf, remote_master_conf)
    util.put_and_chown(local_hiera_yaml, remote_hiera_yaml)
    util.put_and_chown(local_fileserver_conf, remote_fileserver_conf)
    util.put_and_chown(local_gitignore, remote_gitignore)
    util.add_to_path("/opt/puppetlabs/bin")
    util.set_hostname(hostname + "." + domain)
    # "facter fqdn" return a silly name on EC2 without this
    util.add_host_entry("127.0.1.1", hostname, domain)

    # Copy over template environments
    util.put_and_chown(local_environments, remote_codedir)

    # Add modules from Puppet Forge. These should in my experience be limited to
    # those which provide new types and providers. In particular puppetlabs'
    # modules which control some daemon (puppetdb, postgresql, mysql) are
    # extremely complex, very prone to breakage and nasty to debug.
    for module in forge_modules:
        add_forge_module(module)

    # Git setup
    git.install()
    git.init(basedir)
    if not exists(modules_dir):
        sudo("mkdir " + modules_dir)
    git.init(modules_dir)
    git.add_submodules(basedir=modules_dir)
    git.add_all(basedir)
    git.commit(basedir, "Initial commit")

    # Link hieradata and manifests from production to testing. This keeps the
    # testing environment identical to the production environment. The modules
    # directory in testing is separate and may (or may not) contain modules that
    # override or complement those in production.
    util.symlink(remote_codedir + "/environments/production/hieradata",
                 remote_codedir + "/environments/testing/hieradata")
    util.symlink(remote_codedir + "/environments/production/manifests",
                 remote_codedir + "/environments/testing/manifests")

    # Start puppetserver to generate the CA and server certificates/keys
    service.start("puppetserver")
    run_agent(noop="False")
Example #49
0
def integ_test(
    gateway_host=None,
    test_host=None,
    trf_host=None,
    gateway_vm="cwag",
    gateway_ansible_file="cwag_dev.yml",
    transfer_images=False,
    skip_docker_load=False,
    tar_path="/tmp/cwf-images",
    destroy_vm=False,
    no_build=False,
    tests_to_run="all",
    skip_unit_tests=False,
    test_re=None,
    test_result_xml=None,
    run_tests=True,
    count="1",
    provision_vm=True,
    rerun_fails="1",
):
    """
    Run the integration tests. This defaults to running on local vagrant
    machines, but can also be pointed to an arbitrary host (e.g. amazon) by
    passing "address:port" as arguments

    gateway_host: The ssh address string of the machine to run the gateway
        services on. Formatted as "host:port". If not specified, defaults to
        the `cwag` vagrant box

    test_host: The ssh address string of the machine to run the tests on.
        Formatted as "host:port". If not specified, defaults to the
        `cwag_test` vagrant box

    trf_host: The ssh address string of the machine to run the tests on.
        Formatted as "host:port". If not specified, defaults to the
        `magma_trfserver` vagrant box

    destroy_vm: When set to true, all VMs will be destroyed before running the tests

    provision_vm: When set to true, all VMs will be provisioned before running the tests

    no_build: When set to true, this script will not rebuild all docker images
        in the CWAG VM

    transfer_images: When set to true, the script will transfer all cwf_* docker
        images from the host machine to the CWAG VM to use in the test

    skip_docker_load: When set to true, /tmp/cwf_* will be copied into the CWAG VM
        instead of loading the docker images then copying. This option only is
        valid if transfer_images is set.

    tar_path: The location where the tarred docker images will be copied from.
        Only valid if transfer_images is set.

    skip_unit_tests: When set to true, only integration tests will be run

    run_tests: When set to false, no tests will be run

    test_re: When set to a value, integrations tests that match the expression will be run.
        (Ex: test_re=TestAuth will run all tests that start with TestAuth)

    count: When set to a number, the integrations tests will be run that many times

    test_result_xml: When set to a path, a JUnit style test summary in XML will be produced at the path

    rerun_fails: Number of times to re-run a test on failure
    """
    # Validate the requested test subset up front and bail out with the list
    # of supported values rather than failing deep into the run.
    try:
        tests_to_run = SubTests(tests_to_run)
    except ValueError:
        print(
            "{} is not a valid value. We support {}".format(
                tests_to_run,
                SubTests.list(),
            ), )
        return

    # fab passes task arguments as strings, so normalize the boolean-ish
    # flags before using them.
    destroy_vm = _get_boolean_from_param(destroy_vm)
    provision_vm = _get_boolean_from_param(provision_vm)
    skip_docker_load = _get_boolean_from_param(skip_docker_load)
    skip_unit_tests = _get_boolean_from_param(skip_unit_tests)
    transfer_images = _get_boolean_from_param(transfer_images)
    no_build = _get_boolean_from_param(no_build)

    # Setup the gateway: use the provided gateway if given, else default to the
    # vagrant machine
    _switch_to_vm(
        gateway_host,
        gateway_vm,
        gateway_ansible_file,
        destroy_vm,
        provision_vm,
    )

    # We will direct coredumps to be placed in this directory
    # Clean up before every run
    if files.exists("/var/opt/magma/cores/"):
        run("sudo rm /var/opt/magma/cores/*", warn_only=True)
    else:
        run("sudo mkdir -p /var/opt/magma/cores", warn_only=True)

    if not skip_unit_tests:
        execute(_run_unit_tests)

    execute(_set_cwag_configs, "gateway.mconfig")
    execute(_add_networkhost_docker)
    # Record the gateway bridge MAC so the test VM can set up static ARP later.
    cwag_host_to_mac = execute(_get_br_mac, CWAG_BR_NAME)
    host = env.hosts[0]
    cwag_br_mac = cwag_host_to_mac[host]

    # Transfer built images from local machine to CWAG host
    if gateway_host or transfer_images:
        execute(_transfer_docker_images, skip_docker_load, tar_path)
    else:
        execute(_stop_gateway)
        if not no_build:
            execute(_build_gateway)

    execute(_run_gateway)

    # Setup the trfserver: use the provided trfserver if given, else default to
    # the vagrant machine
    with lcd(LTE_AGW_ROOT):
        _switch_to_vm(
            gateway_host,
            "magma_trfserver",
            "magma_trfserver.yml",
            destroy_vm,
            provision_vm,
        )

    execute(_start_trfserver)

    # Run the tests: use the provided test machine if given, else default to
    # the vagrant machine
    _switch_to_vm(
        gateway_host,
        "cwag_test",
        "cwag_test.yml",
        destroy_vm,
        provision_vm,
    )

    cwag_test_host_to_mac = execute(_get_br_mac, CWAG_TEST_BR_NAME)
    host = env.hosts[0]
    cwag_test_br_mac = cwag_test_host_to_mac[host]
    execute(_set_cwag_test_configs)
    execute(_start_ipfix_controller)

    # Get back to the gateway vm to setup static arp
    _switch_to_vm_no_destroy(gateway_host, gateway_vm, gateway_ansible_file)
    execute(_set_cwag_networking, cwag_test_br_mac)

    # check if docker services are alive except for OCS2 and PCRF2
    ignore_list = ["ocs2", "pcrf2"]
    execute(_check_docker_services, ignore_list)

    _switch_to_vm_no_destroy(gateway_host, "cwag_test", "cwag_test.yml")
    execute(_start_ue_simulator)
    execute(_set_cwag_test_networking, cwag_br_mac)

    # NOTE(review): unlike the other flags, run_tests is compared against the
    # literal string "False" instead of going through _get_boolean_from_param
    # — confirm that e.g. run_tests=false is intended to still run the tests.
    if run_tests == "False":
        execute(_add_docker_host_remote_network_envvar)
        print(
            "run_test was set to false. Test will not be run\n"
            "You can now run the tests manually from cwag_test", )
        sys.exit(0)

    # HSSLESS tests are to be executed from gateway_host VM
    if tests_to_run.value == SubTests.HSSLESS.value:
        _switch_to_vm_no_destroy(
            gateway_host,
            gateway_vm,
            gateway_ansible_file,
        )
        execute(
            _run_integ_tests,
            gateway_host,
            trf_host,
            tests_to_run,
            test_re,
            count,
            test_result_xml,
            rerun_fails,
        )
    else:
        execute(
            _run_integ_tests,
            test_host,
            trf_host,
            tests_to_run,
            test_re,
            count,
            test_result_xml,
            rerun_fails,
        )

    # If we got here means everything work well!!
    if not test_host and not trf_host:
        # Clean up only for now when running locally
        execute(_clean_up)
    print('Integration Test Passed for "{}"!'.format(tests_to_run.value))
    sys.exit(0)
Example #50
0
def _init_virtualenv(site_folder):
    """Ensure *site_folder* has a python3 virtualenv and a db directory."""
    venv_dir = f'{site_folder}/virtualenv'
    db_dir = f'{site_folder}/db'
    if not exists(venv_dir):
        run(f'cd {site_folder} && virtualenv --python=python3 virtualenv')
    if not exists(db_dir):
        run(f'cd {site_folder} && mkdir db')
Example #51
0
def verdid_stop():
    """Shut down the verdi supervisord instance if its control socket exists."""
    if not exists('verdi/run/supervisor.sock'):
        return
    with prefix('source verdi/bin/activate'):
        run('supervisorctl shutdown')
Example #52
0
def is_installed():
    """Return True if the nginx binary is present on the remote host.

    Checks for /usr/sbin/nginx via Fabric's ``files.exists`` (verbose so the
    probe shows up in the task output).
    """
    # files.exists already answers the question; no need for an explicit
    # if/return-True/return-False branch. bool() pins the return type.
    return bool(files.exists('/usr/sbin/nginx', verbose=True))
Example #53
0
def metricsd_stop():
    """Shut down the metrics supervisord instance if its control socket exists."""
    if not exists('metrics/run/supervisor.sock'):
        return
    with prefix('source metrics/bin/activate'):
        run('supervisorctl shutdown')
Example #54
0
def create_virtualenv():
    """Create the project virtualenv under env.PROJECT_PATH if it is missing."""
    with cd(env.PROJECT_PATH):
        if exists("virtualenv"):
            return
        run("mkdir -p virtualenv")
        run("virtualenv ./virtualenv")
Example #55
0
def mozartd_stop():
    """Shut down the mozart supervisord instance if its control socket exists."""
    if not exists('mozart/run/supervisor.sock'):
        return
    with prefix('source mozart/bin/activate'):
        run('supervisorctl shutdown')
Example #56
0
def verdid_start(force=False):
    """Start supervisord for verdi unless a pidfile shows it is running.

    force: start even when the pidfile is present.
    """
    already_running = exists('verdi/run/supervisord.pid')
    if already_running and not force:
        return
    with prefix('source verdi/bin/activate'):
        run('supervisord')
Example #57
0
def grqd_start(force=False):
    """Start supervisord for sciflo (GRQ) unless a pidfile shows it is running.

    force: start even when the pidfile is present.
    """
    mkdir('sciflo/run', context['OPS_USER'], context['OPS_USER'])
    already_running = exists('sciflo/run/supervisord.pid')
    if already_running and not force:
        return
    with prefix('source sciflo/bin/activate'):
        run('supervisord')
Example #58
0
def metricsd_start(force=False):
    """Start supervisord for metrics unless a pidfile shows it is running.

    force: start even when the pidfile is present.
    """
    already_running = exists('metrics/run/supervisord.pid')
    if already_running and not force:
        return
    with prefix('source metrics/bin/activate'):
        run('supervisord')
Example #59
0
def svn_co(path, svn_url):
    """Check out *svn_url* into *path* on the remote host if not already present.

    path: absolute destination directory for the working copy (its parent
        directory must already exist).
    svn_url: repository URL to check out.
    """
    if not exists(path):
        with cd(os.path.dirname(path)):
            # Name the checkout target explicitly. Without it, svn creates a
            # directory named after the repository, which only matches the
            # exists(path) idempotence guard when the repo name happens to
            # equal basename(path).
            run(
                'svn co --non-interactive --trust-server-cert %s %s'
                % (svn_url, os.path.basename(path)),
            )
Example #60
0
def grqd_stop():
    """Shut down the sciflo (GRQ) supervisord instance if its control socket exists."""
    if not exists('sciflo/run/supervisor.sock'):
        return
    with prefix('source sciflo/bin/activate'):
        run('supervisorctl shutdown')