Esempio n. 1
0
def bootstrap():
    """
    Bootstrap Ubuntu for use with the configuration manager of choice.

    Only the bare essentials, the configuration manager will take care of the rest.
    """
    # generate and activate a UTF-8 locale so later tooling doesn't choke
    run("/usr/sbin/locale-gen en_US.UTF-8 && /usr/sbin/update-locale LANG=en_US.UTF-8")
    # warn_only: a partially failing dist-upgrade should not abort the bootstrap
    with fabric_settings(warn_only=True):
        run("aptitude update && aptitude -y dist-upgrade")
    # make the salt master resolvable by name over the private network
    append("/etc/hosts", "{0} saltmaster-private".format(env.master_server.private_ip))
    # rebooting drops the ssh connection, which fabric reports as a failure
    with fabric_settings(warn_only=True):
        reboot()
    run("aptitude install -y build-essential rsync sudo")
    # grant members of the wheel group full sudo (password still required)
    append("/etc/sudoers",
           "## allow members of group wheel to execute any command\n%wheel ALL=(ALL) ALL")
Esempio n. 2
0
def bootstrap():
    """
    Bootstrap Ubuntu for use with the configuration manager of choice.

    Only the bare essentials, the configuration manager will take care of the rest.
    """
    # make sure a UTF-8 locale is generated and active
    run("/usr/sbin/locale-gen en_US.UTF-8 && /usr/sbin/update-locale LANG=en_US.UTF-8")
    # warn_only: a partially failing dist-upgrade should not abort the bootstrap
    with fabric_settings(warn_only=True):
        run("aptitude update && aptitude -y dist-upgrade")
    # make the salt master resolvable by name over the private network
    append("/etc/hosts", "{0} saltmaster-private".format(env.master_server.private_ip))
    # rebooting drops the ssh connection, which fabric reports as a failure
    with fabric_settings(warn_only=True):
        reboot()
    run("aptitude install -y build-essential rsync")
    # allow users in the wheel group to sudo without a password
    uncomment("/etc/sudoers", "wheel.*NOPASSWD")
Esempio n. 3
0
def reboot_server(name):
    """
    Simply reboot a server by name.
    The trick here is to change the env vars to that of the server
    to be rebooted. Perform the reboot and change env vars back
    to their original value.

    Usage:
        fab reboot_server:name
    """
    __shared_setup()
    try:
        server = [s for s in env.bootmachine_servers if s.name == name][0]
    except IndexError:
        abort("The server '{0}' was not found.".format(name))
    # remember the current connection so it can be restored afterwards
    original_user = env.user
    original_host_string = env.host_string
    try:
        # probe the default ssh port first (a freshly booted server)
        env.port = 22
        telnetlib.Telnet(server.public_ip, env.port)
        env.user = "******"  # NOTE(review): redacted username in source — confirm intended value
    except IOError:
        # port 22 closed: assume the configured custom ssh port and local user
        env.port = int(settings.SSH_PORT)
        env.user = getpass.getuser()
        telnetlib.Telnet(server.public_ip, env.port)
    env.host_string = "{0}:{1}".format(server.public_ip, env.port)

    env.keepalive = 30  # keep the ssh key active, see fabric issue #402
    # the reboot drops the connection, which fabric reports as a failure
    with fabric_settings(warn_only=True):
        reboot()

    # restore the original connection settings
    env.user = original_user
    env.host_string = original_host_string
Esempio n. 4
0
 def __build(self, worker, user_args):
     """Run the worker's build command inside its build directory."""
     fabric_env['pretty_host_string'] = worker.pretty_name
     # warn_only: a broken build must not abort the whole fabric session
     with fabric_rcd(worker.build_location), fabric_settings(warn_only=True):
         fabric_run(worker.generateBuildCommand(user_args))
Esempio n. 5
0
  def __install_hooks(self):
    """Render the bundled post-receive template into the bare repo's hooks."""
    #get the location of the template file based on our location
    this_file_loc = os.path.abspath(inspect.getfile(inspect.currentframe()))
    current_dir = os.path.dirname( this_file_loc )

    #pick the lfs-aware template when this repository uses git-lfs
    template_file = os.path.join(current_dir,'templates/post-receive')
    if self.lfs:
      template_file = os.path.join(current_dir,'templates/post-receive-with-lfs')


    #setup the destination of the hooks
    dest = os.path.join(self.git_location,'hooks/post-receive')

    #make sure the hooks directory exists (mkdir of an existing dir only warns)
    with fabric_settings(warn_only=True):
      self.run("mkdir -p " +  os.path.join(self.git_location,'hooks'))

    #setup the template dictionary to create valid hooks
    context_dict = { }
    context_dict['src_location']=self.src_location

    #upload and create the hooks
    fabric_template(template_file, destination=dest, context=context_dict)

    #make the file executable
    ch_command = "chmod +x " + dest
    self.run(ch_command)
Esempio n. 6
0
def update_master_iptables():
    """
    Update iptables rules for salt, on the salt master,
    to accept newly booted minions.

    Usage:
        fab master configurator.update_master_iptables
    """
    if env.host != env.master_server.public_ip:
        abort("tried to update_master_iptables on a non-master server")

    configurator_ports = ["4505", "4506"]  # get from settings.py?

    # Get the line in the iptables chain for inserting the new minion's
    # rules; grep fails (warn only) when no salt rule exists yet
    with fabric_settings(warn_only=True):
        insert_line = sudo("iptables -L --line-numbers | grep {0}".format(configurator_ports[0]))

    if not insert_line:
        print(yellow("NOTE: iptables are wide open during first boot of a master"))
        return

    for port in configurator_ports:
        # existing rules for this port; used to skip already-allowed minions
        match = sudo("iptables -nvL | grep {0}".format(port))
        for server in env.bootmachine_servers:
            if server.private_ip not in match:
                # NOTE(review): insert_line[0] is only the FIRST CHARACTER of
                # the grep output — correct only while the matched rule's line
                # number is a single digit; confirm against iptables output
                sudo(
                    "iptables -I INPUT {0} -s {1} -m state --state new -m tcp -p tcp \
                --dport {2} -j ACCEPT".format(
                        insert_line[0], server.private_ip, port
                    )
                )
Esempio n. 7
0
    def test(self, worker_names, user_test_args):
        """Push to the named workers and run their test commands.

        Returns False when the push failed, True otherwise.
        """
        user_args = " ".join(user_test_args)

        workers = self.push(worker_names)
        if workers is None:  # fixed: identity check instead of `== None`
            return False

        host_list = [name for name in workers]
        is_parallel = len(host_list) > 1

        if is_parallel:
            #fabric by default treats hosts as unique, and if you have multiple jobs
            #that use the same hostname they are all passed to that fabric worker.
            #what we do is inject our own fabric_execute that pulls in more env
            #settings to create a 2 way mapping from worker names to fabric connections
            with fabric_settings(parallel=True):
                cf_execute.execute(self.__test,
                                   hosts=host_list,
                                   workers=workers,
                                   user_args=user_args)
        else:
            w = workers[host_list[0]]
            cf_execute.execute(self.__test,
                               hosts=w.connection_name,
                               worker=w,
                               user_args=user_args)
        return True
Esempio n. 8
0
def known_host(address, user=None, local_mode=False, clean=True):
    """
    set ssh fingerprint
    :param address: domain or ip
    :param user: remote user name
    :param local_mode: set known_host for localhost, default is False
    :param clean: clean existing record first, default is True
    """

    # resolve the known_hosts location: literal ~ for local runs,
    # otherwise the remote user's home directory
    user_path = '~' if local_mode else get_home(user)
    # command0 probes for an existing record for this address
    command0 = "grep '%s ' %s/.ssh/known_hosts" % (address, user_path)
    # command1 appends the host's current key(s)
    command1 = 'ssh-keyscan %s >> %s/.ssh/known_hosts' % (address, user_path)
    # command2 blanks out the old record before re-adding it
    # NOTE(review): address is substituted into the sed regex unescaped, so
    # dots match any character — confirm this looseness is acceptable
    command2 = 'sed -i -e "s/%s//g" %s/.ssh/known_hosts' % (address, user_path)

    # run locally or over ssh depending on local_mode
    commander = fabric_local if local_mode else run

    if user:
        # wrap each command so it executes as the target user
        command0 = 'su - %s -c "%s"' % (user, command0)
        command1 = 'su - %s -c "%s"' % (user, command1)
        command2 = 'su - %s -c "%s"' % (user, command2)
        pass

    with fabric_settings(warn_only=True):
        if commander(command0).failed:
            # no record yet: just add one
            commander(command1)
        else:
            if clean:
                # clean old record
                commander(command2)
                commander(command1)
                pass
            pass
        pass
    pass
Esempio n. 9
0
 def __build(self, worker, user_args):
   """Execute the build command for *worker* in its build directory."""
   fabric_env['pretty_host_string'] = worker.pretty_name
   #a failing build only warns; it must not abort the session
   with fabric_rcd(worker.build_location), fabric_settings(warn_only=True):
     fabric_run(worker.generateBuildCommand(user_args))
Esempio n. 10
0
    def __install_hooks(self):
        """Render the post-receive hook template into the bare repository."""
        #locate the template directory shipped next to this module
        here = os.path.dirname(
            os.path.abspath(inspect.getfile(inspect.currentframe())))
        hook_name = 'templates/post-receive-with-lfs' if self.lfs \
            else 'templates/post-receive'
        template_file = os.path.join(here, hook_name)

        #where the rendered hook lands inside the git repository
        dest = os.path.join(self.git_location, 'hooks/post-receive')

        #the hooks directory may already exist, so only warn on failure
        with fabric_settings(warn_only=True):
            self.run("mkdir -p " + os.path.join(self.git_location, 'hooks'))

        #render and upload the hook, then mark it executable
        fabric_template(template_file, destination=dest,
                        context={'src_location': self.src_location})
        self.run("chmod +x " + dest)
Esempio n. 11
0
def stop_salt():
    """
    Stop the salt master (on the master host only) and the local minion.
    """
    # warn_only: an already-stopped service must not abort the run
    with fabric_settings(warn_only=True):
        on_master = env.host == env.master_server.public_ip
        if on_master:
            sudo("systemctl stop salt-master")
        sudo("systemctl stop salt-minion")
Esempio n. 12
0
def upgrade_ubuntu():
    """
    When a provider doesn't offer the latest version, and you want to automate the upgrade.
    """
    # warn_only: an imperfect upgrade should not abort the whole run
    with fabric_settings(warn_only=True):
        # dist-upgrade without a grub config prompt
        # http://askubuntu.com/questions/146921/how-do-i-apt-get-y-dist-upgrade-without-a-grub-config-prompt  # nopep8
        run('DEBIAN_FRONTEND=noninteractive apt-get -y \
        -o Dpkg::Options::="--force-confdef" -o Dpkg::Options::="--force-confold" dist-upgrade')
Esempio n. 13
0
def start_salt():
    """
    Start salt services: the master first (where applicable), then the minion.
    """
    master_host = env.master_server.public_ip
    # warn_only: an already-running service must not abort the run
    with fabric_settings(warn_only=True):
        if env.host == master_host:
            sudo("systemctl start salt-master")
        # brief pause so the master is up before the minion connects
        time.sleep(3)
        sudo("systemctl start salt-minion")
Esempio n. 14
0
 def root(self):
     """Return the repo's top-level directory, or None outside a git repo."""
     #probe quietly: rev-parse fails outside a repo, which we tolerate
     with fabric_settings(warn_only=True), \
          hide('warnings', 'status', 'running', 'stdout', 'stderr'):
         result = self.git_call("rev-parse", "--show-toplevel")
     #git terminates the printed path with a newline; strip before returning
     return None if result.failed else result.rstrip('\n\r')
Esempio n. 15
0
def get_home(user):
    """Return *user*'s home directory as recorded in /etc/passwd.

    :param user: remote user name
    :raises ValueError: when the user does not exist on the remote host
    """
    with fabric_settings(warn_only=True):
        found = not run('cat /etc/passwd | grep "^%s:"' % user).failed
        if not found:
            raise ValueError("User:%s not exists" % user)
        # sixth field of the passwd entry is the home directory
        return run("cat /etc/passwd |"
                   " grep '^%s:' |"
                   " awk -F ':' '{print $6}'" % user)
Esempio n. 16
0
 def root(self):
   """Return the top-level directory of the git repo, or None if not in one."""
   #probe quietly: outside a repo rev-parse fails, which we tolerate
   with fabric_settings(warn_only=True), \
        hide('warnings', 'status', 'running', 'stdout', 'stderr'):
     result = self.git_call("rev-parse","--show-toplevel")
   #strip the trailing newline git prints after the path
   return None if result.failed else result.rstrip('\n\r')
Esempio n. 17
0
  def __create(self):
    """Create the source directory and initialise it as a git repository."""
    #the directory may already exist; a warning is fine
    with fabric_settings(warn_only=True):
      self.run("mkdir -p " + self.src_location)

    #turn it into a repository and point the worktree at the source tree
    with self.cd(self.src_location):
      self.run("git init " + self.src_location)
    with self.cd(self.git_location):
      for cfg in ("git config --bool receive.denyCurrentBranch false",
                  "git config --path core.worktree " + self.src_location):
        self.run(cfg)
Esempio n. 18
0
  def __setup(self, worker):
    """Create the build directory and run the (c)cmake configure step."""
    fabric_env['pretty_host_string'] = worker.pretty_name

    #an already-existing directory only produces a warning
    with fabric_settings(warn_only=True):
      fabric_run("mkdir -p " + worker.build_location)

    #ask whether to configure interactively via ccmake
    answer = fabric_prompt('Would you like to run ccmake: ',
                           default='y', validate=r'^(y|n)$')
    setup_cmd = worker.generateSetupCommand(is_interactive=(answer == 'y'))
    with fabric_rcd(worker.build_location):
      fabric_run(setup_cmd)
Esempio n. 19
0
def exist_group(group):
    """
    Return True when *group* has an entry in /etc/group on the remote host.

    :param group: group name
    """
    with fabric_settings(warn_only=True):
        # grep exits non-zero when the group is missing
        return not run('cat /etc/group | grep ^%s:' % group).failed
Esempio n. 20
0
def install_salt(installer="ppa"):
    """
    Install salt with the chosen installer.

    :param installer: installation method; only "ppa" is implemented
    :raises NotImplementedError: for any other installer value
    """
    if installer == "ppa":
        # prerequisite for add-apt-repository
        run("aptitude install -y python-software-properties")
        run("add-apt-repository --yes ppa:saltstack/salt")
        # warn_only: a partially failing index refresh should not abort
        with fabric_settings(warn_only=True):
            run("aptitude update")
        # only the master host gets the salt-master package
        if env.host == env.master_server.public_ip:
            run("aptitude install -y salt-master")
        run("aptitude install -y salt-minion salt-syndic")
    else:
        raise NotImplementedError()
Esempio n. 21
0
  def __create(self):
    """Create the source directory, git-init it, and configure optional LFS."""
    #first make the directory (warn only: it may already exist)
    with fabric_settings(warn_only=True):
      self.run("mkdir -p " +  self.src_location)

    #move into the directory and make it a git repo, and setup
    #the hooks we need
    with self.cd( self.src_location ):
      self.run("git init " + self.src_location)
    with self.cd( self.git_location ):
      self.run("git config --bool receive.denyCurrentBranch false")
      self.run("git config --path core.worktree " + self.src_location)
      if self.lfs:
        #to get lfs to run properly we need to add both a remote
        #and a lfs url, otherwise it wont work properly when we try
        #to fetch data. See https://github.com/git-lfs/git-lfs/issues/930
        with fabric_settings(warn_only=True):
          #removing a non-existent remote fails; only warn
          self.run("git remote rm lfs")

        self.run("git remote add lfs " + self.lfs['fetch_url'])
        #this needs to have info/lfs/ or else you get very weird 403
        #errors about cookies need to be enabled
        self.run("git config lfs.url " + self.lfs['fetch_url'] + "/info/lfs/")
Esempio n. 22
0
def exist_user(user):
    """
    Return True when *user* has an entry in /etc/passwd on the remote host.

    :param user: user name
    """
    with fabric_settings(warn_only=True):
        # grep exits non-zero when the user is missing
        return not run('cat /etc/passwd | grep ^%s:' % user).failed
Esempio n. 23
0
    def __create(self):
        """Create the source dir, git-init it, and wire up optional LFS."""
        #mkdir may fail when the path already exists; only warn
        with fabric_settings(warn_only=True):
            self.run("mkdir -p " + self.src_location)

        #initialise the repository and point its worktree at the sources
        with self.cd(self.src_location):
            self.run("git init " + self.src_location)
        with self.cd(self.git_location):
            self.run("git config --bool receive.denyCurrentBranch false")
            self.run("git config --path core.worktree " + self.src_location)
            if not self.lfs:
                return
            #git-lfs needs both a remote and an lfs url or fetching breaks;
            #see https://github.com/git-lfs/git-lfs/issues/930
            with fabric_settings(warn_only=True):
                #removing a non-existent remote fails; only warn
                self.run("git remote rm lfs")
            self.run("git remote add lfs " + self.lfs['fetch_url'])
            #without the info/lfs/ suffix the server answers with odd 403s
            #about cookies needing to be enabled
            self.run("git config lfs.url " + self.lfs['fetch_url'] +
                     "/info/lfs/")
Esempio n. 24
0
def remote_clone_or_pull(remote_dir, repo_url):
    """clone repository repo_url onto remote machine at remote_dir

    When the clone fails (typically because it already exists), fall
    back to pulling and updating the existing repository.
    """
    group_writable_permissions(remote_dir=remote_dir)
    with cd(remote_dir):
        with fabric_settings(warn_only=True):
            result = run("hg clone %s"%repo_url)
            if result.failed:
                # assume the repo already lives at remote_dir/<basename>
                d = os.path.basename(repo_url)
                with cd(os.path.join(remote_dir, d)):
                    result = run("hg pull")
                    # NOTE(review): the pull result is overwritten before it
                    # is checked — only the update's outcome is verified
                    result = run("hg update")
                    if result.failed:
                        abort("wtf happened?!?!")
    group_writable_permissions(remote_dir=remote_dir)
Esempio n. 25
0
    def __setup(self, worker):
        """Create the build directory and run the (c)cmake configure step."""
        fabric_env['pretty_host_string'] = worker.pretty_name

        #mkdir of an existing directory only warrants a warning
        with fabric_settings(warn_only=True):
            fabric_run("mkdir -p " + worker.build_location)

        #let the user choose interactive ccmake or plain cmake
        answer = fabric_prompt('Would you like to run ccmake: ',
                               default='y',
                               validate=r'^(y|n)$')
        setup_cmd = worker.generateSetupCommand(
            is_interactive=(answer == 'y'))
        with fabric_rcd(worker.build_location):
            fabric_run(setup_cmd)
Esempio n. 26
0
  def test(self, worker_names, user_test_args):
    """Push to the named workers and run their test commands.

    Returns False when the push failed, True otherwise.
    """
    user_args = " ".join(user_test_args)

    workers = self.push(worker_names)
    if workers is None:  # fixed: identity check instead of `== None`
      return False

    host_list = [name for name in workers]
    is_parallel = len(host_list) > 1

    if is_parallel:
      #fabric by default treats hosts as unique, and if you have multiple jobs
      #that use the same hostname they are all passed to that fabric worker.
      #what we do is inject our own fabric_execute that pulls in more env
      #settings to create a 2 way mapping from worker names to fabric connections
      with fabric_settings(parallel=True):
        cf_execute.execute(self.__test, hosts=host_list, workers=workers, user_args=user_args)
    else:
      w = workers[host_list[0]]
      cf_execute.execute(self.__test, hosts=w.connection_name, worker=w, user_args=user_args)
    return True
Esempio n. 27
0
    def configure(self, filename, value):
        """Ensure *filename* contains the directive given by *value*.

        A "key=val" value is matched by its key; when the directive is
        already present its line is rewritten in place, otherwise *value*
        is appended to the end of the file.
        """
        if len(value.split('=')) == 2:
            directive, _ = value.split('=')
        else:
            directive = value

        directive = _escape_for_regex(directive)
        value = _escape_for_regex(value)

        # can't use contrib.files.[contains, sed, append] because they need a
        # host to connect to (they run as `run` or `sudo`)
        with fabric_settings(hide('everything'), warn_only=True):
            contains = local("grep -q '^{0}' '{1}'".format(
                directive, filename),
                             capture=True).succeeded

        if contains:
            local('sudo sed -i.bak -r -e "s/{0}/{1}/g" "{2}"'.format(
                '^{0}.*$'.format(directive), value, filename))
        else:
            # raw string: "\$a" is a sed append address, not a Python escape
            # (fixed: the non-raw form is an invalid escape sequence and
            # raises a SyntaxWarning on modern Python)
            local(r'sudo sed -i.bak -e "\$a {0}" "{1}"'.format(value, filename))
Esempio n. 28
0
    def configure(self, filename, value):
        """Ensure *filename* contains the directive given by *value*.

        A "key=val" value is matched by its key; when the directive is
        already present its line is rewritten in place, otherwise *value*
        is appended to the end of the file.
        """
        if len(value.split('=')) == 2:
            directive, _ = value.split('=')
        else:
            directive = value

        directive = _escape_for_regex(directive)
        value = _escape_for_regex(value)

        # can't use contrib.files.[contains, sed, append] because they need a
        # host to connect to (they run as `run` or `sudo`)
        with fabric_settings(hide('everything'), warn_only=True):
            contains = local(
                "grep -q '^{0}' '{1}'".format(directive, filename),
                capture=True).succeeded

        if contains:
            local('sudo sed -i.bak -r -e "s/{0}/{1}/g" "{2}"'.format(
                '^{0}.*$'.format(directive), value, filename))
        else:
            # raw string: "\$a" is a sed append address, not a Python escape
            # (fixed: the non-raw form is an invalid escape sequence and
            # raises a SyntaxWarning on modern Python)
            local(r'sudo sed -i.bak -e "\$a {0}" "{1}"'.format(value, filename))
Esempio n. 29
0
def accept_minions():
    """
    Accept salt-key's for all minions.
    Usage:
        fab master configurator.accept_minions
    """
    if env.host != env.master_server.public_ip:
        abort("tried to accept minions on a non-master server")

    def __get_accepted_minions():
        """Return the list of accepted minion names.

        TODO: remove the fallback call when all distros support salt 0.10.5
        """
        # SECURITY NOTE: eval() on salt-key output — the input comes from our
        # own master here, but ast.literal_eval would be a safer choice
        try:
            accepted = eval(sudo("salt-key --yes --out raw --list acc"))
        except Exception:  # fixed: was a bare except
            accepted = eval(sudo("salt-key --raw-out --list acc"))
        if type(accepted) == dict:
            return accepted["minions"]
        else:
            return accepted  # support salt version < 0.10.5

    minions = __get_accepted_minions()
    slept = 0

    # keep accepting until every configured server's key is in
    while len(minions) != len(settings.SERVERS):
        unaccepted = [s["servername"] for s in settings.SERVERS if s["servername"] not in minions]

        with fabric_settings(warn_only=True):
            for server in unaccepted:
                sudo("salt-key --quiet --accept={0} --yes".format(server))
        minions = __get_accepted_minions()
        if len(minions) != len(settings.SERVERS):
            local("fab master configurator.restartall")
            time.sleep(5)
            slept += 5
            print(yellow("there are still unaccepted keys, trying again."))  # fixed typo
        if slept > 300:
            abort("After 5 minutes of attempts, there still exist unaccepted keys.")  # fixed typo

    print(green("all keys have been accepted."))
Esempio n. 30
0
def update_remote(hg_revision='tip'):
    """update remote repository

    :param hg_revision: mercurial revision to update to (default 'tip')
    """

    change_owner()
    group_writable_permissions()

    # get a list of all of the django project dirs in this repository
    rel_django_project_dirs = []
    hg_root = fabhelp.get_hg_root()
    for root, dirs, files in os.walk(hg_root):
        d = os.path.join(hg_root, root)
        try:
            dirs.remove('.hg') # ignore mercurial repositories
        except ValueError:
            pass # .hg is not in dirs
        if fabhelp.is_django_project(d):
            rel_django_project_dirs.append(os.path.relpath(d, hg_root))
            dirs[:] = [] # no need to descend any further... save time!

    # get the remote directory from the apache file
    fabhelp.progress("updating remote repository")
    remote_repo_dir = get_remote_repo_dir()
    with cd(remote_repo_dir):
        # a pull with nothing new to pull is not an error
        with fabric_settings(warn_only = True):
            run("hg pull")

        # run database2fixture for each django repository in this repository
        for rel_django_project_dir in rel_django_project_dirs:
            with cd(rel_django_project_dir):
                if fabhelp.is_dbvcs_installed():
                    run("./manage.py database2fixture")

        run("hg update -r %s"%hg_revision)

    remote_project_dir = get_remote_project_dir()
    with cd(remote_project_dir):
        run("./manage.py setup_virtualenv") # add new packages when necessary
        group_writable_permissions()
Esempio n. 31
0
def bootstrap():
    """
    Bootstrap Fedora.

    Only the bare essentials, salt takes care of the rest.

    """
    # minimal package set; salt installs everything else later
    base_packages = [
        "curl",
        "git",
        "rsync",
    ]
    # build and activate a UTF-8 locale
    run("/usr/bin/localedef -i en_US -f UTF-8 en_US.UTF-8")
    run("export LC_ALL=en_US.UTF-8 && export LANG=en_US.UTF-8")
    append("/etc/sysconfig/i18n", 'LC_ALL="en_US.UTF-8"')
    run("yum update --assumeyes")
    run("yum groupinstall --assumeyes 'Development Tools'")
    run("yum install --assumeyes {pkgs}".format(pkgs=" ".join(base_packages)))
    # make the salt master resolvable by name on the private network
    append("/etc/hosts", "{0} saltmaster-private".format(env.master_server.private_ip))
    # allow users in the wheel group to sudo without a password
    uncomment("/etc/sudoers", "wheel.*NOPASSWD")
    # rebooting drops the ssh connection, which fabric reports as a failure
    with fabric_settings(warn_only=True):
        reboot()
Esempio n. 32
0
def add_access(key):
    """Write *key* as the fabric user's authorized_keys on the satellite host."""
    from fabric.context_managers import settings as fabric_settings
    # connect as the sudo-capable user and write the key via tee
    with fabric_settings(user=settings.satellite.sudo_user):
        run('echo "%s" | sudo -u %s tee /home/%s/.ssh/authorized_keys' % (key, settings.fabric.user, settings.fabric.user))
    #TODO run as user
    # restrict the key file to the owner only
    usersudo('chmod og-rw /home/%s/.ssh/authorized_keys' % settings.fabric.user, settings.fabric.user)
Esempio n. 33
0
def setup_staging(production_root_url, virtualenv_name="env",
                  local_master_django_project_dir='', use_master_auth=True):
    """setup the staging environment based on the production root url
    for example, acme.datascopeanalytics.com/hello-world will be hosted
    on the staging server at staging.datascopeanalytics.com/acme/hello-world

    if this project is setup at a suburl, then a
    local_master_django_project_dir MUST be specified

    :param production_root_url: root url of the production deployment
    :param virtualenv_name: name of the remote virtualenv directory
    :param local_master_django_project_dir: path to the master django
        project when this project lives at a suburl
    :param use_master_auth: share user sessions with the master project
    """

    # create a modified urlparse object
    production_root_url = ProductionRootUrl(production_root_url)

    # setup apache
    render_apache_configuration(
        production_root_url,
        local_master_django_project_dir=local_master_django_project_dir,
    )

    # since we want login permissions to persist across subdomain
    # sites, we need to add some lines to settings to use database
    # routers to access the user and session information in the
    # local_master_django_project
    if local_master_django_project_dir and use_master_auth:
        fabhelp.progress("updating django settings to share user sessions")
        for server in ("localhost", "poisson", "noether"):
            project_dir = local('pwd', capture=True)
            settings_filename = os.path.join(project_dir, 'conf', server,
                                             'settings.py')
            quoted = lambda s: '"' + s + '"'
            rel_path = os.path.relpath(
                os.path.abspath(local_master_django_project_dir), 
                project_dir)
            rel_path_str = ', '.join(map(quoted, rel_path.split(os.sep)))
            f = open(settings_filename, 'a')
            f.write(r"""
from common.conf.settings import _shared_user_sessions
(DATABASES, DATABASE_ROUTERS, MIDDLEWARE_CLASSES) = \
    _shared_user_sessions(
        os.path.join(PROJECT_ROOT, %s), 
        DATABASES, DATABASE_ROUTERS, MIDDLEWARE_CLASSES
)
"""%rel_path_str)
            f.close()
        local("hg ci -m 'updated django settings to share user sessions'")

    # push all changes into parent repository so that they can be
    # cloned on remote staging server (nothing-to-push is not an error)
    fabhelp.progress("push any local changes to repository")
    with fabric_settings(warn_only=True):
        local("hg push")

    # clone parent repository
    fabhelp.progress("clone the project repository")
    result = local("hg showconfig | grep paths.default", capture=True)
    repo_url = result.split('=', 1)[1].strip()
    remote_dir = "/srv/www/%s" % production_root_url.domain
    sudo("mkdir -p %s"%remote_dir)
    remote_clone_or_pull(remote_dir, repo_url)

    # do we need to do something with a database here? yes, we do. syncdb.
    django_project_dir = get_remote_project_dir()
    remote_initdb_from_fixture(django_project_dir, 
                               virtualenv_name=virtualenv_name)

    # XXXX TODO: this is probably not enough to simply place the
    # suburls directly before the ####XXXX line. we probably need to
    # order things in a smart way depending on what the suburls are:
    # the most restrictive first like /foo/bar before /foo.

    # setup remote apache stuff. this only has to be done if there is
    # not already a local master django project specified. suburl
    # projects are already installed by the apache configuration
    # written in render_apache_configuration function above
    if not local_master_django_project_dir:
        fabhelp.progress("setup remote apache on staging")
        django_project_dir = get_remote_project_dir()
        root_apache_filename = \
            "/etc/apache2/sites-available/staging.datascopeanalytics.com"
        sudo(r"sed 's/####XXXX/Include %s\n  ####XXXX/' %s > kk"%(
            os.path.join(django_project_dir, "conf", "poisson", "apache", 
                         "poisson.conf").replace('/', r'\/'),
            root_apache_filename,
        ))
        sudo("mv kk %s"%root_apache_filename)

    # that smells like cauliflower. gross.
    restart_server()
Esempio n. 34
0
def setup_production(virtualenv_name="env"):
    """setup the production environment

    :param virtualenv_name: name of the remote virtualenv directory
    """

    # get project dirs and subdomain to use throughout
    remote_project_dir = get_remote_project_dir()
    remote_dir = '/'.join(remote_project_dir.split('/')[:4])
    production_root_url = ProductionRootUrl()

    # setup linode resources
    setup_linode(production_root_url)

    # apache is already setup by the setup_staging command (which
    # calls render_apache_configuration)

    # if this is a suburl production project, need to over-ride the
    # settings to use the suburl_settings
    if production_root_url.suburl:
        fabhelp.progress("updating settings for a production url environment")
        filename = os.path.abspath(
            os.path.join(
                os.path.dirname(__file__),"..","..",
                "conf","noether","settings.py"
            )
        )
        # rewrite the settings file line by line, swapping the import
        with open(filename, 'r') as f:
            lines = f.readlines()
        with open(filename, 'w') as f:
            for line in lines:
                if line.strip()=="from common.conf.noether.settings import *":
                    s = "from common.conf.noether.suburl_settings import *\n"
                    f.write(s)
                else:
                    f.write(line)

        local("hg ci -m 'autocheckin: using suburl settings for production'")

    # push all changes into parent repository so that they can be
    # cloned on remote staging server (nothing-to-push is not an error)
    fabhelp.progress("push local changes to development repository."+
                     " propagate to staging")
    with fabric_settings(warn_only = True):
        local("hg push")
    local("./manage.py pull_development_to_staging")

    # clone parent repository
    fabhelp.progress("clone the project repository from staging environment")
    result = local("hg showconfig | grep paths.default", capture=True)
    repo_url = "ssh://poisson/" + os.path.join(
        remote_dir,
        os.path.basename(result.split('=', 1)[1].strip()),
    )
    sudo("mkdir -p %s"%remote_dir)
    remote_clone_or_pull(remote_dir, repo_url)

    # do we need to do something with a database here? yes, we do. syncdb.
    django_project_dir = get_remote_project_dir()
    remote_initdb_from_fixture(django_project_dir, virtualenv_name)

    # include the apache configuration file in the server's virtual
    # host configuration file for the sandbox server ---- only need to
    # do this if this is the root url! everything else is taken care
    # of in the apache configuration by render_apache_configuration
    # function that is executed during setup_staging
    if not production_root_url.suburl:
        fabhelp.progress("setup apache")
        conf_filename = os.path.join(django_project_dir, "conf", "noether", 
                                     "apache", "noether.conf")
        apache_conf = "/etc/apache2/sites-available/" + \
            production_root_url.domain
        # keep a backup of any pre-existing site configuration
        if fabric.contrib.files.exists(apache_conf, use_sudo=True):
            sudo("mv -f %s %s.backup "%(apache_conf, apache_conf))
        sudo("ln -s %s %s" % (conf_filename, apache_conf))
        sudo("mkdir -p /srv/www/%s/logs" % production_root_url.domain)
        sudo("a2ensite %s"%production_root_url.domain)

    # that smells like cauliflower. gross.
    restart_server()
Esempio n. 35
0
 def __test(self, worker, user_args):
     """Run the worker's test command inside its build directory."""
     fabric_env['pretty_host_string'] = worker.pretty_name
     # failing tests should warn, not abort the fabric session
     with fabric_rcd(worker.build_location), fabric_settings(warn_only=True):
         fabric_run(worker.generateTestCommand(user_args))
Esempio n. 36
0
def pull_remote_to_localhost(remotehost):
    """Pull *remotehost* changes into the local repository and all of its
    subrepositories.

    Steps:
      1. Dump the remote database to a fixture (if dbvcs is installed)
         and auto check-in any resulting changes.
      2. Walk ``.hgsub`` files breadth-first to discover every local
         subrepository directory.
      3. Pull each subrepository (deepest first) and finally the base
         repository, so that there aren't any problems with remote
         heads, etc.

    The local working copy is intentionally *not* updated here (see the
    NOTE at the bottom); the caller is told to ``hg up`` manually.

    :param remotehost: ssh alias of the remote host; must be one of
        ``"poisson"`` or ``"noether"``.
    """
    assert remotehost in ("poisson", "noether",)

    # dump database and check in as necessary
    fabhelp.progress("dump remote database and auto check-in")
    remote_project_dir = get_remote_project_dir()
    with cd(remote_project_dir):
        if fabhelp.is_dbvcs_installed():
            run("./manage.py database2fixture")
        cmd="hg ci -m 'auto-checkin: adding data for pull_remote_to_localhost'"
        with fabric_settings(warn_only=True):
            result = run(cmd)
            if result.failed:
                # "hg ci" exits 1 when there is nothing to commit; that is
                # fine.  any other failure is a real problem, so abort.
                if result.return_code != 1:
                    abort("hg ci failed. see above output for details")

    # recursively get all of the subrepository directories. breadth
    # first search here
    fabhelp.progress("find all of the subrepository directories")
    from django.conf import settings as django_settings
    with lcd(django_settings.PROJECT_ROOT):
        local_repo_dir = fabhelp.get_hg_root()
    remote_repo_dir = get_remote_repo_dir()
    hgsub_directories = collections.deque([local_repo_dir])
    subrepo_directory_list = [local_repo_dir]
    while hgsub_directories:
        hgsub_directory = hgsub_directories.popleft()
        hgsub_filename = os.path.join(hgsub_directory, '.hgsub')
        if not os.path.exists(hgsub_filename):
            continue
        # "with" guarantees the file is closed even when the format
        # check below raises (the previous open()/close() pair leaked
        # the handle on that path).
        with open(hgsub_filename) as hgsub:
            for line in hgsub:
                l = line.split('=')
                if len(l) == 2:
                    directory = l[0].strip()
                    subrepo_directory = os.path.join(hgsub_directory, directory)
                    subrepo_directory_list.append(subrepo_directory)
                    hgsub_directories.append(subrepo_directory)
                elif line.strip():  # non-empty line that is not "path = source"
                    raise TypeError("%s has unexpected format"%hgsub_filename)

    # reorder subrepo reldir list to always get subdirectories first
    subrepo_directory_list.reverse()

    # pull changes into base repository and all
    # subrepositories. iterate over all of the subrepo reldir's and
    # pull in the new information so that there aren't any problems
    # with remote heads, etc.
    fabhelp.progress("recursively pull subrepos and the base repo")
    for subrepo_directory in subrepo_directory_list:
        with lcd(subrepo_directory):
            subrepo_reldir = os.path.relpath(subrepo_directory, local_repo_dir)
            remote_dir = os.path.join(remote_repo_dir, subrepo_reldir)

            # if remote dir exists, pull from that
            # subrepository. remote_dir may not exist if there are
            # locally added subrepos that have not been put on
            # production yet.
            with fabric_settings(warn_only=True):
                exists = run("test -d %s"%remote_dir)
                if exists.succeeded:
                    local("hg pull ssh://%s/%s"%(remotehost, remote_dir))

    # NOTE: running "hg up" here used to trigger a weird paramiko error,
    # so the local working-copy update is deliberately left to the caller.
    msg="next: update your local repository to get all incoming change sets"
    fabhelp.progress(msg)
Esempio n. 37
0
 def __test(self, worker, user_args):
     """Execute *worker*'s test suite from its build location.

     warn_only is set so a failing test command does not abort the task.
     """
     fabric_env['pretty_host_string'] = worker.pretty_name
     with fabric_rcd(worker.build_location):
         with fabric_settings(warn_only=True):
             test_cmd = worker.generateTestCommand(user_args)
             fabric_run(test_cmd)
Esempio n. 38
0
def sudo(command, *args, **kwargs):
    """Run *command* on the remote host prefixed with ``sudo``.

    The connection temporarily switches to the configured sudo user
    (``settings.satellite.sudo_user``).  Extra positional/keyword
    arguments are forwarded to ``run``, and ``run``'s result object is
    returned so callers can inspect success and captured output
    (previously the result was silently discarded).
    """
    with fabric_settings(user=settings.satellite.sudo_user):
        return run("sudo %s" % command, *args, **kwargs)
Esempio n. 39
0
def use_sudo(f, *args, **kwargs):
    """Call *f* with ``use_sudo=True`` while connected as the sudo user.

    Intended for helpers that accept a ``use_sudo`` keyword (e.g. the
    fabric.contrib file operations).  The callable's return value is
    propagated to the caller (previously it was discarded).
    """
    kwargs["use_sudo"] = True
    with fabric_settings(user=settings.satellite.sudo_user):
        return f(*args, **kwargs)
Esempio n. 40
0
def usersudo(command, user, *args, **kwargs):
    """Run *command* on the remote host as *user* via ``sudo -u``.

    The connection temporarily switches to the configured sudo user.
    Extra arguments are forwarded to ``run``, whose result object is
    returned so callers can check success/output (previously discarded).
    """
    with fabric_settings(user=settings.satellite.sudo_user):
        return run("sudo -u %s %s" % (user, command), *args, **kwargs)