Example #1
def set_apache_site_specifics(site):
    site_data = env.sites[site]

    common.get_settings(site=site)

    # Set site specific values.
    env.apache_site = site
    env.update(site_data)
    env.apache_docroot = env.apache_docroot_template % env
    env.apache_wsgi_dir = env.apache_wsgi_dir_template % env
    #env.apache_app_log_dir = env.apache_app_log_dir_template % env
    env.apache_domain = env.apache_domain_template % env
    env.apache_server_name = env.apache_domain
    env.apache_wsgi_python_path = env.apache_wsgi_python_path_template % env
    env.apache_django_wsgi = env.apache_django_wsgi_template % env
    env.apache_server_aliases = env.apache_server_aliases_template % env
    env.apache_ssl_domain = env.apache_ssl_domain_template % env
    env.apache_auth_basic_authuserfile = env.apache_auth_basic_authuserfile_template % env
    env.apache_domain_with_sub = env.apache_domain_with_sub_template % env
    env.apache_domain_without_sub = env.apache_domain_without_sub_template % env
Example #2
def set_apache_site_specifics(site):
    site_data = env.sites[site]
    
    common.get_settings(site=site)
    
    # Set site specific values.
    env.apache_site = site
    env.update(site_data)
    env.apache_docroot = env.apache_docroot_template % env
    env.apache_wsgi_dir = env.apache_wsgi_dir_template % env
    #env.apache_app_log_dir = env.apache_app_log_dir_template % env
    env.apache_domain = env.apache_domain_template % env
    env.apache_server_name = env.apache_domain
    env.apache_wsgi_python_path = env.apache_wsgi_python_path_template % env
    env.apache_django_wsgi = env.apache_django_wsgi_template % env
    env.apache_server_aliases = env.apache_server_aliases_template % env
    env.apache_ssl_domain = env.apache_ssl_domain_template % env
    env.apache_auth_basic_authuserfile = env.apache_auth_basic_authuserfile_template % env
    env.apache_domain_with_sub = env.apache_domain_with_sub_template % env
    env.apache_domain_without_sub = env.apache_domain_without_sub_template % env
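Both examples above fill Fabric's env by expanding the *_template strings against env itself with %-formatting. Below is a minimal, self-contained sketch of that expansion pattern; the keys and template values are hypothetical placeholders, not burlap's actual defaults.

# A minimal sketch of the template-expansion pattern, assuming env behaves
# like a plain dict (Fabric's env is an attribute-dict). The keys and
# template strings here are hypothetical placeholders, not burlap defaults.
env = {
    'apache_site': 'mysite',
    'apache_domain_template': '%(apache_site)s.example.com',
    'apache_docroot_template': '/usr/local/%(apache_site)s/src/static',
}
env['apache_domain'] = env['apache_domain_template'] % env
env['apache_docroot'] = env['apache_docroot_template'] % env
print(env['apache_domain'])   # -> mysite.example.com
print(env['apache_docroot'])  # -> /usr/local/mysite/src/static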
Example #3
def set_db(name=None, site=None, role=None):
    name = name or 'default'
    #    print '!'*80
    #    print 'set_db.site:',site
    #    print 'set_db.role:',role
    settings = get_settings(site=site, role=role)
    #    print 'settings:',settings
    #    print 'databases:',settings.DATABASES
    default_db = settings.DATABASES[name]
    env.db_name = default_db['NAME']
    env.db_user = default_db['USER']
    env.db_host = default_db['HOST']
    env.db_password = default_db['PASSWORD']
    env.db_engine = default_db['ENGINE']
Example #4
def set_db(name=None, site=None, role=None):
    name = name or "default"
    #    print '!'*80
    #    print 'set_db.site:',site
    #    print 'set_db.role:',role
    settings = get_settings(site=site, role=role)
    #    print 'settings:',settings
    #    print 'databases:',settings.DATABASES
    default_db = settings.DATABASES[name]
    env.db_name = default_db["NAME"]
    env.db_user = default_db["USER"]
    env.db_host = default_db["HOST"]
    env.db_password = default_db["PASSWORD"]
    env.db_engine = default_db["ENGINE"]
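set_db copies a single connection out of a Django-style DATABASES dict into env. The sketch below shows the shape of settings object it assumes; the engine, database name, and credentials are made up for illustration.

# A minimal sketch of the settings object set_db reads from: a Django-style
# DATABASES dict keyed by connection alias. All values are hypothetical.
class ExampleSettings(object):
    DATABASES = {
        'default': {
            'ENGINE': 'django.db.backends.postgresql_psycopg2',
            'NAME': 'myapp_db',
            'USER': 'myapp',
            'PASSWORD': 'secret',
            'HOST': 'localhost',
        },
    }

default_db = ExampleSettings.DATABASES['default']
print(default_db['ENGINE'])  # -> django.db.backends.postgresql_psycopg2
print(default_db['NAME'])    # -> myapp_db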
Example #5
def invalidate(*paths):
    """
    Issues invalidation requests to a Cloudfront distribution
    for the current static media bucket, triggering it to reload the specified
    paths from the origin.
    
    Note, only 1000 paths can be issued in a request at any one time.
    """
    if not paths:
        return
    # http://boto.readthedocs.org/en/latest/cloudfront_tut.html
    _settings = common.get_settings()
    if not _settings.AWS_STATIC_BUCKET_NAME:
        print 'No static media bucket set.'
        return
    if isinstance(paths, basestring):
        paths = paths.split(',')
    all_paths = map(str.strip, paths)
    #    assert len(paths) <= 1000, \
    #        'Cloudfront invalidation request limited to 1000 paths or less.'
    i = 0
    while 1:
        paths = all_paths[i:i + 1000]
        if not paths:
            break

        #print 'paths:',paths
        c = boto.connect_cloudfront()
        rs = c.get_all_distributions()
        target_dist = None
        for dist in rs:
            print dist.domain_name, dir(dist), dist.__dict__
            bucket_name = dist.origin.dns_name.replace('.s3.amazonaws.com', '')
            if bucket_name == _settings.AWS_STATIC_BUCKET_NAME:
                target_dist = dist
                break
        if not target_dist:
            raise Exception(
                'Target distribution %s could not be found in the AWS account.'
                % (_settings.AWS_STATIC_BUCKET_NAME,))
        print 'Using distribution %s associated with origin %s.' \
            % (target_dist.id, _settings.AWS_STATIC_BUCKET_NAME)
        inval_req = c.create_invalidation_request(target_dist.id, paths)
        print 'Issued invalidation request %s.' % (inval_req, )

        i += 1000
Example #6
def invalidate(*paths):
    """
    Issues invalidation requests to a Cloudfront distribution
    for the current static media bucket, triggering it to reload the specified
    paths from the origin.
    
    Note, only 1000 paths can be issued in a request at any one time.
    """
    if not paths:
        return
    # http://boto.readthedocs.org/en/latest/cloudfront_tut.html
    _settings = common.get_settings()
    if not _settings.AWS_STATIC_BUCKET_NAME:
        print "No static media bucket set."
        return
    if isinstance(paths, basestring):
        paths = paths.split(",")
    all_paths = map(str.strip, paths)
    #    assert len(paths) <= 1000, \
    #        'Cloudfront invalidation request limited to 1000 paths or less.'
    i = 0
    while 1:
        paths = all_paths[i : i + 1000]
        if not paths:
            break

        # print 'paths:',paths
        c = boto.connect_cloudfront()
        rs = c.get_all_distributions()
        target_dist = None
        for dist in rs:
            print dist.domain_name, dir(dist), dist.__dict__
            bucket_name = dist.origin.dns_name.replace(".s3.amazonaws.com", "")
            if bucket_name == _settings.AWS_STATIC_BUCKET_NAME:
                target_dist = dist
                break
        if not target_dist:
            raise Exception(
                "Target distribution %s could not be found in the AWS account."
                % (_settings.AWS_STATIC_BUCKET_NAME,)
            )
        print "Using distribution %s associated with origin %s." % (target_dist.id, _settings.AWS_STATIC_BUCKET_NAME)
        inval_req = c.create_invalidation_request(target_dist.id, paths)
        print "Issue invalidation request %s." % (inval_req,)

        i += 1000
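invalidate walks the account's CloudFront distributions via boto, matches the one whose origin DNS name corresponds to the static bucket, and submits paths in slices of 1000 because a single invalidation request is capped at 1000 paths. The slicing loop can be exercised without touching AWS; issue_batch below is a hypothetical stand-in for c.create_invalidation_request.

# A minimal sketch of the 1000-path batching loop, decoupled from boto so it
# runs anywhere. issue_batch is a hypothetical stand-in for
# c.create_invalidation_request(target_dist.id, paths).
def batched_invalidate(all_paths, issue_batch, batch_size=1000):
    i = 0
    while True:
        batch = all_paths[i:i + batch_size]
        if not batch:
            break
        issue_batch(batch)
        i += batch_size

def issue_batch(batch):
    print('would invalidate %d path(s)' % len(batch))

batched_invalidate(['/static/css/site.css', '/static/js/app.js'], issue_batch)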
Example #7
def configure(site=None, full=0, dryrun=0):
    """
    Installs and configures RabbitMQ.
    """
    full = int(full)
    dryrun = int(dryrun)

    from burlap import package
    #    assert env.rabbitmq_erlang_cookie
    if full:
        package.install_required(type=package.common.SYSTEM, service=RABBITMQ)

    #render_paths()

    params = set()  # [(user,vhost)]
    for site, site_data in common.iter_sites(site=site, renderer=render_paths):
        print '!' * 80
        print site
        _settings = common.get_settings(site=site)
        #print '_settings:',_settings
        if not _settings:
            continue
        print 'RabbitMQ:', _settings.BROKER_USER, _settings.BROKER_VHOST
        params.add((_settings.BROKER_USER, _settings.BROKER_VHOST))

    for user, vhost in params:
        env.rabbitmq_broker_user = user
        env.rabbitmq_broker_vhost = vhost
        with settings(warn_only=True):
            cmd = 'rabbitmqctl add_vhost %(rabbitmq_broker_vhost)s' % env
            print cmd
            if not dryrun:
                sudo(cmd)
            cmd = 'rabbitmqctl set_permissions -p %(rabbitmq_broker_vhost)s %(rabbitmq_broker_user)s ".*" ".*" ".*"' % env
            print cmd
            if not dryrun:
                sudo(cmd)
Example #8
def configure(site=None, full=0, dryrun=0):
    """
    Installs and configures RabbitMQ.
    """
    full = int(full)
    dryrun = int(dryrun)
    
    from burlap import package
    #    assert env.rabbitmq_erlang_cookie
    if full:
        package.install_required(type=package.common.SYSTEM, service=RABBITMQ)
    
    #render_paths()
    
    params = set() # [(user,vhost)]
    for site, site_data in common.iter_sites(site=site, renderer=render_paths):
        print '!'*80
        print site
        _settings = common.get_settings(site=site)
        #print '_settings:',_settings
        if not _settings:
            continue
        print 'RabbitMQ:',_settings.BROKER_USER, _settings.BROKER_VHOST
        params.add((_settings.BROKER_USER, _settings.BROKER_VHOST))
    
    for user, vhost in params:
        env.rabbitmq_broker_user = user
        env.rabbitmq_broker_vhost = vhost
        with settings(warn_only=True):
            cmd = 'rabbitmqctl add_vhost %(rabbitmq_broker_vhost)s' % env
            print cmd
            if not dryrun:
                sudo(cmd)
            cmd = 'rabbitmqctl set_permissions -p %(rabbitmq_broker_vhost)s %(rabbitmq_broker_user)s ".*" ".*" ".*"' % env
            print cmd
            if not dryrun:
                sudo(cmd)
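configure renders rabbitmqctl commands from env with %-formatting, echoes them, and only runs them through sudo when dryrun is off. Below is a standalone sketch of that render/echo/maybe-run pattern; run_cmd is a hypothetical stand-in for Fabric's sudo().

# A minimal sketch of the dry-run command pattern: render rabbitmqctl
# commands from a dict, print them, and only execute when dryrun is off.
# run_cmd is a hypothetical stand-in for Fabric's sudo().
def configure_vhost(user, vhost, run_cmd=None, dryrun=True):
    ctx = {'rabbitmq_broker_user': user, 'rabbitmq_broker_vhost': vhost}
    commands = [
        'rabbitmqctl add_vhost %(rabbitmq_broker_vhost)s' % ctx,
        'rabbitmqctl set_permissions -p %(rabbitmq_broker_vhost)s '
        '%(rabbitmq_broker_user)s ".*" ".*" ".*"' % ctx,
    ]
    for cmd in commands:
        print(cmd)
        if not dryrun and run_cmd is not None:
            run_cmd(cmd)

configure_vhost('myapp_user', 'myapp_vhost', dryrun=True)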
Example #9
def sync(sync_set, dryrun=0, auto_invalidate=True):
    """
    Uploads media to an Amazon S3 bucket using s3sync.
    
    Requires the s3sync gem: sudo gem install s3sync
    """
    env.dryrun = int(dryrun)
    _settings = get_settings()
    for k in _settings.__dict__.iterkeys():
        if k.startswith('AWS_'):
            env[k] = _settings.__dict__[k]

    #local('which s3sync')
    #print 'AWS_STATIC_BUCKET_NAME:',_settings.AWS_STATIC_BUCKET_NAME

    common.render_remote_paths()

    site_data = env.sites[env.SITE]
    env.update(site_data)

    rets = []
    for paths in env.s3_sync_sets[sync_set]:
        is_local = paths.get('is_local', True)
        local_path = paths['local_path'] % env
        remote_path = paths['remote_path']
        local_path = local_path % env

        if is_local:
            local('which s3sync')  #, capture=True)
            env.s3_local_path = os.path.abspath(local_path)
        else:
            run('which s3sync')
            env.s3_local_path = local_path

        if local_path.endswith('/') and not env.s3_local_path.endswith('/'):
            env.s3_local_path = env.s3_local_path + '/'

        env.s3_remote_path = remote_path % env

        print 'Syncing %s to %s...' % (env.s3_local_path, env.s3_remote_path)

        cmd = ('export AWS_ACCESS_KEY_ID=%(AWS_ACCESS_KEY_ID)s; '\
            'export AWS_SECRET_ACCESS_KEY=%(AWS_SECRET_ACCESS_KEY)s; '\
            's3sync --recursive --verbose --progress --public-read '\
            '%(s3_local_path)s %(s3_remote_path)s') % env
        print cmd
        if not int(dryrun):
            if is_local:
                rets.append(local(cmd, capture=True))  # can't see progress
                #rets.append(run(cmd))
            else:
                rets.append(run(cmd))

    if auto_invalidate:
        for ret in rets:
            print 's3sync:', ret
            paths = re.findall('(?:Create|Update)\s+node\s+([^\n]+)',
                               ret,
                               flags=re.DOTALL | re.MULTILINE | re.IGNORECASE)
            print 'paths:', paths
            #TODO:handle more than 1000 paths?
            invalidate(*paths)
Example #10
def sync(sync_set, dryrun=0, auto_invalidate=True):
    """
    Uploads media to an Amazon S3 bucket using s3sync.
    
    Requires the s3sync gem: sudo gem install s3sync
    """
    env.dryrun = int(dryrun)
    _settings = get_settings()
    for k in _settings.__dict__.iterkeys():
        if k.startswith("AWS_"):
            env[k] = _settings.__dict__[k]

    # local('which s3sync')
    # print 'AWS_STATIC_BUCKET_NAME:',_settings.AWS_STATIC_BUCKET_NAME

    common.render_remote_paths()

    site_data = env.sites[env.SITE]
    env.update(site_data)

    rets = []
    for paths in env.s3_sync_sets[sync_set]:
        is_local = paths.get("is_local", True)
        local_path = paths["local_path"] % env
        remote_path = paths["remote_path"]
        local_path = local_path % env

        if is_local:
            local("which s3sync")  # , capture=True)
            env.s3_local_path = os.path.abspath(local_path)
        else:
            run("which s3sync")
            env.s3_local_path = local_path

        if local_path.endswith("/") and not env.s3_local_path.endswith("/"):
            env.s3_local_path = env.s3_local_path + "/"

        env.s3_remote_path = remote_path % env

        print "Syncing %s to %s..." % (env.s3_local_path, env.s3_remote_path)

        cmd = (
            "export AWS_ACCESS_KEY_ID=%(AWS_ACCESS_KEY_ID)s; "
            "export AWS_SECRET_ACCESS_KEY=%(AWS_SECRET_ACCESS_KEY)s; "
            "s3sync --recursive --verbose --progress --public-read "
            "%(s3_local_path)s %(s3_remote_path)s"
        ) % env
        print cmd
        if not int(dryrun):
            if is_local:
                rets.append(local(cmd, capture=True))  # can't see progress
                # rets.append(run(cmd))
            else:
                rets.append(run(cmd))

    if auto_invalidate:
        for ret in rets:
            print "s3sync:", ret
            paths = re.findall(
                "(?:Create|Update)\s+node\s+([^\n]+)", ret, flags=re.DOTALL | re.MULTILINE | re.IGNORECASE
            )
            print "paths:", paths
            # TODO:handle more than 1000 paths?
            invalidate(*paths)
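The auto_invalidate step in sync scrapes "Create node" / "Update node" lines out of s3sync's verbose output and hands the matched paths to invalidate. The same regular expression can be exercised on its own; the sample output below is fabricated for illustration.

# A minimal sketch of the path extraction used by the auto_invalidate step.
# The sample s3sync output is fabricated for illustration only.
import re

sample_output = (
    'Create node static/css/site.css\n'
    'Update node static/js/app.js\n'
)
paths = re.findall(r'(?:Create|Update)\s+node\s+([^\n]+)', sample_output,
                   flags=re.DOTALL | re.MULTILINE | re.IGNORECASE)
print(paths)  # -> ['static/css/site.css', 'static/js/app.js']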