Example #1
def deploy(src_folder=None, mount_point='mnt', include_root=True):
    '''deploy files recursively to mounted point'''
    # Parse boolean fabric args
    include_root = prep_bool_arg(include_root)
    src_path = os.path.abspath(os.path.join(cwd, '..', src_folder))
    if not os.path.exists(src_path):
        logger.error('Could not find %s' % src_path)
        exit(1)

    mount_path = os.path.join(cwd, '..', mount_point)

    # When the root folder should not be included, copy only its contents (src/*)
    if not include_root:
        src_path += '%s*' % (os.path.sep)

    src_root_folder = src_path.split(os.path.sep)[-1]
    mount_root_path = os.path.join(mount_path, src_root_folder)
    if os.path.exists(mount_root_path):
        confirm(
            colored(
                'The source folder exists on the mounted point. Existing files will be overwritten. continue?',
                'red'))

    local('cp -v -r %s %s' % (src_path, mount_path))

    logger.info(
        "Please don't forget to unmount the server when you are done. 'fab umount'"
    )
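The prep_bool_arg call above is needed because Fabric 1.x passes command-line task arguments as strings, so a value like include_root=False arrives as the text 'False'. The helper itself is not shown on this page; a minimal sketch of such a parser, with the accepted spellings assumed, could be:

def prep_bool_arg(arg):
    """Coerce a Fabric CLI argument to a real boolean (sketch; the actual
    helper used by the example above may differ)."""
    if isinstance(arg, bool):
        return arg
    return str(arg).strip().lower() in ('1', 'true', 'yes', 'y')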
Example #2
def go(github_username='******', repository_name=None):
    """
    Execute the bootstrap tasks for a new project.
    """
    config_files = ' '.join(['PROJECT_README.md', 'app_config.py'])

    config = {}
    config['$NEW_PROJECT_SLUG'] = os.getcwd().split('/')[-1]
    config['$NEW_REPOSITORY_NAME'] = repository_name or config['$NEW_PROJECT_SLUG']
    config['$NEW_PROJECT_FILENAME'] = config['$NEW_PROJECT_SLUG'].replace('-', '_')
    config['$NEW_DISQUS_UUID'] = str(uuid.uuid1())

    utils.confirm("Have you created a Github repository named \"%s\"?" % config['$NEW_REPOSITORY_NAME'])

    for k, v in config.items():
        local('sed -i "" \'s|%s|%s|g\' %s' % (k, v, config_files))

    local('rm -rf .git')
    local('git init')
    local('mv PROJECT_README.md README.md')
    local('rm *.pyc')
    local('rm LICENSE')
    local('git add .')
    local('git add -f www/assets/.assetsignore')
    local('git commit -am "Initial import from app-template."')
    local('git remote add origin git@github.com:%s/%s.git' % (github_username, config['$NEW_REPOSITORY_NAME']))
    local('git push -u origin master')

    # Update app data
    execute('update')
Example #3
def go(github_username=app_config.GITHUB_USERNAME, repository_name=None):
    """
    Execute the bootstrap tasks for a new project.
    """
    config_files = " ".join(["PROJECT_README.md", "app_config.py", "crontab"])

    config = {}
    config["$NEW_PROJECT_SLUG"] = os.getcwd().split("/")[-1]
    config["$NEW_REPOSITORY_NAME"] = repository_name or config["$NEW_PROJECT_SLUG"]
    config["$NEW_PROJECT_FILENAME"] = config["$NEW_PROJECT_SLUG"].replace("-", "_")
    config["$NEW_DISQUS_UUID"] = str(uuid.uuid1())

    utils.confirm('Have you created a Github repository named "%s"?' % config["$NEW_REPOSITORY_NAME"])

    for k, v in config.items():
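        # Note: sed -i "" (an empty backup suffix) is the BSD/macOS form of
        # in-place editing; GNU sed on Linux would be plain `sed -i`.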
        local("sed -i \"\" 's|%s|%s|g' %s" % (k, v, config_files))

    local("rm -rf .git")
    local("git init")
    local("mv PROJECT_README.md README.md")
    local("rm *.pyc")
    local("rm LICENSE")
    local("git add .")
    local("git add -f www/assets/assetsignore")
    local('git commit -am "Initial import from app-template."')
    local("git remote add origin git@github.com:%s/%s.git" % (github_username, config["$NEW_REPOSITORY_NAME"]))
    local("git push -u origin master")

    # Update app data
    execute("update")
Example #4
def deploy(remote='origin'):
    """
    Deploy the latest app to S3 and, if configured, to our servers.
    """
    require('settings', provided_by=[production, staging])

    if app_config.DEPLOY_TO_SERVERS:
        require('branch', provided_by=[stable, master, branch])

        if (app_config.DEPLOYMENT_TARGET == 'production'
                and env.branch != 'stable'):
            utils.confirm(
                colored(
                    "You are trying to deploy the '%s' branch to production.\nYou should really only deploy a stable branch.\nDo you know what you're doing?"
                    % env.branch, "red"))

        servers.checkout_latest(remote)

        servers.fabcast('text.update')
        #servers.fabcast('assets.sync')
        servers.fabcast('data.update')

        if app_config.DEPLOY_CRONTAB:
            # servers.install_crontab()
            _servers.install_crontab()

        if app_config.DEPLOY_SERVICES:
            servers.deploy_confs()

    update()
    render.render_all()

    _deploy_to_graphics()
Example #5
def shiva_the_destroyer():
    """
    Deletes the app from s3
    """
    require("settings", provided_by=[production, staging])

    utils.confirm(
        "You are about to destroy everything deployed to %s for this project.\nDo you know what you're doing?"
        % app_config.DEPLOYMENT_TARGET
    )

    with settings(warn_only=True):
        sync = 'aws s3 rm %s --recursive --region "us-east-1"'

        for bucket in app_config.S3_BUCKETS:
            local(sync % ("s3://%s/%s/" % (bucket, app_config.PROJECT_SLUG)))

        if app_config.DEPLOY_TO_SERVERS:
            run("rm -rf %(SERVER_PROJECT_PATH)s" % app_config.__dict__)

            if app_config.DEPLOY_CRONTAB:
                uninstall_crontab()

            if app_config.DEPLOY_SERVICES:
                nuke_confs()
Example #6
def shiva_the_destroyer():
    """
    Deletes the app from s3
    """
    require('settings', provided_by=[production, staging])

    utils.confirm(
        colored(
            "You are about to destroy everything deployed to %s for this project.\nDo you know what you're doing?"
            % app_config.DEPLOYMENT_TARGET, "red"))

    with settings(warn_only=True):
        flat.delete_folder(
            app_config.S3_BUCKET,
            '%s%s' % (app_config.FACTCHECKS_DIRECTORY_PREFIX,
                      app_config.CURRENT_FACTCHECK))

        if app_config.DEPLOY_TO_SERVERS:
            servers.delete_project()

            if app_config.DEPLOY_CRONTAB:
                servers.uninstall_crontab()

            if app_config.DEPLOY_SERVICES:
                servers.nuke_confs()
Example #7
def deploy(remote='origin', reload=False):
    """
    Deploy the latest app to S3 and, if configured, to our servers.
    """
    require('settings', provided_by=[production, staging])

    if app_config.DEPLOY_TO_SERVERS:
        require('branch', provided_by=[stable, master, branch])

        if (app_config.DEPLOYMENT_TARGET == 'production' and env.branch != 'stable'):
            utils.confirm(
                colored("You are trying to deploy the '%s' branch to production.\nYou should really only deploy a stable branch.\nDo you know what you're doing?" % env.branch, "red")
            )

        servers.checkout_latest(remote)

        if app_config.DEPLOY_CRONTAB:
            servers.install_crontab()

        if app_config.DEPLOY_SERVICES:
            servers.deploy_confs()

    update()
    render.render_all()

    flat.deploy_folder(
        app_config.S3_BUCKET,
        'www',
        app_config.PROJECT_SLUG,
        headers={
            'Cache-Control': 'max-age=%i' % app_config.DEFAULT_MAX_AGE
        },
        ignore=[]
    )
Example #8
def update_template(stackname):
    """Limited update of the Cloudformation template.

    Resources can be added, but existing ones are immutable.

    We never add anything related to EC2 instances, as they are not
    supported anyway (they will come up as part of the template but
    without any software on them).

    EC2 instances must also be running while this is executed, or
    resources like their PublicIP will be inaccessible."""

    core_lifecycle.start(stackname)

    (pname, _) = core.parse_stackname(stackname)
    current_template = bootstrap.current_template(stackname)
    cfngen.write_template(stackname, json.dumps(current_template))

    more_context = cfngen.choose_config(stackname)
    delta = cfngen.template_delta(pname, **more_context)
    LOG.info("%s", pformat(delta))
    utils.confirm('Confirming changes to the stack template?')

    new_template = cfngen.merge_delta(stackname, delta)
    bootstrap.update_template(stackname, new_template)

    update(stackname)
Example #9
def shiva_the_destroyer():
    """
    Deletes the app from s3
    """
    require("settings", provided_by=[production, staging])

    utils.confirm(
        colored(
            "You are about to destroy everything deployed to %s for this project.\nDo you know what you're doing?"
            % app_config.DEPLOYMENT_TARGET,
            "red",
        )
    )

    with settings(warn_only=True):
        flat.delete_folder(app_config.S3_BUCKET, app_config.PROJECT_SLUG)

        if app_config.DEPLOY_TO_SERVERS:
            servers.delete_project()

            if app_config.DEPLOY_CRONTAB:
                servers.uninstall_crontab()

            if app_config.DEPLOY_SERVICES:
                servers.nuke_confs()
Example #10
def deploy(remote='origin'):
    """
    Deploy the latest app to S3 and, if configured, to our servers.
    """
    require('settings', provided_by=[production, staging])

    if app_config.DEPLOY_TO_SERVERS:
        require('branch', provided_by=[stable, master, branch])

        if (app_config.DEPLOYMENT_TARGET == 'production' and env.branch != 'stable'):
            utils.confirm(
                colored("You are trying to deploy the '%s' branch to production.\nYou should really only deploy a stable branch.\nDo you know what you're doing?" % env.branch, "red")
            )

        servers.checkout_latest(remote)

        servers.fabcast('text.update')
        servers.fabcast('assets.sync')
        servers.fabcast('data.update')

        if app_config.DEPLOY_CRONTAB:
            servers.install_crontab()

        if app_config.DEPLOY_SERVICES:
            servers.deploy_confs()

    update()
    render.render_all()
    _gzip('www', '.gzip')
    _deploy_to_s3()
Example #11
def app_template_bootstrap(project_name=None, repository_name=None):
    """
    Execute the bootstrap tasks for a new project.
    """
    config_files = ' '.join(['PROJECT_README.md', 'app_config.py'])

    config = {}
    config['$NEW_PROJECT_SLUG'] = os.getcwd().split('/')[-1]
    config['$NEW_PROJECT_NAME'] = project_name or config['$NEW_PROJECT_SLUG']
    config['$NEW_REPOSITORY_NAME'] = repository_name or config['$NEW_PROJECT_SLUG']
    config['$NEW_PROJECT_FILENAME'] = config['$NEW_PROJECT_SLUG'].replace('-', '_')

    utils.confirm("Have you created a Github repository named \"%s\"?" % config['$NEW_REPOSITORY_NAME'])

    for k, v in config.items():
        local('sed -i "" \'s|%s|%s|g\' %s' % (k, v, config_files))

    local('rm -rf .git')
    local('git init')
    local('mv PROJECT_README.md README.md')
    local('rm *.pyc')
    local('rm LICENSE')
    local('git add .')
    local('git commit -am "Initial import from app-template."')
    local('git remote add origin git@github.com:nprapps/%s.git' % config['$NEW_REPOSITORY_NAME'])
    local('git push -u origin master')

    local('mkdir ~/Dropbox/nprapps/assets/%s' % config['$NEW_PROJECT_NAME'])
Example #12
def post(slug):
    """
    Set the post to work on.
    """
    # Force root path every time
    fab_path = os.path.realpath(os.path.dirname(__file__))
    root_path = os.path.join(fab_path, '..')
    os.chdir(root_path)

    env.slug = utils._find_slugs(slug)

    if not env.slug:
        utils.confirm(
            'This post does not exist. Do you want to create a new post called %s?'
            % slug)
        _new(slug)
        return

    env.static_path = '%s/%s' % (app_config.POST_PATH, env.slug)

    if os.path.exists('%s/post_config.py' % env.static_path):
        # set slug for deployment in post_config
        find = "DEPLOY_SLUG = ''"
        replace = "DEPLOY_SLUG = '%s'" % env.slug
        utils.replace_in_file('%s/post_config.py' % env.static_path, find,
                              replace)

        env.post_config = imp.load_source(
            'post_config', '%s/post_config.py' % env.static_path)
        env.copytext_key = env.post_config.COPY_GOOGLE_DOC_KEY
    else:
        env.post_config = None
        env.copytext_key = None

    env.copytext_slug = env.slug
Example #13
def deploy(quick=None, remote='origin', reload=False):
    """
    Deploy the latest app to S3 and, if configured, to our servers.
    """
    require('settings', provided_by=[random_prod, production, staging])

    if app_config.DEPLOYMENT_TARGET == 'production' or app_config.DEPLOYMENT_TARGET == 'random_prod':
        utils.confirm(
            colored(
                "You are trying to deploy to production this project %s.\nDo you know what you're doing?"
                % app_config.PROJECT_SLUG, "red"))

    if app_config.DEPLOY_TO_SERVERS:
        require('branch', provided_by=[stable, master, branch])

        if (app_config.DEPLOYMENT_TARGET == 'production'
                and env.branch != 'stable'):
            utils.confirm(
                colored(
                    "You are trying to deploy the '%s' branch to production.\nYou should really only deploy a stable branch.\nDo you know what you're doing?"
                    % env.branch, "red"))

        servers.checkout_latest(remote)

        servers.fabcast('text.update')
        servers.fabcast('data.update')
        servers.fabcast('assets.sync')

        if app_config.DEPLOY_CRONTAB:
            servers.install_crontab()

        if app_config.DEPLOY_SERVICES:
            servers.deploy_confs()

    if quick != 'quick':
        update()

    render.render_all()

    # Clear files that should never be deployed
    local('rm -rf www/live-data')

    flat.deploy_folder(
        app_config.S3_BUCKET,
        'www',
        app_config.PROJECT_SLUG,
        headers={'Cache-Control': 'max-age=%i' % app_config.DEFAULT_MAX_AGE},
        ignore=['www/assets/*', 'www/live-data/*'])

    flat.deploy_folder(
        app_config.S3_BUCKET,
        'www/assets',
        '%s/assets' % app_config.PROJECT_SLUG,
        headers={'Cache-Control': 'max-age=%i' % app_config.ASSETS_MAX_AGE})

    if reload:
        reset_browsers()

    if not check_timestamp():
        reset_browsers()
Example #14
def deploy(remote='origin'):
    """
    Deploy the latest app to S3 and, if configured, to our servers.
    """
    require('settings', provided_by=[production, staging])

    if app_config.DEPLOY_TO_SERVERS:
        require('branch', provided_by=[stable, master, branch])

    if (app_config.DEPLOYMENT_TARGET == 'production' and env.branch != 'stable'):
        utils.confirm("You are trying to deploy the '%s' branch to production.\nYou should really only deploy a stable branch.\nDo you know what you're doing?" % env.branch)

    if app_config.DEPLOY_TO_SERVERS:
        checkout_latest(remote)

        #fabcast('update_copy')
        #fabcast('assets.sync')
        #fabcast('update_data')

        if app_config.DEPLOY_CRONTAB:
            install_crontab()

        if app_config.DEPLOY_SERVICES:
            deploy_confs()

    render()
    _gzip('www', '.gzip')
    _deploy_to_s3()
    _cleanup_minified_includes()
Example #15
def deploy(remote='origin'):
    """
    Deploy the latest app to S3 and, if configured, to our servers.
    """
    require('settings', provided_by=[production, staging])

    if app_config.DEPLOY_TO_SERVERS:
        require('branch', provided_by=[stable, master, branch])

    if (app_config.DEPLOYMENT_TARGET == 'production' and env.branch != 'stable'):
        utils.confirm("You are trying to deploy the '%s' branch to production.\nYou should really only deploy a stable branch.\nDo you know what you're doing?" % env.branch)

    if app_config.DEPLOY_TO_SERVERS:
        checkout_latest(remote)

        fabcast('update_copy')
        fabcast('assets_sync')
        fabcast('update_data')

        if app_config.DEPLOY_CRONTAB:
            install_crontab()

        if app_config.DEPLOY_SERVICES:
            deploy_confs()

    compiled_includes = render.render_all()
    render.render_dorms(compiled_includes)
    sass()
    # _gzip('www', '.gzip')
    # _deploy_to_s3()
    # _gzip('.dorms_html', '.dorms_gzip')
    # _deploy_to_s3('.dorms_gzip')
    local('rm -rf dist')
    local('cp -r .dorms_html dist')
    local('cp -r www/ dist/')
Example #16
def go(github_username='******', repository_name=None):
    """
    Execute the bootstrap tasks for a new project.
    """
    config_files = ' '.join(['PROJECT_README.md', 'app_config.py'])

    config = {}
    config['$NEW_PROJECT_SLUG'] = os.getcwd().split('/')[-1]
    config['$NEW_REPOSITORY_NAME'] = repository_name or config[
        '$NEW_PROJECT_SLUG']
    config['$NEW_PROJECT_FILENAME'] = config['$NEW_PROJECT_SLUG'].replace(
        '-', '_')
    config['$NEW_DISQUS_UUID'] = str(uuid.uuid1())

    utils.confirm("Have you created a Github repository named \"%s\"?" %
                  config['$NEW_REPOSITORY_NAME'])

    for k, v in config.items():
        local('sed -i "" \'s|%s|%s|g\' %s' % (k, v, config_files))

    local('rm -rf .git')
    local('git init')
    local('mv PROJECT_README.md README.md')
    local('rm *.pyc')
    local('rm LICENSE')
    local('git add .')
    local('git commit -am "Initial import from app-template."')
    local('git remote add origin git@github.com:%s/%s.git' %
          (github_username, config['$NEW_REPOSITORY_NAME']))
    local('git push -u origin master')

    # Update app data
    execute('update')
Example #17
def app_template_bootstrap(project_name=None, repository_name=None):
    """
    Execute the bootstrap tasks for a new project.
    """
    with settings(warn_only=True):
        config_files = ' '.join(['PROJECT_README.md', 'app_config.py'])

        config = {}
        config['$NEW_PROJECT_SLUG'] = os.getcwd().split('/')[-1]
        config['$NEW_PROJECT_NAME'] = project_name or config['$NEW_PROJECT_SLUG']
        config['$NEW_REPOSITORY_NAME'] = repository_name or config['$NEW_PROJECT_SLUG']
        config['$NEW_PROJECT_FILENAME'] = config['$NEW_PROJECT_SLUG'].replace('-', '_')

        utils.confirm("Have you created a Github repository named \"%s\"?" % config['$NEW_REPOSITORY_NAME'])

        for k, v in config.items():
            local('sed -i "" \'s|%s|%s|g\' %s' % (k, v, config_files))

        local('rm -rf .git')
        local('git init')
        local('mv PROJECT_README.md README.md')
        local('rm *.pyc')
        local('rm LICENSE')
        local('git add .')
        local('git commit -am "Initial import from app-template."')
        local('git remote add origin git@github.com:BeckyBowers/%s.git' % config['$NEW_REPOSITORY_NAME'])
        local('git push -u origin master')
Example #18
def shiva_the_destroyer():
    """
    Deletes the app from s3
    """
    require('settings', provided_by=[production, staging])

    utils.confirm(
        colored("You are about to destroy everything deployed to %s for this project.\nDo you know what you're doing?" % app_config.DEPLOYMENT_TARGET, "red")
    )

    with settings(warn_only=True):
        sync = 'aws s3 rm s3://%s/%s/ --recursive --region "%s"' % (
            app_config.S3_BUCKET['bucket_name'],
            app_config.PROJECT_SLUG,
            app_config.S3_BUCKET['region']
        ) 

        local(sync)

        if app_config.DEPLOY_TO_SERVERS:
            servers.delete_project()

            if app_config.DEPLOY_CRONTAB:
                servers.uninstall_crontab()

            if app_config.DEPLOY_SERVICES:
                servers.nuke_confs()
Example #19
def deploy(remote='origin'):
    """
    Deploy the latest app to S3 and, if configured, to our servers.
    """
    require('settings', provided_by=[production, staging])

    if app_config.DEPLOY_TO_SERVERS:
        require('branch', provided_by=[stable, master, branch])

    if (app_config.DEPLOYMENT_TARGET == 'production' and env.branch != 'stable'):
        utils.confirm("You are trying to deploy the '%s' branch to production.\nYou should really only deploy a stable branch.\nDo you know what you're doing?" % env.branch)

    if app_config.DEPLOY_TO_SERVERS:
        checkout_latest(remote)

        fabcast('update_copy')
        fabcast('assets_sync')
        fabcast('update_data')

        if app_config.DEPLOY_CRONTAB:
            install_crontab()

        if app_config.DEPLOY_SERVICES:
            deploy_confs()

    render()
    _gzip('www', '.gzip')
    _deploy_to_s3()
Example #20
def post(slug):
    """
    Set the post to work on.
    """
    # Force root path every time
    fab_path = os.path.realpath(os.path.dirname(__file__))
    root_path = os.path.join(fab_path, '..')
    os.chdir(root_path)

    env.slug = utils._find_slugs(slug)

    if not env.slug:
        utils.confirm('This post does not exist. Do you want to create a new post called %s?' % slug)
        _new(slug)
        return

    env.static_path = '%s/%s' % (app_config.POST_PATH, env.slug)

    if os.path.exists('%s/post_config.py' % env.static_path):
        # set slug for deployment in post_config
        find = "DEPLOY_SLUG = ''"
        replace = "DEPLOY_SLUG = '%s'" % env.slug
        utils.replace_in_file('%s/post_config.py' % env.static_path, find, replace)

        env.post_config = imp.load_source('post_config', '%s/post_config.py' % env.static_path)
        env.copytext_key = env.post_config.COPY_GOOGLE_DOC_KEY
    else:
        env.post_config = None
        env.copytext_key = None

    env.copytext_slug = env.slug
Example #21
def deploy(remote="origin"):
    """
    Deploy the latest app to S3 and, if configured, to our servers.
    """
    require("settings", provided_by=[production, staging])

    if app_config.DEPLOY_TO_SERVERS:
        require("branch", provided_by=[stable, master, branch])

    if app_config.DEPLOYMENT_TARGET == "production" and env.branch != "stable":
        utils.confirm(
            "You are trying to deploy the '%s' branch to production.\nYou should really only deploy a stable branch.\nDo you know what you're doing?"
            % env.branch
        )

    if app_config.DEPLOY_TO_SERVERS:
        checkout_latest(remote)

        fabcast("update_copy")
        fabcast("assets_sync")
        fabcast("update_data")

        if app_config.DEPLOY_CRONTAB:
            install_crontab()

        if app_config.DEPLOY_SERVICES:
            deploy_confs()

    compiled_includes = render()
    render_dorms(compiled_includes)
    _gzip("www", ".gzip")
    _deploy_to_s3()
    _gzip(".dorms_html", ".dorms_gzip")
    _deploy_to_s3(".dorms_gzip")
Example #22
def shiva_the_destroyer():
    """
    Deletes the app from s3
    """
    require('settings', provided_by=[production, staging])

    utils.confirm(
        colored(
            "You are about to destroy everything deployed to %s for this project.\nDo you know what you're doing?"
            % app_config.DEPLOYMENT_TARGET, "red"))

    with settings(warn_only=True):
        sync = 'aws s3 rm %s --recursive --region "us-east-1"'

        for bucket in app_config.S3_BUCKETS:
            local(sync % ('s3://%s/%s/' % (bucket, app_config.PROJECT_SLUG)))

        if app_config.DEPLOY_TO_SERVERS:
            servers.delete_project()

            if app_config.DEPLOY_CRONTAB:
                servers.uninstall_crontab()

            if app_config.DEPLOY_SERVICES:
                servers.nuke_confs()
Example #23
def generate_stack_from_input(pname, instance_id=None, alt_config=None):
    """creates a new CloudFormation/Terraform file for the given project `pname` with
    the identifier `instance_id` using the (optional) project configuration `alt_config`."""
    more_context = check_user_input(pname, instance_id, alt_config)
    stackname = more_context['stackname']

    # ~TODO: return the templates used here, so that they can be passed down to~
    # ~bootstrap.create_stack() without relying on them implicitly existing~
    # ~on the filesystem~
    # lsh@2021-07: having the files on the filesystem with predictable names seems more
    # robust than carrying it around as a parameter through complex logic.
    _, cloudformation_file, terraform_file = cfngen.generate_stack(
        pname, **more_context)

    if cloudformation_file:
        print('cloudformation template:')
        print(json.dumps(json.load(open(cloudformation_file, 'r')), indent=4))
        print()

    if terraform_file:
        print('terraform template:')
        print(json.dumps(json.load(open(terraform_file, 'r')), indent=4))
        print()

    if cloudformation_file:
        LOG.info('wrote: %s' % os.path.abspath(cloudformation_file))

    if terraform_file:
        LOG.info('wrote: %s' % os.path.abspath(terraform_file))

    # see: `buildercore.config.BUILDER_NON_INTERACTIVE` for skipping confirmation prompts
    utils.confirm('the above resources will be created')

    return stackname
Example #24
def app_template_bootstrap(github_username="******", project_name=None, repository_name=None):
    """
    Execute the bootstrap tasks for a new project.
    """
    config_files = " ".join(["PROJECT_README.md", "app_config.py"])

    config = {}
    config["$NEW_PROJECT_SLUG"] = os.getcwd().split("/")[-1]
    config["$NEW_PROJECT_NAME"] = project_name or config["$NEW_PROJECT_SLUG"]
    config["$NEW_REPOSITORY_NAME"] = repository_name or config["$NEW_PROJECT_SLUG"]
    config["$NEW_PROJECT_FILENAME"] = config["$NEW_PROJECT_SLUG"].replace("-", "_")

    utils.confirm('Have you created a Github repository named "%s"?' % config["$NEW_REPOSITORY_NAME"])

    for k, v in config.items():
        local("sed -i \"\" 's|%s|%s|g' %s" % (k, v, config_files))

    local("rm -rf .git")
    local("git init")
    local("mv PROJECT_README.md README.md")
    local("rm *.pyc")
    local("rm LICENSE")
    local("git add .")
    local('git commit -am "Initial import from app-template."')
    local("git remote add origin git@github.com:%s/%s.git" % (github_username, config["$NEW_REPOSITORY_NAME"]))
    local("git push -u origin master")

    bootstrap()
Example #25
def remaster(stackname, new_master_stackname):
    "tell minion who their new master is. deletes any existing master key on minion"
    # TODO: turn this into a decorator
    import cfn
    # start the machine if it's stopped
    # you might also want to acquire a lock so alfred doesn't stop things
    cfn._check_want_to_be_running(stackname, 1)

    master_ip = _cached_master_ip(new_master_stackname)
    LOG.info('re-mastering %s to %s', stackname, master_ip)

    context = context_handler.load_context(stackname)

    # remove if no longer an issue
    # if context.get('ec2') == True:
    #    # TODO: duplicates bad ec2 data wrangling in cfngen.build_context
    #    # ec2 == True for some reason, which is completely useless
    #    LOG.warn("bad context for stack: %s", stackname)
    #    context['ec2'] = {}
    #    context['project']['aws']['ec2'] = {}
    if not context.get('ec2'):
        LOG.info("no ec2 context, skipping %s", stackname)
        return

    if context['ec2'].get('master_ip') == master_ip:
        LOG.info("already remastered: %s", stackname)
        try:
            utils.confirm("Skip?")
            return
        except KeyboardInterrupt:
            LOG.info("not skipping")

    LOG.info("upgrading salt client")
    pdata = core.project_data_for_stackname(stackname)
    context['project']['salt'] = pdata['salt']

    LOG.info("setting new master address")
    cfngen.set_master_address(pdata, context, master_ip)  # mutates context

    # update context
    LOG.info("updating context")
    context_handler.write_context(stackname, context)

    # update buildvars
    LOG.info("updating buildvars")
    buildvars.refresh(stackname, context)

    # remove knowledge of old master
    def work():
        remote_sudo("rm -f /etc/salt/pki/minion/minion_master.pub"
                    )  # destroy the old master key we have

    LOG.info("removing old master key from minion")
    core.stack_all_ec2_nodes(stackname, work, username=config.BOOTSTRAP_USER)

    # update ec2 nodes
    LOG.info("updating nodes")
    bootstrap.update_ec2_stack(stackname, context, concurrency='serial')
    return True
Example #26
def create_ami(stackname, name=None):
    pname = core.project_name_from_stackname(stackname)
    msg = "this will create a new AMI for the project %r" % pname
    confirm(msg)

    amiid = bakery.create_ami(stackname, name)
    print(amiid)
    errcho('update project file with new ami %s. these changes must be merged and committed manually' % amiid)
Example #27
def create_ami(stackname, name=None):
    pname = core.project_name_from_stackname(stackname)
    msg = "this will create a new AMI for the project %r" % pname
    confirm(msg)

    amiid = bakery.create_ami(stackname, name)
    print(amiid)
    errcho('update project file with new ami %s. these changes must be merged and committed manually' % amiid)
Example #28
def deploy(quick=None, remote='origin', reload=False):
    """
    Deploy the latest app to S3 and, if configured, to our servers.
    """
    require('settings', provided_by=[production, staging])

    if app_config.DEPLOY_TO_SERVERS:
        require('branch', provided_by=[stable, master, branch])

        if (app_config.DEPLOYMENT_TARGET == 'production' and env.branch != 'stable'):
            utils.confirm(
                colored("You are trying to deploy the '%s' branch to production.\nYou should really only deploy a stable branch.\nDo you know what you're doing?" % env.branch, "red")
            )

        servers.checkout_latest(remote)

        servers.fabcast('text.update')
        servers.fabcast('assets.sync')
        servers.fabcast('data.update')

        if app_config.DEPLOY_CRONTAB:
            servers.install_crontab()

        if app_config.DEPLOY_SERVICES:
            servers.deploy_confs()

    if quick != 'quick':
        update()

    render.render_all()

    # Clear files that should never be deployed
    local('rm -rf www/live-data')

    flat.deploy_folder(
        app_config.S3_BUCKET,
        'www',
        app_config.PROJECT_SLUG,
        headers={
            'Cache-Control': 'max-age=%i' % app_config.DEFAULT_MAX_AGE
        },
        ignore=['www/assets/*', 'www/live-data/*']
    )

    flat.deploy_folder(
        app_config.S3_BUCKET,
        'www/assets',
        '%s/assets' % app_config.PROJECT_SLUG,
        headers={
            'Cache-Control': 'max-age=%i' % app_config.ASSETS_MAX_AGE
        }
    )

    if reload:
        reset_browsers()

    if not check_timestamp():
        reset_browsers()
Example #29
def deploy(remote='origin', reload=False):
    """
    Deploy the latest app to S3 and, if configured, to our servers.
    """
    require('settings', provided_by=[production, staging])
    if app_config.DEPLOY_TO_SERVERS:
        require('branch', provided_by=[stable, master, branch])

        if (app_config.DEPLOYMENT_TARGET == 'production'
                and env.branch != 'stable'):
            utils.confirm(
                colored(
                    "You are trying to deploy the '%s' branch to production.\nYou should really only deploy a stable branch.\nDo you know what you're doing?"
                    % env.branch, "red"))

        servers.checkout_latest(remote)

        # servers.fabcast('text.update')
        # servers.fabcast('assets.sync')
        # servers.fabcast('data.update')

        if app_config.DEPLOY_CRONTAB:
            servers.install_crontab()

        if app_config.DEPLOY_SERVICES:
            servers.deploy_confs()

    update()
    render.render_all()

    # Clear files that should never be deployed
    local('rm -rf www/live-data')

    flat.deploy_folder(
        app_config.S3_BUCKET,
        'www',
        '%s%s' %
        (app_config.DEBATE_DIRECTORY_PREFIX, app_config.CURRENT_DEBATE),
        headers={'Cache-Control': 'max-age=%i' % app_config.DEFAULT_MAX_AGE},
        ignore=['www/assets/*', 'www/live-data/*'])

    flat.deploy_folder(
        app_config.S3_BUCKET,
        'www/assets',
        '%s%s/assets' %
        (app_config.DEBATE_DIRECTORY_PREFIX, app_config.CURRENT_DEBATE),
        headers={'Cache-Control': 'max-age=%i' % app_config.ASSETS_MAX_AGE})

    # DEPLOY STATIC FACTCHECK FROM LOCAL ENVIRONMENT
    if app_config.DEPLOY_STATIC_FACTCHECK:
        execute('deploy_factcheck')

    if reload:
        reset_browsers()

    if not check_timestamp():
        reset_browsers()
Example #30
def remaster(stackname, new_master_stackname):
    "tell minion who their new master is. deletes any existing master key on minion"
    # TODO: turn this into a decorator
    import cfn
    # start the machine if it's stopped
    # you might also want to acquire a lock so alfred doesn't stop things
    cfn._check_want_to_be_running(stackname, 1)

    master_ip = _cached_master_ip(new_master_stackname)
    LOG.info('re-mastering %s to %s', stackname, master_ip)

    context = context_handler.load_context(stackname)

    # remove if no longer an issue
    # if context.get('ec2') == True:
    #    # TODO: duplicates bad ec2 data wrangling in cfngen.build_context
    #    # ec2 == True for some reason, which is completely useless
    #    LOG.warn("bad context for stack: %s", stackname)
    #    context['ec2'] = {}
    #    context['project']['aws']['ec2'] = {}
    if not context.get('ec2'):
        LOG.info("no ec2 context, skipping %s", stackname)
        return

    if context['ec2'].get('master_ip') == master_ip:
        LOG.info("already remastered: %s", stackname)
        try:
            utils.confirm("Skip?")
            return
        except KeyboardInterrupt:
            LOG.info("not skipping")

    LOG.info("upgrading salt client")
    pdata = core.project_data_for_stackname(stackname)
    context['project']['salt'] = pdata['salt']

    LOG.info("setting new master address")
    cfngen.set_master_address(pdata, context, master_ip) # mutates context

    # update context
    LOG.info("updating context")
    context_handler.write_context(stackname, context)

    # update buildvars
    LOG.info("updating buildvars")
    buildvars.refresh(stackname, context)

    # remove knowledge of old master
    def work():
        sudo("rm -f /etc/salt/pki/minion/minion_master.pub")  # destroy the old master key we have
    LOG.info("removing old master key from minion")
    core.stack_all_ec2_nodes(stackname, work, username=config.BOOTSTRAP_USER)

    # update ec2 nodes
    LOG.info("updating nodes")
    bootstrap.update_ec2_stack(stackname, context, concurrency='serial')
    return True
Example #31
def update_infrastructure(stackname, skip=None, start=['ec2']):
    """Limited update of the Cloudformation template and/or Terraform template.

    Resources can be added, but most of the existing ones are immutable.

    Some resources are updatable in place.

    We never add anything related to EC2 instances, as they are not
    supported anyway (they will come up as part of the template but
    without any software on them).

    EC2 instances must also be running while this is executed, or
    resources like their PublicIP will be inaccessible.

    Allows skipping the EC2, SQS and S3 updates by passing `skip=ec2\\,sqs\\,s3`.

    By default EC2 instances are started, but this can be avoided by passing `start=`"""

    skip = skip.split(",") if skip else []
    start = start.split(",") if isinstance(start, str) else start or []

    (pname, _) = core.parse_stackname(stackname)
    more_context = {}
    context, delta, current_context = cfngen.regenerate_stack(
        stackname, **more_context)

    if _are_there_existing_servers(current_context) and 'ec2' in start:
        core_lifecycle.start(stackname)
    LOG.info("Create: %s", pformat(delta.plus))
    LOG.info("Update: %s", pformat(delta.edit))
    LOG.info("Delete: %s", pformat(delta.minus))
    LOG.info("Terraform delta: %s", delta.terraform)

    # see: `buildercore.config.BUILDER_NON_INTERACTIVE` for skipping confirmation prompts
    utils.confirm(
        'Confirming changes to CloudFormation and Terraform templates?')

    context_handler.write_context(stackname, context)

    cloudformation.update_template(stackname, delta.cloudformation)
    terraform.update_template(stackname)

    # TODO: move inside bootstrap.update_stack
    # EC2
    if _are_there_existing_servers(context) and not 'ec2' in skip:
        # the /etc/buildvars.json file may need to be updated
        buildvars.refresh(stackname, context)
        update(stackname)

    # SQS
    if context.get('sqs', {}) and not 'sqs' in skip:
        bootstrap.update_stack(stackname, service_list=['sqs'])

    # S3
    if context.get('s3', {}) and not 's3' in skip:
        bootstrap.update_stack(stackname, service_list=['s3'])
Example #32
def _disclaimer():
    title("fix_infrastructure")
    disclaimer = """this task performs checks and suggests fixes.

it does *not* modify infrastructure.

ctrl-c will safely quit this task."""

    # see: `buildercore.config.BUILDER_NON_INTERACTIVE` for skipping confirmation prompts
    utils.confirm(disclaimer)
    return success()
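The builder tasks on this page (update_template, update_infrastructure, generate_stack_from_input, remaster, _disclaimer) all rely on a utils.confirm that blocks until the operator acknowledges the prompt, lets Ctrl-C abort safely, and can be skipped for non-interactive runs. That helper is not shown here; a minimal sketch of this style of prompt, with the environment-variable name assumed, might look like:

import os

def confirm(message):
    """Show a message and wait for the operator to press Enter (sketch only).

    Ctrl-C raises KeyboardInterrupt, which callers either let abort the task
    or catch deliberately, as the remaster() examples above do. The
    BUILDER_NON_INTERACTIVE check is an assumed stand-in for the project's
    real non-interactive switch.
    """
    if os.environ.get('BUILDER_NON_INTERACTIVE'):
        return
    print(message)
    input('press Enter to continue or Ctrl-C to abort: ')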
Example #33
def deploy(slug=''):
    """
    Deploy the latest app to S3 and, if configured, to our servers.
    """
    require('settings', provided_by=[production, staging])

    if not slug:
        utils.confirm('You are about to deploy ALL graphics. Are you sure you want to do this? (Deploy a single graphic with "deploy:SLUG".)')

    render(slug)
    _gzip('%s/%s' % (app_config.GRAPHICS_PATH, slug), '.gzip/graphics/%s' % slug)
    _deploy_to_s3('.gzip/graphics/%s' % slug)
Example #34
def install_package(*args, **kargs):
    '''Provide a string, an iterable, or pass values via *args.
    Each is a URL or endpoint used by pip.
    '''
    v = True
    for url in args:
        log('installing %s' % url, color='yellow')
        confirm('Would you like to install package \'%s\'' % url, True)
        # To avoid a nasty bug in the future: if one package fails,
        # the entire procedure reports False.
        v = v and pip_install(url)
    return v
Example #35
def deploy(remote='origin'):
    """
    Deploy the latest app to S3 and, if configured, to our servers.
    """
    require('settings', provided_by=[production, staging])

    if app_config.DEPLOY_TO_SERVERS:
        require('branch', provided_by=[stable, master, branch])

        if (app_config.DEPLOYMENT_TARGET == 'production'
                and env.branch != 'stable'):
            utils.confirm(
                colored(
                    "You are trying to deploy the '%s' branch to production.\nYou should really only deploy a stable branch.\nDo you know what you're doing?"
                    % env.branch, "red"))

        servers.checkout_latest(remote)

        servers.fabcast('text.update')
        #servers.fabcast('assets.sync')
        servers.fabcast('data.update')

        if app_config.DEPLOY_CRONTAB:
            # servers.install_crontab()
            _servers.install_crontab()

        if app_config.DEPLOY_SERVICES:
            servers.deploy_confs()

    update()
    render.render_all()

    # # Clear files that should never be deployed
    # local('rm -rf www/live-data')

    # flat.deploy_folder(
    #     'www',
    #     app_config.PROJECT_SLUG,
    #     headers={
    #         'Cache-Control': 'max-age=%i' % app_config.DEFAULT_MAX_AGE
    #     },
    #     ignore=['www/assets/*', 'www/live-data/*']
    # )

    # flat.deploy_folder(
    #     'www/assets',
    #     '%s/assets' % app_config.PROJECT_SLUG,
    #     headers={
    #         'Cache-Control': 'max-age=%i' % app_config.ASSETS_MAX_AGE
    #     }
    # )
    _deploy_to_graphics()
Example #36
def update_infrastructure(stackname, skip=None, start=['ec2']):
    """Limited update of the Cloudformation template and/or Terraform template.

    Resources can be added, but most of the existing ones are immutable.

    Some resources are updatable in place.

    We never add anything related to EC2 instances, as they are not
    supported anyway (they will come up as part of the template but
    without any software on them).

    EC2 instances must also be running while this is executed, or
    resources like their PublicIP will be inaccessible.

    Allows skipping the EC2, SQS and S3 updates by passing `skip=ec2\\,sqs\\,s3`.

    By default EC2 instances are started, but this can be avoided by passing `start=`"""

    skip = skip.split(",") if skip else []
    start = start.split(",") if isinstance(start, str) else start or []

    (pname, _) = core.parse_stackname(stackname)
    more_context = {}
    context, delta, current_context = cfngen.regenerate_stack(stackname, **more_context)

    if _are_there_existing_servers(current_context) and 'ec2' in start:
        core_lifecycle.start(stackname)
    LOG.info("Create: %s", pformat(delta.plus))
    LOG.info("Update: %s", pformat(delta.edit))
    LOG.info("Delete: %s", pformat(delta.minus))
    LOG.info("Terraform delta: %s", delta.terraform)
    utils.confirm('Confirming changes to CloudFormation and Terraform templates?')

    context_handler.write_context(stackname, context)

    cloudformation.update_template(stackname, delta.cloudformation)
    terraform.update_template(stackname)

    # TODO: move inside bootstrap.update_stack
    # EC2
    if _are_there_existing_servers(context) and not 'ec2' in skip:
        # the /etc/buildvars.json file may need to be updated
        buildvars.refresh(stackname, context)
        update(stackname)

    # SQS
    if context.get('sqs', {}) and not 'sqs' in skip:
        bootstrap.update_stack(stackname, service_list=['sqs'])

    # S3
    if context.get('s3', {}) and not 's3' in skip:
        bootstrap.update_stack(stackname, service_list=['s3'])
Example #37
def deploy(slug=''):
    """
    Deploy the latest app to S3 and, if configured, to our servers.
    """
    require('settings', provided_by=[production, staging])

    if not slug:
        utils.confirm('You are about to deploy ALL graphics. Are you sure you want to do this? (Deploy a single graphic with "deploy:SLUG".)')

    render(slug)
    _gzip('www', '.gzip')
    _gzip(app_config.GRAPHICS_PATH, '.gzip/graphics')
    _deploy_to_s3('.gzip/graphics/%s' % slug)
Example #38
def shiva_the_destroyer():
    """
    Deletes the app from s3
    """
    require('settings', provided_by=[production, staging])

    utils.confirm("You are about to destroy everything deployed to %s for this project.\nDo you know what you're doing?" % app_config.DEPLOYMENT_TARGET)

    with settings(warn_only=True):
        sync = 'aws s3 rm %s --recursive --region "us-east-1"'

        for bucket in app_config.S3_BUCKETS:
            local(sync % ('s3://%s/%s/' % (bucket, app_config.PROJECT_SLUG)))
Example #39
def deploy(remote='origin'):
    """
    Deploy the latest app to S3 and, if configured, to our servers.
    """
    require('settings', provided_by=[production, staging])

    if app_config.DEPLOY_TO_SERVERS:
        require('branch', provided_by=[stable, master, branch])

        if (app_config.DEPLOYMENT_TARGET == 'production' and env.branch != 'stable'):
            utils.confirm(
                colored("You are trying to deploy the '%s' branch to production.\nYou should really only deploy a stable branch.\nDo you know what you're doing?" % env.branch, "red")
            )

        servers.checkout_latest(remote)

        servers.fabcast('text.update')
        #servers.fabcast('assets.sync')
        servers.fabcast('data.update')

        if app_config.DEPLOY_CRONTAB:
            # servers.install_crontab()
            _servers.install_crontab()

        if app_config.DEPLOY_SERVICES:
            servers.deploy_confs()

    update()
    compiled_includes = render.render_all()
    render.render_restaurants(compiled_includes)

    # # Clear files that should never be deployed
    # local('rm -rf www/live-data')

    # flat.deploy_folder(
    #     'www',
    #     app_config.PROJECT_SLUG,
    #     headers={
    #         'Cache-Control': 'max-age=%i' % app_config.DEFAULT_MAX_AGE
    #     },
    #     ignore=['www/assets/*', 'www/live-data/*']
    # )

    # flat.deploy_folder(
    #     'www/assets',
    #     '%s/assets' % app_config.PROJECT_SLUG,
    #     headers={
    #         'Cache-Control': 'max-age=%i' % app_config.ASSETS_MAX_AGE
    #     }
    # )
    _deploy_to_graphics()
Example #40
def go(github_username=app_config.GITHUB_USERNAME, repository_name=None):
    """
    Execute the bootstrap tasks for a new project.
    """
    check_credentials()
    config_files = ' '.join(['PROJECT_README.md', 'app_config.py', 'crontab'])

    config = {}
    config['$NEW_PROJECT_SLUG'] = os.getcwd().split('/')[-1]
    config['$NEW_REPOSITORY_NAME'] = repository_name or config[
        '$NEW_PROJECT_SLUG']
    config['$NEW_PROJECT_FILENAME'] = config['$NEW_PROJECT_SLUG'].replace(
        '-', '_')
    config['$NEW_DISQUS_UUID'] = str(uuid.uuid1())

    utils.confirm("Have you created a Github repository named \"%s\"?" %
                  config['$NEW_REPOSITORY_NAME'])

    # Create the spreadsheet
    title = '%s COPY' % config['$NEW_PROJECT_SLUG']
    new_spreadsheet_key = create_spreadsheet(title)
    if new_spreadsheet_key:
        config[app_config.COPY_GOOGLE_DOC_KEY] = new_spreadsheet_key
    else:
        logger.warn(
            'No spreadsheet created, you will need to update COPY_GOOGLE_DOC_KEY manually.'
        )

    for k, v in config.items():
        local('sed -i \'s|%s|%s|g\' %s' % (k, v, config_files))

    local('mv PROJECT_README.md README.md')
    with settings(warn_only=True):
        local('rm *.pyc')
        local('rm LICENSE')
        local('rm -rf .git')
    local('git init')
    local('git add .')
    local('git add -f www/assets/assetsignore')
    local('git commit -am "Initial import from app-template."')
    local('git remote add origin git@github.com:%s/%s.git' %
          (github_username, config['$NEW_REPOSITORY_NAME']))
    local('git push -u origin master')

    # Update app data
    execute('update')

    if new_spreadsheet_key:
        logger.info('You can view your COPY spreadsheet at:')
        logger.info(SPREADSHEET_VIEW_TEMPLATE % new_spreadsheet_key)
Example #41
def deploy(remote='origin'):
    """
    Deploy the latest app to S3 and, if configured, to our servers.
    """
    require('settings', provided_by=[production, staging])

    if app_config.DEPLOY_TO_SERVERS:
        require('branch', provided_by=[stable, master, branch])

        if (app_config.DEPLOYMENT_TARGET == 'production' and env.branch != 'stable'):
            utils.confirm(
                colored("You are trying to deploy the '%s' branch to production.\nYou should really only deploy a stable branch.\nDo you know what you're doing?" % env.branch, "red")
            )

        servers.checkout_latest(remote)

        servers.fabcast('text.update')
        servers.fabcast('assets.sync')
        servers.fabcast('data.update')

        if app_config.DEPLOY_CRONTAB:
            servers.install_crontab()

        if app_config.DEPLOY_SERVICES:
            servers.deploy_confs()

    update()
    render.render_all()

    # Clear files that should never be deployed
    local('rm -rf www/live-data')

    flat.deploy_folder(
        'www',
        app_config.PROJECT_SLUG,
        max_age=app_config.DEFAULT_MAX_AGE,
        ignore=['www/assets/*']
    )

    flat.deploy_folder(
        '.briefings_html',
        app_config.PROJECT_SLUG,
        max_age=app_config.DEFAULT_MAX_AGE,
    )

    flat.deploy_folder(
        'www/assets',
        '%s/assets' % app_config.PROJECT_SLUG,
        max_age=app_config.ASSETS_MAX_AGE
    )
Example #42
def cp_backup_folder(folder):
    """
    Copies copydoc.html to the frontend folder
    """
    if not folder:
        print("you need to provide one of the backup folder names to copy from")
        exit()
    else:
        utils.confirm(
            colored(
                "You are trying to copy an old backup version of copydoc to the production app\nDo you know what you're doing?",
                "red"))
        cmd = 'aws s3 cp --acl public-read s3://election-backup.apps.npr.org/debates/%s/copydoc.html https://s3.amazonaws.com/apps.npr.org/dailygraphics/graphics/presidential-debate-factcheck-20160926/copydoc.html' % (
            folder)
        local(cmd)
Example #43
def shiva_the_destroyer():
    """
    Deletes the app from s3
    """
    require('settings', provided_by=[production, staging])

    utils.confirm(
        "You are about to destroy everything deployed to %s for this project.\nDo you know what you're doing?"
        % app_config.DEPLOYMENT_TARGET)

    with settings(warn_only=True):
        sync = 'aws s3 rm %s --recursive --region "us-east-1"'

        for bucket in app_config.S3_BUCKETS:
            local(sync % ('s3://%s/%s/' % (bucket, app_config.PROJECT_SLUG)))
Example #44
def deploy(remote="origin"):
    """
    Deploy the latest app to S3 and, if configured, to our servers.
    """
    require("settings", provided_by=[production, staging])

    if app_config.DEPLOY_TO_SERVERS:
        require("branch", provided_by=[stable, master, branch])

        if app_config.DEPLOYMENT_TARGET == "production" and env.branch != "stable":
            utils.confirm(
                colored(
                    "You are trying to deploy the '%s' branch to production.\nYou should really only deploy a stable branch.\nDo you know what you're doing?"
                    % env.branch,
                    "red",
                )
            )

        servers.checkout_latest(remote)

        servers.fabcast("text.update")
        servers.fabcast("assets.sync")
        servers.fabcast("data.update")

        if app_config.DEPLOY_CRONTAB:
            servers.install_crontab()

        if app_config.DEPLOY_SERVICES:
            servers.deploy_confs()

    update()
    render.render_all()

    # Clear files that should never be deployed
    local("rm -rf www/live-data")

    flat.deploy_folder(
        "www",
        app_config.PROJECT_SLUG,
        headers={"Cache-Control": "max-age=%i" % app_config.DEFAULT_MAX_AGE},
        ignore=["www/assets/*", "www/live-data/*"],
    )

    flat.deploy_folder(
        "www/assets",
        "%s/assets" % app_config.PROJECT_SLUG,
        headers={"Cache-Control": "max-age=%i" % app_config.ASSETS_MAX_AGE},
    )
Example #45
def load_db(backend_name):
    clean_build()

    if not os.path.isdir(settings.DB_FILES_DIR):
        os.makedirs(settings.DB_FILES_DIR)

    backend = _get_backend(backend_name)
    if confirm("Do you want to delete the current database?"):
        backend.check_ssh_access()
        backend.stop()
        backend.reset_db()
        print('Bootstrapping service with domain and superuser')
        backend.bootstrap_service()

        backend.start()
        print('Creating test users')
        users = backend.create_users(settings.NUM_UNIQUE_USERS)

        user_db = os.path.join(settings.DB_FILES_DIR, 'userdb-{}.csv'.format(backend_name))
        with open(user_db, "w") as file:
            for user in users:
                file.write("{},{},{}\n".format(
                    user.id, user.username, user.password
                ))

    backend.load_data(settings.DB_FILES_DIR)
Example #46
def api():
    credentials = load_credentials()

    if not credentials or confirm(
        'Do you want to switch to a new user?', default=False
    ):
        credentials = get_credentials()
        with open('.twitter_credentials.yml', 'w') as f:
            yaml.dump(
                {
                    'consumer_key': credentials['ck'],
                    'consumer_secret': credentials['cs'],
                    'access_token': credentials['at'],
                    'access_token_secret': credentials['ats'],
                },
                f,
                default_flow_style=False,
            )

    a: twitter.Api = twitter.Api(
        consumer_key=credentials['ck'],
        consumer_secret=credentials['cs'],
        access_token_key=credentials['at'],
        access_token_secret=credentials['ats'],
    )

    try:
        a.VerifyCredentials()
    except TwitterError:
        print('User logged out')
        exit(0)

    return a
Example #47
    def team_delete(self):
        index = self.team_table.selectedIndexes()

        # Ensure there is a selection
        # Maybe disable in context menu if there is no selection?
        if index:
            index = index[0]
        else:
            utils.alert("No Team Selected", "Please Select a team to delete",
                        "warn")
            return

        backup = []
        for i in range(self.data_model.columnCount()):
            value = self.data_model.data(self.data_model.index(index.row(), i))

            # Replace empty strings with none to better represent the database state
            if value == "":
                value = None
            backup.append(value)

        confirmation = utils.confirm(
            "Delete Team",
            f"Do you want to delete the following team?\n{backup[2]}")
        if confirmation == QtWidgets.QMessageBox.Yes:
            self.logger.info("Deleting Team")
            self.logger.txn(f"[Deleted] Team Data - {backup}")
            self.data_model.deleteRowFromTable(index.row())
            self.data_model.select()
        else:
            self.logger.debug("Canceled team delete request")
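Unlike the Fabric tasks earlier on this page, the utils.confirm used by team_delete (and by tie_del below) evidently wraps a Qt message box and returns which button was clicked, hence the comparison against QtWidgets.QMessageBox.Yes. A minimal sketch of such a helper, assuming PyQt5 and an assumed signature, could be:

from PyQt5 import QtWidgets

def confirm(title, message):
    """Ask a yes/no question in a message box and return the clicked button
    (sketch; the real utils.confirm in the example's project may differ)."""
    return QtWidgets.QMessageBox.question(
        None, title, message,
        QtWidgets.QMessageBox.Yes | QtWidgets.QMessageBox.No,
        QtWidgets.QMessageBox.No)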
Example #48
def assets_rm(path):
    """
    Remove an asset from s3 and locally
    """
    bucket = _assets_get_bucket()

    file_list = glob(path)

    if len(file_list) > 0:
        utils.confirm("You are about to destroy %i files. Are you sure?" % len(file_list))

        for local_path in file_list:
            key_name = local_path.replace('www/assets', app_config.PROJECT_FILENAME, 1)
            key = bucket.get_key(key_name)
            
            _assets_delete(local_path, key)
Example #49
def assets_rm(path):
    """
    Remove an asset from s3 and locally
    """
    bucket = _assets_get_bucket()

    file_list = glob(path)

    if len(file_list) > 0:
        utils.confirm("You are about to destroy %i files. Are you sure?" % len(file_list))

        for local_path in file_list:
            key_name = local_path.replace('www/assets', app_config.PROJECT_SLUG, 1)
            key = bucket.get_key(key_name)
            
            _assets_delete(local_path, key)
Example #50
def attempt_connection(profile_name):

    print('Sending connection request...')
    command_result = try_call(['netsh', 'wlan', 'connect', profile_name])

    if not command_result['Success']:
        print(command_result['Output'])

    try:
        # Check if the request was successful
        for i in range(5):
            sleep(1)
            if get_connection_status()['State'] == 'connected':
                print('Connected.')
                return

    except KeyboardInterrupt:
        pass

    print('Request timed out.' + NL)
    print(
        'This may mean the network is no longer available or the network password is incorrect'
    )
    if confirm('Delete associated profile?'):
        delete_profile(profile_name)
Example #51
def cp_backup_folder(folder):
    """
    Copies copydoc.html to the frontend folder
    """
    require('settings', provided_by=[production])
    if not folder:
        print("you need to provide one of the backup folder names to copy from")
        exit()
    else:
        utils.confirm(
            colored(
                "You are trying to copy an old backup version of the liveblog to the production app in %s\nDo you know what you're doing?"
                % (app_config.S3_DEPLOY_URL), "red"))
        cmd = 'aws s3 cp --acl public-read s3://liveblog-backup.apps.npr.org/liveblogs/20170120-inauguration-liveblog/%s/ %s/ --recursive' % (
            folder, app_config.S3_DEPLOY_URL)
        local(cmd)
Example #52
    def tie_del(self):
        index = self.table.selectedIndexes()

        if index:
            index = index[0]
        else:
            utils.alert("No Tie Selected", "Please Select a tie to delete",
                        "warn")
            return

        backup = []
        for i in range(self.model.columnCount()):
            value = self.model.data(self.model.index(index.row(), i))
            backup.append(value)

        confirmation = utils.confirm(
            "Delete Tie",
            f"Do you want to delete the Tie between the following teams?\n{backup[1]}  -  {backup[2]}",
        )
        if confirmation == QtWidgets.QMessageBox.Yes:
            self.logger.info("Deleting Tie")
            self.logger.txn(f"[Deleted] Tie Data - {backup}")
            self.model.deleteRowFromTable(index.row())
            self.model.select()
        else:
            self.logger.debug("Canceled team delete request")
Example #53
0
def deploy(analyse=True, remote='origin'):
    """
    Deploy the latest app to S3 and, if configured, to our servers.
    """
    require('settings', provided_by=[production, staging])

    if app_config.DEPLOY_TO_SERVERS:
        require('branch', provided_by=[stable, master, branch])

        if (app_config.DEPLOYMENT_TARGET == 'production'
                and env.branch != 'stable'):
            utils.confirm(
                colored(
                    "You are trying to deploy the '%s' branch to production.\nYou should really only deploy a stable branch.\nDo you know what you're doing?"
                    % env.branch, "red"))

        servers.checkout_latest(remote)

        servers.fabcast('text.update')
        servers.fabcast('assets.sync')

        if app_config.DEPLOY_CRONTAB:
            servers.install_crontab()

        if app_config.DEPLOY_SERVICES:
            servers.deploy_confs()

    if analyse == True:
        execute('analysis.analyse')
        execute('render.render_all')

        flat.deploy_folder(app_config.S3_BUCKET['bucket_name'],
                           'www',
                           app_config.PROJECT_SLUG,
                           headers={
                               'Cache-Control':
                               'max-age=%i' % app_config.DEFAULT_MAX_AGE
                           },
                           ignore=['www/assets/*'])

        flat.deploy_folder(app_config.S3_BUCKET['bucket_name'],
                           'www/assets',
                           '%s/assets' % app_config.PROJECT_SLUG,
                           headers={
                               'Cache-Control':
                               'max-age=%i' % app_config.ASSETS_MAX_AGE
                           })
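The deploy task above pushes two folders with `flat.deploy_folder`, whose implementation is not part of the snippet. A rough, hypothetical sketch of that kind of upload, written against the boto 2 API that the assets examples also use (argument names follow the call above; the real flat module may differ):

import os
from fnmatch import fnmatch

import boto
from boto.s3.key import Key

def deploy_folder(bucket_name, src, prefix, headers=None, ignore=None):
    # Hypothetical sketch: upload every file under `src` to
    # s3://bucket_name/prefix/..., applying the given headers and skipping
    # any path that matches an ignore pattern.
    bucket = boto.connect_s3().get_bucket(bucket_name)
    for root, dirs, files in os.walk(src):
        for name in files:
            local_path = os.path.join(root, name)
            if any(fnmatch(local_path, pattern) for pattern in ignore or []):
                continue
            key = Key(bucket)
            key.key = '%s/%s' % (prefix, os.path.relpath(local_path, src))
            key.set_contents_from_filename(local_path,
                                           headers=headers or {},
                                           policy='public-read')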
Example #54
0
def commandLine():
    while True:
        try:
            command = input(': ').split(' ')
            if command[0].upper() == 'WHO':
                print('You are: ' + usr)
            elif command[0].upper() == 'QUIT':
                if utils.confirm('Are you sure you want to quit?'):
                    quitGame()
            elif command[0].upper() == 'RESET':
                if utils.confirm('Are you sure you want to reset?'):
                    newGame()
            else:
                utils.execute(command)
                entities.player.previousCommand = command
        except KeyboardInterrupt:
            quitGame()
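The command loop above only needs `utils.confirm` to return a boolean. A minimal sketch of such a console helper, assuming a plain yes/no prompt (the project's actual `utils` module may behave differently):

def confirm(message, default='n'):
    # Hypothetical sketch of a console confirm() helper compatible with the calls above.
    suffix = '[Y/n]' if default.lower() == 'y' else '[y/N]'
    answer = input('%s %s ' % (message, suffix)).strip().lower()
    if not answer:
        answer = default.lower()
    return answer in ('y', 'yes')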
Example #55
0
def personInteraction():
    entities.player.location = entities.getLocation('Interact')
    personType = random.randint(1, 3)
    if personType == 1:
        person = [random.choice(entities.enemies), random.choice(entities.weapons)]
        if utils.confirm('You see a mean-looking person in the distance. Do you choose to approach?'):
            utils.fight(person[0], person[1])
        else:
            print('You run away in fear.')
    elif personType == 2:
        if entities.worldEntities:
            person = random.choice(entities.worldEntities)
            person.inventory.append(random.choice(entities.weapons))
            if utils.confirm('You see a familiar, mean-looking person in the distance. Do you choose to approach?'):
                utils.fight(person, person.inventory[0])
            else:
                print('You run away in fear.')
        else:
            person = random.choice(entities.enemies)
            person.inventory.append(random.choice(entities.weapons))
            if utils.confirm('You see a mean-looking person in the distance. Do you choose to approach?'):
                utils.fight(person, person.inventory[0])
            else:
                print('You run away in fear.')
    else:
        person = [random.choice(entities.helpers), random.choice(entities.helperItems)]
        if utils.confirm('You see a kind-looking person in the distance. Do you choose to approach?'):
            print('The person is a(n) ' + person[0].name + '!')
            if person[0] == entities.getHelper('old lady'):
                if random.randint(0,1) == 0:
                    utils.fight(entities.getEnemy('old lady'), entities.getWeapon('cane'))
                else:
                    time.sleep(0.5)
                    print('The %s smiles and holds a(n) %s out in her hand.' % (person[0].name, person[1].name))
                    entities.player.inventory.append(person[1])
                    time.sleep(0.2)
                    print(person[1].name + ' added to your inventory!')
            else:
                time.sleep(0.5)
                print('The %s smiles and holds a(n) %s out in her hand.' % (person[0].name, person[1].name))
                entities.player.inventory.append(person[1])
                time.sleep(0.2)
                print(person[1].name + ' added to your inventory!')
        else:
            print('You walk away')
            time.sleep(2)
Example #56
0
    def remove(self):
        directory = os.path.dirname(self.vmx)
        name = os.path.basename(directory)
        if utils.confirm("Are you sure you want to delete {name} at {directory}".format(name=name, directory=directory), default='n'):
            print "Deleting..."
            shutil.rmtree(directory)
        else:
            print "Deletion aborted"
Example #57
0
    def gce_delete_all_denovo_instances(self):
        """ Deletes all denovo instances """

        self.logger.info("Deleting all denovo instances...")
        if not confirm():
            return
        for instance_name in self._list_denovo_instances():
            self.delete_instance(instance_name)
Example #58
0
def go(github_username=app_config.GITHUB_USERNAME, repository_name=None):
    """
    Execute the bootstrap tasks for a new project.
    """
    check_credentials()
    config_files = ' '.join(['PROJECT_README.md', 'app_config.py', 'crontab'])

    config = {}
    config['$NEW_PROJECT_SLUG'] = os.getcwd().split('/')[-1]
    config['$NEW_REPOSITORY_NAME'] = repository_name or config['$NEW_PROJECT_SLUG']
    config['$NEW_PROJECT_FILENAME'] = config['$NEW_PROJECT_SLUG'].replace('-', '_')
    config['$NEW_DISQUS_UUID'] = str(uuid.uuid1())

    utils.confirm("Have you created a Github repository named \"%s\"?" % config['$NEW_REPOSITORY_NAME'])

    # Create the spreadsheet
    title = '%s COPY' % config['$NEW_PROJECT_SLUG']
    new_spreadsheet_key = create_spreadsheet(title)
    if new_spreadsheet_key:
        config[app_config.COPY_GOOGLE_DOC_KEY] = new_spreadsheet_key
    else:
        print 'No spreadsheet created, you will need to update COPY_GOOGLE_DOC_KEY manually.'

    for k, v in config.items():
        local('sed -i "" \'s|%s|%s|g\' %s' % (k, v, config_files))

    local('rm -rf .git')
    local('git init')
    local('mv PROJECT_README.md README.md')
    local('rm *.pyc')
    local('rm LICENSE')
    local('git add .')
    local('git add -f www/assets/assetsignore')
    local('git commit -am "Initial import from app-template."')
    local('git remote add origin [email protected]:%s/%s.git' % (github_username, config['$NEW_REPOSITORY_NAME']))
    local('git push -u origin master')

    # Update app data
    execute('update')

    if new_spreadsheet_key:
        print 'You can view your COPY spreadsheet at:'
        print SPREADSHEET_VIEW_TEMPLATE % new_spreadsheet_key
Example #59
0
def rm(path):
    """
    Remove an asset from s3 and locally
    """
    bucket = _assets_get_bucket()

    slug = path.split('/assets')[0]
    static_path = '%s/%s' % (app_config.GRAPHICS_PATH, slug)
    assets_root = '%s/assets' % static_path
    real_path = '%s/%s' % (app_config.GRAPHICS_PATH, path)

    file_list = glob(real_path)

    found_folder = True

    # Add files in folders, instead of folders themselves (S3 doesn't have folders)
    while found_folder:
        found_folder = False

        for local_path in file_list:
            if os.path.isdir(local_path):
                found_folder = True

                file_list.remove(local_path)

                for path in os.listdir(local_path):
                    file_list.append(os.path.join(local_path, path))

    if len(file_list) > 0:
        utils.confirm("You are about to destroy %i files. Are you sure?" % len(file_list))

        for local_path in file_list:
            print local_path

            if os.path.isdir(local_path):
                # os.listdir() returns bare names; join them with the parent so the paths stay valid
                file_list.extend(os.path.join(local_path, p) for p in os.listdir(local_path))

                continue

            assets_slug = '%s/%s' % (app_config.ASSETS_SLUG, slug)
            key_name = local_path.replace(assets_root, assets_slug, 1)
            key = bucket.get_key(key_name)

            _assets_delete(local_path, key)