Exemplo n.º 1
0
def deploy_data(build_type='prod'):
    """
    Logs into the remote machine and loads the data dump that has been
    published to the same machine.

    The dump file is expected at /srv/datadump/pg_dump.dmp.

    :param build_type: settings flavor passed through to
        directory_permissions() (default 'prod').
    :raises SystemExit: with status 1 if pg_restore fails (typically a
        missing 'calthorpe' ROLE).
    """

    # Stop any connections to the db before dropping it; warn_only because
    # supervisor may already be stopped.
    with settings(warn_only=True):
        sudo('supervisorctl stop all')
        sudo('/etc/init.d/supervisor stop')

    with postgres_env_password_loaded():
        db_conn_string = build_postgres_conn_string()
        database_name = os.environ['DATABASE_NAME']

        # Some versions of postgres do not have --if-exists, so just ignore
        # the error if the database doesn't exist
        with settings(warn_only=True):
            drop_db_connections(database_name)
            run('dropdb {db_conn_string}'.format(
                db_conn_string=db_conn_string))

        run_as_pg('createdb -O {db_user} {db_conn_string}'.format(
            db_user=os.environ['DATABASE_USERNAME'],
            db_conn_string=db_conn_string))

        psql(
            "ALTER DATABASE {database_name} SET search_path = '$user',public,postgis;"
            .format(database_name=database_name))

        # warn_only so we can inspect the result and print a helpful hint
        # instead of letting Fabric abort with a generic error.
        with settings(warn_only=True):
            result = run(
                'pg_restore {db_conn_string} -d {dbname} {local_dump_file}'.
                format(db_conn_string=build_postgres_conn_string(omit_db=True),
                       dbname=database_name,
                       local_dump_file='/srv/datadump/pg_dump.dmp'))
        if result.failed:
            print "ERROR: You probably don't have 'calthorpe' ROLE defined. Fix by executing:"
            print "CREATE ROLE calthorpe; GRANT calthorpe to {user};".format(
                user=os.environ['DATABASE_USERNAME'])
            # BUG FIX: SystemExit() with no argument exits with status 0,
            # reporting success on the failure path; exit 1 instead.
            raise SystemExit(1)

    # Replace the served media folder with the copy shipped in the dump.
    if os.path.exists('/srv/calthorpe_media'):
        sudo('rm -r /srv/calthorpe_media')
    sudo('cp -R /srv/datadump/media/calthorpe_media /srv/')
    directory_permissions(build_type=build_type)

    # start connections to the db
    sudo('/etc/init.d/supervisor start')
    sudo('supervisorctl start all')
Exemplo n.º 2
0
def deploy_data(build_type='prod'):
    """
    Logs into the remote machine and loads the data dump that has been
    published to the same machine.

    The dump file is expected at /srv/datadump/pg_dump.dmp.

    :param build_type: settings flavor passed through to
        directory_permissions() (default 'prod').
    :raises SystemExit: with status 1 if pg_restore fails (typically a
        missing 'calthorpe' ROLE).
    """

    # Stop any connections to the db before dropping it; warn_only because
    # supervisor may already be stopped.
    with settings(warn_only=True):
        sudo('supervisorctl stop all')
        sudo('/etc/init.d/supervisor stop')

    with postgres_env_password_loaded():
        db_conn_string = build_postgres_conn_string()
        database_name = os.environ['DATABASE_NAME']

        # Some versions of postgres do not have --if-exists, so just ignore
        # the error if the database doesn't exist
        with settings(warn_only=True):
            drop_db_connections(database_name)
            run('dropdb {db_conn_string}'.format(db_conn_string=db_conn_string))

        run_as_pg('createdb -O {db_user} {db_conn_string}'.format(
            db_user=os.environ['DATABASE_USERNAME'], db_conn_string=db_conn_string))

        psql("ALTER DATABASE {database_name} SET search_path = '$user',public,postgis;".format(database_name=database_name))

        # warn_only so we can inspect the result and print a helpful hint
        # instead of letting Fabric abort with a generic error.
        with settings(warn_only=True):
            result = run('pg_restore {db_conn_string} -d {dbname} {local_dump_file}'.format(
                db_conn_string=build_postgres_conn_string(omit_db=True),
                dbname=database_name,
                local_dump_file='/srv/datadump/pg_dump.dmp'))
        if result.failed:
            print "ERROR: You probably don't have 'calthorpe' ROLE defined. Fix by executing:"
            print "CREATE ROLE calthorpe; GRANT calthorpe to {user};".format(user=os.environ['DATABASE_USERNAME'])
            # BUG FIX: SystemExit() with no argument exits with status 0,
            # reporting success on the failure path; exit 1 instead.
            raise SystemExit(1)

    # Replace the served media folder with the copy shipped in the dump.
    if os.path.exists('/srv/calthorpe_media'):
        sudo('rm -r /srv/calthorpe_media')
    sudo('cp -R /srv/datadump/media/calthorpe_media /srv/')
    directory_permissions(build_type=build_type)

    # start connections to the db
    sudo('/etc/init.d/supervisor start')
    sudo('supervisorctl start all')
Exemplo n.º 3
0
def fetch_datadump(force_local_db_destroy=True, use_local=True, build_type='prod'):
    """
    Sync local database and media folder with official data_dump
    'fetch_datadump:force_local_db_destroy:True' to not prompt for db destruction confirmation
    'fetch_datadump:use_local:True avoid going through the ssh wire
    """

    database_name = os.environ['DATABASE_NAME']

    if not force_local_db_destroy:
        msg = 'You are DESTROYING the local "{dbname}" database! Continue?'.format(
            dbname=database_name)

        accepted_database_destroy = console.confirm(msg, default=False)

        if not accepted_database_destroy:
            print 'Aborting fetch_datadump()'
            return

    if not use_local:
        media_root = get_django_setting(build_type, 'MEDIA_ROOT')
        rsync_project(os.path.join(media_root, '/media'),
                      local_dir=media_root, default_opts='-pthrvzO')

    # rsync postgres datadump file into local folder
    # The reason we don't use tempfile.gettempdir() is that we always want the file to exist
    # in the same place so we can take advantage of rsync's delta file-chunk speedup. In OSX,
    # after every reboot, gettempdir returns a different directory defeating the point of using
    # rsync. We use the '/tmp/' folder instead

    temp_dir = get_django_setting(build_type, 'TEMP_DIR')
    local_dump_file = os.path.join(temp_dir, 'pg_dump.dmp')
    if not use_local:
        dump_path = get_django_setting(build_type, 'DATA_DUMP_PATH')
        rsync_project(os.path.join(dump_path, '/pg_dump.dmp', local_dir=local_dump_file))

    with postgres_env_password_loaded():
        db_conn_string = build_postgres_conn_string()

        # Some versions of postgres do not have --if-exists, so just ignore the error if it doesn't exist
        with settings(warn_only=True):
            local('dropdb {db_conn_string}'.format(db_conn_string=db_conn_string))

        local('createdb -O {db_user} {db_conn_string}'.format(
            db_user=os.environ['DATABASE_USERNAME'], db_conn_string=db_conn_string))

        local('''psql -h {1} -p {2} -c 'ALTER DATABASE {0} SET search_path = "$user",public,postgis;' {0}'''.format(
            os.environ['DATABASE_NAME'],
            os.environ['DATABASE_HOST'],
            os.environ['DATABASE_PORT']
        ))

        with settings(warn_only=True):
            result = local('pg_restore {db_conn_string} -d {dbname} {local_dump_file}'.format(
                db_conn_string=build_postgres_conn_string(omit_db=True),
                dbname=os.environ['DATABASE_NAME'],
                local_dump_file=local_dump_file))
        if result.failed:
            print "ERROR: You probably don't have 'calthorpe' ROLE defined. Fix by executing:"
            print "CREATE ROLE calthorpe; GRANT calthorpe to {user};".format(
                user=os.environ['DATABASE_USERNAME'])
            raise SystemExit()
Exemplo n.º 4
0
def fetch_datadump(force_local_db_destroy=True, use_local=True, build_type='prod'):
    """
    Sync local database and media folder with official data_dump
    'fetch_datadump:force_local_db_destroy:True' to not prompt for db destruction confirmation
    'fetch_datadump:use_local:True avoid going through the ssh wire
    """

    database_name = os.environ['DATABASE_NAME']

    if not force_local_db_destroy:
        msg = 'You are DESTROYING the local "{dbname}" database! Continue?'.format(
            dbname=database_name)

        accepted_database_destroy = console.confirm(msg, default=False)

        if not accepted_database_destroy:
            print 'Aborting fetch_datadump()'
            return

    if not use_local:
        media_root = get_django_setting(build_type, 'MEDIA_ROOT')
        rsync_project(os.path.join(media_root, '/media'),
                      local_dir=media_root, default_opts='-pthrvzO')

    # rsync postgres datadump file into local folder
    # The reason we don't use tempfile.gettempdir() is that we always want the file to exist
    # in the same place so we can take advantage of rsync's delta file-chunk speedup. In OSX,
    # after every reboot, gettempdir returns a different directory defeating the point of using
    # rsync. We use the '/tmp/' folder instead

    temp_dir = get_django_setting(build_type, 'TEMP_DIR')
    local_dump_file = os.path.join(temp_dir, 'pg_dump.dmp')
    if not use_local:
        dump_path = get_django_setting(build_type, 'DATA_DUMP_PATH')
        rsync_project(os.path.join(dump_path, '/pg_dump.dmp', local_dir=local_dump_file))

    with postgres_env_password_loaded():
        db_conn_string = build_postgres_conn_string()

        # Some versions of postgres do not have --if-exists, so just ignore the error if it doesn't exist
        with settings(warn_only=True):
            local('dropdb {db_conn_string}'.format(db_conn_string=db_conn_string))

        local('createdb -O {db_user} {db_conn_string}'.format(
            db_user=os.environ['DATABASE_USERNAME'], db_conn_string=db_conn_string))

        local('''psql -h {1} -p {2} -c 'ALTER DATABASE {0} SET search_path = "$user",public,postgis;' {0}'''.format(
            os.environ['DATABASE_NAME'],
            os.environ['DATABASE_HOST'],
            os.environ['DATABASE_PORT']
        ))

        with settings(warn_only=True):
            result = local('pg_restore {db_conn_string} -d {dbname} {local_dump_file}'.format(
                db_conn_string=build_postgres_conn_string(omit_db=True),
                dbname=os.environ['DATABASE_NAME'],
                local_dump_file=local_dump_file))
        if result.failed:
            print "ERROR: You probably don't have 'calthorpe' ROLE defined. Fix by executing:"
            print "CREATE ROLE calthorpe; GRANT calthorpe to {user};".format(
                user=os.environ['DATABASE_USERNAME'])
            raise SystemExit()
Exemplo n.º 5
0
class Command(BaseCommand):
    """
    Django management command that creates a data dump: rsyncs the media
    folder into a destination folder and writes a pg_dump of the default
    database next to it.
    """

    args = '<destination_folder> (optional - if not specified use settings.py option)'
    help = 'Creates a data dump'

    # I hate having to use optparse. We should be using argparse.
    # When https://code.djangoproject.com/ticket/19973 gets fixed, we can
    # use the new way of parsing (which will likely use argparse instead).
    # In the meantime we'll stick with the documented way of doing this
    option_list = BaseCommand.option_list + (make_option(
        '--destination-folder',
        action='store',
        type='string',
        dest='destination_folder',
        default=getattr(settings, 'CALTHORPE_DATA_DUMP_LOCATION', ''),
        help='output folder for daily dump'), )

    def handle(self, *args, **options):
        """
        Entry point for the management command.

        Verifies that rsync and pg_dump executables are available, ensures
        the destination folder exists, rsyncs MEDIA_ROOT into a 'media'
        subfolder, then writes a custom-format pg_dump ('-Fc') to
        pg_dump.dmp inside the destination folder.

        :raises CommandError: if rsync/pg_dump are missing or no
            destination folder is configured.
        """

        # Fail fast if the required external tools are not on PATH.
        rsync = spawn.find_executable('rsync')
        if rsync is None:
            raise CommandError('rsync not found')

        pg_dump = spawn.find_executable('pg_dump')
        if pg_dump is None:
            raise CommandError('pg_dump not found')

        if options['destination_folder'] == '':
            raise CommandError(
                '--destination-folder not specified in command line nor settings.py'
            )

        # make sure destination folder exists
        if not os.path.exists(options['destination_folder']):
            try:
                os.makedirs(options['destination_folder'])
            except Exception, e:
                raise Exception(
                    "Cannot create directory with user %s. Exception %s" %
                    (pwd.getpwuid(os.getuid())[0], e.message))

        pg_output_file_name = os.path.join(options['destination_folder'],
                                           'pg_dump.dmp')
        media_output_copy_folder = os.path.join(options['destination_folder'],
                                                'media')

        # make sure destination daily media folder also exists
        if not os.path.exists(media_output_copy_folder):
            os.makedirs(media_output_copy_folder)

        #################
        #rsync folder
        # Build the full rsync command line by appending flags and paths to
        # the executable path found above.
        rsync += ' -rapthzvO {extra} {src} {dest}'.format(
            extra=settings.CALTHORPE_DAILY_DUMP_RSYNC_EXTRA_PARAMS,
            src=settings.MEDIA_ROOT,
            dest=media_output_copy_folder)
        self.stdout.write(rsync + '\n')

        # NOTE(review): exec_cmd is not defined in this chunk — presumably a
        # helper method on this class defined elsewhere; verify it runs the
        # command and returns its output.
        output = self.exec_cmd(rsync)
        self.stdout.write(output)

        #################
        #do database dump
        print settings.DATABASES['default']
        with postgres_env_password_loaded():

            # -Fc = custom-format archive, restorable with pg_restore.
            pg_dump += ' {pg_conn_string} -Fc -f {output_file_name}'.format(
                pg_conn_string=build_postgres_conn_string(
                    settings.DATABASES['default']),
                output_file_name=pg_output_file_name)

            output = self.exec_cmd(pg_dump)
            self.stdout.write(output)
            self.stdout.write('Wrote ' + pg_output_file_name + '\n')