def set_collation_mysql(name=None, site=None):
    """
    Applies the configured character set and collation to the MySQL
    database using the root login.
    """
    from burlap.dj import set_db
    set_db(name=name, site=site)
    set_root_login()
    # Build the ALTER DATABASE statement from the current env settings.
    alter_cmd = (
        "mysql -v -h %(db_host)s -u %(db_root_user)s -p'%(db_root_password)s' "
        "--execute='ALTER DATABASE %(db_name)s CHARACTER SET %(db_mysql_character_set)s COLLATE %(db_mysql_collate)s;'"
    ) % env
    run_or_dryrun(alter_cmd)
def iter_unique_databases(site=None):
    """
    Yields (site, site_data) pairs, skipping any site whose database
    connection settings duplicate those of an earlier site.
    """
    seen = set()
    for site, site_data in common.iter_sites(site=site, no_secure=True):
        set_db(site=site)
        # A database is identified by its name/user/host/engine tuple.
        fingerprint = (env.db_name, env.db_user, env.db_host, env.db_engine)
        if fingerprint not in seen:
            seen.add(fingerprint)
            env.SITE = site
            yield site, site_data
def create_db(name=None):
    """
    Creates the named database using the connection settings resolved
    by set_db().
    """
    from burlap.db import create
    set_db(name=name)
    # Forward the resolved connection settings to the generic creator.
    connection = dict(
        db_engine=env.db_engine,
        db_user=env.db_user,
        db_host=env.db_host,
        db_password=env.db_password,
        db_name=env.db_name,
    )
    create(name=name, **connection)
def post_db_create(name=None, site=None, apps=None):
    """
    Initializes a freshly created database: loads the db set, creates the
    schema, fakes migrations and installs custom SQL for the given site.
    """
    from burlap.db import load_db_set
    print('post_db_create')
    # Both a role and an application name must be configured before running.
    assert env[ROLE]
    require('app_name')
    site = site or env.SITE
    set_db(name=name, site=site, verbose=1)
    load_db_set(name=name)
    # syncdb(all=True) presumably creates all tables; migrate(fake=True)
    # then marks migrations as applied without running them — Django
    # convention, TODO confirm against the syncdb/migrate helpers.
    syncdb(all=True, site=site, database=name)
    migrate(fake=True, site=site, database=name, migrate_apps=apps)
    install_sql(site=site, database=name, apps=apps)
def dump(dest_dir=None, to_local=None, from_local=0, archive=0, dump_fn=None):
    """
    Exports the target database to a single transportable file on the localhost,
    appropriate for loading using load().

    Parameters:
        dest_dir -- overrides env.db_dump_dest_dir when given.
        to_local -- download the dump to localhost; defaults to 1 when the
            target is remote.
        from_local -- run the dump command locally instead of via sudo.
        archive -- move the downloaded dump into env.db_dump_archive_dir.
        dump_fn -- overrides the dump filename template.

    Returns the dump filename template stored in env.db_dump_fn.

    Raises NotImplementedError for unsupported database engines.
    """
    from burlap.dj import set_db
    from_local = int(from_local)
    set_db()
    if dest_dir:
        env.db_dump_dest_dir = dest_dir
    env.db_date = datetime.date.today().strftime('%Y%m%d')
    env.db_dump_fn = get_default_db_fn(dump_fn or env.db_dump_fn_template).strip()
    # When dumping a remote host, default to downloading the result.
    if to_local is None and not env.is_local:
        to_local = 1
    if env.db_dump_command:
        # A custom dump command overrides engine-specific handling.
        run_or_dryrun(env.db_dump_command % env)
    elif 'postgres' in env.db_engine or 'postgis' in env.db_engine:
        assert env.db_schemas, \
            'Please specify the list of schemas to dump in db_schemas.'
        env.db_schemas_str = ' '.join('-n %s' % _ for _ in env.db_schemas)
        cmd = env.db_postgresql_dump_command % env
        if env.is_local or from_local:
            local_or_dryrun(cmd)
        else:
            sudo_or_dryrun(cmd)
    elif 'mysql' in env.db_engine:
        cmd = env.db_mysql_dump_command % env
        if env.is_local:
            local_or_dryrun(cmd)
        else:
            sudo_or_dryrun(cmd)
    else:
        # BUGFIX: was `raise NotImplemented`, which raises a TypeError
        # because NotImplemented is not an exception class.
        raise NotImplementedError('Unsupported database engine: %s' % env.db_engine)
    # Download the database dump file on the remote host to localhost.
    if not from_local and (0 if to_local is None else int(to_local)) and not env.is_local:
        cmd = ('rsync -rvz --progress --recursive --no-p --no-g '
            '--rsh "ssh -o StrictHostKeyChecking=no -i %(key_filename)s" '
            '%(user)s@%(host_string)s:%(db_dump_fn)s %(db_dump_fn)s') % env
        local_or_dryrun(cmd)
    if to_local and int(archive):
        db_fn = render_fn(env.db_dump_fn)
        env.db_archive_fn = '%s/%s' % (env.db_dump_archive_dir, os.path.split(db_fn)[-1])
        local_or_dryrun('mv %s %s' % (db_fn, env.db_archive_fn))
    return env.db_dump_fn
def update(name=None, site=None, skip_databases=None, do_install_sql=0, migrate_apps=''):
    """
    Updates schema and custom SQL.
    """
    from burlap.dj import set_db
    set_db(name=name, site=site)
    # syncdb also loads initial_data fixtures.
    syncdb(site=site)
    migrate(site=site, skip_databases=skip_databases, migrate_apps=migrate_apps)
    if int(do_install_sql):
        install_sql(name=name, site=site)
def set_max_mysql_packet_size(do_set=1):
    """
    Raises MySQL's global net_buffer_length and max_allowed_packet server
    variables to the configured values.

    do_set only gates the set_db() call; pass do_set=0 when the database
    settings have already been loaded into env (as load() does).
    """
    verbose = common.get_verbose()  # NOTE(review): currently unused.
    from burlap.dj import set_db
    do_set = int(do_set)
    if do_set:
        set_db(site=env.SITE, role=env.ROLE)
    # Raise max packet limitation.
    # NOTE(review): reconstructed so the SET runs regardless of do_set,
    # consistent with load() calling this with do_set=0 — TODO confirm.
    run_or_dryrun(
        ('mysql -v -h %(db_host)s -D %(db_name)s -u %(db_root_user)s '
        '-p"%(db_root_password)s" --execute="SET global '
        'net_buffer_length=%(db_mysql_net_buffer_length)s; SET global '
        'max_allowed_packet=%(db_mysql_max_allowed_packet)s;"') % env)
def update(name=None, site=None, skip_databases=None, do_install_sql=0, apps=''):
    """
    Updates schema and custom SQL.
    """
    set_db(name=name, site=site)
    # syncdb also loads initial_data fixtures.
    syncdb(site=site)
    migrate(site=site, skip_databases=skip_databases, migrate_apps=apps)
    if int(do_install_sql):
        install_sql(name=name, site=site, apps=apps)
def drop_views(self, name=None, site=None):
    """
    Drops all views.

    Raises NotImplementedError for PostgreSQL (not yet implemented) and
    for any other unsupported engine.
    """
    from burlap.dj import set_db
    set_db(name=name, site=site)
    if 'postgres' in env.db_engine or 'postgis' in env.db_engine:
        # Candidate implementation, per
        # http://stackoverflow.com/questions/13643831/drop-all-views-postgresql
        #
        # DO$$
        # BEGIN
        #     EXECUTE (
        #        SELECT string_agg('DROP VIEW ' || t.oid::regclass || ';', ' ')  -- CASCADE?
        #        FROM pg_class t
        #        JOIN pg_namespace n ON n.oid = t.relnamespace
        #        WHERE t.relkind = 'v'
        #        AND n.nspname = 'my_messed_up_schema'
        #     );
        # END
        # $$
        #
        # BUGFIX: the original body was a bare `todo` name, which raised a
        # NameError at runtime; raise the intended exception instead.
        raise NotImplementedError
    elif 'mysql' in env.db_engine:
        set_root_login()
        # List all views in the target schema, newest table name first.
        cmd = ("mysql --batch -v -h %(db_host)s " \
            "-u %(db_user)s -p'%(db_password)s' " \
            "--execute=\"SELECT GROUP_CONCAT(CONCAT(TABLE_SCHEMA,'.',table_name) SEPARATOR ', ') AS views FROM INFORMATION_SCHEMA.views WHERE TABLE_SCHEMA = '%(db_name)s' ORDER BY table_name DESC;\"") % env
        result = sudo_or_dryrun(cmd)
        # Skip the `views` column header and capture the comma-separated list.
        result = re.findall(
            r'^views[\s\t\r\n]+(.*)',  # raw string: avoids invalid-escape warnings
            result,
            flags=re.IGNORECASE | re.DOTALL | re.MULTILINE)
        if not result:
            return
        env.db_view_list = result[0]
        cmd = ("mysql -v -h %(db_host)s -u %(db_user)s -p'%(db_password)s' " \
            "--execute=\"DROP VIEW %(db_view_list)s CASCADE;\"") % env
        sudo_or_dryrun(cmd)
    else:
        raise NotImplementedError
def write_pgpass(self, name=None, use_sudo=0, verbose=1, commands_only=0):
    """
    Write the file used to store login credentials for PostgreSQL.

    Returns the list of shell commands; also executes them unless
    commands_only is true.
    """
    from burlap.dj import set_db
    from burlap.file import appendline
    use_sudo = int(use_sudo)
    # NOTE: the `verbose` parameter is overridden by the global setting.
    verbose = common.get_verbose()
    commands_only = int(commands_only)
    if name:
        set_db(name=name)
    pgpass_path = env.db_postgresql_pgass_path
    cmds = [
        'touch {}'.format(pgpass_path),
        'chmod {} {}'.format(env.db_postgresql_pgpass_chmod, pgpass_path),
    ]
    # One pgpass entry: host:port:database:user:password ('*' = any database).
    credential_line = '{}:{}:*:{}:{}'.format(
        env.db_host, env.db_postgresql_port, env.db_user, env.db_password)
    cmds.extend(appendline(
        fqfn=pgpass_path,
        line=credential_line,
        use_sudo=use_sudo,
        commands_only=1,
        verbose=0))
    if not commands_only:
        runner = sudo_or_dryrun if use_sudo else run_or_dryrun
        for cmd in cmds:
            if verbose:
                print(cmd)
            runner(cmd)
    return cmds
def database_renderer(self, name):
    """
    Renders local settings for a specific database.
    """
    # Layer defaults, then the named database's overrides, onto a copy
    # of the local env.
    settings_env = type(self.genv)(self.lenv)
    settings_env.update(self.get_database_defaults())
    settings_env.update(self.env.databases[name])
    settings_env['db_name'] = name
    if settings_env.connection_handler == CONNECTION_HANDLER_DJANGO:
        # Pull Django-managed connection settings into a scratch env.
        from burlap.dj import set_db
        django_env = type(self.genv)()
        set_db(name=name, e=django_env)
        settings_env.update(django_env)
    return LocalRenderer(self, lenv=settings_env)
def execute_sql(fn, name='default', site=None):
    """
    Executes an arbitrary SQL file.
    """
    from burlap.dj import set_db
    from burlap.db import load_db_set
    assert os.path.isfile(fn), 'Missing file: %s' % fn
    site_summary = {}  # {site: ret}
    for site, site_data in common.iter_sites(site=site, no_secure=True):
        try:
            set_db(name=name, site=site)
            load_db_set(name=name)
            env.SITE = site
            put_or_dryrun(local_path=fn)
            with settings(warn_only=True):
                ret = None
                engine = env.db_engine
                if 'postgres' in engine or 'postgis' in engine:
                    ret = run_or_dryrun(
                        "psql --host=%(db_host)s --user=%(db_user)s -d %(db_name)s -f %(put_remote_path)s" % env)
                elif 'mysql' in engine:
                    ret = run_or_dryrun(
                        "mysql -h %(db_host)s -u %(db_user)s -p'%(db_password)s' %(db_name)s < %(put_remote_path)s" % env)
                else:
                    raise NotImplementedError('Unknown database type: %s' % engine)
                print('ret:', ret)
                site_summary[site] = ret
        except KeyError as e:
            # Sites with incomplete settings are recorded and skipped.
            site_summary[site] = 'Error: %s' % str(e)
    print('-' * 80)
    print('Site Summary:')
    for site in sorted(site_summary):
        print(site, site_summary[site])
def get_size():
    """
    Retrieves the size of the database in bytes.

    Returns the size as an int for PostgreSQL/PostGIS; raises
    NotImplementedError for other engines.
    """
    from burlap.dj import set_db
    # BUGFIX: `verbose` was referenced below without ever being defined,
    # raising a NameError; resolve it like sibling functions do.
    verbose = common.get_verbose()
    set_db(site=env.SITE, role=env.ROLE)
    if 'postgres' in env.db_engine or 'postgis' in env.db_engine:
        cmd = 'psql --user=%(db_postgresql_postgres_user)s --tuples-only -c "SELECT pg_database_size(\'%(db_name)s\');"' % env
        output = run_or_dryrun(cmd)
        # The size is the last non-blank line of the tuples-only output.
        output = int(output.strip().split('\n')[-1].strip())
        if int(verbose):
            print('database size (bytes):', output)
        return output
    else:
        raise NotImplementedError
def execute_sql(fn, name='default', site=None):
    """
    Executes an arbitrary SQL file.
    """
    from burlap.dj import set_db
    from burlap.db import load_db_set
    assert os.path.isfile(fn), 'Missing file: %s' % fn
    site_summary = {}  # {site: ret}
    for site, site_data in common.iter_sites(site=site, no_secure=True):
        try:
            set_db(name=name, site=site)
            load_db_set(name=name)
            env.SITE = site
            put_or_dryrun(local_path=fn)
            with settings(warn_only=True):
                ret = None
                # Pick the command template for the configured engine.
                if 'postgres' in env.db_engine or 'postgis' in env.db_engine:
                    template = "psql --host=%(db_host)s --user=%(db_user)s -d %(db_name)s -f %(put_remote_path)s"
                elif 'mysql' in env.db_engine:
                    template = "mysql -h %(db_host)s -u %(db_user)s -p'%(db_password)s' %(db_name)s < %(put_remote_path)s"
                else:
                    raise NotImplementedError('Unknown database type: %s' % env.db_engine)
                ret = run_or_dryrun(template % env)
                print('ret:', ret)
                site_summary[site] = ret
        except KeyError as e:
            # Sites with incomplete settings are recorded and skipped.
            site_summary[site] = 'Error: %s' % str(e)
    print('-'*80)
    print('Site Summary:')
    for site, ret in sorted(site_summary.items(), key=lambda o: o[0]):
        print(site, ret)
def save_db_password(user, password):
    """
    Writes the database user's password to a file, allowing automatic
    login from a secure location.

    Currently, only PostgreSQL is supported.
    """
    from burlap.dj import set_db
    set_db(name='default')
    # Guard clause: only PostgreSQL-family engines are supported.
    if 'postgres' not in env.db_engine and 'postgis' not in env.db_engine:
        raise NotImplementedError
    env.db_save_user = user
    env.db_save_password = password
    command_templates = (
        'sudo -u postgres psql -c "ALTER USER %(db_save_user)s PASSWORD \'%(db_save_password)s\';"',
        # Remove any stale entry for this user before appending the new one.
        "sed -i '/%(db_save_user)s/d' ~/.pgpass",
        'echo "localhost:5432:*:%(db_save_user)s:%(db_save_password)s" >> ~/.pgpass',
        'chmod 600 ~/.pgpass',
    )
    for template in command_templates:
        sudo_or_dryrun(template % env)
def loaddata(path, site=None):
    """
    Runs the Django loaddata management command.

    By default, runs on only the current site.

    Pass site=all to run on all sites.
    """
    # BUGFIX: docstring typo corrected ("Dango" -> "Django").
    render_remote_paths()
    site = site or env.SITE
    env._loaddata_path = path
    for site, site_data in common.iter_sites(site=site, no_secure=True):
        try:
            set_db(site=site)
            env.SITE = site
            cmd = ('export SITE=%(SITE)s; export ROLE=%(ROLE)s; '
                'cd %(shell_default_dir)s; '
                './manage loaddata %(_loaddata_path)s') % env
            sudo_or_dryrun(cmd)
        except KeyError:
            # Best-effort: skip sites with incomplete database settings.
            pass
def dumpload():
    """
    Dumps and loads a database snapshot simultaneously.
    Requires that the destination server has direct database access
    to the source server.

    This is better than a serial dump+load when:
    1. The network connection is reliable.
    2. You don't need to save the dump file.

    The benefits of this over a dump+load are:
    1. Usually runs faster, since the load and dump happen in parallel.
    2. Usually takes up less disk space since no separate dump file is
       downloaded.
    """
    set_db(site=env.SITE, role=env.ROLE)
    # Guard clause: only PostgreSQL-family engines are supported.
    if 'postgres' not in env.db_engine and 'postgis' not in env.db_engine:
        raise NotImplementedError
    # Pipe pg_dump straight into pg_restore so no dump file is written.
    pipeline = (
        'pg_dump -c --host=%(host_string)s --username=%(db_user)s '
        '--blobs --format=c %(db_name)s -n public | '
        'pg_restore -U %(db_postgresql_postgres_user)s --create '
        '--dbname=%(db_name)s') % env
    run_or_dryrun(pipeline)
def install_sql(site=None, database='default', apps=None):
    """
    Installs all custom SQL.

    Parameters:
        site -- site whose settings to load.
        database -- database alias to install into.
        apps -- optional comma-separated list of app names to restrict to.

    Raises NotImplementedError for unsupported database engines.
    """
    from functools import cmp_to_key
    from burlap.dj import set_db
    from burlap.db import load_db_set
    name = database
    set_db(name=name, site=site)
    load_db_set(name=name)
    paths = glob.glob(env.django_install_sql_path_template % env)
    # BUGFIX: ''.split(',') yields [''] (truthy), which made the app filter
    # below reject every path when no apps were given. Drop empty entries.
    apps = [_.strip() for _ in (apps or '').split(',') if _.strip()]

    def cmp_paths(d0, d1):
        # A file defining a view referenced by another file sorts first.
        if d0[1] and d0[1] in d1[2]:
            return -1
        if d1[1] and d1[1] in d0[2]:
            return +1
        # BUGFIX: builtin cmp() does not exist in Python 3; emulate it.
        return (d0[0] > d1[0]) - (d0[0] < d1[0])

    def get_paths(t):
        """
        Returns SQL file paths in an execution order that respect dependencies.
        """
        data = []  # [(path, view_name, content)]
        for path in paths:
            parts = path.split('.')
            if len(parts) == 3 and parts[1] != t:
                continue
            if not path.lower().endswith('.sql'):
                continue
            content = open(path, 'r').read()
            matches = re.findall(r'[\s\t]+VIEW[\s\t]+([a-zA-Z0-9_]+)', content, flags=re.IGNORECASE)
            view_name = matches[0] if matches else ''
            data.append((path, view_name, content))
        # BUGFIX: sorted(cmp=...) was removed in Python 3; use cmp_to_key.
        for d in sorted(data, key=cmp_to_key(cmp_paths)):
            yield d[0]

    def run_paths(paths, cmd_template, max_retries=3):
        # Retry failed files up to max_retries times, in case they depend
        # on objects created by files later in the queue.
        paths = list(paths)
        error_counts = defaultdict(int)  # {path: count}
        terminal = set()
        while paths:
            path = paths.pop(0)
            app_name = re.findall(r'/([^/]+)/sql/', path)[0]
            if apps and app_name not in apps:
                continue
            with settings(warn_only=True):
                put_or_dryrun(local_path=path)
                cmd = cmd_template % env
                error_code = run_or_dryrun(cmd)
            if error_code:
                error_counts[path] += 1
                if error_counts[path] < max_retries:
                    paths.append(path)
                else:
                    terminal.add(path)
        if terminal:
            print('%i files could not be loaded.' % len(terminal), file=sys.stderr)
            for path in sorted(list(terminal)):
                print(path, file=sys.stderr)
            print(file=sys.stderr)

    if 'postgres' in env.db_engine or 'postgis' in env.db_engine:
        run_paths(
            paths=get_paths('postgresql'),
            cmd_template="psql --host=%(db_host)s --user=%(db_user)s -d %(db_name)s -f %(put_remote_path)s")
    elif 'mysql' in env.db_engine:
        run_paths(
            paths=get_paths('mysql'),
            cmd_template="mysql -v -h %(db_host)s -u %(db_user)s -p'%(db_password)s' %(db_name)s < %(put_remote_path)s")
    else:
        raise NotImplementedError
def exists(self, name='default', site=None):
    """
    Returns true if a database with the given name exists. False otherwise.
    """
    # BUGFIX: `verbose` was referenced throughout this method without ever
    # being defined (only self.verbose existed), raising a NameError.
    verbose = self.verbose
    if verbose:
        print('!'*80)
        print('db.exists:', name)
    if name and self.env.connection_handler == CONNECTION_HANDLER_DJANGO:
        from burlap.dj import set_db
        set_db(name=name, site=site, verbose=verbose)
        load_db_set(name=name)
    self.set_root_login()
    ret = None
    if 'postgres' in env.db_engine or 'postgis' in env.db_engine:
        # Probe as the root user.
        kwargs = dict(
            db_user=env.db_root_user,
            db_password=env.db_root_password,
            db_host=env.db_host,
            db_name=env.db_name,
        )
        env.update(kwargs)
        # Set pgpass file so psql can authenticate non-interactively.
        if env.db_password:
            self.write_pgpass(verbose=verbose, name=name)
        cmd = ('psql --username={db_user} --host={db_host} -l '
            '| grep {db_name} | wc -l').format(**env)
        if verbose:
            print(cmd)
        with settings(warn_only=True):
            ret = run_or_dryrun(cmd)
        if ret is not None:
            if 'password authentication failed' in ret:
                ret = False
            else:
                # grep|wc -l >= 1 means the database name was listed.
                ret = int(ret) >= 1
    elif 'mysql' in env.db_engine:
        kwargs = dict(
            db_user=env.db_root_user,
            db_password=env.db_root_password,
            db_host=env.db_host,
            db_name=env.db_name,
        )
        env.update(kwargs)
        cmd = ('mysql -h {db_host} -u {db_user} '
            '-p"{db_password}" -N -B -e "SELECT IF(\'{db_name}\''
            ' IN(SELECT SCHEMA_NAME FROM INFORMATION_SCHEMA.SCHEMATA), '
            '\'exists\', \'notexists\') AS found;"').format(**env)
        if verbose:
            print(cmd)
        ret = run_or_dryrun(cmd)
        if ret is not None:
            ret = 'notexists' not in (ret or 'notexists')
    else:
        raise NotImplementedError
    if ret is not None:
        print('%s database on site %s %s exist' % (name, env.SITE, 'DOES' if ret else 'DOES NOT'))
    return ret
def create(self, drop=0, name='default', site=None, post_process=0, db_engine=None, db_user=None, db_host=None, db_password=None, db_name=None):
    """
    Creates the target database.

    Parameters:
        drop -- drop and recreate the database if it already exists.
        name -- database alias to create.
        db_engine/db_user/db_host/db_password/db_name -- explicit overrides
            for the corresponding env settings.

    Raises NotImplementedError for unsupported database engines.
    """
    from burlap.dj import set_db, render_remote_paths
    assert env[ROLE]
    require('app_name')
    drop = int(drop)
    # Do nothing if we're not dropping and the database already exists.
    print('Checking to see if database already exists...')
    if self.exists(name=name, site=site) and not drop:
        print('Database already exists. Aborting creation. '
            'Use drop=1 to override.')
        return
    env.db_drop_flag = '--drop' if drop else ''
    if name:
        set_db(name=name, site=site)
        load_db_set(name=name)
    # Apply explicit overrides after settings are loaded.
    if db_engine:
        env.db_engine = db_engine
    if db_user:
        env.db_user = db_user
    if db_host:
        env.db_host = db_host
    if db_password:
        env.db_password = db_password
    if db_name:
        env.db_name = db_name
    if 'postgres' in env.db_engine or 'postgis' in env.db_engine:
        pass
    elif 'mysql' in env.db_engine:
        set_root_login()
        if int(drop):
            cmd = "mysql -v -h %(db_host)s -u %(db_root_user)s -p'%(db_root_password)s' --execute='DROP DATABASE IF EXISTS %(db_name)s'" % env
            sudo_or_dryrun(cmd)
        cmd = "mysqladmin -h %(db_host)s -u %(db_root_user)s -p'%(db_root_password)s' create %(db_name)s" % env
        sudo_or_dryrun(cmd)
        set_collation_mysql()
        # Create user.
        cmd = "mysql -v -h %(db_host)s -u %(db_root_user)s -p'%(db_root_password)s' --execute=\"GRANT USAGE ON *.* TO %(db_user)s@'%%'; DROP USER %(db_user)s@'%%';\"" % env
        run_or_dryrun(cmd)
        # Grant user access to the database.
        cmd = ("mysql -v -h %(db_host)s -u %(db_root_user)s "
            "-p'%(db_root_password)s' --execute=\"GRANT ALL PRIVILEGES "
            "ON %(db_name)s.* TO %(db_user)s@'%%' IDENTIFIED BY "
            "'%(db_password)s'; FLUSH PRIVILEGES;\"") % env
        run_or_dryrun(cmd)
        # TODO: why is this necessary? why doesn't the user@% pattern above
        # give localhost access?!
        cmd = ("mysql -v -h %(db_host)s -u %(db_root_user)s "
            "-p'%(db_root_password)s' --execute=\"GRANT ALL PRIVILEGES "
            "ON %(db_name)s.* TO %(db_user)s@%(db_host)s IDENTIFIED BY "
            "'%(db_password)s'; FLUSH PRIVILEGES;\"") % env
        run_or_dryrun(cmd)
    else:
        # BUGFIX: was `raise NotImplemented`, which raises a TypeError
        # because NotImplemented is not an exception class.
        raise NotImplementedError('Unsupported database engine: %s' % env.db_engine)
def shell(self, name='default', user=None, password=None, root=0, verbose=1, write_password=1, no_db=0, no_pw=0):
    """
    Opens a SQL shell to the given database, assuming the
    configured database and user supports this feature.

    Parameters:
        name -- database alias to connect to.
        user/password -- override the configured credentials.
        root -- connect as the root database user.
        write_password -- write the PostgreSQL ~/.pgpass entry first.
        no_db -- connect to the server without selecting a database.
        no_pw -- blank out the password before connecting.
    """
    from burlap.dj import set_db
    # NOTE(review): the `verbose` parameter is immediately overridden here.
    verbose = self.verbose
    root = int(root)
    write_password = int(write_password)
    no_db = int(no_db)
    no_pw = int(no_pw)
    # Load database credentials.
    set_db(name=name, verbose=verbose)
    load_db_set(name=name, verbose=verbose)
    set_root_login()
    if root:
        env.db_user = env.db_root_user
        env.db_password = env.db_root_password
    else:
        if user is not None:
            env.db_user = user
        if password is not None:
            env.db_password = password
    # Switch relative to absolute host name.
    env.db_shell_host = env.db_host
    # if env.db_shell_host in ('localhost', '127.0.0.1'):
    #     env.db_shell_host = env.host_string
    if no_pw:
        env.db_password = ''
    cmds = []
    env.db_name_str = ''
    if 'postgres' in env.db_engine or 'postgis' in env.db_engine:
        # Note, psql does not support specifying password at the command line.
        # If you don't want to manually type it at the command line, you must
        # add the password to your local ~/.pgpass file.
        # Each line in that file should be formatted as:
        # host:port:username:password
        # Set pgpass file.
        if write_password and env.db_password:
            cmds.extend(write_postgres_pgpass(verbose=0, commands_only=1, name=name))
        if not no_db:
            env.db_name_str = ' --dbname=%(db_name)s' % env
        cmds.append(('/bin/bash -i -c \"psql --username=%(db_user)s '\
            '--host=%(db_shell_host)s%(db_name_str)s\"') % env)
    elif 'mysql' in env.db_engine:
        if not no_db:
            env.db_name_str = ' %(db_name)s' % env
        # mysql accepts the password inline, so two command forms are needed.
        if env.db_password:
            cmds.append(('/bin/bash -i -c \"mysql -u %(db_user)s '\
                '-p\'%(db_password)s\' -h %(db_shell_host)s%(db_name_str)s\"') % env)
        else:
            cmds.append(('/bin/bash -i -c \"mysql -u %(db_user)s '\
                '-h %(db_shell_host)s%(db_name_str)s\"') % env)
    else:
        raise NotImplementedError
    if cmds:
        for cmd in cmds:
            if verbose:
                print(cmd)
            # Run interactively on the appropriate host.
            if env.is_local:
                local_or_dryrun(cmd)
            else:
                run_or_dryrun(cmd)
def install_sql(site=None, database='default', apps=None):
    """
    Installs all custom SQL.

    Parameters:
        site -- site whose settings to load.
        database -- database alias to install into.
        apps -- optional comma-separated list of app names to restrict to.

    Raises NotImplementedError for unsupported database engines.
    """
    from functools import cmp_to_key
    from burlap.dj import set_db
    from burlap.db import load_db_set
    name = database
    set_db(name=name, site=site)
    load_db_set(name=name)
    paths = glob.glob(env.django_install_sql_path_template % env)
    # BUGFIX: ''.split(',') yields [''] (truthy), which made the app filter
    # below reject every path when no apps were given. Drop empty entries.
    apps = [_.strip() for _ in (apps or '').split(',') if _.strip()]

    def cmp_paths(d0, d1):
        # A file defining a view referenced by another file sorts first.
        if d0[1] and d0[1] in d1[2]:
            return -1
        if d1[1] and d1[1] in d0[2]:
            return +1
        # BUGFIX: builtin cmp() does not exist in Python 3; emulate it.
        return (d0[0] > d1[0]) - (d0[0] < d1[0])

    def get_paths(t):
        """
        Returns SQL file paths in an execution order that respect dependencies.
        """
        data = []  # [(path, view_name, content)]
        for path in paths:
            parts = path.split('.')
            if len(parts) == 3 and parts[1] != t:
                continue
            if not path.lower().endswith('.sql'):
                continue
            content = open(path, 'r').read()
            matches = re.findall(r'[\s\t]+VIEW[\s\t]+([a-zA-Z0-9_]+)', content, flags=re.IGNORECASE)
            view_name = matches[0] if matches else ''
            data.append((path, view_name, content))
        # BUGFIX: sorted(cmp=...) was removed in Python 3; use cmp_to_key.
        for d in sorted(data, key=cmp_to_key(cmp_paths)):
            yield d[0]

    def run_paths(paths, cmd_template, max_retries=3):
        # Retry failed files up to max_retries times, in case they depend
        # on objects created by files later in the queue.
        paths = list(paths)
        error_counts = defaultdict(int)  # {path: count}
        terminal = set()
        while paths:
            path = paths.pop(0)
            app_name = re.findall(r'/([^/]+)/sql/', path)[0]
            if apps and app_name not in apps:
                continue
            with settings(warn_only=True):
                put_or_dryrun(local_path=path)
                cmd = cmd_template % env
                error_code = run_or_dryrun(cmd)
            if error_code:
                error_counts[path] += 1
                if error_counts[path] < max_retries:
                    paths.append(path)
                else:
                    terminal.add(path)
        if terminal:
            print('%i files could not be loaded.' % len(terminal), file=sys.stderr)
            for path in sorted(list(terminal)):
                print(path, file=sys.stderr)
            print(file=sys.stderr)

    if 'postgres' in env.db_engine or 'postgis' in env.db_engine:
        run_paths(
            paths=get_paths('postgresql'),
            cmd_template="psql --host=%(db_host)s --user=%(db_user)s -d %(db_name)s -f %(put_remote_path)s")
    elif 'mysql' in env.db_engine:
        run_paths(
            paths=get_paths('mysql'),
            cmd_template="mysql -v -h %(db_host)s -u %(db_user)s -p'%(db_password)s' %(db_name)s < %(put_remote_path)s")
    else:
        raise NotImplementedError
def load(db_dump_fn='', prep_only=0, force_upload=0, from_local=0):
    """
    Restores a database snapshot onto the target database server.

    If prep_only=1, commands for preparing the load will be generated,
    but not the command to finally load the snapshot.

    Parameters:
        db_dump_fn -- dump filename; defaults to the configured template.
        force_upload -- re-upload the dump even if it exists remotely.
        from_local -- treat the dump as already present locally.

    Raises NotImplementedError for unsupported database engines.
    """
    verbose = common.get_verbose()
    from burlap.dj import set_db
    from burlap.common import get_dryrun
    if not db_dump_fn:
        db_dump_fn = get_default_db_fn()
    env.db_dump_fn = render_fn(db_dump_fn).strip()
    set_db(site=env.SITE, role=env.ROLE)
    from_local = int(from_local)
    prep_only = int(prep_only)
    # Copy snapshot file to target.
    missing_local_dump_error = (
        "Database dump file %(db_dump_fn)s does not exist."
    ) % env
    if env.is_local:
        env.db_remote_dump_fn = db_dump_fn
    else:
        env.db_remote_dump_fn = '/tmp/' + os.path.split(env.db_dump_fn)[-1]
    if not prep_only:
        # Upload unless the dump already exists on the remote host.
        if int(force_upload) or (not get_dryrun() and not env.is_local and not files.exists(env.db_remote_dump_fn)):
            assert os.path.isfile(env.db_dump_fn), \
                missing_local_dump_error
            if verbose:
                print('Uploading database snapshot...')
            put_or_dryrun(local_path=env.db_dump_fn, remote_path=env.db_remote_dump_fn)
    if env.is_local and not prep_only and not get_dryrun():
        assert os.path.isfile(env.db_dump_fn), \
            missing_local_dump_error
    if env.db_load_command:
        # A custom load command overrides engine-specific handling.
        cmd = env.db_load_command % env
        run_or_dryrun(cmd)
    elif 'postgres' in env.db_engine or 'postgis' in env.db_engine:
        set_root_login()
        with settings(warn_only=True):
            cmd = 'dropdb --user=%(db_postgresql_postgres_user)s %(db_name)s' % env
            run_or_dryrun(cmd)
        cmd = 'psql --user=%(db_postgresql_postgres_user)s -c "CREATE DATABASE %(db_name)s;"' % env
        run_or_dryrun(cmd)
        with settings(warn_only=True):
            if 'postgis' in env.db_engine:
                cmd = 'psql --user=%(db_postgresql_postgres_user)s --no-password --dbname=%(db_name)s --command="CREATE EXTENSION postgis;"' % env
                run_or_dryrun(cmd)
                cmd = 'psql --user=%(db_postgresql_postgres_user)s --no-password --dbname=%(db_name)s --command="CREATE EXTENSION postgis_topology;"' % env
                run_or_dryrun(cmd)
            cmd = 'psql --user=%(db_postgresql_postgres_user)s -c "DROP OWNED BY %(db_user)s CASCADE;"' % env
            run_or_dryrun(cmd)
        # Recreate the application user with full access to the database.
        cmd = ('psql --user=%(db_postgresql_postgres_user)s -c "DROP USER IF EXISTS %(db_user)s; '
            'CREATE USER %(db_user)s WITH PASSWORD \'%(db_password)s\'; '
            'GRANT ALL PRIVILEGES ON DATABASE %(db_name)s to %(db_user)s;"') % env
        run_or_dryrun(cmd)
        for createlang in env.db_postgresql_createlangs:
            env.db_createlang = createlang
            cmd = 'createlang -U %(db_postgresql_postgres_user)s %(db_createlang)s %(db_name)s || true' % env
            run_or_dryrun(cmd)
        if not prep_only:
            if env.db_postgresql_custom_load_cmd:
                cmd = env.db_postgresql_custom_load_cmd % env
            else:
                cmd = 'pg_restore --jobs=8 -U %(db_postgresql_postgres_user)s --create --dbname=%(db_name)s %(db_remote_dump_fn)s' % env
            run_or_dryrun(cmd)
    elif 'mysql' in env.db_engine:
        set_root_login()
        # Drop the database if it's there.
        cmd = ("mysql -v -h %(db_host)s -u %(db_root_user)s -p'%(db_root_password)s' "
            "--execute='DROP DATABASE IF EXISTS %(db_name)s'") % env
        run_or_dryrun(cmd)
        # Now, create the database.
        cmd = ("mysqladmin -h %(db_host)s -u %(db_root_user)s -p'%(db_root_password)s' "
            "create %(db_name)s") % env
        run_or_dryrun(cmd)
        # Create user.
        with settings(warn_only=True):
            cmd = ("mysql -v -h %(db_host)s -u %(db_root_user)s -p'%(db_root_password)s' "
                "--execute=\"CREATE USER '%(db_user)s'@'%%' IDENTIFIED BY '%(db_password)s'; GRANT ALL PRIVILEGES ON *.* TO '%(db_user)s'@'%%' WITH GRANT OPTION; FLUSH PRIVILEGES;\"") % env
            run_or_dryrun(cmd)
        # Set collation.
        set_collation_mysql()
        # Raise max packet limitation (settings already loaded, so do_set=0).
        set_max_mysql_packet_size(do_set=0)
        # Run any server-specific commands (e.g. to setup permissions) before
        # we load the data.
        for command in env.db_mysql_preload_commands:
            run_or_dryrun(command % env)
        # Restore the database content from the dump file.
        env.db_dump_fn = db_dump_fn
        cmd = ('gunzip < %(db_remote_dump_fn)s | mysql -u %(db_root_user)s '
            '--password=%(db_root_password)s --host=%(db_host)s '
            '-D %(db_name)s') % env
        run_or_dryrun(cmd)
    else:
        # BUGFIX: was `raise NotImplemented`, which raises a TypeError
        # because NotImplemented is not an exception class.
        raise NotImplementedError('Unsupported database engine: %s' % env.db_engine)