def migrate_ecm_db(instance_dir, upgrade_from_149=False):
    """Run the syncdb/South migration sequence on an ECM instance database.

    instance_dir     -- directory of the ECM instance (where manage.py lives)
    upgrade_from_149 -- True when upgrading from ECM 1.4.9, to reconcile the
                        legacy table layout before running the migrations
    """
    instance_dir = path.abspath(instance_dir)
    log("Migrating database...")
    run_python_cmd('manage.py syncdb --noinput', instance_dir)
    if upgrade_from_149:
        log('Migrating from ECM 1.4.9...')
        # Upgrading from ECM 1.X.Y: wipe South's migration history, then run
        # the init migration of the 'hr' app, which renames the old
        # 'roles_xxxxx' tables to 'hr_xxxxx'.
        pipe_to_django_shell('from south.models import MigrationHistory; '
                             'MigrationHistory.objects.all().delete()', instance_dir)
        run_python_cmd('manage.py migrate hr 0001 --noinput', instance_dir)
        # The first migration of every 1.4.9-era app MUST be "faked":
        # the DB tables already exist, so a real migrate would fail.
        legacy_apps = ('common', 'scheduler', 'corp', 'assets', 'accounting')
        for legacy_app in legacy_apps:
            run_python_cmd('manage.py migrate %s 0001 --fake --noinput' % legacy_app,
                           instance_dir)
    run_python_cmd('manage.py migrate --all --noinput', instance_dir)
    if upgrade_from_149:
        # Scheduled tasks and URL permissions carried over from 1.4.9 are
        # obsolete in the new layout: drop them so they get re-created.
        pipe_to_django_shell('from ecm.apps.scheduler.models import ScheduledTask; '
                             'ScheduledTask.objects.all().delete()', instance_dir)
        pipe_to_django_shell('from ecm.apps.common.models import UrlPermission; '
                             'UrlPermission.objects.all().delete()', instance_dir)
    log('Database Migration successful.')
# NOTE(review): this function is defined TWICE in this file with identical
# bodies; this first definition is shadowed by the later one and is dead
# code. One of the two copies should be removed.
def load_json_dump(instance_dir, json_file, tempdir):
    """Load a (possibly huge) JSON fixture into the instance database.

    The dump is fed to 'manage.py loaddata' in ~15 MB chunks to save memory.
    Each chunk is cut at a "}," object boundary and closed with ']' so that
    every temp file remains a valid JSON list on its own.

    instance_dir -- ECM instance directory (where manage.py lives)
    json_file    -- path of the JSON dump to load
    tempdir      -- directory used for the temporary chunk file
    """
    # Break the load into multiple chunks to save memory.
    # blocksize must be large enough to grab the main foreign keys in the
    # first chunk, otherwise later chunks would reference missing rows.
    blocksize = 15000000
    infile = open(json_file, 'r')
    outfilename = os.path.join(tempdir, 'temp.json')
    i = 1  # chunk counter; every chunk after the first needs a leading '['
    data = infile.read(blocksize)
    while (len(data)):
        outfile = open(outfilename, 'w')
        if (i > 1):
            # The previous chunk was closed with ']': re-open the JSON list.
            outfile.write('[')
        outfile.write(data)
        if (len(data) == blocksize):
            # A full block was read, so we probably stopped mid-object:
            # look for "}," and separate there.
            # separator state: 0 = scanning, 1 = just saw '}', 2 = boundary found
            separator = 0
            while (separator < 2):
                databyte = infile.read(1)
                if (databyte == ']'):
                    separator = 2
                elif (databyte == '}'):
                    separator = 1
                elif (databyte == ',' and separator == 1):
                    # "}," found: replace the ',' with ']' to close this chunk.
                    separator = 2
                    databyte = ']'
                else:
                    separator = 0
                outfile.write(databyte)
        outfile.close()
        run_python_cmd(['manage.py', 'loaddata', os.path.abspath(outfilename)],
                       run_dir=instance_dir)
        # If len is less than blocksize we're out of data.
        if (len(data) < blocksize):
            break
        data = infile.read(blocksize)
        i = i + 1
    infile.close()
    os.remove(outfilename)
def load_json_dump(instance_dir, json_file, tempdir):
    """Load a (possibly huge) JSON fixture into the instance database.

    The dump is fed to 'manage.py loaddata' in ~15 MB chunks to save memory.
    Each chunk is cut at a "}," object boundary and closed with ']' so that
    every temp file remains a valid JSON list on its own.

    Fixes over the previous revision:
    - the byte-by-byte boundary scan now stops at EOF; previously, if the
      file ended exactly on a chunk edge, read(1) returned '' forever and
      the scan never terminated
    - file handles are managed with 'with' blocks so they are closed even
      when 'loaddata' raises

    instance_dir -- ECM instance directory (where manage.py lives)
    json_file    -- path of the JSON dump to load
    tempdir      -- directory used for the temporary chunk file
    """
    # blocksize must be large enough to grab the main foreign keys in the
    # first chunk, otherwise later chunks would reference missing rows.
    blocksize = 15000000
    outfilename = os.path.join(tempdir, 'temp.json')
    with open(json_file, 'r') as infile:
        i = 1  # chunk counter; every chunk after the first needs a leading '['
        data = infile.read(blocksize)
        while data:
            with open(outfilename, 'w') as outfile:
                if i > 1:
                    # The previous chunk was closed with ']': re-open the list.
                    outfile.write('[')
                outfile.write(data)
                if len(data) == blocksize:
                    # A full block was read, so we probably stopped mid-object:
                    # scan forward to a "}," boundary and cut the chunk there.
                    # separator state: 0 = scanning, 1 = just saw '}', 2 = done
                    separator = 0
                    while separator < 2:
                        databyte = infile.read(1)
                        if not databyte:
                            # EOF reached while scanning: nothing more to copy.
                            # (Bug fix: the old code spun forever here.)
                            break
                        if databyte == ']':
                            separator = 2
                        elif databyte == '}':
                            separator = 1
                        elif databyte == ',' and separator == 1:
                            # "}," found: replace ',' with ']' to close the list.
                            separator = 2
                            databyte = ']'
                        else:
                            separator = 0
                        outfile.write(databyte)
            run_python_cmd(['manage.py', 'loaddata', os.path.abspath(outfilename)],
                           run_dir=instance_dir)
            if len(data) < blocksize:
                break  # short read: the dump is exhausted
            data = infile.read(blocksize)
            i += 1
    os.remove(outfilename)
def collect_static_files(instance_dir, options):
    """Gather the Django static files of an ECM instance.

    On non-Windows systems, when options.symlink_files is set, files are
    symlinked ('--link') instead of copied.
    """
    log("Gathering static files...")
    use_links = os.name != 'nt' and options.symlink_files
    switches = '--noinput --link' if use_links else '--noinput'
    run_python_cmd('manage.py collectstatic ' + switches, instance_dir)
def init_ecm_db(instance_dir):
    """Create and migrate the database of a freshly created ECM instance."""
    log("Initializing database...")
    # --migrate runs the South migrations right after the table creation.
    run_python_cmd('manage.py syncdb --noinput --migrate', instance_dir)
    log('Database initialization successful.')
def run(command, global_options, options, args):
    """Relay an arbitrary 'manage.py' command to an ECM instance.

    The first positional argument is the instance directory; all remaining
    arguments are forwarded verbatim to that instance's manage.py.
    """
    if not args:
        command.parser.error('Missing instance directory.')
    # Consume the instance dir from args so only the manage.py arguments remain.
    instance_dir = args.pop(0)
    run_python_cmd(['manage.py'] + args, run_dir=instance_dir)