def handle(self, **options):
    # ignore_errors = options.get('ignore_errors')
    config = helpers.Config(options)
    backup_file = options.get('backup_file')
    owner = options.get('owner')

    if not backup_file or len(backup_file) == 0:
        raise CommandError("Backup archive '--backup-file' is mandatory")

    if not owner or len(owner) == 0:
        raise CommandError("Owner '--owner' is mandatory")

    message = 'WARNING: The migration may break GeoNode existing Layers. Do you want to proceed?'

    if helpers.confirm(prompt=message, resp=False):
        """Migrate existing Layers on GeoNode DB"""
        try:
            # Create Target Folder
            restore_folder = os.path.join(tempfile.gettempdir(), 'restore')
            if not os.path.exists(restore_folder):
                os.makedirs(restore_folder)

            # Extract ZIP Archive to Target Folder
            target_folder = helpers.unzip_file(backup_file, restore_folder)

            # Retrieve the max Primary Key from the DB
            from geonode.base.models import ResourceBase
            try:
                higher_pk = ResourceBase.objects.all().order_by("-id")[0].pk
            except:
                higher_pk = 0

            # Restore Fixtures
            for app_name, dump_name in zip(config.app_names, config.dump_names):
                for mig_name, mangler in zip(config.migrations, config.manglers):
                    if app_name == mig_name:
                        fixture_file = os.path.join(target_folder, dump_name + '.json')

                        print("Deserializing " + fixture_file)
                        mangler = helpers.load_class(mangler)

                        site_url = settings.SITEURL.rstrip('/') if settings.SITEURL.startswith('http') else settings.SITEURL
                        obj = helpers.load_fixture(
                            app_name,
                            fixture_file,
                            mangler=mangler,
                            basepk=higher_pk,
                            owner=owner,
                            datastore=settings.OGC_SERVER['default']['DATASTORE'],
                            siteurl=site_url)

                        from django.core import serializers
                        objects = serializers.deserialize('json', json.dumps(obj), ignorenonexistent=True)
                        for obj in objects:
                            obj.save(using=DEFAULT_DB_ALIAS)

            print("Restore finished. Please find restored files and dumps into: '" + target_folder + "'.")
        except Exception:
            traceback.print_exc()
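# Hypothetical invocation sketch for the layer-migration command above. The
# command name 'migrate_layers' is an assumption -- use whatever name this
# module is registered under in management/commands/.
from django.core.management import call_command

call_command(
    'migrate_layers',                       # assumed command name
    backup_file='/tmp/geonode_backup.zip',  # archive produced by the backup command
    owner='admin')                          # GeoNode username that will own the migrated Layers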
def migrate_layers(archive, owner):
    """Migrate existing Layers on GeoNode DB"""
    try:
        # Create Target Folder
        restore_folder = 'restore'
        if not os.path.exists(restore_folder):
            os.makedirs(restore_folder)

        # Extract ZIP Archive to Target Folder
        target_folder = helpers.unzip_file(archive, restore_folder)

        # Retrieve the max Primary Key from the DB
        from geonode.base.models import ResourceBase
        try:
            higher_pk = ResourceBase.objects.all().order_by("-id")[0].pk
        except:
            higher_pk = 0

        # Restore Fixtures
        for app_name, dump_name in zip(helpers.app_names, helpers.dump_names):
            for mig_name, mangler in zip(helpers.migrations, helpers.manglers):
                if app_name == mig_name:
                    fixture_file = os.path.join(target_folder, dump_name + '.json')

                    print "Deserializing " + fixture_file
                    mangler = helpers.load_class(mangler)

                    obj = helpers.load_fixture(
                        app_name,
                        fixture_file,
                        mangler=mangler,
                        basepk=higher_pk,
                        owner=owner,
                        datastore=settings.OGC_SERVER['default']['DATASTORE'],
                        siteurl=settings.SITEURL)

                    from django.core import serializers
                    objects = serializers.deserialize('json', json.dumps(obj), ignorenonexistent=True)
                    for obj in objects:
                        obj.save(using=DEFAULT_DB_ALIAS)
    except Exception:
        traceback.print_exc()
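# Minimal sketch of the deserialize-and-save pattern used by migrate_layers():
# helpers.load_fixture() returns objects in Django's serialization format,
# which are dumped back to JSON and loaded through django.core.serializers.
# The sample fixture below is purely illustrative (model label, pk and fields
# are assumptions, not part of the original code).
import json

from django.core import serializers
from django.db import DEFAULT_DB_ALIAS

sample_fixture = [{
    "model": "people.profile",  # hypothetical app/model label
    "pk": 1001,
    "fields": {"username": "restored_user", "is_active": True},
}]

for deserialized in serializers.deserialize('json', json.dumps(sample_fixture),
                                            ignorenonexistent=True):
    # Fields that no longer exist on the model are skipped thanks to
    # ignorenonexistent=True; each wrapper saves its instance to the default DB.
    deserialized.save(using=DEFAULT_DB_ALIAS)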
def handle(self, **options):
    # ignore_errors = options.get('ignore_errors')
    force_exec = options.get('force_exec')
    backup_file = options.get('backup_file')

    if not backup_file or len(backup_file) == 0:
        raise CommandError("Backup archive '--backup-file' is mandatory")

    print "Before proceeding with the Restore, please ensure that:"
    print " 1. The backend (DB or whatever) is accessible and you have rights"
    print " 2. The GeoServer is up and running and reachable from this machine"

    message = 'WARNING: The restore will overwrite ALL GeoNode data. Do you want to proceed?'

    if force_exec or helpers.confirm(prompt=message, resp=False):
        # Create Target Folder
        restore_folder = os.path.join(tempfile.gettempdir(), 'restore')
        if not os.path.exists(restore_folder):
            os.makedirs(restore_folder)

        # Extract ZIP Archive to Target Folder
        target_folder = helpers.unzip_file(backup_file, restore_folder)

        # Restore GeoServer Catalog
        url = settings.OGC_SERVER['default']['PUBLIC_LOCATION']
        user = settings.OGC_SERVER['default']['USER']
        passwd = settings.OGC_SERVER['default']['PASSWORD']
        geoserver_bk_file = os.path.join(target_folder, 'geoserver_catalog.zip')

        print "Restoring 'GeoServer Catalog ["+url+"]' from '"+geoserver_bk_file+"'."

        if not os.path.exists(geoserver_bk_file):
            raise ValueError('Could not find GeoServer Backup file [' + geoserver_bk_file + ']')

        # Best Effort Restore: 'options': {'option': ['BK_BEST_EFFORT=true']}
        data = {'restore': {'archiveFile': geoserver_bk_file, 'options': {}}}
        headers = {'Content-type': 'application/json'}
        r = requests.post(url + 'rest/br/restore/', data=json.dumps(data),
                          headers=headers, auth=HTTPBasicAuth(user, passwd))

        if (r.status_code > 201):
            gs_backup = r.json()
            gs_bk_exec_id = gs_backup['restore']['execution']['id']
            r = requests.get(url + 'rest/br/restore/' + str(gs_bk_exec_id) + '.json',
                             auth=HTTPBasicAuth(user, passwd))
            if (r.status_code == 200):
                gs_backup = r.json()
                gs_bk_progress = gs_backup['restore']['execution']['progress']
                print gs_bk_progress

            raise ValueError('Could not successfully restore GeoServer catalog [' + url +
                             'rest/br/restore/]: ' + str(r.status_code) + ' - ' + str(r.text))
        else:
            gs_backup = r.json()
            gs_bk_exec_id = gs_backup['restore']['execution']['id']
            r = requests.get(url + 'rest/br/restore/' + str(gs_bk_exec_id) + '.json',
                             auth=HTTPBasicAuth(user, passwd))
            if (r.status_code == 200):
                gs_bk_exec_status = gs_backup['restore']['execution']['status']
                gs_bk_exec_progress = gs_backup['restore']['execution']['progress']
                gs_bk_exec_progress_updated = '0/0'
                while (gs_bk_exec_status != 'COMPLETED' and gs_bk_exec_status != 'FAILED'):
                    if (gs_bk_exec_progress != gs_bk_exec_progress_updated):
                        gs_bk_exec_progress_updated = gs_bk_exec_progress
                    r = requests.get(url + 'rest/br/restore/' + str(gs_bk_exec_id) + '.json',
                                     auth=HTTPBasicAuth(user, passwd))
                    if (r.status_code == 200):
                        gs_backup = r.json()
                        gs_bk_exec_status = gs_backup['restore']['execution']['status']
                        gs_bk_exec_progress = gs_backup['restore']['execution']['progress']
                        print str(gs_bk_exec_status) + ' - ' + gs_bk_exec_progress
                        time.sleep(3)
                    else:
                        raise ValueError('Could not successfully restore GeoServer catalog [' + url +
                                         'rest/br/restore/]: ' + str(r.status_code) + ' - ' + str(r.text))
            else:
                raise ValueError('Could not successfully restore GeoServer catalog [' + url +
                                 'rest/br/restore/]: ' + str(r.status_code) + ' - ' + str(r.text))

        # Restore GeoServer Data
        if (helpers.GS_DATA_DIR):
            if (helpers.GS_DUMP_RASTER_DATA):
                # Restore '$GS_DATA_DIR/data/geonode'
                gs_data_root = os.path.join(helpers.GS_DATA_DIR, 'data', 'geonode')
                gs_data_folder = os.path.join(target_folder, 'gs_data_dir', 'data', 'geonode')

                try:
                    shutil.rmtree(gs_data_root)
                except:
                    pass

                if not os.path.exists(gs_data_root):
                    os.makedirs(gs_data_root)
                helpers.copy_tree(gs_data_folder, gs_data_root)
                helpers.chmod_tree(gs_data_root)
                print "GeoServer Uploaded Data Restored to '"+gs_data_root+"'."

                # Cleanup '$GS_DATA_DIR/gwc-layers'
                gwc_layers_root = os.path.join(helpers.GS_DATA_DIR, 'gwc-layers')
                if not os.path.isabs(gwc_layers_root):
                    gwc_layers_root = os.path.join(settings.PROJECT_ROOT, '..', gwc_layers_root)
                try:
                    shutil.rmtree(gwc_layers_root)
                    print 'Cleaned out old GeoServer GWC Layers Config: ' + gwc_layers_root
                except:
                    pass
                if not os.path.exists(gwc_layers_root):
                    os.makedirs(gwc_layers_root)

            if (helpers.GS_DUMP_VECTOR_DATA):
                # Restore Vectorial Data from DB
                datastore = settings.OGC_SERVER['default']['DATASTORE']
                if (datastore):
                    ogc_db_name = settings.DATABASES[datastore]['NAME']
                    ogc_db_user = settings.DATABASES[datastore]['USER']
                    ogc_db_passwd = settings.DATABASES[datastore]['PASSWORD']
                    ogc_db_host = settings.DATABASES[datastore]['HOST']
                    ogc_db_port = settings.DATABASES[datastore]['PORT']

                    gs_data_folder = os.path.join(target_folder, 'gs_data_dir', 'data', 'geonode')

                    helpers.restore_db(ogc_db_name, ogc_db_user, ogc_db_port, ogc_db_host,
                                       ogc_db_passwd, gs_data_folder)

        # Prepare Target DB
        try:
            call_command('migrate', interactive=False, load_initial_data=False)
            call_command('flush', interactive=False, load_initial_data=False)

            db_name = settings.DATABASES['default']['NAME']
            db_user = settings.DATABASES['default']['USER']
            db_port = settings.DATABASES['default']['PORT']
            db_host = settings.DATABASES['default']['HOST']
            db_passwd = settings.DATABASES['default']['PASSWORD']

            helpers.patch_db(db_name, db_user, db_port, db_host, db_passwd)
        except:
            traceback.print_exc()

        try:
            # Deactivate GeoNode Signals
            print "Deactivating GeoNode Signals..."
            designals()
            print "...done!"

            # Restore Fixtures
            for app_name, dump_name in zip(helpers.app_names, helpers.dump_names):
                fixture_file = os.path.join(target_folder, dump_name+'.json')

                print "Deserializing "+fixture_file
                try:
                    call_command('loaddata', fixture_file, app_label=app_name)
                except:
                    traceback.print_exc()
                    print "WARNING: No valid fixture data found for '"+dump_name+"'."
                    # helpers.load_fixture(app_name, fixture_file)

            # Restore Media Root
            media_root = settings.MEDIA_ROOT
            media_folder = os.path.join(target_folder, helpers.MEDIA_ROOT)

            try:
                shutil.rmtree(media_root)
            except:
                pass

            if not os.path.exists(media_root):
                os.makedirs(media_root)
            helpers.copy_tree(media_folder, media_root)
            helpers.chmod_tree(media_root)
            print "Media Files Restored into '"+media_root+"'."

            # Restore Static Root
            static_root = settings.STATIC_ROOT
            static_folder = os.path.join(target_folder, helpers.STATIC_ROOT)

            try:
                shutil.rmtree(static_root)
            except:
                pass

            if not os.path.exists(static_root):
                os.makedirs(static_root)
            helpers.copy_tree(static_folder, static_root)
            helpers.chmod_tree(static_root)
            print "Static Root Restored into '"+static_root+"'."

            # Restore Static Folders
            static_folders = settings.STATICFILES_DIRS
            static_files_folders = os.path.join(target_folder, helpers.STATICFILES_DIRS)

            for static_files_folder in static_folders:
                try:
                    shutil.rmtree(static_files_folder)
                except:
                    pass

                if not os.path.exists(static_files_folder):
                    os.makedirs(static_files_folder)
                helpers.copy_tree(os.path.join(static_files_folders,
                                               os.path.basename(os.path.normpath(static_files_folder))),
                                  static_files_folder)
                helpers.chmod_tree(static_files_folder)
                print "Static Files Restored into '"+static_files_folder+"'."

            # Restore Template Folders
            template_folders = settings.TEMPLATE_DIRS
            template_files_folders = os.path.join(target_folder, helpers.TEMPLATE_DIRS)

            for template_files_folder in template_folders:
                try:
                    shutil.rmtree(template_files_folder)
                except:
                    pass

                if not os.path.exists(template_files_folder):
                    os.makedirs(template_files_folder)
                helpers.copy_tree(os.path.join(template_files_folders,
                                               os.path.basename(os.path.normpath(template_files_folder))),
                                  template_files_folder)
                helpers.chmod_tree(template_files_folder)
                print "Template Files Restored into '"+template_files_folder+"'."

            # Restore Locale Folders
            locale_folders = settings.LOCALE_PATHS
            locale_files_folders = os.path.join(target_folder, helpers.LOCALE_PATHS)

            for locale_files_folder in locale_folders:
                try:
                    shutil.rmtree(locale_files_folder)
                except:
                    pass

                if not os.path.exists(locale_files_folder):
                    os.makedirs(locale_files_folder)
                helpers.copy_tree(os.path.join(locale_files_folders,
                                               os.path.basename(os.path.normpath(locale_files_folder))),
                                  locale_files_folder)
                helpers.chmod_tree(locale_files_folder)
                print "Locale Files Restored into '"+locale_files_folder+"'."

            # Cleanup DB
            try:
                db_name = settings.DATABASES['default']['NAME']
                db_user = settings.DATABASES['default']['USER']
                db_port = settings.DATABASES['default']['PORT']
                db_host = settings.DATABASES['default']['HOST']
                db_passwd = settings.DATABASES['default']['PASSWORD']

                helpers.cleanup_db(db_name, db_user, db_port, db_host, db_passwd)
            except:
                traceback.print_exc()

            print "Restore finished. Please find restored files and dumps into:"
            return str(target_folder)
        finally:
            # Reactivate GeoNode Signals
            print "Reactivating GeoNode Signals..."
            resignals()
            print "...done!"
def restore_full(archive):
    """Full Restore of GeoNode DB"""
    try:
        # Create Target Folder
        restore_folder = 'restore'
        if not os.path.exists(restore_folder):
            os.makedirs(restore_folder)

        # Extract ZIP Archive to Target Folder
        target_folder = helpers.unzip_file(archive, restore_folder)

        # Prepare Target DB
        try:
            call_command('syncdb', interactive=False, load_initial_data=False)
            call_command('flush', interactive=False, load_initial_data=False)
            helpers.patch_db()
        except:
            traceback.print_exc()

        # Restore Fixtures
        for app_name, dump_name in zip(helpers.app_names, helpers.dump_names):
            fixture_file = os.path.join(target_folder, dump_name+'.json')

            print "Deserializing "+fixture_file
            try:
                call_command('loaddata', fixture_file, app_label=app_name)
            except:
                # traceback.print_exc()
                print "WARNING: No valid fixture data found for '"+dump_name+"'."
                # helpers.load_fixture(app_name, fixture_file)

        # Restore Media Root
        media_root = settings.MEDIA_ROOT
        media_folder = os.path.join(target_folder, helpers.MEDIA_ROOT)

        try:
            shutil.rmtree(media_root)
        except:
            pass

        if not os.path.exists(media_root):
            os.makedirs(media_root)
        helpers.copy_tree(media_folder, media_root)
        helpers.chmod_tree(media_root)
        print "Media Files Restored into '"+media_root+"'."

        # Restore Static Root
        static_root = settings.STATIC_ROOT
        static_folder = os.path.join(target_folder, helpers.STATIC_ROOT)

        try:
            shutil.rmtree(static_root)
        except:
            pass

        if not os.path.exists(static_root):
            os.makedirs(static_root)
        helpers.copy_tree(static_folder, static_root)
        helpers.chmod_tree(static_root)
        print "Static Root Restored into '"+static_root+"'."

        # Restore Static Folders
        static_folders = settings.STATICFILES_DIRS
        static_files_folders = os.path.join(target_folder, helpers.STATICFILES_DIRS)

        for static_files_folder in static_folders:
            try:
                shutil.rmtree(static_files_folder)
            except:
                pass

            if not os.path.exists(static_files_folder):
                os.makedirs(static_files_folder)
            helpers.copy_tree(os.path.join(static_files_folders,
                                           os.path.basename(os.path.normpath(static_files_folder))),
                              static_files_folder)
            helpers.chmod_tree(static_files_folder)
            print "Static Files Restored into '"+static_files_folder+"'."

        # Restore Template Folders
        template_folders = settings.TEMPLATE_DIRS
        template_files_folders = os.path.join(target_folder, helpers.TEMPLATE_DIRS)

        for template_files_folder in template_folders:
            try:
                shutil.rmtree(template_files_folder)
            except:
                pass

            if not os.path.exists(template_files_folder):
                os.makedirs(template_files_folder)
            helpers.copy_tree(os.path.join(template_files_folders,
                                           os.path.basename(os.path.normpath(template_files_folder))),
                              template_files_folder)
            helpers.chmod_tree(template_files_folder)
            print "Template Files Restored into '"+template_files_folder+"'."

        # Restore Locale Folders
        locale_folders = settings.LOCALE_PATHS
        locale_files_folders = os.path.join(target_folder, helpers.LOCALE_PATHS)

        for locale_files_folder in locale_folders:
            try:
                shutil.rmtree(locale_files_folder)
            except:
                pass

            if not os.path.exists(locale_files_folder):
                os.makedirs(locale_files_folder)
            helpers.copy_tree(os.path.join(locale_files_folders,
                                           os.path.basename(os.path.normpath(locale_files_folder))),
                              locale_files_folder)
            helpers.chmod_tree(locale_files_folder)
            print "Locale Files Restored into '"+locale_files_folder+"'."

        # Cleanup DB
        try:
            helpers.cleanup_db()
        except:
            traceback.print_exc()
    except Exception as err:
        print str(err)
def handle(self, **options):
    # ignore_errors = options.get('ignore_errors')
    config = Config(options)
    force_exec = options.get('force_exec')
    backup_file = options.get('backup_file')
    skip_geoserver = options.get('skip_geoserver')
    backup_dir = options.get('backup_dir')

    if not any([backup_file, backup_dir]):
        raise CommandError("Mandatory option (--backup-file|--backup-dir)")

    if all([backup_file, backup_dir]):
        raise CommandError("Exclusive option (--backup-file|--backup-dir)")

    if backup_file and not os.path.isfile(backup_file):
        raise CommandError("Provided '--backup-file' is not a file")

    if backup_dir and not os.path.isdir(backup_dir):
        raise CommandError("Provided '--backup-dir' is not a directory")

    print "Before proceeding with the Restore, please ensure that:"
    print " 1. The backend (DB or whatever) is accessible and you have rights"
    print " 2. The GeoServer is up and running and reachable from this machine"

    message = 'WARNING: The restore will overwrite ALL GeoNode data. Do you want to proceed?'

    if force_exec or helpers.confirm(prompt=message, resp=False):
        target_folder = backup_dir

        if backup_file:
            # Create Target Folder
            restore_folder = os.path.join(tempfile.gettempdir(), 'restore')
            if not os.path.exists(restore_folder):
                os.makedirs(restore_folder)

            # Extract ZIP Archive to Target Folder
            target_folder = helpers.unzip_file(backup_file, restore_folder)

        if not skip_geoserver:
            self.restore_geoserver_backup(settings, target_folder)
            self.restore_geoserver_raster_data(config, settings, target_folder)
            self.restore_geoserver_vector_data(config, settings, target_folder)
            print("Restoring geoserver external resources")
            self.restore_geoserver_externals(config, settings, target_folder)
        else:
            print("Skipping geoserver backup restore")

        # Prepare Target DB
        try:
            call_command('migrate', interactive=False, load_initial_data=False)
            call_command('flush', interactive=False, load_initial_data=False)

            db_name = settings.DATABASES['default']['NAME']
            db_user = settings.DATABASES['default']['USER']
            db_port = settings.DATABASES['default']['PORT']
            db_host = settings.DATABASES['default']['HOST']
            db_passwd = settings.DATABASES['default']['PASSWORD']

            helpers.patch_db(db_name, db_user, db_port, db_host, db_passwd)
        except:
            traceback.print_exc()

        try:
            # Deactivate GeoNode Signals
            print "Deactivating GeoNode Signals..."
            designals()
            print "...done!"

            # Restore Fixtures
            for app_name, dump_name in zip(config.app_names, config.dump_names):
                fixture_file = os.path.join(target_folder, dump_name+'.json')

                print "Deserializing "+fixture_file
                try:
                    call_command('loaddata', fixture_file, app_label=app_name)
                except:
                    traceback.print_exc()
                    print "WARNING: No valid fixture data found for '"+dump_name+"'."
                    # helpers.load_fixture(app_name, fixture_file)

            # Restore Media Root
            media_root = settings.MEDIA_ROOT
            media_folder = os.path.join(target_folder, helpers.MEDIA_ROOT)

            try:
                shutil.rmtree(media_root)
            except:
                pass

            if not os.path.exists(media_root):
                os.makedirs(media_root)
            helpers.copy_tree(media_folder, media_root)
            helpers.chmod_tree(media_root)
            print "Media Files Restored into '"+media_root+"'."

            # Restore Static Root
            static_root = settings.STATIC_ROOT
            static_folder = os.path.join(target_folder, helpers.STATIC_ROOT)

            try:
                shutil.rmtree(static_root)
            except:
                pass

            if not os.path.exists(static_root):
                os.makedirs(static_root)
            helpers.copy_tree(static_folder, static_root)
            helpers.chmod_tree(static_root)
            print "Static Root Restored into '"+static_root+"'."

            # Restore Static Folders
            static_folders = settings.STATICFILES_DIRS
            static_files_folders = os.path.join(target_folder, helpers.STATICFILES_DIRS)

            for static_files_folder in static_folders:
                try:
                    shutil.rmtree(static_files_folder)
                except:
                    pass

                if not os.path.exists(static_files_folder):
                    os.makedirs(static_files_folder)
                helpers.copy_tree(os.path.join(static_files_folders,
                                               os.path.basename(os.path.normpath(static_files_folder))),
                                  static_files_folder)
                helpers.chmod_tree(static_files_folder)
                print "Static Files Restored into '"+static_files_folder+"'."

            # Restore Template Folders
            template_folders = settings.TEMPLATE_DIRS
            template_files_folders = os.path.join(target_folder, helpers.TEMPLATE_DIRS)

            for template_files_folder in template_folders:
                try:
                    shutil.rmtree(template_files_folder)
                except:
                    pass

                if not os.path.exists(template_files_folder):
                    os.makedirs(template_files_folder)
                helpers.copy_tree(os.path.join(template_files_folders,
                                               os.path.basename(os.path.normpath(template_files_folder))),
                                  template_files_folder)
                helpers.chmod_tree(template_files_folder)
                print "Template Files Restored into '"+template_files_folder+"'."

            # Restore Locale Folders
            locale_folders = settings.LOCALE_PATHS
            locale_files_folders = os.path.join(target_folder, helpers.LOCALE_PATHS)

            for locale_files_folder in locale_folders:
                try:
                    shutil.rmtree(locale_files_folder)
                except:
                    pass

                if not os.path.exists(locale_files_folder):
                    os.makedirs(locale_files_folder)
                helpers.copy_tree(os.path.join(locale_files_folders,
                                               os.path.basename(os.path.normpath(locale_files_folder))),
                                  locale_files_folder)
                helpers.chmod_tree(locale_files_folder)
                print "Locale Files Restored into '"+locale_files_folder+"'."

            # Cleanup DB
            try:
                db_name = settings.DATABASES['default']['NAME']
                db_user = settings.DATABASES['default']['USER']
                db_port = settings.DATABASES['default']['PORT']
                db_host = settings.DATABASES['default']['HOST']
                db_passwd = settings.DATABASES['default']['PASSWORD']

                helpers.cleanup_db(db_name, db_user, db_port, db_host, db_passwd)
            except:
                traceback.print_exc()

            print "Restore finished. Please find restored files and dumps into:"
            return str(target_folder)
        finally:
            # Reactivate GeoNode Signals
            print "Reactivating GeoNode Signals..."
            resignals()
            print "...done!"
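# Usage sketch for the restore command above, assuming it is registered as the
# 'restore' management command (option names mirror the checks in handle()).
from django.core.management import call_command

# Restore from a backup archive without prompting for confirmation:
call_command('restore', force_exec=True, backup_file='/tmp/geonode_backup.zip')

# Or restore from an already-extracted backup directory, skipping GeoServer:
call_command('restore', force_exec=True, backup_dir='/tmp/restore/backup',
             skip_geoserver=True)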