def handle(self, **options): force_exec = options.get('force_exec') source_address = options.get('source_address') target_address = options.get('target_address') if not source_address or len(source_address) == 0: raise CommandError("Source Address '--source-address' is mandatory") if not target_address or len(target_address) == 0: raise CommandError("Target Address '--target-address' is mandatory") print "This will change all Maps, Layers, \ Styles and Links Base URLs from [%s] to [%s]." % (source_address, target_address) print "The operation may take some time, depending on the amount of Layer on GeoNode." message = 'You want to proceed?' if force_exec or helpers.confirm(prompt=message, resp=False): try: # Deactivate GeoNode Signals print "Deactivating GeoNode Signals..." designals() print "...done!" _cnt = Map.objects.filter(thumbnail_url__icontains=source_address).update( thumbnail_url=Func( F('thumbnail_url'),Value(source_address),Value(target_address),function='replace')) print "Updated %s Maps" % _cnt _cnt = MapLayer.objects.filter(ows_url__icontains=source_address).update( ows_url=Func( F('ows_url'),Value(source_address),Value(target_address),function='replace')) MapLayer.objects.filter(layer_params__icontains=source_address).update( layer_params=Func( F('layer_params'),Value(source_address),Value(target_address),function='replace')) print "Updated %s MapLayers" % _cnt _cnt = Layer.objects.filter(thumbnail_url__icontains=source_address).update( thumbnail_url=Func( F('thumbnail_url'),Value(source_address),Value(target_address),function='replace')) print "Updated %s Layers" % _cnt _cnt = Style.objects.filter(sld_url__icontains=source_address).update( sld_url=Func( F('sld_url'),Value(source_address),Value(target_address),function='replace')) print "Updated %s Styles" % _cnt _cnt = Link.objects.filter(url__icontains=source_address).update( url=Func( F('url'),Value(source_address),Value(target_address),function='replace')) print "Updated %s Links" % _cnt _cnt = 
ResourceBase.objects.filter(thumbnail_url__icontains=source_address).update( thumbnail_url=Func( F('thumbnail_url'),Value(source_address),Value(target_address),function='replace')) print "Updated %s ResourceBases" % _cnt finally: # Reactivate GeoNode Signals print "Reactivating GeoNode Signals..." resignals() print "...done!"
def setUp(self):
    """Test initialization.

    Disables GeoNode signals, loads the risks test-layer fixture and
    executes the SQL init script against the 'datastore' connection to
    prepare the test tables.
    """
    designals()
    call_command('loaddata', '005_risks_test_layer')
    # Prepare Test Tables
    with connections['datastore'].cursor() as cursor:
        # BUGFIX: the SQL file used to be closed only on the happy path,
        # leaking the handle when execute()/commit() raised; 'with' closes
        # it unconditionally.
        with open(TESTDATA_SQL_INIT, 'r') as sql_file:
            sql = " ".join(sql_file.readlines())
        cursor.execute(sql)
        connections['datastore'].commit()
def handle(self, **options):
    """Restore a GeoNode backup archive (management command entry point).

    Workflow: extract the '--backup-file' ZIP into a temp folder, restore
    the GeoServer catalog via the REST '/rest/br/restore/' API (polling
    every 3s until COMPLETED/FAILED), restore GeoServer raster/vector
    data, prepare the target DB (migrate + flush + patch), then — with
    GeoNode signals deactivated — reload Django fixtures and copy media,
    static, template and locale trees back into place.

    Returns the extracted restore folder path (str).
    Raises CommandError when '--backup-file' is missing and ValueError
    when the GeoServer catalog restore cannot be started or completed.
    """
    # ignore_errors = options.get('ignore_errors')
    force_exec = options.get('force_exec')
    backup_file = options.get('backup_file')

    if not backup_file or len(backup_file) == 0:
        raise CommandError("Backup archive '--backup-file' is mandatory")

    print "Before proceeding with the Restore, please ensure that:"
    print " 1. The backend (DB or whatever) is accessible and you have rights"
    print " 2. The GeoServer is up and running and reachable from this machine"

    message = 'WARNING: The restore will overwrite ALL GeoNode data. You want to proceed?'
    if force_exec or helpers.confirm(prompt=message, resp=False):
        # Create Target Folder (temp area the archive is extracted into)
        restore_folder = os.path.join(tempfile.gettempdir(), 'restore')
        if not os.path.exists(restore_folder):
            os.makedirs(restore_folder)

        # Extract ZIP Archive to Target Folder
        target_folder = helpers.unzip_file(backup_file, restore_folder)

        # Restore GeoServer Catalog via the GeoServer backup/restore REST API
        url = settings.OGC_SERVER['default']['PUBLIC_LOCATION']
        user = settings.OGC_SERVER['default']['USER']
        passwd = settings.OGC_SERVER['default']['PASSWORD']
        geoserver_bk_file = os.path.join(target_folder, 'geoserver_catalog.zip')

        print "Restoring 'GeoServer Catalog ["+url+"]' into '"+geoserver_bk_file+"'."

        if not os.path.exists(geoserver_bk_file):
            raise ValueError('Could not find GeoServer Backup file [' + geoserver_bk_file + ']')

        # Best Effort Restore: 'options': {'option': ['BK_BEST_EFFORT=true']}
        data = {'restore': {'archiveFile': geoserver_bk_file, 'options': {}}}
        headers = {'Content-type': 'application/json'}
        r = requests.post(url + 'rest/br/restore/', data=json.dumps(data),
                          headers=headers, auth=HTTPBasicAuth(user, passwd))

        if (r.status_code > 201):
            # Restore could not be started: show the execution progress
            # (when retrievable) before bailing out.
            gs_backup = r.json()
            gs_bk_exec_id = gs_backup['restore']['execution']['id']
            r = requests.get(url + 'rest/br/restore/' + str(gs_bk_exec_id) + '.json',
                             auth=HTTPBasicAuth(user, passwd))
            if (r.status_code == 200):
                gs_backup = r.json()
                gs_bk_progress = gs_backup['restore']['execution']['progress']
                print gs_bk_progress
            raise ValueError('Could not successfully restore GeoServer catalog [' + url +
                             'rest/br/restore/]: ' + str(r.status_code) + ' - ' + str(r.text))
        else:
            # Poll the restore execution until it reaches a terminal state
            # (COMPLETED or FAILED), reporting progress along the way.
            gs_backup = r.json()
            gs_bk_exec_id = gs_backup['restore']['execution']['id']
            r = requests.get(url + 'rest/br/restore/' + str(gs_bk_exec_id) + '.json',
                             auth=HTTPBasicAuth(user, passwd))
            if (r.status_code == 200):
                gs_bk_exec_status = gs_backup['restore']['execution']['status']
                gs_bk_exec_progress = gs_backup['restore']['execution']['progress']
                gs_bk_exec_progress_updated = '0/0'
                while (gs_bk_exec_status != 'COMPLETED' and gs_bk_exec_status != 'FAILED'):
                    if (gs_bk_exec_progress != gs_bk_exec_progress_updated):
                        gs_bk_exec_progress_updated = gs_bk_exec_progress
                    r = requests.get(url + 'rest/br/restore/' + str(gs_bk_exec_id) + '.json',
                                     auth=HTTPBasicAuth(user, passwd))
                    if (r.status_code == 200):
                        gs_backup = r.json()
                        gs_bk_exec_status = gs_backup['restore']['execution']['status']
                        gs_bk_exec_progress = gs_backup['restore']['execution']['progress']
                        print str(gs_bk_exec_status) + ' - ' + gs_bk_exec_progress
                        time.sleep(3)
                    else:
                        raise ValueError('Could not successfully restore GeoServer catalog [' + url +
                                         'rest/br/restore/]: ' + str(r.status_code) + ' - ' + str(r.text))
            else:
                raise ValueError('Could not successfully restore GeoServer catalog [' + url +
                                 'rest/br/restore/]: ' + str(r.status_code) + ' - ' + str(r.text))

        # Restore GeoServer Data
        if (helpers.GS_DATA_DIR):
            if (helpers.GS_DUMP_RASTER_DATA):
                # Restore '$GS_DATA_DIR/data/geonode' (uploaded raster data)
                gs_data_root = os.path.join(helpers.GS_DATA_DIR, 'data', 'geonode')
                gs_data_folder = os.path.join(target_folder, 'gs_data_dir', 'data', 'geonode')
                try:
                    shutil.rmtree(gs_data_root)
                except:
                    # best-effort wipe: the tree may not exist yet
                    pass
                if not os.path.exists(gs_data_root):
                    os.makedirs(gs_data_root)
                helpers.copy_tree(gs_data_folder, gs_data_root)
                helpers.chmod_tree(gs_data_root)
                print "GeoServer Uploaded Data Restored to '"+gs_data_root+"'."

                # Cleanup '$GS_DATA_DIR/gwc-layers' so stale GWC tile-layer
                # configs don't survive the restore
                gwc_layers_root = os.path.join(helpers.GS_DATA_DIR, 'gwc-layers')
                if not os.path.isabs(gwc_layers_root):
                    gwc_layers_root = os.path.join(settings.PROJECT_ROOT, '..', gwc_layers_root)
                try:
                    shutil.rmtree(gwc_layers_root)
                    print 'Cleaned out old GeoServer GWC Layers Config: ' + gwc_layers_root
                except:
                    pass
                if not os.path.exists(gwc_layers_root):
                    os.makedirs(gwc_layers_root)

            if (helpers.GS_DUMP_VECTOR_DATA):
                # Restore Vectorial Data from DB (the GeoServer datastore DB)
                datastore = settings.OGC_SERVER['default']['DATASTORE']
                if (datastore):
                    ogc_db_name = settings.DATABASES[datastore]['NAME']
                    ogc_db_user = settings.DATABASES[datastore]['USER']
                    ogc_db_passwd = settings.DATABASES[datastore]['PASSWORD']
                    ogc_db_host = settings.DATABASES[datastore]['HOST']
                    ogc_db_port = settings.DATABASES[datastore]['PORT']
                    gs_data_folder = os.path.join(target_folder, 'gs_data_dir', 'data', 'geonode')
                    helpers.restore_db(ogc_db_name, ogc_db_user, ogc_db_port, ogc_db_host,
                                       ogc_db_passwd, gs_data_folder)

        # Prepare Target DB: migrate + flush, then patch it for the restore
        try:
            call_command('migrate', interactive=False, load_initial_data=False)
            call_command('flush', interactive=False, load_initial_data=False)

            db_name = settings.DATABASES['default']['NAME']
            db_user = settings.DATABASES['default']['USER']
            db_port = settings.DATABASES['default']['PORT']
            db_host = settings.DATABASES['default']['HOST']
            db_passwd = settings.DATABASES['default']['PASSWORD']

            helpers.patch_db(db_name, db_user, db_port, db_host, db_passwd)
        except:
            # NOTE(review): bare except only prints the traceback and carries
            # on — a failed migrate/flush leaves the DB in an undefined state.
            traceback.print_exc()

        try:
            # Deactivate GeoNode Signals so fixture loads / tree copies do
            # not trigger per-object handlers; reactivated in 'finally'.
            print "Deactivating GeoNode Signals..."
            designals()
            print "...done!"

            # Restore Fixtures
            for app_name, dump_name in zip(helpers.app_names, helpers.dump_names):
                fixture_file = os.path.join(target_folder, dump_name+'.json')
                print "Deserializing "+fixture_file
                try:
                    call_command('loaddata', fixture_file, app_label=app_name)
                except:
                    # best-effort: an invalid/missing fixture only warns here
                    traceback.print_exc()
                    print "WARNING: No valid fixture data found for '"+dump_name+"'."
                    # helpers.load_fixture(app_name, fixture_file)

            # Restore Media Root
            media_root = settings.MEDIA_ROOT
            media_folder = os.path.join(target_folder, helpers.MEDIA_ROOT)
            try:
                shutil.rmtree(media_root)
            except:
                pass
            if not os.path.exists(media_root):
                os.makedirs(media_root)
            helpers.copy_tree(media_folder, media_root)
            helpers.chmod_tree(media_root)
            print "Media Files Restored into '"+media_root+"'."

            # Restore Static Root
            static_root = settings.STATIC_ROOT
            static_folder = os.path.join(target_folder, helpers.STATIC_ROOT)
            try:
                shutil.rmtree(static_root)
            except:
                pass
            if not os.path.exists(static_root):
                os.makedirs(static_root)
            helpers.copy_tree(static_folder, static_root)
            helpers.chmod_tree(static_root)
            print "Static Root Restored into '"+static_root+"'."

            # Restore Static Root
            # NOTE(review): this whole section is an exact duplicate of the
            # one above — the static root is wiped and restored twice.
            static_root = settings.STATIC_ROOT
            static_folder = os.path.join(target_folder, helpers.STATIC_ROOT)
            try:
                shutil.rmtree(static_root)
            except:
                pass
            if not os.path.exists(static_root):
                os.makedirs(static_root)
            helpers.copy_tree(static_folder, static_root)
            helpers.chmod_tree(static_root)
            print "Static Root Restored into '"+static_root+"'."

            # Restore Static Folders (each STATICFILES_DIRS entry is matched
            # by basename against the backed-up tree)
            static_folders = settings.STATICFILES_DIRS
            static_files_folders = os.path.join(target_folder, helpers.STATICFILES_DIRS)
            for static_files_folder in static_folders:
                try:
                    shutil.rmtree(static_files_folder)
                except:
                    pass
                if not os.path.exists(static_files_folder):
                    os.makedirs(static_files_folder)
                helpers.copy_tree(os.path.join(static_files_folders,
                                               os.path.basename(os.path.normpath(static_files_folder))),
                                  static_files_folder)
                helpers.chmod_tree(static_files_folder)
                print "Static Files Restored into '"+static_files_folder+"'."

            # Restore Template Folders
            template_folders = settings.TEMPLATE_DIRS
            template_files_folders = os.path.join(target_folder, helpers.TEMPLATE_DIRS)
            for template_files_folder in template_folders:
                try:
                    shutil.rmtree(template_files_folder)
                except:
                    pass
                if not os.path.exists(template_files_folder):
                    os.makedirs(template_files_folder)
                helpers.copy_tree(os.path.join(template_files_folders,
                                               os.path.basename(os.path.normpath(template_files_folder))),
                                  template_files_folder)
                helpers.chmod_tree(template_files_folder)
                print "Template Files Restored into '"+template_files_folder+"'."

            # Restore Locale Folders
            locale_folders = settings.LOCALE_PATHS
            locale_files_folders = os.path.join(target_folder, helpers.LOCALE_PATHS)
            for locale_files_folder in locale_folders:
                try:
                    shutil.rmtree(locale_files_folder)
                except:
                    pass
                if not os.path.exists(locale_files_folder):
                    os.makedirs(locale_files_folder)
                helpers.copy_tree(os.path.join(locale_files_folders,
                                               os.path.basename(os.path.normpath(locale_files_folder))),
                                  locale_files_folder)
                helpers.chmod_tree(locale_files_folder)
                print "Locale Files Restored into '"+locale_files_folder+"'."

            # Cleanup DB (undo the patch applied before the restore)
            try:
                db_name = settings.DATABASES['default']['NAME']
                db_user = settings.DATABASES['default']['USER']
                db_port = settings.DATABASES['default']['PORT']
                db_host = settings.DATABASES['default']['HOST']
                db_passwd = settings.DATABASES['default']['PASSWORD']
                helpers.cleanup_db(db_name, db_user, db_port, db_host, db_passwd)
            except:
                traceback.print_exc()

            print "Restore finished. Please find restored files and dumps into:"
            return str(target_folder)
        finally:
            # Reactivate GeoNode Signals
            print "Reactivating GeoNode Signals..."
            resignals()
            print "...done!"
def handle(self, **options): # ignore_errors = options.get('ignore_errors') config = Config(options) force_exec = options.get('force_exec') backup_dir = options.get('backup_dir') skip_geoserver = options.get('skip_geoserver') if not backup_dir or len(backup_dir) == 0: raise CommandError("Destination folder '--backup-dir' is mandatory") print "Before proceeding with the Backup, please ensure that:" print " 1. The backend (DB or whatever) is accessible and you have rights" print " 2. The GeoServer is up and running and reachable from this machine" message = 'You want to proceed?' if force_exec or helpers.confirm(prompt=message, resp=False): # Create Target Folder dir_time_suffix = get_dir_time_suffix() target_folder = os.path.join(backup_dir, dir_time_suffix) if not os.path.exists(target_folder): os.makedirs(target_folder) # Temporary folder to store backup files. It will be deleted at the end. os.chmod(target_folder, 0777) if not skip_geoserver: self.create_geoserver_backup(settings, target_folder) self.dump_geoserver_raster_data(config, settings, target_folder) self.dump_geoserver_vector_data(config, settings, target_folder) print("Duming geoserver external resources") self.dump_geoserver_externals(config, settings, target_folder) else: print("Skipping geoserver backup") try: # Deactivate GeoNode Signals print "Deactivating GeoNode Signals..." designals() print "...done!" # Dump Fixtures for app_name, dump_name in zip(config.app_names, config.dump_names): print "Dumping '"+app_name+"' into '"+dump_name+".json'." # Point stdout at a file for dumping data to. 
output = open(os.path.join(target_folder, dump_name+'.json'), 'w') call_command('dumpdata', app_name, format='json', indent=2, natural=True, stdout=output) output.close() # Store Media Root media_root = settings.MEDIA_ROOT media_folder = os.path.join(target_folder, helpers.MEDIA_ROOT) if not os.path.exists(media_folder): os.makedirs(media_folder) copy_tree(media_root, media_folder) print "Saved Media Files from '"+media_root+"'." # Store Static Root static_root = settings.STATIC_ROOT static_folder = os.path.join(target_folder, helpers.STATIC_ROOT) if not os.path.exists(static_folder): os.makedirs(static_folder) copy_tree(static_root, static_folder) print "Saved Static Root from '"+static_root+"'." # Store Static Folders static_folders = settings.STATICFILES_DIRS static_files_folders = os.path.join(target_folder, helpers.STATICFILES_DIRS) if not os.path.exists(static_files_folders): os.makedirs(static_files_folders) for static_files_folder in static_folders: static_folder = os.path.join(static_files_folders, os.path.basename(os.path.normpath(static_files_folder))) if not os.path.exists(static_folder): os.makedirs(static_folder) copy_tree(static_files_folder, static_folder) print "Saved Static Files from '"+static_files_folder+"'." # Store Template Folders template_folders = [] try: template_folders = settings.TEMPLATE_DIRS except: try: template_folders = settings.TEMPLATES[0]['DIRS'] except: pass template_files_folders = os.path.join(target_folder, helpers.TEMPLATE_DIRS) if not os.path.exists(template_files_folders): os.makedirs(template_files_folders) for template_files_folder in template_folders: template_folder = os.path.join(template_files_folders, os.path.basename(os.path.normpath(template_files_folder))) if not os.path.exists(template_folder): os.makedirs(template_folder) copy_tree(template_files_folder, template_folder) print "Saved Template Files from '"+template_files_folder+"'." 
# Store Locale Folders locale_folders = settings.LOCALE_PATHS locale_files_folders = os.path.join(target_folder, helpers.LOCALE_PATHS) if not os.path.exists(locale_files_folders): os.makedirs(locale_files_folders) for locale_files_folder in locale_folders: locale_folder = os.path.join(locale_files_folders, os.path.basename(os.path.normpath(locale_files_folder))) if not os.path.exists(locale_folder): os.makedirs(locale_folder) copy_tree(locale_files_folder, locale_folder) print "Saved Locale Files from '"+locale_files_folder+"'." # Create Final ZIP Archive zip_dir(target_folder, os.path.join(backup_dir, dir_time_suffix+'.zip')) # Clean-up Temp Folder try: shutil.rmtree(target_folder) except: print "WARNING: Could not be possible to delete the temp folder: '" + str(target_folder) + "'" print "Backup Finished. Archive generated." return str(os.path.join(backup_dir, dir_time_suffix+'.zip')) finally: # Reactivate GeoNode Signals print "Reactivating GeoNode Signals..." resignals() print "...done!"
def handle(self, **options): force_exec = options.get('force_exec') source_address = options.get('source_address') target_address = options.get('target_address') if not source_address or len(source_address) == 0: raise CommandError("Source Address '--source-address' is mandatory") if not target_address or len(target_address) == 0: raise CommandError("Target Address '--target-address' is mandatory") print "This will change all Maps, Layers, \ Styles and Links Base URLs from [%s] to [%s]." % (source_address, target_address) print "The operation may take some time, depending on the amount of Layer on GeoNode." message = 'You want to proceed?' if force_exec or helpers.confirm(prompt=message, resp=False): try: # Deactivate GeoNode Signals print "Deactivating GeoNode Signals..." designals() print "...done!" _cnt = Map.objects.filter(thumbnail_url__icontains=source_address).update( thumbnail_url=Func( F('thumbnail_url'),Value(source_address),Value(target_address),function='replace')) print "Updated %s Maps" % _cnt _cnt = MapLayer.objects.filter(ows_url__icontains=source_address).update( ows_url=Func( F('ows_url'),Value(source_address),Value(target_address),function='replace')) MapLayer.objects.filter(layer_params__icontains=source_address).update( layer_params=Func( F('layer_params'),Value(source_address),Value(target_address),function='replace')) print "Updated %s MapLayers" % _cnt _cnt = Layer.objects.filter(thumbnail_url__icontains=source_address).update( thumbnail_url=Func( F('thumbnail_url'),Value(source_address),Value(target_address),function='replace')) print "Updated %s Layers" % _cnt _cnt = Style.objects.filter(sld_url__icontains=source_address).update( sld_url=Func( F('sld_url'),Value(source_address),Value(target_address),function='replace')) print "Updated %s Styles" % _cnt _cnt = Link.objects.filter(url__icontains=source_address).update( url=Func( F('url'),Value(source_address),Value(target_address),function='replace')) print "Updated %s Links" % _cnt _cnt = 
ResourceBase.objects.filter(thumbnail_url__icontains=source_address).update( thumbnail_url=Func( F('thumbnail_url'),Value(source_address),Value(target_address),function='replace')) _cnt += ResourceBase.objects.filter(csw_anytext__icontains=source_address).update( csw_anytext=Func( F('csw_anytext'), Value(source_address), Value(target_address), function='replace')) _cnt += ResourceBase.objects.filter(metadata_xml__icontains=source_address).update( metadata_xml=Func( F('metadata_xml'), Value(source_address), Value(target_address), function='replace')) print "Updated %s ResourceBases" % _cnt finally: # Reactivate GeoNode Signals print "Reactivating GeoNode Signals..." resignals() print "...done!"
def handle(self, **options):
    """Restore a GeoNode backup (management command entry point).

    Accepts either '--backup-file' (a ZIP to extract into a temp folder)
    or '--backup-dir' (an already-extracted folder) — exactly one of the
    two.  After sanity-checking write access to all destination trees, it
    restores the GeoServer catalog/data, prepares the target DB (migrate
    + patch), then — with GeoNode signals deactivated — flushes the DB,
    reloads fixtures and copies media/static/template/locale trees back,
    finishing with 'collectstatic' and a DB cleanup.

    Returns the restore source folder path (str).
    Raises CommandError on invalid option combinations; re-raises sanity
    check, flush and fixture-load failures.
    """
    # ignore_errors = options.get('ignore_errors')
    config = Config(options)
    force_exec = options.get('force_exec')
    backup_file = options.get('backup_file')
    skip_geoserver = options.get('skip_geoserver')
    backup_dir = options.get('backup_dir')

    # Exactly one of --backup-file / --backup-dir must be provided
    if not any([backup_file, backup_dir]):
        raise CommandError("Mandatory option (--backup-file|--backup-dir)")
    if all([backup_file, backup_dir]):
        raise CommandError("Exclusive option (--backup-file|--backup-dir)")
    if backup_file and not os.path.isfile(backup_file):
        raise CommandError("Provided '--backup-file' is not a file")
    if backup_dir and not os.path.isdir(backup_dir):
        raise CommandError("Provided '--backup-dir' is not a directory")

    print "Before proceeding with the Restore, please ensure that:"
    print " 1. The backend (DB or whatever) is accessible and you have rights"
    print " 2. The GeoServer is up and running and reachable from this machine"

    message = 'WARNING: The restore will overwrite ALL GeoNode data. You want to proceed?'
    if force_exec or helpers.confirm(prompt=message, resp=False):
        target_folder = backup_dir
        if backup_file:
            # Create Target Folder and extract the ZIP archive into it
            restore_folder = os.path.join(tempfile.gettempdir(), 'restore')
            if not os.path.exists(restore_folder):
                os.makedirs(restore_folder)
            target_folder = extract_archive(backup_file, restore_folder)

        # Write Checks: resolve every destination tree and its backed-up
        # counterpart inside target_folder
        media_root = settings.MEDIA_ROOT
        media_folder = os.path.join(target_folder, helpers.MEDIA_ROOT)
        static_root = settings.STATIC_ROOT
        static_folder = os.path.join(target_folder, helpers.STATIC_ROOT)
        static_folders = settings.STATICFILES_DIRS
        static_files_folders = os.path.join(target_folder, helpers.STATICFILES_DIRS)
        # NOTE(review): unlike the sibling implementation, TEMPLATE_DIRS is
        # read without a fallback to settings.TEMPLATES[0]['DIRS'] — this
        # raises on Django versions that dropped TEMPLATE_DIRS.
        template_folders = settings.TEMPLATE_DIRS
        template_files_folders = os.path.join(target_folder, helpers.TEMPLATE_DIRS)
        locale_folders = settings.LOCALE_PATHS
        locale_files_folders = os.path.join(target_folder, helpers.LOCALE_PATHS)

        try:
            # chmod everything up-front so a permissions problem aborts the
            # restore before any data has been touched
            print("[Sanity Check] Full Write Access to '{}' ...".format(media_root))
            chmod_tree(media_root)
            print("[Sanity Check] Full Write Access to '{}' ...".format(static_root))
            chmod_tree(static_root)
            for static_files_folder in static_folders:
                print("[Sanity Check] Full Write Access to '{}' ...".format(static_files_folder))
                chmod_tree(static_files_folder)
            for template_files_folder in template_folders:
                print("[Sanity Check] Full Write Access to '{}' ...".format(template_files_folder))
                chmod_tree(template_files_folder)
            for locale_files_folder in locale_folders:
                print("[Sanity Check] Full Write Access to '{}' ...".format(locale_files_folder))
                chmod_tree(locale_files_folder)
        except:
            print("...Sanity Checks on Folder failed. Please make sure that the current user has full WRITE access to the above folders (and sub-folders or files).")
            print("Reason:")
            raise

        if not skip_geoserver:
            self.restore_geoserver_backup(settings, target_folder)
            self.restore_geoserver_raster_data(config, settings, target_folder)
            self.restore_geoserver_vector_data(config, settings, target_folder)
            print("Restoring geoserver external resources")
            self.restore_geoserver_externals(config, settings, target_folder)
        else:
            print("Skipping geoserver backup restore")

        # Prepare Target DB: migrate, then patch it for the restore
        try:
            call_command('migrate', interactive=False, load_initial_data=False)

            db_name = settings.DATABASES['default']['NAME']
            db_user = settings.DATABASES['default']['USER']
            db_port = settings.DATABASES['default']['PORT']
            db_host = settings.DATABASES['default']['HOST']
            db_passwd = settings.DATABASES['default']['PASSWORD']

            helpers.patch_db(db_name, db_user, db_port, db_host, db_passwd,
                             settings.MONITORING_ENABLED)
        except:
            # NOTE(review): bare except only prints the traceback and carries on
            traceback.print_exc()

        try:
            # Deactivate GeoNode Signals for the duration of the data load;
            # reactivated in 'finally'
            print "Deactivating GeoNode Signals..."
            designals()
            print "...done!"

            # Flush DB (native helper first, Django 'flush' as fallback)
            try:
                db_name = settings.DATABASES['default']['NAME']
                db_user = settings.DATABASES['default']['USER']
                db_port = settings.DATABASES['default']['PORT']
                db_host = settings.DATABASES['default']['HOST']
                db_passwd = settings.DATABASES['default']['PASSWORD']

                helpers.flush_db(db_name, db_user, db_port, db_host, db_passwd)
            except:
                try:
                    call_command('flush', interactive=False, load_initial_data=False)
                except:
                    traceback.print_exc()
                    raise

            # Restore Fixtures (any failure aborts the restore)
            for app_name, dump_name in zip(config.app_names, config.dump_names):
                fixture_file = os.path.join(target_folder, dump_name+'.json')
                print "Deserializing "+fixture_file
                try:
                    call_command('loaddata', fixture_file, app_label=app_name)
                except:
                    traceback.print_exc()
                    print "WARNING: No valid fixture data found for '"+dump_name+"'."
                    # helpers.load_fixture(app_name, fixture_file)
                    raise

            # Restore Media Root
            try:
                shutil.rmtree(media_root)
            except:
                pass
            if not os.path.exists(media_root):
                os.makedirs(media_root)
            copy_tree(media_folder, media_root)
            chmod_tree(media_root)
            print "Media Files Restored into '"+media_root+"'."

            # Restore Static Root
            try:
                shutil.rmtree(static_root)
            except:
                pass
            if not os.path.exists(static_root):
                os.makedirs(static_root)
            copy_tree(static_folder, static_root)
            chmod_tree(static_root)
            print "Static Root Restored into '"+static_root+"'."

            # Restore Static Root
            # NOTE(review): exact duplicate of the section above — the static
            # root is wiped and restored twice.
            try:
                shutil.rmtree(static_root)
            except:
                pass
            if not os.path.exists(static_root):
                os.makedirs(static_root)
            copy_tree(static_folder, static_root)
            chmod_tree(static_root)
            print "Static Root Restored into '"+static_root+"'."

            # Restore Static Folders (matched by basename in the backup tree)
            for static_files_folder in static_folders:
                try:
                    shutil.rmtree(static_files_folder)
                except:
                    pass
                if not os.path.exists(static_files_folder):
                    os.makedirs(static_files_folder)
                copy_tree(os.path.join(static_files_folders,
                                       os.path.basename(os.path.normpath(static_files_folder))),
                          static_files_folder)
                chmod_tree(static_files_folder)
                print "Static Files Restored into '"+static_files_folder+"'."

            # Restore Template Folders
            for template_files_folder in template_folders:
                try:
                    shutil.rmtree(template_files_folder)
                except:
                    pass
                if not os.path.exists(template_files_folder):
                    os.makedirs(template_files_folder)
                copy_tree(os.path.join(template_files_folders,
                                       os.path.basename(os.path.normpath(template_files_folder))),
                          template_files_folder)
                chmod_tree(template_files_folder)
                print "Template Files Restored into '"+template_files_folder+"'."

            # Restore Locale Folders
            for locale_files_folder in locale_folders:
                try:
                    shutil.rmtree(locale_files_folder)
                except:
                    pass
                if not os.path.exists(locale_files_folder):
                    os.makedirs(locale_files_folder)
                copy_tree(os.path.join(locale_files_folders,
                                       os.path.basename(os.path.normpath(locale_files_folder))),
                          locale_files_folder)
                chmod_tree(locale_files_folder)
                print "Locale Files Restored into '"+locale_files_folder+"'."

            call_command('collectstatic', interactive=False)

            # Cleanup DB (undo the patch applied before the restore)
            try:
                db_name = settings.DATABASES['default']['NAME']
                db_user = settings.DATABASES['default']['USER']
                db_port = settings.DATABASES['default']['PORT']
                db_host = settings.DATABASES['default']['HOST']
                db_passwd = settings.DATABASES['default']['PASSWORD']

                helpers.cleanup_db(db_name, db_user, db_port, db_host, db_passwd)
            except:
                traceback.print_exc()

            return str(target_folder)
        finally:
            # Reactivate GeoNode Signals and fake-apply migrations so the
            # migration state matches the restored data
            print "Reactivating GeoNode Signals..."
            resignals()
            print "...done!"
            call_command('migrate', interactive=False, load_initial_data=False, fake=True)
            print "HINT: If you migrated from another site, do not forget to run the command 'migrate_baseurl' to fix Links"
            print " e.g.:  DJANGO_SETTINGS_MODULE=my_geonode.settings python manage.py migrate_baseurl --source-address=my-host-dev.geonode.org --target-address=my-host-prod.geonode.org"
            print "Restore finished. Please find restored files and dumps into:"
def handle(self, **options):
    """Restore a GeoNode backup (management command entry point).

    Accepts either '--backup-file' (a ZIP to extract into a temp folder)
    or '--backup-dir' (an already-extracted folder) — exactly one of the
    two.  After sanity-checking write access to all destination trees, it
    restores the GeoServer catalog/data, prepares the target DB (migrate
    + patch), then — with GeoNode signals deactivated — flushes the DB,
    reloads fixtures and copies media/static/template/locale trees back,
    finishing with 'collectstatic' and a DB cleanup.

    Returns the restore source folder path (str).
    Raises CommandError on invalid option combinations; re-raises sanity
    check, flush and fixture-load failures.
    """
    # ignore_errors = options.get('ignore_errors')
    config = Config(options)
    force_exec = options.get('force_exec')
    backup_file = options.get('backup_file')
    skip_geoserver = options.get('skip_geoserver')
    backup_dir = options.get('backup_dir')

    # Exactly one of --backup-file / --backup-dir must be provided
    if not any([backup_file, backup_dir]):
        raise CommandError("Mandatory option (--backup-file|--backup-dir)")
    if all([backup_file, backup_dir]):
        raise CommandError("Exclusive option (--backup-file|--backup-dir)")
    if backup_file and not os.path.isfile(backup_file):
        raise CommandError("Provided '--backup-file' is not a file")
    if backup_dir and not os.path.isdir(backup_dir):
        raise CommandError("Provided '--backup-dir' is not a directory")

    print "Before proceeding with the Restore, please ensure that:"
    print " 1. The backend (DB or whatever) is accessible and you have rights"
    print " 2. The GeoServer is up and running and reachable from this machine"

    message = 'WARNING: The restore will overwrite ALL GeoNode data. You want to proceed?'
    if force_exec or helpers.confirm(prompt=message, resp=False):
        target_folder = backup_dir
        if backup_file:
            # Create Target Folder and extract the ZIP archive into it
            restore_folder = os.path.join(tempfile.gettempdir(), 'restore')
            if not os.path.exists(restore_folder):
                os.makedirs(restore_folder)
            target_folder = extract_archive(backup_file, restore_folder)

        # Write Checks: resolve every destination tree and its backed-up
        # counterpart inside target_folder
        media_root = settings.MEDIA_ROOT
        media_folder = os.path.join(target_folder, helpers.MEDIA_ROOT)
        static_root = settings.STATIC_ROOT
        static_folder = os.path.join(target_folder, helpers.STATIC_ROOT)
        static_folders = settings.STATICFILES_DIRS
        static_files_folders = os.path.join(target_folder, helpers.STATICFILES_DIRS)
        # TEMPLATE_DIRS on old Django, falling back to TEMPLATES[0]['DIRS']
        # on versions that dropped it
        template_folders = []
        try:
            template_folders = settings.TEMPLATE_DIRS
        except:
            try:
                template_folders = settings.TEMPLATES[0]['DIRS']
            except:
                pass
        template_files_folders = os.path.join(target_folder, helpers.TEMPLATE_DIRS)
        locale_folders = settings.LOCALE_PATHS
        locale_files_folders = os.path.join(target_folder, helpers.LOCALE_PATHS)

        try:
            # chmod everything up-front so a permissions problem aborts the
            # restore before any data has been touched
            print("[Sanity Check] Full Write Access to '{}' ...".format(media_root))
            chmod_tree(media_root)
            print("[Sanity Check] Full Write Access to '{}' ...".format(static_root))
            chmod_tree(static_root)
            for static_files_folder in static_folders:
                print("[Sanity Check] Full Write Access to '{}' ...".format(static_files_folder))
                chmod_tree(static_files_folder)
            for template_files_folder in template_folders:
                print("[Sanity Check] Full Write Access to '{}' ...".format(template_files_folder))
                chmod_tree(template_files_folder)
            for locale_files_folder in locale_folders:
                print("[Sanity Check] Full Write Access to '{}' ...".format(locale_files_folder))
                chmod_tree(locale_files_folder)
        except:
            print("...Sanity Checks on Folder failed. Please make sure that the current user has full WRITE access to the above folders (and sub-folders or files).")
            print("Reason:")
            raise

        if not skip_geoserver:
            self.restore_geoserver_backup(settings, target_folder)
            self.restore_geoserver_raster_data(config, settings, target_folder)
            self.restore_geoserver_vector_data(config, settings, target_folder)
            print("Restoring geoserver external resources")
            self.restore_geoserver_externals(config, settings, target_folder)
        else:
            print("Skipping geoserver backup restore")

        # Prepare Target DB: migrate, then patch it for the restore
        try:
            call_command('migrate', interactive=False, load_initial_data=False)

            db_name = settings.DATABASES['default']['NAME']
            db_user = settings.DATABASES['default']['USER']
            db_port = settings.DATABASES['default']['PORT']
            db_host = settings.DATABASES['default']['HOST']
            db_passwd = settings.DATABASES['default']['PASSWORD']

            helpers.patch_db(db_name, db_user, db_port, db_host, db_passwd,
                             settings.MONITORING_ENABLED)
        except:
            # NOTE(review): bare except only prints the traceback and carries on
            traceback.print_exc()

        try:
            # Deactivate GeoNode Signals for the duration of the data load;
            # reactivated in 'finally'
            print "Deactivating GeoNode Signals..."
            designals()
            print "...done!"

            # Flush DB (native helper first, Django 'flush' as fallback)
            try:
                db_name = settings.DATABASES['default']['NAME']
                db_user = settings.DATABASES['default']['USER']
                db_port = settings.DATABASES['default']['PORT']
                db_host = settings.DATABASES['default']['HOST']
                db_passwd = settings.DATABASES['default']['PASSWORD']

                helpers.flush_db(db_name, db_user, db_port, db_host, db_passwd)
            except:
                try:
                    call_command('flush', interactive=False, load_initial_data=False)
                except:
                    traceback.print_exc()
                    raise

            # Restore Fixtures (any failure aborts the restore)
            for app_name, dump_name in zip(config.app_names, config.dump_names):
                fixture_file = os.path.join(target_folder, dump_name+'.json')
                print "Deserializing "+fixture_file
                try:
                    call_command('loaddata', fixture_file, app_label=app_name)
                except:
                    traceback.print_exc()
                    print "WARNING: No valid fixture data found for '"+dump_name+"'."
                    # helpers.load_fixture(app_name, fixture_file)
                    raise

            # Restore Media Root
            try:
                shutil.rmtree(media_root)
            except:
                pass
            if not os.path.exists(media_root):
                os.makedirs(media_root)
            copy_tree(media_folder, media_root)
            chmod_tree(media_root)
            print "Media Files Restored into '"+media_root+"'."

            # Restore Static Root
            try:
                shutil.rmtree(static_root)
            except:
                pass
            if not os.path.exists(static_root):
                os.makedirs(static_root)
            copy_tree(static_folder, static_root)
            chmod_tree(static_root)
            print "Static Root Restored into '"+static_root+"'."

            # Restore Static Root
            # NOTE(review): exact duplicate of the section above — the static
            # root is wiped and restored twice.
            try:
                shutil.rmtree(static_root)
            except:
                pass
            if not os.path.exists(static_root):
                os.makedirs(static_root)
            copy_tree(static_folder, static_root)
            chmod_tree(static_root)
            print "Static Root Restored into '"+static_root+"'."

            # Restore Static Folders (matched by basename in the backup tree)
            for static_files_folder in static_folders:
                try:
                    shutil.rmtree(static_files_folder)
                except:
                    pass
                if not os.path.exists(static_files_folder):
                    os.makedirs(static_files_folder)
                copy_tree(os.path.join(static_files_folders,
                                       os.path.basename(os.path.normpath(static_files_folder))),
                          static_files_folder)
                chmod_tree(static_files_folder)
                print "Static Files Restored into '"+static_files_folder+"'."

            # Restore Template Folders
            for template_files_folder in template_folders:
                try:
                    shutil.rmtree(template_files_folder)
                except:
                    pass
                if not os.path.exists(template_files_folder):
                    os.makedirs(template_files_folder)
                copy_tree(os.path.join(template_files_folders,
                                       os.path.basename(os.path.normpath(template_files_folder))),
                          template_files_folder)
                chmod_tree(template_files_folder)
                print "Template Files Restored into '"+template_files_folder+"'."

            # Restore Locale Folders
            for locale_files_folder in locale_folders:
                try:
                    shutil.rmtree(locale_files_folder)
                except:
                    pass
                if not os.path.exists(locale_files_folder):
                    os.makedirs(locale_files_folder)
                copy_tree(os.path.join(locale_files_folders,
                                       os.path.basename(os.path.normpath(locale_files_folder))),
                          locale_files_folder)
                chmod_tree(locale_files_folder)
                print "Locale Files Restored into '"+locale_files_folder+"'."

            call_command('collectstatic', interactive=False)

            # Cleanup DB (undo the patch applied before the restore)
            try:
                db_name = settings.DATABASES['default']['NAME']
                db_user = settings.DATABASES['default']['USER']
                db_port = settings.DATABASES['default']['PORT']
                db_host = settings.DATABASES['default']['HOST']
                db_passwd = settings.DATABASES['default']['PASSWORD']

                helpers.cleanup_db(db_name, db_user, db_port, db_host, db_passwd)
            except:
                traceback.print_exc()

            return str(target_folder)
        finally:
            # Reactivate GeoNode Signals and fake-apply migrations so the
            # migration state matches the restored data
            print "Reactivating GeoNode Signals..."
            resignals()
            print "...done!"
            call_command('migrate', interactive=False, load_initial_data=False, fake=True)
            print "HINT: If you migrated from another site, do not forget to run the command 'migrate_baseurl' to fix Links"
            print " e.g.:  DJANGO_SETTINGS_MODULE=my_geonode.settings python manage.py migrate_baseurl --source-address=my-host-dev.geonode.org --target-address=my-host-prod.geonode.org"
            print "Restore finished. Please find restored files and dumps into:"
def test_risk_layers(self):
    """ Check if layers are saved correctly along with risk analysis """
    client = self.client
    # Country-level data-extraction endpoint (Afghanistan fixture).
    url = '/risks/data_extraction/loc/AF/'
    resp = client.get(url)
    self.assertEqual(resp.status_code, 200)
    data = json.loads(resp.content)
    risk_analysis = RiskAnalysis.objects.all().first()
    # Walk every hazard type in the overview; for hazard types that have
    # risk analyses attached, follow each analysis link and require HTTP 200.
    for ht in data['overview']:
        url = ht['href']
        resp = client.get(url)
        if ht['riskAnalysis'] > 0:
            risk_link = ht['href']
            resp = client.get(risk_link)
            data = json.loads(resp.content)
            for ra in data['analysisType']['riskAnalysis']:
                # NOTE: risk_url keeps the last visited analysis URL for
                # the final additionalLayers check at the bottom.
                risk_url = url = ra['href']
                resp = client.get(url)
                self.assertEqual(
                    resp.status_code, 200,
                    'wrong status on non-empty hazard type {}: {}'.format(
                        url, resp.content))
    # Layers API must initially report no additional layers.
    layers_url = reverse('risks:api:layers', args=(risk_analysis.id, ))
    l0 = client.get(layers_url)
    l0_data = json.loads(l0.content)
    self.assertEqual(l0.status_code, 200)
    self.assertEqual(l0_data.get('data').get('layers'), [])
    # Create test layers with GeoNode signals disabled, so no backend
    # (e.g. GeoServer) synchronization is attempted.
    designals()
    _layers = []
    for lname in ['l1', 'l2', 'l3']:
        _layers.append(Layer.objects.create(name=lname))
    resignals()
    # Attach the first two layers to the risk analysis.
    to_add = [str(_l.id) for _l in _layers[:2]]
    l1 = client.post(layers_url, {'layers': to_add})
    # check if additional layers match in db
    rq = risk_analysis.additional_layers.all()
    self.assertEqual(rq.count(), len(to_add))
    self.assertEqual([str(r.id) for r in rq], to_add)
    # .. and in response
    l1_data = json.loads(l1.content)
    self.assertEqual(l1.status_code, 200)
    self.assertEqual(len(l1_data.get('data').get('layers')), len(to_add))
    # update check
    to_add = [str(_l.id) for _l in _layers[2:]]
    l2 = client.post(layers_url, {'layers': to_add})
    self.assertEqual(l2.status_code, 200)
    self.assertTrue(len(to_add) == 1)
    l2_data = json.loads(l2.content)
    # check if additional layers match in db
    rq = risk_analysis.additional_layers.all()
    self.assertEqual(rq.count(), len(to_add))
    self.assertEqual([str(r.id) for r in rq], to_add)
    self.assertEqual(len(l2_data.get('data').get('layers')), len(to_add))
    # The analysis detail view must expose the attached layers too.
    resp = client.get(risk_url)
    self.assertEqual(
        resp.status_code, 200,
        'wrong status on non-empty hazard type {}: {}'.format(
            url, resp.content))
    resp_data = json.loads(resp.content)
    self.assertEqual(
        len(resp_data['riskAnalysisData']['additionalLayers']),
        len(to_add))
def handle(self, **options): # ignore_errors = options.get('ignore_errors') config = Config(options) force_exec = options.get('force_exec') backup_file = options.get('backup_file') skip_geoserver = options.get('skip_geoserver') backup_dir = options.get('backup_dir') if not any([backup_file, backup_dir]): raise CommandError("Mandatory option (--backup-file|--backup-dir)") if all([backup_file, backup_dir]): raise CommandError("Exclusive option (--backup-file|--backup-dir)") if backup_file and not os.path.isfile(backup_file): raise CommandError("Provided '--backup-file' is not a file") if backup_dir and not os.path.isdir(backup_dir): raise CommandError("Provided '--backup-dir' is not a directory") print "Before proceeding with the Restore, please ensure that:" print " 1. The backend (DB or whatever) is accessible and you have rights" print " 2. The GeoServer is up and running and reachable from this machine" message = 'WARNING: The restore will overwrite ALL GeoNode data. You want to proceed?' 
if force_exec or helpers.confirm(prompt=message, resp=False): target_folder = backup_dir if backup_file: # Create Target Folder restore_folder = os.path.join(tempfile.gettempdir(), 'restore') if not os.path.exists(restore_folder): os.makedirs(restore_folder) # Extract ZIP Archive to Target Folder target_folder = helpers.unzip_file(backup_file, restore_folder) if not skip_geoserver: self.restore_geoserver_backup(settings, target_folder) self.restore_geoserver_raster_data(config, settings, target_folder) self.restore_geoserver_vector_data(config, settings, target_folder) print("Restoring geoserver external resources") self.restore_geoserver_externals(config, settings, target_folder) else: print("Skipping geoserver backup restore") # Prepare Target DB try: call_command('migrate', interactive=False, load_initial_data=False) call_command('flush', interactive=False, load_initial_data=False) db_name = settings.DATABASES['default']['NAME'] db_user = settings.DATABASES['default']['USER'] db_port = settings.DATABASES['default']['PORT'] db_host = settings.DATABASES['default']['HOST'] db_passwd = settings.DATABASES['default']['PASSWORD'] helpers.patch_db(db_name, db_user, db_port, db_host, db_passwd) except: traceback.print_exc() try: # Deactivate GeoNode Signals print "Deactivating GeoNode Signals..." designals() print "...done!" # Restore Fixtures for app_name, dump_name in zip(config.app_names, config.dump_names): fixture_file = os.path.join(target_folder, dump_name+'.json') print "Deserializing "+fixture_file try: call_command('loaddata', fixture_file, app_label=app_name) except: traceback.print_exc() print "WARNING: No valid fixture data found for '"+dump_name+"'." 
# helpers.load_fixture(app_name, fixture_file) # Restore Media Root media_root = settings.MEDIA_ROOT media_folder = os.path.join(target_folder, helpers.MEDIA_ROOT) try: shutil.rmtree(media_root) except: pass if not os.path.exists(media_root): os.makedirs(media_root) helpers.copy_tree(media_folder, media_root) helpers.chmod_tree(media_root) print "Media Files Restored into '"+media_root+"'." # Restore Static Root static_root = settings.STATIC_ROOT static_folder = os.path.join(target_folder, helpers.STATIC_ROOT) try: shutil.rmtree(static_root) except: pass if not os.path.exists(static_root): os.makedirs(static_root) helpers.copy_tree(static_folder, static_root) helpers.chmod_tree(static_root) print "Static Root Restored into '"+static_root+"'." # Restore Static Root static_root = settings.STATIC_ROOT static_folder = os.path.join(target_folder, helpers.STATIC_ROOT) try: shutil.rmtree(static_root) except: pass if not os.path.exists(static_root): os.makedirs(static_root) helpers.copy_tree(static_folder, static_root) helpers.chmod_tree(static_root) print "Static Root Restored into '"+static_root+"'." # Restore Static Folders static_folders = settings.STATICFILES_DIRS static_files_folders = os.path.join(target_folder, helpers.STATICFILES_DIRS) for static_files_folder in static_folders: try: shutil.rmtree(static_files_folder) except: pass if not os.path.exists(static_files_folder): os.makedirs(static_files_folder) helpers.copy_tree(os.path.join(static_files_folders, os.path.basename(os.path.normpath(static_files_folder))), static_files_folder) helpers.chmod_tree(static_files_folder) print "Static Files Restored into '"+static_files_folder+"'." 
# Restore Template Folders template_folders = settings.TEMPLATE_DIRS template_files_folders = os.path.join(target_folder, helpers.TEMPLATE_DIRS) for template_files_folder in template_folders: try: shutil.rmtree(template_files_folder) except: pass if not os.path.exists(template_files_folder): os.makedirs(template_files_folder) helpers.copy_tree(os.path.join(template_files_folders, os.path.basename(os.path.normpath(template_files_folder))), template_files_folder) helpers.chmod_tree(template_files_folder) print "Template Files Restored into '"+template_files_folder+"'." # Restore Locale Folders locale_folders = settings.LOCALE_PATHS locale_files_folders = os.path.join(target_folder, helpers.LOCALE_PATHS) for locale_files_folder in locale_folders: try: shutil.rmtree(locale_files_folder) except: pass if not os.path.exists(locale_files_folder): os.makedirs(locale_files_folder) helpers.copy_tree(os.path.join(locale_files_folders, os.path.basename(os.path.normpath(locale_files_folder))), locale_files_folder) helpers.chmod_tree(locale_files_folder) print "Locale Files Restored into '"+locale_files_folder+"'." # Cleanup DB try: db_name = settings.DATABASES['default']['NAME'] db_user = settings.DATABASES['default']['USER'] db_port = settings.DATABASES['default']['PORT'] db_host = settings.DATABASES['default']['HOST'] db_passwd = settings.DATABASES['default']['PASSWORD'] helpers.cleanup_db(db_name, db_user, db_port, db_host, db_passwd) except: traceback.print_exc() print "Restore finished. Please find restored files and dumps into:" return str(target_folder) finally: # Reactivate GeoNode Signals print "Reactivating GeoNode Signals..." resignals() print "...done!"
def handle(self, **options): # ignore_errors = options.get('ignore_errors') force_exec = options.get('force_exec') source_address = options.get('source_address') target_address = options.get('target_address') if not source_address or len(source_address) == 0: raise CommandError("Source Address '--source-address' is mandatory") if not target_address or len(target_address) == 0: raise CommandError("Target Address '--target-address' is mandatory") print "This will change all Maps, Layers, \ Styles and Links Base URLs from [%s] to [%s]." % (source_address, target_address) print "The operation may take some time, depending on the amount of Layer on GeoNode." message = 'You want to proceed?' if force_exec or helpers.confirm(prompt=message, resp=False): try: # Deactivate GeoNode Signals print "Deactivating GeoNode Signals..." designals() print "...done!" maps = Map.objects.all() for map in maps: print "Checking Map[%s]" % (map) if map.thumbnail_url: map.thumbnail_url = map.thumbnail_url.replace(source_address, target_address) map_layers = map.layers for layer in map_layers: if layer.ows_url: original = layer.ows_url layer.ows_url = layer.ows_url.replace(source_address, target_address) print "Updated OWS URL from [%s] to [%s]" % (original, layer.ows_url) if layer.layer_params: layer.layer_params = layer.layer_params.replace(source_address, target_address) print "Updated Layer Params also for Layer [%s]" % (layer) layer.save() map.save() print "Updated Map[%s]" % (map) layers = Layer.objects.all() for layer in layers: print "Checking Layer[%s]" % (layer) if layer.thumbnail_url: original = layer.thumbnail_url layer.thumbnail_url = layer.thumbnail_url.replace(source_address, target_address) layer.save() print "Updated Thumbnail URL from [%s] to [%s]" % (original, layer.thumbnail_url) styles = Style.objects.all() for style in styles: print "Checking Style[%s]" % (style) if style.sld_url: original = style.sld_url style.sld_url = style.sld_url.replace(source_address, 
target_address) style.save() print "Updated SLD URL from [%s] to [%s]" % (original, style.sld_url) links = Link.objects.all() for link in links: print "Checking Link[%s]" % (link) if link.url: original = link.url link.url = link.url.replace(source_address, target_address) link.save() print "Updated URL from [%s] to [%s]" % (original, link.url) resources = ResourceBase.objects.all() for res in resources: print "Checking Resource[%s]" % (res) if res.metadata_xml: original = res.metadata_xml res.metadata_xml = res.metadata_xml.replace(source_address, target_address) res.save() print "Updated URL in metadata XML for resource [%s]" % (res) finally: # Reactivate GeoNode Signals print "Reactivating GeoNode Signals..." resignals() print "...done!"
def handle(self, **options):
    """Create a full GeoNode backup: GeoServer catalog (via the BR REST
    API) and data dir, DB fixtures, media/static/template/locale files,
    all zipped into one timestamped archive under --backup-dir.
    """
    # ignore_errors = options.get('ignore_errors')
    force_exec = options.get('force_exec')
    backup_dir = options.get('backup_dir')

    if not backup_dir or len(backup_dir) == 0:
        raise CommandError(
            "Destination folder '--backup-dir' is mandatory")

    print "Before proceeding with the Backup, please ensure that:"
    print " 1. The backend (DB or whatever) is accessible and you have rights"
    print " 2. The GeoServer is up and running and reachable from this machine"

    message = 'You want to proceed?'
    if force_exec or helpers.confirm(prompt=message, resp=False):
        # Create Target Folder
        dir_time_suffix = helpers.get_dir_time_suffix()
        target_folder = os.path.join(backup_dir, dir_time_suffix)
        if not os.path.exists(target_folder):
            os.makedirs(target_folder)
        # rwxr-xr-x; Python 2 octal literal. GeoServer writes the catalog
        # zip into this folder, so it must be traversable/readable.
        os.chmod(target_folder, 0755)

        # Create GeoServer Backup
        url = settings.OGC_SERVER['default']['PUBLIC_LOCATION']
        user = settings.OGC_SERVER['default']['USER']
        passwd = settings.OGC_SERVER['default']['PASSWORD']
        geoserver_bk_file = os.path.join(target_folder,
                                         'geoserver_catalog.zip')

        print "Dumping 'GeoServer Catalog [" + url + "]' into '" + geoserver_bk_file + "'."
        # BK_BEST_EFFORT makes GeoServer skip resources it cannot back up
        # instead of failing the whole job.
        data = {
            'backup': {
                'archiveFile': geoserver_bk_file,
                'overwrite': 'true',
                'options': {
                    'option': ['BK_BEST_EFFORT=true']
                }
            }
        }
        headers = {'Content-type': 'application/json'}
        r = requests.post(url + 'rest/br/backup/',
                          data=json.dumps(data),
                          headers=headers,
                          auth=HTTPBasicAuth(user, passwd))
        if (r.status_code > 201):
            # Backup request rejected: fetch execution progress (if
            # available) for diagnostics, then fail hard.
            gs_backup = r.json()
            gs_bk_exec_id = gs_backup['backup']['execution']['id']
            r = requests.get(url + 'rest/br/backup/' + str(gs_bk_exec_id) +
                             '.json',
                             auth=HTTPBasicAuth(user, passwd))
            if (r.status_code == 200):
                gs_backup = r.json()
                gs_bk_progress = gs_backup['backup']['execution'][
                    'progress']
                print gs_bk_progress

            raise ValueError(
                'Could not successfully backup GeoServer catalog [' + url +
                'rest/br/backup/]: ' + str(r.status_code) + ' - ' +
                str(r.text))
        else:
            # Backup accepted: poll the asynchronous execution until it
            # reaches a terminal state (COMPLETED or FAILED).
            gs_backup = r.json()
            gs_bk_exec_id = gs_backup['backup']['execution']['id']
            r = requests.get(url + 'rest/br/backup/' + str(gs_bk_exec_id) +
                             '.json',
                             auth=HTTPBasicAuth(user, passwd))
            if (r.status_code == 200):
                # NOTE(review): status/progress here are read from the
                # POST response body (gs_backup), not from the fresh GET
                # just above — looks stale; confirm intended.
                gs_bk_exec_status = gs_backup['backup']['execution'][
                    'status']
                gs_bk_exec_progress = gs_backup['backup']['execution'][
                    'progress']
                gs_bk_exec_progress_updated = '0/0'
                while (gs_bk_exec_status != 'COMPLETED'
                       and gs_bk_exec_status != 'FAILED'):
                    if (gs_bk_exec_progress != gs_bk_exec_progress_updated):
                        gs_bk_exec_progress_updated = gs_bk_exec_progress
                    r = requests.get(url + 'rest/br/backup/' +
                                     str(gs_bk_exec_id) + '.json',
                                     auth=HTTPBasicAuth(user, passwd))
                    if (r.status_code == 200):
                        gs_backup = r.json()
                        gs_bk_exec_status = gs_backup['backup'][
                            'execution']['status']
                        gs_bk_exec_progress = gs_backup['backup'][
                            'execution']['progress']
                        print str(gs_bk_exec_status
                                  ) + ' - ' + gs_bk_exec_progress
                        # Throttle the polling loop.
                        time.sleep(3)
                    else:
                        raise ValueError(
                            'Could not successfully backup GeoServer catalog ['
                            + url + 'rest/br/backup/]: ' +
                            str(r.status_code) + ' - ' + str(r.text))
            else:
                raise ValueError(
                    'Could not successfully backup GeoServer catalog [' +
                    url + 'rest/br/backup/]: ' + str(r.status_code) +
                    ' - ' + str(r.text))

        # Dump GeoServer Data
        if (helpers.GS_DATA_DIR):
            if (helpers.GS_DUMP_RASTER_DATA):
                # Dump '$GS_DATA_DIR/data/geonode'
                gs_data_root = os.path.join(helpers.GS_DATA_DIR, 'data',
                                            'geonode')
                gs_data_folder = os.path.join(target_folder, 'gs_data_dir',
                                              'data', 'geonode')
                if not os.path.exists(gs_data_folder):
                    os.makedirs(gs_data_folder)

                helpers.copy_tree(gs_data_root, gs_data_folder)
                print "Dumped GeoServer Uploaded Data from '" + gs_data_root + "'."

            if (helpers.GS_DUMP_VECTOR_DATA):
                # Dump Vectorial Data from DB
                datastore = settings.OGC_SERVER['default']['DATASTORE']
                if (datastore):
                    ogc_db_name = settings.DATABASES[datastore]['NAME']
                    ogc_db_user = settings.DATABASES[datastore]['USER']
                    ogc_db_passwd = settings.DATABASES[datastore]['PASSWORD']
                    ogc_db_host = settings.DATABASES[datastore]['HOST']
                    ogc_db_port = settings.DATABASES[datastore]['PORT']

                    gs_data_folder = os.path.join(target_folder,
                                                  'gs_data_dir', 'data',
                                                  'geonode')
                    if not os.path.exists(gs_data_folder):
                        os.makedirs(gs_data_folder)

                    helpers.dump_db(ogc_db_name, ogc_db_user, ogc_db_port,
                                    ogc_db_host, ogc_db_passwd,
                                    gs_data_folder)

        try:
            # Deactivate GeoNode Signals while dumping fixtures, so no
            # backend synchronization is triggered.
            print "Deactivating GeoNode Signals..."
            designals()
            print "...done!"

            # Dump Fixtures
            for app_name, dump_name in zip(helpers.app_names,
                                           helpers.dump_names):
                print "Dumping '" + app_name + "' into '" + dump_name + ".json'."
                # Point stdout at a file for dumping data to.
                output = open(
                    os.path.join(target_folder, dump_name + '.json'), 'w')
                call_command('dumpdata', app_name, format='json', indent=2,
                             natural=True, stdout=output)
                output.close()

            # Store Media Root
            media_root = settings.MEDIA_ROOT
            media_folder = os.path.join(target_folder, helpers.MEDIA_ROOT)
            if not os.path.exists(media_folder):
                os.makedirs(media_folder)

            helpers.copy_tree(media_root, media_folder)
            print "Saved Media Files from '" + media_root + "'."

            # Store Static Root
            static_root = settings.STATIC_ROOT
            static_folder = os.path.join(target_folder, helpers.STATIC_ROOT)
            if not os.path.exists(static_folder):
                os.makedirs(static_folder)

            helpers.copy_tree(static_root, static_folder)
            print "Saved Static Root from '" + static_root + "'."

            # Store Static Folders
            static_folders = settings.STATICFILES_DIRS
            static_files_folders = os.path.join(target_folder,
                                                helpers.STATICFILES_DIRS)
            if not os.path.exists(static_files_folders):
                os.makedirs(static_files_folders)

            for static_files_folder in static_folders:
                static_folder = os.path.join(
                    static_files_folders,
                    os.path.basename(
                        os.path.normpath(static_files_folder)))
                if not os.path.exists(static_folder):
                    os.makedirs(static_folder)

                helpers.copy_tree(static_files_folder, static_folder)
                print "Saved Static Files from '" + static_files_folder + "'."

            # Store Template Folders
            template_folders = settings.TEMPLATE_DIRS
            template_files_folders = os.path.join(target_folder,
                                                  helpers.TEMPLATE_DIRS)
            if not os.path.exists(template_files_folders):
                os.makedirs(template_files_folders)

            for template_files_folder in template_folders:
                template_folder = os.path.join(
                    template_files_folders,
                    os.path.basename(
                        os.path.normpath(template_files_folder)))
                if not os.path.exists(template_folder):
                    os.makedirs(template_folder)

                helpers.copy_tree(template_files_folder, template_folder)
                print "Saved Template Files from '" + template_files_folder + "'."

            # Store Locale Folders
            locale_folders = settings.LOCALE_PATHS
            locale_files_folders = os.path.join(target_folder,
                                                helpers.LOCALE_PATHS)
            if not os.path.exists(locale_files_folders):
                os.makedirs(locale_files_folders)

            for locale_files_folder in locale_folders:
                locale_folder = os.path.join(
                    locale_files_folders,
                    os.path.basename(
                        os.path.normpath(locale_files_folder)))
                if not os.path.exists(locale_folder):
                    os.makedirs(locale_folder)

                helpers.copy_tree(locale_files_folder, locale_folder)
                print "Saved Locale Files from '" + locale_files_folder + "'."

            # Create Final ZIP Archive
            helpers.zip_dir(
                target_folder,
                os.path.join(backup_dir, dir_time_suffix + '.zip'))

            # Cleanup Temp Folder
            shutil.rmtree(target_folder)

            print "Backup Finished. Archive generated."

            return str(os.path.join(backup_dir, dir_time_suffix + '.zip'))
        finally:
            # Reactivate GeoNode Signals
            print "Reactivating GeoNode Signals..."
            resignals()
            print "...done!"
def handle(self, **options): # ignore_errors = options.get('ignore_errors') force_exec = options.get('force_exec') source_address = options.get('source_address') target_address = options.get('target_address') if not source_address or len(source_address) == 0: raise CommandError( "Source Address '--source-address' is mandatory") if not target_address or len(target_address) == 0: raise CommandError( "Target Address '--target-address' is mandatory") print "This will change all Maps, Layers, \ Styles and Links Base URLs from [%s] to [%s]." % (source_address, target_address) print "The operation may take some time, depending on the amount of Layer on GeoNode." message = 'You want to proceed?' if force_exec or helpers.confirm(prompt=message, resp=False): try: # Deactivate GeoNode Signals print "Deactivating GeoNode Signals..." designals() print "...done!" maps = Map.objects.all() for map in maps: print "Checking Map[%s]" % (map) if map.thumbnail_url: map.thumbnail_url = map.thumbnail_url.replace( source_address, target_address) map_layers = map.layers for layer in map_layers: if layer.ows_url: original = layer.ows_url layer.ows_url = layer.ows_url.replace( source_address, target_address) print "Updated OWS URL from [%s] to [%s]" % ( original, layer.ows_url) if layer.layer_params: layer.layer_params = layer.layer_params.replace( source_address, target_address) print "Updated Layer Params also for Layer [%s]" % ( layer) layer.save() map.save() print "Updated Map[%s]" % (map) layers = Layer.objects.all() for layer in layers: print "Checking Layer[%s]" % (layer) if layer.thumbnail_url: original = layer.thumbnail_url layer.thumbnail_url = layer.thumbnail_url.replace( source_address, target_address) layer.save() print "Updated Thumbnail URL from [%s] to [%s]" % ( original, layer.thumbnail_url) styles = Style.objects.all() for style in styles: print "Checking Style[%s]" % (style) if style.sld_url: original = style.sld_url style.sld_url = style.sld_url.replace( source_address, 
target_address) style.save() print "Updated SLD URL from [%s] to [%s]" % ( original, style.sld_url) links = Link.objects.all() for link in links: print "Checking Link[%s]" % (link) if link.url: original = link.url link.url = link.url.replace(source_address, target_address) link.save() print "Updated URL from [%s] to [%s]" % (original, link.url) resources = ResourceBase.objects.all() for res in resources: print "Checking Resource[%s]" % (res) if res.metadata_xml: original = res.metadata_xml res.metadata_xml = res.metadata_xml.replace( source_address, target_address) res.save() print "Updated URL in metadata XML for resource [%s]" % ( res) finally: # Reactivate GeoNode Signals print "Reactivating GeoNode Signals..." resignals() print "...done!"
def handle(self, **options):
    """Create a full GeoNode backup: GeoServer catalog (via the BR REST
    API) and data dir, DB fixtures, media/static/template/locale files,
    all zipped into one timestamped archive under --backup-dir.
    """
    # ignore_errors = options.get('ignore_errors')
    force_exec = options.get('force_exec')
    backup_dir = options.get('backup_dir')

    if not backup_dir or len(backup_dir) == 0:
        raise CommandError("Destination folder '--backup-dir' is mandatory")

    print "Before proceeding with the Backup, please ensure that:"
    print " 1. The backend (DB or whatever) is accessible and you have rights"
    print " 2. The GeoServer is up and running and reachable from this machine"

    message = 'You want to proceed?'
    if force_exec or helpers.confirm(prompt=message, resp=False):
        # Create Target Folder
        dir_time_suffix = helpers.get_dir_time_suffix()
        target_folder = os.path.join(backup_dir, dir_time_suffix)
        if not os.path.exists(target_folder):
            os.makedirs(target_folder)
        # rwxr-xr-x; Python 2 octal literal. GeoServer writes the catalog
        # zip into this folder, so it must be traversable/readable.
        os.chmod(target_folder, 0755)

        # Create GeoServer Backup
        url = settings.OGC_SERVER['default']['PUBLIC_LOCATION']
        user = settings.OGC_SERVER['default']['USER']
        passwd = settings.OGC_SERVER['default']['PASSWORD']
        geoserver_bk_file = os.path.join(target_folder, 'geoserver_catalog.zip')

        print "Dumping 'GeoServer Catalog ["+url+"]' into '"+geoserver_bk_file+"'."
        # BK_BEST_EFFORT makes GeoServer skip resources it cannot back up
        # instead of failing the whole job.
        data = {'backup': {'archiveFile': geoserver_bk_file,
                           'overwrite': 'true',
                           'options': {'option': ['BK_BEST_EFFORT=true']}}}
        headers = {'Content-type': 'application/json'}
        r = requests.post(url + 'rest/br/backup/', data=json.dumps(data),
                          headers=headers, auth=HTTPBasicAuth(user, passwd))
        if (r.status_code > 201):
            # Backup request rejected: fetch execution progress (if
            # available) for diagnostics, then fail hard.
            gs_backup = r.json()
            gs_bk_exec_id = gs_backup['backup']['execution']['id']
            r = requests.get(url + 'rest/br/backup/' + str(gs_bk_exec_id) + '.json',
                             auth=HTTPBasicAuth(user, passwd))
            if (r.status_code == 200):
                gs_backup = r.json()
                gs_bk_progress = gs_backup['backup']['execution']['progress']
                print gs_bk_progress

            raise ValueError('Could not successfully backup GeoServer catalog [' + url +
                             'rest/br/backup/]: ' + str(r.status_code) + ' - ' + str(r.text))
        else:
            # Backup accepted: poll the asynchronous execution until it
            # reaches a terminal state (COMPLETED or FAILED).
            gs_backup = r.json()
            gs_bk_exec_id = gs_backup['backup']['execution']['id']
            r = requests.get(url + 'rest/br/backup/' + str(gs_bk_exec_id) + '.json',
                             auth=HTTPBasicAuth(user, passwd))
            if (r.status_code == 200):
                # NOTE(review): status/progress here are read from the
                # POST response body (gs_backup), not from the fresh GET
                # just above — looks stale; confirm intended.
                gs_bk_exec_status = gs_backup['backup']['execution']['status']
                gs_bk_exec_progress = gs_backup['backup']['execution']['progress']
                gs_bk_exec_progress_updated = '0/0'
                while (gs_bk_exec_status != 'COMPLETED' and gs_bk_exec_status != 'FAILED'):
                    if (gs_bk_exec_progress != gs_bk_exec_progress_updated):
                        gs_bk_exec_progress_updated = gs_bk_exec_progress
                    r = requests.get(url + 'rest/br/backup/' + str(gs_bk_exec_id) + '.json',
                                     auth=HTTPBasicAuth(user, passwd))
                    if (r.status_code == 200):
                        gs_backup = r.json()
                        gs_bk_exec_status = gs_backup['backup']['execution']['status']
                        gs_bk_exec_progress = gs_backup['backup']['execution']['progress']
                        print str(gs_bk_exec_status) + ' - ' + gs_bk_exec_progress
                        # Throttle the polling loop.
                        time.sleep(3)
                    else:
                        raise ValueError('Could not successfully backup GeoServer catalog [' + url +
                                         'rest/br/backup/]: ' + str(r.status_code) + ' - ' + str(r.text))
            else:
                raise ValueError('Could not successfully backup GeoServer catalog [' + url +
                                 'rest/br/backup/]: ' + str(r.status_code) + ' - ' + str(r.text))

        # Dump GeoServer Data
        if (helpers.GS_DATA_DIR):
            if (helpers.GS_DUMP_RASTER_DATA):
                # Dump '$GS_DATA_DIR/data/geonode'
                gs_data_root = os.path.join(helpers.GS_DATA_DIR, 'data', 'geonode')
                gs_data_folder = os.path.join(target_folder, 'gs_data_dir', 'data', 'geonode')
                if not os.path.exists(gs_data_folder):
                    os.makedirs(gs_data_folder)

                helpers.copy_tree(gs_data_root, gs_data_folder)
                print "Dumped GeoServer Uploaded Data from '"+gs_data_root+"'."

            if (helpers.GS_DUMP_VECTOR_DATA):
                # Dump Vectorial Data from DB
                datastore = settings.OGC_SERVER['default']['DATASTORE']
                if (datastore):
                    ogc_db_name = settings.DATABASES[datastore]['NAME']
                    ogc_db_user = settings.DATABASES[datastore]['USER']
                    ogc_db_passwd = settings.DATABASES[datastore]['PASSWORD']
                    ogc_db_host = settings.DATABASES[datastore]['HOST']
                    ogc_db_port = settings.DATABASES[datastore]['PORT']

                    gs_data_folder = os.path.join(target_folder, 'gs_data_dir', 'data', 'geonode')
                    if not os.path.exists(gs_data_folder):
                        os.makedirs(gs_data_folder)

                    helpers.dump_db(ogc_db_name, ogc_db_user, ogc_db_port, ogc_db_host,
                                    ogc_db_passwd, gs_data_folder)

        try:
            # Deactivate GeoNode Signals while dumping fixtures, so no
            # backend synchronization is triggered.
            print "Deactivating GeoNode Signals..."
            designals()
            print "...done!"

            # Dump Fixtures
            for app_name, dump_name in zip(helpers.app_names, helpers.dump_names):
                print "Dumping '"+app_name+"' into '"+dump_name+".json'."
                # Point stdout at a file for dumping data to.
                output = open(os.path.join(target_folder, dump_name+'.json'), 'w')
                call_command('dumpdata', app_name, format='json', indent=2,
                             natural=True, stdout=output)
                output.close()

            # Store Media Root
            media_root = settings.MEDIA_ROOT
            media_folder = os.path.join(target_folder, helpers.MEDIA_ROOT)
            if not os.path.exists(media_folder):
                os.makedirs(media_folder)

            helpers.copy_tree(media_root, media_folder)
            print "Saved Media Files from '"+media_root+"'."

            # Store Static Root
            static_root = settings.STATIC_ROOT
            static_folder = os.path.join(target_folder, helpers.STATIC_ROOT)
            if not os.path.exists(static_folder):
                os.makedirs(static_folder)

            helpers.copy_tree(static_root, static_folder)
            print "Saved Static Root from '"+static_root+"'."

            # Store Static Folders
            static_folders = settings.STATICFILES_DIRS
            static_files_folders = os.path.join(target_folder, helpers.STATICFILES_DIRS)
            if not os.path.exists(static_files_folders):
                os.makedirs(static_files_folders)

            for static_files_folder in static_folders:
                static_folder = os.path.join(static_files_folders,
                                             os.path.basename(os.path.normpath(static_files_folder)))
                if not os.path.exists(static_folder):
                    os.makedirs(static_folder)

                helpers.copy_tree(static_files_folder, static_folder)
                print "Saved Static Files from '"+static_files_folder+"'."

            # Store Template Folders
            template_folders = settings.TEMPLATE_DIRS
            template_files_folders = os.path.join(target_folder, helpers.TEMPLATE_DIRS)
            if not os.path.exists(template_files_folders):
                os.makedirs(template_files_folders)

            for template_files_folder in template_folders:
                template_folder = os.path.join(template_files_folders,
                                               os.path.basename(os.path.normpath(template_files_folder)))
                if not os.path.exists(template_folder):
                    os.makedirs(template_folder)

                helpers.copy_tree(template_files_folder, template_folder)
                print "Saved Template Files from '"+template_files_folder+"'."

            # Store Locale Folders
            locale_folders = settings.LOCALE_PATHS
            locale_files_folders = os.path.join(target_folder, helpers.LOCALE_PATHS)
            if not os.path.exists(locale_files_folders):
                os.makedirs(locale_files_folders)

            for locale_files_folder in locale_folders:
                locale_folder = os.path.join(locale_files_folders,
                                             os.path.basename(os.path.normpath(locale_files_folder)))
                if not os.path.exists(locale_folder):
                    os.makedirs(locale_folder)

                helpers.copy_tree(locale_files_folder, locale_folder)
                print "Saved Locale Files from '"+locale_files_folder+"'."

            # Create Final ZIP Archive
            helpers.zip_dir(target_folder, os.path.join(backup_dir, dir_time_suffix+'.zip'))

            # Cleanup Temp Folder
            shutil.rmtree(target_folder)

            print "Backup Finished. Archive generated."

            return str(os.path.join(backup_dir, dir_time_suffix+'.zip'))
        finally:
            # Reactivate GeoNode Signals
            print "Reactivating GeoNode Signals..."
            resignals()
            print "...done!"