def koji_build(update_file=None, skip_build=False):
    """Run (or skip) a koji build and show the resulting update entry.

    :param update_file: optional path; when set, the guessed build is also
                        dumped into this file
    :param skip_build: when True, reuse the NVR of the current dist-git
                       state instead of submitting a new build
    :raises exception.CantGuess: when build arguments can't be derived
                                 from the build id
    """
    if skip_build:
        log.info("\nSkipping koji build due to -s/--skip-build")
        fcmd = kojibuild.get_fedpkg_commands()
        build_id = fcmd.nvr
    else:
        if git.branch_needs_push():
            helpers.confirm("It seems local distgit branch needs push. Push "
                            "now?")
            git('push')
        build_id = kojibuild.new_build()
    build = kojibuild.guess_build(build_id)
    # guard against guess_build() returning nothing (e.g. unknown branch);
    # mirrors the check performed by the other koji_build variant in this file
    if not build:
        raise exception.CantGuess(
            what="build arguments",
            why="Unknown branch? Check `rdopkg pkgenv` and `rdopkg info`")
    _show_update_entry(build)
    if update_file:
        _update.dump_build(build, update_file)
def koji_build(update_file=None, skip_build=False):
    """Run (or skip) a koji build, guess its parameters and display them.

    With `skip_build`, the NVR of the current dist-git checkout is reused
    instead of submitting a new build. When `update_file` is given, the
    guessed build is additionally dumped into that file.
    """
    if skip_build:
        # reuse the existing NVR rather than building
        log.info("\nSkipping koji build due to -s/--skip-build")
        build_id = kojibuild.get_fedpkg_commands().nvr
    else:
        if git.branch_needs_push():
            helpers.confirm("It seems local distgit branch needs push. Push "
                            "now?")
            git("push")
        build_id = kojibuild.new_build()
    build = kojibuild.guess_build(build_id)
    if not build:
        raise exception.CantGuess(
            what="build arguments",
            why="Unknown branch? Check `rdopkg pkgenv` and `rdopkg info`")
    _show_update_entry(build)
    if update_file:
        _update.dump_build(build, update_file)
def handle(self, **options):
    """Rewrite stored base URLs from --source-address to --target-address.

    Bulk-updates URL fields on Maps, MapLayers, Layers, Styles, Links and
    ResourceBases via a SQL-level replace(), keeping GeoNode signals
    disabled while rows are touched.
    """
    force_exec = options.get('force_exec')
    source_address = options.get('source_address')
    target_address = options.get('target_address')
    # both addresses are mandatory CLI options
    if not source_address or len(source_address) == 0:
        raise CommandError("Source Address '--source-address' is mandatory")
    if not target_address or len(target_address) == 0:
        raise CommandError("Target Address '--target-address' is mandatory")
    print "This will change all Maps, Layers, \
Styles and Links Base URLs from [%s] to [%s]." % (source_address, target_address)
    print "The operation may take some time, depending on the amount of Layer on GeoNode."
    message = 'You want to proceed?'
    if force_exec or helpers.confirm(prompt=message, resp=False):
        try:
            # Deactivate GeoNode Signals
            print "Deactivating GeoNode Signals..."
            designals()
            print "...done!"
            _cnt = Map.objects.filter(thumbnail_url__icontains=source_address).update(
                thumbnail_url=Func(
                    F('thumbnail_url'), Value(source_address), Value(target_address), function='replace'))
            print "Updated %s Maps" % _cnt
            _cnt = MapLayer.objects.filter(ows_url__icontains=source_address).update(
                ows_url=Func(
                    F('ows_url'), Value(source_address), Value(target_address), function='replace'))
            # NOTE(review): the layer_params update count below is discarded, so the
            # "MapLayers" figure only reflects ows_url rows — confirm this is intended.
            MapLayer.objects.filter(layer_params__icontains=source_address).update(
                layer_params=Func(
                    F('layer_params'), Value(source_address), Value(target_address), function='replace'))
            print "Updated %s MapLayers" % _cnt
            _cnt = Layer.objects.filter(thumbnail_url__icontains=source_address).update(
                thumbnail_url=Func(
                    F('thumbnail_url'), Value(source_address), Value(target_address), function='replace'))
            print "Updated %s Layers" % _cnt
            _cnt = Style.objects.filter(sld_url__icontains=source_address).update(
                sld_url=Func(
                    F('sld_url'), Value(source_address), Value(target_address), function='replace'))
            print "Updated %s Styles" % _cnt
            _cnt = Link.objects.filter(url__icontains=source_address).update(
                url=Func(
                    F('url'), Value(source_address), Value(target_address), function='replace'))
            print "Updated %s Links" % _cnt
            _cnt = ResourceBase.objects.filter(thumbnail_url__icontains=source_address).update(
                thumbnail_url=Func(
                    F('thumbnail_url'), Value(source_address), Value(target_address), function='replace'))
            print "Updated %s ResourceBases" % _cnt
        finally:
            # Reactivate GeoNode Signals — always restored, even on error
            print "Reactivating GeoNode Signals..."
            resignals()
            print "...done!"
def copr_check(release=None, dist=None):
    """Validate copr build prerequisites and resolve the target dist.

    Requires an explicit release; when no dist is given, tries to detect
    one from the release's builds. Returns {'dist': <resolved dist>}.
    """
    if guess.osdist() == "RHOS":
        # building an RHOS package in a public copr is probably a mistake
        helpers.confirm("Look like you're trying to build RHOS package in "
                        "public copr.\nProceed anyway?")
    if not release:
        raise exception.CantGuess(what="release",
                                  why="Specify with -r/--release")
    if not dist:
        for candidate, source in guess.builds(release=release):
            if source.startswith("copr/jruzicka"):
                dist = candidate
                log.info("Autodetected dist: %s" % dist)
                break
        if not dist:
            raise exception.CantGuess(what="dist",
                                      why="Specify with -d/--dist")
    return {"dist": dist}
def delete(config_name, config_dir=None):
    """Remove the stored e-mail list for `config_name` after confirmation.

    Defaults `config_dir` to the package's ../private directory; when the
    file exists and the user confirms, it is deleted and the removal is
    committed and pushed.
    """
    if config_dir is None:
        config_dir = path.join(path.dirname(__file__), '..', 'private')
    txt = path.join(config_dir, 'infos', config_name + '.txt')
    if not path.isfile(txt):
        return
    if helpers.confirm('Delete emails for {}'.format(config_name)):
        os.remove(txt)
        _commit_push(config_name, 'Delete', config_dir)
def handle(self, **options):
    """Migrate Layers from a backup ZIP archive into the GeoNode DB.

    Deserializes the dumped fixtures on top of the current DB content,
    re-keying objects above the highest existing ResourceBase pk and
    assigning them to --owner.
    """
    # ignore_errors = options.get('ignore_errors')
    config = helpers.Config(options)
    backup_file = options.get('backup_file')
    owner = options.get('owner')
    if not backup_file or len(backup_file) == 0:
        raise CommandError("Backup archive '--backup-file' is mandatory")
    if not owner or len(owner) == 0:
        raise CommandError("Owner '--owner' is mandatory")
    message = 'WARNING: The migration may break GeoNode existing Layers. You want to proceed?'
    if helpers.confirm(prompt=message, resp=False):
        """Migrate existing Layers on GeoNode DB"""
        try:
            # Create Target Folder
            restore_folder = os.path.join(tempfile.gettempdir(), 'restore')
            if not os.path.exists(restore_folder):
                os.makedirs(restore_folder)
            # Extract ZIP Archive to Target Folder
            target_folder = helpers.unzip_file(backup_file, restore_folder)
            # Retrieve the max Primary Key from the DB so restored objects
            # are re-keyed above every existing one
            from geonode.base.models import ResourceBase
            try:
                higher_pk = ResourceBase.objects.all().order_by("-id")[0].pk
            except:
                # empty table — start re-keying from 0
                higher_pk = 0
            # Restore Fixtures: only apps that have a registered mangler
            for app_name, dump_name in zip(config.app_names, config.dump_names):
                for mig_name, mangler in zip(config.migrations, config.manglers):
                    if app_name == mig_name:
                        fixture_file = os.path.join(target_folder, dump_name+'.json')
                        print "Deserializing "+fixture_file
                        mangler = helpers.load_class(mangler)
                        obj = helpers.load_fixture(app_name,
                                                   fixture_file,
                                                   mangler=mangler,
                                                   basepk=higher_pk,
                                                   owner=owner,
                                                   datastore=settings.OGC_SERVER['default']['DATASTORE'],
                                                   siteurl=settings.SITEURL)
                        from django.core import serializers
                        objects = serializers.deserialize('json', json.dumps(obj),
                                                          ignorenonexistent=True)
                        for obj in objects:
                            obj.save(using=DEFAULT_DB_ALIAS)
            print "Restore finished. Please find restored files and dumps into: '"+target_folder+"'."
        except Exception:
            # best-effort: report the failure without aborting the command
            traceback.print_exc()
def rebase_patches_branch(
        new_version, local_patches_branch, patches_branch=None,
        local_patches=False, bump_only=False, version_tag_style=None):
    """Rebase the local patches branch onto the tag for `new_version`.

    Unless working with local patches only, offer to force-push the
    rebased branch (and the version tag) to the remote patches branch.
    A user abort of the push confirmation is swallowed — the local rebase
    itself is kept.
    """
    if bump_only:
        return
    git.checkout(local_patches_branch)
    version_tag = guess.version2tag(new_version, version_tag_style)
    git("rebase", version_tag, direct=True)
    if local_patches or not patches_branch:
        # nothing to push anywhere
        return
    if _is_same_commit(local_patches_branch, patches_branch):
        log.info("%s is up to date, no need for push." % patches_branch)
        return
    try:
        remote, branch = git.remote_branch_split(patches_branch)
        helpers.confirm("Push %s to %s / %s (with --force)?"
                        % (local_patches_branch, remote, branch))
        refspec = "%s:%s" % (local_patches_branch, branch)
        git("push", "--force", remote, refspec)
        # the new version tag must follow the branch
        git("push", "--force", remote, version_tag)
    except exception.UserAbort:
        pass
def check_new_patches(version, local_patches_branch,
                      local_patches=False, patches_branch=None,
                      changes=None):
    """Collect subjects of patches added on top of `version`.

    Subjects already accounted for in the .spec (applied + excluded
    patches) are trimmed off the end; when nothing new is found, the user
    is asked whether to proceed anyway. Returns {'changes': <list>}.
    """
    changes = changes or []
    if not local_patches and not patches_branch:
        raise exception.RequiredActionArgumentNotAvailable(
            action='check_new_patches', arg='patches_branch')
    head = local_patches_branch if local_patches else patches_branch
    spec = specfile.Spec()
    known = spec.get_n_patches() + spec.get_n_excluded_patches()
    subjects = git.get_commit_subjects(version, head)
    if known > 0:
        # drop subjects that correspond to patches already in the spec
        subjects = subjects[0:-known]
    if not subjects:
        log.warn("No new patches detected in %s." % head)
        helpers.confirm("Do you want to continue anyway?", default_yes=False)
    changes.extend(subjects)
    return {'changes': changes}
def rebase_patches_branch(new_version, local_patches_branch,
                          patches_branch=None, local_patches=False,
                          bump_only=False):
    """Rebase the local patches branch onto `new_version`.

    When a remote patches branch is configured (and local-patches mode is
    off), offer a force-push of the rebased branch plus the version ref;
    declining the push is not an error.
    """
    if bump_only:
        return
    git.checkout(local_patches_branch)
    git('rebase', new_version, direct=True)
    push_wanted = not local_patches and patches_branch
    if not push_wanted:
        return
    if _is_same_commit(local_patches_branch, patches_branch):
        log.info("%s is up to date, no need for push." % patches_branch)
        return
    try:
        remote, branch = git.remote_branch_split(patches_branch)
        prompt = "Push %s to %s / %s (with --force)?" % (
            local_patches_branch, remote, branch)
        helpers.confirm(prompt)
        git('push', '--force', remote,
            '%s:%s' % (local_patches_branch, branch))
        # the version ref must follow the branch
        git('push', '--force', remote, new_version)
    except exception.UserAbort:
        pass
def get_upstream_patches(version, local_patches_branch,
                         patches_branch=None, upstream_branch=None,
                         new_milestone=None):
    """List patches present locally but missing from the remote patches branch.

    Builds changelog entries (headed by a rebase summary line) and, when
    patches exist, appends a ".pN"/"pN" suffix to the milestone.
    Returns {'changes': [...], 'new_milestone': ...?}.
    """
    patches = git(
        "log", "--cherry-pick", "--pretty=format:\%s",
        "%(remote)s...%(local)s" % {"remote": patches_branch,
                                    "local": local_patches_branch},
    )
    changes = []
    for raw in patches.split("\n"):
        if raw != "":
            # the leading backslash comes from the format string; strip it
            changes.append(raw.strip().replace("\\", ""))
    if not changes:
        log.warn("No new patches detected in %s." % local_patches_branch)
        helpers.confirm("Do you want to continue anyway?", default_yes=False)
    n_patches = len(changes)
    changes.insert(0, ("Rebase %s changes from %s" % (n_patches,
                                                      upstream_branch)))
    args = {"changes": changes}
    if n_patches > 0:
        if new_milestone:
            new_milestone += ".p%d" % n_patches
        else:
            new_milestone = "p%d" % n_patches
        args["new_milestone"] = new_milestone
    return args
def get_upstream_patches(version, local_patches_branch,
                         patches_branch=None, upstream_branch=None,
                         new_milestone=None):
    """Compute changelog entries for local patches missing from the remote.

    Returns {'changes': [...]} and, when at least one patch is found,
    also a milestone suffixed with the patch count.
    """
    log_range = '%(remote)s...%(local)s' % {'remote': patches_branch,
                                            'local': local_patches_branch}
    out = git('log', '--cherry-pick', '--pretty=format:\%s', log_range)
    changes = [line.strip().replace('\\', '')
               for line in out.split('\n')
               if line != '']
    if not changes:
        log.warn("No new patches detected in %s." % local_patches_branch)
        helpers.confirm("Do you want to continue anyway?", default_yes=False)
    n_patches = len(changes)
    changes.insert(0, ("Rebase %s changes from %s" % (n_patches,
                                                      upstream_branch)))
    if n_patches > 0:
        if new_milestone:
            new_milestone += '.p%d' % n_patches
        else:
            new_milestone = 'p%d' % n_patches
        return {'changes': changes, 'new_milestone': new_milestone}
    return {'changes': changes}
def update_nb_hits(self, nb_hits):
    """Persist a changed nb_hits value into the JSON config file.

    Does nothing without a config file or when the stored value already
    equals `nb_hits`; otherwise asks for confirmation before rewriting
    the file.
    """
    if self.config_file is None:
        return
    previous_nb_hits = self.config_content.get('nb_hits')
    if previous_nb_hits is not None and previous_nb_hits == nb_hits:
        # stored value already up to date
        return
    print("previous nb_hits: " + str(previous_nb_hits))
    print("")
    if not helpers.confirm('Do you want to update the nb_hits in ' + self.config_file + ' ?'):
        return
    try:
        self.config_content['nb_hits'] = nb_hits
        with open(self.config_file, 'w') as f:
            f.write(json.dumps(self.config_content, indent=2,
                               separators=(',', ': ')))
        print("")
        print("[OK] " + self.config_file + " has been updated")
    except Exception:
        print("")
        print("[KO] " + "Was not able to update " + self.config_file)
def handle(self, **options):
    """Restore a full GeoNode backup (GeoServer catalog/data, DB fixtures,
    media/static/template/locale trees) from --backup-file or --backup-dir.

    GeoNode signals are disabled during the fixture restore and always
    re-enabled in the finally block.
    """
    # ignore_errors = options.get('ignore_errors')
    config = Config(options)
    force_exec = options.get('force_exec')
    backup_file = options.get('backup_file')
    skip_geoserver = options.get('skip_geoserver')
    backup_dir = options.get('backup_dir')
    # exactly one of --backup-file / --backup-dir must be given
    if not any([backup_file, backup_dir]):
        raise CommandError("Mandatory option (--backup-file|--backup-dir)")
    if all([backup_file, backup_dir]):
        raise CommandError("Exclusive option (--backup-file|--backup-dir)")
    if backup_file and not os.path.isfile(backup_file):
        raise CommandError("Provided '--backup-file' is not a file")
    if backup_dir and not os.path.isdir(backup_dir):
        raise CommandError("Provided '--backup-dir' is not a directory")
    print "Before proceeding with the Restore, please ensure that:"
    print " 1. The backend (DB or whatever) is accessible and you have rights"
    print " 2. The GeoServer is up and running and reachable from this machine"
    message = 'WARNING: The restore will overwrite ALL GeoNode data. You want to proceed?'
    if force_exec or helpers.confirm(prompt=message, resp=False):
        target_folder = backup_dir
        if backup_file:
            # Create Target Folder
            restore_folder = os.path.join(tempfile.gettempdir(), 'restore')
            if not os.path.exists(restore_folder):
                os.makedirs(restore_folder)
            # Extract ZIP Archive to Target Folder
            target_folder = helpers.unzip_file(backup_file, restore_folder)
        if not skip_geoserver:
            self.restore_geoserver_backup(settings, target_folder)
            self.restore_geoserver_raster_data(config, settings, target_folder)
            self.restore_geoserver_vector_data(config, settings, target_folder)
            print("Restoring geoserver external resources")
            self.restore_geoserver_externals(config, settings, target_folder)
        else:
            print("Skipping geoserver backup restore")
        # Prepare Target DB
        try:
            call_command('migrate', interactive=False, load_initial_data=False)
            call_command('flush', interactive=False, load_initial_data=False)
            db_name = settings.DATABASES['default']['NAME']
            db_user = settings.DATABASES['default']['USER']
            db_port = settings.DATABASES['default']['PORT']
            db_host = settings.DATABASES['default']['HOST']
            db_passwd = settings.DATABASES['default']['PASSWORD']
            helpers.patch_db(db_name, db_user, db_port, db_host, db_passwd)
        except:
            # best-effort DB preparation; failures are reported, not fatal
            traceback.print_exc()
        try:
            # Deactivate GeoNode Signals
            print "Deactivating GeoNode Signals..."
            designals()
            print "...done!"
            # Restore Fixtures
            for app_name, dump_name in zip(config.app_names, config.dump_names):
                fixture_file = os.path.join(target_folder, dump_name+'.json')
                print "Deserializing "+fixture_file
                try:
                    call_command('loaddata', fixture_file, app_label=app_name)
                except:
                    traceback.print_exc()
                    print "WARNING: No valid fixture data found for '"+dump_name+"'."
                    # helpers.load_fixture(app_name, fixture_file)
            # Restore Media Root
            media_root = settings.MEDIA_ROOT
            media_folder = os.path.join(target_folder, helpers.MEDIA_ROOT)
            try:
                shutil.rmtree(media_root)
            except:
                pass
            if not os.path.exists(media_root):
                os.makedirs(media_root)
            helpers.copy_tree(media_folder, media_root)
            helpers.chmod_tree(media_root)
            print "Media Files Restored into '"+media_root+"'."
            # Restore Static Root
            static_root = settings.STATIC_ROOT
            static_folder = os.path.join(target_folder, helpers.STATIC_ROOT)
            try:
                shutil.rmtree(static_root)
            except:
                pass
            if not os.path.exists(static_root):
                os.makedirs(static_root)
            helpers.copy_tree(static_folder, static_root)
            helpers.chmod_tree(static_root)
            print "Static Root Restored into '"+static_root+"'."
            # Restore Static Root
            # NOTE(review): this section is an exact duplicate of the one
            # above — harmless (idempotent copy), but likely unintended.
            static_root = settings.STATIC_ROOT
            static_folder = os.path.join(target_folder, helpers.STATIC_ROOT)
            try:
                shutil.rmtree(static_root)
            except:
                pass
            if not os.path.exists(static_root):
                os.makedirs(static_root)
            helpers.copy_tree(static_folder, static_root)
            helpers.chmod_tree(static_root)
            print "Static Root Restored into '"+static_root+"'."
            # Restore Static Folders
            static_folders = settings.STATICFILES_DIRS
            static_files_folders = os.path.join(target_folder, helpers.STATICFILES_DIRS)
            for static_files_folder in static_folders:
                try:
                    shutil.rmtree(static_files_folder)
                except:
                    pass
                if not os.path.exists(static_files_folder):
                    os.makedirs(static_files_folder)
                helpers.copy_tree(os.path.join(static_files_folders,
                                               os.path.basename(os.path.normpath(static_files_folder))),
                                  static_files_folder)
                helpers.chmod_tree(static_files_folder)
                print "Static Files Restored into '"+static_files_folder+"'."
            # Restore Template Folders
            template_folders = settings.TEMPLATE_DIRS
            template_files_folders = os.path.join(target_folder, helpers.TEMPLATE_DIRS)
            for template_files_folder in template_folders:
                try:
                    shutil.rmtree(template_files_folder)
                except:
                    pass
                if not os.path.exists(template_files_folder):
                    os.makedirs(template_files_folder)
                helpers.copy_tree(os.path.join(template_files_folders,
                                               os.path.basename(os.path.normpath(template_files_folder))),
                                  template_files_folder)
                helpers.chmod_tree(template_files_folder)
                print "Template Files Restored into '"+template_files_folder+"'."
            # Restore Locale Folders
            locale_folders = settings.LOCALE_PATHS
            locale_files_folders = os.path.join(target_folder, helpers.LOCALE_PATHS)
            for locale_files_folder in locale_folders:
                try:
                    shutil.rmtree(locale_files_folder)
                except:
                    pass
                if not os.path.exists(locale_files_folder):
                    os.makedirs(locale_files_folder)
                helpers.copy_tree(os.path.join(locale_files_folders,
                                               os.path.basename(os.path.normpath(locale_files_folder))),
                                  locale_files_folder)
                helpers.chmod_tree(locale_files_folder)
                print "Locale Files Restored into '"+locale_files_folder+"'."
            # Cleanup DB
            try:
                db_name = settings.DATABASES['default']['NAME']
                db_user = settings.DATABASES['default']['USER']
                db_port = settings.DATABASES['default']['PORT']
                db_host = settings.DATABASES['default']['HOST']
                db_passwd = settings.DATABASES['default']['PASSWORD']
                helpers.cleanup_db(db_name, db_user, db_port, db_host, db_passwd)
            except:
                traceback.print_exc()
            print "Restore finished. Please find restored files and dumps into:"
            return str(target_folder)
        finally:
            # Reactivate GeoNode Signals — always, even after a failure
            print "Reactivating GeoNode Signals..."
            resignals()
            print "...done!"
def handle(self, **options):
    """Create a full GeoNode backup under --backup-dir.

    Drives the GeoServer REST backup (polling the async execution),
    dumps raster/vector data, Django fixtures and the media/static/
    template/locale trees, then zips everything into a timestamped
    archive and returns its path.
    """
    # ignore_errors = options.get('ignore_errors')
    force_exec = options.get('force_exec')
    backup_dir = options.get('backup_dir')
    if not backup_dir or len(backup_dir) == 0:
        raise CommandError(
            "Destination folder '--backup-dir' is mandatory")
    print "Before proceeding with the Backup, please ensure that:"
    print " 1. The backend (DB or whatever) is accessible and you have rights"
    print " 2. The GeoServer is up and running and reachable from this machine"
    message = 'You want to proceed?'
    if force_exec or helpers.confirm(prompt=message, resp=False):
        # Create Target Folder (timestamped subdirectory of --backup-dir)
        dir_time_suffix = helpers.get_dir_time_suffix()
        target_folder = os.path.join(backup_dir, dir_time_suffix)
        if not os.path.exists(target_folder):
            os.makedirs(target_folder)
        os.chmod(target_folder, 0755)
        # Create GeoServer Backup via the br REST endpoint
        url = settings.OGC_SERVER['default']['PUBLIC_LOCATION']
        user = settings.OGC_SERVER['default']['USER']
        passwd = settings.OGC_SERVER['default']['PASSWORD']
        geoserver_bk_file = os.path.join(target_folder, 'geoserver_catalog.zip')
        print "Dumping 'GeoServer Catalog [" + url + "]' into '" + geoserver_bk_file + "'."
        data = {'backup': {'archiveFile': geoserver_bk_file,
                           'overwrite': 'true',
                           'options': {'option': ['BK_BEST_EFFORT=true']}}}
        headers = {'Content-type': 'application/json'}
        r = requests.post(url + 'rest/br/backup/', data=json.dumps(data),
                          headers=headers,
                          auth=HTTPBasicAuth(user, passwd))
        if (r.status_code > 201):
            # submission failed: surface whatever progress info is available
            gs_backup = r.json()
            gs_bk_exec_id = gs_backup['backup']['execution']['id']
            r = requests.get(url + 'rest/br/backup/' + str(gs_bk_exec_id) + '.json',
                             auth=HTTPBasicAuth(user, passwd))
            if (r.status_code == 200):
                gs_backup = r.json()
                gs_bk_progress = gs_backup['backup']['execution']['progress']
                print gs_bk_progress
            raise ValueError('Could not successfully backup GeoServer catalog [' + url +
                             'rest/br/backup/]: ' + str(r.status_code) + ' - ' + str(r.text))
        else:
            # submission accepted: poll the execution until COMPLETED/FAILED
            gs_backup = r.json()
            gs_bk_exec_id = gs_backup['backup']['execution']['id']
            r = requests.get(url + 'rest/br/backup/' + str(gs_bk_exec_id) + '.json',
                             auth=HTTPBasicAuth(user, passwd))
            if (r.status_code == 200):
                gs_bk_exec_status = gs_backup['backup']['execution']['status']
                gs_bk_exec_progress = gs_backup['backup']['execution']['progress']
                gs_bk_exec_progress_updated = '0/0'
                while (gs_bk_exec_status != 'COMPLETED' and gs_bk_exec_status != 'FAILED'):
                    if (gs_bk_exec_progress != gs_bk_exec_progress_updated):
                        gs_bk_exec_progress_updated = gs_bk_exec_progress
                    r = requests.get(url + 'rest/br/backup/' + str(gs_bk_exec_id) + '.json',
                                     auth=HTTPBasicAuth(user, passwd))
                    if (r.status_code == 200):
                        gs_backup = r.json()
                        gs_bk_exec_status = gs_backup['backup']['execution']['status']
                        gs_bk_exec_progress = gs_backup['backup']['execution']['progress']
                        print str(gs_bk_exec_status) + ' - ' + gs_bk_exec_progress
                        time.sleep(3)
                    else:
                        raise ValueError('Could not successfully backup GeoServer catalog [' + url +
                                         'rest/br/backup/]: ' + str(r.status_code) + ' - ' + str(r.text))
            else:
                raise ValueError('Could not successfully backup GeoServer catalog [' + url +
                                 'rest/br/backup/]: ' + str(r.status_code) + ' - ' +
                                 str(r.text))
        # Dump GeoServer Data
        if (helpers.GS_DATA_DIR):
            if (helpers.GS_DUMP_RASTER_DATA):
                # Dump '$GS_DATA_DIR/data/geonode'
                gs_data_root = os.path.join(helpers.GS_DATA_DIR, 'data', 'geonode')
                gs_data_folder = os.path.join(target_folder, 'gs_data_dir', 'data', 'geonode')
                if not os.path.exists(gs_data_folder):
                    os.makedirs(gs_data_folder)
                helpers.copy_tree(gs_data_root, gs_data_folder)
                print "Dumped GeoServer Uploaded Data from '" + gs_data_root + "'."
            if (helpers.GS_DUMP_VECTOR_DATA):
                # Dump Vectorial Data from DB
                datastore = settings.OGC_SERVER['default']['DATASTORE']
                if (datastore):
                    ogc_db_name = settings.DATABASES[datastore]['NAME']
                    ogc_db_user = settings.DATABASES[datastore]['USER']
                    ogc_db_passwd = settings.DATABASES[datastore]['PASSWORD']
                    ogc_db_host = settings.DATABASES[datastore]['HOST']
                    ogc_db_port = settings.DATABASES[datastore]['PORT']
                    gs_data_folder = os.path.join(target_folder, 'gs_data_dir', 'data', 'geonode')
                    if not os.path.exists(gs_data_folder):
                        os.makedirs(gs_data_folder)
                    helpers.dump_db(ogc_db_name, ogc_db_user, ogc_db_port, ogc_db_host,
                                    ogc_db_passwd, gs_data_folder)
        # Dump Fixtures
        for app_name, dump_name in zip(helpers.app_names, helpers.dump_names):
            print "Dumping '" + app_name + "' into '" + dump_name + ".json'."
            # Point stdout at a file for dumping data to.
            output = open(os.path.join(target_folder, dump_name + '.json'), 'w')
            call_command('dumpdata', app_name, format='json', indent=2,
                         natural=True, stdout=output)
            output.close()
        # Store Media Root
        media_root = settings.MEDIA_ROOT
        media_folder = os.path.join(target_folder, helpers.MEDIA_ROOT)
        if not os.path.exists(media_folder):
            os.makedirs(media_folder)
        helpers.copy_tree(media_root, media_folder)
        print "Saved Media Files from '" + media_root + "'."
        # Store Static Root
        static_root = settings.STATIC_ROOT
        static_folder = os.path.join(target_folder, helpers.STATIC_ROOT)
        if not os.path.exists(static_folder):
            os.makedirs(static_folder)
        helpers.copy_tree(static_root, static_folder)
        print "Saved Static Root from '" + static_root + "'."
        # Store Static Folders (each STATICFILES_DIR under its basename)
        static_folders = settings.STATICFILES_DIRS
        static_files_folders = os.path.join(target_folder, helpers.STATICFILES_DIRS)
        if not os.path.exists(static_files_folders):
            os.makedirs(static_files_folders)
        for static_files_folder in static_folders:
            static_folder = os.path.join(
                static_files_folders,
                os.path.basename(os.path.normpath(static_files_folder)))
            if not os.path.exists(static_folder):
                os.makedirs(static_folder)
            helpers.copy_tree(static_files_folder, static_folder)
            print "Saved Static Files from '" + static_files_folder + "'."
        # Store Template Folders
        template_folders = settings.TEMPLATE_DIRS
        template_files_folders = os.path.join(target_folder, helpers.TEMPLATE_DIRS)
        if not os.path.exists(template_files_folders):
            os.makedirs(template_files_folders)
        for template_files_folder in template_folders:
            template_folder = os.path.join(
                template_files_folders,
                os.path.basename(os.path.normpath(template_files_folder)))
            if not os.path.exists(template_folder):
                os.makedirs(template_folder)
            helpers.copy_tree(template_files_folder, template_folder)
            print "Saved Template Files from '" + template_files_folder + "'."
        # Store Locale Folders
        locale_folders = settings.LOCALE_PATHS
        locale_files_folders = os.path.join(target_folder, helpers.LOCALE_PATHS)
        if not os.path.exists(locale_files_folders):
            os.makedirs(locale_files_folders)
        for locale_files_folder in locale_folders:
            locale_folder = os.path.join(
                locale_files_folders,
                os.path.basename(os.path.normpath(locale_files_folder)))
            if not os.path.exists(locale_folder):
                os.makedirs(locale_folder)
            helpers.copy_tree(locale_files_folder, locale_folder)
            print "Saved Locale Files from '" + locale_files_folder + "'."
        # Create Final ZIP Archive
        helpers.zip_dir(target_folder, os.path.join(backup_dir, dir_time_suffix + '.zip'))
        # Cleanup Temp Folder
        shutil.rmtree(target_folder)
        print "Backup Finished. Archive generated."
        return str(os.path.join(backup_dir, dir_time_suffix + '.zip'))
def handle(self, **options):
    """Rewrite stored base URLs from --source-address to --target-address.

    Bulk-updates URL fields on Maps, MapLayers, Layers, Styles, Links and
    ResourceBases (including csw_anytext and metadata_xml) via a SQL-level
    replace(), keeping GeoNode signals disabled while rows are touched.
    """
    force_exec = options.get('force_exec')
    source_address = options.get('source_address')
    target_address = options.get('target_address')
    # both addresses are mandatory CLI options
    if not source_address or len(source_address) == 0:
        raise CommandError("Source Address '--source-address' is mandatory")
    if not target_address or len(target_address) == 0:
        raise CommandError("Target Address '--target-address' is mandatory")
    print "This will change all Maps, Layers, \
Styles and Links Base URLs from [%s] to [%s]." % (source_address, target_address)
    print "The operation may take some time, depending on the amount of Layer on GeoNode."
    message = 'You want to proceed?'
    if force_exec or helpers.confirm(prompt=message, resp=False):
        try:
            # Deactivate GeoNode Signals
            print "Deactivating GeoNode Signals..."
            designals()
            print "...done!"
            _cnt = Map.objects.filter(thumbnail_url__icontains=source_address).update(
                thumbnail_url=Func(
                    F('thumbnail_url'), Value(source_address), Value(target_address), function='replace'))
            print "Updated %s Maps" % _cnt
            _cnt = MapLayer.objects.filter(ows_url__icontains=source_address).update(
                ows_url=Func(
                    F('ows_url'), Value(source_address), Value(target_address), function='replace'))
            # NOTE(review): the layer_params update count below is discarded, so the
            # "MapLayers" figure only reflects ows_url rows — confirm this is intended.
            MapLayer.objects.filter(layer_params__icontains=source_address).update(
                layer_params=Func(
                    F('layer_params'), Value(source_address), Value(target_address), function='replace'))
            print "Updated %s MapLayers" % _cnt
            _cnt = Layer.objects.filter(thumbnail_url__icontains=source_address).update(
                thumbnail_url=Func(
                    F('thumbnail_url'), Value(source_address), Value(target_address), function='replace'))
            print "Updated %s Layers" % _cnt
            _cnt = Style.objects.filter(sld_url__icontains=source_address).update(
                sld_url=Func(
                    F('sld_url'), Value(source_address), Value(target_address), function='replace'))
            print "Updated %s Styles" % _cnt
            _cnt = Link.objects.filter(url__icontains=source_address).update(
                url=Func(
                    F('url'), Value(source_address), Value(target_address), function='replace'))
            print "Updated %s Links" % _cnt
            # ResourceBase: thumbnails plus CSW metadata fields, counted together
            _cnt = ResourceBase.objects.filter(thumbnail_url__icontains=source_address).update(
                thumbnail_url=Func(
                    F('thumbnail_url'), Value(source_address), Value(target_address), function='replace'))
            _cnt += ResourceBase.objects.filter(csw_anytext__icontains=source_address).update(
                csw_anytext=Func(
                    F('csw_anytext'), Value(source_address), Value(target_address), function='replace'))
            _cnt += ResourceBase.objects.filter(metadata_xml__icontains=source_address).update(
                metadata_xml=Func(
                    F('metadata_xml'), Value(source_address), Value(target_address), function='replace'))
            print "Updated %s ResourceBases" % _cnt
        finally:
            # Reactivate GeoNode Signals — always restored, even on error
            print "Reactivating GeoNode Signals..."
            resignals()
            print "...done!"
def handle(self, **options):
    """Restore a full GeoNode backup from --backup-file or --backup-dir.

    Runs write-access sanity checks first, restores the GeoServer
    catalog/data, flushes and reloads the Django DB fixtures, restores
    media/static/template/locale trees, then re-enables signals and
    re-runs migrations (faked) in the finally block.
    """
    # ignore_errors = options.get('ignore_errors')
    config = Config(options)
    force_exec = options.get('force_exec')
    backup_file = options.get('backup_file')
    skip_geoserver = options.get('skip_geoserver')
    backup_dir = options.get('backup_dir')
    # exactly one of --backup-file / --backup-dir must be given
    if not any([backup_file, backup_dir]):
        raise CommandError("Mandatory option (--backup-file|--backup-dir)")
    if all([backup_file, backup_dir]):
        raise CommandError("Exclusive option (--backup-file|--backup-dir)")
    if backup_file and not os.path.isfile(backup_file):
        raise CommandError("Provided '--backup-file' is not a file")
    if backup_dir and not os.path.isdir(backup_dir):
        raise CommandError("Provided '--backup-dir' is not a directory")
    print "Before proceeding with the Restore, please ensure that:"
    print " 1. The backend (DB or whatever) is accessible and you have rights"
    print " 2. The GeoServer is up and running and reachable from this machine"
    message = 'WARNING: The restore will overwrite ALL GeoNode data. You want to proceed?'
    if force_exec or helpers.confirm(prompt=message, resp=False):
        target_folder = backup_dir
        if backup_file:
            # Create Target Folder
            restore_folder = os.path.join(tempfile.gettempdir(), 'restore')
            if not os.path.exists(restore_folder):
                os.makedirs(restore_folder)
            # Extract ZIP Archive to Target Folder
            target_folder = extract_archive(backup_file, restore_folder)
        # Write Checks — resolve every destination/source folder pair up front
        media_root = settings.MEDIA_ROOT
        media_folder = os.path.join(target_folder, helpers.MEDIA_ROOT)
        static_root = settings.STATIC_ROOT
        static_folder = os.path.join(target_folder, helpers.STATIC_ROOT)
        static_folders = settings.STATICFILES_DIRS
        static_files_folders = os.path.join(target_folder, helpers.STATICFILES_DIRS)
        template_folders = settings.TEMPLATE_DIRS
        template_files_folders = os.path.join(target_folder, helpers.TEMPLATE_DIRS)
        locale_folders = settings.LOCALE_PATHS
        locale_files_folders = os.path.join(target_folder, helpers.LOCALE_PATHS)
        try:
            print("[Sanity Check] Full Write Access to '{}' ...".format(media_root))
            chmod_tree(media_root)
            print("[Sanity Check] Full Write Access to '{}' ...".format(static_root))
            chmod_tree(static_root)
            for static_files_folder in static_folders:
                print("[Sanity Check] Full Write Access to '{}' ...".format(static_files_folder))
                chmod_tree(static_files_folder)
            for template_files_folder in template_folders:
                print("[Sanity Check] Full Write Access to '{}' ...".format(template_files_folder))
                chmod_tree(template_files_folder)
            for locale_files_folder in locale_folders:
                print("[Sanity Check] Full Write Access to '{}' ...".format(locale_files_folder))
                chmod_tree(locale_files_folder)
        except:
            print("...Sanity Checks on Folder failed. Please make sure that the current user has full WRITE access to the above folders (and sub-folders or files).")
            print("Reason:")
            raise
        if not skip_geoserver:
            self.restore_geoserver_backup(settings, target_folder)
            self.restore_geoserver_raster_data(config, settings, target_folder)
            self.restore_geoserver_vector_data(config, settings, target_folder)
            print("Restoring geoserver external resources")
            self.restore_geoserver_externals(config, settings, target_folder)
        else:
            print("Skipping geoserver backup restore")
        # Prepare Target DB
        try:
            call_command('migrate', interactive=False, load_initial_data=False)
            db_name = settings.DATABASES['default']['NAME']
            db_user = settings.DATABASES['default']['USER']
            db_port = settings.DATABASES['default']['PORT']
            db_host = settings.DATABASES['default']['HOST']
            db_passwd = settings.DATABASES['default']['PASSWORD']
            helpers.patch_db(db_name, db_user, db_port, db_host, db_passwd,
                             settings.MONITORING_ENABLED)
        except:
            # best-effort DB preparation; failures are reported, not fatal
            traceback.print_exc()
        try:
            # Deactivate GeoNode Signals
            print "Deactivating GeoNode Signals..."
            designals()
            print "...done!"
            # Flush DB — direct helper first, Django 'flush' as fallback
            try:
                db_name = settings.DATABASES['default']['NAME']
                db_user = settings.DATABASES['default']['USER']
                db_port = settings.DATABASES['default']['PORT']
                db_host = settings.DATABASES['default']['HOST']
                db_passwd = settings.DATABASES['default']['PASSWORD']
                helpers.flush_db(db_name, db_user, db_port, db_host, db_passwd)
            except:
                try:
                    call_command('flush', interactive=False, load_initial_data=False)
                except:
                    traceback.print_exc()
                    raise
            # Restore Fixtures — a bad fixture aborts the whole restore
            for app_name, dump_name in zip(config.app_names, config.dump_names):
                fixture_file = os.path.join(target_folder, dump_name+'.json')
                print "Deserializing "+fixture_file
                try:
                    call_command('loaddata', fixture_file, app_label=app_name)
                except:
                    traceback.print_exc()
                    print "WARNING: No valid fixture data found for '"+dump_name+"'."
                    # helpers.load_fixture(app_name, fixture_file)
                    raise
            # Restore Media Root
            try:
                shutil.rmtree(media_root)
            except:
                pass
            if not os.path.exists(media_root):
                os.makedirs(media_root)
            copy_tree(media_folder, media_root)
            chmod_tree(media_root)
            print "Media Files Restored into '"+media_root+"'."
            # Restore Static Root
            try:
                shutil.rmtree(static_root)
            except:
                pass
            if not os.path.exists(static_root):
                os.makedirs(static_root)
            copy_tree(static_folder, static_root)
            chmod_tree(static_root)
            print "Static Root Restored into '"+static_root+"'."
            # Restore Static Root
            # NOTE(review): this section is an exact duplicate of the one
            # above — harmless (idempotent copy), but likely unintended.
            try:
                shutil.rmtree(static_root)
            except:
                pass
            if not os.path.exists(static_root):
                os.makedirs(static_root)
            copy_tree(static_folder, static_root)
            chmod_tree(static_root)
            print "Static Root Restored into '"+static_root+"'."
            # Restore Static Folders
            for static_files_folder in static_folders:
                try:
                    shutil.rmtree(static_files_folder)
                except:
                    pass
                if not os.path.exists(static_files_folder):
                    os.makedirs(static_files_folder)
                copy_tree(os.path.join(static_files_folders,
                                       os.path.basename(os.path.normpath(static_files_folder))),
                          static_files_folder)
                chmod_tree(static_files_folder)
                print "Static Files Restored into '"+static_files_folder+"'."
            # Restore Template Folders
            for template_files_folder in template_folders:
                try:
                    shutil.rmtree(template_files_folder)
                except:
                    pass
                if not os.path.exists(template_files_folder):
                    os.makedirs(template_files_folder)
                copy_tree(os.path.join(template_files_folders,
                                       os.path.basename(os.path.normpath(template_files_folder))),
                          template_files_folder)
                chmod_tree(template_files_folder)
                print "Template Files Restored into '"+template_files_folder+"'."
            # Restore Locale Folders
            for locale_files_folder in locale_folders:
                try:
                    shutil.rmtree(locale_files_folder)
                except:
                    pass
                if not os.path.exists(locale_files_folder):
                    os.makedirs(locale_files_folder)
                copy_tree(os.path.join(locale_files_folders,
                                       os.path.basename(os.path.normpath(locale_files_folder))),
                          locale_files_folder)
                chmod_tree(locale_files_folder)
                print "Locale Files Restored into '"+locale_files_folder+"'."
            call_command('collectstatic', interactive=False)
            # Cleanup DB
            try:
                db_name = settings.DATABASES['default']['NAME']
                db_user = settings.DATABASES['default']['USER']
                db_port = settings.DATABASES['default']['PORT']
                db_host = settings.DATABASES['default']['HOST']
                db_passwd = settings.DATABASES['default']['PASSWORD']
                helpers.cleanup_db(db_name, db_user, db_port, db_host, db_passwd)
            except:
                traceback.print_exc()
            return str(target_folder)
        finally:
            # Reactivate GeoNode Signals — always, even after a failure
            print "Reactivating GeoNode Signals..."
            resignals()
            print "...done!"
            call_command('migrate', interactive=False, load_initial_data=False, fake=True)
            print "HINT: If you migrated from another site, do not forget to run the command 'migrate_baseurl' to fix Links"
            print " e.g.: DJANGO_SETTINGS_MODULE=my_geonode.settings python manage.py migrate_baseurl --source-address=my-host-dev.geonode.org --target-address=my-host-prod.geonode.org"
            print "Restore finished. Please find restored files and dumps into:"
ignorenonexistent=True) for obj in objects: obj.save(using=DEFAULT_DB_ALIAS) except Exception, err: traceback.print_exc() print "Restore finished. Please find restored files and dumps into: '" + target_folder + "'." if __name__ == '__main__': os.environ.setdefault("DJANGO_SETTINGS_MODULE", "geonode.settings") restore_file = None owner = None try: restore_file = sys.argv[1] owner = sys.argv[2] except: pass if restore_file and owner: if helpers.confirm( prompt= 'WARNING: The migration may break some of your GeoNode existing Layers. Are you sure you want to proceed?', resp=False): migrate_layers(restore_file, owner) else: print "Please, provide the full path to the ZIP archive to Restore AND the Owner of the imported Layers.\n" print "Usage example: python migrate_layers.py backup/geonode_backup_test.zip admin\n"
# Cleanup DB try: helpers.cleanup_db() except: traceback.print_exc() except Exception, err: print str(err) print "Restore finished. Please find restored files and dumps into: '"+target_folder+"'." if __name__ == '__main__': os.environ.setdefault("DJANGO_SETTINGS_MODULE", "geonode.settings") restore_file = None try: restore_file = sys.argv[1] except: pass if restore_file: if helpers.confirm(prompt='WARNING: The restore will overwrite all your GeoNode data and files. Are you sure you want to proceed?', resp=False): restore_full(restore_file) else: print "Please, provide the full path to the ZIP archive to Restore.\n" print "Usage example: python restore.py backup/geonode_backup_test.zip\n"
def handle(self, **options): # ignore_errors = options.get('ignore_errors') config = Config(options) force_exec = options.get('force_exec') backup_file = options.get('backup_file') skip_geoserver = options.get('skip_geoserver') backup_dir = options.get('backup_dir') if not any([backup_file, backup_dir]): raise CommandError("Mandatory option (--backup-file|--backup-dir)") if all([backup_file, backup_dir]): raise CommandError("Exclusive option (--backup-file|--backup-dir)") if backup_file and not os.path.isfile(backup_file): raise CommandError("Provided '--backup-file' is not a file") if backup_dir and not os.path.isdir(backup_dir): raise CommandError("Provided '--backup-dir' is not a directory") print "Before proceeding with the Restore, please ensure that:" print " 1. The backend (DB or whatever) is accessible and you have rights" print " 2. The GeoServer is up and running and reachable from this machine" message = 'WARNING: The restore will overwrite ALL GeoNode data. You want to proceed?' 
if force_exec or helpers.confirm(prompt=message, resp=False): target_folder = backup_dir if backup_file: # Create Target Folder restore_folder = os.path.join(tempfile.gettempdir(), 'restore') if not os.path.exists(restore_folder): os.makedirs(restore_folder) # Extract ZIP Archive to Target Folder target_folder = extract_archive(backup_file, restore_folder) # Write Checks media_root = settings.MEDIA_ROOT media_folder = os.path.join(target_folder, helpers.MEDIA_ROOT) static_root = settings.STATIC_ROOT static_folder = os.path.join(target_folder, helpers.STATIC_ROOT) static_folders = settings.STATICFILES_DIRS static_files_folders = os.path.join(target_folder, helpers.STATICFILES_DIRS) template_folders = [] try: template_folders = settings.TEMPLATE_DIRS except: try: template_folders = settings.TEMPLATES[0]['DIRS'] except: pass template_files_folders = os.path.join(target_folder, helpers.TEMPLATE_DIRS) locale_folders = settings.LOCALE_PATHS locale_files_folders = os.path.join(target_folder, helpers.LOCALE_PATHS) try: print("[Sanity Check] Full Write Access to '{}' ...".format(media_root)) chmod_tree(media_root) print("[Sanity Check] Full Write Access to '{}' ...".format(static_root)) chmod_tree(static_root) for static_files_folder in static_folders: print("[Sanity Check] Full Write Access to '{}' ...".format(static_files_folder)) chmod_tree(static_files_folder) for template_files_folder in template_folders: print("[Sanity Check] Full Write Access to '{}' ...".format(template_files_folder)) chmod_tree(template_files_folder) for locale_files_folder in locale_folders: print("[Sanity Check] Full Write Access to '{}' ...".format(locale_files_folder)) chmod_tree(locale_files_folder) except: print("...Sanity Checks on Folder failed. 
Please make sure that the current user has full WRITE access to the above folders (and sub-folders or files).") print("Reason:") raise if not skip_geoserver: self.restore_geoserver_backup(settings, target_folder) self.restore_geoserver_raster_data(config, settings, target_folder) self.restore_geoserver_vector_data(config, settings, target_folder) print("Restoring geoserver external resources") self.restore_geoserver_externals(config, settings, target_folder) else: print("Skipping geoserver backup restore") # Prepare Target DB try: call_command('migrate', interactive=False, load_initial_data=False) db_name = settings.DATABASES['default']['NAME'] db_user = settings.DATABASES['default']['USER'] db_port = settings.DATABASES['default']['PORT'] db_host = settings.DATABASES['default']['HOST'] db_passwd = settings.DATABASES['default']['PASSWORD'] helpers.patch_db(db_name, db_user, db_port, db_host, db_passwd, settings.MONITORING_ENABLED) except: traceback.print_exc() try: # Deactivate GeoNode Signals print "Deactivating GeoNode Signals..." designals() print "...done!" # Flush DB try: db_name = settings.DATABASES['default']['NAME'] db_user = settings.DATABASES['default']['USER'] db_port = settings.DATABASES['default']['PORT'] db_host = settings.DATABASES['default']['HOST'] db_passwd = settings.DATABASES['default']['PASSWORD'] helpers.flush_db(db_name, db_user, db_port, db_host, db_passwd) except: try: call_command('flush', interactive=False, load_initial_data=False) except: traceback.print_exc() raise # Restore Fixtures for app_name, dump_name in zip(config.app_names, config.dump_names): fixture_file = os.path.join(target_folder, dump_name+'.json') print "Deserializing "+fixture_file try: call_command('loaddata', fixture_file, app_label=app_name) except: traceback.print_exc() print "WARNING: No valid fixture data found for '"+dump_name+"'." 
# helpers.load_fixture(app_name, fixture_file) raise # Restore Media Root try: shutil.rmtree(media_root) except: pass if not os.path.exists(media_root): os.makedirs(media_root) copy_tree(media_folder, media_root) chmod_tree(media_root) print "Media Files Restored into '"+media_root+"'." # Restore Static Root try: shutil.rmtree(static_root) except: pass if not os.path.exists(static_root): os.makedirs(static_root) copy_tree(static_folder, static_root) chmod_tree(static_root) print "Static Root Restored into '"+static_root+"'." # Restore Static Root try: shutil.rmtree(static_root) except: pass if not os.path.exists(static_root): os.makedirs(static_root) copy_tree(static_folder, static_root) chmod_tree(static_root) print "Static Root Restored into '"+static_root+"'." # Restore Static Folders for static_files_folder in static_folders: try: shutil.rmtree(static_files_folder) except: pass if not os.path.exists(static_files_folder): os.makedirs(static_files_folder) copy_tree(os.path.join(static_files_folders, os.path.basename(os.path.normpath(static_files_folder))), static_files_folder) chmod_tree(static_files_folder) print "Static Files Restored into '"+static_files_folder+"'." # Restore Template Folders for template_files_folder in template_folders: try: shutil.rmtree(template_files_folder) except: pass if not os.path.exists(template_files_folder): os.makedirs(template_files_folder) copy_tree(os.path.join(template_files_folders, os.path.basename(os.path.normpath(template_files_folder))), template_files_folder) chmod_tree(template_files_folder) print "Template Files Restored into '"+template_files_folder+"'." 
# Restore Locale Folders for locale_files_folder in locale_folders: try: shutil.rmtree(locale_files_folder) except: pass if not os.path.exists(locale_files_folder): os.makedirs(locale_files_folder) copy_tree(os.path.join(locale_files_folders, os.path.basename(os.path.normpath(locale_files_folder))), locale_files_folder) chmod_tree(locale_files_folder) print "Locale Files Restored into '"+locale_files_folder+"'." call_command('collectstatic', interactive=False) # Cleanup DB try: db_name = settings.DATABASES['default']['NAME'] db_user = settings.DATABASES['default']['USER'] db_port = settings.DATABASES['default']['PORT'] db_host = settings.DATABASES['default']['HOST'] db_passwd = settings.DATABASES['default']['PASSWORD'] helpers.cleanup_db(db_name, db_user, db_port, db_host, db_passwd) except: traceback.print_exc() return str(target_folder) finally: # Reactivate GeoNode Signals print "Reactivating GeoNode Signals..." resignals() print "...done!" call_command('migrate', interactive=False, load_initial_data=False, fake=True) print "HINT: If you migrated from another site, do not forget to run the command 'migrate_baseurl' to fix Links" print " e.g.: DJANGO_SETTINGS_MODULE=my_geonode.settings python manage.py migrate_baseurl --source-address=my-host-dev.geonode.org --target-address=my-host-prod.geonode.org" print "Restore finished. Please find restored files and dumps into:"
objects = serializers.deserialize('json', json.dumps(obj), ignorenonexistent=True) for obj in objects: obj.save(using=DEFAULT_DB_ALIAS) except Exception, err: traceback.print_exc() print "Restore finished. Please find restored files and dumps into: '"+target_folder+"'." if __name__ == '__main__': os.environ.setdefault("DJANGO_SETTINGS_MODULE", "geonode.settings") restore_file = None owner = None try: restore_file = sys.argv[1] owner = sys.argv[2] except: pass if restore_file and owner: if helpers.confirm(prompt='WARNING: The migration may break some of your GeoNode existing Layers. Are you sure you want to proceed?', resp=False): migrate_layers(restore_file, owner) else: print "Please, provide the full path to the ZIP archive to Restore AND the Owner of the imported Layers.\n" print "Usage example: python migrate_layers.py backup/geonode_backup_test.zip admin\n"
def handle(self, **options): # ignore_errors = options.get('ignore_errors') force_exec = options.get('force_exec') backup_file = options.get('backup_file') if not backup_file or len(backup_file) == 0: raise CommandError("Backup archive '--backup-file' is mandatory") print "Before proceeding with the Restore, please ensure that:" print " 1. The backend (DB or whatever) is accessible and you have rights" print " 2. The GeoServer is up and running and reachable from this machine" message = 'WARNING: The restore will overwrite ALL GeoNode data. You want to proceed?' if force_exec or helpers.confirm(prompt=message, resp=False): # Create Target Folder restore_folder = os.path.join(tempfile.gettempdir(), 'restore') if not os.path.exists(restore_folder): os.makedirs(restore_folder) # Extract ZIP Archive to Target Folder target_folder = helpers.unzip_file(backup_file, restore_folder) # Restore GeoServer Catalog url = settings.OGC_SERVER['default']['PUBLIC_LOCATION'] user = settings.OGC_SERVER['default']['USER'] passwd = settings.OGC_SERVER['default']['PASSWORD'] geoserver_bk_file = os.path.join(target_folder, 'geoserver_catalog.zip') print "Restoring 'GeoServer Catalog ["+url+"]' into '"+geoserver_bk_file+"'." 
if not os.path.exists(geoserver_bk_file): raise ValueError('Could not find GeoServer Backup file [' + geoserver_bk_file + ']') # Best Effort Restore: 'options': {'option': ['BK_BEST_EFFORT=true']} data = {'restore': {'archiveFile': geoserver_bk_file, 'options': {}}} headers = {'Content-type': 'application/json'} r = requests.post(url + 'rest/br/restore/', data=json.dumps(data), headers=headers, auth=HTTPBasicAuth(user, passwd)) if (r.status_code > 201): gs_backup = r.json() gs_bk_exec_id = gs_backup['restore']['execution']['id'] r = requests.get(url + 'rest/br/restore/' + str(gs_bk_exec_id) + '.json', auth=HTTPBasicAuth(user, passwd)) if (r.status_code == 200): gs_backup = r.json() gs_bk_progress = gs_backup['restore']['execution']['progress'] print gs_bk_progress raise ValueError('Could not successfully restore GeoServer catalog [' + url + 'rest/br/restore/]: ' + str(r.status_code) + ' - ' + str(r.text)) else: gs_backup = r.json() gs_bk_exec_id = gs_backup['restore']['execution']['id'] r = requests.get(url + 'rest/br/restore/' + str(gs_bk_exec_id) + '.json', auth=HTTPBasicAuth(user, passwd)) if (r.status_code == 200): gs_bk_exec_status = gs_backup['restore']['execution']['status'] gs_bk_exec_progress = gs_backup['restore']['execution']['progress'] gs_bk_exec_progress_updated = '0/0' while (gs_bk_exec_status != 'COMPLETED' and gs_bk_exec_status != 'FAILED'): if (gs_bk_exec_progress != gs_bk_exec_progress_updated): gs_bk_exec_progress_updated = gs_bk_exec_progress r = requests.get(url + 'rest/br/restore/' + str(gs_bk_exec_id) + '.json', auth=HTTPBasicAuth(user, passwd)) if (r.status_code == 200): gs_backup = r.json() gs_bk_exec_status = gs_backup['restore']['execution']['status'] gs_bk_exec_progress = gs_backup['restore']['execution']['progress'] print str(gs_bk_exec_status) + ' - ' + gs_bk_exec_progress time.sleep(3) else: raise ValueError('Could not successfully restore GeoServer catalog [' + url + 'rest/br/restore/]: ' + str(r.status_code) + ' - ' + str(r.text)) 
else: raise ValueError('Could not successfully restore GeoServer catalog [' + url + 'rest/br/restore/]: ' + str(r.status_code) + ' - ' + str(r.text)) # Restore GeoServer Data if (helpers.GS_DATA_DIR): if (helpers.GS_DUMP_RASTER_DATA): # Restore '$GS_DATA_DIR/data/geonode' gs_data_root = os.path.join(helpers.GS_DATA_DIR, 'data', 'geonode') gs_data_folder = os.path.join(target_folder, 'gs_data_dir', 'data', 'geonode') try: shutil.rmtree(gs_data_root) except: pass if not os.path.exists(gs_data_root): os.makedirs(gs_data_root) helpers.copy_tree(gs_data_folder, gs_data_root) helpers.chmod_tree(gs_data_root) print "GeoServer Uploaded Data Restored to '"+gs_data_root+"'." # Cleanup '$GS_DATA_DIR/gwc-layers' gwc_layers_root = os.path.join(helpers.GS_DATA_DIR, 'gwc-layers') if not os.path.isabs(gwc_layers_root): gwc_layers_root = os.path.join(settings.PROJECT_ROOT, '..', gwc_layers_root) try: shutil.rmtree(gwc_layers_root) print 'Cleaned out old GeoServer GWC Layers Config: ' + gwc_layers_root except: pass if not os.path.exists(gwc_layers_root): os.makedirs(gwc_layers_root) if (helpers.GS_DUMP_VECTOR_DATA): # Restore Vectorial Data from DB datastore = settings.OGC_SERVER['default']['DATASTORE'] if (datastore): ogc_db_name = settings.DATABASES[datastore]['NAME'] ogc_db_user = settings.DATABASES[datastore]['USER'] ogc_db_passwd = settings.DATABASES[datastore]['PASSWORD'] ogc_db_host = settings.DATABASES[datastore]['HOST'] ogc_db_port = settings.DATABASES[datastore]['PORT'] gs_data_folder = os.path.join(target_folder, 'gs_data_dir', 'data', 'geonode') helpers.restore_db(ogc_db_name, ogc_db_user, ogc_db_port, ogc_db_host, ogc_db_passwd, gs_data_folder) # Prepare Target DB try: call_command('migrate', interactive=False, load_initial_data=False) call_command('flush', interactive=False, load_initial_data=False) db_name = settings.DATABASES['default']['NAME'] db_user = settings.DATABASES['default']['USER'] db_port = settings.DATABASES['default']['PORT'] db_host = 
settings.DATABASES['default']['HOST'] db_passwd = settings.DATABASES['default']['PASSWORD'] helpers.patch_db(db_name, db_user, db_port, db_host, db_passwd) except: traceback.print_exc() try: # Deactivate GeoNode Signals print "Deactivating GeoNode Signals..." designals() print "...done!" # Restore Fixtures for app_name, dump_name in zip(helpers.app_names, helpers.dump_names): fixture_file = os.path.join(target_folder, dump_name+'.json') print "Deserializing "+fixture_file try: call_command('loaddata', fixture_file, app_label=app_name) except: traceback.print_exc() print "WARNING: No valid fixture data found for '"+dump_name+"'." # helpers.load_fixture(app_name, fixture_file) # Restore Media Root media_root = settings.MEDIA_ROOT media_folder = os.path.join(target_folder, helpers.MEDIA_ROOT) try: shutil.rmtree(media_root) except: pass if not os.path.exists(media_root): os.makedirs(media_root) helpers.copy_tree(media_folder, media_root) helpers.chmod_tree(media_root) print "Media Files Restored into '"+media_root+"'." # Restore Static Root static_root = settings.STATIC_ROOT static_folder = os.path.join(target_folder, helpers.STATIC_ROOT) try: shutil.rmtree(static_root) except: pass if not os.path.exists(static_root): os.makedirs(static_root) helpers.copy_tree(static_folder, static_root) helpers.chmod_tree(static_root) print "Static Root Restored into '"+static_root+"'." # Restore Static Root static_root = settings.STATIC_ROOT static_folder = os.path.join(target_folder, helpers.STATIC_ROOT) try: shutil.rmtree(static_root) except: pass if not os.path.exists(static_root): os.makedirs(static_root) helpers.copy_tree(static_folder, static_root) helpers.chmod_tree(static_root) print "Static Root Restored into '"+static_root+"'." 
# Restore Static Folders static_folders = settings.STATICFILES_DIRS static_files_folders = os.path.join(target_folder, helpers.STATICFILES_DIRS) for static_files_folder in static_folders: try: shutil.rmtree(static_files_folder) except: pass if not os.path.exists(static_files_folder): os.makedirs(static_files_folder) helpers.copy_tree(os.path.join(static_files_folders, os.path.basename(os.path.normpath(static_files_folder))), static_files_folder) helpers.chmod_tree(static_files_folder) print "Static Files Restored into '"+static_files_folder+"'." # Restore Template Folders template_folders = settings.TEMPLATE_DIRS template_files_folders = os.path.join(target_folder, helpers.TEMPLATE_DIRS) for template_files_folder in template_folders: try: shutil.rmtree(template_files_folder) except: pass if not os.path.exists(template_files_folder): os.makedirs(template_files_folder) helpers.copy_tree(os.path.join(template_files_folders, os.path.basename(os.path.normpath(template_files_folder))), template_files_folder) helpers.chmod_tree(template_files_folder) print "Template Files Restored into '"+template_files_folder+"'." # Restore Locale Folders locale_folders = settings.LOCALE_PATHS locale_files_folders = os.path.join(target_folder, helpers.LOCALE_PATHS) for locale_files_folder in locale_folders: try: shutil.rmtree(locale_files_folder) except: pass if not os.path.exists(locale_files_folder): os.makedirs(locale_files_folder) helpers.copy_tree(os.path.join(locale_files_folders, os.path.basename(os.path.normpath(locale_files_folder))), locale_files_folder) helpers.chmod_tree(locale_files_folder) print "Locale Files Restored into '"+locale_files_folder+"'." 
# Cleanup DB try: db_name = settings.DATABASES['default']['NAME'] db_user = settings.DATABASES['default']['USER'] db_port = settings.DATABASES['default']['PORT'] db_host = settings.DATABASES['default']['HOST'] db_passwd = settings.DATABASES['default']['PASSWORD'] helpers.cleanup_db(db_name, db_user, db_port, db_host, db_passwd) except: traceback.print_exc() print "Restore finished. Please find restored files and dumps into:" return str(target_folder) finally: # Reactivate GeoNode Signals print "Reactivating GeoNode Signals..." resignals() print "...done!"
def handle(self, **options): # ignore_errors = options.get('ignore_errors') force_exec = options.get('force_exec') source_address = options.get('source_address') target_address = options.get('target_address') if not source_address or len(source_address) == 0: raise CommandError("Source Address '--source-address' is mandatory") if not target_address or len(target_address) == 0: raise CommandError("Target Address '--target-address' is mandatory") print "This will change all Maps, Layers, \ Styles and Links Base URLs from [%s] to [%s]." % (source_address, target_address) print "The operation may take some time, depending on the amount of Layer on GeoNode." message = 'You want to proceed?' if force_exec or helpers.confirm(prompt=message, resp=False): try: # Deactivate GeoNode Signals print "Deactivating GeoNode Signals..." designals() print "...done!" maps = Map.objects.all() for map in maps: print "Checking Map[%s]" % (map) if map.thumbnail_url: map.thumbnail_url = map.thumbnail_url.replace(source_address, target_address) map_layers = map.layers for layer in map_layers: if layer.ows_url: original = layer.ows_url layer.ows_url = layer.ows_url.replace(source_address, target_address) print "Updated OWS URL from [%s] to [%s]" % (original, layer.ows_url) if layer.layer_params: layer.layer_params = layer.layer_params.replace(source_address, target_address) print "Updated Layer Params also for Layer [%s]" % (layer) layer.save() map.save() print "Updated Map[%s]" % (map) layers = Layer.objects.all() for layer in layers: print "Checking Layer[%s]" % (layer) if layer.thumbnail_url: original = layer.thumbnail_url layer.thumbnail_url = layer.thumbnail_url.replace(source_address, target_address) layer.save() print "Updated Thumbnail URL from [%s] to [%s]" % (original, layer.thumbnail_url) styles = Style.objects.all() for style in styles: print "Checking Style[%s]" % (style) if style.sld_url: original = style.sld_url style.sld_url = style.sld_url.replace(source_address, 
target_address) style.save() print "Updated SLD URL from [%s] to [%s]" % (original, style.sld_url) links = Link.objects.all() for link in links: print "Checking Link[%s]" % (link) if link.url: original = link.url link.url = link.url.replace(source_address, target_address) link.save() print "Updated URL from [%s] to [%s]" % (original, link.url) resources = ResourceBase.objects.all() for res in resources: print "Checking Resource[%s]" % (res) if res.metadata_xml: original = res.metadata_xml res.metadata_xml = res.metadata_xml.replace(source_address, target_address) res.save() print "Updated URL in metadata XML for resource [%s]" % (res) finally: # Reactivate GeoNode Signals print "Reactivating GeoNode Signals..." resignals() print "...done!"
def handle(self, **options): # ignore_errors = options.get('ignore_errors') force_exec = options.get('force_exec') source_address = options.get('source_address') target_address = options.get('target_address') if not source_address or len(source_address) == 0: raise CommandError( "Source Address '--source-address' is mandatory") if not target_address or len(target_address) == 0: raise CommandError( "Target Address '--target-address' is mandatory") print "This will change all Maps, Layers, \ Styles and Links Base URLs from [%s] to [%s]." % (source_address, target_address) print "The operation may take some time, depending on the amount of Layer on GeoNode." message = 'You want to proceed?' if force_exec or helpers.confirm(prompt=message, resp=False): try: # Deactivate GeoNode Signals print "Deactivating GeoNode Signals..." designals() print "...done!" maps = Map.objects.all() for map in maps: print "Checking Map[%s]" % (map) if map.thumbnail_url: map.thumbnail_url = map.thumbnail_url.replace( source_address, target_address) map_layers = map.layers for layer in map_layers: if layer.ows_url: original = layer.ows_url layer.ows_url = layer.ows_url.replace( source_address, target_address) print "Updated OWS URL from [%s] to [%s]" % ( original, layer.ows_url) if layer.layer_params: layer.layer_params = layer.layer_params.replace( source_address, target_address) print "Updated Layer Params also for Layer [%s]" % ( layer) layer.save() map.save() print "Updated Map[%s]" % (map) layers = Layer.objects.all() for layer in layers: print "Checking Layer[%s]" % (layer) if layer.thumbnail_url: original = layer.thumbnail_url layer.thumbnail_url = layer.thumbnail_url.replace( source_address, target_address) layer.save() print "Updated Thumbnail URL from [%s] to [%s]" % ( original, layer.thumbnail_url) styles = Style.objects.all() for style in styles: print "Checking Style[%s]" % (style) if style.sld_url: original = style.sld_url style.sld_url = style.sld_url.replace( source_address, 
target_address) style.save() print "Updated SLD URL from [%s] to [%s]" % ( original, style.sld_url) links = Link.objects.all() for link in links: print "Checking Link[%s]" % (link) if link.url: original = link.url link.url = link.url.replace(source_address, target_address) link.save() print "Updated URL from [%s] to [%s]" % (original, link.url) resources = ResourceBase.objects.all() for res in resources: print "Checking Resource[%s]" % (res) if res.metadata_xml: original = res.metadata_xml res.metadata_xml = res.metadata_xml.replace( source_address, target_address) res.save() print "Updated URL in metadata XML for resource [%s]" % ( res) finally: # Reactivate GeoNode Signals print "Reactivating GeoNode Signals..." resignals() print "...done!"
def handle(self, **options): # ignore_errors = options.get('ignore_errors') force_exec = options.get('force_exec') backup_dir = options.get('backup_dir') if not backup_dir or len(backup_dir) == 0: raise CommandError("Destination folder '--backup-dir' is mandatory") print "Before proceeding with the Backup, please ensure that:" print " 1. The backend (DB or whatever) is accessible and you have rights" print " 2. The GeoServer is up and running and reachable from this machine" message = 'You want to proceed?' if force_exec or helpers.confirm(prompt=message, resp=False): # Create Target Folder dir_time_suffix = helpers.get_dir_time_suffix() target_folder = os.path.join(backup_dir, dir_time_suffix) if not os.path.exists(target_folder): os.makedirs(target_folder) os.chmod(target_folder, 0755) # Create GeoServer Backup url = settings.OGC_SERVER['default']['PUBLIC_LOCATION'] user = settings.OGC_SERVER['default']['USER'] passwd = settings.OGC_SERVER['default']['PASSWORD'] geoserver_bk_file = os.path.join(target_folder, 'geoserver_catalog.zip') print "Dumping 'GeoServer Catalog ["+url+"]' into '"+geoserver_bk_file+"'." 
data = {'backup': {'archiveFile': geoserver_bk_file, 'overwrite': 'true', 'options': {'option': ['BK_BEST_EFFORT=true']}}} headers = {'Content-type': 'application/json'} r = requests.post(url + 'rest/br/backup/', data=json.dumps(data), headers=headers, auth=HTTPBasicAuth(user, passwd)) if (r.status_code > 201): gs_backup = r.json() gs_bk_exec_id = gs_backup['backup']['execution']['id'] r = requests.get(url + 'rest/br/backup/' + str(gs_bk_exec_id) + '.json', auth=HTTPBasicAuth(user, passwd)) if (r.status_code == 200): gs_backup = r.json() gs_bk_progress = gs_backup['backup']['execution']['progress'] print gs_bk_progress raise ValueError('Could not successfully backup GeoServer catalog [' + url + 'rest/br/backup/]: ' + str(r.status_code) + ' - ' + str(r.text)) else: gs_backup = r.json() gs_bk_exec_id = gs_backup['backup']['execution']['id'] r = requests.get(url + 'rest/br/backup/' + str(gs_bk_exec_id) + '.json', auth=HTTPBasicAuth(user, passwd)) if (r.status_code == 200): gs_bk_exec_status = gs_backup['backup']['execution']['status'] gs_bk_exec_progress = gs_backup['backup']['execution']['progress'] gs_bk_exec_progress_updated = '0/0' while (gs_bk_exec_status != 'COMPLETED' and gs_bk_exec_status != 'FAILED'): if (gs_bk_exec_progress != gs_bk_exec_progress_updated): gs_bk_exec_progress_updated = gs_bk_exec_progress r = requests.get(url + 'rest/br/backup/' + str(gs_bk_exec_id) + '.json', auth=HTTPBasicAuth(user, passwd)) if (r.status_code == 200): gs_backup = r.json() gs_bk_exec_status = gs_backup['backup']['execution']['status'] gs_bk_exec_progress = gs_backup['backup']['execution']['progress'] print str(gs_bk_exec_status) + ' - ' + gs_bk_exec_progress time.sleep(3) else: raise ValueError('Could not successfully backup GeoServer catalog [' + url + 'rest/br/backup/]: ' + str(r.status_code) + ' - ' + str(r.text)) else: raise ValueError('Could not successfully backup GeoServer catalog [' + url + 'rest/br/backup/]: ' + str(r.status_code) + ' - ' + str(r.text)) # Dump 
GeoServer Data if (helpers.GS_DATA_DIR): if (helpers.GS_DUMP_RASTER_DATA): # Dump '$GS_DATA_DIR/data/geonode' gs_data_root = os.path.join(helpers.GS_DATA_DIR, 'data', 'geonode') gs_data_folder = os.path.join(target_folder, 'gs_data_dir', 'data', 'geonode') if not os.path.exists(gs_data_folder): os.makedirs(gs_data_folder) helpers.copy_tree(gs_data_root, gs_data_folder) print "Dumped GeoServer Uploaded Data from '"+gs_data_root+"'." if (helpers.GS_DUMP_VECTOR_DATA): # Dump Vectorial Data from DB datastore = settings.OGC_SERVER['default']['DATASTORE'] if (datastore): ogc_db_name = settings.DATABASES[datastore]['NAME'] ogc_db_user = settings.DATABASES[datastore]['USER'] ogc_db_passwd = settings.DATABASES[datastore]['PASSWORD'] ogc_db_host = settings.DATABASES[datastore]['HOST'] ogc_db_port = settings.DATABASES[datastore]['PORT'] gs_data_folder = os.path.join(target_folder, 'gs_data_dir', 'data', 'geonode') if not os.path.exists(gs_data_folder): os.makedirs(gs_data_folder) helpers.dump_db(ogc_db_name, ogc_db_user, ogc_db_port, ogc_db_host, ogc_db_passwd, gs_data_folder) try: # Deactivate GeoNode Signals print "Deactivating GeoNode Signals..." designals() print "...done!" # Dump Fixtures for app_name, dump_name in zip(helpers.app_names, helpers.dump_names): print "Dumping '"+app_name+"' into '"+dump_name+".json'." # Point stdout at a file for dumping data to. output = open(os.path.join(target_folder, dump_name+'.json'), 'w') call_command('dumpdata', app_name, format='json', indent=2, natural=True, stdout=output) output.close() # Store Media Root media_root = settings.MEDIA_ROOT media_folder = os.path.join(target_folder, helpers.MEDIA_ROOT) if not os.path.exists(media_folder): os.makedirs(media_folder) helpers.copy_tree(media_root, media_folder) print "Saved Media Files from '"+media_root+"'." 
# Store Static Root static_root = settings.STATIC_ROOT static_folder = os.path.join(target_folder, helpers.STATIC_ROOT) if not os.path.exists(static_folder): os.makedirs(static_folder) helpers.copy_tree(static_root, static_folder) print "Saved Static Root from '"+static_root+"'." # Store Static Folders static_folders = settings.STATICFILES_DIRS static_files_folders = os.path.join(target_folder, helpers.STATICFILES_DIRS) if not os.path.exists(static_files_folders): os.makedirs(static_files_folders) for static_files_folder in static_folders: static_folder = os.path.join(static_files_folders, os.path.basename(os.path.normpath(static_files_folder))) if not os.path.exists(static_folder): os.makedirs(static_folder) helpers.copy_tree(static_files_folder, static_folder) print "Saved Static Files from '"+static_files_folder+"'." # Store Template Folders template_folders = settings.TEMPLATE_DIRS template_files_folders = os.path.join(target_folder, helpers.TEMPLATE_DIRS) if not os.path.exists(template_files_folders): os.makedirs(template_files_folders) for template_files_folder in template_folders: template_folder = os.path.join(template_files_folders, os.path.basename(os.path.normpath(template_files_folder))) if not os.path.exists(template_folder): os.makedirs(template_folder) helpers.copy_tree(template_files_folder, template_folder) print "Saved Template Files from '"+template_files_folder+"'." # Store Locale Folders locale_folders = settings.LOCALE_PATHS locale_files_folders = os.path.join(target_folder, helpers.LOCALE_PATHS) if not os.path.exists(locale_files_folders): os.makedirs(locale_files_folders) for locale_files_folder in locale_folders: locale_folder = os.path.join(locale_files_folders, os.path.basename(os.path.normpath(locale_files_folder))) if not os.path.exists(locale_folder): os.makedirs(locale_folder) helpers.copy_tree(locale_files_folder, locale_folder) print "Saved Locale Files from '"+locale_files_folder+"'." 
# Create Final ZIP Archive helpers.zip_dir(target_folder, os.path.join(backup_dir, dir_time_suffix+'.zip')) # Cleanup Temp Folder shutil.rmtree(target_folder) print "Backup Finished. Archive generated." return str(os.path.join(backup_dir, dir_time_suffix+'.zip')) finally: # Reactivate GeoNode Signals print "Reactivating GeoNode Signals..." resignals() print "...done!"
def handle(self, **options): # ignore_errors = options.get('ignore_errors') config = Config(options) force_exec = options.get('force_exec') backup_dir = options.get('backup_dir') skip_geoserver = options.get('skip_geoserver') if not backup_dir or len(backup_dir) == 0: raise CommandError("Destination folder '--backup-dir' is mandatory") print "Before proceeding with the Backup, please ensure that:" print " 1. The backend (DB or whatever) is accessible and you have rights" print " 2. The GeoServer is up and running and reachable from this machine" message = 'You want to proceed?' if force_exec or helpers.confirm(prompt=message, resp=False): # Create Target Folder dir_time_suffix = get_dir_time_suffix() target_folder = os.path.join(backup_dir, dir_time_suffix) if not os.path.exists(target_folder): os.makedirs(target_folder) # Temporary folder to store backup files. It will be deleted at the end. os.chmod(target_folder, 0777) if not skip_geoserver: self.create_geoserver_backup(settings, target_folder) self.dump_geoserver_raster_data(config, settings, target_folder) self.dump_geoserver_vector_data(config, settings, target_folder) print("Duming geoserver external resources") self.dump_geoserver_externals(config, settings, target_folder) else: print("Skipping geoserver backup") try: # Deactivate GeoNode Signals print "Deactivating GeoNode Signals..." designals() print "...done!" # Dump Fixtures for app_name, dump_name in zip(config.app_names, config.dump_names): print "Dumping '"+app_name+"' into '"+dump_name+".json'." # Point stdout at a file for dumping data to. 
output = open(os.path.join(target_folder, dump_name+'.json'), 'w') call_command('dumpdata', app_name, format='json', indent=2, natural=True, stdout=output) output.close() # Store Media Root media_root = settings.MEDIA_ROOT media_folder = os.path.join(target_folder, helpers.MEDIA_ROOT) if not os.path.exists(media_folder): os.makedirs(media_folder) copy_tree(media_root, media_folder) print "Saved Media Files from '"+media_root+"'." # Store Static Root static_root = settings.STATIC_ROOT static_folder = os.path.join(target_folder, helpers.STATIC_ROOT) if not os.path.exists(static_folder): os.makedirs(static_folder) copy_tree(static_root, static_folder) print "Saved Static Root from '"+static_root+"'." # Store Static Folders static_folders = settings.STATICFILES_DIRS static_files_folders = os.path.join(target_folder, helpers.STATICFILES_DIRS) if not os.path.exists(static_files_folders): os.makedirs(static_files_folders) for static_files_folder in static_folders: static_folder = os.path.join(static_files_folders, os.path.basename(os.path.normpath(static_files_folder))) if not os.path.exists(static_folder): os.makedirs(static_folder) copy_tree(static_files_folder, static_folder) print "Saved Static Files from '"+static_files_folder+"'." # Store Template Folders template_folders = [] try: template_folders = settings.TEMPLATE_DIRS except: try: template_folders = settings.TEMPLATES[0]['DIRS'] except: pass template_files_folders = os.path.join(target_folder, helpers.TEMPLATE_DIRS) if not os.path.exists(template_files_folders): os.makedirs(template_files_folders) for template_files_folder in template_folders: template_folder = os.path.join(template_files_folders, os.path.basename(os.path.normpath(template_files_folder))) if not os.path.exists(template_folder): os.makedirs(template_folder) copy_tree(template_files_folder, template_folder) print "Saved Template Files from '"+template_files_folder+"'." 
# Store Locale Folders locale_folders = settings.LOCALE_PATHS locale_files_folders = os.path.join(target_folder, helpers.LOCALE_PATHS) if not os.path.exists(locale_files_folders): os.makedirs(locale_files_folders) for locale_files_folder in locale_folders: locale_folder = os.path.join(locale_files_folders, os.path.basename(os.path.normpath(locale_files_folder))) if not os.path.exists(locale_folder): os.makedirs(locale_folder) copy_tree(locale_files_folder, locale_folder) print "Saved Locale Files from '"+locale_files_folder+"'." # Create Final ZIP Archive zip_dir(target_folder, os.path.join(backup_dir, dir_time_suffix+'.zip')) # Clean-up Temp Folder try: shutil.rmtree(target_folder) except: print "WARNING: Could not be possible to delete the temp folder: '" + str(target_folder) + "'" print "Backup Finished. Archive generated." return str(os.path.join(backup_dir, dir_time_suffix+'.zip')) finally: # Reactivate GeoNode Signals print "Reactivating GeoNode Signals..." resignals() print "...done!"
("lvl2", "FIXME h3"), ("lvl3", "FIXME h4"), ("lvl4", "FIXME h5"), ("text", "FIXME p, FIXME li") ))) )) u = helpers.get_user_value("start url: ") urls = [u] if '.html' in u: urls.append(u.rsplit('/', 1)[0]) config['index_name'] = tldextract.extract(u).domain if helpers.confirm("Does the start_urls require variables ?"): config['start_urls'] = [{ "url": u + ('/' if u[-1] != '/' else '') + '(?P<static_variable>.*?)/(?P<dynamic_variable>.*?)/', "variables": { "static_variable": [ "value1", "value2" ], "dynamic_variable": { "url": u, "js": "var versions = $('#selector option').map(function (i, elt) { return $(elt).html(); }).toArray(); return JSON.stringify(versions);" } } }] else: config['start_urls'] = urls
# Cleanup DB try: helpers.cleanup_db() except: traceback.print_exc() except Exception, err: print str(err) print "Restore finished. Please find restored files and dumps into: '" + target_folder + "'." if __name__ == '__main__': os.environ.setdefault("DJANGO_SETTINGS_MODULE", "geonode.settings") restore_file = None try: restore_file = sys.argv[1] except: pass if restore_file: if helpers.confirm( prompt= 'WARNING: The restore will overwrite all your GeoNode data and files. Are you sure you want to proceed?', resp=False): restore_full(restore_file) else: print "Please, provide the full path to the ZIP archive to Restore.\n" print "Usage example: python restore.py backup/geonode_backup_test.zip\n"
for config in removed: print(" - " + config) if len(changed) > 0: print("") print("Will be updated :") for config in changed: log = " - " + config + ' (' + ', '.join(changed_attributes[config]) + ')' if len(changed_attributes[config]) != 1 or 'nb_hits' not in changed_attributes[config]: log = '\033[0;35m' + log + '\033[0m' print(log) print("") if len(added) > 0 or len(removed) > 0 or len(changed) > 0: if helpers.confirm() is True: if len(added) > 0: print("") for config in added: key = algolia_helper.add_docsearch_key(config) print(config + ' (' + key + ')') helpers.make_request('/', 'POST', ref_configs[config]) if len(changed) > 0: print("") for config in changed: config_id = str(inverted_actual_configs[config]) key = algolia_helper.get_docsearch_key(config) print(config + ' (' + key + ')')