def remove_models(obj_ids, type=None, integration=False):
    from geonode.utils import DisableDjangoSignals
    with DisableDjangoSignals(skip=integration):
        if not type:
            # pass plain strings so the comparisons below actually match
            # (bytes literals like b'map' never equal the str 'map' on Python 3)
            remove_models(None, type='map')
            remove_models(None, type='layer')
            remove_models(None, type='document')
        if type == 'map':
            try:
                m_ids = obj_ids or [mp.id for mp in Map.objects.all()]
                for id in m_ids:
                    m = Map.objects.get(pk=id)
                    m.delete()
            except Exception:
                pass
        elif type == 'layer':
            try:
                l_ids = obj_ids or [lyr.id for lyr in Layer.objects.all()]
                for id in l_ids:
                    layer = Layer.objects.get(pk=id)
                    layer.delete()
            except Exception:
                pass
        elif type == 'document':
            try:
                d_ids = obj_ids or [doc.id for doc in Document.objects.all()]
                for id in d_ids:
                    d = Document.objects.get(pk=id)
                    d.delete()
            except Exception:
                pass
def setUpClass(cls):
    super().setUpClass()
    if check_ogc_backend(geoserver.BACKEND_PACKAGE):
        # upload shape files
        shp_file = os.path.join(gisdata.VECTOR_DATA, "san_andres_y_providencia_coastline.shp")
        cls.layer_coast_line = file_upload(shp_file, overwrite=True)
        shp_file = os.path.join(gisdata.VECTOR_DATA, "san_andres_y_providencia_highway.shp")
        cls.layer_highway = file_upload(shp_file, overwrite=True)
        # create a map from loaded layers
        cls.map_composition = Map()
        admin_user = get_user_model().objects.get(username="******")
        cls.map_composition.create_from_layer_list(
            admin_user, [cls.layer_coast_line, cls.layer_highway], "composition", "abstract")
        # update MapLayers to correctly show layers' location
        with DisableDjangoSignals():
            for maplayer in cls.map_composition.layers:
                if maplayer.name in [cls.layer_coast_line.alternate, cls.layer_highway.alternate]:
                    maplayer.local = True
                    maplayer.save(force_update=True)
                    maplayer.refresh_from_db()
        cls.map_composition.refresh_from_db()
def setUpClass(cls):
    super().setUpClass()
    cls.user_admin = get_user_model().objects.get(username="******")
    admin, _ = get_user_model().objects.get_or_create(username="******")
    if check_ogc_backend(geoserver.BACKEND_PACKAGE):
        cls.dataset_coast_line = create_single_dataset('san_andres_y_providencia_coastline')
        cls.dataset_highway = create_single_dataset('san_andres_y_providencia_highway')
        # create a map from loaded layers
        cls.map_composition = Map()
        admin_user = get_user_model().objects.get(username="******")
        cls.map_composition.create_from_dataset_list(
            admin_user,
            [cls.dataset_coast_line, cls.dataset_highway],
            "composition",
            "abstract"
        )
        # update MapLayers to correctly show layers' location
        with DisableDjangoSignals():
            for maplayer in cls.map_composition.datasets:
                if maplayer.name in [cls.dataset_coast_line.alternate, cls.dataset_highway.alternate]:
                    maplayer.local = True
                    maplayer.save(force_update=True)
                    maplayer.refresh_from_db()
        cls.map_composition.refresh_from_db()
def create_maplayers():
    from geonode.utils import DisableDjangoSignals
    with DisableDjangoSignals():
        for ml in maplayers:
            MapLayer.objects.create(
                name=ml['name'],
                current_style=ml['current_style'],
                map=Map.objects.get(title=ml['map']),
            )
def create_maplayers():
    from geonode.utils import DisableDjangoSignals
    with DisableDjangoSignals():
        for ml in maplayers:
            MapLayer.objects.create(
                fixed=ml['fixed'],
                group=ml['group'],
                name=ml['name'],
                layer_params=ml['layer_params'],
                map=Map.objects.get(title=ml['map']),
                source_params=ml['source_params'],
                stack_order=ml['stack_order'],
                opacity=ml['opacity'],
                transparent=True,
                visibility=True)
def setUpClass(cls):
    super().setUpClass()
    cls.user_admin = get_user_model().objects.get(username="******")
    admin, _ = get_user_model().objects.get_or_create(username="******")
    if check_ogc_backend(geoserver.BACKEND_PACKAGE):
        cls.dataset_coast_line = create_single_dataset('san_andres_y_providencia_coastline')
        cls.dataset_highway = create_single_dataset('san_andres_y_providencia_highway')
        # create a map from loaded layers
        admin_user = get_user_model().objects.get(username="******")
        cls.map_composition = Map.objects.create(
            title="composition",
            abstract="abstract",
            owner=admin_user,
        )
        cls.map_composition.id
        MapLayer.objects.create(
            map=cls.map_composition,
            extra_params={},
            name="geonode:san_andres_y_providencia_coastline",
            store=None,
            current_style=None,
            ows_url=None,
            local=True)
        MapLayer.objects.create(
            map=cls.map_composition,
            extra_params={},
            name="geonode:san_andres_y_providencia_highway",
            store=None,
            current_style=None,
            ows_url=None,
            local=True)
        # update MapLayers to correctly show layers' location
        with DisableDjangoSignals():
            for maplayer in cls.map_composition.maplayers.iterator():
                if maplayer.name in [cls.dataset_coast_line.alternate, cls.dataset_highway.alternate]:
                    maplayer.local = True
                    maplayer.save(force_update=True)
                    maplayer.refresh_from_db()
        cls.map_composition.refresh_from_db()
def setUpClass(cls):
    # temporarily disconnect signals to load Service fixture
    with DisableDjangoSignals():
        super().setUpClass()
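# For reference, a minimal sketch of what a signal-muting context manager such as
# geonode.utils.DisableDjangoSignals could look like. This is an illustrative
# assumption, not GeoNode's actual implementation: it disconnects a given list of
# (signal, receiver, sender) bindings on entry and reconnects them on exit, and the
# `skip` flag (used above as DisableDjangoSignals(skip=integration)) turns it into a no-op.
class DisableSignalsSketch:
    def __init__(self, disabled_signals=None, skip=False):
        self.skip = skip
        # list of (signal, receiver, sender) tuples to mute while the block runs
        self.disabled_signals = disabled_signals or []

    def __enter__(self):
        if not self.skip:
            for signal, receiver, sender in self.disabled_signals:
                signal.disconnect(receiver=receiver, sender=sender)
        return self

    def __exit__(self, exc_type, exc_value, tb):
        if not self.skip:
            for signal, receiver, sender in self.disabled_signals:
                signal.connect(receiver, sender=sender)

# hypothetical usage: mute a post_save receiver for Map while creating fixtures
# with DisableSignalsSketch([(post_save, some_receiver, Map)]):
#     Map.objects.create(...)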
def create_models(type=None, integration=False):
    map_data, user_data, people_data, layer_data, document_data = create_fixtures()
    anonymous_group, created = Group.objects.get_or_create(name='anonymous')
    with transaction.atomic():
        logger.info("[SetUp] Get or create user admin")
        u = get_user_model().objects.filter(username='******').first()
        if not u:
            try:
                u = get_user_model().objects.create(username='******', is_superuser=True, first_name='admin')
            except Exception:
                raise
        if u:
            u.set_password('admin')
            u.save()
    users = []
    for ud, pd in zip(user_data, cycle(people_data)):
        user_name, password, first_name, last_name = ud
        with transaction.atomic():
            try:
                logger.info(f"[SetUp] Get or create user {user_name}")
                u, created = get_user_model().objects.get_or_create(username=user_name)
                if created:
                    u.set_password(password)
                    u.first_name = first_name
                    u.last_name = last_name
                    u.save()
                u.groups.add(anonymous_group)
                users.append(u)
            except Exception:
                raise
    logger.info(f"[SetUp] Add group {anonymous_group}")
    get_user_model().objects.get(username='******').groups.add(anonymous_group)
    obj_ids = []
    from geonode.utils import DisableDjangoSignals
    with DisableDjangoSignals(skip=integration):
        if not type or ensure_string(type) == 'map':
            for md, user in zip(map_data, cycle(users)):
                title, abstract, kws, (bbox_x0, bbox_x1, bbox_y0, bbox_y1), category = md
                logger.info(f"[SetUp] Add map {title}")
                m = Map(title=title,
                        abstract=abstract,
                        zoom=4,
                        projection='EPSG:4326',
                        center_x=42,
                        center_y=-73,
                        owner=user,
                        bbox_polygon=Polygon.from_bbox((bbox_x0, bbox_y0, bbox_x1, bbox_y1)),
                        srid='EPSG:4326',
                        category=category)
                m.save()
                m.set_default_permissions()
                obj_ids.append(m.id)
                for kw in kws:
                    m.keywords.add(kw)
                    m.save()
        if not type or ensure_string(type) == 'document':
            for dd, user in zip(document_data, cycle(users)):
                title, abstract, kws, (bbox_x0, bbox_x1, bbox_y0, bbox_y1), category = dd
                logger.info(f"[SetUp] Add document {title}")
                m = Document(title=title,
                             abstract=abstract,
                             owner=user,
                             bbox_polygon=Polygon.from_bbox((bbox_x0, bbox_y0, bbox_x1, bbox_y1)),
                             srid='EPSG:4326',
                             category=category,
                             doc_file=f)  # NOTE: `f` is assumed to be the document file fixture defined by the surrounding helper (not shown here)
                m.save()
                m.set_default_permissions()
                obj_ids.append(m.id)
                for kw in kws:
                    m.keywords.add(kw)
                    m.save()
        if not type or ensure_string(type) == 'layer':
            for ld, owner, storeType in zip(layer_data, cycle(users), cycle(('coverageStore', 'dataStore'))):
                title, abstract, name, alternate, (bbox_x0, bbox_x1, bbox_y0, bbox_y1), start, kws, category = ld
                end = start + timedelta(days=365)
                logger.info(f"[SetUp] Add layer {title}")
                layer = Layer(title=title,
                              abstract=abstract,
                              name=name,
                              alternate=alternate,
                              bbox_polygon=Polygon.from_bbox((bbox_x0, bbox_y0, bbox_x1, bbox_y1)),
                              srid='EPSG:4326',
                              uuid=str(uuid4()),
                              owner=owner,
                              temporal_extent_start=start,
                              temporal_extent_end=end,
                              date=start,
                              storeType=storeType,
                              category=category)
                layer.save()
                layer.set_default_permissions()
                obj_ids.append(layer.id)
                for kw in kws:
                    layer.keywords.add(kw)
                    layer.save()
    return obj_ids
def execute_restore(self, **options):
    self.validate_backup_file_options(**options)
    ignore_errors = options.get('ignore_errors')
    force_exec = options.get('force_exec')
    skip_geoserver = options.get('skip_geoserver')
    skip_geoserver_info = options.get('skip_geoserver_info')
    skip_geoserver_security = options.get('skip_geoserver_security')
    backup_file = options.get('backup_file')
    recovery_file = options.get('recovery_file')
    backup_files_dir = options.get('backup_files_dir')
    with_logs = options.get('with_logs')
    notify = options.get('notify')
    soft_reset = options.get('soft_reset')

    # choose backup_file from backup_files_dir, if --backup-files-dir was provided
    if backup_files_dir:
        backup_file = self.parse_backup_files_dir(backup_files_dir)
    else:
        backup_files_dir = os.path.dirname(backup_file)

    # calculate and validate backup archive hash
    backup_md5 = self.validate_backup_file_hash(backup_file)

    # check if the original backup file ini setting are available or not
    backup_ini = self.check_backup_ini_settings(backup_file)
    if backup_ini:
        options['config'] = backup_ini
    config = utils.Config(options)

    # check if the backup has already been restored
    if with_logs:
        if RestoredBackup.objects.filter(archive_md5=backup_md5):
            raise RuntimeError(
                'Backup archive has already been restored. If you want to restore '
                'this backup anyway, run the script without "-l" argument.'
            )

    # get a list of instance administrators' emails
    admin_emails = []
    if notify:
        admins = get_user_model().objects.filter(is_superuser=True)
        for user in admins:
            if user.email:
                admin_emails.append(user.email)

    print("Before proceeding with the Restore, please ensure that:")
    print(" 1. The backend (DB or whatever) is accessible and you have rights")
    print(" 2. The GeoServer is up and running and reachable from this machine")
    message = 'WARNING: The restore will overwrite ALL GeoNode data. You want to proceed?'
    if force_exec or utils.confirm(prompt=message, resp=False):
        # Create Target Folder
        # restore_folder must be located in the directory Geoserver has access to (and it should
        # not be Geoserver data dir)
        # for dockerized project-template GeoNode projects, it should be located in /backup-restore,
        # otherwise default tmp directory is chosen
        temp_dir_path = backup_files_dir if os.path.exists(backup_files_dir) else None
        restore_folder = os.path.join(temp_dir_path, f'tmp{str(uuid.uuid4())[:4]}')
        try:
            os.makedirs(restore_folder)
        except Exception as e:
            raise e

        try:
            # Extract ZIP Archive to Target Folder
            target_folder = extract_archive(backup_file, restore_folder)

            # Write Checks
            media_root = settings.MEDIA_ROOT
            media_folder = os.path.join(target_folder, utils.MEDIA_ROOT)
            static_root = settings.STATIC_ROOT
            static_folder = os.path.join(target_folder, utils.STATIC_ROOT)
            static_folders = settings.STATICFILES_DIRS
            static_files_folders = os.path.join(target_folder, utils.STATICFILES_DIRS)
            template_folders = []
            try:
                template_folders = settings.TEMPLATE_DIRS
            except Exception:
                try:
                    template_folders = settings.TEMPLATES[0]['DIRS']
                except Exception:
                    pass
            template_files_folders = os.path.join(target_folder, utils.TEMPLATE_DIRS)
            locale_folders = settings.LOCALE_PATHS
            locale_files_folders = os.path.join(target_folder, utils.LOCALE_PATHS)

            try:
                print(f"[Sanity Check] Full Write Access to '{restore_folder}' ...")
                chmod_tree(restore_folder)
                print(f"[Sanity Check] Full Write Access to '{media_root}' ...")
                chmod_tree(media_root)
                print(f"[Sanity Check] Full Write Access to '{static_root}' ...")
                chmod_tree(static_root)
                for static_files_folder in static_folders:
                    print(f"[Sanity Check] Full Write Access to '{static_files_folder}' ...")
                    chmod_tree(static_files_folder)
                for template_files_folder in template_folders:
                    print(f"[Sanity Check] Full Write Access to '{template_files_folder}' ...")
                    chmod_tree(template_files_folder)
                for locale_files_folder in locale_folders:
                    print(f"[Sanity Check] Full Write Access to '{locale_files_folder}' ...")
                    chmod_tree(locale_files_folder)
            except Exception as exception:
                if notify:
                    restore_notification.apply_async(
                        (admin_emails, backup_file, backup_md5, str(exception)))
                print("...Sanity Checks on Folder failed. Please make sure that the current user has full WRITE access to the above folders (and sub-folders or files).")  # noqa
                print("Reason:")
                raise

            if not skip_geoserver:
                try:
                    print(f"[Sanity Check] Full Write Access to '{target_folder}' ...")
                    chmod_tree(target_folder)
                    self.restore_geoserver_backup(config, settings, target_folder,
                                                  skip_geoserver_info, skip_geoserver_security,
                                                  ignore_errors, soft_reset)
                    self.prepare_geoserver_gwc_config(config, settings)
                    self.restore_geoserver_raster_data(config, settings, target_folder)
                    self.restore_geoserver_vector_data(config, settings, target_folder, soft_reset)
                    print("Restoring geoserver external resources")
                    self.restore_geoserver_externals(config, settings, target_folder)
                except Exception as exception:
                    if recovery_file:
                        with tempfile.TemporaryDirectory(dir=temp_dir_path) as restore_folder:
                            recovery_folder = extract_archive(recovery_file, restore_folder)
                            self.restore_geoserver_backup(config, settings, recovery_folder,
                                                          skip_geoserver_info, skip_geoserver_security,
                                                          ignore_errors, soft_reset)
                            self.restore_geoserver_raster_data(config, settings, recovery_folder)
                            self.restore_geoserver_vector_data(config, settings, recovery_folder, soft_reset)
                            self.restore_geoserver_externals(config, settings, recovery_folder)
                    if notify:
                        restore_notification.apply_async(
                            (admin_emails, backup_file, backup_md5, str(exception)))
                    raise exception
            else:
                print("Skipping geoserver backup restore")

            # Prepare Target DB
            try:
                call_command('makemigrations', interactive=False)
                call_command('migrate', interactive=False, load_initial_data=False)
                db_name = settings.DATABASES['default']['NAME']
                db_user = settings.DATABASES['default']['USER']
                db_port = settings.DATABASES['default']['PORT']
                db_host = settings.DATABASES['default']['HOST']
                db_passwd = settings.DATABASES['default']['PASSWORD']
                utils.patch_db(db_name, db_user, db_port, db_host, db_passwd, settings.MONITORING_ENABLED)
            except Exception:
                traceback.print_exc()

            try:
                # Deactivate GeoNode Signals
                with DisableDjangoSignals():
                    # Flush DB
                    try:
                        db_name = settings.DATABASES['default']['NAME']
                        db_user = settings.DATABASES['default']['USER']
                        db_port = settings.DATABASES['default']['PORT']
                        db_host = settings.DATABASES['default']['HOST']
                        db_passwd = settings.DATABASES['default']['PASSWORD']
                        utils.flush_db(db_name, db_user, db_port, db_host, db_passwd)
                    except Exception:
                        try:
                            call_command('flush', interactive=False)
                        except Exception:
                            traceback.print_exc()
                            raise

                    # Restore Fixtures
                    abortlater = False
                    for app_name, dump_name in zip(config.app_names, config.dump_names):
                        fixture_file = os.path.join(target_folder, f"{dump_name}.json")
                        print(f"Deserializing '{fixture_file}'")
                        try:
                            call_command('loaddata', fixture_file, app_label=app_name)
                        except IntegrityError:
                            traceback.print_exc()
                            logger.warning(
                                f"WARNING: The fixture '{dump_name}' fails on integrity check and import is aborted after all fixtures have been checked.")  # noqa
                            abortlater = True
                        except Exception as e:
                            traceback.print_exc()
                            logger.warning(f"WARNING: No valid fixture data found for '{dump_name}'.")
                            # helpers.load_fixture(app_name, fixture_file)
                            raise e
                    if abortlater:
                        raise IntegrityError()

                    # Restore Media Root
                    if config.gs_data_dt_filter[0] is None:
                        shutil.rmtree(media_root, ignore_errors=True)
                    if not os.path.exists(media_root):
                        os.makedirs(media_root)
                    copy_tree(media_folder, media_root)
                    chmod_tree(media_root)
                    print(f"Media Files Restored into '{media_root}'.")

                    # Restore Static Root
                    if config.gs_data_dt_filter[0] is None:
                        shutil.rmtree(static_root, ignore_errors=True)
                    if not os.path.exists(static_root):
                        os.makedirs(static_root)
                    copy_tree(static_folder, static_root)
                    chmod_tree(static_root)
                    print(f"Static Root Restored into '{static_root}'.")

                    # Restore Static Folders
                    for static_files_folder in static_folders:
                        # skip restoration of static files of apps not located under LOCAL_ROOT path
                        # (check to prevent overriding files from site-packages
                        # in project-template based GeoNode projects)
                        if getattr(settings, 'LOCAL_ROOT', None) and \
                                not static_files_folder.startswith(settings.LOCAL_ROOT):
                            print(f"Skipping static directory: {static_files_folder}. "
                                  f"It's not located under LOCAL_ROOT path: {settings.LOCAL_ROOT}.")
                            continue
                        if config.gs_data_dt_filter[0] is None:
                            shutil.rmtree(static_files_folder, ignore_errors=True)
                        if not os.path.exists(static_files_folder):
                            os.makedirs(static_files_folder)
                        copy_tree(
                            os.path.join(static_files_folders,
                                         os.path.basename(os.path.normpath(static_files_folder))),
                            static_files_folder)
                        chmod_tree(static_files_folder)
                        print(f"Static Files Restored into '{static_files_folder}'.")

                    # Restore Template Folders
                    for template_files_folder in template_folders:
                        # skip restoration of template files of apps not located under LOCAL_ROOT path
                        # (check to prevent overriding files from site-packages
                        # in project-template based GeoNode projects)
                        if getattr(settings, 'LOCAL_ROOT', None) and \
                                not template_files_folder.startswith(settings.LOCAL_ROOT):
                            print(f"Skipping template directory: {template_files_folder}. "
                                  f"It's not located under LOCAL_ROOT path: {settings.LOCAL_ROOT}.")
                            continue
                        if config.gs_data_dt_filter[0] is None:
                            shutil.rmtree(template_files_folder, ignore_errors=True)
                        if not os.path.exists(template_files_folder):
                            os.makedirs(template_files_folder)
                        copy_tree(
                            os.path.join(template_files_folders,
                                         os.path.basename(os.path.normpath(template_files_folder))),
                            template_files_folder)
                        chmod_tree(template_files_folder)
                        print(f"Template Files Restored into '{template_files_folder}'.")

                    # Restore Locale Folders
                    for locale_files_folder in locale_folders:
                        # skip restoration of locale files of apps not located under LOCAL_ROOT path
                        # (check to prevent overriding files from site-packages
                        # in project-template based GeoNode projects)
                        if getattr(settings, 'LOCAL_ROOT', None) and \
                                not locale_files_folder.startswith(settings.LOCAL_ROOT):
                            print(f"Skipping locale directory: {locale_files_folder}. "
                                  f"It's not located under LOCAL_ROOT path: {settings.LOCAL_ROOT}.")
                            continue
                        if config.gs_data_dt_filter[0] is None:
                            shutil.rmtree(locale_files_folder, ignore_errors=True)
                        if not os.path.exists(locale_files_folder):
                            os.makedirs(locale_files_folder)
                        copy_tree(
                            os.path.join(locale_files_folders,
                                         os.path.basename(os.path.normpath(locale_files_folder))),
                            locale_files_folder)
                        chmod_tree(locale_files_folder)
                        print(f"Locale Files Restored into '{locale_files_folder}'.")

                    call_command('collectstatic', interactive=False)

                    # Cleanup DB
                    try:
                        db_name = settings.DATABASES['default']['NAME']
                        db_user = settings.DATABASES['default']['USER']
                        db_port = settings.DATABASES['default']['PORT']
                        db_host = settings.DATABASES['default']['HOST']
                        db_passwd = settings.DATABASES['default']['PASSWORD']
                        utils.cleanup_db(db_name, db_user, db_port, db_host, db_passwd)
                    except Exception:
                        traceback.print_exc()

                # store backup info
                restored_backup = RestoredBackup(
                    name=backup_file.rsplit('/', 1)[-1],
                    archive_md5=backup_md5,
                    creation_date=datetime.fromtimestamp(os.path.getmtime(backup_file)))
                restored_backup.save()
            except Exception as exception:
                if notify:
                    restore_notification.apply_async(
                        (admin_emails, backup_file, backup_md5, str(exception)))
            finally:
                call_command('makemigrations', interactive=False)
                call_command('migrate', interactive=False, fake=True)
                call_command('sync_geonode_datasets', updatepermissions=True, ignore_errors=True)
                if notify:
                    restore_notification.apply_async((admin_emails, backup_file, backup_md5))
                print("HINT: If you migrated from another site, do not forget to run the command 'migrate_baseurl' to fix Links")  # noqa
                print(" e.g.: DJANGO_SETTINGS_MODULE=my_geonode.settings python manage.py migrate_baseurl "
                      "--source-address=my-host-dev.geonode.org --target-address=my-host-prod.geonode.org")
                print("Restore finished.")
        finally:
            shutil.rmtree(restore_folder)
def create_models(type=None, integration=False):
    from django.contrib.auth.models import Group
    map_data, user_data, people_data, layer_data, document_data = create_fixtures()
    anonymous_group, created = Group.objects.get_or_create(name='anonymous')
    u, _ = get_user_model().objects.get_or_create(username='******', is_superuser=True, first_name='admin')
    u.set_password('admin')
    u.save()
    users = []
    for ud, pd in zip(user_data, cycle(people_data)):
        user_name, password, first_name, last_name = ud
        u, created = get_user_model().objects.get_or_create(username=user_name)
        if created:
            u.set_password(password)
            u.first_name = first_name
            u.last_name = last_name
            u.save()
        u.groups.add(anonymous_group)
        users.append(u)
    get_user_model().objects.get(username='******').groups.add(anonymous_group)
    obj_ids = []
    from geonode.utils import DisableDjangoSignals
    with DisableDjangoSignals(skip=integration):
        if not type or ensure_string(type) == 'map':
            for md, user in zip(map_data, cycle(users)):
                title, abstract, kws, (bbox_x0, bbox_x1, bbox_y0, bbox_y1), category = md
                m = Map(
                    title=title,
                    abstract=abstract,
                    zoom=4,
                    projection='EPSG:4326',
                    center_x=42,
                    center_y=-73,
                    owner=user,
                    bbox_x0=bbox_x0,
                    bbox_x1=bbox_x1,
                    bbox_y0=bbox_y0,
                    bbox_y1=bbox_y1,
                    srid='EPSG:4326',
                    category=category,
                )
                m.save()
                obj_ids.append(m.id)
                for kw in kws:
                    m.keywords.add(kw)
                    m.save()
        if not type or ensure_string(type) == 'document':
            for dd, user in zip(document_data, cycle(users)):
                title, abstract, kws, (bbox_x0, bbox_x1, bbox_y0, bbox_y1), category = dd
                m = Document(title=title,
                             abstract=abstract,
                             owner=user,
                             bbox_x0=bbox_x0,
                             bbox_x1=bbox_x1,
                             bbox_y0=bbox_y0,
                             bbox_y1=bbox_y1,
                             srid='EPSG:4326',
                             category=category,
                             doc_file=f)  # NOTE: `f` is assumed to be the document file fixture defined by the surrounding helper (not shown here)
                m.save()
                obj_ids.append(m.id)
                for kw in kws:
                    m.keywords.add(kw)
                    m.save()
        if not type or ensure_string(type) == 'layer':
            for ld, owner, storeType in zip(layer_data, cycle(users), cycle(('coverageStore', 'dataStore'))):
                title, abstract, name, alternate, (bbox_x0, bbox_x1, bbox_y0, bbox_y1), start, kws, category = ld
                end = start + timedelta(days=365)
                layer = Layer(title=title,
                              abstract=abstract,
                              name=name,
                              alternate=alternate,
                              bbox_x0=bbox_x0,
                              bbox_x1=bbox_x1,
                              bbox_y0=bbox_y0,
                              bbox_y1=bbox_y1,
                              srid='EPSG:4326',
                              uuid=str(uuid4()),
                              owner=owner,
                              temporal_extent_start=start,
                              temporal_extent_end=end,
                              date=start,
                              storeType=storeType,
                              category=category)
                layer.save()
                obj_ids.append(layer.id)
                for kw in kws:
                    layer.keywords.add(kw)
                    layer.save()
    return obj_ids
def execute_backup(self, **options):
    ignore_errors = options.get('ignore_errors')
    config = utils.Config(options)
    force_exec = options.get('force_exec')
    backup_dir = options.get('backup_dir')
    skip_geoserver = options.get('skip_geoserver')

    if not backup_dir or len(backup_dir) == 0:
        raise CommandError("Destination folder '--backup-dir' is mandatory")

    print("Before proceeding with the Backup, please ensure that:")
    print(" 1. The backend (DB or whatever) is accessible and you have rights")
    print(" 2. The GeoServer is up and running and reachable from this machine")
    message = 'You want to proceed?'
    if force_exec or utils.confirm(prompt=message, resp=False):
        # Create Target Folder
        dir_time_suffix = get_dir_time_suffix()
        target_folder = os.path.join(backup_dir, dir_time_suffix)
        if not os.path.exists(target_folder):
            os.makedirs(target_folder)
        # Temporary folder to store backup files. It will be deleted at the end.
        os.chmod(target_folder, 0o777)

        if not skip_geoserver:
            self.create_geoserver_backup(config, settings, target_folder, ignore_errors)
            self.dump_geoserver_raster_data(config, settings, target_folder)
            self.dump_geoserver_vector_data(config, settings, target_folder)
            logger.info("Dumping geoserver external resources")
            self.dump_geoserver_externals(config, settings, target_folder)
        else:
            print("Skipping geoserver backup")

        # Deactivate GeoNode Signals
        with DisableDjangoSignals():
            # Dump Fixtures
            for app_name, dump_name in zip(config.app_names, config.dump_names):
                # prevent dumping BackupRestore application
                if app_name == 'br':
                    continue
                logger.info(f"Dumping '{app_name}' into '{dump_name}.json'.")
                # Point stdout at a file for dumping data to.
                with open(os.path.join(target_folder, f'{dump_name}.json'), 'w') as output:
                    call_command('dumpdata', app_name, format='json', indent=2, stdout=output)

            # Store Media Root
            media_root = settings.MEDIA_ROOT
            media_folder = os.path.join(target_folder, utils.MEDIA_ROOT)
            if not os.path.exists(media_folder):
                os.makedirs(media_folder)
            copy_tree(media_root, media_folder,
                      ignore=utils.ignore_time(config.gs_data_dt_filter[0], config.gs_data_dt_filter[1]))
            print(f"Saved Media Files from '{media_root}'.")

            # Store Static Root
            static_root = settings.STATIC_ROOT
            static_folder = os.path.join(target_folder, utils.STATIC_ROOT)
            if not os.path.exists(static_folder):
                os.makedirs(static_folder)
            copy_tree(static_root, static_folder,
                      ignore=utils.ignore_time(config.gs_data_dt_filter[0], config.gs_data_dt_filter[1]))
            print(f"Saved Static Root from '{static_root}'.")

            # Store Static Folders
            static_folders = settings.STATICFILES_DIRS
            static_files_folders = os.path.join(target_folder, utils.STATICFILES_DIRS)
            if not os.path.exists(static_files_folders):
                os.makedirs(static_files_folders)
            for static_files_folder in static_folders:
                # skip dumping of static files of apps not located under LOCAL_ROOT path
                # (check to prevent saving files from site-packages in project-template based GeoNode projects)
                if getattr(settings, 'LOCAL_ROOT', None) and \
                        not static_files_folder.startswith(settings.LOCAL_ROOT):
                    print(f"Skipping static directory: {static_files_folder}. "
                          f"It's not located under LOCAL_ROOT path: {settings.LOCAL_ROOT}.")
                    continue
                static_folder = os.path.join(
                    static_files_folders,
                    os.path.basename(os.path.normpath(static_files_folder)))
                if not os.path.exists(static_folder):
                    os.makedirs(static_folder)
                copy_tree(static_files_folder, static_folder,
                          ignore=utils.ignore_time(config.gs_data_dt_filter[0], config.gs_data_dt_filter[1]))
                print(f"Saved Static Files from '{static_files_folder}'.")

            # Store Template Folders
            template_folders = []
            try:
                template_folders = settings.TEMPLATE_DIRS
            except Exception:
                try:
                    template_folders = settings.TEMPLATES[0]['DIRS']
                except Exception:
                    pass
            template_files_folders = os.path.join(target_folder, utils.TEMPLATE_DIRS)
            if not os.path.exists(template_files_folders):
                os.makedirs(template_files_folders)
            for template_files_folder in template_folders:
                # skip dumping of template files of apps not located under LOCAL_ROOT path
                # (check to prevent saving files from site-packages in project-template based GeoNode projects)
                if getattr(settings, 'LOCAL_ROOT', None) and \
                        not template_files_folder.startswith(settings.LOCAL_ROOT):
                    print(f"Skipping template directory: {template_files_folder}. "
                          f"It's not located under LOCAL_ROOT path: {settings.LOCAL_ROOT}.")
                    continue
                template_folder = os.path.join(
                    template_files_folders,
                    os.path.basename(os.path.normpath(template_files_folder)))
                if not os.path.exists(template_folder):
                    os.makedirs(template_folder)
                copy_tree(template_files_folder, template_folder,
                          ignore=utils.ignore_time(config.gs_data_dt_filter[0], config.gs_data_dt_filter[1]))
                print(f"Saved Template Files from '{template_files_folder}'.")

            # Store Locale Folders
            locale_folders = settings.LOCALE_PATHS
            locale_files_folders = os.path.join(target_folder, utils.LOCALE_PATHS)
            if not os.path.exists(locale_files_folders):
                os.makedirs(locale_files_folders)
            for locale_files_folder in locale_folders:
                # skip dumping of locale files of apps not located under LOCAL_ROOT path
                # (check to prevent saving files from site-packages in project-template based GeoNode projects)
                if getattr(settings, 'LOCAL_ROOT', None) and \
                        not locale_files_folder.startswith(settings.LOCAL_ROOT):
                    logger.info(f"Skipping locale directory: {locale_files_folder}. "
                                f"It's not located under LOCAL_ROOT path: {settings.LOCAL_ROOT}.")
                    continue
                locale_folder = os.path.join(
                    locale_files_folders,
                    os.path.basename(os.path.normpath(locale_files_folder)))
                if not os.path.exists(locale_folder):
                    os.makedirs(locale_folder)
                copy_tree(locale_files_folder, locale_folder,
                          ignore=utils.ignore_time(config.gs_data_dt_filter[0], config.gs_data_dt_filter[1]))
                logger.info(f"Saved Locale Files from '{locale_files_folder}'.")

        # Create Final ZIP Archive
        backup_archive = os.path.join(backup_dir, f'{dir_time_suffix}.zip')
        zip_dir(target_folder, backup_archive)

        # Generate a md5 hash of a backup archive and save it
        backup_md5_file = os.path.join(backup_dir, f'{dir_time_suffix}.md5')
        zip_archive_md5 = utils.md5_file_hash(backup_archive)
        with open(backup_md5_file, 'w') as md5_file:
            md5_file.write(zip_archive_md5)

        # Generate the ini file with the current settings used by the backup command
        backup_ini_file = os.path.join(backup_dir, f'{dir_time_suffix}.ini')
        with open(backup_ini_file, 'w') as configfile:
            config.config_parser.write(configfile)

        # Clean-up Temp Folder
        try:
            shutil.rmtree(target_folder)
        except Exception:
            logger.warning(f"WARNING: Could not be possible to delete the temp folder: '{target_folder}'")

        print("Backup Finished. Archive generated.")
        return str(os.path.join(backup_dir, f'{dir_time_suffix}.zip'))
def create_models(type=None, integration=False):
    users = []
    obj_ids = []
    with transaction.atomic():
        map_data, user_data, people_data, dataset_data, document_data = create_fixtures()
        registeredmembers_group, created = Group.objects.get_or_create(name='registered-members')
        anonymous_group, created = Group.objects.get_or_create(name='anonymous')
        cont_group, created = Group.objects.get_or_create(name='contributors')
        perm = Permission.objects.get(codename='add_resourcebase')
        cont_group.permissions.add(perm)
        logger.debug("[SetUp] Get or create user admin")
        u, created = get_user_model().objects.get_or_create(username='******')
        u.set_password('admin')
        u.is_superuser = True
        u.first_name = 'admin'
        u.save()
        u.groups.add(anonymous_group)
        users.append(u)
        for ud, pd in zip(user_data, cycle(people_data)):
            user_name, password, first_name, last_name = ud
            logger.debug(f"[SetUp] Get or create user {user_name}")
            u, created = get_user_model().objects.get_or_create(username=user_name)
            u.set_password(password)
            u.first_name = first_name
            u.last_name = last_name
            u.save()
            u.groups.add(anonymous_group)
            if not (u.is_superuser or u.is_staff or u.is_anonymous):
                u.groups.add(cont_group)
            users.append(u)
        logger.debug(f"[SetUp] Add group {anonymous_group}")
        get_user_model().objects.get(username='******').groups.add(anonymous_group)
        from geonode.utils import DisableDjangoSignals
        with DisableDjangoSignals(skip=integration):
            if not type or ensure_string(type) == 'map':
                for md, user in zip(map_data, cycle(users)):
                    title, abstract, kws, (bbox_x0, bbox_x1, bbox_y0, bbox_y1), category = md
                    logger.debug(f"[SetUp] Add map {title}")
                    m = Map(
                        title=title,
                        abstract=abstract,
                        zoom=4,
                        projection='EPSG:4326',
                        center_x=42,
                        center_y=-73,
                        owner=user,
                        bbox_polygon=Polygon.from_bbox((bbox_x0, bbox_y0, bbox_x1, bbox_y1)),
                        ll_bbox_polygon=Polygon.from_bbox((bbox_x0, bbox_y0, bbox_x1, bbox_y1)),
                        srid='EPSG:4326',
                        category=category,
                        metadata_only=title == 'map metadata true'
                    )
                    m.save()
                    m.set_default_permissions()
                    m.clear_dirty_state()
                    m.set_processing_state(enumerations.STATE_PROCESSED)
                    obj_ids.append(m.id)
                    for kw in kws:
                        m.keywords.add(kw)
                        m.save()
            if not type or ensure_string(type) == 'document':
                for dd, user in zip(document_data, cycle(users)):
                    title, abstract, kws, (bbox_x0, bbox_x1, bbox_y0, bbox_y1), category = dd
                    logger.debug(f"[SetUp] Add document {title}")
                    m = Document(
                        title=title,
                        abstract=abstract,
                        owner=user,
                        bbox_polygon=Polygon.from_bbox((bbox_x0, bbox_y0, bbox_x1, bbox_y1)),
                        ll_bbox_polygon=Polygon.from_bbox((bbox_x0, bbox_y0, bbox_x1, bbox_y1)),
                        srid='EPSG:4326',
                        files=dfile,  # NOTE: `dfile` is assumed to be the document file fixture defined by the surrounding helper (not shown here)
                        extension="gif",
                        metadata_only=title == 'doc metadata true'
                    )
                    m.save()
                    m.set_default_permissions()
                    m.clear_dirty_state()
                    m.set_processing_state(enumerations.STATE_PROCESSED)
                    obj_ids.append(m.id)
                    for kw in kws:
                        m.keywords.add(kw)
                        m.save()
            if not type or ensure_string(type) == 'dataset':
                for ld, owner, subtype in zip(dataset_data, cycle(users), cycle(('raster', 'vector'))):
                    title, abstract, name, alternate, (bbox_x0, bbox_x1, bbox_y0, bbox_y1), start, kws, category = ld
                    end = start + timedelta(days=365)
                    logger.debug(f"[SetUp] Add dataset {title}")
                    dataset = Dataset(
                        title=title,
                        abstract=abstract,
                        name=name,
                        alternate=alternate,
                        bbox_polygon=Polygon.from_bbox((bbox_x0, bbox_y0, bbox_x1, bbox_y1)),
                        ll_bbox_polygon=Polygon.from_bbox((bbox_x0, bbox_y0, bbox_x1, bbox_y1)),
                        srid='EPSG:4326',
                        uuid=str(uuid4()),
                        owner=owner,
                        temporal_extent_start=start,
                        temporal_extent_end=end,
                        date=start,
                        subtype=subtype,
                        category=category,
                        metadata_only=title == 'dataset metadata true'
                    )
                    dataset.save()
                    dataset.set_default_permissions()
                    dataset.clear_dirty_state()
                    dataset.set_processing_state(enumerations.STATE_PROCESSED)
                    obj_ids.append(dataset.id)
                    for kw in kws:
                        dataset.keywords.add(kw)
                        dataset.save()
    return obj_ids
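# A minimal usage sketch (an assumption, not taken from the snippets above): the
# create_models()/remove_models() fixture helpers are typically paired in a TestCase,
# so objects created with signals disabled get cleaned up the same way. The import
# path geonode.base.populate_test_data is assumed here.
from django.test import TestCase


class ExampleFixtureTest(TestCase):

    @classmethod
    def setUpClass(cls):
        super().setUpClass()
        from geonode.base.populate_test_data import create_models
        # create only the dataset fixtures; keep the returned ids for cleanup
        cls.obj_ids = create_models(type='dataset')

    @classmethod
    def tearDownClass(cls):
        from geonode.base.populate_test_data import remove_models
        remove_models(cls.obj_ids, type='dataset')
        super().tearDownClass()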
def handle(self, **options):
    # ignore_errors = options.get('ignore_errors')
    config = Config(options)
    force_exec = options.get('force_exec')
    backup_file = options.get('backup_file')
    skip_geoserver = options.get('skip_geoserver')
    backup_dir = options.get('backup_dir')

    if not any([backup_file, backup_dir]):
        raise CommandError("Mandatory option (--backup-file|--backup-dir)")
    if all([backup_file, backup_dir]):
        raise CommandError("Exclusive option (--backup-file|--backup-dir)")
    if backup_file and not os.path.isfile(backup_file):
        raise CommandError("Provided '--backup-file' is not a file")
    if backup_dir and not os.path.isdir(backup_dir):
        raise CommandError("Provided '--backup-dir' is not a directory")

    print("Before proceeding with the Restore, please ensure that:")
    print(" 1. The backend (DB or whatever) is accessible and you have rights")
    print(" 2. The GeoServer is up and running and reachable from this machine")
    message = 'WARNING: The restore will overwrite ALL GeoNode data. You want to proceed?'
    if force_exec or helpers.confirm(prompt=message, resp=False):
        target_folder = backup_dir
        if backup_file:
            # Create Target Folder
            restore_folder = os.path.join(tempfile.gettempdir(), 'restore')
            if not os.path.exists(restore_folder):
                os.makedirs(restore_folder)
            # Extract ZIP Archive to Target Folder
            target_folder = extract_archive(backup_file, restore_folder)

        # Write Checks
        media_root = settings.MEDIA_ROOT
        media_folder = os.path.join(target_folder, helpers.MEDIA_ROOT)
        static_root = settings.STATIC_ROOT
        static_folder = os.path.join(target_folder, helpers.STATIC_ROOT)
        static_folders = settings.STATICFILES_DIRS
        static_files_folders = os.path.join(target_folder, helpers.STATICFILES_DIRS)
        template_folders = []
        try:
            template_folders = settings.TEMPLATE_DIRS
        except Exception:
            try:
                template_folders = settings.TEMPLATES[0]['DIRS']
            except Exception:
                pass
        template_files_folders = os.path.join(target_folder, helpers.TEMPLATE_DIRS)
        locale_folders = settings.LOCALE_PATHS
        locale_files_folders = os.path.join(target_folder, helpers.LOCALE_PATHS)

        try:
            print("[Sanity Check] Full Write Access to '{}' ...".format(media_root))
            chmod_tree(media_root)
            print("[Sanity Check] Full Write Access to '{}' ...".format(static_root))
            chmod_tree(static_root)
            for static_files_folder in static_folders:
                print("[Sanity Check] Full Write Access to '{}' ...".format(static_files_folder))
                chmod_tree(static_files_folder)
            for template_files_folder in template_folders:
                print("[Sanity Check] Full Write Access to '{}' ...".format(template_files_folder))
                chmod_tree(template_files_folder)
            for locale_files_folder in locale_folders:
                print("[Sanity Check] Full Write Access to '{}' ...".format(locale_files_folder))
                chmod_tree(locale_files_folder)
        except Exception:
            print("...Sanity Checks on Folder failed. Please make sure that the current user has full WRITE access to the above folders (and sub-folders or files).")
            print("Reason:")
            raise

        if not skip_geoserver:
            self.restore_geoserver_backup(settings, target_folder)
            self.restore_geoserver_raster_data(config, settings, target_folder)
            self.restore_geoserver_vector_data(config, settings, target_folder)
            print("Restoring geoserver external resources")
            self.restore_geoserver_externals(config, settings, target_folder)
        else:
            print("Skipping geoserver backup restore")

        # Prepare Target DB
        try:
            call_command('migrate', interactive=False)
            db_name = settings.DATABASES['default']['NAME']
            db_user = settings.DATABASES['default']['USER']
            db_port = settings.DATABASES['default']['PORT']
            db_host = settings.DATABASES['default']['HOST']
            db_passwd = settings.DATABASES['default']['PASSWORD']
            helpers.patch_db(db_name, db_user, db_port, db_host, db_passwd, settings.MONITORING_ENABLED)
        except Exception:
            traceback.print_exc()

        try:
            # Deactivate GeoNode Signals
            with DisableDjangoSignals():
                # Flush DB
                try:
                    db_name = settings.DATABASES['default']['NAME']
                    db_user = settings.DATABASES['default']['USER']
                    db_port = settings.DATABASES['default']['PORT']
                    db_host = settings.DATABASES['default']['HOST']
                    db_passwd = settings.DATABASES['default']['PASSWORD']
                    helpers.flush_db(db_name, db_user, db_port, db_host, db_passwd)
                except Exception:
                    try:
                        call_command('flush', interactive=False)
                    except Exception:
                        traceback.print_exc()
                        raise

                # Restore Fixtures
                for app_name, dump_name in zip(config.app_names, config.dump_names):
                    fixture_file = os.path.join(target_folder, dump_name + '.json')
                    print("Deserializing " + fixture_file)
                    try:
                        call_command('loaddata', fixture_file, app_label=app_name)
                    except Exception:
                        traceback.print_exc()
                        print("WARNING: No valid fixture data found for '" + dump_name + "'.")
                        # helpers.load_fixture(app_name, fixture_file)
                        raise

                # Restore Media Root
                try:
                    shutil.rmtree(media_root)
                except Exception:
                    pass
                if not os.path.exists(media_root):
                    os.makedirs(media_root)
                copy_tree(media_folder, media_root)
                chmod_tree(media_root)
                print("Media Files Restored into '" + media_root + "'.")

                # Restore Static Root
                try:
                    shutil.rmtree(static_root)
                except Exception:
                    pass
                if not os.path.exists(static_root):
                    os.makedirs(static_root)
                copy_tree(static_folder, static_root)
                chmod_tree(static_root)
                print("Static Root Restored into '" + static_root + "'.")

                # Restore Static Folders
                for static_files_folder in static_folders:
                    try:
                        shutil.rmtree(static_files_folder)
                    except Exception:
                        pass
                    if not os.path.exists(static_files_folder):
                        os.makedirs(static_files_folder)
                    copy_tree(
                        os.path.join(static_files_folders,
                                     os.path.basename(os.path.normpath(static_files_folder))),
                        static_files_folder)
                    chmod_tree(static_files_folder)
                    print("Static Files Restored into '" + static_files_folder + "'.")

                # Restore Template Folders
                for template_files_folder in template_folders:
                    try:
                        shutil.rmtree(template_files_folder)
                    except Exception:
                        pass
                    if not os.path.exists(template_files_folder):
                        os.makedirs(template_files_folder)
                    copy_tree(
                        os.path.join(template_files_folders,
                                     os.path.basename(os.path.normpath(template_files_folder))),
                        template_files_folder)
                    chmod_tree(template_files_folder)
                    print("Template Files Restored into '" + template_files_folder + "'.")

                # Restore Locale Folders
                for locale_files_folder in locale_folders:
                    try:
                        shutil.rmtree(locale_files_folder)
                    except Exception:
                        pass
                    if not os.path.exists(locale_files_folder):
                        os.makedirs(locale_files_folder)
                    copy_tree(
                        os.path.join(locale_files_folders,
                                     os.path.basename(os.path.normpath(locale_files_folder))),
                        locale_files_folder)
                    chmod_tree(locale_files_folder)
                    print("Locale Files Restored into '" + locale_files_folder + "'.")

                call_command('collectstatic', interactive=False)

                # Cleanup DB
                try:
                    db_name = settings.DATABASES['default']['NAME']
                    db_user = settings.DATABASES['default']['USER']
                    db_port = settings.DATABASES['default']['PORT']
                    db_host = settings.DATABASES['default']['HOST']
                    db_passwd = settings.DATABASES['default']['PASSWORD']
                    helpers.cleanup_db(db_name, db_user, db_port, db_host, db_passwd)
                except Exception:
                    traceback.print_exc()

            return str(target_folder)
        finally:
            call_command('migrate', interactive=False, fake=True)
            print("HINT: If you migrated from another site, do not forget to run the command 'migrate_baseurl' to fix Links")
            print(" e.g.: DJANGO_SETTINGS_MODULE=my_geonode.settings python manage.py migrate_baseurl --source-address=my-host-dev.geonode.org --target-address=my-host-prod.geonode.org")
            print("Restore finished. Please find restored files and dumps into:")
def handle(self, **options):
    # ignore_errors = options.get('ignore_errors')
    config = utils.Config(options)
    force_exec = options.get('force_exec')
    backup_dir = options.get('backup_dir')
    skip_geoserver = options.get('skip_geoserver')

    if not backup_dir or len(backup_dir) == 0:
        raise CommandError("Destination folder '--backup-dir' is mandatory")

    print("Before proceeding with the Backup, please ensure that:")
    print(" 1. The backend (DB or whatever) is accessible and you have rights")
    print(" 2. The GeoServer is up and running and reachable from this machine")
    message = 'You want to proceed?'
    if force_exec or utils.confirm(prompt=message, resp=False):
        # Create Target Folder
        dir_time_suffix = get_dir_time_suffix()
        target_folder = os.path.join(backup_dir, dir_time_suffix)
        if not os.path.exists(target_folder):
            os.makedirs(target_folder)
        # Temporary folder to store backup files. It will be deleted at the end.
        os.chmod(target_folder, 0o777)

        if not skip_geoserver:
            self.create_geoserver_backup(settings, target_folder)
            self.dump_geoserver_raster_data(config, settings, target_folder)
            self.dump_geoserver_vector_data(config, settings, target_folder)
            print("Dumping geoserver external resources")
            self.dump_geoserver_externals(config, settings, target_folder)
        else:
            print("Skipping geoserver backup")

        # Deactivate GeoNode Signals
        with DisableDjangoSignals():
            # Dump Fixtures
            for app_name, dump_name in zip(config.app_names, config.dump_names):
                print("Dumping '" + app_name + "' into '" + dump_name + ".json'.")
                # Point stdout at a file for dumping data to.
                output = open(os.path.join(target_folder, dump_name + '.json'), 'w')
                call_command('dumpdata', app_name, format='json', indent=2, stdout=output)
                output.close()

            # Store Media Root
            media_root = settings.MEDIA_ROOT
            media_folder = os.path.join(target_folder, utils.MEDIA_ROOT)
            if not os.path.exists(media_folder):
                os.makedirs(media_folder)
            copy_tree(media_root, media_folder)
            print("Saved Media Files from '" + media_root + "'.")

            # Store Static Root
            static_root = settings.STATIC_ROOT
            static_folder = os.path.join(target_folder, utils.STATIC_ROOT)
            if not os.path.exists(static_folder):
                os.makedirs(static_folder)
            copy_tree(static_root, static_folder)
            print("Saved Static Root from '" + static_root + "'.")

            # Store Static Folders
            static_folders = settings.STATICFILES_DIRS
            static_files_folders = os.path.join(target_folder, utils.STATICFILES_DIRS)
            if not os.path.exists(static_files_folders):
                os.makedirs(static_files_folders)
            for static_files_folder in static_folders:
                static_folder = os.path.join(
                    static_files_folders,
                    os.path.basename(os.path.normpath(static_files_folder)))
                if not os.path.exists(static_folder):
                    os.makedirs(static_folder)
                copy_tree(static_files_folder, static_folder)
                print("Saved Static Files from '" + static_files_folder + "'.")

            # Store Template Folders
            template_folders = []
            try:
                template_folders = settings.TEMPLATE_DIRS
            except Exception:
                try:
                    template_folders = settings.TEMPLATES[0]['DIRS']
                except Exception:
                    pass
            template_files_folders = os.path.join(target_folder, utils.TEMPLATE_DIRS)
            if not os.path.exists(template_files_folders):
                os.makedirs(template_files_folders)
            for template_files_folder in template_folders:
                template_folder = os.path.join(
                    template_files_folders,
                    os.path.basename(os.path.normpath(template_files_folder)))
                if not os.path.exists(template_folder):
                    os.makedirs(template_folder)
                copy_tree(template_files_folder, template_folder)
                print("Saved Template Files from '" + template_files_folder + "'.")

            # Store Locale Folders
            locale_folders = settings.LOCALE_PATHS
            locale_files_folders = os.path.join(target_folder, utils.LOCALE_PATHS)
            if not os.path.exists(locale_files_folders):
                os.makedirs(locale_files_folders)
            for locale_files_folder in locale_folders:
                locale_folder = os.path.join(
                    locale_files_folders,
                    os.path.basename(os.path.normpath(locale_files_folder)))
                if not os.path.exists(locale_folder):
                    os.makedirs(locale_folder)
                copy_tree(locale_files_folder, locale_folder)
                print("Saved Locale Files from '" + locale_files_folder + "'.")

        # Create Final ZIP Archive
        backup_archive = os.path.join(backup_dir, dir_time_suffix + '.zip')
        zip_dir(target_folder, backup_archive)

        # Generate a md5 hash of a backup archive and save it
        backup_md5_file = os.path.join(backup_dir, dir_time_suffix + '.md5')
        zip_archive_md5 = utils.md5_file_hash(backup_archive)
        with open(backup_md5_file, 'w') as md5_file:
            md5_file.write(zip_archive_md5)

        # Clean-up Temp Folder
        try:
            shutil.rmtree(target_folder)
        except Exception:
            print("WARNING: Could not be possible to delete the temp folder: '" + str(target_folder) + "'")

        print("Backup Finished. Archive generated.")
        return str(os.path.join(backup_dir, dir_time_suffix + '.zip'))