def download(request, resourceid, sender=Layer):
    """Bundle a Layer's files, SLD styles and metadata into a zip attachment.

    Resolves the resource with the ``download_resourcebase`` permission,
    copies the upload-session files plus any associated SLDs and metadata
    links into a temporary folder, zips it and streams it back.

    Returns a rendered 404 page when no files are available and a 403 page
    when the resource is not a downloadable Layer.
    """
    _not_authorized = _("You are not authorized to download this resource.")
    _not_permitted = _("You are not permitted to save or edit this resource.")
    _no_files_found = _(
        "No files have been found for this resource. Please, contact a system administrator."
    )
    instance = resolve_object(request,
                              sender,
                              {'pk': resourceid},
                              permission='base.download_resourcebase',
                              permission_msg=_not_permitted)
    if isinstance(instance, Layer):
        # Create Target Folder
        dirpath = tempfile.mkdtemp()
        dir_time_suffix = get_dir_time_suffix()
        target_folder = os.path.join(dirpath, dir_time_suffix)
        if not os.path.exists(target_folder):
            os.makedirs(target_folder)

        def _no_files_response():
            # Single place for the "no files found" 404 page (the original
            # repeated this literal response three times).
            return HttpResponse(loader.render_to_string(
                '401.html',
                context={
                    'error_title': _("No files found."),
                    'error_message': _no_files_found
                },
                request=request), status=404)

        layer_files = []
        try:
            upload_session = instance.get_upload_session()
            if upload_session:
                # Plain list() instead of the original enumerate-comprehension
                # that discarded its index.
                layer_files = list(
                    LayerFile.objects.filter(upload_session=upload_session))
            if layer_files:
                # Copy all Layer related files into a temporary folder
                for lyr in layer_files:
                    if storage.exists(str(lyr.file)):
                        geonode_layer_path = storage.path(str(lyr.file))
                        shutil.copy2(geonode_layer_path, target_folder)
                    else:
                        return _no_files_response()
            # Check we can access the original files
            if not layer_files:
                return _no_files_response()
            # Let's check for associated SLD files (if any)
            try:
                for s in instance.styles.all():
                    sld_file_path = os.path.join(
                        target_folder, "".join([s.name, ".sld"]))
                    with open(sld_file_path, "w") as sld_file:
                        sld_file.write(s.sld_body.strip())
                    try:
                        # Collecting headers and cookies
                        headers, access_token = get_headers(
                            request, urlsplit(s.sld_url), s.sld_url)
                        response, content = http_client.get(
                            s.sld_url,
                            headers=headers,
                            timeout=TIMEOUT,
                            user=request.user)
                        sld_remote_content = response.text
                        sld_file_path = os.path.join(
                            target_folder, "".join([s.name, "_remote.sld"]))
                        with open(sld_file_path, "w") as sld_file:
                            sld_file.write(sld_remote_content.strip())
                    except Exception:
                        traceback.print_exc()
                        tb = traceback.format_exc()
                        logger.debug(tb)
            except Exception:
                traceback.print_exc()
                tb = traceback.format_exc()
                logger.debug(tb)
            # Let's dump metadata
            target_md_folder = os.path.join(target_folder, ".metadata")
            if not os.path.exists(target_md_folder):
                os.makedirs(target_md_folder)
            try:
                dump_file = os.path.join(
                    target_md_folder, "".join([instance.name, ".dump"]))
                with open(dump_file, 'w') as outfile:
                    serialized_obj = json_serializer_producer(
                        model_to_dict(instance))
                    json.dump(serialized_obj, outfile)
                links = Link.objects.filter(resource=instance.resourcebase_ptr)
                for link in links:
                    link_name = slugify(link.name)
                    link_file = os.path.join(
                        target_md_folder,
                        "".join([link_name, ".%s" % link.extension]))
                    # BUGFIX: the original tested `link.link_type in ('data')`,
                    # which is a *substring* test against the string 'data'.
                    if link.link_type == 'data':
                        # Skipping 'data' download links
                        continue
                    elif link.link_type in ('metadata', 'image'):
                        # Dumping metadata files and images.
                        # BUGFIX: the original opened the file without binding
                        # it (`with open(...):`) and then passed the *path
                        # string* to shutil.copyfileobj, which cannot work.
                        with open(link_file, "wb") as link_fd:
                            try:
                                # Collecting headers and cookies
                                headers, access_token = get_headers(
                                    request, urlsplit(link.url), link.url)
                                response, raw = http_client.get(
                                    link.url,
                                    stream=True,
                                    headers=headers,
                                    timeout=TIMEOUT,
                                    user=request.user)
                                raw.decode_content = True
                                shutil.copyfileobj(raw, link_fd)
                            except Exception:
                                traceback.print_exc()
                                tb = traceback.format_exc()
                                logger.debug(tb)
                    elif link.link_type.startswith('OGC'):
                        # Dumping OGC/OWS links
                        with open(link_file, "w") as link_fd:
                            link_fd.write(link.url.strip())
            except Exception:
                traceback.print_exc()
                tb = traceback.format_exc()
                logger.debug(tb)
            # ZIP everything and return
            target_file_name = "".join([instance.name, ".zip"])
            target_file = os.path.join(dirpath, target_file_name)
            zip_dir(target_folder, target_file)
            register_event(request, 'download', instance)
            # NOTE(review): the temp dir under `dirpath` is never removed here;
            # cleaning it before the response is streamed would be unsafe, but
            # a periodic cleanup job should reap these folders.
            response = HttpResponse(
                content=open(target_file, mode='rb'),
                status=200,
                content_type="application/zip")
            response['Content-Disposition'] = \
                'attachment; filename="%s"' % target_file_name
            return response
        except NotImplementedError:
            traceback.print_exc()
            tb = traceback.format_exc()
            logger.debug(tb)
            return _no_files_response()
    return HttpResponse(loader.render_to_string(
        '401.html',
        context={
            'error_title': _("Not Authorized"),
            'error_message': _not_authorized
        },
        request=request), status=403)
def download(request, resourceid, sender=Layer):
    """Bundle a Layer's files, SLD styles and metadata into a zip attachment.

    Legacy (requests-based) variant: copies the upload-session files plus
    SLDs and metadata links into a temp folder, zips it, and returns it.
    Responds with JSON errors: 404 ``file_not_found`` / 403
    ``unauthorized_request``.
    """
    instance = resolve_object(
        request,
        sender,
        {'pk': resourceid},
        permission='base.download_resourcebase',
        permission_msg=_(
            "You are not permitted to save or edit this resource."))
    if isinstance(instance, Layer):
        try:
            upload_session = instance.get_upload_session()
            layer_files = list(
                LayerFile.objects.filter(upload_session=upload_session))
            # Create Target Folder
            dirpath = tempfile.mkdtemp()
            dir_time_suffix = get_dir_time_suffix()
            target_folder = os.path.join(dirpath, dir_time_suffix)
            if not os.path.exists(target_folder):
                os.makedirs(target_folder)
            # Copy all Layer related files into a temporary folder
            for lyr in layer_files:
                if storage.exists(lyr.file):
                    geonode_layer_path = storage.path(lyr.file)
                    shutil.copy2(geonode_layer_path, target_folder)
            # Let's check for associated SLD files (if any)
            try:
                for s in instance.styles.all():
                    sld_file_path = os.path.join(
                        target_folder, "".join([s.name, ".sld"]))
                    # BUGFIX: context managers so handles are always closed
                    # (the original also opened the file again in "r" mode
                    # and abandoned the handle without closing it).
                    with open(sld_file_path, "w") as sld_file:
                        sld_file.write(s.sld_body.strip())
                    try:
                        response = requests.get(s.sld_url, timeout=TIMEOUT)
                        sld_remote_content = response.text
                        sld_file_path = os.path.join(
                            target_folder, "".join([s.name, "_remote.sld"]))
                        with open(sld_file_path, "w") as sld_file:
                            sld_file.write(sld_remote_content.strip())
                    # BUGFIX: BaseException also swallowed KeyboardInterrupt
                    # and SystemExit; Exception is the right net here.
                    except Exception:
                        traceback.print_exc()
                        tb = traceback.format_exc()
                        logger.debug(tb)
            except Exception:
                traceback.print_exc()
                tb = traceback.format_exc()
                logger.debug(tb)
            # Let's dump metadata
            target_md_folder = os.path.join(target_folder, ".metadata")
            if not os.path.exists(target_md_folder):
                os.makedirs(target_md_folder)
            try:
                links = Link.objects.filter(resource=instance.resourcebase_ptr)
                for link in links:
                    link_name = custom_slugify(link.name)
                    link_file = os.path.join(
                        target_md_folder,
                        "".join([link_name, ".%s" % link.extension]))
                    # BUGFIX: `in ('data')` was a substring test on 'data'.
                    if link.link_type == 'data':
                        # Skipping 'data' download links
                        continue
                    elif link.link_type in ('metadata', 'image'):
                        # Dumping metadata files and images
                        with open(link_file, "wb") as link_fd:
                            try:
                                response = requests.get(link.url,
                                                        stream=True,
                                                        timeout=TIMEOUT)
                                response.raw.decode_content = True
                                shutil.copyfileobj(response.raw, link_fd)
                            except Exception:
                                traceback.print_exc()
                                tb = traceback.format_exc()
                                logger.debug(tb)
                    elif link.link_type.startswith('OGC'):
                        # Dumping OGC/OWS links
                        with open(link_file, "w") as link_fd:
                            link_fd.write(link.url.strip())
            except Exception:
                traceback.print_exc()
                tb = traceback.format_exc()
                logger.debug(tb)
            # ZIP everything and return
            target_file_name = "".join([instance.name, ".zip"])
            target_file = os.path.join(dirpath, target_file_name)
            zip_dir(target_folder, target_file)
            # BUGFIX: open the archive in binary mode; text mode corrupts the
            # zip payload (and fails with a decode error on Python 3).
            response = HttpResponse(content=open(target_file, 'rb'),
                                    status=200,
                                    content_type="application/zip")
            response['Content-Disposition'] = \
                'attachment; filename="%s"' % target_file_name
            return response
        except NotImplementedError:
            traceback.print_exc()
            tb = traceback.format_exc()
            logger.debug(tb)
            return HttpResponse(json.dumps({'error': 'file_not_found'}),
                                status=404,
                                content_type="application/json")
    return HttpResponse(json.dumps({'error': 'unauthorized_request'}),
                        status=403,
                        content_type="application/json")
def handle(self, **options): # ignore_errors = options.get('ignore_errors') config = Config(options) force_exec = options.get('force_exec') backup_dir = options.get('backup_dir') skip_geoserver = options.get('skip_geoserver') if not backup_dir or len(backup_dir) == 0: raise CommandError("Destination folder '--backup-dir' is mandatory") print "Before proceeding with the Backup, please ensure that:" print " 1. The backend (DB or whatever) is accessible and you have rights" print " 2. The GeoServer is up and running and reachable from this machine" message = 'You want to proceed?' if force_exec or helpers.confirm(prompt=message, resp=False): # Create Target Folder dir_time_suffix = get_dir_time_suffix() target_folder = os.path.join(backup_dir, dir_time_suffix) if not os.path.exists(target_folder): os.makedirs(target_folder) # Temporary folder to store backup files. It will be deleted at the end. os.chmod(target_folder, 0777) if not skip_geoserver: self.create_geoserver_backup(settings, target_folder) self.dump_geoserver_raster_data(config, settings, target_folder) self.dump_geoserver_vector_data(config, settings, target_folder) print("Duming geoserver external resources") self.dump_geoserver_externals(config, settings, target_folder) else: print("Skipping geoserver backup") try: # Deactivate GeoNode Signals print "Deactivating GeoNode Signals..." designals() print "...done!" # Dump Fixtures for app_name, dump_name in zip(config.app_names, config.dump_names): print "Dumping '"+app_name+"' into '"+dump_name+".json'." # Point stdout at a file for dumping data to. 
output = open(os.path.join(target_folder, dump_name+'.json'), 'w') call_command('dumpdata', app_name, format='json', indent=2, natural=True, stdout=output) output.close() # Store Media Root media_root = settings.MEDIA_ROOT media_folder = os.path.join(target_folder, helpers.MEDIA_ROOT) if not os.path.exists(media_folder): os.makedirs(media_folder) copy_tree(media_root, media_folder) print "Saved Media Files from '"+media_root+"'." # Store Static Root static_root = settings.STATIC_ROOT static_folder = os.path.join(target_folder, helpers.STATIC_ROOT) if not os.path.exists(static_folder): os.makedirs(static_folder) copy_tree(static_root, static_folder) print "Saved Static Root from '"+static_root+"'." # Store Static Folders static_folders = settings.STATICFILES_DIRS static_files_folders = os.path.join(target_folder, helpers.STATICFILES_DIRS) if not os.path.exists(static_files_folders): os.makedirs(static_files_folders) for static_files_folder in static_folders: static_folder = os.path.join(static_files_folders, os.path.basename(os.path.normpath(static_files_folder))) if not os.path.exists(static_folder): os.makedirs(static_folder) copy_tree(static_files_folder, static_folder) print "Saved Static Files from '"+static_files_folder+"'." # Store Template Folders template_folders = [] try: template_folders = settings.TEMPLATE_DIRS except: try: template_folders = settings.TEMPLATES[0]['DIRS'] except: pass template_files_folders = os.path.join(target_folder, helpers.TEMPLATE_DIRS) if not os.path.exists(template_files_folders): os.makedirs(template_files_folders) for template_files_folder in template_folders: template_folder = os.path.join(template_files_folders, os.path.basename(os.path.normpath(template_files_folder))) if not os.path.exists(template_folder): os.makedirs(template_folder) copy_tree(template_files_folder, template_folder) print "Saved Template Files from '"+template_files_folder+"'." 
# Store Locale Folders locale_folders = settings.LOCALE_PATHS locale_files_folders = os.path.join(target_folder, helpers.LOCALE_PATHS) if not os.path.exists(locale_files_folders): os.makedirs(locale_files_folders) for locale_files_folder in locale_folders: locale_folder = os.path.join(locale_files_folders, os.path.basename(os.path.normpath(locale_files_folder))) if not os.path.exists(locale_folder): os.makedirs(locale_folder) copy_tree(locale_files_folder, locale_folder) print "Saved Locale Files from '"+locale_files_folder+"'." # Create Final ZIP Archive zip_dir(target_folder, os.path.join(backup_dir, dir_time_suffix+'.zip')) # Clean-up Temp Folder try: shutil.rmtree(target_folder) except: print "WARNING: Could not be possible to delete the temp folder: '" + str(target_folder) + "'" print "Backup Finished. Archive generated." return str(os.path.join(backup_dir, dir_time_suffix+'.zip')) finally: # Reactivate GeoNode Signals print "Reactivating GeoNode Signals..." resignals() print "...done!"
def backup_full(): """Full Backup of GeoNode DB""" try: # Create Target Folder dir_time_suffix = get_dir_time_suffix() target_folder = os.path.join('backup', dir_time_suffix) if not os.path.exists(target_folder): os.makedirs(target_folder) # Dump Fixtures for app_name, dump_name in zip(helpers.app_names, helpers.dump_names): print "Dumping '" + app_name + "' into '" + dump_name + ".json'." output = open(os.path.join(target_folder, dump_name + '.json'), 'w') # Point stdout at a file for dumping data to. call_command('dumpdata', app_name, format='json', indent=2, natural=True, stdout=output) output.close() # Store Media Root media_root = settings.MEDIA_ROOT media_folder = os.path.join(target_folder, helpers.MEDIA_ROOT) if not os.path.exists(media_folder): os.makedirs(media_folder) copy_tree(media_root, media_folder) print "Saved Media Files from '" + media_root + "'." # Store Static Root static_root = settings.STATIC_ROOT static_folder = os.path.join(target_folder, helpers.STATIC_ROOT) if not os.path.exists(static_folder): os.makedirs(static_folder) copy_tree(static_root, static_folder) print "Saved Static Root from '" + static_root + "'." # Store Static Folders static_folders = settings.STATICFILES_DIRS static_files_folders = os.path.join(target_folder, helpers.STATICFILES_DIRS) if not os.path.exists(static_files_folders): os.makedirs(static_files_folders) for static_files_folder in static_folders: static_folder = os.path.join( static_files_folders, os.path.basename(os.path.normpath(static_files_folder))) if not os.path.exists(static_folder): os.makedirs(static_folder) copy_tree(static_files_folder, static_folder) print "Saved Static Files from '" + static_files_folder + "'." 
# Store Template Folders template_folders = settings.TEMPLATE_DIRS template_files_folders = os.path.join(target_folder, helpers.TEMPLATE_DIRS) if not os.path.exists(template_files_folders): os.makedirs(template_files_folders) for template_files_folder in template_folders: template_folder = os.path.join( template_files_folders, os.path.basename(os.path.normpath(template_files_folder))) if not os.path.exists(template_folder): os.makedirs(template_folder) copy_tree(template_files_folder, template_folder) print "Saved Template Files from '" + template_files_folder + "'." # Store Locale Folders locale_folders = settings.LOCALE_PATHS locale_files_folders = os.path.join(target_folder, helpers.LOCALE_PATHS) if not os.path.exists(locale_files_folders): os.makedirs(locale_files_folders) for locale_files_folder in locale_folders: locale_folder = os.path.join( locale_files_folders, os.path.basename(os.path.normpath(locale_files_folder))) if not os.path.exists(locale_folder): os.makedirs(locale_folder) copy_tree(locale_files_folder, locale_folder) print "Saved Locale Files from '" + locale_files_folder + "'." # Create Final ZIP Archive zip_dir(target_folder, os.path.join('backup', dir_time_suffix + '.zip')) # Cleanup Temp Folder shutil.rmtree(target_folder) print "Backup Finished. Archive generated '" + os.path.join( 'backup', dir_time_suffix + '.zip') + "'." except Exception, err: pass
def execute_backup(self, **options):
    """Create a full GeoNode backup archive under --backup-dir.

    Dumps GeoServer data (unless --skip-geoserver), Django fixtures, and the
    media/static/template/locale trees into a timestamped temp folder, zips
    it, writes the archive's md5 and the config .ini alongside, and returns
    the archive path.
    """
    ignore_errors = options.get('ignore_errors')
    config = utils.Config(options)
    force_exec = options.get('force_exec')
    backup_dir = options.get('backup_dir')
    skip_geoserver = options.get('skip_geoserver')

    if not backup_dir or len(backup_dir) == 0:
        raise CommandError(
            "Destination folder '--backup-dir' is mandatory")

    print("Before proceeding with the Backup, please ensure that:")
    print(" 1. The backend (DB or whatever) is accessible and you have rights")
    print(" 2. The GeoServer is up and running and reachable from this machine")
    message = 'You want to proceed?'
    if force_exec or utils.confirm(prompt=message, resp=False):
        # Create Target Folder
        # Temporary folder to store backup files. It will be deleted at the end.
        dir_time_suffix = get_dir_time_suffix()
        target_folder = os.path.join(backup_dir, dir_time_suffix)
        os.makedirs(target_folder, exist_ok=True)
        os.chmod(target_folder, 0o777)

        # Date-filter used by every copy_tree call below; build it once.
        dt_ignore = utils.ignore_time(config.gs_data_dt_filter[0],
                                      config.gs_data_dt_filter[1])

        def _dump_folders(source_folders, dest_root, kind, emit):
            # Copy each configured folder into dest_root, skipping folders not
            # under LOCAL_ROOT (prevents saving files from site-packages in
            # project-template based GeoNode projects). `emit` is the progress
            # reporter (print or logger.info) to match the original output.
            os.makedirs(dest_root, exist_ok=True)
            for src in source_folders:
                if getattr(settings, 'LOCAL_ROOT', None) and \
                        not src.startswith(settings.LOCAL_ROOT):
                    emit(
                        f"Skipping {kind} directory: {src}. "
                        f"It's not located under LOCAL_ROOT path: {settings.LOCAL_ROOT}.")
                    continue
                dest = os.path.join(
                    dest_root, os.path.basename(os.path.normpath(src)))
                os.makedirs(dest, exist_ok=True)
                copy_tree(src, dest, ignore=dt_ignore)
                emit(f"Saved {kind.capitalize()} Files from '{src}'.")

        if not skip_geoserver:
            self.create_geoserver_backup(config, settings,
                                         target_folder, ignore_errors)
            self.dump_geoserver_raster_data(config, settings, target_folder)
            self.dump_geoserver_vector_data(config, settings, target_folder)
            logger.info("Dumping geoserver external resources")
            self.dump_geoserver_externals(config, settings, target_folder)
        else:
            print("Skipping geoserver backup")

        # Deactivate GeoNode Signals while dumping
        with DisableDjangoSignals():
            # Dump Fixtures
            for app_name, dump_name in zip(config.app_names,
                                           config.dump_names):
                # prevent dumping BackupRestore application
                if app_name == 'br':
                    continue
                logger.info(f"Dumping '{app_name}' into '{dump_name}.json'.")
                # Point stdout at a file for dumping data to.
                with open(os.path.join(target_folder, f'{dump_name}.json'),
                          'w') as output:
                    call_command('dumpdata', app_name, format='json',
                                 indent=2, stdout=output)

            # Store Media Root
            media_root = settings.MEDIA_ROOT
            media_folder = os.path.join(target_folder, utils.MEDIA_ROOT)
            os.makedirs(media_folder, exist_ok=True)
            copy_tree(media_root, media_folder, ignore=dt_ignore)
            print(f"Saved Media Files from '{media_root}'.")

            # Store Static Root
            static_root = settings.STATIC_ROOT
            static_folder = os.path.join(target_folder, utils.STATIC_ROOT)
            os.makedirs(static_folder, exist_ok=True)
            copy_tree(static_root, static_folder, ignore=dt_ignore)
            print(f"Saved Static Root from '{static_root}'.")

            # Store Static Folders
            _dump_folders(settings.STATICFILES_DIRS,
                          os.path.join(target_folder, utils.STATICFILES_DIRS),
                          'static', print)

            # Store Template Folders
            template_folders = []
            try:
                template_folders = settings.TEMPLATE_DIRS
            except Exception:
                try:
                    template_folders = settings.TEMPLATES[0]['DIRS']
                except Exception:
                    pass
            _dump_folders(template_folders,
                          os.path.join(target_folder, utils.TEMPLATE_DIRS),
                          'template', print)

            # Store Locale Folders (reported via logger, as in the original)
            _dump_folders(settings.LOCALE_PATHS,
                          os.path.join(target_folder, utils.LOCALE_PATHS),
                          'locale', logger.info)

            # Create Final ZIP Archive
            backup_archive = os.path.join(backup_dir, f'{dir_time_suffix}.zip')
            zip_dir(target_folder, backup_archive)

            # Generate a md5 hash of a backup archive and save it
            backup_md5_file = os.path.join(backup_dir,
                                           f'{dir_time_suffix}.md5')
            zip_archive_md5 = utils.md5_file_hash(backup_archive)
            with open(backup_md5_file, 'w') as md5_file:
                md5_file.write(zip_archive_md5)

            # Generate the ini file with the current settings used by the backup command
            backup_ini_file = os.path.join(backup_dir,
                                           f'{dir_time_suffix}.ini')
            with open(backup_ini_file, 'w') as configfile:
                config.config_parser.write(configfile)

            # Clean-up Temp Folder (best effort)
            try:
                shutil.rmtree(target_folder)
            except Exception:
                logger.warning(
                    f"WARNING: Could not be possible to delete the temp folder: '{target_folder}'")

            print("Backup Finished. Archive generated.")
            return str(os.path.join(backup_dir, f'{dir_time_suffix}.zip'))
def backup_full(): """Full Backup of GeoNode DB""" try: # Create Target Folder dir_time_suffix = get_dir_time_suffix() target_folder = os.path.join('backup', dir_time_suffix) if not os.path.exists(target_folder): os.makedirs(target_folder) # Dump Fixtures for app_name, dump_name in zip(helpers.app_names, helpers.dump_names): print "Dumping '"+app_name+"' into '"+dump_name+".json'." output = open(os.path.join(target_folder,dump_name+'.json'),'w') # Point stdout at a file for dumping data to. call_command('dumpdata',app_name,format='json',indent=2,natural=True,stdout=output) output.close() # Store Media Root media_root = settings.MEDIA_ROOT media_folder = os.path.join(target_folder, helpers.MEDIA_ROOT) if not os.path.exists(media_folder): os.makedirs(media_folder) copy_tree(media_root, media_folder) print "Saved Media Files from '"+media_root+"'." # Store Static Root static_root = settings.STATIC_ROOT static_folder = os.path.join(target_folder, helpers.STATIC_ROOT) if not os.path.exists(static_folder): os.makedirs(static_folder) copy_tree(static_root, static_folder) print "Saved Static Root from '"+static_root+"'." # Store Static Folders static_folders = settings.STATICFILES_DIRS static_files_folders = os.path.join(target_folder, helpers.STATICFILES_DIRS) if not os.path.exists(static_files_folders): os.makedirs(static_files_folders) for static_files_folder in static_folders: static_folder = os.path.join(static_files_folders, os.path.basename(os.path.normpath(static_files_folder))) if not os.path.exists(static_folder): os.makedirs(static_folder) copy_tree(static_files_folder, static_folder) print "Saved Static Files from '"+static_files_folder+"'." 
# Store Template Folders template_folders = [] try: template_folders = settings.TEMPLATE_DIRS except: try: template_folders = settings.TEMPLATES[0]['DIRS'] except: pass template_files_folders = os.path.join(target_folder, helpers.TEMPLATE_DIRS) if not os.path.exists(template_files_folders): os.makedirs(template_files_folders) for template_files_folder in template_folders: template_folder = os.path.join(template_files_folders, os.path.basename(os.path.normpath(template_files_folder))) if not os.path.exists(template_folder): os.makedirs(template_folder) copy_tree(template_files_folder, template_folder) print "Saved Template Files from '"+template_files_folder+"'." # Store Locale Folders locale_folders = settings.LOCALE_PATHS locale_files_folders = os.path.join(target_folder, helpers.LOCALE_PATHS) if not os.path.exists(locale_files_folders): os.makedirs(locale_files_folders) for locale_files_folder in locale_folders: locale_folder = os.path.join(locale_files_folders, os.path.basename(os.path.normpath(locale_files_folder))) if not os.path.exists(locale_folder): os.makedirs(locale_folder) copy_tree(locale_files_folder, locale_folder) print "Saved Locale Files from '"+locale_files_folder+"'." # Create Final ZIP Archive zip_dir(target_folder, os.path.join('backup', dir_time_suffix+'.zip')) # Cleanup Temp Folder shutil.rmtree(target_folder) print "Backup Finished. Archive generated '"+os.path.join('backup', dir_time_suffix+'.zip')+"'." except Exception, err: pass
def download(request, resourceid, sender=Dataset):
    """Zip a Dataset's files and return them as an attachment.

    Resolves the resource with the ``download_resourcebase`` permission,
    copies every storage-managed file into a temp folder, zips it and
    streams it back. The temp folder is always removed in ``finally``.

    Returns a rendered 404 page when any file is missing and a 403 page
    when the resource is not a ResourceBase.
    """
    _not_authorized = _("You are not authorized to download this resource.")
    _not_permitted = _("You are not permitted to save or edit this resource.")
    _no_files_found = _("No files have been found for this resource. Please, contact a system administrator.")
    instance = resolve_object(request,
                              sender,
                              {'pk': resourceid},
                              permission='base.download_resourcebase',
                              permission_msg=_not_permitted)
    if isinstance(instance, ResourceBase):
        # Create Target Folder
        dirpath = tempfile.mkdtemp(dir=settings.STATIC_ROOT)
        dir_time_suffix = get_dir_time_suffix()
        target_folder = os.path.join(dirpath, dir_time_suffix)
        if not os.path.exists(target_folder):
            os.makedirs(target_folder)

        def _no_files_response():
            # Single place for the "no files found" 404 page (the original
            # repeated this literal response three times).
            return HttpResponse(
                loader.render_to_string(
                    '401.html',
                    context={
                        'error_title': _("No files found."),
                        'error_message': _no_files_found
                    },
                    request=request),
                status=404)

        dataset_files = []
        try:
            files = instance.resourcebase_ptr.files
            # Copy all Dataset related files into a temporary folder
            for file_path in files:
                if storage_manager.exists(file_path):
                    dataset_files.append(file_path)
                    # BUGFIX: write each file under its own basename; the
                    # original wrote every file to one literal placeholder
                    # path, so each write clobbered the previous file and
                    # the archive contained only the last one.
                    filename = os.path.basename(file_path)
                    with open(os.path.join(target_folder, filename),
                              'wb+') as f:
                        f.write(storage_manager.open(file_path).read())
                else:
                    return _no_files_response()
            # Check we can access the original files
            if not dataset_files:
                return _no_files_response()
            # ZIP everything and return
            target_file_name = "".join([instance.name, ".zip"])
            target_file = os.path.join(dirpath, target_file_name)
            zip_dir(target_folder, target_file)
            register_event(request, 'download', instance)
            response = HttpResponse(
                content=open(target_file, mode='rb'),
                status=200,
                content_type="application/zip")
            response['Content-Disposition'] = \
                f'attachment; filename="{target_file_name}"'
            return response
        except (NotImplementedError, Upload.DoesNotExist):
            traceback.print_exc()
            tb = traceback.format_exc()
            logger.debug(tb)
            return _no_files_response()
        finally:
            # Always drop the temp folder, even on the early returns above.
            if target_folder is not None:
                shutil.rmtree(target_folder, ignore_errors=True)
    return HttpResponse(
        loader.render_to_string(
            '401.html',
            context={
                'error_title': _("Not Authorized"),
                'error_message': _not_authorized
            },
            request=request),
        status=403)
def download(request, resourceid, sender=Layer):
    """Bundle a Layer's files, SLD styles and metadata into a zip attachment.

    Legacy (requests-based) variant: copies the upload-session files plus
    SLDs and metadata links into a temp folder, zips it, and returns it.
    Responds with JSON errors: 404 ``file_not_found`` / 403
    ``unauthorized_request``.
    """
    instance = resolve_object(
        request,
        sender,
        {'pk': resourceid},
        permission='base.download_resourcebase',
        permission_msg=_(
            "You are not permitted to save or edit this resource."))
    if isinstance(instance, Layer):
        try:
            upload_session = instance.get_upload_session()
            layer_files = list(
                LayerFile.objects.filter(upload_session=upload_session))
            # Create Target Folder
            dirpath = tempfile.mkdtemp()
            dir_time_suffix = get_dir_time_suffix()
            target_folder = os.path.join(dirpath, dir_time_suffix)
            if not os.path.exists(target_folder):
                os.makedirs(target_folder)
            # Copy all Layer related files into a temporary folder
            for lyr in layer_files:
                if storage.exists(lyr.file):
                    geonode_layer_path = storage.path(lyr.file)
                    shutil.copy2(geonode_layer_path, target_folder)
            # Let's check for associated SLD files (if any)
            try:
                for s in instance.styles.all():
                    sld_file_path = os.path.join(
                        target_folder, "".join([s.name, ".sld"]))
                    # BUGFIX: context managers so handles are always closed
                    # (the original also re-opened the file in "r" mode and
                    # abandoned the handle without closing it).
                    with open(sld_file_path, "w") as sld_file:
                        sld_file.write(s.sld_body.strip())
                    try:
                        response = requests.get(s.sld_url, timeout=TIMEOUT)
                        sld_remote_content = response.text
                        sld_file_path = os.path.join(
                            target_folder, "".join([s.name, "_remote.sld"]))
                        with open(sld_file_path, "w") as sld_file:
                            sld_file.write(sld_remote_content.strip())
                    # BUGFIX: BaseException also swallowed KeyboardInterrupt
                    # and SystemExit; Exception is the right net here.
                    except Exception:
                        traceback.print_exc()
                        tb = traceback.format_exc()
                        logger.debug(tb)
            except Exception:
                traceback.print_exc()
                tb = traceback.format_exc()
                logger.debug(tb)
            # Let's dump metadata
            target_md_folder = os.path.join(target_folder, ".metadata")
            if not os.path.exists(target_md_folder):
                os.makedirs(target_md_folder)
            try:
                links = Link.objects.filter(resource=instance.resourcebase_ptr)
                for link in links:
                    link_name = custom_slugify(link.name)
                    link_file = os.path.join(
                        target_md_folder,
                        "".join([link_name, ".%s" % link.extension]))
                    # BUGFIX: `in ('data')` was a substring test on 'data'.
                    if link.link_type == 'data':
                        # Skipping 'data' download links
                        continue
                    elif link.link_type in ('metadata', 'image'):
                        # Dumping metadata files and images
                        with open(link_file, "wb") as link_fd:
                            try:
                                response = requests.get(link.url,
                                                        stream=True,
                                                        timeout=TIMEOUT)
                                response.raw.decode_content = True
                                shutil.copyfileobj(response.raw, link_fd)
                            except Exception:
                                traceback.print_exc()
                                tb = traceback.format_exc()
                                logger.debug(tb)
                    elif link.link_type.startswith('OGC'):
                        # Dumping OGC/OWS links
                        with open(link_file, "w") as link_fd:
                            link_fd.write(link.url.strip())
            except Exception:
                traceback.print_exc()
                tb = traceback.format_exc()
                logger.debug(tb)
            # ZIP everything and return
            target_file_name = "".join([instance.name, ".zip"])
            target_file = os.path.join(dirpath, target_file_name)
            zip_dir(target_folder, target_file)
            # BUGFIX: open the archive in binary mode; text mode corrupts the
            # zip payload (and fails with a decode error on Python 3).
            response = HttpResponse(
                content=open(target_file, 'rb'),
                status=200,
                content_type="application/zip")
            response['Content-Disposition'] = \
                'attachment; filename="%s"' % target_file_name
            return response
        except NotImplementedError:
            traceback.print_exc()
            tb = traceback.format_exc()
            logger.debug(tb)
            return HttpResponse(
                json.dumps({'error': 'file_not_found'}),
                status=404,
                content_type="application/json")
    return HttpResponse(
        json.dumps({'error': 'unauthorized_request'}),
        status=403,
        content_type="application/json")