def get_importable(self, obj):
    """Return whether ``obj`` can be imported from the requested external drive.

    Reads the ``importing_from_drive_id`` query parameter from the serializer
    context. When no drive is specified (or the node is a topic), the node is
    reported importable; otherwise every file under the node must exist in the
    drive's data folder.

    Raises:
        serializers.ValidationError: if the given drive id is not mounted.
    """
    request = self.context.get('request')
    drive_id = (
        request.query_params.get('importing_from_drive_id', None)
        if request is not None else None
    )
    # No drive to check against, or a topic node: assume importable.
    if not drive_id or obj.kind == content_kinds.TOPIC:
        return True
    datafolder = cache.get(drive_id, None)
    if datafolder is None:
        drives = get_mounted_drives_with_channel_info()
        if drive_id in drives:
            datafolder = drives[drive_id].datafolder
            cache.set(drive_id, datafolder, 60)  # cache the datafolder for 1 minute
        else:
            raise serializers.ValidationError(
                'The external drive with given drive id does not exist.')
    files = obj.files.all()
    if not files.exists():
        return False
    # All files must be present on the drive; stop at the first missing one
    # (the original kept AND-ing after importable was already False).
    for f in files:
        file_path = get_content_storage_file_path(f.local_file.get_filename(), datafolder)
        if not os.path.exists(file_path):
            return False
    return True
def _localexport(channel_id, drive_id, update_progress=None, check_for_cancel=None, node_ids=None, exclude_node_ids=None, extra_metadata=None):
    """Export one channel's database and selected content to a mounted drive.

    If the content export is cancelled, the partially written channel
    database on the drive is removed (best effort) before re-raising.
    """
    drive = get_mounted_drives_with_channel_info()[drive_id]
    destination = drive.datafolder
    call_command(
        "exportchannel",
        channel_id,
        destination,
        update_progress=update_progress,
        check_for_cancel=check_for_cancel,
    )
    try:
        call_command(
            "exportcontent",
            channel_id,
            destination,
            node_ids=node_ids,
            exclude_node_ids=exclude_node_ids,
            update_progress=update_progress,
            check_for_cancel=check_for_cancel,
        )
    except UserCancelledError:
        # Clean up the half-exported database; ignore a failed removal.
        db_path = get_content_database_file_path(channel_id, datafolder=destination)
        try:
            os.remove(db_path)
        except OSError:
            pass
        raise
def _localimport(drive_id, update_progress=None, check_for_cancel=None):
    """Import every channel found on a mounted drive.

    For each channel listed in the drive metadata, the channel database is
    copied first and then its content files. On cancellation, anything
    partially imported is deleted again before the exception propagates.
    """
    drive = get_mounted_drives_with_channel_info()[drive_id]
    source_folder = drive.datafolder
    # copy channel's db file then copy all the content files from storage dir
    try:
        for entry in drive.metadata["channels"]:
            call_command("importchannel", "local", entry["id"], source_folder,
                         update_progress=update_progress, check_for_cancel=check_for_cancel)
            call_command("importcontent", "local", entry["id"], source_folder,
                         update_progress=update_progress, check_for_cancel=check_for_cancel)
    except UserCancelledError:
        connections.close_all()  # close all DB connections (FIX for #1818)
        for entry in drive.metadata["channels"]:
            cancelled_id = entry["id"]
            try:
                os.remove(get_content_database_file_path(cancelled_id))
            except OSError:
                pass
            ChannelMetadataCache.objects.filter(id=cancelled_id).delete()
        connections.close_all()  # close all DB connections (FIX for #1818)
        raise
    connections.close_all()  # close all DB connections (FIX for #1818)
def _localimport(drive_id, channel_id, node_ids=None, update_progress=None, check_for_cancel=None):
    """Import a single channel (and optionally a subset of nodes) from a drive.

    On cancellation the partially imported channel is deleted via the
    ``deletechannel`` command before the exception is re-raised.
    """
    drive = get_mounted_drives_with_channel_info()[drive_id]
    # copy channel's db file then copy all the content files from storage dir
    ids_on_drive = [entry["id"] for entry in drive.metadata["channels"]]
    assert channel_id in ids_on_drive, "The given channel was not found in the drive."
    try:
        call_command(
            "importchannel",
            "local",
            channel_id,
            drive.datafolder,
            update_progress=update_progress,
            check_for_cancel=check_for_cancel,
        )
        call_command(
            "importcontent",
            "local",
            channel_id,
            drive.datafolder,
            node_ids=node_ids,
            update_progress=update_progress,
            check_for_cancel=check_for_cancel,
        )
    except UserCancelledError:
        # Best-effort rollback of the partial import.
        try:
            call_command("deletechannel", channel_id, update_progress=update_progress)
        except CommandError:
            pass
        raise
def _localexport(drive_id, update_progress=None, check_for_cancel=None):
    """Export every locally known channel to a mounted drive.

    Cancellation removes the half-written database for the channel being
    exported, closes DB connections, and re-raises.
    """
    drive = get_mounted_drives_with_channel_info()[drive_id]
    destination = drive.datafolder
    for channel in ChannelMetadataCache.objects.all():
        call_command("exportchannel", channel.id, destination,
                     update_progress=update_progress, check_for_cancel=check_for_cancel)
        try:
            call_command("exportcontent", channel.id, destination,
                         update_progress=update_progress, check_for_cancel=check_for_cancel)
        except UserCancelledError:
            try:
                os.remove(get_content_database_file_path(channel.id, datafolder=destination))
            except OSError:
                pass
            connections.close_all()  # close all DB connections (FIX for #1818)
            raise
    connections.close_all()  # close all DB connections (FIX for #1818)
def startdiskchannelimport(self, request):
    """Schedule an ``importchannel`` job from a mounted drive.

    Requires ``channel_id`` and ``drive_id`` in the request body; responds
    with the scheduled job's status.
    """
    # Load the required parameters
    try:
        channel_id = request.data["channel_id"]
    except KeyError:
        raise serializers.ValidationError("The channel_id field is required.")
    try:
        drive_id = request.data["drive_id"]
    except KeyError:
        raise serializers.ValidationError("The drive_id field is required.")
    try:
        drive = get_mounted_drives_with_channel_info()[drive_id]
    except KeyError:
        raise serializers.ValidationError("That drive_id was not found in the list of drives.")
    job_metadata = {
        "type": "DISKCHANNELIMPORT",
        "started_by": request.user.pk,
    }
    job_id = get_client().schedule(
        call_command,
        "importchannel",
        "disk",
        channel_id,
        drive.datafolder,
        extra_metadata=job_metadata,
    )
    status = get_client().status(job_id)
    return Response(_job_to_response(status))
def get_importable(self, instance):
    """Return whether ``instance`` can be imported from the requested drive.

    Raises:
        serializers.ValidationError: if the given drive id is not mounted.
    """
    drive_id = self.context['request'].query_params.get('importing_from_drive_id', None)
    # If node is from a remote source, assume it is importable.
    # Topics are annotated as importable by default, but client may disable importing
    # of the topic if it determines that the entire topic sub-tree is already on the device.
    if drive_id is None or instance.kind == content_kinds.TOPIC:
        return True
    # If non-topic ContentNode has no files, then it is not importable.
    content_files = instance.files.all()
    if not content_files.exists():
        return False
    # Inspecting the external drive's files
    datafolder = cache.get(drive_id, None)
    if datafolder is None:
        mounted = get_mounted_drives_with_channel_info()
        if drive_id not in mounted:
            raise serializers.ValidationError('The external drive with given drive id {} does not exist.'.format(drive_id))
        datafolder = mounted[drive_id].datafolder
        cache.set(drive_id, datafolder, 60)  # cache the datafolder for 1 minute
    # Node is importable only if all of its Files are on the external drive;
    # all() short-circuits on the first missing file, like the original loop.
    return all(
        os.path.exists(get_content_storage_file_path(f.local_file.get_filename(), datafolder))
        for f in content_files
    )
def localdrive(self, request):
    """List mounted drives (with channel info) as plain dicts for JSON output."""
    mounted = get_mounted_drives_with_channel_info()
    # make sure everything is a dict, before converting to JSON
    assert isinstance(mounted, dict)
    payload = []
    for drive in mounted.values():
        payload.append(drive._asdict())
    return Response(payload)
def _localexport(drive_id, update_state=None):
    """Export every locally known channel's database and content to a drive."""
    drive = get_mounted_drives_with_channel_info()[drive_id]
    destination = drive.datafolder
    for channel in ChannelMetadataCache.objects.all():
        call_command("exportchannel", channel.id, destination)
        call_command("exportcontent", channel.id, destination, update_state=update_state)
def _localimport(drive_id, update_state=None):
    """Import every channel listed in a mounted drive's metadata."""
    drive = get_mounted_drives_with_channel_info()[drive_id]
    source_folder = drive.datafolder
    for entry in drive.metadata["channels"]:
        call_command("importchannel", "local", entry["id"], source_folder)
        call_command("importcontent", "local", entry["id"], source_folder, update_state=update_state)
def startdiskcontentimport(self, request):
    """Schedule a cancellable ``importcontent`` job from a mounted drive.

    Requires ``channel_id`` and ``drive_id``; optionally accepts
    ``node_ids`` / ``exclude_node_ids`` lists to narrow the import.
    Responds with the scheduled job's status.
    """
    try:
        channel_id = request.data["channel_id"]
    except KeyError:
        raise serializers.ValidationError("The channel_id field is required.")
    try:
        drive_id = request.data["drive_id"]
    except KeyError:
        raise serializers.ValidationError("The drive_id field is required.")
    try:
        drive = get_mounted_drives_with_channel_info()[drive_id]
    except KeyError:
        raise serializers.ValidationError("That drive_id was not found in the list of drives.")
    # optional arguments
    node_ids = request.data.get("node_ids", None)
    exclude_node_ids = request.data.get("exclude_node_ids", None)
    if node_ids and not isinstance(node_ids, list):
        raise serializers.ValidationError("node_ids must be a list.")
    if exclude_node_ids and not isinstance(exclude_node_ids, list):
        raise serializers.ValidationError("exclude_node_ids must be a list.")
    job_metadata = {
        "type": "DISKCONTENTIMPORT",
        "started_by": request.user.pk,
    }
    job_id = get_client().schedule(
        call_command,
        "importcontent",
        "disk",
        channel_id,
        drive.datafolder,
        node_ids=node_ids,
        exclude_node_ids=exclude_node_ids,
        extra_metadata=job_metadata,
        track_progress=True,
        cancellable=True,
    )
    status = get_client().status(job_id)
    return Response(_job_to_response(status))
def localdrive(self, request):
    """List mounted drives, expanding channel namedtuples into plain dicts.

    Converts each drive record and any channel entries under its metadata
    to dicts so the whole payload is JSON-serializable.
    """
    drives = get_mounted_drives_with_channel_info()
    # make sure everything is a dict, before converting to JSON
    assert isinstance(drives, dict)
    out = []
    for mountdata in drives.values():
        mountdata = mountdata._asdict()
        channels = mountdata['metadata']['channels']
        if channels:
            # BUG FIX: the original checked metadata['channels'] but then read and
            # wrote mountdata['channels'] — a key the drive record does not appear
            # to carry — which would KeyError whenever channels were present.
            # Operate on the metadata entry that was actually tested.
            mountdata['metadata']['channels'] = [c._asdict() for c in channels]
        out.append(mountdata)
    return Response(out)
def _localexport(drive_id, update_state=None):
    """Export each channel present in a drive's data folder back onto it."""
    drive = get_mounted_drives_with_channel_info()[drive_id]
    destination = drive.datafolder
    for entry in get_channels_for_data_folder(destination):
        call_command("exportchannel", entry["id"], destination, update_state=update_state)
        call_command("exportcontent", entry["id"], destination, update_state=update_state)