def test_success(self, mock_getsize):
    """Tests calling ScaleFileManager.upload_files() successfully"""

    # Every file reports a fixed size of 100 bytes
    mock_getsize.side_effect = lambda path: 100

    workspace = storage_test_utils.create_workspace()

    # First file: no media type given so Scale should auto-detect text/plain
    text_file = ScaleFile()
    text_file.set_basic_fields('file.txt', 100, None)  # Scale should auto-detect text/plain
    text_remote = 'my/remote/path/file.txt'
    text_local = 'my/local/path/file.txt'
    text_file.file_path = text_remote

    # Second file: explicit JSON media type
    json_file = ScaleFile()
    json_file.set_basic_fields('file.json', 100, 'application/json')
    json_remote = 'my/remote/path/2/file.json'
    json_local = 'my/local/path/2/file.json'
    json_file.file_path = json_remote

    workspace.upload_files = MagicMock()

    uploads = [FileUpload(text_file, text_local), FileUpload(json_file, json_local)]
    models = ScaleFile.objects.upload_files(workspace, uploads)

    # The workspace should receive exactly one upload call with both files
    workspace.upload_files.assert_called_once_with([FileUpload(text_file, text_local),
                                                    FileUpload(json_file, json_local)])

    # Returned models should reflect each file's name, path, media type and workspace
    self.assertEqual('file.txt', models[0].file_name)
    self.assertEqual(text_remote, models[0].file_path)
    self.assertEqual('text/plain', models[0].media_type)
    self.assertEqual(workspace.id, models[0].workspace_id)
    self.assertEqual('file.json', models[1].file_name)
    self.assertEqual(json_remote, models[1].file_path)
    self.assertEqual('application/json', models[1].media_type)
    self.assertEqual(workspace.id, models[1].workspace_id)
def handle(self, *args, **options):
    """See :meth:`django.core.management.base.BaseCommand.handle`.

    This method starts the file upload process: it validates the local file,
    looks up the target workspace, and uploads the file to its remote path.
    Exits with code 1 on any failure.
    """

    file_id = options.get('file_id')
    # Bug fix: local_path was read below but never assigned (NameError).
    # Assumes the command defines a 'local_path' option — TODO confirm
    # against this command's add_arguments().
    local_path = options.get('local_path')
    remote_path = options.get('remote_path')
    workspace_name = options.get('workspace')

    logger.info('Command starting: scale_upload_file')
    logger.info(' - Workspace: %s', workspace_name)

    # Validate the file paths (logger.error, not logger.exception: there is
    # no active exception here)
    file_name = os.path.basename(local_path)
    if not os.path.exists(local_path):
        logger.error('Local file does not exist: %s', local_path)
        sys.exit(1)

    # Attempt to fetch the workspace model
    try:
        workspace = Workspace.objects.get(name=workspace_name)
    except Workspace.DoesNotExist:
        logger.error('Workspace does not exist: %s', workspace_name)
        sys.exit(1)

    # Attempt to set up a file model, reusing an existing one when the
    # file name already exists
    try:
        scale_file = ScaleFile.objects.get(file_name=file_name)
    except ScaleFile.DoesNotExist:
        scale_file = ScaleFile()
        scale_file.update_uuid(file_name)
    scale_file.file_path = remote_path

    try:
        ScaleFile.objects.upload_files(workspace, [FileUpload(scale_file, local_path)])
    except Exception:
        # Narrowed from a bare except: so SystemExit/KeyboardInterrupt
        # are not swallowed
        logger.exception('Unknown error occurred, exit code 1 returning')
        sys.exit(1)

    logger.info('Command completed: scale_upload_file')
def handle(self, local_path, remote_path, **options):
    """See :meth:`django.core.management.base.BaseCommand.handle`.

    This method starts the file upload process.
    """

    target_workspace = options.get('workspace')

    logger.info('Command starting: scale_upload_file')
    logger.info(' - Workspace: %s', target_workspace)

    # Make sure the local file actually exists before doing any work
    base_name = os.path.basename(local_path)
    if not os.path.exists(local_path):
        logger.exception('Local file does not exist: %s', local_path)
        sys.exit(1)

    # Look up the destination workspace model by name
    try:
        upload_workspace = Workspace.objects.get(name=target_workspace)
    except Workspace.DoesNotExist:
        logger.exception('Workspace does not exist: %s', target_workspace)
        sys.exit(1)

    # Reuse an existing file model when one matches this name,
    # otherwise create a fresh one
    try:
        upload_file = ScaleFile.objects.get(file_name=base_name)
    except ScaleFile.DoesNotExist:
        upload_file = ScaleFile()
        upload_file.update_uuid(base_name)
    upload_file.file_path = remote_path

    try:
        ScaleFile.objects.upload_files(upload_workspace, [FileUpload(upload_file, local_path)])
    except:
        logger.exception('Unknown error occurred, exit code 1 returning')
        sys.exit(1)

    logger.info('Command completed: scale_upload_file')
def _generate_input_metadata(self, job_exe):
    """Generate the input metadata file for the job execution, upload it to
    one of the job's input workspaces, and attach it to the job's data as the
    INPUT_METADATA_MANIFEST file input.

    :param job_exe: The job execution to generate the metadata for
    :type job_exe: :class:`job.models.JobExecution`
    """

    job_interface = job_exe.job_type.get_job_interface()
    if not job_interface.needs_input_metadata():
        return

    def _collect_input_values(input_data):
        """Map each input name to its JSON value or its list of file URLs."""
        collected = {}
        for name, value in input_data.values.items():
            if isinstance(value, JsonValue):
                collected[name] = value.value
            elif isinstance(value, FileValue):
                collected[name] = [ScaleFile.objects.get(pk=file_id)._get_url()
                                   for file_id in value.file_ids]
        return collected

    # Generate input metadata dict from the job's (and optionally the
    # recipe's) input data.
    # Bug fix: get_execution_configuration is a method and was not being
    # called, so `config` was a bound method and config.get_dict() would
    # raise AttributeError.
    config = job_exe.get_execution_configuration()
    input_metadata = {}
    if 'input_files' in config.get_dict():
        input_metadata['JOB'] = _collect_input_values(job_exe.job.get_input_data())
    if job_exe.recipe_id and job_exe.recipe.has_input():
        input_metadata['RECIPE'] = _collect_input_values(job_exe.recipe.get_input_data())

    if not input_metadata:
        return

    # Write the metadata manifest to a local temp file
    file_name = '%d-input_metadata.json' % job_exe.job.id
    local_path = os.path.join(SCALE_JOB_EXE_INPUT_PATH, 'tmp', file_name)
    with open(local_path, 'w') as metadata_file:
        json.dump(input_metadata, metadata_file)

    # Reuse an existing file model for this name, otherwise create one
    try:
        scale_file = ScaleFile.objects.get(file_name=file_name)
    except ScaleFile.DoesNotExist:
        scale_file = ScaleFile()
        scale_file.update_uuid(file_name)
    scale_file.file_path = self._calculate_remote_path(job_exe)

    workspace_names = config.get_input_workspace_names()
    workspace_models = {w.name: w for w in Workspace.objects.get_workspaces(names=workspace_names)}

    # Try each input workspace until one upload succeeds (best effort).
    # Bug fix: previously iterated the dict itself, passing workspace *names*
    # (strings) to upload_files instead of the workspace models.
    input_metadata_id = None
    for workspace in workspace_models.values():
        try:
            ScaleFile.objects.upload_files(workspace, [FileUpload(scale_file, local_path)])
            input_metadata_id = ScaleFile.objects.get(file_name=file_name).id
            data = job_exe.job.get_job_data()
            data.add_file_input('INPUT_METADATA_MANIFEST', input_metadata_id)
            job_exe.job.input = data.get_dict()
            job_exe.job.save()
            break
        except Exception:
            # Log and move on to the next workspace instead of silently
            # swallowing the failure
            logger.exception('Failed to upload input metadata manifest to workspace %s', workspace.name)
            continue

    if not input_metadata_id:
        # logger.error, not logger.exception: there is no active exception here
        logger.error('Error uploading input_metadata manifest for job_exe %d', job_exe.job.id)