def test_objects(self):
    """Tests calling update_uuid with multiple object types."""
    scale_file = ScaleFile()
    scale_file.update_uuid('test.txt', 1, True, {'key': 'value'})

    # A UUID rendered as hex is always 32 characters, and the digest is
    # deterministic for the same inputs.
    self.assertEqual(len(scale_file.uuid), 32)
    self.assertEqual(scale_file.uuid, 'ee6535359fbe02d50589a823951eb491')
def test_multi_strings(self):
    """Tests calling update_uuid with multiple strings."""
    scale_file = ScaleFile()
    scale_file.update_uuid('test.txt', 'test1', 'test2')

    # Hex digest length is fixed at 32; the value is deterministic for
    # the same argument sequence.
    self.assertEqual(len(scale_file.uuid), 32)
    self.assertEqual(scale_file.uuid, '8ff66acfc019330bba973b408c63ad15')
def test_one_string(self):
    """Tests calling update_uuid with a single string."""
    scale_file = ScaleFile()
    scale_file.update_uuid('test.txt')

    # Hex digest length is fixed at 32; the value is deterministic for
    # a single-string input.
    self.assertEqual(len(scale_file.uuid), 32)
    self.assertEqual(scale_file.uuid, 'dd18bf3a8e0a2a3e53e2661c7fb53534')
def test_partial(self):
    """Tests calling update_uuid with some ignored None types."""
    with_nones = ScaleFile()
    without_nones = ScaleFile()

    # None arguments should not contribute to the digest, so these two
    # calls must produce identical UUIDs.
    without_nones.update_uuid('test.txt', 'test')
    with_nones.update_uuid('test.txt', None, 'test', None)

    self.assertEqual(without_nones.uuid, with_nones.uuid)
def handle(self, *args, **options):
    """See :meth:`django.core.management.base.BaseCommand.handle`.

    This method starts the file upload process: it validates the local
    file, resolves the target workspace, then uploads the file into it.
    Exits the process with code 1 on any failure.
    """
    # BUG FIX: the original read options['file_id'] (never used) and then
    # referenced an undefined `local_path`, raising NameError. The local
    # path must come from the command options like the other arguments.
    local_path = options.get('local_path')
    remote_path = options.get('remote_path')
    workspace_name = options.get('workspace')

    logger.info('Command starting: scale_upload_file')
    logger.info(' - Workspace: %s', workspace_name)

    # Validate the file paths
    file_name = os.path.basename(local_path)
    if not os.path.exists(local_path):
        # logger.error, not logger.exception: there is no active
        # exception here, so exception() would log "NoneType: None".
        logger.error('Local file does not exist: %s', local_path)
        sys.exit(1)

    # Attempt to fetch the workspace model
    try:
        workspace = Workspace.objects.get(name=workspace_name)
    except Workspace.DoesNotExist:
        logger.exception('Workspace does not exist: %s', workspace_name)
        sys.exit(1)

    # Attempt to set up a file model, reusing an existing record if one
    # with the same file name already exists.
    try:
        scale_file = ScaleFile.objects.get(file_name=file_name)
    except ScaleFile.DoesNotExist:
        scale_file = ScaleFile()
        scale_file.update_uuid(file_name)
    scale_file.file_path = remote_path

    try:
        ScaleFile.objects.upload_files(workspace, [FileUpload(scale_file, local_path)])
    except Exception:
        # Broad catch is deliberate at this top-level command boundary,
        # but except Exception (not bare except) lets SystemExit and
        # KeyboardInterrupt propagate.
        logger.exception('Unknown error occurred, exit code 1 returning')
        sys.exit(1)

    logger.info('Command completed: scale_upload_file')
def handle(self, local_path, remote_path, **options):
    """See :meth:`django.core.management.base.BaseCommand.handle`.

    This method starts the file upload process: it validates the local
    file, resolves the target workspace, then uploads the file into it.
    Exits the process with code 1 on any failure.

    :param local_path: Path of the local file to upload
    :type local_path: string
    :param remote_path: Path within the workspace to upload the file to
    :type remote_path: string
    """
    workspace_name = options.get('workspace')

    logger.info('Command starting: scale_upload_file')
    logger.info(' - Workspace: %s', workspace_name)

    # Validate the file paths
    file_name = os.path.basename(local_path)
    if not os.path.exists(local_path):
        # logger.error, not logger.exception: no exception is active
        # here, so exception() would log a spurious "NoneType: None".
        logger.error('Local file does not exist: %s', local_path)
        sys.exit(1)

    # Attempt to fetch the workspace model
    try:
        workspace = Workspace.objects.get(name=workspace_name)
    except Workspace.DoesNotExist:
        logger.exception('Workspace does not exist: %s', workspace_name)
        sys.exit(1)

    # Attempt to set up a file model, reusing an existing record if one
    # with the same file name already exists.
    try:
        scale_file = ScaleFile.objects.get(file_name=file_name)
    except ScaleFile.DoesNotExist:
        scale_file = ScaleFile()
        scale_file.update_uuid(file_name)
    scale_file.file_path = remote_path

    try:
        ScaleFile.objects.upload_files(workspace, [FileUpload(scale_file, local_path)])
    except Exception:
        # Broad catch is deliberate at this top-level command boundary,
        # but except Exception (not bare except) lets SystemExit and
        # KeyboardInterrupt propagate.
        logger.exception('Unknown error occurred, exit code 1 returning')
        sys.exit(1)

    logger.info('Command completed: scale_upload_file')
def _generate_input_metadata(self, job_exe):
    """Generate and upload the input metadata file for the job execution.

    Builds a JSON manifest describing the execution's job (and recipe,
    if any) input values, writes it to a temp file, uploads it to the
    first input workspace that accepts it, and attaches it to the job's
    input data as INPUT_METADATA_MANIFEST.

    :param job_exe: The job execution model
    :type job_exe: :class:`job.models.JobExecution`
    """
    job_interface = job_exe.job_type.get_job_interface()

    if not job_interface.needs_input_metadata():
        return

    # Generate input metadata dict
    input_metadata = {}
    # BUG FIX: the original assigned the bound method itself
    # (`job_exe.get_execution_configuration` without parentheses), so the
    # later config.get_dict() call would fail with AttributeError.
    config = job_exe.get_execution_configuration()
    if 'input_files' in config.get_dict():
        input_metadata['JOB'] = {}
        input_data = job_exe.job.get_input_data()
        for name in input_data.values.keys():
            value = input_data.values[name]
            if type(value) is JsonValue:
                input_metadata['JOB'][name] = value.value
            elif type(value) is FileValue:
                input_metadata['JOB'][name] = [ScaleFile.objects.get(pk=f)._get_url() for f in value.file_ids]

    if job_exe.recipe_id and job_exe.recipe.has_input():
        input_metadata['RECIPE'] = {}
        input_data = job_exe.recipe.get_input_data()
        for name in input_data.values.keys():
            value = input_data.values[name]
            if type(value) is JsonValue:
                input_metadata['RECIPE'][name] = value.value
            elif type(value) is FileValue:
                input_metadata['RECIPE'][name] = [ScaleFile.objects.get(pk=f)._get_url() for f in value.file_ids]

    workspace_names = config.get_input_workspace_names()
    workspace_models = {w.name: w for w in Workspace.objects.get_workspaces(names=workspace_names)}

    input_metadata_id = None
    if input_metadata:
        file_name = '%d-input_metadata.json' % job_exe.job.id
        local_path = os.path.join(SCALE_JOB_EXE_INPUT_PATH, 'tmp', file_name)
        with open(local_path, 'w') as metadata_file:
            json.dump(input_metadata, metadata_file)

        # Reuse an existing file record with this name if there is one
        try:
            scale_file = ScaleFile.objects.get(file_name=file_name)
        except ScaleFile.DoesNotExist:
            scale_file = ScaleFile()
            scale_file.update_uuid(file_name)
        remote_path = self._calculate_remote_path(job_exe)
        scale_file.file_path = remote_path

        # Try each input workspace until one accepts the upload.
        # BUG FIX: the original iterated the dict directly, yielding
        # workspace *name* strings; upload_files needs workspace models.
        for workspace in workspace_models.values():
            try:
                if not input_metadata_id:
                    ScaleFile.objects.upload_files(workspace, [FileUpload(scale_file, local_path)])
                    input_metadata_id = ScaleFile.objects.get(file_name=file_name).id
                    data = job_exe.job.get_job_data()
                    data.add_file_input('INPUT_METADATA_MANIFEST', input_metadata_id)
                    job_exe.job.input = data.get_dict()
                    job_exe.job.save()
            except Exception:
                # Best-effort per workspace: a failed upload moves on to
                # the next workspace (except Exception, not bare except,
                # so SystemExit/KeyboardInterrupt still propagate).
                continue

        if not input_metadata_id:
            # logger.error, not logger.exception: any exception from the
            # loop above was already consumed, so there is no active
            # exception to attach a traceback from.
            logger.error('Error uploading input_metadata manifest for job_exe %d', job_exe.job.id)