def postProcess(self, _edObject=None):
    """Assemble the ISPyB results container and upload it.

    Now that the whole pipeline has executed, build the AutoProcContainer
    from the XDS / XSCALE plugin outputs (an anomalous and a non-anomalous
    scaling container sharing one AutoProc description), copy the result
    files to the pyarch archive, serialize the container to the configured
    output file and send it to ISPyB — first the anom pass, then the
    noanom pass.

    :param _edObject: unused, kept for the EDPluginControl postProcess
                      signature.
    """
    EDPluginControl.postProcess(self)
    self.DEBUG("EDPluginControlAutoproc.postProcess")

    # Now that we have executed the whole thing we need to create
    # the suitable ISPyB plugin input and serialize it to the file
    # we've been given as input
    output = AutoProcContainer()

    # AutoProc attr: refined cell from the first XDS run
    autoproc = AutoProc()
    xdsout = self.xds_first.dataOutput
    if xdsout.sg_number is not None:  # and it should not be None
        autoproc.spaceGroup = SPACE_GROUP_NAMES[xdsout.sg_number.value]
    autoproc.refinedCell_a = xdsout.cell_a.value
    autoproc.refinedCell_b = xdsout.cell_b.value
    autoproc.refinedCell_c = xdsout.cell_c.value
    autoproc.refinedCell_alpha = xdsout.cell_alpha.value
    autoproc.refinedCell_beta = xdsout.cell_beta.value
    autoproc.refinedCell_gamma = xdsout.cell_gamma.value
    output.AutoProc = autoproc

    # scaling container and all the things that go in
    scaling_container_noanom = AutoProcScalingContainer()
    scaling = AutoProcScaling()
    scaling.recordTimeStamp = time.strftime("%Y-%m-%d %H:%M:%S",
                                            time.localtime())
    scaling_container_noanom.AutoProcScaling = scaling

    # NOANOM PATH
    xscale_stats_noanom = self.xscale_generate.dataOutput.stats_noanom_merged
    inner_stats_noanom = xscale_stats_noanom.completeness_entries[0]
    outer_stats_noanom = xscale_stats_noanom.completeness_entries[-1]

    # use the previous shell's res as low res, if available
    prev_res = self.low_resolution_limit
    try:
        prev_res = xscale_stats_noanom.completeness_entries[-2].outer_res.value
    except IndexError:
        pass
    total_stats_noanom = xscale_stats_noanom.total_completeness

    stats = _create_scaling_stats(inner_stats_noanom, 'innerShell',
                                  self.low_resolution_limit, False)
    overall_low = stats.resolutionLimitLow
    scaling_container_noanom.AutoProcScalingStatistics.append(stats)

    stats = _create_scaling_stats(outer_stats_noanom, 'outerShell',
                                  prev_res, False)
    overall_high = stats.resolutionLimitHigh
    scaling_container_noanom.AutoProcScalingStatistics.append(stats)

    # the overall entry reuses the limits computed from the shells above
    stats = _create_scaling_stats(total_stats_noanom, 'overall',
                                  self.low_resolution_limit, False)
    stats.resolutionLimitLow = overall_low
    stats.resolutionLimitHigh = overall_high
    scaling_container_noanom.AutoProcScalingStatistics.append(stats)

    integration_container_noanom = AutoProcIntegrationContainer()
    image = Image()
    image.dataCollectionId = self.dataInput.data_collection_id.value
    integration_container_noanom.Image = image

    integration_noanom = AutoProcIntegration()
    if self.integration_id_noanom is not None:
        integration_noanom.autoProcIntegrationId = self.integration_id_noanom
    crystal_stats = self.parse_xds_noanom.dataOutput
    integration_noanom.cell_a = crystal_stats.cell_a.value
    integration_noanom.cell_b = crystal_stats.cell_b.value
    integration_noanom.cell_c = crystal_stats.cell_c.value
    integration_noanom.cell_alpha = crystal_stats.cell_alpha.value
    integration_noanom.cell_beta = crystal_stats.cell_beta.value
    integration_noanom.cell_gamma = crystal_stats.cell_gamma.value
    integration_noanom.anomalous = 0

    # done with the integration
    integration_container_noanom.AutoProcIntegration = integration_noanom
    scaling_container_noanom.AutoProcIntegrationContainer = integration_container_noanom

    # ANOM PATH
    scaling_container_anom = AutoProcScalingContainer()
    scaling = AutoProcScaling()
    scaling.recordTimeStamp = time.strftime("%Y-%m-%d %H:%M:%S",
                                            time.localtime())
    scaling_container_anom.AutoProcScaling = scaling

    xscale_stats_anom = self.xscale_generate.dataOutput.stats_anom_merged
    inner_stats_anom = xscale_stats_anom.completeness_entries[0]
    outer_stats_anom = xscale_stats_anom.completeness_entries[-1]

    # use the previous shell's res as low res if available
    prev_res = self.low_resolution_limit
    try:
        prev_res = xscale_stats_anom.completeness_entries[-2].outer_res.value
    except IndexError:
        pass
    total_stats_anom = xscale_stats_anom.total_completeness

    stats = _create_scaling_stats(inner_stats_anom, 'innerShell',
                                  self.low_resolution_limit, True)
    overall_low = stats.resolutionLimitLow
    scaling_container_anom.AutoProcScalingStatistics.append(stats)

    stats = _create_scaling_stats(outer_stats_anom, 'outerShell',
                                  prev_res, True)
    overall_high = stats.resolutionLimitHigh
    scaling_container_anom.AutoProcScalingStatistics.append(stats)

    stats = _create_scaling_stats(total_stats_anom, 'overall',
                                  self.low_resolution_limit, True)
    stats.resolutionLimitLow = overall_low
    stats.resolutionLimitHigh = overall_high
    scaling_container_anom.AutoProcScalingStatistics.append(stats)

    integration_container_anom = AutoProcIntegrationContainer()
    image = Image()
    image.dataCollectionId = self.dataInput.data_collection_id.value
    integration_container_anom.Image = image

    integration_anom = AutoProcIntegration()
    crystal_stats = self.parse_xds_anom.dataOutput
    if self.integration_id_anom is not None:
        integration_anom.autoProcIntegrationId = self.integration_id_anom
    integration_anom.cell_a = crystal_stats.cell_a.value
    integration_anom.cell_b = crystal_stats.cell_b.value
    integration_anom.cell_c = crystal_stats.cell_c.value
    integration_anom.cell_alpha = crystal_stats.cell_alpha.value
    integration_anom.cell_beta = crystal_stats.cell_beta.value
    integration_anom.cell_gamma = crystal_stats.cell_gamma.value
    integration_anom.anomalous = 1

    # done with the integration
    integration_container_anom.AutoProcIntegration = integration_anom
    scaling_container_anom.AutoProcIntegrationContainer = integration_container_anom
    # ------ NO ANOM / ANOM end

    program_container = AutoProcProgramContainer()
    program_container.AutoProcProgram = AutoProcProgram()
    program_container.AutoProcProgram.processingCommandLine = ' '.join(sys.argv)
    program_container.AutoProcProgram.processingPrograms = 'edna-fastproc'

    # now for the generated files. There's some magic to do with
    # their paths to determine where to put them on pyarch
    pyarch_path = None
    # Note: the path is in the form /data/whatever
    # remove the edna-autoproc-import suffix
    original_files_dir = self.file_conversion.dataInput.output_directory.value
    #files_dir, _ = os.path.split(original_files_dir)
    files_dir = original_files_dir

    # the whole transformation is fragile!
    if files_dir.startswith('/data/visitor'):
        # We might get empty elements at the head/tail of the list
        tokens = [elem for elem in files_dir.split(os.path.sep)
                  if len(elem) > 0]
        pyarch_path = os.path.join('/data/pyarch', tokens[3], tokens[2],
                                   *tokens[4:])
    else:
        # We might get empty elements at the head/tail of the list
        tokens = [elem for elem in files_dir.split(os.path.sep)
                  if len(elem) > 0]
        if tokens[2] == 'inhouse':
            pyarch_path = os.path.join('/data/pyarch', tokens[1],
                                       *tokens[3:])

    if pyarch_path is not None:
        pyarch_path = pyarch_path.replace('PROCESSED_DATA', 'RAW_DATA')
        try:
            os.makedirs(pyarch_path)
        except OSError:
            # dir already exists, may happen when testing
            # NOTE(review): this also swallows genuine failures (e.g.
            # permission denied), in which case copyfile below will fail
            EDVerbose.screen('Target directory on pyarch ({0}) already exists, ignoring'.format(pyarch_path))

        file_list = []
        # we can now copy the files to this dir
        for f in os.listdir(original_files_dir):
            current = os.path.join(original_files_dir, f)
            if not os.path.isfile(current):
                continue
            # only upload the file types ISPyB cares about
            if os.path.splitext(current)[1].lower() not in ISPYB_UPLOAD_EXTENSIONS:
                continue
            new_path = os.path.join(pyarch_path, f)
            file_list.append(new_path)
            shutil.copyfile(current, new_path)

        # now add those to the ispyb upload
        for path in file_list:
            dirname, filename = os.path.split(path)
            attach = AutoProcProgramAttachment()
            attach.fileType = "Result"
            attach.fileName = filename
            attach.filePath = dirname
            program_container.AutoProcProgramAttachment.append(attach)

    program_container.AutoProcProgram.processingStatus = True
    output.AutoProcProgramContainer = program_container

    # first with anom
    output.AutoProcScalingContainer = scaling_container_anom

    ispyb_input = XSDataInputStoreAutoProc()
    ispyb_input.AutoProcContainer = output

    with open(self.dataInput.output_file.path.value, 'w') as f:
        f.write(ispyb_input.marshal())

    # store results in ispyb
    self.store_autoproc_anom.dataInput = ispyb_input
    t0 = time.time()
    self.store_autoproc_anom.executeSynchronous()
    self.stats['ispyb_upload'] = time.time() - t0

    with open(self.log_file_path, 'w') as f:
        json.dump(self.stats, f)

    if self.store_autoproc_anom.isFailure():
        self.ERROR('could not send results to ispyb')
    else:
        # store the autoproc ID as a filename in the
        # fastproc_integration_ids directory
        # (0o755 replaces the py2-only literal 0755; same value)
        os.mknod(os.path.join(self.autoproc_ids_dir,
                              str(self.integration_id_anom)), 0o755)

    # then noanom stats
    output.AutoProcScalingContainer = scaling_container_noanom
    ispyb_input = XSDataInputStoreAutoProc()
    ispyb_input.AutoProcContainer = output

    with open(self.dataInput.output_file.path.value, 'w') as f:
        f.write(ispyb_input.marshal())

    # store results in ispyb
    self.store_autoproc_noanom.dataInput = ispyb_input
    t0 = time.time()
    self.store_autoproc_noanom.executeSynchronous()
    # NOTE(review): overwrites the anom upload timing recorded above;
    # only the noanom duration survives in the stats file
    self.stats['ispyb_upload'] = time.time() - t0

    with open(self.log_file_path, 'w') as f:
        json.dump(self.stats, f)

    if self.store_autoproc_noanom.isFailure():
        self.ERROR('could not send results to ispyb')
    else:
        # store the autoproc id
        os.mknod(os.path.join(self.autoproc_ids_dir,
                              str(self.integration_id_noanom)), 0o755)
def postProcess(self, _edObject=None):
    """Build the ISPyB AutoProcContainer from the pipeline outputs and upload it.

    Serializes the container to the configured output file, copies result
    files to pyarch, then stores first the anomalous and then the
    non-anomalous scaling results in ISPyB.
    """
    EDPluginControl.postProcess(self)
    self.DEBUG("EDPluginControlAutoproc.postProcess")

    #Now that we have executed the whole thing we need to create
    #the suitable ISPyB plugin input and serialize it to the file
    #we've been given as input
    output = AutoProcContainer()

    # AutoProc attr: refined cell taken from the first XDS run's output
    autoproc = AutoProc()
    xdsout = self.xds_first.dataOutput
    if xdsout.sg_number is not None:  # and it should not
        autoproc.spaceGroup = SPACE_GROUP_NAMES[xdsout.sg_number.value]
    autoproc.refinedCell_a = xdsout.cell_a.value
    autoproc.refinedCell_b = xdsout.cell_b.value
    autoproc.refinedCell_c = xdsout.cell_c.value
    autoproc.refinedCell_alpha = xdsout.cell_alpha.value
    autoproc.refinedCell_beta = xdsout.cell_beta.value
    autoproc.refinedCell_gamma = xdsout.cell_gamma.value
    output.AutoProc = autoproc

    # scaling container and all the things that go in
    scaling_container_noanom = AutoProcScalingContainer()
    scaling = AutoProcScaling()
    scaling.recordTimeStamp = time.strftime("%Y-%m-%d %H:%M:%S",
                                            time.localtime())
    scaling_container_noanom.AutoProcScaling = scaling

    # NOANOM PATH
    xscale_stats_noanom = self.xscale_generate.dataOutput.stats_noanom_merged
    inner_stats_noanom = xscale_stats_noanom.completeness_entries[0]
    outer_stats_noanom = xscale_stats_noanom.completeness_entries[-1]

    # use the previous shell's res as low res, if available
    prev_res = self.low_resolution_limit
    try:
        prev_res = xscale_stats_noanom.completeness_entries[
            -2].outer_res.value
    except IndexError:
        pass
    total_stats_noanom = xscale_stats_noanom.total_completeness

    # inner/outer shell entries also fix the limits for the overall entry
    stats = _create_scaling_stats(inner_stats_noanom, 'innerShell',
                                  self.low_resolution_limit, False)
    overall_low = stats.resolutionLimitLow
    scaling_container_noanom.AutoProcScalingStatistics.append(stats)

    stats = _create_scaling_stats(outer_stats_noanom, 'outerShell',
                                  prev_res, False)
    overall_high = stats.resolutionLimitHigh
    scaling_container_noanom.AutoProcScalingStatistics.append(stats)

    stats = _create_scaling_stats(total_stats_noanom, 'overall',
                                  self.low_resolution_limit, False)
    stats.resolutionLimitLow = overall_low
    stats.resolutionLimitHigh = overall_high
    scaling_container_noanom.AutoProcScalingStatistics.append(stats)

    integration_container_noanom = AutoProcIntegrationContainer()
    image = Image()
    image.dataCollectionId = self.dataInput.data_collection_id.value
    integration_container_noanom.Image = image

    integration_noanom = AutoProcIntegration()
    if self.integration_id_noanom is not None:
        integration_noanom.autoProcIntegrationId = self.integration_id_noanom
    crystal_stats = self.parse_xds_noanom.dataOutput
    integration_noanom.cell_a = crystal_stats.cell_a.value
    integration_noanom.cell_b = crystal_stats.cell_b.value
    integration_noanom.cell_c = crystal_stats.cell_c.value
    integration_noanom.cell_alpha = crystal_stats.cell_alpha.value
    integration_noanom.cell_beta = crystal_stats.cell_beta.value
    integration_noanom.cell_gamma = crystal_stats.cell_gamma.value
    integration_noanom.anomalous = 0

    # done with the integration
    integration_container_noanom.AutoProcIntegration = integration_noanom
    scaling_container_noanom.AutoProcIntegrationContainer = integration_container_noanom

    # ANOM PATH (mirrors the noanom path above with the anom datasets)
    scaling_container_anom = AutoProcScalingContainer()
    scaling = AutoProcScaling()
    scaling.recordTimeStamp = time.strftime("%Y-%m-%d %H:%M:%S",
                                            time.localtime())
    scaling_container_anom.AutoProcScaling = scaling

    xscale_stats_anom = self.xscale_generate.dataOutput.stats_anom_merged
    inner_stats_anom = xscale_stats_anom.completeness_entries[0]
    outer_stats_anom = xscale_stats_anom.completeness_entries[-1]

    # use the previous shell's res as low res if available
    prev_res = self.low_resolution_limit
    try:
        prev_res = xscale_stats_anom.completeness_entries[
            -2].outer_res.value
    except IndexError:
        pass
    total_stats_anom = xscale_stats_anom.total_completeness

    stats = _create_scaling_stats(inner_stats_anom, 'innerShell',
                                  self.low_resolution_limit, True)
    overall_low = stats.resolutionLimitLow
    scaling_container_anom.AutoProcScalingStatistics.append(stats)

    stats = _create_scaling_stats(outer_stats_anom, 'outerShell',
                                  prev_res, True)
    overall_high = stats.resolutionLimitHigh
    scaling_container_anom.AutoProcScalingStatistics.append(stats)

    stats = _create_scaling_stats(total_stats_anom, 'overall',
                                  self.low_resolution_limit, True)
    stats.resolutionLimitLow = overall_low
    stats.resolutionLimitHigh = overall_high
    scaling_container_anom.AutoProcScalingStatistics.append(stats)

    integration_container_anom = AutoProcIntegrationContainer()
    image = Image()
    image.dataCollectionId = self.dataInput.data_collection_id.value
    integration_container_anom.Image = image

    integration_anom = AutoProcIntegration()
    crystal_stats = self.parse_xds_anom.dataOutput
    if self.integration_id_anom is not None:
        integration_anom.autoProcIntegrationId = self.integration_id_anom
    integration_anom.cell_a = crystal_stats.cell_a.value
    integration_anom.cell_b = crystal_stats.cell_b.value
    integration_anom.cell_c = crystal_stats.cell_c.value
    integration_anom.cell_alpha = crystal_stats.cell_alpha.value
    integration_anom.cell_beta = crystal_stats.cell_beta.value
    integration_anom.cell_gamma = crystal_stats.cell_gamma.value
    integration_anom.anomalous = 1

    # done with the integration
    integration_container_anom.AutoProcIntegration = integration_anom
    scaling_container_anom.AutoProcIntegrationContainer = integration_container_anom
    # ------ NO ANOM / ANOM end

    program_container = AutoProcProgramContainer()
    program_container.AutoProcProgram = AutoProcProgram()
    program_container.AutoProcProgram.processingCommandLine = ' '.join(
        sys.argv)
    program_container.AutoProcProgram.processingPrograms = 'edna-fastproc'

    # now for the generated files. There's some magic to do with
    # their paths to determine where to put them on pyarch
    pyarch_path = None
    # Note: the path is in the form /data/whatever
    # remove the edna-autoproc-import suffix
    original_files_dir = self.file_conversion.dataInput.output_directory.value
    #files_dir, _ = os.path.split(original_files_dir)
    files_dir = original_files_dir

    # the whole transformation is fragile!
    if files_dir.startswith('/data/visitor'):
        # We might get empty elements at the head/tail of the list
        tokens = [
            elem for elem in files_dir.split(os.path.sep) if len(elem) > 0
        ]
        pyarch_path = os.path.join('/data/pyarch', tokens[3], tokens[2],
                                   *tokens[4:])
    else:
        # We might get empty elements at the head/tail of the list
        tokens = [
            elem for elem in files_dir.split(os.path.sep) if len(elem) > 0
        ]
        if tokens[2] == 'inhouse':
            pyarch_path = os.path.join('/data/pyarch', tokens[1],
                                       *tokens[3:])

    # NOTE(review): the copy/attach steps below dereference pyarch_path, so
    # they are nested under this guard — confirm against upstream history
    if pyarch_path is not None:
        pyarch_path = pyarch_path.replace('PROCESSED_DATA', 'RAW_DATA')
        try:
            os.makedirs(pyarch_path)
        except OSError:
            # dir already exists, may happen when testing
            # NOTE(review): this also hides other OSError causes (e.g.
            # permissions); the copy below would then fail instead
            EDVerbose.screen(
                'Target directory on pyarch ({0}) already exists, ignoring'
                .format(pyarch_path))

        file_list = []
        # we can now copy the files to this dir
        for f in os.listdir(original_files_dir):
            current = os.path.join(original_files_dir, f)
            if not os.path.isfile(current):
                continue
            # only keep the extensions ISPyB accepts for upload
            if not os.path.splitext(
                    current)[1].lower() in ISPYB_UPLOAD_EXTENSIONS:
                continue
            new_path = os.path.join(pyarch_path, f)
            file_list.append(new_path)
            shutil.copyfile(current, new_path)

        # now add those to the ispyb upload
        for path in file_list:
            dirname, filename = os.path.split(path)
            attach = AutoProcProgramAttachment()
            attach.fileType = "Result"
            attach.fileName = filename
            attach.filePath = dirname
            program_container.AutoProcProgramAttachment.append(attach)

    program_container.AutoProcProgram.processingStatus = True
    output.AutoProcProgramContainer = program_container

    # first with anom
    output.AutoProcScalingContainer = scaling_container_anom

    ispyb_input = XSDataInputStoreAutoProc()
    ispyb_input.AutoProcContainer = output

    with open(self.dataInput.output_file.path.value, 'w') as f:
        f.write(ispyb_input.marshal())

    # store results in ispyb
    self.store_autoproc_anom.dataInput = ispyb_input
    t0 = time.time()
    self.store_autoproc_anom.executeSynchronous()
    self.stats['ispyb_upload'] = time.time() - t0

    with open(self.log_file_path, 'w') as f:
        json.dump(self.stats, f)

    if self.store_autoproc_anom.isFailure():
        self.ERROR('could not send results to ispyb')
    else:
        # store the autoproc ID as a filename in the
        # fastproc_integration_ids directory
        # (0755 is a py2 octal literal: mode rwxr-xr-x)
        os.mknod(
            os.path.join(self.autoproc_ids_dir,
                         str(self.integration_id_anom)), 0755)

    # then noanom stats
    output.AutoProcScalingContainer = scaling_container_noanom
    ispyb_input = XSDataInputStoreAutoProc()
    ispyb_input.AutoProcContainer = output

    with open(self.dataInput.output_file.path.value, 'w') as f:
        f.write(ispyb_input.marshal())

    # store results in ispyb
    self.store_autoproc_noanom.dataInput = ispyb_input
    t0 = time.time()
    self.store_autoproc_noanom.executeSynchronous()
    # NOTE(review): overwrites the anom timing stored above — only the
    # noanom upload duration ends up in the log file
    self.stats['ispyb_upload'] = time.time() - t0

    with open(self.log_file_path, 'w') as f:
        json.dump(self.stats, f)

    if self.store_autoproc_noanom.isFailure():
        self.ERROR('could not send results to ispyb')
    else:
        # store the autoproc id
        os.mknod(
            os.path.join(self.autoproc_ids_dir,
                         str(self.integration_id_noanom)), 0755)