def createInputStoreAutoProc(dataCollectionId, integrationId, isAnomalous=False,
                              programId=None, status="SUCCESS", timeStart=None, timeEnd=None,
                              processingCommandLine=None, processingPrograms=None):
    autoProcContainer = AutoProcContainer()
    integrationContainer = AutoProcIntegrationContainer()
    scalingContainer = AutoProcScalingContainer()
    integration = AutoProcIntegration()
    if integrationId is not None:
        integration.autoProcIntegrationId = integrationId
    integration.anomalous = isAnomalous
    integrationContainer.AutoProcIntegration = integration
    image = Image()
    image.dataCollectionId = dataCollectionId
    integrationContainer.Image = image
    scalingContainer.AutoProcIntegrationContainer = integrationContainer
    programContainer = AutoProcProgramContainer()
    programContainer.AutoProcProgram = EDHandlerXSDataISPyBv1_4.createAutoProcProgram(
        programId=programId, status=status, timeStart=timeStart, timeEnd=timeEnd,
        processingCommandLine=processingCommandLine, processingPrograms=processingPrograms)
    autoProcContainer.AutoProcProgramContainer = programContainer
    autoProcContainer.AutoProcScalingContainer = scalingContainer
    inputStoreAutoProc = XSDataInputStoreAutoProc()
    inputStoreAutoProc.AutoProcContainer = autoProcContainer
    return inputStoreAutoProc
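
The helper above only wires objects together; what matters for ISPyB is the container nesting it produces: an AutoProcContainer holding an AutoProcProgramContainer plus an AutoProcScalingContainer, which in turn holds an AutoProcIntegrationContainer with its AutoProcIntegration and Image. A minimal, self-contained sketch of that nesting, using types.SimpleNamespace as an illustrative stand-in for the real XSData bindings (the ids 1234 and 5678 are invented):

# Stand-in sketch only (not the real XSData/ISPyB classes): it just mirrors
# the container nesting that createInputStoreAutoProc above produces.
from types import SimpleNamespace

integration = SimpleNamespace(autoProcIntegrationId=1234, anomalous=True)  # invented id
image = SimpleNamespace(dataCollectionId=5678)  # invented id
integration_container = SimpleNamespace(AutoProcIntegration=integration, Image=image)
scaling_container = SimpleNamespace(AutoProcIntegrationContainer=integration_container)
container = SimpleNamespace(AutoProcProgramContainer=None,
                            AutoProcScalingContainer=scaling_container)
print(container.AutoProcScalingContainer.AutoProcIntegrationContainer.Image.dataCollectionId)
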
    def createXSDataInputStoreAutoProc(self, xsDataResultXDSAPP, processDirectory, template,
                                       strPathXscaleLp, isAnom, proposal, timeStart, timeEnd, dataCollectionId,
                                       integrationId=None, programId=None):

        # Parse log file
        dictLog = self.parseLogFile(xsDataResultXDSAPP.logFile.path.value)
        dictXscale = self.parseXscaleLp(strPathXscaleLp)

        # Create path to pyarch
        self.pyarchDirectory = EDHandlerESRFPyarchv1_0.createPyarchFilePath(self.resultsDirectory)
        if self.pyarchDirectory is not None:
            self.pyarchDirectory = self.pyarchDirectory.replace('PROCESSED_DATA', 'RAW_DATA')
            if not os.path.exists(self.pyarchDirectory):
                os.makedirs(self.pyarchDirectory, 0o755)

        # Determine pyarch prefix
        listPrefix = template.split("_")
        self.pyarchPrefix = "xa_{0}_run{1}".format(listPrefix[-3], listPrefix[-2])


        xsDataInputStoreAutoProc = XSDataInputStoreAutoProc()
        autoProcContainer = AutoProcContainer()


        # AutoProc
        autoProc = AutoProc()
        autoProc.spaceGroup = dictLog["spaceGroup"]
        autoProc.refinedCell_a = dictLog["cellA"]
        autoProc.refinedCell_b = dictLog["cellB"]
        autoProc.refinedCell_c = dictLog["cellC"]
        autoProc.refinedCell_alpha = dictLog["cellAlpha"]
        autoProc.refinedCell_beta = dictLog["cellBeta"]
        autoProc.refinedCell_gamma = dictLog["cellGamma"]
        autoProcContainer.AutoProc = autoProc

        # AutoProcIntegrationContainer
        autoProcIntegrationContainer = AutoProcIntegrationContainer()
        autoProcIntegration = AutoProcIntegration()
        autoProcIntegration.autoProcIntegrationId = integrationId
        autoProcIntegration.cell_a = dictLog["cellA"]
        autoProcIntegration.cell_b = dictLog["cellB"]
        autoProcIntegration.cell_c = dictLog["cellC"]
        autoProcIntegration.cell_alpha = dictLog["cellAlpha"]
        autoProcIntegration.cell_beta = dictLog["cellBeta"]
        autoProcIntegration.cell_gamma = dictLog["cellGamma"]
        autoProcIntegration.anomalous = isAnom

        image = Image()
        image.dataCollectionId = dataCollectionId
        autoProcIntegrationContainer.AutoProcIntegration = autoProcIntegration
        autoProcIntegrationContainer.Image = image


        # Scaling container
        if xsDataResultXDSAPP.correctLP is not None:
            isa = self.parseCorrectLp(xsDataResultXDSAPP.correctLP.path.value)
        else:
            isa = None
        autoProcScalingContainer = AutoProcScalingContainer()
        autoProcScaling = AutoProcScaling()
        autoProcScaling.recordTimeStamp = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())
        autoProcScalingContainer.AutoProcScaling = autoProcScaling
        for scalingStatisticsType in dictXscale:
            autoProcScalingStatistics = AutoProcScalingStatistics()
            autoProcScalingStatistics.scalingStatisticsType = scalingStatisticsType
            autoProcScalingStatistics.anomalous = isAnom
            for scalingStatisticsAttribute in dictXscale[scalingStatisticsType]:
                setattr(autoProcScalingStatistics, scalingStatisticsAttribute, dictXscale[scalingStatisticsType][scalingStatisticsAttribute])
            if scalingStatisticsType == "overall" and isa is not None:
                autoProcScalingStatistics.isa = isa
            autoProcScalingContainer.addAutoProcScalingStatistics(autoProcScalingStatistics)
        autoProcScalingContainer.AutoProcIntegrationContainer = autoProcIntegrationContainer
        autoProcContainer.AutoProcScalingContainer = autoProcScalingContainer

        # Program
        autoProcProgramContainer = AutoProcProgramContainer()

        autoProcProgram = EDHandlerXSDataISPyBv1_4.createAutoProcProgram(
                programId=programId, status="SUCCESS", timeStart=timeStart, timeEnd=timeEnd,
                processingCommandLine=self.processingCommandLine, processingPrograms=self.processingPrograms)
        autoProcProgramContainer.AutoProcProgram = autoProcProgram

        # Attached files
        pyarchPath = EDHandlerESRFPyarchv1_0.createPyarchFilePath(processDirectory)
        pyarchResultPath = os.path.join(pyarchPath, "results")
        if not os.path.exists(pyarchResultPath):
            os.makedirs(pyarchResultPath, 0o755)

        # XDSAPP log and result files
        if xsDataResultXDSAPP.logFile is not None:
            self.addAttachment(autoProcProgramContainer, xsDataResultXDSAPP.logFile.path.value,
                               "xdsapp", "log", isAnom, attachmentType="Log")
        if xsDataResultXDSAPP.pointlessLog is not None:
            self.addAttachment(autoProcProgramContainer, xsDataResultXDSAPP.pointlessLog.path.value,
                               "pointless", "log", isAnom, attachmentType="Log")
        if xsDataResultXDSAPP.phenixXtriageLog is not None:
            self.addAttachment(autoProcProgramContainer, xsDataResultXDSAPP.phenixXtriageLog.path.value,
                               "xtriage", "log", isAnom, attachmentType="Log")
        if xsDataResultXDSAPP.correctLP is not None:
            self.addAttachment(autoProcProgramContainer, xsDataResultXDSAPP.correctLP.path.value,
                               "CORRECT", "LP", isAnom, attachmentType="Log")
        if xsDataResultXDSAPP.XDS_ASCII_HKL is not None:
            self.addAttachment(autoProcProgramContainer, xsDataResultXDSAPP.XDS_ASCII_HKL.path.value,
                               "XDS_ASCII", "HKL", isAnom, attachmentType="Result", doGzip=True)
        if xsDataResultXDSAPP.XDS_INP is not None:
            self.addAttachment(autoProcProgramContainer, xsDataResultXDSAPP.XDS_INP.path.value,
                               "XDS", "INP", isAnom, attachmentType="Result", doGzip=False, noMergedString=True)
        for mtz_F in xsDataResultXDSAPP.mtz_F:
            basenameMtz_F = os.path.splitext(os.path.basename(mtz_F.path.value))[0]
            self.addAttachment(autoProcProgramContainer, mtz_F.path.value,
                               basenameMtz_F, "mtz", isAnom, attachmentType="Result")
        for mtz_F_plus_F_minus in xsDataResultXDSAPP.mtz_F_plus_F_minus:
            basenameMtz_F_plus_F_minus = os.path.splitext(os.path.basename(mtz_F_plus_F_minus.path.value))[0]
            self.addAttachment(autoProcProgramContainer, mtz_F_plus_F_minus.path.value,
                               basenameMtz_F_plus_F_minus, "mtz", isAnom, attachmentType="Result")
#        for mtz_I in xsDataResultXDSAPP.mtz_I:
#            basenameMtz_I = os.path.splitext(os.path.basename(mtz_I.path.value))[0]
#            self.addAttachment(autoProcProgramContainer, mtz_I.path.value,
#                               basenameMtz_I, "mtz", isAnom, attachmentType="Result")
#        for hkl in xsDataResultXDSAPP.hkl:
#            basenameHkl = os.path.splitext(os.path.basename(hkl.path.value))[0]
#            self.addAttachment(autoProcProgramContainer, hkl.path.value,
#                               basenameHkl, "hkl", isAnom, attachmentType="Result", doGzip=True)
#        for cv in xsDataResultXDSAPP.cv:
#            basenameCv = os.path.splitext(os.path.basename(cv.path.value))[0]
#            self.addAttachment(autoProcProgramContainer, cv.path.value,
#                               basenameCv, "cv", isAnom, attachmentType="Result", doGzip=True)

        if os.path.exists(strPathXscaleLp):
            self.addAttachment(autoProcProgramContainer, strPathXscaleLp,
                               "XSCALE", "LP", isAnom, isMerged=True, attachmentType="Result")
        autoProcContainer.AutoProcProgramContainer = autoProcProgramContainer
        xsDataInputStoreAutoProc.AutoProcContainer = autoProcContainer
        return xsDataInputStoreAutoProc
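
The pyarch prefix in the method above is derived purely from the image template name: the third- and second-to-last underscore-separated tokens are turned into "xa_<prefix>_run<n>". A standalone sketch of just that string manipulation, with a made-up template name (real templates come from the data collection):

def pyarch_prefix(template):
    # Same logic as above: use the 3rd- and 2nd-to-last "_"-separated tokens.
    parts = template.split("_")
    return "xa_{0}_run{1}".format(parts[-3], parts[-2])

print(pyarch_prefix("sample-x_1_0001.cbf"))  # hypothetical template -> xa_sample-x_run1
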
Example 3
    def createXSDataInputStoreAutoProc(self,
                                       xsDataResultXDSAPP,
                                       processDirectory,
                                       template,
                                       strPathXscaleLp,
                                       isAnom,
                                       proposal,
                                       timeStart,
                                       timeEnd,
                                       dataCollectionId,
                                       integrationId=None,
                                       programId=None):

        # Parse log file
        dictLog = self.parseLogFile(xsDataResultXDSAPP.logFile.path.value)
        dictXscale = self.parseXscaleLp(strPathXscaleLp)

        # Create path to pyarch
        self.pyarchDirectory = EDHandlerESRFPyarchv1_0.createPyarchFilePath(
            self.resultsDirectory)
        if self.pyarchDirectory is not None:
            self.pyarchDirectory = self.pyarchDirectory.replace(
                'PROCESSED_DATA', 'RAW_DATA')
            if not os.path.exists(self.pyarchDirectory):
                os.makedirs(self.pyarchDirectory, 0o755)

        # Determine pyarch prefix
        listPrefix = template.split("_")
        self.pyarchPrefix = "xa_{0}_run{1}".format(listPrefix[-3],
                                                   listPrefix[-2])

        xsDataInputStoreAutoProc = XSDataInputStoreAutoProc()
        autoProcContainer = AutoProcContainer()

        # AutoProc
        autoProc = AutoProc()
        autoProc.spaceGroup = dictLog["spaceGroup"]
        autoProc.refinedCell_a = dictLog["cellA"]
        autoProc.refinedCell_b = dictLog["cellB"]
        autoProc.refinedCell_c = dictLog["cellC"]
        autoProc.refinedCell_alpha = dictLog["cellAlpha"]
        autoProc.refinedCell_beta = dictLog["cellBeta"]
        autoProc.refinedCell_gamma = dictLog["cellGamma"]
        autoProcContainer.AutoProc = autoProc

        # AutoProcIntegrationContainer
        autoProcIntegrationContainer = AutoProcIntegrationContainer()
        autoProcIntegration = AutoProcIntegration()
        autoProcIntegration.autoProcIntegrationId = integrationId
        autoProcIntegration.cell_a = dictLog["cellA"]
        autoProcIntegration.cell_b = dictLog["cellB"]
        autoProcIntegration.cell_c = dictLog["cellC"]
        autoProcIntegration.cell_alpha = dictLog["cellAlpha"]
        autoProcIntegration.cell_beta = dictLog["cellBeta"]
        autoProcIntegration.cell_gamma = dictLog["cellGamma"]
        autoProcIntegration.anomalous = isAnom

        image = Image()
        image.dataCollectionId = dataCollectionId
        autoProcIntegrationContainer.AutoProcIntegration = autoProcIntegration
        autoProcIntegrationContainer.Image = image

        # Scaling container
        if xsDataResultXDSAPP.correctLP is not None:
            isa = self.parseCorrectLp(xsDataResultXDSAPP.correctLP.path.value)
        else:
            isa = None
        autoProcScalingContainer = AutoProcScalingContainer()
        autoProcScaling = AutoProcScaling()
        autoProcScaling.recordTimeStamp = time.strftime(
            "%Y-%m-%d %H:%M:%S", time.localtime())
        autoProcScalingContainer.AutoProcScaling = autoProcScaling
        for scalingStatisticsType in dictXscale:
            autoProcScalingStatistics = AutoProcScalingStatistics()
            autoProcScalingStatistics.scalingStatisticsType = scalingStatisticsType
            autoProcScalingStatistics.anomalous = isAnom
            for scalingStatisticsAttribute in dictXscale[
                    scalingStatisticsType]:
                setattr(
                    autoProcScalingStatistics, scalingStatisticsAttribute,
                    dictXscale[scalingStatisticsType]
                    [scalingStatisticsAttribute])
            if scalingStatisticsType == "overall" and isa is not None:
                autoProcScalingStatistics.isa = isa
            autoProcScalingContainer.addAutoProcScalingStatistics(
                autoProcScalingStatistics)
        autoProcScalingContainer.AutoProcIntegrationContainer = autoProcIntegrationContainer
        autoProcContainer.AutoProcScalingContainer = autoProcScalingContainer

        # Program
        autoProcProgramContainer = AutoProcProgramContainer()

        autoProcProgram = EDHandlerXSDataISPyBv1_4.createAutoProcProgram(
            programId=programId,
            status="SUCCESS",
            timeStart=timeStart,
            timeEnd=timeEnd,
            processingCommandLine=self.processingCommandLine,
            processingPrograms=self.processingPrograms)
        autoProcProgramContainer.AutoProcProgram = autoProcProgram

        # Attached files
        pyarchPath = EDHandlerESRFPyarchv1_0.createPyarchFilePath(
            processDirectory)
        pyarchResultPath = os.path.join(pyarchPath, "results")
        if not os.path.exists(pyarchResultPath):
            os.makedirs(pyarchResultPath, 0o755)

        # XDSAPP log and result files
        if xsDataResultXDSAPP.logFile is not None:
            self.addAttachment(autoProcProgramContainer,
                               xsDataResultXDSAPP.logFile.path.value,
                               "xdsapp",
                               "log",
                               isAnom,
                               attachmentType="Log")
        if xsDataResultXDSAPP.pointlessLog is not None:
            self.addAttachment(autoProcProgramContainer,
                               xsDataResultXDSAPP.pointlessLog.path.value,
                               "pointless",
                               "log",
                               isAnom,
                               attachmentType="Log")
        if xsDataResultXDSAPP.phenixXtriageLog is not None:
            self.addAttachment(autoProcProgramContainer,
                               xsDataResultXDSAPP.phenixXtriageLog.path.value,
                               "xtriage",
                               "log",
                               isAnom,
                               attachmentType="Log")
        if xsDataResultXDSAPP.correctLP is not None:
            self.addAttachment(autoProcProgramContainer,
                               xsDataResultXDSAPP.correctLP.path.value,
                               "CORRECT",
                               "LP",
                               isAnom,
                               attachmentType="Log")
        if xsDataResultXDSAPP.XDS_ASCII_HKL is not None:
            self.addAttachment(autoProcProgramContainer,
                               xsDataResultXDSAPP.XDS_ASCII_HKL.path.value,
                               "XDS_ASCII",
                               "HKL",
                               isAnom,
                               attachmentType="Result",
                               doGzip=True)
        if xsDataResultXDSAPP.XDS_INP is not None:
            self.addAttachment(autoProcProgramContainer,
                               xsDataResultXDSAPP.XDS_INP.path.value,
                               "XDS",
                               "INP",
                               isAnom,
                               attachmentType="Result",
                               doGzip=False,
                               noMergedString=True)
        for mtz_F in xsDataResultXDSAPP.mtz_F:
            basenameMtz_F = os.path.splitext(os.path.basename(
                mtz_F.path.value))[0]
            self.addAttachment(autoProcProgramContainer,
                               mtz_F.path.value,
                               basenameMtz_F,
                               "mtz",
                               isAnom,
                               attachmentType="Result")
        for mtz_F_plus_F_minus in xsDataResultXDSAPP.mtz_F_plus_F_minus:
            basenameMtz_F_plus_F_minus = os.path.splitext(
                os.path.basename(mtz_F_plus_F_minus.path.value))[0]
            self.addAttachment(autoProcProgramContainer,
                               mtz_F_plus_F_minus.path.value,
                               basenameMtz_F_plus_F_minus,
                               "mtz",
                               isAnom,
                               attachmentType="Result")
#        for mtz_I in xsDataResultXDSAPP.mtz_I:
#            basenameMtz_I = os.path.splitext(os.path.basename(mtz_I.path.value))[0]
#            self.addAttachment(autoProcProgramContainer, mtz_I.path.value,
#                               basenameMtz_I, "mtz", isAnom, attachmentType="Result")
#        for hkl in xsDataResultXDSAPP.hkl:
#            basenameHkl = os.path.splitext(os.path.basename(hkl.path.value))[0]
#            self.addAttachment(autoProcProgramContainer, hkl.path.value,
#                               basenameHkl, "hkl", isAnom, attachmentType="Result", doGzip=True)
#        for cv in xsDataResultXDSAPP.cv:
#            basenameCv = os.path.splitext(os.path.basename(cv.path.value))[0]
#            self.addAttachment(autoProcProgramContainer, cv.path.value,
#                               basenameCv, "cv", isAnom, attachmentType="Result", doGzip=True)

        if os.path.exists(strPathXscaleLp):
            self.addAttachment(autoProcProgramContainer,
                               strPathXscaleLp,
                               "XSCALE",
                               "LP",
                               isAnom,
                               isMerged=True,
                               attachmentType="Result")
        autoProcContainer.AutoProcProgramContainer = autoProcProgramContainer
        xsDataInputStoreAutoProc.AutoProcContainer = autoProcContainer
        return xsDataInputStoreAutoProc
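
The scaling-statistics loop above copies a nested dict (shell name -> attribute -> value) onto one AutoProcScalingStatistics object per shell via setattr, and attaches the ISa value to the overall shell when CORRECT.LP was parsed. A minimal stand-in showing the setattr pattern without the XSData classes (the dict keys and values below are invented):

class StatsStub(object):
    # Illustrative substitute for AutoProcScalingStatistics.
    pass

# Invented parse result in the same nested shape as dictXscale above.
dict_xscale = {
    "innerShell": {"completeness": 99.8, "rMerge": 0.05},
    "outerShell": {"completeness": 92.1, "rMerge": 0.45},
    "overall": {"completeness": 98.0, "rMerge": 0.08},
}

stats_objects = []
for shell, attributes in dict_xscale.items():
    stats = StatsStub()
    stats.scalingStatisticsType = shell
    for name, value in attributes.items():
        setattr(stats, name, value)  # copy every parsed attribute verbatim
    stats_objects.append(stats)

for s in stats_objects:
    print(s.scalingStatisticsType, vars(s))
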
Example 4
    def postProcess(self, _edObject=None):
        EDPluginControl.postProcess(self)
        self.DEBUG("EDPluginControlAutoproc.postProcess")

        # Now that we have executed the whole thing we need to create
        # the suitable ISPyB plugin input and serialize it to the file
        # we've been given as input
        output = AutoProcContainer()

        # AutoProc attr
        autoproc = AutoProc()

        xdsout = self.xds_first.dataOutput
        if xdsout.sg_number is not None:  # and it should not be None
            autoproc.spaceGroup = SPACE_GROUP_NAMES[xdsout.sg_number.value]
        autoproc.refinedCell_a = xdsout.cell_a.value
        autoproc.refinedCell_b = xdsout.cell_b.value
        autoproc.refinedCell_c = xdsout.cell_c.value
        autoproc.refinedCell_alpha = xdsout.cell_alpha.value
        autoproc.refinedCell_beta = xdsout.cell_beta.value
        autoproc.refinedCell_gamma = xdsout.cell_gamma.value

        output.AutoProc = autoproc

        # scaling container and all the things that go in
        scaling_container_noanom = AutoProcScalingContainer()
        scaling = AutoProcScaling()
        scaling.recordTimeStamp = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())

        scaling_container_noanom.AutoProcScaling = scaling

        # NOANOM PATH
        xscale_stats_noanom = self.xscale_generate.dataOutput.stats_noanom_merged
        inner_stats_noanom = xscale_stats_noanom.completeness_entries[0]
        outer_stats_noanom = xscale_stats_noanom.completeness_entries[-1]

        # use the previous shell's res as low res, if available
        prev_res = self.low_resolution_limit
        try:
            prev_res = xscale_stats_noanom.completeness_entries[-2].outer_res.value
        except IndexError:
            pass
        total_stats_noanom = xscale_stats_noanom.total_completeness

        stats = _create_scaling_stats(inner_stats_noanom, 'innerShell',
                                      self.low_resolution_limit, False)
        overall_low = stats.resolutionLimitLow
        scaling_container_noanom.AutoProcScalingStatistics.append(stats)

        stats = _create_scaling_stats(outer_stats_noanom, 'outerShell',
                                      prev_res, False)
        overall_high = stats.resolutionLimitHigh
        scaling_container_noanom.AutoProcScalingStatistics.append(stats)
        stats = _create_scaling_stats(total_stats_noanom, 'overall',
                                      self.low_resolution_limit, False)
        stats.resolutionLimitLow = overall_low
        stats.resolutionLimitHigh = overall_high
        scaling_container_noanom.AutoProcScalingStatistics.append(stats)

        integration_container_noanom = AutoProcIntegrationContainer()
        image = Image()
        image.dataCollectionId = self.dataInput.data_collection_id.value
        integration_container_noanom.Image = image

        integration_noanom = AutoProcIntegration()
        if self.integration_id_noanom is not None:
            integration_noanom.autoProcIntegrationId = self.integration_id_noanom
        crystal_stats = self.parse_xds_noanom.dataOutput
        integration_noanom.cell_a = crystal_stats.cell_a.value
        integration_noanom.cell_b = crystal_stats.cell_b.value
        integration_noanom.cell_c = crystal_stats.cell_c.value
        integration_noanom.cell_alpha = crystal_stats.cell_alpha.value
        integration_noanom.cell_beta = crystal_stats.cell_beta.value
        integration_noanom.cell_gamma = crystal_stats.cell_gamma.value
        integration_noanom.anomalous = 0

        # done with the integration
        integration_container_noanom.AutoProcIntegration = integration_noanom
        scaling_container_noanom.AutoProcIntegrationContainer = integration_container_noanom

        # ANOM PATH
        scaling_container_anom = AutoProcScalingContainer()
        scaling = AutoProcScaling()
        scaling.recordTimeStamp = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())

        scaling_container_anom.AutoProcScaling = scaling

        xscale_stats_anom = self.xscale_generate.dataOutput.stats_anom_merged
        inner_stats_anom = xscale_stats_anom.completeness_entries[0]
        outer_stats_anom = xscale_stats_anom.completeness_entries[-1]

        # use the previous shell's res as low res if available
        prev_res = self.low_resolution_limit
        try:
            prev_res = xscale_stats_anom.completeness_entries[-2].outer_res.value
        except IndexError:
            pass
        total_stats_anom = xscale_stats_anom.total_completeness

        stats = _create_scaling_stats(inner_stats_anom, 'innerShell',
                                      self.low_resolution_limit, True)
        overall_low = stats.resolutionLimitLow
        scaling_container_anom.AutoProcScalingStatistics.append(stats)

        stats = _create_scaling_stats(outer_stats_anom, 'outerShell',
                                      prev_res, True)
        overall_high = stats.resolutionLimitHigh
        scaling_container_anom.AutoProcScalingStatistics.append(stats)
        stats = _create_scaling_stats(total_stats_anom, 'overall',
                                      self.low_resolution_limit, True)
        stats.resolutionLimitLow = overall_low
        stats.resolutionLimitHigh = overall_high
        scaling_container_anom.AutoProcScalingStatistics.append(stats)


        integration_container_anom = AutoProcIntegrationContainer()
        image = Image()
        image.dataCollectionId = self.dataInput.data_collection_id.value
        integration_container_anom.Image = image

        integration_anom = AutoProcIntegration()
        crystal_stats = self.parse_xds_anom.dataOutput
        if self.integration_id_anom is not None:
            integration_anom.autoProcIntegrationId = self.integration_id_anom
        integration_anom.cell_a = crystal_stats.cell_a.value
        integration_anom.cell_b = crystal_stats.cell_b.value
        integration_anom.cell_c = crystal_stats.cell_c.value
        integration_anom.cell_alpha = crystal_stats.cell_alpha.value
        integration_anom.cell_beta = crystal_stats.cell_beta.value
        integration_anom.cell_gamma = crystal_stats.cell_gamma.value
        integration_anom.anomalous = 1

        # done with the integration
        integration_container_anom.AutoProcIntegration = integration_anom
        scaling_container_anom.AutoProcIntegrationContainer = integration_container_anom


        # ------ NO ANOM / ANOM end



        program_container = AutoProcProgramContainer()
        program_container.AutoProcProgram = AutoProcProgram()
        program_container.AutoProcProgram.processingCommandLine = ' '.join(sys.argv)
        program_container.AutoProcProgram.processingPrograms = 'edna-fastproc'

        # now for the generated files. There's some magic to do with
        # their paths to determine where to put them on pyarch
        pyarch_path = None
        # Note: the path is in the form /data/whatever

        # remove the edna-autoproc-import suffix
        original_files_dir = self.file_conversion.dataInput.output_directory.value
        #files_dir, _ = os.path.split(original_files_dir)
        files_dir = original_files_dir

        # the whole transformation is fragile!
        if files_dir.startswith('/data/visitor'):
            # We might get empty elements at the head/tail of the list
            tokens = [elem for elem in files_dir.split(os.path.sep)
                      if len(elem) > 0]
            pyarch_path = os.path.join('/data/pyarch',
                                       tokens[3], tokens[2],
                                       *tokens[4:])
        else:
            # We might get empty elements at the head/tail of the list
            tokens = [elem for elem in files_dir.split(os.path.sep)
                      if len(elem) > 0]
            if tokens[2] == 'inhouse':
                pyarch_path = os.path.join('/data/pyarch', tokens[1],
                                           *tokens[3:])
        if pyarch_path is not None:
            pyarch_path = pyarch_path.replace('PROCESSED_DATA', 'RAW_DATA')
            try:
                os.makedirs(pyarch_path)
            except OSError:
                # dir already exists, may happen when testing
                EDVerbose.screen('Target directory on pyarch ({0}) already exists, ignoring'.format(pyarch_path))

            file_list = []
            # we can now copy the files to this dir
            for f in os.listdir(original_files_dir):
                current = os.path.join(original_files_dir, f)
                if not os.path.isfile(current):
                    continue
                if os.path.splitext(current)[1].lower() not in ISPYB_UPLOAD_EXTENSIONS:
                    continue
                new_path = os.path.join(pyarch_path, f)
                file_list.append(new_path)
                shutil.copyfile(current,
                                new_path)
            # now add those to the ispyb upload
            for path in file_list:
                dirname, filename = os.path.split(path)
                attach = AutoProcProgramAttachment()
                attach.fileType = "Result"
                attach.fileName = filename
                attach.filePath = dirname
                program_container.AutoProcProgramAttachment.append(attach)


        program_container.AutoProcProgram.processingStatus = True
        output.AutoProcProgramContainer = program_container

        # first with anom

        output.AutoProcScalingContainer = scaling_container_anom

        ispyb_input = XSDataInputStoreAutoProc()
        ispyb_input.AutoProcContainer = output


        with open(self.dataInput.output_file.path.value, 'w') as f:
            f.write(ispyb_input.marshal())

        # store results in ispyb
        self.store_autoproc_anom.dataInput = ispyb_input
        t0 = time.time()
        self.store_autoproc_anom.executeSynchronous()
        self.stats['ispyb_upload'] = time.time() - t0

        with open(self.log_file_path, 'w') as f:
            json.dump(self.stats, f)

        if self.store_autoproc_anom.isFailure():
            self.ERROR('could not send results to ispyb')
        else:
            # store the autoproc ID as a filename in the
            # fastproc_integration_ids directory
            os.mknod(os.path.join(self.autoproc_ids_dir, str(self.integration_id_anom)), 0o755)
        # then noanom stats

        output.AutoProcScalingContainer = scaling_container_noanom

        ispyb_input = XSDataInputStoreAutoProc()
        ispyb_input.AutoProcContainer = output


        with open(self.dataInput.output_file.path.value, 'w') as f:
            f.write(ispyb_input.marshal())

        # store results in ispyb
        self.store_autoproc_noanom.dataInput = ispyb_input
        t0 = time.time()
        self.store_autoproc_noanom.executeSynchronous()
        self.stats['ispyb_upload'] = time.time() - t0

        with open(self.log_file_path, 'w') as f:
            json.dump(self.stats, f)

        if self.store_autoproc_noanom.isFailure():
            self.ERROR('could not send results to ispyb')
        else:
            # store the autoproc id
            os.mknod(os.path.join(self.autoproc_ids_dir, str(self.integration_id_noanom)), 0o755)
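
The pyarch mapping in postProcess above is essentially a path rewrite: /data/visitor/<proposal>/<beamline>/... becomes /data/pyarch/<beamline>/<proposal>/..., in-house paths keep their beamline element, and PROCESSED_DATA is swapped for RAW_DATA. A standalone sketch of that transformation (the example paths are invented):

import os

def to_pyarch(files_dir):
    # Mirrors the rewrite above; returns None when the directory does not
    # map onto /data/pyarch.
    tokens = [t for t in files_dir.split(os.path.sep) if t]
    pyarch = None
    if files_dir.startswith('/data/visitor'):
        # /data/visitor/<proposal>/<beamline>/... -> /data/pyarch/<beamline>/<proposal>/...
        pyarch = os.path.join('/data/pyarch', tokens[3], tokens[2], *tokens[4:])
    elif len(tokens) > 2 and tokens[2] == 'inhouse':
        # /data/<beamline>/inhouse/... -> /data/pyarch/<beamline>/...
        pyarch = os.path.join('/data/pyarch', tokens[1], *tokens[3:])
    if pyarch is not None:
        pyarch = pyarch.replace('PROCESSED_DATA', 'RAW_DATA')
    return pyarch

print(to_pyarch('/data/visitor/mx0000/id00/PROCESSED_DATA/run1'))
print(to_pyarch('/data/id00/inhouse/opid00/PROCESSED_DATA/run1'))
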
Example 5
    def postProcess(self, _edObject=None):
        EDPluginControl.postProcess(self)
        self.DEBUG("EDPluginControlAutoproc.postProcess")

        # Now that we have executed the whole thing we need to create
        # the suitable ISPyB plugin input and serialize it to the file
        # we've been given as input
        output = AutoProcContainer()

        # AutoProc attr
        autoproc = AutoProc()

        xdsout = self.xds_first.dataOutput
        if xdsout.sg_number is not None:  # and it should not be None
            autoproc.spaceGroup = SPACE_GROUP_NAMES[xdsout.sg_number.value]
        autoproc.refinedCell_a = xdsout.cell_a.value
        autoproc.refinedCell_b = xdsout.cell_b.value
        autoproc.refinedCell_c = xdsout.cell_c.value
        autoproc.refinedCell_alpha = xdsout.cell_alpha.value
        autoproc.refinedCell_beta = xdsout.cell_beta.value
        autoproc.refinedCell_gamma = xdsout.cell_gamma.value

        output.AutoProc = autoproc

        # scaling container and all the things that go in
        scaling_container_noanom = AutoProcScalingContainer()
        scaling = AutoProcScaling()
        scaling.recordTimeStamp = time.strftime("%Y-%m-%d %H:%M:%S",
                                                time.localtime())

        scaling_container_noanom.AutoProcScaling = scaling

        # NOANOM PATH
        xscale_stats_noanom = self.xscale_generate.dataOutput.stats_noanom_merged
        inner_stats_noanom = xscale_stats_noanom.completeness_entries[0]
        outer_stats_noanom = xscale_stats_noanom.completeness_entries[-1]

        # use the previous shell's res as low res, if available
        prev_res = self.low_resolution_limit
        try:
            prev_res = xscale_stats_noanom.completeness_entries[
                -2].outer_res.value
        except IndexError:
            pass
        total_stats_noanom = xscale_stats_noanom.total_completeness

        stats = _create_scaling_stats(inner_stats_noanom, 'innerShell',
                                      self.low_resolution_limit, False)
        overall_low = stats.resolutionLimitLow
        scaling_container_noanom.AutoProcScalingStatistics.append(stats)

        stats = _create_scaling_stats(outer_stats_noanom, 'outerShell',
                                      prev_res, False)
        overall_high = stats.resolutionLimitHigh
        scaling_container_noanom.AutoProcScalingStatistics.append(stats)
        stats = _create_scaling_stats(total_stats_noanom, 'overall',
                                      self.low_resolution_limit, False)
        stats.resolutionLimitLow = overall_low
        stats.resolutionLimitHigh = overall_high
        scaling_container_noanom.AutoProcScalingStatistics.append(stats)

        integration_container_noanom = AutoProcIntegrationContainer()
        image = Image()
        image.dataCollectionId = self.dataInput.data_collection_id.value
        integration_container_noanom.Image = image

        integration_noanom = AutoProcIntegration()
        if self.integration_id_noanom is not None:
            integration_noanom.autoProcIntegrationId = self.integration_id_noanom
        crystal_stats = self.parse_xds_noanom.dataOutput
        integration_noanom.cell_a = crystal_stats.cell_a.value
        integration_noanom.cell_b = crystal_stats.cell_b.value
        integration_noanom.cell_c = crystal_stats.cell_c.value
        integration_noanom.cell_alpha = crystal_stats.cell_alpha.value
        integration_noanom.cell_beta = crystal_stats.cell_beta.value
        integration_noanom.cell_gamma = crystal_stats.cell_gamma.value
        integration_noanom.anomalous = 0

        # done with the integration
        integration_container_noanom.AutoProcIntegration = integration_noanom
        scaling_container_noanom.AutoProcIntegrationContainer = integration_container_noanom

        # ANOM PATH
        scaling_container_anom = AutoProcScalingContainer()
        scaling = AutoProcScaling()
        scaling.recordTimeStamp = time.strftime("%Y-%m-%d %H:%M:%S",
                                                time.localtime())

        scaling_container_anom.AutoProcScaling = scaling

        xscale_stats_anom = self.xscale_generate.dataOutput.stats_anom_merged
        inner_stats_anom = xscale_stats_anom.completeness_entries[0]
        outer_stats_anom = xscale_stats_anom.completeness_entries[-1]

        # use the previous shell's res as low res if available
        prev_res = self.low_resolution_limit
        try:
            prev_res = xscale_stats_anom.completeness_entries[
                -2].outer_res.value
        except IndexError:
            pass
        total_stats_anom = xscale_stats_anom.total_completeness

        stats = _create_scaling_stats(inner_stats_anom, 'innerShell',
                                      self.low_resolution_limit, True)
        overall_low = stats.resolutionLimitLow
        scaling_container_anom.AutoProcScalingStatistics.append(stats)

        stats = _create_scaling_stats(outer_stats_anom, 'outerShell', prev_res,
                                      True)
        overall_high = stats.resolutionLimitHigh
        scaling_container_anom.AutoProcScalingStatistics.append(stats)
        stats = _create_scaling_stats(total_stats_anom, 'overall',
                                      self.low_resolution_limit, True)
        stats.resolutionLimitLow = overall_low
        stats.resolutionLimitHigh = overall_high
        scaling_container_anom.AutoProcScalingStatistics.append(stats)

        integration_container_anom = AutoProcIntegrationContainer()
        image = Image()
        image.dataCollectionId = self.dataInput.data_collection_id.value
        integration_container_anom.Image = image

        integration_anom = AutoProcIntegration()
        crystal_stats = self.parse_xds_anom.dataOutput
        if self.integration_id_anom is not None:
            integration_anom.autoProcIntegrationId = self.integration_id_anom
        integration_anom.cell_a = crystal_stats.cell_a.value
        integration_anom.cell_b = crystal_stats.cell_b.value
        integration_anom.cell_c = crystal_stats.cell_c.value
        integration_anom.cell_alpha = crystal_stats.cell_alpha.value
        integration_anom.cell_beta = crystal_stats.cell_beta.value
        integration_anom.cell_gamma = crystal_stats.cell_gamma.value
        integration_anom.anomalous = 1

        # done with the integration
        integration_container_anom.AutoProcIntegration = integration_anom
        scaling_container_anom.AutoProcIntegrationContainer = integration_container_anom

        # ------ NO ANOM / ANOM end

        program_container = AutoProcProgramContainer()
        program_container.AutoProcProgram = AutoProcProgram()
        program_container.AutoProcProgram.processingCommandLine = ' '.join(
            sys.argv)
        program_container.AutoProcProgram.processingPrograms = 'edna-fastproc'

        # now for the generated files. There's some magic to do with
        # their paths to determine where to put them on pyarch
        pyarch_path = None
        # Note: the path is in the form /data/whatever

        # remove the edna-autoproc-import suffix
        original_files_dir = self.file_conversion.dataInput.output_directory.value
        #files_dir, _ = os.path.split(original_files_dir)
        files_dir = original_files_dir

        # the whole transformation is fragile!
        if files_dir.startswith('/data/visitor'):
            # We might get empty elements at the head/tail of the list
            tokens = [
                elem for elem in files_dir.split(os.path.sep) if len(elem) > 0
            ]
            pyarch_path = os.path.join('/data/pyarch', tokens[3], tokens[2],
                                       *tokens[4:])
        else:
            # We might get empty elements at the head/tail of the list
            tokens = [
                elem for elem in files_dir.split(os.path.sep) if len(elem) > 0
            ]
            if tokens[2] == 'inhouse':
                pyarch_path = os.path.join('/data/pyarch', tokens[1],
                                           *tokens[3:])
        if pyarch_path is not None:
            pyarch_path = pyarch_path.replace('PROCESSED_DATA', 'RAW_DATA')
            try:
                os.makedirs(pyarch_path)
            except OSError:
                # dir already exists, may happen when testing
                EDVerbose.screen(
                    'Target directory on pyarch ({0}) already exists, ignoring'
                    .format(pyarch_path))

            file_list = []
            # we can now copy the files to this dir
            for f in os.listdir(original_files_dir):
                current = os.path.join(original_files_dir, f)
                if not os.path.isfile(current):
                    continue
                if os.path.splitext(
                        current)[1].lower() not in ISPYB_UPLOAD_EXTENSIONS:
                    continue
                new_path = os.path.join(pyarch_path, f)
                file_list.append(new_path)
                shutil.copyfile(current, new_path)
            # now add those to the ispyb upload
            for path in file_list:
                dirname, filename = os.path.split(path)
                attach = AutoProcProgramAttachment()
                attach.fileType = "Result"
                attach.fileName = filename
                attach.filePath = dirname
                program_container.AutoProcProgramAttachment.append(attach)

        program_container.AutoProcProgram.processingStatus = True
        output.AutoProcProgramContainer = program_container

        # first with anom

        output.AutoProcScalingContainer = scaling_container_anom

        ispyb_input = XSDataInputStoreAutoProc()
        ispyb_input.AutoProcContainer = output

        with open(self.dataInput.output_file.path.value, 'w') as f:
            f.write(ispyb_input.marshal())

        # store results in ispyb
        self.store_autoproc_anom.dataInput = ispyb_input
        t0 = time.time()
        self.store_autoproc_anom.executeSynchronous()
        self.stats['ispyb_upload'] = time.time() - t0

        with open(self.log_file_path, 'w') as f:
            json.dump(self.stats, f)

        if self.store_autoproc_anom.isFailure():
            self.ERROR('could not send results to ispyb')
        else:
            # store the autoproc ID as a filename in the
            # fastproc_integration_ids directory
            os.mknod(
                os.path.join(self.autoproc_ids_dir,
                             str(self.integration_id_anom)), 0o755)
        # then noanom stats

        output.AutoProcScalingContainer = scaling_container_noanom

        ispyb_input = XSDataInputStoreAutoProc()
        ispyb_input.AutoProcContainer = output

        with open(self.dataInput.output_file.path.value, 'w') as f:
            f.write(ispyb_input.marshal())

        # store results in ispyb
        self.store_autoproc_noanom.dataInput = ispyb_input
        t0 = time.time()
        self.store_autoproc_noanom.executeSynchronous()
        self.stats['ispyb_upload'] = time.time() - t0

        with open(self.log_file_path, 'w') as f:
            json.dump(self.stats, f)

        if self.store_autoproc_noanom.isFailure():
            self.ERROR('could not send results to ispyb')
        else:
            # store the autoproc id
            os.mknod(
                os.path.join(self.autoproc_ids_dir,
                             str(self.integration_id_noanom)), 0o755)
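
Both postProcess variants build their attachment list the same way: copy every result file with an allowed extension into the pyarch directory, then record one attachment (fileType/fileName/filePath) per copied file. A self-contained sketch of that recipe, using plain dicts in place of AutoProcProgramAttachment and an invented extension whitelist (the real one is ISPYB_UPLOAD_EXTENSIONS):

import os
import shutil

UPLOAD_EXTENSIONS = {'.mtz', '.log', '.lp'}  # invented whitelist for illustration

def collect_attachments(source_dir, pyarch_dir):
    # Copy eligible files and build one attachment record per copy,
    # mirroring the loop over os.listdir(original_files_dir) above.
    attachments = []
    for name in os.listdir(source_dir):
        current = os.path.join(source_dir, name)
        if not os.path.isfile(current):
            continue
        if os.path.splitext(current)[1].lower() not in UPLOAD_EXTENSIONS:
            continue
        new_path = os.path.join(pyarch_dir, name)
        shutil.copyfile(current, new_path)
        dirname, filename = os.path.split(new_path)
        attachments.append({"fileType": "Result",
                            "fileName": filename,
                            "filePath": dirname})
    return attachments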