# Imports needed by the functions below. The internal helpers referenced
# here (_check_settings, _verbose, _data_convergence, _loop_progress,
# _uniq_rows, _concatenate_arrays) are assumed to be provided by the
# host package and are not defined in this file.
from __future__ import division    # the running-average updates below
                                   # rely on true division
import math
import os
import sys
import cPickle

from sys import stdout

import numpy as np

def main(dSysSettings, dEvalData, dPattEval, dPatt):

    # =================================================================
    # Check if the PDF evaluation should be performed
    # If not, return from the function

    (bEvalOn, dPDFEval) = _check_settings(dSysSettings)
    if not bEvalOn:
        return (dEvalData, dPattEval, dSysSettings)

    # =================================================================

    # =================================================================
    # Report progress

    # Get the verbose settings
    (bInfo, dSysSettings) = _verbose.get_settings(dSysSettings)

    # - - - - - - - - - - - - - - - - - - - - - - - - -

    # Report to the console, if needed
    if bInfo == 1:

        # Print to the console
        stdout.write('EVAL: Probability density (PDF)...             ')

    # =================================================================

    # =================================================================
    # Check if the dictionary with internal data of PDF evaluation must be
    # created
    if 'dPDFData' not in dEvalData:

        # Dictionary does not exist, must be created:

        # -----------------------------------------------------------------
        # Get the number of patterns in a pack (size of a pattern pack)

        dMemory = dSysSettings['dMemory']   # Memory configuration dictionary

        nPattPack = dMemory['nPattPack']    # The number of patterns in a pack

        # -----------------------------------------------------------------

        # - - - - - - - - - - - - - - - - - - - - - - - - -

        # Get the time of the sampling pattern
        tS = float(dPatt['tS_r'])

        # Get the grid period
        Tg = float(dPatt['Tg'])

        # Compute the number of grid points in the sampling pattern
        nGrids = int(math.floor(tS/Tg))
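        # (illustrative: with tS = 1.0 s and Tg = 1e-3 s, the sampling
        #  pattern grid has nGrids = 1000 points)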

        # Create the dictionary with internal data of PDF evaluation
        dPDFData = {}

        # Reset the vector with PDF for all the grid points
        dPDFData['vPDF'] = np.zeros(nGrids)

        # Reset the vector with normalized PDF error
        dPDFData['vPDFNorm'] = np.nan*np.ones(nPattPack)

        # - - - - - - - - - - - - - - - - - - - - - - - - -

        # Reset the vector with convergence of PDF error
        dPDFData['vPDFErrConv'] = np.nan*np.ones(nPattPack)

        # Reset the normalized PDF error
        dPDFData['iPDFErr'] = np.nan

        # - - - - - - - - - - - - - - - - - - - - - - - - -

        # Reset the total number of sampling points in all the patterns
        dPDFData['nSPts'] = 0

        # - - - - - - - - - - - - - - - - - - - - - - - - -

        # Store the dictionary with internal data of PDF evaluation
        # in the dictionary with internal data of evaluation
        # functions and counters
        dEvalData['dPDFData'] = dPDFData

    # =================================================================

    # =================================================================
    # Get the settings for max allowed errors

    # ---------------------------------------------------
    #  Frequency stability data:

    # Is frequency stability evaluation on and
    # there are user settings for max frequency error?
    if 'dFreqData' in dEvalData and 'iPDF_iMaxErrFreq' in dPDFEval:
        iPDF_iMaxErrFreq = dPDFEval['iPDF_iMaxErrFreq']
    else:
        iPDF_iMaxErrFreq = float(np.inf)
    # ---------------------------------------------------

    # ---------------------------------------------------
    #  Minimum distance data

    # Is minimum distance evaluation on and
    # there is user setting for max minimum distance error?
    if 'dMinDData' in dEvalData and 'iPDF_iMaxErrMinD' in dPDFEval:
        iPDF_iMaxErrMinD = dPDFEval['iPDF_iMaxErrMinD']
    else:
        iPDF_iMaxErrMinD = float(np.inf)
    # ---------------------------------------------------

    # ---------------------------------------------------
    #  Maximum distance data

    # Is maximum distance evaluation on and
    # there is user setting for max maximum distance error?
    if 'dMaxDData' in dEvalData and 'iPDF_iMaxErrMaxD' in dPDFEval:
        iPDF_iMaxErrMaxD = dPDFEval['iPDF_iMaxErrMaxD']
    else:
        iPDF_iMaxErrMaxD = float(np.inf)
    # ---------------------------------------------------

    # =================================================================

    # =================================================================
    # Get the needed data

    mPatternsGrid = dPatt['mPatternsGrid']    # The matrix with sampling
                                              # patterns

    # - - - - - - - - - - - - - - - - - - - - - - - - -

    dPDFData = dEvalData['dPDFData']    # The dictionary with internal data
                                        # of PDF evaluation

    nSPts = dPDFData['nSPts']           # The total number of sampling
                                        # points in all the patterns

    # -----------------------------------------------------------------
    vPDF = dPDFData['vPDF']             # Get the vector with PDF for all
                                        # the grid points

    vPDFNorm = dPDFData['vPDFNorm']     # The normalized vector with
                                        # PDF errors
    # -----------------------------------------------------------------

    iPDFErr = dPDFData['iPDFErr']       # The current PDF error

    vPDFErrConv = dPDFData['vPDFErrConv']  # The vector with convergence of PDF
                                           # error

    # - - - - - - - - - - - - - - - - - - - - - - - - -

    # - - - - - - - - - - - - - - - - - - - - - - - - -

    dMemory = dSysSettings['dMemory']   # Memory configuration dictionary

    nPattPack = dMemory['nPattPack']    # The number of patterns in a pack

    # - - - - - - - - - - - - - - - - - - - - - - - - -

    Tg = dPatt['Tg']    # The grid period

    tS = dPatt['tS_r']  # The time of the sampling pattern

    # - - - - - - - - - - - - - - - - - - - - - - - - -
    # Get the vector with frequency errors
    # (if needed)
    if not np.isinf(iPDF_iMaxErrFreq):

        dFreqData = dEvalData['dFreqData']  # The dictionary with frequency
                                            # stability evaluation data

        vFreqErr = dFreqData['vFreqErr']    # Vector with frequency errors for
                                            # the current patterns pack

    # Get the vector with minimum distance errors
    # (if needed)
    if not np.isinf(iPDF_iMaxErrMinD):

        dMinDData = dEvalData['dMinDData']  # Dictionary with data for
                                            # minimum distance evaluation

        vMinDErr = dMinDData['vMinDErr']    # Vector with minimum distances
                                            # errors for the current patterns
                                            # pack

    # Get the vector with maximum distance errors
    # (if needed)
    if not np.isinf(iPDF_iMaxErrMaxD):

        # Get the vector with maximum distance errors
        dMaxDData = dEvalData['dMaxDData']   # Dictionary with data for maximum
                                             # distance evaluation

        vMaxDErr = dMaxDData['vMaxDErr']     # Vector with maximum distance
                                             # errors for the current patterns
                                             # pack

    # - - - - - - - - - - - - - - - - - - - - - - - - -
    # =================================================================

    # =================================================================
    # Compute the PDF errors

    # Compute the number of grid points in the sampling pattern
    nGrids = int(math.floor(tS/Tg))

    # Reset the number of patterns which fulfill the requirements
    # for maximum errors from the current pack
    nPOk_p = 0

    # Allocate temporary vector for PDF error convergence
    vPDFErrConv_ = np.nan*np.ones(vPDFErrConv.shape)

    # Loop over all sampling patterns
    for inxP in range(0, nPattPack):

        # - - - - - - - - - - - - - - - - - - - - - - - - -
        # Check if the current pattern fulfills requirements for max errors
        if not np.isinf(iPDF_iMaxErrFreq):
            iEf = vFreqErr[inxP]        # Get the frequency error
            if iEf > iPDF_iMaxErrFreq:
                continue                # Pattern does not fulfill the
                                        # requirements, move to the next one

        if not np.isinf(iPDF_iMaxErrMinD):
            iMinDE = vMinDErr[inxP]     # Get the minimum distance error
            if iMinDE > iPDF_iMaxErrMinD:
                continue                # Pattern does not fulfill the
                                        # requirements, move to the next one

        if not np.isinf(iPDF_iMaxErrMaxD):
            iMaxDE = vMaxDErr[inxP]    # Get the maximum distance error
            if iMaxDE > iPDF_iMaxErrMaxD:
                continue                # Pattern does not fulfill the
                                        # requirements, move to the next one

        # Update the number of patterns which fulfill the requirements
        # for maximum errors and which will be checked for convergence
        nPOk_p = nPOk_p + 1

        # - - - - - - - - - - - - - - - - - - - - - - - - -

        # Get the current sampling pattern and remove empty (zero) entries
        vPattern = mPatternsGrid[inxP, :]
        vPattern = vPattern[vPattern > 0]

        # Get the length of the sampling pattern
        # (the number of samples)
        nPts = vPattern.size

        # - - - - - - - - - - - - - - - - - - - - - - - - -

        # Update the total number of sampling points
        nSPts = nSPts + nPts

        # - - - - - - - - - - - - - - - - - - - - - - - - -

        # Add the current pattern to the PDF function
        vPattern = vPattern.astype(int)
        vPattern = vPattern - 1
        vPDF[vPattern] = vPDF[vPattern] + 1

        # Normalize the histogram
        vPDFNorm = vPDF/(nSPts/nGrids)
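        # (for perfectly uniform sampling every grid point is hit
        #  nSPts/nGrids times on average, so each entry of 'vPDFNorm'
        #  tends to 1)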

        # - - - - - - - - - - - - - - - - - - - - - - - - -

        # Compute the current PDF error
        iPDFErr = sum((vPDFNorm-1)**2)/vPDFNorm.size

        # Store the error in the temporary vector
        vPDFErrConv_[nPOk_p-1] = iPDFErr

    # =================================================================

    # =================================================================
    # Store the PDF errors in the 'vPDFErrConv' vector

    # Calculate the size of data in the 'vPDFErrConv' vector
    nOld = int(np.sum(~np.isnan(vPDFErrConv)))

    # Get the number of free spaces in the 'vPDFErrConv' vector
    nFree = int(np.sum(np.isnan(vPDFErrConv)))

    # Calculate the size of missing space in the 'vPDFErrConv' vector
    nMiss = max(0, (nPOk_p - nFree))

    # Calculate the size of data in the 'vPDFErrConv' vector which will
    # not be deleted
    nKept = nOld - nMiss
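    # ('vPDFErrConv' thus works as a FIFO buffer: the oldest nMiss
    #  entries are dropped to make room for the nPOk_p new errors)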

    # Move backward data in the vPDFErrConv vector
    if nKept > 0:
        vPDFErrConv[0:nKept] = vPDFErrConv[nMiss:nOld]

    # Move the valid entries from the temporary 'vPDFErrConv_' vector to
    # the vPDFErrConv vector
    vPDFErrConv[nKept:(nKept + nPOk_p)] = vPDFErrConv_[0:nPOk_p]

    # =================================================================

    # =================================================================
    # Store the data

    # Store the vector with PDF
    dPDFData['vPDF'] = vPDF

    # Store the vector with normalized PDF
    dPDFData['vPDFNorm'] = vPDFNorm

    # Store the current PDF error
    dPDFData['iPDFErr'] = iPDFErr

    # Store the vector with convergence of PDF error
    dPDFData['vPDFErrConv'] = vPDFErrConv

    # Store the total number of sampling points in all the patterns
    dPDFData['nSPts'] = nSPts

    # -----------------------------------------------------------------

    # Store the dictionary with internal data of PDF evaluation in the
    # dictionary with internal data of evaluation functions and counters
    dEvalData['dPDFData'] = dPDFData

    # -----------------------------------------------------------------
    # -----------------------------------------------------------------
    # The dictionary with patterns evaluation results (dPattEval):

    # Store the current PDF error for all the patterns in dictionary with
    # patterns evaluation results (dPattEval)
    dPattEval['iPDFErr'] = iPDFErr

    # Store vector with normalized PDF in the dictionary with patterns
    # evaluation results
    dPattEval['vPDFNorm'] = vPDFNorm

    # Store the vector with convergence of PDF error
    dPattEval['vPDFErrConv'] = vPDFErrConv

    # =================================================================

    # =================================================================
    # Report to the console, if needed
    if bInfo == 1:
        print('done. (error: %.4f)' % iPDFErr)

    # =================================================================

    # =================================================================
    return (dEvalData, dPattEval, dSysSettings)
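
# ---------------------------------------------------------------------
# Illustrative sketch (not part of the original module; the name is
# hypothetical): the FIFO update applied to 'vPDFErrConv' above, shown
# on a plain NumPy vector. New values are appended at the back and the
# oldest stored values are dropped first. Assumes vNew fits into vBuf.
def _fifo_store_sketch(vBuf, vNew):
    nNew = vNew.size
    nOld = int(np.sum(~np.isnan(vBuf)))     # valid entries already stored
    nFree = vBuf.size - nOld                # free (NaN) slots at the back
    nMiss = max(0, nNew - nFree)            # old entries which must go
    nKept = nOld - nMiss                    # old entries which survive
    if nKept > 0:
        vBuf[0:nKept] = vBuf[nMiss:nOld]    # shift survivors backward
    vBuf[nKept:nKept + nNew] = vNew         # append the new entries
    return vBuf
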
def main(dSysSettings, dEvalData, dPattEval):

    # =================================================================
    # Check if the counter of correct patterns should be run

    # Check if the user setting for the correct patterns counter exist
    (bCntrOn, dRCPCntr) = _check_settings(dSysSettings)
    if not bCntrOn:
        return (dEvalData, dPattEval, dSysSettings)

    # =================================================================

    # =================================================================
    # Report progress

    # Get the verbose settings
    (bInfo, dSysSettings) = _verbose.get_settings(dSysSettings)

    # - - - - - - - - - - - - - - - - - - - - - - - - -

    # Report to the console, if needed
    if bInfo == 1:
        # Print to the console
        stdout.write('CNTR: Correct patterns counter...              ')

    # =================================================================

    # =================================================================
    # Check if the dictionary with internal data 'dRCPData' should be reset
    if 'dRCPData' not in dEvalData:

        # -------------------------------------------------------------
        # Get the number of patterns in a pack (size of a pattern pack)

        dMemory = dSysSettings['dMemory']   # Memory configuration dictionary

        nPattPack = dMemory['nPattPack']    # The number of patterns in a pack

        # -----------------------------------------------------------------

        # Create the dictionary with internal data of the correct patterns
        # counter.
        dRCPData = {}

        # Reset the vector with convergence of the ratio of correct patterns
        dRCPData['vCorrConv'] = np.nan*np.ones(nPattPack)

        # Store the dictionary with internal data in the dictionary with
        # internal data of evaluation functions and counters
        dEvalData['dRCPData'] = dRCPData

        # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

    # =================================================================

    # =================================================================
    # Get the settings for max allowed errors

    # ---------------------------------------------------
    #  Frequency stability data:

    # Is frequency stability evaluation on and
    # there is user setting for max frequency error?
    if 'dFreqData' in dEvalData and 'iRCP_iMaxErrFreq' in dRCPCntr:
        iRCP_iMaxErrFreq = dRCPCntr['iRCP_iMaxErrFreq']
    else:
        iRCP_iMaxErrFreq = np.inf
    # ---------------------------------------------------

    # ---------------------------------------------------
    #  Minimum distance data

    # Is minimum distance evaluation on and
    # there is user setting for max minimum distance error?
    if 'dMinDData' in dEvalData and 'iRCP_iMaxErrMinD' in dRCPCntr:
        iRCP_iMaxErrMinD = dRCPCntr['iRCP_iMaxErrMinD']
    else:
        iRCP_iMaxErrMinD = np.inf
    # ---------------------------------------------------

    # ---------------------------------------------------
    #  Maximum distance data

    # Is maximum distance evaluation on and
    # there is user setting for max maximum distance error?
    if 'dMaxDData' in dEvalData and 'iRCP_iMaxErrMaxD' in dRCPCntr:
        iRCP_iMaxErrMaxD = dRCPCntr['iRCP_iMaxErrMaxD']
    else:
        iRCP_iMaxErrMaxD = np.inf
    # ---------------------------------------------------

    # =================================================================

    # =================================================================
    # Get the needed data

    # Get the dictionary with internal data of the correct patterns counter.
    dRCPData = dEvalData['dRCPData']

    # Get the vector with convergence of the ratio of correct patterns
    vCorrConv = dRCPData['vCorrConv']

    # -----------------------------------------------------------------
    # Get the number of patterns in a pack (size of a pattern pack)

    dMemory = dSysSettings['dMemory']   # Memory configuration dictionary

    nPattPack = dMemory['nPattPack']    # The number of patterns in a pack

    # -----------------------------------------------------------------

    # - - - - - - - - - - - - - - - - - - - - - - - - -
    # Get the data needed for maximum errors

    # Get the vector with frequency errors (if needed)
    if not np.isinf(iRCP_iMaxErrFreq):

        dFreqData = dEvalData['dFreqData']   # The dictionary with frequency
                                             # stability evaluation data

        vFreqErr = dFreqData['vFreqErr']     # Vector with frequency errors
                                             # for the current patterns pack

    # Get the vector with minimum distance errors
    # (if needed)
    if not np.isinf(iRCP_iMaxErrMinD):

        dMinDData = dEvalData['dMinDData']    # Dictionary with data for
                                              # minimum distance evaluation

        vMinDErr = dMinDData['vMinDErr']      # Vector with minimum distances
                                              # errors for the current
                                              # patterns pack

    # Get the vector with maximum distance errors
    # (if needed)
    if not np.isinf(iRCP_iMaxErrMaxD):

        # Get the vector with maximum distance errors
        dMaxDData = dEvalData['dMaxDData']   # Dictionary with data for
                                             # maximum distance evaluation

        vMaxDErr = dMaxDData['vMaxDErr']     # Vector with maximum distance
                                             # errors for the current patterns
                                             # pack

    # - - - - - - - - - - - - - - - - - - - - - - - - -

    # Get the index of patterns pack
    inxPP = dSysSettings['inxPP']

    # - - - - - - - - - - - - - - - - - - - - - - - - -
    # =================================================================

    # =================================================================
    # Check which patterns are correct

    # Loop over all sampling patterns
    for inxP in range(0, nPattPack):

        # Compute global index of the current pattern
        inxGP = inxPP*nPattPack + inxP

        # Reset the flag to correct
        bCorr = 1

        # -------------------------------------------------
        # Check if the current pattern fulfills requirements
        # for max errors:

        # Frequency error:
        if not np.isinf(iRCP_iMaxErrFreq):

            # Get the frequency error for the current pattern
            iEf = vFreqErr[inxP]
            if iEf > iRCP_iMaxErrFreq:
                bCorr = 0    # Pattern does not fulfill the requirements

        # -------------------------------------------------
        # Minimum distance error:
        if not np.isinf(iRCP_iMaxErrMinD):

            # Get the min dist. error for the current pattern
            iMinDE = vMinDErr[inxP]
            if iMinDE > iRCP_iMaxErrMinD:
                bCorr = 0    # Pattern does not fulfill the requirements

        # -------------------------------------------------
        # Maximum distance error:
        if not np.isinf(iRCP_iMaxErrMaxD):

            # Get the max dist. error for the current pattern
            iMaxDE = vMaxDErr[inxP]
            if iMaxDE > iRCP_iMaxErrMaxD:
                bCorr = 0      # Pattern does not fulfill the requirements

        # -------------------------------------------------

        # Compute the ratio of correct patterns
        if inxGP == 0:
            iRCPatt = bCorr
        elif inxP == 0:
            iRCPatt = (inxGP/(inxGP+1))*vCorrConv[nPattPack-1] \
                + (1/(inxGP+1))*bCorr
        else:
            iRCPatt = (inxGP/(inxGP+1))*vCorrConv[inxP-1] + (1/(inxGP+1))*bCorr
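        # (this is the standard incremental mean: after n+1 observations
        #  mean_{n+1} = (n/(n+1))*mean_n + (1/(n+1))*x_{n+1}, so iRCPatt
        #  equals the mean of the bCorr flags over all patterns so far)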

        # Store the current ratio of correct patterns:
        vCorrConv[inxP] = iRCPatt

    # =================================================================

    # =================================================================
    # Store the data

    # Store the vector with convergence of ratio of correct patterns
    # in the dictionary with internal data of the correct patterns counter.
    dRCPData['vCorrConv'] = vCorrConv

    # -----------------------------------------------------------------

    # Store the dictionary with internal data of the correct patterns counter.
    # in the dictionary with internal data of evaluation functions and counters
    dEvalData['dRCPData'] = dRCPData

    # -----------------------------------------------------------------
    # -----------------------------------------------------------------
    # Patterns evaluation dictionary (dPattEval):

    # Get the current ratio of correct patterns
    # and store it in the patterns evaluation dictionary (dPattEval)
    dPattEval['iCorrR'] = vCorrConv[inxP]

    # Store the vector with convergence of the ratio of correct patterns
    dPattEval['vCorrConv'] = vCorrConv

    # =================================================================

    # =================================================================
    # Report to the console, if needed
    if bInfo == 1:
        print('done. (ratio of correct patterns: %.4f)' % vCorrConv[inxP])
    # =================================================================

    # ================================================================
    # Return
    return (dEvalData, dPattEval, dSysSettings)
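
# ---------------------------------------------------------------------
# Illustrative sketch (hypothetical name, not part of the original
# module): the O(1) incremental mean used above to track the ratio of
# correct patterns; equivalent to averaging all the flags seen so far.
def _running_mean_sketch(iMeanPrev, nSeen, iNew):
    return (nSeen/(nSeen + 1))*iMeanPrev + (1/(nSeen + 1))*iNew
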
def check_convergence(dSysSettings, dEvalData, dSTOPData, bStop):

    # =================================================================
    # Get the user setting for correct patterns counter
    (bCntrOn, dRCPCntr) = _check_settings(dSysSettings)

    # =================================================================

    # =================================================================
    # If this counter was not on, skip this function
    if bCntrOn == 0:
        return (bStop, dSysSettings, dSTOPData)

    # =================================================================

    # =================================================================
    # Check if the convergence check for this function is on.
    if 'bConvCheck' in dRCPCntr:
        bConvCheck = dRCPCntr['bConvCheck']
    else:
        bConvCheck = 0

    # =================================================================

    # =================================================================
    # If the convergence check is off, skip this function
    if bConvCheck == 0:
        return (bStop, dSysSettings, dSTOPData)

    # =================================================================

    # =================================================================
    # Get the needed data

    # -----------------------------------------------------------------
    # Get the settings for the convergence

    # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    iCheck = dSTOPData['EVAL_RCP']    # Index of counter of correct patterns

    iMinConvMargin = dSTOPData['iMinConvMargin']  # Minimum convergence
                                                  # margin

    # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

    iMargin = dRCPCntr['iConvMarg']          # Allowed margin

    iLastPatt = dRCPCntr['iConvLastPatt']    # Size of the last data which
                                             # is checked for convergence

    # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

    dRCPData = dEvalData['dRCPData']     # The dictionary with internal data
                                         # of the correct patterns counter

    vCorrConv = dRCPData['vCorrConv']    # Get the vector with convergence of
                                         # the ratio of correct patterns

    # -----------------------------------------------------------------
    # =================================================================

    # =================================================================
    # Check the convergence

    # Measure the convergence:
    # Vector with convergence of the ratio of correct patterns
    (bConvOk, iConvDist) = \
        _data_convergence.is_data_converged(vCorrConv, iLastPatt,
                                            iMargin, iMinConvMargin)

    # Check the stop condition
    (bStop, dSTOPData, inxC, bLocalStop) = \
        _data_convergence.check_conv_counter(dSTOPData, bConvOk,
                                             bStop, iCheck)

    # =================================================================

    # =================================================================
    # Report to the console (if needed)

    # Get the verbose settings
    (bInfo, _) = _verbose.get_settings(dSysSettings)

    # Print the info
    if bInfo == 1:

        stdout.write('STOP CHECK: ')
        stdout.write('Correct patterns counter convergence:        ')
        if bConvOk == 1:
            stdout.write('[ratio]: OK             ')
            stdout.write('                                            ')
        else:
            strMessage = '[ratio]: FAILED (%2.2f) ' % (iConvDist)
            stdout.write(strMessage)

        if bConvOk == 1:
            strMessage = 'OK %d TIME' % (inxC)
            stdout.write(strMessage)
            if bLocalStop == 1:
                strMessage = ' (enough) \n'
                stdout.write(strMessage)
            else:
                strMessage = '\n'
                stdout.write(strMessage)
        else:
            strMessage = '\n'
            stdout.write(strMessage)

    # =================================================================

    # =================================================================
    return (bStop, dSysSettings, dSTOPData)
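
# ---------------------------------------------------------------------
# Illustrative sketch of a convergence test in the spirit of
# _data_convergence.is_data_converged. The real helper is not shown in
# this file, so the criterion below (spread of the tail of the data
# against an allowed margin) is only an assumption made for illustration.
def _is_converged_sketch(vData, iLastPatt, iMargin, iMinConvMargin):
    vTail = vData[-iLastPatt:]
    vTail = vTail[~np.isnan(vTail)]             # ignore unfilled entries
    if vTail.size == 0:
        return (False, float('nan'))
    iConvDist = float(np.max(vTail) - np.min(vTail))
    iAllowed = max(iMargin, iMinConvMargin)     # hypothetical margin rule
    return (iConvDist <= iAllowed, iConvDist)
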
def main(dSysSettings, dEvalData, dPattEval, dPatt):

    # =================================================================
    # Check if the minimum distances evaluation should be performed

    # Check if the user setting for the minimum distances evaluation exists
    (bEvalOn, dMinDEval) = _check_settings(dSysSettings)
    if not bEvalOn:
        return (dEvalData, dPattEval, dSysSettings)

    # =================================================================

    # =================================================================
    # Report progress

    # Get the verbose settings
    (bInfo, dSysSettings) = _verbose.get_settings(dSysSettings)

    # - - - - - - - - - - - - - - - - - - - - - - - - -

    # Report to the console, if needed
    if bInfo == 1:
        # Print to the console
        stdout.write('EVAL: Minimum distance...                      ')

    # =================================================================

    # =================================================================
    # Check if the dictionary with internal data of evaluation of allowed
    # minimum distance must be created
    if 'dMinDData' not in dEvalData:

        # Dictionary does not exist, must be created:

        # -------------------------------------------------------------
        # Get the number of patterns in a pack (size of a pattern pack)

        dMemory = dSysSettings['dMemory']  # Memory configuration dictionary

        nPattPack = dMemory['nPattPack']   # The number of patterns in a pack

        # -------------------------------------------------------------

        # Create the dictionary with internal data of evaluation of allowed
        # minimum distance
        dMinDData = {}

        # Reset the vector with minimum distances errors for vectors from a
        # current pack
        dMinDData['vMinDErr'] = np.nan*np.ones(nPattPack)

        # Reset the vector with convergence of average minimum distances error
        dMinDData['vMinDErrConv'] = np.nan*np.ones(nPattPack)

        # Reset the vector with convergence of the ratio of patterns which
        # do not violate min distance requirement
        dMinDData['vMinDRConv'] = np.nan*np.ones(nPattPack)

        # Store the dictionary with internal data of evaluation of allowed
        # minimum distance
        dEvalData['dMinDData'] = dMinDData
        # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

    # =================================================================

    # =================================================================
    # Get the needed data

    mPatternsGrid = dPatt['mPatternsGrid']   # The matrix with sampling
                                             # patterns

    # - - - - - - - - - - - - - - - - - - - - - - - - -

    dMinDData = dEvalData['dMinDData']       # The dictionary with internal
                                             # data of evaluation of allowed
                                             # minimum distance

    vMinDErr = dMinDData['vMinDErr']         # Vector with minimum distances
                                             # errors for vectors from
                                             # the previous pack

    vMinDErrConv = dMinDData['vMinDErrConv']  # Vector with convergence of
                                              # average minimum distances error

    vMinDRConv = dMinDData['vMinDRConv']     # Vector with convergence of the
                                             # ratio of patterns which do not
                                             # violate minimum distance
                                             # requirement

    # - - - - - - - - - - - - - - - - - - - - - - - - -

    tMinDist = dMinDEval['tMinDist']         # The wanted minimum distances

    # - - - - - - - - - - - - - - - - - - - - - - - - -

    inxPP = dSysSettings['inxPP']            # The index of patterns pack

    # - - - - - - - - - - - - - - - - - - - - - - - - -

    dMemory = dSysSettings['dMemory']        # Memory configuration dictionary

    nPattPack = dMemory['nPattPack']         # The number of patterns in a pack

    # - - - - - - - - - - - - - - - - - - - - - - - - -

    Tg = dPatt['Tg']   # Get the grid period

    # - - - - - - - - - - - - - - - - - - - - - - - - -
    # =================================================================

    # =================================================================
    # Compute the minimum distance errors

    # Recalculate the minimum distance to the grid
    iMinDistGrid = round(tMinDist/Tg)
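    # (illustrative: tMinDist = 1e-3 s on a grid with Tg = 1e-4 s gives
    #  iMinDistGrid = 10 grid points)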

    # Loop over all sampling patterns
    for inxP in range(0, nPattPack):

        # Compute global index of the current pattern
        inxGP = inxPP*nPattPack + inxP

        # - - - - - - - - - - - - - - - - - - - - - - -

        # Get the current sampling pattern and remove empty (zero) entries
        vPattern = mPatternsGrid[inxP, :]
        vPattern = vPattern[vPattern > 0]

        # Get the length of the sampling pattern
        # (the number of samples)
        nSamp = vPattern.size

        # --------------------------------------------------------------
        # If the pattern is empty, its error is 0
        if nSamp == 0:
            iMinDErr_ = 0
        else:
            # Get the length of a sampling pattern
            nLen = vPattern.size

            # - - - - - - - - - - - - - - - - - - - - - - -

            # Compute the distances between consecutive sampling points
            vDist = np.diff(vPattern)

            # Compute the number of distances between sampling
            # points in the current pattern
            nDist = nLen - 1

            # Compute the number of distances in the pattern
            # which break the minimum distance rule
            nMinErr = sum(vDist < iMinDistGrid)

            # - - - - - - - - - - - - - - - - - - - - - - -

            # Compute the current minimum distance error
            if nDist > 0:  # <- with only 1 sampling point the error is 0
                iMinDErr_ = (nMinErr/nDist)**2
            else:
                iMinDErr_ = 0
        # --------------------------------------------------------------
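        # (illustrative: if 2 of 10 gaps are shorter than allowed, the
        #  per-pattern error is (2/10)**2 = 0.04)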

        # Store the current minimum distance error
        vMinDErr[inxP] = iMinDErr_

        # - - - - - - - - - - - - - - - - - - - - - - -

        # Compute the average minimum distances error
        if inxGP == 0:
            iMinDErrAvg = iMinDErr_
        elif inxP == 0:
            iMinDErrAvg = \
                (inxGP/(inxGP+1))*vMinDErrConv[nPattPack-1] + \
                (1/(inxGP+1))*iMinDErr_
        else:
            iMinDErrAvg = \
                (inxGP/(inxGP+1))*vMinDErrConv[inxP-1] + \
                (1/(inxGP+1))*iMinDErr_

        # - - - - - - - - - - - - - - - - - - - - - - - - -

        # Store the average distances error
        vMinDErrConv[inxP] = iMinDErrAvg

        # - - - - - - - - - - - - - - - - - - - - - - -

        # Compute the ratio of patterns which do not violate minimum distance
        # requirement
        if iMinDErr_ == 0:
            iC = 1
        else:
            iC = 0

        if inxGP == 0:
            iRPatt = iC
        elif inxP == 0:
            iRPatt = \
                (inxGP/(inxGP+1))*vMinDRConv[nPattPack-1] + (1/(inxGP+1))*iC
        else:
            iRPatt = \
                (inxGP/(inxGP+1))*vMinDRConv[inxP-1] + (1/(inxGP+1))*iC

        # Store the current ratio of patterns which do not violate minimum
        # distance requirement
        vMinDRConv[inxP] = iRPatt

    # =================================================================

    # =================================================================
    # Store the data

    # Store the vector with minimum distances errors for vectors from a
    # current pack
    dMinDData['vMinDErr'] = vMinDErr

    # Store the vector with average minimum distance errors
    dMinDData['vMinDErrConv'] = vMinDErrConv

    # Store the vector with the ratio of patterns which do not violate
    # minimum distance requirement
    dMinDData['vMinDRConv'] = vMinDRConv

    # -----------------------------------------------------------------

    # Store the dictionary with internal data of evaluation of allowed
    # minimum distance in the dictionary with internal data of evaluation
    # functions and counters
    dEvalData['dMinDData'] = dMinDData

    # -----------------------------------------------------------------

    # -----------------------------------------------------------------
    # -----------------------------------------------------------------
    # Dictionary with patterns evaluation results (dPattEval):

    # Store the current average minimum distance error for all the patterns
    dPattEval['iMinDErr'] = vMinDErrConv[nPattPack-1]

    # Store the current ratio of patterns which do not violate minimum distance
    # requirement
    dPattEval['iMinDR'] = vMinDRConv[nPattPack-1]

    # Store the vector with convergence of average minimum distance error
    dPattEval['vMinDErrConv'] = vMinDErrConv

    # Store the vector with convergence of the ratio of patterns which do not
    # violate minimum distance requirement
    dPattEval['vMinDRConv'] = vMinDRConv

    # -----------------------------------------------------------------

    # =================================================================

    # =================================================================
    # Report to the console, if needed
    if bInfo == 1:
        print('done. (error: %.4f   ratio of correct patterns: %.4f)'
              % (vMinDErrConv[nPattPack-1], vMinDRConv[nPattPack-1]))

    # =================================================================

    # =================================================================
    # Return
    return (dEvalData, dPattEval, dSysSettings)
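
# ---------------------------------------------------------------------
# Illustrative sketch (hypothetical name, not part of the original
# module): the per-pattern minimum distance error computed in the loop
# above, restated for a single pattern using np.diff.
def _min_dist_err_sketch(vPattern, iMinDistGrid):
    if vPattern.size < 2:
        return 0.0                      # 0 or 1 samples: no gaps at all
    vDist = np.diff(vPattern)           # gaps between adjacent samples
    return (float(np.sum(vDist < iMinDistGrid))/vDist.size)**2
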
def check_convergence(dSysSettings, dEvalData, dSTOPData, bStop):

    # =================================================================
    # Get the user setting for minimum distance evaluation
    (bEvalOn, dMinDEval) = _check_settings(dSysSettings)

    # =================================================================

    # =================================================================
    # If this evaluation was not on, skip this function
    if bEvalOn == 0:
        return (bStop, dSysSettings, dSTOPData)

    # =================================================================

    # =================================================================
    # Check if the convergence evaluation for this function is on.
    if 'bConvCheck' in dMinDEval:
        bConvCheck = dMinDEval['bConvCheck']
    else:
        bConvCheck = 0

    # =================================================================

    # =================================================================
    # If the convergence evaluation is off, skip this function
    if bConvCheck == 0:
        return (bStop, dSysSettings, dSTOPData)

    # =================================================================

    # =================================================================
    # Get the needed data

    # -----------------------------------------------------------------
    # Get the settings for the convergence

    # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    iEval = dSTOPData['EVAL_MIND']             # Index of the minimum
                                               # distance evaluation

    iMinConvMargin = dSTOPData['iMinConvMargin']   # Minimum convergence
                                                   # margin

    # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

    iMargin = dMinDEval['iConvMarg']         # Allowed margin

    iLastPatt = dMinDEval['iConvLastPatt']   # Size of the last data which
                                             # is checked for convergence

    # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

    dMinDData = dEvalData['dMinDData']      # The dictionary with internal
                                            # data of evaluation functions and
                                            # counters

    vMinDErrConv = dMinDData['vMinDErrConv']  # Vector with convergence of
                                              # average minimum distance error

    vMinDRConv = dMinDData['vMinDRConv']    # Vector with convergence of the
                                            # ratio of patterns which do not
                                            # violate minimum distance
                                            # requirement

    # -----------------------------------------------------------------
    # =================================================================

    # =================================================================
    # Check the convergence

    # Measure the convergence:

    # [average min dist error]
    (bConvOkAE, iConvDistAE) = \
        _data_convergence.is_data_converged(vMinDErrConv, iLastPatt,
                                            iMargin, iMinConvMargin)

    # [ratio of patterns which do not violate minimum distance requirement]
    (bConvOkIP, iConvDistIP) = \
        _data_convergence.is_data_converged(vMinDRConv, iLastPatt,
                                            iMargin, iMinConvMargin)

    bConvOk = bConvOkAE and bConvOkIP
    # bConvOkAE == 0 if there is no convergence
    # bConvOkIP == 0 if there is no convergence

    # Check the stop condition
    (bStop, dSTOPData, inxC, bLocalStop) = \
        _data_convergence.check_conv_counter(dSTOPData, bConvOk,
                                             bStop, iEval)

    # =================================================================

    # =================================================================
    # Report to the console (if needed)

    # Get the verbose settings
    (bInfo, _) = _verbose.get_settings(dSysSettings)

    # Print the info
    if bInfo == 1:

        stdout.write('STOP CHECK: ')
        stdout.write('Minimum distance eval convergence:           ')
        if bConvOkAE == 1:
            stdout.write('[average error]: OK            ')
        else:
            strMessage = '[average error]: FAILED (%2.2f) ' % (iConvDistAE)
            stdout.write(strMessage)

        if bConvOkIP == 1:
            stdout.write('[rat. correct patterns]: OK                ')
        else:
            strMessage = '[rat. correct patterns]: FAILED (%2.2f) ' \
                % (iConvDistIP)
            stdout.write(strMessage)

        if bConvOk == 1:
            strMessage = 'OK %d TIME' % (inxC)
            stdout.write(strMessage)
            if bLocalStop == 1:
                strMessage = ' (enough) \n'
                stdout.write(strMessage)
            else:
                strMessage = '\n'
                stdout.write(strMessage)

        else:
            strMessage = '\n'
            stdout.write(strMessage)

    # =================================================================

    # =================================================================
    return (bStop, dSysSettings, dSTOPData)
def get_storing_settings(dSysSettings):

    # -----------------------------------------------------------------
    # Check if name of the directory into which patterns will be stored
    # already exists. If so, then all the job is done.
    if 'strUniqPattsDirName' in dSysSettings:

        return (dSysSettings['strUniqPattsDirName'], dSysSettings)
    # -----------------------------------------------------------------

    # -----------------------------------------------------------------
    # Get the dictionary with patterns storing configuration
    dPattStoring = dSysSettings['dPattStoring']

    # Get the flag with settings for patterns storing
    bPattStore = dPattStoring['bPattStore']
    # -----------------------------------------------------------------

    # -----------------------------------------------------------------
    # Check if the flag for patterns storing is on
    # If yes, then copy the name of the directory with all sampling patterns
    if bPattStore == 1:

        # Copy the name of the directory
        dSysSettings['strUniqPattsDirName'] = dPattStoring['strPattsDIRName']

        return (dSysSettings['strUniqPattsDirName'], dSysSettings)
    # -----------------------------------------------------------------

    # -----------------------------------------------------------------
    # Get the verbose settings
    (bInfo, dSysSettings) = _verbose.get_settings(dSysSettings)
    # -----------------------------------------------------------------

    # -----------------------------------------------------------------
    # If the flag for patterns storing is off, create the dictionary

    # Check if base name for the directory for storing
    # the generated patterns was given.
    if 'strPattsDir' not in dPattStoring:

        # If this name is not given while patterns storing is on, then
        # it is an error!
        strErrMsg = 'Field (dSysSettings.dPattStoring.strPattsDir)'
        strErrMsg = '%s is missing! \n' \
            % (strErrMsg)
        strErrMsg = '%sThis field must contain the base name for the' \
            % (strErrMsg)
        strErrMsg = '%s directory for storing generated patterns.' \
            % (strErrMsg)
        sys.exit(strErrMsg)

    # Get the base name for the directory for storing
    # the generated patterns.
    strPattsDir = dPattStoring['strPattsDir']

    # Get parts of the time of beginning of computations
    sTime = dPattStoring['sTime']
    iM = sTime.tm_mon      # Month
    iD = sTime.tm_mday     # Day of the month
    iH = sTime.tm_hour     # Hour
    im = sTime.tm_min      # Minute
    iS = sTime.tm_sec      # Second

    # Get the month name
    lMonths = ['Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun',
               'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec']
    strMonth = lMonths[iM - 1]

    # Expand tilde to the correct user home dir
    strBasicPattsDir = os.path.expanduser(strPattsDir)

    # Check if the basic directory for patterns exists
    if not os.path.exists(strBasicPattsDir):
        os.mkdir(strBasicPattsDir)

    # Construct a directory name and store it in the main system configuration
    # dictionary
    strUniqPattsDirName = "%s/patterns_%d%s_%d:%d:%d" \
        % (strBasicPattsDir, iD, strMonth, iH, im, iS)

    # Store the directory name in the main system configuration dictionary
    dSysSettings['strUniqPattsDirName'] = strUniqPattsDirName

    # Create a directory for patterns
    os.mkdir(strUniqPattsDirName)

    # Report, if needed
    if bInfo == 1:
        strMessage = '\nNew directory for storing the unique patterns created'
        strMessage = '%s (%s) \n' \
            % (strMessage, strUniqPattsDirName)
        stdout.write(strMessage)
    # -----------------------------------------------------------------

    return (dSysSettings['strUniqPattsDirName'], dSysSettings)
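
# (illustrative: for strPattsDir = '~/patterns' and a start time of
#  7 Mar, 14:02:33, the function above creates and returns a directory
#  named like '/home/user/patterns/patterns_7Mar_14:2:33'; note that
#  the '%d' fields in the name are not zero-padded)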
def main(dSysSettings, dEvalData, dPattEval, dPatt):

    # =================================================================
    # Check if the counter of unique patterns should be run

    # Check if the user settings for the unique patterns counter exist
    (bCntrOn, dUniqueCntr) = _check_settings(dSysSettings)
    if not bCntrOn:
        return (dEvalData, dPattEval, dSysSettings)

    # =================================================================

    # =================================================================
    # Report progress

    # Get the verbose settings
    (bInfo, dSysSettings) = _verbose.get_settings(dSysSettings)

    # - - - - - - - - - - - - - - - - - - - - - - - - -

    # Report to the console, if needed
    if bInfo == 1:

        # Print to the console
        stdout.write('CNTR: Unique patterns counter...          ')

    # =================================================================

    # =================================================================
    # Check if the dictionary with internal data of the unique patterns
    # counter must be created
    if 'dUniqData' not in dEvalData:

        # Dictionary does not exist, must be created:

        # -----------------------------------------------------------------
        # Get the number of patterns in a pack (size of a pattern pack)

        dMemory = dSysSettings['dMemory']    # Memory configuration dictionary

        nMaxPacks = dMemory['nMaxPacks']     # The maximum number of packs with
                                             # patterns

        # -----------------------------------------------------------------

        # Create the dictionary with internal data of the unique patterns
        # counter
        dUniqData = {}

        # Reset the vector with the number of unique patterns
        dUniqData['vUniqPatt'] = np.nan*np.ones(nMaxPacks)

        # Reset the index of the file with unique patterns
        dUniqData['iFil'] = 0

        # Reset the number of unique patterns
        dUniqData['nUnique'] = 0

        # - - - - - - - - - - - - - - - - - - - - - - - - -
        # - - - - - - - - - - - - - - - - - - - - - - - - -

        # Store the dictionary with internal data of the unique patterns
        # counter in the dictionary with internal data of evaluation
        # functions and counters
        dEvalData['dUniqData'] = dUniqData

    # =================================================================

    # =================================================================
    # Get the settings for max allowed errors

    # ---------------------------------------------------
    #  Frequency stability data:

    # Is frequency evaluation on and
    # there is user setting for max frequency error?
    if 'dFreqData' in dEvalData and 'iUni_iMaxErrFreq' in dUniqueCntr:
        iUni_iMaxErrFreq = dUniqueCntr['iUni_iMaxErrFreq']
    else:
        iUni_iMaxErrFreq = float(np.inf)
    # ---------------------------------------------------

    # ---------------------------------------------------
    #  Minimum distance data

    # Is minimum distance evaluation on and
    # there is user setting for max minimum distance error?
    if 'dMinDData' in dEvalData and 'iUni_iMaxErrMinD' in dUniqueCntr:
        iUni_iMaxErrMinD = dUniqueCntr['iUni_iMaxErrMinD']
    else:
        iUni_iMaxErrMinD = float(np.inf)
    # ---------------------------------------------------

    # ---------------------------------------------------
    #  Maximum distance data

    # Is maximum distance evaluation on and
    # there is user setting for max maximum distance error?
    if 'dMaxDData' in dEvalData and 'iUni_iMaxErrMaxD' in dUniqueCntr:
        iUni_iMaxErrMaxD = dUniqueCntr['iUni_iMaxErrMaxD']
    else:
        iUni_iMaxErrMaxD = float(np.inf)
    # ---------------------------------------------------

    # =================================================================

    # =================================================================
    # Get the parameters of patterns storing
    (strUniqPattsDirName, dSysSettings) = get_storing_settings(dSysSettings)

    # =================================================================

    # =================================================================
    # Get the needed data

    mPatternsGrid = dPatt['mPatternsGrid']  # The matrix with sampling patterns

    # - - - - - - - - - - - - - - - - - - - - - - - - -

    dMemory = dSysSettings['dMemory']    # Memory configuration dictionary

    nPattPack = dMemory['nPattPack']     # The number of patterns in a pack

    # - - - - - - - - - - - - - - - - - - - - - - - - -

    # Get the dictionary with internal data of the unique patterns counter
    dUniqData = dEvalData['dUniqData']

    # Get the vector with the number of unique patterns
    vUniqPatt = dUniqData['vUniqPatt']

    # Get the index of the file with unique patterns
    iFil = dUniqData['iFil']

    # Get the number of unique patterns
    nUnique = dUniqData['nUnique']

    # - - - - - - - - - - - - - - - - - - - - - - - - -

    # System settings:
    inxPS = dSysSettings['inxPS']       # Current index of patterns settings
                                        # (patterns type)
    # Get the index of patterns pack
    inxPP = dSysSettings['inxPP']

    # - - - - - - - - - - - - - - - - - - - - - - - - -
    # Get the data needed for maximum errors

    # Get the vector with frequency errors
    # (if needed)
    if not np.isinf(iUni_iMaxErrFreq):

        dFreqData = dEvalData['dFreqData']   # The dictionary with frequency
                                             # stability evaluation data

        vFreqErr = dFreqData['vFreqErr']     # Vector with frequency errors
                                             # for the current patterns pack

    # Get the vector with minimum distance errors
    # (if needed)
    if not np.isinf(iUni_iMaxErrMinD):

        dMinDData = dEvalData['dMinDData']   # Dictionary with data for
                                             # minimum distance evaluation

        vMinDErr = dMinDData['vMinDErr']    # Vector with minimum distances
                                            # errors for the current
                                            # patterns pack

    # Get the vector with maximum distance errors
    # (if needed)
    if not np.isinf(iUni_iMaxErrMaxD):

        # Get the vector with maximum distance errors
        dMaxDData = dEvalData['dMaxDData']  # Dictionary with data for
                                            # maximum distance evaluation

        vMaxDErr = dMaxDData['vMaxDErr']    # Vector with maximum distance
                                            # errors for the current patterns
                                            # pack
    # - - - - - - - - - - - - - - - - - - - - - - - - -

    # =================================================================

    # =================================================================
    # Get only patterns without errors

    # Remove those patterns from 'mPatternsGrid' which do not fulfill
    # the requirement for maximum errors
    # vCorr - vector with indices of correct patterns

    # Remove the incorrect patterns, (if needed)
    if not(np.isinf(iUni_iMaxErrFreq) and
           np.isinf(iUni_iMaxErrMinD) and
           np.isinf(iUni_iMaxErrMaxD)):

        # Reset the vector with indices of correct patterns
        vCorr = np.arange(nPattPack, dtype=int)

        # Loop over all sampling patterns
        for inxP in range(0, nPattPack):

            # Reset the correct flag to correct
            bCorr = 1

            # - - - - - - - - - - - - - - - - - - - - - - - - -
            # Check if the current pattern fulfills requirements for max errors
            if not np.isinf(iUni_iMaxErrFreq):
                iEf = vFreqErr[inxP]        # Frequency error
                if iEf > iUni_iMaxErrFreq:
                    bCorr = 0               # Pattern does not fulfill
                                            # the requirements

            if not np.isinf(iUni_iMaxErrMinD):
                iMinDE = vMinDErr[inxP]     # Minimum distance error
                if iMinDE > iUni_iMaxErrMinD:
                    bCorr = 0               # Pattern does not fulfill the
                                            # requirements

            if not np.isinf(iUni_iMaxErrMaxD):
                iMaxDE = vMaxDErr[inxP]     # Maximum distance error
                if iMaxDE > iUni_iMaxErrMaxD:
                    bCorr = 0               # Pattern does not fulfill the
                                            # requirements

            # Mark the pattern as incorrect, if it was found incorrect
            if bCorr == 0:
                vCorr[inxP] = -1

        # Remove the unwanted patterns
        vCorr = vCorr[vCorr != -1]
        mPatternsGrid = mPatternsGrid[vCorr, :]

    # =================================================================

    # =================================================================
    # Compute the number of unique patterns

    # Remove the non-unique patterns
    (mPattUniqPack, _) = _uniq_rows(mPatternsGrid)

    # Calculate the number of patterns left in the current new pack with
    # patterns
    nPattUniqPack = mPattUniqPack.shape[0]

    # Reset the auxiliary index for file with unique patterns
    iFil_ = 1

    # ===========================================================
    # The loop over all previous files with unique patterns starts
    # here
    #
    # The loop ends if all the previous files were processed, or
    # the number of unique patterns in the current pack drops to 0

    # Reset the progress service
    strSpaceTab = '                                          '
    dProg = _loop_progress.reset(2, 2, strSpaceTab, iFil)

    nProgLines = 0              # Reset the number of lines printed by
                                # the progress function

    # Loop starts here!!!
    while (iFil_ <= iFil) and nPattUniqPack > 0:

        # -------------------------------------------------------
        # Read the current file with patterns
        #

        # Construct the current name of the file with unique patterns
        strPattFileName = '%s/patterns_unique%d_%d.dat' % \
            (strUniqPattsDirName, inxPS, iFil_)

        # Read the current file with unique patterns
        patts_file = open(strPattFileName, 'rb')
        mPattUniqFile = cPickle.load(patts_file)
        patts_file.close()

        # Calculate the number of unique patterns in the file
        nPattUniqFile = mPattUniqFile.shape[0]

        # -------------------------------------------------------

        # -------------------------------------------------------
        # Remove from the currently generated pack of patterns those
        # patterns which are already in the currently processed file
        # with unique patterns

        # Construct a combined package
        #
        # The pack of unique patterns from the file is on the top,
        # the new pack with unique patterns is on the bottom
        #
        mPattComb = _concatenate_arrays(mPattUniqFile, mPattUniqPack)

        # Find the unique patterns
        (_, vInxUniqComb_) = _uniq_rows(mPattComb)

        # Get the indices of unique patterns in the currently generated
        # pack of patterns
        # (Get indices of those unique rows which occur for the first
        #  time in the new pack with patterns)
        vInxUniq = vInxUniqComb_[vInxUniqComb_ >= nPattUniqFile]

        # Calculate the new number of unique patterns in the current pack
        nPattUniqPackNew = vInxUniq.shape[0]

        # -------------------------------------------------------

        # Update the matrix with current unique patterns from the pack
        #
        if nPattUniqPackNew < nPattUniqPack:
            mPattUniqPack = mPattComb[vInxUniq, :]
            nPattUniqPack = nPattUniqPackNew

        # -------------------------------------------------------

        # Report progress
        (dProg, nProgLines) = _loop_progress.service(dProg, iFil_, iFil)

        # Move index of the current file forward
        iFil_ = iFil_ + 1

    # Report 100% progress
    (dProg, _) = _loop_progress.service(dProg, iFil, iFil)

    # ===========================================================

    # Update the total number of unique patterns
    nUnique = nUnique + nPattUniqPack

    # Store the number of unique patterns
    vUniqPatt[inxPP] = nUnique

    # =================================================================

    # =================================================================
    # Store the found unique patterns in the files, if there are any
    if inxPP == 0:

        #
        # This set of code runs for the first pack of patterns ONLY (!)
        #

        # Set the index of unique files
        iFil = 1

        # Construct the current name of the file with unique patterns
        strPattFileName = '%s/patterns_unique%d_%d.dat' % \
            (strUniqPattsDirName, inxPS, iFil)

        # Store all the unique patterns
        patts_file = open(strPattFileName, 'wb')
        cPickle.dump(mPattUniqPack, patts_file)
        patts_file.close()

        #
        # -------------------------------------------------------------
        #

    else:

        # Are there any unique patterns left?
        if nPattUniqPack > 0:

            # ---------------------------------------------------------

            #
            # First part of saving patterns: fill up the last file with
            # patterns

            # Calculate the number of patterns which could be stored in the
            # last file with unique patterns
            # (last file: the file with the highest number)
            nLeftSpaceFile = nPattPack - nPattUniqFile

            # Calculate the number of patterns to be put into the file
            nPatts2File = min(nLeftSpaceFile, nPattUniqPack)

            # Fill up the file, if something should be put to a file
            if nPatts2File > 0:

                # Construct a matrix to be put into file
                mPatt2File = \
                    _concatenate_arrays(mPattUniqFile,
                                        mPattUniqPack[range(nPatts2File), :])

                # Store the unique patterns to be put into the file
                patts_file = open(strPattFileName, 'wb')
                cPickle.dump(mPatt2File, patts_file)
                patts_file.close()

            # Calculate the number of unique patterns in a pack left
            nPattUniqPackLeft = nPattUniqPack - nPatts2File

            # ----------------------------------------------------------

            #
            # Second part of saving patterns: Save the rest of patterns
            #
            # Are there any unique patterns left?
            if nPattUniqPackLeft > 0:

                # Move the file index forward
                iFil = iFil + 1

                # Construct the current name of the file with unique patterns
                strPattFileName = '%s/patterns_unique%d_%d.dat' \
                    % (strUniqPattsDirName, inxPS, iFil)

                # Construct a matrix to be put into file
                mPatt2File = \
                    mPattUniqPack[range(nPatts2File, nPattUniqPack), :]

                # Store the unique patterns to be put into the file
                patts_file = open(strPattFileName, 'wb')
                cPickle.dump(mPatt2File, patts_file)
                patts_file.close()

            # =========================================================

    # =================================================================
    # Store the data

    # Store the vector with the number of unique patterns
    dUniqData['vUniqPatt'] = vUniqPatt

    # The index of the file with unique patterns
    dUniqData['iFil'] = iFil

    # The number of unique patterns
    dUniqData['nUnique'] = nUnique

    # -----------------------------------------------------------------

    # Store the dictionary with internal data of the unique patterns counter
    # in the dictionary with internal data of evaluation functions and
    # counters
    dEvalData['dUniqData'] = dUniqData

    # -----------------------------------------------------------------
    # -----------------------------------------------------------------

    # Store the current total number of unique patterns
    # in the dictionary with patterns evaluation results (dPattEval)
    dPattEval['nUnique'] = nUnique

    # Store the vector with the number of unique patterns
    # in the dictionary with patterns evaluation results (dPattEval)
    dPattEval['vUniqPatt'] = vUniqPatt

    # =================================================================

    # =================================================================
    # Report to the console, if needed
    if bInfo == 1:
        if nProgLines == 0:
            stdout.write('     ')

        strMessage1 = 'done. The number of unique patterns:'

        print('%s %d' % (strMessage1, nUnique))

    # =================================================================

    # =================================================================
    return (dEvalData, dPattEval, dSysSettings)
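
# -------------------------------------------------------------------------
# NOTE: the helpers _uniq_rows and _concatenate_arrays are used above but
# are not shown in this document. Below are minimal sketches of what such
# helpers might look like (assumptions, not the author's implementations).
# _uniq_rows must return the unique rows together with the indices of
# their first occurrences, which is the contract the caller relies on
# when it filters vInxUniqComb_ >= nPattUniqFile to keep only the rows
# that occur for the first time in the new pack.
import numpy as np

def _uniq_rows_sketch(mA):

    # View every row as one opaque (void) element, so that np.unique
    # can compare whole rows at once
    mA = np.ascontiguousarray(mA)
    vRows = mA.view(np.dtype((np.void, mA.dtype.itemsize * mA.shape[1])))

    # Indices of the first occurrence of every unique row,
    # kept in the order in which the rows appear in mA
    (_, vInxUniq) = np.unique(vRows, return_index=True)
    vInxUniq = np.sort(vInxUniq)

    return (mA[vInxUniq, :], vInxUniq)

def _concatenate_arrays_sketch(mA, mB):

    # Stack two pattern matrices row-wise, padding the narrower one
    # with -1 columns so that the widths agree
    nCols = max(mA.shape[1], mB.shape[1])
    mA_ = -1*np.ones((mA.shape[0], nCols), dtype=mA.dtype)
    mB_ = -1*np.ones((mB.shape[0], nCols), dtype=mB.dtype)
    mA_[:, :mA.shape[1]] = mA
    mB_[:, :mB.shape[1]] = mB

    return np.vstack((mA_, mB_))
# -------------------------------------------------------------------------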
def main(dSysSettings, dEvalData, dPattEval, dPatt):

    # =================================================================
    # Check if the PDF evaluation should be performed
    # If not, return from the function

    (bEvalOn, _) = _check_settings(0, dSysSettings, 0)
    if not bEvalOn:
        return (dEvalData, dPattEval, dSysSettings)

    # =================================================================

    # =================================================================
    # Report progress

    # Get the verbose settings
    (bInfo, dSysSettings) = _verbose.get_settings(dSysSettings)

    # - - - - - - - - - - - - - - - - - - - - - - - - -

    # Report to the console, if needed
    if bInfo == 1:

        # Print to the console
        stdout.write('EVAL: Probability density (PDF) (total)...     ')

    # =================================================================

    # =================================================================
    # Check if the dictionary with internal data of PDF evaluation
    # (for all patterns) must be created
    if not 'dPDFTotData' in dEvalData:

        # Dictionary does not exist, must be created:

        # -----------------------------------------------------------------
        # Get the number of patterns in a pack (size of a pattern pack)

        dMemory = dSysSettings['dMemory']    # Memory configuration
                                             # dictionary

        nPattPack = dMemory['nPattPack']     # The number of patterns in a
                                             # pack

        # -------------------------------------------------------------

        # - - - - - - - - - - - - - - - - - - - - - - - - -

        # Get the time of the sampling pattern
        tS = float(dPatt['tS_r'])

        # Get the grid period
        Tg = float(dPatt['Tg'])

        # Compute the number of grid points in the sampling pattern
        nGrids = math.floor(tS/Tg)

        # Create the dictionary with internal data of PDF evaluation
        # (for all patterns)
        dPDFTotData = {}

        # Reset the vector with PDF for all the grid points
        dPDFTotData['vPDFTot'] = np.zeros(nGrids)

        # Reset the vector with normalized PDF error
        dPDFTotData['vPDFNormTot'] = np.nan*np.ones(nPattPack)

        # - - - - - - - - - - - - - - - - - - - - - - - - -

        # Reset the vector with convergence of PDF error
        dPDFTotData['vPDFErrTotConv'] = np.nan*np.ones(nPattPack)

        # Reset the normalized PDF error
        dPDFTotData['iPDFErrTot'] = np.nan

        # - - - - - - - - - - - - - - - - - - - - - - - - -

        # Reset the total number of sampling points in all the patterns
        dPDFTotData['nSPtsTot'] = 0

        # - - - - - - - - - - - - - - - - - - - - - - - - -

        # Store the dictionary with internal data of PDF evaluation
        # (for all patterns) in the dictionary with internal data
        # of evaluation functions and counters
        dEvalData['dPDFTotData'] = dPDFTotData
    # =================================================================

    # =================================================================
    # Get the needed data

    mPatternsGrid = dPatt['mPatternsGrid']      # The matrix with sampling
                                                # patterns

    # - - - - - - - - - - - - - - - - - - - - - - - - -

    dPDFTotData = dEvalData['dPDFTotData']      # The dictionary with
                                                # internal data of PDF
                                                # evaluation (for all
                                                # patterns)

    nSPtsTot = dPDFTotData['nSPtsTot']          # The total number of sampling
                                                # points in all the patterns

    # -----------------------------------------------------------------
    vPDFTot = dPDFTotData['vPDFTot']            # Get the vector with PDF
                                                # for all the grid points

    vPDFErrTotConv = dPDFTotData['vPDFErrTotConv']   # Get the vector with
                                                     # convergence of PDF error

    # -----------------------------------------------------------------

    # - - - - - - - - - - - - - - - - - - - - - - - - -

    dMemory = dSysSettings['dMemory']    # Memory configuration dictionary

    nPattPack = dMemory['nPattPack']     # The number of patterns in a pack

    # - - - - - - - - - - - - - - - - - - - - - - - - -

    Tg = dPatt['Tg']     # The grid period

    tS = dPatt['tS_r']   # The time of the sampling pattern

    # =================================================================

    # =================================================================
    # Compute the PDF errors

    # Compute the number of grid points in the sampling pattern
    nGrids = math.floor(tS/Tg)

    # Loop over all sampling patterns
    for inxP in range(0, nPattPack):

        # Get the current sampling pattern and clear it
        vPattern = mPatternsGrid[inxP, :]
        vPattern = vPattern[vPattern > 0]

        # Get the length of the sampling pattern
        # (the number of samples)
        nPts = vPattern.size

        # - - - - - - - - - - - - - - - - - - - - - - - - -

        # Update the total number of sampling points
        nSPtsTot = nSPtsTot + nPts

        # - - - - - - - - - - - - - - - - - - - - - - - - -

        # Add the current pattern to the PDF function
        vPattern = vPattern.astype(np.int64)
        vPattern = vPattern - 1
        vPDFTot[vPattern] = vPDFTot[vPattern] + 1

        # Normalize the histogram
        vPDFNormTot = vPDFTot/(nSPtsTot/nGrids)

        # - - - - - - - - - - - - - - - - - - - - - - - - -

        # Compute the current PDF error
        iPDFErrTot = sum((vPDFNormTot-1)**2)/vPDFNormTot.size

        # Store the error in the vector with convergence of PDF error
        vPDFErrTotConv[inxP] = iPDFErrTot

    # =================================================================

    # =================================================================
    # Store the data

    # Store the vector with PDF
    dPDFTotData['vPDFTot'] = vPDFTot

    # Store the vector with normalized PDF
    dPDFTotData['vPDFNormTot'] = vPDFNormTot

    # Store the current PDF error
    dPDFTotData['iPDFErrTot'] = iPDFErrTot

    # Store the vector with convergence of PDF error
    dPDFTotData['vPDFErrTotConv'] = vPDFErrTotConv

    # Store the total number of sampling points in all the patterns
    dPDFTotData['nSPtsTot'] = nSPtsTot

    # -----------------------------------------------------------------

    # Store the dictionary with internal data of PDF evaluation
    # (for all patterns)
    dEvalData['dPDFTotData'] = dPDFTotData

    # -----------------------------------------------------------------
    # -----------------------------------------------------------------
    # The dictionary with patterns evaluation results (dPattEval):

    # Store the current PDF error for all the patterns in the dictionary with
    # patterns evaluation results (dPattEval)
    dPattEval['iPDFErrTot'] = iPDFErrTot

    # Store vector with normalized PDF in the dictionary with patterns
    # evaluation results
    dPattEval['vPDFNormTot'] = vPDFNormTot

    # Store the vector with convergence of PDF error
    dPattEval['vPDFErrTotConv'] = vPDFErrTotConv

    # =================================================================

    # =================================================================
    # Report to the console, if needed
    if bInfo == 1:
        print('done. (error: %.4f)' % (iPDFErrTot))

    # =================================================================

    # =================================================================
    return (dEvalData, dPattEval, dSysSettings)
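
# -------------------------------------------------------------------------
# NOTE: the PDF error computed above is the mean squared deviation of the
# normalized grid-point histogram from the flat density 1. A small,
# self-contained illustration with hypothetical numbers (not taken from
# this document):
import numpy as np

nGrids = 4                               # grid points in a pattern
vPDF = np.array([3.0, 2.0, 2.0, 1.0])    # sampling hits per grid point
nSPts = vPDF.sum()                       # total number of sampling points

vPDFNorm = vPDF/(nSPts/nGrids)           # a perfectly flat PDF -> all ones
iPDFErr = np.sum((vPDFNorm - 1)**2)/vPDFNorm.size

print('normalized PDF: %s, error: %.4f' % (vPDFNorm, iPDFErr))
# -------------------------------------------------------------------------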
def check_convergence(dSysSettings, dEvalData, dSTOPData, bStop):

    # =================================================================
    # Get the user setting for PDF evaluation
    (bEvalOn, dPDFTotEval) = _check_settings(0, dSysSettings, 0)
    # =================================================================

    # =================================================================
    # If this evaluation was not on, skip this function
    if bEvalOn == 0:
        return (bStop, dSysSettings, dSTOPData)

    # =================================================================

    # =================================================================
    # Check if the convergence check for this function is on.
    if 'bConvCheck' in dPDFTotEval:
        bConvCheck = dPDFTotEval['bConvCheck']
    else:
        bConvCheck = 0
    # =================================================================

    # =================================================================
    # If the convergence check is off, skip this function
    if bConvCheck == 0:
        return (bStop, dSysSettings, dSTOPData)

    # =================================================================

    # =================================================================
    # Get the needed data

    # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    iEval = dSTOPData['EVAL_PDF']                   # Index of the PDF
                                                    # evaluation

    iMinConvMargin = dSTOPData['iMinConvMargin']    # Minimum convergence
                                                    # margin

    # -----------------------------------------------------------------
    # Get the settings for the convergence

    iMargin = dPDFTotEval['iConvMarg']            # Allowed margin

    iLastPatt = dPDFTotEval['iConvLastPatt']      # Size of the last data
                                                  # which is checked for
                                                  # convergence

    # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

    dPDFTotData = dEvalData['dPDFTotData']        # The dictionary with
                                                  # internal data of PDF
                                                  # evaluation (for all
                                                  # patterns)

    vPDFErrTotConv = dPDFTotData['vPDFErrTotConv']   # Vector with convergence
                                                     # of PDF error

    # -----------------------------------------------------------------
    # =================================================================

    # =================================================================

    # -----------------------------------------------------------------

    # Measure the convergence
    (bConvOk, iConvDist) = \
        _data_convergence.is_data_converged(vPDFErrTotConv, iLastPatt,
                                            iMargin, iMinConvMargin)

    # Check the stop condition
    (bStop, dSTOPData, inxC, bLocalStop) = \
        _data_convergence.check_conv_counter(dSTOPData, bConvOk,
                                             bStop, iEval)

    # =================================================================

    # =================================================================
    # Report to the console (if needed)

    # Get the verbose settings
    (bInfo, _) = _verbose.get_settings(dSysSettings)

    # Print the info
    if bInfo == 1:
        stdout.write('STOP CHECK: ')
        stdout.write('PDF eval convergence:                        ')

        if bConvOk == 1:
            stdout.write('[error]: OK              ')
            stdout.write('                                           ')
        else:
            strMessage = '[error]: FAILED (%2.2f) ' % (iConvDist)
            stdout.write(strMessage)

        if bConvOk == 1:
            strMessage = 'OK %d TIME' % (inxC)
            stdout.write(strMessage)
            if bLocalStop == 1:
                strMessage = ' (enough) \n'
                stdout.write(strMessage)
            else:
                strMessage = '\n'
                stdout.write(strMessage)

        else:
            strMessage = '\n'
            stdout.write(strMessage)

    # =================================================================

    # =================================================================
    return (bStop, dSysSettings, dSTOPData)
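
# -------------------------------------------------------------------------
# NOTE: _data_convergence.is_data_converged is not shown in this document.
# Below is a minimal sketch of such a test, under the assumption that
# "converged" means the spread of the last iLastPatt error values stays
# within the allowed margin (relative to the current error level, floored
# by iMinConvMargin). The real helper may differ.
import numpy as np

def is_data_converged_sketch(vErrConv, iLastPatt, iMargin, iMinConvMargin):

    # Take the tail of the convergence vector, skipping NaN placeholders
    vTail = vErrConv[-iLastPatt:]
    vTail = vTail[~np.isnan(vTail)]
    if vTail.size < 2:
        return (0, float('inf'))

    # Distance between the extreme values in the observed tail
    iConvDist = float(np.max(vTail) - np.min(vTail))

    # Converged if the distance is inside the allowed margin
    iAllowedDist = max(iMargin*abs(vTail[-1]), iMinConvMargin)
    bConvOk = 1 if iConvDist <= iAllowedDist else 0

    return (bConvOk, iConvDist)
# -------------------------------------------------------------------------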
Example #10
def generate_patterns(dPattPar, dSysSettings):

    # =================================================================
    # HEADER: CHECK IF THERE ARE ALL NEEDED CONFIGURATION FIELDS

    # The duration of the sampling pattern
    strErr = 'ERROR (ANGIE_patterns):'
    if not 'tS' in dPattPar:
        print('%s No "tS" field in the configuration dict. (dPattPar)'
              % (strErr))
        print('%s Incomplete configuration for the generator!'
              % (strErr))
        print('BAILING OUT!')
        sys.exit(1)

    # The sampling grid period
    if not 'Tg' in dPattPar:
        print('%s No "Tg" field in the configuration dict. (dPattPar)'
              % (strErr))
        print('%s Incomplete configuration for the generator!'
              % (strErr))
        print('BAILING OUT!')
        sys.exit(1)

    # The wanted average sampling frequency
    if not 'fR' in dPattPar:
        print('%s No "fR" field in the configuration dict. (dPattPar)'
              % (strErr))
        print('%s Incomplete configuration for the generator!'
              % (strErr))
        print('BAILING OUT!')
        sys.exit(1)

    # The sigma parameter
    if not 'iVar' in dPattPar:
        print('%s No "iVar" field in the configuration dict. (dPattPar)'
              % (strErr))
        print('%s Incomplete configuration for the generator!'
              % (strErr))
        print('BAILING OUT!')
        sys.exit(1)

    # The minimum time between samples
    if not 'tMin' in dPattPar:
        print('%s No "tMin" field in the configuration dict. (dPattPar)'
              % (strErr))
        print('%s Incomplete configuration for the generator!'
              % (strErr))
        print('BAILING OUT!')
        sys.exit(1)

    # The number of sampling patterns
    if not 'nPatts' in dPattPar:
        print('%s No "nPatts" field in the configuration dict. (dPattPar)'
              % (strErr))
        print('%s Incomplete configuration for the generator!'
              % (strErr))
        print('BAILING OUT!')
        sys.exit(1)

    # =================================================================

    # =================================================================
    # Get the needed data

    # -----------------------------------------------------------------
    # PATTERNS PARAMETERS (sPattPar):

    tau = dPattPar['tS']         # The duration of the sampling pattern

    Tg = dPattPar['Tg']          # The sampling grid period

    fdag_s = dPattPar['fR']      # The wanted average sampling frequency

    sigma = dPattPar['iVar']     # The sigma parameter

    N = int(dPattPar['nPatts'])  # The number of patterns in a pack

    # Min distance:
    t_min = dPattPar['tMin']     # The minimum time between samples

    # Max distance (if given):
    if 'tMax' in dPattPar:
        t_max = dPattPar['tMax']  # The maximum time between samples
                                  # (if it exists)
    else:
        t_max = np.inf            # The maximum time between samples
                                  # is set to inf (if no value was given)

    # =================================================================

    # =================================================================
    # REPORT TO THE CONSOLE (If NEEDED)

    # Get the verbose settings
    (bInfo, _) = _verbose.get_settings(dSysSettings)

    if bInfo == 1:

        # -----------------------------------------------------------------
        # Get the needed data

        # System settings:
        inxPS = dSysSettings['inxPS']        # Current index of patterns
                                             # settings (patterns type)

        nPattSet = dSysSettings['nPattSet']  # The total number of different
                                             # patterns settings

        inxPP = dSysSettings['inxPP']        # Current index of patterns pack

        # Memory settings
        dMemory = dSysSettings['dMemory']    # The memory configuration
                                             # dictionary

        nMaxPacks = dMemory['nMaxPacks']     # The maximum number of packs with
                                             # patterns
        # -----------------------------------------------------------------


        # Print the info
        strMessage = '\n\nPatterns type ANGIE (c implementation)'
        strMessage = '%s (Variance: %.7f).  ' \
            % (strMessage, sigma)
        sys.stdout.write(strMessage)

        strMessage = 'Patterns settings index: %d/%d.\n' \
            % (inxPS, nPattSet)
        sys.stdout.write(strMessage)

        strMessage = 'Index of the patterns pack = %d. (max = %d).' \
            % (inxPP+1, nMaxPacks)

        strMessage = '%s Size of a pack = %.1fK' \
            % (strMessage, N / 1e3)

        strMessage = '%s Patterns generation...' \
            % (strMessage)

        sys.stdout.write(strMessage)

    # =================================================================

    # =================================================================
    # PRECALCULATIONS

    # -----------------------------------------------------------------
    # -----------------------------------------------------------------
    # Calculate the number of grid points in the sampling period
    K_g = math.floor(tau/Tg)                              # equation (22)

    # -----------------------------------------------------------------
    # -----------------------------------------------------------------
    # Calculate the real time of the sampling pattern
    hattau = K_g * Tg                                     # equation (22)

    # -----------------------------------------------------------------
    # -----------------------------------------------------------------
    # Calculate the expected number of sampling points in a pattern
    hatKdag_s = int(round(hattau*fdag_s))                 # equation (22)

    # --------------------------------------------------------------------
    # --------------------------------------------------------------------
    # Minimum time between the sampling points:

    # Minimum time as the amount of grid
    K_min = int(math.ceil(t_min/Tg))                       # equation (27)

    # --------------------------------------------------------------------
    # --------------------------------------------------------------------
    # Maximum time between the sampling points:

    # Maximum time as the amount of grid                  # equation (27)
    if not np.isposinf(t_max):
        K_max = int(math.ceil(t_max/Tg))
    else:
        K_max = t_max

    # --------------------------------------------------------------------
    # --------------------------------------------------------------------
    # Switch on the soft start
    bSoftStart = 1

    # --------------------------------------------------------------------
    # --------------------------------------------------------------------
    # Check given settings

    # Calculate the expected average sampling frequency
    hatfdag_s = hatKdag_s/hattau

    # Calculate the expected average sampling period and recalculate it to
    # the grid
    hatNdag_s = int(math.ceil(1/(hatfdag_s*Tg)))

    # Check: the expected number of sampling points can not be higher than
    # the number of grid points in a pattern
    if K_g < hatKdag_s:
        print('Error (ANGIE_pattern): ')
        print('The expected number of sampling points can not be higher than!')
        print('the number of grid points in a pattern!')
        sys.exit(1)

    # Check: minimum time must not be longer than the expected average
    # sampling period
    if K_min > hatNdag_s:
        print('Error (ANGIE_pattern):')
        print('The minimum time between samples (t_min) must not be longer ')
        print('than the average expected sampling period!')
        sys.exit(1)

    # Check: maximum time must not be shorter than the expected average
    # sampling period
    if K_max < hatNdag_s:
        print('Error (ANGIE_pattern):')
        print('The maximum time between samples (t_max) must not be shorter ')
        print('than the average expected sampling period!')
        sys.exit(1)

    # =================================================================

    # =================================================================
    # GENERATE THE PATTERNS

    # Allocate the matrix for all the sampling patterns
    mPatternsGrid = np.ones((N, hatKdag_s), dtype='int64')

    # Compute the square root of sigma (the standard deviation);
    # keep the original variance parameter for the output dictionary
    iVariance = sigma
    sigma = math.sqrt(sigma)

    # Compute the start value of the maximum limit
    nplus_k_start = K_g - K_min*(hatKdag_s-1)

    # Normal distribution
    vRandn = np.random.randn(N*(hatKdag_s-1))

    # Uniform distribution
    vRand = np.random.rand(N)

    # Run the generators
    mPatternsGrid = angie_c.ANGIE(mPatternsGrid, vRand, vRandn,
                                  int(nplus_k_start), int(hatKdag_s),
                                  int(K_g),
                                  float(sigma),
                                  int(K_min), float(K_max), int(N), int(1))

    # =================================================================

    # =================================================================
    # Generate the output dictionary with a generated pack of patterns

    dPatt = {}

    # Matrix with the sampling patterns (grid distances)
    dPatt['mPatternsGrid'] = mPatternsGrid

    # The time due of a sampling pattern
    dPatt['tS_r'] = hattau

    # The sampling grid period
    dPatt['Tg'] = Tg

    # The wanted average sampling ratio
    dPatt['fR_d'] = fdag_s

    # The expected average sampling ratio
    dPatt['fR_e'] = hatfdag_s

    # The variance parameter
    dPatt['iVar'] = iVariance

    # The minimum time between samples
    dPatt['tMin'] = t_min

    # The expected number of sampling points
    dPatt['iNS_e'] = hatKdag_s

    # The total number of grid points in a pattern
    dPatt['nGrid'] = K_g

    # =================================================================

    # =================================================================
    # Report to the console, if needed
    if bInfo == 1:
        print('done \n')

    # =================================================================

    # =================================================================
    # Return the dictionary with sampling patterns
    return (dPatt)
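
# -------------------------------------------------------------------------
# NOTE: a hypothetical configuration for the ANGIE generator above (the
# values below are illustrative only, and dSysSettings is assumed to
# carry the verbose/memory settings used throughout this document):
dPattPar = {}
dPattPar['tS'] = 1e-3        # pattern duration: 1 ms
dPattPar['Tg'] = 1e-6        # grid period: 1 us -> 1000 grid points
dPattPar['fR'] = 100e3       # average sampling frequency: 100 kHz
dPattPar['iVar'] = 1.0       # variance parameter
dPattPar['tMin'] = 5e-6      # minimum distance between samples: 5 us
dPattPar['nPatts'] = 10000   # number of patterns in a pack

# dPatt = generate_patterns(dPattPar, dSysSettings)
# -------------------------------------------------------------------------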
def create_dir(dSysSettings):

    # =================================================================
    # Get the verbose settings
    (bInfo, dSysSettings) = _verbose.get_settings(dSysSettings)
    # =================================================================

    # =================================================================
    # Get the current time
    sTime = time.localtime()

    # =================================================================

    # =================================================================
    # Check if dictionary with settings for patterns storing exists
    # If not, create it and return from the function
    if not 'dPattStoring' in dSysSettings:

        # If it does not exist, create it and set the patterns storing flag
        # to 0
        dPattStoring = {}
        dPattStoring['bPattStore'] = 0

        # Add the time of the beginning
        dPattStoring['sTime'] = sTime

        # Store the dictionary with settings for patterns storing
        dSysSettings['dPattStoring'] = dPattStoring

        # Report, if needed
        if bInfo == 1:
            sys.stdout.write('Patterns storing is switched off!')

        return dSysSettings

    # =================================================================

    # =================================================================
    # Get the dictionary with settings for patterns storing
    dPattStoring = dSysSettings['dPattStoring']

    # =================================================================

    # =================================================================
    # Add the time of the beginning
    dPattStoring['sTime'] = sTime

    # =================================================================

    # =================================================================
    # Check if the patterns storing on/off flag exists.
    # If not, create it and return from the function
    if not 'bPattStore' in dPattStoring:

        # If it does not exist, create it and clear it to 0
        dPattStoring['bPattStore'] = 0

        # Store the dictionary with settings for patterns storing
        # in the main system settings
        dSysSettings['dPattStoring'] = dPattStoring

        # Report, if needed
        if bInfo == 1:
            sys.stdout.write('Patterns storing is switched off!')

        return dSysSettings
    # =================================================================

    # =================================================================
    # Get the patterns storing on/off flag
    bPattStore = dPattStoring['bPattStore']

    # =================================================================

    # =================================================================
    # Create the correct directory name for patterns storing
    if bPattStore == 1:  # <- Patterns will be stored

        # Check if base name for the directory for storing
        # the generated patterns was given.
        if not 'strPattsDir' in dPattStoring:

            # If this name is not given while patterns storing is on, then
            # it is an error!
            strErrMsg = 'Field (dSysSettings.dPattStoring.strPattsDir)'
            strErrMsg = '%s is missing! \n' \
                % (strErrMsg)
            strErrMsg = '%sThis field must contain base name for the' \
                % (strErrMsg)
            strErrMsg = '%s directory for storing generated patterns.' \
                % (strErrMsg)
            sys.exit(strErrMsg)

        # Get the base name for the directory for storing
        # the generated patterns.
        strPattsDir = dPattStoring['strPattsDir']

        # Get parts of the current time
        iM = sTime.tm_mon      # Month
        iD = sTime.tm_mday     # Day of the month
        iH = sTime.tm_hour     # Hour
        im = sTime.tm_min      # Minute
        iS = sTime.tm_sec      # Second

        # Get the month name
        lMonths = ['Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun',
                   'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec']
        strMonth = lMonths[iM - 1]

        # Expand the tilde to the correct user home dir
        strBasicPattsDir = os.path.expanduser(strPattsDir)

        # Check if the basic directory for patterns exists
        if not os.path.exists(strBasicPattsDir):
            os.mkdir(strBasicPattsDir)

        # Construct a directory name and store it in the dictionary
        # with settings for patterns storing
        strPattsDIRName = "%s/patterns_%d%s_%d:%d:%d" \
            % (strBasicPattsDir, iD, strMonth, iH, im, iS)

        dPattStoring['strPattsDIRName'] = strPattsDIRName

        # Create a directory for patterns
        os.mkdir(strPattsDIRName)

        # Store directory which describes patterns storing
        dSysSettings['dPattStoring'] = dPattStoring

        # Report, if needed
        if bInfo == 1:
            strMessage = '\nNew directory for storing the patterns created'
            strMessage = '%s (%s)\n' \
                % (strMessage, strPattsDIRName)

            sys.stdout.write(strMessage)

    else:  # <- Patterns will not be stored
        # Report, if needed
        if bInfo == 1:
            sys.stdout.write('Patterns storing is switched off!')

    # =================================================================

    return dSysSettings
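
# -------------------------------------------------------------------------
# NOTE: a quick illustration of the directory-name scheme built above,
# with a hypothetical base directory (the %d:%d:%d time suffix is kept
# as in the original, although colons in names can be awkward on some
# file systems):
import os
import time

sTime = time.localtime()
lMonths = ['Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun',
           'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec']

strBasicPattsDir = os.path.expanduser('~/patterns')   # hypothetical base
strPattsDIRName = "%s/patterns_%d%s_%d:%d:%d" \
    % (strBasicPattsDir, sTime.tm_mday, lMonths[sTime.tm_mon - 1],
       sTime.tm_hour, sTime.tm_min, sTime.tm_sec)

print(strPattsDIRName)
# -------------------------------------------------------------------------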
Example #12
def generate_patterns(dPattPar, dSysSettings):

    # =================================================================
    # HEADER: CHECK IF THERE ARE ALL NEEDED CONFIGURATION FIELDS

    # The duration of the sampling pattern
    strErr = 'ERROR (JS_pattern):'
    if not 'tS' in dPattPar:
        print('%s No "tS" field in the configuration dictionary!' % (strErr))
        print('%s Incomplete configuration for the generator!' % (strErr))
        print('BAILING OUT!')
        sys.exit(1)

    # The sampling grid period
    if not 'Tg' in dPattPar:
        print('%s No "Tg" field in the configuration dictionary!' % (strErr))
        print('%s Incomplete configuration for the generator!' % (strErr))
        print('BAILING OUT!')
        sys.exit(1)

    # The desired average sampling ratio
    if not 'fR' in dPattPar:
        print('%s No "fR" field in the configuration dictionary!' % (strErr))
        print('%s Incomplete configuration for the generator!' % (strErr))
        print('BAILING OUT!')
        sys.exit(1)

    # The variance parameter
    if not 'iVar' in dPattPar:
        print('%s No "iVar" field in the configuration dictionary!'
              % (strErr))
        print('%s Incomplete configuration for the generator!' % (strErr))
        print('BAILING OUT!')
        sys.exit(1)

    # The number of patterns
    if not 'nPatts' in dPattPar:
        print('%s No "nPatts" field in the configuration dictionary!'
              % (strErr))
        print('%s Incomplete configuration for the generator!' % (strErr))
        print('BAILING OUT!')
        sys.exit(1)

    # =================================================================

    # =================================================================
    # Get the needed data

    # -----------------------------------------------------------------
    # TRAINS PARAMETERS (sPattPar):

    tau = dPattPar['tS']        # The duration of the sampling pattern

    Tg = dPattPar['Tg']         # The sampling grid period

    fdag_s = dPattPar['fR']     # The wanted average sampling frequency

    sigma = dPattPar['iVar']    # The variance parameter

    N = int(dPattPar['nPatts']) # The number of patterns in a pack

    # =================================================================

    # =================================================================
    # REPORT TO THE CONSOLE (If NEEDED)

    # Get the verbose settings
    (bInfo, _) = _verbose.get_settings(dSysSettings)

    if bInfo == 1:

        # -----------------------------------------------------------------
        # Get the needed data

        # System settings:
        inxPS = dSysSettings['inxPS']        # Current index of patterns
                                             # settings (patterns type)

        nPattSet = dSysSettings['nPattSet']  # The total number of different
                                             # patterns settings

        inxPP = dSysSettings['inxPP']        # Current index of patterns pack

        # Memory settings
        dMemory = dSysSettings['dMemory']    # The memory configuration
                                             # dictionary

        nMaxPacks = dMemory['nMaxPacks']     # The maximum number of packs with
                                             # patterns
        # -----------------------------------------------------------------

        # Print the info

        strMessage = '\n\nPatterns type JS (Variance: %.7f).  ' \
            % (sigma)
        sys.stdout.write(strMessage)

        strMessage = 'Patterns settings index: %d/%d.\n' \
            % (inxPS, nPattSet)
        sys.stdout.write(strMessage)

        strMessage = 'Index of the patterns pack = %d. (max = %d).' \
            % (inxPP+1, nMaxPacks)

        strMessage = '%s Size of a pack = %.1fK' \
            % (strMessage, N / 1e3)

        strMessage = '%s Patterns generation...' \
            % (strMessage)

        sys.stdout.write(strMessage)

    # =================================================================

    # =================================================================
    # PRECALCULATIONS

    # -----------------------------------------------------------------
    # Calculate the real time of the sampling pattern

    # Calculate the number of grid points in the sampling period
    K_g = math.floor(tau/Tg)                                # equation (22)

    # Calculate the real time of the sampling pattern
    hattau = K_g * Tg                                       # equation (22)

    # Calculate the expected number of sampling points in a pattern
    hatKdag_s = int(round(hattau*fdag_s))                   # equation (22)

    # Calculate the expected sampling frequency
    hatfdag_s = hatKdag_s/hattau                            # equation (23)

    # Calculate the expected sampling period
    hatTdag_s = 1/hatfdag_s                                 # equation (23)

    # -----------------------------------------------------------------
    # Recalculate to the grid

    # The expected sampling period
    hatNdag_s = round(hatTdag_s/Tg)                         # equation (23)

    # =================================================================

    # =================================================================
    # ENGINE STARTS HERE

    # Allocate the matrix for all the sampling patterns
    mPatternsGrid = np.ones((N, 2*hatKdag_s), dtype='int64')

    # Reset the max size of a pattern
    iSMaxPatt = 0

    # Loop over all patterns
    for inxP in range(1, N+1):

        # ------------------------------------------
        # Generate a vector with a sampling pattern
        vPattern = _js_engine(hatKdag_s, hatNdag_s, K_g, sigma)
        # ------------------------------------------

        # Get the size of the generated pattern
        (iS_v,) = vPattern.shape

        # Update the max size of a pattern
        iSMaxPatt = max((iS_v, iSMaxPatt))

        # Get the number of rows and columns in the matrix with patterns
        (iR_m, iC_m) = mPatternsGrid.shape

        # Check the inequalities between matrix with vectors and the current
        # vector
        if iS_v < iC_m:     # <- the size of a
                            # generated pattern
                            # is lower than the
                            # number of columns
                            # in the storage
                            # matrix

            # Calculate the size of empty space in the pattern
            iEmpty = iC_m - iS_v

            # Update the pattern (fill up the empty spaces with a patch of -1)
            vPatch = -1*np.ones(iEmpty)
            vPattern = np.hstack((vPattern, vPatch))

        elif iS_v > iC_m:   # <- the size of a
                            #    generated pattern
                            #    is higher than the
                            #    number of columns
                            #    in the storage matrix

            # The size of empty space in the storage matrix
            iEmpty = iS_v - iC_m

            # Update the storage matrix
            mPatch = -1*np.ones((iR_m, iEmpty), dtype='int64')
            mPatternsGrid = np.hstack((mPatternsGrid, mPatch))

        # Store the generated pattern
        mPatternsGrid[inxP-1, :] = vPattern

    # Clip the matrix with patterns
    vInx = range(0, iSMaxPatt)
    mPatternsGrid = mPatternsGrid[:, vInx]

    # =================================================================
    # Generate the output dictionary with a generated pack of patterns

    dPatt = {}

    # Matrix with the sampling patterns (grid distances)
    dPatt['mPatternsGrid'] = mPatternsGrid

    # The time due of a sampling pattern
    dPatt['tS_r'] = hattau

    # The sampling grid period
    dPatt['Tg'] = Tg

    # The wanted average sampling frequency
    dPatt['fR_d'] = fdag_s

    # The expected average sampling frequency
    dPatt['fR_e'] = hatfdag_s

    # The variance parameter
    dPatt['iVar'] = sigma

    # The expected number of sampling points
    dPatt['iNS_e'] = hatKdag_s

    # The total number of grid points in a pattern
    dPatt['nGrid'] = K_g

    # =================================================================

    # =================================================================
    # Report to the console, if needed
    if bInfo == 1:
        print('done \n')

    # =================================================================

    # =================================================================
    # Return the dictionary with a generated pack of patterns
    return (dPatt)
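
# -------------------------------------------------------------------------
# NOTE: the padding scheme above keeps patterns of different lengths in
# one matrix: short patterns are padded with -1, and the matrix grows
# whenever a generated pattern is longer than the current column count.
# A self-contained sketch of the same idea with hypothetical pattern
# lengths:
import numpy as np

lPatterns = [np.array([1, 4, 9]), np.array([2, 5]), np.array([1, 3, 6, 8])]

nCols = max(v.size for v in lPatterns)
mPatternsGrid = -1*np.ones((len(lPatterns), nCols), dtype='int64')
for inxP, vPattern in enumerate(lPatterns):
    mPatternsGrid[inxP, :vPattern.size] = vPattern

print(mPatternsGrid)
# -------------------------------------------------------------------------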
Example #13
def main(dSysSettings, dEvalData, dPattEval, dPatt):

    # =================================================================
    # Check if the frequency stability evaluation should be performed
    # If not, return from the function

    (bEvalOn, _) = _check_settings(dSysSettings)
    if not bEvalOn:
        return (dEvalData, dPattEval, dSysSettings)

    # =================================================================

    # =================================================================
    # Report progress

    # Get the verbose settings
    (bInfo, dSysSettings) = _verbose.get_settings(dSysSettings)

    # - - - - - - - - - - - - - - - - - - - - - - - - -

    # Report to the console, if needed
    if bInfo == 1:

        # Print to the console
        stdout.write('EVAL: Frequency stability...                   ')

    # =================================================================

    # =================================================================
    # Check if the dictionary with internal data of frequency stability
    # evaluation must be created
    if not 'dFreqData' in dEvalData:

        # Dictionary does not exist, must be created:

        # -------------------------------------------------------------
        # Get the number of patterns in a pack (size of a pattern pack)

        dMemory = dSysSettings['dMemory']   # Memory configuration dictionary

        nPattPack = dMemory['nPattPack']    # The number of patterns in a pack

        # -----------------------------------------------------------------

        # Create the dictionary with internal data of frequency stability
        # evaluation
        dFreqData = {}

        # Reset the vector with frequency errors of patterns from a last pack
        # of patterns
        dFreqData['vFreqErr'] = np.nan*np.ones(nPattPack)

        # Reset the vector with convergence of average frequency error
        dFreqData['vFreqErrConv'] = np.nan*np.ones(nPattPack)

        # Reset the vector with convergence of the ratio of patterns which
        # do not violate frequency stability requirement
        dFreqData['vFreqRConv'] = np.nan*np.ones(nPattPack)

        # Store the dictionary with internal data of frequency stability
        # evaluation in the dictionary with internal data of evaluation
        # functions and counters
        dEvalData['dFreqData'] = dFreqData

    # =================================================================

    # =================================================================
    # Get the needed data

    mPatternsGrid = dPatt['mPatternsGrid']  # The matrix with sampling patterns

    # - - - - - - - - - - - - - - - - - - - - - - - - -

    dFreqData = dEvalData['dFreqData']    # The dictionary with internal data
                                          # of frequency stability evaluation

    vFreqErr = dFreqData['vFreqErr']         # Vector with frequency errors of
                                             # patterns from a last pack of
                                             # patterns

    vFreqErrConv = dFreqData['vFreqErrConv']    # Vector with convergence of
                                                # the average frequency error

    vFreqRConv = dFreqData['vFreqRConv']        # Vector with convergence of
                                                # the ratio of patterns which
                                                # do not violate frequency
                                                # stability requirement

    # - - - - - - - - - - - - - - - - - - - - - - - - -

    iNS_e = dPatt['iNS_e']              # Wanted (expected) number of sampling
                                        # points in a pattern

    # - - - - - - - - - - - - - - - - - - - - - - - - -

    inxPP = dSysSettings['inxPP']       # The current index of pattern pack

    # - - - - - - - - - - - - - - - - - - - - - - - - -

    dMemory = dSysSettings['dMemory']   # Memory configuration dictionary

    nPattPack = dMemory['nPattPack']    # The number of patterns in a pack

    # - - - - - - - - - - - - - - - - - - - - - - - - -
    # =================================================================

    # =================================================================
    # Compute the frequency errors

    # Loop over all sampling patterns
    for inxP in range(0, nPattPack):

        # Get the current sampling pattern
        vPattern = mPatternsGrid[inxP, :]

        # Calculate the number of samples in the current pattern
        nN_ = int(sum(vPattern > 0))

        # Compute global index of the current pattern
        inxGP = inxPP*nPattPack + inxP

        # - - - - - - - - - - - - - - - - - - - - - - - - -
        # Calculate the error (the float cast avoids truncating
        # integer division under Python 2)
        iEf = (float(iNS_e - nN_)/iNS_e)**2

        # Store the frequency error of the current pattern
        vFreqErr[inxP] = iEf

        # - - - - - - - - - - - - - - - - - - - - - - - - -
        # Calculate the current average frequency error
        # (float casts avoid truncating integer division under Python 2)
        if inxGP == 0:
            iEfAvg = iEf
        elif inxP == 0:
            iEfAvg = \
                (float(inxGP)/(inxGP+1))*vFreqErrConv[nPattPack-1] + \
                (1.0/(inxGP+1))*iEf
        else:
            iEfAvg = \
                (float(inxGP)/(inxGP+1))*vFreqErrConv[inxP-1] + \
                (1.0/(inxGP+1))*iEf

        # Store the current average frequency error
        vFreqErrConv[inxP] = iEfAvg

        # - - - - - - - - - - - - - - - - - - - - - - - - -

        # Compute the ratio of patterns which do not
        # violate frequency stability requirement
        if iEf == 0:
            iC = 1
        else:
            iC = 0

        if inxGP == 0:
            iRPatt = iC
        elif inxP == 0:
            iRPatt = \
                (float(inxGP)/(inxGP+1))*vFreqRConv[nPattPack-1] + \
                (1.0/(inxGP+1))*iC
        else:
            iRPatt = \
                (float(inxGP)/(inxGP+1))*vFreqRConv[inxP-1] + \
                (1.0/(inxGP+1))*iC

        # Store the current ratio of patterns which do not
        # violate frequency stability requirement
        vFreqRConv[inxP] = iRPatt

    # =================================================================

    # =================================================================
    # Store the data:

    # -----------------------------------------------------------------
    # -----------------------------------------------------------------
    # The dictionary with internal data of frequency stability evaluation
    # (dFreqData):

    # Store the vector with frequency errors of patterns from
    # a last pack of patterns
    dFreqData['vFreqErr'] = vFreqErr

    # Store the vector with convergence of the average frequency error
    dFreqData['vFreqErrConv'] = vFreqErrConv

    # Store the vector with convergence of the ratio of patterns which do not
    # violate frequency requirement
    dFreqData['vFreqRConv'] = vFreqRConv

    # -----------------------------------------------------------------

    # Store the dictionary with internal data of frequency stability evaluation
    dEvalData['dFreqData'] = dFreqData

    # -----------------------------------------------------------------
    # -----------------------------------------------------------------
    # The dictionary with patterns evaluation results (dPattEval):

    # Store the current average frequency error value
    dPattEval['iFreqErr'] = vFreqErrConv[nPattPack-1]

    # Store the current ratio of patterns which do not violate
    # frequency stability requirement
    dPattEval['iFreqRPatt'] = vFreqRConv[nPattPack-1]

    # Store the vector with convergence of the average frequency error
    dPattEval['vFreqErrConv'] = vFreqErrConv

    # Store the vector with convergence of the ratio of patterns which do not
    # violate frequency requirement
    dPattEval['vFreqRConv'] = vFreqRConv

    # =================================================================

    # =================================================================
    # Report to the console, if needed
    if bInfo == 1:
        print('done. (error: %.4f   ratio of correct patterns: %.4f)'
              % (vFreqErrConv[nPattPack-1], vFreqRConv[nPattPack-1]))

    # =================================================================

    # =================================================================
    # Return
    return (dEvalData, dPattEval, dSysSettings)
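
# -------------------------------------------------------------------------
# NOTE: the update used above is the standard incremental mean: after
# n+1 observations, avg = (n/(n+1))*avg + (1/(n+1))*x. The float casts
# in the fixed code matter, since with plain Python 2 integers both
# n/(n+1) and 1/(n+1) would truncate to 0. A quick check against
# np.mean with hypothetical data:
import numpy as np

vX = np.array([0.2, 0.5, 0.1, 0.9])

iAvg = 0.0
for n in range(vX.size):
    iAvg = (float(n)/(n + 1))*iAvg + (1.0/(n + 1))*vX[n]

print('incremental: %.4f   np.mean: %.4f' % (iAvg, np.mean(vX)))
# -------------------------------------------------------------------------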
def main(dSysSettings, dEvalData, dPattEval, dPatt):

    # =================================================================
    # Check if the counter of unique patterns should be run

    # Check if the user settings for the unique patterns counter exist
    (bCntrOn) = _check_settings(dSysSettings)
    if not bCntrOn:
        return (dEvalData, dPattEval, dSysSettings)

    # =================================================================

    # =================================================================
    # Report progress

    # Get the verbose settings
    (bInfo, dSysSettings) = _verbose.get_settings(dSysSettings)

    # - - - - - - - - - - - - - - - - - - - - - - - - -

    # Report to the console, if needed
    if bInfo == 1:

        # Print to the console
        stdout.write('CNTR: Unique patterns (total) counter...  ')

    # =================================================================

    # =================================================================
    # Check if the dictionary with unique patterns counter data must be
    # created
    if not 'dUniqTotData' in dEvalData:

        # Dictionary does not exist, must be created:

        # -----------------------------------------------------------------
        # Get the number of patterns in a pack (size of a pattern pack)

        dMemory = dSysSettings['dMemory']    # Memory configuration dictionary

        nMaxPacks = dMemory['nMaxPacks']     # The maximum number of packs
                                             # with patterns

        # -----------------------------------------------------------------

        # Create the dictionary with unique patterns counter data
        dUniqTotData = {}

        # Reset the vector with the number of unique patterns
        dUniqTotData['vUniqPattTot'] = np.nan*np.ones(nMaxPacks)

        # Reset the index of the file with unique patterns
        dUniqTotData['iFil'] = 0

        # Reset the number of unique patterns
        dUniqTotData['nUniqueTot'] = 0

        # - - - - - - - - - - - - - - - - - - - - - - - - -
        # - - - - - - - - - - - - - - - - - - - - - - - - -

        # Store the dictionary with unique patterns counter data in
        # the dictionary with internal data of evaluation functions
        # and counters
        dEvalData['dUniqTotData'] = dUniqTotData

    # =================================================================

    # =================================================================
    # Check if the directory for patterns storing was created

    # =================================================================
    # Get the parameters of patterns storing
    (strUniqPattsDirName, dSysSettings) = get_storing_settings(dSysSettings)

    # =================================================================

    # =================================================================
    # Get the needed data

    mPatternsGrid = dPatt['mPatternsGrid']  # The matrix with sampling patterns

    # - - - - - - - - - - - - - - - - - - - - - - - - -

    dMemory = dSysSettings['dMemory']    # Memory configuration dictionary

    nPattPack = dMemory['nPattPack']     # The number of patterns in a pack

    # - - - - - - - - - - - - - - - - - - - - - - - - -

    # Get the dictionary with unique patterns (total) counter data
    dUniqTotData = dEvalData['dUniqTotData']

    # Get the vector with the number of unique patterns (total)
    vUniqPattTot = dUniqTotData['vUniqPattTot']

    # Get the index of the file with unique patterns
    iFil = dUniqTotData['iFil']

    # Get the number of unique patterns
    nUnique = dUniqTotData['nUniqueTot']

    # - - - - - - - - - - - - - - - - - - - - - - - - -

    # System settings:
    inxPS = dSysSettings['inxPS']  # Current index of patterns settings
                                   # (patterns type)
    # Get the index of patterns pack
    inxPP = dSysSettings['inxPP']

    # =================================================================

    # =================================================================
    # Compute the number of unique patterns

    # Remove the non-unique patterns
    (mPattUniqPack, _) = _uniq_rows(mPatternsGrid)

    # Calculate the number of patterns left in the current new pack with
    # patterns
    nPattUniqPack = mPattUniqPack.shape[0]

    # Reset the auxiliary index for file with unique patterns
    iFil_ = 1

    # ===========================================================
    # The loop over all previous files with unique patterns starts here
    #
    # The loop ends if all the previous files were processed, or the number of
    # unique patterns in the current pack drops to 0

    # Reset the progress service
    strSpaceTab = '                                          '
    dProg = _loop_progress.reset(2, 2, strSpaceTab, iFil)

    nProgLines = 0  # Reset the number of lines printed by the progress
                    # function

    # Loop starts here
    while (iFil_ <= iFil) and nPattUniqPack > 0:

        # -------------------------------------------------------
        # Read the current file with patterns
        #

        # Construct the current name of the file with unique patterns
        strPattFileName = '%s/patterns_uniqueTot%d_%d.dat' % \
                          (strUniqPattsDirName, inxPS, iFil_)
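        # (hypothetical example: with strUniqPattsDirName == 'patts',
        #  inxPS == 2 and iFil_ == 1 this resolves to
        #  'patts/patterns_uniqueTot2_1.dat')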

        # Read the current file with unique patterns
        patts_file = open(strPattFileName, 'rb')
        mPattUniqFile = cPickle.load(patts_file)
        patts_file.close()

        # Calculate the number of unique patterns in the file
        nPattUniqFile = mPattUniqFile.shape[0]

        # -------------------------------------------------------

        # -------------------------------------------------------
        # Remove from the currently generated pack of patterns those
        # patterns which are already in the currently processed file
        # with unique patterns

        # Construct a combined package
        mPattComb = _concatenate_arrays(mPattUniqFile, mPattUniqPack)

        # Find the unique patterns
        (_, vInxUniqComb_) = _uniq_rows(mPattComb)

        # Get the indices of unique patterns in the currently generated
        # pack of patterns
        vInxUniq = vInxUniqComb_[vInxUniqComb_ >= nPattUniqFile]
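        # (worked example: with nPattUniqFile == 3 and unique rows of the
        #  combined matrix at indices [0, 1, 2, 4, 6], vInxUniq == [4, 6],
        #  i.e. rows 1 and 3 of the currently generated pack survive)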

        # Calculate the new number of unique patterns in the current pack
        nPattUniqPackNew = vInxUniq.shape[0]

        # -------------------------------------------------------

        # Update the matrix with unique patterns from the current pack,
        # if any patterns were removed
        if nPattUniqPackNew < nPattUniqPack:
            mPattUniqPack = mPattComb[vInxUniq, :]
            nPattUniqPack = nPattUniqPackNew

        # -------------------------------------------------------

        # Report progress
        (dProg, nProgLines) = _loop_progress.service(dProg, iFil_, iFil)

        # Move index of the current file forward
        iFil_ = iFil_ + 1

    # Report 100% progress
    (dProg, nProgLines) = _loop_progress.service(dProg, iFil, iFil)

    # ===========================================================

    # Update the total number of unique patterns
    nUnique = nUnique + nPattUniqPack

    # Store the number of unique patterns
    vUniqPattTot[inxPP] = nUnique

    # =================================================================

    # =================================================================
    # Store the found unique patterns in the files, if there are any
    if inxPP == 0:

        #
        # This set of code runs for the first pack of patterns ONLY (!)
        #

        # Set the index of unique files
        iFil = 1

        # Construct the current name of the file with unique patterns
        strPattFileName = '%s/patterns_uniqueTot%d_%d.dat'  \
            % (strUniqPattsDirName, inxPS, iFil)

        # Store all the unique patterns
        patts_file = open(strPattFileName, 'wb')
        cPickle.dump(mPattUniqPack, patts_file)
        patts_file.close()

        #
        # -------------------------------------------------------------
        #

    else:

        # Are there any unique patterns left?
        if nPattUniqPack > 0:

            # ---------------------------------------------------------

            #
            # First part of saving patterns: fill up the last file with
            # patterns

            # Calculate the number of patterns which could be stored in the
            # last file with unique patterns
            # (last file: the file with the highest number)
            nLeftSpaceFile = nPattPack - nPattUniqFile

            # Calculate the number of patterns to be put into the file
            nPatts2File = min(nLeftSpaceFile, nPattUniqPack)
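            # (worked example: with nPattPack == 1000 and nPattUniqFile == 800
            #  the last file has space for 200 more patterns; if
            #  nPattUniqPack == 500, then nPatts2File == 200 here and the
            #  remaining 300 patterns are saved to a new file below)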

            # Fill up the file, if something should be put to a file
            if nPatts2File > 0:

                # Construct a matrix to be put into file
                mPatt2File = \
                    _concatenate_arrays(mPattUniqFile,
                                        mPattUniqPack[range(nPatts2File), :])

                # Store the unique patterns to be put into the file
                patts_file = open(strPattFileName, 'wb')
                cPickle.dump(mPatt2File, patts_file)
                patts_file.close()

            # Calculate the number of unique patterns left in the pack
            nPattUniqPackLeft = nPattUniqPack - nPatts2File

            # ---------------------------------------------------------

            #
            # Second part of saving patterns: Save the rest of patterns
            #
            # Are there any unique patterns left?
            if nPattUniqPackLeft > 0:

                # Move the file index forward
                iFil = iFil + 1

                # Construct the current name of the file with unique patterns
                strPattFileName = '%s/patterns_uniqueTot%d_%d.dat' % \
                    (strUniqPattsDirName, inxPS, iFil)

                # Construct a matrix to be put into file
                mPatt2File = \
                    mPattUniqPack[range(nPatts2File, nPattUniqPack), :]

                # Store the unique patterns to be put into the file
                patts_file = open(strPattFileName, 'wb')
                cPickle.dump(mPatt2File, patts_file)
                patts_file.close()

            # =========================================================

    # =================================================================
    # Store the data

    # Store the vector with the number of unique patterns
    dUniqTotData['vUniqPattTot'] = vUniqPattTot

    # Store the index of the file with unique patterns
    dUniqTotData['iFil'] = iFil

    # Store the number of unique patterns
    dUniqTotData['nUniqueTot'] = nUnique

    # -----------------------------------------------------------------

    # Store the dictionary with data for unique patterns counter in the
    # dictionary with internal data of evaluation functions and counters
    dEvalData['dUniqTotData'] = dUniqTotData

    # -----------------------------------------------------------------
    # -----------------------------------------------------------------

    # Store the current total number of unique patterns
    # in the dictionary with patterns evaluation results (dPattEval)
    dPattEval['nUniqueTot'] = nUnique

    # Store the vector with the number of unique patterns
    # in the dictionary with patterns evaluation results (dPattEval)
    dPattEval['vUniqPattTot'] = vUniqPattTot

    # =================================================================

    # =================================================================
    # Report to the console, if needed
    if bInfo == 1:
        if nProgLines == 0:
            stdout.write('     ')

        strMessage = ('done. (the number of unique patterns (total): %d)\n') % \
            (nUnique)
        stdout.write(strMessage)

    # =================================================================

    # =================================================================
    return (dEvalData, dPattEval, dSysSettings)
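
The helper _uniq_rows is not shown in this example. Judging from the calls
above, it takes a 2D matrix and returns the unique rows together with the
indices of their first occurrences in the input. A minimal sketch under that
assumption (the real helper may be implemented differently, e.g. via sorting):

def _uniq_rows(mRows):

    # Return the unique rows of a 2D numpy array together with the
    # indices of their first occurrences in the input matrix
    dSeen = {}    # maps a row (as a tuple) to its first index
    lInx = []     # indices of first occurrences, in order of appearance

    for iRow in range(mRows.shape[0]):
        tRow = tuple(mRows[iRow, :])
        if tRow not in dSeen:
            dSeen[tRow] = iRow
            lInx.append(iRow)

    vInx = np.array(lInx)
    return (mRows[vInx, :], vInx)

Because the combined matrix puts the file patterns before the pack patterns,
a pack row duplicated in the file has its first occurrence in the file part,
which is why filtering the returned indices with >= nPattUniqFile keeps only
the pack rows that are genuinely new.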
Example #15
def _print_mem_parameters(dSysSettings):

    # Get the verbose settings
    (bInfo, _) = _verbose.get_settings(dSysSettings)

    # If being verbose is off, return from this function
    if bInfo == 0:
        return

    # Get the needed data

    # Get the dictionary with memory configuration
    dMemory = dSysSettings["dMemory"]

    # Get the amount of RAM which will be used by the system
    iRAM = dMemory["iRAM"]

    # Get the current total memory usage
    nTotMemUsage = dMemory["nTotMemUsage"]

    # -----------------------------------------------------------------

    # Get the expected memory use of a pattern
    iRAMPatt = dMemory["iRAMPatt"]

    # Get the max size of file-unbuffered patterns pack according to memory
    # conditions
    nMaxPattsRAM = dMemory["nMaxPattsRAM"]

    # Get the max size of file-unbuffered pack with patterns specified
    # by user
    nMaxPackUser = round(dMemory["nMaxPack"])

    # Get the correct size of a patterns pack
    nPattPack = dMemory["nPattPack"]

    #  ----------------------------------------------------------------

    # Get the dictionary with computations stop criteria
    dStop = dSysSettings["dStop"]

    # Get the maximum number of patterns analyzed for one type of patterns
    nMaxPatts = dStop["nMaxPatts"]

    # Get the maximum number of patterns packs
    nMaxPacks = dMemory["nMaxPacks"]

    # =================================================================
    # Start a message
    print("------------------------------------------------------------\n\n")

    # -----------------------------------------------------------------

    # General memory parameters:

    # Total amount of memory available to PaTeS
    print("The max amount of memory which can be used by PaTeS:  %.3f [GB]" % (iRAM / 1e9))

    # Expected total memory usage
    strMessage = "The expected maximum RAM memory usage:               "
    print("%s %.3f [MB] (%.2f %%)" % (strMessage, nTotMemUsage / 1e6, nTotMemUsage / iRAM * 100))

    # -----------------------------------------------------------------

    print ("\n------------------------------------------------------------\n\n")

    # Size of a patterns pack

    #  Memory of a pattern
    strMessage = "The expected amount of memory used by one pattern:          "
    print ("%s  %.3f [kB]") % (strMessage, iRAMPatt / 1e3)

    # Max size of a pack according to memory conditions
    strMessage = "Max size of a patterns pack according to memory conditions: "
    print("%s  %.3f M (%d)" % (strMessage, nMaxPattsRAM / 1e6, nMaxPattsRAM))

    # Max size of a pack specified by the user
    stdout.write("Max size of a patterns pack specified by user:                ")

    if nMaxPackUser == float("inf"):   # if it is infinite, use slightly
                                       # different printing
        print("infinite")
    else:
        print("%.3f M (%d)" % (nMaxPackUser / 1e6, nMaxPackUser))

    # Correct size of a pack
    strMessage = "The correct size of a patterns pack:"
    print("%s                          %.3f M (%d)" % (strMessage, nPattPack / 1e6, nPattPack))

    # -----------------------------------------------------------------

    # =================================================================
    # Max number of patterns and max number of patterns packs

    # Max number of patterns which will be evaluated
    strMessage = "The max number of patterns which will be eval."
    print("%s for one type:  %.3f M (%d)" % (strMessage, nMaxPatts / 1e6, nMaxPatts))

    # Max number of patterns packs
    strMessage = "The max number of patterns packs which will be generated"
    print("%s:     %d (%.3f k)" % (strMessage, nMaxPacks, nMaxPacks / 1e3))

    print("\n")

    # Expected total memory usage (repeated)
    strMessage = "The expected maximum RAM memory usage:                "
    print("%s %.3f [MB] (%.2f %%)" % (strMessage, nTotMemUsage / 1e6, nTotMemUsage / iRAM * 100))

    # Print a delimiter
    stdout.write("================================================")
    stdout.write("================================================")
    stdout.write("=========\n")

    return
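
The printed values suggest that the correct pack size (nPattPack) is the
smaller of the memory-based limit (nMaxPattsRAM) and the user-specified limit
(nMaxPack, possibly infinite). A minimal sketch of such a computation,
assuming this relation holds (the actual derivation lives elsewhere in the
system and may involve more constraints, e.g. the minimum pack size):

def _compute_pack_size(dMemory):

    # Hypothetical helper: choose the pack size as the smaller of the
    # memory-based limit and the (possibly infinite) user limit
    nMaxPattsRAM = dMemory["nMaxPattsRAM"]   # limit from memory conditions
    nMaxPackUser = dMemory["nMaxPack"]       # limit specified by the user

    nPattPack = int(min(nMaxPattsRAM, nMaxPackUser))

    # Store the computed size back in the memory configuration
    dMemory["nPattPack"] = nPattPack
    return dMemory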
Example #16
def _compute_mem_parameters_of_evals(lPattSettings, dSysSettings):

    # =================================================================
    # Reset the memory parameters

    # Reset the number of evaluations performed on patterns
    nEvals = 0

    # Reset the minimum number of patterns in a pattern pack
    nMinPatts = 0

    # Reset the pattern pack memory coefficient
    # NOTE: This coefficient scales memory usage of evaluations vs size
    #       of the patterns pack
    iMemCoef = 0

    # Reset the constant memory usage
    # NOTE: This parameter defines memory usage of evaluations and counters
    nMemUsage = 0

    # =================================================================

    # =================================================================
    # Frequency stability evaluation

    # Get the memory parameters for frequency stability evaluation
    (bEvalFreqOn, nMinPattsFreq, iMemCoefFreq, nMemUsageFreq) = _freq_eval.memory(dSysSettings)

    # Update the number of evaluations performed on patterns
    nEvals = nEvals + bEvalFreqOn

    # Update the minimum number of patterns in a patterns pack
    nMinPatts = max([nMinPatts, nMinPattsFreq])

    # Update the pattern pack memory coefficient
    iMemCoef = iMemCoef + iMemCoefFreq

    # Update the constant memory usage
    nMemUsage = nMemUsage + nMemUsageFreq

    # =================================================================

    # =================================================================
    # Evaluation of allowed minimum distance between patterns

    # Get the memory parameters for evaluation of allowed minimum distance
    (bEvalMinOn, nMinPattsEvalMin, iMemCoefEvalMin, nMemUsageEvalMin) = _min_distance_eval.memory(dSysSettings)

    # Update the number of evaluations performed on patterns
    nEvals = nEvals + bEvalMinOn

    # Update the minimum number of patterns in a patterns pack
    nMinPatts = max([nMinPatts, nMinPattsEvalMin])

    # Update the pattern pack memory coefficient
    iMemCoef = iMemCoef + iMemCoefEvalMin

    # Update the constant memory usage
    nMemUsage = nMemUsage + nMemUsageEvalMin

    # =================================================================

    # =================================================================
    # Evaluation of allowed maximum distance between patterns

    # Get the memory parameters for evaluation of allowed maximum distance
    (bEvalMaxOn, nMinPattsEvalMax, iMemCoefEvalMax, nMemUsageEvalMax) = _max_distance_eval.memory(dSysSettings)

    # Update the number of evaluations performed on patterns
    nEvals = nEvals + bEvalMaxOn

    # Update the minimum number of patterns in a patterns pack
    nMinPatts = max([nMinPatts, nMinPattsEvalMax])

    # Update the pattern pack memory coefficient
    iMemCoef = iMemCoef + iMemCoefEvalMax

    # Update the constant memory usage
    nMemUsage = nMemUsage + nMemUsageEvalMax

    # =================================================================

    # =================================================================
    # Correct patterns counter

    # Get the memory parameters for correct patterns counter
    (bEvalCorrOn, nMinPattsCorr, iMemCoefCorr, nMemUsageCorr) = _correct_counter.memory(dSysSettings)

    # Update the number of evaluations performed on patterns
    nEvals = nEvals + bEvalCorrOn

    # Update the minimum number of patterns in a patterns pack
    nMinPatts = max([nMinPatts, nMinPattsCorr])

    # Update the pattern pack memory coefficient
    iMemCoef = iMemCoef + iMemCoefCorr

    # Update the constant memory usage
    nMemUsage = nMemUsage + nMemUsageCorr

    # =================================================================

    # =================================================================
    # PDF evaluation

    # Get the memory parameters for PDF evaluation
    (bEvalPDFOn, nMinPattsPDF, iMemCoefPDF, nMemUsagePDF) = _pdf_eval.memory(lPattSettings, dSysSettings)

    # Update the number of evaluations performed on patterns
    nEvals = nEvals + bEvalPDFOn

    # Update the minimum number of patterns in a patterns pack
    nMinPatts = max([nMinPatts, nMinPattsPDF])

    # Update the pattern pack memory coefficient
    iMemCoef = iMemCoef + iMemCoefPDF

    # Update the constant memory usage
    nMemUsage = nMemUsage + nMemUsagePDF

    # =================================================================

    # =================================================================
    # PDF evaluation (total)

    # Get the memory parameters for PDF evaluation (total)
    (bEvalPDFTotOn, nMinPattsPDFTot, iMemCoefPDFTot, nMemUsagePDFTot) = _pdf_total_eval.memory(
        lPattSettings, dSysSettings
    )

    # Update the number of evaluations performed on patterns
    nEvals = nEvals + bEvalPDFTotOn

    # Update the minimum number of patterns in a patterns pack
    nMinPatts = max([nMinPatts, nMinPattsPDFTot])

    # Update the pattern pack memory coefficient
    iMemCoef = iMemCoef + iMemCoefPDFTot

    # Update the constant memory usage
    nMemUsage = nMemUsage + nMemUsagePDFTot

    # =================================================================

    # =================================================================
    # Unique patterns counter

    # Get the memory parameters for unique patterns counter
    (bEvalUniqueOn, nMinPattsUnique, iMemCoefUnique, nMemUsageUnique) = _uniq_counter.memory(dSysSettings)

    # Update the number of evaluations performed on patterns
    nEvals = nEvals + bEvalUniqueOn

    # Update the minimum number of patterns in a patterns pack
    nMinPatts = max([nMinPatts, nMinPattsUnique])

    # Update the pattern pack memory coefficient
    iMemCoef = iMemCoef + iMemCoefUnique

    # Update the constant memory usage
    nMemUsage = nMemUsage + nMemUsageUnique

    # =================================================================

    # =================================================================
    # Unique patterns counter (total)

    # Get the memory parameters for unique patterns counter (total)
    (bEvalUniqueTotOn, nMinPattsUniqueTot, iMemCoefUniqueTot, nMemUsageUniqueTot) = _uniq_total_counter.memory(
        dSysSettings
    )

    # Update the number of evaluations performed on patterns
    nEvals = nEvals + bEvalUniqueTotOn

    # Update the minimum number of patterns in a patterns pack
    nMinPatts = max([nMinPatts, nMinPattsUniqueTot])

    # Update the pattern pack memory coefficient
    iMemCoef = iMemCoef + iMemCoefUniqueTot

    # Update the constant memory usage
    nMemUsage = nMemUsage + nMemUsageUniqueTot

    # =================================================================

    # =================================================================
    # Store all the computed data in the system settings

    # Get the dictionary with memory configuration
    dMemory = dSysSettings["dMemory"]

    # Store the number of evaluations performed on patterns
    dMemory["nEvals"] = nEvals

    # Store the minimum number of patterns in a patterns pack
    dMemory["nMinPatts"] = nMinPatts

    # Store the pattern pack memory coefficient
    dMemory["iMemCoef"] = iMemCoef

    # Store the constant memory usage
    dMemory["nMemUsage"] = nMemUsage

    # Store the dictionary with memory configuration in the dictionary with
    # main system settings
    dSysSettings["dMemory"] = dMemory

    # =================================================================

    # =================================================================
    # Report to the screen (if needed)

    # Get the verbose settings
    (bInfo, dSysSettings) = _verbose.get_settings(dSysSettings)

    # Print the info about the evaluations and counters which are on or off
    if bInfo == 1:

        # --------------------------------------------------------
        # Get the needed data

        # System settings:

        # Current index of patterns settings (patterns type)
        inxPS = dSysSettings["inxPS"]

        # The total number of different patterns settings
        nPattSet = dSysSettings["nPattSet"]

        # --------------------------------------------------------

        # List with on/off message
        lMes = ["off", "on"]

        # Print header
        stdout.write("\n")
        stdout.write("================================================")
        stdout.write("================================================")
        stdout.write("=========\n")
        stdout.write("================================================")
        stdout.write("================================================")
        stdout.write("=========\n")

        print "Patterns settings index: %d/%d.\n" % (inxPS, nPattSet)

        print "Evaluations on patterns: \n"

        # Print evaluations and counters info:

        # Frequency
        print("Frequency eval:                       %s " % lMes[bEvalFreqOn])

        # Min distance
        print("Minimum distance eval:                %s " % lMes[bEvalMinOn])

        # Max distance
        print("Maximum distance eval:                %s " % lMes[bEvalMaxOn])

        # PDF
        print("PDF eval:                             %s " % lMes[bEvalPDFOn])

        # PDF (total)
        print("PDF eval (total):                     %s " % lMes[bEvalPDFTotOn])

        # Correct patterns
        print("Correct patterns counter:             %s " % lMes[bEvalCorrOn])

        # Unique patterns
        print("Unique patterns counter:              %s " % lMes[bEvalUniqueOn])

        # Unique patterns (total)
        print("Unique patterns counter (total):      %s " % lMes[bEvalUniqueTotOn])

        strMessage = "\nThe number of evaluations which will be performed"
        print("%s on patterns: %d\n" % (strMessage, nEvals))

    # =================================================================

    return dSysSettings
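
All the evaluation modules queried above (_freq_eval, _min_distance_eval,
_max_distance_eval, _correct_counter, _pdf_eval, _pdf_total_eval,
_uniq_counter, _uniq_total_counter) share one return contract for memory():
a 4-tuple (bEvalOn, nMinPatts, iMemCoef, nMemUsage), even though some of them
also take lPattSettings as an argument. A minimal sketch of a module-level
memory() function that follows this contract (illustrative only; the flag
name 'bMyEvalOn' and the numbers are placeholders, not taken from the real
modules):

def memory(dSysSettings):

    # Illustrative memory() for an evaluation module. Returns:
    #   bEvalOn   - 1 if the evaluation is switched on, 0 otherwise
    #   nMinPatts - minimum number of patterns needed in a pattern pack
    #   iMemCoef  - memory used per pattern in a pack
    #   nMemUsage - constant memory used by the evaluation

    # Hypothetical on/off flag read from the system settings
    bEvalOn = 1 if dSysSettings.get('bMyEvalOn', 0) else 0
    if not bEvalOn:
        return (0, 0, 0, 0)

    nMinPatts = 1       # the evaluation works pattern-by-pattern
    iMemCoef = 8        # e.g. one float64 per pattern in a pack
    nMemUsage = 1024    # e.g. a small fixed internal buffer

    return (bEvalOn, nMinPatts, iMemCoef, nMemUsage)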