def main(dSysSettings, dEvalData, dPattEval, dPatt):

    # =================================================================
    # Check if the counter of unique patterns should be run

    # Check if the user settings for the unique patterns counter exist
    (bCntrOn, dUniqueCntr) = _check_settings(dSysSettings)
    if not bCntrOn:
        return (dEvalData, dPattEval, dSysSettings)

    # =================================================================

    # =================================================================
    # Report progress

    # Get the verbose settings
    (bInfo, dSysSettings) = _verbose.get_settings(dSysSettings)

    # - - - - - - - - - - - - - - - - - - - - - - - - -

    # Report to the console, if needed
    if bInfo == 1:

        # Print to the console
        stdout.write('CNTR: Unique patterns counter...          ')

    # =================================================================

    # =================================================================
    # Check if the dictionary with internal data of the unique patterns
    # counter must be created
    if 'dUniqData' not in dEvalData:

        # Dictionary does not exist, must be created:

        # -----------------------------------------------------------------
        # Get the number of patterns in a pack (size of a pattern pack)

        dMemory = dSysSettings['dMemory']    # Memory configuration dictionary

        nMaxPacks = dMemory['nMaxPacks']     # The maximum number of packs with
                                             # patterns

        # -----------------------------------------------------------------

        # Create the dictionary with internal data of the unique patterns
        # counter
        dUniqData = {}

        # Reset the vector with the number of unique patterns
        dUniqData['vUniqPatt'] = np.nan*np.zeros(nMaxPacks)

        # Reset the index of the file with unique patterns
        dUniqData['iFil'] = 0

        # Reset the number of unique patterns
        dUniqData['nUnique'] = 0

        # - - - - - - - - - - - - - - - - - - - - - - - - -
        # - - - - - - - - - - - - - - - - - - - - - - - - -

        # Store the dictionary with internal data of the unique patterns
    # counter in the dictionary with internal data of evaluation
        # functions and counters
        dEvalData['dUniqData'] = dUniqData

    # =================================================================

    # =================================================================
    # Get the settings for max allowed errors

    # ---------------------------------------------------
    #  Frequency stability data:

    # Is frequency evaluation on, and is there a user setting
    # for the max frequency error?
    if 'dFreqData' in dEvalData and 'iUni_iMaxErrFreq' in dUniqueCntr:
        iUni_iMaxErrFreq = dUniqueCntr['iUni_iMaxErrFreq']
    else:
        iUni_iMaxErrFreq = float(np.inf)
    # ---------------------------------------------------

    # ---------------------------------------------------
    #  Minimum distance data

    # Is minimum distance evaluation on, and is there a user setting
    # for the max minimum distance error?
    if 'dMinDData' in dEvalData and 'iUni_iMaxErrMinD' in dUniqueCntr:
        iUni_iMaxErrMinD = dUniqueCntr['iUni_iMaxErrMinD']
    else:
        iUni_iMaxErrMinD = float(np.inf)
    # ---------------------------------------------------

    # ---------------------------------------------------
    #  Maximum distance data

    # Is maximum distance evaluation on, and is there a user setting
    # for the max maximum distance error?
    if 'dMaxDData' in dEvalData and 'iUni_iMaxErrMaxD' in dUniqueCntr:
        iUni_iMaxErrMaxD = dUniqueCntr['iUni_iMaxErrMaxD']
    else:
        iUni_iMaxErrMaxD = float(np.inf)
    # ---------------------------------------------------

    # =================================================================
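    # - - - - - - - - - - - - - - - - - - - - - - - - -
    # For illustration only: the user settings dictionary for this counter
    # (dUniqueCntr) is assumed to optionally contain the three error limits
    # read above; the values below are hypothetical:
    #
    #     dUniqueCntr = {
    #         'iUni_iMaxErrFreq': 0.05,   # max allowed frequency error
    #         'iUni_iMaxErrMinD': 0.10,   # max allowed min. distance error
    #         'iUni_iMaxErrMaxD': 0.10    # max allowed max. distance error
    #     }
    #
    # A limit which is not present defaults to +inf, which switches the
    # corresponding check off.
    # - - - - - - - - - - - - - - - - - - - - - - - - -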

    # =================================================================
    # Get the parameters of patterns storing
    (strUniqPattsDirName, dSysSettings) = get_storing_settings(dSysSettings)

    # =================================================================

    # =================================================================
    # Get the needed data

    mPatternsGrid = dPatt['mPatternsGrid']  # The matrix with sampling patterns

    # - - - - - - - - - - - - - - - - - - - - - - - - -

    dMemory = dSysSettings['dMemory']    # Memory configuration dictionary

    nPattPack = dMemory['nPattPack']     # The number of patterns in a pack

    # - - - - - - - - - - - - - - - - - - - - - - - - -

    # Get the dictionary with internal data of the unique patterns counter
    dUniqData = dEvalData['dUniqData']

    # Get the vector with the number of unique patterns
    vUniqPatt = dUniqData['vUniqPatt']

    # Get the index of the file with unique patterns
    iFil = dUniqData['iFil']

    # Get the number of unique patterns
    nUnique = dUniqData['nUnique']

    # - - - - - - - - - - - - - - - - - - - - - - - - -

    # System settings:
    inxPS = dSysSettings['inxPS']       # Current index of patterns settings
                                        # (patterns type)
    # Get the index of patterns pack
    inxPP = dSysSettings['inxPP']

    # - - - - - - - - - - - - - - - - - - - - - - - - -
    # Get the data needed for maximum errors

    # Get the vector with frequency errors
    # (if needed)
    if not np.isinf(iUni_iMaxErrFreq):

        dFreqData = dEvalData['dFreqData']   # The dictionary with frequency
                                             # stability evaluation data

        vFreqErr = dFreqData['vFreqErr']     # Vector with frequency errors
                                             # for the current patterns pack

    # Get the vector with minimum distance errors
    # (if needed)
    if not np.isinf(iUni_iMaxErrMinD):

        dMinDData = dEvalData['dMinDData']   # Dictionary with data for
                                             # minimum distance evaluation

        vMinDErr = dMinDData['vMinDErr']    # Vector with minimum distances
                                            # errors for the current
                                            # patterns pack

    # Get the vector with maximum distance errors
    # (if needed)
    if not np.isinf(iUni_iMaxErrMaxD):

        # Get the vector with maximum distance errors
        dMaxDData = dEvalData['dMaxDData']  # Dictionary with data for
                                            # maximum distance evaluation

        vMaxDErr = dMaxDData['vMaxDErr']    # Vector with maximum distance
                                            # errors for the current patterns
                                            # pack
    # - - - - - - - - - - - - - - - - - - - - - - - - -

    # =================================================================

    # =================================================================
    # Get only patterns without errors

    # Remove those patterns from 'mPatternsGrid' which do not fulfill
    # the requirements for maximum errors
    # vCorr - vector with indices of correct patterns

    # Remove the incorrect patterns (if needed)
    if not(np.isinf(iUni_iMaxErrFreq) and
           np.isinf(iUni_iMaxErrMinD) and
           np.isinf(iUni_iMaxErrMaxD)):

        # Reset the vector with indices of correct patterns
        vCorr = np.arange(nPattPack, dtype=int)

        # Loop over all sampling patterns
        for inxP in range(0, nPattPack):

            # Reset the correct flag to correct
            bCorr = 1

            # - - - - - - - - - - - - - - - - - - - - - - - - -
            # Check if the current pattern fulfills requirements for max errors
            if not np.isinf(iUni_iMaxErrFreq):
                iEf = vFreqErr[inxP]        # Frequency error
                if iEf > iUni_iMaxErrFreq:
                    bCorr = 0               # Pattern does not fulfill
                                            # the requirements

            if not np.isinf(iUni_iMaxErrMinD):
                iMinDE = vMinDErr[inxP]     # Minimum distance error
                if iMinDE > iUni_iMaxErrMinD:
                    bCorr = 0               # Pattern does not fulfill the
                                            # requirements

            if not np.isinf(iUni_iMaxErrMaxD):
                iMaxDE = vMaxDErr[inxP]     # Maximum distance error
                if iMaxDE > iUni_iMaxErrMaxD:
                    bCorr = 0               # Pattern does not fulfill the
                                            # requirements

            # Mark the pattern as incorrect, if it was found incorrect
            if bCorr == 0:
                vCorr[inxP] = -1

        # Remove the unwanted patterns
        vCorr = vCorr[vCorr != -1]
        mPatternsGrid = mPatternsGrid[vCorr, :]

    # =================================================================
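    # - - - - - - - - - - - - - - - - - - - - - - - - -
    # Note: the filtering loop above could equivalently be written with
    # boolean masks (a minimal sketch, assuming the error vectors are
    # numpy vectors with one entry per pattern in the pack):
    #
    #     vMask = np.ones(nPattPack, dtype=bool)
    #     if not np.isinf(iUni_iMaxErrFreq):
    #         vMask &= (vFreqErr <= iUni_iMaxErrFreq)
    #     if not np.isinf(iUni_iMaxErrMinD):
    #         vMask &= (vMinDErr <= iUni_iMaxErrMinD)
    #     if not np.isinf(iUni_iMaxErrMaxD):
    #         vMask &= (vMaxDErr <= iUni_iMaxErrMaxD)
    #     mPatternsGrid = mPatternsGrid[vMask, :]
    # - - - - - - - - - - - - - - - - - - - - - - - - -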

    # =================================================================
    # Compute the number of unique patterns

    # Remove the non-unique patterns
    (mPattUniqPack, _) = _uniq_rows(mPatternsGrid)

    # Calculate the number of unique patterns left in the current (new)
    # pack of patterns
    nPattUniqPack = mPattUniqPack.shape[0]
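    # - - - - - - - - - - - - - - - - - - - - - - - - -
    # For reference: '_uniq_rows' is assumed to return the matrix with
    # duplicate rows removed together with the indices of the first
    # occurrence of every unique row (in the original row order).
    # A minimal sketch of such a helper, given only as an assumption:
    #
    #     def _uniq_rows(mM):
    #         lSeen = set()    # rows seen so far
    #         lInx = []        # indices of first occurrences
    #         for inxRow in range(mM.shape[0]):
    #             tRow = tuple(mM[inxRow, :])
    #             if tRow not in lSeen:
    #                 lSeen.add(tRow)
    #                 lInx.append(inxRow)
    #         vInx = np.array(lInx, dtype=int)
    #         return (mM[vInx, :], vInx)
    # - - - - - - - - - - - - - - - - - - - - - - - - -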

    # Reset the auxiliary index for file with unique patterns
    iFil_ = 1

    # ===========================================================
    # The loop over all previous files with unique patterns starts
    # here
    #
    # The loop ends if all the previous files were processed, or
    # the number of unique patterns in the current pack drops to 0

    # Reset the progress service
    strSpaceTab = '                                          '
    dProg = _loop_progress.reset(2, 2, strSpaceTab, iFil)

    nProgLines = 0              # Reset the number of lines printed by
                                # the progress function

    # Loop starts here!!!
    while (iFil_ <= iFil) and nPattUniqPack > 0:

        # -------------------------------------------------------
        # Read the current file with patterns
        #

        # Construct the current name of the file with unique patterns
        strPattFileName = '%s/patterns_unique%d_%d.dat' % \
            (strUniqPattsDirName, inxPS, iFil_)

        # Read the current file with unique patterns
        patts_file = open(strPattFileName, 'rb')
        mPattUniqFile = cPickle.load(patts_file)
        patts_file.close()

        # Calculate the number of unique patterns in the file
        nPattUniqFile = mPattUniqFile.shape[0]

        # -------------------------------------------------------

        # -------------------------------------------------------
        # Remove from the currently generated pack of patterns those
        # patterns which are already in the currently processed file
        # with unique patterns

        # Construct a combined package
        #
        # The pack of unique patterns from the file is on the top,
        # the new pack with unique patterns is on the bottom
        #
        mPattComb = _concatenate_arrays(mPattUniqFile, mPattUniqPack)

        # Find the unique patterns
        (_, vInxUniqComb_) = _uniq_rows(mPattComb)

        # Get the indices of unique patterns in the currently generated
        # pack of patterns
        # (Get indices of those unique rows which occur for the first
        #  time in the new pack of patterns)
        vInxUniq = vInxUniqComb_[vInxUniqComb_ >= nPattUniqFile]

        # Calculate the new number of unique patterns in the current pack
        nPattUniqPackNew = vInxUniq.shape[0]

        # -------------------------------------------------------
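        # - - - - - - - - - - - - - - - - - - - - - - - - -
        # Worked example (hypothetical numbers): if the file holds
        # nPattUniqFile = 3 patterns, then rows 0..2 of 'mPattComb' come
        # from the file and rows 3 and above come from the new pack.
        # '_uniq_rows' keeps the FIRST occurrence of every row, so a
        # pattern from the new pack which already exists in the file is
        # reported with an index < 3 and is dropped by the filter
        # 'vInxUniqComb_ >= nPattUniqFile'. Only patterns which are new
        # with respect to this file survive in 'vInxUniq'.
        # - - - - - - - - - - - - - - - - - - - - - - - - -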

        # Update the matrix with current unique patterns from the pack
        #
        if nPattUniqPackNew < nPattUniqPack:
            mPattUniqPack = mPattComb[vInxUniq, :]
            nPattUniqPack = nPattUniqPackNew

        # -------------------------------------------------------

        # Report progress
        (dProg, nProgLines) = _loop_progress.service(dProg, iFil_, iFil)

        # Move index of the current file forward
        iFil_ = iFil_ + 1

    # Report 100% progress
    (dProg, _) = _loop_progress.service(dProg, iFil, iFil)

    # ===========================================================

    # Update the total number of unique patterns
    nUnique = nUnique + nPattUniqPack

    # Store the number of unique patterns
    vUniqPatt[inxPP] = nUnique
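    # (vUniqPatt thus holds, for every processed pack index, the cumulative
    #  number of unique patterns found so far; entries of packs which were
    #  not processed yet stay NaN)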

    # =================================================================

    # =================================================================
    # Store the found unique patterns in the files, if there are any
    if inxPP == 0:

        #
        # This set of code runs for the first pack of patterns ONLY (!)
        #

        # Set the index of unique files
        iFil = 1

        # Construct the current name of the file with unique patterns
        strPattFileName = '%s/patterns_unique%d_%d.dat' % \
            (strUniqPattsDirName, inxPS, iFil)

        # Store all the unique patterns
        patts_file = open(strPattFileName, 'wb')
        cPickle.dump(mPattUniqPack, patts_file)
        patts_file.close()
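        # - - - - - - - - - - - - - - - - - - - - - - - - -
        # For example (illustrative values only): with inxPS = 2 and
        # iFil = 1 the patterns are written to
        # '<strUniqPattsDirName>/patterns_unique2_1.dat'.
        # - - - - - - - - - - - - - - - - - - - - - - - - -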

        #
        # -------------------------------------------------------------
        #

    else:

        # Are there any unique patterns left?
        if nPattUniqPack > 0:

            # ---------------------------------------------------------

            #
            # First part of saving patterns: fill up the last file with
            # patterns

            # Calculate the number of patterns which could be stored in the
            # last file with unique patterns
            # (last file: the file with the highest number)
            nLeftSpaceFile = nPattPack - nPattUniqFile

            # Calculate the number of patterns to be put into the file
            nPatts2File = min(nLeftSpaceFile, nPattUniqPack)

            # Fill up the file, if something should be put to a file
            if nPatts2File > 0:

                # Construct a matrix to be put into file
                mPatt2File = \
                    _concatenate_arrays(mPattUniqFile,
                                        mPattUniqPack[range(nPatts2File), :])

                # Store the unique patterns to be put into the file
                patts_file = open(strPattFileName, 'wb')
                cPickle.dump(mPatt2File, patts_file)
                patts_file.close()

            # Calculate the number of unique patterns left in the pack
            nPattUniqPackLeft = nPattUniqPack - nPatts2File
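            # - - - - - - - - - - - - - - - - - - - - - - - - -
            # Worked example (hypothetical numbers): with nPattPack = 1000,
            # nPattUniqFile = 800 and nPattUniqPack = 350:
            #
            #     nLeftSpaceFile    = 1000 - 800    = 200
            #     nPatts2File       = min(200, 350) = 200  -> appended to
            #                                                 the last file
            #     nPattUniqPackLeft = 350 - 200     = 150  -> written to a
            #                                                 new file below
            # - - - - - - - - - - - - - - - - - - - - - - - - -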

            # ----------------------------------------------------------

            #
            # Second part of saving patterns: Save the rest of patterns
            #
            # Are there any unique patterns left?
            if nPattUniqPackLeft > 0:

                # Move the file index forward
                iFil = iFil + 1

                # Construct the current name of the file with unique patterns
                strPattFileName = '%s/patterns_unique%d_%d.dat' \
                    % (strUniqPattsDirName, inxPS, iFil)

                # Construct a matrix to be put into file
                mPatt2File = \
                    mPattUniqPack[range(nPatts2File, nPattUniqPack), :]

                # Store the unique patterns to be put into the file
                patts_file = open(strPattFileName, 'wb')
                cPickle.dump(mPatt2File, patts_file)
                patts_file.close()

            # =========================================================

    # =================================================================
    # Store the data

    # Store the vector with the number of unique patterns
    dUniqData['vUniqPatt'] = vUniqPatt

    # The index of the file with unique patterns
    dUniqData['iFil'] = iFil

    # The number of unique patterns
    dUniqData['nUnique'] = nUnique

    # -----------------------------------------------------------------

    # Store the dictionary with internal data of the unique patterns counter
    # in the dictionary with internal data of evaluation functions and
    # counters
    dEvalData['dUniqData'] = dUniqData

    # -----------------------------------------------------------------
    # -----------------------------------------------------------------

    # Store the current total number of unique patterns
    # in the dictionary with patterns evaluation results (dPattEval)
    dPattEval['nUnique'] = nUnique

    # Store the vector with the number of unique patterns
    # in the dictionary with patterns evaluation results (dPattEval)
    dPattEval['vUniqPatt'] = vUniqPatt

    # =================================================================

    # =================================================================
    # Report to the console, if needed
    if bInfo == 1:
        if nProgLines == 0:
            stdout.write('     ')

        strMessage1 = 'done. The number of unique patterns:'

        print('%s %d' % (strMessage1, nUnique))

    # =================================================================

    # =================================================================
    return (dEvalData, dPattEval, dSysSettings)
def main(dSysSettings, dEvalData, dPattEval, dPatt):

    # =================================================================
    # Check if the counter of unique patterns should be run

    # Check if the user settings for the unique patterns counter exist
    bCntrOn = _check_settings(dSysSettings)
    if not bCntrOn:
        return (dEvalData, dPattEval, dSysSettings)

    # =================================================================

    # =================================================================
    # Report progress

    # Get the verbose settings
    (bInfo, dSysSettings) = _verbose.get_settings(dSysSettings)

    # - - - - - - - - - - - - - - - - - - - - - - - - -

    # Report to the console, if needed
    if bInfo == 1:

        # Print to the console
        stdout.write('CNTR: Unique patterns (total) counter...  ')

    # =================================================================

    # =================================================================
    # Check if the dictionary with unique patterns counter data must be
    # created
    if 'dUniqTotData' not in dEvalData:

        # Dictionary does not exist, must be created:

        # -----------------------------------------------------------------
        # Get the number of patterns in a pack (size of a pattern pack)

        dMemory = dSysSettings['dMemory']    # Memory configuration dictionary

        nMaxPacks = dMemory['nMaxPacks']     # The maximum number of packs
                                             # with patterns

        # -----------------------------------------------------------------

        # Create the dictionary with unique patterns counter data
        dUniqTotData = {}

        # Reset the vector with the number of unique patterns
        dUniqTotData['vUniqPattTot'] = np.nan*np.zeros(nMaxPacks)

        # Reset the index of the file with unique patterns
        dUniqTotData['iFil'] = 0

        # Reset the number of unique patterns
        dUniqTotData['nUniqueTot'] = 0

        # - - - - - - - - - - - - - - - - - - - - - - - - -
        # - - - - - - - - - - - - - - - - - - - - - - - - -

        # Store the dictionary with unique patterns counter data in
        # the dictionary with internal data of evaluation functions
        # and counters
        dEvalData['dUniqTotData'] = dUniqTotData

    # =================================================================

    # =================================================================
    # Get the parameters of patterns storing
    # (check if the directory for patterns storing was created)
    (strUniqPattsDirName, dSysSettings) = get_storing_settings(dSysSettings)

    # =================================================================

    # =================================================================
    # Get the needed data

    mPatternsGrid = dPatt['mPatternsGrid']  # The matrix with sampling patterns

    # - - - - - - - - - - - - - - - - - - - - - - - - -

    dMemory = dSysSettings['dMemory']    # Memory configuration dictionary

    nPattPack = dMemory['nPattPack']     # The number of patterns in a pack

    # - - - - - - - - - - - - - - - - - - - - - - - - -

    # Get the dictionary with unique patterns (total) counter data
    dUniqTotData = dEvalData['dUniqTotData']

    # Get the vector with the number of unique patterns (total)
    vUniqPattTot = dUniqTotData['vUniqPattTot']

    # Get the index of the file with unique patterns
    iFil = dUniqTotData['iFil']

    # Get the number of unique patterns
    nUnique = dUniqTotData['nUniqueTot']

    # - - - - - - - - - - - - - - - - - - - - - - - - -

    # System settings:
    inxPS = dSysSettings['inxPS']  # Current index of patterns settings
                                   # (patterns type)
    # Get the index of patterns pack
    inxPP = dSysSettings['inxPP']

    # =================================================================

    # =================================================================
    # Compute the number of unique patterns

    # Remove the non-unique patterns
    (mPattUniqPack, _) = _uniq_rows(mPatternsGrid)

    # Calculate the number of unique patterns left in the current (new)
    # pack of patterns
    nPattUniqPack = mPattUniqPack.shape[0]

    # Reset the auxiliary index for file with unique patterns
    iFil_ = 1

    # ===========================================================
    # The loop over all previous files with unique patterns starts here
    #
    # The loop ends if all the previous files were processed, or the number of
    # unique patterns in the current pack drops to 0

    # Reset the progress service
    strSpaceTab = '                                          '
    dProg = _loop_progress.reset(2, 2, strSpaceTab, iFil)

    nProgLines = 0  # Reset the number of lines printed by the progress
                    # function

    # Loop starts here
    while (iFil_ <= iFil) and nPattUniqPack > 0:

        # -------------------------------------------------------
        # Read the current file with patterns
        #

        # Construct the current name of the file with unique patterns
        strPattFileName = '%s/patterns_uniqueTot%d_%d.dat' % \
                          (strUniqPattsDirName, inxPS, iFil_)

        # Read the current file with unique patterns
        patts_file = open(strPattFileName, 'rb')
        mPattUniqFile = cPickle.load(patts_file)
        patts_file.close()

        # Calculate the number of unique patterns in the file
        nPattUniqFile = mPattUniqFile.shape[0]

        # -------------------------------------------------------

        # -------------------------------------------------------
        # Remove from the currently generated pack of patterns those
        # patterns which are already in the currently processed file
        # with unique patterns

        # Construct a combined package
        mPattComb = _concatenate_arrays(mPattUniqFile, mPattUniqPack)

        # Find the unique patterns
        (_, vInxUniqComb_) = _uniq_rows(mPattComb)

        # Get the indices of unique patterns in the currently generated
        # pack of patterns
        vInxUniq = vInxUniqComb_[vInxUniqComb_ >= nPattUniqFile]

        # Calculate the new number of unique patterns in the current pack
        nPattUniqPackNew = vInxUniq.shape[0]

        # -------------------------------------------------------

        # Update the matrix with current unique patterns from the pack
        #
        if nPattUniqPackNew < nPattUniqPack:
            mPattUniqPack = mPattComb[vInxUniq, :]
            nPattUniqPack = nPattUniqPackNew

        # -------------------------------------------------------

        # Report progress
        (dProg, nProgLines) = _loop_progress.service(dProg, iFil_, iFil)

        # Move index of the current file forward
        iFil_ = iFil_ + 1

    # Report 100% progress
    (dProg, _) = _loop_progress.service(dProg, iFil, iFil)

    # ===========================================================

    # Update the total number of unique patterns
    nUnique = nUnique + nPattUniqPack

    # Store the number of unique patterns
    vUniqPattTot[inxPP] = nUnique

    # =================================================================

    # =================================================================
    # Store the found unique patterns in the files, if there are any
    if inxPP == 0:

        #
        # This set of code runs for the first pack of patterns ONLY (!)
        #

        # Set the index of unique files
        iFil = 1

        # Construct the current name of the file with unique patterns
        strPattFileName = '%s/patterns_uniqueTot%d_%d.dat'  \
            % (strUniqPattsDirName, inxPS, iFil)

        # Store all the unique patterns
        patts_file = open(strPattFileName, 'wb')
        cPickle.dump(mPattUniqPack, patts_file)
        patts_file.close()

        #
        # -------------------------------------------------------------
        #

    else:

        # Are there any unique patterns left?
        if nPattUniqPack > 0:

            # ---------------------------------------------------------

            #
            # First part of saving patterns: fill up the last file with
            # patterns

            # Calculate the number of patterns which could be stored in the
            # last file with unique patterns
            # (last file: the file with the highest number)
            nLeftSpaceFile = nPattPack - nPattUniqFile

            # Calculate the number of patterns to be put into the file
            nPatts2File = min(nLeftSpaceFile, nPattUniqPack)

            # Fill up the file, if something should be put to a file
            if nPatts2File > 0:

                # Construct a matrix to be put into file
                mPatt2File = \
                    _concatenate_arrays(mPattUniqFile,
                                        mPattUniqPack[range(nPatts2File), :])

                # Store the unique patterns to be put into the file
                patts_file = open(strPattFileName, 'wb')
                cPickle.dump(mPatt2File, patts_file)
                patts_file.close()

            # Calculate the number of unique patterns left in the pack
            nPattUniqPackLeft = nPattUniqPack - nPatts2File

            # ---------------------------------------------------------

            #
            # Second part of saving patterns: Save the rest of patterns
            #
            # Are there any unique patterns left?
            if nPattUniqPackLeft > 0:

                # Move the file index forward
                iFil = iFil + 1

                # Construct the current name of the file with unique patterns
                strPattFileName = '%s/patterns_uniqueTot%d_%d.dat' % \
                    (strUniqPattsDirName, inxPS, iFil)

                # Construct a matrix to be put into file
                mPatt2File = \
                    mPattUniqPack[range(nPatts2File, nPattUniqPack), :]

                # Store the unique patterns to be put into the file
                patts_file = open(strPattFileName, 'wb')
                cPickle.dump(mPatt2File, patts_file)
                patts_file.close()

            # =========================================================

    # =================================================================
    # Store the data

    # Store the vector with the number of unique patterns
    dUniqTotData['vUniqPattTot'] = vUniqPattTot

    # The index of the file with unique patterns
    dUniqTotData['iFil'] = iFil

    # The number of unique patterns
    dUniqTotData['nUniqueTot'] = nUnique

    # -----------------------------------------------------------------

    # Store the dictionary with data for unique patterns counter in the
    # dictionary with internal data of evaluation functions and counters
    dEvalData['dUniqTotData'] = dUniqTotData

    # -----------------------------------------------------------------
    # -----------------------------------------------------------------

    # Store the current total number of unique patterns
    # in the dictionary with patterns evaluation results (dPattEval)
    dPattEval['nUniqueTot'] = nUnique

    # Store the vector with the number of unique patterns
    # in the dictionary with patterns evaluation results (dPattEval)
    dPattEval['vUniqPattTot'] = vUniqPattTot

    # =================================================================

    # =================================================================
    # Report to the console, if needed
    if bInfo == 1:
        if nProgLines == 0:
            stdout.write('     ')

        strMessage = 'done. The number of unique patterns (total): %d\n' % \
            (nUnique)
        stdout.write(strMessage)

    # =================================================================

    # =================================================================
    return (dEvalData, dPattEval, dSysSettings)