def main(parser):
    ''' Main routine of the GAMMA orientation analysis of a 3D stack:
    reads the parameters and the stack from the command-line arguments,
    analyzes the volume block by block, and writes results to disk. '''

    # ====== 0 ====== Initial operations
    # TODO: extract the 'verbose' flag from the command-line arguments
    verbose = False

    # read args from console
    args = parser.parse_args()
    source_path = manage_path_argument(args.source_folder)

    base_path = os.path.dirname(os.path.dirname(source_path))
    stack_name = os.path.basename(source_path)

    # define the .txt file paths
    txt_parameters_path = os.path.join(base_path, 'parameters.txt')
    txt_results_path = os.path.join(base_path, 'GAMMA_orientation_results.txt')

    Results_filename = 'orientation_Results.npy'
    Results_filepath = os.path.join(base_path, Results_filename)

    # print to screen and write the init message to the results .txt file
    init_message = [
        ' *****************   GAMMA - Orientation analysis of 3D stack   *****************\n',
        ' Source from path : {}'.format(source_path),
        ' Base path : {}'.format(base_path),
        ' Stack : {}'.format(stack_name),
    ]

    with open(txt_results_path, 'w') as f:
        for line in init_message:
            print(line)
            f.write(line + '\n')

    # reads parameters
    parameters = extract_parameters(txt_parameters_path)

    # analysis block dimension in z-axis
    num_of_slices_P = parameters['num_of_slices_P']

    # Parameters of Acquisition System:
    res_z = parameters['res_z']
    res_xy = parameters['res_xy']
    resolution_factor = res_z / res_xy
    block_side = row_P = col_P = int(num_of_slices_P * resolution_factor)
    shape_P = np.array((row_P, col_P, num_of_slices_P)).astype(np.int32)
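    # A worked example with hypothetical values (not from parameters.txt):
    # res_z = 2.0 um and res_xy = 0.5 um give resolution_factor = 4.0;
    # with num_of_slices_P = 8, block_side = row_P = col_P = 32, so
    # shape_P = (32, 32, 8): roughly isotropic in real-world units
    # (32 * 0.5 um = 16 um in xy, 8 * 2.0 um = 16 um in z).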
    """ =====================================================================================
    __________________________  -1-  OPEN STACK _______________________________________________"""

    loading_mess = list()
    loading_mess.append(
        ' ***** Start loading the stack; this may take a few minutes... ')

    # extract data (OLD METHOD)
    volume = load_tif_data(source_path)
    if len(volume.shape) == 2:
        volume = np.expand_dims(volume, axis=2)  # add the zeta axis

    # extract stack (NEW METHOD)-------------------
    # infile = InputFile(source_path) # read virtual file (shape, dtype, etc.)
    # volume = infile.whole() # load real images in RAM
    # # move axes from (z, y, x) -> to (r, c, z)=(y, x, z) [S.R. ZetaStitcher -> Strip Analysis]
    # volume = np.moveaxis(volume, 0, -1)
    # ---------------------------------------------

    loading_mess.append(' - Volume shape : {}'.format(volume.shape))
    with open(txt_results_path, 'a') as f:
        for m in loading_mess:
            print(m)
            f.write(m + '\n')

    # calculate dimension
    shape_V = np.array(volume.shape)
    """ =====================================================================================
    ________________________  -2-   CYCLE FOR BLOCKS EXTRACTION and ANALYSIS __________________"""
    t_start = time.time()

    # create empty Result matrix
    R, shape_R = create_R(shape_V, shape_P)

    # create 3D filter
    mask = create_3D_filter(block_side, res_xy, parameters['sarc_length'])

    count = 1  # count iteration
    total_iter = np.prod(shape_R)
    print(
        '\n \n ***** Start iteration of analysis, expected iterations : {} \n'.
        format(total_iter))

    with open(txt_results_path, 'a') as f:
        for z in range(shape_R[2]):
            for r in range(shape_R[0]):
                for c in range(shape_R[1]):
                    # initialize list of string lines
                    lines = []

                    start_coord = create_coord_by_iter(r, c, z, shape_P)
                    slice_coord = create_slice_coordinate(start_coord, shape_P)
                    if verbose: lines.append('\n \n')
                    lines.append(
                        '- iter: {} - init_coord : {} - on total: {}'.format(
                            count, start_coord, total_iter))

                    # save init info in R
                    R[r, c, z]['id_block'] = count
                    R[r, c, z]['init_coord'] = start_coord

                    # extract parallelepiped
                    parall = volume[slice_coord]

                    # check dimension (if iteration is on border of volume, add zero_pad)
                    parall = pad_dimension(parall, shape_P)

                    if np.max(parall) != 0:
                        parall = (normalize(parall)).astype(
                            np.float32)  # FFT analysis works with floats

                        # analysis of parallelepiped extracted
                        there_is_cell, there_is_freq, results = block_analysis(
                            parall, shape_P, parameters, block_side, mask,
                            verbose, lines)
                        # save info in R[r, c, z]
                        if there_is_cell: R[r, c, z]['cell_info'] = True
                        if there_is_freq: R[r, c, z]['freq_info'] = True

                        # save results in R
                        for key in results.keys():
                            R[r, c, z][key] = results[key]

                    else:
                        if verbose: lines.append('   block rejected')

                    for l in lines:
                        print(l)
                    count += 1

    # execution time
    (h, m, s) = seconds_to_min_sec(time.time() - t_start)
    print('\n Iterations ended successfully \n')
    """ =====================================================================================
        ________________________  -3-   RESULTS ANALYSIS   __________________________________"""

    post_proc_mess = list()

    # count accepted and rejected blocks
    total_blocks = count - 1  # count was initialized to 1 and incremented once per block
    block_with_cell = np.count_nonzero(R['cell_info'])
    block_with_peak = np.count_nonzero(R['freq_info'])
    p_rejec_cell = 100 * (1 - block_with_cell / total_blocks)
    p_rejec_freq_tot = 100 * (1 - block_with_peak / total_blocks)
    p_rejec_freq = 100 * (1 - block_with_peak / block_with_cell)

    post_proc_mess.append(
        '\n ***** End of iterations, time of execution: {0:2d}h {1:2d}m {2:2d}s \n'
        .format(int(h), int(m), int(s)))
    post_proc_mess.append('\n - Expected iterations : {}'.format(total_iter))
    post_proc_mess.append(' - Total iterations : {}'.format(total_blocks))
    post_proc_mess.append(
        '\n - block with cell : {}, rejected from total: {} ({}%)'.format(
            block_with_cell, total_blocks - block_with_cell, p_rejec_cell))
    post_proc_mess.append(
        ' - block with freq. info : {}'
        '\n    rejected from total: {} ({}%)'
        '\n    rejected from block with cell: {} ({}%)'.format(
            block_with_peak, total_blocks - block_with_peak, p_rejec_freq_tot,
            block_with_cell - block_with_peak, p_rejec_freq))
    with open(txt_results_path, 'a') as f:
        for m in post_proc_mess:
            print(m)
            f.write(m + '\n')

    post_proc_mess = list()

    # threshold results on the frequency validation parameter and save the matrix
    mess = '\n \n *** Analysis of Results : remove blocks with low frequency reliability \n'
    post_proc_mess.append(mess)
    print(mess)

    # - 1 normalization of psd_ratio values
    R = parameter_normalizer(R, 'psd_ratio')
    mess = '- 1 - Normalization on \'psd_ratio\': complete'
    post_proc_mess.append(mess)
    print(mess)

    # - 2 thresholding on psd_ratio values
    R, before, after = threshold_par(R, parameters, 'psd_ratio')
    mess = '- 2 - First thresholding based on PSD Information: selected {} blocks from {}'.format(
        after, before)
    post_proc_mess.append(mess)
    print(mess)

    # - 3 outlier removal based on orientation and psd_ratio values - NOT EXECUTED
    mess = '-*** NO Outlier Removal based on PSD Information.'
    post_proc_mess.append(mess)
    print(mess)

    # save Result matrix
    np.save(Results_filepath, R)

    # - 4 Estimate and write local disorder inside Result Matrix
    R, shape_LD, isolated_value = estimate_local_disorder(
        R, parameters, resolution_factor)
    mess = '- 4 - Local Disorder estimated inside the result matrix, with grain (r, c, z): ({}, {}, {}) ' \
           'and isolated points set to local_disorder = {}'\
            .format(shape_LD[0], shape_LD[1], shape_LD[2], isolated_value)
    post_proc_mess.append(mess)
    print(mess)

    # if everything works, save only one version of R
    Results_filename = 'orientation_Results_after_disorder.npy'
    Results_filepath = os.path.join(base_path, Results_filename)
    np.save(Results_filepath, R)

    with open(txt_results_path, 'a') as f:
        for m in post_proc_mess:
            f.write(m + '\n')
    del post_proc_mess
    """ =====================================================================================
            ________________________  -4-   STATISTICS   __________________________________"""

    stat = statistics(R, parameters)
    result_mess = list()
    result_mess.append(
        '\n \n *** Results of statistical analysis on accepted points: \n')
    result_mess.append(' - {0} : {1:.3f} um^(-1)'.format(
        'Mean module', stat['Mean Module']))
    result_mess.append(' - {0} : {1:.3f} um'.format('Mean Period',
                                                    stat['Mean Period']))
    result_mess.append(' - {0} : {1:.3f} % '.format('Alignment',
                                                    100 * stat['Alignment']))
    result_mess.append(' - {0} : {1:.3f} % '.format(
        'XZ Dispersion (area_ratio)', 100 * stat['area_ratio']))
    result_mess.append(' - {0} : {1:.3f} % '.format(
        'XZ Dispersion (sum dev.std)', 100 * stat['sum_std']))
    result_mess.append(
        ' \n \n ***************************** END GAMMA - orientation_analysis.py ********************'
        '\n \n \n \n ')

    with open(txt_results_path, 'a') as f:
        for l in result_mess:
            print(l)
            f.write(l + '\n')
def estimate_local_disorder(R, parameters, resolution_factor):
    '''
    :param R: result matrix 'R'
    :param parameters: dictionary of parameters read from parameters.txt
    :param resolution_factor: ratio between the z and xy pixel sizes
    :return: result matrix 'R' with a 'local_disorder' value saved inside every valid cell (freq_info == True)
    :return: shape_G (dimension of the grain of the local disorder analysis)
    :return: isolated_value (value assigned to isolated points)'''
    """ Calculate and save in 'local_disorder' a float value in [0, 1] if valid, or -1 if not valid:
    0 : maximum order (all neighbours have the same direction)
    1 : maximum disorder (neighbours with orthogonal directions)
    -1: too many neighbours are invalid (freq_info == False) -> the block is isolated (not valid)

    local_disorder := modulus of the (3, 1) array of standard deviations (for every dimension (r, c, z),
    the std. dev. of the peak components of the neighbours)

    The sub-block (grain of analysis) used for the std. dev. estimation has shape = shape_G = (Ng_xy, Ng_xy, Ng_z),
    with Ng_xy and Ng_z read from the parameters.txt file.

    Conditions:
    1) if Ng_xy or Ng_z < 2, the function uses 2.
    2) if a sub-block does not contain more valid peaks than the 'neighbours_lim' parameter, local_disorder is set to -1 (isolated block);
       for visualization, these blocks are set to the maximum local_disorder value."""

    neighbours_lim = parameters[
        'neighbours_lim'] if parameters['neighbours_lim'] > 3 else 3

    # extract analysis subblock dimension from parameters
    Ng_z = parameters['local_disorder_z_side'] if parameters[
        'local_disorder_z_side'] > 2 else 2
    Ng_xy = parameters['local_disorder_xy_side']

    # check if value is valid
    if Ng_xy == 0:
        Ng_xy = Ng_z * resolution_factor
    elif Ng_xy < 2:
        Ng_xy = 2

    # shape of the grain of analysis
    shape_G = (int(Ng_xy), int(Ng_xy), int(Ng_z))

    # iterations along each axis
    iterations = tuple(
        np.ceil(np.array(R.shape) / np.array(shape_G)).astype(np.uint32))
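    # e.g. (hypothetical shapes): R.shape = (10, 12, 5) with shape_G = (4, 4, 2)
    # gives iterations = (ceil(10/4), ceil(12/4), ceil(5/2)) = (3, 3, 3),
    # so granes on the volume border may be smaller than shape_G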

    max_dev = 0
    min_dev = np.inf  # start from +inf so the first valid deviation initializes the minimum

    for z in range(iterations[2]):
        for r in range(iterations[0]):
            for c in range(iterations[1]):

                # grane extraction from R
                start_coord = create_coord_by_iter(r, c, z, shape_G)
                slice_coord = create_slice_coordinate(start_coord, shape_G)
                grane = R[slice_coord]

                # take only blocks with valid frequency information
                f_map = grane['freq_info']
                grane_f = grane[f_map]

                # check that grane_f has more than neighbours_lim elements (default: 3)
                if grane_f.shape[0] > neighbours_lim:

                    # extract the components of the peak vectors
                    coord = np.vstack((grane_f['quiver_comp'][:, 0, 0],
                                       grane_f['quiver_comp'][:, 0, 1],
                                       grane_f['quiver_comp'][:, 0, 2]))

                    # quadrature sum (Euclidean norm) of the standard deviations of the three components
                    dev_rcz = np.std(coord, axis=1)
                    dev_tot = np.linalg.norm(dev_rcz)

                    # save to R
                    R[r:=slice_coord][0] if False else None  # placeholder removed

                    # update the maximum and minimum values found
                    max_dev = dev_tot if dev_tot > max_dev else max_dev
                    min_dev = dev_tot if dev_tot < min_dev else min_dev

                else:
                    # assume that an isolated quiver has no chaos
                    R[slice_coord]['local_disorder'] = -1.

    # read isolated value from parameters and normalize values inside R between 0 and 1
    isolated_value = parameters['isolated']
    R = normalize_local_disorder(R, max_dev, min_dev, isolated_value)
    return R, shape_G, isolated_value
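# --- Illustration (not part of the original pipeline) ------------------------
# A minimal, standalone sketch of the disorder metric computed above: the
# Euclidean norm of the per-component standard deviations of the neighbour
# peak vectors. Function name and data are hypothetical.
def _demo_local_disorder_metric():
    import numpy as np

    # three nearly parallel peak vectors -> low disorder
    peaks = np.array([[1.00, 0.05, 0.00],
                      [0.98, 0.00, 0.02],
                      [1.00, -0.03, 0.01]])
    dev_rcz = np.std(peaks, axis=0)        # std. dev. of each component
    return float(np.linalg.norm(dev_rcz))  # quadrature sum, as in the loop above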
def estimate_local_disarray(R,
                            parameters,
                            ev_index=2,
                            _verb=True,
                            _verb_deep=False):
    ''' FUNCTION DESCRIPTION______________________________________________

    - given R and:
      . according to the spatial resolution defined in 'parameters'
      . using only the eigenvectors with index 'ev_index'

      estimate the local_disarray (in the versors cluster) over the whole input R matrix.

      Local disarray is defined as: 100 * (1 - align),
      where the local align is estimated by the modulus of the average vector of the local versors.

      Two different averages are applied:
      . arithmetic average (version 0);
      . weighted average, where every vector is weighted
        by the corresponding value of Fractional Anisotropy (version 1).

      Both versions are saved to the structured variable 'matrices_of_disarrays',
      as:
      . matrices_of_disarrays['arithmetic']
      . matrices_of_disarrays['weighted']

    - PARAMETERS
      R:            results matrix           (data type: numpy structured array)
      parameters:   dictionary of parameters (data type: dict)
      ev_index:     eigenvector index        (data type: int [0, 1 or 2])
      _verb:        verbose flag             (data type: boolean)
      _verb_deep:   deep verbose flag        (data type: boolean)

    - RETURN: disarray matrices, local fractional anisotropy matrix, shape of the disarray analysis grain, results array R

    '''

    # import spatial resolution
    res_xy = parameters['px_size_xy']
    res_z = parameters['px_size_z']
    resolution_factor = res_z / res_xy

    # estimate the spatial resolution of R
    block_side = int(parameters['roi_xy_pix'])
    num_of_slices_P = block_side / resolution_factor
    shape_P = np.array(
        (block_side, block_side, num_of_slices_P)).astype(np.int32)

    # print to screen
    if _verb:
        print('\n\n*** Estimate_local_disarray()')
        print('R settings:')
        print('> R.shape:       ', R.shape)
        print('> R[ev].shape:   ', R['ev'].shape)
        print('> Pixel size ratio (z / xy) = ', resolution_factor)
        print('> Number of slices selected in R for each ROI ({} x {}): {}'.
              format(block_side, block_side, num_of_slices_P))
        print(
            '> Size of parallelepiped in R:', shape_P,
            'pixel = [{0:2.2f} {0:2.2f} {1:2.2f}] um'.format(
                block_side * res_xy, num_of_slices_P * res_z))

    # import disarray space resolution
    Ng_z = parameters['local_disarray_z_side']
    Ng_xy = parameters['local_disarray_xy_side']
    neighbours_lim = parameters['neighbours_lim']

    # check the validity of the disarray space resolution in the image plane
    if Ng_xy == 0:
        Ng_xy = int(Ng_z * resolution_factor)
    elif Ng_xy < 2:
        Ng_xy = 2

    # print to screen
    if _verb:
        print('Disarray settings:')
        print('> Grane size in XY: ', Ng_xy)
        print('> Grane size in Z:  ', Ng_z)
        print('> neighbours_lim for disarray: ', neighbours_lim)

    # shape of the disarray analysis 'grane' (sub-volume)
    shape_G = (int(Ng_xy), int(Ng_xy), int(Ng_z))

    # estimate expected iterations along each axis
    iterations = tuple(
        np.ceil(np.array(R.shape) / np.array(shape_G)).astype(np.uint32))
    if _verb:
        print('\n\n> Expected iterations on each axis: ', iterations)
        print('\n\n> Expected total iterations       : ', np.prod(iterations))

    # define matrices including local disarray and local FA data
    # - local disarray: disarray of selected cluster of orientation versors
    # - local FA:       mean of the FA of the selected cluster of orientation versors
    matrices_of_disarray = dict()
    matrices_of_disarray[Mode.ARITH] = np.zeros(iterations).astype(np.float32)
    matrices_of_disarray[Mode.WEIGHT] = np.zeros(iterations).astype(np.float32)
    matrix_of_local_fa = np.zeros(iterations).astype(np.float32)

    # print to screen
    if _verb: print('\n *** Start elaboration...')

    # open colored session
    if _verb_deep: print(Bcolors.VERB)
    _i = 0
    for z in range(iterations[2]):
        for r in range(iterations[0]):
            for c in range(iterations[1]):
                if _verb_deep:
                    print(Bcolors.FAIL + ' *** DEBUGGING MODE ACTIVATED ***')
                    print('\n\n\n\n')
                    print(Bcolors.WARNING +
                          'iter: {0:3.0f} - (z, r, c): ({1}, {2} , {3})'.
                          format(_i, z, r, c) + Bcolors.VERB)
                else:
                    print(
                        'iter: {0:3.0f} - (z, r, c): ({1}, {2} , {3})'.format(
                            _i, z, r, c))

                # extract sub-volume ('grane') with size (Gy, Gx, Gz) from R matrix
                start_coord = create_coord_by_iter(r, c, z, shape_G)
                slice_coord = tuple(
                    create_slice_coordinate(start_coord, shape_G))
                grane = R[slice_coord]
                if _verb_deep:
                    print(' 0 - grane -> ', end='')
                    print(grane.shape)

                # N = Gy*Gx*Gz = number of orientation blocks
                # (Gy, Gx, Gz) --> (N,)
                grane_reshaped = np.reshape(grane, np.prod(grane.shape))
                if _verb_deep:
                    print(' 1 - grane_reshaped --> ', end='')
                    print(grane_reshaped.shape)

                n_valid_cells = np.count_nonzero(
                    grane_reshaped[Param.CELL_INFO])
                if _verb_deep:
                    print(' valid_cells --> ', n_valid_cells)
                    print(' valid rows  --> ', grane_reshaped[Param.CELL_INFO])
                    print(' grane_reshaped[\'cell_info\'].shape:',
                          grane_reshaped[Param.CELL_INFO].shape)

                if n_valid_cells > neighbours_lim:

                    # (N) -> (N x 3) (select the eigenvector with index 'ev_index' from the N cells available)
                    coord = grane_reshaped[Param.EV][:, :, ev_index]
                    if _verb_deep:
                        print(' 2 - coord --> ', coord.shape)
                        print(coord)

                    # extract fractional anisotropy
                    fa = grane_reshaped[Param.FA]

                    # print the norm and FA of every versor (iv = index of versor)
                    if _verb_deep:
                        for iv in range(coord.shape[0]):
                            print(iv, ':', coord[iv, :], ' --> norm: ',
                                  np.linalg.norm(coord[iv, :]), ' --> FA:   ',
                                  fa[iv])

                    # select only versors and FAs estimated from valid cells:
                    valid_coords = coord[grane_reshaped[Param.CELL_INFO]]
                    valid_fa = fa[grane_reshaped[Param.CELL_INFO]]

                    # print to screen
                    if _verb_deep:
                        print(' valid coords - ', valid_coords.shape, ' :')
                        print(valid_coords)
                        print(' valid FAs - ', valid_fa.shape, ' :')
                        print(valid_fa)

                    # order valid versors by their FA (argsort of '-valid_fa' gives descending order)
                    order = np.argsort(-valid_fa)
                    ord_coords = valid_coords[order]
                    ord_fa = valid_fa[order]

                    # take the first versor (highest FA) as the reference for moving all versors into the same half-space
                    v1 = ord_coords[0, :]

                    # move all versors into congruent directions
                    # (by checking the sign of their dot product with the first one, v1)
                    if _verb_deep:
                        print(
                            'Check if versors have congruent direction (i.e. lie in the same half-space)'
                        )

                    for iv in range(1, ord_coords.shape[0]):

                        scalar = np.dot(v1, ord_coords[iv])

                        if scalar < 0:
                            # change direction of the i-th versor
                            if _verb_deep:
                                print(ord_coords[iv], ' --->>',
                                      -ord_coords[iv])
                            ord_coords[iv] = -ord_coords[iv]

                    # print the definitive norm and FA components of each versor
                    if _verb_deep:
                        print(
                            'Definitive versor components in the same half-space:'
                        )
                        for iv in range(ord_coords.shape[0]):
                            print(iv, ':', ord_coords[iv, :], ' --> norm: ',
                                  np.linalg.norm(ord_coords[iv, :]),
                                  ' --> FA:   ', ord_fa[iv])

                    if _verb_deep:
                        print('np.average(ord_coords): \n',
                              np.average(ord_coords, axis=0))
                        print('np.average(ord_coords, weight = fa): \n',
                              np.average(ord_coords, axis=0, weights=ord_fa))

                    # compute the alignment degree as the modulus of the average vector
                    # (arithmetic, and weighted with respect to the local FA)
                    alignment = dict()
                    alignment[Mode.ARITH] = np.linalg.norm(
                        np.average(ord_coords, axis=0))
                    alignment[Mode.WEIGHT] = np.linalg.norm(
                        np.average(ord_coords, axis=0, weights=ord_fa))
                    if _verb_deep:
                        print('alignment[Mode.ARITH] : ',
                              alignment[Mode.ARITH])
                        print('alignment[Mode.WEIGHT]: ',
                              alignment[Mode.WEIGHT])

                    # compute the local_disarray degree
                    local_disarray = dict()
                    local_disarray[Mode.ARITH] = 100 * (1 -
                                                        alignment[Mode.ARITH])
                    local_disarray[
                        Mode.WEIGHT] = 100 * (1 - alignment[Mode.WEIGHT])

                    # save the disarray estimated for each grain of R
                    # for plots and statistical analysis
                    R[slice_coord][Param.LOCAL_DISARRAY] = local_disarray[
                        Mode.ARITH]
                    R[slice_coord][Param.LOCAL_DISARRAY_W] = local_disarray[
                        Mode.WEIGHT]

                    # estimate the average Fractional Anisotropy
                    # and save results to local disarray matrices
                    matrix_of_local_fa[r, c, z] = np.mean(ord_fa)
                    matrices_of_disarray[Mode.ARITH][r, c, z] = local_disarray[
                        Mode.ARITH]
                    matrices_of_disarray[Mode.WEIGHT][
                        r, c, z] = local_disarray[Mode.WEIGHT]

                    # print to screen
                    if _verb_deep:
                        print('saving... rcz:({},{},{}):'.format(r, c, z))
                        print('local_disarray[Mode.ARITH] : ',
                              local_disarray[Mode.ARITH])
                        print('local_disarray[Mode.WEIGHT]: ',
                              local_disarray[Mode.WEIGHT])
                        print('mean Fractional Anisotropy : ',
                              matrix_of_local_fa[r, c, z])

                else:
                    # assign an invalid value (-1)
                    # (assume that an isolated quiver has no disarray)
                    R[slice_coord][Param.LOCAL_DISARRAY] = -1
                    R[slice_coord][Param.LOCAL_DISARRAY_W] = -1
                    matrices_of_disarray[Mode.ARITH][r, c, z] = -1
                    matrices_of_disarray[Mode.WEIGHT][r, c, z] = -1
                    matrix_of_local_fa[r, c, z] = -1

                # end iteration
                _i += 1

    # close colored session
    print(Bcolors.ENDC)

    return matrices_of_disarray, matrix_of_local_fa, shape_G, R
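# --- Illustration (not part of the original pipeline) ------------------------
# A standalone sketch of the disarray metric used above: versors are flipped
# into a common half-space, then disarray = 100 * (1 - |average vector|), in
# both the arithmetic and the FA-weighted variants. Function name and inputs
# are hypothetical.
def _demo_disarray(versors, fa):
    import numpy as np

    order = np.argsort(-fa)                  # sort by descending FA
    v = versors[order].astype(np.float64)
    w = fa[order]
    signs = np.where(v @ v[0] < 0, -1.0, 1.0)
    v = v * signs[:, None]                   # congruent directions (same half-space)
    arith = 100 * (1 - np.linalg.norm(np.mean(v, axis=0)))
    weighted = 100 * (1 - np.linalg.norm(np.average(v, axis=0, weights=w)))
    return arith, weighted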
def estimate_local_disarry(R,
                           parameters,
                           ev_index=2,
                           _verb=True,
                           _verb_deep=False):
    res_xy = parameters['px_size_xy']
    res_z = parameters['px_size_z']
    resolution_factor = res_z / res_xy

    block_side = int(parameters['roi_xy_pix'])
    num_of_slices_P = block_side / resolution_factor
    shape_P = np.array(
        (block_side, block_side, num_of_slices_P)).astype(np.int32)

    if _verb:
        print('\n\n*** Estimate_local_disarray()')
        print('R settings:')
        print('> R.shape: ', R.shape)
        print('> R[ev].shape: ', R['ev'].shape)
        print('> Pixel size ratio (z / xy) =', resolution_factor)
        print('> Number of slices selected for each ROI in R ({} x {}): {}'.
              format(block_side, block_side, num_of_slices_P))
        print(
            '> Dimension of Parallelepiped in R:', shape_P,
            'pixel  =  [{0:2.2f} {0:2.2f} {1:2.2f}] um'.format(
                block_side * res_xy, num_of_slices_P * res_z))

    # extract analysis subblock dimension from parameters
    Ng_z = parameters['local_disarray_z_side'] if parameters[
        'local_disarray_z_side'] > 2 else 2
    Ng_xy = parameters['local_disarray_xy_side']
    neighbours_lim = parameters[
        'neighbours_lim'] if parameters['neighbours_lim'] > 3 else 3

    # check if value is valid
    if Ng_xy == 0:
        Ng_xy = Ng_z * resolution_factor
    elif Ng_xy < 2:
        Ng_xy = 2

    if _verb:
        print('Disarray settings:')
        print('> Grane side in XY: ', Ng_xy)
        print('> Grane side in Z: ', Ng_z)
        print('> neighbours_lim for disarray: ', neighbours_lim)

    # shape of the grain of analysis
    shape_G = (int(Ng_xy), int(Ng_xy), int(Ng_z))

    # iterations along each axis
    iterations = tuple(
        np.ceil(np.array(R.shape) / np.array(shape_G)).astype(np.uint32))
    if _verb: print('\n\n> Expected iterations on each axis: ', iterations)

    # define the global matrix that contains each local disarray
    matrix_of_disarray = np.zeros(iterations).astype(np.float32)

    if _verb: print('\n *** Start elaboration...')
    _i = 0

    for z in range(iterations[2]):
        for r in range(iterations[0]):
            for c in range(iterations[1]):

                print('iter: {0:3.0f} - (z, r, c): ({1}, {2} , {3})'.format(
                    _i, z, r, c))
                if _verb_deep: print('\n\n\n\n')

                # grane extraction from R
                start_coord = create_coord_by_iter(r, c, z, shape_G)
                slice_coord = create_slice_coordinate(start_coord, shape_G)
                grane = R[slice_coord]  # (Gy, Gx, Gz)
                if _verb_deep:
                    print(' 0 - grane -> ', end='')
                    print(grane.shape)

                # N = Gy*Gx*Gz = number of orientation blocks
                # (Gy, Gx, Gz) --> (N,)
                grane_reshaped = np.reshape(grane, np.prod(grane.shape))
                if _verb_deep:
                    print(' 1 - grane_reshaped -> ', end='')
                    print(grane_reshaped.shape)

                valid_cells = np.count_nonzero(grane_reshaped['cell_info'])
                if _verb_deep:
                    print('valid_cells --> ', valid_cells)
                    print(' valid rows: -> ', grane_reshaped['cell_info'])
                    print(' grane_reshaped[\'cell_info\'].shape:',
                          grane_reshaped['cell_info'].shape)

                if valid_cells > neighbours_lim:

                    # extract the selected eigenvector from all cells
                    # (N) -> (N x 3) (select the eigenvector with index 'ev_index')
                    coord = grane_reshaped['ev'][:, :, ev_index]
                    if _verb_deep:
                        print(' 2 - coord --> ', coord.shape)
                        print(coord)

                    # print the components and the norm of every vector
                    if _verb_deep:
                        for iv in range(coord.shape[0]):
                            print(iv, ':', coord[iv, :], ' --> norm:',
                                  np.linalg.norm(coord[iv, :]))

                    # extract only non-zero vectors:
                    # mask = np.all(np.abs(b) == 0, axis=1)
                    # valid_coords = coord[mask]  # valid rows
                    # the above would work, but this info is already available:
                    valid_coords = coord[grane_reshaped['cell_info']]
                    if _verb_deep:
                        print(' VALID COORDS:', valid_coords.shape)
                        print(valid_coords)

                    # take a random versor (for example, the first)
                    v1 = valid_coords[0, :]

                    # move all the vectors into the same half-space
                    # (by checking the sign of the dot product between the first versor and the others)
                    for i in range(valid_coords.shape[0]):
                        scalar = np.dot(v1, valid_coords[i])
                        if scalar < 0:
                            # change the direction of i-th versor
                            if _verb_deep:
                                print(valid_coords[i], ' --->>',
                                      -valid_coords[i])
                            valid_coords[i] = -valid_coords[i]

                    # alignment degree: modulus of the average vector
                    alignment = np.linalg.norm(np.average(valid_coords,
                                                          axis=0))
                    if _verb_deep:
                        print('alignment: ', alignment)

                    # define local_disarray degree
                    local_disarray = 100 * (1 - alignment)
                    if _verb_deep:
                        print('local_disarray: ', local_disarray)

                    # save it in each block of this portion (grain) for future statistics and plots
                    R[slice_coord]['local_disarray'] = local_disarray

                    # and save it in the matrix of local_disarray
                    if _verb_deep:
                        print('saving.. rcz ({},{},{})'.format(r, c, z))

                    matrix_of_disarray[r, c, z] = local_disarray

                else:
                    # assume that an isolated quiver has no disarray
                    R[slice_coord]['local_disarray'] = -1.
                    matrix_of_disarray[r, c, z] = -1

    return matrix_of_disarray, shape_G, R
def iterate_orientation_analysis(volume,
                                 R,
                                 parameters,
                                 shape_R,
                                 shape_P,
                                 _verbose=False):
    ''' FUNCTION DESCRIPTION______________________________________________
       
    - virtually dissect 'volume' into separate data blocks
    - perform the structure tensor analysis implemented in 'block_analysis' 
      within each data block
    - save results to R
    
    - PARAMETERS
      volume:     input stack volume        (data type: numpy array)
      R:          results matrix            (data type: numpy array)
      parameters: analysis parameters       (data type: dict)
      shape_R:    shape of results matrix   (data type: numpy array)
      shape_P:    size of parallelepiped extracted for orientation analysis (data type: numpy array)
      _verbose:   verbose flag              (data type: boolean)
      
    - RETURN: result matrix (R), iteration count (count)'''

    # estimate sigma of blurring for isotropic resolution
    sigma_blur = sigma_for_uniform_resolution(
        FWHM_xy=parameters['fwhm_xy'],
        FWHM_z=parameters['fwhm_z'],
        px_size_xy=parameters['px_size_xy'])
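    # (For reference: for a Gaussian PSF, FWHM = 2 * sqrt(2 * ln 2) * sigma
    # ~= 2.355 * sigma; the helper presumably derives the extra in-plane blur
    # needed to match the coarser axial resolution. This is an assumption
    # about its internals, noted here only for orientation.)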

    perc = 0  # % progress
    count = 0  # iteration count
    tot = np.prod(shape_R)
    print(' > Expected iterations: ', tot)

    for z in range(shape_R[2]):

        if _verbose:
            print('\n\n')

        print('{0:0.1f} % - z: {1:3}'.format(perc, z))

        for r in range(shape_R[0]):

            for c in range(shape_R[1]):

                start_coord = create_coord_by_iter(r, c, z, shape_P)
                slice_coord = create_slice_coordinate(start_coord, shape_P)

                perc = 100 * (count / tot)

                if _verbose:
                    print('\n')

                # save init info to R
                R[r, c, z]['id_block'] = count
                R[r, c, z][Param.INIT_COORD] = start_coord

                # extract parallelepiped
                parall = volume[tuple(slice_coord)]

                # check parallelepiped size (add zero_pad if iteration is on the volume border)
                parall = pad_dimension(parall, shape_P)

                # if the whole data block is not black...
                if np.max(parall) != 0:

                    # ...analyze the extracted parallelepiped with block_analysis()
                    there_is_cell, there_is_info, results = block_analysis(
                        parall, shape_P, parameters, sigma_blur, _verbose)

                    # save info to R[r, c, z]
                    if there_is_cell: R[r, c, z]['cell_info'] = True
                    if there_is_info: R[r, c, z]['orient_info'] = True

                    # save results to R
                    if _verbose:
                        print(' saved to R:  ')

                    for key in results.keys():

                        R[r, c, z][key] = results[key]

                        if _verbose:
                            print(' > {} : {}'.format(key, R[r, c, z][key]))

                else:

                    if _verbose:
                        print(' data block rejected')

                count += 1

    return R, count
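# --- Illustration (not part of the original pipeline) ------------------------
# A sketch of the block-dissection indexing assumed by the loop above: block
# (r, c, z) of shape shape_P is assumed to start at the element-wise product
# of its indices with shape_P. The helper behavior shown here is an assumption.
def _demo_block_slicing():
    import numpy as np

    shape_P = (32, 32, 8)
    r, c, z = 1, 2, 0
    start = (r * shape_P[0], c * shape_P[1], z * shape_P[2])
    slc = tuple(slice(s, s + d) for s, d in zip(start, shape_P))
    volume = np.zeros((100, 100, 20))
    parall = volume[slc]
    return parall.shape  # may be smaller than shape_P on the volume border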
def estimate_local_disarry(R, parameters):
    '''
        :param R: result matrix 'R'
        :param parameters: dictionary of parameters read from parameters.txt
        :return: result matrix 'R' with a 'local_disarray' value saved inside every valid cell (freq_info == True)
        :return: matrix_of_disarray_perc (matrix of the local disarray percentages)
        :return: shape_G (dimension of the grain of the local disarray analysis)
        :return: isolated_value (value assigned to isolated points)'''
    """ Calculate and save in 'local_disarray' a percentage value if valid, or -1 if not valid:
    0   : maximum order (all neighbours have the same direction)
    100 : maximum disorder (the average of the neighbour versors vanishes)
    -1  : too many neighbours are invalid (freq_info == False) -> the block is isolated (not valid)

    local_disarray := 100 * (1 - alignment), where the alignment is the modulus of the
    average of the valid neighbour peak versors, after moving them into the same half-space.

    The sub-block (grain of analysis) has shape = shape_G = (grane_size_xy, grane_size_xy, grane_size_z),
    with grane_size_xy and grane_size_z read from the parameters.txt file.

    Conditions:
    1) if grane_size_xy or grane_size_z < 2, the function uses 2.
    2) if a sub-block contains fewer valid peaks than the 'neighbours_lim' parameter, local_disarray is set to -1 (isolated block)."""

    res_xy = parameters['res_xy']
    res_z = parameters['res_z']
    num_of_slices_P = parameters['num_of_slices_P']
    resolution_factor = res_z / res_xy
    block_side = int(num_of_slices_P * resolution_factor)

    # pixel size in the frequency space (um^-1)
    pixel_size_F_xy = 1 / (block_side * res_xy)
    pixel_size_F_z = 1 / (num_of_slices_P * res_z)
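    # e.g. (hypothetical values): block_side = 32 px with res_xy = 0.5 um gives
    # pixel_size_F_xy = 1 / 16 um^-1; num_of_slices_P = 8 with res_z = 2.0 um
    # gives pixel_size_F_z = 1 / 16 um^-1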

    neighbours_lim = parameters[
        'neighbours_lim'] if parameters['neighbours_lim'] > 3 else 3
    print('neighbours_lim', neighbours_lim)

    # extract analysis subblock dimension from parameters
    grane_size_z = parameters['local_disarray_z_side'] if parameters[
        'local_disarray_z_side'] > 2 else 2
    grane_size_xy = parameters['local_disarray_xy_side']

    # check if value is valid
    if grane_size_xy == 0:
        grane_size_xy = grane_size_z * resolution_factor
    elif grane_size_xy < 2:
        grane_size_xy = 2

    # print('grane_size_z', grane_size_z)
    # print('grane_size_xy', grane_size_xy)

    # shape of the grain of analysis
    shape_G = (int(grane_size_xy), int(grane_size_xy), int(grane_size_z))
    # print('shape_G', shape_G)

    # iterations along each axis (ceil -> round up)
    iterations = tuple(
        np.ceil(np.array(R.shape) / np.array(shape_G)).astype(np.uint32))
    print('iterations', iterations)

    # define global matrix that contains each local disarray
    matrix_of_disarray_perc = np.zeros(iterations).astype(np.float32)

    # counter
    _i = 0

    for z in range(iterations[2]):
        for r in range(iterations[0]):
            for c in range(iterations[1]):
                print(_i)
                _i += 1

                # grane extraction from R
                start_coord = create_coord_by_iter(r, c, z, shape_G)
                slice_coord = create_slice_coordinate(start_coord, shape_G)
                grane = R[slice_coord]

                # select only blocks with valid frequency information
                f_map = grane['freq_info']
                grane_f = grane[f_map]

                # check that grane_f has at least neighbours_lim elements (default: 3)
                if grane_f.shape[0] >= neighbours_lim:

                    # vector components (as an N x 3 matrix): N = grane_f.shape[0] = number of valid blocks
                    coord = grane_f['quiver_comp'][:, 0, :]

                    # convert resolution: from pixels to um^-1
                    coord_um = coord * np.array(
                        [pixel_size_F_xy, pixel_size_F_xy, pixel_size_F_z])

                    # normalize vectors (every row is a 3D vector):
                    coord_um_norm = coord_um / np.linalg.norm(
                        coord_um, axis=1).reshape(coord_um.shape[0], 1)

                    # take a random versor (for example, the first)
                    v1 = coord_um_norm[0, :]

                    # move all the vectors into the same half-space as v1
                    # (by checking the sign of the dot product
                    # between v1 and the others)
                    for i in range(coord_um_norm.shape[0]):
                        scalar = np.dot(v1, coord_um_norm[i])
                        if scalar < 0:
                            # change the direction of i-th versor
                            coord_um_norm[i] = coord_um_norm[i] * -1

                    # alignment degree: modulus of the average vector
                    alignment = np.linalg.norm(
                        np.average(coord_um_norm, axis=0))

                    # define local_disarray degree
                    local_disarray_perc = 100 * (1 - alignment)

                    # save it in each block of this portion (grain) for future statistics and plots
                    R[slice_coord]['local_disarray'] = local_disarray_perc

                    # and save it in the matrix of local_disarray
                    matrix_of_disarray_perc[r, c, z] = local_disarray_perc

                else:
                    # too few vectors in this grain:
                    # assume that an isolated quiver has no disarray
                    R[slice_coord]['local_disarray'] = -1.
                    matrix_of_disarray_perc[r, c, z] = -1

    # read the isolated value from parameters (normalization of R is currently disabled)
    isolated_value = parameters['isolated']
    # R = normalize_local_disorder(R, max_dev, min_dev, isolated_value)
    return R, matrix_of_disarray_perc, shape_G, isolated_value
Example #7
def main(parser):

    args = parser.parse_args()

    # Extract input information
    source_path = manage_path_argument(args.source_path)
    parameter_filename = args.parameters_filename[0]

    # extract filenames and folders
    stack_name = os.path.basename(source_path)
    process_folder = os.path.basename(os.path.dirname(source_path))
    base_path = os.path.dirname(os.path.dirname(source_path))
    parameter_filepath = os.path.join(base_path, process_folder,
                                      parameter_filename)
    stack_prefix = stack_name.split('.')[0]

    # extract other preferences
    _verbose = args.verbose
    _save_csv = args.csv

    # create some introductory information messages
    mess_strings = list()
    mess_strings.append('\n\n*** ST orientation Analysis ***\n')
    mess_strings.append(' > source path: {}'.format(source_path))
    mess_strings.append(' > stack name: {}'.format(stack_name))
    mess_strings.append(' > process folder: {}'.format(process_folder))
    mess_strings.append(' > base path: {}'.format(base_path))
    mess_strings.append(' > Parameter filename: {}'.format(parameter_filename))
    mess_strings.append(' > Parameter filepath: {}'.format(parameter_filepath))
    mess_strings.append('')

    # TODO here added local_disarray_z_side and local_disarray_xy_side
    # extract parameters
    param_names = [
        'roi_xy_pix', 'px_size_xy', 'px_size_z', 'mode_ratio',
        'threshold_on_cell_ratio', 'local_disarray_xy_side',
        'local_disarray_z_side', 'neighbours_lim', 'fwhm_xy', 'fwhm_z'
    ]

    param_values = search_value_in_txt(parameter_filepath, param_names)

    # create dictionary of parameters
    parameters = {}
    mess_strings.append('\n\n*** Parameters used:')
    mess_strings.append(
        ' > Parameters extracted from {}\n'.format(parameter_filename))
    for i, p_name in enumerate(param_names):
        parameters[p_name] = float(param_values[i])
        mess_strings.append('> {} - {}'.format(p_name, parameters[p_name]))

    # Parameters of Acquisition System:
    # ratio between pixel size in z and xy
    ps_ratio = parameters['px_size_z'] / parameters['px_size_xy']

    # analysis block dimension in z-axis
    num_of_slices_P = int(parameters['roi_xy_pix'] / ps_ratio)

    row_P = col_P = int(parameters['roi_xy_pix'])
    shape_P = np.array((row_P, col_P, num_of_slices_P)).astype(np.int32)

    mess_strings.append('\n *** Analysis configuration')
    mess_strings.append(
        ' > Pixel size ratio (z / xy) = {0:0.2f}'.format(ps_ratio))
    mess_strings.append(
        ' > Number of slices selected for each ROI ({} x {}): {}'.format(
            row_P, col_P, num_of_slices_P))
    mess_strings.append(
        ' > Dimension of Parallelepiped: ({0},{1},{2}) pixel  ='
        '  [{3:2.2f} {4:2.2f} {5:2.2f}] um'.format(
            shape_P[0], shape_P[1], shape_P[2],
            row_P * parameters['px_size_xy'], col_P * parameters['px_size_xy'],
            num_of_slices_P * parameters['px_size_z']))

    # create result.txt filename:
    txt_filename = 'Orientations_' + stack_prefix + '_' \
                   + str(int(parameters['roi_xy_pix'] * parameters['px_size_xy'])) + 'um.txt'
    txt_path = os.path.join(os.path.dirname(source_path), txt_filename)
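    # e.g. (hypothetical values): roi_xy_pix = 100 px and px_size_xy = 0.5 um
    # give txt_filename = 'Orientations_<stack_prefix>_50um.txt'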

    # print and write introductory information to the .txt file
    write_on_txt(mess_strings, txt_path, _print=True)
    # clear list of strings
    mess_strings.clear()

    # 1 ----------------------------------------------------------------------------------------------------
    # OPEN STACK

    # extract data - entire Volume: 'V'
    volume = InputFile(source_path).whole()
    # NB: the axis handling should be changed in the future
    volume = np.moveaxis(volume, 0, -1)  # (z, y, x) -> (r, c, z)
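    # e.g. a stack loaded as (z, y, x) = (20, 1024, 512) becomes
    # (r, c, z) = (1024, 512, 20) after the moveaxis call (hypothetical shape)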

    # calculate dimension
    shape_V = np.array(volume.shape)
    pixel_for_slice = shape_V[0] * shape_V[1]
    total_voxel_V = pixel_for_slice * shape_V[2]

    mess_strings.append('\n\n*** Entire loaded Volume dimension:')
    mess_strings.append(' > Dimension of entire Volume : ({}, {}, {})'.format(
        shape_V[0], shape_V[1], shape_V[2]))
    mess_strings.append(
        ' > Pixels per slice           : {}'.format(pixel_for_slice))
    mess_strings.append(
        ' > Total voxels in Volume     : {}'.format(total_voxel_V))

    # extract a list of information strings about the volume variable
    info = print_info(volume,
                      text='\nVolume informations:',
                      _std=False,
                      _return=True)
    mess_strings = mess_strings + info

    # print and write into .txt
    write_on_txt(mess_strings, txt_path, _print=True)
    # clear list of strings
    mess_strings.clear()

    # 2 ----------------------------------------------------------------------------------------------------
    # CYCLE FOR BLOCKS EXTRACTION and ANALYSIS
    print('\n\n')
    print('*** Start Structure Tensor analysis... ')

    t_start = time.time()

    # create empty Result matrix
    R, shape_R = create_R(shape_V, shape_P)

    # estimate sigma of blurring for isotropic resolution
    sigma_blur = sigma_for_uniform_resolution(
        FWHM_xy=parameters['fwhm_xy'],
        FWHM_z=parameters['fwhm_z'],
        px_size_xy=parameters['px_size_xy'])
    perc = 0
    count = 0  # count iteration
    tot = np.prod(shape_R)
    print(' > Expected iterations : ', tot)

    for z in range(shape_R[2]):
        if _verbose: print('\n\n')
        print('{0:0.1f} % - z: {1:3}'.format(perc, z))
        for r in range(shape_R[0]):
            for c in range(shape_R[1]):

                start_coord = create_coord_by_iter(r, c, z, shape_P)
                slice_coord = create_slice_coordinate(start_coord, shape_P)

                perc = 100 * (count / tot)
                if _verbose: print('\n')

                # save init info in R
                R[r, c, z]['id_block'] = count
                R[r, c, z]['init_coord'] = start_coord

                # extract parallelepiped
                parall = volume[slice_coord]

                # check dimension (if iteration is on border of volume, add zero_pad)
                parall = pad_dimension(parall, shape_P)

                # If it's not all black...
                if np.max(parall) != 0:

                    # analysis of parallelepiped extracted
                    there_is_cell, there_is_info, results = block_analysis(
                        parall, shape_P, parameters, sigma_blur, _verbose)

                    # save info in R[r, c, z]
                    if there_is_cell: R[r, c, z]['cell_info'] = True
                    if there_is_info: R[r, c, z]['orient_info'] = True

                    # save results in R
                    if _verbose: print(' saved in R:  ')
                    for key in results.keys():
                        R[r, c, z][key] = results[key]
                        if _verbose:
                            print(' > {} : {}'.format(key, R[r, c, z][key]))

                else:
                    if _verbose: print('   block rejected   ')

                count += 1

    block_with_cell = np.count_nonzero(R['cell_info'])
    block_with_info = np.count_nonzero(R['orient_info'])
    p_rejec_cell = 100 * (1 - (block_with_cell / count))
    p_rejec_info_tot = 100 * (1 - (block_with_info / count))
    p_rejec_info = 100 * (1 - (block_with_info / block_with_cell))

    t_process = time.time() - t_start

    mess_strings.append('\n\n*** Results of Orientation analysis:')
    mess_strings.append(' > Expected iterations : {}'.format(np.prod(shape_R)))
    mess_strings.append(' > Total iterations : {}'.format(count))
    mess_strings.append(' > Time elapsed: {0:.3f} s'.format(t_process))
    mess_strings.append('\n > Total blocks: {}'.format(count))
    mess_strings.append(
        ' > block with cell : {0}, rejected from total: {1} ({2:0.1f}%)'.
        format(block_with_cell, count - block_with_cell, p_rejec_cell))
    mess_strings.append(
        ' > block with gradient information : {}'.format(block_with_info))
    mess_strings.append(' > rejected from total: {0} ({1:0.1f}%)'.format(
        count - block_with_info, p_rejec_info_tot))
    mess_strings.append(
        ' > rejected from block with cell: {0} ({1:0.1f}%)'.format(
            block_with_cell - block_with_info, p_rejec_info))

    # print and write into .txt
    write_on_txt(mess_strings, txt_path, _print=True)
    # clear list of strings
    mess_strings.clear()

    # 3 ----------------------------------------------------------------------------------------------------
    # Disarray estimation

    # the function estimates the local disarray and also writes these values inside R
    matrix_of_disarrays, shape_G, R = estimate_local_disarry(R,
                                                             parameters,
                                                             ev_index=2,
                                                             _verb=True,
                                                             _verb_deep=False)

    # extract only valid disarray values
    disarray_values = matrix_of_disarrays[matrix_of_disarrays != -1]

    # 4 ----------------------------------------------------------------------------------------------------
    # WRITE RESULTS AND SAVE

    # create result matrix (R) filename:
    R_filename = 'R_' + stack_prefix + '_' + str(
        int(parameters['roi_xy_pix'] * parameters['px_size_xy'])) + 'um.npy'
    R_prefix = R_filename.split('.')[0]
    R_filepath = os.path.join(base_path, process_folder, R_filename)

    # Save Results in R.npy
    np.save(R_filepath, R)
    mess_strings.append('\n > R matrix saved in: {}'.format(
        os.path.dirname(source_path)))
    mess_strings.append(' > with name: {}'.format(R_filename))

    mess_strings.append('\n > Results .txt file saved in: {}'.format(
        os.path.dirname(txt_path)))
    mess_strings.append(' > with name: {}'.format(txt_filename))

    # create the filename of the numpy file where the disarray matrix is saved
    disarray_numpy_filename = 'MatrixDisarray_{}_G({},{},{})_limNeig{}.npy'.format(
        R_prefix, int(shape_G[0]), int(shape_G[1]), int(shape_G[2]),
        int(parameters['neighbours_lim']))

    mess_strings.append('\n> Matrix of Disarray saved in:')
    mess_strings.append(os.path.join(base_path, process_folder))
    mess_strings.append(' > with name: \n{}'.format(disarray_numpy_filename))

    # save numpy file
    np.save(os.path.join(base_path, process_folder, disarray_numpy_filename),
            matrix_of_disarrays)

    # create results strings
    mess_strings.append(
        '\n\n*** Results of statistical analysis of Disarray on accepted points. \n'
    )
    mess_strings.append('> Disarray (%):= 100 * (1 - alignment)\n')
    mess_strings.append('> Matrix of disarray shape: {}'.format(
        matrix_of_disarrays.shape))
    mess_strings.append('> Valid disarray values: {}'.format(
        disarray_values.shape))
    mess_strings.append('\n> Disarray mean: {0:0.2f}%'.format(
        np.mean(disarray_values)))
    mess_strings.append('> Disarray std: {0:0.2f}% '.format(
        np.std(disarray_values)))
    mess_strings.append('> Disarray (min, MAX)%: ({0:0.2f}, {1:0.2f})'.format(
        np.min(disarray_values), np.max(disarray_values)))

    # create results.txt filename and filepath
    disarray_results_filename = 'results_disarray_by_{}_G({},{},{})_limNeig{}.txt'.format(
        R_prefix, int(shape_G[0]), int(shape_G[1]), int(shape_G[2]),
        int(parameters['neighbours_lim']))

    disarray_txt_filepath = os.path.join(base_path, process_folder,
                                         disarray_results_filename)

    if _save_csv:
        disarray_csv_filename = disarray_results_filename.split(
            '.')[0] + '.csv'
        np.savetxt(os.path.join(base_path, process_folder,
                                disarray_csv_filename),
                   disarray_values,
                   delimiter=",",
                   fmt='%f')

    # print and write into .txt
    write_on_txt(mess_strings, disarray_txt_filepath, _print=True)