Code example #1
File: main.py Project: benfrankel/grid-editor
def on_key_down(self, unicode, key, mod):
    # Ctrl-modified shortcuts: S saves a screenshot of the grid surface,
    # G saves the grid data, L loads it back
    if mod & pygame.KMOD_CTRL:
        if key == pygame.K_s:
            pygame.image.save(self.grid._display, 'grids/img/latest.png')
        elif key == pygame.K_g:
            save_grid(self.grid)
        elif key == pygame.K_l:
            load_grid(self.grid)
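
For context, here is a minimal, self-contained sketch (not part of the project above) of how a handler with this signature can be driven from pygame's event loop; DemoHandler and its behaviour are hypothetical stand-ins.

import pygame

class DemoHandler:
    """Hypothetical stand-in for the editor object that owns on_key_down."""
    def on_key_down(self, unicode, key, mod):
        if mod & pygame.KMOD_CTRL and key == pygame.K_s:
            print('Ctrl+S pressed: a real editor would save an image here')

pygame.init()
screen = pygame.display.set_mode((320, 240))
handler = DemoHandler()

running = True
while running:
    for event in pygame.event.get():
        if event.type == pygame.QUIT:
            running = False
        elif event.type == pygame.KEYDOWN:
            # KEYDOWN events carry .unicode, .key and .mod, matching the
            # on_key_down(unicode, key, mod) signature used above
            handler.on_key_down(event.unicode, event.key, event.mod)

pygame.quit()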
Code example #2
File: boards.py Project: christiana/sudoku_solver
def generate_board_medium_2():
    'generate a board from sudoku app, difficulty -medium-'
    # empty board template (immediately overwritten by the filled-in board below)
    data = \
    '''
    ||||
    ||||
    ||||
    -------------
    ||||
    ||||
    ||||
    -------------
    ||||
    ||||
    ||||
    '''
    data = \
    '''
    |   |   |6 9|
    |1  |  4|   |
    |  5|3 6|821|
    -------------
    |  4|67 | 5 |
    |  7|   |9  |
    |   |54 |   |
    -------------
    |37 |4 5|2 6|
    |   |   |51 |
    | 6 | 2 | 37|
    '''
    return grid.load_grid(data)
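
grid.load_grid itself is not shown in this listing; purely as an illustration of the board format, a parser for the pipe-delimited text could look like the hypothetical parse_board sketch below (blank cells become 0).

def parse_board(data):
    """Hypothetical parser: turn a pipe-delimited board like the ones above
    into a 9x9 list of ints, with 0 standing for a blank cell."""
    board = []
    for line in data.splitlines():
        line = line.strip()
        if not line.startswith('|'):
            continue  # skip blank lines and '-----' separator rows
        cells = line.replace('|', '')  # the 9 cell characters of one row
        board.append([int(c) if c.isdigit() else 0 for c in cells])
    return board

# e.g. for the medium-2 board above, parse_board(data)[0] == [0, 0, 0, 0, 0, 0, 6, 0, 9]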
Code example #3
File: boards.py Project: christiana/sudoku_solver
def generate_board_hard_1():
    'generate a board from sudoku app, difficulty -hard-'
    data = \
    '''
    |984|5 1| 72|
    | 57|  9| 3 |
    |6  |  7|   |
    -------------
    |   |  2| 1 |
    |   |   |7  |
    |561|   | 28|
    -------------
    |   |4  |   |
    |   |2  |  6|
    |19 |  3|2  |
    '''
    return grid.load_grid(data)
Code example #4
File: boards.py Project: christiana/sudoku_solver
def generate_board_expert_2():
    'generate a board from sudoku app, difficulty -expert-'
    data = \
    '''
    |   |  1|3  |
    |76 |4  |1  |
    |  5| 7 | 6 |
    -------------
    |6  |   | 3 |
    |   |  7| 49|
    |5  | 1 |   |
    -------------
    |   | 32|   |
    | 9 |   |  8|
    | 84|   |   |
    '''
    return grid.load_grid(data)
Code example #5
def run(percent_to_download=-1):
    globs.general.DATA_DIR_API = "./data/api/"
    globs.general.PROPER_DATA_FILE_API = globs.general.DATA_DIR_API + "actual_data{}.csv".format(
        percent_to_download)
    globs.general.LOG_FILE_PATH_API = globs.general.DATA_DIR_API + "logs{}".format(
        percent_to_download)
    globs.general.NOT_VALID_DATA_FILE_API = globs.general.DATA_DIR_API + "to_much{}.csv".format(
        percent_to_download)

    init_files()
    # init grid
    if not os.path.exists(globs.general.GRID_FILE_TO_SAVE_API):
        grid.create_save_grid()
    g_tmp = grid.load_grid(percent_to_download)
    # capturing data
    capture_whole_data(g_tmp, 300)
Code example #6
File: boards.py Project: christiana/sudoku_solver
def generate_board_medium_1():
    'generate a board from sudoku app, difficulty -medium-'

    data = \
    '''
    |64 | 3 |  7|
    |5 1| 7 |9  |
    |   |   | 1 |
    -------------
    |  4|9 8| 6 |
    | 8 |  3| 2 |
    |   |4  |   |
    -------------
    |4  |157| 3 |
    |2 8|3  | 4 |
    |75 |   | 96|
    '''
    return grid.load_grid(data)
Code example #7
def main(redo_cube_correlation_calculation = False,
        redo_grid_correlation_calculation = False):

    import grid
    import numpy as np
    import matplotlib.pyplot as plt  # needed for plt.clf() at the end

    if redo_grid_correlation_calculation:
        perseus_grid = grid.load_grid('/d/bip3/ezbc/perseus/data/galfa/' + \
                'perseus_galfa.138_62.10')

    # define directory locations
    output_dir = '/d/bip3/ezbc/perseus/data/python_output/nhi_av/'
    figure_dir = '/d/bip3/ezbc/perseus/figures/'
    av_dir = '/d/bip3/ezbc/perseus/data/2mass/'
    hi_dir = '/d/bip3/ezbc/perseus/data/galfa/'

    # load 2mass Av and GALFA HI images, on same grid
    av_data = load_fits(av_dir + '2mass_av_lee12_nocal_regrid.fits')
    av_SNR = load_fits(av_dir + '2mass_av_lee12_nocal_SNR_regrid.fits')
    hi_data,h = load_fits(hi_dir + 'perseus.galfa.cube.bin.4arcmin.fits',
            return_header=True)

    # make the velocity axis
    velocity_axis = (np.arange(h['NAXIS3']) - h['CRPIX3'] + 1) * h['CDELT3'] + \
            h['CRVAL3']
    velocity_axis /= 1000.

    # define the parameters to derive NHI from the GALFA cube
    velocity_centers = np.arange(-20,20,0.5)
    velocity_widths = np.arange(1,120,5)
    #velocity_centers = np.arange(-40,40,5)
    #velocity_centers = np.array([5])
    #velocity_widths = np.arange(1,100,20)
    if redo_grid_correlation_calculation:
        correlations = calculate_correlation(SpectralGrid=perseus_grid,
                av_image=av_data,
                velocity_centers=velocity_centers,
                velocity_widths=velocity_widths)
    else:
        correlations = np.load(output_dir + 'correlations.npy')

    if redo_cube_correlation_calculation:
        cube_correlations = calculate_correlation(cube=hi_data,
                velocity_axis=velocity_axis,av_image=av_data,
                av_SNR=av_SNR,
                velocity_centers=velocity_centers,
                velocity_widths=velocity_widths)
    else:
        cube_correlations = np.load(output_dir + 'cube_correlations.npy')

    # save arrays
    np.save(output_dir + 'cube_correlations', cube_correlations)
    np.save(output_dir + 'correlations', correlations)
    np.save(output_dir + 'velocity_centers', velocity_centers)
    np.save(output_dir + 'velocity_widths', velocity_widths)

    # Plot heat map of correlations
    cube_correlations_array = plot_correlations(cube_correlations,
            velocity_centers, velocity_widths, returnimage=True,
            savedir=figure_dir,
            filename='perseus.nhi_av_correlation.png',
            show=False)

    plot_center_velocity(cube_correlations,
            velocity_centers, velocity_widths,
            velocity_center=5, returnimage=True,
            savedir=figure_dir,
            filename='perseus.nhi_av_5kms_correlation.png',
            show=False)

    plot_center_velocity(cube_correlations,
            velocity_centers, velocity_widths,
            velocity_center=10, returnimage=True,
            savedir=figure_dir,
            filename='perseus.nhi_av_10kms_correlation.png',
            show=False)

    # Plot NHI vs. Av for a given velocity range
    #hi_data_corrected = np.ma.array(hi_data, mask=np.where(hi_data > -5))
    nhi_image = calculate_NHI(cube=hi_data,
            velocity_axis=velocity_axis,
            velocityrange=[-5,15])

    av_data_blk = load_fits(av_dir + '2mass_av_lee12_regrid.fits')

    plot_nhi_vs_av(nhi_image, av_data_blk,
            savedir=figure_dir,
            filename='perseus.av_nhi_2dDensity.png')

    plot_nhi_vs_av(nhi_image, av_data,
            savedir=figure_dir,
            filename='perseus.av_nhi_2dDensity_2mass.png')

    # Plot heat map of correlations
    if correlations is not None:
        correlations_array = plot_correlations(correlations,
                velocity_centers, velocity_widths, returnimage=True,show=False)
    # Print best-fit characteristics
    indices = np.where(cube_correlations_array == \
            cube_correlations_array.max())
    print('Maximum correlation values: ')
    print(str(velocity_centers[indices[0]][0]) + ' km/s center')
    print(str(velocity_widths[indices[1]][0]) + ' km/s width')
    plt.clf()
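
As a standalone illustration of the velocity-axis construction in main(), the sketch below applies the same channel-to-velocity formula to a toy FITS-style header; the header values are invented, and the division by 1000 presumably converts m/s to km/s.

import numpy as np

# toy FITS-style header (values invented for illustration only)
h = {'NAXIS3': 5, 'CRPIX3': 3.0, 'CDELT3': 736.0, 'CRVAL3': 0.0}

# channel index -> velocity, same formula as in main() above
velocity_axis = (np.arange(h['NAXIS3']) - h['CRPIX3'] + 1) * h['CDELT3'] + h['CRVAL3']
velocity_axis /= 1000.  # presumably m/s -> km/s, matching the code above

print(velocity_axis)  # [-1.472 -0.736  0.     0.736  1.472]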
Code example #8
        guesses=guesses,
        ncomp=len(guesses)/3,
        alpha=0.001,
        coords='image',
        numberOfFits=1e6,
        COcube=cfa,
        COwidthScale=1.)



################################################################################
# Loading cfa grid of persues
################################################################################

import grid
cfa = grid.load_grid('perseus.cfa.138_62.5sigma')

grid.plot_ncompImage(cfa)

box=[0,36,180,180]
reload(grid)
perseus_galfa = grid.SpectralGrid('../galfa/'+\
        'perseus.galfa.cube.bin.4arcmin.fits',
                        box=box,
                        noiseScale=10.,
                        noiseRange=((-110,-90),(90,110)),
                        basesubtract=True)
guesses = [48,5,10]
perseus_galfa.fit_profiles(
        growPos = (138,62),
        tileSaveFreq=100,
Code example #9
def main(redo_cube_correlation_calculation=False,
         redo_grid_correlation_calculation=False):

    import grid
    import numpy as np
    import matplotlib.pyplot as plt  # needed for plt.clf() at the end

    if redo_grid_correlation_calculation:
        perseus_grid = grid.load_grid('/d/bip3/ezbc/perseus/data/galfa/' + \
                'perseus_galfa.138_62.10')

    # define directory locations
    output_dir = '/d/bip3/ezbc/perseus/data/python_output/nhi_av/'
    figure_dir = '/d/bip3/ezbc/perseus/figures/'
    av_dir = '/d/bip3/ezbc/perseus/data/2mass/'
    hi_dir = '/d/bip3/ezbc/perseus/data/galfa/'

    # load 2mass Av and GALFA HI images, on same grid
    av_data = load_fits(av_dir + '2mass_av_lee12_nocal_regrid.fits')
    av_SNR = load_fits(av_dir + '2mass_av_lee12_nocal_SNR_regrid.fits')
    hi_data, h = load_fits(hi_dir + 'perseus.galfa.cube.bin.4arcmin.fits',
                           return_header=True)

    # make the velocity axis
    velocity_axis = (np.arange(h['NAXIS3']) - h['CRPIX3'] + 1) * h['CDELT3'] + \
            h['CRVAL3']
    velocity_axis /= 1000.

    # define the parameters to derive NHI from the GALFA cube
    velocity_centers = np.arange(-20, 20, 0.5)
    velocity_widths = np.arange(1, 120, 5)
    #velocity_centers = np.arange(-40,40,5)
    #velocity_centers = np.array([5])
    #velocity_widths = np.arange(1,100,20)
    if redo_grid_correlation_calculation:
        correlations = calculate_correlation(SpectralGrid=perseus_grid,
                                             av_image=av_data,
                                             velocity_centers=velocity_centers,
                                             velocity_widths=velocity_widths)
    else:
        correlations = np.load(output_dir + 'correlations.npy')

    if redo_cube_correlation_calculation:
        cube_correlations = calculate_correlation(
            cube=hi_data,
            velocity_axis=velocity_axis,
            av_image=av_data,
            av_SNR=av_SNR,
            velocity_centers=velocity_centers,
            velocity_widths=velocity_widths)
    else:
        cube_correlations = np.load(output_dir + 'cube_correlations.npy')

    # save arrays
    np.save(output_dir + 'cube_correlations', cube_correlations)
    np.save(output_dir + 'correlations', correlations)
    np.save(output_dir + 'velocity_centers', velocity_centers)
    np.save(output_dir + 'velocity_widths', velocity_widths)

    # Plot heat map of correlations
    cube_correlations_array = plot_correlations(
        cube_correlations,
        velocity_centers,
        velocity_widths,
        returnimage=True,
        savedir=figure_dir,
        filename='perseus.nhi_av_correlation.png',
        show=False)

    plot_center_velocity(cube_correlations,
                         velocity_centers,
                         velocity_widths,
                         velocity_center=5,
                         returnimage=True,
                         savedir=figure_dir,
                         filename='perseus.nhi_av_5kms_correlation.png',
                         show=False)

    plot_center_velocity(cube_correlations,
                         velocity_centers,
                         velocity_widths,
                         velocity_center=10,
                         returnimage=True,
                         savedir=figure_dir,
                         filename='perseus.nhi_av_10kms_correlation.png',
                         show=False)

    # Plot NHI vs. Av for a given velocity range
    #hi_data_corrected = np.ma.array(hi_data, mask=np.where(hi_data > -5))
    nhi_image = calculate_NHI(cube=hi_data,
                              velocity_axis=velocity_axis,
                              velocityrange=[-5, 15])

    av_data_blk = load_fits(av_dir + '2mass_av_lee12_regrid.fits')

    plot_nhi_vs_av(
        nhi_image,
        av_data_blk,
        savedir=figure_dir,
        filename='perseus.av_nhi_2dDensity.png',
    )

    plot_nhi_vs_av(
        nhi_image,
        av_data,
        savedir=figure_dir,
        filename='perseus.av_nhi_2dDensity_2mass.png',
    )

    # Plot heat map of correlations
    if correlations is not None:
        correlations_array = plot_correlations(correlations,
                                               velocity_centers,
                                               velocity_widths,
                                               returnimage=True,
                                               show=False)
    # Print best-fit characteristics
    indices = np.where(cube_correlations_array == \
            cube_correlations_array.max())
    print('Maximum correlation values: ')
    print(str(velocity_centers[indices[0]][0]) + ' km/s center')
    print(str(velocity_widths[indices[1]][0]) + ' km/s width')
    plt.clf()
Code example #10
File: distributor.py Project: Na58/CCC
            break
    fhandle.close()
    return insta_coor


if __name__ == '__main__':

    startTime = time.time()
    
    #fname = 'mediumInstagram.json'
    #fname = 'tinyInstagram.json'
    fname = 'bigInstagram.json'
    log_file = 'result.txt'

    comm = MPI.COMM_WORLD
    grid = G.load_grid(log_file)

    print "Starting...", comm.size
    
    if comm.rank == 0:
        loghdl = open(log_file,'a')
        loghdl.write("**********" + str(startTime) + "**********\n")
        loghdl.flush()
        loghdl.close()
        #file_read_time = time.time()
        insta_list = generate_fpoint(fname, comm.size)
        result = []
        #print "through time", time.time() - file_read_time
    else:
        insta_list = []
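
The fragment above follows a common MPI pattern: rank 0 reads and splits the input while the other ranks wait for their share. A minimal, self-contained mpi4py sketch of that pattern (not the project's code) looks like this:

from mpi4py import MPI

comm = MPI.COMM_WORLD

if comm.rank == 0:
    # rank 0 prepares one chunk of work per process
    chunks = [list(range(i * 3, (i + 1) * 3)) for i in range(comm.size)]
else:
    chunks = None

my_chunk = comm.scatter(chunks, root=0)   # each rank receives its own chunk
local_sum = sum(my_chunk)

totals = comm.gather(local_sum, root=0)   # collect partial results on rank 0
if comm.rank == 0:
    print('total:', sum(totals))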