Example #1
def get_data_matrix(d, cell_type):
    # Get cell list from data keys
    celllist = util.get_cell_list(d)
    ncells = len(celllist)

    # Load params from meta data
    nsteps = util.num(d['const']['MOO_tn']['val'])  # time steps
    ntrials = util.num(d['ntrial'])

    # RGC responses are stored under 'p'; all other cell types under 'x'
    if cell_type == 'rgc':
        resp_ind = 'p'
    else:
        resp_ind = 'x'

    cellkey = util.get_cell_data(d, cell_type)

    data_mat = np.zeros((ncells, nsteps * ntrials))
    for t in range(ntrials):
        for i, r in enumerate(cellkey['tr'][t]['r']):
            data_mat[i, t * nsteps:(t + 1) *
                     nsteps] = d['tr'][t]['r'][r][resp_ind]

    return data_mat
Example #2
def get_bonuses(slug, skill_data, primary_values, secondary_values):
    bonuses = []
    primary_bonus = {
        "type": skill_data["BonusTypeA"],
        "value": primary_values,
    }

    reductions = calc_reductions(slug, 0)
    if reductions:
        primary_bonus["reductions"] = reductions
    bonuses.append(primary_bonus)

    if skill_data["BonusTypeB"] != "None":
        secondary_bonus = {
            "type": skill_data["BonusTypeB"],
            "value": secondary_values
        }
        reductions = calc_reductions(slug, 1)
        if reductions:
            secondary_bonus["reductions"] = reductions
        bonuses.append(secondary_bonus)

    if skill_data["BonusTypeC"] != "None":
        tertiary_bonus = {
            "type": skill_data["BonusTypeC"],
            "value": num(skill_data["BonusAmountC"]),
        }
        reductions = calc_reductions(slug, 2)
        if reductions:
            tertiary_bonus["reductions"] = reductions
        bonuses.append(tertiary_bonus)
    return bonuses
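
For illustration, a sketch of how get_bonuses might be called. The skill_data keys mirror the ones read above, but the concrete values, the slug, and the helper stubs are made up for this example.

# Stubs standing in for the project's real helpers, so the sketch runs alone.
def calc_reductions(slug, index):
    return []            # pretend no reductions apply to this skill

def num(value):
    return float(value)  # assumed: num() converts strings to numbers

skill_data = {
    "BonusTypeA": "AttackDamage",   # invented bonus names
    "BonusTypeB": "CritChance",
    "BonusTypeC": "None",           # tertiary bonus is skipped when "None"
    "BonusAmountC": "0",
}

bonuses = get_bonuses("example-skill", skill_data,
                      primary_values=[10, 20, 30],
                      secondary_values=[1.5, 3.0])
# -> [{'type': 'AttackDamage', 'value': [10, 20, 30]},
#     {'type': 'CritChance', 'value': [1.5, 3.0]}]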
Example #3
def compute_s_dist_cone_weight(d, celldat, celllist, params):
    '''
    Compute, for each non-S-center cell, its distance to the nearest S cone
    (in arcmin) and the relative S-cone weight of its response.
    '''
    deg_per_pix, mm_per_deg = util.conversion_factors(params['species'])

    cellIDs = celldat[:, 0]

    # convert pixels into arcmin
    dist2S = nn.find_nearest_S(celldat[:, 2:4], params['mosaic_file'])[0]
    dist2S *= deg_per_pix * 60

    N = util.num(d['const']['MOO_tn']['val'])  # time steps
    tf = util.num(d['const']['tf']['val'])  # temporal frequency (Hz)

    lm_midgets = np.zeros((len(celllist), 2))
    keys = util.get_cell_data(d['tr'][0], 'bp')
    for i, r in enumerate(keys):
        # find distance to S
        cellID = int(celllist[i])
        ind = np.where(cellIDs == cellID)[0]
        distance = dist2S[ind]

        # Compute SML cone weights
        # Trials: 1. S iso; 2. M iso; 3. L iso
        sml_weights = np.zeros((3, 1))
        for t in [0, 1, 2]:
            # find amplitude of signal
            cell = d['tr'][t]['r'][r]['x']
            fft = np.fft.fft(cell)
            amp = np.abs(fft[int(tf)]) * 2 / N
            sml_weights[t] = amp / params['cone_contrast'][t]

        # skip S cones
        if celldat[ind, 1] != 0:
            lm_midgets[i, 0] = distance
            sml = sml_weights / sml_weights.sum()
            lm_midgets[i, 1] = sml[0]  # s-cone weight

    # remove zero rows (where S cones were)
    lm_midgets = lm_midgets[~np.all(lm_midgets == 0, axis=1)]

    return lm_midgets
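
The amplitude estimate inside the trial loop follows the usual single-bin DFT convention: |FFT[k]| * 2 / N recovers the amplitude of a sinusoid at frequency bin k. A minimal, self-contained check with a made-up signal:

import numpy as np

N, tf, true_amp = 1000, 8, 0.5
t = np.arange(N)
signal = true_amp * np.sin(2 * np.pi * tf * t / N)  # 8 cycles over N samples
fft = np.fft.fft(signal)
print(np.abs(fft[int(tf)]) * 2 / N)  # ~0.5, the amplitude we put in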
Example #4
def calculate_tree_column(_slot):
    slot = num(_slot)
    if slot == 0:
        return 1
    mod = slot % 3
    if mod == 0:
        return 2
    if mod == 1:
        return 0
    if mod == 2:
        return 1
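
A quick sanity check of the slot-to-column mapping, with a stand-in for the external num() helper (assumed here to be a plain integer conversion):

def num(value):
    return int(value)  # stand-in for the project's num() helper

for slot in range(7):
    print(slot, calculate_tree_column(slot))
# 0 -> 1 (special case), then columns cycle 0, 1, 2 every three slots:
# 1 -> 0, 2 -> 1, 3 -> 2, 4 -> 0, 5 -> 1, 6 -> 2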
Example #5
def knn(d, params):
    '''
    Plot each cell's response amplitude at the stimulus temporal frequency
    against its distance to the nearest S cone (from nn_results.txt),
    color-coded by the cone type recorded in that file.
    '''
    celllist = util.get_cell_list(d)
    celldat = np.genfromtxt('results/txt_files/nn_results.txt')
    cellIDs = celldat[:, 0]

    fig = plt.figure()
    fig.set_tight_layout(True)
    ax = fig.add_subplot(111)

    pf.AxisFormat(markersize=8)
    pf.TufteAxis(ax, ['bottom', 'left'], [5, 5])

    N = util.num(d['const']['MOO_tn']['val'])  # time steps
    tf = util.num(d['const']['tf']['val'])  # temporal frequency (Hz)

    keys = util.get_cell_data(d['tr'][0], 'h2')
    for i, r in enumerate(keys):
        # find distance to S
        cellID = int(celllist[i])
        ind = np.where(cellIDs == cellID)[0]
        distance = celldat[ind, 4][0]

        # find amplitude of signal at the stimulus temporal frequency
        cell = d['tr'][0]['r'][r]['x']
        fft = np.fft.fft(cell)
        amp = np.abs(fft[int(tf)]) * 2 / N

        if celldat[ind, 1] == 0:
            ax.plot(distance, amp, 'bo')
        if celldat[ind, 1] == 1:
            ax.plot(distance, amp, 'go')
        if celldat[ind, 1] == 2:
            ax.plot(distance, amp, 'ro')

    savedir = util.get_save_dirname(params, check_randomized=True)
    fig.savefig(savedir + 'knn.svg', edgecolor='none')
    plt.show(block=params['block_plots'])
Example #6
def call_seqprep(output_prefix, fq_read1_fn, fq_read2_fn,
    adapter1, adapter2, **kwargs):
    
    if not kwargs['multi']:
        print >> sys.stderr, 'Merging FASTQ reads with SeqPrep...'
        sys.stderr.flush()
    
    #generate file names
    files = {
        'fq1_out_fn': output_prefix + '.R1.sp.fq.gz',
        'fq2_out_fn': output_prefix + '.R2.sp.fq.gz',
        'fq1_disc_fn': output_prefix + '.R1.disc.fq.gz',
        'fq2_disc_fn': output_prefix + '.R2.disc.fq.gz',
        'merged_out_fn': output_prefix + '.M.fq.gz'}
    
    #skip if the option is turned on.
    if kwargs['skip_finished'] and os.path.exists(files['merged_out_fn']):
        if not kwargs['multi']:
            print >> sys.stderr, '\tFiles present; SeqPrep skipped.\n'
        return files
    
    #make command 
    command = '''
        SeqPrep 
        -f %(fq_read1_fn)s
        -r %(fq_read2_fn)s
        -1 %(fq1_out_fn)s
        -2 %(fq2_out_fn)s
        -3 %(fq1_disc_fn)s
        -4 %(fq2_disc_fn)s
        -s %(merged_out_fn)s
        -A %(adapter1)s -B %(adapter2)s
        -X 1''' % (dict(locals().items() + files.items()))
    
    #split command and wait for it to finish, pipe stderr and stdout to obj
    child = subprocess.Popen(command.split(),
        stdout= subprocess.PIPE,
        stderr= subprocess.PIPE)
    child.wait()
    
    #get stats, add to file dict for parsing later
    stats = {}
    for line in child.stderr.readlines():
        matches = re.match(r'(Pairs[ \w]+|CPU)[\w() ]*:\t([\d.]+)',line)
        if matches:
            stats[matches.groups()[0]] = util.num(matches.groups()[1])
    files['stats'] = stats
    
    if not kwargs['multi']:
        print >> sys.stderr, '\tSeqPrep output to %s.*\n' % (output_prefix)
    return files
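
The stats regex extracts a label and a numeric value from SeqPrep's summary lines on stderr. A standalone sketch of what it captures, using made-up lines shaped the way the pattern expects (label, optional parenthetical, a tab, then the number):

import re

lines = ['Pairs Processed:\t1000',          # invented values
         'Pairs Merged:\t800',
         'CPU Time Used (Minutes):\t0.25']
for line in lines:
    matches = re.match(r'(Pairs[ \w]+|CPU)[\w() ]*:\t([\d.]+)', line)
    if matches:
        print(matches.groups())
# ('Pairs Processed', '1000'), ('Pairs Merged', '800'), ('CPU', '0.25')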
Example #7
def call_seqprep(output_prefix, fq_read1_fn, fq_read2_fn, adapter1, adapter2,
                 **kwargs):

    #generate file names
    files = {
        'fq1_out_fn': output_prefix + '.R1.sp.fq.gz',
        'fq2_out_fn': output_prefix + '.R2.sp.fq.gz',
        'fq1_disc_fn': output_prefix + '.R1.disc.fq.gz',
        'fq2_disc_fn': output_prefix + '.R2.disc.fq.gz',
        'merged_out_fn': output_prefix + '.M.fq.gz'
    }

    #skip if the option is turned on.
    if kwargs['skip_finished'] and os.path.exists(files['merged_out_fn']):
        return files

    #make command
    command = '''
        %(seqprep_path)s
        -f %(fq_read1_fn)s
        -r %(fq_read2_fn)s
        -1 %(fq1_out_fn)s
        -2 %(fq2_out_fn)s
        -3 %(fq1_disc_fn)s
        -4 %(fq2_disc_fn)s
        -s %(merged_out_fn)s
        -A %(adapter1)s -B %(adapter2)s
        -X 1 -g -L 5''' % (dict(locals().items() + files.items() +
                                config.bin_paths.items()))

    #split command and wait for it to finish, pipe stderr and stdout to obj
    child = subprocess.Popen(command.split(),
                             stdout=subprocess.PIPE,
                             stderr=subprocess.PIPE)
    child.wait()

    #get stats, add to file dict for parsing later
    stats = {}
    for line in child.stderr.readlines():
        matches = re.match(r'(Pairs[ \w]+|CPU)[\w() ]*:\t([\d.]+)', line)
        if matches:
            stats[matches.groups()[0]] = util.num(matches.groups()[1])
    stats['command'] = command
    files['stats'] = stats

    return files
Example #8
def call_seqprep(output_prefix, fq_read1_fn, fq_read2_fn,
    adapter1, adapter2, **kwargs):

    #generate file names
    files = {
        'fq1_out_fn': output_prefix + '.R1.sp.fq.gz',
        'fq2_out_fn': output_prefix + '.R2.sp.fq.gz',
        'fq1_disc_fn': output_prefix + '.R1.disc.fq.gz',
        'fq2_disc_fn': output_prefix + '.R2.disc.fq.gz',
        'merged_out_fn': output_prefix + '.M.fq.gz'}

    #skip if the option is turned on.
    if kwargs['skip_finished'] and os.path.exists(files['merged_out_fn']):
        return files

    #make command
    command = '''
        /opt/SeqPrep.dbg/SeqPrep
        -f %(fq_read1_fn)s
        -r %(fq_read2_fn)s
        -1 %(fq1_out_fn)s
        -2 %(fq2_out_fn)s
        -3 %(fq1_disc_fn)s
        -4 %(fq2_disc_fn)s
        -s %(merged_out_fn)s
        -A %(adapter1)s -B %(adapter2)s
        -X 1 -g -L 5''' % (dict(locals().items() + files.items()))

    #split command and wait for it to finish, pipe stderr and stdout to obj

    child = subprocess.Popen(command.split(),
        stdout= subprocess.PIPE,
        stderr= subprocess.PIPE)
    child.wait()

    #get stats, add to file dict for parsing later
    stats = {}
    for line in child.stderr.readlines():
        matches = re.match(r'(Pairs[ \w]+|CPU)[\w() ]*:\t([\d.]+)',line)
        if matches:
            stats[matches.groups()[0]] = util.num(matches.groups()[1])
    stats['command'] = command
    files['stats'] = stats

    return files
Example #9
def response(d, params):
    '''
    Compute each cell's response amplitude on every trial from the Fourier
    component at the stimulus temporal frequency.
    '''
    # Get cell list from data keys
    celllist = util.get_cell_list(d)

    # Load params from meta data
    N = util.num(d['const']['MOO_tn']['val'])  # time steps

    time = util.get_time(d)
    ncells = len(celllist)
    time_bin = 15

    normalize = False
    # analysis specific parameters
    if params['analysis_type'] == 'cone_inputs':
        normalize = True
        tf = util.num(d['const']['tf']['val'])  # temporal frequency (Hz)
    elif params['analysis_type'] == 'tf':
        sf = util.num(d['const']['sf']['val'])  # spatial freq (cpd)
        tf = util.num(d['const']['VAR_tf']['val'])  # temporal frequency (Hz)
    elif params['analysis_type'] == 'sf':
        sf = util.num(d['const']['VAR_sf']['val'])  # spatial freq (cpd)
        tf = util.num(d['const']['tf']['val'])  # temporal frequency (Hz)
    else:
        raise InputError('analysis type not supported (cone_input, sf, tf)')

    cells = util.get_cell_type(params['cell_type'])
    if params['cell_type'] == 'rgc':
        resp_ind = 'p'
    else:
        resp_ind = 'x'

    resp = {}
    for c in cells:  # for each cell type
        resp[c] = np.zeros((ncells * len(cells), int(d['ntrial'])))

        keys = util.get_cell_data(d, c)

        for t in range(d['ntrial']):  # for each trial
            for i, r in enumerate(keys['tr'][t]['r']):  # for each cell

                # handle case where TF is changing
                if params['analysis_type'] == 'tf':
                    _tf = int(tf[t])
                else:
                    _tf = int(tf)

                cell = d['tr'][t]['r'][r][resp_ind]

                if params['cell_type'] == 'rgc':
                    cell = compute_psth(cell, time.max(), delta_t=10)

                fft = np.fft.fft(cell)

                if params['analysis_type'] == 'cone_inputs':
                    amp = np.real(fft[_tf]) * 2 / N
                    resp[c][i, t] = amp / params['cone_contrast'][t]
                else:
                    amp = np.abs(fft[_tf]) * 2 / N
                    resp[c][i, t] = amp

        if normalize:
            resp[c] = (resp[c].T / np.abs(resp[c]).sum(1)).T

    return resp
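
The normalization at the end divides each row of resp[c] by the sum of absolute values in that row; the transpose makes the 1-D row sums broadcast correctly. A small self-contained illustration with made-up numbers:

import numpy as np

a = np.array([[1.0, -1.0, 2.0],
              [0.5,  0.5, 0.0]])
norm = (a.T / np.abs(a).sum(1)).T   # divide each row by its L1 norm
print(np.abs(norm).sum(1))          # -> [1. 1.]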
Example #10
def tuning_curve(d, params):
    '''
    Plot a spatial- or temporal-frequency tuning curve for each cell type.
    '''
    # Get this with conversion call
    deg_per_pix, mm_per_deg = util.conversion_factors(params['species'])
    deg2um = mm_per_deg / 1000  #  conversion (micron / deg)

    if params['analysis_type'] == 'sf':
        figsize = (7, 7)
    else:
        figsize = (6, 5)

    fig = plt.figure(figsize=figsize)
    fig.set_tight_layout(True)
    ax1 = fig.add_subplot(111)
    if params['analysis_type'] == 'sf':
        ax2 = ax1.twiny()

    pf.AxisFormat()
    pf.TufteAxis(ax1, ['bottom', 'left'], [4, 4])
    if params['analysis_type'] == 'sf':
        pf.TufteAxis(ax2, ['top'], [4, 4])

    # get the data
    r = an.response(d, params)

    colors = ['k', 'gray', 'r', 'b', 'g', 'c', 'm']
    cells = util.get_cell_type(params['cell_type'])
    # set some smart axes for second axis
    ymax = -100  # start small
    ymin = 1000  # start large
    if params['analysis_type'] == 'sf':
        x = util.num(d['const']['VAR_sf']['val'])  # spatial freq (cpd)
    elif params['analysis_type'] == 'tf':
        x = util.num(d['const']['VAR_tf']['val'])  # temp freq

    for i, c in enumerate(cells):
        ax1.loglog(x, r[c][i, :], 'o-', color=colors[i], label=c)
        vec = np.reshape(r[c], -1)
        vmax = np.max(vec)
        vmin = np.min(vec)
        if vmax > ymax:
            ymax = vmax
        if vmin < ymin:
            ymin = vmin

    ymax += 0.1
    ymin -= 0.1

    ax1.axis([x[0] - 0.05, x[-1] + 1, ymin, ymax])
    ax1.set_ylabel('amplitude')
    ax1.legend(fontsize=22, loc='lower center')

    if params['analysis_type'] == 'tf':
        ax1.set_xlabel('temporal frequency (Hz)')

    elif params['analysis_type'] == 'sf':
        ax1.set_xlabel('cycles / degree')
        ax2.xaxis.tick_top()
        ax2.yaxis.tick_left()
        ax2.axis([(x[0] - 0.05) * deg2um, (x[-1] + 1) * deg2um, ymin, ymax])
        ax2.set_xlabel(r'cycles / $\mu$m')
        ax2.set_xscale('log')

    savedir = util.get_save_dirname(params, check_randomized=True)
    fig.savefig(savedir + params['cell_type'] + '_' + params['analysis_type'] +
                '_tuning.eps',
                edgecolor='none')
    plt.show(block=params['block_plots'])
Example #11
data_map = {
    "TalentID": {
        "key": "id",
        "val": lambda val: val
    },
    "Name": {
        "key": "name",
        "val": lambda val: val
    },
    "Class": {
        "key": "skill_tree",
        "val": lambda val: val
    },
    "S0": {
        "key": "unlock_stage",
        "val": lambda val: num(val)
    },
    "MaxLevel": {
        "key": "max_level",
        "val": lambda val: num(val)
    },
    "Note": {
        "key": "note",
        "val": lambda val: val
    },
    "Slot": {
        "key": "column",
        "val": calculate_tree_column
    },
    "TierNum": {
        "key": "tier",