Code example #1
File: eclipse.py Project: peppelorum/boxeeplay
def getItemsFromMRSS(url):
    result = []
    data = utilities.getData(url)
    total_result = re.compile("<opensearch:totalResults>(.*?)</opensearch:totalResults>", re.DOTALL | re.IGNORECASE).search(str(data)).group(1)
    for i in range(int(total_result)):
        if i == 0:
            # First response: collect the <item> blocks
            result = re.compile(r'<item [^>]*>[\W\w]+?</item>').findall(data)
        else:
            # Later requests advance the feed offset via the 'start' parameter
            data = utilities.getData(url + '&start=' + str(i + 1))
            result = result + re.compile(r'<item [^>]*>[\W\w]+?</item>').findall(data)

    for node in result:
        title = re.compile("<title>(.*?)</title>", re.DOTALL).findall(str(node))[0]
        print(title)

        image = re.compile(r'<media:thumbnail url="(.*?)"', re.DOTALL).findall(str(node))[0]
        print(image)
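As a rough alternative sketch (not from the boxeeplay project): the same fields could be pulled with xml.etree.ElementTree instead of regexes, assuming the feed uses the standard Media RSS and OpenSearch namespaces; the helper name and namespace map below are illustrative.

import xml.etree.ElementTree as ET

# Assumed namespace URIs for the media: and opensearch: prefixes used above
NS = {
    'media': 'http://search.yahoo.com/mrss/',
    'opensearch': 'http://a9.com/-/spec/opensearch/1.1/',
}

def print_items_from_mrss_xml(xml_text):
    # Sketch: extract each item's title and thumbnail URL with a real XML parser
    root = ET.fromstring(xml_text)
    for item in root.iter('item'):
        print(item.findtext('title'))
        thumb = item.find('media:thumbnail', NS)
        if thumb is not None:
            print(thumb.get('url'))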
Code example #2
    def get_rf(self, ng_name, all_group_names, simulation_filename):

        # From the connections, get neuron positions and weights from the input group to the target group.
        # For second-order neurons, get the first-level RFs first, then use them to get the
        # second-level weights. w_coord is cortical coordinates, z_coord is visual field coordinates.

        connection_filename = simulation_filename.replace('results','connections')
        connection_data = getData(connection_filename)

        group_analysis_dict = self.analyze_groups(all_group_names, connection_data)

        positions_input = connection_data['positions_all']['z_coord'][group_analysis_dict['input']]
        input_space = np.asarray(positions_input)

        receptive_fields = {}
        receptive_fields['input_space'] = input_space

        # Get first-order connection RFs
        for group, connection in zip(group_analysis_dict['first_order'], group_analysis_dict['first_order_connections']):

            conn_mx = connection_data[connection]['data']
            conn_mx_scaled = conn_mx / np.max(conn_mx)
            receptive_fields[group] = np.transpose(conn_mx_scaled)

        # Get second-order connection RFs
        for group in group_analysis_dict['second_order']:
            connections = group_analysis_dict['second_order_connections_dict'][group]
            
            rf2 = None
            for connection in connections:
                # Matrices are M1 = G1 x IN and M2 = G1 x G2. The correct calculation
                # is M2.T * M1, which yields M3 with dimension G2 x IN.

                G1_name_stub = connection[:connection.find('__to__')]
                G1_name = [g for g in receptive_fields.keys() if g.endswith(G1_name_stub)]
                M1 = receptive_fields[G1_name[0]]
                
                # Scale second order connection
                conn_mx = connection_data[connection]['data']
                M2 = conn_mx / np.max(conn_mx)

                M3 = np.dot(np.transpose(M2), M1)

                # Accumulate over connections (rf2 starts as None above)
                if rf2 is None:
                    rf2 = M3
                else:
                    rf2 += M3

            # Second-order RF = sum over connections of (Conn 2nd).T x (RF 1st), normalized to its maximum
            receptive_fields[group] = csr_matrix(rf2 / np.max(rf2))

        if self.show_neuron_idx:
            show_neuron_idx = self.show_neuron_idx
            self.show_rf(receptive_fields, ng_name, show_neuron_idx)
        
        return receptive_fields
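As a quick sanity check of the dimension bookkeeping described in the comments above (synthetic shapes, nothing from the connection files):

import numpy as np

# Illustrative sizes: IN input units, G1 first-order neurons, G2 second-order neurons
IN, G1, G2 = 100, 30, 20
M1 = np.random.rand(G1, IN)   # first-order RF matrix, G1 x IN
M2 = np.random.rand(G1, G2)   # connection matrix, G1 x G2

M3 = np.dot(np.transpose(M2), M1)   # (G2 x G1) . (G1 x IN) -> G2 x IN
assert M3.shape == (G2, IN)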
Code example #3
    def __init__(self, vm=True, raster=True, spectrum=True, coherence=True, transfer_entropy=True):

        # Set visible columns
        self.vm = vm
        self.raster = raster
        self.spectrum = spectrum
        self.coherence = coherence
        self.transfer_entropy = transfer_entropy
        self.ncols = vm + raster + spectrum + coherence + transfer_entropy  # booleans sum as 0/1

        # get data
        self.data = ut.getData(os.path.join(showFigure.path_to_data, showFigure.filename))

        # get visible rows
        self.neuron_groups = list(self.data['spikes_all'].keys())
        self.anat_config = self.data['Anatomy_configuration']
        self.nrows = len(self.neuron_groups)
Code example #4
File: recipeClasses.py Project: thortun/FoodRepo
    def __init__(self, url, knownRecipes=[]):
        """Initializes."""
        super(AllRecipesRecipe, self).__init__()  # Initialize
        self.url = url
        data = u.getData(url)  # Fetch the data
        if data is None:
            self.isBroken = True  # Mark the recipe as broken
            return  # Skip the rest of the initialization
        else:
            soup = BeautifulSoup(data, 'html.parser')  # Make soup

            self.findName(soup)
            if self.recipeName in knownRecipes:  # If the recipe is already known, abort immediately
                self.isBroken = True
                return None
            self.findTime(soup)  # Find the time
            self.findIngredients(soup)
            self.findSteps(soup)
Code example #5
    def get_internal_image(self, ng_name, simulation_filename, path):
        # Pick a neuron group, get the RF for each neuron, weight the RFs with the
        # response rates from the simulation, and show the resulting internal image.
        full_filepath = os.path.join(path, simulation_filename)
        data = getData(full_filepath)
        simulation_time = data['runtime']
        all_group_names = data['number_of_neurons'].keys()

        assert ng_name in all_group_names, 'Neuron group not found'

        # RF matrices are G x IN
        receptive_fields = self.get_rf(ng_name, all_group_names, full_filepath)

        # Compute the internal representations below by multiplying the RFs with the activities
        ng_spike_freq_data = data['spikes_all'][ng_name]['count'] / simulation_time
        ng_receptive_fields = receptive_fields[ng_name]
        ng_spikes = np.dot(ng_spike_freq_data, ng_receptive_fields.todense())
        internal_image = ng_spikes / np.max(ng_spikes)

        self.show_rf(receptive_fields, ng_name, internal_image=internal_image)
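A shape-level sketch of the weighting step above (illustrative sizes, not taken from any data file): the spike-rate vector has one entry per neuron in the group and the RF matrix is G x IN, so the product lands in input (visual-field) coordinates.

import numpy as np

G, IN = 20, 100                          # illustrative: G neurons, IN input units
rates = np.random.rand(G)                # spike count / runtime per neuron
rf = np.random.rand(G, IN)               # receptive fields, G x IN

internal_image = np.dot(rates, rf)       # (G,) . (G x IN) -> (IN,)
internal_image = internal_image / np.max(internal_image)   # normalize as above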
Code example #6
File: mass_fluc.py Project: johnbridstrup/analysis
import numpy as np
import json
import matplotlib.pyplot as plt
from utilities import getData

# masses = {}
# numbers = {}
# run_masses = {}
# run_numbers = {}
# run_ts = {}

folder_path = 'data/mFluc'
data = getData(folder_path, 'N')
plt.figure()
for key, sample in data.items():
    # plt.figure()
    # plt.plot(sample['t'][1:],sample['M_norm'][1:])
    # plt.title('Mass fraction vs. t (N = {})'.format(key))
    # plt.figure()
    # plt.plot(sample['t'],sample['M_dev'])
    # plt.title('Mass dev (relative to N) vs t (N = {})'.format(key))

    plt.plot(sample['t'][1:], sample['M_dev'][1:], label='{}'.format(key))
    plt.legend()
    plt.title('Relative mass deviation vs t')
plt.figure()
for key, sample in data.items():
    # plt.figure()
    # plt.plot(sample['t'][1:],sample['M_norm'][1:])
    # plt.title('Mass fraction vs. t (N = {})'.format(key))
    # plt.figure()
Code example #7
import matplotlib.pyplot as plt
from utilities import getData
from matplotlib.ticker import FormatStrFormatter
import pandas as pd


def find_idx(data, val):
    """Return the index of the last element of data not exceeding val (data assumed ascending)."""
    for idx, i in enumerate(data):
        if i > val:
            return idx - 1


conc_folder = 'data/crowders-local/conc'
crowd_folder = 'data/crowders-local'

concData = getData(conc_folder, 'Co', runs=True)
crowdData = getData(crowd_folder, 'phi', runs=True)

fig, axes = plt.subplots(1, 2)
pidx = 0
for key, data in concData.items():
    N = data['N']
    axes[pidx].plot(data['t'][:30], [i / N for i in data['M'][:30]])
    for tkey, run in data['runs'].items():
        idx = find_idx(run['t'], data['t'][30])
        axes[pidx].plot(run['t'][:idx], [i / N for i in run['M'][:idx]],
                        linestyle='--',
                        linewidth=0.5)
    axes[pidx].set_title('M vs t ($c_0$ = {})'.format(key))
    axes[pidx].xaxis.set_major_formatter(FormatStrFormatter('%.3f'))
    axes[pidx].set_ylim(top=1)
Code example #8
    def myData(self, pathname, filename):
        data = ut.getData(os.path.join(pathname, filename))
        return data
Code example #9
File: k2a_sweep.py Project: johnbridstrup/analysis
from utilities import getData, half_time, sigmoid, half_index
import matplotlib.pyplot as plt
import numpy as np
import math
from scipy.optimize import curve_fit
from scipy.interpolate import CubicSpline

#folder = '/Users/john/Development/Dissertation/analysis/data/k2a_sweep'
folder = '/Users/john/Development/Dissertation/analysis/data/k2a_sweep/n3/expanded'
data = getData(folder, 'k2', runs=True)

for key, sample in data.items():
    data[key]['t_half'] = half_time(sample['M'], sample['t'])
    R = math.pow(sample['conc'], sample['n2'] - 1) * sample['k2'] / sample['a']
    plt.figure(1)
    plt.scatter(R, data[key]['t_half'])
    mMax = sample['M'][-1]
    half_guess = sample['t'][half_index(sample['M'])]
    k_guess = mMax / (40 * half_guess)
    guess = [mMax, k_guess, half_guess]
    fit, _ = curve_fit(sigmoid, sample['t'], sample['M'], guess)
    data[key]['k'] = fit[1]
    thav = 0
    thsq = 0
    kav = 0
    ksq = 0
    kcb = 0
    for subkey, run in sample['runs'].items():
        th = half_time(run['M'], run['t'])
        data[key]['runs'][subkey]['t_half'] = th
        thav += th
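The `sigmoid` model (like `half_time` and `half_index`) comes from the project's own `utilities` module, which is not shown on this page. A minimal sketch of a logistic form consistent with the initial guess `[mMax, k_guess, half_guess]` (amplitude, rate, midpoint) is given below; this is an assumption, not the project's definition.

import numpy as np

def sigmoid(t, m_max, k, t_half):
    # Assumed logistic form: amplitude m_max, growth rate k, midpoint t_half
    return m_max / (1.0 + np.exp(-k * (t - t_half)))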