Example 1
def localizer(q):  ## 2016-11-07
    # def main():
    llst = []
    try:
        discovery = watson_developer_cloud.DiscoveryV1(
            '2016-11-07',
            username='******',
            password='******')
        start_time = tim()  # tim is time.time, aliased at import (see Example 8)

        # environments = discovery.get_environments()
        # environments = [x for x in environments['environments']]
        # environment_id = environments[0]['environment_id']
        # print(environment_id)
        # collection = discovery.list_collections(environment_id)
        # collections = [x for x in collection['collections']]
        # collection_id = collections[0]['collection_id']
        # print(collection_id)

        #### hard coding the ids to make it faster
        environment_id = '168946e2-4f0b-4398-8a56-b6799d99c2c3'
        # collection_id = '5d59fead-1f8f-46bd-8ee5-c6418bb961e5' ##iitd
        collection_id = 'a65e74f6-dd7a-4684-a2dc-7019722c90b8'  ##mit

        q = query_aug(q)
        ll = query(discovery, environment_id, collection_id, q)

        filepath = "/home/brojo/Flask/flask/localizer/html/"
        timepath = "/home/brojo/Flask/flask/localizer/time/"

        print("getting time")

        for l in ll:
            lst = []
            # print("---------------")
            # print(l[0])
            # print(l[1])
            print("---------------------------------")
            time, text2 = get_time(l[1], l[0], filepath, timepath)
            lst.append(l[1])  ##filename
            lst.append(time)
            lst.append(text2)  ###added on 15th april--text
            llst.append(lst)

        end_time = tim()
        execution_time = end_time - start_time
        print("Total time taken " + str(execution_time) + " seconds")
        print(
            "--------------------------------------------------------------------------------------------------------"
        )
        return llst
    except Exception:
        return llst
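A minimal usage sketch (the query string is illustrative): localizer returns a list of [filename, time, text] triples, which is how Example 4 consumes it.

# Illustrative only -- the query string is made up; each result is a
# [filename, time, text] triple (see how Example 4 unpacks it).
for filename, seek_time, text in localizer("binary search tree"):
    print(filename, seek_time)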
Example 2
def localizer(q):  ## 2016-11-07
    # def main():
    discovery = watson_developer_cloud.DiscoveryV1(
        '2016-11-07',
        username='******',
        password='******')
    start_time = tim()

    # environments = discovery.get_environments()
    # environments = [x for x in environments['environments']]
    # environment_id = environments[0]['environment_id']
    # print(environment_id)
    # collection = discovery.list_collections(environment_id)
    # collections = [x for x in collection['collections']]
    # collection_id = collections[0]['collection_id']
    # print(collection_id)

    #### hard coding the ids to make it faster
    environment_id = '168946e2-4f0b-4398-8a56-b6799d99c2c3'
    # collection_id = '8baa0d9c-57e8-479e-970c-8f95fca99ba5' ##old
    collection_id = '9df12949-305b-4b4f-90a1-bf6e5e43c3ff'

    ll = query(discovery, environment_id, collection_id, q)

    filepath = "/home/brojo/Flask/flask/localizer/html/"
    timepath = "/home/brojo/Flask/flask/localizer/time/"

    print("getting time")
    llst = []

    for l in ll:
        lst = []
        print("---------------")
        print(l[0])
        print(l[1])
        print("---------------------------------")
        time = get_time(l[1], l[0], filepath, timepath)
        lst.append(l[1])  ##filename
        lst.append(time)
        lst.append(l[2])  ###added on 15th april--text
        llst.append(lst)

    end_time = tim()
    execution_time = end_time - start_time
    print("Total time taken " + str(execution_time) + " seconds")
    print(
        "--------------------------------------------------------------------------------------------------------"
    )
    return llst
Example 3
import math
import time

import numpy as np
from scipy import interpolate

import MTeasyPy


def alpBetSpectraBM(kt, alpha, beta, back, params, NBM, nsnaps, MTE):
    # NOTE: several names in the original snippet (alphaA, side, step, k,
    # Nbefore, nF, biROut, betaA) were never defined. Assumptions made here:
    # `alpha` and `beta` are the numbers of grid points over the
    # Fergusson-Shellard (alpha, beta) triangle, `Nbefore` is the NBM
    # argument, `k` is the total momentum kt, and nF comes from MTE.nF().
    nF = MTE.nF()        # number of fields (assumed MTE API)
    side = beta          # grid points in beta
    step = 1.0 / side
    alphaGrid = np.linspace(-1.0 + step, 1.0 - step, alpha)
    Nbefore = NBM        # e-folds inside the horizon before the solver starts

    biAOut = np.empty((0, max(nsnaps, 1)))  # dimensionless bispectrum rows
    biROut = np.empty((0, max(nsnaps, 1)))  # reduced bispectrum rows
    alphaA = np.array([])
    betaA = np.array([])
    times = np.array([])

    Nend = back[-1, 0]

    # Hubble rate along the background trajectory
    Hin = np.zeros(np.size(back[:, 0]))
    for jj in range(np.size(back[:, 0])):
        Hin[jj] = MTE.H(back[jj, 1:])

    # e-fold of horizon exit for the scale kt/3, from a spline of N against aH
    aH = np.exp(back[:, 0]) * Hin
    ordering = np.argsort(aH)
    Nexit = interpolate.splev(kt / 3.,
                              interpolate.splrep(aH[ordering],
                                                 back[ordering, 0], s=1e-15),
                              der=0)

    if nsnaps in (0, 1):
        snaps = np.array([Nend])
        nsnaps = 1
    else:
        snaps = np.linspace(Nexit - 2.0, Nend, nsnaps)

    for l in range(np.size(alphaGrid)):
        alpha = alphaGrid[l]
        for j in range(side + 1):
            print("l", l, "j", j)
            beta = j * step
            if -(1 - beta) < alpha < 1 - beta:
                k1 = kt / 2 - beta * kt / 2.
                k2 = kt / 4 * (1 + alpha + beta)
                k3 = kt / 4 * (1 - alpha + beta)
                # start early enough that the longest mode is Nbefore
                # e-folds inside the horizon
                Nstart = Nexit - max(math.log(kt / k1), math.log(kt / k2),
                                     math.log(kt / k3)) - Nbefore
                backExitMinus = np.zeros(2 * nF)
                for i in range(1, 2 * nF + 1):
                    backExitMinus[i - 1] = interpolate.splev(
                        Nstart,
                        interpolate.splrep(back[:, 0], back[:, i], s=1e-15),
                        der=0)

                # run the solver for this triangle
                t = np.concatenate((np.array([Nstart]), snaps))
                timebefore = time.time()
                threePt = MTeasyPy.alphaEvolve(t, k1, k2, k3, backExitMinus, 0)
                zzz = threePt[:, :5]
                dim = np.abs(zzz[:, 4] * (k1 * k2 * k3) ** 2)
                biA = np.array([])
                biR = np.array([])
                for ii in range(1, nsnaps + 1):
                    # reduced bispectrum: 5/6 * B / (P1 P2 + P1 P3 + P2 P3)
                    bi = 5.0 / 6.0 * zzz[ii, 4] / (
                        zzz[ii, 2] * zzz[ii, 3] + zzz[ii, 1] * zzz[ii, 2]
                        + zzz[ii, 1] * zzz[ii, 3])
                    biR = np.append(biR, bi)
                    biA = np.append(biA, dim[ii])
                biAOut = np.vstack((biAOut, biA))
                biROut = np.vstack((biROut, biR))
                betaA = np.append(betaA, beta)
                alphaA = np.append(alphaA, alpha)
                times = np.append(times, time.time() - timebefore)
    return (alphaA, betaA, biAOut, biROut, times)
Example 4
def search():
    try:
        start_time = tim()
        q = request.form['query']
        q = q.lower()
        q = q.replace("logn", "log n")
        q = re.sub(r'\W+', ' ', q)

        print("searched query is ")
        print(q)
        llst = []

        ll = localize.localizer(q)

        for l in ll:
            lst = []
            file = "/static/" + str(l[0]) + ".webm"
            lst.append(file)
            lst.append(l[1])
            lst.append(l[2])  ####added on 15th april
            llst.append(lst)
            # print(file)
            # print(l[1])

        # lst = []
        # lst.append("/static/Lec-01-Brief_Overview_of_the_course.webm")
        # lst.append(205)
        # llst.append(lst)
        # lst = []
        # lst.append("/static/Lec-11-Deuteron-.webm")
        # lst.append(25)
        # llst.append(lst)
        # lst = []
        # lst.append("/static/Lec-16_Theories_of_nuclear_forces.webm")
        # lst.append(589)
        # llst.append(lst)
        end_time = tim()
        total_time = end_time - start_time
        print("Time to Response = {} seconds".format(total_time))

        return jsonify(query=q, result=llst)
        # path = os.path.abspath(q)
        # return render_template('index.html', llist=llst, query=q)
    except IOError as e:
        print(e)
Example 5
def f(alpha, x0fac, x1fac, CE, CI, sum_a):
    seed()
    CEonCI = CE / CI
    epsilonXCE = Jepsilon * CE
    b = x1fac * (pareto(alpha, CI) + 1) + x0fac
    tf0 = tim()
    # mean of ensembles a,b is CI,CE
    while (abs(sum_a - CEonCI * sum(b)) > epsilonXCE
           and tim() - tf0 < timeout):
        b1 = x1fac * (pareto(alpha, CI) + 1) + x0fac
        if abs(sum_a - CEonCI * sum(b1)) < abs(sum_a - CEonCI * sum(b)):
            b = b1


    #    print(sum_a, sum(b), abs(sum_a - CEonCI * sum(b)) / abs(sum_a))
    if tim() - tf0 > timeout:
        with nfailures_tightbalance.get_lock():
            nfailures_tightbalance.value += 1
    return b
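A sketch of how f is driven in practice, mirroring the Pool_/partial call in Example 12; Jepsilon, timeout and nfailures_tightbalance are module globals the function expects, and every value below is illustrative.

# Illustrative driver for f(), modeled on Example 12. All values are
# assumptions; Jepsilon, timeout and nfailures_tightbalance are the module
# globals f() reads, and must exist before the worker processes start.
from functools import partial
from multiprocessing import Pool, Value
from time import time as tim

import numpy
from numpy.random import pareto, seed

Jepsilon = 1e-3
timeout = 10.0
nfailures_tightbalance = Value('i', 0)

alpha, x0fac, x1fac, CE, CI = 1.5, 0.1, 0.9, 400, 100
samples_ex = x1fac * (pareto(alpha, (500, CE)) + 1) + x0fac
with Pool(4) as p:
    samples_in = numpy.array(
        p.map(partial(f, alpha, x0fac, x1fac, CE, CI),
              numpy.sum(samples_ex, 1)))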
Example 6
def stat(date, name):
    start = tim()
    data = []
    results = []
    response = requests.get(
        'http://iss.moex.com/iss/history/engines/stock/markets/shares/boards/tqbr/securities.xml?date={}'
        .format(date))
    soup = BeautifulSoup(response.text, 'lxml')
    a = soup.findAll('row')
    for i in a:
        if not i: continue
        if not i.has_attr('shortname'): continue
        if i['shortname'] == name:
            data.append(float(i['close']))

    if data:
        # largest single-day rise and fall in the close price
        max_day = 0
        min_day = 0
        for i in range(len(data) - 1):
            diff = data[i + 1] - data[i]
            if diff > max_day:
                max_day = diff
            if diff < min_day:  # the original's `else: continue` skipped this
                min_day = diff

        data.sort()
        min_price = data[0]   # avoid shadowing the min/max builtins
        max_price = data[-1]
        results.append([data])
    end = tim()
    return (start, end, results)
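A hedged usage sketch; the date and the ticker short name below are illustrative, not checked against the MOEX ISS data.

# Illustrative call only; '2021-03-01' and 'Sberbank' are made-up arguments.
start, end, results = stat('2021-03-01', 'Sberbank')
print('fetched in {:.2f} s, {} result set(s)'.format(end - start, len(results)))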
Example 7
def test():
    image_name = os.environ.get('imageName')
    image_uuid = test_lib.lib_get_image_by_name(image_name).uuid
    l3_name = os.environ.get('l3VlanNetworkName1')

    l3_net_uuid = test_lib.lib_get_l3_by_name(l3_name).uuid
    l3_uuid_list = [l3_net_uuid]
    vm_creation_option = test_util.VmOption()
    conditions = res_ops.gen_query_conditions('type', '=', 'UserVm')
    instance_offering_uuid = res_ops.query_resource(res_ops.INSTANCE_OFFERING, conditions)[0].uuid
    vm_name = '10k_vm-' + str(time.time())  # time.tim() in the original is not a real attribute
    vm_creation_option.set_instance_offering_uuid(instance_offering_uuid)
    vm_creation_option.set_l3_uuids(l3_uuid_list)
    vm_creation_option.set_image_uuid(image_uuid)
    #vm_creation_option.set_data_disk_uuids(disk_offering_uuids)
    vm_creation_option.set_name(vm_name)
    vm = zstack_vm_header.ZstackTestVm()
    vm.set_creation_option(vm_creation_option)
    vm.create()
    test_obj_dict.add_vm(vm)
 
    time.sleep(3)
    vm.destroy()
    test_util.test_pass('Create/Destroy VM successfully')
Example 8
# coding: utf-8
import json, sys, os, watson_developer_cloud
from time import clock  # unused here; note time.clock was removed in Python 3.8
from time import time as tim

discovery = watson_developer_cloud.DiscoveryV1(
    '2016-11-07',
    username='******',
    password='******')
start_time = tim()

environments = discovery.get_environments()
environments = [x for x in environments['environments']]
environment_id = environments[0]['environment_id']
# print(environment_id)
collection = discovery.list_collections(environment_id)
collections = [x for x in collection['collections']]
collection_id = collections[1]['collection_id']
print(collection_id)

#### hard coding the ids to make it faster
environment_id = '168946e2-4f0b-4398-8a56-b6799d99c2c3'
# collection_id = '8baa0d9c-57e8-479e-970c-8f95fca99ba5' ##old
# collection_id = '9df12949-305b-4b4f-90a1-bf6e5e43c3ff'
collection_id = '5d59fead-1f8f-46bd-8ee5-c6418bb961e5'  ##iitd
Example 9
def crawl_machine_learning(start_index: int, sort_order: str):
    # conn = connect_database()

    DBU = RDS_utils()

    check_axv = arxiv_id_check(DBU)

    machine_learning_categories = [
        'cs.CV', 'cs.CL', 'cs.LG', 'cs.AI', 'cs.NE', 'stat.ML', 'cs.MA'
    ]

    STEP = 100
    articles_per_minute = STEP * 2

    article_len = articles_per_minute

    # if start_index == -1:
    #     start_index = Article.get_n_articles(conn) - STEP

    logging.info('crawling start')
    logging.info('start index : ' + str(start_index))
    logging.info('sort_order : ' + sort_order)

    update_paper, insert_paper, insertfail, updatefail = 0, 0, 0, 0
    while article_len == articles_per_minute:
        # query 100 results per iteration
        # wait 30 seconds per query
        try:
            start = time.time()
            articles = arxivpy.query(search_query=machine_learning_categories,
                                     start_index=start_index,
                                     max_index=start_index +
                                     articles_per_minute,
                                     results_per_iteration=STEP,
                                     wait_time=5,
                                     sort_by='lastUpdatedDate',
                                     sort_order=sort_order)
            # crawling log
            # logging.info('last: ' + articles[-1].get('published', ''))
            logging.info(str(start_index + STEP * 2) + ' articles crawled')

            # save articles
            for article in articles:
                data = Article(article, None).tolist()

                print("'{}' cralwed / arxiv_id : {}".format(data[1], data[0]))
                axvid, pubyear = data[0], data[6]
                data[1], qt = get_qtitle(data[1])  # title
                if pubyear:
                    qt = qt.strip() + str(pubyear)
                pid = None
                if isinstance(check_axv, dict):
                    if check_axv.get(axvid):
                        pid = check_axv[axvid]
                else:
                    pid = DBU.get_pid_from_arXiv_id(axvid)
                if not pid:
                    pid = Duplication_check.check(qt)

                if pid:
                    ori = DBU.get_paper_by_p_id(pid)
                    data = Update_aXv_paper(ori, data)
                    if DBU.update_axv(pid, data):
                        update_paper += 1
                    else:
                        updatefail += 1
                else:
                    if DBU.insert_axv(data):
                        pid = DBU.get_pid_from_arXiv_id(axvid)
                        Duplication_check.insert_title_year(qt, pid)
                        insert_paper += 1
                    else:
                        insertfail += 1

            # compute start_index
            start_index += STEP * 2

            # compute article_len
            article_len = len(articles)
            e = int(time.time() - start)
            print('took {:02d}:{:02d}:{:02d} to crawl {} paper'.format(
                e // 3600, (e % 3600 // 60), e % 60, article_len))

            # sleep 30 seconds between queries
            time.sleep(MINUTE / 2)
        except Exception as e:
            logging.error(e)
            print("insert fail : {}, update fail : {}".format(
                insertfail, updatafail))
            DBU.DB.conn.close()
            return start_index, insert_paper, update_paper

    print("insert fail : {}, update fail : {}".format(insertfail, updatafail))
    DBU.DB.conn.close()
    return start_index, insert_paper, update_paper
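A hedged invocation sketch; 'descending' is assumed to be a value arxivpy's sort_order accepts, mirroring how the function forwards it.

# Illustrative driver; 'descending' is an assumed arxivpy sort_order value.
next_index, inserted, updated = crawl_machine_learning(0, 'descending')
logging.info('stopped at index {}, inserted {}, updated {}'.format(
    next_index, inserted, updated))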
Example 10
#!/usr/bin/env python
import os
import sys
import struct
import time
import operator
import threading

import RPi.GPIO as GPIO

GPIO.setmode(GPIO.BCM)  # Use broadcom numbering
# Set output pins - pin 26 BCM, pin 37 BOARD
GPIO.setup(26, GPIO.OUT)

# Set frequency to 70 Hz (note: the pulse widths below assume a 20 ms
# period, i.e. 50 Hz; at 70 Hz the period is ~14.3 ms)
# Full Stop - high for 1.5 ms -- 7.5% duty cycle
# Full Forward - high for 2.0 ms -- 10.0% duty cycle
# Full Backward - high for 1.0 ms -- 5.0% duty cycle
p = GPIO.PWM(26, 70)  # Set BCM pin 26 at 70Hz
p.start(7.5)  # Set duty cycle to 7.5% and start

# Spin for 30 seconds
startTime = time.time()  # the original's time.tim() is not a real attribute
try:
    while time.time() - startTime < 30:
        pass

except KeyboardInterrupt:
    pass
finally:
    p.stop()        # stop PWM output
    GPIO.cleanup()  # release the pins exactly once (the original cleaned up twice)
Example 11
def trigger_error():
    #  division_by_zero = 1 / 0
    time.tim()  # deliberately invalid: raises AttributeError to exercise error reporting
Example 12
def network(**kwargs):
    with nfailures_tightbalance.get_lock():
        nfailures_tightbalance.value = 0
    globals().update(kwargs)

    nest.ResetKernel()
    startbuild = tim()
    order = int(orderCE / (epsilon * 4))  #2500
    NE = 4 * order  # number of excitatory neurons
    NI = 1 * order  # number of inhibitory neurons
    N_neurons = NE + NI  # number of neurons in total
    #    N_rec = 50  # record from 50 neurons
    CE = int(epsilon * NE)  # number of excitatory synapses per neuron
    CI = int(epsilon * NI)  # number of inhibitory synapses per neuron
    #C_tot = int(CI + CE)  # total number of synapses per neuron
    neuron_params = {
        "C_m": 1.0,
        "tau_m": tauMem,
        "t_ref": 2.0,
        "E_L": 0.0,
        "V_reset": Vr,
        "V_m": 0.0,
        "V_th": theta
    }
    J_ex = J  # amplitude of excitatory postsynaptic potential
    J_in = -g * J_ex  # amplitude of inhibitory postsynaptic potential
    nu_th = theta / (J * CE * tauMem)
    nu_ex = eta * nu_th
    p_rate = 1000.0 * nu_ex * CE
    nest.SetKernelStatus({
        "resolution": dt,
        "print_time": True,
        "overwrite_files": True,
        "local_num_threads": nthreads
    })
    nest.SetDefaults("iaf_psc_delta", neuron_params)
    nest.SetDefaults("poisson_generator", {"rate": p_rate / CE})
    nodes_ex = nest.Create("iaf_psc_delta", NE)
    nodes_in = nest.Create("iaf_psc_delta", NI)
    noise = nest.Create("poisson_generator", CE)
    espikes = nest.Create("spike_detector")
    ispikes = nest.Create("spike_detector")
    nest.SetStatus(espikes,
                   [{
                       "label": "%s/alpha%.2fespikes" % (datafolder, alpha),
                       "withtime": True,
                       "withgid": True,
                       "to_file": True
                   }])
    nest.SetStatus(ispikes,
                   [{
                       "label": "%s/alpha%.2fispikes" % (datafolder, alpha),
                       "withtime": True,
                       "withgid": True,
                       "to_file": True
                   }])
    nest.CopyModel("static_synapse", "excitatory", {
        "weight": J_ex,
        "delay": delay
    })
    nest.CopyModel("static_synapse", "inhibitory", {
        "weight": J_in,
        "delay": delay
    })
    A_alpha = gamma(1 + alpha) * numpy.sin(numpy.pi * alpha / 2) / numpy.pi
    D = 0.5
    # pareto pdf = alpha*x1**alpha/(x-x0)**(alpha+1), defined for x > x0+x1
    x1fac = (2 * A_alpha * D / alpha)**(1 / alpha)
    x0fac = 1 - x1fac * alpha / (alpha - 1)
    J_noise_ex = J_ex * (x1fac * (pareto(alpha, (NE, CE)) + 1) + x0fac)
    J_noise_in = J_ex * (x1fac * (pareto(alpha, (NI, CE)) + 1) + x0fac)
    # correlated amplitude populations:
    samples_ex = x1fac * (pareto(alpha, (NE + NI, CE)) + 1) + x0fac
    #    print x0fac,x1fac

    with Pool_(nthreads) as p:
        samples_in = numpy.array(
            p.map(partial(f, alpha, x0fac, x1fac, CE, CI),
                  numpy.sum(samples_ex, 1), 1))
    J_ex_tot = J_ex * samples_ex
    J_in_tot = J_in * samples_in
    multimeter = nest.Create("multimeter")
    nest.SetStatus(
        multimeter, {
            "to_memory": False,
            "withtime": True,
            "record_from": ["V_m"],
            "to_file": True,
            "label": "%s/alpha%.2fV_m" % (datafolder, alpha)
        })
    #"interval": 100.0,
    nest.Connect(multimeter, nodes_ex + nodes_in)  # nodes_ex[:N_rec]+...
    nest.Connect(noise,
                 nodes_ex,
                 syn_spec={
                     "model": "excitatory",
                     "weight": J_noise_ex
                 })
    nest.Connect(noise,
                 nodes_in,
                 syn_spec={
                     "model": "excitatory",
                     "weight": J_noise_in
                 })
    nest.Connect(nodes_ex, espikes, syn_spec="excitatory")  # nodes_ex[:N_rec]
    nest.Connect(nodes_in, ispikes, syn_spec="excitatory")  # nodes_in[:N_rec]
    conn_params_ex = {'rule': 'fixed_indegree', 'indegree': CE}
    nest.Connect(nodes_ex,
                 nodes_ex + nodes_in,
                 conn_params_ex,
                 syn_spec={
                     "model": "excitatory",
                     "weight": J_ex_tot
                 })
    conn_params_in = {'rule': 'fixed_indegree', 'indegree': CI}
    nest.Connect(nodes_in,
                 nodes_ex + nodes_in,
                 conn_params_in,
                 syn_spec={
                     "model": "inhibitory",
                     "weight": J_in_tot
                 })
    endbuild = tim()
    nest.Simulate(simtime)
    endsimulate = tim()
    events_ex = nest.GetStatus(espikes, "n_events")[0]
    events_in = nest.GetStatus(ispikes, "n_events")[0]
    rate_ex = events_ex / simtime * 1000.0 / NE
    rate_in = events_in / simtime * 1000.0 / NI
    num_synapses = (nest.GetDefaults("excitatory")["num_connections"] +
                    nest.GetDefaults("inhibitory")["num_connections"])
    build_time = endbuild - startbuild
    sim_time = endsimulate - endbuild
    print("Number of tight balance failures: {0}".format(
        nfailures_tightbalance.value))
    print("alpha             : {0}".format(alpha))
    print("Number of neurons : {0}".format(N_neurons))
    print("Number of synapses: {0}".format(num_synapses))
    print("       Exitatory  : {0}".format(int(CE * N_neurons) + N_neurons))
    print("       Inhibitory : {0}".format(int(CI * N_neurons)))
    print("Excitatory rate   : %.2f Hz" % rate_ex)
    print("Inhibitory rate   : %.2f Hz" % rate_in)
    print("Building time     : %.2f s" % build_time)
    print("Simulation time   : %.2f s" % sim_time)
Example 13
def networkclassical(**kwargs):
    globals().update(kwargs)

    nest.ResetKernel()
    startbuild = tim()
    order = int(orderCE / (epsilon * 4))  #2500
    NE = 4 * order  # number of excitatory neurons
    NI = 1 * order  # number of inhibitory neurons
    N_neurons = NE + NI  # number of neurons in total
    #    N_rec = 50  # record from 50 neurons
    CE = int(epsilon * NE)  # number of excitatory synapses per neuron
    CI = int(epsilon * NI)  # number of inhibitory synapses per neuron
    #C_tot = int(CI + CE)  # total number of synapses per neuron
    neuron_params = {
        "C_m": 1.0,
        "tau_m": tauMem,
        "t_ref": 2.0,
        "E_L": 0.0,
        "V_reset": Vr,
        "V_m": 0.0,
        "V_th": theta
    }
    J_ex = J  # amplitude of excitatory postsynaptic potential
    J_in = -g * J_ex  # amplitude of inhibitory postsynaptic potential
    nu_th = theta / (J * CE * tauMem)
    nu_ex = eta * nu_th
    p_rate = 1000.0 * nu_ex * CE
    nest.SetKernelStatus({
        "resolution": dt,
        "print_time": True,
        "overwrite_files": True,
        "local_num_threads": nthreads
    })
    nest.SetDefaults("iaf_psc_delta", neuron_params)
    nest.SetDefaults("poisson_generator", {"rate": p_rate})  # `CE` gen's in 1
    nodes_ex = nest.Create("iaf_psc_delta", NE)
    nodes_in = nest.Create("iaf_psc_delta", NI)
    noise = nest.Create("poisson_generator")  #, CE) in the superdiffusive case
    espikes = nest.Create("spike_detector")
    ispikes = nest.Create("spike_detector")
    nest.SetStatus(espikes,
                   [{
                       "label": "%s/alpha%.2fespikes" % (datafolder, alpha),
                       "withtime": True,
                       "withgid": True,
                       "to_file": True
                   }])
    nest.SetStatus(ispikes,
                   [{
                       "label": "%s/alpha%.2fispikes" % (datafolder, alpha),
                       "withtime": True,
                       "withgid": True,
                       "to_file": True
                   }])
    nest.CopyModel("static_synapse", "excitatory", {
        "weight": J_ex,
        "delay": delay
    })
    nest.CopyModel("static_synapse", "inhibitory", {
        "weight": J_in,
        "delay": delay
    })
    multimeter = nest.Create("multimeter")
    nest.SetStatus(
        multimeter, {
            "to_memory": False,
            "withtime": True,
            "record_from": ["V_m"],
            "to_file": True,
            "label": "%s/alpha%.2fV_m" % (datafolder, alpha)
        })
    #"interval": 100.0,
    nest.Connect(multimeter, nodes_ex + nodes_in)  # nodes_ex[:N_rec]+...
    nest.Connect(noise, nodes_ex, syn_spec="excitatory")
    nest.Connect(noise, nodes_in, syn_spec="excitatory")
    nest.Connect(nodes_ex, espikes, syn_spec="excitatory")  # nodes_ex[:N_rec]
    nest.Connect(nodes_in, ispikes, syn_spec="excitatory")  # nodes_in[:N_rec]
    conn_params_ex = {'rule': 'fixed_indegree', 'indegree': CE}
    nest.Connect(nodes_ex, nodes_ex + nodes_in, conn_params_ex, "excitatory")
    conn_params_in = {'rule': 'fixed_indegree', 'indegree': CI}
    nest.Connect(nodes_in, nodes_ex + nodes_in, conn_params_in, "inhibitory")
    endbuild = tim()
    nest.Simulate(simtime)
    endsimulate = tim()
    events_ex = nest.GetStatus(espikes, "n_events")[0]
    events_in = nest.GetStatus(ispikes, "n_events")[0]
    rate_ex = events_ex / simtime * 1000.0 / NE
    rate_in = events_in / simtime * 1000.0 / NI
    num_synapses = (nest.GetDefaults("excitatory")["num_connections"] +
                    nest.GetDefaults("inhibitory")["num_connections"])
    build_time = endbuild - startbuild
    sim_time = endsimulate - endbuild
    print("alpha             : {0}".format(alpha))
    print("Number of neurons : {0}".format(N_neurons))
    print("Number of synapses: {0}".format(num_synapses))
    print("       Exitatory  : {0}".format(int(CE * N_neurons) + N_neurons))
    print("       Inhibitory : {0}".format(int(CI * N_neurons)))
    print("Excitatory rate   : %.2f Hz" % rate_ex)
    print("Inhibitory rate   : %.2f Hz" % rate_in)
    print("Building time     : %.2f s" % build_time)
    print("Simulation time   : %.2f s" % sim_time)