Example No. 1
def populateGraph():
    conn = sqlite3.connect("sensor_data.db")
    conn.row_factory = sqlite3.Row
    c = conn.cursor()
    c.execute(
        "select humidity from humidity where strftime('%d', time) > '25' and strftime('%d', time) < '31'"
    )
    r = c.fetchall()
    hum = []
    for member in r:
        hum.append(member[0])
    c.execute(
        "select strftime('%m-%d %H:%M', time) from humidity where strftime('%d', time) > '25' and strftime('%d', time) < '31'"
    )
    r = c.fetchall()
    time = []
    for member in r:
        time.append(member[0])
    c.close()
    trace_high = go.Scatter(x=time,
                            y=hum,
                            name="Humidity",
                            line=dict(color='#17BECF'),
                            opacity=0.8)
    data = [trace_high]
    layout = dict(title="Humidity History",
                  xaxis=dict(range=['2017-06-23', '2017-06-30']))

    fig = dict(data=data, layout=layout)
    # Write the chart into the templates/ folder; render_template() resolves
    # names relative to that folder, so no "templates/" prefix is needed below.
    plotly.offline.plot(fig, filename="templates/humidity_graph.html")
    return render_template('humidity_graph.html')
Example No. 2
def get_values(monitor):
    data = json.loads(json.dumps(monitor))
    df_time = pd.json_normalize(pd.DataFrame(monitor)['trace'])
    df_values = pd.json_normalize(data, record_path=[['trace', 'sensors']])
    L0, L1, L2, R0, R1, R2, time, anomaly = [], [], [], [], [], [], [], []
    lov = [L0, L1, L2, R0, R1, R2]
    for x in range(len(df_time.index)):
        i = x * 6
        for j in range(6):
            lov[j].append(df_values['value'][i + j])
        date_time_obj = datetime.datetime.strptime(str(df_time['id'][x]),
                                                   '%H%M%S%d%m%Y')
        time.append(date_time_obj)
        anomaly.append(df_values['anomaly'][i])
    dict2 = {
        'L0': L0,
        'L1': L1,
        'L2': L2,
        'R0': R0,
        'R1': R1,
        'R2': R2,
        'time': time,
        'anomaly': anomaly
    }
    df_finito = pd.DataFrame(dict2)
    df_finito.drop_duplicates(keep=False, inplace=True)
    return df_finito
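A minimal input sketch, inferred from how get_values() indexes the payload; all field values are illustrative, only the 'trace', 'id', 'sensors', 'value' and 'anomaly' keys come from the code above:

monitor = [
    {
        "trace": {
            "id": "12000001012021",           # parsed with '%H%M%S%d%m%Y'
            "sensors": [                      # exactly six readings per record,
                {"value": 0.1, "anomaly": 0},  # mapped positionally to L0..L2, R0..R2
                {"value": 0.2, "anomaly": 0},
                {"value": 0.3, "anomaly": 0},
                {"value": 0.4, "anomaly": 0},
                {"value": 0.5, "anomaly": 0},
                {"value": 0.6, "anomaly": 0},
            ],
        }
    }
]
df = get_values(monitor)   # one row per trace record: L0..L2, R0..R2, time, anomaly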
Example No. 3
def populateHumGraph():
    conn = get_db()
    conn.row_factory = sqlite3.Row
    c = conn.cursor()
    c.execute("select humidity from humidity where strftime('%d', time) > '25' and strftime('%d', time) < '31'")
    hum_data = c.fetchall()
    hum = []
    for member in hum_data:
        hum.append(member[0])
    c.execute("select strftime('%m-%d %H:%M', time) from humidity where strftime('%d', time) > '25' and strftime('%d', time) < '31'")
    hum_time = c.fetchall()
    c.close()
    time = []
    for member in hum_time:
        time.append(member[0])
    trace_hum = go.Scatter(
                    x=time,
                    y=hum,
                    name = "Humidity",
                    line = dict(color = '#17BECF'),
                    opacity = 0.8)
    data = [trace_hum]
    layout = dict(
        title = "Humidity History",
        xaxis = dict(
            range = ['2017-06-23','2017-06-30'],
            autotick = False,
            tick0 = 0,
            dtick = 6
            )
    )
    fig = dict(data=data, layout=layout)
    h_graph = plotly.offline.plot(fig, show_link=False, filename="humidity_graph.html", output_type='div', auto_open=False)
    return (h_graph)
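Since this variant returns the chart as an HTML <div> string (output_type='div') rather than writing a page to disk, it is normally handed to a template. A minimal sketch, assuming a Flask app and a humidity.html template (both hypothetical):

@app.route('/humidity')
def humidity():
    # the div must be rendered unescaped in the template: {{ h_graph|safe }}
    return render_template('humidity.html', h_graph=populateHumGraph())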
Example No. 4
    def save_record(self, save_option=False, filename=None):
        '''
            Build a pandas DataFrame with the information recorded at each
            iteration.
        '''

        t, c, time, objective_function = [], [], [], []

        for result in self.result_list_:
            t.append(result.iteration_)
            c.append(result.c_)
            time.append(result.time_)
            objective_function.append(result.objective_function_)

        df = {}
        df['iteration'] = t
        df['c'] = c
        df['time'] = time
        df['objective_function'] = objective_function
        df = pd.DataFrame(
            df, columns=['iteration', 'c', 'time', 'objective_function'])

        if save_option:
            df.to_csv('../result/%s.csv' % filename, index=False, sep=',')

        return df
Example No. 5
    def elapsed_time(self, seconds, suffixes=['y', 'w', 'd', 'h', 'm', 's'], add_s=False, separator=' '):
        """
        Takes an amount of seconds and turns it into a human-readable amount of time.
        """
        # the formatted time string to be returned
        time = []

        # the pieces of time to iterate over (days, hours, minutes, etc)
        # - the first piece in each tuple is the suffix (d, h, w)
        # - the second piece is the length in seconds (a day is 60s * 60m * 24h)
        parts = [(suffixes[3], 60 * 60),
                 (suffixes[4], 60),
                 (suffixes[5], 1)]

        # for each time piece, grab the value and remaining seconds, and add it to
        # the time string
        for suffix, length in parts:
            value = int(seconds / length)
            if value > 0:
                seconds = seconds % length
                time.append('%s%s' % (str(value),
                           (suffix, (suffix, suffix + 's')[value > 1])[add_s]))
            if seconds < 1:
                break

        return separator.join(time)
Example No. 6
    def elapsed_time(self,
                     seconds,
                     suffixes=['y', 'w', 'd', 'h', 'm', 's'],
                     add_s=False,
                     separator=' '):
        """
        Takes an amount of seconds and turns it into a human-readable amount of time.
        """
        # the formatted time string to be returned
        time = []

        # the pieces of time to iterate over (days, hours, minutes, etc)
        # - the first piece in each tuple is the suffix (d, h, w)
        # - the second piece is the length in seconds (a day is 60s * 60m * 24h)
        parts = [(suffixes[3], 60 * 60), (suffixes[4], 60), (suffixes[5], 1)]

        # for each time piece, grab the value and remaining seconds, and add it to
        # the time string
        for suffix, length in parts:
            value = int(seconds / length)
            if value > 0:
                seconds = seconds % length
                time.append('%s%s' %
                            (str(value),
                             (suffix,
                              (suffix, suffix + 's')[value > 1])[add_s]))
            if seconds < 1:
                if len(time) == 0:
                    time.append('<1s')
                break

        return separator.join(time)
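The method never reads self, so it can be exercised on any instance; a quick sanity check (the instance name obj is illustrative):

obj.elapsed_time(3725)   # -> '1h 2m 5s'
obj.elapsed_time(0.5)    # -> '<1s' (this variant, unlike the one above, never returns an empty string)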
Example No. 7
    def read_minispec(self, fname):
        """
        Read an exponential decay from a Bruker minispec .dps text data file.
        Return time and values.
        """
        with open(fname, 'r') as F:
            lines = F.readlines()
        time = []
        values = []
        for line in lines:
            if line.startswith("#"):  # skip comments
                continue
            v = line.strip().split()
            try:
                i = int(v[0])
                t = float(v[1])
                val = float(v[2])
            except (IndexError, ValueError):
                raise Exception("lines should contain 3 numbers")
            time.append(t)
            values.append(val)
        if i != len(time) - 1:
            print("*** Warning possible size mismatch in %s" % fname)
        self.v = np.array(values)
        if self.do_sane:
            self.v = irfft(sane(ifft(rfft(self.v)), 10))
        t = np.array(time)
        self.t = t * 1e2  # if time in 1/10 seconds instead of milliseconds
        print("#################### self.t is {0} ".format(self.t))
Example No. 8
def load_data():
    user = 1  # specify which user
    print("Loading dataset...")
    data, labels, time = [], [], []
    for us in range(user, user + 1):
        da, ti, la = [], [], []
        for ges_inst in range(1, 21):
            fx = path + "U" + "%02d" % us + "/"
            for ges in range(1, 21):
                fy = fx + "%02d" % ges + "/"
                fz = fy + '%02d' % ges_inst + '.txt'
                d = np.loadtxt(fz)
                t = d[:, 2]
                d = d[:, 3:]
                da.append(d)
                ti.append(t)
                la.append(ges)
        data.append(da)
        labels.append(la)
        time.append(ti)
    data = np.array(data)
    labels = np.array(labels)
    time = np.array(time)

    return data, labels, time
Example No. 9
    def get(self, request, pk, format=None):
        session = [Session.objects.get(id=pk)]
        first_page_load = PageLoad.objects.filter(session__in=session)[0]
        first_time = first_page_load.created_at

        all_clicks = MouseClick.objects.filter(session__in=session)
        mouse_moves = MouseMove.objects.filter(session__in=session)

        result_list = sorted(chain(all_clicks, mouse_moves),
                             key=lambda instance: instance.created_at)

        events = []
        for obj in result_list:
            time = ((obj.created_at - first_time).seconds) * 1000
            if isinstance(obj, MouseMove):
                events.append(("MouseMove", obj.coordinates,
                               obj.page.path_name, time))
            else:
                events.append(("MouseClick", obj.y, obj.x,
                               obj.page.path_name, time))

        time = []
        for obj in result_list:
            time.append(((obj.created_at - first_time).seconds) * 1000)

        clicks = all_clicks.values_list('y', 'x', 'page__path_name')

        return Response({
            'clicks': clicks,
            'time': time,
            'height': first_page_load.page.height,
            'moves': events,
            'url': session[0].tracker.url
        })
Example No. 10
def test(l):
    failure = []
    time = []
    for i in range(len(l)):
        [c, t] = tst("0123456789", l[i])
        time.append([l[i], t])
    time = np.array(time)
    return failure, time
Example No. 11
def combine_results(results, amount_pe, resolution):
    grid = []
    time = []
    for result in results:
        time.append(result[0])
        grid.append(result[1])
    grid = np.array(grid).reshape((resolution, ))
    time = np.mean(time)
    return (time, grid)
Example No. 12
def doplot(label=""):
  f=open("data"+label,"rb")
  data=cPickle.load(f)
  f.close()
  inttypes=data['inttypes']
  for inttype in inttypes:
    N=data['N']
    logamins=data['logamins']
    time=[]
    time_disp=[]
    eerr=[]
    eerr_disp=[]
    minerr=[]
    maxerr=[]
    for logamin in logamins:
      t=numpy.array(map(lambda x: x[0],data[inttype][logamin]))
      de=numpy.array(map(lambda x: abs(x[1]),data[inttype][logamin]))
      time.append(numpy.average(t))
      time_disp.append(numpy.std(t))
      eerr.append(numpy.average(de))
      minerr.append(numpy.min(de))
      maxerr.append(numpy.max(de))
      eerr_disp.append(numpy.std(de))
    data[inttype]["logamins"]=numpy.array(logamins)  
    data[inttype]["time"]=numpy.array(time)  
    data[inttype]["timedisp"]=numpy.array(time_disp)
    data[inttype]["eerr"]=numpy.array(eerr)
    data[inttype]["eerrdisp"]=numpy.array(eerr_disp)
    data[inttype]["minerr"]=numpy.array(minerr)
    data[inttype]["maxerr"]=numpy.array(maxerr)
  
  linestyles=["-.","-",":","--"]
  colors="rgbcyk"
    
  f=pyplot.figure(figsize=(8,10))
  ax=f.add_subplot(211)
  for i,inttype in enumerate(inttypes):
    logamins=data[inttype]['logamins']
    eerr=data[inttype]['eerr']
    minerr=data[inttype]['minerr']
    maxerr=data[inttype]['maxerr']
    ax.loglog(10**logamins,eerr,'r'+linestyles[i])
#    ax.semilogy(fdims,minerr,colors[i]+":",lw=0.5)
#    ax.semilogy(fdims,maxerr,colors[i]+":",lw=0.5)
    ax.set_ylabel("|E-E0|/E0")
  ax=f.add_subplot(212)
  for i,inttype in enumerate(inttypes):
    logamins=data[inttype]['logamins']
    eerr=data[inttype]['time']
    ax.loglog(10**logamins,eerr,'r'+linestyles[i])
    ax.set_xlabel("log minimum a")
    ax.set_ylabel("wallclock time (s)")
    
  pyplot.savefig("plummer_w_binary"+label+'.eps')
Example No. 13
def write_summary(file1):
    content = loadfn(file1, cls=MontyDecoder)
    formula, energy, time = [], [], []
    # col_name holds references to these lists, which the loop below fills
    col_name = {
        "Formula": formula,
        "Energy": energy,
        "Time": time,
    }
    for dct in content:
        formula.append(dct["formula"])
        energy.append(dct["energy"] / len(dct["elements"]))
        time.append(dct["time"])
    df = pd.DataFrame(col_name)
    print(tabulate(df, headers="keys", tablefmt="psql"))
Example No. 14
def combine_results(results, amount_pe):
    time = []
    resolution = sum(map(lambda x: len(x[1]), results))
    grid = np.zeros((resolution,), dtype=np.float64)
    index = 0
    for result in results:
        time.append(result[0])
        grid[index:index+len(result[1])] = result[1]
        index += len(result[1])
    residual = np.linalg.norm(
                    exact_solution_end(resolution)-grid, np.inf)
    time = np.mean(time)
    return (time, residual)
Example No. 15
def write_summary(file1):
    content = loadfn(file1, cls=MontyDecoder)
    formula, energy, time = [], [], []
    # col_name holds references to these lists, which the loop below fills
    col_name = {
        'Formula': formula,
        'Energy': energy,
        'Time': time,
    }
    for dct in content:
        formula.append(dct['formula'])
        energy.append(dct['energy'] / len(dct['elements']))
        time.append(dct['time'])
    df = pd.DataFrame(col_name)
    print(tabulate(df, headers='keys', tablefmt='psql'))
Example No. 16
    def sample(Algorithm_, Network_, test):
        """
        Runs the Algorithm on Networks of the given type, varying n.
        After every execution, runs test on the resultant Network_.

        @param Algorithm_: a subclass of Synchronous_Algorithm, the algorithm to test.
        @param Network_: a subclass of Network, the network on which to benchmark the algorithm.
        @param test: a function that may throw an assertion error 
        @return: (size, time, comm) where size is a list of values of network size,
        and time and comm are lists of corresponding values of time and communication complexities.
        """
        size = []
        time = []
        comm = []
        n, lgn = 2, 1
        max_time = 0
        max_comm = 0
        print "Sampling n = ...",
        while max(max_time, max_comm) < 10000 and n < 500:

            #Progress
            if n == 2:
                print "\b\b\b\b"+str(n)+"...",
            else:
                print "\b\b\b\b, "+str(n)+"...",

            cur_times = []
            cur_comms = []
            for i in xrange( max(4, 2+lgn) ):
                A = Algorithm_(params={'draw': False, 'verbosity': Algorithm.SILENT})
                x = Network_(n)
                A(x)
                try:
                    test(x)
                except AssertionError, e:
                    print "Algorithm Failed"
                    return None
                else:
                    size.append(n)
                    cur_comms.append(A.message_count)
                    comm.append(A.message_count)

                    if issubclass(Algorithm_, Synchronous_Algorithm):
                        cur_times.append(A.r)
                        time.append(A.r)
                        max_time = max(max_time, A.r)
                    max_comm = max(max_comm, A.message_count)

            #TODO here, decide whether need more samples for this n, based on cur_times and cur_comms variance
            n *= 2
            lgn += 1
        return size, time, comm
Example No. 17
def plot_results(results):
    for i, sub in enumerate(results):
        time = []
        grid = []
        for j, point in enumerate(sub):
            time.append(point[0])
            residual = np.linalg.norm(
                exact_solution(int(1 / (2 * math.pi * all_delta_t[j])),
                               grid_resolution[j]) - point[1], 2)
            grid.append(residual)
        plt.loglog(time,
                   grid,
                   colors[delays[i]],
                   label=delay_to_label[delays[i]])
Example No. 18
    def test_calculations(self):
        client = self.getPIWebApiClient()
        data_server = client.dataServer.get_by_path("\\\\PISRV1", None, None)
        expression = "'sinusoid'*2 + 'cdt158'"
        time = list()
        time.append("*-1d")
        values = client.calculation.get_at_times(web_id=data_server.web_id,
                                                 expression=expression,
                                                 time=time)

        expression2 = "'cdt158'+tagval('sinusoid','*-1d')"
        values2 = client.calculation.get_at_times(web_id=data_server.web_id,
                                                  expression=expression2,
                                                  time=time)

        pass
Example No. 19
    def __init__(self, csv_file):
        with open(csv_file) as filecsv:
            time = []
            alpha = []
            beta = []
            gamma = []
            self.reader = csv.reader(filecsv, delimiter=",")
            next(self.reader)
            for row in self.reader:
                time.append(float(row[0]))
                alpha.append(float(row[1]))
                beta.append(float(row[2]))
                gamma.append(float(row[3]))
        self.t = np.array(time)
        self.alpha = np.array(alpha)
        self.beta = np.array(beta)
        self.gamma = np.array(gamma)
Example No. 20
def doplot():
    f = open("data", "rb")
    data = cPickle.load(f)
    f.close()
    inttypes = [
        Huayno.inttypes.HOLD_DKD, Huayno.inttypes.CC_KEPLER,
        Huayno.inttypes.CC, Huayno.inttypes.EXTRAPOLATE
    ]
    for inttype in inttypes:
        fdims = [1.6, 2.0, 2.3, 2.6, 2.8, 3.0]
        time = []
        time_disp = []
        eerr = []
        eerr_disp = []
        for fd in fdims:
            t = numpy.array(map(lambda x: x[0], data[inttype][fd]))
            de = numpy.array(map(lambda x: abs(x[1]), data[inttype][fd]))
            time.append(numpy.average(t))
            time_disp.append(numpy.std(t))  # standard deviation as the dispersion measure
            eerr.append(numpy.average(de))
            eerr_disp.append(numpy.std(de))
        data[inttype]["fdims"] = fdims
        data[inttype]["time"] = time
        data[inttype]["timedisp"] = time_disp
        data[inttype]["eerr"] = eerr
        data[inttype]["eerrdisp"] = eerr_disp

    linestyles = ["-.", "-", ":", "--"]

    f = pyplot.figure(figsize=(8, 10))
    ax = f.add_subplot(211)
    for i, inttype in enumerate(inttypes):
        fdims = data[inttype]['fdims']
        eerr = data[inttype]['eerr']
        ax.semilogy(fdims, eerr, 'r' + linestyles[i])
        ax.set_ylabel("|E-E0|/E0")
    ax = f.add_subplot(212)
    for i, inttype in enumerate(inttypes):
        fdims = data[inttype]['fdims']
        eerr = data[inttype]['time']
        ax.semilogy(fdims, eerr, 'r' + linestyles[i])
        ax.set_xlabel("fractal dimension")
        ax.set_ylabel("wallclock time (s)")

    pyplot.savefig('fractal_dimension_dE_time.eps')
Example No. 21
def read_from_res(path):
    """Read data from a file ending with ".res".

    Args:
        path (str): Path to the .res file.

    Returns:
        incremental_metrics (str): JSON array with the throughput and latency
            statistics at each sampling time.
    """
    time, throughput, min_lat, lat_25th, median_lat, avg_lat, lat_75th, lat_90th, lat_95th, lat_99th, max_lat= [], [], [], [], [], [], [], [], [], [], []
    with open(path) as csvfile:
        reader = csv.DictReader(csvfile, delimiter=',')
        for row in reader:
            time.append(float(row['time(sec)']))
            throughput.append(float(row[' throughput(req/sec)']))
            min_lat.append(float(row[' min_lat(ms)']))
            lat_25th.append(float(row[' 25th_lat(ms)']))
            median_lat.append(float(row[' median_lat(ms)']))
            avg_lat.append(float(row[' avg_lat(ms)']))
            lat_75th.append(float(row[' 75th_lat(ms)']))
            lat_90th.append(float(row[' 90th_lat(ms)']))
            lat_95th.append(float(row[' 95th_lat(ms)']))
            lat_99th.append(float(row[' 99th_lat(ms)']))
            max_lat.append(float(row[' max_lat(ms)']))

    incremental_metrics = [{
        "time": t,
        "throughput": tp,
        "latency": {
            "min": ml,
            "l_25": l25,
            "median": mel,
            "avg": al,
            "l_75": l75,
            "l_90": l90,
            "l_95": l95,
            "l_99": l99,
            "max": mal
        }
    } for t, tp, ml, l25, mel, al, l75, l90, l95, l99, mal in zip(
        time, throughput, min_lat, lat_25th, median_lat, avg_lat, lat_75th,
        lat_90th, lat_95th, lat_99th, max_lat)]
    return json.dumps(incremental_metrics)
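A hedged usage sketch (the .res path is illustrative):

metrics = json.loads(read_from_res("oltpbench_output.res"))
print(metrics[0]["throughput"], metrics[0]["latency"]["l_95"])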
Example No. 22
    def vel_control(self):
        try: 
            t0 = perf_counter()
            t1 = 0
            i = 0
            x_act = []
            x_des = []
            time = []
            force_data = []
            while True:
                if self.start_vel_control.value == 0:
                    continue
                # print
                t = perf_counter() - t0
                # U = dX_d + self.__k*(X_d - X_cur)
                U = array(self.dXd_shared[:]) + self.__k*(array(self.Xd_shared[:]) - array(self.X_cur_shared[:3]))
                self.rob_c.speedL([U[0],U[1],U[2],0,0,0], self.__max_operational_acc, 1/self.__freq_log)
                if t -t1 < 1/self.__freq_log:
                    sleep(1/self.__freq_log - (t -t1))
                if t - t1 >= 1/self.__freq_log and np.linalg.norm(array(self.Xd_shared[:])) > 0.001:
                    x_act.append(list(self.X_cur_shared[:3]))
                    x_des.append(list(self.Xd_shared[:]))
                    force_data.append(self.__force_from_sensor.value)
                    time.append(t)
                    self.glob.x_act = x_act
                    self.glob.x_des = x_des
                    self.glob.time = time
                    self.glob.force_data = force_data
                    t1 = t
                    print(f'{i} {array(self.X_cur_shared[2]).round(3)} {array(self.Xd_shared[2]).round(3)} {self.__force_from_sensor.value} check_motion \
                    {np.linalg.norm(array(self.Xd_shared[:])-array(self.X_cur_shared[:3]))} \
                    {np.linalg.norm(array(self.dXd_shared[:]) - array(self.dX_cur_shared[:3]))} \
                    {abs(self.Fd_real_shared.value - self.__force_from_sensor.value)<=0.01} {self.check_finish_motion()}', end = '\r', flush = True)

        except KeyboardInterrupt:
            self.rob_c.speedL([0,0,0,0,0,0], self.__max_operational_acc, 1)
            print('Robot is stopped')
            self.draw_plots(x_act, x_des, time, force_data)
Example No. 23
def subject(user_id=None,
            chartID='chart_ID',
            chart_type='line',
            chart_height='100%'):

    chart = {"renderTo": chartID, "type": chart_type, "height": chart_height}
    person = Person.query.filter_by(user_id=user_id).filter(Person.mood)
    schema = PersonSchema(many=True)
    result = schema.dump(person)
    #pprint(result.data)
    result_data = result.data
    mood = []
    for i in result_data:
        mood.append(i['mood'])
    print('mood:', mood)

    pointStart = Person.query.filter_by(user_id=user_id).filter(
        Person.timestamp)
    pointStartSchema = PersonSchema(many=True)
    pointStartResult = pointStartSchema.dump(pointStart)
    #pprint(pointStartResult.data)
    pointStartData = pointStartResult.data
    #the datetime data has a T in the formatting. The following code correctly
    #formats the datetime data to be used as highcharts data
    time = []
    dateAndTime = []
    dateTime_almost = []
    dateTime_formatted = []
    for i in pointStartData:
        # dateAndTime.append(i['timestamp'])
        time.append(i['timestamp'])
    for i in time:
        dateAndTime.append(i.split('T'))
    for i in dateAndTime:
        dateTime_almost.append(' '.join(i))
    for i in dateTime_almost:
        dateTime_formatted.append(i[:10])

    print('formatted', dateTime_formatted)

    #Here you format the mood and datetime of each db entry into a list of two-element lists
    #output: [[x,y], [a,b]]
    # timeAndMood = [[] for x in range(len(dateTime_formatted ))]
    # for i in range(len(dateTime_formatted)):
    #     timeAndMood[i].append(mood[i])
    #     timeAndMood[i].append(dateTime_formatted[i])

    #print('@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@')

    # for x, y in mood, dateTime_formatted:
    #    timeAndMood.append([x, y])
    #print('timeAndMood:', timeAndMood)

    #testList = []

    # for i in dateTime_formatted:
    #    x = datetime.strptime(i, '%Y-%M-%d').date()
    #    testList.append(x)
    # print('TESTTESTTEST:',testList)
    # print('type:', testList[1])

    series = [{
        'data': mood  #timeAndMood,
    }]

    title = {"text": 'Emotion Over Time'}
    xAxis = {"title": {'text': 'Time Point'}}
    # 'categories': [dateTime_formatted] }
    yAxis = {
        "title": {
            "text": 'Emotion Integer'
        },
        "categories": ['', 'Sad', 'Neurtral', 'Happy'],
        'className': 'yLabels'
    }

    params = {
        'chartID': chartID,
        'chart': chart,
        'series': series,
        'title': title,
        'xAxis': xAxis,
        'yAxis': yAxis
    }
    chartParams = [params]

    # plotOptions = plotOptions)
    return render_template('subjectDetails.html',
                           title=title,
                           chartParams=chartParams)
Example No. 24
# Assumes: from time import perf_counter; import numpy as np;
# import matplotlib.pyplot as plt; and a user-supplied banded_matvec(a, b, c, x).
s, time, time2 = [], [], []
for i in range(5, 9):
    n = 4**i
    a = np.random.rand(1, n - 1)
    b = np.random.rand(1, n)
    c = np.random.rand(1, n - 1)
    x = np.random.rand(1, n)
    s.append(n)
    # start the timer
    start = perf_counter()
    # call banded_matvec
    banded_matvec(a[0], b[0], c[0], x[0])

    # stop the timer and store the wall time
    end = perf_counter()
    time.append(end - start)

    # build the equivalent dense tridiagonal matrix
    A = np.zeros((n, n))
    for j in range(n):
        A[j][j] = b[0][j]
        if j < n - 1:
            A[j][j + 1] = c[0][j]
            A[j + 1][j] = a[0][j]

    # time the dense matrix-vector product for comparison
    start = perf_counter()
    np.matmul(A, x[0])
    end = perf_counter()
    time2.append(end - start)
plt.loglog(s, time2)
plt.loglog(s, time)
plt.show()
Example No. 25
    group.send_command(cmd)

m.set_led_color("red")

if enable_logging:
  # Stop logging
  log_file = group.stop_log()

  time = []
  position = []
  velocity = []
  effort = []
  # iterate through log
  for entry in log_file.feedback_iterate:
    time.append(entry.transmit_time)
    position.append(entry.position)
    velocity.append(entry.velocity)
    effort.append(entry.effort)

  # Offline Visualization
  # Plot the logged position feedback
  plt.figure(101)
  plt.plot(time, position)
  plt.title('Position')
  plt.xlabel('time (sec)')
  plt.ylabel('position (rad)')
  plt.grid(True)

  # Plot the logged velocity feedback
  plt.figure(102)
Example No. 26
    #r20 = load_h5_step_slim('r20', ii+1)
    #alpha2_tmp = -3*r20*c_tmp*c_tmp

    # Try getting the mean alpha and c variables
    #try:
    cmu_tmp = load_h5_step_slim('c1_mu', ii + 1)
    r10_mu = load_h5_step_slim('alpha_mu', ii + 1)
    cn_mu_t[0:ns, ii] = cmu_tmp
    alpha_mu_t[:ns, ii] = 1 * r10_mu

    #r20_mu = load_h5_step_slim('r20_mu', ii+1)
    #alpha2_mu_t[:ns,ii] = -3*cmu_tmp*cmu_tmp*r20_mu
    tnow = load_h5_step_slim('t1', ii + 1)
    if isinstance(tnow, (np.ndarray, )):
        time.append(np.datetime64('2099-01-01'))
    else:
        time.append(tnow.astype('<M8[ns]'))

    #try:
    #    time.append(load_h5_step_slim('t1',ii+1).astype('<M8[ns]'))
    #except:
    print(time[ii])

    #except:
    #    print('No mean variables saved...')

    #print(a0_tmp.max())
    a0_t[0:ns, ii] = a0_tmp
    amax_t[0:ns, ii] = amax_tmp
    tmax_t[0:ns, ii] = tmax_tmp
Example No. 27
def cosimulate(input_file, step_actions, variables_iterable, attributes, units, time_resolution = 1, show_error = False):
	'''
	Inputs:  input_file   		(str) 			-> Path to input file.
             step_actions 		(fcn) 			-> Function with instructions to manipulate variables during simulation.
	 	     variables_iterable (list/dict) 	-> constant, related to the attribute of the object.
	 	     attributes 		(list/dict/num)	-> constant or list of constants, related to the attribute of the object.
	 	     units 				(int)			-> constant, related to the units of the simulation.
	 	     time_resolution	(int)			-> sampling time in seconds,
	 	     show_error			(Bool)			-> prints the errors of the simulation on the terminal.
	Outputs: time 				(float[])		-> vector of time in hours.
			 vectors 			(nD Array)		-> n-dimensional array with the value the attributes requested by the user.
			 errors 			(tuple)			-> Values of the errors related to mass balance. [0] -> Runoff error [1] -> Flow error [2] -> Quality error
	Purpose: Runs a SWMM simulation, modify parameters during the simulation, retrieves information during the simulation.
			 Raises Exception if there is an error.
	'''

	time_resolution = int(time_resolution)

	# Parameter Errors.
	if units not in _unit_constants:
		raise _ERROR_MSG_INCOHERENT
	elif time_resolution < 1:
		raise _ERROR_MSG_INCOHERENT


	# Dynamic handling of data types
	if(variables_iterable == None):
		v_size = 0
	else:
		if type(variables_iterable) == dict:
			variables = variables_iterable.keys()
		elif type(variables_iterable) in (list, tuple):
			variables = variables_iterable
		else:
			raise _ERROR_MSG_INCOHERENT

	# Dynamic handling of attributes data type.
	if type(attributes) in (tuple, list):
		ATTRIBUTE_ITERABLE = True
		# Check errors
		for a in attributes:
			if a not in _attribute_constants:
				raise _ERROR_MSG_ATR
	# Single attribute request
	elif type(attributes) == int:
		ATTRIBUTE_ITERABLE = False
		# Check errors
		if attributes not in _attribute_constants:
			raise _ERROR_MSG_ATR
	else:
		return _ERROR_MSG_INCOHERENT

	# Allocating memory for the requested vectors
	if ATTRIBUTE_ITERABLE:
		vectors = []
		# Allocate space in the matrix
		for i in range(len(attributes)):
			vectors.append( [[] for j in range(len(variables))] )
	else:
		vectors = [[] for i in range(len(variables))] # Creates MxN Array -> M: len(variables) N: len(attributes)

	# Run starts
	open_file(input_file, False)  # Step 1
	start(WRITE_REPORT, False)  # Step 2
	time = []

	while( not is_over() ):
		# ----------------- Run step and run the actions given by the user -----------
		time_step = int(get_time()*3600*100)/100 # Get time_step in seconds as an integer
		if ( int(time_step % time_resolution) == 0): # Checks sampling time
			time.append(get_time())
		run_step()  # Step 3

		# -------- Retrieves the variables requested by the user --------
		# The variables are saved in accordance with the time resolution variable
		if ( int(time_step % time_resolution) == 0): # Checks sampling time
			if (variables_iterable != None):
				for i in range(len(variables)):
					# Dynamic handling
					if ATTRIBUTE_ITERABLE:
						for j in range(len(attributes)):
							vectors[j][i].append(  get(variables[i], attributes[j], units) )
					else:
						vectors[i].append( get(variables[i], attributes, units) )

		# -------- Implements Control Actions if exist -----------
		if (step_actions != None):
			step_actions()

	errors = finish()

	# --------- Prints simulation error if requested ----------

	if(show_error):
		print "\n		Runoff error: %.2f %%\n\
		Flow routing error: %.2f %%\n \
		Quality routing error: %.2f %%\n" % (errors[0], errors[1], errors[2])

	return time, vectors, errors
Example No. 28
excel_name = os.path.join(excel_folder, 'volpy_data.xlsx')
# run function
multiple_dfs(dfs, fig_name, excel_name, 2, text)

#%% This produces Fig 6b
# T vs cpu
memory = []
time = []
n_procs = [1, 2, 4, 8]
for n_proc in n_procs:
    mm = np.load(
        '/home/nel/Code/NEL_LAB/volpy/figures/figure3_performance/time_memory_proc{}.npz'
        .format(n_proc),
        allow_pickle=True)['arr_0'].item()
    memory.append(max(mm['%dprocess' % n_proc][0][0]))
    time.append(mm['%dprocess' % n_proc][0][1])

time = np.array(time)

plt.figure(figsize=(4, 4))
plt.title('parallelization speed')
plt.bar((n_procs), (time[:, 0]), width=0.5, bottom=0)
plt.bar((n_procs), (time[:, 1]), width=0.5, bottom=(time[:, 0]))
plt.bar((n_procs), (time[:, 2]), width=0.5, bottom=(time[:, 0] + time[:, 1]))
plt.bar((n_procs), (time[:, 3]),
        width=0.5,
        bottom=(time[:, 0] + time[:, 1] + time[:, 2]))
plt.legend([
    'motion correction', 'memory mapping', 'segmentation', 'spike extraction'
],
           frameon=False)
Example No. 29
def cosimulate(input_file,
               step_actions,
               variables_iterable,
               attributes,
               units,
               time_resolution=1,
               show_error=False):
    '''
    Inputs:  input_file   		(str) 			-> Path to input file.
             step_actions 		(fcn) 			-> Function with instructions to manipulate variables during simulation.
              variables_iterable (list/dict) 	-> constant, related to the attribute of the object.
              attributes 		(list/dict/num)	-> constant or list of constants, related to the attribute of the object.
              units 				(int)			-> constant, related to the units of the simulation.
              time_resolution	(int)			-> sampling time in seconds,
              show_error			(Bool)			-> prints the errors of the simulation on the terminal.
    Outputs: time 				(float[])		-> vector of time in hours.
             vectors 			(nD Array)		-> n-dimensional array with the value the attributes requested by the user.
             errors 			(tuple)			-> Values of the errors related to mass balance. [0] -> Runoff error [1] -> Flow error [2] -> Quality error
    Purpose: Runs a SWMM simulation, modify parameters during the simulation, retrieves information during the simulation.
             Raises Exception if there is an error.
    '''

    time_resolution = int(time_resolution)

    # Parameter Errors.
    if units not in _unit_constants:
        raise _ERROR_MSG_INCOHERENT
    elif time_resolution < 1:
        raise _ERROR_MSG_INCOHERENT

    # Dynamic handling of data types
    if variables_iterable is None:
        variables = []  # nothing requested; keeps len(variables) valid when allocating vectors
    else:
        if type(variables_iterable) == dict:
            variables = variables_iterable.keys()
        elif type(variables_iterable) in (list, tuple):
            variables = variables_iterable
        else:
            raise _ERROR_MSG_INCOHERENT

    # Dynamic handling of attributes data type.
    if type(attributes) in (tuple, list):
        ATTRIBUTE_ITERABLE = True
        # Check errors
        for a in attributes:
            if a not in _attribute_constants:
                raise _ERROR_MSG_ATR
    # Single attribute request
    elif type(attributes) == int:
        ATTRIBUTE_ITERABLE = False
        # Check errors
        if attributes not in _attribute_constants:
            raise _ERROR_MSG_ATR
    else:
        return _ERROR_MSG_INCOHERENT

    # Allocating memory for the requested vectors
    if ATTRIBUTE_ITERABLE:
        vectors = []
        # Allocate space in the matrix
        for i in range(len(attributes)):
            vectors.append([[] for j in range(len(variables))])
    else:
        vectors = [
            [] for i in range(len(variables))
        ]  # Creates MxN Array -> M: len(variables) N: len(attributes)

    # Run starts
    open_file(input_file, False)  # Step 1
    start(WRITE_REPORT, False)  # Step 2
    time = []

    while (not is_over()):
        # ----------------- Run step and run the actions given by the user -----------
        time_step = int(get_time() * 3600 *
                        100) / 100  # Get time_step in seconds as an integer
        if (int(time_step % time_resolution) == 0):  # Checks sampling time
            time.append(get_time())
        run_step()  # Step 3

        # -------- Retrieves the variables requested by the user --------
        # The variables are saved in accordance with the time resolution variable
        if (int(time_step % time_resolution) == 0):  # Checks sampling time
            if (variables_iterable != None):
                for i in range(len(variables)):
                    # Dynamic handling
                    if ATTRIBUTE_ITERABLE:
                        for j in range(len(attributes)):
                            vectors[j][i].append(
                                get(variables[i], attributes[j], units))
                    else:
                        vectors[i].append(get(variables[i], attributes, units))

        # -------- Implements Control Actions if exist -----------
        if (step_actions != None):
            step_actions()

    errors = finish()

    # --------- Prints simulation error if requested ----------

    if (show_error):
        print "\n		Runoff error: %.2f %%\n\
        Flow routing error: %.2f %%\n \
        Quality routing error: %.2f %%\n" % (errors[0], errors[1], errors[2])

    return time, vectors, errors
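A hedged usage sketch; the input file name and the SI / FLOW_RATE constants stand in for whichever values the wrapper's _unit_constants and _attribute_constants actually define:

t, vectors, errors = cosimulate("model.inp",
                                None,                # no per-step control actions
                                ["node-1", "node-2"],
                                FLOW_RATE,           # single attribute: vectors[i] is the series for variables[i]
                                SI,
                                time_resolution=60,
                                show_error=True)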
Example No. 30
from sys import stderr
# use perf_counter so the timer is not shadowed by the `time` list below
from time import perf_counter

from .ggen import Generator
from .hamilton import hamilton

for i in [10, 20, 30, 40, 50, 55, 60, 65, 70, 75]:
    print(i, end=" ", file=stderr)
    graph = Generator(i, 0.3).list

    time = []
    for _ in range(3):
        start = perf_counter()
        hamilton(graph)
        stop = perf_counter()
        time.append(stop - start)

    print(sum(time) / 3, end=" ", file=stderr)

    print(file=stderr)