Example 1
    def write(self, detector):
        """Print all keys and values from detector structure

        they include also a metadata read from binary output file
        """
        for name, value in sorted(detector.__dict__.items()):
            # be careful not to check for np.array but for np.ndarray!
            if name not in {'data', 'data_raw', 'error', 'error_raw', 'counter'}:  # skip non-metadata fields
                line = "{:24s}: '{:s}'".format(str(name), str(value))
                print(line)
        # print some data-related statistics
        print(75 * "*")
        print("Data min: {:g}, max: {:g}".format(detector.data.min(), detector.data.max()))

        if self.options.details:
            # print data scatter-plot if possible
            if detector.dimension == 1:
                try:
                    from hipsterplot import plot
                    print(75 * "*")
                    print("Data scatter plot")
                    plot(detector.data_raw)
                except ImportError:
                    logger.warning("Detailed summary requires installation of hipsterplot package")
            # print data histogram if possible
            try:
                from bashplotlib.histogram import plot_hist
                print(75 * "*")
                print("Data histogram")
                plot_hist(detector.data_raw, bincount=70, xlab=False, showSummary=True)
            except ImportError:
                logger.warning("Detailed summary requires installation of bashplotlib package")

        return 0
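
Every example in this collection ultimately draws through hipsterplot.plot. A minimal, self-contained sketch of that call, under the assumption that hipsterplot is installed (pip install hipsterplot) and takes the y values first with optional x values, as the snippets here suggest:

import math
import random

from hipsterplot import plot

# Fake data: a noisy sine wave, so the sketch runs without any external input.
x_vals = list(range(200))
y_vals = [math.sin(x / 10.0) + random.uniform(-0.1, 0.1) for x in x_vals]

# The character counts control the size of the ASCII plot, as in the examples above.
plot(y_vals, x_vals, num_x_chars=70, num_y_chars=15)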
Example 2
def print_dashboard(time_series, time_string=None, use_fahrenheit=None):
    # print(chr(27) + "[2J")
    print(
        " +-----------------------------------------------------------------------------+\n",
        "|" + bcolors.OKBLUE +
        "          88   88 .dP\"Y8  dP\"\"b8 .dP\"Y8     88  88 oP\"Yb.  dP\"Yb "
        + bcolors.ENDC + "            |\n",
        "|" + bcolors.OKBLUE +
        "          88   88 `Ybo.\" dP   `\" `Ybo.\"     88  88 \"' dP' dP   Yb"
        + bcolors.ENDC + "            |\n",
        "|" + bcolors.OKBLUE +
        "          Y8   8P o.`Y8b Yb  \"88 o.`Y8b     888888   dP'  Yb   dP/" +
        bcolors.ENDC + "           |\n",
        "|" + bcolors.OKBLUE +
        "          `YbodP' 8bodP'  YboodP 8bodP'     88  88 .d8888  YbodP " +
        bcolors.ENDC + "            |\n",
        "+-----------------------------------------------------------------------------+",
        end="")
    print(bordered_append(time_series[0]["sourceInfo"]["siteName"]))
    # Each sensor records data points in different time series
    i = 0
    y_loc = 10
    for series in time_series:
        # Iterate through the time stamped data points we have
        data = []
        timestamp = []

        site_name = series["sourceInfo"]["siteName"]
        variable_description = series["variable"]["variableDescription"]
        param_code = series["variable"]["variableCode"][0]["value"]

        convert_to_fahr = False
        if use_fahrenheit:
            if "degrees Celsius" in variable_description:
                convert_to_fahr = True
                variable_description = variable_description.replace(
                    "degrees Celsius", "degrees Fahrenheit")

        for point in series["values"][0]["value"]:
            if convert_to_fahr:
                data.append(celsius_to_fahrenheit(float(point["value"])))
            else:
                data.append(float(point["value"]))
            timestamp.append(i)
            i = i + 1

        print(bcolors.OKBLUE, end='', flush=True)
        hipsterplot.plot(data, timestamp, num_x_chars=66, num_y_chars=8)
        print(bcolors.ENDC, end='', flush=True)

        print(bordered(param_code + " - " + variable_description))

    if i == 0:
        print(
            "No data available for this site, perhaps you entered a bad id or param code?"
        )
        exit()
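
The celsius_to_fahrenheit helper called above (and again in Example 4) is defined elsewhere in its project; a minimal implementation consistent with how it is used, shown here only for completeness, would be:

def celsius_to_fahrenheit(celsius):
    # Standard Celsius-to-Fahrenheit conversion; the callers pass a float
    # reading and append the returned float to the plotted series.
    return celsius * 9.0 / 5.0 + 32.0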
Example 3
def plot_residuals(dset):
    """Plot residuals

    Args:
        dset:   Dataset, information about model run.
    """
    log.out(f"Residuals at stage {dset.vars['stage']}")
    hipsterplot.plot(x_vals=dset.time.utc.datetime,
                     y_vals=dset.residual,
                     num_x_chars=console.columns() - 12,
                     num_y_chars=20)
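
Example 3 sizes the plot to the terminal with a project-specific console.columns() helper. Outside that codebase, the standard library can play the same role; a sketch using shutil (an assumed substitute, not part of the original snippet):

import shutil

import hipsterplot

def plot_to_terminal(x_vals, y_vals):
    # Leave 12 columns of margin for hipsterplot's y-axis labels,
    # mirroring the "- 12" used in Example 3.
    columns = shutil.get_terminal_size().columns
    hipsterplot.plot(y_vals, x_vals,
                     num_x_chars=max(columns - 12, 10),
                     num_y_chars=20)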
Example 4
def print_series_data(time_series,
                      time_string,
                      width=70,
                      height=15,
                      use_fahrenheit=None):
    # Each sensor records data points in different time series
    i = 0
    for series in time_series:
        # Iterate through the time stamped data points we have
        data = []
        timestamp = []

        site_name = series["sourceInfo"]["siteName"]
        variable_description = series["variable"]["variableDescription"]

        convert_to_fahr = False
        if use_fahrenheit:
            if "degrees Celsius" in variable_description:
                convert_to_fahr = True
                variable_description = variable_description.replace(
                    "degrees Celsius", "degrees Fahrenheit")

        print(site_name)
        print(variable_description)
        print("Displaying", time_string)

        for point in series["values"][0]["value"]:
            if convert_to_fahr:
                data.append(celsius_to_fahrenheit(float(point["value"])))
            else:
                data.append(float(point["value"]))
            timestamp.append(i)
            i = i + 1

        print('\033[94m')
        hipsterplot.plot(data,
                         timestamp,
                         num_x_chars=width,
                         num_y_chars=height)
        print('\033[0m')

    if i == 0:
        print(
            "No data available for this site, perhaps you entered a bad id or param code?"
        )
        exit()
Example 5
    def analyse(self, z):
        z = z[:, :, 1]
        d = (z > self.z0) & (z - self.z0 > 25)
        self.z0 = z
        xy = np.where(d.ravel())[0]
        if xy.shape[0] > 999:
            pass
            #self.laser_xi = 0
            #self.laser_yi = 0
        elif xy.shape[0] > 4:
            xy = np.transpose(np.unravel_index(xy, d.shape))
            clust = scan.fit_predict(xy)
            ind = clust == 0
            if ind.sum() > 1:
                xc = xy[ind, 0].mean()
                yc = xy[ind, 1].mean()
                xint = int(xc.round())
                yint = int(yc.round())
                self.campoints.append([xint, yint])
                self.x_vals.append(self.laser_xi)
                self.y_vals.append(self.laser_yi)
                #printr("%s %s" % (xint, yint))
                os.system('clear')
                x_plot = np.concatenate(([0, 600], xy[:, 0]))
                y_plot = np.concatenate(([0, 600], 600 - xy[:, 1]))
                plot(y_plot, x_plot, 20, 20)

        if self.laser_xi > 0 and self.laser_xi < self.npoints - 1:
            self.laser_xi += self.x_inc
        elif self.inc_change:
            self.x_inc = -1 * self.x_inc
            self.laser_xi += self.x_inc
            self.inc_change = False
        elif self.laser_yi < self.npoints - 1:
            self.laser_yi += 1
            self.inc_change = True
        else:
            self.laser_xi = 0
            self.laser_yi = 0
            #self.camera.stop_recording()
        laser_x = self.laser_cal_points[self.laser_xi]
        laser_y = self.laser_cal_points[self.laser_yi]
        open('/dev/spidev0.0', 'wb').write(tohex(laser_x))
        open('/dev/spidev0.1', 'wb').write(tohex(laser_y))
Example 6
    def write(self, estimator):
        """Print all keys and values from estimator structure

        they include also a metadata read from binary output file
        """
        for name, value in sorted(estimator.__dict__.items()):
            # skip non-metadata fields
            if name not in {'data', 'data_raw', 'error', 'error_raw', 'counter', 'pages'}:
                line = "{:24s}: '{:s}'".format(str(name), str(value))
                print(line)
        # print some data-related statistics
        print(75 * "*")

        for page_no, page in enumerate(estimator.pages):
            print("Page {} / {}".format(page_no, len(estimator.pages)))
            for name, value in sorted(page.__dict__.items()):
                # skip non-metadata fields
                if name not in {'data', 'data_raw', 'error', 'error_raw'}:
                    line = "\t{:24s}: '{:s}'".format(str(name), str(value))
                    print(line)
            print("Data min: {:g}, max: {:g}".format(page.data_raw.min(), page.data_raw.max()))
            print(75 * "-")

        if self.options.details:
            # print data scatter-plot if possible
            if estimator.dimension == 1 and len(estimator.pages) == 1:
                try:
                    from hipsterplot import plot
                    print(75 * "*")
                    print("Data scatter plot")
                    plot(estimator.data_raw)
                except ImportError:
                    logger.warning("Detailed summary requires installation of hipsterplot package")
            # print data histogram if possible
            try:
                from bashplotlib.histogram import plot_hist
                print(75 * "*")
                print("Data histogram")
                plot_hist(estimator.data_raw, bincount=70, xlab=False, showSummary=True)
            except ImportError:
                logger.warning("Detailed summary requires installation of bashplotlib package")

        return 0
Example 7
def run_bike(x, y, kernels=None, epochs=5):
    # Start my custom stuff
    d = Dense(x, 3)
    d2 = Dense(d, 1)
    # Assign Kernel matrix to weights
    if kernels:
        d.W = kernels[0]
    weg = d.W
    relu = ReLu()
    sq = Square()
    sess = Session()
    sess.add_node(d)
    sess.add_node(d2)
    sess.add_node(relu)
    print('\n')

    losses = []
    for i in range(epochs):
        '''
        x1 = d2.forward(x1)
        print 'shape:%s\n'%str(x1.shape),x1
        x1 =relu.forward(x1)
        print 'shape:%s\n'%str(x1.shape),x1
        '''
        #d.W = kernels[i]
        print "Weight matr "
        #print str(d.W)
        err = sess.step(x, y)
        #print "err na\n", err
        loss = np.mean(np.square(err))
        print('epoch ', i, "LOSS:", loss)
        losses.append(loss)
        print()
    hp.plot(d.W.flatten())

    return losses
Example 8
def sub_online_regress(blocks,
                       verbose=2,
                       group_sz_blocks=8,
                       max_shift=4,
                       only_16_shifts=True,
                       method='linreg',
                       numbits=8,
                       drop_first_half=False,
                       **sink):
    # drop_first_half=True, **sink):
    blocks = blocks.astype(np.int32)
    if only_16_shifts:
        shifts = SHIFT_PAIRS_16
        shift_coeffs = [_i16_for_shifts(*pair) for pair in shifts]
    else:
        shifts, shift_coeffs = all_shifts(max_shift=max_shift)
    encoder = OnlineRegressor(block_sz=blocks.shape[1],
                              verbose=verbose,
                              shifts=shifts,
                              shift_coeffs=shift_coeffs,
                              method=method,
                              numbits=numbits)

    # print "using group_sz_blocks: ", group_sz_blocks
    # print "using method: ", method
    # print "using nbits: ", numbits

    out = np.empty(blocks.shape, dtype=np.int32)
    if group_sz_blocks < 1:
        group_sz_blocks = len(blocks)  # global model

    ngroups = int(len(blocks) / group_sz_blocks)
    for g in range(ngroups):
        # if verbose and (g > 0) and (g % 100 == 0):
        #     print "running on block ", g
        start_idx = g * group_sz_blocks
        end_idx = start_idx + group_sz_blocks
        group = blocks[start_idx:end_idx]
        errs = encoder.feed_group(group.ravel())
        out[start_idx:end_idx] = errs.reshape(group.shape)
    out[end_idx:] = blocks[end_idx:]

    if verbose > 1:
        if method == 'linreg':
            if group_sz_blocks != len(blocks):
                import hipsterplot as hp  # pip install hipsterplot
                # hp.plot(x_vals=encoder.shift_coeffs, y_vals=encoder.best_idx_counts,
                hp.plot(encoder.best_idx_counts,
                        num_x_chars=len(encoder.shift_coeffs),
                        num_y_chars=12)
            else:
                coef_idx = np.argmax(encoder.best_idx_counts)
                coef = encoder.shift_coeffs[coef_idx]
                print "global linreg coeff: ", coef
        else:
            coeffs_counts = np.array(encoder.best_coef_counts.most_common())
            print "min, max coeff: {}, {}".format(coeffs_counts[:, 0].min(),
                                                  coeffs_counts[:, 0].max())
            print "most common (coeff, counts):\n", coeffs_counts[:16]
            # bias_counts = np.array(encoder.best_offset_counts.most_common())
            # print "most common (bias, counts):\n", bias_counts[:16]

            errs = np.array(encoder.errs)
            print "raw err mean, median, std, >0 frac: {}, {}, {}, {}".format(
                errs.mean(), np.median(errs), errs.std(), np.mean(errs > 0))

    if drop_first_half and method == 'gradient':
        keep_idx = len(out) // 2
        out[:keep_idx] = out[keep_idx:(2 * keep_idx)]
        print "NOTE: duplicating second half of data into first half!!" \
            " (blocks {}:)".format(keep_idx)

    return out
Example 9
    def terminator(self):
        cprint("terminator running", 'yellow')
        i = 0
        ratio_ts = []
        n_requests = []
        stats = {"Total": 0, "Successes": 0}
        past_datetime = datetime.datetime.now()
        total_time_elapsed = datetime.timedelta(seconds=0)
        while not self.terminated.value:
            i += 1
            if i % 30 == 0:
                new_datetime = datetime.datetime.now()
                time_elapsed = new_datetime - past_datetime
                total_time_elapsed += time_elapsed
                past_datetime = new_datetime
                while True:
                    unlocked = self.lock.acquire(False)
                    if unlocked:
                        log_copy = self.log.copy()
                        len_pool = len(self.proxy_pool)
                        pool_str = str(self.proxy_pool)
                        self.reset_log(self.log)
                        self.lock.release()
                        break
                    else:
                        print("LOCK IN USE!")
                        time.sleep(0.5)

                cprint("Proxy pool ({}):\n{}".format(len_pool, pool_str),
                       'cyan')

                ratio = log_copy['Successes'] / max(log_copy['Total'], .001)
                stats["Total"] += log_copy["Total"]
                stats["Successes"] += log_copy['Successes']
                total_ratio = stats['Successes'] / max(stats['Total'], .001)

                cprint(
                    "Timestep success rate: {}% ({}/{}). Time elapsed: ".
                    format(ratio, log_copy['Successes'], log_copy['Total']) +
                    str(time_elapsed), 'cyan')
                cprint(
                    "Accumulated success rate: {}% ({}/{}). Total time elapsed: "
                    .format(total_ratio, stats['Successes'], stats['Total']) +
                    str(total_time_elapsed), 'cyan')

                # Plot success rate over time
                ratio_ts.append(ratio)
                t_values = [float(x) for x in range(len(ratio_ts))]
                hp.plot(y_vals=ratio_ts,
                        x_vals=t_values,
                        num_x_chars=120,
                        num_y_chars=30)

                cprint("Number of requests/timestep", 'cyan')
                n_requests.append(log_copy['Total'])
                t_values = [float(x) for x in range(len(ratio_ts))]
                hp.plot(y_vals=n_requests,
                        x_vals=t_values,
                        num_x_chars=120,
                        num_y_chars=30)

                cprint("Successes/timestep", 'cyan')
                n_requests.append(log_copy['Successes'])
                t_values = [float(x) for x in range(len(ratio_ts))]
                hp.plot(y_vals=n_requests,
                        x_vals=t_values,
                        num_x_chars=120,
                        num_y_chars=30)

            time.sleep(1)  # If we sleep too much, the processor will never schedule our job

        cprint("Plotting scraping progress to file...", 'cyan')
        t_values = [float(x) for x in range(len(ratio_ts))]
        hp.plot(y_vals=ratio_ts, x_vals=t_values)

        total_ratio = stats["Successes"] / max(stats["Total"], 0.0001)
        with open('Data/plot-{}.txt'.format(datetime.datetime.now()),
                  "w") as f:
            with stdout_redirected(f):
                print("Total average success rate: {}% ({}/{})".format(
                    total_ratio, stats["Successes"], stats["Total"]))
                print("Ended at timestep: {}".format(i))
                print("Proxy pool ({}):\n{}".format(len_pool, pool_str))
                hp.plot(y_vals=ratio_ts,
                        x_vals=t_values,
                        num_x_chars=240,
                        num_y_chars=60)

        cprint("Terminating proxy manager", 'red')
        for i in self.injectors:
            i.terminate()
        self.fetcher.terminate()
        self.virtualiser.terminate()