Exemplo n.º 1
0
    def _load_data(self, attempts=3):
        """Load the core data for the Nucleus app, retrying on failure.

        Parameters
        ----------
        attempts : int, optional
            How many times to try loading before giving up. Retries are
            spaced one second apart.

        Returns
        -------
        data : AppData

        Raises
        ------
        StartUpError
            If every attempt to load the data failed.

        """
        final_try = attempts - 1
        for attempt in range(attempts):
            try:
                data = AppData(Path.DATA_XLSX)
            except (IOError, EOFError, OSError):
                if attempt == final_try:
                    # Out of retries — surface the failure to the caller.
                    raise StartUpError()
                time.sleep(1)
            else:
                return data
Exemplo n.º 2
0
    # Stream every "rate" action from app_data into PredictionIO, counting
    # as we go.  (Python 2 code — note the print statements.)
    print "[Info] Importing rate actions to PredictionIO..."
    count = 0
    for v in app_data.get_rate_actions():
        count += 1
        if all_info:
            # Verbose mode: log every single action.
            print "[Info] Importing %s..." % v
        else:
            # Quiet mode: refresh an in-place progress counter every 32 items.
            if (count % 32 == 0):
                sys.stdout.write('\r[Info] %s' % count)
                sys.stdout.flush()

        # Attribute the action to its user, then record the rating event.
        # NOTE(review): "arecord" appears to be the async variant of
        # record_action_on_item — confirm against the predictionio client API.
        client.identify(v.uid)
        client.arecord_action_on_item("rate", v.iid, {
            "pio_rate": v.rating,
            "pio_t": v.t
        })

    # Overwrite the progress line with the final total.
    sys.stdout.write('\r[Info] %s rate actions were imported.\n' % count)
    sys.stdout.flush()


if __name__ == '__main__':

    # Build the data source and a PredictionIO client, run the batch
    # import, then shut the client down cleanly.
    source = AppData()
    pio_client = predictionio.Client(APP_KEY,
                                     THREADS,
                                     API_URL,
                                     qsize=REQUEST_QSIZE)
    batch_import_task(source, pio_client)
    pio_client.close()
	def __init__(self):
		# Shared data source plus a single-threaded PredictionIO client.
		self._app_data = AppData()
		self._client = predictionio.Client(APP_KEY, 1, API_URL)
Exemplo n.º 4
0
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

import tkinter as tk
from datetime import datetime
import matplotlib.pyplot as plt

from appdata import AppData
from kpm import KPM

if __name__ == "__main__":
    # Demo driver: load the sample data, print a few summaries, show two
    # plots, then launch the KPM dashboard.
    app_data = AppData("sample_data.csv")
    reference_day = datetime(2017, 1, 1)

    print(app_data.df)
    print(app_data.get_leaderboard_by_date(reference_day))
    print(app_data.get_sums_by_date_range('1/1/2017', '1/7/2017'))

    plt.figure()
    app_data.plot_app_data_in_date_range('Facebook', '1/1/2017', '1/6/2017')
    app_data.plot_data_in_date_range('1/1/2017', '1/6/2017')
    plt.show()

    dashboard = KPM(app_data, reference_day)
    dashboard.mainloop()

    # print(app_data.get_means_by_date_range('1/1/2017', '1/4/2017'))
    # print(app_data.get_mins_by_date_range('1/1/2017', '1/4/2017'))
    # print(app_data.get_maxs_by_date_range('1/1/2017', '1/4/2017'))
    # print(app_data.get_app_ranking_by_date('Pinterest', '1/1/2017'))
Exemplo n.º 5
0
def batch(recon_root, proc_recon_root, csv_path):
    """
    Script to automatically generate crops, scaled images and compressed files
    :param recon_root: directory containing the original reconstruction folders
    :param proc_recon_root: directory containing the processed-recon folders
    :param csv_path: CSV file whose first column lists the recon IDs to process
    :return:
    """

    # First column of the CSV holds the recon IDs to process.
    with open(csv_path, 'r') as fh:
        recon_list = [row[0] for row in csv.reader(fh)]

    app = AppData()
    auto = Autofill(None)
    update = FakeUpdate()  # stand-in progress object exposing .emit

    for recon_id in recon_list:

        stage = get_stage(recon_id)
        proc_recon_path = join(proc_recon_root, recon_id)
        cropped_dir = join(proc_recon_path, 'cropped')

        if not isdir(cropped_dir):
            mkdir(cropped_dir)

        scaled_dir = join(proc_recon_path, 'scaled_stacks')
        metadata_dir = join(proc_recon_path, 'Metadata')

        # Re-resolve the recon ID from the user config log; the original
        # input folder name may differ from the processed-folder name.
        recon_id = get_input_id(join(metadata_dir, 'config4user.log'))
        recon_path = join(recon_root, recon_id)

        # Performing cropping if directory does not exist or is empty
        if len(listdir(cropped_dir)) == 0:

            # HACK: a bare lambda is used as an attribute bag to stand in
            # for the two config objects Crop expects.
            fake_config = lambda: None
            fake_config.meta_path = metadata_dir
            fake_config.value = 0

            cropper = Crop(recon_path,
                           cropped_dir,
                           update.emit,
                           fake_config,
                           fake_config,
                           app,
                           def_crop=None,
                           repeat_crop=None)
            img_list = cropper.run(auto=True)

        else:
            # Cropped images already exist — just collect their paths.
            img_list = app.getfilelist(cropped_dir)

        # Get recon log and pixel size
        log_paths = [f for f in listdir(cropped_dir) if f.endswith("_rec.log")]
        if len(log_paths) < 1:
            print('Cannot find log in cropped directory')
            continue
        log = join(cropped_dir, log_paths[0])

        with open(log, 'rb') as log_file:
            original_pixel_size = float(auto.get_pixel(stage, log_file))

        # Scaling
        if not isdir(scaled_dir):
            mkdir(scaled_dir)

        for scale in SCALING[stage]:

            # An int entry is a direct scale factor; any other entry is
            # treated as a target pixel size from which the factor is derived.
            scale_by_int = False
            if type(scale) is int:
                sf = scale
                new_pixel_size = original_pixel_size * float(scale)
                scale_by_int = True

            else:
                sf = float(scale) / float(original_pixel_size)
                new_pixel_size = sf * original_pixel_size

            # NOTE(review): `ext` is not defined in this function — presumably
            # a module-level constant with the output file extension; verify.
            out_name = join(
                scaled_dir, '{}_scaled_{:.4f}_pixel_{:.2f}.{}'.format(
                    recon_id, sf, new_pixel_size, ext))

            # Skip scales that were already produced on a previous run.
            if scaled_stack_exists(scaled_dir, sf, new_pixel_size):
                continue

            resample(img_list, sf, out_name, scale_by_int, update)

        # Compression
        bz2_file = join(proc_recon_path,
                        'IMPC_cropped_{}.nrrd'.format(recon_id))
        if not isfile(bz2_file + '.bz2'):

            # Python 2 print statement — this snippet targets Python 2.
            print "Generating missing bz2 file for '{}'".format(recon_id)
            try:
                bz2_nnrd(img_list, bz2_file, 'Compressing cropped recon',
                         update)
            except IOError as e:
                # Best-effort: report the failure and move on to the next recon.
                print(
                    'Failed to write the compressed bzp2 file. Network issues?\n{}'
                    .format(e))