Example #1
def scrape():
    vprint('working from %s' % getcwd())
    handler = DataHandler(OUTPUT)
    handler.read_data()
    handler.remove_old_entries(DAYS_TO_KEEP)
    added, per_url = 0, 0
    for url in get_file_items(URLS_PATH):
        vprint('requesting feed from %s...' % url)
        feed = parse(url)
        if not feed or 'status' not in feed:
            tsprint('error: unable to reach target url. aborting.')
            exit(-1)
        status = feed['status']
        if status != 200:
            tsprint(
                'error: request from %s responded with error code %s. skipping...'
                % (url, status))
            continue
        vprint('status returned normal. scanning entries...')
        for entry in feed['entries']:
            try:
                id_ = entry['id']
                published = None
                if 'published_parsed' in entry:
                    published = entry['published_parsed']
                elif 'updated_parsed' in entry:
                    published = entry['updated_parsed']
                else:
                    print('entries may not have dates. skipping...')
                    break
                if not is_new(published) or handler.entry_exists(id_):
                    continue
                tags = []
                if ('www.reddit.com' in url or 'tags' not in entry
                        or not entry['tags']):
                    tags = clean_unique_tags([entry['title']])
                    if per_url == 0:
                        vprint(
                            'no tags for entries, using title instead:\n  %s' %
                            tags)
                else:
                    tags = clean_unique_tags(
                        [tag['term'] for tag in entry['tags']])
                if not tags: continue
                extras = {}
                if 'www.reddit.com' in url and 'summary' in entry:
                    m = search(r'href="(\S+)">\[link\]', entry['summary'])
                    if m: extras = {"dlink": m.group(1)}
                handler.add_entry(url, id_, clean_entry(entry, tags, **extras))
                added += 1
                per_url += 1
            except KeyError as e:
                print('%s\nskipping...' % e)
                per_url = 0
                break
        vprint('got %s entries from %s' % (per_url, url))
        per_url = 0
    handler.write_data()
    tsprint('added %s new entries' % added)
    tsprint('%s entries total' % len(handler.get_all_entries()))
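The is_new helper above is not part of the snippet; a minimal sketch consistent with its call site, assuming feedparser-style UTC struct_time values and a day-based cutoff (both assumptions):

from calendar import timegm
from time import time

def is_new(published, days_to_keep=30):
    # published is assumed to be a UTC time.struct_time, as produced by
    # feedparser's published_parsed / updated_parsed fields
    age_seconds = time() - timegm(published)
    return age_seconds < days_to_keep * 24 * 3600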
Example #2
def get_by_distance():
    distance = request.args.get("dist")
    latitudeN = request.args.get("latN")
    longitudeE = request.args.get("lonE")
    dh = DataHandler()
    return dh.select_schools_by_distance(float(latitudeN), float(longitudeE),
                                         float(distance))
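The route decorator and application setup are outside this snippet; a hedged sketch of how such a handler is typically wired into Flask (the /schools path is an assumption, and DataHandler comes from the example's own project):

from flask import Flask, request

app = Flask(__name__)

@app.route("/schools")  # the URL path is an assumption
def get_by_distance():
    # e.g. /schools?dist=5&latN=52.5&lonE=13.4
    distance = request.args.get("dist")
    latitudeN = request.args.get("latN")
    longitudeE = request.args.get("lonE")
    dh = DataHandler()  # DataHandler comes from the example's own project
    return dh.select_schools_by_distance(float(latitudeN), float(longitudeE),
                                         float(distance))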
Example #3
def find_distance(i_):
    path_to_covers = '/home/anosov/data/hard_base/covers/case_%i.dump' % (i_, )
    base, ext = os.path.splitext(path_to_covers)
    path_to_data = base + '__computing' + ext
    h = DataHandler(path_to_covers, path_to_data)
    tmp = h.distance_to_dead_ends
    h.dump()
Example #4
    def runServer(self):
        Logger.writeInfo("Opening socket...")
        sock = socket(AF_INET, SOCK_STREAM)
        sock.bind((self.host, self.port))
        sock.listen(10)
        handler = DataHandler()

        try:
            while True:
                conn, addr = sock.accept()
                Logger.writeInfo("Connected by {}".format(addr))

                try:
                    data = Server.receiveAll(conn)
                    if data is not None:
                        response = handler.process(data)
                        conn.sendall(response)
                    conn.close()
                except Exception as e:
                    Logger.writeError(str(e))

                Logger.writeInfo("Disconnected {}".format(addr))
        finally:
            Logger.writeInfo("Socket closed")
            sock.close()
            handler.close()
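Server.receiveAll is referenced but not shown; one plausible implementation, assuming the client signals end-of-message by shutting down its sending side:

    @staticmethod
    def receiveAll(conn):
        # read until the peer closes its sending side of the connection
        chunks = []
        while True:
            chunk = conn.recv(4096)
            if not chunk:
                break
            chunks.append(chunk)
        return b"".join(chunks) if chunks else None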
Example #5
def test(path, run_id, runs):
    # load parser and data handler
    parser = CYKParser.load(path)
    data_handler = DataHandler(config.test_set, run_id, runs)

    # parse sentences in parallel
    executor = ProcessPoolExecutor(config.processes)
    futures = [
        executor.submit(parse_tree, parser, sent, run_id)
        for sent in data_handler.generator()
    ]

    # following code is to track progress
    kwargs = {
        'total': len(futures),
        'unit': 'nap',
        'unit_scale': True,
        'leave': True
    }
    for _ in tqdm(as_completed(futures), **kwargs):
        pass
    for future in futures:
        if future.exception() is not None:
            print(future.exception())

    # stitch files if number of runs is 1
    if runs == 1:
        stitch_files()
    print("Done parsing")
Example #6
    def __init__(self):
        self.epoch_duration = 60
        self.vk_client = vk_autorization(LOGIN, PASSWORD)
        self.data = DataHandler()
        self.view = View(self, self.data.get_players(),
                         self.data.get_settings())
        self.view.show()
Example #7
    def perform_exp(self):

        list_of_scores = []

        data_handler = DataHandler(self.data, self.var_dict)

        raw_X = data_handler.get_dummy_coded_data('dummy_only')
        n_cat_dummy_var = raw_X.shape[1] - len(self.var_dict['numerical_vars'])

        raw_clf_scores = self._get_classification_score(raw_X)
        list_of_scores.append(
            ('raw', raw_clf_scores, raw_X.shape[1] - n_cat_dummy_var))

        for n_init_bins in self.n_init_bins_list:
            sb_X = self.semantic_binning.fit_transform(self.data, n_init_bins)
            sb_clf_scores = self._get_classification_score(sb_X)
            list_of_scores.append(('sb_{}'.format(n_init_bins), sb_clf_scores,
                                   sb_X.shape[1] - n_cat_dummy_var))

        for n_bins in self.n_bins_range:
            ew_X = data_handler.get_dummy_coded_data('equal_width', n_bins)
            ew_clf_scores = self._get_classification_score(ew_X)
            list_of_scores.append(('ew_{}'.format(n_bins), ew_clf_scores,
                                   ew_X.shape[1] - n_cat_dummy_var))

            ef_X = data_handler.get_dummy_coded_data('equal_freq', n_bins)
            ef_clf_scores = self._get_classification_score(ef_X)
            list_of_scores.append(('ef_{}'.format(n_bins), ef_clf_scores,
                                   ef_X.shape[1] - n_cat_dummy_var))

        self.list_of_scores = list_of_scores
        print('Experiment finished! Results saved in the Exp instance.')
Example #8
    def __init__(self,master):
        super().__init__(master)
        
        self.data_handler = DataHandler("data.json")
        self.category_lists = self.data_handler.get_categories_list()
        self.category_data_labels = []

        self.category = 'Sports'

        # delete old labels; using a local variable here avoids clobbering
        # the category name the way the original self.element loop did
        for label in self.category_data_labels:
            label.configure(text="")
        self.category_data_labels.clear()

        # add heading/content/separator labels for each news item
        self.labelfont = ('times', 15, 'bold')
        self.labelcontent_font = ('times', 10, 'bold', 'italic')
        self.list_data_for_category = self.data_handler.get_news_for_category(self.category)
        for i, item in enumerate(self.list_data_for_category):
            self.label_heading = Label(self, text=item['headline'])
            self.label_heading.config(font=self.labelfont)
            self.label_heading.grid(column=0, row=3 * i)
            self.label_content = Label(self, text=item['content'])
            self.label_content.config(font=self.labelcontent_font)
            self.label_content.grid(column=0, row=3 * i + 1)
            self.label_content_separator = Label(self, text='---------------------------')
            self.label_content_separator.grid(column=0, row=3 * i + 2)
            self.category_data_labels.append(self.label_heading)
            self.category_data_labels.append(self.label_content)
            self.category_data_labels.append(self.label_content_separator)

        self.pack()
Example #9
    def __init__(self, number_of_epochs=10):
        self.verbose = True
        self.number_of_channels = 2
        self.data_handler = \
            DataHandler(number_of_channels=self.number_of_channels,
                        number_of_negative_sets=50,
                        number_of_positive_sets=50,
                        number_of_test_sets=50,
                        verbose=self.verbose)
        self.data_handler.load_training_data()
        self.data_handler.load_test_data()
        self.data_handler.preprocess_data()

        self.mini_batch_size = 1
        self.model = CNN(number_of_channels=self.number_of_channels,
                         number_of_filters=12,
                         regularization_coefficient=1e0,
                         learning_rate=0.001,
                         filter_length=12,
                         pool_size=512,
                         fully_connected_layer_neurons=8,
                         momentum=0.9,
                         perform_normalization="no",
                         update_type="adam",
                         pool_mode="average_exc_pad")
        self.number_of_epochs = number_of_epochs

        self.training_errors = []
        self.test_errors = []
        self.classifier = SVC(C=11., kernel="rbf", gamma=1. / (2 * 2.85))
Example #10
def main():
    opts = configs.model_config

    os.environ["CUDA_VISIBLE_DEVICES"] = '0'
    gpu_config = tf.ConfigProto(device_count={'GPU': 1},
                                allow_soft_placement=False,
                                log_device_placement=False)
    gpu_config.gpu_options.allow_growth = True
    sess = tf.Session(config=gpu_config)

    print('starting processing data ...')

    data = DataHandler(opts)

    print('starting initialising model ...')
    opts['r_range_upper'] = data.train_r_max
    opts['r_range_lower'] = data.train_r_min
    model = Model_Decon(sess, opts)

    opts['batch_size'] = 1
    opts['va_sample_num'] = 6
    opts['model_bn_is_training'] = False

    print('starting testing policy using AC_Decon ...')
    ac = AC_Decon(sess, opts, model)
    ac.policy_test(data)
Example #11
    def test_replace_null_median(self):
        '''Adds np.nan values to copies of self.df_5 and self.df_30, assigns
        the copies to df_5 and df_30 on test_handler, and then calls
        replace_null(method='median') on test_handler, which should replace
        the np.nan values with column medians. Finally, assertEqual checks
        that the replace_null function works correctly.'''

        df_5_replace_null_check = self.df_5.copy()
        df_5_replace_null_check.loc[self.df_5.first_valid_index(),
                                    'C'] = np.nan

        df_30_replace_null_check = self.df_30.copy()
        df_30_replace_null_check.loc[self.df_30.last_valid_index(),
                                     'D'] = np.nan

        test_handler = DataHandler()
        test_handler.df_5 = df_5_replace_null_check
        test_handler.df_30 = df_30_replace_null_check
        test_handler.replace_null(method='median')

        self.assertEqual(
            test_handler.df_5.loc[test_handler.df_5.first_valid_index(), 'C'],
            30.0)
        self.assertEqual(
            test_handler.df_30.loc[test_handler.df_30.last_valid_index(), 'D'],
            4.0)
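DataHandler.replace_null itself is not shown; a hedged sketch consistent with what this test asserts (a per-column median fill over pandas DataFrames; the real implementation may differ):

    def replace_null(self, method='median'):
        if method != 'median':
            raise ValueError('unsupported method: %s' % method)
        # fill NaNs in each column with that column's median
        self.df_5 = self.df_5.fillna(self.df_5.median(numeric_only=True))
        self.df_30 = self.df_30.fillna(self.df_30.median(numeric_only=True))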
Example #12
    def __init__(self):

        # Initialize data loader
        self.data = DataHandler()

        # Initialize model
        self.ada_network = ADDANet()
Example #13
def run_model(model_name, data):
    dh = DataHandler()
    dh.import_data(data)
    dh.create_targets(-1)
    model = build_model(model_name, dh)
    m_eval = ModelEvaluator(model)
    acc, std = m_eval.n_time_k_cross_fold(10, 5)
    print('Accuracy: {}\nStandard Deviation: {}\n'.format(acc, std))
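A hypothetical call; the model name 'svm' and the dataframe are placeholders, since build_model's accepted names are not shown in this example:

run_model('svm', price_history_df)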
Example #14
    def dataLoader(self):

        data_handler = DataHandler()
        npz = data_handler.npzLoader(self.target_file)

        data, label = npz[0], npz[1]
        data /= 255.0

        return data, label
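npzLoader is not shown; one plausible version, assuming the .npz archive stores its arrays under 'data' and 'label' keys (the real key names are not shown above):

import numpy as np

def npzLoader(path):
    # returns (data, label); the caller indexes the result as npz[0], npz[1]
    with np.load(path) as npz:
        return npz['data'].astype('float32'), npz['label']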
Example #15
    def __init__(self, server_socket, buffer, socket_ip):
        self.names = []
        self.record = {}
        self.buffer = buffer
        self.connected_list = []
        self.socket_ip = socket_ip
        self.data_handler = DataHandler(socket_ip)
        self.server_socket = server_socket
        self.add_connection(server_socket)
Example #16
def run():
    global model
    device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')

    # Create dataset
    template_dataset = TemplateDataset(config)
    training_loader, validation_loader, test_loader = template_dataset.get_loaders()

    # Create the neural network
    model = NN(net, optimizer, loss_function, lr_scheduler, metric, device,
               config).to(device)

    # Create the data handler
    data_handler = DataHandler(training_loader, validation_loader, test_loader)

    for epoch in range(config['epochs']):
        # Training
        model.train()
        for i, data in enumerate(training_loader, 0):
            x, y = data
            x, y = x.to(device), y.to(device)
            y_hat = model(x)
            loss = model.backpropagate(y_hat, y)
            result = model.evaluate(y_hat, y)
            data_handler.train_loss.append(loss)
            data_handler.train_metric.append(result)

        with torch.no_grad():
            model.eval()
            # Validating
            if validation_loader is not None:
                for i, data in enumerate(validation_loader, 0):
                    x, y = data
                    x, y = x.to(device), y.to(device)
                    y_hat = model(x)
                    _, loss = model.calculate_loss(y_hat, y)
                    result = model.evaluate(y_hat, y)
                    data_handler.valid_loss.append(loss)
                    data_handler.valid_metric.append(result)

            # Testing
            if test_loader is not None:
                for i, data in enumerate(test_loader, 0):
                    x, y = data
                    x, y = x.to(device), y.to(device)
                    y_hat = model(x)
                    _, loss = model.calculate_loss(y_hat, y)
                    result = model.evaluate(y_hat, y)
                    data_handler.test_loss.append(loss)
                    data_handler.test_metric.append(result)

        model.lr_scheduler_step()
        data_handler.epoch_end(epoch, model.get_lr())
    data_handler.plot(loss=config['plot']['loss'],
                      metric=config['plot']['metric'])
Example #17
    def create_wordcloud(self):
        movies = DataHandler().get_data()
        movies['title'] = movies['title'].fillna("").astype('str')
        title_corpus = ' '.join(movies['title'])
        wordcloud = WordCloud(stopwords=STOPWORDS, background_color='black', height=2000,
                              width=4000).generate(title_corpus)

        plt.figure(figsize=(16, 8))
        plt.imshow(wordcloud)
        plt.show()
Example #18
    def clustering(self, data):
        kmeans = KMeans(n_clusters=4,
                        init='k-means++',
                        max_iter=1000,
                        n_init=10)
        prediction = kmeans.fit_predict(data)
        df = DataHandler().read_data()
        df['Clusters'] = prediction

        return df, kmeans
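Hypothetical usage inside the same class; scaled_features is a placeholder for whatever feature matrix the caller prepares:

df, kmeans = self.clustering(scaled_features)
print(df['Clusters'].value_counts())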
Example #19
def load_data(db_name,
              label="training_0000",
              n_imgs=None,
              thresh=1e5,
              step_size=1,
              db_dir=None):
    """
    loads rgb images and targets from an hdf5 database and returns them as a np array

    Expects data to be saved in the following group structure:
        Training Data
        training_0000/data/0000     using %04d to increment data name

        Validation Data
        validation_0000/data/0000       using %04d to increment data name

        Both groups store an array with the rgb image saved under the 'rgb'
        key and the target saved under the 'target' key

    Parameters
    ----------
    db_name: string
        name of database to load from
    label: string, Optional (Default: 'training_0000')
        location in database to load from
    n_imgs: int, Optional (Default: None)
        how many images to load; defaults to every image under the label
    thresh: float, Optional (Default: 1e5)
        entries whose target norm exceeds this threshold are skipped
    step_size: int, Optional (Default: 1)
        stride used when stepping through the saved entries
    db_dir: string, Optional (Default: None)
        directory containing the database
    """
    # TODO: specify the data format expected in the comment above
    dat = DataHandler(db_dir=db_dir, db_name=db_name)

    # load training images
    images = []
    targets = []

    skip_list = ["datestamp", "timestamp"]
    keys = np.array([
        int(val) for val in dat.get_keys("%s" % label) if val not in skip_list
    ])
    n_imgs = max(keys) if n_imgs is None else n_imgs
    print("Total number of images in dataset: ", max(keys))

    for nn in range(0, n_imgs, step_size):
        data = dat.load(parameters=["rgb", "target"],
                        save_location="%s/%04d" % (label, nn))
        if np.linalg.norm(data["target"]) < thresh:
            images.append(data["rgb"])
            targets.append(data["target"])

    images = np.asarray(images)
    targets = np.asarray(targets)

    print("Total number of images within threshold: ", images.shape[0])

    return images, targets
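A hypothetical call matching the docstring above; the database name, image count, and directory are placeholders:

images, targets = load_data('my_database', label='training_0000',
                            n_imgs=100, db_dir='/path/to/data')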
Example #20
    def __init__(self, file_sys_broadcast_addr, task_ping_addr,
                 current_worker_addr, job_url, job_id, function_url,
                 block_urls, task, answer_addr, load_byte, status_db_url,
                 block_id):

        self.status_handler = StatusHandler(
            status_db_url, self._verify_if_errors_in_fs,
            self._reset_method_if_no_answer_from_fs)
        self.data_handler = DataHandler(
            job_url, job_id, self._verify_if_errors_in_fs,
            self._reset_method_if_no_answer_from_fs)
        self.task_ping_addr = task_ping_addr
        self.file_sys_broadcast_addr = file_sys_broadcast_addr
        self.file_sys_addrs = []
        self.answer_addr = answer_addr
        self._update_filesystem_nodes()
        self.file_sys_addr = self._get_new_filesystem_node()
        self.job_url = job_url
        self.job_id = job_id
        self.function_url = function_url
        self.block_urls = block_urls
        # print("Task_Exc: Este es el block_urls: ",self.block_urls)
        self.task = task
        self.load_byte = load_byte
        self.current_worker_addr = current_worker_addr
        self.status_db_url = status_db_url
        self.block_id = block_id

        self.map_fun, self.red_fun, self.comb = self.get_func()
        self.record_readers = {
            True: self.record_reader_byte,
            False: self.record_reader_str
        }
        self.execute_methods = {
            'map': self.execute_map_task,
            'reduce': self.execute_reduce_task
        }

        self.start_listen_pings()
        # make sure that if the master went down right after sending me the
        # task message, I mark the block as SUBMITTED myself
        to_update = [('state', mt.slices_states[1]),
                     ('worker_ip', self.task_ping_addr[0]),
                     ('worker_port', self.task_ping_addr[1])]
        print(
            "Task_Exc: ",
            "Saving block {} state as SUBMITTED in the filesys: {}".format(
                block_id, self.file_sys_addr))

        self.status_handler.update_status_row(self.file_sys_addr, 'block',
                                              ('block_id', block_id),
                                              to_update)

        self.execute_task = self.execute_methods[self.task]
        self.record_reader = self.record_readers[load_byte]
Example #21
    def test_parser(self):
        """Tests the correct behaviour of the parser method: an args string
        missing a required input is passed to the method, and the test checks
        that an exception is raised."""
        data_handler = DataHandler('')
        method_handler = MethodHandler(data_handler, '')
        cli_app = CLI(method_handler)
        args_exp = {'Ticker': {'Type': 'String', 'Required': 1},
                    'StartDate': {'Type': 'Date', 'Required': 1},
                    'EndDate': {'Type': 'Date', 'Required': 1},
                    'Currency': {'Type': 'String', 'Required': 1}}
        args_obtained = 'AAPL 2021/01/01 2021/02/14'
        with self.assertRaises(Exception):
            cli_app.parse_args(args_obtained, args_exp)
Example #22
def main():

	parser = argparse.ArgumentParser()
	parser.add_argument('--batch_size', type=int, default=4)
	parser.add_argument('--emb_dim', type=int, default=200)
	parser.add_argument('--enc_hid_dim', type=int, default=128)
	parser.add_argument('--dec_hid_dim', type=int, default=256)
	parser.add_argument('--attn_size', type=int, default=200)
	parser.add_argument('--epochs', type=int, default=12)
	parser.add_argument('--learning_rate', type=float, default=2.5e-4)
	parser.add_argument('--dataset_path', type=str, default='../data/Maluuba/')
	parser.add_argument('--glove_path', type=str, default='../data/')
	parser.add_argument('--checkpoint', type=str, default="./trainDir/")
	config = parser.parse_args()

	DEVICE = "/gpu:0"

	logging.info("Loading Data")

	handler = DataHandler(
				emb_dim = config.emb_dim,
				batch_size = config.batch_size,
				train_path = config.dataset_path + "train.json",
				val_path = config.dataset_path + "val.json",
				test_path = config.dataset_path + "test.json",
				vocab_path = "./vocab.json",
				entities_path = config.dataset_path + "entities.json",
				glove_path = config.glove_path)

	logging.info("Loading Architecture")

	model = DialogueModel(
				device = DEVICE,
				batch_size = config.batch_size,
				inp_vocab_size = handler.input_vocab_size,
				out_vocab_size = handler.output_vocab_size,
				generate_size = handler.generate_vocab_size,
				emb_init = handler.emb_init,
				result_keys_vector = handler.result_keys_vector,
				emb_dim = config.emb_dim,
				enc_hid_dim = config.enc_hid_dim,
				dec_hid_dim = config.dec_hid_dim,
				attn_size = config.attn_size)

	logging.info("Loading Trainer")

	trainer = Trainer(
				model=model,
				handler=handler,
				ckpt_path="./trainDir/",
				num_epochs=config.epochs,
				learning_rate = config.learning_rate)

	trainer.trainData()
Example #23
def main():
	scaleModes = {
		"50": "Normal mode",
		"46": "Real-time mode"
	}

	board_handler = BoardHandler()
	data_handler = DataHandler(scaleModes)
	output_handler = OutputHandler()
	gui = GuiHandler(board_handler, data_handler, output_handler)
	gui.initWindow()
Example #24
def main():
    SERVER_IP = '127.0.0.1'
    CLIENT_IP = '127.0.0.1'
    SERVER_PORT = '8443'
    CLIENT_PORT = '53432'

    data_handler = DataHandler()

    client_hello = get_packet('test_data/data/client_hello')
    server_hello = get_packet('test_data/data/server_hello')
    key_exchange = get_packet('test_data/data/client_key_exchange')
    client_encrypted_data1 = get_packet('test_data/data/client_encrypted_data1')
    server_encrypted_data1 = get_packet('test_data/data/server_encrypted_data1')
    client_encrypted_data2 = get_packet('test_data/data/client_encrypted_data2')
    server_encrypted_data2 = get_packet('test_data/data/server_encrypted_data2')

    print("Starting Parser: ")
    Parser(client_hello, data_handler,
           CLIENT_IP, SERVER_IP, CLIENT_PORT, SERVER_PORT).parse()
    Parser(server_hello, data_handler,
           SERVER_IP, CLIENT_IP, SERVER_PORT, CLIENT_PORT).parse()
    Parser(key_exchange, data_handler,
           CLIENT_IP, SERVER_IP, CLIENT_PORT, SERVER_PORT).parse()
    Parser(client_encrypted_data1, data_handler,
           CLIENT_IP, SERVER_IP, CLIENT_PORT, SERVER_PORT).parse()
    Parser(server_encrypted_data1, data_handler,
           SERVER_IP, CLIENT_IP, SERVER_PORT, CLIENT_PORT).parse()
    Parser(client_encrypted_data2, data_handler,
           CLIENT_IP, SERVER_IP, CLIENT_PORT, SERVER_PORT).parse()
    Parser(server_encrypted_data2, data_handler,
           SERVER_IP, CLIENT_IP, SERVER_PORT, CLIENT_PORT).parse()

    print("\nStarting Key Finder:")
    KeyFinder(data_handler, '../keys/privkey.pem',
              CLIENT_IP, SERVER_IP, CLIENT_PORT, SERVER_PORT,
              MAC_KEY_LENGTH, SYMMETRIC_KEY_LENGTH, IV_LENGTH).find()

    print("Handshake Table:\n")
    print(data_handler.handshake_table)

    print("Application Data:\n")
    print(data_handler.app_data_table)

    print("\nStarting Decryptor\n")
    decryptor = Decryptor(data_handler,
                          CLIENT_IP, SERVER_IP, CLIENT_PORT, SERVER_PORT)
    output = decryptor.start('results/')

    for packet in output:
        print(f'packet {packet}\n')
    print(len(output))
Example #25
    def test_get_hist_prices(self):
        """Given a pre-defined set of inputs, the test checks that the method
        correctly computes the results."""
        data_handler = DataHandler('')
        method_handler = MethodHandler(data_handler, '')
        data = {dt.datetime(2021, 1, 1): 120, dt.datetime(2021, 1, 2): 122,
                dt.datetime(2021, 1, 3): 123, dt.datetime(2021, 1, 4): 122,
                dt.datetime(2021, 1, 5): 119, dt.datetime(2021, 1, 8): 118,
                dt.datetime(2021, 1, 9): 120, dt.datetime(2021, 1, 10): 122}
        data_handler.load_db_manually('MSFT', data)
        res_test = {dt.datetime(2021, 1, 3): 123, dt.datetime(2021, 1, 4): 122,
                    dt.datetime(2021, 1, 5): 119, dt.datetime(2021, 1, 8): 118,
                    dt.datetime(2021, 1, 9): 120}
        res = method_handler.get_hist_prices('MSFT', dt.datetime(2021, 1, 3),
                                             dt.datetime(2021, 1, 9), '')
        self.assertEqual(res, res_test)
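A simplified sketch of the date-range filtering this test exercises; the real get_hist_prices also takes a ticker and a final argument, omitted here (an assumption about its internals):

def get_hist_prices(prices, start, end):
    # inclusive bounds, matching the expected res_test above
    return {d: p for d, p in prices.items() if start <= d <= end}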
Example #26
    def store_analyzed_reviews(self, hotel_name, aspect_details, platforms):
        for key, value in self.aspect_details.items():
            value.review_list = {}

        to_json = json.dumps(aspect_details, cls=MyEncoder)
        data_handler = DataHandler()

        data_handler.set_analyzed_reviews(hotel_name, to_json, platforms)


# ra = ReviewAnalyzer()
# ra.get_analyzed_reviews('Kingsbury', ['ALL'])
Example #27
    def get_analyzed_reviews(self, hotel_name, platforms):
        data_handler = DataHandler()
        reviews = data_handler.get_analyzed_reviews(hotel_name, platforms)
        print(hotel_name)
        print(reviews)
        if reviews is None:
            self.analyze_reviews(hotel_name, platforms)
            reviews = data_handler.get_analyzed_reviews(hotel_name, platforms)

            if reviews is None:
                return None

        return reviews[2]
Example #28
def main():
    path_to_covers = '/home/anosov/data/hard_base/covers/case_0.dump'
    base, ext = os.path.splitext(path_to_covers)
    path_to_data = base + '__computing' + ext

    h = DataHandler(path_to_covers, path_to_data)
    gui = GUI(h)

    # gui.append(trivia.show_stat_rm)
    # gui.append(trivia.show_equal_stat)
    gui.append(trivia.case_0)

    gui.run()
Example #29
    def __init__(self, **kwargs):
        super().__init__(**kwargs)

        self.ui = Ui_MainWindow()
        self.ui.setupUi(self)

        self.ui.graphicsView.setBackground(pg.mkColor(0.3))
        self.plot_box = self.ui.graphicsView.addViewBox(row=1,
                                                        col=1,
                                                        lockAspect=True,
                                                        enableMouse=True,
                                                        invertY=True)
        self.image_item = pg.ImageItem()
        self.image_item.setOpts(axisOrder='row-major')
        self.plot_box.addItem(self.image_item)

        self.roi = None
        self.ui.selectDataButton.toggled.connect(self.show_roi)
        self.ui.resetSelectDataButton.clicked.connect(self.reset_roi)

        self.settings_layout = QHBoxLayout()
        self.settings_widget = QWidget()
        self.settings_layout.addWidget(self.settings_widget)
        self.ui.camSettingsWidget.setLayout(self.settings_layout)

        self.data_handler = DataHandler()
        for plugin in self.data_handler.plugins:
            self.add_plugin(plugin.get_widget(), plugin.name)
        self.data_handler.ndarray_available.connect(self.show_ndarray)
        self.data_handler.camera_controls_changed.connect(
            self.set_camera_controls)
        self.ui.actionSave_image.triggered.connect(self.data_handler.save_file)
        self.data_handler.enable_saturation_widget.connect(
            self.enable_saturation_bar)
        self.data_handler.saturation_changed.connect(
            self.ui.progressBar.setValue)
        self.data_handler.message.connect(self.show_message)

        self.camera_dialog = CameraDialog()
        self.ui.actionChoose_camera.triggered.connect(
            self.camera_dialog.choose_camera)
        self.camera_dialog.camera_changed.connect(
            self.data_handler.change_camera)
        self.camera_dialog.choose_first_camera()
        self.ui.actionTune_camera_parameters.triggered.connect(self.tune_pid)

        self.ui.actionShow_Settings.toggled.connect(self.show_settings)

        self.ui.actionDraw_lines.toggled.connect(self.draw_lines)
        self.hline = None
        self.vline = None
Example #30
    def __init__(self, worker_broadcast_addr, filesystem_broadcast_addr,
                 tracker_addr_ping, tracker_ip, current_worker_addr, job_url,
                 job_id, data_type, client_addr, functions_url, map_data_url,
                 status_db_url):

        self.worker_broadcast_addr = worker_broadcast_addr
        self.status_handler = StatusHandler(
            status_db_url, self._verify_if_errors_in_fs,
            self._reset_method_if_no_answer_from_fs)
        self.data_handler = DataHandler(
            job_url, job_id, self._verify_if_errors_in_fs,
            self._reset_method_if_no_answer_from_fs)

        self.filesystem_broadcast_addr = filesystem_broadcast_addr
        self.job_url = job_url
        self.file_sys_addrs = []
        self._update_filesystem_nodes()
        self.file_sys_addr = self._get_new_filesystem_node()
        self.client_addr = client_addr
        self.job_id = job_id

        self.current_worker_addr = current_worker_addr

        self.data_type = data_type
        self.states = ["map", "reduce"]
        self.job_phase = self.states[0]
        self.veto_workers = []
        self.tracker_ip = tracker_ip
        self.tracker_addr_ping = tracker_addr_ping
        self.tracker_addr = (tracker_ip, '8080')
        self.delimiters = [' ', '\n']
        self.map_results = None
        self.result_data_url = '{}/result_data'.format(self.job_url)
        self.map_data_url = map_data_url
        self.functions_url = functions_url
        self.status_db_url = status_db_url
        self.phases = [
            'GETWORKERS', 'SLICES', 'SENDTASK', 'WAITANSWERS', 'GETRESULTS',
            'DONE'
        ]
        self.load_job_methods = {
            'GETWORKERS': self.getting_workers,
            'SLICES': self.getting_workers,
            'SENDTASK': self._load_send_task_phase,
            'WAITANSWERS': self._load_wait_results,
            'GETRESULTS': self.getting_results,
        }
        self.status_phase = mt.task_phases[0]

        self.pinging_process = None
        self.get_data = self.data_handler.get_line_by_line_str