Example #1
    def build(self, td_matrix, alpha, beta, n_topics, save_model=False):
        """
        run Sampler and update p(z|w) and p(w|z)

        """

        print('build model')

        self._initialize()

        sampler = Sampler(n_topics=n_topics, alpha=alpha, beta=beta)

        # The sampler generator yields a (phi, nmz, nzw) tuple at each sampling iteration.
        for i, phi_pzw in enumerate(
                sampler.run(matrix=td_matrix, maxiter=self.maxiter)):
            like = sampler.loglikelihood()

            self.likelihood_in_iters[i] = like

            # update the best log-likelihood seen so far and the corresponding phi = p(w|z)
            if like > self.maxlike:
                self.maxlike = like
                self.opt_iter = i
                self.opt_phi = phi_pzw[0]
                self.opt_pzw = phi_pzw[1]

        if save_model:
            self._save_lda_model()
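The loop above follows a common pattern: iterate a sampler generator, score each draw with the log-likelihood, and remember the best iteration. A minimal, self-contained sketch of that pattern (the generator and the score are random stand-ins, not the project's Sampler):

import random

def fake_sampler(maxiter=20):
    for _ in range(maxiter):
        # stands in for the (phi, nmz, nzw) tuple yielded by Sampler.run
        yield (random.random(), random.random())

maxlike, opt_iter, opt_phi = float('-inf'), None, None
for i, draw in enumerate(fake_sampler()):
    like = random.gauss(0.0, 1.0)          # stands in for sampler.loglikelihood()
    if like > maxlike:
        maxlike, opt_iter, opt_phi = like, i, draw[0]
print(opt_iter, round(maxlike, 3))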
Example #2
    def _init_al_dataset(self):
        """ Initialises dataset for active learning
        """

        self._init_dataset()

        train_dataset = self.datasets['train']

        dataset_size = len(train_dataset)
        self.budget = math.ceil(self.budget_frac * dataset_size)
        Sampler.__init__(self, config,
                         self.budget)  # TODO: Weird place to initialise this

        all_indices = set(np.arange(dataset_size))
        k_initial = math.ceil(len(all_indices) * self.initial_budget_frac)
        initial_indices = random.sample(list(all_indices), k=k_initial)

        sampler_init = data.sampler.SubsetRandomSampler(
            initial_indices)  # need to sample from training dataset

        self.labelled_dataloader = data.DataLoader(train_dataset,
                                                   sampler=sampler_init,
                                                   batch_size=self.batch_size,
                                                   drop_last=True)
        self.val_dataloader = data.DataLoader(self.datasets['valid'],
                                              batch_size=self.batch_size,
                                              drop_last=False)
        self.test_dataloader = data.DataLoader(self.datasets['test'],
                                               batch_size=self.batch_size,
                                               drop_last=False)

        return all_indices, initial_indices
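The seeding step above (draw a random initial pool of indices and wrap it in a SubsetRandomSampler) can be reproduced in isolation. A minimal sketch, assuming torch is available and using a random TensorDataset as a stand-in for the project's train split:

import math
import random
import torch
from torch.utils import data

train_dataset = data.TensorDataset(torch.randn(100, 8), torch.randint(0, 2, (100,)))
initial_budget_frac = 0.1

all_indices = set(range(len(train_dataset)))
k_initial = math.ceil(len(all_indices) * initial_budget_frac)
initial_indices = random.sample(sorted(all_indices), k=k_initial)

sampler_init = data.sampler.SubsetRandomSampler(initial_indices)
labelled_dataloader = data.DataLoader(train_dataset, sampler=sampler_init,
                                      batch_size=4, drop_last=True)
print(len(initial_indices), len(labelled_dataloader))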
Example #3
def main3():
    file = Path('./0000002.png')
    img = plt.imread(file)
    img = torch.tensor(img)

    img = img.unsqueeze(dim=0)
    img = img.unsqueeze(dim=0)  # add leading batch and channel dimensions
    img2 = torch.ones(4, 1, 128, 192)

    sam = Sampler(batch=1, channels=1, height=128, width=192, scales=6)
    scale_list = sam.down_resolution_sampling([img])

    # show the images at each scale
    nps = []
    for i in range(len(scale_list)):
        nps.append(scale_list[i].data.numpy())

    for idx in range(6):
        plt.subplot(2, 3, idx + 1)
        plt.imshow(scale_list[idx][0][0])

    plt.show()
    print('ok')
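The down_resolution_sampling call above belongs to the project's Sampler; a comparable multi-scale pyramid can be sketched with plain torch ops (interpolate is an assumption about how such a pyramid could be built, not the original implementation):

import torch
import torch.nn.functional as F

img = torch.rand(1, 1, 128, 192)       # batch, channels, height, width
scales = 6
pyramid = [img if s == 0 else
           F.interpolate(img, scale_factor=0.5 ** s, mode='bilinear',
                         align_corners=False)
           for s in range(scales)]
for s, level in enumerate(pyramid):
    print(s, tuple(level.shape))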
Example #4
 def grow_capacity(self, n):
     new_sampler = Sampler(self.max_size + n, self.max_size + n, 1)
     for item, count in self.items():
         item_id = self.item2id[item]
         new_sampler.add(item_id, count)
     self.sampler = new_sampler
     self.max_size += n
Example #5
def main():
    ValidSampler = Sampler(utils.valid_file)
    TestSampler = Sampler(utils.test_file)
    networks = []
    weights = []
    for i in xrange(5):
        if i == 0:
            TrainSampler = Sampler(utils.train_file)
            prev_ys = np.copy(TrainSampler.labels)
        else:
            TrainSampler = Sampler(utils.train_file, prev_ys)

        network = Network()
        network.train(TrainSampler)

        cur_ys = network.predict(TrainSampler)
        b1 = np.sum(np.multiply(cur_ys, prev_ys))
        b2 = np.sum(np.multiply(cur_ys, cur_ys))
        w = float(b1) / b2
        prev_ys = np.subtract(prev_ys, w * cur_ys)

        print i, 'done with weight', w
        network.save('network_' + str(i) + '.ckpt')
        weights.append(w)
        networks.append(network)

        validate_boost(ValidSampler, networks, weights)
    validate_boost(TestSampler, networks, weights)

    np.save('weights.npy', weights)
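The weight w above is the least-squares coefficient that best scales the new network's predictions onto the current residual, w = sum(cur*prev) / sum(cur*cur), after which the residual is updated for the next boosting round. A minimal numpy sketch of one such step (random vectors stand in for the labels and the network output):

import numpy as np

prev_ys = np.random.randn(100)      # current residual (initially the labels)
cur_ys = np.random.randn(100)       # predictions of the newly trained network

w = float(np.dot(cur_ys, prev_ys)) / np.dot(cur_ys, cur_ys)
prev_ys = prev_ys - w * cur_ys      # residual handed to the next round
print('weight', round(w, 4))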
Example #6
    def __init__(self, kick=KICK, snare=SNARE, tom1=TOM1, tom2=TOM2, hihat=HIHAT, crash=CRASH, ride=RIDE):

        self.kick_file = kick
        self.snare_file = snare
        self.tom1_file = tom1
        self.tom2_file = tom2
        self.hihat_file = hihat
        self.crash_file = crash
        self.ride_file = ride

        drumset_files = [
            self.kick_file,
            self.snare_file,
            self.tom1_file,
            self.tom2_file,
            self.hihat_file,
            self.crash_file,
            self.ride_file,
        ]

        Sampler.__init__(self, drumset_files)

        self.kick = self[0]
        self.snare = self[1]
        self.tom1 = self[2]
        self.tom2 = self[3]
        self.hihat = self[4]
        self.crash = self[5]
        self.ride = self[6]
Example #7
 def __init__(self, N):
     self.item2id = {}
     self.id2item = []
     self.item_count = Counter()
     self.max_size = N
     self.sampler = Sampler(N, N, 1)
     self.count = 0
Example #8
    def _init_al_data(self):
        """ Initialises train, validation and test sets for active learning including partitions """
        self._init_dataset()

        train_dataset = self.datasets['train']
        dataset_size = len(train_dataset)
        self.budget = math.ceil(
            self.budget_frac *
            dataset_size)  # currently can only have a fixed budget size
        Sampler.__init__(self, self.budget)

        all_indices = set(np.arange(dataset_size))
        k_initial = math.ceil(len(all_indices) * self.initial_budget_frac)
        initial_indices = random.sample(list(all_indices), k=k_initial)

        sampler_init = data.sampler.SubsetRandomSampler(
            initial_indices)  # need to sample from training dataset

        self.labelled_dataloader = data.DataLoader(train_dataset,
                                                   sampler=sampler_init,
                                                   batch_size=self.batch_size,
                                                   drop_last=True)
        self.val_dataloader = data.DataLoader(self.datasets['valid'],
                                              batch_size=self.batch_size,
                                              shuffle=True,
                                              drop_last=False)
        self.test_dataloader = data.DataLoader(self.datasets['test'],
                                               batch_size=self.batch_size,
                                               shuffle=True,
                                               drop_last=False)

        print(
            f'{datetime.now()}: Dataloaders sizes: Train {len(self.labelled_dataloader)} Valid {len(self.val_dataloader)} Test {len(self.test_dataloader)}'
        )
        return all_indices, initial_indices
Example #9
def build_graph():
    session = tf.Session()
    optimizer = tf.train.AdamOptimizer(learning_rate=0.01)
    writer = tf.summary.FileWriter("/home/drl/DRL/tensorflow-reinforce/tmp/")

    # Policy parameters for the exploration policy
    epsilon = 0.9
    target_update_rate = 0.1
    dqn_agent = DQNAgent(session,
                         optimizer,
                         q_network,
                         state_dim,
                         num_actions,
                         target_update_rate=target_update_rate,
                         summary_writer=writer)
    # Switch between greedy and exploratory policy
    exploration_policy = EpsilonGreedyPolicy(dqn_agent, num_actions, epsilon)
    # Always take greedy actions according to greedy policy
    greedy_policy = EpsilonGreedyPolicy(dqn_agent, num_actions, 1.0)

    # Sampler (collect trajectories using the present dqn agent)
    num_episodes = 10
    training_sampler = Sampler(exploration_policy,
                               env,
                               num_episodes=num_episodes)
    testing_sampler = Sampler(greedy_policy, env, num_episodes=5)

    # Initializing ReplayBuffer
    buffer_size = 100000
    replay_buffer = ReplayBuffer(buffer_size)

    return dqn_agent, training_sampler, testing_sampler, replay_buffer
Example #10
def run_vmc(parameter):
    """Run the variational monte carlo."""
    # Set all values to zero for each new Monte Carlo run
    accumulate_energy = 0.0
    accumulate_psi_term = 0.0
    accumulate_both = 0.0
    new_energy = 0.0

    # Initialize the positions for each new Monte Carlo run
    positions = np.random.rand(num_particles, num_dimensions)

    # Call system class in order to set new alpha parameter
    sys = System(num_particles, num_dimensions, parameter, beta, a)
    sam = Sampler(omega, numerical_step_length, sys)
    met = Metropolis(step_metropolis, step_importance, num_particles,
                     num_dimensions, sam, 0.0)
    for i in range(monte_carlo_cycles):

        new_energy, new_positions, count = met.metropolis(positions)
        positions = new_positions
        accumulate_energy += sam.local_energy(positions)

        accumulate_psi_term += sys.derivative_psi_term(positions)
        accumulate_both += sam.local_energy_times_wf(positions)

    expec_val_energy = accumulate_energy / (monte_carlo_cycles * num_particles)
    expec_val_psi = accumulate_psi_term / (monte_carlo_cycles * num_particles)
    expec_val_both = accumulate_both / (monte_carlo_cycles * num_particles)

    derivative_energy = 2 * (expec_val_both - expec_val_psi * expec_val_energy)
    print('deri energy = ', derivative_energy)
    print('counter (accepted moves in metropolis) = ', count)
    return derivative_energy, new_energy
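The last lines implement the standard VMC parameter-gradient estimator, d<E>/d(alpha) = 2*( <E_L * dlnpsi> - <dlnpsi> * <E_L> ). A minimal numpy sketch of that estimator with random stand-ins for the sampled quantities:

import numpy as np

n_samples = 1000
local_energy = np.random.randn(n_samples) + 3.0   # E_L per sampled configuration
dlnpsi = np.random.randn(n_samples)               # d ln(psi) / d(alpha) per configuration

expec_energy = local_energy.mean()
expec_psi = dlnpsi.mean()
expec_both = (local_energy * dlnpsi).mean()

derivative_energy = 2 * (expec_both - expec_psi * expec_energy)
print('derivative energy =', derivative_energy)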
Example #11
def main():
    config = get_config()

    if config.mode == 'train':
        torch.backends.cudnn.benchmark = True

        dataLoader = DataLoader(config.data_root, config.dataset_name, config.img_size, config.img_type, config.batch_size)
        loader, n_classes = dataLoader.get_loader()
        config.n_classes = n_classes
        print(config)

        trainer = Trainer(loader, config)
        trainer.train()

    elif config.mode == 'sample':
        if config.config_path == '':
            raise Exception

        with open(config.config_path) as f:
            config_dict = json.load(f)

        for k, v in config_dict.items():
            if not k == 'model_state_path':
                setattr(config, k, v)
        dataLoader = DataLoader(config.data_root, config.dataset_name, config.img_size, config.img_type, config.batch_size)
        loader, n_classes = dataLoader.get_loader()
        config.n_classes = n_classes
        print(config)

        sampler = Sampler(config)
        sampler.sample()
Example #12
def convert_with_processes(file_path, duration=30.0, half_part_length=0.1,
                           offset = 30, num_processes=multiprocessing.cpu_count()):
    global song

    song = Sampler(file_path, duration=duration, offset = offset)

    task_queue = multiprocessing.JoinableQueue()
    results = multiprocessing.Queue()

    parts = song.split(half_part_length)
    part_arr = [np.append(parts[i-1], parts[i]) for i in xrange(1, len(parts))]

    for element in part_arr:
        task_queue.put(element)
    for _ in xrange(num_processes):
        task_queue.put(None)

    tasks = []
    for _ in xrange(num_processes):
        process = QueueProcess(task_queue, results, take_feature)
        tasks.append(process)
        process.start()

    task_queue.join()

    result_array = []
    for i in xrange(len(part_arr)):
        result_array.append(results.get())
    result_array = np.asarray(result_array)
    return result_array
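The part_arr comprehension above joins every consecutive pair of half-length parts into one overlapping analysis window before the windows are fed to the worker processes. A minimal sketch of just that windowing step:

import numpy as np

parts = [np.full(3, i) for i in range(4)]                      # four half-length parts
part_arr = [np.append(parts[i - 1], parts[i]) for i in range(1, len(parts))]
for window in part_arr:
    print(window)          # three overlapping windows of two parts each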
Example #13
def test(task,
         num_episodes=10,
         policy_network_hidden_sizes=(32, 32),
         policy_adaptive_std=False):

    directory = 'log/{}/'.format(task)
    simulator = Simulator(task=task)

    input_shape = (None, simulator.obsevation_dim)
    output_size = simulator.action_dim

    if simulator.action_type == 'continuous':
        policy_network = GaussianMLPPolicy(
            input_shape=input_shape,
            output_size=output_size,
            hidden_sizes=policy_network_hidden_sizes,
            adaptive_std=policy_adaptive_std,
            std_hidden_sizes=policy_network_hidden_sizes)
    elif simulator.action_type == 'discrete':
        policy_network = CategoricalMLPPolicy(
            input_shape=input_shape,
            output_size=output_size,
            hidden_sizes=policy_network_hidden_sizes)

    sampler = Sampler(simulator, policy_network)

    with tf.Session() as sess:
        saver = tf.train.Saver()
        checkpoint_path = os.path.join(directory, '{}.ckpt'.format(task))
        saver.restore(sess, checkpoint_path)

        for i in range(num_episodes):
            path = sampler.rollout(sess, max_path_length=1000, render=True)
            print("epsiode {}, reward {}".format(i, path['total_reward']))
Example #14
def prm(data, num_samples=1000, extra_points=[]):
    sampler = Sampler(data)
    nodes = sampler.sample(num_samples=num_samples)
    print('# sampled nodes {}'.format(len(nodes)))

    nodes += extra_points

    return create_graph(nodes, sampler.polygons), nodes
Example #15
 def _run_module(self, graph_part):
     spl = Sampler(graph_part, 0)
     cell, graph, table = spl.sample()
     # pred = Predictor()
     # ops = pred.predictor([], graph_part)
     # table_ops = spl.ops2table(ops, table)
     # return cell, graph, table, table_ops
     return cell, graph, table
Example #16
def build_caption_from_sampler(filename):
    saved_model = get_saved_model("checkpoint_2_7369.pt")
    model = Sampler(saved_model, vocab)
    seq, alpha = model.beam_search(filename)
    caption = ""
    for i in seq[1:-1]:
        caption += " " + vocab_idx2word[str(i)]
    return caption, seq, alpha
Example #17
 def setupSampler(self):
     self.samplerThread = QtCore.QThread(self)
     self.sampler = Sampler(self.gain, self.sampRate, self.freqs, self.numSamples)
     self.sampler.moveToThread(self.samplerThread)
     self.samplerThread.started.connect(self.sampler.sampling)
     self.sampler.samplerError.connect(self.onError)
     self.sampler.dataAcquired.connect(self.worker.work)
     self.samplerThread.start(QtCore.QThread.NormalPriority)
Example #18
 def setupSampler(self):
     self.samplerThread = QtCore.QThread(self)
     self.sampler = Sampler(self.gain, self.samp_rate, self.freqs,
                            self.num_samples, self.q_in)
     self.sampler.moveToThread(self.samplerThread)
     self.samplerThread.started.connect(self.sampler.sampling)
     self.sampler.abortStart.connect(self.onAbort)
     self.ui.gainSlider.valueChanged[int].connect(self.setGain)
     #self.ui.gainSlider.valueChanged[int].connect(self.sampler.changeGain, QtCore.Qt.QueuedConnection)
     self.samplerThread.start(QtCore.QThread.NormalPriority)
Example #19
 def __init__(self, fwd_model, n_iterations, population_size, elite_frac,
              ang_sigma, len_sigma, smoothing_param):
     self.fwd_model = fwd_model
     self.n_iterations = n_iterations
     self.population_size = population_size
     self.elite_frac = elite_frac
     self.num_elites = int(population_size * elite_frac)
     self.smoothing_param = smoothing_param
     self.sampler = Sampler(ang_sigma=ang_sigma,
                            len_sigma=len_sigma,
                            population_size=population_size)
Example #20
    def test_sampler(self):
        sample_time = 2
        sleep_time = 0.10

        start = datetime.datetime.now()
        sampler = Sampler( SDS011(True) , sample_time = sample_time , sleep_time = sleep_time )
        data = sampler.collect( )
        stop = datetime.datetime.now( )
        
        dt = stop - start
        self.assertTrue( (dt.total_seconds() - sample_time) > 0)
Example #21
def random_test(nn=NetworkUnit()):
    """Fix a network structure, give a setting randomly and get its score"""
    spl = Sampler()
    eva = Evaluater()
    spl.renewp(CONFIG_RAND_VECTOR)
    scores = []

    for i in range(TEST_MAX_EPOCH):
        nn.set_cell(spl.sample(len(nn.graph_part)))
        score = eva.evaluate(nn)
        scores.append(score)
    return scores
Example #22
    def test_sampler(self):
        sample_time = 2
        sleep_time = 0.10

        start = datetime.datetime.now()
        sampler = Sampler(SDS011(True),
                          sample_time=sample_time,
                          sleep_time=sleep_time)
        data = sampler.collect()
        stop = datetime.datetime.now()

        dt = stop - start
        self.assertTrue((dt.total_seconds() - sample_time) > 0)
Example #23
    def __init__(self, monte_carlo_steps, delta_R, delta_t, num_particles,
                 num_dimensions, wavefunction, hamiltonian):
        """Instance of class."""
        self.mc_cycles = monte_carlo_steps
        self.delta_R = delta_R
        self.delta_t = delta_t
        self.num_p = num_particles
        self.num_d = num_dimensions
        self.w = wavefunction
        self.h = hamiltonian
        self.c = 0.0

        self.s = Sampler(self.w, self.h)
        self.sqrt_delta_t = np.sqrt(self.delta_t)
Example #24
    def window(self):
        """
        Window for user to interact with
        """

        pygame.init()
        screen = pygame.display.set_mode(TextColorChooser.screen_size)
        pygame.display.set_caption('Input data')

        sampler = Sampler(self.map_size)

        # Generate first 10 values

        try:
            self.input_data = [
                self.show_buttons(screen,
                                  (sampler.sample(enable_iterator=False)))
                for _ in range(10)
            ]  # Generate baseline
        except Exception as e:
            print(e)
            return []

        predictor = Predictor(self.input_data)
        accuracy_tracker = AccuracyTracker()

        # Continuously get values

        while True:
            current_bg = (sampler.sample(enable_iterator=False))

            predictor.data = self.input_data
            prediction = predictor.predict(list(current_bg))

            print("Prediction: {}".format('White' if prediction ==
                                          'w' else 'Black'))
            screen.fill([0, 0, 0] if prediction == 'b' else [255, 255, 255])

            try:
                self.input_data.append(self.show_buttons(screen, current_bg))
            except Exception as e:
                print(e)
                break

            accuracy_tracker += (prediction == self.input_data[-1][1])
            print(str(accuracy_tracker.accuracy) +
                  '%')  # Show past 20 accuracy

        pygame.quit()
Example #25
    def __init__(self, args):
        self.args = args
        self.model = Model()

        try:  # test: load model
            if args.test:
                self.model.load_model(args.model)
        except:  # train: init corpus and model
            self.corpus = Corpus()
            self.model.init_model(args)
            if args.rule:
                self.model.load_rules(args.rule)
            self.corpus.init_corpus_and_model(args.train, self.model)
        # init sampler
        self.sampler = Sampler(self.model)
Example #26
    def __init__(self, cmd_args, flag):
        self.args = cmd_args
        self.model = Model()

        # test: load model
        if flag=='infer':
            self.model.load_model(cmd_args.model)    
        else: # train: init corpus and model
            self.corpus = Corpus()
            self.model.init_model(cmd_args)
            if cmd_args.rule:
                self.model.load_rules(cmd_args.rule)
            self.corpus.init_corpus_and_model(cmd_args.train, self.model) 
        # init sampler
        self.sampler = Sampler(self.model)
Example #27
def run():
    my_preprocessor = Preprocessor('stop_words.txt')
    my_sampler = Sampler()
    my_represent = Represent()

    a_data = Data('../data/chat.txt')
    a_data.prepare_search(my_preprocessor, my_sampler, my_represent)
Example #28
def macro_sampler():
    seed = '12345678901234567890abcdefghijklmnopqrstuvwxyz😊'

    risk_limit = .1
    contests = {
        'test1': {
            'cand1': 600,
            'cand2': 400,
            'ballots': 1000,
            'numWinners': 1
        },
    }

    batches = {}

    # 10 batches will have max error of .08
    for i in range(10):
        batches['pct {}'.format(i)] = {
            'test1': {
                'cand1': 40,
                'cand2': 10,
                'ballots': 50
            }
        }
    # 10 batches will have max error of .04
    for i in range(11, 20):
        batches['pct {}'.format(i)] = {
            'test1': {
                'cand1': 20,
                'cand2': 30,
                'ballots': 50
            }
        }

    yield Sampler('MACRO', seed, risk_limit, contests, batches)
Example #29
def main():

    # number of tasks or quadrants
    ntasks = 2
    dim = 4
    data = Sampler(alpha=1.0,
                   verbose=True,
                   ntasks=ntasks,
                   dim=dim,
                   discriminator_offset=0.05,
                   distribution_offset=0.5,
                   uniform_width=1.,
                   nsamples=1000000,
                   ntrain=10000,
                   ntest=200)

    learning_object = Task_free_continual_learning(
        verbose=True,
        seed=123,
        dev='cpu',
        dim=dim,
        hidden_units=100,
        learning_rate=0.005,
        ntasks=ntasks,
        gradient_steps=5,
        loss_window_length=5,
        loss_window_mean_threshold=0.2,
        loss_window_variance_threshold=0.1,
        MAS_weight=0.5,
        recent_buffer_size=20,
        hard_buffer_size=5)

    tags = ['Online No Hardbuffer', 'Online Continual']
    experiment(data, learning_object, tags)
Example #30
def build_samplers(im_path, feat_path, label_path, seed=0):

    ims_fname = sorted(glob.glob(os.path.join(im_path, '*.png')))
    feats_fname = sorted(glob.glob(os.path.join(feat_path, '*.npz')))
    labels_fname = sorted(glob.glob(os.path.join(label_path, '*.txt')))

    # shuffle
    if seed is not None:
        np.random.seed(seed)
        idx = np.arange(len(ims_fname))
        np.random.shuffle(idx)
        ims_fname = [ims_fname[i] for i in idx]
        feats_fname = [feats_fname[i] for i in idx]
        labels_fname = [labels_fname[i] for i in idx]

    samplers = []

    print('Loading data and building samplers')
    pbar = tqdm(total=len(feats_fname))
    for i, l, f in zip(ims_fname, labels_fname, feats_fname):
        labels = read_labels(l)
        xy, descs = read_feats(f)
        sampler = Sampler(labels, xy, descs, im_path=i)
        samplers.append(sampler)
        pbar.update(1)

    pbar.close()

    return samplers
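The shuffle above is index-based so the image, feature, and label filename lists stay aligned, and the fixed seed makes it reproducible. A minimal sketch of that pattern:

import numpy as np

ims_fname = ['a.png', 'b.png', 'c.png']
feats_fname = ['a.npz', 'b.npz', 'c.npz']

np.random.seed(0)
idx = np.arange(len(ims_fname))
np.random.shuffle(idx)

ims_fname = [ims_fname[i] for i in idx]
feats_fname = [feats_fname[i] for i in idx]
print(ims_fname, feats_fname)      # both lists permuted the same way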
Example #31
    def setup_tables(self, tables, merged):
        self.merged_table = merged
        self.tables = tables

        for ef, t in zip(self.err_funcs, self.tables):
            ef.setup(t)

        self.sampler = Sampler(self.SCORE_ID)
        self.samp_rates = [
            best_sample_size(len(t), self.epsilon) / (float(len(t)) + 1)
            for t in self.tables
        ]

        if self.inf_bounds is None:
            self.inf_bounds = [[INF, -INF] for table in tables]

        # attributes to partition upon
        self.cont_attrs = [
            attr.name for attr in merged.domain if attr.name in self.cols
            and attr.var_type != Orange.feature.Type.Discrete
        ]
        self.dist_attrs = [
            attr.name for attr in merged.domain if attr.name in self.cols
            and attr.var_type == Orange.feature.Type.Discrete
        ]

        # remove undesirable columns
        self.cont_attrs = filter(lambda c: c in self.cols, self.cont_attrs)
        self.dist_attrs = filter(lambda c: c in self.cols, self.dist_attrs)
Example #32
def max_balancer(input_csv_path, output_csv_path='./output.csv'):
    dataset = csv_handler.csv_readlines(input_csv_path)

    pos_dataset = transform.filter_func(dataset, lambda row: row[2] == '1')
    neg_dataset = transform.filter_func(dataset, lambda row: row[2] == '0')

    assert (len(pos_dataset) <= len(neg_dataset))
    sampler = Sampler()
    neg_dataset = sampler.sample_rows(neg_dataset, len(pos_dataset))

    pos_ids = transform.map_func(pos_dataset, lambda row: row[0])
    neg_ids = transform.map_func(neg_dataset, lambda row: row[0])

    select_id_set = set(pos_ids + neg_ids)
    final = transform.filter_func(dataset, lambda row: row[0] in select_id_set)

    csv_handler.csv_writelines(output_csv_path, final)
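The balancing step above keeps every positive row and downsamples the negatives to the same count. A minimal sketch of the same idea, with random.sample standing in for the project's Sampler.sample_rows:

import random

rows = [(i, 'text', '1' if i < 3 else '0') for i in range(10)]   # toy (id, text, label) rows
pos = [r for r in rows if r[2] == '1']
neg = [r for r in rows if r[2] == '0']
neg = random.sample(neg, len(pos))         # downsample negatives to the positive count
balanced_ids = {r[0] for r in pos + neg}
final = [r for r in rows if r[0] in balanced_ids]
print(len(pos), len(neg), len(final))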
Example #33
    def __init__(self, address="[::]", port=50051):
        self._address = address
        self._port = port
        self._server = grpc.server(futures.ThreadPoolExecutor(max_workers=10))

        service_camera_sampler_pb2_grpc.add_CameraSamplerServicer_to_server(
            Sampler(), self._server)
        self._server.add_insecure_port(f"{self._address}:{self._port}")
Example #34
class MDSim:
    def __init__(self, numpyart, rho, temp, force, stepsize=0.001):
        """Initializes the various parts of an MD simulation.

        Parameters:
        numpyart: Number of particles
        rho  : Density
        temp : Temperature
        force: A force model
        stepsize: Time step
        """ 
        self.geometry = CubicLattice( numpyart, rho )               # Initialize a geometry,
                                                                 # in this case a cubic lattice.
        self.system = Ensemble( self.geometry, temp )            # Create an ensemble to
                                                                 # store information about
                                                                 # the simulation.
        self.solver = VeloVerlet( self.system, force, stepsize ) # Create the sampler class.
        self.sampler = Sampler()
    
    def run( self, nsteps, tSet):
        """Simulates N steps of the simulation.""" 
        # Equilibration Run.
        print "equilibrating %i steps" % (nsteps/2)
        for n in range(0,nsteps/2,2):
            self.solver.step()
            if( n % 1000 == 0 ):
                #self.system.setT(tSet)
                print "step = %05i, temp = %12.7f" % (n, self.system.getT()) 
        print "Equilibration done." 
        # Production Run.  
        for n in range(nsteps/2, nsteps+1): 
            # sample every 100 steps but only after nsteps / 2 equilibration steps
            if( n % fil == 0 and n > nsteps / 2 ):
                U = copy.deepcopy(self.system.getPos()) 
                #Sampling
                self.sampler.sampleData( U[0], U[1], U[2],
                                         self.system.getEpot(),
                                         self.system.getEkin(),
                                         self.system.getT(),
                                         self.system.getP(),
                                         copy.deepcopy(self.system.getVel()) ) 
            # print every 1000 steps
            if( n % 1000 == 0 ):
                print "step = %05i, temp = %12.7f" % (n, self.system.getT()) 
            self.solver.step()  # take a simulation step
Example #35
 def setupSampler(self):
     self.samplerThread = QtCore.QThread(self)
     self.sampler = Sampler(self.gain, self.samp_rate, self.freqs, self.num_samples, self.q_in)
     self.sampler.moveToThread(self.samplerThread)
     self.samplerThread.started.connect(self.sampler.sampling)
     self.sampler.abortStart.connect(self.onAbort)
     self.ui.gainSlider.valueChanged[int].connect(self.setGain)
     #self.ui.gainSlider.valueChanged[int].connect(self.sampler.changeGain, QtCore.Qt.QueuedConnection)
     self.samplerThread.start(QtCore.QThread.NormalPriority)
Example #36
    def __init__(self, data_map, params, likelihood_constraint, no):

        """
        Initializes the uniform sampler

        Parameters
        ----------
        likelihood_constraint: float
            Likelihood value that newly drawn samples must exceed
        no : int
            Number of likelihood evaluations until this point

        """

        self.LC     = likelihood_constraint
        self.number = no

        Sampler.__init__(self, data_map, params)
Example #37
def main():
	from sampler import Sampler

	s = Sampler("/dev/ttyACM0", debug=True)
	s.set_freq(4000)
	s.set_trigger(1, 490)
	#s.set_trigger_enabled(False)
	s.print_conf()

	vcc = s.get_vcc()
	print "Vcc={}".format(vcc)

	scope = PowerScope(s)

	#scope.run(record_path="scope.avi", frames=40)
	scope.run()

	s.close()
Example #38
 def setup_train_dataset(self):
     """
     Each batch of self.batch_size examples follows the same distribution
     """
     bd = BlockDesigner(self.train_examples)
     if self.sample_class:
         samp = Sampler(bd.remainder(), seed=self.random_seed)
         images, labels = samp.custom_distribution(self.sample_class, self.batch_size, self.custom_distribution)
         return {"X": images, "y": labels}
     else:
         blocks = bd.break_off_multiple_blocks(self.n_train_batches, self.batch_size)
         images = []
         labels = []
         for block in blocks:
             for y, ids in block.items():
                 for id in ids:
                     images.append(id)
                     labels.append(y)
         return {"X": images, "y": labels}
Example #39
    def __init__(self, data_map, params, to_evolve, likelihood_constraint, no):

        """
        Initializes the Metropolis sampler

        Parameters
        ----------
        to_evolve : object
            The sample to evolve
        likelihood_constraint: float
            Likelihood value that newly drawn samples must exceed
        no : int
            Number of likelihood evaluations until this point

        """
        Sampler.__init__(self, data_map, params)

        self.source = to_evolve
        self.LC     = likelihood_constraint
        self.step   = params['dispersion']
        self.number = no
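In nested sampling, a Metropolis sampler like the one above evolves a copy of an existing live point and only keeps moves whose likelihood stays above the current constraint. A minimal, self-contained sketch of that accept rule (the 1-D Gaussian likelihood and step size are assumptions for illustration):

import random

def log_likelihood(x):
    return -0.5 * x * x                    # toy 1-D Gaussian log-likelihood

def evolve(start, likelihood_constraint, step=0.3, n_steps=20):
    x = start
    for _ in range(n_steps):
        proposal = x + random.uniform(-step, step)
        # flat prior assumed: accept any move that still satisfies the constraint
        if log_likelihood(proposal) > likelihood_constraint:
            x = proposal
    return x

print(evolve(start=1.0, likelihood_constraint=-2.0))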
Example #40
    def __init__(self, args):
        self.args = args
        self.model = Model()

        try: # test: load model
            if args.test:
                self.model.load_model(args.model)    
        except: # train: init corpus and model
            self.corpus = Corpus()
            self.model.init_model(args)
            if args.rule:
                self.model.load_rules(args.rule)
            self.corpus.init_corpus_and_model(args.train, self.model) 
        # init sampler
        self.sampler = Sampler(self.model)
Example #41
    def __init__(self, numpyart, rho, temp, force, stepsize=0.001):
        """Initializes the various parts of an MD simulation.

        Parameters:
        numpyart: Number of particles
        rho  : Density
        temp : Temperature
        force: A force model
        stepsize: Time step
        """ 
        self.geometry = CubicLattice( numpyart, rho )               # Initialize a geometry,
                                                                 # in this case a cubic lattice.
        self.system = Ensemble( self.geometry, temp )            # Create an ensemble to
                                                                 # store information about
                                                                 # the simulation.
        self.solver = VeloVerlet( self.system, force, stepsize ) # Create the sampler class.
        self.sampler = Sampler()
Example #42
 def reset(self,sample_rate,interface):
     self.time_of_last_meter = self.time_of_last_calc = time.time()
     self.last_samples = 0
     self.last_tx_agg = self.last_rx_agg = 0.0
     self.last_txb2 = self.last_rxb2 = 0.0
     self.mean_tx = self.mean_rx = 0.0
     self.var_tx = self.var_rx = 0.0
     self.mu_tx = self.mu_rx = 0.0
     self.sigma2_tx = self.sigma2_rx = 0.0
     self.request_queue = Queue()
     self.sample_queue = Queue()
     self.sampler = Sampler(self.request_queue,self.sample_queue,
                            sample_rate,self,interface=interface,
                            debug=self.debug)
     if self.debug:
         print("\33[H",end="")   # move cursor home
         print("\33[J",end="")   # clear screen
Example #43
class SemiLDA(object):
    
    def __init__(self, args):
        self.args = args
        self.model = Model()

        try: # test: load model
            if args.test:
                self.model.load_model(args.model)    
        except: # train: init corpus and model
            self.corpus = Corpus()
            self.model.init_model(args)
            if args.rule:
                self.model.load_rules(args.rule)
            self.corpus.init_corpus_and_model(args.train, self.model) 
        # init sampler
        self.sampler = Sampler(self.model)

    def train(self):
        for i in xrange(self.args.burn_in):
            self.sampler.sample_corpus(self.corpus)
            if not self.args.slient:
                loglike = self.sampler.loglikelihood(self.corpus)
                print 'burn in:%s, loglikelihood:%s' % (i, loglike) 
        for i in xrange(self.args.max_iter):
            self.sampler.sample_corpus(self.corpus)
            self.model.accumulative()
            if not self.args.slient:
                loglike = self.sampler.loglikelihood(self.corpus)
                print 'iter:%s, loglikelihood:%s' % (i, loglike) 
        self.model.save_model(self.args.model)
        if self.args.dump:
            self.model.dump_topic_words(self.args.dump)

    def infer(self):
        self.sampler.sample_test(self.args.test, self.args.output, self.args.burn_in, self.args.max_iter)
Example #44
class Analyzer(QtGui.QMainWindow):
    num_threads = 10   
    
    def __init__(self, parent=None):
        QtGui.QWidget.__init__(self, parent)
        #QtCore.QThread.currentThread().setPriority(QtCore.QThread.HighPriority)
        self.ui = Ui_MainWindow()
        self.ui.setupUi(self)
        self.q_in = Queue.Queue(maxsize=1000)
        self.q_out = Queue.Queue(maxsize=1000)
        
        self.startFreq = 130e6
        self.stopFreq = 140e6
        self.span = self.stopFreq - self.startFreq
        self.center = self.startFreq + self.span/2
        
        self.gain = 0
        self.samp_rate = 2.4e6
        self.nfft = self.ui.rbwEdit.itemData(self.ui.rbwEdit.currentIndex()).toInt()
        self.num_samples = self.nfft*2
        self.step = 1.8e6
        self.FFT = True
        self.HOLD = False
        self.PEAK = False
        self.CORRECT = False
        self.AVERAGE = False
        self.SAVE = [False, False, False]
        self.saved = [False, False, False]
        self.MARKERS = [False, False, False, False]
        self.DELTA = False
        self.delta_index = None
        self.delta_value = 0
        self.marker_index = [None, None, None, None]
        self.marker_value = [0, 0, 0, 0]
        self.DIF_MARKER = False
        self.correction = 0
        self.max_hold = []
        self.peak_search = ()
        self.avg = []
        self.avg_counter = 0
        self.num_avg = 1
        self.ref = 80
        self.length = 2048
        self.slice_length = int(np.floor(self.length*(self.step/self.samp_rate)))
        print "Slice: " + str(self.slice_length)
        
        
        
        self.getFreqs()    
        
        #self.waterfallData = WaterfallData(100)        
        
        self.setupSampler()
        self.setupWorkers()
        self.setupPrinter()
        
        self.ui.refEdit.setValue(self.ref)
        self.ui.gainDisp.display(20)
        
        self.ui.startEdit.valueChanged.connect(self.onStart)
        self.ui.stopEdit.valueChanged.connect(self.onStop)
        self.ui.rbwEdit.activated[int].connect(self.onRbw)
        self.ui.centerEdit.valueChanged.connect(self.onCenter)
        self.ui.spanEdit.valueChanged.connect(self.onSpan)
        self.ui.refEdit.valueChanged.connect(self.onRef)
        #self.ui.offsetButton.clicked.connect(self.onOffset)
        self.ui.correctButton.clicked.connect(self.onCorrect)
        self.ui.holdCheck.stateChanged.connect(self.onHold)
        self.ui.peakCheck.stateChanged.connect(self.onPeak)
        self.ui.avgCheck.stateChanged.connect(self.onAvg)
        self.ui.traceButton.clicked.connect(self.onSave_1)
        self.ui.traceButton_2.clicked.connect(self.onSave_2)
        self.ui.traceButton_3.clicked.connect(self.onSave_3)
        self.ui.markerCheck.stateChanged.connect(self.onMarker_1)
        self.ui.markerCheck_2.stateChanged.connect(self.onMarker_2)
        self.ui.markerCheck_3.stateChanged.connect(self.onMarker_3)
        self.ui.markerCheck_4.stateChanged.connect(self.onMarker_4)
        self.ui.markerEdit.valueChanged.connect(self.onMarkerEdit_1)
        self.ui.markerEdit_2.valueChanged.connect(self.onMarkerEdit_2)
        self.ui.markerEdit_3.valueChanged.connect(self.onMarkerEdit_3)
        self.ui.markerEdit_4.valueChanged.connect(self.onMarkerEdit_4)
        self.ui.deltaCheck.stateChanged.connect(self.onDelta)
        self.ui.deltaEdit.valueChanged.connect(self.onDeltaEdit)
        '''
        try:
            self.usb = USBController()     
        except:
            print "Failed to initialize USB controller. Please reconnect."
            self.close()
        '''
        self.getOffset()        
        
    def setupSampler(self):
        self.samplerThread = QtCore.QThread(self)
        self.sampler = Sampler(self.gain, self.samp_rate, self.freqs, self.num_samples, self.q_in)
        self.sampler.moveToThread(self.samplerThread)
        self.samplerThread.started.connect(self.sampler.sampling)
        self.sampler.abortStart.connect(self.onAbort)
        self.ui.gainSlider.valueChanged[int].connect(self.setGain)
        #self.ui.gainSlider.valueChanged[int].connect(self.sampler.changeGain, QtCore.Qt.QueuedConnection)
        self.samplerThread.start(QtCore.QThread.NormalPriority)
        
    def setupPrinter(self):
        self.printerThread = QtCore.QThread(self)
        self.printer = Printer(self.q_out, self.slice_length)
        self.printer.moveToThread(self.printerThread)
        self.printerThread.started.connect(self.printer.printing)
        self.printer.dataReady.connect(self.plotUpdate)
        self.printerThread.start(QtCore.QThread.NormalPriority)
        
    def setupWorkers(self):
        self.threads = []
        self.workers = []
        for i in range(self.num_threads):
            print 'Starting worker #' + str(i+1) + '...'
            self.threads.append(QtCore.QThread(self))
            self.workers.append(Worker(self.q_in, self.q_out, self.nfft, self.length, self.slice_length, self.samp_rate, i))
            self.workers[i].moveToThread(self.threads[i])
            self.threads[i].started.connect(self.workers[i].working)
            self.workers[i].abort.connect(self.onAbort)
            self.threads[i].start(QtCore.QThread.NormalPriority)
            
    def getFreqs(self):
        self.freqs = np.arange(self.startFreq+self.step/2, self.stopFreq+self.step/2, self.step)
        self.ui.plot.setAxisScale(self.ui.plot.xBottom, self.startFreq/1e6, self.stopFreq/1e6)        
        self.ui.startEdit.setValue(self.startFreq/1e6)
        self.ui.stopEdit.setValue(self.stopFreq/1e6)
        self.ui.centerEdit.setValue(self.center/1e6)
        self.ui.spanEdit.setValue(self.span/1e6)    
        self.ui.centerEdit.setSingleStep(self.span/1e6)
        
    def updateFreqs(self):
        self.getFreqs()
        self.sampler.freqs = self.freqs
        self.printer.xdata = []
        self.printer.ydata = []
        self.sampler.BREAK = True
        self.max_hold = []
        self.avg = []
        self.marker_index = [None, None, None, None]
        self.delta_index = None
        
        with self.q_in.mutex:
            self.q_in.queue.clear()
        with self.q_out.mutex:
            self.q_out.queue.clear()
        
        self.ui.plot.setAxisScale(self.ui.plot.xBottom, self.startFreq/1e6, self.stopFreq/1e6)
        self.ui.centerEdit.setSingleStep(self.span/1e6)
        
    def updateRbw(self):
        self.marker_index = [None, None, None, None]
        self.delta_index = None
        if self.nfft < 200:
            self.num_samples = 256
        else:
            self.num_samples = self.nfft*2
            
        if self.span >=50e6:
            threshold = 200
        elif self.span >= 20e6: 
            threshold = 500
        else:
            threshold = 1000
            
        if self.nfft < threshold:
            self.length = 1024
            self.slice_length = int(np.floor(self.length*(self.step/self.samp_rate)))        
        else:
            self.length = self.nfft
            self.slice_length = int(np.floor(self.length*(self.step/self.samp_rate)))
        

    @QtCore.pyqtSlot(object)
    def plotUpdate(self, data):
        ydata = data[0].tolist()
        xdata = data[1].tolist()
        
        if self.HOLD:
            if len(ydata) != self.slice_length*len(self.freqs):
                pass
            else:
                if len(self.max_hold) < len(data[0]):
                    self.max_hold = data[0]
                else:
                    dif = data[0] - self.max_hold
                    dif = dif.clip(0)
                    self.max_hold += dif
                self.ui.hold_curve.setData(xdata, self.max_hold.tolist())
            
        if self.AVERAGE:
            if len(ydata) != self.slice_length*len(self.freqs):
                pass
            else:
                if self.avg_counter == 0:
                    if len(self.avg)<self.num_avg:
                        self.avg.append(data[0])
                    else:
                        self.avg = self.avg[1:]
                        self.avg.append(data[0])
                    self.avg_counter = len(self.freqs)
                else:
                    self.avg_counter -= 1
                temp = np.sum(self.avg, 0)
                temp = temp/len(self.avg)
                ydata = temp
                
        if self.PEAK:
            index = np.argmax(data[0])
            self.peak_search = (xdata[index], ydata[index])
            self.ui.peak_marker.setValue(self.peak_search[0], self.peak_search[1])
            self.ui.peak_marker.setLabel(QwtText("Peak:\n%.2f MHz, %.2f dBm" % self.peak_search))
            
        for i in range(len(self.SAVE)):
            if self.SAVE[i]:
                if self.saved[i]:
                    self.ui.saved_curves[i].detach()
                    self.saved[i] = False
                    self.ui.traces[i].setDown(False)
                else:
                    self.ui.saved_curves[i].setData(xdata, ydata)
                    self.ui.saved_curves[i].attach(self.ui.plot)
                    self.saved[i] = True
                self.SAVE[i] = False
            
        for i in range(len(self.MARKERS)):
            if self.MARKERS[i]:
                if len(ydata) != self.slice_length*len(self.freqs):
                    pass
                else:
                    self.ui.markers[i].attach(self.ui.plot)
                    if self.marker_index[i] == None:
                        value = self.marker_value[i]
                        index = np.argmin(np.abs(data[1]-value))
                        self.marker_index[i] = index
                    self.ui.markers[i].setValue(xdata[self.marker_index[i]], ydata[self.marker_index[i]])
                    self.ui.markers[i].setLabel(QwtText("Mk%i\n%.2fdBm" % (i+1, ydata[self.marker_index[i]])))                
                    
        if self.DELTA:
            if len(ydata) != self.slice_length*len(self.freqs):
                pass
            else:
                self.ui.delta_marker.attach(self.ui.plot)
                if self.delta_index == None:
                    value = self.delta_value
                    index = np.argmin(np.abs(data[1]-value))
                    self.delta_index = index
                self.ui.delta_marker.setValue(xdata[self.delta_index], ydata[self.delta_index])
                temp_x = xdata[self.delta_index] - xdata[self.marker_index[0]]
                temp_y = ydata[self.delta_index] - ydata[self.marker_index[0]]
                self.ui.delta_marker.setLabel(QwtText("Delta\n%.2fMHz, %.2fdB" % (temp_x, temp_y)))
                    
        while self.CORRECT > 0:
            
            correction = np.reshape(data[0], (-1,self.slice_length))
            correction = np.sum(correction, 0)/len(correction)
            self.correction += correction
            self.CORRECT -= 1
            if self.CORRECT == 0:
                self.correction = self.correction/10000
                self.correction -= max(self.correction)
                print max(self.correction)
                #self.correction = self.correction[:100]+np.zeros(self.slice_length-200)+self.correction[-100:]
                self.correction -= np.mean(self.correction)                
                for i in range(self.num_threads):
                    self.workers[i].correction = self.correction
                print "New correction vector applied"
                
        #print len(ydata)
        self.ui.curve.setData(xdata, ydata)
        self.ui.plot.replot() 
        
    @QtCore.pyqtSlot(float)
    def setGain(self,gain):
        self.gain = gain
        self.sampler.gain = gain
       
    @QtCore.pyqtSlot(float)
    def onStart(self,start):
        if start*1e6 < self.stopFreq:
            self.startFreq = start*1e6
            self.span = self.stopFreq - self.startFreq
            self.center = self.startFreq + self.span/2
            self.updateFreqs()
        else:
            self.startFreq = start*1e6
            self.stopFreq = self.startFreq + self.step
            self.span = self.stopFreq - self.startFreq
            self.center = self.startFreq + self.span/2
            self.updateFreqs()
    
    @QtCore.pyqtSlot(float)   
    def onStop(self,stop):
        if stop*1e6 > self.startFreq:
            self.stopFreq = stop*1e6
            self.span = self.stopFreq - self.startFreq
            self.center = self.startFreq + self.span/2
            self.updateFreqs()
        else:
            self.stopFreq = stop*1e6
            self.startFreq = self.stopFreq - self.step
            self.span = self.stopFreq - self.startFreq
            self.center = self.startFreq + self.span/2
            self.updateFreqs()
            
    @QtCore.pyqtSlot(int)
    def onRbw(self,index):
        self.nfft = self.ui.rbwEdit.itemData(index).toInt()[0]
        self.updateRbw()
        self.sampler.num_samples = self.num_samples
        self.printer.length = self.slice_length
        self.updateFreqs()
        
        for i in range(self.num_threads):
            self.workers[i].nfft = self.nfft   
            self.workers[i].length = self.length
            self.workers[i].slice_length = self.slice_length
            self.workers[i].correction = 0
            
    @QtCore.pyqtSlot(float)   
    def onCenter(self,center):
        self.center = center*1e6
        self.startFreq = self.center - self.span/2
        self.stopFreq = self.center + self.span/2
        self.updateFreqs()
        
    @QtCore.pyqtSlot(float)   
    def onSpan(self,span):
        self.span = span*1e6
        self.startFreq = self.center - self.span/2
        self.stopFreq = self.center + self.span/2
        self.updateFreqs()
        
        
    @QtCore.pyqtSlot(int)
    def onRef(self, ref):
        self.ref = ref
        self.ui.plot.setAxisScale(self.ui.plot.yLeft, ref-100, ref)
        self.ui.scaleColors(self.ref)
                
    def getOffset(self):
        self.sampler.MEASURE = True
        time.sleep(0.5)
        self.sampler.MEASURE = False
        self.offset = self.sampler.offset
        print "New offset: " + str(self.offset)
        for i in range(self.num_threads):
                self.workers[i].offset = self.offset  
                
    @QtCore.pyqtSlot()          
    def onSave_1(self):
        self.SAVE[0] = True
        self.ui.traceButton.setDown(True)
        
    @QtCore.pyqtSlot()          
    def onSave_2(self):
        self.SAVE[1] = True
        self.ui.traceButton_2.setDown(True)
        
    @QtCore.pyqtSlot()          
    def onSave_3(self):
        self.SAVE[2] = True
        self.ui.traceButton_3.setDown(True)
        
    @QtCore.pyqtSlot(int)          
    def onMarker_1(self, state):
        if state == 2:
            self.MARKERS[0] = True
            self.ui.deltaCheck.setEnabled(True)
            self.ui.markerEdit.setEnabled(True)
            self.ui.markerEdit.setRange(self.startFreq/1e6, self.stopFreq/1e6)
            self.ui.markerEdit.setValue(self.center/1e6)
        elif state == 0:
            self.MARKERS[0] = False
            self.ui.markerEdit.setDisabled(True)
            self.ui.marker_1.detach()
            self.ui.delta_marker.detach()
            self.ui.deltaCheck.setDisabled(True)
            
    @QtCore.pyqtSlot(float)
    def onMarkerEdit_1(self, freq):
        self.marker_index[0] = None
        self.marker_value[0] = freq
        
    @QtCore.pyqtSlot(int)          
    def onMarker_2(self, state):
        if state == 2:
            self.MARKERS[1] = True
            self.ui.markerEdit_2.setEnabled(True)
            self.ui.markerEdit_2.setRange(self.startFreq/1e6, self.stopFreq/1e6)
            self.ui.markerEdit_2.setValue(self.center/1e6)
        elif state == 0:
            self.MARKERS[1] = False
            self.ui.markerEdit_2.setDisabled(True)
            self.ui.marker_2.detach()
        
    @QtCore.pyqtSlot(float)
    def onMarkerEdit_2(self, freq):
        self.marker_index[1] = None
        self.marker_value[1] = freq
        
    @QtCore.pyqtSlot(int)          
    def onMarker_3(self, state):
        if state == 2:
            self.MARKERS[2] = True
            self.ui.markerEdit_3.setEnabled(True)
            self.ui.markerEdit_3.setRange(self.startFreq/1e6, self.stopFreq/1e6)
            self.ui.markerEdit_3.setValue(self.center/1e6)
        elif state == 0:
            self.MARKERS[2] = False
            self.ui.markerEdit_3.setDisabled(True)
            self.ui.marker_3.detach()
        
    @QtCore.pyqtSlot(float)
    def onMarkerEdit_3(self, freq):
        self.marker_index[2] = None
        self.marker_value[2] = freq
        
    @QtCore.pyqtSlot(int)          
    def onMarker_4(self, state):
        if state == 2:
            self.MARKERS[3] = True
            self.ui.markerEdit_4.setEnabled(True)
            self.ui.markerEdit_4.setRange(self.startFreq/1e6, self.stopFreq/1e6)
            self.ui.markerEdit_4.setValue(self.center/1e6)
        elif state == 0:
            self.MARKERS[3] = False
            self.ui.markerEdit_4.setDisabled(True)
            self.ui.marker_4.detach()
        
    @QtCore.pyqtSlot(float)
    def onMarkerEdit_4(self, freq):
        self.marker_index[3] = None
        self.marker_value[3] = freq
        
    @QtCore.pyqtSlot(int)          
    def onDelta(self, state):
        if state == 2:
            self.DELTA = True
            self.ui.deltaEdit.setEnabled(True)
            self.ui.deltaEdit.setRange(self.startFreq/1e6, self.stopFreq/1e6)
            self.ui.deltaEdit.setValue(self.center/1e6)
        elif state == 0:
            self.DELTA = False
            self.ui.deltaEdit.setDisabled(True)
            self.ui.delta_marker.detach()
        
    @QtCore.pyqtSlot(float)
    def onDeltaEdit(self, freq):
        self.delta_index = None
        self.delta_value = freq
    
    @QtCore.pyqtSlot(int)            
    def onHold(self, state):
        if state == 2:
            self.HOLD = True
            self.ui.hold_curve.attach(self.ui.plot)
        elif state == 0:
            self.HOLD = False
            self.ui.hold_curve.detach()
            self.max_hold = []
            
    @QtCore.pyqtSlot(int)            
    def onAvg(self, state):
        if state == 2:
            self.AVERAGE = True
            self.num_avg = self.ui.avgEdit.value()
        elif state == 0:
            self.AVERAGE = False
            self.num_avg = 1
            self.avg = []
            
    @QtCore.pyqtSlot(int)            
    def onPeak(self, state):
        if state == 2:
            self.PEAK = True
            self.ui.peak_marker.attach(self.ui.plot)
        elif state == 0:
            self.PEAK = False
            self.ui.peak_marker.detach()
            self.peak_search = ()
            
    def onCorrect(self):
        self.correction = 0
        self.CORRECT = 10000
        
    @QtCore.pyqtSlot()
    def onAbort(self):
        print "Aborting..."
        self.close()
    
    def closeEvent(self, event):
        print "Closing..."  
        
        while self.samplerThread.isRunning():
            self.sampler.BREAK = True
            time.sleep(0.1)
            self.sampler.WORKING = False  
            time.sleep(0.1)
            #self.sampler.sdr.close()
            self.q_in.join()
            self.samplerThread.quit()
            
        
        #self.q_out.join()    
        print "dupa2"
        if self.printerThread.isRunning():
            self.printer.WORKING = False
            self.printerThread.quit()
            
        print 'dupa'
        for i in range(self.num_threads):
            if self.threads[i].isRunning():
                self.threads[i].quit()
        
        
            
        #self.q_out.join()    
        with self.q_out.mutex:
            self.q_out.queue.clear()
        print "dupa3"
        
            
        '''with self.q_in.mutex:
Example #45
class Parser:
    def __init__(self, model, temp, c):
        self.model = model
        self.extractor = Extractor(model)
        self.sampler = Sampler(model, self.extractor, temp, c) # model may be  redundant


    def decode(self, sent, num = 30):
        trees = defaultdict(int)
        exp_scores = self.extractor.all_exp_scores(sent)
        seed = self.sampler.random_tree(sent, exp_scores)
        optimal = seed
        slw = self.extractor.tree_local_weights(seed)
        sls = self.extractor.tree_score(slw)
        sgw = self.extractor.tree_global_weights(seed)
        sgs = self.extractor.tree_score(sgw)
        ss = sls + sgs

        for i in xrange(num): # or converge
            tree = self.sampler.propose(sent, seed, exp_scores)
            tlw = self.extractor.tree_local_weights(tree)
            tls = self.extractor.tree_score(tlw)
            tgw = self.extractor.tree_global_weights(tree)
            tgs = self.extractor.tree_score(tgw)
            ts = tls + tgs
            if ts > ss or random() < self.sampler.trans_prob(sgs, tgs):
                seed, ss = tree, ts
        return optimal


    def learn(self, instances, epochs = 10, num = 10):
        model = self.model
        sampler = self.sampler
        extractor = self.extractor
        for e in xrange(epochs):
            print 'iteration: %d' % e
            for i, sent in enumerate(instances):
                if i % 100 == 0:
                    print i
                gold = sent.get_gold_tree()
                gw = extractor.tree_local_weights(gold) + extractor.tree_global_weights(gold)

                seed = sampler.random_tree(sent)
                sw = extractor.tree_local_weights(seed) + extractor.tree_global_weights(seed)
                se = seed.error(gold)

                for i in xrange(num):
                    tree = sampler.propose(sent, seed)
                    tw = extractor.tree_local_weights(tree) + extractor.tree_global_weights(tree)
                    te = tree.error(gold)

                    gs = self.extractor.tree_score(gw)
                    ts = self.extractor.tree_score(tw)
                    ss = self.extractor.tree_score(sw)

                    
                    # C1
                    if gs - ts < te:
                        model.update(gw, tw)

                    # C2
                    if te < se: # tree > seed
                        if ts - ss < (se - te):
                            model.update(tw, sw) # se - te) margin
                    else: # tree <= seed
                        if ss - ts < (te - se):
                            model.update(sw, tw)

                    # accept or reject, should be probabilistic, deterministic for now
                    if te < se:
                        seed, sw, se = tree, tw, te

                    model.qadd()
        model.average()
        # model.show()


    def decode_show(self, sent, num = 30000):

        o = open('samples.conll06', 'w')

        statistics = {d: {h: 0 for h in sent if h is not d} for d in sent[1:]}
        trees = defaultdict(int)

        exp_scores = self.extractor.all_exp_scores(sent)
        seed = self.sampler.random_tree(sent, exp_scores)
        slw = self.extractor.tree_local_weights(seed)
        sls = self.extractor.tree_score(slw)
        sgw = self.extractor.tree_global_weights(seed)
        sgs = self.extractor.tree_score(sgw)
        ss = sls + sgs
        optimal, opt_score = seed, ss  # best-scoring tree seen so far

        for i in xrange(num): # or converge
            tree = self.sampler.propose(sent, seed, exp_scores)
            tlw = self.extractor.tree_local_weights(tree)
            tls = self.extractor.tree_score(tlw)
            tgw = self.extractor.tree_global_weights(tree)
            tgs = self.extractor.tree_score(tgw)
            ts = tls + tgs
            if ts > ss or random() < self.sampler.trans_prob(sgs, tgs):
                seed, ss = tree, ts
            if ss > opt_score:
                optimal, opt_score = seed, ss

            for (d, h) in seed.head_map.items():
                statistics[d][h] += 1
            trees[self.tree_key(seed)] += 1
        # print statistics
        print 'trees:', len(trees)
        for k, v in sorted(trees.items(), key = lambda x: x[1], reverse = True):
            print k, '\t', v
        for d in statistics:
            for h in statistics[d]:
                print d, h, exp_scores[d][h], statistics[d][h]
            o.write(tree.to_str())
        o.close()

        return optimal


    def change_head_stats(self, sent, num = 500):
        exp_scores = self.extractor.all_exp_scores(sent)
        statistics = {d: {h: 0 for h in sent if h is not d} for d in sent[1:]}
        trees = defaultdict(int)

        seed = self.sampler.random_tree(sent, exp_scores)
        sw = self.extractor.tree_local_weights(seed) # should be global
        ss = self.extractor.tree_score(sw)

        for i in xrange(num): # or converge
            tree = self.sampler.propose(sent, seed, exp_scores)
            tw = self.extractor.tree_local_weights(tree)
            ts = self.extractor.tree_score(tw)

            if ts > ss or random() < self.sampler.trans_prob(seed, tree, ss, ts, exp_scores):
                seed, ss = tree, ts

            for (d, h) in seed.head_map.items():
                statistics[d][h] += 1
            trees[self.tree_key(seed)] += 1


        print 'trees:', len(trees)
        for k, v in sorted(trees.items(), key = lambda x: x[1], reverse = True):
            print k, '\t', v

        for d in statistics:
            for h in statistics[d]:
                print d, h, exp_scores[d][h], statistics[d][h]

        # return optimal



    def random_tree_stats(self, sent, num = 100000):
        exp_scores = self.extractor.all_exp_scores(sent)
        trees = defaultdict(int)

        for i in xrange(num): # or converge
            tree = self.sampler.random_tree(sent, exp_scores)
            trees[self.tree_key(tree)] += 1
        print 'trees:', len(trees)
        for k, v in sorted(trees.items(), key = lambda x: x[1], reverse = True):
            print k, '\t', v


    def tree_key(self, tree):
        return ','.join(['%s<-%s' % (d, h) for (d, h) in sorted(tree.head_map.items())])        
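
The decode loop above follows a simple pattern: propose a neighbouring tree, accept it if it scores higher (or occasionally even if it does not), and separately remember the best tree seen. Below is a minimal, self-contained sketch of that pattern with toy score and proposal functions standing in for the real Extractor and Sampler; the exponential acceptance rule is an assumption for illustration, not the actual trans_prob.

from math import exp
from random import random, gauss

def toy_score(x):
    return -(x - 3.0) ** 2           # toy objective, maximised at x = 3

def toy_propose(x):
    return x + gauss(0.0, 0.5)       # toy local proposal around the current state

def toy_decode(num=30, temp=1.0):
    seed = 0.0
    ss = toy_score(seed)
    optimal, opt_score = seed, ss    # best state seen so far
    for _ in range(num):
        cand = toy_propose(seed)
        ts = toy_score(cand)
        # accept if better, or occasionally accept a worse candidate
        if ts > ss or random() < min(1.0, exp((ts - ss) / temp)):
            seed, ss = cand, ts
        if ss > opt_score:           # remember the best state, not just the last accepted one
            optimal, opt_score = seed, ss
    return optimal

print(toy_decode())
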
Beispiel #46
0
    def __init__(self, model, temp, c):
        self.model = model
        self.extractor = Extractor(model)
        self.sampler = Sampler(model, self.extractor, temp, c)  # model may be redundant
Beispiel #47
0
import tensorflow as tf
with tf.device('/cpu:0'):
    from sampler import Sampler

    sampler = Sampler(c_dim = 1, z_dim = 4, scale = 10.0, net_size = 32)

    z1 = sampler.generate_z()
    # sampler.show_image(sampler.generate(z1))

    z2 = sampler.generate_z()
    # sampler.show_image(sampler.generate(z2))
    for i in xrange(8):
        sampler.cppn.num_tan_layers = i
        sampler.cppn.regen()
        sampler.save_anim_gif(z1, z2, str(i) + '.mp4', n_frame = 180, x_dim=512, y_dim=512)  # str() needed: i is an int
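
The loop above renders one clip per setting of num_tan_layers, each blending between the two latent vectors z1 and z2. As a rough illustration of what the per-frame inputs of such an animation boil down to, the sketch below linearly interpolates between two latent vectors with numpy; the linear blend schedule is an assumption for illustration, not the library's actual save_anim_gif implementation.

import numpy as np

def interpolate_z(z1, z2, n_frame=180):
    """Return n_frame latent vectors that blend z1 into z2."""
    return [(1.0 - t) * z1 + t * z2 for t in np.linspace(0.0, 1.0, n_frame)]

z_a = np.random.normal(size=(1, 4))   # same z_dim = 4 as in the example above
z_b = np.random.normal(size=(1, 4))
frames = interpolate_z(z_a, z_b, n_frame=180)
print(len(frames), frames[0].shape)
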
Beispiel #48
0
class Monitor():
    def __init__(self,name="mon0", sample_rate=1000,
                 estimation_interval=1.0,
                 meter_name=None,
                 meter_interval=30.0,
                 link_speed=10,
                 alarm_trigger_value=None,
                 cutoff=99,
                 interface="eth0",
                 controller_IP='10.0.0.11',
                 conflistener_IP='0.0.0.0',
                 confport=54736,
                 meter_file_name=None, # if not None, write metering
                                       # data to a file instead of to Ceilometer
                 meter_host_and_port=None, # if not None, write metering
                                       # data to a local port instead of to Ceilometer
                 debug=False,
                 log=False,
                 display_data=False,
                 resid=None,
                 projid=None,
                 username='******',
                 password=None,
                 tenantname='admin',
                 mode=None):
        self.exit_flag = False
        self.mode = mode
        self.debug = debug
        self.log = log
        self.display_data = display_data
#        self.meter_name = 'sdn_at_edge'
        self.meter_name = meter_name

        if alarm_trigger_value is None:
            raise RuntimeError('missing alarm_trigger_value in Monitor.__init__')
        else:
            self.set_alarm_trigger(alarm_trigger_value)

        self.set_cutoff(cutoff)

        if self.debug and self.log:
            self.logger = logging.getLogger(__name__)
            now = datetime.now()
            nowstr = now.strftime('%Y-%m-%dT%H.%M.%S')
            logging.basicConfig(filename='monitor_' + (meter_file_name if meter_name is None else meter_name) + '_' + nowstr + '.log',
                                level=logging.DEBUG,
                                format='%(asctime)s %(levelname)s %(message)s')
            # make the 'sh' module be quiet
            logging.getLogger("sh").setLevel(logging.CRITICAL + 1)
        self.conflistener_IP=conflistener_IP
        self.conflistenerport=confport
        self.name = name
        #
        self.config_queue = Queue()
        self.config_reply = Queue()
        self.reset(sample_rate,interface)
        #
        self.conf_event = Event()
        #
        if link_speed is None:
            self.interface_type,self.linerate = self.get_linerate(interface)
            if self.linerate is None:
                raise(ValueError("Cannot determine the linerate of " + interface))
        else:
            self.set_linerate(link_speed)
            self.interface_type = InterfaceType.Ethernet
        self.link_speed = self.linerate_to_link_speed(self.linerate)
        self.est_interval = estimation_interval
        self.meter_interval = meter_interval
        self.resource_ID = resid
        self.project_ID = projid
        self.username = username
        self.tenantname = tenantname
        if self.debug:
            self.ceilocomm = None
        else:
            self.ceilocomm = CeiloComm(resid,projid,controller=controller_IP,
                                       file_name=meter_file_name,
                                       host_and_port=meter_host_and_port)
        self.authpassword = password
        if not self.debug:
            self.get_auth_token()


    # Print some trace output on the terminal, or, if self.log is
    # True, write the trace output to a log file.
    def debugPrint(self,tracestring):
        if self.log:
            self.logger.debug(tracestring)
        else:
            print(tracestring)


    def reset(self,sample_rate,interface):
        self.time_of_last_meter = self.time_of_last_calc = time.time()
        self.last_samples = 0
        self.last_tx_agg = self.last_rx_agg = 0.0
        self.last_txb2 = self.last_rxb2 = 0.0
        self.mean_tx = self.mean_rx = 0.0
        self.var_tx = self.var_rx = 0.0
        self.mu_tx = self.mu_rx = 0.0
        self.sigma2_tx = self.sigma2_rx = 0.0
        self.request_queue = Queue()
        self.sample_queue = Queue()
        self.sampler = Sampler(self.request_queue,self.sample_queue,
                               sample_rate,self,interface=interface,
                               debug=self.debug)
        if self.debug:
            print("\33[H",end="")   # move cursor home
            print("\33[J",end="")   # clear screen


    def clear_queue(self,q):
        while not q.empty():
            try:
                q.get(False)
            except Empty:
                continue
            q.task_done()


    # Start a new sampler
    def start_sampler(self):
        self.clear_queue(self.request_queue)
        self.clear_queue(self.sample_queue)
        self.reset(self.sampler.get_sample_rate(),
                   self.sampler.get_interface())
        self.sampler.setDaemon(True)
        self.sampler.start()


    # Stop the running sampler
    def stop_sampler(self):
        self.request_queue.put('stop')
        self.sampler.request_event.set()


    # Return status of the sampler
    def status_sampler(self,sampler):
        if sampler is None:
#            return 'stopped'
            return SamplerStatus.stopped
        elif sampler.stopped():
#            return 'stopped'
            return SamplerStatus.stopped
        elif sampler.running():
#            return 'running'
            return SamplerStatus.running
        else:
#            return 'unknown'
            return SamplerStatus.unknown


    # Set sample rate (in samples per second)
    def set_sample_rate(self, sampler, sample_rate):
        sampler.set_sample_rate(sample_rate)


    # Set estimation interval (in seconds)
    def set_estimation_interval(self,interval):
        self.est_interval = interval


    # Set meter interval (in seconds)
    def set_meter_interval(self,interval):
        self.meter_interval = interval


    # Set linerate (link_speed is given in Mbits/s)
    def set_linerate(self,link_speed):
#        self.linerate = link_speed * 1024 * 1024 / 8 # convert from Mbit/s to bytes/s
        self.linerate = link_speed * 1000 * 1000 / 8 # convert from Mbit/s to bytes/s
        
    # Convert back from linerate in bytes/s to link speed in Mbit/s
    def linerate_to_link_speed(self,linerate):
        return linerate * 8 / 1000 / 1000

    # Set alarm trigger value (overload risk which will trigger an alarm; percentage)
    def set_alarm_trigger(self,alarm_trigger_value):
        self.alarm_trigger_value = alarm_trigger_value        

    # Set the cutoff for the overload risk calculation
    def set_cutoff(self,cutoff):
        self.cutoff = cutoff / 100.0


    def listen_for_configuration(self):
        server = createConfServer(self.conflistener_IP,self.conflistenerport,self)
        server.serve_forever()


    def handle_configuration(self,sampler,config_queue,config_reply):
        reply = {}
        if self.debug:
            self.debugPrint('handle_configuration: config_queue.get()')
        data = config_queue.get()
        config_queue.task_done()
        #
        resume = data.get('resume')
        if not resume is None and sampler.keep_running == False:
            # resume is implemented by starting a new sampler thread.
            self.start_sampler()
## The start request is sent to the sampler asynchronously, so we
## can't ask it if it has stopped simply by calling a method on the
## sampler object
#            reply['started'] = sampler.running()
            reply['resumed'] = 'ok'
        #
        stop = data.get('pause')
        if not stop is None and sampler.keep_running == True:
            # pause is implemented by telling the sampler thread to exit.
            self.stop_sampler()
## The stop request is sent to the sampler asynchronously, so we
## can't ask it if it has stopped simply by calling a method on the
## sampler object
#            reply['stopped'] = sampler.stopped()
            reply['paused'] = 'ok'
        #
        status = data.get('status')
        if not status is None:
            pstatus = self.status_sampler(sampler)
            if pstatus == SamplerStatus.stopped:
                reply['status'] = 'paused'
            elif pstatus == SamplerStatus.running:
                reply['status'] = 'running'
            else:
                reply['status'] = 'unknown'
        #
        exit_cmd = data.get('exit')
        if not exit_cmd is None and sampler.keep_running == True:
            reply['exit'] = 'ok'
            self.exit()
        #
        interface = data.get('interface')
        if not interface is None:
            sampler.set_interface(interface)
            reply['interface'] = sampler.get_interface()
        #
        sample_rate = data.get('sample_rate')
        if not sample_rate is None:
            self.set_sample_rate(sampler,sample_rate)
            reply['sample_rate'] = sampler.get_sample_rate()
        #
        estimation_interval = data.get('estimation_interval')
        if not estimation_interval is None:
            self.set_estimation_interval(estimation_interval)
            reply['estimation_interval'] = self.est_interval
        #
        meter_interval = data.get('meter_interval')
        if not meter_interval is None:
            self.set_meter_interval(meter_interval)
            reply['meter_interval'] = self.meter_interval
        #
        link_speed = data.get('link_speed')
        if not link_speed is None:
            self.set_linerate(link_speed)
            reply['linerate'] = self.linerate
        #
        alarm_trigger = data.get('alarm_trigger')
        if not alarm_trigger is None:
            self.set_alarm_trigger(alarm_trigger)
            reply['alarm_trigger'] = self.alarm_trigger_value
        #
        cutoff = data.get('cutoff')
        if not cutoff is None:
            self.set_cutoff(cutoff)
            reply['cutoff'] = self.cutoff
        #
        if self.debug:
            self.debugPrint('handle_configuration: ' + repr(self.config_reply) + '.put(' + repr(reply) + ')')
#        pdb.set_trace()
        if reply == {}:
            reply['unknown option(s)'] = data
        self.config_reply.put(reply)
        if self.debug:
            self.debugPrint('handle_configuration: qsize==' + repr(self.config_reply.qsize()))


    # Get and save an authorization token and its expiration date/time
    # from Keystone.
    # The authorization token is used later when communicating with Ceilometer.
    def get_auth_token(self):
        if self.debug:
            return
        authtokenpair = self.ceilocomm.getAuthToken(tenantname=self.tenantname,username=self.username,password=self.authpassword)
        self.authtoken = authtokenpair.get('tok')
        authexpstr = authtokenpair.get('exp')
        if authexpstr[-1] == 'Z':
            expstrUTC = authexpstr[0:-1] # remove the Z
            local_authexptime = datetime.strptime(expstrUTC,'%Y-%m-%dT%H:%M:%S')
            local_tz_offset = datetime.fromtimestamp(time.mktime(time.localtime())) - datetime.fromtimestamp(time.mktime(time.gmtime()))
            authexptime_no_tz = local_authexptime - local_tz_offset # Convert local time to UTC
            self.authexptime = authexptime_no_tz.replace(tzinfo=UTC())
        else:
            self.authexptime = datetime.strptime(authexpstr,'%Y-%m-%dT%H:%M:%S')


    def estimate(self, sampler):

        # A wireless interface can increase or decrease its line rate
        # so the line rate is checked regularly for WiFi.
        if (self.interface_type == InterfaceType.Wireless):
            dummy,self.linerate = self.get_linerate_wireless(sampler.get_interface())

        t = time.time()
        est_timer = t - self.time_of_last_calc
        self.time_of_last_calc =  t

#        self.request_queue.put('r')
        self.request_queue.put('rate_data')
        rate_data = self.sample_queue.get()
        self.sample_queue.task_done()
        tx_agg = rate_data['tx_agg']
        rx_agg = rate_data['rx_agg']
        samples = rate_data['samples']
        txb2 = rate_data['txb2']
        rxb2 = rate_data['rxb2']

        n = samples - self.last_samples

        # Approximately kbytes/sec, but not exactly, since the number of
        # samples recorded in each sampling period (by default one
        # millisecond) jitters: the sampling often cannot keep up.
        self.mean_tx = (tx_agg - self.last_tx_agg) / n
        self.mean_rx = (rx_agg - self.last_rx_agg) / n

        mean_square_tx = self.mean_tx*self.mean_tx
        mean_square_rx = self.mean_rx*self.mean_rx

        sum_square_tx = (txb2 - self.last_txb2) / n
        sum_square_rx = (rxb2 - self.last_rxb2) / n

        # NOTE: Rounding to 5 decimals is perhaps correct if we get a
        # negative variance due to the measurement jitter.
        # It is not clear why the measurement jitter occurs, so why this
        # rounding is necessary remains somewhat of a mystery.
        self.var_tx = sum_square_tx - mean_square_tx
        if self.var_tx < 0:
            if self.display_data:
                print("\33[9;1H")  # 
                print("\33[0J")
            print("WARNING: self.var_tx == " + str(self.var_tx))
            self.var_tx = round(sum_square_tx - mean_square_tx,5) # round to avoid negative value
        self.var_rx = sum_square_rx - mean_square_rx
        if self.var_rx < 0:
            if self.display_data:
                print("\33[10;1H")  # 
                print("\33[0J")
            print("WARNING: self.var_rx == " + str(self.var_rx))
            self.var_rx = round(sum_square_rx - mean_square_rx,5) # round to avoid negative value

        if self.debug and False:
            print("\33[12;1H")
            print("\33[0J################### DEBUG ##################")
            print("\33[0Jest_timer:      %f"%est_timer)
            print("\33[0Jself.mean_tx:   %f        self.mean_rx:  %f"%(self.mean_tx,self.mean_rx))
            print("\33[0Jtxb2:           %f        rxb2           %f"%(txb2,rxb2))
            print("\33[0Jself.last_txb2  %f        self.last_rxb2 %f"%(self.last_txb2,self.last_rxb2))
            print("\33[0Jmean_square_tx  %f        mean_square_rx %f"%((mean_square_tx),(mean_square_rx)))
            print("\33[0Jsum_square_tx   %f        sum_square_rx  %f"%(sum_square_tx,sum_square_rx))
            print("\33[0Jself.var tx:         %f        self.var_rx:        %f"%(self.var_tx,self.var_rx))


        self.last_samples = samples

        self.last_tx_agg = tx_agg
        self.last_rx_agg = rx_agg

        self.last_txb2 = txb2
        self.last_rxb2 = rxb2

        # Estimate the moments
        try:
            if self.mean_tx != 0.0:
                self.sigma2_tx = math.log(1.0+(self.var_tx/mean_square_tx))
                self.mu_tx = math.log(self.mean_tx) - (self.sigma2_tx/2.0)
            else:
#                self.sigma2_tx = float('nan')
                self.sigma2_tx = 0.0
                self.mu_tx = 0.0

            if self.mean_rx != 0.0:
                self.sigma2_rx = math.log(1.0+(self.var_rx/(mean_square_rx)))
                self.mu_rx = math.log(self.mean_rx) - (self.sigma2_rx/2.0)
            else:
#                self.sigma2_rx = float('nan')
                self.sigma2_rx = 0.0
                self.mu_rx = 0.0

        # Calculate the overload risk

## Based on the original code, using the CDF (Cumulative Distribution Function).
#            self.overload_risk_tx = (1-lognorm.cdf(self.linerate * self.cutoff,math.sqrt(self.sigma2_tx),0,math.exp(self.mu_tx)))*100
#            self.overload_risk_rx = (1-lognorm.cdf(self.linerate * self.cutoff,math.sqrt(self.sigma2_rx),0,math.exp(self.mu_rx)))*100

## Using the survival function (1 - cdf). See http://docs.scipy.org/doc/scipy/reference/generated/scipy.stats.lognorm.html for a motivation).
            self.overload_risk_tx = (lognorm.sf(self.linerate * self.cutoff,math.sqrt(self.sigma2_tx),0,math.exp(self.mu_tx)))*100
            self.overload_risk_rx = (lognorm.sf(self.linerate * self.cutoff,math.sqrt(self.sigma2_rx),0,math.exp(self.mu_rx)))*100

### According to our discussion, using the PPF (Percentile Point Function, or Quantile Function).
#            self.cutoff_rate_tx = (1-lognorm.ppf( self.cutoff,math.sqrt(self.sigma2_tx),0,math.exp(self.mu_tx)))
#            self.cutoff_rate_rx = (1-lognorm.ppf( self.cutoff,math.sqrt(self.sigma2_rx),0,math.exp(self.mu_rx)))
            # To estimate a risk: compare the calculated cutoff rate with the nominal line rate.

        except ValueError as ve:
            if self.display_data:
                print("\33[2K")
            print("Error in estimation: ({}):".format(ve))
            traceback.print_exc()
            if self.display_data:
                print("\33[2K")
            print("mean_tx: %.2e, mean_rx: %.2e "%(self.mean_tx,self.mean_rx))
            if self.display_data:
                print("\33[2K")
            print("var_tx: %.2e, var_rx: %.2e "%(self.var_tx,self.var_rx))
            if self.display_data:
                print("\33[2K")
            print("mean_square_tx: %.2e, mean_square_rx: %.2e "%(mean_square_tx,mean_square_rx))
            if self.display_data:
                print("\33[2K")
            print("rate_data: %s"%(rate_data,))
            exit(1)

        if self.display_data:
            try:
                print("\33[H",end="") # move cursor home
    # [PD] 2016-05-23, The calculation of "actual" seems to be buggy.
    #            print("\33[2KEstimate (sample_rate: {:d} actual({:d}), interface: {}, linerate: {:d}".format(sampler.get_sample_rate(), n, sampler.get_interface(),self.linerate))
                print("\33[2Ksample_rate (/s): {:d}, interface: {}, linerate (bytes/s): {:d}, link speed (Mbit/s): {:d}".format(sampler.get_sample_rate(), sampler.get_interface(),self.linerate,self.link_speed))
                print("\33[2KTX(mean: %.2e b/s std: %.2e mu: %.2e s2: %.2e, ol-risk: %.2e) "%(self.mean_tx,math.sqrt(self.var_tx),self.mu_tx,self.sigma2_tx, self.overload_risk_tx))
                print("\33[2KRX(mean: %.2e b/s std: %.2e mu: %.2e s2: %.2e, ol-risk: %.2e) "%(self.mean_rx,math.sqrt(self.var_rx),self.mu_rx,self.sigma2_rx, self.overload_risk_rx))
                print("\33[2Kestimation timer: {:.4f}".format(est_timer))
                print("\33[2Kestimation interval: {:.2f}".format(self.est_interval))
                print("\33[2Kmeter interval: %d"%(self.meter_interval))
                print("\33[2Kmode: %d"%(self.mode))
                if self.debug:
                    print("\33[2Kdebug: %s"%str(self.debug))
                    print("\33[2Ksample_queue size: %s"%str(self.sample_queue.qsize()))
            except ValueError as ve:
                print("\33[2KError in display ({}):".format(ve))
                traceback.print_exc()
                print("\33[2Kvar_tx: %.2e, var_rx: %.2e "%(self.var_tx,self.var_rx))
                print("\33[2Krate_data: %s"%(rate_data,))
                exit(1)

        # FIXME: It should not be necessary to empty the queue here
        # anymore, since the monitor code only puts stuff in the Queue
        # on request.
        # Verify this before removing this while loop!
        while not self.sample_queue.empty():
            self.sample_queue.get()
            self.sample_queue.task_done()

    # Return a tuple with interface type and linerate
    def get_linerate(self,interface):
        if OS == OS_type.darwin:      # Fake it on OS X
            # 1 Gbit/s in bytes/s (NOTE: 1000, not 1024; see IEEE 802.3-2008)
            lr = (InterfaceType.Ethernet,(1000*1000*1000)/8)
        else:
            lr = self.get_linerate_ethernet(interface)
            if lr is None:
                lr = self.get_linerate_wireless(interface)
        return lr

    def get_linerate_ethernet(self,interface):
        try:
            # The link speed in Mbits/sec.
            with open("/sys/class/net/{interface}/speed".format(interface=interface)) as f:
                speed = int(f.read()) * 1000 * 1000 / 8 # convert to bytes/s (NOTE: 1000, not 1024; see IEEE 802.3-2008)
        except IOError:
            speed = None
        finally:
            return InterfaceType.Ethernet,speed

    def get_linerate_wireless(self,interface):
        try:
            iwres = iwconfig(interface,_ok_code=[0,1])
            rx = re.compile(r"Bit Rate=([0-9]+)\s*Mb/s", re.MULTILINE | re.IGNORECASE)  # raw string avoids an invalid-escape warning
            lrg = rx.search(iwres)
            if lrg.groups() != ():
                bit_rate = int(lrg.group(1)) # Mbits/s
                lr = (InterfaceType.Wireless,bit_rate * 1000 * 1000 / 8) # convert Mbit/s to bytes/s (NOTE: 1000, not 1024; see IEEE 802.3-2008)
                return lr
            else:
                return None,None
        except ErrorReturnCode:
            return None,None


    # Store metered data in Ceilometer
    def meter(self):
        t = time.time()
        self.time_of_last_meter = t

        alarm_value_tx = self.overload_risk_tx > self.alarm_trigger_value
        alarm_value_rx = self.overload_risk_rx > self.alarm_trigger_value

        now = datetime.now(tz=UTC())
        nowstr = now.strftime('%Y-%m-%dT%H.%M.%S')
        data = {'timestamp': nowstr,
                'interface': repr(self.sampler.get_interface()),
                'linerate': repr(self.linerate),
                'alarm_trigger_value': repr(self.alarm_trigger_value),
                'cutoff': repr(self.cutoff),
                'tx': repr(self.mean_tx),
                'var_tx': repr(self.var_tx),
                'mu_tx': repr(self.mu_tx),
                'sigma2_tx': repr(self.sigma2_tx),
                'overload_risk_tx': repr(self.overload_risk_tx),
                'alarm_tx': repr(alarm_value_tx),
                'rx': repr(self.mean_rx),
                'var_rx': repr(self.var_rx),
                'mu_rx': repr(self.mu_rx),
                'sigma2_rx': repr(self.sigma2_rx),
                'overload_risk_rx': repr(self.overload_risk_rx),
                'alarm_rx': repr(alarm_value_rx),
                'sample_rate': repr(self.sampler.get_sample_rate()),
                'estimation_interval': repr(self.est_interval),
                'meter_interval': repr(self.meter_interval)}
        self.ceilorecord(now,data)


    def ceilomessage(self,message):
        now = datetime.now(tz=UTC())
        nowstr = now.strftime('%Y-%m-%dT%H:%M:%S')
        data = {message: repr(nowstr)}
        self.ceilorecord(now,data)


    def ceilorecord(self,now,data):
        if not self.debug:
            # Check if the authorization token has expired.
            # Add one minute margin for possible bad time sync.
            delta = timedelta(minutes=1)
            if now + delta > self.authexptime:
                self.get_auth_token()
        if self.display_data:
            print("\33[15;1H")
            print("\33[0J")      # clear rest of screen
        if self.debug:
            print("\33[0J" + str(data))
        else:
            rjson = self.ceilocomm.putMeter(self.meter_name,data,self.authtoken,
                                            username=self.username,
                                            project_id=self.project_ID,
                                            resource_id=self.resource_ID)
            if self.display_data:
                print(json.dumps(rjson,indent=4))


    def main(self):
        self.time_of_last_meter = self.time_of_last_calc = time.time()
        # Sampling is done in a separate thread to avoid
        # introducing a delay jitter in our measurements.
#        self.sampler.setDaemon(True)
        self.sampler.start()
        sleep_time = min(self.est_interval,self.meter_interval)

        # Start the thread listening for configuration messages
        conflistener = Thread(target=self.listen_for_configuration)
        conflistener.setDaemon(True)
        conflistener.start()

### FIXME:
#   The conflistener may not be completely initialized when ceilocomm
#   starts sending messages on the local port (in mode 1).
#   We should wait for the conflistener to initialize before
#   proceeding here.

        if self.display_data:
            print("\33[2J")         # clear screen
        self.ceilomessage('initialized')

        try:
            while not self.exit_flag:
                if not self.config_queue.empty():
                    if self.debug:
                        self.debugPrint('main loop: handle_configuration()')
                    self.handle_configuration(self.sampler,self.config_queue,self.config_reply)
                    continue
                    
#                if self.status_sampler(self.sampler) == 'stopped':
                if self.status_sampler(self.sampler) == SamplerStatus.stopped:
                    self.conf_event.wait()
                    continue

                timestamp = time.time()
                # time to do an estimation?
                if timestamp >= (self.time_of_last_calc  + self.est_interval):
                    self.estimate(self.sampler)
                # time to store a meter value?
                if timestamp >= (self.time_of_last_meter  + self.meter_interval):
                    self.meter()        
                self.conf_event.wait(sleep_time)
                self.conf_event.clear()
            print('exit')

        except KeyboardInterrupt:
            self.exit()


    def exit(self):
        self.stop_sampler() # make sure the sampler thread is stopped in an
                            # orderly manner 
        self.ceilomessage('exited')
        self.exit_flag = True
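
Monitor.estimate() above fits a lognormal distribution to the measured mean and variance of the byte rate (sigma^2 = ln(1 + var/mean^2), mu = ln(mean) - sigma^2/2) and reads the overload risk off its survival function at a cutoff fraction of the line rate. The sketch below reproduces just that calculation in isolation, with made-up numbers; it assumes scipy is available, as in the class above.

import math
from scipy.stats import lognorm

mean_rate = 2.0e6     # measured mean rate in bytes/s (example value)
var_rate = 9.0e11     # measured variance of the rate (example value)
linerate = 1.25e7     # 100 Mbit/s expressed in bytes/s
cutoff = 0.99         # overload threshold as a fraction of the line rate

# Moment matching, as in estimate()
sigma2 = math.log(1.0 + var_rate / (mean_rate * mean_rate))
mu = math.log(mean_rate) - sigma2 / 2.0

# P(rate > cutoff * linerate) in percent, via the survival function
overload_risk = lognorm.sf(linerate * cutoff, math.sqrt(sigma2), 0, math.exp(mu)) * 100
print("overload risk: %.4f %%" % overload_risk)
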
Beispiel #49
0
class Analyzer(QtGui.QMainWindow):
    def __init__(self, parent=None):
        QtGui.QWidget.__init__(self, parent)

        ### MODE FLAGS ###
        self.RUNNING = False
        self.HF = False
        self.WATERFALL = False
        self.MARKERS = [False, False, False, False]
        self.DELTA = False
        self.HOLD = False
        self.AVERAGE = False
        self.PEAK = False
        self.SAVE = [False, False, False]

        ### VARIABLES ###
        self.step = 1.8e6
        self.ref = 0

        self.gain = 0
        self.sampRate = 2.4e6

        self.waterfallHistorySize = 100
        self.markers = [None, None, None, None]
        self.markerIndex = [None, None, None, None]
        self.markerValue = [None, None, None, None]
        self.markerText = [None, None, None, None]
        self.deltaIndex = None
        self.deltaValue = None
        self.saveCurves = [None, None, None]
        self.penColors = ['g', 'c', 'm']

        self.ui = Interface()
        self.ui.setupUi(self, self.step, self.ref)

        self.nwelch = 15
        self.nfft = self.ui.rbwEdit.itemData(self.ui.rbwEdit.currentIndex()).toInt()[0]
        self.numSamples = self.nfft*(1+self.nwelch)/2
        self.length = self.nfft
        self.sliceLength = int(np.floor(self.length*(self.step/self.sampRate)))

        self.createPlot()

        ### SIGNALS AND SLOTS ###
        self.ui.startButton.clicked.connect(self.onStart)
        self.ui.stopButton.clicked.connect(self.onStop)
        self.ui.plotTabs.currentChanged.connect(self.onMode)
        self.ui.startEdit.valueChanged.connect(self.onStartFreq)
        self.ui.stopEdit.valueChanged.connect(self.onStopFreq)
        self.ui.rbwEdit.activated[int].connect(self.onRbw)
        self.ui.centerEdit.valueChanged.connect(self.onCenter)
        self.ui.spanEdit.valueChanged.connect(self.onSpan)
        self.ui.refEdit.valueChanged.connect(self.onRef)
        self.ui.markerCheck_1.stateChanged.connect(self.onMarker_1)
        self.ui.markerEdit_1.valueChanged.connect(self.onMarkerEdit_1)
        self.ui.deltaCheck.stateChanged.connect(self.onDelta)
        self.ui.deltaEdit.valueChanged.connect(self.onDeltaEdit)
        self.ui.holdCheck.stateChanged.connect(self.onHold)
        self.ui.avgCheck.stateChanged.connect(self.onAvg)
        self.ui.avgEdit.valueChanged.connect(self.onAvgEdit)
        self.ui.peakCheck.stateChanged.connect(self.onPeak)
        self.ui.traceButton_1.clicked.connect(self.onSave_1)
        self.ui.traceButton_2.clicked.connect(self.onSave_2)
        self.ui.traceButton_3.clicked.connect(self.onSave_3)
        self.ui.waterfallCheck.stateChanged.connect(self.onWaterfall)

        #self.usb = USBController()

### PLOT FUNCTIONS ###
    def createPlot(self):
        self.plot = pg.PlotWidget()
        if self.HF == False:
            self.ui.startEdit.setRange(30, 1280-self.step/1e6)
            self.ui.stopEdit.setRange(30+self.step/1e6, 1280)
            self.ui.centerEdit.setRange(30+self.step/2e6, 1280-self.step/2e6)
            self.startFreq = 80e6
            self.stopFreq = 100e6
            self.ui.plotLayout.addWidget(self.plot)
        elif self.HF:
            self.ui.startEdit.setRange(1, 30-self.step/1e6)
            self.ui.stopEdit.setRange(1+self.step/1e6, 30)
            self.ui.centerEdit.setRange(1+self.step/2e6, 30-self.step/2e6)
            self.startFreq = 1e6
            self.stopFreq = 30e6
            self.ui.plotLayout_2.addWidget(self.plot)
        self.plot.showGrid(x=True, y=True)
        self.plot.setMouseEnabled(x=False, y=False)
        self.plot.setYRange(self.ref-100, self.ref, padding=0)
        self.plot.setXRange(self.startFreq/1e6, self.stopFreq/1e6, padding=0)
        self.curve = self.plot.plot(pen='y')

        self.span = self.stopFreq - self.startFreq
        self.center = self.startFreq + self.span/2

        # Crosshair
        self.vLine = pg.InfiniteLine(angle=90, movable=False)
        self.hLine = pg.InfiniteLine(angle=0, movable=False)
        self.plot.addItem(self.vLine, ignoreBounds=True)
        self.plot.addItem(self.hLine, ignoreBounds=True)
        self.posLabel = pg.TextItem(anchor=(0,1))
        self.plot.addItem(self.posLabel)
        self.mouseProxy = pg.SignalProxy(self.plot.scene().sigMouseMoved,
                                         rateLimit=20, slot=self.mouseMoved)

        self.updateFreqs()

    def deletePlot(self):
        self.curve.deleteLater()
        self.curve = None
        if self.HF == False:
            self.ui.plotLayout.removeWidget(self.plot)
        else:
            self.ui.plotLayout_2.removeWidget(self.plot)
        self.plot.deleteLater()
        self.plot = None

    def createWaterfall(self):
        self.WATERFALL = True
        self.waterfallPlot = pg.PlotWidget()
        if self.HF == False:
            self.ui.plotLayout.addWidget(self.waterfallPlot)
        else:
            self.ui.plotLayout_2.addWidget(self.waterfallPlot)
        self.waterfallPlot.setYRange(-self.waterfallHistorySize, 0)
        self.waterfallPlot.setXLink(self.plot)
        self.waterfallPlot.setMouseEnabled(x=False, y=False)

        self.waterfallHistogram = pg.HistogramLUTItem(fillHistogram=False)
        self.waterfallHistogram.gradient.loadPreset("flame")
        self.waterfallHistogram.setHistogramRange(self.ref-100, self.ref)

        self.waterfallImg = None

    def deleteWaterfall(self):
        if self.WATERFALL:
            self.WATERFALL = False
            if self.HF == False:
                self.ui.plotLayout.removeWidget(self.waterfallPlot)
            else:
                self.ui.plotLayout_2.removeWidget(self.waterfallPlot)
            self.waterfallPlot.deleteLater()
            self.waterfallPlot = None
            self.waterfallHistogram.deleteLater()
            self.waterfallHistogram = None
            if self.waterfallImg is not None:
                self.waterfallImg.deleteLater()
                self.waterfallImg = None

    def updateFreqs(self):
        self.freqs = np.arange(self.startFreq+self.step/2, self.stopFreq+self.step/2, self.step)
        self.markerIndex = [None, None, None, None]
        self.deltaIndex = None
        self.peakIndex = None
        self.holdData = None
        self.avgArray = None
        self.avgCounter = 0
        self.saveCurves = [None, None, None]

        if self.RUNNING:
            self.sampler.freqs = self.freqs
            self.sampler.BREAK = True

        self.xData = []
        self.yData = []
        self.waterfallImg = None
        self.plot.setXRange(self.startFreq/1e6, self.stopFreq/1e6, padding=0)

        self.ui.startEdit.setValue(self.startFreq/1e6)
        self.ui.stopEdit.setValue(self.stopFreq/1e6)
        self.ui.centerEdit.setValue(self.center/1e6)
        self.ui.spanEdit.setValue(self.span/1e6)


        
    def updateRbw(self):
        self.markerIndex = [None, None, None, None]
        self.deltaIndex = None
        self.holdData = None
        self.avgArray = None
        self.avgCounter = 0
        self.saveCurves = [None, None, None]

        self.numSamples = self.nfft*(1+self.nwelch)/2
        if self.numSamples < 200:
            self.numSamples = 256
            
        if self.span >=50e6:
            threshold = 200
        elif self.span >= 20e6: 
            threshold = 500
        else:
            threshold = 1000
            
        if self.nfft < threshold:
            self.length = 1024
            self.sliceLength = int(np.floor(self.length*(self.step/self.sampRate)))        
        else:
            self.length = self.nfft
            self.sliceLength = int(np.floor(self.length*(self.step/self.sampRate)))

    @QtCore.pyqtSlot(object)
    def plotUpdate(self, data):
        index = data[0]
        xTemp = data[2]
        yTemp = data[1]
        if len(yTemp) == 0:
            self.xData = xTemp
            self.yData = yTemp
        else:
            self.xData = np.concatenate((self.xData[:index*self.sliceLength], xTemp, self.xData[(index+1)*self.sliceLength:]))
            self.yData = np.concatenate((self.yData[:index*self.sliceLength], yTemp, self.yData[(index+1)*self.sliceLength:]))

        yData = self.yData

        if len(self.xData) == self.sliceLength*len(self.freqs):
            if self.AVERAGE:
                if self.avgCounter == 0:
                    if self.avgArray is None:
                        self.avgArray = np.array([self.yData])

                    elif self.avgArray.shape[0] < self.numAvg:
                        self.avgArray = np.append(self.avgArray, np.array([self.yData]), axis=0)

                    else:
                        self.avgArray = np.roll(self.avgArray, -1, axis=0)
                        self.avgArray[-1] = self.yData
                    self.avgData = np.average(self.avgArray, axis=0)
                    #self.curve.setData(self.xData, yData)
                    self.avgCounter = len(self.freqs)
                else:
                    self.avgCounter -= 1
                yData = self.avgData

            for i in range(len(self.MARKERS)):
                if self.MARKERS[i]:
                    if self.markerIndex[i] is None:
                        index = np.argmin(np.abs(self.xData-self.markerValue[i]))
                        self.markerIndex[i] = index
                    self.markers[i].setIndex(self.markerIndex[i])
                    self.markerText[i].setText("Mk%d:\nf=%0.1f MHz\nP=%0.1f dBm" % (i + 1, self.xData[self.markerIndex[i]], yData[self.markerIndex[i]]))

            if self.DELTA:
                if self.deltaIndex is None:
                    index = np.argmin(np.abs(self.xData-self.deltaValue))
                    self.deltaIndex = index
                self.delta.setIndex(self.deltaIndex)
                dx = self.xData[self.deltaIndex] - self.xData[self.markerIndex[0]]
                dy = yData[self.deltaIndex] - yData[self.markerIndex[0]]
                self.deltaText.setText("Delta:\ndf=%0.1f MHz\ndP=%0.1f dB" % (dx, dy))

            if self.HOLD:
                if self.holdData is None:
                    self.holdData = yData
                else:
                    self.holdData = np.amax([self.holdData, yData], axis=0)
                self.holdCurve.setData(self.xData, self.holdData)

            if self.PEAK:
                self.peakIndex = np.argmax(yData)
                self.peak.setIndex(self.peakIndex)
                self.peakText.setText("Peak:\nf=%0.1f MHz\nP=%0.1f dBm" % (self.xData[self.peakIndex], yData[self.peakIndex]))

            for i in range(len(self.SAVE)):
                if self.SAVE[i]:
                    if self.saveCurves[i] is None:
                        self.saveCurves[i] = self.plot.plot(pen=self.penColors[i])
                        self.plot.addItem(self.saveCurves[i])
                        self.saveCurves[i].setData(self.xData, yData)

                    else:
                        self.plot.removeItem(self.saveCurves[i])
                        self.saveCurves[i] = None

                    self.SAVE[i] = False

            if self.WATERFALL:
                self.waterfallUpdate(self.xData, yData)
        #print len(yData)
        self.curve.setData(self.xData, yData)

    def waterfallUpdate(self, xData, yData):
        if self.waterfallImg is None:
            self.waterfallImgArray = np.zeros((self.waterfallHistorySize, len(xData)))
            self.waterfallImg = pg.ImageItem()
            self.waterfallImg.scale((xData[-1] - xData[0]) / len(xData), 1)
            self.waterfallImg.setPos(xData[0],-self.waterfallHistorySize)
            self.waterfallPlot.clear()
            self.waterfallPlot.addItem(self.waterfallImg)
            self.waterfallHistogram.setImageItem(self.waterfallImg)
            self.plot.setXRange(self.startFreq/1e6, self.stopFreq/1e6)

        self.waterfallImgArray = np.roll(self.waterfallImgArray, -1, axis=0)
        self.waterfallImgArray[-1] = yData
        self.waterfallImg.setImage(self.waterfallImgArray.T,
                                   autoLevels=True, autoRange=False)
### SETUP SAMPLER AND WORKER
    def setupSampler(self):
        self.samplerThread = QtCore.QThread(self)
        self.sampler = Sampler(self.gain, self.sampRate, self.freqs, self.numSamples)
        self.sampler.moveToThread(self.samplerThread)
        self.samplerThread.started.connect(self.sampler.sampling)
        self.sampler.samplerError.connect(self.onError)
        self.sampler.dataAcquired.connect(self.worker.work)
        self.samplerThread.start(QtCore.QThread.NormalPriority)

    def setupWorker(self):
        self.workerThread = QtCore.QThread(self)
        self.worker = Worker(self.nfft, self.length, self.sliceLength, self.sampRate, self.nwelch)
        self.worker.moveToThread(self.workerThread)
        self.worker.dataReady.connect(self.plotUpdate)
        self.workerThread.start(QtCore.QThread.NormalPriority)


### GUI FUNCTIONS ###
    def mouseMoved(self, evt):
        pos = evt[0]
        if self.plot.sceneBoundingRect().contains(pos):
            mousePoint = self.plot.getViewBox().mapSceneToView(pos)
            self.posLabel.setText("f=%0.1f MHz, P=%0.1f dBm" % (mousePoint.x(),mousePoint.y()))
            self.vLine.setPos(mousePoint.x())
            self.hLine.setPos(mousePoint.y())
            self.posLabel.setPos(mousePoint.x(), mousePoint.y())

    @pyqtSlot()
    def onStart(self):
        self.ui.startButton.setEnabled(False)
        self.ui.stopButton.setEnabled(True)
        self.ui.statusbar.setVisible(False)
        self.ui.statusbar.clearMessage()
        self.ui.settingsTabs.setEnabled(True)

        self.setupWorker()
        self.setupSampler()

        self.RUNNING = True

    @pyqtSlot()
    def onStop(self):
        self.ui.startButton.setEnabled(True)
        self.ui.stopButton.setEnabled(False)
        self.ui.settingsTabs.setEnabled(False)

        self.samplerThread.exit(0)
        self.sampler.WORKING = False
        self.sampler = None

        self.workerThread.exit(0)
        self.worker = None

        self.RUNNING = False

    @pyqtSlot(int)
    def onMode(self, index):
        if index == 0:
            self.deletePlot()
            self.ui.waterfallCheck.setChecked(False)
            self.HF = False

            self.createPlot()

            self.ui.settingsTabs.setEnabled(True)

        elif index == 1:
            self.deletePlot()
            self.ui.waterfallCheck.setChecked(False)
            self.HF = True

            self.createPlot()

            self.ui.settingsTabs.setEnabled(True)

        elif index == 2:
            self.ui.settingsTabs.setEnabled(True)

        elif index == 3:
            self.ui.settingsTabs.setEnabled(False)

    @pyqtSlot(float)
    def onStartFreq(self, value):
        self.startFreq = value*1e6
        if self.startFreq > self.stopFreq - self.step:
            self.stopFreq = self.startFreq + self.step
        self.span = self.stopFreq - self.startFreq
        self.center = self.startFreq + self.span/2
        self.updateFreqs()

    @pyqtSlot(float)
    def onStopFreq(self, value):
        self.stopFreq = value*1e6
        if self.stopFreq < self.startFreq + self.step:
            self.startFreq = self.stopFreq - self.step
        self.span = self.stopFreq - self.startFreq
        self.center = self.startFreq + self.span/2
        self.updateFreqs()

    @pyqtSlot(int)
    def onRbw(self, index):
        self.nfft = self.ui.rbwEdit.itemData(index).toInt()[0]
        self.updateRbw()
        if self.RUNNING:
            self.sampler.numSamples = self.numSamples
            self.worker.nfft = self.nfft
            self.worker.length = self.length
            self.worker.sliceLength = self.sliceLength
            self.worker.correction = 0
            self.sampler.BREAK = True

        self.xData = []
        self.yData = []
        self.waterfallImg = None
        print self.numSamples
        print self.nfft

    @pyqtSlot(float)
    def onCenter(self, center):
        self.center = center*1e6
        self.startFreq = self.center - self.span/2
        self.stopFreq = self.center + self.span/2
        self.updateFreqs()

    @pyqtSlot(float)
    def onSpan(self,span):
        self.span = span*1e6
        self.startFreq = self.center - self.span/2
        self.stopFreq = self.center + self.span/2
        self.updateFreqs()

    @pyqtSlot(int)
    def onRef(self, ref):
        self.ref = ref
        self.plot.setYRange(self.ref-100, self.ref)
        if self.WATERFALL:
            self.waterfallHistogram.setHistogramRange(self.ref-100, self.ref)

    # Markers
    @pyqtSlot(int)
    def onMarker_1(self, state):
        if state == 2:
            self.MARKERS[0] = True
            self.ui.deltaCheck.setEnabled(True)
            self.ui.markerEdit_1.setEnabled(True)
            self.ui.markerEdit_1.setRange(1, 1280)
            self.ui.markerEdit_1.setValue(self.center/1e6)
            self.markerValue[0] = self.ui.markerEdit_1.value()

            self.marker_1 = pg.CurvePoint(self.curve)
            self.plot.addItem(self.marker_1)
            self.markers[0] = self.marker_1
            self.markerArrow_1 = pg.ArrowItem(angle=270)
            self.markerArrow_1.setParentItem(self.marker_1)
            self.markerText_1 = pg.TextItem("Mk1", anchor=(0.5, 1.5))
            self.markerText_1.setParentItem(self.marker_1)
            self.markerText[0] = self.markerText_1

        elif state == 0:
            self.MARKERS[0] = False
            self.markerIndex[0] = None
            self.markerValue[0] = None
            self.markerText[0] = None
            self.ui.markerEdit_1.setDisabled(True)
            self.ui.deltaCheck.setDisabled(True)
            self.plot.removeItem(self.marker_1)
            self.marker_1.deleteLater()
            self.marker_1 = None

    @pyqtSlot(float)
    def onMarkerEdit_1(self, freq):
        self.markerIndex[0] = None
        self.markerValue[0] = freq

    @pyqtSlot(int)
    def onDelta(self, state):
        if state == 2:
            self.DELTA = True
            self.ui.deltaEdit.setEnabled(True)
            self.ui.deltaEdit.setRange(1, 1280)
            self.ui.deltaEdit.setValue(self.center/1e6)
            self.deltaValue = self.ui.deltaEdit.value()

            self.delta = pg.CurvePoint(self.curve)
            self.plot.addItem(self.delta)
            self.deltaArrow = pg.ArrowItem(angle=270)
            self.deltaArrow.setParentItem(self.delta)
            self.deltaText = pg.TextItem("Delta:", anchor=(0.5, 1.5))
            self.deltaText.setParentItem(self.delta)

        elif state == 0:
            self.DELTA = False
            self.ui.deltaEdit.setDisabled(True)
            self.plot.removeItem(self.delta)
            self.delta.deleteLater()
            self.delta = None

    @pyqtSlot(float)
    def onDeltaEdit(self, freq):
        self.deltaIndex = None
        self.deltaValue = freq

    # MAX HOLD
    @pyqtSlot(int)
    def onHold(self, state):
        if state == 2:
            self.HOLD = True
            self.holdCurve = self.plot.plot(pen='r')
            self.plot.addItem(self.holdCurve)
            self.holdData = None
        elif state == 0:
            self.HOLD = False
            self.holdData = None
            self.plot.removeItem(self.holdCurve)

    # AVERAGE
    @pyqtSlot(int)
    def onAvg(self, state):
        if state == 2:
            self.AVERAGE = True
            self.numAvg = self.ui.avgEdit.value()
            self.avgArray = None
            self.avgCounter = 0
        elif state == 0:
            self.AVERAGE = False
            self.numAvg = None
            self.avg = []

    @pyqtSlot(float)
    def onAvgEdit(self, num):
        self.numAvg = num
        self.avgArray = None
        self.avgCounter = 0

    # PEAK
    @pyqtSlot(int)
    def onPeak(self, state):
        if state == 2:
            self.PEAK = True
            self.peak = pg.CurvePoint(self.curve)
            self.plot.addItem(self.peak)
            self.peakArrow = pg.ArrowItem(angle=270)
            self.peakArrow.setParentItem(self.peak)
            self.peakText = pg.TextItem("Peak:", anchor=(0.5, 1.5))
            self.peakText.setParentItem(self.peak)

        elif state == 0:
            self.PEAK = False
            self.plot.removeItem(self.peak)
            self.peak.deleteLater()
            self.peak = None

    @QtCore.pyqtSlot()
    def onSave_1(self):
        self.SAVE[0] = True
        self.ui.traceButton_1.setDown(True)

    @QtCore.pyqtSlot()
    def onSave_2(self):
        self.SAVE[1] = True
        self.ui.traceButton_2.setDown(True)

    @QtCore.pyqtSlot()
    def onSave_3(self):
        self.SAVE[2] = True
        self.ui.traceButton_3.setDown(True)

    @pyqtSlot(object)
    def onError(self, errorMsg):
        #self.ui.statusbar.addWidget(QtGui.QLabel(errorMsg))
        self.ui.statusbar.showMessage("ERROR: " + errorMsg)
        self.ui.statusbar.setVisible(True)
        self.ui.stopButton.click()

    @pyqtSlot(int)
    def onWaterfall(self, state):
        if state == 2:
            self.createWaterfall()
        elif state == 0:
            self.deleteWaterfall()
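
waterfallUpdate() above maintains the waterfall as a fixed-size history image: the array is rolled up by one row and the newest trace overwrites the last row. Below is a minimal numpy-only sketch of that rolling buffer, with illustrative sizes.

import numpy as np

history_size = 100          # rows of history, like waterfallHistorySize above
n_bins = 512                # points per trace (illustrative)
waterfall = np.zeros((history_size, n_bins))

def push_trace(img, trace):
    """Shift the history up by one row and write the newest trace into the last row."""
    img = np.roll(img, -1, axis=0)
    img[-1] = trace
    return img

for _ in range(5):
    waterfall = push_trace(waterfall, np.random.uniform(-100.0, 0.0, n_bins))
print(waterfall.shape, waterfall[-1][:3])
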
Beispiel #50
0
from sampler import Sampler

if __name__ == '__main__':
    sampler = Sampler(z_dim=4, c_dim=3, scale=8.0, net_size=64)
    z1 = sampler.generate_z()
    img = sampler.generate(z1)
    sampler.show_image(img)


class SuperSID():
    '''
    This is the main class which creates all other objects.
    In CMV pattern, this is the Controller.
    '''
    running = False  # class attribute to indicate if the SID application is running

    def __init__(self, config_file='', read_file=None):
        self.version = "EG 1.4 20150801"
        self.timer = None
        self.sampler = None
        self.viewer = None

        # Read Config file here
        print("Reading supersid.cfg ...", end='')
        # this script accepts a .cfg file as optional argument else we default
        # so that the "historical location" or the local path are explored
        self.config = Config(config_file or "supersid.cfg")
        # once the .cfg read, some sanity checks are necessary
        self.config.supersid_check()
        if not self.config.config_ok:
            print("ERROR:", self.config.config_err)
            exit(1)
        else:
            print(self.config.filenames) # good for debugging: what .cfg file(s) were actually read
        self.config["supersid_version"] = self.version

        # Create Logger - Logger will read an existing file if specified as -r|--read script argument
        self.logger = Logger(self, read_file)
        if 'utc_starttime' not in self.config:
            self.config['utc_starttime'] = self.logger.sid_file.sid_params["utc_starttime"]

        # Create the viewer based on the .cfg specification (or set default):
        # Note: the list of Viewers can be extended provided they implement the same interface
        if self.config['viewer'] == 'wx' and wx_imported:
            # GUI Frame to display real-time VLF Spectrum based on wxPython
            self.viewer = wxSidViewer(self)
        elif self.config['viewer'] == 'tk':
            # GUI Frame to display real-time VLF Spectrum based on tkinter (python 2 and 3)
            self.viewer = tkSidViewer(self)
        elif self.config['viewer'] == 'text':
            # Lighter text version a.k.a. "console mode"
            self.viewer = textSidViewer(self)
        else:
            print("ERROR: Unknown viewer", sid.config['viewer'])
            exit(2)

        # Assign desired psd function for calculation after capture
        # currently: using matplotlib's psd
        if (self.config['viewer'] == 'wx' and wx_imported) or self.config['viewer'] == 'tk':
            self.psd = self.viewer.get_psd  # calculate psd and draw result in one call
        else:
            self.psd = mlab_psd             # calculation only

        # calculate Stations' buffer_size
        self.buffer_size = int(24*60*60 / self.config['log_interval'])

        # Create Sampler to collect audio buffer (sound card or other server)
        self.sampler = Sampler(self, audio_sampling_rate = self.config['audio_sampling_rate'], NFFT = 1024)
        if not self.sampler.sampler_ok:
            self.close()
            exit(3)
        else:
            self.sampler.set_monitored_frequencies(self.config.stations)

        # Link the logger.sid_file.data buffers to the config.stations
        for ibuffer, station  in enumerate(self.config.stations):
            station['raw_buffer'] =  self.logger.sid_file.data[ibuffer]

        # Create Timer
        self.viewer.status_display("Waiting for Timer ... ")
        self.timer = SidTimer(self.config['log_interval'], self.on_timer)


    def clear_all_data_buffers(self):
        """Clear the current memory buffers and pass to the next day"""
        self.logger.sid_file.clear_buffer(next_day = True)

    def on_timer(self):
        """Callback function triggered by SidTimer every 'log_interval' seconds"""
        # current_index is the position in the buffer calculated from current UTC time
        current_index = self.timer.data_index
        utc_now = self.timer.utc_now

        # Get new data and pass them to the View
        message = "%s  [%d]  Capturing data..." % (self.timer.get_utc_now(), current_index)
        self.viewer.status_display(message, level=1)
        signal_strengths = []
        try:
            data = self.sampler.capture_1sec()  # return a list of 1 second signal strength
            Pxx, freqs = self.psd(data, self.sampler.NFFT, self.sampler.audio_sampling_rate)
            for binSample in self.sampler.monitored_bins:
                signal_strengths.append(Pxx[binSample])
        except IndexError as idxerr:
            print("Index Error:", idxerr)
            print("Data len:", len(data))
        except TypeError as err_te:
            print("Warning:", err_te)

        # ensure that one thread at the time accesses the sid_file's' buffers
        with self.timer.lock:
            # do we need to save some files (hourly) or switch to a new day?
            if self.timer.utc_now.minute == 0 and self.timer.utc_now.second < self.config['log_interval']:
                if self.config['hourly_save'] == 'YES':
                    fileName = "hourly_current_buffers.raw.ext.%s.csv" % (self.logger.sid_file.sid_params['utc_starttime'][:10])
                    self.save_current_buffers(filename=fileName, log_type='raw', log_format='supersid_extended')
                # a new day!
                if self.timer.utc_now.hour == 0:
                    # use log_type and log_format(s) requested by the user in the .cfg
                    for log_format in self.config['log_format'].split(','):
                        self.save_current_buffers(log_type=self.config['log_type'], log_format=log_format)
                    self.clear_all_data_buffers()
            # Save signal strengths into memory buffers ; prepare message for status bar
            message = self.timer.get_utc_now() + "  [%d]  " % current_index
            for station, strength in zip(self.config.stations, signal_strengths):
                station['raw_buffer'][current_index] = strength
                message += station['call_sign'] + "=%f " % strength
            self.logger.sid_file.timestamp[current_index] = utc_now

        # end of this timer callback: hand over to the View to display the captured data & message
        self.viewer.status_display(message, level=2)

    def save_current_buffers(self, filename='', log_type='raw', log_format='both'):
        ''' Save buffer data from logger.sid_file

            log_type = raw or filtered
            log_format = sid_format|sid_extended|supersid_format|supersid_extended|both|both_extended'''
        filenames = []
        if log_format.startswith('both') or log_format.startswith('sid'):
            fnames = self.logger.log_sid_format(self.config.stations, '', log_type=log_type, extended=log_format.endswith('extended')) # filename is '' to ensure one file per station
            filenames += fnames
        if log_format.startswith('both') or log_format.startswith('supersid'):
            fnames = self.logger.log_supersid_format(self.config.stations, filename, log_type=log_type, extended=log_format.endswith('extended'))
            filenames += fnames
        return filenames

    def on_close(self):
        self.close()

    def run(self, wx_app = None):
        """Start the application as an infinite loop, according to the chosen viewer"""
        self.__class__.running = True
        self.viewer.run()

    def close(self):
        """Call all necessary stop/close functions of children objects"""
        self.__class__.running = False
        if self.sampler:
            self.sampler.close()
        if self.timer:
            self.timer.stop()
        if self.viewer:
            self.viewer.close()

    def about_app(self):
        """return a text indicating various information on the app, incl, versions"""
        msg = """This program is designed to detect Sudden Ionosphere Disturbances (SID), \
which are caused by a blast of intense X-ray radiation when there is a Solar Flare on the Sun.\n\n""" + \
            "Controller: " + self.version + "\n" +  \
            "Sampler: " + self.sampler.version  + "\n"  \
            "Timer: " + self.timer.version  + "\n"  \
            "Config: " + self.config.version  + "\n"  \
            "Logger: " + self.logger.version  + "\n"  \
            "Sidfile: " + self.logger.sid_file.version  + "\n" + \
            "Viewer: " + self.viewer.version  + "\n"  + \
            "\n\nAuthor: Eric Gibert  [email protected]" +  \
            "\n\nVisit http://solar-center.stanford.edu/SID/sidmonitor/ for more information."
        return msg

    def __init__(self, config_file='', read_file=None):
        self.version = "EG 1.4 20150801"
        self.timer = None
        self.sampler = None
        self.viewer = None

        # Read Config file here
        print("Reading supersid.cfg ...", end='')
        # this script accepts a .cfg file as an optional argument, else we use a default name
        # so that the "historical location" or the local path is explored
        self.config = Config(config_file or "supersid.cfg")
        # once the .cfg read, some sanity checks are necessary
        self.config.supersid_check()
        if not self.config.config_ok:
            print("ERROR:", self.config.config_err)
            exit(1)
        else:
            print(self.config.filenames) # good for debugging: what .cfg file(s) were actually read
        self.config["supersid_version"] = self.version

        # Create Logger - Logger will read an existing file if specified as -r|--read script argument
        self.logger = Logger(self, read_file)
        if 'utc_starttime' not in self.config:
            self.config['utc_starttime'] = self.logger.sid_file.sid_params["utc_starttime"]

        # Create the viewer based on the .cfg specification (or set default):
        # Note: the list of Viewers can be extended provided they implement the same interface
        if self.config['viewer'] == 'wx' and wx_imported:
            # GUI Frame to display real-time VLF Spectrum based on wxPython
            self.viewer = wxSidViewer(self)
        elif self.config['viewer'] == 'tk':
            # GUI Frame to display real-time VLF Spectrum based on tkinter (python 2 and 3)
            self.viewer = tkSidViewer(self)
        elif self.config['viewer'] == 'text':
            # Lighter text version a.k.a. "console mode"
            self.viewer = textSidViewer(self)
        else:
            print("ERROR: Unknown viewer", sid.config['viewer'])
            exit(2)

        # Assign desired psd function for calculation after capture
        # currently: using matplotlib's psd
        if (self.config['viewer'] == 'wx' and wx_imported) or self.config['viewer'] == 'tk':
            self.psd = self.viewer.get_psd  # calculate psd and draw result in one call
        else:
            self.psd = mlab_psd             # calculation only

        # calculate Stations' buffer_size
        self.buffer_size = int(24*60*60 / self.config['log_interval'])

        # Create Sampler to collect audio buffer (sound card or other server)
        self.sampler = Sampler(self, audio_sampling_rate=self.config['audio_sampling_rate'], NFFT=1024)
        if not self.sampler.sampler_ok:
            self.close()
            exit(3)
        else:
            self.sampler.set_monitored_frequencies(self.config.stations)

        # Link the logger.sid_file.data buffers to the config.stations
        for ibuffer, station in enumerate(self.config.stations):
            station['raw_buffer'] = self.logger.sid_file.data[ibuffer]

        # Create Timer
        self.viewer.status_display("Waiting for Timer ... ")
        self.timer = SidTimer(self.config['log_interval'], self.on_timer)
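
For reference, the capture step in on_timer() above maps each monitored station frequency onto a PSD bin and reads the power at that bin. A minimal stand-alone sketch of that step, assuming matplotlib's mlab.psd and purely hypothetical values for the sampling rate and station frequencies (none of these numbers come from the example):

import numpy as np
from matplotlib import mlab

audio_sampling_rate = 96000                 # hypothetical sound-card rate
NFFT = 1024                                 # same FFT size as in the example
station_frequencies = [24000.0, 37500.0]    # hypothetical VLF stations, in Hz

data = np.random.randn(audio_sampling_rate)        # stand-in for sampler.capture_1sec()
Pxx, freqs = mlab.psd(data, NFFT=NFFT, Fs=audio_sampling_rate)

# pick the PSD bin closest to each monitored frequency, then read its power
monitored_bins = [int(np.argmin(np.abs(freqs - f))) for f in station_frequencies]
signal_strengths = [Pxx[b] for b in monitored_bins]
print(signal_strengths)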
Beispiel #53
0
def take_feature(part):
    sample = Sampler(part, sample_rate=song.sample_rate)
    sample.compute_features()
    feature = sample.extract_features()
    return feature
Beispiel #54
0
                           config["gru_unit_size"],
                           config["num_step"],
                           config["num_layers"],
                           save_path + env.spec.id,
                           global_step,
                           config["max_gradient_norm"],
                           config["entropy_bonus"],
                           writer,
                           loss_function=config["loss_function"],
                           summary_every=10)

sampler = Sampler(pg_rnn,
                  env,
                  config["gru_unit_size"],
                  config["num_step"],
                  config["num_layers"],
                  config["max_step"],
                  config["batch_size"],
                  config["discount"],
                  writer)

reward = []
for _ in tqdm(range(config["num_itr"])):
    if train:
        batch = sampler.samples()
        pg_rnn.update_parameters(batch["observations"], batch["actions"],
                                batch["returns"], batch["init_states"],
                                batch["seq_len"])
    else:
        episode = sampler.collect_one_episode(render=True)
        print("reward is {0}".format(np.sum(episode["rewards"])))
Beispiel #55
0
from git_module import GitModule

def restart(config):
    git_module = GitModule(url=config.getRepoURL())
    git_module.checkout(config.getGitRef())
    git_module.runTests("tests/run_tests")
    #git_module.install( )
    #os.fork+++


#choose one of the following depending on how your SDS is connected 
#ser = serial.Serial('/dev/tty.wchusbserial1430', baudrate=9600, stopbits=1, parity="N",  timeout=2)
#ser = serial.Serial('/dev/ttyUSB0', baudrate=9600, stopbits=1, parity="N",  timeout=2)
#ser = serial.Serial('/dev/ttyAMA0', baudrate=9600, stopbits=1, parity="N",  timeout=2)


config = DeviceConfig(config_file)

device_id = config.getDeviceID()
client_pm10 = FriskbyClient(config, "%s_PM10" % device_id)
client_pm25 = FriskbyClient(config, "%s_PM25" % device_id)
sampler = Sampler(SDS011, sample_time=SAMPLE_TIME, sleep_time=0.50)

while True:
    data = sampler.collect()
    client_pm10.post( data[0].mean() )
    client_pm25.post( data[1].mean() )

    if config.updateRequired():
        restart(config)
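
The sampler.collect() call above averages SDS011 readings over the sample window; the raw sensor itself replies with fixed 10-byte frames over the serial port. A minimal decoding sketch, assuming the commonly documented SDS011 frame layout (0xAA, 0xC0, PM2.5 low/high, PM10 low/high, two ID bytes, checksum, 0xAB) and one of the serial ports commented out above; the real Sampler/SDS011 classes in this repo may handle this differently:

import serial

def read_sds011_frame(ser):
    """Read one 10-byte SDS011 data frame and return (pm25, pm10) in ug/m3, or None."""
    frame = ser.read(10)
    if len(frame) != 10 or frame[0] != 0xAA or frame[1] != 0xC0 or frame[9] != 0xAB:
        return None
    if sum(frame[2:8]) % 256 != frame[8]:   # checksum over the six data bytes
        return None
    pm25 = (frame[2] + frame[3] * 256) / 10.0
    pm10 = (frame[4] + frame[5] * 256) / 10.0
    return pm25, pm10

ser = serial.Serial('/dev/ttyUSB0', baudrate=9600, stopbits=1, parity="N", timeout=2)
print(read_sds011_frame(ser))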