Example #1
    def unpack(self):
        if os.path.isdir(self.build_dir):
            utils.output("SCIPY source already unpacked, not redoing.")
            return

        utils.output("Unpacking SCIPY source.")
        if os.name == 'posix':
            utils.unpack_build(self.tbfilename)
        else:
            os.mkdir(self.build_dir)
            os.chdir(self.build_dir)
            utils.unpack(self.tbfilename)
Example #2
 def unpack(self):
     if os.path.isdir(self.build_dir):
         utils.output("MATPLOTLIB source already unpacked, not redoing.")
     else:
         if os.name == 'posix':
             utils.output("Unpacking MATPLOTLIB source.")
             utils.unpack_build(self.tbfilename)
         else:
             utils.output("Unpacking MATPLOTLIB binaries.")
             os.mkdir(self.build_dir)
             os.chdir(self.build_dir)
             utils.unpack(self.tbfilename)
Example #3
    def install_nt(self):
        config.CMAKE_BINPATH = os.path.join(
                self.inst_dir, 'bin', 'cmake.exe')


        utils.goto_inst()
        if os.path.exists('cmake'):
            utils.output('CMAKE already installed, skipping.')
            return

        # this will unpack into inst/cmake-VER-win32-x86/bin etc
        utils.unpack(self.afilename)
        # so we rename it to plain 'cmake'
        os.rename(CMAKE_DIRBASE, 'cmake')
Example #4
def process(mj, since, till, path_incoming, path_to, force_unpack=False):
	if force_unpack or not os.path.isdir('tmp/%s' % mj):
		logging.debug('will unpack')
		utils.unpack(mj, since, till, path_incoming, path_to)

	production_times, unmatched_files = get_production_times(path_to, mj)
	logging.debug('production times count: %s' % len(production_times))

	base_ini_fn = '%s/%s/atx300/set/base/base.ini' % (path_incoming, mj)
	base_to_ior, ior_to_base = atxutils.build_signal_maps(base_ini_fn)

	signals_fns = utils.get_signals_files('%s/%s' % (path_incoming, mj), since, till)

	'''
	for fn in signals_fns:
		header = atxsignals.read_header(fn)
		assert(header['is_deaggregated'])
'''

	logging.debug('reading signals')
	deaggregated = atxsignals.read_deaggregated(signals_fns, since, till)

	illegal_cement_openings = []
	cem_bin_closed = None
	cem_scale_value = None
	for rec in deaggregated:
		if rec['type'] == 'header':
			continue
		k = rec['k']
		v = rec['v'] if 'v' in rec else rec['avg']
		#v_recalc = rec['v'] * coeffs[rec['k']] - offs[rec['k']]

		if ior_to_base.get(k) == 'Cement_Scale1':
			cem_scale_value = v
		elif ior_to_base.get(k) == 'I_Bin1CEMclosed':
			if v == 0 and cem_bin_closed != 0:
				is_illegal = True
				for prod_since, prod_till in production_times:
					if prod_since <= rec['t'] <= prod_till:
						is_illegal = False
						break
				if is_illegal:
					#logging.debug('%s %s' % (dt, cem_scale_value))
					illegal_cement_openings.append((rec['t'], cem_scale_value))
			cem_bin_closed = v

	logging.debug('found %d illegal cement openings' % len(illegal_cement_openings))

	return unmatched_files, illegal_cement_openings
Example #5
    def parse_option(self, option_data):
        p = 0
        while p<len(option_data):
            kind = ord(option_data[p])
            if kind == 0: # end
                break
            elif kind == 1:     # NOP
                p+=1
                continue

            p+=1
            length = ord(option_data[p])

            p+=1
            if kind == 2:       # MAX segment_size
                self.option_max_segment_size = unpack('!H', option_data[p:p+2])
            elif kind == 3:     # Window scale factor
                self.option_window_scale_factor = ord(option_data[p])
            elif kind == 4:     # TCP SACK Permitted Option
                self.option_SACK_Permit = True
            elif kind == 5:     # SACK
                pass
            elif kind == 8:     # time stamp
                self.option_timestamp = struct.unpack("!II", option_data[p:p+8]) # timestamp value / timestamp echo reply
            else:
                print "Warning, unknown TCP options:", kind
            p+=length-2
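The parser above mixes a project-local `unpack` helper with `struct.unpack`, and the snippet itself is Python 2. For reference, the fixed-width decode it performs on an MSS option, as a standalone Python 3 sketch:

import struct

# Decode a standalone MSS option: kind=2, length=4, value = big-endian uint16.
option = b"\x02\x04\x05\xb4"
kind, length = option[0], option[1]
(mss,) = struct.unpack("!H", option[2:4])  # struct.unpack always returns a tuple
assert (kind, length, mss) == (2, 4, 1460)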
Example #6
    def update(self, uid, properties):
        # lookup edge
        self._cursor.set_key(uid)
        if self._cursor.search() == WT_NOT_FOUND:
            raise KeyError('Edge not found, identifier: %s' % uid)
        else:
            # update properties
            start, label, end, data = self._cursor.get_value()
            other = unpack(data)

            # remove old indices if any
            for key in other.keys():
                if key in self._indices:
                    self._keys.set_key(key, other[key], uid)
                    self._cursor.remove()

            # update
            data = pack(properties)
            self._cursor.set_value(start, label, end, data)

            # index properties
            for key in properties.keys():
                if key in self._indices:
                    self._keys.set_key(key, properties[key], uid)
                    self._keys.set_value('')
                    self._keys.insert()
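The `pack`/`unpack` pair in these WiredTiger-backed examples serializes a property dictionary to bytes and back; the codec itself is not shown on this page. A minimal stand-in with the same contract, using json here (the original project may use msgpack or pickle instead):

import json

def pack(properties):
    # Serialize a property dict to bytes (stand-in for the codec used above).
    return json.dumps(properties).encode('utf-8')

def unpack(data):
    # Inverse of pack: bytes back to a property dict.
    return json.loads(data.decode('utf-8'))

assert unpack(pack({'label': 'knows'})) == {'label': 'knows'}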
Example #7
    def collect_data(self,
                     num_steps: int = 1000,
                     deterministic: Optional[Dict[str, bool]] = None,
                     disable_tqdm: bool = True,
                     train_mode: bool = True,
                     gamma: float = 0.99,
                     tau: float = 0.95) -> DataBatch:
        """
        Performs a rollout of the agents in the environment, for an indicated number of steps or episodes.

        Args:
            num_steps: number of steps to take; either this or num_episodes has to be passed (not both)
            deterministic: whether each agent should use the greedy policy; False by default
            disable_tqdm: whether a live progress bar should be (not) displayed
            train_mode:

        Returns: dictionary with the gathered data
        """

        if deterministic is None:
            deterministic = defaultdict(lambda: False)

        self.reset()

        obs, _, _, _ = unpack(self.env.reset(train_mode=train_mode)[self.brain_name], self.agent_ids)

        for step in trange(num_steps + 1, disable=disable_tqdm):
            # Compute the action for each agent
            action_info = {  # action, logprob, entropy, value
                agent_id: self.agents[agent_id].compute_single_action(obs[agent_id],
                                                                      deterministic[agent_id])
                for agent_id in self.agent_ids
            }

            action = {agent_id: action_info[agent_id][0] for agent_id in self.agent_ids}
            logprob = {agent_id: action_info[agent_id][1] for agent_id in self.agent_ids}
            entropy = {agent_id: action_info[agent_id][2] for agent_id in self.agent_ids}
            value = {agent_id: action_info[agent_id][3] for agent_id in self.agent_ids}

            # Actual step in the environment
            next_obs, reward, done, info = unpack(self.env.step(pack(action))[self.brain_name], self.agent_ids)

            # Saving to memory
            self.memory.store(obs, action, reward, logprob, entropy, value, done)
            obs = next_obs

        return self.memory.get_torch_data(gamma=gamma, tau=tau)
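In `collect_data` (and `collect_episodes` in Example #30), `unpack` fans a per-brain environment result out into per-agent dictionaries keyed by `agent_ids`, and `pack` stacks per-agent actions back together. A sketch under the assumption of a Unity ML-Agents style brain info object; the field names here are guesses:

import numpy as np

def unpack(brain_info, agent_ids):
    # Hypothetical: brain-level lists -> per-agent dicts (obs, reward, done, info).
    obs = {aid: brain_info.vector_observations[i] for i, aid in enumerate(agent_ids)}
    reward = {aid: brain_info.rewards[i] for i, aid in enumerate(agent_ids)}
    done = {aid: brain_info.local_done[i] for i, aid in enumerate(agent_ids)}
    return obs, reward, done, {}

def pack(action_dict):
    # Hypothetical inverse for actions: stack per-agent actions in a fixed order.
    return np.array([action_dict[aid] for aid in sorted(action_dict)])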
Example #8
 def decompress_into(self, params, scalar_weight, messages, metadata):
     """
     Adds scaled, decompressed, parameters from `messages` to `params`.
     This modifies `params`.
     """
     shapes = [p.shape for p in params]
     for param, values in zip(params, unpack(messages[0], shapes)):
         param[:].add_(scalar_weight, values)
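Several of the gradient-compression examples on this page (#8, #12, #16, #29, #31, #42) share a `pack`/`unpack` pair that flattens a list of tensors into one contiguous buffer and splits it back by shape. A minimal PyTorch sketch of that contract:

import math
import torch

def pack(tensors):
    # Flatten a list of tensors into one contiguous buffer, remembering shapes.
    buffer = torch.cat([t.reshape(-1) for t in tensors])
    shapes = [t.shape for t in tensors]
    return buffer, shapes

def unpack(buffer, shapes):
    # Split the flat buffer back into views with the original shapes.
    out, offset = [], 0
    for shape in shapes:
        n = math.prod(shape)
        out.append(buffer[offset:offset + n].view(shape))
        offset += n
    return out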
Example #9
def word_at_offset(filename, offset):
    # filename = ensure_src_file_path(filename)
    with open(filename, "rb") as f:
        result = ""
        f.seek(offset)
        data = f.read(2)
        for b in data:
            result += "%02x" % ord(b)
        return unpack(result[0:2], result[2:])
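The `unpack` used here takes the word's two bytes as separate hex strings; its definition is not on this page. One plausible reading that matches the pointer arithmetic in `find_pointers` (Example #24), assuming the file stores words little-endian:

def unpack(first, second):
    # Hypothetical: combine two hex-encoded bytes, low byte first, into an int.
    return int(second + first, 16)

assert unpack("34", "12") == 0x1234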
Example #11
 def read(name):
     try:
         with open(name, "r") as f:
             s = f.readline()
         s = unpack(s)
         return s
     except Exception as e:
         tf.logging.warning(e)
         return None
Example #12
    def init_state(self, params, seed=0):
        my_rank = torch.distributed.get_rank()
        ps = {}
        qs = {}

        rngs = {}
        for neighbor in self.topology.neighbor_ranks(my_rank):
            rngs[neighbor] = self._rng_for_neighbor(seed, neighbor)
            # Ensure that the p's and q's are consecutive in memory so we can quickly send them
            p_buffer, shapes = pack([self._init_p(param) for param in params])
            self.fill_with_random_values(p_buffer, rngs[neighbor])
            ps[neighbor] = {"list": unpack(p_buffer, shapes), "buffer": p_buffer}

            q_buffer, shapes = pack([self._init_q(param) for param in params])
            self.fill_with_random_values(q_buffer, rngs[neighbor])
            qs[neighbor] = {"list": unpack(q_buffer, shapes), "buffer": q_buffer}

        return self.State(ps=ps, qs=qs, iteration_number=0, bits_sent=0, messages_sent=0, rngs=rngs)
Example #13
def plot_trajectories(goal_object, runs_dir, img_dir, filename, n_runs=10):
    """

    :param runs_dir:
    :param img_dir:
    :param filename:
    :param goal_object:
    :param n_runs:
    """
    dataset_states = load_dataset(runs_dir)
    dataset_states = dataset_states[[
        'goal_position', 'goal_angle', 'position', 'initial_position',
        'initial_angle'
    ]]

    runs = np.arange(n_runs)
    run_states = dataset_states.where(dataset_states.run.isin(runs), drop=True)

    fig, ax = plt.subplots(figsize=(7.8, 4.8), constrained_layout=True)

    ax.set_xlabel('x axis', fontsize=11)
    ax.set_ylabel('y axis', fontsize=11)

    ax.grid()

    for run_id, run in run_states.groupby('run'):
        init_position = run.initial_position[run_id]
        init_angle = run.initial_angle[run_id]

        x_position, y_position = unpack(run.position, 'axis')

        if run_id == 0:
            goal_position = run_states.goal_position[run_id]
            goal_angle = run_states.goal_angle[run_id]

            draw_docking_station(ax, goal_object)
            draw_marxbot(ax, goal_position, goal_angle, label='goal position')

            draw_marxbot(ax,
                         init_position,
                         init_angle,
                         label='initial positions')
            plt.plot(x_position,
                     y_position,
                     color='black',
                     label='trajectories',
                     linewidth=1)
        else:
            draw_marxbot(ax, init_position, init_angle)
            plt.plot(x_position, y_position, color='black', linewidth=1)

    ax.set_ylim(-220, 220)
    ax.set_xlim(-250, 250)
    ax.set_aspect('equal')

    plt.legend()
    save_visualisation(filename, img_dir)
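In these plotting examples, `unpack(da, 'axis')` splits an xarray DataArray into its components along the named dimension. A minimal sketch of such a helper (an assumption; the real one is not shown here):

def unpack(data_array, dim):
    # Hypothetical: one sub-array per entry along `dim`, e.g. the x and y
    # components when dim is 'axis'.
    return tuple(data_array.isel({dim: i}) for i in range(data_array.sizes[dim]))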
Example #14
def surface(symbol, qdate, qtime=None, errors=False, n=False, loc=0, scale=0):
    df = query.vols(["*"], ["Date", "Time", "Tenor", "Strike"], symbol, qdate,
                    qtime)
    st = utils.pack(df)
    del df

    #----------------------------------------------------------------------------------
    f = lambda LST: pricer.norm_weights(LST, loc, scale)
    wgt = utils.apply(f, 1, st, ["Time", "Tenor"],
                      ["LogStrike"])  #retrive weights vector

    sv = pricer.SSVI(errors)
    f = lambda LST, VAR, TNR, VOL: sv.calibrate(LST, VAR, TNR, VOL, wgt)

    if errors == True:
        prm = utils.apply(f,
                          6,
                          st, ["Time"],
                          ["LogStrike", "TotAtmfVar", "Tenor", "SmtVol"],
                          diff=True,
                          fill=False)
        eps = prm[-1]
        prm = np.asarray(prm[:-1]).T
    else:
        prm = utils.apply(f,
                          5,
                          st, ["Time"],
                          ["LogStrike", "TotAtmfVar", "Tenor", "SmtVol"],
                          fill=False)

    #----------------------------------------------------------------------------------
    reduced = [
        "Time", "Group", "Tenor", "SpotMid", "Forward", "CumDivDays", "Div",
        "ImpBor", "Rate", "CallH0", "CallH1", "CallKh", "PutH0", "PutH1",
        "PutKh", "TotAtmfVar"
    ]
    st = utils.unique_nosort(st[reduced])

    prm = utils.restack(prm, st["TotAtmfVar"], tile=False)

    rho = sv.correlation(st["TotAtmfVar"], prm[:, 0], prm[:, 1], prm[:, 2])
    phi = sv.power_law(st["TotAtmfVar"], prm[:, 3], prm[:, 4])
    atm, skw, krt = sv.raw2jw(st["Tenor"] / base, st["TotAtmfVar"], phi, rho)

    arr = unstruct(st)
    arr = np.column_stack([arr, atm, skw, krt, phi, rho])
    df = utils.unpack(arr, qdate, raw=False, symbol=symbol)

    if errors == True and n == False:
        return df, eps
    elif errors == False and n == True:
        return df, n
    elif errors == True and n == True:
        return df, eps, n
    else:
        return df
Example #15
 def get(self, uid):
     """Look for a vertice with the given identifier `uid`"""
     # lookup the uid
     self._cursor.set_key(uid)
     if self._cursor.search() == WT_NOT_FOUND:
         raise KeyError('Vertex not found, identifier: %s' % uid)
     else:
         # uid found, return label and properties
         label, data = self._cursor.get_value()
         return label, unpack(data)
Example #16
 def decompress_into(self, params, scalar_weight, messages, metadata):
     """
     Adds scaled, decompressed, parameters from `messages` to `params`.
     This modifies `params`.
     """
     parts = unpack(messages[0], metadata)
     us = parts[: len(parts) // 2]
     vs = parts[len(parts) // 2 :]
     for param, u, v in zip(params, us, vs):
         param[:].addmm_(1, scalar_weight, u, v.t())
Example #17
    def parse(self, incoming_data):
        self.hardware_type = unpack("!H", incoming_data[:2]) # should be 1
        assert self.hardware_type == 1

        self.protocol = unpack("!H", incoming_data[2:4])     # should be ETHERTYPE_IP
        assert self.protocol == ETHERTYPE_IP

        self.hardware_size = ord(incoming_data[4])           # should be 6
        assert self.hardware_size == 6

        self.protocol_size = ord(incoming_data[5])           # should be 4
        assert self.protocol_size == 4

        self.opcode = unpack("!H", incoming_data[6:8])
        self.sender_mac = incoming_data[8:14]
        self.sender_ip  = tuple([ord(x) for x in incoming_data[14:18]])
        self.target_mac = incoming_data[18:24]
        self.target_ip  = tuple([ord(x) for x in incoming_data[24:28]])
        self.headlen = 28
Example #18
 def get(self, uid):
     """Retrieve the edge with the given `uid`"""
     # lookup `uid`
     self._cursor.set_key(uid)
     if self._cursor.search() == WT_NOT_FOUND:
         raise KeyError('Edge not found, identifier: %s' % uid)
     else:
         # return edge
         start, label, end, data = self._cursor.get_value()
         return start, label, end, unpack(data)
Example #21
 def parse(self, incoming_data):
     data = [ord(x) for x in incoming_data[:20]]
     self.version = data[0] >> 4
     self.headlen = (data[0] & 0xf) * 4
     self.identification = unpack("!H", incoming_data[4:6])
     self.flags = (data[6] >> 5)
     self.segment_id = ((data[6] & 0x1f) << 8) + data[7]
     self.TTL = data[8]
     self.protocol = data[9]
     self.src_ip = tuple(data[12:16])
     self.dst_ip = tuple(data[16:20])
Example #22
 def forward(self, sentences:list, to_str:bool=False) -> PackedSequence:
     packed = self.extractor(sentences)
     packed = PackedSequence(self.dropout(packed.data), packed.batch_sizes)
     packed, _ = self.rnn(packed)
     data = self.linear(packed.data)
     if to_str:
         max_idx = data.argmax(-1)
         word_pack = [self.extractor.vocab[i] for i in max_idx.cpu()]
         return unpack(word_pack, packed.batch_sizes)
     else:
         return PackedSequence(data, packed.batch_sizes)
Example #23
def runSequence(sequence, intcode, endCondition, startSignal=0):
    runners = [Runner(iter([phase])) for phase in sequence]
    runnerIterators = [runner.output_iterator(intcode) for runner in runners]

    # pipe last signal to current runner's input and add its next output signal to signals
    def step(signals, index):
        runners[index].feed(signals[-1])
        signal = next(runnerIterators[index], None)
        return signals + [signal], (index + 1) % len(runners)

    return reduceUntil(endCondition, unpack(step), ([startSignal], 0))[0]
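Here `unpack` is an argument-splatting adapter rather than a decoder: it wraps a multi-argument function so it can be called with a single tuple (the same helper would fit `key=unpack(laserSort)` in Example #27). A minimal version:

def unpack(f):
    # Adapter: turn f(a, b, ...) into a function of one tuple.
    return lambda args: f(*args)

add = unpack(lambda a, b: a + b)
assert add((2, 3)) == 5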
Example #24
def find_pointers():
    p = re.compile(pointer_regex)
    for file in files:
        file_path = os.path.join(abs_file_path, file)
        in_file = open(file_path, "rb")
        print file
        bytes = in_file.read()
        # text.decode('shift_jis', errors='ignore').encode('utf-8')
        only_hex = ""
        for c in bytes:
            # print ord(c)
            only_hex += "\\x%02x" % ord(c)
        # print only_hex
        # print bytes.encode('hex')
        tables = p.finditer(only_hex)
        for table in tables:
            last_part = table.group(4).split("\\x")
            # print last_part
            if last_part[1] == last_part[2] == last_part[3] == last_part[4]:  # ignore FFFFFFFFFF sections
                pass
            elif "\\x00\\x00\\x00\\x00" in table.group(0):  # sometimes they sneak by. catch them here
                pass
            else:
                # print table.group(0)
                start = table.start() / 4  # divide by four, since 4 characters per byte in our dump
                stop = table.end() / 4
                count = (stop - start) / 4  # div by 4 again, since 4 bytes per pointer
                delimiter = table.group(2) + table.group(3)
                # print table.group(0)
                # print delimiter
                values = []
                # Can't just do this - sometimes part of the delimiter shows up in the pointer itself! (10-00-00-00)
                # values = table.group(0).split(delimiter)
                # So just slice the string into the first two bytes.
                for x in range(0, len(table.group(0)) - 15, 16):
                    pointer_string = table.group(0)[x : x + 8]
                    pointer_tuple = pointer_string.split("\\x")[1], pointer_string.split("\\x")[2]
                    values.append(pointer_tuple)
                pointers = []
                for (first, second) in values:
                    pointers.append(hex(unpack(first, second)))
                print str(count) + " pointers at " + hex(start) + ", delimiter: " + delimiter
                pointers.sort()
                print pointers
                pointers_filename = "pointers_" + file
                out_file = open(pointers_filename, "w")
                for ptr in pointers:
                    out_file.write(ptr + "\n")

                # next, calculate the diffs. and figure out if it's just 4 over and over
                diffs = []
                for pointer in range(0, len(pointers) - 1):
                    diffs.append(int(pointers[pointer + 1], 16) - (int(pointers[pointer], 16)))
                print diffs
Example #25
def key_set():
	if request.method == "POST":
		try:
			data = loads(request.data.decode('utf-8'))
			app.sk = unpack(data["sk"])
			return dumps({"status": "OK"})
		except KeyError as e:
			return dumps({"status": "ERROR", "message": e.args})
		except Exception as e:
			return dumps({"status": "ERROR", "message": e.args})
	else:
		return dumps({"status": "ERROR", "message":"Use POST method."})
Example #26
 def read(name):
     try:
         start_time = time.time()
         with pa.OSFile(name) as f:
             s = f.read_buffer()
         readtime = time.time() - start_time
         start_time = time.time()
         s = unpack(s)
         untime = time.time() - start_time
         return s, readtime, untime
     except Exception as e:
         logging.warning(e)
         return None
Example #27
def laserize(asteroid):
    # sorter of asteroid tracings depending on the laser movement
    def laserSort(slope, positive):
        prioritize_if = lambda value: 0 if value else 1
        startPointingUp = prioritize_if(slope == infinity and not positive)
        clockwise = (prioritize_if(positive), slope)
        return startPointingUp, clockwise
    
    rotation = cycle(sorted(asteroid.traces.keys(), key=unpack(laserSort)))
    while any(asteroid.traces.values()):
        aligned = asteroid.traces[next(rotation)]
        if aligned:
            yield aligned.pop(0)
Example #28
def capture_mic():
    tscale = 1.0
    p = pyaudio.PyAudio()

    in_stream = p.open(format=pyaudio.paInt16,
                    channels=2,
                    rate=44100,
                    input = True)
    out_stream = p.open(format=pyaudio.paInt16,
                    channels=2,
                    rate=int(44100*tscale),
                    output=True)

    data_to_file = []

    chunk = 512
    prev_data = []
    data = []
    after_data = in_stream.read(chunk)

    for i in range(300):

        after_data = utils.unpack(after_data)
        # after_data = reduce_noise(after_data)

        if prev_data != []:
            concat = np.concatenate((np.concatenate((prev_data, data)), after_data))
            shifted_data = shift_pv(concat, tscale)

        #     data_to_file.extend(shift_data)

            data_out = utils.pack(shifted_data)

            data_out = ''.join(data_out)
            out_stream.write(data_out)

        prev_data = data
        data = after_data
        after_data = in_stream.read(chunk)

    in_stream.stop_stream()
    in_stream.close()

    out_stream.stop_stream()
    out_stream.close()
    wav.write('test.wav',44100*2, array(data_to_file, dtype='int16'))
    p.terminate()
Example #29
    def decompress_into(self, params, scalar_weight, messages, metadata):
        """
        Adds scaled, decompressed, parameters from `messages` to `params`.
        This modifies `params`.
        """
        sign_size = metadata

        signs = bit2byte.unpacking(messages[0], sign_size)
        norms = messages[1]

        shapes = [p.shape for p in params]

        for param, signs, norm in zip(params, unpack(signs, shapes), norms):
            param[:].add_(scalar_weight * norm / signs.nelement(), signs)
Example #30
    def collect_episodes(self,
                         num_episodes: int = 100,
                         deterministic: Optional[Dict[str, bool]] = None,
                         disable_tqdm: bool = True,
                         train_mode: bool = True,
                         gamma: float = 0.99,
                         tau: float = 0.95) -> DataBatch:
        if deterministic is None:
            deterministic = defaultdict(lambda: False)

        self.reset()

        for episode in trange(num_episodes, disable=disable_tqdm):
            obs, _, _, _ = unpack(self.env.reset(train_mode=train_mode)[self.brain_name], self.agent_ids)
            done_ = False
            while not done_:
                # Compute the action for each agent
                action_info = {  # action, logprob, entropy, value
                    agent_id: self.agents[agent_id].compute_single_action(obs[agent_id],
                                                                          deterministic[agent_id])
                    for agent_id in self.agent_ids
                }

                action = {agent_id: action_info[agent_id][0] for agent_id in self.agent_ids}
                logprob = {agent_id: action_info[agent_id][1] for agent_id in self.agent_ids}
                entropy = {agent_id: action_info[agent_id][2] for agent_id in self.agent_ids}
                value = {agent_id: action_info[agent_id][3] for agent_id in self.agent_ids}

                # Actual step in the environment
                next_obs, reward, done, info = unpack(self.env.step(pack(action))[self.brain_name], self.agent_ids)

                # Saving to memory
                self.memory.store(obs, action, reward, logprob, entropy, value, done)
                obs = next_obs
                done_ = done['Agent0']

        return self.memory.get_torch_data(gamma=gamma, tau=tau)
Example #31
    def step(self, params, state):
        bits_sent = state.bits_sent
        messages_sent = state.messages_sent

        # Sum parameters across all workers with an all-reduce
        buffer, shapes = pack(params)

        torch.distributed.all_reduce(buffer)
        bits_sent += num_bits(buffer)
        messages_sent += 1

        buffer /= torch.distributed.get_world_size()

        params = unpack(buffer, shapes)

        return params, self.State(bits_sent, messages_sent)
Example #32
def plot_trajectory(goal_object, runs_dir, img_dir, filename, run_id=0):
    """

    :param goal_object:
    :param runs_dir:
    :param img_dir:
    :param filename:
    :param run_id:
    """
    dataset_states = load_dataset(runs_dir)
    dataset_states = dataset_states[[
        'goal_position', 'goal_angle', 'position', 'initial_position',
        'initial_angle'
    ]]

    run_states = dataset_states.where(dataset_states.run == run_id, drop=True)

    init_position = run_states.initial_position[run_id]
    init_angle = run_states.initial_angle[run_id]

    goal_position = run_states.goal_position[run_id]
    goal_angle = run_states.goal_angle[run_id]

    x_position, y_position = unpack(run_states.position, 'axis')

    fig, ax = plt.subplots(figsize=(7.8, 4.8), constrained_layout=True)
    ax.set_xlabel('x axis', fontsize=11)
    ax.set_ylabel('y axis', fontsize=11)
    ax.grid()

    draw_docking_station(ax, goal_object)
    draw_marxbot(ax, init_position, init_angle, label='initial position')
    draw_marxbot(ax, goal_position, goal_angle, label='goal position')
    plt.plot(x_position,
             y_position,
             color='black',
             label='trajectory',
             linewidth=1)

    ax.set_ylim(-220, 220)
    ax.set_xlim(-250, 250)
    ax.set_aspect('equal')

    plt.legend()
    plt.title('Run %d' % run_id, fontsize=14, weight='bold')

    save_visualisation(filename, img_dir)
Example #33
    def update(self):
        include('jquery-plugins')

        request = self.request

        if 'form.upload' in request:
            dataField = IPhoto['data']
            context = self.context

            files = []

            data = request.form.get('uploadFile', None)
            if data is not None:
                if type(data) is list:
                    files.extend(data)
                else:
                    files.append(data)

            updated = False
            for file in files:
                if not file:
                    continue

                for file in unpack(file):
                    name = file.filename

                    if name in context and \
                            not IPhoto.providedBy(context[name]):
                        photo = context[name]
                        field = dataField.bind(photo)
                        field.set(photo, file)
                        transaction.commit()
                        continue
                    elif name in context:
                        del context[name]

                    photo = Photo(name)
                    event.notify(ObjectCreatedEvent(photo))
                    context[name] = photo
                    field = dataField.bind(photo)
                    field.set(photo, file)
                    transaction.commit()
                    updated = True

            if updated:
                IStatusMessage(request).add(_(u'Files have been uploaded.'))
Example #34
    def delete(self, uid):
        """Remove the edge with the given identifier `uid`"""
        # lookup the uid
        self._cursor.set_key(uid)
        if self._cursor.search() == WT_NOT_FOUND:
            raise KeyError('Edge not found, identifier: %s' % uid)
        else:
            # remove primary row
            _, _, _, data = self._cursor.get_value()
            properties = unpack(data)
            self._cursor.remove()

            # remove indices if any
            for key in properties.keys():
                if key in self._indices:
                    self._keys.set_key(key, properties[key], uid)
                    self._cursor.remove()
Example #36
def plot_positions_heatmap(goal_object, runs_dir, img_dir, filename):
    """

    :param goal_object:
    :param runs_dir:
    :param img_dir:
    :param filename:
    """

    dataset_states = load_dataset(runs_dir)

    x, y = unpack(dataset_states.position, 'axis')

    n_bins = 100
    grid_x, bins_x = pd.cut(x.data, n_bins, retbins=True)
    grid_y, bins_y = pd.cut(y.data, n_bins, retbins=True)

    grid = np.stack([grid_y.codes, grid_x.codes])
    unique, counts = np.unique(grid, axis=-1, return_counts=True)

    mesh = np.zeros([n_bins, n_bins])
    mesh[unique[0], unique[1]] = counts

    plt.figure()

    cmap = plt.get_cmap('viridis')
    cmap.set_over('w')

    plt.pcolormesh(bins_x,
                   bins_y,
                   mesh,
                   cmap=cmap,
                   norm=colors.PowerNorm(0.5),
                   vmax=200)

    cbar = plt.colorbar()
    cbar.set_label('samples per grid cell (clipped)', labelpad=15)

    plt.axis('image')
    plt.xlabel('x axis', fontsize=11)
    plt.ylabel('y axis', fontsize=11)

    draw_docking_station(plt.gca(), goal_object)

    save_visualisation(filename, img_dir)
Example #37
    def generator_read_next_data_block_int8(self, chan=-1, nchan=1):
        """ Read the next block of data and its header

        Returns: (header, d_x, d_y)
            header (dict): dictionary of header metadata
            d_x, d_y (np.array): Numpy arrays of int8 data, one per polarization.
        """
        header, head_idx = self.read_header()
        #print(head_idx)
        n_chan = int(header['OBSNCHAN'])
        n_pol = int(header['NPOL'])  #should be 4
        if n_pol == 2:
            n_pol = 4
        n_bit = int(header['NBITS'])
        blocsize = int(header['BLOCSIZE'])
        if chan > 0:
            data_idx = head_idx + chan * n_pol * int(n_bit / 8)
            blocsize //= (n_chan // nchan)
            n_chan = nchan
        else:
            data_idx = head_idx
        self.file_obj.seek(data_idx, 0)

        n_samples = int(blocsize / (n_chan * n_pol * (float(n_bit) / 8)))

        d = np.fromfile(self.file_obj, count=blocsize, dtype='int8')

        # Handle 2-bit and 4-bit data
        if n_bit != 8:
            d = unpack(d, n_bit)

        d = d.reshape((n_chan, n_samples, n_pol))  # Real, imag

        if self._d_x.shape != d[..., 0:2].shape:
            self._d_x = np.ascontiguousarray(
                np.zeros(d[..., 0:2].shape, dtype='int8'))
            self._d_y = np.ascontiguousarray(
                np.zeros(d[..., 2:4].shape, dtype='int8'))

        self._d_x[:] = d[..., 0:2]
        self._d_y[:] = d[..., 2:4]
        if chan > 0:
            data_idx = head_idx + header['BLOCSIZE']
            self.file_obj.seek(data_idx, 0)
        return header, self._d_x, self._d_y
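For `NBITS` below 8, `unpack(d, n_bit)` widens packed samples to one int8 value each; the helper itself is not shown. A numpy sketch of the 4-bit case, assuming two signed nibbles per byte with the high nibble first:

import numpy as np

def unpack_4bit(d):
    # Hypothetical: split each int8 byte into two signed 4-bit samples,
    # sign-extending via arithmetic shifts.
    hi = d >> 4
    lo = (d.astype(np.int16) << 4).astype(np.int8) >> 4
    return np.stack([hi, lo], axis=-1).reshape(-1)

assert list(unpack_4bit(np.array([0x12, -16], dtype=np.int8))) == [1, 2, -1, 0]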
Example #38
    def parse(self, data):
        self.src_port = unpack("!H", data[:2])
        self.dst_port = unpack("!H", data[2:4])

        self.seq_num = unpack("!I", data[4:8])
        self.ack_num = unpack("!I", data[8:12])
        self.headlen = (ord(data[12]) >> 4) * 4 # head len is counted as 4 byte words
        self.flags = ((ord(data[12]) & 1) << 8) + (ord(data[13]))
        self.window = unpack("!H", data[14:16])
        self.urgent = unpack("!H", data[18:20])
        
        if self.headlen>20:       # have option
            self.parse_option(data[20:self.headlen])
Example #39
def post_userCon_setupFirstUser():
    jdata = bu.get_jdata(ensure="email fname lname pw")
    email, fname, lname, pw = utils.unpack(jdata, "email fname lname pw")
    print(email, fname, lname, pw)
    userCount = userMod.getUserCount()
    if userCount >= 1:
        return bu.abort(
            "Setup already completed. Please visite /login to log in.")
    user = userMod.buildUser(
        email=email,
        fname=fname,
        lname=lname,
        pw=pw,
        isRootAdmin=True,
        isVerified=True,
        accessLevel=auth.alm.getMaxLevel(),
    )
    userMod.insertUser(user)
    return auth.sendAuthSuccessResponse(user)
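`utils.unpack(jdata, "email fname lname pw")` evidently pulls several keys out of a dict in one call. A plausible one-liner with that contract, inferred from the call site:

def unpack(d, keys):
    # Hypothetical: values for a space-separated list of keys, in order.
    return tuple(d[k] for k in keys.split())

assert unpack({"a": 1, "b": 2}, "b a") == (2, 1)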
Example #40
def plot_initial_positions(goal_object, runs_dir, img_dir, filename):
    """
    :param goal_object:
    :param runs_dir:
    :param img_dir:
    :param filename:
    """
    dataset_states, splits = load_dataset(runs_dir, load_splits=True)
    step_states = dataset_states.where(dataset_states.step == 0, drop=True)

    plt.figure(figsize=(7.8, 4.8), constrained_layout=True)

    radius = 8.5
    for i, name in enumerate(splits.split_names):
        split_states = step_states.where(splits == i)
        x, y = unpack(split_states.initial_position, 'axis')
        plt.plot(x,
                 y,
                 'o',
                 label=name,
                 alpha=0.1,
                 markersize=(radius * np.pi) / 2,
                 markeredgecolor='none')

    ax = plt.gca()

    draw_docking_station(ax, goal_object)

    goal_position = step_states.goal_position[0]
    goal_angle = step_states.goal_angle[0]

    draw_marxbot(ax, goal_position, goal_angle, label='goal position')

    ax.set_ylim(-220, 220)
    ax.set_xlim(-250, 250)
    ax.set_aspect('equal')
    plt.legend()

    plt.xlabel('x axis', fontsize=11)
    plt.ylabel('y axis', fontsize=11)

    save_visualisation(filename, img_dir)
Example #41
    def recv_packet(self, payme=False, timeout=None):
        buff = []
        packet_len = -1
        t = time.time()
        while len(buff) != packet_len:

            if timeout and time.time() >= t + timeout:
                print('Timeout')
                return None

            #print(packet_len, print(len(buff)))
            c = self.ser.read()
            c = ord(c)
            if not buff:
                if c in self.packet_starts:
                    buff.append(c)
            elif len(buff) == 1:
                buff.append(c)
                packet_len = 4 + (buff[0] & 0x07) + buff[1]
            else:
                buff.append(c)

        payload = bytes(buff[4:])
        if buff[0] == 0x80:
            try:
                c, attr, typ = unpack('BHB', payload[:4])
                if (attr in self.emg_char) and not self.terminate:  #==0x27:
                    if self.emg_count == 0:
                        self.start_time = time.time()
                    self.emg_count += 1
                    vals = tuple(int(b) - (b // 128) * 256
                                 for b in buff[9:])  #unpack('8HB', payload[5:]
                    self.on_emg_data(vals)
                    #self.on_emg_data(vals[8:])
            except Exception:
                pass

        if timeout and time.time() >= (t + timeout):
            print('Timeout.')
            return None
        elif payme:
            return buff[:4] + [payload]
Example #42
    def step(self, params, state):
        bits_sent, messages_sent = state

        # Pack all parameters in one flat vector `p` to speed up the computation
        p, original_shapes = pack(params)

        seed = self.rng.randint(1_000_000_000)
        noise = torch.rand_like(p)

        q = self.stochastic_rounding(self.modulo(p / self.Btheta, 1) + 0.5, noise) - 0.5
        xhat = q * self.Btheta - self.modulo(p, self.Btheta)  # left out + x_{k_i}

        # Send compressed messages to the neighbors
        my_rank = torch.distributed.get_rank()
        send_handles = []
        for neighbor_rank in self.topology.neighbor_ranks(my_rank):
            handle = isend(q, neighbor_rank)
            bits_sent += q.nelement() * 2
            messages_sent += 1
            send_handles.append(handle)

        # Receive messages and update the parameter p
        recv_buffer = torch.empty_like(q)
        for neighbor_rank in self.topology.neighbor_ranks(my_rank):
            recv(recv_buffer, neighbor_rank)
            weight = self.topology.weight(my_rank, neighbor_rank)
            p.add_(
                self.diffusion_rate * weight,
                self.modulo(recv_buffer * self.Btheta - p, self.Btheta) - xhat,
            )

        # Make sure all sends are finished
        for handle in send_handles:
            handle.wait()

        # Unpack the parameters back into a list form
        params = unpack(p, original_shapes)
        return params, self.State(bits_sent, messages_sent)
Example #43
    def fromUnicode(self, u):
        uri, title, description = utils.unpack(u)

        m = utils.uri.match(uri)
        if m is not None and m.group('protocol'):
            return ExternalLink(uri, title, description)
        else:
            # resolve object
            path = str(uri)
            
            try:
                portal = getSite()
                context = getattr(self.context, '__parent__', portal)
                if path.startswith('/'):
                    item = portal.restrictedTraverse(path[1:])
                else:
                    item = context.restrictedTraverse(path)

                reference = item.UID()
                
            except KeyError:
                reference = None

            return InternalLink(reference, title, description)
Example #45
 def parse(self, incoming_data):
     self.src_port = unpack("!H", incoming_data[:2])
     self.dst_port = unpack("!H", incoming_data[2:4])
     self.headlen = unpack("!H", incoming_data[4:6])
Example #46
 def parse(self, incoming_data):
     self.dst_mac = incoming_data[:6]
     self.src_mac = incoming_data[6:12]
     self.protocol = unpack("!H", incoming_data[12:14])
     self.headlen = 14
Example #47
 def fromtype(self, stuff):
     if(G.SEEINTERNAL):
         #print "The type sees: " + repr(stuff)
         #print "Function sees: " + repr(unpack(stuff))
         pass
     return unpack(stuff)
Example #48
  install, and also for correct apt-get invocation to install them all
  on for example Debian / Ubuntu.
            """
            return

        if not posix_test_cc():
            utils.output('c++ compiler not found.')
            return

        utils.goto_build()
        tbfn = os.path.join(config.archive_dir, python_fname)
        pybasename = 'Python-%s' % (PYVER_STR,)
        build_dir = os.path.join(config.build_dir, pybasename)

        if not os.path.exists(build_dir):
            utils.unpack(tbfn)

        os.chdir(build_dir)
        ret = os.system(
            './configure --enable-shared --prefix=%s/python' %
            (config.inst_dir,))

        if ret != 0:
            utils.error('Python configure error.')

        # config.MAKE contains -j setting
        # I've had this break with Python 2.6.2, so I'm using straight make here...
        ret = os.system('%s install' % ('make',))
        if ret != 0:
            utils.error('Python build error.')
            
Example #49
def read_chunk(host, port, chunkid, version, size, offset=0):
    if offset + size > CHUNKSIZE:
        raise ValueError("size too large %s > %s" % 
            (size, CHUNKSIZE-offset))
    
    from dpark.accumulator import RemoteReadBytes

    conn = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    conn.settimeout(10)
    conn.connect((host, port))

    msg = pack(CUTOCS_READ, uint64(chunkid), version, offset, size) 
    n = conn.send(msg)
    while n < len(msg):
        if not n:
            raise IOError("write failed")
        msg = msg[n:]
        n = conn.send(msg)
   
    def recv(n):
        d = conn.recv(n)
        while len(d) < n:
            nd = conn.recv(n-len(d))
            if not nd:
                raise IOError("not enough data")
            d += nd 
        return d

    while size > 0:
        cmd, l = unpack("II", recv(8))

        if cmd == CSTOCU_READ_STATUS:
            if l != 9:
                raise Exception("readblock: READ_STATUS incorrect message size")
            cid, code = unpack("QB", recv(l))
            if cid != chunkid:
                raise Exception("readblock; READ_STATUS incorrect chunkid")
            conn.close()
            return

        elif cmd == CSTOCU_READ_DATA:
            if l < 20 :
                raise Exception("readblock; READ_DATA incorrect message size")
            cid, bid, boff, bsize, crc = unpack("QHHII", recv(20))
            if cid != chunkid:
                raise Exception("readblock; READ_STATUS incorrect chunkid")
            if l != 20 + bsize:
                raise Exception("readblock; READ_DATA incorrect message size ")
            if bsize == 0 : # FIXME
                raise Exception("readblock; empty block")
                #yield ""
                #continue
            if bid != offset >> 16:
                raise Exception("readblock; READ_DATA incorrect block number")
            if boff != offset & 0xFFFF:
                raise Exception("readblock; READ_DATA incorrect block offset")
            breq = 65536 - boff
            if size < breq:
                breq = size
            if bsize != breq:
                raise Exception("readblock; READ_DATA incorrect block size")
           
            while breq > 0:
                data = conn.recv(breq)
                if not data:
                    #print chunkid, version, offset, size, bsize, breq
                    raise IOError("unexpected ending: need %d" % breq)
                RemoteReadBytes.add(len(data))
                yield data
                breq -= len(data)

            offset += bsize
            size -= bsize
        else:
            raise Exception("readblock; unknown message: %s" % cmd)
    conn.close()
Example #50
 def fromtype(self, stuff):
     result = unpack(stuff[1:-1:2])
     if(stuff[0]=="\x02"):
         result *= -1
     return result
Example #51
 def get(self, key):
     self._cursor.set_key(key)
     if self._cursor.search() == WT_NOT_FOUND:
         raise KeyError(key)
     return unpack(self._cursor.get_value())
Example #52
def real_time_modify():
    tscale = 1.0
    p = pyaudio.PyAudio()

    in_stream = p.open(format=pyaudio.paInt16,
                    channels=2,
                    rate=44100,
                    input = True)
    out_stream = p.open(format=pyaudio.paInt16,
                    channels=2,
                    rate=int(44100*tscale),
                    output=True)

    chunk = 1024

    data = in_stream.read(chunk)
    data = utils.unpack(data)
    print 'data', len(data)

    next_data = in_stream.read(chunk)
    next_data = utils.unpack(next_data)

    amp = 0
    out_data = np.concatenate(([], zeros(chunk)))

    L = len(data)
    N = L/2
    H = N/2

    phi = zeros(N)
    out = zeros(N, dtype=complex)

    win = hanning(N)
    p = 0
    pp = 0

    for i in range(0,300):
        concat = np.concatenate(((data, next_data)))
        amp = max(amp, max(concat))

        out_data = np.concatenate((out_data, zeros(N/tscale)))
        p = 0
        for i in range(2):
            # take the spectra of two consecutive windows
            p1 = int(p)
            spec1 = fft(win*concat[p1:p1+N])
            spec2 = fft(win*concat[p1+H:p1+N+H])

            # take their phase difference and integrate
            phi += (angle(spec2) - angle(spec1))
            out.real, out.imag = cos(phi), sin(phi)

            # inverse FFT and overlap-add
            print 'pp:pp+N', pp, pp+N
            out_data[pp:pp+N] += win*ifft(abs(spec2)*out)
            pp += H
            p += H*tscale

        out_data = amp*out_data/max(out_data)

        out_formatted = utils.pack(out_data[i*chunk:(i+1)*chunk])
        out_formatted = ''.join(out_formatted)
        out_stream.write(out_formatted)

        data = next_data
        next_data = in_stream.read(chunk)
        next_data = utils.unpack(next_data)

    in_stream.stop_stream()
    in_stream.close()

    out_stream.stop_stream()
    out_stream.close()
    # wav.write('test.wav',44100*2, array(data_to_file, dtype='int16'))
    p.terminate()
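In this and Example #28, `utils.unpack`/`utils.pack` convert between raw PyAudio byte buffers and numeric samples; their definitions are not on this page. A Python 3 sketch with that contract for 16-bit frames:

import numpy as np

def unpack(raw_bytes):
    # Hypothetical: little-endian int16 frames -> numpy array of samples.
    return np.frombuffer(raw_bytes, dtype=np.int16)

def pack(samples):
    # Hypothetical inverse: cast back to int16 and re-encode as raw bytes.
    return np.asarray(samples).astype(np.int16).tobytes()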