Example #1
def parser(polynom_str, flag):
    """Parse a "lhs = rhs" polynomial string (degree <= 2) into coefficient lists,
    print the reduced form, and return its coefficients [c0, c1, c2] (None on failure)."""
    matrix = []
    matrix2 = []
    polynom1 = []
    polynom2 = []

    if '(' in polynom_str or ')' in polynom_str:
        print('\033[1m\033[31mError: no bracket handling\033[0m')
        exit()

    polynom_str = polynom_str.split('=')
    polynom1 = list(filter(None, polynom_str[0].split(' ')))
    if len(polynom_str) > 1:
        polynom2 = list(filter(None, polynom_str[1].split(' ')))
    else:
        print(
            '\033[1m\033[31mWarning: no equal sign (ignored). Value by default: = 0\033[0m'
        )

    polynom1 = tools.transform(polynom1)
    polynom2 = tools.transform(polynom2)

    matrix = toMatrix(polynom1)
    matrix2 = toMatrix(polynom2)

    if matrix is None or matrix2 is None:
        return None

    if flag:
        print("STEP 1: ",
              str(matrix[2]) + " * X^2 + " if matrix[2] != 0 else "",
              str(matrix[1]) + " * X + " if matrix[1] != 0 else "",
              str(matrix[0]) if matrix[0] != 0 else "0",
              " = ",
              str(matrix2[2]) + " * X^2 + " if matrix2[2] != 0 else "",
              str(matrix2[1]) + " * X + " if matrix2[1] != 0 else "",
              str(matrix2[0]) if matrix2[0] != 0 else "0",
              sep='')

    matrix[0] -= matrix2[0]
    matrix[1] -= matrix2[1]
    matrix[2] -= matrix2[2]

    print("\033[1m\033[32mPolynomial degree is:", str(tools.degree(matrix)),
          "\033[0m")

    print("\033[1m\033[32mReduced form: ",
          str(matrix[2]) + " * X^2 + " if matrix[2] != 0 else "",
          str(matrix[1]) + " * X + " if matrix[1] != 0 else "",
          str(matrix[0]) if matrix[0] != 0 else "0",
          " = 0\033[0m",
          sep='')

    return matrix
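The parser stores each side of the equation as a coefficient list indexed by power (matrix[0] + matrix[1] * X + matrix[2] * X^2) and reduces the equation by subtracting the right-hand side from the left. A minimal sketch of just that reduction step, independent of the project's tools.transform and toMatrix helpers (reduce_sides is a hypothetical name):

def reduce_sides(lhs, rhs):
    # element-wise difference of the two coefficient lists [c0, c1, c2]
    return [l - r for l, r in zip(lhs, rhs)]

# 5 + 4*X + X^2 = 1 + 2*X  ->  reduced form 4 + 2*X + X^2 = 0
print(reduce_sides([5, 4, 1], [1, 2, 0]))  # [4, 2, 1]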
Example #2
def mapping(grid, data, pose, res, angles):
    '''
        Mapping with the given data points and robot pose
        
        Input:
            grid  - N x N Map
            data  - dict with lidar scan data and joint angles
            pose  - best estimated robot pose (x, y, theta)
            res   - resolution
            angles - lidar scan angles (from -135 to 135 degrees)
        Outputs:
            grid  - constructed map
    '''
    free_odd = np.log(9)/4   # log-odds decrement for cells a beam passes through
    occu_odd = np.log(9)     # log-odds increment for cells a beam ends in

    X, Y     = polar2cart(data['scan'], angles)  # polar coord -> cartesian coord
    scan     = transform(X, Y, data['joint'], pose)
    scan     = filtering(scan, pose)
    xi, yi   = (scan[0]/res).astype(int), (scan[1]/res).astype(int)

    for (a, b) in zip(xi, yi):
        line = bresenham2D(int(pose[0]/res), int(pose[1]/res), a, b).astype(np.int16)
        x    = a + grid.shape[0]//2  # offset to center
        y    = b + grid.shape[1]//2  # offset to center
        grid[x, y] += occu_odd  # scan endpoint -> more likely occupied
        # cells traversed by the beam (endpoint included) -> more likely free
        grid[line[0] + grid.shape[0]//2, line[1] + grid.shape[1]//2] -= free_odd
        
    grid[grid >= 100]  = 100
    grid[grid <= -100] = -100
        
    return grid
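mapping() performs a standard log-odds occupancy update: cells traced by Bresenham between the robot and the scan endpoint become more likely free, and the endpoint cell more likely occupied. A self-contained sketch of that update with a simple integer Bresenham stand-in for the project's bresenham2D (helper name and toy coordinates are illustrative only):

import numpy as np

def bresenham(x0, y0, x1, y1):
    # integer cells on the line from (x0, y0) to (x1, y1), endpoint included
    cells = []
    dx, dy = abs(x1 - x0), abs(y1 - y0)
    sx, sy = (1 if x1 >= x0 else -1), (1 if y1 >= y0 else -1)
    err = dx - dy
    x, y = x0, y0
    while True:
        cells.append((x, y))
        if x == x1 and y == y1:
            break
        e2 = 2 * err
        if e2 > -dy:
            err -= dy
            x += sx
        if e2 < dx:
            err += dx
            y += sy
    return np.array(cells).T  # shape (2, number of cells)

grid = np.zeros((11, 11))
occu_odd, free_odd = np.log(9), np.log(9) / 4
ray = bresenham(5, 5, 9, 8)           # sensor cell (5, 5), scan endpoint (9, 8)
grid[9, 8] += occu_odd                # endpoint -> more likely occupied
grid[ray[0], ray[1]] -= free_odd      # traversed cells -> more likely free
grid = np.clip(grid, -100, 100)       # saturate log-odds, as mapping() does
Example #3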
def base64_to_tensor(img, crop_n_resize=True):
    model = model_init()
    msg = base64.b64decode(img)
    buf = io.BytesIO(msg)
    img = Image.open(buf).convert('RGB')
    if crop_n_resize:
        img = crop_and_resize(img, model.input_size)
    img.save("last_image.jpg")
    transform = transforms.Compose([
        transforms.ToTensor(),
        transforms.Resize((model.input_size, model.input_size))
    ])
    return transform(img)
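A self-contained usage sketch of the same base64 -> PIL -> tensor pipeline, without the project-specific model_init() and crop_and_resize() helpers; the file name photo.jpg and the 224-pixel input size are assumptions:

import base64, io
from PIL import Image
from torchvision import transforms

with open("photo.jpg", "rb") as f:          # placeholder: any JPEG/PNG on disk
    encoded = base64.b64encode(f.read())

img = Image.open(io.BytesIO(base64.b64decode(encoded))).convert('RGB')
to_tensor = transforms.Compose([
    transforms.ToTensor(),                  # PIL image -> CHW float tensor in [0, 1]
    transforms.Resize((224, 224)),          # recent torchvision versions resize tensors too
])
print(to_tensor(img).shape)                 # torch.Size([3, 224, 224])
Example #4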
def measurement_model_update(MAP, P, data, angles):
    '''
        measurement model update
        
        Input:
            MAP    - map object
            P      - list of particle states
            data   - current scan data
            angles - lidar scan angles (from -135 to 135 degrees)
        Outputs:
            best_particle - chosen best particle (x, y, theta)
    '''
    # calculate map correlation for each particle
    l = 2
    corrs = []
    res = MAP['res']
    particles = P['states']

    grid_tmp = np.zeros_like(MAP['map'])  # binary grid for computing correlation
    grid_tmp[MAP['map'] > 0] = 1  # occupied
    grid_tmp[MAP['map'] < 0] = 0  # free

    X, Y = polar2cart(data['scan'], angles)  # polar coord -> cartesian coord
    x_im = np.arange(MAP['xmin'], MAP['xmax'] + res, res)
    y_im = np.arange(MAP['ymin'], MAP['ymax'] + res, res)
    x_range = np.arange(-res * l, res * l + res, res)
    y_range = np.arange(-res * l, res * l + res, res)

    for i in range(len(particles)):
        scan = transform(X, Y, data['joint'], particles[i])
        scan = filtering(scan, particles[i])

        x, y = scan[0], scan[1]
        corr = mapCorrelation(grid_tmp, x_im, y_im, np.vstack(
            (x, y)), particles[i][0] + x_range, particles[i][1] + y_range)
        corrs.append(np.max(corr))

    # get the particle with largest weight
    corrs = np.array(corrs)
    P['weight'] = softmax(P['weight'] * corrs)
    best_idx = np.argmax(P['weight'])
    best_particle = particles[best_idx]

    return best_particle
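A toy sketch of the re-weighting and best-particle selection step above, using synthetic correlation scores in place of mapCorrelation and scipy's softmax in place of the project's helper:

import numpy as np
from scipy.special import softmax

weights = np.ones(4) / 4                    # four particles, uniform prior weights
corrs = np.array([2.0, 5.0, 3.0, 1.0])      # max map correlation per particle (synthetic)
weights = softmax(weights * corrs)          # re-weight, as in measurement_model_update
best_idx = np.argmax(weights)               # particle with the largest weight
print(best_idx, weights)                    # 1, the weights peak at particle 1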
Example #5
 def __init__(self,
              _id,
              nb_shards,
              nb_notaries,
              powdiff,
              main_genesis,
              beacon_genesis,
              shard_geneses,
              sleepy=False,
              careless=False,
              base_ts_diff=1,
              skip_ts_diff=6):
     self.blocks = {
         beacon_genesis.hash: beacon_genesis,
         main_genesis.hash: main_genesis
     }
     for s in shard_geneses:
         self.blocks[s.hash] = s
     self.sigs = {}
     self.latency_dist = transform(normal_distribution(20, (20 * 2) // 5),
                                   lambda x: max(x, 0))
     self.beacon_chain = [beacon_genesis.hash]
     self.main_chain = [main_genesis.hash]
     self.shard_chains = [[g.hash] for g in shard_geneses]
     self.timequeue = []
     self.parentqueue = {}
     self.children = {}
     self.ts = 0
     self.id = _id
     self.agents = []
     self.peers = []
     self.objqueue = {}
     self.globalTime = 0
     self.used_parents = {}
     self.processed = {}
     self.sleepy = sleepy
     self.careless = careless
     self.powdiff = powdiff
     self.nb_shards = nb_shards
     self.nb_notaries = nb_notaries
     self.base_ts_diff = base_ts_diff
     self.skip_ts_diff = skip_ts_diff
     self.reliability = 0.9
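latency_dist draws network latencies from a normal distribution clipped at zero. The normal_distribution and transform helpers come from the surrounding simulation code; a hypothetical minimal version of them could look like this:

import random

def normal_distribution(mean, std):
    # sampler drawing integer latencies from N(mean, std)
    return lambda: int(round(random.normalvariate(mean, std)))

def transform(sampler, fn):
    # wrap a sampler so every draw is post-processed by fn
    return lambda: fn(sampler())

latency_dist = transform(normal_distribution(20, (20 * 2) // 5), lambda x: max(x, 0))
print([latency_dist() for _ in range(5)])   # e.g. [14, 27, 0, 19, 31], never negative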
Example #6
 def __init__(self, id, nb_shards, nb_notaries, powdiff, main_genesis,
              beacon_genesis, shard_geneses, sleepy=False, careless=False,
              base_ts_diff=1, skip_ts_diff=6):
     self.blocks = {
         beacon_genesis.hash: beacon_genesis,
         main_genesis.hash: main_genesis
     }
     for s in shard_geneses:
         self.blocks[s.hash] = s
     self.sigs = {}
     self.latency_dist = transform(normal_distribution(20, (20 * 2) // 5), lambda x: max(x, 0))
     self.beacon_chain = [beacon_genesis.hash]
     self.main_chain = [main_genesis.hash]
     self.shard_chains = [[g.hash] for g in shard_geneses]
     self.timequeue = []
     self.parentqueue = {}
     self.children = {}
     self.ts = 0
     self.sreq = []
     self.agents = []
     self.peers = []
     self.objqueue = {}
     self.globalTime = 0
     self.used_parents = {}
     self.processed = {}
     self.sleepy = sleepy
     self.careless = careless
     self.powdiff = powdiff
     self.nb_shards = nb_shards
     self.base_ts_diff = base_ts_diff
     self.skip_ts_diff = skip_ts_diff
     self.reliability = 0.9
     self.comm = MPI.COMM_WORLD
     #self.id = id
     self.id = self.comm.Get_rank()
     #self.nb_notaries = nb_notaries
     self.nb_notaries = self.comm.Get_size()
     self.log_file = "results/" + str(self.id) + ".txt"
     try:
         os.remove(self.log_file)
     except OSError:
         pass  # no previous log file to remove
     self.tag = 1226
     self.log("Agent initialized!")
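This variant replaces the constructor arguments id and nb_notaries with the process's MPI rank and the communicator size, so each MPI process acts as one notary. A minimal mpi4py sketch of that pattern (agent_sketch.py is a placeholder file name):

from mpi4py import MPI

comm = MPI.COMM_WORLD
rank = comm.Get_rank()      # this process's index, used as self.id above
size = comm.Get_size()      # number of processes, used as self.nb_notaries above
print("agent %d of %d initialized" % (rank, size))
# run with: mpiexec -n 4 python agent_sketch.py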