Example No. 1
def main():
    # Collect every input line (from stdin or from files named on the command line).
    args = []
    for line in fileinput.input():
        args.append(line.rstrip())
    input = args[0]  # the DNA string is expected on the first line
    skew_array = iterative_skew(input)
    utils.print_array(skew_array)
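This and several of the later command-line examples call utils.print_array without showing its definition. Judging from the calls, it simply prints the elements of a list, so a minimal stand-in under that assumption (not the project's actual helper, which may format its output differently) might be:

def print_array(arr):
    # Hypothetical stand-in: print the elements separated by spaces on one line.
    print(' '.join(str(x) for x in arr))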
Example No. 2
def main():
    args = []
    for line in fileinput.input():
        args.append(line.rstrip())
    input = args[0]    # text to search in
    pattern = args[1]  # pattern whose occurrence indices are reported
    count, indices = find_incidence(input, pattern)
    utils.print_array(indices)
Example No. 3
def main():
    args = []
    for line in fileinput.input():
        # Split the line on spaces and skip the literal 'Input' marker token.
        elems = line.rstrip().split(' ')
        for el in elems:
            if el != 'Input':
                args.append(el)
    input = args[0]
    k = int(args[1])
    L = int(args[2])
    t = int(args[3])
    kmers = find_clumps(input, k, L, t)
    utils.print_array(kmers)
Example No. 4
def main():
    args = []
    for line in fileinput.input():
        elems = line.rstrip().split(' ')
        for el in elems:
            if el != 'Input':
                args.append(el)
    input = args[0]
    k = int(args[1])
    L = int(args[2])
    t = int(args[3])
    kmers = find_clumps(input, k, L, t)
    utils.print_array(kmers)
Example No. 5
    def simple_plan(self, c_start, c_goal, verbose=True, **kwargs):
        """
        Generate a plan in observation space given start and goal states via interpolation.
        :param c_start: bs x c_dim
        :param c_goal: bs x c_dim
        :return: rollout: horizon x bs x channel_dim x img_W x img_H
        """
        with torch.no_grad():
            rollout = []
            _z = Variable(torch.randn(c_start.size()[0],
                                      self.rand_z_dim)).cuda()
            for t in range(self.plan_length):
                c = c_start + (c_goal - c_start) * t / self.plan_length
                c_next = c_start + (c_goal - c_start) * (t +
                                                         1) / self.plan_length
                # _z = Variable(torch.randn(c.size()[0], self.rand_z_dim)).cuda()

                _cur_img, _next_img = self.G(_z, c, c_next)
                if t == 0:
                    rollout.append(_cur_img)
                next_img = _next_img
                rollout.append(next_img)
                if verbose:
                    # import ipdb; ipdb.set_trace()
                    print("\t c_%d: %s" % (t, print_array(c[0].data)))
                    # print("\t Transition var: %s" % print_array(self.T.get_var(c_start[0, None]).data[0]))
                    # print("\t Direction: %s" % print_array((c_goal-c_start).data[0]/self.planning_horizon))
        return rollout
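simple_plan builds its rollout by moving the latent code linearly from c_start to c_goal over plan_length steps. Isolated as a small sketch (interpolate_codes is a hypothetical name; only the formula is taken from the method above):

def interpolate_codes(c_start, c_goal, plan_length):
    # c_start, c_goal: bs x c_dim tensors. Returns plan_length + 1 codes moving
    # linearly from c_start to c_goal, following
    # c_t = c_start + (c_goal - c_start) * t / plan_length as in simple_plan.
    return [c_start + (c_goal - c_start) * t / plan_length
            for t in range(plan_length + 1)]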
Example No. 6
 def dump(self):
     return print_array('Commands Registered',
                        commandFormat.format('Name', 'Min Permissions', 'Channel Type', 'Challonge', 'Aliases', 'Required Args', 'Optional Args'),
                        self._commands,
                        lambda c: commandFormat.format(c.name,
                                                       c.attributes.minPermissions.name,
                                                       c.attributes.channelRestrictions.name,
                                                       c.attributes.challongeAccess.name,
                                                       '-' if len(c.aliases) == 0 else '/'.join(c.aliases),
                                                       '-' if len(c.reqParams) == 0 else '/'.join(c.reqParams),
                                                       '-' if len(c.optParams) == 0 else '/'.join(c.optParams)))
Example No. 7
 def dump(self):
     return print_array(
         'Commands Registered',
         commandFormat.format('Name', 'Min Permissions', 'Channel Type',
                              'Challonge', 'Aliases', 'Required Args',
                              'Optional Args'), self._commands,
         lambda c: commandFormat.format(
             c.name, c.attributes.minPermissions.name, c.attributes.
             channelRestrictions.name, c.attributes.challongeAccess.name,
             '-' if len(c.aliases) == 0 else '/'.join(c.aliases), '-'
             if len(c.reqParams) == 0 else '/'.join(c.reqParams), '-'
             if len(c.optParams) == 0 else '/'.join(c.optParams)))
Example No. 8
    def astar_plan(self, c_start, c_goal, verbose=True, **kwargs):
        """
        Generate a plan in observation space given start and goal states via A* search.
        :param c_start: bs x c_dim
        :param c_goal: bs x c_dim
        :return: rollout: horizon x bs x channel_dim x img_W x img_H
        """
        with torch.no_grad():
            rollout = []
            # _z = Variable(torch.randn(c_start.size()[0], self.rand_z_dim)).cuda()
            bs = c_start.size()[0]
            traj = plan_traj_astar(
                kwargs['start_obs'],
                kwargs['goal_obs'],
                start_state=c_start[0].data.cpu().numpy(),
                goal_state=c_goal[0].data.cpu().numpy(),
                transition_function=self.continuous_transition_function,
                preprocess_function=self.preprocess_function,
                discriminator_function=self.discriminator_function_np,
                generator_function=self.conditional_generator_function)

            for t, disc in enumerate(traj[:-1]):
                state = undiscretize(disc.state, self.discretization_bins,
                                     self.P.unif_range)
                state_next = undiscretize(traj[t + 1].state,
                                          self.discretization_bins,
                                          self.P.unif_range)
                c = from_numpy_to_var(state).repeat(bs, 1)
                c_next = from_numpy_to_var(state_next).repeat(bs, 1)
                _z = Variable(torch.randn(c.size()[0], self.rand_z_dim)).cuda()

                _cur_img, _next_img = self.G(_z, c, c_next)
                if t == 0:
                    rollout.append(_cur_img)
                next_img = _next_img
                rollout.append(next_img)
                if verbose:
                    # import ipdb; ipdb.set_trace()
                    print("\t c_%d: %s" % (t, print_array(c[0].data)))
        return rollout
Example No. 9
def main():
    args = []
    for line in fileinput.input():
        args.append(line.rstrip())
    input = args[0]
    utils.print_array(nucleiotide_incidence(input))
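nucleiotide_incidence is not shown in this example; since its result is printed as an array, it presumably returns per-nucleotide counts for the input string. A plausible minimal version under that assumption (the real helper may count or order the bases differently):

def nucleiotide_incidence(dna):
    # Assumed behaviour: occurrence count of each base, in A, C, G, T order.
    return [dna.count(base) for base in ('A', 'C', 'G', 'T')]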
Example No. 10
    def closest_code(self,
                     obs,
                     n_trials,
                     use_second,
                     metric,
                     regress_bs,
                     verbose=True):
        """
        Get the code that generates an image with closest distance to obs.
        :param obs: 1 x channel_dim x img_W x img_H
        :param n_trials: number of copies to search
        :param use_second: bool, to measure distance using the second image
        :param metric: str, choose either l2 or D to measure distance
        :param regress_bs: int, regression batch size when 0 do just sampling.
        :return: the best noise and codes
        """
        if metric == 'L2':
            f = lambda x, y: ((x - y)**2).view(n_trials, -1).sum(1)
        elif metric == 'classifier':
            f = lambda x, y: -self.classifier(x, y).view(-1) + (
                (x - y)**2).view(n_trials, -1).sum(1) / 10
        else:
            assert metric == 'D'
            # turned max into min using minus.
            f = lambda x, y: -self.D(x, y).view(-1)

        if regress_bs:
            z_var = Variable(0.1 *
                             torch.randn(n_trials, self.rand_z_dim).cuda(),
                             requires_grad=True)
            c_var = Variable(0.1 * torch.randn(n_trials, self.c_dim).cuda(),
                             requires_grad=True)
            # c_var = Variable(self.Q.forward_soft(self.FE(obs.repeat(n_trials, 1, 1, 1))).data, requires_grad=True)
            optimizer = optim.Adam([c_var, z_var], lr=1e-2)
            n_iters = 1000
            for i in range(n_iters):
                optimizer.zero_grad()
                if self.planner == self.astar_plan:
                    c = F.tanh(c_var.repeat(regress_bs, 1))
                else:
                    c = c_var.repeat(regress_bs, 1)
                _z = z_var.repeat(regress_bs, 1)

                c_next = self.T(c)
                o, o_next = self.G(_z, c, c_next)

                if use_second:
                    out = o_next
                else:
                    out = o

                dist = f(obs.repeat(n_trials * regress_bs, 1, 1, 1),
                         out).sum(0) / regress_bs
                if i % 100 == 0:
                    print("\t Closest code (%d/%d): %.3f" % (i, n_iters, dist))
                dist.backward()
                optimizer.step()

            _z = z_var.detach()
            if self.planner == self.astar_plan:
                c = F.tanh(c_var.detach())
            else:
                c = c_var.detach()
        else:
            _z = Variable(torch.randn(n_trials, self.rand_z_dim)).cuda()
            c = self.Q.forward_soft(self.FE(obs)).repeat(n_trials, 1)

        # Select best c and c_next from different initializations.
        if self.planner == self.astar_plan:
            c_next = torch.clamp(self.T(c), -1 + 1e-3, 1 - 1e-3)
        else:
            c_next = self.T(c)
        o, o_next = self.G(_z, c, c_next)
        if use_second:
            out = o_next
        else:
            out = o

        dist = f(obs.repeat(n_trials, 1, 1, 1), out)
        min_dist, min_idx = dist.min(0)
        if verbose:
            # import ipdb; ipdb.set_trace()
            print("\t best_c: %s" % print_array(c[min_idx.item()].data))
            print("\t best_c_next: %s" %
                  print_array(c_next[min_idx.item()].data))
            print('\t %s measure: %.3f' % (metric, min_dist))
        return (_z[min_idx].detach(), c[min_idx].detach(),
                c_next[min_idx].detach(), out[min_idx].detach())