Example #1
def main():
    parser = argparse.ArgumentParser(description='Simple RPC code generator.')
    parser.add_argument("-n", "--name", help="name of the RPC server code",
                        type=str)
    parser.add_argument('filepath', type=str, nargs='*',
                        help='path to C++ header files that are scanned for prototypes')
    parser.add_argument('--functions', type=str,
                        help='comma separated list of functions to be wrapped')
    parser.add_argument('--include-dir', action='store_true')
    args = parser.parse_args()
    if args.include_dir:
        print os.path.join(os.path.dirname(__file__), 'include')
        return
    prototypes = defaultdict(list)
    for filename in args.filepath:
        original_filename = os.path.basename(filename)
        prototypes[original_filename] += parse_prototypes(filename, args.functions)

    special_list = [
        ('set_debug_level', 'void', [('debug_level', 'const int &')], '%(srpc)sdebug_level = debug_level;'),
        ('get_counter', 'uint64_t', [('', 'void')], '%(srpc)sreturn_value = %(srpc)scounter;'),
        ]

    for original_filename, prototype_list in prototypes.iteritems():
        source_info = dict(FILENAME=os.path.basename(original_filename).upper().replace('.','_'),
                           original_filename=os.path.basename(original_filename),
                           namespace = os.path.splitext(os.path.basename(original_filename))[0],
                           )
        server_name = source_info['namespace']
        source_info['NAMESPACE'] = source_info['namespace'].upper()

        for prototype in special_list:
            interface_dict = make_interface_source(server_name, 'simple_rpc', prototype)
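            # collect() here presumably accumulates the generated interface
            # fragments into source_info for the templates used below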
            collect(source_info, interface_dict)

        for prototype in prototype_list:
            interface_dict = make_interface_source(server_name, 'simple_rpc::' + source_info['namespace'], prototype)
            del interface_dict['special_prototype']
            collect(source_info, interface_dict)

        client_filename = os.path.splitext(original_filename)[0] + '-rpc.hpp'
        print 'creating file', client_filename
        f = open(client_filename, 'w')
        f.write(templates.client_header % (source_info))
        f.close()

        client_filename = os.path.splitext(original_filename)[0] + '-rpc.cpp'
        print 'creating file', client_filename
        f = open(client_filename, 'w')
        f.write(templates.client_source % (source_info))
        f.close()

        server_filename = os.path.splitext(original_filename)[0] + '-rpc-server.cpp'
        print 'creating file', server_filename
        f = open(server_filename, 'w')
        f.write(templates.server_source % (source_info))
        f.close()
Example #2
    def sample(self, batchSize, K=None):
        x = torch.zeros([batchSize] + self.nvars)
        for no in range(len(self.priorList)):
            x_ = self.priorList[no].sample(batchSize)
            # collect() presumably scatters each prior's factored-out sample
            # back into x at the stored (I, J) index lists
            x = collect(self.factorOutIList[no], self.factorOutJList[no], x,
                        x_)
        return x
Example #3
def novel_junctions_from_unannotated_exons(dbs, confs):
    """List novel junctions from unannotated exons."""
    description = [
        ('start chr', 'string'),
        ('end chr', 'string'),
        ('start', 'number'),
        ('end', 'number'),
        ('# Reads', 'number'),
        ('Replicate Id', 'string'),
        ('Lane Id', 'string'),
    ]
    chart = {}
    chart['table_description'] = description

    def strategy(conf, row):
        """Insert replicateid"""
        return (row[:-1] + (conf['replicateid'], row[-1]))

    # collect() appears to run the query method over each configuration and
    # post-process every returned row with `strategy`
    stats = collect(dbs, confs['configurations'],
                    _top_novel_junctions_from_unannotated_exons, strategy)

    if stats:
        stats = sorted(stats, key=lambda row: row[4])
        stats.reverse()
        chart['table_data'] = stats[:20]
    else:
        chart['table_data'] = [[None] * len(chart['table_description'])]
    return chart
Example #4
def novel_junctions_from_unannotated_exons(dbs, confs):
    """List novel junctions from unannotated exons."""
    description = [('start chr', 'string'),
                   ('end chr', 'string'),
                   ('start', 'number'),
                   ('end', 'number'),
                   ('# Reads', 'number'),
                   ('Replicate Id', 'string'),
                   ('Lane Id', 'string'),
                   ]
    chart = {}
    chart['table_description'] = description

    def strategy(conf, row):
        """Insert replicateid"""
        return (row[:-1] + (conf['replicateid'], row[-1]))

    stats = collect(dbs,
                    confs['configurations'],
                    _top_novel_junctions_from_unannotated_exons,
                    strategy)

    if stats:
        stats = sorted(stats, key=lambda row: row[4])
        stats.reverse()
        chart['table_data'] = stats[:20]
    else:
        chart['table_data'] = [[None] * len(chart['table_description'])]
    return chart
Example #5
def collect(request):
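    # utils.collect() below is assumed to run the data-collection job and
    # return a boolean success flag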

    success = utils.collect()

    end = datetime.datetime.now()
    if success:
        html = "<html><body>Collection Complete at %s</body></html>" % (end)
    else:
        html = "<html><body>Collection Failed</body></html>"
    print("request received")

    return HttpResponse(html)
Example #6
def collect(request):

    success = utils.collect()

    end = datetime.datetime.now()
    if success:
        html = "<html><body>Collection Complete at %s</body></html>" % (end)
    else:
        html = "<html><body>Collection Failed</body></html>"
    print("request received")

    return HttpResponse(html)
Example #7
    def inverse(self, z):
        batch_size = z.shape[0]
        inv_ldj = z.new_zeros(batch_size)
        for layer, indexI, indexJ in reversed(
                list(zip(self.layers, self.indexI, self.indexJ))):
            z, z_ = utils.dispatch(indexI, indexJ, z)
            z_ = utils.stackRGblock(z_)

            z_, log_prob = layer.inverse(z_)
            inv_ldj = inv_ldj + log_prob.view(batch_size, -1).sum(dim=1)

            z_ = utils.unstackRGblock(z_, batch_size)
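            # collect() presumably scatters the transformed blocks back into z
            # at the (indexI, indexJ) positions selected by dispatch() above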
            z = utils.collect(indexI, indexJ, z, z_)

        return z, inv_ldj
Example #8
    def get_action(self, gamestate: G) -> A:
        """
        Send this gamestate to redis, wait <timeout> seconds and return
        whatever action ends up in redis
        """
        gamestate_id = random.getrandbits(64)
        utils.write_chan(
            self.r,
            "commands",
            ctypes.NewGamestate(
                command_type="new-gamestate",
                game_type=self.game_type,
                gamestate_id=gamestate_id,
                gamestate=gamestate.__dict__,
            ),
        )

        def event_stream(interval):
            while True:
                yield utils.read_chan(self.pubsub, interval)

        msg_stream = (
            msg
            for msg in event_stream(interval=0.1)
            if msg is None or msg["gamestate_id"] == gamestate_id
        )
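        # utils.collect() is assumed to gather messages from the stream for at
        # least min_time seconds (and at most max_time), returning None if
        # nothing arrives in time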
        messages = utils.collect(
            msg_stream, min_time=self.timeout, max_time=max(self.timeout, 5)
        )
        if messages is None:
            utils.print_err(
                f"Failed to receive message from engineservers "
                f"after timeout of {max(self.timeout, 5)}",
                exit=True,
            )
        actions = [
            a["best_move"]
            for a in messages
            if a["gamestate_id"] == gamestate_id
        ]
        print([a["gamestate_id"] for a in messages])
        assert (
            len(actions) > 0
        ), f"No engineserver responded with actions for gamestate id {gamestate_id}"
        return actions[-1]
Example #9
    def forward(self, x):
        # dim(x) = (B, C, H, W)
        batch_size = x.shape[0]
        ldj = x.new_zeros(batch_size)
        for layer, indexI, indexJ in zip(self.layers, self.indexI,
                                         self.indexJ):
            x, x_ = utils.dispatch(indexI, indexJ, x)
            # dim(x_) = (B, C, num_RG_blocks, K*K)
            x_ = utils.stackRGblock(x_)
            # dim(x_) = (B*num_RG_blocks, C, K, K)

            x_, log_prob = layer.forward(x_)
            ldj = ldj + log_prob.view(batch_size, -1).sum(dim=1)

            x_ = utils.unstackRGblock(x_, batch_size)
            x = utils.collect(indexI, indexJ, x, x_)

        return x, ldj
Example #10
    def read_input(self):
        """Reads the input data from source given in arguments.

        Returns:
            A list of (hash, count, size) tuples.
        """

        print("+++ Reading data from %s" % self.args.input, file=sys.stderr)

        def convert(tokens):
            """A helper that converts tokens on a line to integers."""
            return (int(tokens[0], 16), int(tokens[1]), int(tokens[2]))

        with fileinput.input(self.args.input) as lines:
            # Tokenize the lines on two spaces
            tokenised = map(lambda line: line.split("  "), lines)

            # Parse the tokenized data
            data = map(convert, tokenised)

            # Collect the data to a list since the file is closed when we leave
            # this with block
            return utils.collect(data)
Example #11
def start(message):
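    # utils.collect() is assumed to return either an {'error': ...} mapping or
    # a dict of names to percentage values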
    response = utils.collect()
    if response.get('error'):
        return bot.send_message(chat_id=message.chat.id, text='Error: {}'.format(response['error']))
    text = '\n'.join([f'{i}: {j}%' for i, j in response.items()])
    bot.send_message(chat_id=message.chat.id, text=text)
Example #12
def test_hierarchyPrior():

    class UniTestPrior(source.Source):
        def __init__(self, nvars, element, name="UniTestPrior"):
            super(UniTestPrior, self).__init__(nvars, 1.0, name)
            self.element = torch.nn.Parameter(torch.tensor(element), requires_grad=False)

        def sample(self, batchSize):
            return torch.ones([batchSize] + self.nvars).to(self.element).float() * self.element

        def _energy(self, z):
            return (torch.tensor([2])**self.element * np.prod(z.shape[2:]))

    length = 32
    channel = 3
    decimal = flow.ScalingNshifting(256, -128)
    p1 = source.DiscreteLogistic([channel, 256, 3], decimal, rounding=utils.roundingWidentityGradient)
    p2 = source.DiscreteLogistic([channel, 64, 3], decimal, rounding=utils.roundingWidentityGradient)
    p3 = source.DiscreteLogistic([channel, 16, 3], decimal, rounding=utils.roundingWidentityGradient)
    p4 = source.DiscreteLogistic([channel, 4, 3], decimal, rounding=utils.roundingWidentityGradient)
    p5 = source.MixtureDiscreteLogistic([channel, 1, 4], 5, decimal, rounding=utils.roundingWidentityGradient)

    P = source.HierarchyPrior(channel, length, [p1, p2, p3, p4, p5], repeat=1)

    x = P.sample(100)
    logp = P.logProbability(x)

    import math
    zparts = []
    for no in range(int(math.log(length, 2))):
        _, parts = utils.dispatch(P.factorOutIList[no], P.factorOutJList[no], x)
        zparts.append(parts)

    # Reconstruct x by collecting each factored-out part back at its indices;
    # collect() presumably undoes the corresponding dispatch() calls above
    rcnX = torch.zeros_like(x)
    for no in range(int(math.log(length, 2))):
        part = zparts[no]
        rcnX = utils.collect(P.factorOutIList[no], P.factorOutJList[no], rcnX, part)

    assert_allclose(x.detach(), rcnX.detach())

    length = 8

    p1 = UniTestPrior([channel, 16, 3], 1)
    p2 = UniTestPrior([channel, 4, 3], 2)
    p3 = UniTestPrior([channel, 1, 4], 3)

    Pp = source.HierarchyPrior(channel, length, [p1, p2, p3], repeat=2)

    x = Pp.sample(1)
    logp = Pp.logProbability(x)

    target = np.array([[3, 1, 2, 1, 3, 1, 2, 1],
                       [1, 1, 1, 1, 1, 1, 1, 1],
                       [2, 1, 2, 1, 2, 1, 2, 1],
                       [1, 1, 1, 1, 1, 1, 1, 1],
                       [3, 1, 2, 1, 3, 1, 2, 1],
                       [1, 1, 1, 1, 1, 1, 1, 1],
                       [2, 1, 2, 1, 2, 1, 2, 1],
                       [1, 1, 1, 1, 1, 1, 1, 1]])
    assert_allclose(x[0, 0].detach().numpy(), target)
    assert logp == -(16 * 3 * 2**1 + 4 * 3 * 2**2 + 1 * 4 * 2**3)

    p1 = UniTestPrior([channel, 16, 3], 1)
    p2 = UniTestPrior([channel, 4, 3], 2)
    p3 = UniTestPrior([channel, 1, 4], 3)

    Ppodd = source.HierarchyPrior(channel, length, [p1, p2, p3], repeat=1)

    x = Ppodd.sample(1)
    logp = Ppodd.logProbability(x)

    target = np.array([[3, 1, 2, 1, 3, 1, 2, 1],
                       [1, 1, 1, 1, 1, 1, 1, 1],
                       [2, 1, 2, 1, 2, 1, 2, 1],
                       [1, 1, 1, 1, 1, 1, 1, 1],
                       [3, 1, 2, 1, 3, 1, 2, 1],
                       [1, 1, 1, 1, 1, 1, 1, 1],
                       [2, 1, 2, 1, 2, 1, 2, 1],
                       [1, 1, 1, 1, 1, 1, 1, 1]])
    assert_allclose(x[0, 0].detach().numpy(), target)
    assert logp == -(16 * 3 * 2**1 + 4 * 3 * 2**2 + 1 * 4 * 2**3)
Example #13
def main():
    parser = argparse.ArgumentParser(description='Simple RPC code generator.')
    parser.add_argument("-n",
                        "--name",
                        help="name of the RPC server code",
                        type=str)
    parser.add_argument(
        'filepath',
        type=str,
        nargs='*',
        help='path to C++ header files that are scanned for prototypes')
    parser.add_argument('--functions',
                        type=str,
                        help='comma separated list of functions to be wrapped')
    parser.add_argument('--include-dir', action='store_true')
    args = parser.parse_args()
    if args.include_dir:
        print os.path.join(os.path.dirname(__file__), 'include')
        return
    prototypes = defaultdict(list)
    for filename in args.filepath:
        original_filename = os.path.basename(filename)
        prototypes[original_filename] += parse_prototypes(
            filename, args.functions)

    special_list = [
        ('set_debug_level', 'void', [('debug_level', 'const int &')],
         '%(srpc)sdebug_level = debug_level;'),
        ('get_counter', 'uint64_t', [('', 'void')],
         '%(srpc)sreturn_value = %(srpc)scounter;'),
    ]

    for original_filename, prototype_list in prototypes.iteritems():
        source_info = dict(
            FILENAME=os.path.basename(original_filename).upper().replace(
                '.', '_'),
            original_filename=os.path.basename(original_filename),
            namespace=os.path.splitext(os.path.basename(original_filename))[0],
        )
        server_name = source_info['namespace']
        source_info['NAMESPACE'] = source_info['namespace'].upper()

        for prototype in special_list:
            interface_dict = make_interface_source(server_name, 'simple_rpc',
                                                   prototype)
            collect(source_info, interface_dict)

        for prototype in prototype_list:
            interface_dict = make_interface_source(
                server_name, 'simple_rpc::' + source_info['namespace'],
                prototype)
            del interface_dict['special_prototype']
            collect(source_info, interface_dict)

        client_filename = os.path.splitext(original_filename)[0] + '-rpc.hpp'
        print 'creating file', client_filename
        f = open(client_filename, 'w')
        f.write(templates.client_header % (source_info))
        f.close()

        client_filename = os.path.splitext(original_filename)[0] + '-rpc.cpp'
        print 'creating file', client_filename
        f = open(client_filename, 'w')
        f.write(templates.client_source % (source_info))
        f.close()

        server_filename = os.path.splitext(
            original_filename)[0] + '-rpc-server.cpp'
        print 'creating file', server_filename
        f = open(server_filename, 'w')
        f.write(templates.server_source % (source_info))
        f.close()