Example #1
def open_folder(folder_path):
    '''
    Opens the folder

    Args:
        folder_path: Path to the C++ folder

    Returns:
        None
    '''
    c_graph = graph.Graph()

    traverse.traverse(c_graph, folder_path)

    if c_graph.is_empty():
        print("Graph is empty!")
        sys.exit(-1)

    print(c_graph)

    json = construct_json(c_graph.to_json())

    with web.Server("temp") as server:
        server.write(json)
        server.run(True, 8093)

    print('Shutting down')
Example #2
def getDot(skipCompound):
    buffer = StringIO.StringIO()
    dotHeader(buffer)

    traverse.traverse(lambda nd: node(buffer, nd),
                      lambda sg: subGraphHeader(buffer, sg),
                      lambda sg: subGraphFooter(buffer, sg), skipCompound,
                      lambda nd: compoundHeader(buffer, nd),
                      lambda nd: compoundFooter(buffer, nd))

    dotFooter(buffer)
    dot_str = buffer.getvalue()
    buffer.close()
    return dot_str
Example #3
File: dot.py Project: mathsaey/DLC
def getDot(graph, buffer = None):
	if buffer is None:
		buffer = StringIO.StringIO()
	dotHeader(buffer)

	traverse(
		graph,
		lambda x : writeNode(buffer, x),
		lambda x : subGraphHeader(buffer, x),
		lambda x : subGraphFooter(buffer, x),
		lambda x : compoundHeader(buffer, x),
		lambda x : compoundFooter(buffer, x)
		)

	dotFooter(buffer)
	dot_str = buffer.getvalue()
	buffer.close()
	return dot_str
Example #4
def getDot(skipCompound):
	buffer = StringIO.StringIO()
	dotHeader(buffer)

	traverse.traverse(
		lambda nd: node(buffer, nd),
		lambda sg: subGraphHeader(buffer, sg),
		lambda sg: subGraphFooter(buffer, sg),
		skipCompound,
		lambda nd: compoundHeader(buffer, nd),
		lambda nd: compoundFooter(buffer, nd)
	)

	dotFooter(buffer)
	dot_str = buffer.getvalue()
	buffer.close()
	return dot_str
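All three getDot variants above delegate the actual DOT syntax to small helpers (dotHeader, dotFooter, node/writeNode, subGraphHeader, subGraphFooter, compoundHeader, compoundFooter) that are not shown. A minimal sketch of the two simplest helpers, assuming plain Graphviz DOT output (the real project-specific versions may differ):

def dotHeader(buffer):
    # Open a directed graph in Graphviz DOT syntax.
    buffer.write("digraph G {\n")

def dotFooter(buffer):
    # Close the graph opened by dotHeader.
    buffer.write("}\n")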
Example #5
def extract(py_modules, private_map, do_not_descend_map):
  """Extract docs from tf namespace and write them to disk."""
  # Traverse the first module.
  visitor = doc_generator_visitor.DocGeneratorVisitor(py_modules[0][0])
  api_visitor = public_api.PublicAPIVisitor(visitor)
  api_visitor.set_root_name(py_modules[0][0])
  add_dict_to_dict(private_map, api_visitor.private_map)
  add_dict_to_dict(do_not_descend_map, api_visitor.do_not_descend_map)

  traverse.traverse(py_modules[0][1], api_visitor)

  # Traverse all py_modules after the first:
  for module_name, module in py_modules[1:]:
    visitor.set_root_name(module_name)
    api_visitor.set_root_name(module_name)
    traverse.traverse(module, api_visitor)

  return visitor
Example #6
def extract(py_modules, private_map, do_not_descend_map):
    """Extract docs from tf namespace and write them to disk."""
    # Traverse the first module.
    visitor = doc_generator_visitor.DocGeneratorVisitor(py_modules[0][0])
    api_visitor = public_api.PublicAPIVisitor(visitor)
    api_visitor.set_root_name(py_modules[0][0])
    add_dict_to_dict(private_map, api_visitor.private_map)
    add_dict_to_dict(do_not_descend_map, api_visitor.do_not_descend_map)

    traverse.traverse(py_modules[0][1], api_visitor)

    # Traverse all py_modules after the first:
    for module_name, module in py_modules[1:]:
        visitor.set_root_name(module_name)
        api_visitor.set_root_name(module_name)
        traverse.traverse(module, api_visitor)

    return visitor
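Both extract variants share the same signature: py_modules is a list of (root_name, module) pairs, and the two maps feed the PublicAPIVisitor. A hedged usage sketch, assuming the TensorFlow docs-generator modules above are importable and using tensorflow itself as a hypothetical target package:

import tensorflow as tf  # hypothetical target package

# Document the `tf` namespace with no extra private / do-not-descend entries.
visitor = extract(py_modules=[('tf', tf)], private_map={}, do_not_descend_map={})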
Example #7
    def eval_value_network(self, mode, t, steps, traverse_player_idx):
        """
    Evaluate the (total) exploitability of the value networks, as in Brown et. al.
    """
        print("\nEvaluating value network for player {} (t={})".format(
            traverse_player_idx, t))
        self.value_networks[0]._network = self.value_networks[0]._network.to(
            self.opt.TRAVERSE_DEVICE)
        self.value_networks[1]._network = self.value_networks[1]._network.to(
            self.opt.TRAVERSE_DEVICE)
        self.value_networks[0]._device = self.opt.TRAVERSE_DEVICE
        self.value_networks[1]._device = self.opt.TRAVERSE_DEVICE

        manager = mp.Manager()
        save_lock = manager.Lock()

        t0 = time.time()
        exploits = []

        for k in range(self.opt.NUM_TRAVERSALS_EVAL):
            sb_player_idx = k % 2
            round_state = create_new_round(sb_player_idx)
            precomputed_ev = make_precomputed_ev(round_state)
            info = traverse(round_state, make_actions, make_infoset,
                            traverse_player_idx, sb_player_idx,
                            self.value_networks, None, None, t, precomputed_ev)
            exploits.append(info.exploitability.sum())

        elapsed = time.time() - t0
        print("Time for {} eval traversals {} sec".format(
            self.opt.NUM_TRAVERSALS_EVAL, elapsed))

        mbb_per_game = 1e3 * torch.Tensor(exploits) / (
            2.0 * Constants.SMALL_BLIND_AMOUNT)
        mean_mbb_per_game = mbb_per_game.mean()
        stdev_mbb_per_game = mbb_per_game.std()

        writer = self.writers[mode]

        if mode == "train":
            writer.add_scalar("train_exploit_mbbg_mean/{}".format(t),
                              mean_mbb_per_game, steps)
            writer.add_scalar("train_exploit_mbbg_stdev/{}".format(t),
                              stdev_mbb_per_game, steps)

        # In eval mode, we log the mbb/g exploitability after each CFR iteration.
        else:
            writer.add_scalar("cfr_exploit_mbbg_mean", mean_mbb_per_game, t)
            writer.add_scalar("cfr_exploit_mbbg_stdev", stdev_mbb_per_game, t)

        writer.close()
        print(
            "===> [EVAL] Exploitability | mean={} mbb/g | stdev={} | (cfr_iter={})"
            .format(mean_mbb_per_game, stdev_mbb_per_game, t))
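For reference, the mbb/g conversion above divides the summed chip exploitability by the big blind (twice SMALL_BLIND_AMOUNT) and scales by 1000; assuming SMALL_BLIND_AMOUNT were 1, an exploitability of 0.05 chips would be reported as 1e3 * 0.05 / 2 = 25 mbb/g.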
Example #8
def checkio(plat):
    """Compute Express Delivery Route."""

    pprint(plat)
    print()

    start, end, blist, tree = make_tree(plat)
    print(start, end, blist, pformat(tree))
    print()

    result = traverse(start, end, blist, tree)

    return result
Example #9
    def eval_strategy_network(self, steps):
        print("\nEvaluating strategy network after {} steps".format(steps))
        self.strategy_network._network = self.strategy_network._network.cpu()
        self.strategy_network._device = torch.device("cpu")

        for p in self.strategy_network._network.parameters():
            assert (p.device == torch.device("cpu"))

        manager = mp.Manager()
        save_lock = manager.Lock()

        t0 = time.time()
        exploits = []

        strategies = {0: self.strategy_network, 1: self.strategy_network}

        for k in range(self.opt.NUM_TRAVERSALS_EVAL):
            sb_player_idx = k % 2
            round_state = create_new_round(sb_player_idx)
            precomputed_ev = make_precomputed_ev(round_state)
            info = traverse(round_state, make_actions, make_infoset, 0,
                            sb_player_idx, strategies, None, None, 0,
                            precomputed_ev)
            exploits.append(info.exploitability.sum())

        elapsed = time.time() - t0
        print("Time for {} eval traversals {} sec".format(
            self.opt.NUM_TRAVERSALS_EVAL, elapsed))

        mbb_per_game = 1e3 * torch.Tensor(exploits) / (
            2.0 * Constants.SMALL_BLIND_AMOUNT)
        mean_mbb_per_game = mbb_per_game.mean()
        stdev_mbb_per_game = mbb_per_game.std()

        writer = self.writers["train"]
        writer.add_scalar("strt_exploit_mbbg_mean", mean_mbb_per_game, steps)
        writer.add_scalar("strt_exploit_mbbg_stdev", stdev_mbb_per_game, steps)
        writer.close()
        print(
            "===> [EVAL] [STRATEGY] Exploitability | mean={} mbb/g | stdev={} | (steps={})"
            .format(mean_mbb_per_game, stdev_mbb_per_game, steps))
Example #10
def traverse_worker(worker_id, traverse_player_idx, strategies, save_lock, opt,
                    t, eval_mode, info_queue):
    """
  A worker that traverses the game tree K times, saving things to memory buffers. Each worker
  maintains its own memory buffers and saves them after finishing.

  If eval_mode is set to True, no memory buffers are created.
  """
    # assert(strategies[0]._network.device == torch.device("cpu"))
    # assert(strategies[1]._network.device == torch.device("cpu"))

    advt_mem = MemoryBuffer(
        Constants.INFO_SET_SIZE,
        Constants.NUM_ACTIONS,
        max_size=opt.SINGLE_PROC_MEM_BUFFER_MAX_SIZE,
        autosave_params=(opt.MEMORY_FOLDER,
                         opt.ADVT_BUFFER_FMT.format(traverse_player_idx)),
        save_lock=save_lock) if eval_mode == False else None

    strt_mem = MemoryBuffer(
        Constants.INFO_SET_SIZE,
        Constants.NUM_ACTIONS,
        max_size=opt.SINGLE_PROC_MEM_BUFFER_MAX_SIZE,
        autosave_params=(opt.MEMORY_FOLDER, opt.STRT_BUFFER_FMT),
        save_lock=save_lock) if eval_mode == False else None

    if eval_mode:
        num_traversals_per_worker = int(opt.NUM_TRAVERSALS_EVAL /
                                        opt.NUM_TRAVERSE_WORKERS)
    else:
        num_traversals_per_worker = int(opt.NUM_TRAVERSALS_PER_ITER /
                                        opt.NUM_TRAVERSE_WORKERS)

    t0 = time.time()
    for k in range(num_traversals_per_worker):
        ctr = [0]

        # Generate a random initialization, alternating the SB player each time.
        sb_player_idx = k % 2
        round_state = create_new_round(sb_player_idx)

        precomputed_ev = make_precomputed_ev(round_state)
        info = traverse(round_state,
                        make_actions,
                        make_infoset,
                        traverse_player_idx,
                        sb_player_idx,
                        strategies,
                        advt_mem,
                        strt_mem,
                        t,
                        precomputed_ev,
                        recursion_ctr=ctr)

        if (k % opt.TRAVERSE_DEBUG_PRINT_HZ) == 0 and eval_mode == False:
            elapsed = time.time() - t0
            print(
                "[WORKER #{}] done with {}/{} traversals | recursion depth={} | advt={} strt={} | elapsed={} sec"
                .format(worker_id, k, num_traversals_per_worker, ctr[0],
                        advt_mem.size(), strt_mem.size(), elapsed))

    # Save all the buffers one last time.
    print("[WORKER #{}] Final autosave ...".format(worker_id))
    if advt_mem is not None: advt_mem.autosave()
    if strt_mem is not None: strt_mem.autosave()
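A minimal sketch of how such workers might be launched, assuming strategies, opt and the CFR iteration t are constructed elsewhere; this illustrates the call signature above rather than the project's actual driver code:

import multiprocessing as mp

def run_workers(strategies, opt, t, traverse_player_idx):
    manager = mp.Manager()
    save_lock = manager.Lock()
    info_queue = manager.Queue()
    # One process per worker; eval_mode=False so memory buffers are created.
    workers = [
        mp.Process(target=traverse_worker,
                   args=(worker_id, traverse_player_idx, strategies,
                         save_lock, opt, t, False, info_queue))
        for worker_id in range(opt.NUM_TRAVERSE_WORKERS)
    ]
    for w in workers:
        w.start()
    for w in workers:
        w.join()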
Example #11
def main():
	src_dir = sys.argv[1]
	dst_dir = sys.argv[2]
	traverse(src_dir, dst_dir, truncate, '.wav')
Example #12
import wave
import os
import sys
from traverse import traverse

def pcm2wav(srcfn, dstfn):
    dirname = os.path.dirname(dstfn)
    if not os.path.exists(dirname):
        os.makedirs(dirname)

    with open(srcfn,'rb') as pcmf:
        pcmdata = pcmf.read()   
    nframes = os.path.getsize(srcfn) / 2
    
    wavf = wave.open(dstfn.replace('.pcm', '.wav'),'wb')    
    wavf.setparams((1, 2, 16000, nframes, 'NONE', 'not compressed'))
    wavf.writeframes(pcmdata)
    wavf.close()


if __name__ == '__main__':
    original = sys.argv[1]
    traverse(original, 'converted', pcm2wav, target='.pcm')
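Most of the remaining examples import a small traverse helper from a local traverse module that is not reproduced here. A minimal sketch of what such a helper might look like, assuming it simply walks src_dir and calls fn(src_path, dst_path) for every file whose name ends with target, mirroring the directory layout under dst_dir (the real module may behave differently):

import os

def traverse(src_dir, dst_dir, fn, target=None):
    # Walk the source tree and hand each matching file to the callback,
    # together with a destination path that mirrors the source layout.
    for dirpath, _, filenames in os.walk(src_dir):
        for name in filenames:
            if target is not None and not name.endswith(target):
                continue
            src_path = os.path.join(dirpath, name)
            rel_path = os.path.relpath(src_path, src_dir)
            fn(src_path, os.path.join(dst_dir, rel_path))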
Example #13
def main():
	root_dir = sys.argv[1]
	traverse(root_dir, '', adapter, '.txt')
Example #14
    def _assign_group(self, client, pkg_name, data):
        try:
            group = client.group_entity_get(data.get('name'))
            if pkg_name not in group['packages']:
                group['packages'] += [pkg_name]
                client.group_entity_put(group)
        except CkanApiError, e:
            group = {'name': data.get('name'),
                     'title': data.get('title', data.get('name')),
                     'description': data.get('description', ''),
                     'packages': [pkg_name]
                     }
            client.group_register_post(group)

    def __call__(self, client, pkg):
        cats = pkg.get('extras', {}).get('categories', [])
        if not isinstance(cats, (list, tuple)):
            cats = [cats]
        for cat in cats:
            data = self.normalizer.get(cat, source_hint=pkg.get('ckan_url'))
            if data.get('name'):
                self._assign_group(client, pkg.get('name'), data)

if __name__ == '__main__':
    if len(sys.argv) == 2:
        traverse(categories(),query=sys.argv[1])
    else:
        traverse(categories())


Example #15
def eval_wav_duration(wav_path):  # enclosing definition inferred from the eval_dir call below
	try:
		wr = wave.open(wav_path, 'rb')
		# nchannels, sampwidth(bytes), framerate, nframes, comptype, compname
		header = wr.getparams()
		# body_size = nframes * sampwidth
		# or 
		# os.stat(os.path.join(dirpath, filename)).st_size - HEADER_SIZE
		wr.close()
		return estimate(header[0], header[1]*8.0, header[2], header[3]*header[1])
	except Exception, e:
		print("Unable to read %s" % wav_path)
		return 0

def eval_dir(fn, files_list):
	pool = ThreadPool(WORKER_NUM)
	results = pool.map(fn, files_list)
	# close the pool and wait for the work to finish
	pool.close()
	pool.join()
	return sum(results)

files_list = []
def traverse_adaptor(src_dir, dst_dir):
	files_list.append(src_dir)

if __name__ == '__main__':
	src_dir = sys.argv[1]
	# files_list = [ os.path.join(src_dir, x) for x in os.listdir(src_dir) ]
	# print eval_dir(eval_wav_duration, files_list)
	traverse(src_dir, 'holder', traverse_adaptor)
	print eval_dir(eval_wav_duration, files_list)
Example #16
import wave
import os
import sys
from traverse import traverse


def pcm2wav(srcfn, dstfn):
    dirname = os.path.dirname(dstfn)
    if not os.path.exists(dirname):
        os.makedirs(dirname)

    with open(srcfn, 'rb') as pcmf:
        pcmdata = pcmf.read()
    nframes = os.path.getsize(srcfn) / 2

    wavf = wave.open(dstfn.replace('.pcm', '.wav'), 'wb')
    wavf.setparams((1, 2, 16000, nframes, 'NONE', 'not compressed'))
    wavf.writeframes(pcmdata)
    wavf.close()


if __name__ == '__main__':
    original = sys.argv[1]
    traverse(original, 'converted', pcm2wav, target='.pcm')
Example #17
def main():
	root_dir = sys.argv[1]
	traverse(root_dir, '', encode, '.txt')
Example #18
def process_pack(root_dir,server_pack):
    server_pack = dict((x['name'],x) for x in server_pack)
    update_pack = {}

    def remove_unused(el,subel,kind,designator,locale,name,canonic_name,text,filename,priority,context):
        if locale is None:
            if canonic_name in server_pack:
                update_pack[canonic_name] = server_pack[canonic_name]
        return False

    def check_rec(el,subel,kind,designator,locale,name,canonic_name,text,filename,priority,context):
        modified = False
        if locale is not None:
            localized = update_pack.get(canonic_name,{}).get('locales',{})
            text = localized.get(locale)
            if text is not None:

                if check_locale(locale) and subel.text != text:
                    subel.text = text
                    print canonic_name,"<--",locale,"<--",text.encode('utf8')
                    modified = True
                del localized[locale]
                if len(localized)==0:
                    del update_pack[canonic_name]

        return modified

    traverse(root_dir,remove_unused)
    traverse(root_dir,check_rec)

    to_write = sorted(update_pack.values(),key=lambda x:(x['filename'],x['name']))
    to_write = itertools.groupby(to_write,lambda x:x['filename'])
    for filename,resources in to_write:
        filename = filename.lstrip(os.path.sep)
        filename = os.path.join(root_dir,filename)
        for res in resources:
            for locale, text in res.get('locales').iteritems():
                if not check_locale(locale):
                    continue
                localized_filename = filename.replace(os.path.sep+'values'+os.path.sep,os.path.sep+'values-%s' % locale+os.path.sep)
                # Create the file if needed
                if not os.path.isfile(localized_filename):
                    if not os.path.exists(os.path.dirname(localized_filename)):
                        os.makedirs(os.path.dirname(localized_filename))
                    dummy = open(localized_filename,'w')
                    dummy.write(BASE_XML)
                    dummy.close()
                # Now, open (or re-open) the localized file and parse it
                parser = etree.XMLParser(remove_blank_text=True)
                tree = etree.parse(localized_filename,parser)
                root = tree.getroot()
                assert(root.tag=='resources')
                # Find out if this is a regular resource, string-array or plurals
                name_parts = res['name'].split('::')
                parent = None
                new_el = None
                if len(name_parts) == 1:
                    new_el = etree.fromstring('<string name="%s"></string>' % name_parts[0])
                    new_el.text = text
                    print name_parts[0],"<==",locale,"<==",text.encode('utf8')
                    parent=root
                elif len(name_parts) == 3:
                    if name_parts[1] == 'A':
                        # Array
                        new_el = etree.fromstring('<item></item>')
                        new_el.text = text
                        # Find parent
                        array_name=name_parts[0]
                        parent = root.xpath("string-array[@name='%s']" % array_name)
                        if len(parent)==0:
                            # Not found - create a new array element
                            parent = etree.fromstring('<string-array name="%s"></string-array>' % array_name)
                            parent.append(new_el)
                            new_el = parent
                            parent = root
                        else:
                            parent = parent[0]
                    elif name_parts[1] == 'P':
                        # plurals
                        new_el = etree.fromstring('<item quantity="%s"></item>' % name_parts[2])
                        new_el.text = text
                        # Find parent
                        plural_name=name_parts[0]
                        parent = root.xpath("plurals[@name='%s']" % plural_name)
                        if len(parent)==0:
                            # Not found - create a new plurals element
                            parent = etree.fromstring('<plurals name="%s"></plurals>' % plural_name)
                            parent.append(new_el)
                            new_el = parent
                            parent = root
                        else:
                            parent = parent[0]
                # make sure we have a known resource
                if parent is None or new_el is None:
                    raise RuntimeError("Bad string type: %s" % res['name'])
                # Make the necessary change
                parent.append(new_el)
                # Bail out
                indent(root)
                out = open(localized_filename, 'w')
                out.write(etree.tostring(root,encoding='UTF-8'))
                out.close()
Example #19
def rename_duplicated(slots, dst_dir):
    for basename, locations in slots.items():
        if len(locations) > 1:  # duplicated names
            benchmark = os.stat(locations[0]).st_size
            for location in locations[1:]:
                if os.stat(location).st_size == benchmark:
                    record_fp.write("file " + location + " is the same as " +
                                    locations[0] + '\n')
                else:
                    parent_dir = os.path.basename(os.path.dirname(location))
                    dst_file = os.path.join(dst_dir,
                                            parent_dir + '_' + basename)
                    record_fp.write("rename " + location + " to " + dst_file +
                                    '\n')
                    shutil.copy(location, dst_file)
        else:
            dst_file = os.path.join(dst_dir, basename)
            shutil.copy(locations[0], dst_file)


if __name__ == '__main__':
    slots = {}
    src_dir = sys.argv[1]
    dst_dir = sys.argv[2]
    if not os.path.exists(dst_dir):
        os.makedirs(dst_dir)

    record_fp = open(RENAMES_LIST, 'a', 0)
    traverse(src_dir, "", slot_in, target='.wav')
    rename_duplicated(slots, dst_dir)
Example #20
def main():
    src_dir = sys.argv[1]
    dst_dir = sys.argv[2]
    traverse(src_dir, dst_dir, truncate, '.wav')
Example #21
def main():
	# dst_dir = sys.argv[2]
	traverse(src_dir, '', traverse_adaptor, target='.wav')
Example #22
                        new_path = list(path)

                        # print('new room:', player.current_room.id)
                        new_path.append(n)
                        # print('new_path:', new_path)
                        queue.append(new_path)

    return traversal_path, 'traaaaa'

    # return player.current_room.id, traversal_path


# Fill this out with directions to walk
traversal_path = ['n', 'n']
traversal_path = traverse(player)

# TRAVERSAL TEST
visited_rooms = set()
player.current_room = world.starting_room
visited_rooms.add(player.current_room)

for move in traversal_path:
    player.travel(move)
    visited_rooms.add(player.current_room)

if len(visited_rooms) == len(room_graph):
    print(
        f"TESTS PASSED: {len(traversal_path)} moves, {len(visited_rooms)} rooms visited"
    )
else:
Example #23
def main(target):
    traverse(target, '', collect, '.wav')
Example #24
    def _assign_group(self, client, pkg_name, data):
        try:
            group = client.group_entity_get(data.get('name'))
            if pkg_name not in group['packages']:
                group['packages'] += [pkg_name]
                client.group_entity_put(group)
        except CkanApiError, e:
            group = {
                'name': data.get('name'),
                'title': data.get('title', data.get('name')),
                'description': data.get('description', ''),
                'packages': [pkg_name]
            }
            client.group_register_post(group)

    def __call__(self, client, pkg):
        cats = pkg.get('extras', {}).get('categories', [])
        if not isinstance(cats, (list, tuple)):
            cats = [cats]
        for cat in cats:
            data = self.normalizer.get(cat, source_hint=pkg.get('ckan_url'))
            if data.get('name'):
                self._assign_group(client, pkg.get('name'), data)


if __name__ == '__main__':
    if len(sys.argv) == 2:
        traverse(categories(), query=sys.argv[1])
    else:
        traverse(categories())
Example #25
def slot_in(src_file, _):
	filename = os.path.basename(src_file)
	slots.setdefault(filename, []).append(src_file)

def rename_duplicated(slots, dst_dir):
	for basename, locations in slots.items():
		if len(locations) > 1:	# duplicated names
			benchmark =  os.stat(locations[0]).st_size
			for location in locations[1:]:
				if os.stat(location).st_size == benchmark:
					record_fp.write("file " + location + " is the same with " + dst_file + '\n')
				else:
					parent_dir = os.path.basename(os.path.dirname(location))
					dst_file = os.path.join(dst_dir, parent_dir + '_' + basename)
					record_fp.write("rename " + location + " to " + dst_file + '\n')
					shutil.copy(location, dst_file)
		else:
			dst_file = os.path.join(dst_dir, basename)
			shutil.copy(locations[0], dst_file)

if __name__ == '__main__':
	slots = {}
	src_dir = sys.argv[1]
	dst_dir = sys.argv[2]
	if not os.path.exists(dst_dir):
		os.makedirs(dst_dir)
		
	record_fp = open(RENAMES_LIST, 'a', 0)
	traverse(src_dir, "", slot_in, target='.wav')
	rename_duplicated(slots, dst_dir)
Example #26
def main(info_txt, root):
    traverse(root, '', correct_names, target='.jpg')
    with open(info_txt + '.bak', 'w') as f:
        for info in info_dict.values():
            f.write(json.dumps(info, ensure_ascii=False).encode('utf-8'))
            f.write(os.linesep)