Example #1
    def save(self):

        # Parser won't pickle.
        try:
            parser = self.parser
        except:
            # TODO: Sometimes there is an error here.
            import pdb
            pdb.set_trace()
        targets = self.arguments.targets
        _arg_map = self._arg_map
        _node_map = self._node_map
        del self.parser
        self.arguments.targets = None
        del self._arg_map
        del self._node_map
        if hasattr(self, '_pool'):
            del self._pool

        # Set the pickle recursion limit much higher.
        sys.setrecursionlimit(10000)

        with open('.use.db', 'w') as out:
            pickle.dump(self, out)

        # Reset recursion limit.
        sys.setrecursionlimit(1000)

        # Reset.
        self.parser = parser
        self.arguments.targets = targets
        self._arg_map = _arg_map
        self._node_map = _node_map
def init(question):
    global arg, line, row, house, house_list, start_line, start_row, history, failed_set, driver, num_map
    arg = question.replace("\n", "")
    line = int(arg.split("&")[1].split("=")[1])
    row = int(arg.split("&")[2].split("=")[1])
    pos = arg.split("&")[3].split("=")[1]
    house = [[0 for i in range(row)] for j in range(line)]
    house_list = []
    history = ""
    failed_set = set()
    sys.setrecursionlimit(1000 * line * row)
    num_map = {"num_-1": 0, "num_0": 0, "num_1": 0, "num_2": 0, "num_3": 0, "num_4": 0}

    # init each position
    for each in range(len(pos)):
        j = each % row
        i = (each - j) / row
        if pos[each] == "1":
            house[i][j] = -1
            num_map["num_-1"] += 1
        else:
            house[i][j] = 0

    # count position value
    for i in range(line):
        for j in range(row):
            if house[i][j] >= 0:
                posCount(house, i, j, num_map, True)

    print datetime.datetime.now().strftime("%Y%m%d-%H%M%S.%f")
    print arg
    print line, row
    print_house(house)
Example #3
def paint_canvas(x, y, colors):
	"""creates the png file and colors colors it
	"""
	sys.setrecursionlimit(15000)
	padding = 1
	point1hat = math.atan((y[3] - y[0])/(x[3] - x[0])) + math.pi

	if x[75] - x[0] < 0:
		point1hat = math.pi + point1hat
	point2hat = math.atan((y[-1] - y[-3])/(x[-1] - x[-3]))
	if x[-1] - x[-75] < 0:
		point2hat = math.pi + point2hat

	closeIt = bezier((x[0],y[0]), point1hat, (x[-1],y[-1]), point2hat)	#calls bezier function in order to close the open loop
	xnew = x + closeIt[0]
	ynew = y + closeIt[1]

	canvas = Image.new("RGB",(int(round(max(xnew)- min(xnew)))+2*padding, int(round((max(ynew) - min(ynew))))+2*padding))
	pixels = canvas.load()

	for i in range(len(xnew)):
		pixels[xnew[i]+padding+0, ynew[i]+padding+0] = (255, 255, 255)

	imagepath = "images/techtest.png"
	canvas.save(imagepath)
	centers = findZones(imagepath)

	for i in range(len(centers)):
		flood(pixels,centers[i], color = colors[i%len(colors)], visited = [])

	canvas = canvas.resize((1000,1000), Image.NEAREST)
	canvas.save(imagepath)	
Example #4
File: misc.py Project: 2xR/legacy
import sys
from contextlib import contextmanager

@contextmanager
def recursion_limit(n):
    """Context manager that temporarily sets Python's recursion limit to 'n', and restores the
    previous recursion limit when the context is exited."""
    m = sys.getrecursionlimit()
    sys.setrecursionlimit(n)
    try:
        yield
    finally:
        sys.setrecursionlimit(m)
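
A quick usage sketch (the nested object here is illustrative, not from the original project): pickle a deeply nested structure under a temporarily raised limit, which is restored on exit.

import pickle
import sys

# Build a nested list deep enough to overflow the default limit of ~1000.
deep_obj = []
node = deep_obj
for _ in range(2000):
    node.append([])
    node = node[0]

with recursion_limit(20000):
    data = pickle.dumps(deep_obj)
print(sys.getrecursionlimit())  # back to the previous limit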
 def _nestingTest(self, nestedObject, expected):
     limit = sys.getrecursionlimit()
     sys.setrecursionlimit(100)
     try:
         self.assertStringEqual(self.flatten(nestedObject), expected)
     finally:
         sys.setrecursionlimit(limit)
Example #6
def main():
    global e,memo

    sys.setrecursionlimit(100000)

    n = int( raw_input() )
    e = [ [] ] * n
    for i in range(n):
        e[i] = []
    
    for i in range(n-1):
        a,b = map( int, raw_input().split() )
        a -= 1
        b -= 1
        e[a].append( (b,1) )
        e[b].append( (a,-1) )
    
    ans = n
    memo = [0] * n
    # single
    for i in range(n):
        curr = go(i,-1)
        ans = min(ans,curr)
    # double
    for i in range(n):
        if len( e[i] ) <= 1:
            continue
        go(i,-1)
        curr = go1(i,-1,3)
        ans = min(ans,curr)
    print ans
Example #7
File: conf.py Project: bgamari/ghc
def increase_python_stack():
    # Workaround sphinx-build recursion limit overflow:
    # pickle.dump(doctree, f, pickle.HIGHEST_PROTOCOL)
    #  RuntimeError: maximum recursion depth exceeded while pickling an object
    #
    # Default python allows recursion depth of 1000 calls.
    sys.setrecursionlimit(10000)
def calculate_acc_index(dir_arr, origin_upper_left=True):
    # modify maximum recursion depth if required
    import sys
    rec_depth = sys.getrecursionlimit()
    sys.setrecursionlimit(max(dir_arr.shape[0] * dir_arr.shape[1], rec_depth))

    acc_index = np.ones(dir_arr.shape)
    cache = -np.ones(dir_arr.shape)

    n1, n2 = acc_index.shape

    for i in range(n1):
        if i % 100 == 0:
            print("{}/{} ...".format(i, n1))

        for j in range(n2):
            acc_index[i, j] = calculate_acc_index_for_point(i, j, dir_arr, cache, origin_upper_left=origin_upper_left)

    # print(acc_index.min(), acc_index.max())

    return acc_index
    def test_setrecursionlimit_recursion_depth(self):
        # Issue #25274: Setting a low recursion limit must be blocked if the
        # current recursion depth is already higher than the "lower-water
        # mark". Otherwise, it may not be possible anymore to
        # reset the overflowed flag to 0.

        from _testcapi import get_recursion_depth

        def set_recursion_limit_at_depth(depth, limit):
            recursion_depth = get_recursion_depth()
            if recursion_depth >= depth:
                with self.assertRaises(RecursionError) as cm:
                    sys.setrecursionlimit(limit)
                self.assertRegex(str(cm.exception),
                                 "cannot set the recursion limit to [0-9]+ "
                                 "at the recursion depth [0-9]+: "
                                 "the limit is too low")
            else:
                set_recursion_limit_at_depth(depth, limit)

        oldlimit = sys.getrecursionlimit()
        try:
            sys.setrecursionlimit(1000)

            for limit in (10, 25, 50, 75, 100, 150, 200):
                # formula extracted from _Py_RecursionLimitLowerWaterMark()
                if limit > 200:
                    depth = limit - 50
                else:
                    depth = limit * 3 // 4
                set_recursion_limit_at_depth(depth, limit)
        finally:
            sys.setrecursionlimit(oldlimit)
Example #10
def main():
    """主函数"""
    sys.setrecursionlimit(10000)
    app = QtGui.QApplication(sys.argv)
    calib = RecWindow()
    calib.show()
    sys.exit(app.exec_())
def mergeSort(alist1):
	sys.setrecursionlimit(100000)
	if len(alist1)>1:
		mid1=len(alist1)//2
		lefthalf1 = alist1[:mid1]
		righthalf1 = alist1[mid1:]
		mergeSort(lefthalf1)
		mergeSort(righthalf1)

		i=0
		j=0
		k=0
		while i<len(lefthalf1) and j<len(righthalf1):
			if lefthalf1[i]<righthalf1[j]:
				alist1[k]=lefthalf1[i]
				i=i+1
			else:
				alist1[k]=righthalf1[j]
				j=j+1
			k=k+1

		
		while i<len(lefthalf1):
			alist1[k]=lefthalf1[i]
			i=i+1
			k=k+1

		while j<len(righthalf1):
			alist1[k]=righthalf1[j]
			j=j+1
			k=k+1
    def main(self):
        try:
            with open('linkdb.pickle'):
                return pickle.load(open('linkdb.pickle'))

        except IOError:
            sys.setrecursionlimit(10000)
            cw = Crawler()
            data = cw.main()
            soup = BeautifulSoup(data)
            dtable = soup.findAll('table')[1]
            drows = dtable.findAll('tr')
            j = 1
            pdata = []
            while j<len(drows):
                drele = dtable.findAll('td')
                k = 1
                while k < len(drele):
                    flag = 0
                    pdict = dict()
                    try:
                        pdict['name'] = drele[k].find('a').contents[0]
                        pdict['link'] = 'http://en.wikipedia.org' + drele[k].find('a')['href']
                    except:
                        flag = 1
                        
                    if flag == 1 :
                        k += 1
                        continue
                    #print pdict
                    pdata.append(pdict)
                    k += 1
                j += 1
            pickle.dump(pdata, open('linkdb.pickle', 'wb'))
            return pdata
Example #13
 def sbo(self, name):
     """
     Build all dependencies of a package
     """
     if self.meta.rsl_deps in ["on", "ON"] and self.flag != "--resolve-off":
         try:
             sys.setrecursionlimit(10000)
             dependencies = []
             requires = SBoGrep(name).requires()
             if requires:
                 for req in requires:
                     status(0.03)
                     # toolbar_width = status(index, toolbar_width, 1)
                     # avoid to add %README% as dependency and
                     # if require in blacklist
                     if "%README%" not in req and req not in self.blacklist:
                         dependencies.append(req)
                 if dependencies:
                     self.dep_results.append(dependencies)
                     for dep in dependencies:
                         self.sbo(dep)
             return self.dep_results
         except KeyboardInterrupt:
             print("")   # new line at exit
             sys.exit(0)
     else:
         return []
Example #14
    def mst_prim(self, A, w, path, degree, tree_nbr):
        """
        'A' is the adjacency matrix
        'w' is the list of all connected vertices (in order of discovery)
        'path' is a list of tuples showing (from, to)
        """
        import sys
        import numpy as np

        sys.setrecursionlimit(30000)

        # Stop when we've added all nodes to the path
        # (number of 1-D arrays within matrix that contain nonzero elements)
        if len(w) == sum([any(np.array(x)[0]) for x in A]):
            return (A, w, path, degree, tree_nbr)

        # Find minimum path coming OUT of the known vertices
        vfrom, vto, vcost = self.find_min(A, w)

        # Increase the degree for vertices vfrom and vto
        degree[vfrom] += 1
        degree[vto] += 1

        # Update tree_nbr list for vfrom and vto
        tree_nbr[vfrom].append(vto)
        tree_nbr[vto].append(vfrom)

        # Store this vertex as part of the MST path
        w.append(vto)
        path.append((vfrom, vto))

        self.remove_route(A, vto)

        return self.mst_prim(A, w, path, degree, tree_nbr)
Example #15
def save(filepath, obj):
    try:
        _save(filepath, obj)
    except RuntimeError, e:
        """ Sometimes for large theano graphs, pickle/cPickle exceed the
            maximum recursion depth. This seems to me like a fundamental
            design flaw in pickle/cPickle. The workaround I employ here
            is the one recommended to someone who had a similar problem
            on stackexchange:

            http://stackoverflow.com/questions/2134706/hitting-maximum-recursion-depth-using-pythons-pickle-cpickle

            The workaround is just to raise the max recursion depth.
            Obviously this does not scale and could cause a crash
            but I don't see another solution short of writing our
            own implementation of pickle.
        """
        if str(e).find('recursion') != -1:
            warnings.warn('pylearn2.utils.save encountered the following error: ' \
                    + str(e) + \
                    '\nAttempting to resolve this error by calling ' + \
                    'sys.setrecursionlimit and retrying')

            sys.setrecursionlimit(50000)
            _save(filepath, obj)
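
A minimal Python 3 sketch of the same workaround (not pylearn2's actual code; Python 3 raises RecursionError where Python 2 raised RuntimeError), restoring the old limit once the retry finishes:

import pickle
import sys

def save_with_retry(filepath, obj):
    """Dump obj to filepath, retrying once with a raised recursion limit."""
    try:
        with open(filepath, 'wb') as f:
            pickle.dump(obj, f)
    except RecursionError:
        old_limit = sys.getrecursionlimit()
        sys.setrecursionlimit(50000)
        try:
            with open(filepath, 'wb') as f:
                pickle.dump(obj, f)
        finally:
            sys.setrecursionlimit(old_limit)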
Example #16
def tag_molecules(struct):
    """
    Sets the ``marked`` attribute of every Atom in struct to the molecule number
    it is a part of. If no bonds are present, every atom is its own molecule.

    Parameters
    ----------
    struct : :class:`parmed.Structure`
        Input structure to tag the molecules for
    """
    # Make sure our recursion limit is large enough, but never shrink it
    from sys import setrecursionlimit, getrecursionlimit
    setrecursionlimit(max(len(struct.atoms), getrecursionlimit()))

    if not struct.bonds:
        for i, atom in enumerate(struct.atoms):
            atom.marked = i + 1
        return
    # We do have bonds, this is the interesting part
    struct.atoms.unmark()
    mol_id = 1
    for atom in struct.atoms:
        if atom.marked: continue
        atom.marked = mol_id
        _set_owner(atom, mol_id)
        mol_id += 1
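
_set_owner is not shown in this snippet; a plausible sketch of such a recursive helper (an assumption based on the call above, not necessarily ParmEd's implementation) walks bonded neighbors and tags them with the same molecule id:

def _set_owner(atom, mol_id):
    # Mark every atom reachable through bonds with this molecule id.
    # `bond_partners` is assumed to list the atoms bonded to `atom`.
    for partner in atom.bond_partners:
        if not partner.marked:
            partner.marked = mol_id
            _set_owner(partner, mol_id)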
    def doPost(self,request,response):
        self.valid_row_count = 0
        self.log_dic = {}
        sys.setrecursionlimit(sys.maxint)
        sb = []
        out = response.getWriter()
        
        self.log_para = request.getParameter("log")
        if self.log_para == None:
            self.log_para = self.default_log_para
        if self.log_para == None:
            self.log_para = '[request]\n' + warHome + '/logs/request.log*\n\n[sql]\n' + warHome + '/logs/sql.log*\n\n[method]\n' + warHome + '/logs/method.log*'
            
        ui_public.print_query_head(request, response);
        result = ui_public.parse_log_para(request, response, self.log_para)
        fs = result[0]['method']
        date_from = result[1]
        date_to = result[2]
        date_from_ms= date_from*1000
        date_to_ms = date_to*1000
        start_time = datetime.datetime.now()
        if request.getParameter("cmd") == "executeTime":
            self.statisticMethodExecuteTime(fs, sb, date_from_ms, date_to_ms)

        if request.getParameter('cmd') != None and len(request.getParameter('cmd')) > 0:
            cost = datetime.datetime.now() - start_time
            print request.getParameter('cmd'), ' cost, ', cost
            ui_public.print_result_head(request, response, fs, date_from, date_to, self.valid_row_count, cost)
        else:
            ui_public.print_query_form(request, response, self.log_para, [['executeTime', 'method execution time stats']])
        # output the results
        ui_public.print_result_content_html(request, response, sb)
Example #18
    def __setstate__(self, state):
        sys.setrecursionlimit(5000)
        self._managed_partitions = state['managed_partitions']
        self._partitions = PartitionDict()
        self.process_groups = BGProcessGroupDict()
        self.process_groups.item_cls = BGProcessGroup
        self.node_card_cache = dict()
        self._partitions_lock = thread.allocate_lock()
        self.pending_diags = dict()
        self.failed_diags = list()
        self.diag_pids = dict()
        self.pending_script_waits = sets.Set()
        self.bridge_in_error = False
        self.cached_partitions = None
        self.offline_partitions = []

        self.configure()
        if 'partition_flags' in state:
            for pname, flags in state['partition_flags'].items():
                if pname in self._partitions:
                    self._partitions[pname].scheduled = flags[0]
                    self._partitions[pname].functional = flags[1]
                    self._partitions[pname].queue = flags[2]
                else:
                    logger.info("Partition %s is no longer defined" % pname)
        
        self.update_relatives()
        # initiate the process before starting any threads
        thread.start_new_thread(self.update_partition_state, tuple())
        self.lock = threading.Lock()
        self.statistics = Statistics()
Example #19
def test_main(type="short"):
    oldRecursionDepth = sys.getrecursionlimit()
    try:
        sys.setrecursionlimit(1001)
        t0 = clock()
        import b0
        import b1
        import b2
        import b3
        import b4
        import b5
        import b6
        print 'import time = %.2f' % (clock()-t0)
    
        tests = [b0,b1,b2,b3,b4,b5,b6]
        N = { "short" : 1, "full" : 1, "medium" : 2, "long" : 4 }[type]
    
        results = {}
    
        t0 = clock()
        for i in range(N):
            for test in tests:
                ts0 = clock()
                test.main()
                tm = (clock()-ts0)
                results.setdefault(test, []).append(tm)
                print '%.2f sec running %s' % ( tm, test.__name__)
    
        for test in tests:
            print '%s = %f -- %r' % (test.__name__, sum(results[test])/N, results[test])
    
        print 'all done in %.2f sec' % (clock()-t0)
    finally:
        sys.setrecursionlimit(oldRecursionDepth)
Example #20
 def printSmiles(self, smiles):
   self.limit = sys.getrecursionlimit()
   sys.setrecursionlimit(10000)
   self.memo = {}
   retval = self._printSmiles(smiles, 1, 1)
   sys.setrecursionlimit(self.limit)
   return retval
Example #21
def init(dir_name):
    '''setup the target and initialise the elfFile'''
    target_objects.load_target(dir_name)
    sys.setrecursionlimit(2000)
    import graph_refine.stack_logic as stack_logic
    stack_logic.add_hooks ()
    # silence graph-refine outputs that we don't care about when doing wcet
    def silent_tracer (s,v):
        if s.startswith('Loop') or re.search(r'\s*\(=',s) or re.search(r'\s*\(',s):
          return
        if s.startswith('requests') or s.startswith('Result:') or s.startswith('Now'):
          return
        if s.startswith('testing') or s.startswith('done') or s.startswith('rep_graph'):
          return
        if s.startswith('Testing') or s.startswith('Group'):
          return
        print s
    target_objects.tracer[0] = silent_tracer
    ef = elf_parser.parseElf(dir_name)

    #build a dict of asm_fs -> fs from functions_by_tag and functions
    asm_fs = dict ([(x,functions[x]) for x in functions_by_tag['ASM']])

    tran_call_graph = call_graph_utils.transitiveCallGraph(asm_fs,dir_name,dummy_funs)

    elfFile().tcg = tran_call_graph
    elfFile().asm_idents = None
    elfFile().immed = None
    return asm_fs
 def opt_recursionlimit(self, arg):
     """see sys.setrecursionlimit()"""
     try:
         sys.setrecursionlimit(int(arg))
     except (TypeError, ValueError):
         raise usage.UsageError(
             "argument to recursionlimit must be an integer")
Example #23
def main(command, filename):
    sys.setrecursionlimit(2000)
    try:
        lexer = OrgLexer(filename)
        tokens = lexer.tokenize()
        parser = OrgParser(tokens)
        todo = parser.parse()
    except LexerException as e:
        print(e.message)
        exit(1)
    except ParserException as e:
        print(e.message)
        exit(1)

    if "week" == command:
        schedule = Schedule(todo)
        print(schedule)
    elif 'today' == command:
        schedule = Schedule(todo)
        schedule.days = schedule.days[0:1]
        print("{}:".format(schedule.days[0].name))
        print(schedule)
    elif "tomorrow" == command:
        schedule = Schedule(todo)
        schedule.days = schedule.days[1:2]
        print("{}:".format(schedule.days[0].name))
        print(schedule)
    elif "active" == command:
        active_todo = todo.get_active()
        print(active_todo)
    else:
        usage()
        exit(1)
Example #24
def expected(hic_data, bads=None, signal_to_noise=0.05, inter_chrom=False, **kwargs):
    """
    Computes the expected values by averaging observed interactions at a given
    distance in a given HiC matrix.

    :param hic_data: dictionary containing the interaction data
    :param None bads: dictionary with column not to be considered
    :param 0.05 signal_to_noise: to calculate expected interaction counts,
       if not enough reads are observed at a given distance the observations
       of the distance+1 are summed. A signal-to-noise ratio of < 0.05
       corresponds to > 400 reads.

    :returns: a vector of biases (length equal to the size of the matrix)
    """
    min_n = signal_to_noise ** -2. # equals 400 when default

    size = len(hic_data)
    try:
        if not inter_chrom:
            size = max(hic_data.chromosomes.values())
    except AttributeError:
        pass

    if size > 1200:
        import sys
        sys.setrecursionlimit(size + 100)

    expc = {}
    dist = 0
    while dist < size:
        diag = []
        new_dist, val = _meandiag(hic_data, dist, diag, min_n, size, bads)
        for dist in range(dist, new_dist + 1):
            expc[dist] = val
    return expc
Example #25
    def __init__(self, G1, G2):
        """Initialize GraphMatcher.
        
        Parameters
        ----------
        G1,G2: NetworkX Graph or MultiGraph instances.
           The two graphs to check for isomorphism.

        Examples
        --------
        To create a GraphMatcher which checks for syntactic feasibility:

        >>> G1 = nx.path_graph(4)
        >>> G2 = nx.path_graph(4)
        >>> GM = nx.GraphMatcher(G1,G2)
        
        """
        self.G1 = G1
        self.G2 = G2
        self.G1_nodes = set(G1.nodes())
        self.G2_nodes = set(G2.nodes())

        # Set recursion limit.
        self.old_recursion_limit = sys.getrecursionlimit()
        expected_max_recursion_level = len(self.G2)
        if self.old_recursion_limit < 1.5 * expected_max_recursion_level:
            # Give some breathing room.
            sys.setrecursionlimit(int(1.5 * expected_max_recursion_level))
        
        # Declare that we will be searching for a graph-graph isomorphism.
        self.test = 'graph'
        
        # Initialize state
        self.initialize()
Example #26
def main():
    import argparse
    parser = argparse.ArgumentParser(
        prog="lipy", description='lipy - a pythonic lisp'
    )
    parser.add_argument("--test", "-t", action="store_true")
    parser.add_argument("--verbose", "-v", action="store_true")
    parser.add_argument("--eval", "-e")
    parser.add_argument("--tree", action="store_true")
    parser.add_argument("file", nargs="?")

    args = parser.parse_args()

    if args.test:
        test()
    elif args.eval:
        evals(args.eval, args.tree)
    elif args.file:
        sys.setrecursionlimit(40000)
        if args.file == "-":
            evals(sys.stdin.read(), args.tree)
        else:
            evals(file(args.file).read(), args.tree)
    else:
        parser.print_help()
    if args.verbose:
        print("Done, max_stack =", max_stack)
Example #27
def build_model_by_method(filename):

    sys.setrecursionlimit(10000)

    f = open(filename,'r')
    tree = ET.parse(f)
    root = tree.getroot()
    schedule = {}
    next = {}
    for child in root.findall('Activity'):
        id = child.find('id').text
        start_date = get_child(child,'start_date')
        finish_date = get_child(child,'finish_date')
        duration = get_child(child,'duration')
        not_early_date = get_child(child,'not_early_date')
        a = Activity(id, start_date, finish_date, duration, not_early_date)
        schedule[id] = a
        next_activity = '' if child.find('next_activity').text is None else child.find('next_activity').text
        next[id] = next_activity
    for key in schedule:
        if next[key] != '':
            for next_id in next[key].split(';'):
                schedule[key].append_next(schedule[next_id])

    sys.setrecursionlimit(1000)
Example #28
    def __init__(self, G1, G2):
        """Initialize GraphMatcher.
        
        Suppose G1 and G2 are undirected graphs.
    
        >>> G1=nx.path_graph(4)
        >>> G2=nx.path_graph(4)
        >>> GM = nx.GraphMatcher(G1,G2)
        
        creates a GraphMatcher which only checks for syntactic feasibility.
        """
        self.G1 = G1
        self.G2 = G2
 
        # Set recursion limit.
        self.old_recursion_limit = sys.getrecursionlimit()
        expected_max_recursion_level = len(self.G2)
        if self.old_recursion_limit < 1.5 * expected_max_recursion_level:
            # Give some breathing room.
            sys.setrecursionlimit(int(1.5 * expected_max_recursion_level))
        
        # Declare that we will be searching for a graph-graph isomorphism.
        self.test = 'graph'

        # Initialize the isomorphism mapping.
        self.state = GMState(self)
Example #29
File: ext.py Project: zencoding/cle
 def exe(self, mainloop):
     """
     Pickle the mainloop
     """
     if np.mod(mainloop.trainlog._batch_seen, self.freq) == 0:
         pkl_path = mainloop.name + '.pkl'
         path = os.path.join(self.path, pkl_path)
         logger.info("\tSaving model to: %s" % path)
         try:
             import sys
             sys.setrecursionlimit(50000)
             f = open(path, 'wb')
             cPickle.dump(mainloop, f, -1)
             f.close()
             #secure_pickle_dump(mainloop, path)
         except Exception:
             raise
     if np.mod(mainloop.trainlog._batch_seen, self.force_save_freq) == 0:
         force_pkl_path = mainloop.name + '_' +\
                          str(mainloop.trainlog._batch_seen) +\
                          'updates.pkl'
         force_path = os.path.join(self.path, force_pkl_path)
         logger.info("\tSaving model to: %s" % force_path)
         try:
             import sys
             sys.setrecursionlimit(50000)
             f = open(force_path, 'wb')
             cPickle.dump(mainloop, f, -1)
             f.close()
             #secure_pickle_dump(mainloop, path)
         except Exception:
             raise
Example #30
def scrape(url):
    sys.setrecursionlimit(10000)
    month_links = set([])
    try:
        r = http.request('GET', url)
    except urllib3.exceptions.SSLError as e:
        print(e)
    soup = BeautifulSoup(r.data)
    #looking for anything with the header matching re archive
    # EG, if on a blog such as http://terrytao.wordpress.com/ where no 
    # separate archive page exists but instead a sidebar
    archive = soup.find_all(id=re.compile("archive", re.I))
    if not len(archive) == 0:
        for i in map(lambda x: x.find_all('a', href=True), archive):
            for j in i:
                month_links.add(j)
    #find all things with dates in content
    # import pdb; pdb.set_trace()
    links = soup.find_all('a', href=True)
    for i in filter(lambda x: tools.containsDate(x.contents) or tools.dictContainsDate(x.attrs), links):
    # for i in filter(lambda x: tools.date_of_url(x) is not None, links):
        month_links.add(i)
    #make sure it comes from same url
    def same(x):
        #Same function fails on xkcd <= 999
        try:
            att = x.attrs['href']
            return not arrow.get(att) == None or att[0:len(url)] == url or att[0] == '/'
        except Exception:
            return False
    # month_links = list(filter(same, month_links))
    month_links = tools.filterArchiveLinks(month_links, url)
    month_links = tools.sort_by_date(month_links)

    return month_links
Example #31
def train(args, model_args, lrate):

    print("Copying the dataset to the current node's  dir...")

    tmp = '/Tmp/vermavik/'
    home = '/u/vermavik/'
    """
    tmp='/tmp/vermav1/'
    home='/u/79/vermav1/unix/'
    """

    dataset = args.dataset
    data_source_dir = home + 'data/' + dataset + '/'
    """
    if not os.path.exists(data_source_dir):
        os.makedirs(data_source_dir)
    data_target_dir = tmp+'data/CelebA/'
    copy_tree(data_source_dir, data_target_dir)
    """
    ### set up the experiment directories########

    exp_name = experiment_name(dataset=args.dataset,
                               act=args.activation,
                               meta_steps=args.meta_steps,
                               sigma=args.sigma,
                               temperature_factor=args.temperature_factor,
                               alpha1=args.alpha1,
                               alpha2=args.alpha2,
                               alpha3=args.alpha3,
                               grad_norm_max=args.grad_max_norm,
                               epochs=args.epochs,
                               job_id=args.job_id,
                               add_name=args.add_name)

    #temp_model_dir = tmp+'experiments/HVWB/'+dataset+'/model/'+ exp_name
    #temp_result_dir = tmp+'experiments/HVWB/'+dataset+'/results/'+ exp_name
    model_dir = home + 'experiments/HVWB/' + dataset + '/model/' + exp_name
    result_dir = home + 'experiments/HVWB/' + dataset + '/results/' + exp_name

    if not os.path.exists(model_dir):
        os.makedirs(model_dir)

    #if not os.path.exists(temp_result_dir):
    #    os.makedirs(temp_result_dir)
    """   
    #copy_script_to_folder(os.path.abspath(__file__), temp_result_dir)
    result_path = os.path.join(temp_result_dir , 'out.txt')
    filep = open(result_path, 'w')
    
    out_str = str(args)
    print(out_str)
    filep.write(out_str + '\n') 
    
      
    #torch.backends.cudnn.enabled = False # slower but repeatable
    torch.backends.cudnn.enabled = True # faster but not repeatable
                      
    out_str = 'initial seed = ' + str(args.manualSeed)
    print(out_str)
    filep.write(out_str + '\n\n')
    """
    #model_id = '/data/lisatmp4/anirudhg/minst_walk_back/walkback_'
    """
    model_id = '/data/lisatmp4/anirudhg/celebA_latent_walkback/walkback_'
    model_dir = create_log_dir(args, model_id)
    model_id2 =  '../celebA_logs/walkback_'
    model_dir2 = create_log_dir(args, model_id2)
    print model_dir
    print model_dir2 + '/' + 'log.jsonl.gz'
    logger = mimir.Logger(filename=model_dir2  + '/log.jsonl.gz', formatter=None)
    """
    # TODO batches_per_epoch should not be hard coded
    lrate = args.lr
    import sys
    sys.setrecursionlimit(10000000)
    args, model_args = parse_args()
    print args

    ## load the training data

    print 'loading celebA'
    train_loader, test_loader = load_celebA(args.data_aug, args.batch_size,
                                            args.batch_size, args.cuda,
                                            data_source_dir)
    n_colors = 3
    spatial_width = 64

    for batch_idx, (data, target) in enumerate(train_loader):

        Xbatch = data.numpy()
        #print Xbatch
        scl = 1. / np.sqrt(np.mean((Xbatch - np.mean(Xbatch))**2))
        shft = -np.mean(Xbatch * scl)

        break  ### TO DO : calculate statistics on whole data

    print "Width", WIDTH, spatial_width

    model = Net(args)

    if args.cuda:
        model.cuda()
    loss_fn = nn.BCELoss()
    if args.optimizer == 'sgd':
        optimizer_encoder = optim.SGD(model.encoder_params,
                                      lr=args.lr,
                                      momentum=args.momentum,
                                      weight_decay=0)
        optimizer_transition = optim.SGD(model.transition_params,
                                         lr=args.lr,
                                         momentum=args.momentum,
                                         weight_decay=0)
        optimizer_decoder = optim.SGD(model.decoder_params,
                                      lr=args.lr,
                                      momentum=args.momentum,
                                      weight_decay=0)
    elif args.optimizer == 'adam':
        optimizer_encoder = optim.Adam(model.parameters(),
                                       lr=args.lr,
                                       betas=(0.9, 0.999),
                                       eps=1e-08,
                                       weight_decay=0)
        optimizer_transition = optim.Adam(model.transition_params,
                                          lr=args.lr,
                                          betas=(0.9, 0.999),
                                          eps=1e-08,
                                          weight_decay=0)
        optimizer_decoder = optim.Adam(model.decoder_params,
                                       lr=args.lr,
                                       betas=(0.9, 0.999),
                                       eps=1e-08,
                                       weight_decay=0)
    uidx = 0
    estop = False
    bad_counter = 0
    batch_index = 1
    n_samples = 0
    print 'Number of steps....'
    print args.num_steps
    print "Number of metasteps...."
    print args.meta_steps
    print 'Done'
    count_sample = 1

    #### for saving metrics for all steps ###
    train_loss = []
    train_x_loss = []
    train_log_p_reverse = []
    train_kld = []

    #### for saving metrics for each step individually ###
    train_loss_each_step = [[]]
    train_x_loss_each_step = [[]]
    train_log_p_reverse_each_step = [[]]
    #train_kld_each_step = [[]]
    for i in range(args.meta_steps - 1):
        train_loss_each_step.append([])
        train_x_loss_each_step.append([])
        train_log_p_reverse_each_step.append([])
        #train_kld_each_step.append([])

    for epoch in range(args.epochs):
        print('epoch', epoch)
        for batch_idx, (data, target) in enumerate(train_loader):
            if args.cuda:
                data, target = data.cuda(), target.cuda()
            data, target = Variable(data), Variable(target)

            t0 = time.time()
            #batch_index += 1
            n_samples += data.data.shape[0]
            #print (n_samples)
            temperature_forward = args.temperature
            meta_cost = []
            x = data
            z = None
            encode = True
            for meta_step in range(0, args.meta_steps):
                #print ('meta_step', meta_step)
                #print encode
                loss, x_loss, log_p_reverse, KLD, z, z_tilde, x_tilde = compute_loss(
                    x,
                    z,
                    model,
                    loss_fn,
                    temperature_forward,
                    meta_step,
                    encode=encode)
                #meta_cost.append(loss)
                #print compute_param_norm(model.conv_x_z_1.weight.data)
                optimizer_encoder.zero_grad()
                optimizer_transition.zero_grad()
                optimizer_decoder.zero_grad()
                loss.backward()
                total_norm = clip_grad_norm(model.parameters(),
                                            args.grad_max_norm)
                #print ('step', meta_step, total_norm)
                if encode == True:
                    optimizer_encoder.step()
                optimizer_transition.step()
                optimizer_decoder.step()

                #print ('step', meta_step, clip_grad_norm(model.parameters(), 1000000))
                ### store metrics#######
                train_loss.append(loss.data[0])
                train_x_loss.append(x_loss.data[0])
                train_log_p_reverse.append(-log_p_reverse.data[0])
                if KLD is not None:
                    train_kld.append(KLD.data[0])

                #### store metrices for each step separately###
                train_loss_each_step[meta_step].append(loss.data[0])
                train_x_loss_each_step[meta_step].append(x_loss.data[0])
                train_log_p_reverse_each_step[meta_step].append(
                    -log_p_reverse.data[0])
                #if KLD is not None:
                #    train_kld_each_step[meta_step].append(KLD.data[0])

                if args.meta_steps > 1:
                    #data, _, _, _, _, _, _ = forward_diffusion(data, model, loss_fn,temperature_forward,meta_step)
                    #data = data.view(-1,3, 64,64)
                    #data = Variable(data.data, requires_grad=False)
                    x = Variable(x_tilde.data.view(-1, 3, spatial_width,
                                                   spatial_width),
                                 requires_grad=False)
                    z = Variable(z_tilde.data, requires_grad=False)
                    if args.encode_every_step == 0:
                        encode = False
                    temperature_forward *= args.temperature_factor

                #print loss.data
            #print loss.data

            #cost = sum(meta_cost) / len(meta_cost)
            #print cost
            #gradient_updates_ = get_grads(data_use[0],args.temperature)

            if np.isnan(loss.data.cpu()[0]) or np.isinf(loss.data.cpu()[0]):
                print loss.data
                print 'NaN detected'
                return 1.

            #batch_idx=0
            if batch_idx % 500 == 0:
                plot_loss(model_dir, train_loss, train_x_loss,
                          train_log_p_reverse, train_kld, train_loss_each_step,
                          train_x_loss_each_step,
                          train_log_p_reverse_each_step, args.meta_steps)

                count_sample += 1
                temperature = args.temperature * (args.temperature_factor**(
                    args.num_steps * args.meta_steps - 1))
                temperature_forward = args.temperature
                #print 'this'

                data_forward_diffusion = data
                for num_step in range(args.num_steps * args.meta_steps):
                    #print "Forward temperature", temperature_forward
                    print num_step
                    data_forward_diffusion, _, _, _, _, _, _ = forward_diffusion(
                        data_forward_diffusion, model, loss_fn,
                        temperature_forward, num_step)
                    #print data_forward_diffusion.shape
                    #data_forward_diffusion = np.asarray(data).astype('float32').reshape(args.batch_size, INPUT_SIZE)
                    data_forward_diffusion = data_forward_diffusion.view(
                        -1, 3, spatial_width, spatial_width
                    )  #reshape(args.batch_size, n_colors, WIDTH, WIDTH)
                    if num_step % 2 == 1:
                        plot_images(
                            data_forward_diffusion.data.cpu().numpy(),
                            model_dir + '/' + "batch_" + str(batch_idx) +
                            '_corrupted_' + 'epoch_' + str(epoch) +
                            '_time_step_' + str(num_step))

                    temperature_forward = temperature_forward * args.temperature_factor

                print "PLOTTING ORIGINAL IMAGE"
                temp = data
                plot_images(
                    temp.data.cpu().numpy(), model_dir + '/' + 'orig_' +
                    'epoch_' + str(epoch) + '_batch_index_' + str(batch_idx))

                print "DONE PLOTTING ORIGINAL IMAGE"
                '''

                temperature = args.temperature * (args.temperature_factor ** (args.num_steps*args.meta_steps - 1 ))

                for i in range(args.num_steps*args.meta_steps + args.extra_steps):
                    x_data, sampled, sampled_activation, sampled_preactivation  = f_sample(x_data, temperature)
                    print 'On backward step number, using temperature', i, temperature
                    reverse_time(scl, shft, x_data, model_dir + '/'+ "batch_" + str(batch_index) + '_samples_backward_' + 'epoch_' + str(count_sample) + '_time_step_' + str(i))
                    x_data = np.asarray(x_data).astype('float32')
                    x_data = x_data.reshape(args.batch_size, INPUT_SIZE)
                    if temperature == args.temperature:
                        temperature = temperature
                    else:
                        temperature /= args.temperature_factor

                '''

                #print 'this'

                if args.noise == "gaussian":
                    z_sampled = np.random.normal(
                        0.0, 1.0,
                        size=(args.batch_size, args.nl))  #.clip(0.0, 1.0)
                else:
                    z_sampled = np.random.binomial(1,
                                                   0.5,
                                                   size=(args.batch_size,
                                                         args.nl))

                temperature = args.temperature * (args.temperature_factor**(
                    args.num_steps * args.meta_steps - 1))

                z = torch.from_numpy(np.asarray(z_sampled).astype('float32'))
                if args.cuda:
                    z = z.cuda()
                    z = Variable(z)
                for i in range(args.num_steps *
                               args.meta_steps):  # + args.extra_steps):
                    z_new_to_x, z_to_x, z_new = model.sample(
                        z, temperature,
                        args.num_steps * args.meta_steps - i - 1)
                    #print 'On step number, using temperature', i, temperature
                    if i % 2 == 1:
                        reverse_time(
                            scl, shft,
                            z_new_to_x.data.cpu().numpy(), model_dir +
                            '/batch_index_' + str(batch_idx) + '_inference_' +
                            'epoch_' + str(epoch) + '_step_' + str(i))

                    if temperature == args.temperature:
                        temperature = temperature
                    else:
                        temperature /= args.temperature_factor
                    z = z_new
Example #32
import copy
import sys
sys.setrecursionlimit(10**6)  # sets the allowed recursion depth; the default is ~1000, raised here to 10**6

n = int(input())
space = [['X' for _ in range(n)] for _ in range(n)]
for i in range(n):
    a = input()
    for j in range(n):
        space[i][j] = a[j]
blue = 0
red = 0
green = 0
normal = 0
blind = 0
normal_visited = [[0 for _ in range(n)] for _ in range(n)]  # region map for normal vision
blind_visited = [[0 for _ in range(n)] for _ in range(n)]  # region map for color-blind vision


def dfs(y, x, color, visited, sp, blind):
    if blind == 0:  # not color-blind
        visited[y][x] = 1  # mark visited cells with 1
        sp[y][x] = 0  # reset checked cells to 0
        if y > 0:  # zero out any unvisited neighbor of the same color
            if sp[y - 1][x] == color and visited[y - 1][x] == 0:
                dfs(y - 1, x, color, visited, sp, 0)
        if y < len(sp) - 1:
            if sp[y + 1][x] == color and visited[y + 1][x] == 0:
                dfs(y + 1, x, color, visited, sp, 0)
        if x > 0:
            if sp[y][x - 1] == color and visited[y][x - 1] == 0:
Example #33
import sys
sys.setrecursionlimit(10**6)
def input():
  return sys.stdin.readline()[:-1]

N,Q = map(int,input().split())
adj = [ [] for _ in range(N) ]
for _ in range(N-1):
  u,v = map(int,input().split())
  u -= 1
  v -= 1
  adj[u].append(v)
  adj[v].append(u)

scores = [0] * N
for _ in range(Q):
  u,p = map(int,input().split())
  u -= 1
  scores[u] += p

res = [0] * N
def dfs(n, p, s):
  res[n] = s
  for nei in adj[n]:
    if nei != p:
      dfs(nei, n, s+scores[nei])

dfs(0,-1,scores[0])

print(" ".join(map(str, res)))
Example #34
#!/usr/bin/env python3
import sys
sys.setrecursionlimit(10000000)
INF = 1<<32

YES = "Yes"  # type: str
NO = "No"  # type: str

def solve(A: "List[List[int]]", N: int, b: "List[int]"):
    f = [[False] * (N) for i in range(N)]
    for i in range(N):
        for j in range(3):
            for k in range(3):
                if b[i] == A[j][k]:
                    f[j][k] = True
    t = False
    for i in range(3):
        if all([f[i][k] for k in range(3)]):
            t = True

    for i in range(3):
        if all([f[k][i] for k in range(3)]):
            t = True
    
    if all([f[k][k] for k in range(3)]):
        t = True

    if f[0][2] and f[1][1] and f[2][0]:
        t = True

    if t:
Example #35
import sys
from functools import lru_cache
sys.setrecursionlimit(2500)


# With recursion
@lru_cache(maxsize=None)
def longest_palim(l, r):
  if l == r:
    return 1
  
  if l  > r :
    return 0

  if text[l] == text[r]:
    return longest_palim(l+1, r-1) + 2
  else:
    return max(longest_palim(l, r-1), longest_palim(l+1, r))


# With Dynamic Programming
dp_table = [[0]*1001 for i in range(1001)]
for i in range(1001):
  dp_table[i][i] = 1
def longest_palim_dp(text):
  global dp_table

  if len(text) <= 1:
    return len(text)

  length = len(text)
# Snake
import sys
sys.setrecursionlimit(12000)  # without this, a runtime error occurs

# Runtime error causes:
# array index out of range
# exceeding the allowed recursion depth

dx, dy = [1, 0, -1, 0], [0, 1, 0, -1]  # east, south, west, north

current = 0


def pr():
    print()
    for i in Map:
        print(i)


def move(y, x, ty, tx, answer, direction):
    global current

    eat = False
    if y < 0 or x < 0 or y == N or x == N:
        print(answer)
        sys.exit(0)

    if Map[y][x] == 2:  # stop if this is the snake's own body
        print(answer)
        sys.exit(0)
Example #37
def set_python_recursion_limit(n) -> None:
    "Sets the required python recursion limit given $RecursionLimit value"
    python_depth = python_recursion_depth(n)
    sys.setrecursionlimit(python_depth)
    if sys.getrecursionlimit() != python_depth:
        raise OverflowError
Example #38
def DFS(startVertex):
    global explored
    global leader
    global leaders
    global edgeOut

    explored[startVertex] = True
    leader[startVertex] = s
    leaders[s].append(startVertex)
    for edgeOut in edgesOut[startVertex]:
        if not explored[edgeOut]:
            DFS(edgeOut)


import sys, thread, threading, time
from math import *

sys.setrecursionlimit(100000)  # don't know what I actually needed here
thread.stack_size(
    2**27
)  # largest power of 2 that didn't give me an error, don't know what I needed
begin = time.clock()

print "opening file..."
filehandle = open("SCC.txt")
lines = filehandle.readlines()
filehandle.close()
edges = [0] * len(lines)
maxN = 0

for i in range(len(lines)):
    line = lines[i]
    #    if(i%1000==0):
Example #39
#https://rextester.com/l/python3_online_compiler
#E:\!Учеба\!Аспирантура\Предметы\ДИССЕРТ_материалы\diss_materials\Lisp\github\lispVSpython
#https://github.com/Toljanchiman/lispVSpython
from collections import OrderedDict
from sys import setrecursionlimit
setrecursionlimit(2000000000)
import pickle as cPickle
# tracemalloc makes no sense under PyPy because objects are not allocated via malloc
import tracemalloc
import timeit
import cProfile
import gc
gc.enable()
#from pprint import pprint
'''# 
(Решение1 '(<Spd2> (<Accl> 0.3) (<Time> 20) (<Spd1> 4)) Механика) answer (f1 <Spd1> <Accl> <Time>) 10 
(Решение1 '(<Time> (<Accl> 0.4) (<Spd1> 12) (<Spd2> 20)) Механика) answer (f8 <Spd2> <Spd1> <Accl>) 20 
(Решение1 '(<Time> (<Accl> 0.6) (<Spd1> 0) (<Dist> 30)) Механика) answer (f9 <Spd1> <Accl> <Dist>) 10.0 
(Решение1 '(<Accl> (<Spd1> 0) (<Time> 10) (<Dist> 5000)) Механика) answer (f10 <Dist> <Spd1> <Time>) 100.0 
(Решение1 '(<Spd2> (<Spd1> 0) (<Time> 10) (<Dist> 5000)) Механика) answer (f1 <Spd1> (f10 <Dist> <Spd1> <Time>) <Time>) 1000.0 
(Решение1 '(<Spd2> (<Accl> 616000) (<Spd1> 0) (<Dist> 0.415)) Механика) answer (f2 <Accl> <Dist> <Spd1>) 715.038460504049 -- (Решение1 '(<Dist> (<Spd1> 20) (<Time> 5) (<Spd2> 0)) Механика) answer (f6 <Spd1> <Time> (f8 <Spd2> <Spd1> <Time>)) 50.0 -- (Решение1 '(<Spd1> (<Dist> 100) (<Time> 20) (<Accl> 0.3)) Механика) answer (f4 <Dist> <Time> <Accl>) 2.0 
(Решение1 '(<Spd2> (<Dist> 100) (<Time> 20) (<Accl> 0.3)) Механика) answer (f1 (f4 <Dist> <Time> <Accl>) <Accl> <Time>) 8.0 (Решение1 '(<Accl> (<Mass> 60000) (<FrPull> 90000) (<FrFric> 0)) Механика) answer (f8 <FrPull> <FrFric> <Mass>) 1.5 
-- (Решение1 '(<CfFric> (<FrFric> 2300) (<FrPull> 2300) (<Mass> 23000) (<Cfg> 9.8)) Механика) answer (f13 <FrFric> <Mass> <Cfg>) 1.02040816326531E-02 
-- (Решение1 '(<KinEn1> (<Mass> 6600) (<Spd1> 7800) (<Accl> 0)) Механика) answer (f16 <Mass> <Spd1>) 200772000000.0 
A = '<FrFric>' VT = ['<Dist>', '<KinEn1>', '<KinEn2>', '<FrPull>']'''
#(Решение1 '(<FrFric> (<Dist> 0.3) (<KinEn1> 20) (<KinEn2> 4) (<FrPull> 0.05)) Механика)
#(f12 (f17 (f18 <FrPull> <Dist>) <KinEn2> <KinEn1>) <Dist>)

f1 = lambda x, y, z: (x + (y * z))
f8 = lambda x, y, z: ((x - y) / z)
f12 = lambda x, y: (x / y)
Example #40
def configure_theano():
    theano.config.floatX = 'float32'
    sys.setrecursionlimit(10000)
Example #41
import sys
sys.setrecursionlimit(1 << 25)
read = sys.stdin.readline
ra = range
enu = enumerate


def mina(*argv, sub=1): return list(map(lambda x: x - sub, argv))
# subtracts sub from every element passed in; unpack lists with * at the call site


def read_a_int(): return int(read())


def read_ints(): return list(map(int, read().split()))


def read_col(H):
    '''H is number of rows
    for input given as columns (e.g. an A column and a B column)
    ex1)A,B=read_col(H)    ex2) A,=read_col(H) # single-column case'''
    ret = []
    for _ in range(H):
        ret.append(list(map(int, read().split())))
    return tuple(map(list, zip(*ret)))


def read_tuple(H):
    '''H is number of rows'''
    ret = []
    for _ in range(H):
Example #42
#!/usr/bin/env python3
# encoding:utf-8
import copy
import random
import bisect  # bisect_left enables binary-search lookups
import fractions  # for least common multiples and the like
import math
import sys
import collections
from decimal import Decimal  # for exact decimal arithmetic

mod = 10**9 + 7
sys.setrecursionlimit(mod)  # the default recursion limit is 1000

d = collections.deque()


def LI():
    return list(map(int, sys.stdin.readline().split()))


D, T, S = LI()

if S * T >= D:
    print("Yes")
else:
    print("No")
Example #43
def save_model(model, filename='okapi_model.pk'):
    sys.setrecursionlimit(10000)
    file = open(filename, 'wb')
    pickle.dump(model, file, protocol=pickle.HIGHEST_PROTOCOL)
    file.close()
Example #44
# 14731 Derivative Calculator (LARGE)
from sys import setrecursionlimit
setrecursionlimit(10**9)
input = __import__('sys').stdin.readline
MOD = 10**9 + 7
# def pwrOld(n):
#     return (2**n)%MOD

# cache=[0]*(10**9+5)
# def pwrCache(n):
#     # if n == 1: return 2
#     # if n == 0: return 0
#     if cache[n]: return cache[n]
#     if n < 10*5:
#         cache[n] = (2**n)%MOD
#         return cache[n]
#     if n%2 == 1:
#         cache[n] = (expoCache(n//2)*expoCache(n//2+1))%MOD
#         return cache[n]
#     else:
#         cache[n] = (expoCache(n//2)**2)%MOD
#         return cache[n]


def pwr(n):
    if n < 10 * 5: return (2**n) % MOD
    if n % 2 == 1: return (pwr(n // 2) * pwr(n // 2 + 1)) % MOD
    else:
        return (pwr(n // 2)**2) % MOD

Example #45
def detect_beats(activations, interval, look_aside=0.2):
    """
    Detects the beats in the given activation function as in [1]_.

    Parameters
    ----------
    activations : numpy array
        Beat activations.
    interval : int
        Look for the next beat each `interval` frames.
    look_aside : float
        Look this fraction of the `interval` to each side to detect the beats.

    Returns
    -------
    numpy array
        Beat positions [frames].

    Notes
    -----
    A Hamming window of 2 * `look_aside` * `interval` is applied around the
    position where the beat is expected to prefer beats closer to the centre.

    References
    ----------
    .. [1] Sebastian Böck and Markus Schedl,
           "Enhanced Beat Tracking with Context-Aware Neural Networks",
           Proceedings of the 14th International Conference on Digital Audio
           Effects (DAFx), 2011.

    """
    # TODO: make this faster!
    sys.setrecursionlimit(len(activations))
    # always look at least 1 frame to each side
    frames_look_aside = max(1, int(interval * look_aside))
    win = np.hamming(2 * frames_look_aside)

    # list to be filled with beat positions from inside the recursive function
    positions = []

    def recursive(position):
        """
        Recursively detect the next beat.

        Parameters
        ----------
        position : int
            Start at this position.

        """
        # detect the nearest beat around the actual position
        act = signal_frame(activations, position, frames_look_aside * 2, 1)
        # apply a filtering window to prefer beats closer to the centre
        act = np.multiply(act, win)
        # search max
        if np.argmax(act) > 0:
            # maximum found, take that position
            position = np.argmax(act) + position - frames_look_aside
        # add the found position
        positions.append(position)
        # go to the next beat, until end is reached
        if position + interval < len(activations):
            recursive(position + interval)
        else:
            return

    # calculate the beats for each start position (up to the interval length)
    sums = np.zeros(interval)
    for i in range(interval):
        positions = []
        # detect the beats for this start position
        recursive(i)
        # calculate the sum of the activations at the beat positions
        sums[i] = np.sum(activations[positions])
    # take the winning start position
    start_position = np.argmax(sums)
    # and calc the beats for this start position
    positions = []
    recursive(start_position)
    # return indices
    return np.array(positions)
import re
import sys

MAX = 875714
sys.setrecursionlimit(MAX)
adjacent_list = [[] for i in range(MAX)]
reversed_adjacent_list = [[] for i in range(MAX)]
finish_time = [-1 for i in range(MAX)]
current_source = -1
current_time = 0
counter = 0
leaders = [-1 for i in range(MAX)]
max_scc = [0] * 5

def dfs_loop_1():
    global current_source
    for each_vertex in range(MAX):
        if leaders[each_vertex] < 0:
            current_source = each_vertex
            dfs_1(each_vertex)

def dfs_1(vertex):
    global current_time
    leaders[vertex] = current_source
    for each_vertex in reversed_adjacent_list[vertex]:
        if leaders[each_vertex] < 0:
            dfs_1(each_vertex)
    finish_time[current_time] = vertex
    current_time += 1

def dfs_loop_2():
Example #47
#------------------Bombermans Team---------------------------------#
# Author  : B3mB4m
# Concat  : [email protected]
# Project : https://github.com/b3mb4m/Shellsploit
# LICENSE : https://github.com/b3mb4m/Shellsploit/blob/master/LICENSE
#------------------------------------------------------------------#

from random import randint
from itertools import product
from sys import setrecursionlimit
from re import findall
from sys import exit
from sys import version_info
from binascii import hexlify

setrecursionlimit(9999)


def xorme(shellcode):
    cache = shellcode
    mylist = ["".join(x) for x in list(product("ABCDEF", repeat=2))]
    insert = mylist[randint(0, len(mylist) - 1)]
    xorfirst = [
        r"\x40",  # inc eax
        r"\x43",  # inc ebx
        r"\x42",  # inc edx
        r"\x47",  # inc edi
    ]
    header = xorfirst[randint(0, len(xorfirst) - 1)]
    header += r"\xEB\x0D"
    header += xorfirst[randint(0, len(xorfirst) - 1)]
Example #48
import math
import copy
import logging
import string
import sys
sys.setrecursionlimit(10000000)  # 10000000 is an example value

trace = logging.info

bestLocalSolution = 999999999

def bestTimePossible(elapsedTime, cookies, cookiesPerSecond, C, F, X):
    global bestLocalSolution
    if elapsedTime > bestLocalSolution:
        trace("found best Local solution: %s"%bestLocalSolution)    
        return bestLocalSolution
    trace("time: %s - cookies: %s/%s - CPS: %s"%(elapsedTime, cookies, X, cookiesPerSecond))
   
    ## how long it would take to just wait until we win
    timeToWin = (X - cookies) / cookiesPerSecond
    trace("timeToWin: %s"%timeToWin)
    if elapsedTime + timeToWin < bestLocalSolution:
        bestLocalSolution = elapsedTime + timeToWin
    
    ## how long it would take to buy a factory and then keep waiting
    # if we can't yet afford a factory, add the waiting time
    if cookies < C:
        timeToNextFactory = (C - cookies) / cookiesPerSecond
        elapsedTime = elapsedTime + timeToNextFactory
        cookies = C
    
Example #49
import sys
sys.setrecursionlimit(10**9)  # passes efficiency test 4


def rfind(visited, idx):
    if idx not in visited:
        visited[idx] = idx + 1
        return idx
    else:
        ret = rfind(visited, visited[idx])  # at this point visited[idx] > 0
        visited[idx] = ret + 1
        return ret


def solution(k, room_number):
    answer = []
    visited = dict()  # changed [] -> {}; passes efficiency tests 5, 6, 7
    for num in room_number:
        idx = rfind(visited, num)
        answer.append(idx)
    return answer
import queue


def reverse(q1):
    # Recursively reverse the entire queue in place.
    if q1.qsize() == 0:
        return
    else:
        k = q1.get()
        reverse(q1)
        q1.put(k)


def reverseFirstK(q, k):
    if k == 0:
        return
    else:
        ele = q.get()
        r = k - 1
        reverseFirstK(q, r)
        q.put(ele)


from sys import setrecursionlimit

setrecursionlimit(11000)
n = int(input())
li = [int(ele) for ele in input().split()]
q = queue.Queue()
for ele in li:
    q.put(ele)
k = int(input())
reverse(q)
reverseFirstK(q, q.qsize() - k)
while (q.qsize() > 0):
    print(q.get())
    n -= 1
#uses python3

import sys
import threading

# This code is used to avoid stack overflow issues
sys.setrecursionlimit(10**6) # max depth of recursion
threading.stack_size(2**26)  # new thread will get stack of such size


class Vertex:
    def __init__(self, weight):
        self.weight = weight
        self.children = []


def ReadTree():
    size = int(input())
    tree = [Vertex(w) for w in map(int, input().split())]
    for i in range(1, size):
        a, b = list(map(int, input().split()))
        tree[a - 1].children.append(b - 1)
        tree[b - 1].children.append(a - 1)
    return tree


def dfs(tree, vertex, parent, weights):
    # for child in tree[vertex].children:
    #     if child != parent:
    #         dfs(tree, child, vertex)
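
# The original dfs body above is truncated and commented out. A hedged
# sketch (an assumption based on the scaffold, not the original source) of
# the classic tree DP it points at -- the maximum-weight independent set:
# either skip a vertex (sum over children) or take it (its own weight plus
# the sum over grandchildren).
def dfs_sketch(tree, vertex, parent, weights):
    children = [c for c in tree[vertex].children if c != parent]
    for child in children:
        dfs_sketch(tree, child, vertex, weights)  # fills weights for the subtree
    skip_v = sum(weights[c] for c in children)
    take_v = tree[vertex].weight
    for child in children:
        take_v += sum(weights[gc] for gc in tree[child].children
                      if gc != vertex)
    weights[vertex] = max(skip_v, take_v)

# Usage: weights = [0] * len(tree); dfs_sketch(tree, 0, -1, weights);
# the answer is then weights[0].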
Example #52
0
                if m > n and m <= b:
                    ans += 1

        print "Case #%d: %d" % (i, ans)

    return 0


## -------------------------------------------
## TEMPLATE

from sys import stdin
from sys import setrecursionlimit
from copy import deepcopy
from math import sqrt
from itertools import permutations
from itertools import combinations

def getline():
	return stdin.readline()

def getLineAs(tp):
	return map(tp, getline().split())

def array(n, init = 0):
	return [deepcopy(init) for i in range(n)]

if __name__ == "__main__":
	setrecursionlimit(1024 * 1024)
	main()
Example #53
0
#!/usr/bin/env python3
import sys
from sys import setrecursionlimit
setrecursionlimit(10**8)


def win_from(n, m):
    assert (1 <= n and 1 <= m)

    if (n == 1 and m == 1):
        return 0

    if (any(not win_from(n - s, m) for s in range(1, n // 2 + 1))
            or any(not win_from(n, m - s) for s in range(1, m // 2 + 1))):
        return 1
    else:
        return 0
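

# win_from recomputes overlapping (n, m) states exponentially, which is why
# the limit above is so high. A hedged memoized variant (an assumption, not
# part of the original source) tames the runtime:
from functools import lru_cache

@lru_cache(maxsize=None)
def win_from_memo(n, m):
    if n == 1 and m == 1:
        return 0
    if (any(not win_from_memo(n - s, m) for s in range(1, n // 2 + 1))
            or any(not win_from_memo(n, m - s) for s in range(1, m // 2 + 1))):
        return 1
    return 0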


def cut_direction(n, m):
    return 0


def eat_size(n, m):
    return 0


if __name__ == "__main__":
    data = sys.stdin.readlines()
    assert (len(data) >= 3)
    m = int(data[1])
Example #54
0
def main():
    import sys
    input=sys.stdin.readline
    sys.setrecursionlimit(10**6)
Example #55
0
import sys


def set_recursion_limit():
    sys.setrecursionlimit(12345)
Example #56
0

def preOrder(inLeft, inRight, postLeft, postRight):
    # The last element of the post-order range is the root of this subtree.
    if postLeft <= postRight and inLeft <= inRight:
        parent = postOrder[postRight]
        print(parent, end=" ")
        pIdx = inPos[parent]
        # left subtree, relative to the parent node
        preOrder(inLeft, pIdx - 1, postLeft, postLeft + pIdx - inLeft - 1)
        # right subtree, relative to the parent node
        preOrder(pIdx + 1, inRight, postRight + pIdx - inRight, postRight - 1)


from sys import setrecursionlimit

setrecursionlimit(10**6)

N = int(input())
inOrder = list(map(int, input().split()))
postOrder = list(map(int, input().split()))
inPos = [0] * (N + 1)
for idx, i in enumerate(inOrder):
    inPos[i] = idx
preOrder(0, N - 1, 0, N - 1)
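
# Hand-traced check (not from the original source): with N=3,
# inOrder "2 1 3" and postOrder "2 3 1", the call above prints the
# preorder "1 2 3".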
Example #57
0
#!/usr/bin/env python
import sys

if len(sys.argv) == 1:
    sys.stdin = open(__file__.replace('.py', '.in'))
else:
    sys.stdin = open(sys.argv[1])
    sys.stdout = open(sys.argv[1].replace('.in', '') + '.out', 'w')

def get_ints():
    return map(int, raw_input().split())

n_cases = input()

sys.setrecursionlimit(20)

def prev_jump(n):
    if n <= 10:
        return 1
    n_s = str(n)
    n_l = len(n_s)
    prev = int(n_s[:(n_l+1)/2] + '0' * (n_l - 1 - (n_l+1)/2) + '1')
    if str(prev) == str(prev)[::-1]:
        # palindromes aren't valid jump points
        assert n_s[0] == '1'
        return prev_jump(10 ** (n_l - 1) - 1)
    elif prev > n:
        return prev_jump(n - 1)

    return prev
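
# Hand-traced (Python 2 integer division): prev_jump(25) keeps the first
# half "2", appends "1" to get 21, which is neither a palindrome nor
# greater than 25, so 21 is returned.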
Example #58
0
# Imports assumed by this snippet (the module header is truncated in the
# source); oopsc, fit_data and plot_thresholds are project-internal names:
import os
import sys
import multiprocessing as mp
from pprint import pprint

import git
import numpy as np
import pandas as pd


def run_thresholds(
        decoder,
        lattice_type="toric",
        lattices = [],
        perror = [],
        superoperator=[],
        iters = 0,
        measurement_error=False,
        multithreading=False,
        threads=None,
        modified_ansatz=False,
        save_result=True,
        file_name="thres",
        show_plot=False,
        plot_title=None,
        folder = "./",
        P_store=1000,
        debug=False,
        **kwargs
        ):
    '''
    Run threshold simulations for each lattice size in `lattices` and each
    physical error rate in `perror` (or each superoperator), accumulate the
    decoder outputs in a DataFrame indexed by (L, p), optionally save them
    to CSV, then fit the threshold and optionally plot it.
    '''
    run_oopsc = oopsc.multiprocess if multithreading else oopsc.multiple

    if measurement_error:
        from oopsc.graph import graph_3D as go
    else:
        from oopsc.graph import graph_2D as go

    sys.setrecursionlimit(100000)
    r = git.Repo(os.path.dirname(__file__))
    full_name = r.git.rev_parse(r.head, short=True) + f"_{lattice_type}_{go.__name__}_{decoder.__name__}_{file_name}"
    if not plot_title:
        plot_title = full_name


    if not os.path.exists(folder):
        os.makedirs(folder)

    if kwargs.pop("subfolder"):
        os.makedirs(folder + "/data/", exist_ok=True)
        os.makedirs(folder + "/figures/", exist_ok=True)
        file_path = folder + "/data/" + full_name + ".csv"
        fig_path = folder + "/figures/" + full_name + ".pdf"
    else:
        file_path = folder + "/" + full_name + ".csv"
        fig_path = folder + "/" + full_name + ".pdf"

    progressbar = kwargs.pop("progressbar")

    data = None
    int_P = [int(p*P_store) for p in perror]
    config = oopsc.default_config(**kwargs)

    # Simulate and save results to file
    for lati in lattices:

        if multithreading:
            if threads is None:
                threads = mp.cpu_count()
            graph = [oopsc.lattice_type(lattice_type, config, decoder, go, lati) for _ in range(threads)]
        else:
            graph = oopsc.lattice_type(lattice_type, config, decoder, go, lati)

        for i, (pi, int_p) in enumerate(zip(perror, int_P)):

            print("Calculating for L = ", str(lati), "and p =", str(pi))

            superop = None
            GHZ_success = None
            if superoperator:
                pi = 0
                superop = superoperator[i]
                if "GHZ_success" in kwargs:
                    GHZ_success = kwargs["GHZ_success"][i]

            oopsc_args = dict(
                paulix=pi,
                superoperator=superop,
                GHZ_success=GHZ_success,
                lattice_type=lattice_type,
                debug=debug,
                processes=threads,
                progressbar=progressbar
            )
            if measurement_error and superoperator is None:
                oopsc_args.update(measurex=pi)
            output = run_oopsc(lati, config, iters, graph=graph, **oopsc_args)

            pprint(dict(output))
            print("")

            if data is None:
                if os.path.exists(file_path):
                    data = pd.read_csv(file_path, header=0)
                    data = data.set_index(["L", "p"])
                else:
                    columns = list(output.keys())
                    index = pd.MultiIndex.from_product([lattices, int_P], names=["L", "p"])
                    data = pd.DataFrame(
                        np.zeros((len(lattices) * len(perror), len(columns))), index=index, columns=columns
                    )

            if data.index.isin([(lati, int_p)]).any():
                for key, value in output.items():
                    data.loc[(lati, int_p), key] += value
            else:
                for key, value in output.items():
                    data.loc[(lati, int_p), key] = value

            data = data.sort_index()
            if save_result:
                data.to_csv(file_path)

    print(data.to_string())

    par = fit_data(data, modified_ansatz)

    if show_plot:
        plot_thresholds(data, file_name, fig_path, modified_ansatz, save_result=save_result, par=par)

    if save_result:
        data.to_csv(file_path)
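
# A hedged usage sketch (the decoder import and all values are assumptions,
# not from the source). Note that `subfolder` and `progressbar` must be
# passed explicitly, because the function pops them without defaults:
#
#   from oopsc.decoder import mwpm
#   run_thresholds(mwpm, lattice_type="toric", lattices=[8, 12, 16],
#                  perror=[0.09, 0.10, 0.11], iters=10000,
#                  subfolder=False, progressbar=True)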
Example #59
0
# https://onlinejudge.u-aizu.ac.jp/courses/lesson/1/ALDS1/7/ALDS1_7_B

import sys
sys.setrecursionlimit(2**20)  # raise the recursion limit; anything much lower gets RE (runtime error)


class Node:
    def __init__(self,
                 parent,
                 left,
                 right):
        self.parent = parent
        self.left = left  # left child node
        self.right = right  # right child node


def get_all_height(T: dict, H: dict, u: int):
    '''
    Depth-first search that visits every node and records each node's
    height in H.
    '''
    h_left, h_right = 0, 0  # defaults for when a child is None
    # The book traverses right-to-left, but that reads awkwardly,
    # so here we go left-to-right instead.
    if T[u].left is not None:
        h_left = get_all_height(T, H, T[u].left) + 1
    if T[u].right is not None:
        h_right = get_all_height(T, H, T[u].right) + 1
    ret = max(h_left, h_right)
    H[u] = ret
    return ret
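
# Hand-traced usage sketch (not from the original source): root 0 with two
# leaf children 1 and 2.
#
#   T = {0: Node(None, 1, 2), 1: Node(0, None, None), 2: Node(0, None, None)}
#   H = {}
#   get_all_height(T, H, 0)  # returns 1; H becomes {1: 0, 2: 0, 0: 1}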
Example #60
0
from LAM import cidoc_crm_502 as crm

import sys
sys.setrecursionlimit(100) # minimum limit

# IMPORTANT
# http://www.cafepy.com/article/python_attributes_and_methods/python_attributes_and_methods.html

def list_fproperty_names(cls, i):
    
    f_data = ''
    fproperties = cls.crm_fproperties()
    
    if fproperties:
        for p in fproperties:
            f_data += '    ' + p.flabel + ': ' + p.range.__doc__ + '\n'
    else:
        if i == 0:
            f_data += '    No declared forward properties\n'
        else:
            f_data += '    No inherited forward properties\n'
    
    f_data += '\n'
    return f_data

def list_rproperty_names(cls, i):
    
    r_data = ''
    rproperties = cls.crm_rproperties()
    
    if rproperties: