Beispiel #1
0
def nsmallest(n, iterable):
    """Find the n smallest elements in a dataset.

    Equivalent to:  sorted(iterable)[:n]

    Two strategies are used:
    * when the input is sized and n is small relative to it, a sorted
      window of n elements is maintained with bisect.insort (O(n) space);
    * otherwise the whole input is heapified and the n smallest are popped.
    """
    if hasattr(iterable, '__len__') and n * 10 <= len(iterable):
        # For smaller values of n, the bisect method is faster than a minheap.
        # It is also memory efficient, consuming only n elements of space.
        it = iter(iterable)
        result = sorted(islice(it, 0, n))
        if not result:
            return result
        insort = bisect.insort
        pop = result.pop
        los = result[-1]    # los --> Largest of the nsmallest
        for elem in it:
            if los <= elem:
                continue
            insort(result, elem)
            pop()
            los = result[-1]
        return result
    # An alternative approach manifests the whole iterable in memory but
    # saves comparisons by heapifying all at once.  Also, saves time
    # over bisect.insort() which has O(n) data movement time for every
    # insertion.  Finding the n smallest of an m length iterable requires
    #    O(m) + O(n log m) comparisons.
    h = list(iterable)
    heapify(h)
    # Bug fix: materialize with list() so both code paths return a list.
    # On Python 3 a bare map() is a lazy iterator, inconsistent with the
    # bisect branch above (and with the documented sorted(...)[:n] contract).
    return list(map(heappop, repeat(h, min(n, len(h)))))
Beispiel #2
0
def merge(*iterables):
    '''Merge multiple sorted inputs into a single sorted output.

    Similar to sorted(itertools.chain(*iterables)) but returns a generator,
    does not pull the data into memory all at once, and assumes that each of
    the input streams is already sorted (smallest to largest).

    >>> list(merge([1,3,5,7], [0,2,4,8], [5,10,15,20], [], [25]))
    [0, 1, 2, 3, 4, 5, 5, 7, 8, 10, 15, 20, 25]

    '''
    _heappop, _heapreplace, _StopIteration = heappop, heapreplace, StopIteration

    h = []
    h_append = h.append
    for itnum, it in enumerate(map(iter, iterables)):
        try:
            # Bug fix: iterator.next() was removed in Python 3 (PEP 3114);
            # the bound __next__ method is the Python 3 protocol equivalent.
            next = it.__next__
            # Heap entries are [value, stream-number, advance-callable]; the
            # stream number breaks value ties so comparison never reaches the
            # (unorderable) bound method.
            h_append([next(), itnum, next])
        except _StopIteration:
            pass
    heapify(h)

    while 1:
        try:
            while 1:
                v, itnum, next = s = h[0]   # raises IndexError when h is empty
                yield v
                s[0] = next()               # raises StopIteration when exhausted
                _heapreplace(h, s)          # restore heap condition
        except _StopIteration:
            _heappop(h)                     # remove empty iterator
        except IndexError:
            return
Beispiel #3
0
    def merge(self, other):
        """Merge this TaskManager with another.

        After merging, both objects share the same (merged) internal data
        structures, so either one can be used to manage the combined task
        set.

        Raises TypeError when *other* is not a TaskManager.
        """
        if not isinstance(other, TaskManager):
            raise TypeError("'other' must be a TaskManager instance")

        # Fold other's pending tasks and timeouts into our structures,
        # then restore the heap invariant on the combined timeout list.
        self._queue.extend(other._queue)
        self._timeouts.extend(other._timeouts)
        heapq.heapify(self._timeouts)

        # Point other at the merged structures as well: its tasks may keep
        # a reference to other and use it (e.g. to schedule new work in
        # response to an event).
        other._queue = self._queue
        other._timeouts = self._timeouts

        # Finally, merge any custom wait handlers.
        MetaYieldCondition._merge(self, other)
Beispiel #4
0
def djikstra(nodes,links,source,dest):
    """An implementation of Dijkstra's algorithm for our Node and Link classes.

    All edges have weight 1.  Returns the list of Links forming the shortest
    route from *source* to *dest*, or None when *dest* is unreachable.
    (The *links* argument is unused; kept for interface compatibility.)
    """
    route = []
    vertexes = []
    for v in nodes:
        v.set_dist(float("inf"))
        v.set_prev(None)
        heappush(vertexes, v)
    source.set_dist(0)
    heapify(vertexes)  # restore heap order after source's key changed
    while vertexes:
        changed = False
        u = heappop(vertexes)
        if u == dest:
            break  # destination reached; no need to search further
        for v in u.get_links():
            if v.get_enabled():
                alt = u.get_dist() + 1
                target = v.get_target()
                if alt < target.get_dist():
                    target.set_dist(alt)
                    target.set_prev(u)
                    changed = True  # a key inside the heap was mutated
        # Fix: re-heapify once per popped vertex instead of inside the
        # neighbor loop (the original re-ran heapify for every remaining
        # neighbor after the first relaxation, which is redundant: the heap
        # is only consulted again at the next heappop).
        if changed:
            heapify(vertexes)
    # Reconstruct the route by walking predecessor links back from dest.
    if dest.get_dist() == float("inf"):
        return None  # no route exists
    u = dest
    while u.get_prev() is not None:
        v = u.get_prev()
        route.insert(0, v.get_specific_link(u))
        u = v
    return route
Beispiel #5
0
def __old_heapify_markers(markers, image):
    """Build a priority-queue heap seeded with every marker pixel.

    Returns (pq, age): pq is a heapified list of tuples
    (pixel_value, insertion_age, flat_offset, *coordinates) with one entry
    per nonzero marker, and age is the number of entries created.
    """
    pq = []
    stride = __get_strides_for_shape(image.shape)
    # Each nonzero marker coordinate becomes one heap entry; `age` is a
    # monotonically increasing tie-breaker for equal pixel values.
    for age, coords in enumerate(numpy.argwhere(markers != 0)):
        tcoords = tuple(coords)
        flat_offset = numpy.dot(coords, stride)
        pq.append((image.__getitem__(tcoords), age, flat_offset) + tcoords)
    heapify(pq)
    return (pq, len(pq))
Beispiel #6
0
def nlargest(n, iterable):
    """Find the n largest elements in a dataset.

    Equivalent to:  sorted(iterable, reverse=True)[:n]
    """
    stream = iter(iterable)
    # Seed a min-heap with the first n items; its root is the smallest of
    # the current candidate set.
    heap = list(islice(stream, n))
    if not heap:
        return heap
    heapify(heap)
    for item in stream:
        # Push the new item and pop the smallest in a single C-level step,
        # keeping exactly n candidates at all times.
        heappushpop(heap, item)
    heap.sort(reverse=True)
    return heap
Beispiel #7
0
def nlargest(n, iterable):
    """Find the n largest elements in a dataset.

    Equivalent to:  sorted(iterable, reverse=True)[:n]
    """
    source = iter(iterable)
    candidates = list(islice(source, n))
    if not candidates:
        return candidates
    heapify(candidates)
    floor = candidates[0]   # smallest value among the current n largest
    for value in source:
        if value <= floor:
            # Cannot displace any candidate; skip the heap operation.
            continue
        heapreplace(candidates, value)
        floor = candidates[0]
    candidates.sort(reverse=True)
    return candidates
Beispiel #8
0
    def start(self, script):
        """A*-style search from an initial Nodo(script) towards the goal node.

        Expands nodes from a heap-ordered open list, pruning any node whose
        accumulated cost g exceeds len(script).  Returns
        self.devolverRuta(...) on success, or the string
        "Ha ocurrido un Error" if the open list is exhausted.
        """
        self.inicio = Nodo(script)
        self.final = Nodo()
        lista_abierta = []   # open list: frontier nodes, heap-ordered
        lista_cerrada = {}   # closed set: nodes already expanded

        heappush(lista_abierta, self.inicio)

        while(lista_abierta):

            nodo_actual = heappop(lista_abierta)
            # Skip nodes whose cost already exceeds the script length.
            # NOTE(review): this inner heappop can raise IndexError if the
            # heap empties here -- confirm that is intended.
            while(nodo_actual.g > len(script)):
                nodo_actual = heappop(lista_abierta)

            if(nodo_actual == self.final):
                # Bug fix: Python 2 `print` statements are a SyntaxError on
                # Python 3; single-argument print() calls produce identical
                # output on both versions.
                print("Solucion Encontrada")
                self.lista_cerrada = lista_cerrada
                print('Estados recorridos: '+str(len(lista_cerrada)))
                print('Estados a visitar: '+str(len(lista_abierta)))
                print('g: '+str(nodo_actual.g))
                print('h: '+str(nodo_actual.h))
                return self.devolverRuta(nodo_actual)

            lista_cerrada[nodo_actual] = 'eliminado'

            for y in nodo_actual.gethijos():
                if(not y in lista_cerrada):

                    if(y.g <= len(script)):
                        a = self.seek(lista_abierta,y)
                        if (a == None):
                            heappush(lista_abierta,y)
                        elif(nodo_actual.g + 1 < y.g):
                            # NOTE(review): this copies y's fields onto the
                            # node already in the open list; the direction of
                            # the copy (y -> a) looks suspicious for a
                            # "found a better path" update -- confirm.
                            a.g = y.g
                            a.h = y.h
                            a.f = y.f
                            a.padre = y.padre
                            a.script = y.script
                            heapify(lista_abierta)
        return "Ha ocurrido un Error"
Beispiel #9
0
import heapq
from _heapq import heapify, heappop, heappush
"""
给一个数n,表示有n个0,给两个操作 :L 表示将最左边的0变成1,C(index)表示将某个index的1变成0;
比如n =5, operations=["L","L","L","C0","L","C1"]

"""
ans = []
n = 5
x = ["L", "L", "L", "C0", "L", "C1"]

temp = [i for i in range(n)]
heapify(temp)
for j in range(len(x)):
    if x[j] == "L":
        heappop(temp)
    elif "C" in x[j]:
        t = int(x[j].replace("C", ""))
        heappush(temp, t)

ans = [1] * n
for k in temp:
    ans[k] = 0
print(ans)
    # NOTE(review): orphaned fragment -- the enclosing definition's header is
    # outside this chunk; the live statements below use Python 2 `print`
    # syntax and exercise heapq on (float, list) tuples.
    # data = [1, 3, 5, 7, 9, 2, 4, 6, 8, 0]
    # for item in data:
    #     heappush(heap, item)
    # sort = []
    # while heap:
    #     sort.append(heappop(heap))
    # print sort

    # import doctest
    # doctest.testmod()

    # test = [10]
    # if test < data:
    #     print "sdf"
    # test = [0.1, 0.3, 0.5, 0.7, 0.9, 0.2, 0.4, 0.6, 0.8, 0.0]
    test = [ (0.1, [1, 5, 6]), (0.6, [9, 5, 6]) ]
    # tt = [[0.1, [4, 3,10,100]], [0.3, [5,5,1,23,23]], [0.5,3], [0.7, [1,5]], [0.7, [2,5]], [0.9, 1], [0.2, 4], [0.4, 10], [0.6,9], [0.8, 1], [0.0, 124]]
    # Tuples compare lexicographically: first by the float, then (on ties)
    # by the list -- so the heap orders primarily by the float key.
    heapify(test)
    print test
    # heapify(tt)
    heappush(test, (0.0, [1, 3]))
    print test
    # heappop returns the smallest (float, list) pair, unpacked here.
    dist, data = heappop(test)
    print dist, data
    # print tt
    

    # print pow(float(3), 2)


    def __init__(self, sedml_experiment):
        """Build the network simulation from a SED-ML experiment description.

        Parses *sedml_experiment*, creates one simulation per neurone,
        connects the data reporter, and gathers basic network statistics
        (neurone/synapse counts, minimal projection delay).

        :param sedml_experiment: SED-ML experiment object (project type);
            consumed by processSEDMLExperiment -- exact schema not visible here.
        :rtype: None
        :raises: RuntimeError
        """
        self.name = ''
        self.network = None
        self.reportingInterval = 0.0
        self.timeHorizon = 0.0
        self.log = daeLogs.daePythonStdOutLog()
        self.datareporter = pyDataReporting.daeNoOpDataReporter(
        )  #pyDataReporting.daeTCPIPDataReporter()
        self.simulations = []          # one daetoolsPointNeuroneSimulation per neurone
        self.events_heap = []          # spike-event heap, shared with every neurone below
        self.average_firing_rate = {}
        self.raster_plot_data = []
        self.pathParser = CanonicalNameParser()
        self.report_variables = {}
        self.output_plots = []
        self.number_of_equations = 0
        self.number_of_neurones = 0
        self.number_of_synapses = 0
        self.spike_count = 0
        self.minimal_delay = 1.0E10    # large sentinel; minimised over projections below

        # heapify of an empty list is a no-op; presumably establishes the
        # heap invariant for later heappush calls -- TODO confirm
        heapify(self.events_heap)

        # Create daetoolsPointNeuroneNetwork object and the simulation runtime information
        self.processSEDMLExperiment(sedml_experiment)

        # Connect the DataReporter
        simName = self.name + strftime(" [%d.%m.%Y %H.%M.%S]", localtime())
        if not self.datareporter.Connect("", simName):
            raise RuntimeError('Cannot connect the data reporter')

        # Set the random number generators of the daetoolsComponentSetup
        daetoolsComponentSetup._random_number_generators = self.network.randomNumberGenerators

        # Setup neurones
        try:
            # Silence logging while the (potentially large) population of
            # simulations is created; restored in the finally block.
            self.log.Enabled = False

            # NOTE(review): dict.iteritems() is Python 2 only.
            for group_name, group in self.network._groups.iteritems():
                # Track the smallest projection delay across all groups.
                for projection_name, projection in group._projections.iteritems(
                ):
                    if projection.minimal_delay < self.minimal_delay:
                        self.minimal_delay = projection.minimal_delay

                for population_name, population in group._populations.iteritems(
                ):
                    print("Creating simulations for: {0}...".format(
                        population_name))
                    for neurone in population.neurones:
                        simulation = daetoolsPointNeuroneSimulation(
                            neurone, population._parameters, {})
                        # Every neurone shares this object's event heap.
                        neurone.events_heap = self.events_heap
                        self.simulations.append(simulation)
                        self.number_of_neurones += 1
                        for (synapse, params) in neurone.incoming_synapses:
                            self.number_of_synapses += synapse.Nitems

            self.simulations.sort()

            if self.minimal_delay < self.reportingInterval:
                raise RuntimeError(
                    'The minimal delay ({0}s) is greater than the reporting interval ({1}s)'
                    .format(self.minimal_delay, self.reportingInterval))

        # Bare re-raise: effectively only the finally clause matters here.
        except:
            raise

        finally:
            self.log.Enabled = True
Beispiel #12
0
        "/home/krishna/Documents/ZipFileCompressor/Web_Development_with_Node_Express.txt",
        "r") as f:
    lines = f.readlines()
    to_compress = '\n'.join(lines)
# with open("/home/krishna/Documents/ZipFileCompressor/download.jpeg","rb") as image:
#     to_compress=base64.b64decode(image.read())

# to_compress=str(to_compress)
# print(to_compress)
compressed = ""
convert_LZ77(to_compress)
res = list(set(compressed))
f = list(map(lambda x: (compressed.count(x), x), res))
# f=list(map(lambda x: (compressed.count(x),x),list(set(to_compress))))
tree = list(map(lambda x: (x[0], x[1], Tree(x[0], x[1])), f))
_heapq.heapify(tree)
root = construct(tree)
codes = dict()
# print(type(root[0][2].left.value()))
Print(root[0][2], '', codes)
# print(repr(compressed))
bin_array = array("B")
document = encode(compressed, codes)
# print(document)
s = 1000
# sys.stdout.write(s.to_bytes(2,'little'))
# sys.stdout.write(bytes('1',encoding="ascii"))
# sys.stdout.write(bytes('\n',encoding="ascii"))
# code_write(codes)
file_write(document, bin_array)
sys.stdout = op
Beispiel #13
0
 def _remove_timeout(self, item):
     """Drop *item*'s (expiration, item) entry and restore heap order."""
     entry = (item.expiration, item)
     self._timeouts.remove(entry)
     heapq.heapify(self._timeouts)
Beispiel #14
0
 def _remove_timeout(self, item):
     """Remove the timeout entry for *item*, then re-establish the heap."""
     stale = (item.expiration, item)
     self._timeouts.remove(stale)
     heapq.heapify(self._timeouts)
 def __init__(self):
     """Create an empty request map and call-time min-heap."""
     self.call_times = []
     # heapify of an empty list is a no-op; it documents the heap intent.
     heapify(self.call_times)
     self.requests = {}
 def __init__(self):
     """Initialise with no tracked requests and an empty call-time heap."""
     self.requests = {}
     self.call_times = []
     heapify(self.call_times)   # no-op on []; establishes the heap invariant
Beispiel #17
0
 def pop(self):
     """Unlink the node after head, drop its value from the heap, and
     return that value.

     Bug fix: the original annotated the return type as ``None`` although
     the method returns the removed value; the wrong annotation is removed.
     """
     victim = self.head.nxt
     self.head.nxt = victim.nxt
     # list.remove + heapify is O(n); acceptable for small heaps.
     self.hp.remove(victim.v)
     heapify(self.hp)
     return victim.v
    def __init__(self, sedml_experiment):
        """Build the network simulation from a SED-ML experiment description.

        Parses *sedml_experiment*, creates one simulation per neurone,
        connects the data reporter, and gathers basic network statistics
        (neurone/synapse counts, minimal projection delay).

        :param sedml_experiment: SED-ML experiment object (project type);
            consumed by processSEDMLExperiment -- exact schema not visible here.
        :rtype: None
        :raises: RuntimeError
        """
        self.name                = ''
        self.network             = None
        self.reportingInterval   = 0.0
        self.timeHorizon         = 0.0
        self.log                 = daeLogs.daePythonStdOutLog()
        self.datareporter        = pyDataReporting.daeNoOpDataReporter() #pyDataReporting.daeTCPIPDataReporter()
        self.simulations         = []   # one daetoolsPointNeuroneSimulation per neurone
        self.events_heap         = []   # spike-event heap, shared with every neurone below
        self.average_firing_rate = {}
        self.raster_plot_data    = []
        self.pathParser          = CanonicalNameParser()
        self.report_variables    = {}
        self.output_plots        = []
        self.number_of_equations = 0
        self.number_of_neurones  = 0
        self.number_of_synapses  = 0
        self.spike_count         = 0
        self.minimal_delay       = 1.0E10   # large sentinel; minimised over projections below

        # heapify of an empty list is a no-op; presumably establishes the
        # heap invariant for later heappush calls -- TODO confirm
        heapify(self.events_heap)
        
        # Create daetoolsPointNeuroneNetwork object and the simulation runtime information
        self.processSEDMLExperiment(sedml_experiment)
        
        # Connect the DataReporter
        simName = self.name + strftime(" [%d.%m.%Y %H.%M.%S]", localtime())
        if not self.datareporter.Connect("", simName):
            raise RuntimeError('Cannot connect the data reporter')
        
        # Set the random number generators of the daetoolsComponentSetup
        daetoolsComponentSetup._random_number_generators = self.network.randomNumberGenerators

        # Setup neurones
        try:
            # Silence logging while the (potentially large) population of
            # simulations is created; restored in the finally block.
            self.log.Enabled = False
            
            # NOTE(review): dict.iteritems() is Python 2 only.
            for group_name, group in self.network._groups.iteritems():
                # Track the smallest projection delay across all groups.
                for projection_name, projection in group._projections.iteritems():
                    if projection.minimal_delay < self.minimal_delay:
                        self.minimal_delay = projection.minimal_delay
                    
                for population_name, population in group._populations.iteritems():
                    print("Creating simulations for: {0}...".format(population_name))
                    for neurone in population.neurones:
                        simulation = daetoolsPointNeuroneSimulation(neurone, population._parameters, {})
                        # Every neurone shares this object's event heap.
                        neurone.events_heap = self.events_heap
                        self.simulations.append(simulation)
                        self.number_of_neurones += 1
                        for (synapse, params) in neurone.incoming_synapses:
                            self.number_of_synapses += synapse.Nitems
            
            self.simulations.sort()
            
            if self.minimal_delay < self.reportingInterval:
                raise RuntimeError('The minimal delay ({0}s) is greater than the reporting interval ({1}s)'.format(self.minimal_delay, self.reportingInterval))
            
        # Bare re-raise: effectively only the finally clause matters here.
        except:
            raise
        
        finally:
            self.log.Enabled = True