Code Example #1
File: vm.py  Project: Jerryzcn/Theano
    def make_all(self, profiler=None, input_storage=None,
                 output_storage=None,
                ):
        fgraph = self.fgraph
        order = self.schedule(fgraph)
        no_recycling = self.no_recycling

        input_storage, output_storage, storage_map = link.map_storage(
                fgraph, order, input_storage, output_storage)
        compute_map = {}
        for k in storage_map:
            compute_map[k] = [k.owner is None]

        thunks = []
        for node in order:
            try:
                thunks.append(node.op.make_thunk(node,
                                                 storage_map,
                                                 compute_map,
                                                 no_recycling))
                if not hasattr(thunks[-1], 'lazy'):
                    # We don't want every Op maker to have to think about lazy Ops,
                    # so if a thunk doesn't say whether it is lazy, assume it isn't.
                    # If this attribute is missing, later code will crash.
                    thunks[-1].lazy = False
            except Exception as e:
                e.args = ("The following error happened while"
                          " compiling the node", node, "\n") + e.args
                raise
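
A note on the shared conventions: every example on this page first builds storage_map (variable -> one-element storage cell), compute_map (variable -> one-element boolean flag, pre-set to True for variables with no owner, i.e. graph inputs and constants), and thunks, one callable per node in topological order. The following is only a minimal sketch, under the assumption of zero-argument thunks, of how a plain non-lazy evaluation loop could consume those structures; the names are illustrative, not Theano's actual VM classes:

# Illustrative sketch: drive per-node thunks in topological order,
# mirroring the storage_map / compute_map / post_thunk_clear
# conventions visible in the snippets on this page.
def run_thunks(order, thunks, compute_map, post_thunk_clear=None):
    for i, (node, thunk) in enumerate(zip(order, thunks)):
        thunk()                              # compute this node's outputs in place
        for out in node.outputs:
            compute_map[out][0] = True       # mark the outputs as computed
        if post_thunk_clear is not None:
            for cell in post_thunk_clear[i]:
                cell[0] = None               # release storage nobody needs anymore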
Code Example #2
File: vm.py  Project: wycg1984/Theano
    def make_all(
        self,
        profiler=None,
        input_storage=None,
        output_storage=None,
    ):
        fgraph = self.fgraph
        order = self.schedule(fgraph)
        no_recycling = self.no_recycling

        input_storage, output_storage, storage_map = link.map_storage(
            fgraph, order, input_storage, output_storage)
        compute_map = {}
        for k in storage_map:
            compute_map[k] = [k.owner is None]

        thunks = []
        for node in order:
            try:
                thunks.append(
                    node.op.make_thunk(node, storage_map, compute_map,
                                       no_recycling))
                if not hasattr(thunks[-1], 'lazy'):
                    # We don't want every Op maker to have to think about lazy Ops,
                    # so if a thunk doesn't say whether it is lazy, assume it isn't.
                    # If this attribute is missing, later code will crash.
                    thunks[-1].lazy = False
            except Exception as e:
                e.args = ("The following error happened while"
                          " compiling the node", node, "\n") + e.args
                raise
Code Example #3
    def make_all(
        self,
        profiler=None,
        input_storage=None,
        output_storage=None,
    ):
        fgraph = self.fgraph
        order = self.schedule(fgraph)
        no_recycling = self.no_recycling

        input_storage, output_storage, storage_map = link.map_storage(
            fgraph, order, input_storage, output_storage)
        compute_map = {}
        for k in storage_map:
            compute_map[k] = [k.owner is None]

        thunks = []
        for node in order:
            try:
                thunks.append(
                    node.op.make_thunk(node, storage_map, compute_map,
                                       no_recycling))
            except Exception as e:
                e.args = ("The following error happened while"
                          " compiling the node", node, "\n") + e.args
                raise
Code Example #4
File: vm.py  Project: honghaizhu/Theano
    def make_all(
        self,
        profiler=None,
        input_storage=None,
        output_storage=None,
    ):
        expanded_inputs = self.expanded_inputs  # hacky argument-passing workaround
        env = self.env
        order = list(env.toposort())
        no_recycling = self.no_recycling

        input_storage, output_storage, storage_map = link.map_storage(
            env, order, input_storage, output_storage)
        compute_map = {}
        for k in storage_map:
            compute_map[k] = [k.owner is None]

        thunks = [
            node.op.make_thunk(node, storage_map, compute_map, no_recycling)
            for node in order
        ]

        computed, last_user = link.gc_helper(order)
        if self.allow_gc:
            post_thunk_clear = []
            for node in order:
                clear_after_this_thunk = []
                for input in node.inputs:
                    if ((input in computed) and (input not in env.outputs)
                            and (node == last_user[input])):
                        clear_after_this_thunk.append(storage_map[input])
                post_thunk_clear.append(clear_after_this_thunk)
        else:
            post_thunk_clear = None

        # calculate the update_storage map whose keys are shared var inputs
        # and whose values are the outputs that hold their updates

        updated_vars = {}
        if expanded_inputs:
            # Update the inputs that have an update function
            potential_values = list(env.outputs)
            assert len(expanded_inputs) == len(env.inputs)
            for e_input, ivar in reversed(list(zip(expanded_inputs, env.inputs))):
                if e_input.update is not None:
                    updated_vars[ivar] = potential_values.pop()

        vm = self.make_vm(order, thunks, input_storage, output_storage,
                          storage_map, post_thunk_clear, computed, compute_map,
                          updated_vars)

        return (vm, [
            link.Container(input, storage)
            for input, storage in zip(env.inputs, input_storage)
        ], [
            link.Container(output, storage, True)
            for output, storage in zip(env.outputs, output_storage)
        ], thunks, order)
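
Example #4 builds updated_vars by walking the inputs in reverse and popping values off the end of env.outputs, which only pairs things up correctly because the update outputs sit after the regular outputs, in the same order as the inputs they update. A toy restatement of that pairing logic with plain Python values (hypothetical names, no Theano objects involved):

# Toy restatement of the reverse-walk/pop pairing used above to build
# updated_vars: inputs that carry an update expression get matched to
# the outputs appended at the end of the output list.
from collections import namedtuple

FakeInput = namedtuple('FakeInput', ['name', 'update'])

inputs = [FakeInput('x', None), FakeInput('s1', 's1 + 1'), FakeInput('s2', 's2 * 2')]
outputs = ['y', 'new_s1', 'new_s2']      # update outputs come last

updated_vars = {}
potential_values = list(outputs)
for inp in reversed(inputs):
    if inp.update is not None:
        updated_vars[inp.name] = potential_values.pop()

assert updated_vars == {'s2': 'new_s2', 's1': 'new_s1'}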
Code Example #5
File: vm.py  Project: ouais/Theano
    def make_all(self, profiler=None, input_storage=None,
                 output_storage=None,
                ):
        fgraph = self.fgraph
        order = self.schedule(fgraph)
        no_recycling = self.no_recycling

        input_storage, output_storage, storage_map = link.map_storage(
                fgraph, order, input_storage, output_storage)
        compute_map = {}
        for k in storage_map:
            compute_map[k] = [k.owner is None]

        thunks = [node.op.make_thunk(node, storage_map, compute_map,
                                     no_recycling)
                  for node in order]
        for node, thunk in zip(order, thunks):
            thunk.inputs = [storage_map[v] for v in node.inputs]
            thunk.outputs = [storage_map[v] for v in node.outputs]

        computed, last_user = link.gc_helper(order)
        if self.allow_gc:
            post_thunk_clear = []
            for node in order:
                clear_after_this_thunk = []
                for input in node.inputs:
                    if ((input in computed)
                            and (input not in fgraph.outputs)
                            and (node == last_user[input])):
                        clear_after_this_thunk.append(storage_map[input])
                post_thunk_clear.append(clear_after_this_thunk)
        else:
            post_thunk_clear = None

        vm = self.make_vm(order, thunks,
                input_storage, output_storage, storage_map,
                post_thunk_clear,
                computed,
                compute_map,
                self.updated_vars
                )

        return (vm,
                [link.Container(input, storage)
                 for input, storage in zip(fgraph.inputs, input_storage)],
                [link.Container(output, storage, True)
                 for output, storage in zip(fgraph.outputs, output_storage)],
                thunks,
                order)
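
Examples #5 and #6 additionally attach the relevant storage cells directly to each thunk (thunk.inputs / thunk.outputs) and return one link.Container per graph input and output, pairing the variable with its storage cell. A minimal stand-in for that container pattern, showing how a caller could feed values in and read results out through shared one-element cells (illustrative only, not the real link.Container):

# Minimal stand-in for the container pattern returned by make_all:
# writing .data pushes a value into the storage cell the thunks read,
# and reading .data after the VM has run retrieves the computed result.
class FakeContainer(object):
    def __init__(self, variable, storage, readonly=False):
        self.variable = variable
        self.storage = storage            # one-element list shared with the thunks
        self.readonly = readonly

    @property
    def data(self):
        return self.storage[0]

    @data.setter
    def data(self, value):
        if self.readonly:
            raise ValueError("storage is readonly")
        self.storage[0] = value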
Code Example #6
    def make_all(self, profiler=None, input_storage=None,
                 output_storage=None,
                ):
        fgraph = self.fgraph
        order = self.schedule(fgraph)
        no_recycling = self.no_recycling

        input_storage, output_storage, storage_map = link.map_storage(
                fgraph, order, input_storage, output_storage)
        compute_map = {}
        for k in storage_map:
            compute_map[k] = [k.owner is None]

        thunks = [node.op.make_thunk(node, storage_map, compute_map,
                                     no_recycling)
                  for node in order]
        for node, thunk in zip(order, thunks):
            thunk.inputs = [storage_map[v] for v in node.inputs]
            thunk.outputs = [storage_map[v] for v in node.outputs]

        computed, last_user = link.gc_helper(order)
        if self.allow_gc:
            post_thunk_clear = []
            for node in order:
                clear_after_this_thunk = []
                for input in node.inputs:
                    if ((input in computed)
                            and (input not in fgraph.outputs)
                            and (node == last_user[input])):
                        clear_after_this_thunk.append(storage_map[input])
                post_thunk_clear.append(clear_after_this_thunk)
        else:
            post_thunk_clear = None

        vm = self.make_vm(order, thunks,
                input_storage, output_storage, storage_map,
                post_thunk_clear,
                computed,
                compute_map,
                self.updated_vars
                )

        return (vm,
                [link.Container(input, storage)
                 for input, storage in zip(fgraph.inputs, input_storage)],
                [link.Container(output, storage, True)
                 for output, storage in zip(fgraph.outputs, output_storage)],
                thunks,
                order)
Code Example #7
    def make_all(
        self,
        profiler=None,
        input_storage=None,
        output_storage=None,
    ):
        fgraph = self.fgraph
        order = self.schedule(fgraph)
        no_recycling = self.no_recycling

        input_storage, output_storage, storage_map = link.map_storage(
            fgraph, order, input_storage, output_storage)
        compute_map = {}
        for k in storage_map:
            compute_map[k] = [k.owner is None]

        thunks = []

        # Collect Reallocation Info
        compute_map_re = defaultdict(lambda: [0])
        for var in fgraph.inputs:
            compute_map_re[var][0] = 1

        if getattr(fgraph.profile, 'dependencies', None):
            dependencies = getattr(fgraph.profile, 'dependencies')
        else:
            dependencies = self.compute_gc_dependencies(storage_map)

        reallocated_info = calculate_reallocate_info(order, fgraph,
                                                     storage_map,
                                                     compute_map_re,
                                                     dependencies)

        for node in order:
            try:
                thunks.append(
                    node.op.make_thunk(node, storage_map, compute_map,
                                       no_recycling))
                if not hasattr(thunks[-1], 'lazy'):
                    # We don't want every Op maker to have to think about lazy Ops,
                    # so if a thunk doesn't say whether it is lazy, assume it isn't.
                    # If this attribute is missing, later code will crash.
                    thunks[-1].lazy = False
            except Exception as e:
                e.args = ("The following error happened while"
                          " compiling the node", node, "\n") + e.args
                raise
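
Examples #7, #8 and #10 track computed-ness for the reallocation pass with defaultdict(lambda: [0]): every variable lazily receives a fresh one-element list that serves as a mutable flag cell, the same convention compute_map itself uses. A small self-contained demonstration of that pattern:

from collections import defaultdict

# Each missing key gets its own fresh one-element list, which can be
# mutated in place and shared by reference, like a compute_map cell.
compute_map_re = defaultdict(lambda: [0])

cell = compute_map_re['x']           # created on first access as [0]
cell[0] = 1                          # mark 'x' as computed
assert compute_map_re['x'] == [1]
assert compute_map_re['y'] == [0]    # untouched keys start uncomputed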
Code Example #8
File: vm.py  Project: LEEKYOUNGHUN/Theano
    def make_all(self, profiler=None, input_storage=None,
                 output_storage=None,
                 ):
        fgraph = self.fgraph
        order = self.schedule(fgraph)
        no_recycling = self.no_recycling

        input_storage, output_storage, storage_map = link.map_storage(
            fgraph, order, input_storage, output_storage)
        compute_map = {}
        for k in storage_map:
            compute_map[k] = [k.owner is None]

        thunks = []

        # Collect Reallocation Info
        compute_map_re = defaultdict(lambda: [0])
        for var in fgraph.inputs:
            compute_map_re[var][0] = 1

        if getattr(fgraph.profile, 'dependencies', None):
            dependencies = getattr(fgraph.profile, 'dependencies')
        else:
            dependencies = self.compute_gc_dependencies(storage_map)

        reallocated_info = calculate_reallocate_info(order, fgraph,
                                                     storage_map,
                                                     compute_map_re,
                                                     dependencies)

        for node in order:
            try:
                thunks.append(node.op.make_thunk(node,
                                                 storage_map,
                                                 compute_map,
                                                 no_recycling))
                if not hasattr(thunks[-1], 'lazy'):
                    # We don't want every Op maker to have to think about lazy Ops,
                    # so if a thunk doesn't say whether it is lazy, assume it isn't.
                    # If this attribute is missing, later code will crash.
                    thunks[-1].lazy = False
            except Exception as e:
                e.args = ("The following error happened while"
                          " compiling the node", node, "\n") + e.args
                raise
Code Example #9
File: vm.py  Project: bin2000/Theano
    def make_all(self, profiler=None, input_storage=None,
                 output_storage=None,
                ):
        fgraph = self.fgraph
        order = self.schedule(fgraph)
        no_recycling = self.no_recycling

        input_storage, output_storage, storage_map = link.map_storage(
                fgraph, order, input_storage, output_storage)
        compute_map = {}
        for k in storage_map:
            compute_map[k] = [k.owner is None]

        thunks = []
        for node in order:
            try:
                thunks.append(node.op.make_thunk(node,
                                                 storage_map,
                                                 compute_map,
                                                 no_recycling))
            except Exception as e:
                e.args = ("The following error happened while"
                          " compiling the node", node, "\n") + e.args
                raise
Code Example #10
    def make_all(self, profiler=None, input_storage=None,
                 output_storage=None,
                 ):
        fgraph = self.fgraph
        order = self.schedule(fgraph)
        no_recycling = self.no_recycling

        input_storage, output_storage, storage_map = link.map_storage(
            fgraph, order, input_storage, output_storage)
        compute_map = {}
        for k in storage_map:
            compute_map[k] = [k.owner is None]

        thunks = []

        # Collect Reallocation Info
        compute_map_re = defaultdict(lambda: [0])
        for var in fgraph.inputs:
            compute_map_re[var][0] = 1

        if getattr(fgraph.profile, 'dependencies', None):
            dependencies = getattr(fgraph.profile, 'dependencies')
        else:
            dependencies = self.compute_gc_dependencies(storage_map)

        reallocated_info = calculate_reallocate_info(order, fgraph,
                                                     storage_map,
                                                     compute_map_re,
                                                     dependencies)

        for node in order:
            try:
                thunks.append(node.op.make_thunk(node,
                                                 storage_map,
                                                 compute_map,
                                                 no_recycling))
                if not hasattr(thunks[-1], 'lazy'):
                    # We don't want every Op maker to have to think about lazy Ops,
                    # so if a thunk doesn't say whether it is lazy, assume it isn't.
                    # If this attribute is missing, later code will crash.
                    thunks[-1].lazy = False
            except Exception as e:
                e.args = ("The following error happened while"
                          " compiling the node", node, "\n") + e.args
                raise
        for node, thunk in zip(order, thunks):
            thunk.inputs = [storage_map[v] for v in node.inputs]
            thunk.outputs = [storage_map[v] for v in node.outputs]

        lazy = self.lazy
        if lazy is None:
            lazy = config.vm.lazy
        if lazy is None:
            lazy = not all([(not th.lazy) for th in thunks])
        if not (lazy or (config.profile and config.profile_memory) or self.use_cloop or self.callback):
            for pair in reallocated_info.values():
                storage_map[pair[1]] = storage_map[pair[0]]

        computed, last_user = link.gc_helper(order)
        if self.allow_gc:
            post_thunk_clear = []
            for node in order:
                clear_after_this_thunk = []
                for input in node.inputs:
                    if ((input in computed)
                            and (input not in fgraph.outputs)
                            and (node == last_user[input])
                            and input not in reallocated_info.keys()):
                        clear_after_this_thunk.append(storage_map[input])
                post_thunk_clear.append(clear_after_this_thunk)
        else:
            post_thunk_clear = None

        vm = self.make_vm(order, thunks,
                          input_storage, output_storage, storage_map,
                          post_thunk_clear,
                          computed,
                          compute_map,
                          self.updated_vars,
                          )

        vm.storage_map = storage_map

        return (vm,
                [link.Container(input, storage)
                 for input, storage in zip(fgraph.inputs, input_storage)],
                [link.Container(output, storage, True)
                 for output, storage in zip(fgraph.outputs, output_storage)],
                thunks,
                order)
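
Example #10 resolves its lazy flag in three stages: an explicit setting on the linker wins, then the config.vm.lazy option, and only if both are None does it fall back to "lazy iff any thunk is lazy" (the double negation not all([(not th.lazy) ...]) is equivalent to any(th.lazy ...)). A standalone restatement of that cascade (hypothetical helper name, not part of Theano):

def resolve_lazy(linker_lazy, config_lazy, thunks):
    # Hypothetical helper restating example #10's three-stage decision.
    if linker_lazy is not None:            # an explicit setting on the linker wins
        return linker_lazy
    if config_lazy is not None:            # then the configuration option
        return config_lazy
    return any(th.lazy for th in thunks)   # otherwise: lazy iff any thunk is lazy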
Code Example #11
File: vm.py  Project: olivierverdier/Theano
    def make_all(self, profiler=None, input_storage=None,
                 output_storage=None,
                 ):
        expanded_inputs = self.expanded_inputs  # hacky argument-passing workaround
        env = self.env
        order = list(env.toposort())
        no_recycling = self.no_recycling

        input_storage, output_storage, storage_map = link.map_storage(
                env, order, input_storage, output_storage)
        compute_map = {}
        for k in storage_map:
            compute_map[k] = [k.owner is None]

        thunks = [node.op.make_thunk(node, storage_map, compute_map,
                                     no_recycling)
                  for node in order]

        computed, last_user = link.gc_helper(order)
        if self.allow_gc:
            post_thunk_clear = []
            for node in order:
                clear_after_this_thunk = []
                for input in node.inputs:
                    if ((input in computed)
                            and (input not in env.outputs)
                            and (node == last_user[input])):
                        clear_after_this_thunk.append(storage_map[input])
                post_thunk_clear.append(clear_after_this_thunk)
        else:
            post_thunk_clear = None

        # calculate the update_storage map whose keys are shared var inputs
        # and whose values are the outputs that hold their updates

        updated_vars = {}
        if expanded_inputs:
            # Update the inputs that have an update function
            potential_values = list(env.outputs)
            assert len(expanded_inputs) == len(env.inputs)
            for e_input, ivar in reversed(list(zip(expanded_inputs, env.inputs))):
                if e_input.update is not None:
                    updated_vars[ivar] = potential_values.pop()

        vm = self.make_vm(order, thunks,
                input_storage, output_storage, storage_map,
                post_thunk_clear,
                computed,
                compute_map,
                updated_vars
                )

        return (vm,
                [link.Container(input, storage)
                    for input, storage in zip(env.inputs, input_storage)],
                [link.Container(output, storage, True)
                    for output, storage in zip(env.outputs, output_storage)],
                thunks,
                order)
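
Examples #4, #5, #6, #10 and #11 all derive post_thunk_clear the same way: link.gc_helper(order) reports which node is the last consumer of each variable, and that variable's storage cell is scheduled to be cleared right after the last consumer's thunk runs, unless the variable is also a graph output (or, in example #10, part of a reallocation pair). A toy version of the last-user computation (illustrative stand-in, not link.gc_helper itself):

def last_user_map(order):
    # Map each consumed variable to the last node in 'order' that uses it;
    # later nodes simply overwrite earlier entries.
    last_user = {}
    for node in order:
        for var in node.inputs:
            last_user[var] = node
    return last_user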