Example #1
0
 def __init__(self, name: str = "main", lvl: int = logging.ERROR):
     """Create an empty graph container.

     Parameters
     ----------
     name : str
         Name of this graph.
     lvl : int
         Logging level for the graph's logger.
     """
     self.name = name
     self.log = make_logger(lvl, "DeltaGraph")
     # Real nodes added so far, plus named placeholders awaiting
     # specification.
     self.nodes: List[RealNode] = []
     self.placeholders: Dict[str, PlaceholderNode] = {}
     # Original sys.displayhook, saved here so doctest runs can silence
     # unwanted output and restore the hook afterwards.
     self._org_displayhook = None
Example #2
0
    def __init__(self,
                 tb_num_iter: int = None,
                 name: str = None,
                 lvl: int = logging.ERROR,
                 vcd_name: str = None,
                 generics: dict = None,
                 node_template: NodeTemplate = None,
                 tags: List[str] = None):
        """Elaborate the migen body, wire up its ports and register the
        block with a :py:class:`NodeTemplate`.

        Parameters
        ----------
        tb_num_iter : int
            Number of iterations for the stimulus (testbench) generator.
        name : str
            Block name; defaults to the class name.
        lvl : int
            Logging level.
        vcd_name : str
            Optional VCD dump file name, stored for the simulator.
        generics : dict
            Optional generic parameters stored on the instance.
        node_template : NodeTemplate
            Existing template to merge with, or ``None`` to create one.
        tags : List[str]
            Access tags; defaults to an empty list. ``None`` is used as
            the default instead of ``[]`` so one mutable list is not
            shared between every instance.
        """
        if tags is None:
            tags = []
        if name is None:
            name = type(self).__name__
        super().__init__(name, Latency(clocks=1), lvl, tags)

        self.module_name = self.name
        self.log = make_logger(lvl, self.name)
        self.vcd_name = vcd_name

        if generics is not None:
            self.generics = generics

        # Predefining values to satisfy linting check.
        self.submodules = None
        self.in_buffer = None

        # None for out buffer implies there are no out-ports
        self.out_buffer = None
        self.specials = None
        self.comb = None
        self.sync = None
        self.debug_signals = {}

        self._dut = migen.Module()
        self._dut.in_ports = {}
        self._dut.out_ports = {}

        # Elaborate the user-defined body on the bare migen module.
        self.__class__.migen_body(self=self._dut, template=self)
        self.rename_port_signals()

        if self._dut.out_ports:
            # Make all outputs named, as it's needed for verilog
            self._outputs, self._ForkedOutput = make_forked_return(
                {name: type_ for name, (_, type_)
                 in self._dut.out_ports.items()}
            )
        else:
            self._outputs = Void

        # define a stimulus generator
        self._tb = self.tb_generator(tb_num_iter)

        # set up a simulator and perform run-once elaboration
        self._sim_object = None
        atexit.register(self.cleanup)

        # Merge with or create a new NodeTemplate
        inputs = OrderedDict()
        # Renamed loop variable so it no longer shadows the `name` param.
        for port_name, (_, type_) in self._dut.in_ports.items():
            inputs[port_name] = type_
        NodeTemplate.merge_migenblock(node_template, self, inputs,
                                      self._outputs)
Example #3
0
    def __init__(self,
                 out_port: OutPort,
                 maxsize: int = 16,
                 queue_interval: float = 1.0):
        """Bounded queue carrying messages produced by ``out_port``.

        Parameters
        ----------
        out_port : OutPort
            Source port whose messages this queue carries.
        maxsize : int
            Maximum number of queued items, forwarded to the base queue.
        queue_interval : float
            Polling interval used by this queue.
        """
        super().__init__(maxsize=maxsize)
        # Source port and the type of its messages.
        self._src = out_port
        self._type = out_port.port_type
        # Whether the destination in-port is marked optional.
        self.optional = out_port.destination.is_optional
        self._queue_interval = queue_interval
        self._log = make_logger(logging.WARNING, f"Queue {out_port.name}")
Example #4
0
    def __init__(self,
                 graph,
                 bodies: typing.List[Body],
                 inputs: typing.OrderedDict[str,
                                            typing.Union[BaseDeltaType,
                                                         Optional]] = None,
                 outputs: typing.OrderedDict[str, BaseDeltaType] = None,
                 name: typing.Optional[str] = None,
                 lvl: int = logging.ERROR,
                 is_autogenerated: bool = False):
        """Create the node and register it with its parent graph.

        Parameters
        ----------
        graph
            Parent graph; the node registers itself with it immediately.
        bodies : typing.List[Body]
            Candidate bodies; if exactly one is given it is selected now.
        inputs : typing.OrderedDict
            Node inputs keyed by name; defaults to an empty OrderedDict.
        outputs : typing.OrderedDict
            Node outputs keyed by name; defaults to an empty OrderedDict.
        name : typing.Optional[str]
            Base name; a unique index is appended.
        lvl : int
            Logging level.
        is_autogenerated : bool
            Whether this node was generated automatically.
        """
        # Register with the parent graph straight away.
        self.graph = graph
        self.graph.add_node(self)

        self.is_autogenerated = is_autogenerated

        # With a single candidate the body is selected immediately;
        # otherwise the choice is deferred.
        self.bodies = bodies
        self._body = bodies[0] if len(self.bodies) == 1 else None

        self.inputs = typing.OrderedDict() if inputs is None else inputs
        self.outputs = typing.OrderedDict() if outputs is None else outputs

        # A unique index makes the node name unique within the graph.
        idx = RealNode.get_next_index()
        base = "node" if name is None else name
        self._name = (base, idx)

        # Ports in/out to this node. in_ports is always kept in the same
        # order as inputs; out_ports in the same order as outputs.
        self.in_ports: typing.List[InPort] = []
        self.out_ports: typing.List[OutPort] = []

        self.log = make_logger(lvl,
                               f"{self.__class__.__name__} {self.full_name}")

        # Logical clock; see MessageLog for detail.
        self._clock = 0

        # Out-port names must not shadow existing attributes/methods.
        self.out_names = list(self.outputs.keys())
        for out_name in self.out_names:
            if out_name in dir(self):
                raise NameError("Invalid out name: " + out_name +
                                " for node " + self.full_name)
Example #5
0
    def __init__(self,
                 graph,
                 bodies: List[Body],
                 inputs: OrderedDict[str, Union[BaseDeltaType,
                                                DOptional]] = None,
                 outputs: BaseDeltaType = None,
                 name: Optional[str] = None,
                 lvl: int = logging.ERROR,
                 is_autogenerated: bool = False):
        """Create the node and register it with its parent graph.

        Parameters
        ----------
        graph
            Parent graph; the node registers itself with it immediately.
        bodies : List[Body]
            Candidate bodies; if exactly one is given it is selected now.
        inputs : OrderedDict[str, Union[BaseDeltaType, DOptional]]
            Node inputs keyed by name. Defaults to an empty dict; ``None``
            is the default instead of ``{}`` so one mutable dict is not
            shared between every instance created with the default.
        outputs : BaseDeltaType
            Output type, possibly a ForkedReturn.
        name : Optional[str]
            Base name; a unique index suffix is appended.
        lvl : int
            Logging level.
        is_autogenerated : bool
            Whether this node was generated automatically.
        """
        self.graph = graph
        self.graph.add_node(self)  # Registering self with parent graph

        self.is_autogenerated = is_autogenerated

        self._body = None
        self.bodies = bodies

        if len(self.bodies) == 1:
            self._body = bodies[0]

        # Fresh dict per instance (the previous `= {}` default was shared).
        self.inputs = inputs if inputs is not None else {}
        self.outputs = outputs

        idx = RealNode.get_next_index()
        if name is None:
            # set my name to the next unique available name
            self._name = f"node_{idx}"
        else:
            self._name = f"{name}_{idx}"

        # Ports in/out to this node
        self.in_ports: Dict[NamespacedName, InPort] = {}
        self.out_ports: List[OutPort] = []

        self.log = make_logger(lvl, f"{self.__class__.__name__} {self._name}")

        # See MessageLog for detail
        self._clock = 0

        # For forked outputs each fork name must not shadow an existing
        # attribute/method of the node.
        self.fork_names = None
        if isinstance(self.outputs, ForkedReturn):
            self.fork_names = self.outputs.keys

            for fork_name in self.fork_names:
                if fork_name in dir(self):
                    raise NameError("Invalid fork name: " + fork_name +
                                    " for node " + self.name)
Example #6
0
    def __init__(self,
                 out_port: OutPort,
                 maxsize: int = 16,
                 queue_interval: float = 1.0):
        """Bounded queue carrying messages produced by ``out_port``.

        Parameters
        ----------
        out_port : OutPort
            Source port whose messages this queue carries.
        maxsize : int
            Maximum number of queued items, forwarded to the base queue.
        queue_interval : float
            Polling interval used by this queue.
        """
        super().__init__(maxsize=maxsize)
        self._src = out_port
        self._log = make_logger(logging.WARNING, f"Queue {out_port.port_name}")
        self._queue_interval = queue_interval
        self.optional = out_port.destination.is_optional

        # n_index is either the fork index or None, so the original
        # if/else collapses to a single assignment.
        self._index = out_port.port_name.n_index

        # For forked returns, narrow the type down to this fork's element.
        self._type = out_port.port_type
        if isinstance(self._type, ForkedReturn):
            self._type = self._type.elem_dict[self._index]
Example #7
0
    def __init__(self,
                 graph: DeltaGraph,
                 lvl: int = logging.ERROR,
                 msg_lvl: int = logging.ERROR,
                 switchinterval: float = None,
                 queue_size: int = 16,
                 queue_interval: float = 1.0):
        """Prepare the simulator: check the graph, build the inter-node
        queues and the worker-thread bookkeeping.

        Parameters
        ----------
        graph : DeltaGraph
            Graph to simulate.
        lvl : int
            Logging level for the simulator itself.
        msg_lvl : int
            Logging level for the message log.
        switchinterval : float
            If given, passed to ``sys.setswitchinterval``.
        queue_size : int
            Maximum size of each inter-node queue.
        queue_interval : float
            Polling interval used by the queues.
        """
        self.log = make_logger(lvl, "DeltaPySimulator")
        self.msg_log = MessageLog(msg_lvl)
        self.set_excepthook()

        # Speed optimization: tune the interpreter's thread switching.
        if switchinterval is not None:
            sys.setswitchinterval(switchinterval)
        self.queue_size = queue_size
        self.queue_interval = queue_interval

        # Split the graph automatically if needed and verify it before
        # any queues are wired up.
        self.graph = graph
        self.graph.do_automatic_splitting()
        self.graph.check()
        self.add_message_log()

        # One (initially empty) queue map per node, in each direction;
        # all keys must exist before the queues themselves are created.
        self.in_queues: Dict[str, Dict[str, DeltaQueue]] = {}
        self.out_queues: Dict[str, Dict[str, DeltaQueue]] = {}
        for node in self.graph.nodes:
            self.in_queues[node.name] = {}
            self.out_queues[node.name] = {}
        for node in self.graph.nodes:
            self._create_io_queues(node)

        # Signal used to stop child threads.
        self.sig_stop = threading.Event()

        for node in self.graph.nodes:
            node.set_communications(self)

        # Child threads for the nodes' workers.
        self.threads: Dict[str, threading.Thread] = {}
        self.running = False
from ._node_classes.latency import Latency
from ._node_classes.node_bodies import Body
from ._node_classes.real_nodes import (PythonNode,
                                       as_node,
                                       get_func_inputs_outputs,
                                       inputs_as_delta_types,
                                       outputs_as_delta_types)
from ._body_templates import (BodyTemplate,
                              FuncBodyTemplate,
                              InteractiveBodyTemplate,
                              MethodBodyTemplate)

if TYPE_CHECKING:
    from ._delta_graph import DeltaGraph

log = make_logger(logging.WARNING, "Node Templates")


class NodeTemplate():
    """NodeTemplates are recipes for how to create a specific node.
    They have a list of :py:class:`BodyTemplate`s that represent how to
    construct a number of different bodies for the node. Each
    ``BodyTemplate`` has a call method to allow creation of the
    node. This node will be given bodies from all valid ``BodyTemplates``.

    Parameters
    ----------
    name : str
        Default name for the nodes created using this node template
    inputs : Union[List[Tuple[str, Type]],
                      OrderedDict[str, Type]],
from deltalanguage.logging import make_logger
from .._decorators import get_func_inputs_outputs
from .._node_templates import NodeTemplate

from .abstract_node import ForkedNode, ProxyNode
from .node_bodies import PyInteractiveBody
from .real_nodes import as_node, PythonNode

if TYPE_CHECKING:
    from .._node_templates import InteractiveBodyTemplate
    from .._delta_graph import DeltaGraph
    from .abstract_node import AbstractNode
    from .port_classes import InPort

log = make_logger(logging.WARNING, "PROXY NODES")


class PlaceholderNode(ProxyNode):
    """Node class to represent nodes that have yet to be specified.

    These nodes have to be specified in the context of :py:class:`DeltaGraph`,
    their main use case is creation of cyclic dependencies in the graph,
    i.e. loops.

    Use :py:class:`placeholder_node_factory` for construction in the
    context of :py:class:`DeltaGraph`.

    Parameters
    ----------
    graph : DeltaGraph
Example #10
0
 def __init__(self, name=None, lvl: int = logging.ERROR):
     """Create an empty graph container.

     Parameters
     ----------
     name
         Name of this graph, or ``None``.
     lvl : int
         Logging level for the graph's logger.
     """
     self.name = name
     self.log = make_logger(lvl, "DeltaGraph")
     # Real nodes added so far, plus named placeholders awaiting
     # specification.
     self.nodes: List[RealNode] = []
     self.placeholders: Dict[str, PlaceholderNode] = {}
Example #11
0
"""Classes to represent different node bodies a Deltaflow node could represent.
"""
from abc import ABC, abstractmethod
from typing import Callable, List

import dill
import dis
import logging

from deltalanguage.logging import make_logger
from deltalanguage.data_types import DeltaIOError
from .latency import Latency


log = make_logger(logging.WARNING, "Bodies")


class Body(ABC):

    def __init__(self,
                 latency: Latency = Latency(time=300),
                 tags: List[str] = None):
        """
        Parameters
        ----------
        latency : Latency
            Estimated time this body takes to run
        tags : List[str]
            List of strings to be added as access tags.
        """
        tags = tags if tags is not None else []
Example #12
0
"""

from abc import ABC, abstractmethod
import logging
import struct
from typing import (Any, Dict, Iterable, List, NamedTuple, Tuple, Type, Union)

import attr
import numpy as np

from deltalanguage.logging import make_logger

from ._exceptions import DeltaTypeError
from ._special import DSize, Void

logger = make_logger(logging.WARNING, "Data Types")


class BaseDeltaType(ABC):
    """The base Deltaflow type, of which other types are derived.

    Attributes
    ----------
    size : DSize
        Full size of the data type. There are several cases:

        * for primitive types it's the number of bits
        * for compound types it's the sum of all componenets' sizes
        * for :py:class:`DUnion` that is the the size of the largest element
          PLUS 1 byte that stores which particular data type is encoded
    """