Example 1
def _list_field(func):
  """A decorator for methods corresponding to list-valued fields of an `ExtensibleAlgebraic`.

  The result is also wrapped in `abstractproperty`.
  """
  wrapped = abstractproperty(func)
  wrapped._field_type = 'list'
  return wrapped
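
A minimal usage sketch (assuming the `_list_field` decorator above is in scope; `ExtensibleAlgebraic` as written here and the field name are placeholders, not the original project's code):

from abc import ABCMeta

class ExtensibleAlgebraic(metaclass=ABCMeta):
    @_list_field
    def dependencies(self):
        """A list-valued field that concrete subclasses must supply."""

class Concrete(ExtensibleAlgebraic):
    # Overriding the abstract property with a plain list satisfies the ABC.
    dependencies = ['a', 'b']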
Example 2
File: abc.py Project: Answeror/aip
    def __new__(meta, name, bases, attr):
        if "FIELDS" in attr:
            fields = attr["FIELDS"]
            for field in fields:
                if type(field) is tuple:
                    key = field[0]
                else:
                    key = field
                attr[key] = abc.abstractproperty(lambda self: None)
        return abc.ABCMeta.__new__(meta, name, bases, attr)
Example 3
    def __new__(mcs, class_name, class_bases, class_dict):
        # inherit class docstring: the docstring is constructed by traversing
        # the mro for the class and merging their docstrings, with each next
        # docstring serving as the 'parent' and the accumulated docstring
        # serving as the 'child'
        this_doc = class_dict.get("__doc__", None)
        for mro_cls in (mro_cls for base in class_bases for mro_cls in base.mro()):
            prnt_cls_doc = mro_cls.__doc__
            if prnt_cls_doc is not None:
                if prnt_cls_doc == "The most base type":
                    prnt_cls_doc = None
            this_doc = mcs.class_doc_inherit(prnt_cls_doc, this_doc)

        class_dict["__doc__"] = this_doc

        # inherit docstring for method, static-method, class-method, abstract-method, decorated-method, and property
        for attr, attribute in class_dict.items():
            is_doc_type = isinstance(attribute, (FunctionType, MethodType, classmethod, staticmethod, property))

            if (attr.startswith("__") and attr.endswith("__")) or not is_doc_type:
                continue

            is_static_or_class = isinstance(attribute, (staticmethod, classmethod))
            child_attr = attribute if not is_static_or_class else attribute.__func__

            prnt_attr_doc = None
            for mro_cls in (mro_cls for base in class_bases
                            for mro_cls in base.mro() if hasattr(mro_cls, attr)):
                prnt_attr_doc = getattr(mro_cls, attr).__doc__

                if prnt_attr_doc is not None:
                    break

            if prnt_attr_doc is None:
                continue

            doc = mcs.attr_doc_inherit(prnt_attr_doc, child_attr.__doc__)
            try:
                child_attr.__doc__ = doc
            # property.__doc__ is read-only in Python 2 (TypeError), 3.3 - 3.4 (AttributeError)
            except (TypeError, AttributeError) as err:
                if type(child_attr) in (property, abstractproperty):
                    new_prop = property(fget=child_attr.fget,
                                        fset=child_attr.fset,
                                        fdel=child_attr.fdel,
                                        doc=doc)
                    if isinstance(child_attr, abstractproperty):
                        new_prop = abstractproperty(new_prop)
                    class_dict[attr] = new_prop
                else:
                    raise type(err)(err)

        return type.__new__(mcs, class_name, class_bases, class_dict)
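
The metaclass above relies on `class_doc_inherit`/`attr_doc_inherit` hooks that are not shown. Below is a self-contained sketch of the same idea with a deliberately trivial merge rule (keep the child's docstring if it has one, otherwise copy the parent's); all names are illustrative, not the original project's API:

from types import FunctionType

class TrivialDocInheritMeta(type):
    @staticmethod
    def attr_doc_inherit(prnt_doc, child_doc):
        # trivial merge rule: the child's docstring wins if present
        return child_doc if child_doc is not None else prnt_doc

    def __new__(mcs, name, bases, namespace):
        for attr, value in namespace.items():
            if isinstance(value, FunctionType) and value.__doc__ is None:
                for base in bases:
                    parent = getattr(base, attr, None)
                    if parent is not None and parent.__doc__:
                        value.__doc__ = mcs.attr_doc_inherit(parent.__doc__,
                                                             value.__doc__)
                        break
        return super().__new__(mcs, name, bases, namespace)

class Parent(metaclass=TrivialDocInheritMeta):
    def greet(self):
        """Say hello."""

class Child(Parent):
    def greet(self):
        pass

assert Child.greet.__doc__ == "Say hello."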
Example 4
    def __new__(mcs, class_name, class_bases, class_dict):

        # inherit class docstring
        clsdoc = class_dict.get("__doc__", None)
        prnt_cls_doc = None
        for mro_cls in (mro_cls for base in class_bases for mro_cls in base.mro()):
            prnt_cls_doc = mro_cls.__doc__
            if prnt_cls_doc is not None:
                if prnt_cls_doc == "The most base type":
                    prnt_cls_doc = None
                break
        class_dict["__doc__"] = mcs.class_doc_inherit(prnt_cls_doc, clsdoc)

        # inherit docstring for method, staticmethod, classmethod, abstractmethod, decorated method, and property
        for attr, attribute in class_dict.items():
            is_doc_type = isinstance(attribute, (FunctionType, classmethod, staticmethod, property))

            if (attr.startswith("__") and attr.endswith("__")) or not is_doc_type:
                continue

            is_static_or_class = isinstance(attribute, (staticmethod, classmethod))
            child_attr = attribute if not is_static_or_class else attribute.__func__

            prnt_attr_doc = None
            for mro_cls in (mro_cls for base in class_bases
                            for mro_cls in base.mro() if hasattr(mro_cls, attr)):
                prnt_attr_doc = getattr(mro_cls, attr).__doc__

                if prnt_attr_doc is not None:
                    break

            if prnt_attr_doc is None:
                continue

            try:
                child_attr.__doc__ = mcs.attr_doc_inherit(prnt_attr_doc, child_attr.__doc__)
            except TypeError as err:
                if isinstance(child_attr, property):  # property.__doc__ is read-only in Python 2
                    new_prop = property(fget=child_attr.fget,
                                        fset=child_attr.fset,
                                        fdel=child_attr.fdel,
                                        doc=mcs.attr_doc_inherit(prnt_attr_doc, child_attr.__doc__))
                    if isinstance(child_attr, abstractproperty):
                        new_prop = abstractproperty(new_prop)
                    class_dict[attr] = new_prop
                else:
                    raise TypeError(err)

        return type.__new__(mcs, class_name, class_bases, class_dict)
Example 5
class BuildTest(object):
    excluded_images = abc.abstractproperty()

    def setUp(self):
        super(BuildTest, self).setUp()
        self.useFixture(log_fixture.SetLogLevel([__name__],
                                                logging.logging.INFO))

        self.threads = multiprocessing.cpu_count()
        if self.threads < 4:
            self.threads = 4

        self.build_args = [__name__, "--debug", '--threads', str(self.threads)]

    @testtools.skipUnless(os.environ.get('DOCKER_BUILD_TEST'),
                          'Skip the docker build test')
    def runTest(self):
        with patch.object(sys, 'argv', self.build_args):
            LOG.info("Running with args %s", self.build_args)
            (bad_results, good_results, unmatched_results,
             skipped_results) = build.run_build()

        failures = 0
        for image, result in bad_results.items():
            if image in self.excluded_images:
                if result == 'error':
                    continue
                failures = failures + 1
                LOG.warning(">>> Expected image '%s' to fail, please update"
                            " the excluded_images in source file above if the"
                            " image build has been fixed.", image)
            else:
                if result != 'error':
                    continue
                failures = failures + 1
                LOG.critical(">>> Expected image '%s' to succeed!", image)

        for image in unmatched_results.keys():
            LOG.warning(">>> Image '%s' was not matched", image)

        self.assertEqual(failures, 0, "%d failure(s) occurred" % failures)
Example 6
class SecondaryPeril(metaclass=abc.ABCMeta):
    """
    Abstract base class. Subclasses of SecondaryPeril have:

    1. a ``__init__`` method with global parameters coming from the job.ini
    2. a ``prepare(sitecol)`` method that modifies the site collection, called
    in the ``pre_execute`` phase, i.e. before running the calculation
    3. a ``compute(mag, imt, gmf, sites)`` method called during the calculation
    of the GMFs; gmf is an array of length N1 and sites is a (filtered)
    site collection of length N1 (normally N1 < N, the total number of sites)
    4. an ``outputs`` attribute which is a list of column names which will be
    added to the gmf_data array generated by the ground motion calculator

    The ``compute`` method will return a tuple with ``O`` arrays where ``O``
    is the number of outputs.
    """
    @classmethod
    def instantiate(cls, secondary_perils, sec_peril_params):
        inst = []
        for clsname in secondary_perils:
            c = globals()[clsname]
            lst = []
            for param in inspect.signature(c).parameters:
                if param in sec_peril_params:
                    lst.append(sec_peril_params[param])
            inst.append(c(*lst))
        return inst

    outputs = abc.abstractproperty()

    @abc.abstractmethod
    def prepare(self, sites):
        """Add attributes to sites"""

    @abc.abstractmethod
    def compute(self, mag, imt, gmf, sites):
        # gmf is an array with (N, M) elements
        return gmf[:, 0] * .1,  # fake formula

    def __repr__(self):
        return '<%s %s>' % (self.__class__.__name__, ' '.join(self.outputs))
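
A minimal sketch of a concrete subclass following the contract described in the docstring above (the class name, the parameter, and the formula are made up for illustration):

class FakeLiquefaction(SecondaryPeril):
    outputs = ['fake_prob']

    def __init__(self, fake_scale=1.0):  # parameter read from the job.ini
        self.fake_scale = fake_scale

    def prepare(self, sites):
        pass  # nothing to precompute in this toy model

    def compute(self, mag, imt, gmf, sites):
        # one array per name in `outputs`
        return gmf[:, 0] * .1 * self.fake_scale,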
Example 7
class Request(object):
    __metaclass__ = ABCMeta

    method = abstractproperty()

    @abstractmethod
    def __init__(self, *args):
        self.id = '{:x}'.format(getrandbits(32))
        self.args = list(args)
        self.deferred = defer.Deferred()

    @property
    def __data__(self):
        return dict(jsonrpc='2.0',
                    id=self.id,
                    method=self.method,
                    params=self.args)

    @abstractmethod
    def process_response(self, response):
        raise NotImplementedError
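
A minimal sketch of a concrete request (the JSON-RPC method name is an assumption; this only runs in the original module, which provides `defer` and `getrandbits`):

class GetVersionRequest(Request):
    method = 'get_version'

    def __init__(self, *args):
        super(GetVersionRequest, self).__init__(*args)

    def process_response(self, response):
        # a real implementation would fire self.deferred with the result
        return response.get('result')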
Example 8
def test_bbox_target():
    pos_bboxes = torch.tensor([[0.072, 0.47, 0.84, 0.898],
                               [0.23, 0.215, 0.781, 0.534],
                               [0.195, 0.128, 0.643, 0.944],
                               [0.236, 0.189, 0.689, 0.74]])
    neg_bboxes = torch.tensor([[0.375, 0.371, 0.726, 0.804],
                               [0.024, 0.398, 0.776, 0.719]])
    pos_gt_labels = torch.tensor([[0., 0., 1., 0.], [0., 0., 0., 1.],
                                  [0., 1., 0., 0.], [0., 1., 0., 0.]])
    cfg = abstractproperty()
    cfg.pos_weight = 0.8
    labels, label_weights = bbox_target([pos_bboxes], [neg_bboxes],
                                        [pos_gt_labels], cfg)
    assert torch.all(
        torch.isclose(
            labels,
            torch.tensor([[0., 0., 1., 0.], [0., 0., 0., 1.], [0., 1., 0., 0.],
                          [0., 1., 0., 0.], [0., 0., 0., 0.], [0., 0., 0.,
                                                               0.]])))
    assert torch.all(
        torch.isclose(label_weights, torch.tensor([0.8] * 4 + [1.0] * 2)))
Example 9
class OpportunisticTestCase(DbTestCase):
    """Base test case to use default CI databases.

    Subclasses of this test case run only when the openstack_citest
    database is available; otherwise the tests will be skipped.
    """

    FIXTURE = abc.abstractproperty(lambda: None)

    def setUp(self):
        credentials = {
            'backend': self.FIXTURE.DRIVER,
            'user': self.FIXTURE.USERNAME,
            'passwd': self.FIXTURE.PASSWORD,
            'database': self.FIXTURE.DBNAME}

        if self.FIXTURE.DRIVER and not utils.is_backend_avail(**credentials):
            msg = '%s backend is not available.' % self.FIXTURE.DRIVER
            return self.skip(msg)

        super(OpportunisticTestCase, self).setUp()
Example 10
class Device(metaclass=abc.ABCMeta):
    type: DeviceType = abc.abstractproperty()

    def __init__(self, device_id: Optional[str] = None):
        self.device_id = device_id

    def info(self, index: int) -> Dict[str, Any]:
        device_id = self.device_id or f'{self.type.value}{index}'
        return {
            'id': device_id,
            'type': self.type.value,
        }

    def _tick(self, **payload: Any) -> Tick:
        return Tick(Action(self, payload))

    def pause(self, duration: int) -> Tick:
        return self._tick(
            type='pause',
            duration=duration,
        )
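
A minimal sketch of a concrete device (``DeviceType.TOUCH`` and the action payload are assumptions, not the original project's definitions):

class Touchscreen(Device):
    type = DeviceType.TOUCH

    def tap(self, x: int, y: int) -> Tick:
        # emits a single tick carrying the tap coordinates
        return self._tick(type='tap', x=x, y=y)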
Example 11
class _UpdateContactListMembership(object, metaclass=ABCMeta):

    url_path_list_action = abstractproperty()

    def __init__(self, contact_list, contacts_to_update, updated_contacts):
        super(_UpdateContactListMembership, self).__init__()
        self._contact_list = contact_list

        self._contacts_by_page = \
            paginate(contacts_to_update, BATCH_SAVING_SIZE_LIMIT)
        self._updated_contacts = updated_contacts

    def __call__(self):
        api_calls = []

        for contacts_page in self._contacts_by_page:
            request_body_deserialization = \
                {'vids': self._get_contact_vids(contacts_page)}

            updated_contacts_in_page = \
                [c for c in self._updated_contacts if c in contacts_page]
            response_body_deserialization = \
                {'updated': self._get_contact_vids(updated_contacts_in_page)}
            path_info = '/lists/{}/{}'.format(
                self._contact_list.id,
                self.url_path_list_action,
                )
            api_call = SuccessfulAPICall(
                CONTACTS_API_SCRIPT_NAME + path_info,
                'POST',
                request_body_deserialization=request_body_deserialization,
                response_body_deserialization=response_body_deserialization,
                )
            api_calls.append(api_call)

        return api_calls

    @staticmethod
    def _get_contact_vids(contacts):
        return [c.vid for c in contacts]
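
A minimal sketch of a concrete subclass; 'add' is an assumed list action matching the URL pattern built in ``__call__``:

class AddContactsToList(_UpdateContactListMembership):
    url_path_list_action = 'add'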
Example 12
class FiniteCycicGroup(Group):
    # Order of subgroup
    N = abstractproperty()

    def craft(self, o):
        value = getattr(o, 'value', o)
        return value % self.N

    @classmethod
    def identity(cls):
        return cls(0)

    def inverse(self):
        return self.__class__(invmod(self.value, self.N))

    def op(self, g):
        if isinstance(g, int):
            g = self.type(g)
        return self.__class__((self.value + g.value) % self.N)

    def __pow__(self, times):
        return self.__class__(pow(self.value, times, self.N))
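
A minimal sketch of a concrete cyclic group, assuming the ``Group`` base class stores the crafted value on ``self.value``:

class Z7(FiniteCycicGroup):
    # cyclic group of order 7
    N = 7

# e.g. Z7(3).op(Z7(5)) should yield the element with value (3 + 5) % 7 == 1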
Example 13
def _ensure_everything_is_abstract(attributes):
    # all methods and properties are abstract on a pure interface
    namespace = {}
    functions = []
    interface_method_signatures = {}
    interface_property_names = set()
    for name, value in six.iteritems(attributes):
        if _builtin_attrs(name):
            pass  # shortcut
        elif getattr(value, '__isabstractmethod__', False):
            if isinstance(value, (staticmethod, classmethod, types.FunctionType)):
                if isinstance(value, (staticmethod, classmethod)):
                    func = value.__func__
                else:
                    func = value
                functions.append(func)
                interface_method_signatures[name] = _get_function_signature(func)
            elif isinstance(value, property):
                interface_property_names.add(name)
        elif isinstance(value, staticmethod):
            func = value.__func__
            functions.append(func)
            interface_method_signatures[name] = _get_function_signature(func)
            value = abstractstaticmethod(func)
        elif isinstance(value, classmethod):
            func = value.__func__
            interface_method_signatures[name] = _get_function_signature(func)
            functions.append(func)
            value = abstractclassmethod(func)
        elif isinstance(value, types.FunctionType):
            functions.append(value)
            interface_method_signatures[name] = _get_function_signature(value)
            value = abstractmethod(value)
        elif isinstance(value, property):
            interface_property_names.add(name)
            functions.extend([value.fget, value.fset, value.fdel])  # may contain Nones
            value = abstractproperty(value.fget, value.fset, value.fdel)
        namespace[name] = value
    return namespace, functions, interface_method_signatures, interface_property_names
Example 14
class FSMinSize(task.BasicApplication, fstask.FSTask):

    """ An abstract class that represents min size information extraction. """

    description = "minimum filesystem size"

    options = abc.abstractproperty(doc="Options for use with app.")

    def _resize_command(self):
        return [str(self.ext)] + self.options + [self.fs.device]

    def _get_resize_info(self):
        """ Get info from fsresize program.

            :rtype: str
            :returns: output returned by fsresize program
        """
        error_msg = None
        try:
            (rc, out) = util.run_program_and_capture_output(self._resize_command())
            if rc:
                error_msg = "failed to gather info from resize program: %d" % rc
        except OSError as e:
            error_msg = "failed to gather info from resize program: %s" % e

        if error_msg:
            raise FSError(error_msg)
        return out

    @abc.abstractmethod
    def do_task(self):
        """ Returns the minimum size for this filesystem object.

            :rtype: :class:`~.size.Size`
            :returns: the minimum size
            :raises FSError: if filesystem can not be obtained
        """
        raise NotImplementedError()
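
A minimal sketch of a concrete subclass; the option flag and the parsing step are assumptions about the underlying tool, not taken from the real project:

class Ext2MinSize(FSMinSize):
    options = ["-P"]  # assumed flag that makes the tool print the minimum size

    def do_task(self):
        out = self._get_resize_info()
        # parsing `out` into a Size object is omitted in this sketch
        return out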
Example 15
class LinemodeBase(object):
    """Supplies the file line contents for BrowserColumn.

    Attributes:
        name (required!) - Name by which the linemode is referred to by the user

        uses_metadata - True if metadata should be loaded for this linemode

        required_metadata -
            If any of these metadata fields are absent, fall back to
            the default linemode
    """

    __metaclass__ = ABCMeta

    uses_metadata = False
    required_metadata = []

    name = abstractproperty()

    @abstractmethod
    def filetitle(self, fobj, metadata):
        """The left-aligned part of the line."""
        raise NotImplementedError

    def infostring(self, fobj, metadata):
        """The right-aligned part of the line.

        If `NotImplementedError' is raised (e.g. this method is just
        not implemented in the actual linemode), the caller should
        provide its own implementation (which in this case means
        displaying the hardlink count of the directories, size of the
        files and additionally a symlink marker for symlinks). Useful
        because only the caller (BrowserColumn) possesses the data
        necessary to display that information.

        """
        raise NotImplementedError
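
A minimal sketch of a concrete linemode; ``fobj.relative_path`` is an assumption about the file object passed in by BrowserColumn:

class TitleLinemode(LinemodeBase):
    name = "title"

    def filetitle(self, fobj, metadata):
        return fobj.relative_path

    # infostring is deliberately not overridden, so the caller's fallback
    # described in the docstring above kicks in.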
Example 16
class MultipleUsersScenarios:
    """A mixin that uses testscenarios to repeat a testcase as different
    users.

    The scenarios should inject a `userfactory` variable that will
    be called to produce the user used in the tests e.g.:

    class ExampleTest(MultipleUsersScenarios, MAASServerTestCase):
        scenarios = [
            ('anon', dict(userfactory=lambda: AnonymousUser())),
            ('user', dict(userfactory=factory.make_user)),
            ('admin', dict(userfactory=factory.make_admin)),
            ]

        def test_something(self):
            pass

    The test `test_something` will be run 3 times: once with an anonymous user
    logged in, once with a simple (non-admin) user logged in, and once with
    an admin user logged in.
    """

    __metaclass__ = ABCMeta

    scenarios = abstractproperty(
        "The scenarios as defined by testscenarios.")

    def setUp(self):
        super(MultipleUsersScenarios, self).setUp()
        user = self.userfactory()
        if not user.is_anonymous():
            password = factory.getRandomString()
            user.set_password(password)
            user.save()
            self.logged_in_user = user
            self.client.login(
                username=self.logged_in_user.username, password=password)
Example 17
class LazySherdogObject(object):
    __metaclass__ = abc.ABCMeta
    _lazy = True
    _url_path = abc.abstractproperty()

    def __init__(self, id_or_url, **kwargs):
        for key, value in kwargs.iteritems():
            setattr(self, key, value)

        if isinstance(id_or_url, basestring):
            self.id = int(id_or_url[id_or_url.rfind('-') + 1:])
        else:
            self.id = int(id_or_url)

        self.url = self._url_path % self.id

    def __getattr__(self, key):
        if not self._lazy:
            raise AttributeError(key)

        self._lazy = False
        self._load_properties()
        return getattr(self, key)

    def __getitem__(self, key):
        return getattr(self, key)

    def __eq__(self, other):
        assert isinstance(other, LazySherdogObject)
        return self.id == other.id

    def __hash__(self):
        return hash(self.id)

    @abc.abstractmethod
    def _load_properties(self):
        pass
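
A minimal sketch of a concrete lazy object; the URL pattern and the loading step are assumptions:

class Fighter(LazySherdogObject):
    _url_path = 'fighter/%d'

    def _load_properties(self):
        # a real implementation would fetch self.url and set attributes here
        self.name = None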
Example 18
class FSInfo(task.BasicApplication, fstask.FSTask):
    """ An abstract class that represents an information gathering app. """

    description = "filesystem info"

    options = abc.abstractproperty(doc="Options for invoking the application.")

    @property
    def _info_command(self):
        """ Returns the command for reading filesystem information.

            :returns: a list of appropriate options
            :rtype: list of str
        """
        return [str(self.ext)] + self.options + [self.fs.device]

    def do_task(self):
        """ Returns information from the command.

            :returns: a string representing the output of the command
            :rtype: str
            :raises FSError: if info cannot be obtained
        """
        error_msgs = self.availability_errors
        if error_msgs:
            raise FSError("\n".join(error_msgs))

        error_msg = None
        try:
            (rc, out) = util.run_program_and_capture_output(self._info_command)
            if rc:
                error_msg = "failed to gather fs info: %s" % rc
        except OSError as e:
            error_msg = "failed to gather fs info: %s" % e
        if error_msg:
            raise FSError(error_msg)
        return out
Example 19
class FSResize(task.BasicApplication, FSResizeTask):
    """ An abstract class for resizing a filesystem. """

    description = "resize filesystem"

    args = abc.abstractproperty(doc="Resize arguments.")

    # IMPLEMENTATION methods

    @abc.abstractmethod
    def size_spec(self):
        """ Returns a string specification for the target size of the command.
            :returns: size specification
            :rtype: str
        """
        raise NotImplementedError()

    def _resize_command(self):
        return [str(self.ext)] + self.args

    def do_task(self):
        """ Resize the device.

            :raises FSError: on failure
        """
        error_msgs = self.availability_errors
        if error_msgs:
            raise FSError("\n".join(error_msgs))

        try:
            ret = util.run_program(self._resize_command())
        except OSError as e:
            raise FSError(e)

        if ret:
            raise FSError("resize failed: %s" % ret)
Example 20
    class Options(OptionsBase, metaclass=ABCMeta):
        type = abstractproperty()

        def generate_valid_fields(self, is_training=True):
            for field in super().generate_valid_fields(is_training):
                if isinstance(self.type, str) and \
                        field.name.endswith("_options") and field.name != self.type + "_options":
                    continue
                yield field

        def __getattribute__(self, key):
            # ignore getattribute from self
            current_frame = inspect.currentframe()
            if current_frame.f_back.f_locals.get("self") is self:
                return super().__getattribute__(key)

            if isinstance(self.type, str) \
                    and key.endswith("_options") and key != self.type + "_options":
                raise KeyError(f'try to use {key} when type is "{self.type}"')
            return super().__getattribute__(key)

        def __repr__(self):
            return f"{self.__class__.__name__}(type={self.type}," \
                f'{self.type}_options={getattr(self, self.type + "_options")})'
Example 21
class HasRevisions(ABCMixin):
    """Mixin for tables that should be versioned in the transaction log."""
    @property
    def versioned_relationships(self):
        """May be overriden by subclasses. This should be the list of
        relationship attribute names that should trigger an update revision
        when changed. (We want to version changes to some, but not all,
        relationship attributes.)"""
        return []

    @property
    def should_suppress_transaction_creation(self):
        """May be overridden by subclasses. We don't want to version certain
        specific objects -- for example, Block instances that are just raw
        message parts and not real attachments. Use this property to suppress
        revisions of such objects. (The need for this is really an artifact of
        current deficiencies in our models. We should be able to get rid of it
        eventually.)"""
        return False

    # Must be defined by subclasses
    API_OBJECT_NAME = abc.abstractproperty()

    def has_versioned_changes(self):
        """Return True if the object has changes on column properties, or on
        any relationship attributes named in self.versioned_relationships."""
        obj_state = inspect(self)
        versioned_attribute_names = list(self.versioned_relationships)
        for mapper in obj_state.mapper.iterate_to_root():
            for attr in mapper.column_attrs:
                versioned_attribute_names.append(attr.key)

        for attr_name in versioned_attribute_names:
            if getattr(obj_state.attrs, attr_name).history.has_changes():
                return True
        return False
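
A minimal sketch of a model using the mixin; ``Base`` and the 'parts' relationship are assumptions about the surrounding SQLAlchemy declarative setup:

class Message(Base, HasRevisions):
    API_OBJECT_NAME = 'message'

    @property
    def versioned_relationships(self):
        return ['parts']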
Example 22
class SourceCustom():
    """ Настройки для работы источника """

    __metaclass__ = ABCMeta

    @abstractmethod
    def setCustom(self, custom):
        """
        Set the data for working with the source

        :param custom:  data for working with the source
        """
        pass

    @abstractmethod
    def getCustom(self):
        """
        Get the data for working with the source

        :return:  data for working with the source
        """
        return None

    custom = abstractproperty(getCustom, setCustom)
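
A minimal sketch of a concrete implementation that simply keeps the custom data in memory:

class InMemorySourceCustom(SourceCustom):
    def __init__(self):
        self._custom = None

    def setCustom(self, custom):
        self._custom = custom

    def getCustom(self):
        return self._custom

    custom = property(getCustom, setCustom)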
Example 23
class OpportunisticTestCase(DbTestCase):
    """Base test case to use default CI databases.

    Subclasses of this test case run only when the openstack_citest
    database is available; otherwise the tests will be skipped.
    """

    FIXTURE = abc.abstractproperty(lambda: None)

    def setUp(self):
        # TODO(bnemec): Remove this once infra is ready for
        # https://review.openstack.org/#/c/74963/ to merge.
        self.useFixture(lockutils.LockFixture('opportunistic-db'))
        credentials = {
            'backend': self.FIXTURE.DRIVER,
            'user': self.FIXTURE.USERNAME,
            'passwd': self.FIXTURE.PASSWORD,
            'database': self.FIXTURE.DBNAME}

        if self.FIXTURE.DRIVER and not utils.is_backend_avail(**credentials):
            msg = '%s backend is not available.' % self.FIXTURE.DRIVER
            return self.skip(msg)

        super(OpportunisticTestCase, self).setUp()
Example 24
class AbstractAnimal(metaclass=ABCMeta):

    # abstract method
    @abstractmethod
    def make_noise(self):
        pass

    # an abstract read-only property
    @abstractproperty
    def species(self):
        pass

    # abstract read/write property
    def getname(self):
        pass

    def setname(self, value):
        pass

    name = abstractproperty(getname, setname)

    # non-abstract method
    def is_alive(self):
        return True
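
A minimal concrete subclass satisfying all three abstract members (the names and return values are chosen purely for illustration):

class Dog(AbstractAnimal):
    def __init__(self, name):
        self._name = name

    def make_noise(self):
        return "woof"

    @property
    def species(self):
        return "Canis familiaris"

    def getname(self):
        return self._name

    def setname(self, value):
        self._name = value

    name = property(getname, setname)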
Example 25
class DHCPServer(metaclass=ABCMeta):
    """Represents the settings for a DHCP server.

    :cvar descriptive_name: A name to use for this server in human-readable
        texts.
    :cvar template_basename: The base filename for the template to use when
        generating configuration for this server.
    :cvar interfaces_filename: The full path and filename for the server's
        interfaces file.
    :cvar config_filename: The full path and filename for the server's
        configuration file.
    :ivar omapi_key: The OMAPI secret key for the server.
    """

    descriptive_name = abstractproperty()
    template_basename = abstractproperty()
    interfaces_filename = abstractproperty()
    config_filename = abstractproperty()
    dhcp_service = abstractproperty()
    ipv6 = abstractproperty()

    def __init__(self, omapi_key):
        super(DHCPServer, self).__init__()
        self.omapi_key = omapi_key
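
A minimal sketch of a concrete server; every value below is a placeholder, not the real project's configuration:

class DHCPv4Server(DHCPServer):
    descriptive_name = "DHCPv4"
    template_basename = "dhcpd.conf.template"
    interfaces_filename = "/var/lib/dhcp/dhcpd-interfaces"
    config_filename = "/var/lib/dhcp/dhcpd.conf"
    dhcp_service = "dhcpd"
    ipv6 = False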
Example 26
class LabelingAsRoot(loopbackedtestcase.LoopBackedTestCase):
    """Tests various aspects of labeling a filesystem where there
       is no easy way to read the filesystem's label once it has been
       set and where the filesystem can not be relabeled.
    """

    _fs_class = abc.abstractproperty(
        doc="The class of the filesystem being tested on.")

    _invalid_label = abc.abstractproperty(
        doc="A label which is invalid for this filesystem.")

    def __init__(self, methodName='run_test'):
        super(LabelingAsRoot, self).__init__(methodName=methodName,
                                             device_spec=[Size("100 MiB")])

    def setUp(self):
        an_fs = self._fs_class()
        if not an_fs.formattable:
            self.skipTest("can not create filesystem %s" % an_fs.name)
        if not an_fs.labeling():
            self.skipTest("can not label filesystem %s" % an_fs.name)
        super(LabelingAsRoot, self).setUp()

    def test_labeling(self):
        """A sequence of tests of filesystem labeling.

           * create the filesystem when passing an invalid label
           * raise an exception when reading the filesystem
           * raise an exception when relabeling the filesystem
        """
        an_fs = self._fs_class(device=self.loop_devices[0],
                               label=self._invalid_label)
        if an_fs._readlabel.availability_errors or not an_fs.relabels():
            self.skipTest("can not read or write label for filesystem %s" %
                          an_fs.name)
        self.assertIsNone(an_fs.create())

        with self.assertRaises(FSReadLabelError):
            an_fs.read_label()

        an_fs.label = "an fs"
        with self.assertRaises(FSError):
            an_fs.write_label()

    def test_creating(self):
        """Create the filesystem when passing a valid label """
        an_fs = self._fs_class(device=self.loop_devices[0], label="start")
        self.assertIsNone(an_fs.create())

    def test_creating_none(self):
        """Create the filesystem when passing None
           (indicates filesystem default)
        """
        an_fs = self._fs_class(device=self.loop_devices[0], label=None)
        self.assertIsNone(an_fs.create())

    def test_creating_empty(self):
        """Create the filesystem when passing the empty label."""
        an_fs = self._fs_class(device=self.loop_devices[0], label="")
        self.assertIsNone(an_fs.create())
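
A minimal sketch of a concrete test case; the filesystem class and the invalid label are assumptions:

class LabelingExt2FSAsRoot(LabelingAsRoot):
    _fs_class = fs.Ext2FS          # assumed filesystem class under test
    _invalid_label = "n" * 20      # assumed to exceed the label length limit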
Example 27
class LatticeECP5Platform(TemplatedPlatform):
    """
    Trellis toolchain
    -----------------

    Required tools:
        * ``yosys``
        * ``nextpnr-ecp5``
        * ``ecppack``

    The environment is populated by running the script specified in the environment variable
    ``NMIGEN_ENV_Trellis``, if present.

    Available overrides:
        * ``verbose``: enables logging of informational messages to standard error.
        * ``read_verilog_opts``: adds options for ``read_verilog`` Yosys command.
        * ``synth_opts``: adds options for ``synth_ecp5`` Yosys command.
        * ``script_after_read``: inserts commands after ``read_ilang`` in Yosys script.
        * ``script_after_synth``: inserts commands after ``synth_ecp5`` in Yosys script.
        * ``yosys_opts``: adds extra options for ``yosys``.
        * ``nextpnr_opts``: adds extra options for ``nextpnr-ecp5``.
        * ``ecppack_opts``: adds extra options for ``ecppack``.
        * ``add_preferences``: inserts commands at the end of the LPF file.

    Build products:
        * ``{{name}}.rpt``: Yosys log.
        * ``{{name}}.json``: synthesized RTL.
        * ``{{name}}.tim``: nextpnr log.
        * ``{{name}}.config``: ASCII bitstream.
        * ``{{name}}.bit``: binary bitstream.
        * ``{{name}}.svf``: JTAG programming vector.

    Diamond toolchain
    -----------------

    Required tools:
        * ``pnmainc``
        * ``ddtcmd``

    The environment is populated by running the script specified in the environment variable
    ``NMIGEN_ENV_Diamond``, if present.

    Available overrides:
        * ``script_project``: inserts commands before ``prj_project save`` in Tcl script.
        * ``script_after_export``: inserts commands after ``prj_run Export`` in Tcl script.
        * ``add_preferences``: inserts commands at the end of the LPF file.
        * ``add_constraints``: inserts commands at the end of the XDC file.

    Build products:
        * ``{{name}}_impl/{{name}}_impl.htm``: consolidated log.
        * ``{{name}}.bit``: binary bitstream.
        * ``{{name}}.svf``: JTAG programming vector.
    """

    toolchain = None  # selected when creating platform

    device = abstractproperty()
    package = abstractproperty()
    speed = abstractproperty()
    grade = "C"  # [C]ommercial, [I]ndustrial

    # Trellis templates

    _nextpnr_device_options = {
        "LFE5U-12F": "--12k",
        "LFE5U-25F": "--25k",
        "LFE5U-45F": "--45k",
        "LFE5U-85F": "--85k",
        "LFE5UM-25F": "--um-25k",
        "LFE5UM-45F": "--um-45k",
        "LFE5UM-85F": "--um-85k",
        "LFE5UM5G-25F": "--um5g-25k",
        "LFE5UM5G-45F": "--um5g-45k",
        "LFE5UM5G-85F": "--um5g-85k",
    }
    _nextpnr_package_options = {
        "BG256": "caBGA256",
        "MG285": "csfBGA285",
        "BG381": "caBGA381",
        "BG554": "caBGA554",
        "BG756": "caBGA756",
    }

    _trellis_required_tools = ["yosys", "nextpnr-ecp5", "ecppack"]
    _trellis_file_templates = {
        **TemplatedPlatform.build_script_templates, "{{name}}.il":
        r"""
            # {{autogenerated}}
            {{emit_rtlil()}}
        """,
        "{{name}}.debug.v":
        r"""
            /* {{autogenerated}} */
            {{emit_debug_verilog()}}
        """,
        "{{name}}.ys":
        r"""
            # {{autogenerated}}
            {% for file in platform.iter_extra_files(".v") -%}
                read_verilog {{get_override("read_verilog_opts")|options}} {{file}}
            {% endfor %}
            {% for file in platform.iter_extra_files(".sv") -%}
                read_verilog -sv {{get_override("read_verilog_opts")|options}} {{file}}
            {% endfor %}
            {% for file in platform.iter_extra_files(".il") -%}
                read_ilang {{file}}
            {% endfor %}
            read_ilang {{name}}.il
            {{get_override("script_after_read")|default("# (script_after_read placeholder)")}}
            synth_ecp5 {{get_override("synth_opts")|options}} -top {{name}}
            {{get_override("script_after_synth")|default("# (script_after_synth placeholder)")}}
            write_json {{name}}.json
        """,
        "{{name}}.lpf":
        r"""
            # {{autogenerated}}
            BLOCK ASYNCPATHS;
            BLOCK RESETPATHS;
            {% for port_name, pin_name, attrs in platform.iter_port_constraints_bits() -%}
                LOCATE COMP "{{port_name}}" SITE "{{pin_name}}";
                {% if attrs -%}
                IOBUF PORT "{{port_name}}"
                    {%- for key, value in attrs.items() %} {{key}}={{value}}{% endfor %};
                {% endif %}
            {% endfor %}
            {% for net_signal, port_signal, frequency in platform.iter_clock_constraints() -%}
                {% if port_signal is not none -%}
                    FREQUENCY PORT "{{port_signal.name}}" {{frequency}} HZ;
                {% else -%}
                    FREQUENCY NET "{{net_signal|hierarchy(".")}}" {{frequency}} HZ;
                {% endif %}
            {% endfor %}
            {{get_override("add_preferences")|default("# (add_preferences placeholder)")}}
        """
    }
    _trellis_command_templates = [
        r"""
        {{invoke_tool("yosys")}}
            {{quiet("-q")}}
            {{get_override("yosys_opts")|options}}
            -l {{name}}.rpt
            {{name}}.ys
        """, r"""
        {{invoke_tool("nextpnr-ecp5")}}
            {{quiet("--quiet")}}
            {{get_override("nextpnr_opts")|options}}
            --log {{name}}.tim
            {{platform._nextpnr_device_options[platform.device]}}
            --package {{platform._nextpnr_package_options[platform.package]|upper}}
            --speed {{platform.speed}}
            --json {{name}}.json
            --lpf {{name}}.lpf
            --textcfg {{name}}.config
        """, r"""
        {{invoke_tool("ecppack")}}
            {{verbose("--verbose")}}
            {{get_override("ecppack_opts")|options}}
            --input {{name}}.config
            --bit {{name}}.bit
            --svf {{name}}.svf
        """
    ]

    # Diamond templates

    _diamond_required_tools = ["pnmainc", "ddtcmd"]
    _diamond_file_templates = {
        **TemplatedPlatform.build_script_templates,
        "build_{{name}}.sh":
        r"""
            # {{autogenerated}}
            set -e{{verbose("x")}}
            if [ -z "$BASH" ] ; then exec /bin/bash "$0" "$@"; fi
            if [ -n "${{platform._toolchain_env_var}}" ]; then
                bindir=$(dirname "${{platform._toolchain_env_var}}")
                . "${{platform._toolchain_env_var}}"
            fi
            {{emit_commands("sh")}}
        """,
        "{{name}}.v":
        r"""
            /* {{autogenerated}} */
            {{emit_verilog()}}
        """,
        "{{name}}.debug.v":
        r"""
            /* {{autogenerated}} */
            {{emit_debug_verilog()}}
        """,
        "{{name}}.tcl":
        r"""
            prj_project new -name {{name}} -impl impl -impl_dir top_impl \
                -dev {{platform.device}}-{{platform.speed}}{{platform.package}}{{platform.grade}} \
                -lpf {{name}}.lpf \
                -synthesis synplify
            {% for file in platform.iter_extra_files(".v", ".sv", ".vhd", ".vhdl") -%}
                prj_src add {{file|tcl_escape}}
            {% endfor %}
            prj_src add {{name}}.v
            prj_impl option top {{name}}
            prj_src add {{name}}.sdc
            {{get_override("script_project")|default("# (script_project placeholder)")}}
            prj_project save
            prj_run Synthesis -impl impl -forceAll
            prj_run Translate -impl impl -forceAll
            prj_run Map -impl impl -forceAll
            prj_run PAR -impl impl -forceAll
            prj_run Export -impl impl -forceAll -task Bitgen
            {{get_override("script_after_export")|default("# (script_after_export placeholder)")}}
        """,
        "{{name}}.lpf":
        r"""
            # {{autogenerated}}
            BLOCK ASYNCPATHS;
            BLOCK RESETPATHS;
            {% for port_name, pin_name, extras in platform.iter_port_constraints_bits() -%}
                LOCATE COMP "{{port_name}}" SITE "{{pin_name}}";
                IOBUF PORT "{{port_name}}"
                    {%- for key, value in extras.items() %} {{key}}={{value}}{% endfor %};
            {% endfor %}
            {{get_override("add_preferences")|default("# (add_preferences placeholder)")}}
        """,
        "{{name}}.sdc":
        r"""
            {% for net_signal, port_signal, frequency in platform.iter_clock_constraints() -%}
                {% if port_signal is not none -%}
                    create_clock -name {{port_signal.name|tcl_escape}} -period {{1000000000/frequency}} [get_ports {{port_signal.name|tcl_escape}}]
                {% else -%}
                    create_clock -name {{net_signal.name|tcl_escape}} -period {{1000000000/frequency}} [get_nets {{net_signal|hierarchy("/")|tcl_escape}}]
                {% endif %}
            {% endfor %}
            {{get_override("add_constraints")|default("# (add_constraints placeholder)")}}
        """,
    }
    _diamond_command_templates = [
        # These don't have any usable command-line option overrides.
        r"""
        {{invoke_tool("pnmainc")}}
            {{name}}.tcl
        """,
        r"""
        {{invoke_tool("ddtcmd")}}
            -oft -bit
            -if {{name}}_impl/{{name}}_impl.bit -of {{name}}.bit
        """,
        r"""
        {{invoke_tool("ddtcmd")}}
            -oft -svfsingle -revd -op "Fast Program"
            -if {{name}}_impl/{{name}}_impl.bit -of {{name}}.svf
        """,
    ]

    # Common logic

    def __init__(self, *, toolchain="Trellis"):
        super().__init__()

        assert toolchain in ("Trellis", "Diamond")
        self.toolchain = toolchain

    @property
    def required_tools(self):
        if self.toolchain == "Trellis":
            return self._trellis_required_tools
        if self.toolchain == "Diamond":
            return self._diamond_required_tools
        assert False

    @property
    def file_templates(self):
        if self.toolchain == "Trellis":
            return self._trellis_file_templates
        if self.toolchain == "Diamond":
            return self._diamond_file_templates
        assert False

    @property
    def command_templates(self):
        if self.toolchain == "Trellis":
            return self._trellis_command_templates
        if self.toolchain == "Diamond":
            return self._diamond_command_templates
        assert False

    @property
    def default_clk_constraint(self):
        if self.default_clk == "OSCG":
            return Clock(310e6 / self.oscg_div)
        return super().default_clk_constraint

    def create_missing_domain(self, name):
        # Lattice ECP5 devices have two global set/reset signals: PUR, which is driven at startup
        # by the configuration logic and unconditionally resets every storage element, and GSR,
        # which is driven by user logic and each storage element may be configured as affected or
        # unaffected by GSR. PUR is purely asynchronous, so even though it is a low-skew global
        # network, its deassertion may violate a setup/hold constraint with relation to a user
        # clock. To avoid this, a GSR/SGSR instance should be driven synchronized to user clock.
        if name == "sync" and self.default_clk is not None:
            m = Module()
            if self.default_clk == "OSCG":
                if not hasattr(self, "oscg_div"):
                    raise ValueError(
                        "OSCG divider (oscg_div) must be an integer between 2 "
                        "and 128")
                if not isinstance(
                        self.oscg_div,
                        int) or self.oscg_div < 2 or self.oscg_div > 128:
                    raise ValueError(
                        "OSCG divider (oscg_div) must be an integer between 2 "
                        "and 128, not {!r}".format(self.oscg_div))
                clk_i = Signal()
                m.submodules += Instance("OSCG",
                                         p_DIV=self.oscg_div,
                                         o_OSC=clk_i)
            else:
                clk_i = self.request(self.default_clk).i
            if self.default_rst is not None:
                rst_i = self.request(self.default_rst).i
            else:
                rst_i = Const(0)

            gsr0 = Signal()
            gsr1 = Signal()
            # There is no end-of-startup signal on ECP5, but PUR is released after IOB enable, so
            # a simple reset synchronizer (with PUR as the asynchronous reset) does the job.
            m.submodules += [
                Instance("FD1S3AX",
                         p_GSR="DISABLED",
                         i_CK=clk_i,
                         i_D=~rst_i,
                         o_Q=gsr0),
                Instance("FD1S3AX",
                         p_GSR="DISABLED",
                         i_CK=clk_i,
                         i_D=gsr0,
                         o_Q=gsr1),
                # Although we already synchronize the reset input to user clock, SGSR has dedicated
                # clock routing to the center of the FPGA; use that just in case it turns out to be
                # more reliable. (None of this is documented.)
                Instance("SGSR", i_CLK=clk_i, i_GSR=gsr1),
            ]
            # GSR implicitly connects to every appropriate storage element. As such, the sync
            # domain is reset-less; domains driven by other clocks would need to have dedicated
            # reset circuitry or otherwise meet setup/hold constraints on their own.
            m.domains += ClockDomain("sync", reset_less=True)
            m.d.comb += ClockSignal("sync").eq(clk_i)
            return m

    _single_ended_io_types = [
        "HSUL12",
        "LVCMOS12",
        "LVCMOS15",
        "LVCMOS18",
        "LVCMOS25",
        "LVCMOS33",
        "LVTTL33",
        "SSTL135_I",
        "SSTL135_II",
        "SSTL15_I",
        "SSTL15_II",
        "SSTL18_I",
        "SSTL18_II",
    ]
    _differential_io_types = [
        "BLVDS25",
        "BLVDS25E",
        "HSUL12D",
        "LVCMOS18D",
        "LVCMOS25D",
        "LVCMOS33D",
        "LVDS",
        "LVDS25E",
        "LVPECL33",
        "LVPECL33E",
        "LVTTL33D",
        "MLVDS",
        "MLVDS25E",
        "SLVS",
        "SSTL135D_II",
        "SSTL15D_II",
        "SSTL18D_II",
        "SUBLVDS",
    ]

    def should_skip_port_component(self, port, attrs, component):
        # On ECP5, a differential IO is placed by only instantiating an IO buffer primitive at
        # the PIOA or PIOC location, which is always the non-inverting pin.
        if attrs.get("IO_TYPE", "LVCMOS25"
                     ) in self._differential_io_types and component == "n":
            return True
        return False

    def _get_xdr_buffer(self, m, pin, *, i_invert=False, o_invert=False):
        def get_ireg(clk, d, q):
            for bit in range(len(q)):
                m.submodules += Instance("IFS1P3DX",
                                         i_SCLK=clk,
                                         i_SP=Const(1),
                                         i_CD=Const(0),
                                         i_D=d[bit],
                                         o_Q=q[bit])

        def get_oreg(clk, d, q):
            for bit in range(len(q)):
                m.submodules += Instance("OFS1P3DX",
                                         i_SCLK=clk,
                                         i_SP=Const(1),
                                         i_CD=Const(0),
                                         i_D=d[bit],
                                         o_Q=q[bit])

        def get_iddr(sclk, d, q0, q1):
            for bit in range(len(d)):
                m.submodules += Instance("IDDRX1F",
                                         i_SCLK=sclk,
                                         i_RST=Const(0),
                                         i_D=d[bit],
                                         o_Q0=q0[bit],
                                         o_Q1=q1[bit])

        def get_iddrx2(sclk, eclk, d, q0, q1, q2, q3):
            for bit in range(len(d)):
                m.submodules += Instance("IDDRX2F",
                                         i_SCLK=sclk,
                                         i_ECLK=eclk,
                                         i_RST=Const(0),
                                         i_D=d[bit],
                                         o_Q0=q0[bit],
                                         o_Q1=q1[bit],
                                         o_Q2=q2[bit],
                                         o_Q3=q3[bit])

        def get_iddr71b(sclk, eclk, d, q0, q1, q2, q3, q4, q5, q6):
            for bit in range(len(d)):
                m.submodules += Instance(
                    "IDDR71B",
                    i_SCLK=sclk,
                    i_ECLK=eclk,
                    i_RST=Const(0),
                    i_D=d[bit],
                    o_Q0=q0[bit],
                    o_Q1=q1[bit],
                    o_Q2=q2[bit],
                    o_Q3=q3[bit],
                    o_Q4=q4[bit],
                    o_Q5=q5[bit],
                    o_Q6=q6[bit],
                )

        def get_oddr(sclk, d0, d1, q):
            for bit in range(len(q)):
                m.submodules += Instance("ODDRX1F",
                                         i_SCLK=sclk,
                                         i_RST=Const(0),
                                         i_D0=d0[bit],
                                         i_D1=d1[bit],
                                         o_Q=q[bit])

        def get_oddrx2(sclk, eclk, d0, d1, d2, d3, q):
            for bit in range(len(q)):
                m.submodules += Instance("ODDRX2F",
                                         i_SCLK=sclk,
                                         i_ECLK=eclk,
                                         i_RST=Const(0),
                                         i_D0=d0[bit],
                                         i_D1=d1[bit],
                                         i_D2=d2[bit],
                                         i_D3=d3[bit],
                                         o_Q=q[bit])

        def get_oddr71b(sclk, eclk, d0, d1, d2, d3, d4, d5, d6, q):
            for bit in range(len(q)):
                m.submodules += Instance("ODDR71B",
                                         i_SCLK=sclk,
                                         i_ECLK=eclk,
                                         i_RST=Const(0),
                                         i_D0=d0[bit],
                                         i_D1=d1[bit],
                                         i_D2=d2[bit],
                                         i_D3=d3[bit],
                                         i_D4=d4[bit],
                                         i_D5=d5[bit],
                                         i_D6=d6[bit],
                                         o_Q=q[bit])

        def get_ineg(z, invert):
            if invert:
                a = Signal.like(z, name_suffix="_n")
                m.d.comb += z.eq(~a)
                return a
            else:
                return z

        def get_oneg(a, invert):
            if invert:
                z = Signal.like(a, name_suffix="_n")
                m.d.comb += z.eq(~a)
                return z
            else:
                return a

        if "i" in pin.dir:
            if pin.xdr < 2:
                pin_i = get_ineg(pin.i, i_invert)
            elif pin.xdr == 2:
                pin_i0 = get_ineg(pin.i0, i_invert)
                pin_i1 = get_ineg(pin.i1, i_invert)
            elif pin.xdr == 4:
                pin_i0 = get_ineg(pin.i0, i_invert)
                pin_i1 = get_ineg(pin.i1, i_invert)
                pin_i2 = get_ineg(pin.i2, i_invert)
                pin_i3 = get_ineg(pin.i3, i_invert)
            elif pin.xdr == 7:
                pin_i0 = get_ineg(pin.i0, i_invert)
                pin_i1 = get_ineg(pin.i1, i_invert)
                pin_i2 = get_ineg(pin.i2, i_invert)
                pin_i3 = get_ineg(pin.i3, i_invert)
                pin_i4 = get_ineg(pin.i4, i_invert)
                pin_i5 = get_ineg(pin.i5, i_invert)
                pin_i6 = get_ineg(pin.i6, i_invert)
        if "o" in pin.dir:
            if pin.xdr < 2:
                pin_o = get_oneg(pin.o, o_invert)
            elif pin.xdr == 2:
                pin_o0 = get_oneg(pin.o0, o_invert)
                pin_o1 = get_oneg(pin.o1, o_invert)
            elif pin.xdr == 4:
                pin_o0 = get_oneg(pin.o0, o_invert)
                pin_o1 = get_oneg(pin.o1, o_invert)
                pin_o2 = get_oneg(pin.o2, o_invert)
                pin_o3 = get_oneg(pin.o3, o_invert)
            elif pin.xdr == 7:
                pin_o0 = get_oneg(pin.o0, o_invert)
                pin_o1 = get_oneg(pin.o1, o_invert)
                pin_o2 = get_oneg(pin.o2, o_invert)
                pin_o3 = get_oneg(pin.o3, o_invert)
                pin_o4 = get_oneg(pin.o4, o_invert)
                pin_o5 = get_oneg(pin.o5, o_invert)
                pin_o6 = get_oneg(pin.o6, o_invert)

        i = o = t = None
        if "i" in pin.dir:
            i = Signal(pin.width, name="{}_xdr_i".format(pin.name))
        if "o" in pin.dir:
            o = Signal(pin.width, name="{}_xdr_o".format(pin.name))
        if pin.dir in ("oe", "io"):
            t = Signal(1, name="{}_xdr_t".format(pin.name))

        if pin.xdr == 0:
            if "i" in pin.dir:
                i = pin_i
            if "o" in pin.dir:
                o = pin_o
            if pin.dir in ("oe", "io"):
                t = ~pin.oe
        elif pin.xdr == 1:
            # Note that currently nextpnr will not pack an FF (*FS1P3DX) into the PIO.
            if "i" in pin.dir:
                get_ireg(pin.i_clk, i, pin_i)
            if "o" in pin.dir:
                get_oreg(pin.o_clk, pin_o, o)
            if pin.dir in ("oe", "io"):
                get_oreg(pin.o_clk, ~pin.oe, t)
        elif pin.xdr == 2:
            if "i" in pin.dir:
                get_iddr(pin.i_clk, i, pin_i0, pin_i1)
            if "o" in pin.dir:
                get_oddr(pin.o_clk, pin_o0, pin_o1, o)
            if pin.dir in ("oe", "io"):
                # It looks like Diamond will not pack an OREG as a tristate register in a DDR PIO.
                # It is not clear what is the recommended set of primitives for this task.
                # Similarly, nextpnr will not pack anything as a tristate register in a DDR PIO.
                get_oreg(pin.o_clk, ~pin.oe, t)
        elif pin.xdr == 4:
            if "i" in pin.dir:
                get_iddrx2(pin.i_clk, pin.i_fclk, i, pin_i0, pin_i1, pin_i2,
                           pin_i3)
            if "o" in pin.dir:
                get_oddrx2(pin.o_clk, pin.o_fclk, pin_o0, pin_o1, pin_o2,
                           pin_o3, o)
            if pin.dir in ("oe", "io"):
                get_oreg(pin.o_clk, ~pin.oe, t)
        elif pin.xdr == 7:
            if "i" in pin.dir:
                get_iddr71b(pin.i_clk, pin.i_fclk, i, pin_i0, pin_i1, pin_i2,
                            pin_i3, pin_i4, pin_i5, pin_i6)
            if "o" in pin.dir:
                get_oddr71b(pin.o_clk, pin.o_fclk, pin_o0, pin_o1, pin_o2,
                            pin_o3, pin_o4, pin_o5, pin_o6, o)
            if pin.dir in ("oe", "io"):
                get_oreg(pin.o_clk, ~pin.oe, t)
        else:
            assert False

        return (i, o, t)

    def get_input(self, pin, port, attrs, invert):
        self._check_feature("single-ended input",
                            pin,
                            attrs,
                            valid_xdrs=(0, 1, 2, 4, 7),
                            valid_attrs=True)
        m = Module()
        i, o, t = self._get_xdr_buffer(m, pin, i_invert=invert)
        for bit in range(len(port)):
            m.submodules["{}_{}".format(pin.name,
                                        bit)] = Instance("IB",
                                                         i_I=port[bit],
                                                         o_O=i[bit])
        return m

    def get_output(self, pin, port, attrs, invert):
        self._check_feature("single-ended output",
                            pin,
                            attrs,
                            valid_xdrs=(0, 1, 2, 4, 7),
                            valid_attrs=True)
        m = Module()
        i, o, t = self._get_xdr_buffer(m, pin, o_invert=invert)
        for bit in range(len(port)):
            m.submodules["{}_{}".format(pin.name,
                                        bit)] = Instance("OB",
                                                         i_I=o[bit],
                                                         o_O=port[bit])
        return m

    def get_tristate(self, pin, port, attrs, invert):
        self._check_feature("single-ended tristate",
                            pin,
                            attrs,
                            valid_xdrs=(0, 1, 2, 4, 7),
                            valid_attrs=True)
        m = Module()
        i, o, t = self._get_xdr_buffer(m, pin, o_invert=invert)
        for bit in range(len(port)):
            m.submodules["{}_{}".format(pin.name,
                                        bit)] = Instance("OBZ",
                                                         i_T=t,
                                                         i_I=o[bit],
                                                         o_O=port[bit])
        return m

    def get_input_output(self, pin, port, attrs, invert):
        self._check_feature("single-ended input/output",
                            pin,
                            attrs,
                            valid_xdrs=(0, 1, 2, 4, 7),
                            valid_attrs=True)
        m = Module()
        i, o, t = self._get_xdr_buffer(m,
                                       pin,
                                       i_invert=invert,
                                       o_invert=invert)
        for bit in range(len(port)):
            m.submodules["{}_{}".format(pin.name,
                                        bit)] = Instance("BB",
                                                         i_T=t,
                                                         i_I=o[bit],
                                                         o_O=i[bit],
                                                         io_B=port[bit])
        return m

    def get_diff_input(self, pin, p_port, n_port, attrs, invert):
        self._check_feature("differential input",
                            pin,
                            attrs,
                            valid_xdrs=(0, 1, 2, 4, 7),
                            valid_attrs=True)
        m = Module()
        i, o, t = self._get_xdr_buffer(m, pin, i_invert=invert)
        for bit in range(len(p_port)):
            m.submodules["{}_{}".format(pin.name,
                                        bit)] = Instance("IB",
                                                         i_I=p_port[bit],
                                                         o_O=i[bit])
        return m

    def get_diff_output(self, pin, p_port, n_port, attrs, invert):
        self._check_feature("differential output",
                            pin,
                            attrs,
                            valid_xdrs=(0, 1, 2, 4, 7),
                            valid_attrs=True)
        m = Module()
        i, o, t = self._get_xdr_buffer(m, pin, o_invert=invert)
        for bit in range(len(p_port)):
            m.submodules["{}_{}".format(pin.name, bit)] = Instance(
                "OB",
                i_I=o[bit],
                o_O=p_port[bit],
            )
        return m

    def get_diff_tristate(self, pin, p_port, n_port, attrs, invert):
        self._check_feature("differential tristate",
                            pin,
                            attrs,
                            valid_xdrs=(0, 1, 2, 4, 7),
                            valid_attrs=True)
        m = Module()
        i, o, t = self._get_xdr_buffer(m, pin, o_invert=invert)
        for bit in range(len(p_port)):
            m.submodules["{}_{}".format(pin.name, bit)] = Instance(
                "OBZ",
                i_T=t,
                i_I=o[bit],
                o_O=p_port[bit],
            )
        return m

    def get_diff_input_output(self, pin, p_port, n_port, attrs, invert):
        self._check_feature("differential input/output",
                            pin,
                            attrs,
                            valid_xdrs=(0, 1, 2, 4, 7),
                            valid_attrs=True)
        m = Module()
        i, o, t = self._get_xdr_buffer(m,
                                       pin,
                                       i_invert=invert,
                                       o_invert=invert)
        for bit in range(len(p_port)):
            m.submodules["{}_{}".format(pin.name, bit)] = Instance(
                "BB",
                i_T=t,
                i_I=o[bit],
                o_O=i[bit],
                io_B=p_port[bit],
            )
        return m
Esempio n. 28
0
class OdooProtocol():
    __metaclass__ = ABCMeta

    def set_link_type(self, value):
        """
        Initialize the endpoint type to connect with Odoo instance
        :param value: string
        :return: None
        """
        self.link_type = value

    def get_link_type(self):
        """
        Return the endpoint type to connect with Odoo instance.
        :return: string
        """
        return self.link_type

    link_type = abstractproperty(get_link_type, set_link_type)

    def set_username(self, value):
        """
        Initialize the username to connect with Odoo instance.
        :param value: string
        :return: None
        """
        self.username = value

    def get_username(self):
        """
        Return the username used to connect with Odoo instance.
        :return: string
        """
        return self.username

    username = abstractproperty(get_username, set_username)

    def set_password(self, value):
        """
        Initialize the password to connect with Odoo instance
        :param value: string
        :return: None
        """
        self.password = value

    def get_password(self):
        """
        Return the password to connect with Odoo instance.
        :return: string
        """
        return self.password

    password = abstractproperty(get_password, set_password)

    def set_endpoint(self, value):
        """
        Initialize the endpoint to connect with Odoo instance.
        :param value: string
        :return: None
        """
        self.endpoint = value

    def get_endpoint(self):
        """
        Return the endpoint to connect with Odoo Instance
        :return: string
        """
        return self.endpoint

    endpoint = abstractproperty(get_endpoint, set_endpoint)

    def set_dbname(self, value):
        """
        Set the database name on the Odoo Connection
        :param value: string
        :return: None
        """
        self.dbname = value

    def get_dbname(self):
        """
        Return the database name on the Odoo Connection
        :return: string
        """
        return self.dbname

    dbname = abstractproperty(get_dbname, set_dbname)

    @abstractmethod
    def link(self, link_type='common'):
        """
        Build the connection link with Odoo instance
        :param link_type: string (endpoint type to connect by default is 'common')
        :return: object (connection link with Odoo instance)
        """
        raise NotImplementedError()

    @abstractmethod
    def connect(self, dbname=None, username=None, password=None):
        """
        Connect with Odoo instance
        :param dbname: string (database name to connect with the Odoo instance)
        :param username: string (username to connect with the Odoo instance)
        :param password: string (password to connect with the Odoo instance)
        :return: int (UID from the user connected.)
        """
        raise NotImplementedError()

    @abstractmethod
    def write(self, uid, model, action, data, password=None):
        """
        Universal method to call the create, update, signup and unlink methods on the Odoo connection
        :param uid: int (UID from the Odoo user connected)
        :param model: string (the name from the model to call on the method)
        :param action: string (the action required to call on the Odoo Web API method.)
        :param data: list (by default is None, they are the data to send to the Web API methods.)
        :param password: string (by default is None, the password to execute the Odoo Web API method.)
        :return: dictionaries list (result from the method called from the Odoo Web API.)
        """
        raise NotImplementedError()

    @abstractmethod
    def search(self, uid, model, action, password=None, queries=None, parameters=None, formatted=False):
        """
        Universal method to execute or call the methods available on the Odoo Web API.
        :param uid: int (UID from the Odoo user connected)
        :param model: string (the name from the model to call on the method)
        :param action: string (the action required to call on the Odoo Web API method.)
        :param password: string (by default is None, the password to execute the Odoo Web API method.)
        :param queries: list (by default is None, they are the queries to filter on the Odoo Web API method.)
        :param parameters: list (by default is None, the list from parameters passed by position)
        :param formatted: boolean (by default is False, change to True if you want format the result)
        :return: dictionaries list (result from the method called from the Odoo Web API.)
        """
        raise NotImplementedError()
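
Nothing in this example shows how the abstract methods are meant to be filled in. The sketch below is a hypothetical concrete implementation (not part of the original project) that maps the contract onto Odoo's standard XML-RPC endpoints via ``xmlrpc.client``; the class name, the URL layout and the attribute-shadowing trick are assumptions made for illustration.

import xmlrpc.client


class OdooXmlRpcProtocol(OdooProtocol):
    """Hypothetical concrete implementation using Odoo's standard XML-RPC API."""

    # Shadow the abstract property descriptors with plain class attributes, so
    # the inherited set_*/get_* helpers operate on ordinary instance attributes
    # instead of recursing through the property setters on the base class.
    link_type = None
    username = None
    password = None
    endpoint = None
    dbname = None

    def __init__(self, endpoint, dbname, username, password):
        self.set_endpoint(endpoint)
        self.set_dbname(dbname)
        self.set_username(username)
        self.set_password(password)

    def link(self, link_type='common'):
        self.set_link_type(link_type)
        # Odoo exposes .../xmlrpc/2/common and .../xmlrpc/2/object endpoints.
        return xmlrpc.client.ServerProxy(
            "{}/xmlrpc/2/{}".format(self.get_endpoint(), link_type))

    def connect(self, dbname=None, username=None, password=None):
        common = self.link('common')
        return common.authenticate(dbname or self.get_dbname(),
                                    username or self.get_username(),
                                    password or self.get_password(), {})

    def write(self, uid, model, action, data, password=None):
        models = self.link('object')
        return models.execute_kw(self.get_dbname(), uid,
                                 password or self.get_password(),
                                 model, action, data or [])

    def search(self, uid, model, action, password=None, queries=None,
               parameters=None, formatted=False):
        # ``formatted`` post-processing is omitted in this sketch.
        models = self.link('object')
        args = parameters if parameters is not None else [queries or []]
        return models.execute_kw(self.get_dbname(), uid,
                                 password or self.get_password(),
                                 model, action, args)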
Esempio n. 29
0
class FSSize(fstask.FSTask):
    """ An abstract class that represents size information extraction. """
    description = "current filesystem size"

    tags = abc.abstractproperty(
        doc="Strings used for extracting components of size.")

    # TASK methods

    @property
    def _availability_errors(self):
        return []

    @property
    def depends_on(self):
        return [self.fs._info]

    # IMPLEMENTATION methods

    def do_task(self):
        """ Returns the size of the filesystem.

            :returns: the size of the filesystem
            :rtype: :class:`~.size.Size`
            :raises FSError: on failure
        """
        error_msgs = self.availability_errors
        if error_msgs:
            raise FSError("\n".join(error_msgs))

        if self.fs._current_info is None:
            raise FSError("No info available for size computation.")

        # Setup initial values
        values = {}
        for k in _tags:
            values[k] = None

        # Attempt to set values from info
        for line in (l.strip() for l in self.fs._current_info.splitlines()):
            key = next(
                (k for k in _tags if line.startswith(getattr(self.tags, k))),
                None)
            if not key:
                continue

            if values[key] is not None:
                raise FSError("found two matches for key %s" % key)

            # Look for last numeric value in matching line
            fields = line.split()
            fields.reverse()
            for field in fields:
                try:
                    values[key] = int(field)
                    break
                except ValueError:
                    continue

        # Raise an error if a value is missing
        missing = next((k for k in _tags if values[k] is None), None)
        if missing is not None:
            raise FSError("Failed to parse info for %s." % missing)

        return values["count"] * Size(values["size"])
Esempio n. 30
0
class NodeAction(metaclass=ABCMeta):
    """Base class for node actions."""

    name = abstractproperty("""
        Action name.

        Will be used as the name for the action in all the forms.
        """)

    display = abstractproperty("""
        Action display name.

        Will be used as the label for the action's button.
        """)

    for_type = abstractproperty("""
        Can only be performed when the node type is in the for_type set.

        A list of NODE_TYPEs which are applicable for this action.
        """)

    action_type = abstractproperty("""
        The type of action being performed.

        Used to divide action menu into relevant groups.
        """)

    # Optional node states for which this action makes sense.
    # A collection of NODE_STATUS values.  The action will be available
    # only if `node.status in action.actionable_statuses`.
    actionable_statuses = None

    permission = abstractproperty("""
        Required permission.

        A `NodePermission` value.  The action will be available only if the
        user has this given permission on the subject node.
        """)

    # Optional machine permission that will be used when the action
    # is being applied to a node_type which is a machine.
    machine_permission = None

    # Optional controller permission that will be used when the action
    # is being applied to a node_type which is a controller.
    controller_permission = None

    # Whether the action is allowed when the node is locked
    allowed_when_locked = False

    def __init__(self, node, user, request=None, endpoint=ENDPOINT.UI):
        """Initialize a node action.

        All node actions' initializers must accept these same arguments,
        without variations.
        """
        self.node = node
        self.user = user
        self.request = request
        self.endpoint = endpoint

    def is_actionable(self):
        """Can this action be performed?

        If the node is not a machine-type node, actionable_statuses is not
        checked, as the status doesn't matter for non-machine node types.
        """
        if self.node.node_type not in self.for_type:
            return False
        elif self.node.locked and not self.allowed_when_locked:
            return False
        elif (self.node.node_type == NODE_TYPE.MACHINE
              and self.node.status not in self.actionable_statuses):
            return False
        return self.is_permitted()

    @abstractmethod
    def get_node_action_audit_description(self, action):
        """Retrieve the node action audit description."""

    def execute(self, *args, **kwargs):
        """Perform this action.

        Even though this is not the API, the action may raise
        :class:`MAASAPIException` exceptions.  When this happens, the view
        will return an HTTP response to the client reflecting the exception.
        """
        self._execute(*args, **kwargs)
        description = self.get_node_action_audit_description(self)
        # Log audit event for the action.
        create_audit_event(EVENT_TYPES.NODE,
                           self.endpoint,
                           self.request,
                           self.node.system_id,
                           description=description)

    @abstractmethod
    def _execute(self):
        """Perform this action."""

    def get_permission(self):
        """Return the permission value depending on if the node_type."""
        if self.node.is_machine and self.machine_permission is not None:
            return self.machine_permission
        if self.node.is_controller and self.controller_permission is not None:
            return self.controller_permission
        return self.permission

    def is_permitted(self):
        """Does the current user have the permission required?"""
        return self.user.has_perm(self.get_permission(), self.node)
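
A concrete action fills in the abstract properties and the two abstract methods above. The sketch below is illustrative only: ``RenameAction`` is not part of the original code, and the specific ``NODE_TYPE``, ``NODE_STATUS`` and ``NodePermission`` members are assumed to exist as referenced elsewhere in this class.

class RenameAction(NodeAction):
    """Illustrative action: rename a machine."""

    name = "rename"
    display = "Rename"
    for_type = {NODE_TYPE.MACHINE}               # assumed node-type enum member
    action_type = "misc"                         # hypothetical action-menu group
    actionable_statuses = (NODE_STATUS.READY,)   # assumed status enum member
    permission = NodePermission.admin            # assumed permission value

    def get_node_action_audit_description(self, action):
        return "Renamed node '%s'." % action.node.system_id

    def _execute(self, new_hostname):
        # Assumes ``node`` is a Django-style model with a ``hostname`` field.
        self.node.hostname = new_hostname
        self.node.save()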
Esempio n. 31
0
class BaseMFD(metaclass=abc.ABCMeta):
    """
    Abstract base class for Magnitude-Frequency Distribution function.
    """

    #: The set of modification type names that are supported by an MFD.
    #: Each modification should have a corresponding method named
    #: ``modify_modificationname()`` where the actual modification
    #: logic resides.
    MODIFICATIONS = abc.abstractproperty()

    def modify(self, modification, parameters):
        """
        Apply a single modification to the MFD parameters.

        Reflects the modification method and calls it passing ``parameters``
        as keyword arguments. See also :attr:`MODIFICATIONS`.

        Modifications can be applied one on top of another. The logic
        of stacking modifications is up to a specific MFD implementation.

        :param modification:
            String name representing the type of modification.
        :param parameters:
            Dictionary of parameters needed for modification.
        :raises ValueError:
            If ``modification`` is missing from :attr:`MODIFICATIONS`.
        """
        if modification not in self.MODIFICATIONS:
            raise ValueError('Modification %s is not supported by %s' %
                             (modification, type(self).__name__))
        meth = getattr(self, 'modify_%s' % modification)
        meth(**parameters)
        self.check_constraints()

    @abc.abstractmethod
    def check_constraints(self):
        """
        Check MFD-specific constraints and raise :exc:`ValueError`
        in case of violation.

        This method must be implemented by subclasses.
        """

    @abc.abstractmethod
    def get_annual_occurrence_rates(self):
        """
        Return an MFD annual occurrence rates histogram.

        This method must be implemented by subclasses.

        :return:
            The list of tuples, each tuple containing a pair
            ``(magnitude, occurrence_rate)``. Each pair represents
            a single bin of the histogram with ``magnitude`` being
            the center of the bin. Magnitude values increase monotonically
            by the bin width. ``occurrence_rate`` represents the number of
            events per year with a magnitude that falls within the bin's
            boundaries.
        """

    @abc.abstractmethod
    def get_min_max_mag(self):
        """
        Return the minimum and maximum magnitudes this MFD is defined for.

        This method must be implemented by subclasses.

        :return:
            A pair of magnitude values (minimum and maximum), both floats.
        """

    def __repr__(self):
        """
        Returns the name of the magnitude frequency distribution class
        """
        return "<%s>" % self.__class__.__name__
Esempio n. 32
0
class QuicklogicPlatform(TemplatedPlatform):
    """
    Symbiflow toolchain
    -------------------

    Required tools:
        * ``symbiflow_synth``
        * ``symbiflow_pack``
        * ``symbiflow_place``
        * ``symbiflow_route``
        * ``symbiflow_write_fasm``
        * ``symbiflow_write_bitstream``

    The environment is populated by running the script specified in the environment variable
    ``NMIGEN_ENV_QLSymbiflow``, if present.

    Available overrides:
        * ``add_constraints``: inserts commands in XDC file.
    """

    device = abstractproperty()
    package = abstractproperty()

    # Since the QuickLogic version of the SymbiFlow toolchain is not upstreamed yet,
    # we should distinguish the QuickLogic version from the mainline one.
    # QuickLogic toolchain: https://github.com/QuickLogic-Corp/quicklogic-fpga-toolchain/releases
    toolchain = "QLSymbiflow"

    required_tools = [
        "symbiflow_synth", "symbiflow_pack", "symbiflow_place",
        "symbiflow_route", "symbiflow_write_fasm", "symbiflow_write_bitstream"
    ]
    file_templates = {
        **TemplatedPlatform.build_script_templates, "{{name}}.v":
        r"""
            /* {{autogenerated}} */
            {{emit_verilog()}}
        """,
        "{{name}}.debug.v":
        r"""
            /* {{autogenerated}} */
            {{emit_debug_verilog()}}
        """,
        "{{name}}.pcf":
        r"""
            # {{autogenerated}}
            {% for port_name, pin_name, attrs in platform.iter_port_constraints_bits() -%}
                set_io {{port_name}} {{pin_name}}
            {% endfor %}
        """,
        "{{name}}.xdc":
        r"""
            # {{autogenerated}}
            {% for port_name, pin_name, attrs in platform.iter_port_constraints_bits() -%}
                {% for attr_name, attr_value in attrs.items() -%}
                    set_property {{attr_name}} {{attr_value}} [get_ports {{port_name|tcl_escape}}]
                {% endfor %}
            {% endfor %}
            {{get_override("add_constraints")|default("# (add_constraints placeholder)")}}
        """,
        "{{name}}.sdc":
        r"""
            # {{autogenerated}}
            {% for net_signal, port_signal, frequency in platform.iter_clock_constraints() -%}
                {% if port_signal is not none -%}
                    create_clock -period {{1000000000/frequency}} {{port_signal.name|ascii_escape}}
                {% endif %}
            {% endfor %}
        """
    }
    command_templates = [
        r"""
        {{invoke_tool("symbiflow_synth")}}
            -t {{name}}
            -v {% for file in platform.iter_extra_files(".v", ".sv", ".vhd", ".vhdl") -%} {{file}} {% endfor %} {{name}}.v
            -d {{platform.device}}
            -p {{name}}.pcf
            -P {{platform.package}}
            -x {{name}}.xdc
        """, r"""
        {{invoke_tool("symbiflow_pack")}}
            -e {{name}}.eblif
            -d {{platform.device}}
            -s {{name}}.sdc
        """, r"""
        {{invoke_tool("symbiflow_place")}}
            -e {{name}}.eblif
            -d {{platform.device}}
            -p {{name}}.pcf
            -n {{name}}.net
            -P {{platform.package}}
            -s {{name}}.sdc
        """, r"""
        {{invoke_tool("symbiflow_route")}}
            -e {{name}}.eblif
            -d {{platform.device}}
            -s {{name}}.sdc
        """, r"""
        {{invoke_tool("symbiflow_write_fasm")}}
            -e {{name}}.eblif
            -d {{platform.device}}
            -s {{name}}.sdc
        """, r"""
        {{invoke_tool("symbiflow_write_bitstream")}}
            -f {{name}}.fasm
            -d {{platform.device}}
            -P {{platform.package}}
            -b {{name}}.bit
        """
    ]

    # Common logic

    def __init__(self):
        super().__init__()

    def add_clock_constraint(self, clock, frequency):
        super().add_clock_constraint(clock, frequency)
        clock.attrs["keep"] = "TRUE"

    def create_missing_domain(self, name):
        if name == "sync" and self.default_clk is not None:
            m = Module()
            if self.default_clk == "sys_clk0":
                clk_i = Signal()
                sys_clk0 = Signal()
                m.submodules += Instance("qlal4s3b_cell_macro",
                                         o_Sys_Clk0=sys_clk0)
                m.submodules += Instance("gclkbuff", o_A=sys_clk0, o_Z=clk_i)
            else:
                clk_i = self.request(self.default_clk).i

            if self.default_rst is not None:
                rst_i = self.request(self.default_rst).i
            else:
                rst_i = Const(0)

            m.domains += ClockDomain("sync")
            m.d.comb += ClockSignal("sync").eq(clk_i)
            m.submodules.reset_sync = ResetSynchronizer(rst_i, domain="sync")
            return m
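
``device`` and ``package`` are declared abstract above, so an actual board definition is expected to subclass the platform and pin them down. A minimal sketch with placeholder values (not a real board); the empty ``resources``/``connectors`` lists assume the usual nMigen board-platform attributes.

class MyQuicklogicBoardPlatform(QuicklogicPlatform):   # hypothetical board
    device = "ql-eos-s3_wlcsp"        # placeholder device string
    package = "PD64"                  # placeholder package string
    default_clk = "sys_clk0"          # handled by create_missing_domain() above
    resources = []                    # assumed nMigen attribute; board I/O goes here
    connectors = []                   # assumed nMigen attribute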
Esempio n. 33
0
class LatticeICE40Platform(TemplatedPlatform):
    """
    IceStorm toolchain
    ------------------

    Required tools:
        * ``yosys``
        * ``nextpnr-ice40``
        * ``icepack``

    The environment is populated by running the script specified in the environment variable
    ``NMIGEN_ENV_IceStorm``, if present.

    Available overrides:
        * ``verbose``: enables logging of informational messages to standard error.
        * ``read_verilog_opts``: adds options for ``read_verilog`` Yosys command.
        * ``synth_opts``: adds options for ``synth_ice40`` Yosys command.
        * ``script_after_read``: inserts commands after ``read_ilang`` in Yosys script.
        * ``script_after_synth``: inserts commands after ``synth_ice40`` in Yosys script.
        * ``yosys_opts``: adds extra options for ``yosys``.
        * ``nextpnr_opts``: adds extra options for ``nextpnr-ice40``.
        * ``add_pre_pack``: inserts commands at the end in pre-pack Python script.
        * ``add_constraints``: inserts commands at the end in the PCF file.

    Build products:
        * ``{{name}}.rpt``: Yosys log.
        * ``{{name}}.json``: synthesized RTL.
        * ``{{name}}.tim``: nextpnr log.
        * ``{{name}}.asc``: ASCII bitstream.
        * ``{{name}}.bin``: binary bitstream.

    iCECube2 toolchain
    ------------------

    This toolchain comes in two variants: ``LSE-iCECube2`` and ``Synplify-iCECube2``.

    Required tools:
        * iCECube2 toolchain
        * ``tclsh``

    The environment is populated by setting the necessary environment variables based on
    ``NMIGEN_ENV_iCECube2``, which must point to the root of the iCECube2 installation, and
    is required.

    Available overrides:
        * ``verbose``: enables logging of informational messages to standard error.
        * ``lse_opts``: adds options for LSE.
        * ``script_after_add``: inserts commands after ``add_file`` in Synplify Tcl script.
        * ``script_after_options``: inserts commands after ``set_option`` in Synplify Tcl script.
        * ``add_constraints``: inserts commands in SDC file.
        * ``script_after_flow``: inserts commands after ``run_sbt_backend_auto`` in SBT
          Tcl script.

    Build products:
        * ``{{name}}_lse.log`` (LSE) or ``{{name}}_design/{{name}}.htm`` (Synplify): synthesis log.
        * ``sbt/outputs/router/{{name}}_timing.rpt``: timing report.
        * ``{{name}}.edf``: EDIF netlist.
        * ``{{name}}.bin``: binary bitstream.
    """

    toolchain = None  # selected when creating platform

    device = abstractproperty()
    package = abstractproperty()

    # IceStorm templates

    _nextpnr_device_options = {
        "iCE40LP384": "--lp384",
        "iCE40LP1K": "--lp1k",
        "iCE40LP4K": "--lp8k",
        "iCE40LP8K": "--lp8k",
        "iCE40HX1K": "--hx1k",
        "iCE40HX4K": "--hx8k",
        "iCE40HX8K": "--hx8k",
        "iCE40UP5K": "--up5k",
        "iCE40UP3K": "--up5k",
        "iCE5LP4K": "--u4k",
        "iCE5LP2K": "--u4k",
        "iCE5LP1K": "--u4k",
    }
    _nextpnr_package_options = {
        "iCE40LP4K": ":4k",
        "iCE40HX4K": ":4k",
        "iCE40UP3K": "",
        "iCE5LP2K": "",
        "iCE5LP1K": "",
    }
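    # Note: the 4K iCE40 devices are bonded-down 8K dies, so nextpnr targets them
    # as the corresponding 8K device with a ":4k" package suffix; the smaller iCE5
    # parts are likewise mapped onto the u4k target by the two tables above.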

    _icestorm_required_tools = [
        "yosys",
        "nextpnr-ice40",
        "icepack",
    ]
    _icestorm_file_templates = {
        **TemplatedPlatform.build_script_templates,
        "{{name}}.il":
        r"""
            # {{autogenerated}}
            {{emit_rtlil()}}
        """,
        "{{name}}.debug.v":
        r"""
            /* {{autogenerated}} */
            {{emit_debug_verilog()}}
        """,
        "{{name}}.ys":
        r"""
            # {{autogenerated}}
            {% for file in platform.iter_extra_files(".v") -%}
                read_verilog {{get_override("read_verilog_opts")|options}} {{file}}
            {% endfor %}
            {% for file in platform.iter_extra_files(".sv") -%}
                read_verilog -sv {{get_override("read_verilog_opts")|options}} {{file}}
            {% endfor %}
            {% for file in platform.iter_extra_files(".il") -%}
                read_ilang {{file}}
            {% endfor %}
            read_ilang {{name}}.il
            {{get_override("script_after_read")|default("# (script_after_read placeholder)")}}
            synth_ice40 {{get_override("synth_opts")|options}} -top {{name}}
            {{get_override("script_after_synth")|default("# (script_after_synth placeholder)")}}
            write_json {{name}}.json
        """,
        "{{name}}.pcf":
        r"""
            # {{autogenerated}}
            {% for port_name, pin_name, attrs in platform.iter_port_constraints_bits() -%}
                set_io {{port_name}} {{pin_name}}
            {% endfor %}
            {% for net_signal, port_signal, frequency in platform.iter_clock_constraints() -%}
                set_frequency {{net_signal|hierarchy(".")}} {{frequency/1000000}}
            {% endfor%}
            {{get_override("add_constraints")|default("# (add_constraints placeholder)")}}
        """,
    }
    _icestorm_command_templates = [
        r"""
        {{invoke_tool("yosys")}}
            {{quiet("-q")}}
            {{get_override("yosys_opts")|options}}
            -l {{name}}.rpt
            {{name}}.ys
        """, r"""
        {{invoke_tool("nextpnr-ice40")}}
            {{quiet("--quiet")}}
            {{get_override("nextpnr_opts")|options}}
            --log {{name}}.tim
            {{platform._nextpnr_device_options[platform.device]}}
            --package
                {{platform.package|lower}}{{platform._nextpnr_package_options[platform.device]|
                                            default("")}}
            --json {{name}}.json
            --pcf {{name}}.pcf
            --asc {{name}}.asc
        """, r"""
        {{invoke_tool("icepack")}}
            {{verbose("-v")}}
            {{name}}.asc
            {{name}}.bin
        """
    ]

    # iCECube2 templates

    _icecube2_required_tools = [
        "yosys",
        "synthesis",
        "synpwrap",
        "tclsh",
    ]
    _icecube2_file_templates = {
        **TemplatedPlatform.build_script_templates,
        "build_{{name}}.sh":
        r"""
            # {{autogenerated}}
            set -e{{verbose("x")}}
            if [ -n "${{platform._toolchain_env_var}}" ]; then
                # LSE environment
                export LD_LIBRARY_PATH=${{platform._toolchain_env_var}}/LSE/bin/lin64:$LD_LIBRARY_PATH
                export PATH=${{platform._toolchain_env_var}}/LSE/bin/lin64:$PATH
                export FOUNDRY=${{platform._toolchain_env_var}}/LSE
                # Synplify environment
                export LD_LIBRARY_PATH=${{platform._toolchain_env_var}}/sbt_backend/bin/linux/opt/synpwrap:$LD_LIBRARY_PATH
                export PATH=${{platform._toolchain_env_var}}/sbt_backend/bin/linux/opt/synpwrap:$PATH
                export SYNPLIFY_PATH=${{platform._toolchain_env_var}}/synpbase
                # Common environment
                export SBT_DIR=${{platform._toolchain_env_var}}/sbt_backend
            else
                echo "Variable ${{platform._toolchain_env_var}} must be set" >&2; exit 1
            fi
            {{emit_commands("sh")}}
        """,
        "{{name}}.v":
        r"""
            /* {{autogenerated}} */
            {{emit_verilog()}}
        """,
        "{{name}}.debug.v":
        r"""
            /* {{autogenerated}} */
            {{emit_debug_verilog()}}
        """,
        "{{name}}_lse.prj":
        r"""
            # {{autogenerated}}
            -a SBT{{platform.family}}
            -d {{platform.device}}
            -t {{platform.package}}
            {{get_override("lse_opts")|options|default("# (lse_opts placeholder)")}}
            {% for file in platform.iter_extra_files(".v") -%}
                -ver {{file}}
            {% endfor %}
            -ver {{name}}.v
            -sdc {{name}}.sdc
            -top {{name}}
            -output_edif {{name}}.edf
            -logfile {{name}}_lse.log
        """,
        "{{name}}_syn.prj":
        r"""
            # {{autogenerated}}
            {% for file in platform.iter_extra_files(".v", ".sv", ".vhd", ".vhdl") -%}
                add_file -verilog {{file}}
            {% endfor %}
            add_file -verilog {{name}}.v
            add_file -constraint {{name}}.sdc
            {{get_override("script_after_add")|default("# (script_after_add placeholder)")}}
            impl -add {{name}}_design -type fpga
            set_option -technology SBT{{platform.family}}
            set_option -part {{platform.device}}
            set_option -package {{platform.package}}
            {{get_override("script_after_options")|default("# (script_after_options placeholder)")}}
            project -result_format edif
            project -result_file {{name}}.edf
            impl -active {{name}}_design
            project -run compile
            project -run map
            project -run fpga_mapper
            file copy -force -- {{name}}_design/{{name}}.edf {{name}}.edf
        """,
        "{{name}}.sdc":
        r"""
            # {{autogenerated}}
            {% for net_signal, port_signal, frequency in platform.iter_clock_constraints() -%}
                {% if port_signal is not none -%}
                    create_clock -name {{port_signal.name}} -period {{1000000000/frequency}} [get_ports {{port_signal.name}}]
                {% else -%}
                    create_clock -name {{net_signal.name}} -period {{1000000000/frequency}} [get_nets {{net_signal|hierarchy("/")}}]
                {% endif %}
            {% endfor %}
            {{get_override("add_constraints")|default("# (add_constraints placeholder)")}}
        """,
        "{{name}}.tcl":
        r"""
            # {{autogenerated}}
            set device {{platform.device}}-{{platform.package}}
            set top_module {{name}}
            set proj_dir .
            set output_dir .
            set edif_file {{name}}
            set tool_options ":edifparser -y {{name}}.pcf"
            set sbt_root $::env(SBT_DIR)
            append sbt_tcl $sbt_root "/tcl/sbt_backend_synpl.tcl"
            source $sbt_tcl
            run_sbt_backend_auto $device $top_module $proj_dir $output_dir $tool_options $edif_file
            {{get_override("script_after_file")|default("# (script_after_file placeholder)")}}
            file copy -force -- sbt/outputs/bitmap/{{name}}_bitmap.bin {{name}}.bin
            exit
        """,
        "{{name}}.pcf":
        r"""
            # {{autogenerated}}
            {% for port_name, pin_name, attrs in platform.iter_port_constraints_bits() -%}
                set_io {{port_name}} {{pin_name}}
            {% endfor %}
        """,
    }
    _lse_icecube2_command_templates = [
        r"""synthesis -f {{name}}_lse.prj""",
        r"""tclsh {{name}}.tcl""",
    ]
    _synplify_icecube2_command_templates = [
        r"""synpwrap -prj {{name}}_syn.prj -log {{name}}_syn.log""",
        r"""tclsh {{name}}.tcl""",
    ]

    # Common logic

    def __init__(self, *, toolchain="IceStorm"):
        super().__init__()

        assert toolchain in ("IceStorm", "LSE-iCECube2", "Synplify-iCECube2")
        self.toolchain = toolchain

    @property
    def family(self):
        if self.device.startswith("iCE40"):
            return "iCE40"
        if self.device.startswith("iCE5"):
            return "iCE5"
        assert False

    @property
    def _toolchain_env_var(self):
        if self.toolchain == "IceStorm":
            return f"NMIGEN_ENV_{self.toolchain}"
        if self.toolchain in ("LSE-iCECube2", "Synplify-iCECube2"):
            return f"NMIGEN_ENV_iCECube2"
        assert False

    @property
    def required_tools(self):
        if self.toolchain == "IceStorm":
            return self._icestorm_required_tools
        if self.toolchain in ("LSE-iCECube2", "Synplify-iCECube2"):
            return self._icecube2_required_tools
        assert False

    @property
    def file_templates(self):
        if self.toolchain == "IceStorm":
            return self._icestorm_file_templates
        if self.toolchain in ("LSE-iCECube2", "Synplify-iCECube2"):
            return self._icecube2_file_templates
        assert False

    @property
    def command_templates(self):
        if self.toolchain == "IceStorm":
            return self._icestorm_command_templates
        if self.toolchain == "LSE-iCECube2":
            return self._lse_icecube2_command_templates
        if self.toolchain == "Synplify-iCECube2":
            return self._synplify_icecube2_command_templates
        assert False

    @property
    def default_clk_constraint(self):
        # Internal high-speed oscillator: 48 MHz / (2 ^ div)
        if self.default_clk == "SB_HFOSC":
            return Clock(48e6 / 2**self.hfosc_div)
        # Internal low-speed oscillator: 10 kHz
        elif self.default_clk == "SB_LFOSC":
            return Clock(10e3)
        # Otherwise, use the defined Clock resource.
        return super().default_clk_constraint

    def create_missing_domain(self, name):
        # For unknown reasons (no errata was ever published, and no documentation mentions this
        # issue), iCE40 BRAMs read as zeroes for ~3 us after configuration and release of internal
        # global reset. Note that this is a *time-based* delay, generated purely by the internal
        # oscillator, which may not be observed nor influenced directly. For details, see links:
        #  * https://github.com/cliffordwolf/icestorm/issues/76#issuecomment-289270411
        #  * https://github.com/cliffordwolf/icotools/issues/2#issuecomment-299734673
        #
        # To handle this, it is necessary to have a global reset in any iCE40 design that may
        # potentially instantiate BRAMs, and assert this reset for >3 us after configuration.
        # (We add a margin of 5x to allow for PVT variation.) If the board includes a dedicated
        # reset line, this line is ORed with the power on reset.
        #
        # If an internal oscillator is selected as the default clock source, the power-on-reset
        # delay is increased to 100 us, since the oscillators are only stable after that long.
        #
        # The power-on reset timer counts up because the vendor tools do not support initialization
        # of flip-flops.
        if name == "sync" and self.default_clk is not None:
            m = Module()

            # Internal high-speed clock: 6 MHz, 12 MHz, 24 MHz, or 48 MHz depending on the divider.
            if self.default_clk == "SB_HFOSC":
                if not hasattr(self, "hfosc_div"):
                    raise ValueError(
                        "SB_HFOSC divider exponent (hfosc_div) must be an integer "
                        "between 0 and 3")
                if not isinstance(
                        self.hfosc_div,
                        int) or self.hfosc_div < 0 or self.hfosc_div > 3:
                    raise ValueError(
                        "SB_HFOSC divider exponent (hfosc_div) must be an integer "
                        "between 0 and 3, not {!r}".format(self.hfosc_div))
                clk_i = Signal()
                m.submodules += Instance("SB_HFOSC",
                                         i_CLKHFEN=1,
                                         i_CLKHFPU=1,
                                         p_CLKHF_DIV="0b{0:b}".format(
                                             self.hfosc_div),
                                         o_CLKHF=clk_i)
                delay = int(100e-6 * self.default_clk_frequency)
            # Internal low-speed clock: 10 kHz.
            elif self.default_clk == "SB_LFOSC":
                clk_i = Signal()
                m.submodules += Instance("SB_LFOSC",
                                         i_CLKLFEN=1,
                                         i_CLKLFPU=1,
                                         o_CLKLF=clk_i)
                delay = int(100e-6 * self.default_clk_frequency)
            # User-defined clock signal.
            else:
                clk_i = self.request(self.default_clk).i
                delay = int(15e-6 * self.default_clk_frequency)

            if self.default_rst is not None:
                rst_i = self.request(self.default_rst).i
            else:
                rst_i = Const(0)

            # Power-on-reset domain
            m.domains += ClockDomain("por", reset_less=True, local=True)
            timer = Signal(range(delay))
            ready = Signal()
            m.d.comb += ClockSignal("por").eq(clk_i)
            with m.If(timer == delay):
                m.d.por += ready.eq(1)
            with m.Else():
                m.d.por += timer.eq(timer + 1)

            # Primary domain
            m.domains += ClockDomain("sync")
            m.d.comb += ClockSignal("sync").eq(clk_i)
            if self.default_rst is not None:
                m.submodules.reset_sync = ResetSynchronizer(~ready | rst_i,
                                                            domain="sync")
            else:
                m.d.comb += ResetSignal("sync").eq(~ready)

            return m

    def should_skip_port_component(self, port, attrs, component):
        # On iCE40, a differential input is placed by only instantiating an SB_IO primitive for
        # the pin with z=0, which is the non-inverting pin. The pinout unfortunately differs
        # between LP/HX and UP series:
        #  * for LP/HX, z=0 is DPxxB   (B is non-inverting, A is inverting)
        #  * for UP,    z=0 is IOB_xxA (A is non-inverting, B is inverting)
        if attrs.get("IO_STANDARD",
                     "SB_LVCMOS") == "SB_LVDS_INPUT" and component == "n":
            return True
        return False

    def _get_io_buffer(self,
                       m,
                       pin,
                       port,
                       attrs,
                       *,
                       i_invert=False,
                       o_invert=False,
                       invert_lut=False):
        def get_dff(clk, d, q):
            m.submodules += Instance("$dff",
                                     p_CLK_POLARITY=1,
                                     p_WIDTH=len(d),
                                     i_CLK=clk,
                                     i_D=d,
                                     o_Q=q)

        def get_ineg(y, invert):
            if invert_lut:
                a = Signal.like(y,
                                name_suffix="_x{}".format(1 if invert else 0))
                for bit in range(len(y)):
                    m.submodules += Instance("SB_LUT4",
                                             p_LUT_INIT=Const(
                                                 0b01 if invert else 0b10, 16),
                                             i_I0=a[bit],
                                             i_I1=Const(0),
                                             i_I2=Const(0),
                                             i_I3=Const(0),
                                             o_O=y[bit])
                return a
            elif invert:
                a = Signal.like(y, name_suffix="_n")
                m.d.comb += y.eq(~a)
                return a
            else:
                return y

        def get_oneg(a, invert):
            if invert_lut:
                y = Signal.like(a,
                                name_suffix="_x{}".format(1 if invert else 0))
                for bit in range(len(a)):
                    m.submodules += Instance("SB_LUT4",
                                             p_LUT_INIT=Const(
                                                 0b01 if invert else 0b10, 16),
                                             i_I0=a[bit],
                                             i_I1=Const(0),
                                             i_I2=Const(0),
                                             i_I3=Const(0),
                                             o_O=y[bit])
                return y
            elif invert:
                y = Signal.like(a, name_suffix="_n")
                m.d.comb += y.eq(~a)
                return y
            else:
                return a

        if "GLOBAL" in attrs:
            is_global_input = bool(attrs["GLOBAL"])
            del attrs["GLOBAL"]
        else:
            is_global_input = False
        assert not (is_global_input and i_invert)

        if "i" in pin.dir:
            if pin.xdr < 2:
                pin_i = get_ineg(pin.i, i_invert)
            elif pin.xdr == 2:
                pin_i0 = get_ineg(pin.i0, i_invert)
                pin_i1 = get_ineg(pin.i1, i_invert)
        if "o" in pin.dir:
            if pin.xdr < 2:
                pin_o = get_oneg(pin.o, o_invert)
            elif pin.xdr == 2:
                pin_o0 = get_oneg(pin.o0, o_invert)
                pin_o1 = get_oneg(pin.o1, o_invert)

        if "i" in pin.dir and pin.xdr == 2:
            i0_ff = Signal.like(pin_i0, name_suffix="_ff")
            i1_ff = Signal.like(pin_i1, name_suffix="_ff")
            get_dff(pin.i_clk, i0_ff, pin_i0)
            get_dff(pin.i_clk, i1_ff, pin_i1)
        if "o" in pin.dir and pin.xdr == 2:
            o1_ff = Signal.like(pin_o1, name_suffix="_ff")
            get_dff(pin.o_clk, pin_o1, o1_ff)

        for bit in range(len(port)):
            io_args = [
                ("io", "PACKAGE_PIN", port[bit]),
                *(("p", key, value) for key, value in attrs.items()),
            ]

            if "i" not in pin.dir:
                # If no input pin is requested, it is important to use a non-registered input pin
                # type, because an output-only pin would not have an input clock, and if its input
                # is configured as registered, this would prevent a co-located input-capable pin
                # from using an input clock.
                i_type = 0b01  # PIN_INPUT
            elif pin.xdr == 0:
                i_type = 0b01  # PIN_INPUT
            elif pin.xdr > 0:
                i_type = 0b00  # PIN_INPUT_REGISTERED aka PIN_INPUT_DDR
            if "o" not in pin.dir:
                o_type = 0b0000  # PIN_NO_OUTPUT
            elif pin.xdr == 0 and pin.dir == "o":
                o_type = 0b0110  # PIN_OUTPUT
            elif pin.xdr == 0:
                o_type = 0b1010  # PIN_OUTPUT_TRISTATE
            elif pin.xdr == 1 and pin.dir == "o":
                o_type = 0b0101  # PIN_OUTPUT_REGISTERED
            elif pin.xdr == 1:
                o_type = 0b1101  # PIN_OUTPUT_REGISTERED_ENABLE_REGISTERED
            elif pin.xdr == 2 and pin.dir == "o":
                o_type = 0b0100  # PIN_OUTPUT_DDR
            elif pin.xdr == 2:
                o_type = 0b1100  # PIN_OUTPUT_DDR_ENABLE_REGISTERED
            io_args.append(("p", "PIN_TYPE", C((o_type << 2) | i_type, 6)))

            if hasattr(pin, "i_clk"):
                io_args.append(("i", "INPUT_CLK", pin.i_clk))
            if hasattr(pin, "o_clk"):
                io_args.append(("i", "OUTPUT_CLK", pin.o_clk))

            if "i" in pin.dir:
                if pin.xdr == 0 and is_global_input:
                    io_args.append(("o", "GLOBAL_BUFFER_OUTPUT", pin.i[bit]))
                elif pin.xdr < 2:
                    io_args.append(("o", "D_IN_0", pin_i[bit]))
                elif pin.xdr == 2:
                    # Re-register both inputs before they enter fabric. This increases hold time
                    # to an entire cycle, and adds one cycle of latency.
                    io_args.append(("o", "D_IN_0", i0_ff[bit]))
                    io_args.append(("o", "D_IN_1", i1_ff[bit]))
            if "o" in pin.dir:
                if pin.xdr < 2:
                    io_args.append(("i", "D_OUT_0", pin_o[bit]))
                elif pin.xdr == 2:
                    # Re-register negedge output after it leaves fabric. This increases setup time
                    # to an entire cycle, and doesn't add latency.
                    io_args.append(("i", "D_OUT_0", pin_o0[bit]))
                    io_args.append(("i", "D_OUT_1", o1_ff[bit]))

            if pin.dir in ("oe", "io"):
                io_args.append(("i", "OUTPUT_ENABLE", pin.oe))

            if is_global_input:
                m.submodules["{}_{}".format(pin.name, bit)] = Instance(
                    "SB_GB_IO", *io_args)
            else:
                m.submodules["{}_{}".format(pin.name, bit)] = Instance(
                    "SB_IO", *io_args)

    def get_input(self, pin, port, attrs, invert):
        self._check_feature("single-ended input",
                            pin,
                            attrs,
                            valid_xdrs=(0, 1, 2),
                            valid_attrs=True)
        m = Module()
        self._get_io_buffer(m, pin, port, attrs, i_invert=invert)
        return m

    def get_output(self, pin, port, attrs, invert):
        self._check_feature("single-ended output",
                            pin,
                            attrs,
                            valid_xdrs=(0, 1, 2),
                            valid_attrs=True)
        m = Module()
        self._get_io_buffer(m, pin, port, attrs, o_invert=invert)
        return m

    def get_tristate(self, pin, port, attrs, invert):
        self._check_feature("single-ended tristate",
                            pin,
                            attrs,
                            valid_xdrs=(0, 1, 2),
                            valid_attrs=True)
        m = Module()
        self._get_io_buffer(m, pin, port, attrs, o_invert=invert)
        return m

    def get_input_output(self, pin, port, attrs, invert):
        self._check_feature("single-ended input/output",
                            pin,
                            attrs,
                            valid_xdrs=(0, 1, 2),
                            valid_attrs=True)
        m = Module()
        self._get_io_buffer(m,
                            pin,
                            port,
                            attrs,
                            i_invert=invert,
                            o_invert=invert)
        return m

    def get_diff_input(self, pin, p_port, n_port, attrs, invert):
        self._check_feature("differential input",
                            pin,
                            attrs,
                            valid_xdrs=(0, 1, 2),
                            valid_attrs=True)
        m = Module()
        # See comment in should_skip_port_component above.
        self._get_io_buffer(m, pin, p_port, attrs, i_invert=invert)
        return m

    def get_diff_output(self, pin, p_port, n_port, attrs, invert):
        self._check_feature("differential output",
                            pin,
                            attrs,
                            valid_xdrs=(0, 1, 2),
                            valid_attrs=True)
        m = Module()
        # Note that the non-inverting output pin is not driven the same way as a regular
        # output pin. The inverter introduces a delay, so for a non-inverting output pin,
        # an identical delay is introduced by instantiating a LUT. This makes the waveform
        # perfectly symmetric in the xdr=0 case.
        self._get_io_buffer(m,
                            pin,
                            p_port,
                            attrs,
                            o_invert=invert,
                            invert_lut=True)
        self._get_io_buffer(m,
                            pin,
                            n_port,
                            attrs,
                            o_invert=not invert,
                            invert_lut=True)
        return m
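
As with the QuickLogic platform, a board file subclasses the platform and supplies the abstract ``device`` and ``package``; if it selects ``SB_HFOSC`` as the default clock, it must also define the ``hfosc_div`` exponent checked in ``create_missing_domain`` above. A minimal sketch with placeholder values, not a real board definition.

class MyICE40BoardPlatform(LatticeICE40Platform):   # hypothetical board
    device = "iCE40UP5K"              # must be a key of _nextpnr_device_options
    package = "SG48"                  # placeholder package string
    default_clk = "SB_HFOSC"          # internal high-speed oscillator handled above
    hfosc_div = 2                     # 48 MHz / 2**2 = 12 MHz
    resources = []                    # assumed nMigen attribute; board I/O goes here
    connectors = []                   # assumed nMigen attribute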
Esempio n. 34
0
class HomeServer(object):
    """A basic homeserver object without lazy component builders.

    This will need all of the components it requires either to be passed as
    constructor arguments, or to have the relevant builder methods overridden
    to create them.
    Typically this would only be used for unit tests.

    For every dependency in the DEPENDENCIES list below, this class creates one
    method,
        def get_DEPENDENCY(self)
    which returns the value of that dependency. If no value has yet been set
    nor was provided to the constructor, it will attempt to call a lazy builder
    method called
        def build_DEPENDENCY(self)
    which must be implemented by the subclass. This code may call any of the
    required "get" methods on the instance to obtain the sub-dependencies that
    one requires.

    Attributes:
        config (synapse.config.homeserver.HomeserverConfig):
        _listening_services (list[twisted.internet.tcp.Port]): TCP ports that
            we are listening on to provide HTTP services.
    """

    __metaclass__ = abc.ABCMeta

    DEPENDENCIES = [
        "http_client",
        "db_pool",
        "federation_client",
        "federation_server",
        "handlers",
        "auth",
        "room_creation_handler",
        "state_handler",
        "state_resolution_handler",
        "presence_handler",
        "sync_handler",
        "typing_handler",
        "room_list_handler",
        "acme_handler",
        "auth_handler",
        "device_handler",
        "stats_handler",
        "e2e_keys_handler",
        "e2e_room_keys_handler",
        "event_handler",
        "event_stream_handler",
        "initial_sync_handler",
        "application_service_api",
        "application_service_scheduler",
        "application_service_handler",
        "device_message_handler",
        "profile_handler",
        "event_creation_handler",
        "deactivate_account_handler",
        "set_password_handler",
        "notifier",
        "event_sources",
        "keyring",
        "pusherpool",
        "event_builder_factory",
        "filtering",
        "http_client_context_factory",
        "simple_http_client",
        "proxied_http_client",
        "media_repository",
        "media_repository_resource",
        "federation_transport_client",
        "federation_sender",
        "receipts_handler",
        "macaroon_generator",
        "tcp_replication",
        "read_marker_handler",
        "action_generator",
        "user_directory_handler",
        "groups_local_handler",
        "groups_server_handler",
        "groups_attestation_signing",
        "groups_attestation_renewer",
        "secrets",
        "spam_checker",
        "third_party_event_rules",
        "room_member_handler",
        "federation_registry",
        "server_notices_manager",
        "server_notices_sender",
        "message_handler",
        "pagination_handler",
        "room_context_handler",
        "sendmail",
        "registration_handler",
        "account_validity_handler",
        "saml_handler",
        "event_client_serializer",
        "storage",
    ]

    REQUIRED_ON_MASTER_STARTUP = ["user_directory_handler", "stats_handler"]

    # This is overridden in derived application classes
    # (such as synapse.app.homeserver.SynapseHomeServer) and gives the class to be
    # instantiated during setup() for future return by get_datastore()
    DATASTORE_CLASS = abc.abstractproperty()

    def __init__(self, hostname, reactor=None, **kwargs):
        """
        Args:
            hostname : The hostname for the server.
        """
        if not reactor:
            from twisted.internet import reactor

        self._reactor = reactor
        self.hostname = hostname
        self._building = {}
        self._listening_services = []
        self.start_time = None

        self.clock = Clock(reactor)
        self.distributor = Distributor()
        self.ratelimiter = Ratelimiter()
        self.admin_redaction_ratelimiter = Ratelimiter()
        self.registration_ratelimiter = Ratelimiter()

        self.datastores = None

        # Other kwargs are explicit dependencies
        for depname in kwargs:
            setattr(self, depname, kwargs[depname])

    def setup(self):
        logger.info("Setting up.")
        with self.get_db_conn() as conn:
            self.datastores = DataStores(self.DATASTORE_CLASS, conn, self)
            conn.commit()
        self.start_time = int(self.get_clock().time())
        logger.info("Finished setting up.")

    def setup_master(self):
        """
        Some handlers have side effects on instantiation (like registering
        background updates). This function causes them to be fetched, and
        therefore instantiated, to run those side effects.
        """
        for i in self.REQUIRED_ON_MASTER_STARTUP:
            getattr(self, "get_" + i)()

    def get_reactor(self):
        """
        Fetch the Twisted reactor in use by this HomeServer.
        """
        return self._reactor

    def get_ip_from_request(self, request):
        # X-Forwarded-For is handled by our custom request type.
        return request.getClientIP()

    def is_mine(self, domain_specific_string):
        return domain_specific_string.domain == self.hostname

    def is_mine_id(self, string):
        return string.split(":", 1)[1] == self.hostname

    def get_clock(self):
        return self.clock

    def get_datastore(self):
        return self.datastores.main

    def get_config(self):
        return self.config

    def get_distributor(self):
        return self.distributor

    def get_ratelimiter(self):
        return self.ratelimiter

    def get_registration_ratelimiter(self):
        return self.registration_ratelimiter

    def get_admin_redaction_ratelimiter(self):
        return self.admin_redaction_ratelimiter

    def build_federation_client(self):
        return FederationClient(self)

    def build_federation_server(self):
        return FederationServer(self)

    def build_handlers(self):
        return Handlers(self)

    def build_notifier(self):
        return Notifier(self)

    def build_auth(self):
        return Auth(self)

    def build_http_client_context_factory(self):
        return (InsecureInterceptableContextFactory() if
                self.config.use_insecure_ssl_client_just_for_testing_do_not_use
                else BrowserLikePolicyForHTTPS())

    def build_simple_http_client(self):
        return SimpleHttpClient(self)

    def build_proxied_http_client(self):
        return SimpleHttpClient(
            self,
            http_proxy=os.getenvb(b"http_proxy"),
            https_proxy=os.getenvb(b"HTTPS_PROXY"),
        )

    def build_room_creation_handler(self):
        return RoomCreationHandler(self)

    def build_sendmail(self):
        return sendmail

    def build_state_handler(self):
        return StateHandler(self)

    def build_state_resolution_handler(self):
        return StateResolutionHandler(self)

    def build_presence_handler(self):
        return PresenceHandler(self)

    def build_typing_handler(self):
        return TypingHandler(self)

    def build_sync_handler(self):
        return SyncHandler(self)

    def build_room_list_handler(self):
        return RoomListHandler(self)

    def build_auth_handler(self):
        return AuthHandler(self)

    def build_macaroon_generator(self):
        return MacaroonGenerator(self)

    def build_device_handler(self):
        if self.config.worker_app:
            return DeviceWorkerHandler(self)
        else:
            return DeviceHandler(self)

    def build_device_message_handler(self):
        return DeviceMessageHandler(self)

    def build_e2e_keys_handler(self):
        return E2eKeysHandler(self)

    def build_e2e_room_keys_handler(self):
        return E2eRoomKeysHandler(self)

    def build_acme_handler(self):
        return AcmeHandler(self)

    def build_application_service_api(self):
        return ApplicationServiceApi(self)

    def build_application_service_scheduler(self):
        return ApplicationServiceScheduler(self)

    def build_application_service_handler(self):
        return ApplicationServicesHandler(self)

    def build_event_handler(self):
        return EventHandler(self)

    def build_event_stream_handler(self):
        return EventStreamHandler(self)

    def build_initial_sync_handler(self):
        return InitialSyncHandler(self)

    def build_profile_handler(self):
        if self.config.worker_app:
            return BaseProfileHandler(self)
        else:
            return MasterProfileHandler(self)

    def build_event_creation_handler(self):
        return EventCreationHandler(self)

    def build_deactivate_account_handler(self):
        return DeactivateAccountHandler(self)

    def build_set_password_handler(self):
        return SetPasswordHandler(self)

    def build_event_sources(self):
        return EventSources(self)

    def build_keyring(self):
        return Keyring(self)

    def build_event_builder_factory(self):
        return EventBuilderFactory(self)

    def build_filtering(self):
        return Filtering(self)

    def build_pusherpool(self):
        return PusherPool(self)

    def build_http_client(self):
        tls_client_options_factory = context_factory.ClientTLSOptionsFactory(
            self.config)
        return MatrixFederationHttpClient(self, tls_client_options_factory)

    def build_db_pool(self):
        name = self.db_config["name"]

        return adbapi.ConnectionPool(name,
                                     cp_reactor=self.get_reactor(),
                                     **self.db_config.get("args", {}))

    def get_db_conn(self, run_new_connection=True):
        """Makes a new connection to the database, skipping the db pool

        Returns:
            Connection: a connection object implementing the PEP-249 spec
        """
        # Any param beginning with cp_ is a parameter for adbapi, and should
        # not be passed to the database engine.
        db_params = {
            k: v
            for k, v in self.db_config.get("args", {}).items()
            if not k.startswith("cp_")
        }
        db_conn = self.database_engine.module.connect(**db_params)
        if run_new_connection:
            self.database_engine.on_new_connection(db_conn)
        return db_conn

    def build_media_repository_resource(self):
        # build the media repo resource. This indirects through the HomeServer
        # to ensure that we only have a single instance of the media repository.
        return MediaRepositoryResource(self)

    def build_media_repository(self):
        return MediaRepository(self)

    def build_federation_transport_client(self):
        return TransportLayerClient(self)

    def build_federation_sender(self):
        if self.should_send_federation():
            return FederationSender(self)
        elif not self.config.worker_app:
            return FederationRemoteSendQueue(self)
        else:
            raise Exception("Workers cannot send federation traffic")

    def build_receipts_handler(self):
        return ReceiptsHandler(self)

    def build_read_marker_handler(self):
        return ReadMarkerHandler(self)

    def build_tcp_replication(self):
        raise NotImplementedError()

    def build_action_generator(self):
        return ActionGenerator(self)

    def build_user_directory_handler(self):
        return UserDirectoryHandler(self)

    def build_groups_local_handler(self):
        return GroupsLocalHandler(self)

    def build_groups_server_handler(self):
        return GroupsServerHandler(self)

    def build_groups_attestation_signing(self):
        return GroupAttestationSigning(self)

    def build_groups_attestation_renewer(self):
        return GroupAttestionRenewer(self)

    def build_secrets(self):
        return Secrets()

    def build_stats_handler(self):
        return StatsHandler(self)

    def build_spam_checker(self):
        return SpamChecker(self)

    def build_third_party_event_rules(self):
        return ThirdPartyEventRules(self)

    def build_room_member_handler(self):
        if self.config.worker_app:
            return RoomMemberWorkerHandler(self)
        return RoomMemberMasterHandler(self)

    def build_federation_registry(self):
        if self.config.worker_app:
            return ReplicationFederationHandlerRegistry(self)
        else:
            return FederationHandlerRegistry()

    def build_server_notices_manager(self):
        if self.config.worker_app:
            raise Exception("Workers cannot send server notices")
        return ServerNoticesManager(self)

    def build_server_notices_sender(self):
        if self.config.worker_app:
            return WorkerServerNoticesSender(self)
        return ServerNoticesSender(self)

    def build_message_handler(self):
        return MessageHandler(self)

    def build_pagination_handler(self):
        return PaginationHandler(self)

    def build_room_context_handler(self):
        return RoomContextHandler(self)

    def build_registration_handler(self):
        return RegistrationHandler(self)

    def build_account_validity_handler(self):
        return AccountValidityHandler(self)

    def build_saml_handler(self):
        from synapse.handlers.saml_handler import SamlHandler

        return SamlHandler(self)

    def build_event_client_serializer(self):
        return EventClientSerializer(self)

    def build_storage(self) -> Storage:
        return Storage(self, self.datastores)

    def remove_pusher(self, app_id, push_key, user_id):
        return self.get_pusherpool().remove_pusher(app_id, push_key, user_id)

    def should_send_federation(self):
        "Should this server be sending federation traffic directly?"
        return self.config.send_federation and (
            not self.config.worker_app
            or self.config.worker_app == "synapse.app.federation_sender")
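
The snippet above shows only the HomeServer's build_* factory methods plus a handful of hand-written get_* accessors; the memoising get_* methods for the remaining dependencies are generated elsewhere in the module (not shown here). A rough, illustrative sketch of that caching pattern follows; the names DependencyContainer and _make_getter are invented for this illustration and do not appear in the source.

class DependencyContainer:
    """Toy container mimicking the build_*/get_* style above."""

    def build_clock(self):
        # Stand-in for an expensive dependency; built at most once.
        return object()


def _make_getter(depname):
    def _get(self):
        cached = self.__dict__.get(depname)
        if cached is None:
            cached = getattr(self, "build_" + depname)()
            self.__dict__[depname] = cached  # memoise on the instance
        return cached
    return _get


# Generate a get_<dep> accessor for every build_<dep> method on the class.
for _name in [m[len("build_"):] for m in dir(DependencyContainer) if m.startswith("build_")]:
    setattr(DependencyContainer, "get_" + _name, _make_getter(_name))

container = DependencyContainer()
assert container.get_clock() is container.get_clock()  # same cached instance both times

The _building dict initialised in the constructor above hints at the extra bookkeeping (e.g. detecting cyclic dependencies while building) that the full module performs, which this sketch leaves out.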
Example No. 35
0
import abc
import copy

# `not_implemented` is assumed to be a placeholder function defined elsewhere
# in the original module (e.g. one that raises NotImplementedError when called).
def attr_factory(name):
    '''Factory function for the abstract fields of a geometric object.'''
    func = copy.copy(not_implemented)
    func.__name__ = name
    return abc.abstractproperty(func)
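
To illustrate what such a factory is for, here is a small, hypothetical usage sketch; the Shape and Circle classes and the _placeholder helper are invented for this example, and only abc.abstractproperty comes from the snippet above.

import abc


def _placeholder(self):
    # Hypothetical stand-in for the module's `not_implemented` helper.
    raise NotImplementedError


class Shape(metaclass=abc.ABCMeta):
    # Roughly what `area = attr_factory('area')` would produce:
    # an abstract, read-only field that subclasses must override.
    area = abc.abstractproperty(_placeholder)


class Circle(Shape):
    def __init__(self, radius):
        self.radius = radius

    @property
    def area(self):
        return 3.141592653589793 * self.radius ** 2


print(Circle(2.0).area)   # ~12.566; instantiating Shape() itself would raise TypeError

The factory's only real addition over calling abc.abstractproperty directly is that it gives each placeholder getter a distinct __name__.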
Example No. 36
0
#!/usr/bin/python
"""
"""
import abc

_notimplemented = abc.abstractproperty(lambda self, *args, **kwargs: NotImplemented)


class WorkflowInterface(object):
    __metaclass__ = abc.ABCMeta

    steps = abc.abstractproperty(_notimplemented)
    # For Generic Function: this part would be the function proper.
    # (The enclosing function definition is not included in this snippet;
    # `validator`, `obj` and `name` come from that elided context.)
    if not validator.__instancecheck__(obj):
        raise validator.exception(
            validator.message(obj, name=name)
        )

    return obj


# Abstract members of the generic-function interface. `_NOT_IMPLEMENTED` is a
# placeholder defined elsewhere in the original module (not shown here).
interface = abc.abstractproperty(_NOT_IMPLEMENTED)  # type: InterfaceType
invoke = abc.abstractproperty(_NOT_IMPLEMENTED)  # type: Callable[[AnyArgs], Any]
exception = abc.abstractproperty(_NOT_IMPLEMENTED)  # type: Exception
message = abc.abstractmethod(_NOT_IMPLEMENTED)  # type: Callable[[AnyArgs], Any]


class AssertInterface(GenericFunctionInterface):
    pass


class Assert(GenericFunction):
    inter