Example #1
def decorate_with_parse_from_dict_fns(cls, **kwargs):
    '''Decorates target class with
        * parse_from_dict - returns a cls instance with values taken from the passed dict
        * parse_from_dicts - returns a list of cls instances corresponding to the passed sequence of dictionaries
    methods

    @param cls: target class to decorate
    @type cls: class
    @param kwargs: mapping between the names of the attributes of target class
        and the keys to use while getting values from passed dict
    @type kwargs: dict[str, str]
    @return: decorated cls
    @rtype: class
    '''

    def parse_from_dict(dictionary):
        kwargs_ = {}
        for key, value in kwargs.items():
            kwargs_[key] = dictionary.get(value)
        return cls(**kwargs_)
    cls.parse_from_dict = staticmethod(parse_from_dict)

    cls.parse_from_dicts = staticmethod(partial(map, cls.parse_from_dict))

    return cls
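
A minimal usage sketch for the decorator above (Point and the key names are hypothetical; note that parse_from_dicts needs partial from functools and, under Python 3, returns a lazy map object):

from functools import partial  # required by decorate_with_parse_from_dict_fns

class Point(object):
    def __init__(self, x=None, y=None):
        self.x, self.y = x, y

# map each attribute name to the dict key it should be read from
Point = decorate_with_parse_from_dict_fns(Point, x='pos_x', y='pos_y')

p = Point.parse_from_dict({'pos_x': 1, 'pos_y': 2})    # Point with x=1, y=2
ps = list(Point.parse_from_dicts([{'pos_x': 3}, {}]))  # list() drains the map
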
Example #2
def _AddStaticMethods(cls):

  def RegisterExtension(extension_handle):
    extension_handle.containing_type = cls.DESCRIPTOR
    _AttachFieldHelpers(cls, extension_handle)

    actual_handle = cls._extensions_by_number.setdefault(
        extension_handle.number, extension_handle)
    if actual_handle is not extension_handle:
      raise AssertionError(
          'Extensions "%s" and "%s" both try to extend message type "%s" with '
          'field number %d.' %
          (extension_handle.full_name, actual_handle.full_name,
           cls.DESCRIPTOR.full_name, extension_handle.number))

    cls._extensions_by_name[extension_handle.full_name] = extension_handle

    handle = extension_handle
    if _IsMessageSetExtension(handle):

      cls._extensions_by_name[
          extension_handle.message_type.full_name] = extension_handle

  cls.RegisterExtension = staticmethod(RegisterExtension)

  def FromString(s):
    message = cls()
    message.MergeFromString(s)
    return message
  cls.FromString = staticmethod(FromString)
Example #3
    def from_file(cls, file_):
        """
        Construct a test class from a feature file.
        """

        feature = TestFeature.from_file(file_)

        background = cls.make_background(feature.background)
        scenarios = [
            cls.make_scenario(scenario, i + 1)
            for i, scenario in enumerate(feature.scenarios)
        ]

        before_feature, after_feature = \
            CALLBACK_REGISTRY.before_after('feature')

        members = {
            'feature': feature,
            'background': background,
            'before_feature': staticmethod(before_feature),
            'after_feature': staticmethod(after_feature),
        }

        members.update({
            scenario.__name__: scenario
            for scenario in scenarios
        })

        class_name = identifier(feature.name)

        testclass = type(class_name, (cls,), members)
        testclass.feature.testclass = testclass
        return testclass
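
A sketch of how such a factory is typically used (hedged: FeatureTest, the helper methods, and the path are hypothetical, and from_file is assumed to be bound as a classmethod):

class LoginTests(FeatureTest):   # base class supplying make_background/make_scenario
    pass

# builds a test class named after the feature, one method per scenario
TestLogin = LoginTests.from_file('features/login.feature')
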
Example #4
    def Checked(cls, name, datum_to_value=None, value_to_datum=None, **other):
        """Create a custom `ObjectField` that validates values and datums.

        :param name: The name of the field. This is the name that's used to
            store the datum in the MAAS-side data dictionary.
        :param datum_to_value: A callable taking a single ``datum`` argument,
            passed positionally. This callable should convert the datum to a
            Python-side value, and/or raise an exception for invalid datums.
        :param value_to_datum: A callable taking a single ``value`` argument,
            passed positionally. This callable should convert the value to a
            MAAS-side datum, and/or raise an exception for invalid values.
        :param other: Additional arguments to pass to the default
            `ObjectField` constructor.
        """
        attrs = {}
        if datum_to_value is not None:
            @wraps(datum_to_value)
            def datum_to_value_method(instance, datum):
                return datum_to_value(datum)
            attrs["datum_to_value"] = staticmethod(datum_to_value_method)
        if value_to_datum is not None:
            @wraps(value_to_datum)
            def value_to_datum_method(instance, value):
                return value_to_datum(value)
            attrs["value_to_datum"] = staticmethod(value_to_datum_method)
        cls = type("%s.Checked#%s" % (cls.__name__, name), (cls,), attrs)
        return cls(name, **other)
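
A hedged sketch of the factory above in use (assumes Checked is bound as a classmethod on ObjectField; the field name and the MAC converter are hypothetical):

class Interface(Object):
    # datum stored as a string, surfaced as a MAC object; invalid datums raise
    mac_address = ObjectField.Checked(
        'mac_address', datum_to_value=MAC, value_to_datum=str)
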
Example #5
def _AddStaticMethods(cls):
  # TODO(robinson): This probably needs to be thread-safe(?)
  def RegisterExtension(extension_handle):
    extension_handle.containing_type = cls.DESCRIPTOR
    _AttachFieldHelpers(cls, extension_handle)

    # Try to insert our extension, failing if an extension with the same number
    # already exists.
    actual_handle = cls._extensions_by_number.setdefault(
        extension_handle.number, extension_handle)
    if actual_handle is not extension_handle:
      raise AssertionError(
          'Extensions "%s" and "%s" both try to extend message type "%s" with '
          'field number %d.' %
          (extension_handle.full_name, actual_handle.full_name,
           cls.DESCRIPTOR.full_name, extension_handle.number))

    cls._extensions_by_name[extension_handle.full_name] = extension_handle

    handle = extension_handle  # avoid line wrapping
    if _IsMessageSetExtension(handle):
      # MessageSet extension.  Also register under type name.
      cls._extensions_by_name[
          extension_handle.message_type.full_name] = extension_handle

  cls.RegisterExtension = staticmethod(RegisterExtension)

  def FromString(s):
    message = cls()
    message.MergeFromString(s)
    return message
  cls.FromString = staticmethod(FromString)
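
A brief sketch of the generated helpers in use (MyMessage and the extension handle are hypothetical):

data = MyMessage(value=1).SerializeToString()
msg = MyMessage.FromString(data)            # staticmethod added above
MyMessage.RegisterExtension(my_extension)   # registers by number and full name
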
Example #6
 def read_wrapper(cls, fp, *args, **kwargs):
     # check for old style
     check_group = '{}/{}'.format(fp.samples_group, fp.variable_args[0])
     if not isinstance(fp[check_group], h5py.Dataset):
         convert_cmd = ("pycbc_inference_extract_samples --input-file {} "
                        "--thin-start 0 --thin-interval 1 --output-file "
                        "FILE.hdf".format(fp.filename))
         logging.warning("\n\nDEPRECATION WARNING: The file {} appears to "
                         "have been written using an older style file "
                         "format. Support for this format will be removed "
                         "in a future update. To convert this file, run: "
                         "\n\n{}\n\n"
                         "where FILE.hdf is the name of the file to "
                         "convert to. (Ignore this warning if you are "
                         "doing that now.)\n\n".format(fp.filename,
                         convert_cmd))
         # we'll replace cls._read_fields with _read_oldstyle_fields, so
         # that when the read_function calls cls._read_fields, it points
         # to the oldstyle function. First we'll keep a backup copy of
         # _read_fields, so that oldstyle and new style files can be loaded
         # in the same environment
         cls._bkup_read_fields = cls._read_fields
         cls._read_fields = staticmethod(cls._read_oldstyle_fields)
     else:
         # if an oldstyle file was loaded previously, _read_fields will
         # point to _read_oldstyle_fields; restore _read_fields
         try:
             cls._read_fields = staticmethod(cls._bkup_read_fields.im_func)
         except AttributeError:
             pass
     return read_function(cls, fp, *args, **kwargs)
Example #7
def configure_libweasyl(
        dbsession, not_found_exception, base_file_path,
        staff_config_path, media_link_formatter_callback):
    """
    Configure libweasyl for the current application. This sets up some
    global state around libweasyl.

    This function can be called multiple times without issues; each call will
    replace the values set by the previous call.

    Parameters:
        dbsession: A SQLAlchemy ``scoped_session`` instance configured for the
            application's database usage.
        not_found_exception: An exception to be raised on the ``*_or_404``
            methods of queries.
        base_file_path: The path to where static content lives on disk.
        staff_config_path: The path to the config file containing the ids of staff.
        media_link_formatter_callback: A callback to format the URL for a media
            link. The callback will be called as ``callback(media_item, link)``
            and is expected to return a URL or ``None`` to use the default.
    """
    _configure_dbsession(dbsession)
    BaseQuery._not_found_exception = staticmethod(not_found_exception)
    DiskMediaItem._base_file_path = staticmethod(base_file_path)
    _init_staff(staff_config_path)
    MediaItem._media_link_formatter_callback = staticmethod(media_link_formatter_callback)
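
A startup call of this shape wires the globals together (all argument values here are hypothetical):

configure_libweasyl(
    dbsession=sa_session,                    # a SQLAlchemy scoped_session
    not_found_exception=HTTPNotFound,        # raised by the *_or_404 helpers
    base_file_path='/var/lib/weasyl/static',
    staff_config_path='/etc/weasyl/staff.yaml',
    media_link_formatter_callback=lambda media_item, link: None,
)
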
Example #8
def MigratingUserRel(name, relation, disable_ids_fn=False,
                     disable_reverse_ids_fn=False, permission_class=None):
    """
    Replacement for UserRel to be used during migrations away from the system.

    The resulting "UserRel" classes generated are to be used as standalones and
    not included in Subreddit.__bases__.

    """

    mgr = MemoizedUserRelManager(
        name, relation, permission_class,
        disable_ids_fn, disable_reverse_ids_fn)

    class URM: pass

    setattr(URM, 'is_' + name, mgr.get)
    setattr(URM, 'get_' + name, mgr.get)
    setattr(URM, 'add_' + name, staticmethod(mgr.add))
    setattr(URM, 'remove_' + name, staticmethod(mgr.remove))
    setattr(URM, 'each_' + name, mgr.by_thing)
    setattr(URM, name + '_permission_class', permission_class)

    if not disable_ids_fn:
        setattr(URM, mgr.ids_fn_name, mgr.ids)

    if not disable_reverse_ids_fn:
        setattr(URM, mgr.reverse_ids_fn_name, staticmethod(mgr.reverse_ids))

    return URM
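
A hedged usage sketch (the relation, objects, and call signatures are assumptions; the staticmethod wrapping above means add/remove are called without an instance):

ContributorRel = MigratingUserRel('contributor', rel_contributor)

ContributorRel.add_contributor(subreddit, user)
assert ContributorRel.is_contributor(subreddit, user)
ContributorRel.remove_contributor(subreddit, user)
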
Example #9
def add_operator(name, arity, dispatch=None, pyfunc=None, pure=False, ovf=False):
    operator_func = getattr(operator, name, None)
    if dispatch == 1:
        bases = [SingleDispatchMixin]
    elif dispatch == 2:
        bases = [DoubleDispatchMixin]
    else:
        bases = []
    if ovf:
        assert pure
        base_cls = OverflowingOperation
    elif pure:
        base_cls = PureOperation
    else:
        base_cls = HLOperation
    bases.append(base_cls)
    cls = HLOperationMeta(name, tuple(bases), {'opname': name, 'arity': arity,
                                               'canraise': [],
                                               'dispatch': dispatch})
    if pyfunc is not None:
        func2op[pyfunc] = cls
    if operator_func:
        func2op[operator_func] = cls
    if pyfunc is not None:
        cls.pyfunc = staticmethod(pyfunc)
    elif operator_func is not None:
        cls.pyfunc = staticmethod(operator_func)
    if ovf:
        from rpython.rlib.rarithmetic import ovfcheck
        ovf_func = lambda *args: ovfcheck(cls.pyfunc(*args))
        add_operator(name + '_ovf', arity, dispatch, pyfunc=ovf_func)
        cls.ovf_variant = getattr(op, name + '_ovf')
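
Calls of this shape then build the operation table (hedged; note that pyfunc falls back to getattr(operator, name) when omitted):

add_operator('neg', 1, dispatch=1, pure=True)            # unary; pyfunc resolves to operator.neg
add_operator('add', 2, dispatch=2, pure=True, ovf=True)  # also generates the 'add_ovf' variant
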
Example #10
    def __init__(self, pattern, method="GET", base_url=None,
                 requires_auth=False, request_callback=None,
                 response_callback=None):
        """
        :param pattern: a URL pattern relative to your base URL
        :param method: the HTTP method for the request.
        :param requires_auth: whether or not the client must be authenticated
        :param base_url: the base URL for the request
        :param request_callback: a function to process the request
        :param response_callback: a function to process the response

        ``base_url``, ``request_callback``, and ``response_callback`` can
        override ``BASE_URL``, ``REQUEST_CALLBACK``, or ``RESPONSE_CALLBACK``
        for the enclosing ``API`` subclass."""
        self.requires_auth = requires_auth
        if self.requires_auth:
            self.username, self.password = None, None
        self.base_url = base_url
        self.pattern = URLPattern(pattern)
        self.http = httplib2.Http()
        self.method = method
        # If request_callback or response_callback arguments were not supplied,
        # provide default callbacks that do nothing
        default_callback = lambda x, y: (x, y)
        request_callback = request_callback or default_callback
        response_callback = response_callback or default_callback
        # Instance attributes are never routed through the descriptor
        # protocol, so they are not passed self; plain functions suffice.
        # (A staticmethod object stored on an instance is not even
        # callable before Python 3.10.)
        self.request_callback = request_callback
        self.response_callback = response_callback
Example #11
    def init(self, taskspec):
        super(BodyMotionAgent, self).init(taskspec)

        ts = TaskSpecParser(taskspec)
        if ts.valid:
            extra = ts.get_extra()

            v = ['FEATUREREP', 'STATESPERDIM', 'STATEDESCR', 'ACTIONDESCR', 'COPYRIGHT']
            pos = []
            for i, id_ in enumerate(list(v)):
                try:
                    pos.append(extra.index(id_))
                except ValueError:  # id_ not present in extra
                    v.remove(id_)
            sorted_v = sorted(zip(pos, v))
            v = [s[1] for s in sorted_v]

            for i, id_ in enumerate(v):
                val = ts.get_value(i, extra, v)
                if id_ == 'FEATUREREP':
                    self._feature_rep = val

            if self._feature_rep == 'larm':
                def map_state_key(key):
                    return {
                        "x": 0,
                        "y": 1,
                        "z": 2,
                    }[key]

                def map_action_key(key):
                    return {
                        "dx": 0,
                        "dy": 1,
                        "dz": 2,
                    }[key]

            else:
                def map_state_key(key):
                    return {
                        "x": 0,
                        "y": 1,
                        "z": 2,
                        "wx": 3,
                        "wy": 4,
                        "wz": 5,
                    }[key]

                def map_action_key(key):
                    return {
                        "dx": 0,
                        "dy": 1,
                        "dz": 2,
                        "dwx": 3,
                        "dwy": 4,
                        "dwz": 5
                    }[key]

            MDPState.key_to_index = staticmethod(map_state_key)
            MDPAction.key_to_index = staticmethod(map_action_key)
Example #12
 def init():
     Kqueue._kqueue = Kqueue()
     # only one instance is allowed
     Event.addEvent = staticmethod(lambda ev: Kqueue._kqueue.register(ev))
     Event.delEvent = staticmethod(lambda ev: Kqueue._kqueue.deregister(ev))
     Event.isEventSet = staticmethod(lambda ev: Kqueue._kqueue.is_set(ev))
     Event.processEvents = staticmethod(lambda t: Kqueue._kqueue.process_events(t))
Example #13
def patch_pyqt():
    """Patch PyQt classes to strip trailing null characters on Python 2

    It works around a bug triggered by Unicode characters above 0xFFFF.
    """
    if sys.version_info >= (3, 3):
        return

    old_toLocalFile = QUrl.toLocalFile
    QUrl.toLocalFile = lambda url: old_toLocalFile(url).rstrip('\0')

    old_toString = QUrl.toString
    QUrl.toString = lambda *args: old_toString(*args).rstrip('\0')

    old_path = QUrl.path
    QUrl.path = lambda self: old_path(self).rstrip('\0')

    old_arguments = QApplication.arguments
    QApplication.arguments = staticmethod(
        lambda: [arg.rstrip('\0') for arg in old_arguments()])

    from PyQt5.QtWidgets import QFileDialog
    old_getOpenFileNames = QFileDialog.getOpenFileNames
    QFileDialog.getOpenFileNames = staticmethod(
        lambda *args: [f.rstrip('\0') for f in old_getOpenFileNames(*args)])

    old_getOpenFileName = QFileDialog.getOpenFileName
    QFileDialog.getOpenFileName = staticmethod(
        lambda *args: old_getOpenFileName(*args).rstrip('\0'))

    old_getSaveFileName = QFileDialog.getSaveFileName
    QFileDialog.getSaveFileName = staticmethod(
        lambda *args: old_getSaveFileName(*args).rstrip('\0'))
Example #14
    def setUpClass(cls):
        ArakoonInstaller.ARAKOON_CONFIG_DIR = '/tmp/cfg'
        ArakoonInstaller.ARAKOON_CONFIG_FILE = '/tmp/cfg/{0}/{0}.cfg'

        TestArakoonInstaller.expected_global = '[global]\ncluster_id = {0}\ncluster = {1}\nplugins = \n\n'
        TestArakoonInstaller.expected_base = '[{0}]\nname = {0}\nip = {1}\nclient_port = {2}\nmessaging_port = {3}\ntlog_compression = snappy\nlog_level = info\nlog_dir = /var/log/arakoon/one\nhome = /tmp/db/arakoon/one\ntlog_dir = /tmp/db/tlogs/one\nfsync = true\n\n'

        # System
        def _get_my_machine_id(_client):
            return TestArakoonInstaller.nodes[_client.ip]

        System.get_my_machine_id = staticmethod(_get_my_machine_id)

        # Configuration
        def _get(key):
            if key == 'ovs.core.storage.persistent':
                return 'arakoon'
            c = PersistentFactory.get_client()
            if c.exists(key):
                return c.get(key)
            return None

        def _get_int(key):
            return int(Configuration.get(key))

        def _set(key, value):
            c = PersistentFactory.get_client()
            c.set(key, value)

        Configuration.get = staticmethod(_get)
        Configuration.get_int = staticmethod(_get_int)
        Configuration.set = staticmethod(_set)

        Configuration.set('ovs.ports.arakoon', [22000])
        Configuration.set('ovs.arakoon.location', '/tmp/db')
Example #15
    def __init__(cls, name, bases, attrs):
        if not hasattr(cls, 'members'):
            # This branch only executes when processing the mount point itself.
            # So, since this is a new plugin type, not an implementation, this
            # class shouldn't be registered as a plugin. Instead, it sets up a
            # list where plugins can be registered later.
            cls.members = []
        else:
            # This must be a plugin implementation, which should be registered.
            # Simply appending it to the list is all that's needed to keep
            # track of it later.
            cls.members.append(cls)
            cls.index = len(cls.members) - 1
            setattr(cls.__class__, cls.__name__, cls)
            cls.key = cls.__name__

            if hasattr(cls, 'foregrounds'):
                cls.foreground_slugs = []
                for fg in cls.foregrounds:
                    cls.foreground_slugs.append(palette.entries.registerForeground(fg))
                if not hasattr(cls, 'foreground_slug'):
                    cls.foreground_slug = staticmethod(lambda x: random.choice(cls.foreground_slugs))
            elif hasattr(cls, 'foreground'):
                cls.foreground_slug = palette.entries.registerForeground(cls.foreground)

            if hasattr(cls, 'backgrounds'):
                cls.background_slugs = []
                for bg in cls.backgrounds:
                    cls.background_slugs.append(palette.entries.registerBackground(bg))
                if not hasattr(cls, 'background_slug'):
                    cls.background_slug = staticmethod(lambda x: random.choice(cls.background_slugs))
            elif hasattr(cls, 'background'):
                cls.background_slug = palette.entries.registerBackground(cls.background)
Example #16
    def setUp(self):
        self.log_printer = ListLogPrinter()
        self.console_printer = ConsolePrinter(print_colored=False)
        self.no_color = not self.console_printer.print_colored
        self.file_diff_dict = {}
        self.section = Section('t')
        self.local_bears = OrderedDict([('default', [SomelocalBear]),
                                        ('test', [SomelocalBear])])
        self.global_bears = OrderedDict([('default', [SomeglobalBear]),
                                         ('test', [SomeglobalBear])])

        self.old_open_editor_applicable = OpenEditorAction.is_applicable
        OpenEditorAction.is_applicable = staticmethod(
            lambda *args: 'OpenEditorAction cannot be applied')

        self.old_apply_patch_applicable = ApplyPatchAction.is_applicable
        ApplyPatchAction.is_applicable = staticmethod(
            lambda *args: 'ApplyPatchAction cannot be applied')

        self.lexer = TextLexer()
        self.lexer.add_filter(VisibleWhitespaceFilter(
            spaces=True,
            tabs=True,
            tabsize=SpacingHelper.DEFAULT_TAB_WIDTH))

        patcher = patch('coalib.results.result_actions.OpenEditorAction.'
                        'subprocess')
        self.addCleanup(patcher.stop)
        patcher.start()
Example #17
    def setup_connection(self, cls, factory=DEFAULT_FACTORY):
        """Temporarily override client and connection factories in `cls`.

        This is intended for use with `MAASOAuthConnection` and subclasses.

        The connection factory is always set to a function that logs the call
        but always returns `None`. This prevents tests from making external
        connections.

        Returns a list, to which events will be logged. See `LogRecord` for
        the form these logs take.
        """
        assert issubclass(cls, MAASOAuthConnection), (
            "setup_connection() is only suitable for use "
            "with MAASOAuthConnection and its subclasses.")

        log = []

        def factory_logger(*args, **kwargs):
            instance = factory(*args, **kwargs)
            record = LogRecord("factory", args, kwargs, instance)
            log.append(record)
            return instance

        def connect_logger(*args, **kwargs):
            record = LogRecord("connect", args, kwargs, None)
            log.append(record)
            return None

        self.patch(cls, "factory", staticmethod(factory_logger))
        self.patch(cls, "connect", staticmethod(connect_logger))
        return log
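
Inside a test this might look like the following (hedged: MyClient is hypothetical and LogRecord is assumed to be tuple-like, matching how it is constructed above):

log = self.setup_connection(MyClient)
conn = MyClient.factory('http://example.com/api/')  # appends a ("factory", ...) record
conn.connect()                                      # returns None, appends ("connect", ...)
assert log[-1][0] == "connect"
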
Example #18
def load_service_helpers():
	for service in services:
		# Create a class to represent the service
		c = type(
				service, # Class Name
				(object,), # Inheritance
				dict() # Class Dictionary - initially empty
			)
		
		if services[service]['helpers'] is not None:
			try:
				c.helpers = services[service]['helpers']()
				c.helpers.get_service_instances = lambda s: get_service_instances(s)
				c.helpers.module_is_loaded = staticmethod(module_is_loaded)
				c.helpers.service_is_loaded = staticmethod(service_is_loaded)
				c.helpers.load_mod = staticmethod(load_mod)
				c.helpers.unload_mod = staticmethod(unload_mod)
				c.helpers.reload_mod = staticmethod(reload_mod)
				c.helpers.logger = logging.getLogger("SERVICES.%s.%s" % (service, services[service]['helpers'].__name__))
			except Exception:
				logger.error("Unable to instantiate helpers class for service '%s' (helpers for this service will be unavailable): %s" % (service, traceback.format_exc()))
				c.helpers = None
		else:
			c.helpers = services[service]['helpers']

		c.instances = services[service]['instances']
		service_helpers[service] = c
Example #19
def build_wmi_class_descriptor(name, **attributes):
    '''
    Builds WMI class descriptor with class name equal to passed name and having attributes listed in `attributes` argument.
    It also enhances the class with several static methods like
        * get_type_by_name - returns wmi_types.__Type instance by provided name
        * parse - returns parsed value according to defined WMI type

    @param name: WMI class name
    @type name: basestring
    @param attributes: attribute name to WMI type mapping
    @type attributes: dict[str, wmi_types.__Type]
    @return: WMI class descriptor with all needed methods to create an instance
        of such descriptor with properly parsed values
    @rtype: namedtuple class
    '''
    cls = collections.namedtuple(name, ' '.join(attributes.keys()))

    def get_type_by_name(name, type_by_name=attributes):
        return type_by_name.get(name)

    def parse(parser, name, value, type_by_name=attributes):
        return (type_by_name.get(name) or wmi_types.string).parse(parser, value)

    cls.parse = staticmethod(parse)
    cls.get_type_by_name = staticmethod(get_type_by_name)
    return cls
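
A hedged usage sketch (the WMI class and wmi_types.integer are assumptions):

Win32_Service = build_wmi_class_descriptor(
    'Win32_Service',
    Name=wmi_types.string,
    ProcessId=wmi_types.integer,
)
Win32_Service.get_type_by_name('ProcessId')           # -> the declared type
svc = Win32_Service(Name='Spooler', ProcessId=1234)   # plain namedtuple construction
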
Example #20
    def compile(cls):
        attrs = cls._detect_attributes()
        sorted_attrs = sorted(map(lambda attr_tup: (attr_tup[1].index, attr_tup[1], attr_tup[0]), attrs.items()))
        cls.STRUCT_STRING = Message.generate_struct_string(sorted_attrs)

        _struct = finstruct.Finstruct(cls.STRUCT_STRING)
        cls.STRUCT = _struct
        _unpack = _struct.unpack
        _pack = _struct.pack

        # Read Methods
        cls.unpack = _unpack

        interface = namedtuple(cls.__name__, map(lambda attr_tup: attr_tup[2], sorted_attrs))
        cls.INTERFACE = interface

        _read_into_tuple = lambda raw_string: interface(*_unpack(raw_string))
        cls.unpack_into_namedtuple = staticmethod(_read_into_tuple)

        # materialized as a list: under Python 3 a bare map() would be
        # exhausted by the first of the two closures below
        field_names = list(map(lambda attr_tup: attr_tup[2], sorted_attrs))
        _read_into_dict = lambda raw_string: dict(zip(field_names, _unpack(raw_string)))
        cls.unpack_into_dict = staticmethod(_read_into_dict)

        _pack_from_dict = lambda d: _pack(*map(d.__getitem__, field_names))
        _pack_from_namedtuple = lambda nt: _pack(*nt)

        # Write Methods
        cls.pack = _pack
        cls.pack_from_string = _pack
        cls.pack_from_dict = staticmethod(_pack_from_dict)
        cls.pack_from_namedtuple = staticmethod(_pack_from_namedtuple)
Example #21
 def setUpClass(cls):
     cls.fmt    = "GTF2"
     cls.tokens = _GTF2_TOKENS
     cls.token_dicts = _GTF2_TOKEN_DICTS
     cls.repeated = _GTF2_REPEATED
     cls.tokenizer = staticmethod(make_GTF2_tokens)
     cls.parser = staticmethod(parse_GTF2_tokens)
Example #22
def edit(request, id):
    provider = request.user.service_provider
    employee = get_object_or_404(Employee, employer=request.user.service_provider, id=id)
    hours = EmployeeWorkingHours.objects.filter(employee=employee)
    services = Service.objects.filter(service_provider=provider, employees__in=[employee.id])
    if request.method == 'POST':
        if request.POST.get('action') == 'delete':
            EmployeeWorkingHours.objects.get(id=request.POST.get('workinghours')).delete()
            return HttpResponseRedirect('/myemployees/edit/' + id)
        form = EmployeeForm(request.POST, request.FILES, instance=employee)
        qs = EmployeeWorkingHours.objects.filter(employee=employee)
        EmployeeWorkingHoursFormSet.form = staticmethod(curry(EmployeeWorkingHoursForm, employee=employee))
        forms = EmployeeWorkingHoursFormSet(request.POST)
        form_s = EmployeeServicesForm(request.POST, service_provider=provider, employee=employee, data=services)
        form_valid = form.is_valid()
        form_s_valid = form_s.is_valid()
        forms_valid = True
        for f in forms:
            if not f.is_valid():
                forms_valid = False
        if form_valid and form_s_valid and forms_valid:
            form.save()
            for f in forms:
                f.save()
            form_s.save()
            return HttpResponseRedirect(reverse(myemployees))
    else:
        form = EmployeeForm(instance=employee)
        qs = EmployeeWorkingHours.objects.filter(employee=employee)
        EmployeeWorkingHoursFormSet.form = staticmethod(curry(EmployeeWorkingHoursForm, employee=employee))
        forms = EmployeeWorkingHoursFormSet(queryset=qs)
        form_s = EmployeeServicesForm(service_provider=provider, employee=employee, data=services)
    return render_to_response('employees/edit.html', locals(), context_instance=RequestContext(request))
Example #23
def logify(cls):
    """ Alter a class so that it interacts well with LogPy

    The __class__ and __dict__ attributes are used to define the LogPy term

    See Also:
        _as_logpy
        _from_logpy


    >>> from logpy import logify, run, var, eq
    >>> class A(object):
    ...     def __init__(self, a, b):
    ...         self.a = a
    ...         self.b = b
    >>> logify(A)

    >>> x = var('x')
    >>> a = A(1, 2)
    >>> b = A(1, x)

    >>> run(1, x, eq(a, b))
    (2,)
    """
    if hasattr(cls, '__slots__'):
        cls._as_logpy = _as_logpy_slot
        cls._from_logpy = staticmethod(_from_logpy_slot)
    else:
        cls._as_logpy = _as_logpy
        cls._from_logpy = staticmethod(_from_logpy)
Example #24
def test_grad_useless_sum():
    """Test absence of useless sum.

    When an operation (such as T.mul) is done on a broadcastable vector and
    a matrix, the gradient in backward path is computed for the broadcasted
    vector. So a sum reverts the broadcasted vector to a vector. In the case
    of operations on two broadcastable vectors, the sum should not be generated.

    This test checks whether there is a useless sum in the gradient
    computations.
    """
    mode = theano.compile.get_default_mode().including("canonicalize")
    mode.check_isfinite = False
    x = TensorType(theano.config.floatX, (True,))("x")
    l = tensor.log(1.0 - tensor.nnet.sigmoid(x))[0]
    g = tensor.grad(l, x)
    nodes = theano.gof.graph.ops([x], [g])

    f = theano.function([x], g, mode=mode)
    test_values = [-100, -1, 0, 1, 100]
    outputs = []
    old_values_eq_approx = staticmethod(TensorType.values_eq_approx)
    TensorType.values_eq_approx = staticmethod(tensor.type.values_eq_approx_remove_nan)
    try:
        for test_value in test_values:
            outputs.append(f(numpy.array([test_value]).astype("float32")))
    finally:
        TensorType.values_eq_approx = old_values_eq_approx

    assert not any([isinstance(node.op, theano.tensor.elemwise.Sum) for node in nodes])
    assert numpy.allclose(outputs, [[-3.72007598e-44], [-0.26894142], [-0.5], [-0.73105858], [-1.0]])
Example #25
 def init():
     Epoll._epoll = Epoll()
     # only one instance is allowed
     Event.addEvent = staticmethod(lambda ev: Epoll._epoll.register(ev))
     Event.delEvent = staticmethod(lambda ev: Epoll._epoll.deregister(ev))
     Event.isEventSet = staticmethod(lambda ev: Epoll._epoll.is_set(ev))
     Event.processEvents = staticmethod(lambda t: Epoll._epoll.process_events(t))
Example #26
	def wrap(func):
		args = extension.split("|")
		if args[0] == "ext": setattr(link_opener,args[1],staticmethod(func))
		elif args[0] == "site": link_opener.sites[args[1]] = func
		elif extension == "default": setattr(link_opener,extension,staticmethod(func))
		#allow stacking wrappers
		return func
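
Presumably wrap is the inner function of a decorator factory that closes over extension; a sketch of its effect (the factory name opener is an assumption):

@opener("ext|png")          # bound as the staticmethod link_opener.png
def open_png(link): ...

@opener("site|imgur.com")   # stored in the link_opener.sites dict
def open_imgur(link): ...
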
Example #27
def build_class(clsname, lname, sname, testfunc):
    def name():
        return lname

    def shortname():
        return sname

    def test(self, region, offset, length, physaddr):
        """
        - region supports __getitem(x)__ and __setitem(x,b)__, with b being casted to a byte 
        - offset is the first byte tested, length is the length (1..X). The bytes region[offset..offset+(length -1)] are tested
        
        return: None if page ok, offset of all detected errors if test failed
        """
        
        error_offsets = testfunc(region.get_mmap(), offset, length)

        # TODO: do proper reporting
        if len(error_offsets) > 0:
            for item in error_offsets:
                frame = item // physmem.PAGE_SIZE  # integer division; a float index would break physaddr[frame]
                self.reporting.report_bad_memory(physaddr[frame] + item % physmem.PAGE_SIZE, 0x55, 0xaa)

        return error_offsets

    return type(clsname, (ScannerBaseclass,), {
            'name': staticmethod(name),
            'shortname': staticmethod(shortname),
            'test': test
            })
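
A hedged usage sketch (walk_55 is a hypothetical test function returning the offsets of failing bytes):

Walker55 = build_class('Walker55', 'walking 0x55 pattern', 'w55', walk_55)
Walker55.name()        # -> 'walking 0x55 pattern', no instance needed
Walker55.shortname()   # -> 'w55'
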
Example #28
	def __init__(self, conf):
		# The service loader takes care of naming the thread for you.
		# You have access to: self.call_hook and self.logger

		try:
			self.content_hooks = {}
			self.server_name = conf['server_name']
			self.server_port = conf['server_port']
			self.docroot = conf['docroot']
			self.render_markdown = conf['render_markdown']
			if conf['markdown_css'] not in [None, '']:
				self.markdown_css = conf['markdown_css']
			else:
				self.markdown_css = ''
			self.handler = HttpHandler
			self.handler.md = markdown.Markdown()
			self.handler.logger = self.logger
			self.handler.server_name = self.server_name
			self.handler.server_port = self.server_port
			self.handler.docroot = self.docroot
			self.handler.render_markdown = self.render_markdown
			self.handler.markdown_css = self.markdown_css
			self.handler.content_hooks = self.content_hooks
			self.handler.module_is_loaded = staticmethod(self.module_is_loaded)
			self.handler.service_is_loaded = staticmethod(self.service_is_loaded)

			self.server = HTTPServer((self.server_name, self.server_port), self.handler)
		except KeyError:
			self.logger.error("Error loading configuration: %s" % traceback.print_exc())
Example #29
def DocInheritMeta(style="parent", abstract_base_class=False):
    """ A metaclass that merges the respective docstrings of a parent class and of its child, along with their
        properties, methods (including classmethod, staticmethod, decorated methods).

        Parameters
        ----------
        style: Union[Any, Callable[[str, str], str]], optional (default: "parent")
            A valid inheritance-scheme style ID or function that merges two docstrings.

        abstract_base_class: bool, optional(default: False)
            If True, the returned metaclass inherits from abc.ABCMeta.

            Thus a class that derives from DocInheritMeta(style="numpy", abstract_base_class=True)
            will be an abstract base class, whose derived classes will inherit docstrings
            using the numpy-style inheritance scheme.


        Returns
        -------
        custom_inherit.DocInheritorBase"""

    merge_func = store[style]
    metaclass = _DocInheritorBase
    metaclass.class_doc_inherit = staticmethod(merge_func)
    metaclass.attr_doc_inherit = staticmethod(merge_func)

    return metaclass if not abstract_base_class else type("abc" + metaclass.__name__, (_ABCMeta, metaclass), {})
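
For instance (hedged, following the custom_inherit project's documented pattern):

from custom_inherit import DocInheritMeta

class Base(metaclass=DocInheritMeta(style="numpy")):
    def meth(self, x):
        """Parameters
        ----------
        x : int"""

class Child(Base):
    def meth(self, x):
        """Returns
        -------
        int"""

# Child.meth.__doc__ now contains both sections, merged numpy-style
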
Example #30
 def __init__(self, port, root_dir, data_dir, partials_dir,
              dns_srv=None):
     self.root_dir = root_dir
     self.shares_dir = os.path.join(os.path.dirname(root_dir), 'shares_dir')
     self.shares_dir_link = os.path.join(root_dir, 'shares_link')
     self.data_dir = data_dir
     self.fsmdir = os.path.join(data_dir, 'fsmdir')
     self.partials_dir = partials_dir
     self.tritcask_dir = os.path.join(self.data_dir, 'tritcask')
     self.hash_q = type('fake hash queue', (),
                        {'empty': staticmethod(lambda: True),
                         '__len__': staticmethod(lambda: 0)})()
     self.logger = logger
     self.db = tritcask.Tritcask(self.tritcask_dir)
     self.vm = DumbVolumeManager(self)
     self.fs = FileSystemManager(self.fsmdir, self.partials_dir, self.vm,
                                 self.db)
     self.event_q = EventQueue(self.fs)
     self.event_q.subscribe(self.vm)
     self.fs.register_eq(self.event_q)
     self.sync = Sync(self)
     self.action_q = ActionQueue(self.event_q, self, '127.0.0.1', port,
                                 dns_srv, disable_ssl_verify=True)
     self.state_manager = main.StateManager(self, handshake_timeout=30)
     self.state_manager.connection.waiting_timeout = .1
     self.vm.init_root()
Example #31
class FlowFlowLabel(IOperationByteShortLong, NumericString, IPv6):
    ID = 0x0D
    NAME = 'flow-label'
    converter = staticmethod(converter(LabelValue))
    decoder = staticmethod(_number)
Example #32
class Trigger(Command):

    KEY = "Trigger:triggers2"
    STATIC_TRIGGER_KEY = "Trigger:static_trigger"
    STATIC_TRIGGER_COUNT = "Trigger:static_trigger_count"
    INTEGRAL_TRIGGER_KEY = "Trigger:integral_trigger"
    NOTIFICATION_KEY = "Trigger:notifications"
    TRIGGER_EVENT_KEY = "triggers:cache"
    NOTIFICATION_EVENT_KEY = "notifications:cache"

    def __init__(my, **kwargs):
        my.caller = None
        my.message = None
        my.trigger_sobj = None
        my.input = {}
        my.output = {}
        my.description = ''
        my.kwargs = kwargs
        super(Trigger, my).__init__()

    def get_title(my):
        print "WARNING: Should override 'get_title' function for %s" % my
        return Common.get_full_class_name(my)

    def set_trigger_sobj(my, trigger_sobj):
        my.trigger_sobj = trigger_sobj

    def get_trigger_sobj(my):
        return my.trigger_sobj

    def get_trigger_data(my):
        data = my.trigger_sobj.get_value("data")
        if not data:
            return {}
        else:
            return jsonloads(data)

    def set_command(my, command):
        my.caller = command

    def set_message(my, message):
        my.message = message

    def get_message(my):
        return my.message

    def set_event(my, event):
        my.message = event

    def get_event(my):
        return my.message

    def get_command(my):
        return my.caller

    def set_caller(my, caller):
        my.caller = caller

    def get_caller(my):
        return my.caller

    def get_command_class(my):
        command_class = my.caller.__class__.__name__
        return command_class

    # set inputs and outputs
    def set_input(my, input):
        my.input = input

    def get_input(my):
        return my.input

    def set_output(my, output):
        my.output = output

    def get_output(my):
        return my.output

    def set_description(my, description):
        my.description = description

    def get_description(my):
        return my.description

    def execute(my):
        raise TriggerException("Must override execute function")

    # static functions

    # DEPRECATED
    def append_trigger(caller, trigger, event):
        '''append to the list of called triggers'''
        #print "Trigger.append_trigger is DEPRECATED"
        trigger.set_caller(caller)
        trigger.set_event(event)
        triggers = Container.append_seq("Trigger:called_triggers", trigger)

    append_trigger = staticmethod(append_trigger)

    def call_all_triggers():
        '''calls all triggers for events that have occurred'''
        triggers = Container.get("Trigger:called_triggers")
        Container.remove("Trigger:called_triggers")
        if not triggers:
            return

        prev_called_triggers = Container.get_seq(
            "Trigger:prev_called_triggers")

        # run each trigger in a separate transaction
        for trigger in triggers:

            # prevent recursive triggers shutting down the system
            input = trigger.get_input()

            # remove timestamp (Why was it commented out? i.e. sync related?)
            #sobject = input.get('sobject')
            #if sobject and sobject.has_key('timestamp'):
            #    del sobject['timestamp']
            input_json = jsondumps(input)

            class_name = Common.get_full_class_name(trigger)

            event = trigger.get_event()

            if class_name == 'pyasm.command.subprocess_trigger.SubprocessTrigger':
                class_name = trigger.get_class_name()

            if (event, class_name, input_json) in prev_called_triggers:
                # handle the emails, which can have multiple per event
                if class_name in [
                        "pyasm.command.email_trigger.EmailTrigger",
                        "pyasm.command.email_trigger.EmailTrigger2"
                ]:
                    pass
                else:
                    #print("Recursive trigger (event: %s,  class: %s)" % (event, class_name))
                    continue

            # store previous called triggers
            prev_called_triggers.append((event, class_name, input_json))

            # set call_trigger to false to prevent infinite loops
            if not issubclass(trigger.__class__, Trigger):
                # if this is not a trigger, then wrap in a command
                handler_cmd = HandlerCmd(trigger)
                handler_cmd.add_description(trigger.get_description())
                trigger = handler_cmd

            # triggers need to run in their own transaction when
            # they get here.
            Trigger.execute_cmd(trigger, call_trigger=False)
            # DEPRECATED
            #in_transaction = trigger.is_in_transaction()

    call_all_triggers = staticmethod(call_all_triggers)

    def _get_triggers(cls, call_event, integral_only=False, project_code=None):
        if integral_only:
            trigger_key = "%s:integral" % cls.TRIGGER_EVENT_KEY
        else:
            trigger_key = cls.TRIGGER_EVENT_KEY

        notification_key = cls.NOTIFICATION_EVENT_KEY
        trigger_dict = Container.get(trigger_key)
        notification_dict = Container.get(notification_key)

        call_event_key = jsondumps(call_event)

        # NOTE: get_db_triggers only get triggers for this project ...
        # need to update so that triggers from other projects
        # are also executed

        # static triggers could grow when more sTypes are searched
        last_static_count = Container.get(cls.STATIC_TRIGGER_COUNT)
        static_trigger_sobjs = cls.get_static_triggers()
        current_static_count = len(static_trigger_sobjs)
        renew = last_static_count != current_static_count and not integral_only

        if trigger_dict == None or renew:
            # assign keys to each trigger
            trigger_dict = {}
            Container.put(trigger_key, trigger_dict)

            if integral_only:
                # just get all the integral triggers
                trigger_sobjs = cls.get_integral_triggers()
            else:

                # build a list of site and db of the triggers for current
                # project
                trigger_sobjs = cls.get_db_triggers()

                # append all static triggers
                if static_trigger_sobjs:
                    Container.put(cls.STATIC_TRIGGER_COUNT,
                                  current_static_count)
                    trigger_sobjs.extend(static_trigger_sobjs)

                # append all integral triggers
                integral_trigger_sobjs = cls.get_integral_triggers()
                if integral_trigger_sobjs:
                    trigger_sobjs.extend(integral_trigger_sobjs)

                # append all notifications

                #notification_sobjs = cls.get_notifications_by_event()
                #trigger_sobjs.extend(notification_sobjs)

            for trigger_sobj in trigger_sobjs:
                trigger_event = trigger_sobj.get_value("event")
                trigger_process = trigger_sobj.get_value("process")
                trigger_stype = trigger_sobj.get_value("search_type",
                                                       no_exception=True)

                listen_event = {}
                listen_event['event'] = trigger_event
                if trigger_process:
                    listen_event['process'] = trigger_process
                if trigger_stype:
                    listen_event['search_type'] = trigger_stype

                listen_key = jsondumps(listen_event)

                trigger_list = trigger_dict.get(listen_key)
                if trigger_list == None:
                    trigger_list = []
                    trigger_dict[listen_key] = trigger_list

                trigger_list.append(trigger_sobj)

        called_triggers = trigger_dict.get(call_event_key)

        # assign keys to each notification
        if notification_dict == None:
            notification_dict = {}
            Container.put(notification_key, notification_dict)

            # append all notifications without going thru all the logics with project_code
            notification_sobjs = cls.get_notifications_by_event()

            for trigger_sobj in notification_sobjs:
                trigger_event = trigger_sobj.get_value("event")
                trigger_process = trigger_sobj.get_value("process")
                trigger_stype = trigger_sobj.get_value("search_type",
                                                       no_exception=True)
                trigger_project = trigger_sobj.get_value("project_code",
                                                         no_exception=True)

                listen_event = {}
                listen_event['event'] = trigger_event
                if trigger_process:
                    listen_event['process'] = trigger_process
                if trigger_stype:
                    listen_event['search_type'] = trigger_stype
                # notification specific
                if trigger_project:
                    listen_event['project_code'] = trigger_project

                listen_key = jsondumps(listen_event)

                notification_list = notification_dict.get(listen_key)
                if notification_list == None:
                    notification_list = []
                    notification_dict[listen_key] = notification_list

                notification_list.append(trigger_sobj)

        # we have to call with and without project_code to cover both cases
        key2 = call_event.copy()

        if not project_code:
            from pyasm.biz import Project
            project_code = Project.get_project_code()

        key2['project_code'] = project_code

        call_event_key2 = jsondumps(key2)
        matched_notifications = []
        for call_event_key in [call_event_key, call_event_key2]:
            matched = notification_dict.get(call_event_key)
            if matched:
                matched_notifications.extend(matched)

        combined_triggers = []
        if called_triggers:
            combined_triggers.extend(called_triggers)
        if matched_notifications:
            combined_triggers.extend(matched_notifications)

        return combined_triggers

    _get_triggers = classmethod(_get_triggers)

    def get_db_triggers(cls):

        site_triggers = Container.get(cls.KEY)
        if site_triggers == None:
            # find all of the triggers
            search = Search("sthpw/trigger")
            search.add_project_filter()
            site_triggers = search.get_sobjects()
            Container.put(cls.KEY, site_triggers)

        # find all of the project triggers
        from pyasm.biz import Project
        project_code = Project.get_project_code()
        key = "%s:%s" % (cls.KEY, project_code)
        project_triggers = Container.get(key)
        if project_triggers == None:
            if project_code not in ['admin', 'sthpw']:
                try:
                    search = Search("config/trigger")
                    project_triggers = search.get_sobjects()
                except SearchException, e:
                    print "WARNING: ", e
                    project_triggers = []
            else:
                project_triggers = []
            Container.put(key, project_triggers)

        triggers = []
        triggers.extend(site_triggers)
        triggers.extend(project_triggers)
        return triggers
Example #33
class WKBWriteFunc(GEOSFuncFactory):
    argtypes = [WKB_WRITE_PTR, GEOM_PTR, POINTER(c_size_t)]
    restype = c_uchar_p
    errcheck = staticmethod(check_sized_string)
Example #34
    class BaseConfigurator(object):
        """
        The configurator base class which defines some useful defaults.
        """

        CONVERT_PATTERN = re.compile(r'^(?P<prefix>[a-z]+)://(?P<suffix>.*)$')

        WORD_PATTERN = re.compile(r'^\s*(\w+)\s*')
        DOT_PATTERN = re.compile(r'^\.\s*(\w+)\s*')
        INDEX_PATTERN = re.compile(r'^\[\s*(\w+)\s*\]\s*')
        DIGIT_PATTERN = re.compile(r'^\d+$')

        value_converters = {
            'ext' : 'ext_convert',
            'cfg' : 'cfg_convert',
        }

        # We might want to use a different one, e.g. importlib
        importer = staticmethod(__import__)

        def __init__(self, config):
            self.config = ConvertingDict(config)
            self.config.configurator = self

        def resolve(self, s):
            """
            Resolve strings to objects using standard import and attribute
            syntax.
            """
            name = s.split('.')
            used = name.pop(0)
            try:
                found = self.importer(used)
                for frag in name:
                    used += '.' + frag
                    try:
                        found = getattr(found, frag)
                    except AttributeError:
                        self.importer(used)
                        found = getattr(found, frag)
                return found
            except ImportError:
                e, tb = sys.exc_info()[1:]
                v = ValueError('Cannot resolve %r: %s' % (s, e))
                v.__cause__, v.__traceback__ = e, tb
                raise v

        def ext_convert(self, value):
            """Default converter for the ext:// protocol."""
            return self.resolve(value)

        def cfg_convert(self, value):
            """Default converter for the cfg:// protocol."""
            rest = value
            m = self.WORD_PATTERN.match(rest)
            if m is None:
                raise ValueError("Unable to convert %r" % value)
            else:
                rest = rest[m.end():]
                d = self.config[m.groups()[0]]
                #print d, rest
                while rest:
                    m = self.DOT_PATTERN.match(rest)
                    if m:
                        d = d[m.groups()[0]]
                    else:
                        m = self.INDEX_PATTERN.match(rest)
                        if m:
                            idx = m.groups()[0]
                            if not self.DIGIT_PATTERN.match(idx):
                                d = d[idx]
                            else:
                                try:
                                    n = int(idx) # try as number first (most likely)
                                    d = d[n]
                                except TypeError:
                                    d = d[idx]
                    if m:
                        rest = rest[m.end():]
                    else:
                        raise ValueError('Unable to convert '
                                         '%r at %r' % (value, rest))
            #rest should be empty
            return d

        def convert(self, value):
            """
            Convert values to an appropriate type. dicts, lists and tuples are
            replaced by their converting alternatives. Strings are checked to
            see if they have a conversion format and are converted if they do.
            """
            if not isinstance(value, ConvertingDict) and isinstance(value, dict):
                value = ConvertingDict(value)
                value.configurator = self
            elif not isinstance(value, ConvertingList) and isinstance(value, list):
                value = ConvertingList(value)
                value.configurator = self
            elif not isinstance(value, ConvertingTuple) and\
                     isinstance(value, tuple):
                value = ConvertingTuple(value)
                value.configurator = self
            elif isinstance(value, string_types):
                m = self.CONVERT_PATTERN.match(value)
                if m:
                    d = m.groupdict()
                    prefix = d['prefix']
                    converter = self.value_converters.get(prefix, None)
                    if converter:
                        suffix = d['suffix']
                        converter = getattr(self, converter)
                        value = converter(suffix)
            return value

        def configure_custom(self, config):
            """Configure an object with a user-supplied factory."""
            c = config.pop('()')
            if not callable(c):
                c = self.resolve(c)
            props = config.pop('.', None)
            # Check for valid identifiers
            kwargs = dict([(k, config[k]) for k in config if valid_ident(k)])
            result = c(**kwargs)
            if props:
                for name, value in props.items():
                    setattr(result, name, value)
            return result

        def as_tuple(self, value):
            """Utility function which converts lists to tuples."""
            if isinstance(value, list):
                value = tuple(value)
            return value
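
A small sketch of the two converters in action (assumes the surrounding module's ConvertingDict and friends are available):

cfg = BaseConfigurator({'handlers': {'console': {'level': 'DEBUG'}}})
cfg.convert('cfg://handlers.console.level')  # -> 'DEBUG', via cfg_convert
cfg.convert('ext://sys.stderr')              # -> the sys.stderr object, via ext_convert
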
Example #35
class LoopingCall:
    """Call a function repeatedly.

    @ivar f: The function to call.
    @ivar a: A tuple of arguments to pass the function.
    @ivar kw: A dictionary of keyword arguments to pass to the function.
    """

    call = None
    running = False
    deferred = None
    interval = None
    count = None
    starttime = None

    def _callLater(self, delay):
        return reactor.callLater(delay, self)

    _seconds = staticmethod(seconds)

    def __init__(self, f, *a, **kw):
        self.f = f
        self.a = a
        self.kw = kw

    def start(self, interval, now=True):
        """Start running function every interval seconds.

        @param interval: The number of seconds between calls.  May be
        less than one.  Precision will depend on the underlying
        platform, the available hardware, and the load on the system.

        @param now: If True, run this call right now.  Otherwise, wait
        until the interval has elapsed before beginning.

        @return: A Deferred whose callback will be invoked with
        C{self} when C{self.stop} is called, or whose errback will be
        invoked if the function raises an exception.
        """
        assert not self.running, ("Tried to start an already running "
                                  "LoopingCall.")
        if interval < 0:
            raise ValueError, "interval must be >= 0"
        self.running = True
        d = self.deferred = defer.Deferred()
        self.starttime = self._seconds()
        self.count = 0
        self.interval = interval
        if now:
            self()
        else:
            self._reschedule()
        return d

    def stop(self):
        """Stop running function.
        """
        assert self.running, ("Tried to stop a LoopingCall that was "
                              "not running.")
        self.running = False
        if self.call is not None:
            self.call.cancel()
            self.call = None
            d, self.deferred = self.deferred, None
            d.callback(self)

    def __call__(self):
        self.call = None
        try:
            self.f(*self.a, **self.kw)
        except:
            self.running = False
            d, self.deferred = self.deferred, None
            d.errback()
        else:
            if self.running:
                self._reschedule()
            else:
                d, self.deferred = self.deferred, None
                d.callback(self)

    def _reschedule(self):
        if self.interval == 0:
            self.call = self._callLater(0)
            return

        fromNow = self.starttime - self._seconds()

        while self.running:
            self.count += 1
            fromStart = self.count * self.interval
            delay = fromNow + fromStart
            if delay > 0:
                self.call = self._callLater(delay)
                return

    def __repr__(self):
        if hasattr(self.f, 'func_name'):
            func = self.f.func_name
            if hasattr(self.f, 'im_class'):
                func = self.f.im_class.__name__ + '.' + func
        else:
            func = reflect.safe_repr(self.f)

        return 'LoopingCall<%r>(%s, *%s, **%s)' % (
            self.interval, func, reflect.safe_repr(self.a),
            reflect.safe_repr(self.kw))
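
Typical usage of this class under a running Twisted reactor (a sketch; the print statement is kept Python 2 to match the snippet):

from twisted.internet import reactor

def poll():
    print 'tick'

loop = LoopingCall(poll)
d = loop.start(2.0, now=False)    # Deferred fires once loop.stop() is called
reactor.callLater(10, loop.stop)
reactor.run()
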
Example #36
class SpecialistPoolServiceAsyncClient:
    """A service for creating and managing Customer SpecialistPools.
    When customers start Data Labeling jobs, they can reuse/create
    Specialist Pools to bring their own Specialists to label the
    data. Customers can add/remove Managers for the Specialist Pool
    on Cloud console, then Managers will get email notifications to
    manage Specialists and tasks on CrowdCompute console.
    """

    _client: SpecialistPoolServiceClient

    DEFAULT_ENDPOINT = SpecialistPoolServiceClient.DEFAULT_ENDPOINT
    DEFAULT_MTLS_ENDPOINT = SpecialistPoolServiceClient.DEFAULT_MTLS_ENDPOINT

    specialist_pool_path = staticmethod(
        SpecialistPoolServiceClient.specialist_pool_path)
    parse_specialist_pool_path = staticmethod(
        SpecialistPoolServiceClient.parse_specialist_pool_path)
    common_billing_account_path = staticmethod(
        SpecialistPoolServiceClient.common_billing_account_path)
    parse_common_billing_account_path = staticmethod(
        SpecialistPoolServiceClient.parse_common_billing_account_path)
    common_folder_path = staticmethod(
        SpecialistPoolServiceClient.common_folder_path)
    parse_common_folder_path = staticmethod(
        SpecialistPoolServiceClient.parse_common_folder_path)
    common_organization_path = staticmethod(
        SpecialistPoolServiceClient.common_organization_path)
    parse_common_organization_path = staticmethod(
        SpecialistPoolServiceClient.parse_common_organization_path)
    common_project_path = staticmethod(
        SpecialistPoolServiceClient.common_project_path)
    parse_common_project_path = staticmethod(
        SpecialistPoolServiceClient.parse_common_project_path)
    common_location_path = staticmethod(
        SpecialistPoolServiceClient.common_location_path)
    parse_common_location_path = staticmethod(
        SpecialistPoolServiceClient.parse_common_location_path)

    @classmethod
    def from_service_account_info(cls, info: dict, *args, **kwargs):
        """Creates an instance of this client using the provided credentials
            info.

        Args:
            info (dict): The service account private key info.
            args: Additional arguments to pass to the constructor.
            kwargs: Additional arguments to pass to the constructor.

        Returns:
            SpecialistPoolServiceAsyncClient: The constructed client.
        """
        return SpecialistPoolServiceClient.from_service_account_info.__func__(
            SpecialistPoolServiceAsyncClient, info, *args,
            **kwargs)  # type: ignore

    @classmethod
    def from_service_account_file(cls, filename: str, *args, **kwargs):
        """Creates an instance of this client using the provided credentials
            file.

        Args:
            filename (str): The path to the service account private key json
                file.
            args: Additional arguments to pass to the constructor.
            kwargs: Additional arguments to pass to the constructor.

        Returns:
            SpecialistPoolServiceAsyncClient: The constructed client.
        """
        return SpecialistPoolServiceClient.from_service_account_file.__func__(
            SpecialistPoolServiceAsyncClient, filename, *args,
            **kwargs)  # type: ignore

    from_service_account_json = from_service_account_file

    @classmethod
    def get_mtls_endpoint_and_cert_source(
            cls, client_options: Optional[ClientOptions] = None):
        """Return the API endpoint and client cert source for mutual TLS.

        The client cert source is determined in the following order:
        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
        client cert source is None.
        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
        default client cert source exists, use the default one; otherwise the client cert
        source is None.

        The API endpoint is determined in the following order:
        (1) if `client_options.api_endpoint` is provided, use the provided one.
        (2) if the `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
        default mTLS endpoint; if the environment variable is "never", use the default API
        endpoint; otherwise, if a client cert source exists, use the default mTLS endpoint;
        otherwise use the default API endpoint.

        More details can be found at https://google.aip.dev/auth/4114.

        Args:
            client_options (google.api_core.client_options.ClientOptions): Custom options for the
                client. Only the `api_endpoint` and `client_cert_source` properties may be used
                in this method.

        Returns:
            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
                client cert source to use.

        Raises:
            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
        """
        return SpecialistPoolServiceClient.get_mtls_endpoint_and_cert_source(
            client_options)  # type: ignore
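
    # A hedged illustration of the resolution order above; "example-endpoint"
    # is a placeholder and ClientOptions comes from google.api_core.client_options:
    #
    #     options = ClientOptions(api_endpoint="example-endpoint")
    #     endpoint, cert_source = (
    #         SpecialistPoolServiceAsyncClient.get_mtls_endpoint_and_cert_source(options))
    #
    # With an explicit api_endpoint, rule (1) wins; without it,
    # GOOGLE_API_USE_MTLS_ENDPOINT ("always"/"never"/"auto") selects between
    # DEFAULT_MTLS_ENDPOINT and DEFAULT_ENDPOINT as described above.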

    @property
    def transport(self) -> SpecialistPoolServiceTransport:
        """Returns the transport used by the client instance.

        Returns:
            SpecialistPoolServiceTransport: The transport used by the client instance.
        """
        return self._client.transport

    get_transport_class = functools.partial(
        type(SpecialistPoolServiceClient).get_transport_class,
        type(SpecialistPoolServiceClient),
    )

    def __init__(
        self,
        *,
        credentials: ga_credentials.Credentials = None,
        transport: Union[str, SpecialistPoolServiceTransport] = "grpc_asyncio",
        client_options: ClientOptions = None,
        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
    ) -> None:
        """Instantiates the specialist pool service client.

        Args:
            credentials (Optional[google.auth.credentials.Credentials]): The
                authorization credentials to attach to requests. These
                credentials identify the application to the service; if none
                are specified, the client will attempt to ascertain the
                credentials from the environment.
            transport (Union[str, ~.SpecialistPoolServiceTransport]): The
                transport to use. If set to None, a transport is chosen
                automatically.
            client_options (ClientOptions): Custom options for the client. It
                won't take effect if a ``transport`` instance is provided.
                (1) The ``api_endpoint`` property can be used to override the
                default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
                environment variable can also be used to override the endpoint:
                "always" (always use the default mTLS endpoint), "never" (always
                use the default regular endpoint) and "auto" (auto switch to the
                default mTLS endpoint if client certificate is present, this is
                the default value). However, the ``api_endpoint`` property takes
                precedence if provided.
                (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
                is "true", then the ``client_cert_source`` property can be used
                to provide client certificate for mutual TLS transport. If
                not provided, the default SSL client certificate will be used if
                present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
                set, no client certificate will be used.

        Raises:
            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
                creation failed for any reason.
        """
        self._client = SpecialistPoolServiceClient(
            credentials=credentials,
            transport=transport,
            client_options=client_options,
            client_info=client_info,
        )

    async def create_specialist_pool(
        self,
        request: Union[specialist_pool_service.CreateSpecialistPoolRequest,
                       dict] = None,
        *,
        parent: str = None,
        specialist_pool: gca_specialist_pool.SpecialistPool = None,
        retry: OptionalRetry = gapic_v1.method.DEFAULT,
        timeout: float = None,
        metadata: Sequence[Tuple[str, str]] = (),
    ) -> operation_async.AsyncOperation:
        r"""Creates a SpecialistPool.

        .. code-block:: python

            from google.cloud import aiplatform_v1beta1

            def sample_create_specialist_pool():
                # Create a client
                client = aiplatform_v1beta1.SpecialistPoolServiceClient()

                # Initialize request argument(s)
                specialist_pool = aiplatform_v1beta1.SpecialistPool()
                specialist_pool.name = "name_value"
                specialist_pool.display_name = "display_name_value"

                request = aiplatform_v1beta1.CreateSpecialistPoolRequest(
                    parent="parent_value",
                    specialist_pool=specialist_pool,
                )

                # Make the request
                operation = client.create_specialist_pool(request=request)

                print("Waiting for operation to complete...")

                response = operation.result()

                # Handle the response
                print(response)

        Args:
            request (Union[google.cloud.aiplatform_v1beta1.types.CreateSpecialistPoolRequest, dict]):
                The request object. Request message for
                [SpecialistPoolService.CreateSpecialistPool][google.cloud.aiplatform.v1beta1.SpecialistPoolService.CreateSpecialistPool].
            parent (:class:`str`):
                Required. The parent Project name for the new
                SpecialistPool. The form is
                ``projects/{project}/locations/{location}``.

                This corresponds to the ``parent`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            specialist_pool (:class:`google.cloud.aiplatform_v1beta1.types.SpecialistPool`):
                Required. The SpecialistPool to
                create.

                This corresponds to the ``specialist_pool`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.api_core.operation_async.AsyncOperation:
                An object representing a long-running operation.

                The result type for the operation will be :class:`google.cloud.aiplatform_v1beta1.types.SpecialistPool` SpecialistPool represents customers' own workforce to work on their data
                   labeling jobs. It includes a group of specialist
                   managers and workers. Managers are responsible for
                   managing the workers in this pool as well as
                   customers' data labeling jobs associated with this
                   pool. Customers create specialist pool as well as
                   start data labeling jobs on Cloud, managers and
                   workers handle the jobs using CrowdCompute console.

        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([parent, specialist_pool])
        if request is not None and has_flattened_params:
            raise ValueError("If the `request` argument is set, then none of "
                             "the individual field arguments should be set.")

        request = specialist_pool_service.CreateSpecialistPoolRequest(request)

        # If we have keyword arguments corresponding to fields on the
        # request, apply these.
        if parent is not None:
            request.parent = parent
        if specialist_pool is not None:
            request.specialist_pool = specialist_pool

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = gapic_v1.method_async.wrap_method(
            self._client._transport.create_specialist_pool,
            default_timeout=5.0,
            client_info=DEFAULT_CLIENT_INFO,
        )

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (gapic_v1.routing_header.to_grpc_metadata(
            (("parent", request.parent), )), )

        # Send the request.
        response = await rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Wrap the response in an operation future.
        response = operation_async.from_gapic(
            response,
            self._client._transport.operations_client,
            gca_specialist_pool.SpecialistPool,
            metadata_type=specialist_pool_service.
            CreateSpecialistPoolOperationMetadata,
        )

        # Done; return the response.
        return response

    async def get_specialist_pool(
        self,
        request: Union[specialist_pool_service.GetSpecialistPoolRequest,
                       dict] = None,
        *,
        name: str = None,
        retry: OptionalRetry = gapic_v1.method.DEFAULT,
        timeout: float = None,
        metadata: Sequence[Tuple[str, str]] = (),
    ) -> specialist_pool.SpecialistPool:
        r"""Gets a SpecialistPool.

        .. code-block:: python

            from google.cloud import aiplatform_v1beta1

            def sample_get_specialist_pool():
                # Create a client
                client = aiplatform_v1beta1.SpecialistPoolServiceClient()

                # Initialize request argument(s)
                request = aiplatform_v1beta1.GetSpecialistPoolRequest(
                    name="name_value",
                )

                # Make the request
                response = client.get_specialist_pool(request=request)

                # Handle the response
                print(response)

        Args:
            request (Union[google.cloud.aiplatform_v1beta1.types.GetSpecialistPoolRequest, dict]):
                The request object. Request message for
                [SpecialistPoolService.GetSpecialistPool][google.cloud.aiplatform.v1beta1.SpecialistPoolService.GetSpecialistPool].
            name (:class:`str`):
                Required. The name of the SpecialistPool resource. The
                form is
                ``projects/{project}/locations/{location}/specialistPools/{specialist_pool}``.

                This corresponds to the ``name`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.cloud.aiplatform_v1beta1.types.SpecialistPool:
                SpecialistPool represents customers'
                own workforce to work on their data
                labeling jobs. It includes a group of
                specialist managers and workers.
                Managers are responsible for managing
                the workers in this pool as well as
                customers' data labeling jobs associated
                with this pool. Customers create
                specialist pool as well as start data
                labeling jobs on Cloud, managers and
                workers handle the jobs using
                CrowdCompute console.

        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([name])
        if request is not None and has_flattened_params:
            raise ValueError("If the `request` argument is set, then none of "
                             "the individual field arguments should be set.")

        request = specialist_pool_service.GetSpecialistPoolRequest(request)

        # If we have keyword arguments corresponding to fields on the
        # request, apply these.
        if name is not None:
            request.name = name

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = gapic_v1.method_async.wrap_method(
            self._client._transport.get_specialist_pool,
            default_timeout=5.0,
            client_info=DEFAULT_CLIENT_INFO,
        )

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (gapic_v1.routing_header.to_grpc_metadata(
            (("name", request.name), )), )

        # Send the request.
        response = await rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Done; return the response.
        return response

    async def list_specialist_pools(
        self,
        request: Union[specialist_pool_service.ListSpecialistPoolsRequest,
                       dict] = None,
        *,
        parent: str = None,
        retry: OptionalRetry = gapic_v1.method.DEFAULT,
        timeout: float = None,
        metadata: Sequence[Tuple[str, str]] = (),
    ) -> pagers.ListSpecialistPoolsAsyncPager:
        r"""Lists SpecialistPools in a Location.

        .. code-block:: python

            from google.cloud import aiplatform_v1beta1

            def sample_list_specialist_pools():
                # Create a client
                client = aiplatform_v1beta1.SpecialistPoolServiceClient()

                # Initialize request argument(s)
                request = aiplatform_v1beta1.ListSpecialistPoolsRequest(
                    parent="parent_value",
                )

                # Make the request
                page_result = client.list_specialist_pools(request=request)

                # Handle the response
                for response in page_result:
                    print(response)

        Args:
            request (Union[google.cloud.aiplatform_v1beta1.types.ListSpecialistPoolsRequest, dict]):
                The request object. Request message for
                [SpecialistPoolService.ListSpecialistPools][google.cloud.aiplatform.v1beta1.SpecialistPoolService.ListSpecialistPools].
            parent (:class:`str`):
                Required. The name of the SpecialistPool's parent
                resource. Format:
                ``projects/{project}/locations/{location}``

                This corresponds to the ``parent`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.cloud.aiplatform_v1beta1.services.specialist_pool_service.pagers.ListSpecialistPoolsAsyncPager:
                Response message for
                [SpecialistPoolService.ListSpecialistPools][google.cloud.aiplatform.v1beta1.SpecialistPoolService.ListSpecialistPools].

                Iterating over this object will yield results and
                resolve additional pages automatically.

        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([parent])
        if request is not None and has_flattened_params:
            raise ValueError("If the `request` argument is set, then none of "
                             "the individual field arguments should be set.")

        request = specialist_pool_service.ListSpecialistPoolsRequest(request)

        # If we have keyword arguments corresponding to fields on the
        # request, apply these.
        if parent is not None:
            request.parent = parent

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = gapic_v1.method_async.wrap_method(
            self._client._transport.list_specialist_pools,
            default_timeout=5.0,
            client_info=DEFAULT_CLIENT_INFO,
        )

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (gapic_v1.routing_header.to_grpc_metadata(
            (("parent", request.parent), )), )

        # Send the request.
        response = await rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # This method is paged; wrap the response in a pager, which provides
        # an `__aiter__` convenience method.
        response = pagers.ListSpecialistPoolsAsyncPager(
            method=rpc,
            request=request,
            response=response,
            metadata=metadata,
        )

        # Done; return the response.
        return response

    async def delete_specialist_pool(
        self,
        request: Union[specialist_pool_service.DeleteSpecialistPoolRequest,
                       dict] = None,
        *,
        name: str = None,
        retry: OptionalRetry = gapic_v1.method.DEFAULT,
        timeout: float = None,
        metadata: Sequence[Tuple[str, str]] = (),
    ) -> operation_async.AsyncOperation:
        r"""Deletes a SpecialistPool as well as all Specialists
        in the pool.


        .. code-block:: python

            from google.cloud import aiplatform_v1beta1

            def sample_delete_specialist_pool():
                # Create a client
                client = aiplatform_v1beta1.SpecialistPoolServiceClient()

                # Initialize request argument(s)
                request = aiplatform_v1beta1.DeleteSpecialistPoolRequest(
                    name="name_value",
                )

                # Make the request
                operation = client.delete_specialist_pool(request=request)

                print("Waiting for operation to complete...")

                response = operation.result()

                # Handle the response
                print(response)

        Args:
            request (Union[google.cloud.aiplatform_v1beta1.types.DeleteSpecialistPoolRequest, dict]):
                The request object. Request message for
                [SpecialistPoolService.DeleteSpecialistPool][google.cloud.aiplatform.v1beta1.SpecialistPoolService.DeleteSpecialistPool].
            name (:class:`str`):
                Required. The resource name of the SpecialistPool to
                delete. Format:
                ``projects/{project}/locations/{location}/specialistPools/{specialist_pool}``

                This corresponds to the ``name`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.api_core.operation_async.AsyncOperation:
                An object representing a long-running operation.

                The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated
                   empty messages in your APIs. A typical example is to
                   use it as the request or the response type of an API
                   method. For instance:

                      service Foo {
                         rpc Bar(google.protobuf.Empty) returns
                         (google.protobuf.Empty);

                      }

                   The JSON representation for Empty is empty JSON
                   object {}.

        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([name])
        if request is not None and has_flattened_params:
            raise ValueError("If the `request` argument is set, then none of "
                             "the individual field arguments should be set.")

        request = specialist_pool_service.DeleteSpecialistPoolRequest(request)

        # If we have keyword arguments corresponding to fields on the
        # request, apply these.
        if name is not None:
            request.name = name

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = gapic_v1.method_async.wrap_method(
            self._client._transport.delete_specialist_pool,
            default_timeout=5.0,
            client_info=DEFAULT_CLIENT_INFO,
        )

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (gapic_v1.routing_header.to_grpc_metadata(
            (("name", request.name), )), )

        # Send the request.
        response = await rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Wrap the response in an operation future.
        response = operation_async.from_gapic(
            response,
            self._client._transport.operations_client,
            empty_pb2.Empty,
            metadata_type=gca_operation.DeleteOperationMetadata,
        )

        # Done; return the response.
        return response

    async def update_specialist_pool(
        self,
        request: Union[specialist_pool_service.UpdateSpecialistPoolRequest,
                       dict] = None,
        *,
        specialist_pool: gca_specialist_pool.SpecialistPool = None,
        update_mask: field_mask_pb2.FieldMask = None,
        retry: OptionalRetry = gapic_v1.method.DEFAULT,
        timeout: float = None,
        metadata: Sequence[Tuple[str, str]] = (),
    ) -> operation_async.AsyncOperation:
        r"""Updates a SpecialistPool.

        .. code-block:: python

            from google.cloud import aiplatform_v1beta1

            def sample_update_specialist_pool():
                # Create a client
                client = aiplatform_v1beta1.SpecialistPoolServiceClient()

                # Initialize request argument(s)
                specialist_pool = aiplatform_v1beta1.SpecialistPool()
                specialist_pool.name = "name_value"
                specialist_pool.display_name = "display_name_value"

                request = aiplatform_v1beta1.UpdateSpecialistPoolRequest(
                    specialist_pool=specialist_pool,
                )

                # Make the request
                operation = client.update_specialist_pool(request=request)

                print("Waiting for operation to complete...")

                response = operation.result()

                # Handle the response
                print(response)

        Args:
            request (Union[google.cloud.aiplatform_v1beta1.types.UpdateSpecialistPoolRequest, dict]):
                The request object. Request message for
                [SpecialistPoolService.UpdateSpecialistPool][google.cloud.aiplatform.v1beta1.SpecialistPoolService.UpdateSpecialistPool].
            specialist_pool (:class:`google.cloud.aiplatform_v1beta1.types.SpecialistPool`):
                Required. The SpecialistPool which
                replaces the resource on the server.

                This corresponds to the ``specialist_pool`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`):
                Required. The update mask applies to
                the resource.

                This corresponds to the ``update_mask`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.api_core.operation_async.AsyncOperation:
                An object representing a long-running operation.

                The result type for the operation will be :class:`google.cloud.aiplatform_v1beta1.types.SpecialistPool` SpecialistPool represents customers' own workforce to work on their data
                   labeling jobs. It includes a group of specialist
                   managers and workers. Managers are responsible for
                   managing the workers in this pool as well as
                   customers' data labeling jobs associated with this
                   pool. Customers create specialist pool as well as
                   start data labeling jobs on Cloud, managers and
                   workers handle the jobs using CrowdCompute console.

        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([specialist_pool, update_mask])
        if request is not None and has_flattened_params:
            raise ValueError("If the `request` argument is set, then none of "
                             "the individual field arguments should be set.")

        request = specialist_pool_service.UpdateSpecialistPoolRequest(request)

        # If we have keyword arguments corresponding to fields on the
        # request, apply these.
        if specialist_pool is not None:
            request.specialist_pool = specialist_pool
        if update_mask is not None:
            request.update_mask = update_mask

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = gapic_v1.method_async.wrap_method(
            self._client._transport.update_specialist_pool,
            default_timeout=5.0,
            client_info=DEFAULT_CLIENT_INFO,
        )

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (gapic_v1.routing_header.to_grpc_metadata(
            (("specialist_pool.name", request.specialist_pool.name), )), )

        # Send the request.
        response = await rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Wrap the response in an operation future.
        response = operation_async.from_gapic(
            response,
            self._client._transport.operations_client,
            gca_specialist_pool.SpecialistPool,
            metadata_type=specialist_pool_service.
            UpdateSpecialistPoolOperationMetadata,
        )

        # Done; return the response.
        return response

    async def __aenter__(self):
        return self

    async def __aexit__(self, exc_type, exc, tb):
        await self.transport.close()
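
# A minimal, hedged usage sketch for the async client above. Real calls need
# valid Google Cloud credentials, and the resource name below is hypothetical.
import asyncio

from google.cloud import aiplatform_v1beta1


async def main():
    # The client supports use as an async context manager (see
    # __aenter__/__aexit__ above), which closes the transport on exit.
    async with aiplatform_v1beta1.SpecialistPoolServiceAsyncClient() as client:
        pool = await client.get_specialist_pool(
            name="projects/my-project/locations/us-central1/"
                 "specialistPools/my-pool")
        print(pool.display_name)

asyncio.run(main())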
Example #37
class File(Base):

    directory = ''

    _default_extension = 'gv'

    def __init__(self, filename=None, directory=None,
                 format=None, engine=None, encoding=Base._encoding):
        if filename is None:
            name = getattr(self, 'name', None) or self.__class__.__name__
            filename = '%s.%s' % (name, self._default_extension)
        self.filename = filename

        if directory is not None:
            self.directory = directory

        if format is not None:
            self.format = format

        if engine is not None:
            self.engine = engine

        self.encoding = encoding

    def _kwargs(self):
        result = super(File, self)._kwargs()
        result['filename'] = self.filename
        if 'directory' in self.__dict__:
            result['directory'] = self.directory
        return result

    def _repr_svg_(self):
        return self.pipe(format='svg').decode(self._encoding)

    def pipe(self, format=None):
        """Return the source piped through the Graphviz layout command.

        Args:
            format: The output format used for rendering (``'pdf'``, ``'png'``, etc.).
        Returns:
            Binary (encoded) stdout of the layout command.
        Raises:
            ValueError: If ``format`` is not known.
            graphviz.ExecutableNotFound: If the Graphviz executable is not found.
            subprocess.CalledProcessError: If the exit status is non-zero.
        """
        if format is None:
            format = self._format

        data = text_type(self.source).encode(self._encoding)

        outs = backend.pipe(self._engine, format, data)

        return outs

    @property
    def filepath(self):
        return os.path.join(self.directory, self.filename)

    def save(self, filename=None, directory=None):
        """Save the DOT source to file. Ensure the file ends with a newline.

        Args:
            filename: Filename for saving the source (defaults to ``name`` + ``'.gv'``)
            directory: (Sub)directory for source saving and rendering.
        Returns:
            The (possibly relative) path of the saved source file.
        """
        if filename is not None:
            self.filename = filename
        if directory is not None:
            self.directory = directory

        filepath = self.filepath
        tools.mkdirs(filepath)

        data = text_type(self.source)

        with io.open(filepath, 'w', encoding=self.encoding) as fd:
            fd.write(data)
            if not data.endswith(u'\n'):
                fd.write(u'\n')

        return filepath

    def render(self, filename=None, directory=None, view=False, cleanup=False):
        """Save the source to file and render with the Graphviz engine.

        Args:
            filename: Filename for saving the source (defaults to ``name`` + ``'.gv'``)
            directory: (Sub)directory for source saving and rendering.
            view (bool): Open the rendered result with the default application.
            cleanup (bool): Delete the source file after rendering.
        Returns:
            The (possibly relative) path of the rendered file.
        Raises:
            graphviz.ExecutableNotFound: If the Graphviz executable is not found.
            subprocess.CalledProcessError: If the exit status is non-zero.
            RuntimeError: If viewer opening is requested but not supported.
        """
        filepath = self.save(filename, directory)

        rendered = backend.render(self._engine, self._format, filepath)

        if cleanup:
            os.remove(filepath)

        if view:
            self._view(rendered, self._format)

        return rendered

    def view(self, filename=None, directory=None, cleanup=False):
        """Save the source to file, open the rendered result in a viewer.

        Args:
            filename: Filename for saving the source (defaults to ``name`` + ``'.gv'``)
            directory: (Sub)directory for source saving and rendering.
            cleanup (bool): Delete the source file after rendering.
        Returns:
            The (possibly relative) path of the rendered file.
        Raises:
            graphviz.ExecutableNotFound: If the Graphviz executable is not found.
            subprocess.CalledProcessError: If the exit status is non-zero.
            RuntimeError: If opening the viewer is not supported.

        Short-cut method for calling :meth:`.render` with ``view=True``.
        """
        return self.render(filename=filename, directory=directory, view=True,
                           cleanup=cleanup)

    def _view(self, filepath, format):
        """Start the right viewer based on file format and platform."""
        methodnames = [
            '_view_%s_%s' % (format, backend.PLATFORM),
            '_view_%s' % backend.PLATFORM,
        ]
        for name in methodnames:
            view_method = getattr(self, name, None)
            if view_method is not None:
                break
        else:
            raise RuntimeError('%r has no built-in viewer support for %r '
                'on %r platform' % (self.__class__, format, backend.PLATFORM))
        view_method(filepath)

    _view_darwin = staticmethod(backend.view.darwin)
    _view_freebsd = staticmethod(backend.view.freebsd)
    _view_linux = staticmethod(backend.view.linux)
    _view_windows = staticmethod(backend.view.windows)
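
# A hedged usage sketch for the File API above through the public graphviz
# package (File underlies graphviz.Source/Digraph; the Graphviz executables
# must be on PATH for pipe()/render() to work).
import graphviz

src = graphviz.Source('digraph { a -> b }', filename='hello.gv', format='png')
svg_bytes = src.pipe(format='svg')    # layout result as bytes; nothing written
path = src.render(view=False)         # saves hello.gv, renders hello.gv.png
print(path)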
Example #38
class FlowTrafficClass(IOperationByte, NumericString, IPv6):
    ID = 0x0B
    NAME = 'traffic-class'
    converter = staticmethod(converter(ClassValue))
    decoder = staticmethod(_number)
Example #39
class FlowPacketLength(IOperationByteShort, NumericString, IPv4, IPv6):
    ID = 0x0A
    NAME = 'packet-length'
    converter = staticmethod(converter(PacketLength))
    decoder = staticmethod(_number)
Example #40
class FlowFragment(IOperationByteShort, BinaryString, IPv4):
    ID = 0x0C
    NAME = 'fragment'
    FLAG = True
    converter = staticmethod(converter(Fragment.named))
    decoder = staticmethod(decoder(ord, Fragment))
Example #41
class FlowICMPCode(IOperationByte, NumericString, IPv4, IPv6):
    ID = 0x08
    NAME = 'icmp-code'
    converter = staticmethod(converter(ICMPCode.named, ICMPCode))
    decoder = staticmethod(decoder(_number, ICMPCode))
Example #42
class FlowDSCP(IOperationByte, NumericString, IPv4):
    ID = 0x0B
    NAME = 'dscp'
    converter = staticmethod(converter(DSCPValue))
    decoder = staticmethod(_number)
Example #43
class FlowDestinationPort(IOperationByteShort, NumericString, IPv4, IPv6):
    ID = 0x05
    NAME = 'destination-port'
    converter = staticmethod(converter(PortValue))
    decoder = staticmethod(_number)
Example #44
class FlowTCPFlag(IOperationByte, BinaryString, IPv4, IPv6):
    ID = 0x09
    NAME = 'tcp-flags'
    FLAG = True
    converter = staticmethod(converter(TCPFlag.named))
    decoder = staticmethod(decoder(ord, TCPFlag))
Example #45
class FlowIPProtocol(IOperationByte, NumericString, IPv4):
    ID = 0x03
    NAME = 'protocol'
    converter = staticmethod(converter(Protocol.named, Protocol))
    decoder = staticmethod(decoder(ord, Protocol))
Example #46
class FlowSourcePort(IOperationByteShort, NumericString, IPv4, IPv6):
    ID = 0x06
    NAME = 'source-port'
    converter = staticmethod(converter(PortValue))
    decoder = staticmethod(_number)
Example #47
class cviewHisto:
	"""The cviewHisto class is a data structure that python script writers
can use to dump 3d histogram data into.  It encapsulates the details of
a cview histogram.  After data collection, the class can output the 
.cview files necessary for viewing a dataset with cview.
	"""
	groups = {}
		# dict of groups of histograms
		# A group shares xticks and output directory.
		# The index into the dictionary is the 'outputDirectoryString'.
		# Each dictionary entry is a list containing references to all histos
		# in that group.

	def __init__(self, group='.', desc='None', rate='None', 
										isCumul=False, isSharedY=False):
		"""

		"""
		self.group = group # the group this histo instance belongs to
		if not cviewHisto.groups.has_key(group):
			cviewHisto.groups[group] = []
		cviewHisto.groups[group].append(self) # add self to group

		self.desc = desc # desc is the title of the histogram
		self.rate = rate # rate is the quantity label for the z-axis values
		self.xTicksList = []
		self.yTicksList = []
		self.xTickSort = cmp
		self.yTickSort = cmp # default sort function is 'cmp'
		self.isSharedY = isSharedY # we don't share yTicks by default
		self.datapoints = {} # dict of datapoints indexed by tuple (xTick,yTick)
		self.isCumulative = isCumul # default histo is not cumulative

	def __sync__(self):
		# synchronize xticks and yticks w/ group 
		for histo in cviewHisto.groups[self.group]:
			for xTick in histo.xTicksList:
				if not xTick in self.xTicksList:
					self.xTicksList.append(xTick)
			if self.isSharedY:
				for yTick in histo.yTicksList:
					if not yTick in self.yTicksList:
						self.yTicksList.append(yTick)

		# sort ticks
		self.xTicksList.sort(self.xTickSort)
		self.yTicksList.sort(self.yTickSort)


	def __str__(self): 
		self.__sync__()

		strRep = ''
		for yTick in self.yTicksList:
			for xTick in self.xTicksList:
				if self.datapoints.has_key((xTick, yTick)):
					strRep += str(self.datapoints[(xTick, yTick)]) + ' '
				else:
					strRep += str(0) + ' '
			strRep += '\n'

		return strRep

	def __initialize__(self, xTick, yTick):
		if not xTick in self.xTicksList:
			self.xTicksList.append(xTick)
		if not yTick in self.yTicksList:
			self.yTicksList.append(yTick)
		if not self.datapoints.has_key((xTick, yTick)):
			self.datapoints[(xTick, yTick)] = 0

	def setxTickSort(self, callable):
		"""Change the sort method used for the x-axis
		"""
		self.xTickSort = callable

	def setyTickSort(self, callable):
		"""Change the sort method used for the y-axis
		"""
		self.yTickSort = callable

	def merge(self, otherHisto):
		"""Merge another histo object with self.  The data of both
histograms is combined into the caller.
		"""
		for key in otherHisto.datapoints.keys():
			xTick = key[0]
			yTick = key[1]
			if not xTick in self.xTicksList:
				self.xTicksList.append(xTick)
			if not yTick in self.yTicksList:
				self.yTicksList.append(yTick)
			if not self.datapoints.has_key(key):
				self.datapoints[key] = 0
			self.datapoints[key] += otherHisto.datapoints[key]

	def writeToFiles():
		"""Write all cview histograms out to .cview files
		"""
		
		for dir, histos in cviewHisto.groups.items():
			# sync all histos in group and create index list
			indexList = []
			for histo in histos:
				histo.__sync__()
				indexList.append(histo.desc)

			# index and xtick files need to be written only once per dir of histos
			indexList.sort()
			f = open(dir + '/index', 'w')
			for index in indexList:
				f.write(index + '\n')
			f.close()

			xTicksList = histos[0].xTicksList
			f = open(dir + '/xtick', 'w')
			for xTick in xTicksList:
				f.write(struct.pack('32s', str(xTick)))
			f.close()

			# must write out all histos for a given directory group
			for histo in histos:
				pathPrefix = dir + '/' + histo.desc
				open(pathPrefix + '.rate', 'w').write(histo.rate)
				open(pathPrefix + '.desc', 'w').write(histo.desc)

				f = open(pathPrefix + '.ytick', 'w')
				for yTick in histo.yTicksList:
					f.write(struct.pack('32s', str(yTick)))
				f.close()

				dp = histo.datapoints
				f = open(pathPrefix + '.data', 'w')
				for xTick in histo.xTicksList:
					cumul = 0
					for yTick in histo.yTicksList:
						if histo.isCumulative:
							if dp.has_key((xTick, yTick)):
								cumul += dp[(xTick, yTick)]
							f.write(struct.pack('f', cumul))
						elif dp.has_key((xTick, yTick)):
							f.write(struct.pack('f', dp[(xTick, yTick)]))
						else:
							f.write(struct.pack('f', 0))
							
				f.close()
	writeToFiles = staticmethod(writeToFiles) # creates static method

	def getZ(self, xTick, yTick):
		"""Return the Z-value for the specified x,y datapoint.
		"""
		try:
			return self.datapoints[(xTick, yTick)]
		except KeyError:
			return None

	def incr(self, xTick, yTick, zValue = 1):
		"""Increment the datapoint located at (xTick, yTick) by zValue.
		"""
		self.__initialize__(xTick, yTick) # make sure datapoint is initialized
		self.datapoints[(xTick, yTick)] += zValue

	def set(self, xTick, yTick, zValue):
		"""Set the datapoint to a specific Z-value.
		"""
		self.__initialize__(xTick, yTick) # make sure datapoint is initialized
		self.datapoints[(xTick, yTick)] = zValue
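
# A minimal sketch of the collection workflow above (Python 2, to match the
# class's use of has_key/cmp). The group directory, ticks, and values are made
# up, and the output directory must already exist before writeToFiles() runs.
cpu = cviewHisto(group='cview-data', desc='cpu', rate='percent')
mem = cviewHisto(group='cview-data', desc='mem', rate='megabytes')

for timestamp in (0, 60, 120):			# xTick: sample time
	for node in ('node1', 'node2'):		# yTick: node name
		cpu.incr(timestamp, node, 5)
		mem.set(timestamp, node, 512)

cviewHisto.writeToFiles()	# emits index/xtick plus per-histo files per group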
Example #48
class FlowNextHeader(IOperationByte, NumericString, IPv6):
    ID = 0x03
    NAME = 'next-header'
    converter = staticmethod(converter(Protocol.named, Protocol))
    decoder = staticmethod(decoder(ord, Protocol))
Example #49
class Stream(mitogen.parent.Stream):
    create_child = staticmethod(mitogen.parent.hybrid_tty_create_child)
    child_is_immediate_subprocess = False

    sudo_path = 'sudo'
    username = 'root'  # sudo's default target account
    password = None
    preserve_env = False
    set_home = False
    login = False

    selinux_role = None
    selinux_type = None

    def construct(self, username=None, sudo_path=None, password=None,
                  preserve_env=None, set_home=None, sudo_args=None,
                  login=None, selinux_role=None, selinux_type=None, **kwargs):
        super(Stream, self).construct(**kwargs)
        opts = parse_sudo_flags(sudo_args or [])

        self.username = option(self.username, username, opts.user)
        self.sudo_path = option(self.sudo_path, sudo_path)
        self.password = password or None
        self.preserve_env = option(self.preserve_env,
            preserve_env, opts.preserve_env)
        self.set_home = option(self.set_home, set_home, opts.set_home)
        self.login = option(self.login, login, opts.login)
        self.selinux_role = option(self.selinux_role, selinux_role, opts.role)
        self.selinux_type = option(self.selinux_type, selinux_type, opts.type)

    def connect(self):
        super(Stream, self).connect()
        self.name = u'sudo.' + mitogen.core.to_text(self.username)

    def get_boot_command(self):
        # Note: sudo did not introduce long-format option processing until July
        # 2013, so even though we parse long-format options, supply short-form
        # to the sudo command.
        bits = [self.sudo_path, '-u', self.username]
        if self.preserve_env:
            bits += ['-E']
        if self.set_home:
            bits += ['-H']
        if self.login:
            bits += ['-i']
        if self.selinux_role:
            bits += ['-r', self.selinux_role]
        if self.selinux_type:
            bits += ['-t', self.selinux_type]

        bits = bits + ['--'] + super(Stream, self).get_boot_command()
        LOG.debug('sudo command line: %r', bits)
        return bits

    password_incorrect_msg = 'sudo password is incorrect'
    password_required_msg = 'sudo password is required'

    def _connect_input_loop(self, it):
        password_sent = False

        for buf in it:
            LOG.debug('%r: received %r', self, buf)
            if buf.endswith(self.EC0_MARKER):
                self._ec0_received()
                return
            elif PASSWORD_PROMPT in buf.lower():
                if self.password is None:
                    raise PasswordError(self.password_required_msg)
                if password_sent:
                    raise PasswordError(self.password_incorrect_msg)
                self.diag_stream.transmit_side.write(
                    mitogen.core.to_text(self.password + '\n').encode('utf-8')
                )
                password_sent = True

        raise mitogen.core.StreamError('bootstrap failed')

    def _connect_bootstrap(self):
        fds = [self.receive_side.fd]
        if self.diag_stream is not None:
            fds.append(self.diag_stream.receive_side.fd)

        it = mitogen.parent.iter_read(
            fds=fds,
            deadline=self.connect_deadline,
        )

        try:
            self._connect_input_loop(it)
        finally:
            it.close()
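
# A hedged sketch of reaching this sudo stream through mitogen's public API;
# the username and password below are placeholders.
import os

import mitogen.master

broker = mitogen.master.Broker()
router = mitogen.master.Router(broker)
try:
    context = router.sudo(username='deploy', password='s3cret')  # placeholders
    print(context.call(os.getcwd))
finally:
    broker.shutdown()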
Example #50
class Munkres:
    """
    Calculate the Munkres solution to the classical assignment problem.
    See the module documentation for usage.
    """
    def __init__(self):
        """Create a new instance"""
        self.C = None
        self.row_covered = []
        self.col_covered = []
        self.n = 0
        self.Z0_r = 0
        self.Z0_c = 0
        self.marked = None
        self.path = None

    def make_cost_matrix(profit_matrix, inversion_function):
        """
        **DEPRECATED**

        Please use the module function ``make_cost_matrix()``.
        """
        import munkres
        return munkres.make_cost_matrix(profit_matrix, inversion_function)

    make_cost_matrix = staticmethod(make_cost_matrix)
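
    # For profit matrices, a hedged sketch of the module-level helper named in
    # the deprecation note above ("profit" is a placeholder matrix):
    #
    #     import sys
    #     import munkres
    #     cost = munkres.make_cost_matrix(profit, lambda x: sys.maxsize - x)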

    def pad_matrix(self, matrix, pad_value=0):
        """
        Pad a possibly non-square matrix to make it square.

        :Parameters:
            matrix : list of lists
                matrix to pad

            pad_value : int
                value to use to pad the matrix

        :rtype: list of lists
        :return: a new, possibly padded, matrix
        """
        max_columns = 0
        total_rows = len(matrix)

        for row in matrix:
            max_columns = max(max_columns, len(row))

        total_rows = max(max_columns, total_rows)

        new_matrix = []
        for row in matrix:
            row_len = len(row)
            new_row = row[:]
            if total_rows > row_len:
                # Row too short. Pad it with pad_value.
                new_row += [pad_value] * (total_rows - row_len)
            new_matrix += [new_row]

        while len(new_matrix) < total_rows:
            new_matrix += [[pad_value] * total_rows]

        return new_matrix

    def compute(self, cost_matrix):
        """
        Compute the indexes for the lowest-cost pairings between rows and
        columns in the matrix. Returns a list of (row, column) tuples
        that can be used to traverse the matrix.

        :Parameters:
            cost_matrix : list of lists
                The cost matrix. If this cost matrix is not square, it
                will be padded with zeros, via a call to ``pad_matrix()``.
                (This method does *not* modify the caller's matrix. It
                operates on a copy of the matrix.)

                **WARNING**: This code handles square and rectangular
                matrices. It does *not* handle irregular matrices.

        :rtype: list
        :return: A list of ``(row, column)`` tuples that describe the lowest
                 cost path through the matrix

        """
        self.C = self.pad_matrix(cost_matrix)
        self.n = len(self.C)
        self.original_length = len(cost_matrix)
        self.original_width = len(cost_matrix[0])
        self.row_covered = [False for i in range(self.n)]
        self.col_covered = [False for i in range(self.n)]
        self.Z0_r = 0
        self.Z0_c = 0
        self.path = self.__make_matrix(self.n * 2, 0)
        self.marked = self.__make_matrix(self.n, 0)

        done = False
        step = 1

        steps = {
            1: self.__step1,
            2: self.__step2,
            3: self.__step3,
            4: self.__step4,
            5: self.__step5,
            6: self.__step6
        }

        while not done:
            try:
                func = steps[step]
                step = func()
            except KeyError:
                done = True

        # Look for the starred columns
        results = []
        for i in range(self.original_length):
            for j in range(self.original_width):
                if self.marked[i][j] == 1:
                    results += [(i, j)]

        return results

    def __copy_matrix(self, matrix):
        """Return an exact copy of the supplied matrix"""
        return copy.deepcopy(matrix)

    def __make_matrix(self, n, val):
        """Create an *n*x*n* matrix, populating it with the specific value."""
        matrix = []
        for i in range(n):
            matrix += [[val for j in range(n)]]
        return matrix

    def __step1(self):
        """
        For each row of the matrix, find the smallest element and
        subtract it from every element in its row. Go to Step 2.
        """
        n = self.n
        for i in range(n):
            # Find the minimum value for this row and subtract that minimum
            # from every element in the row.
            minval = min(self.C[i])
            for j in range(n):
                self.C[i][j] -= minval

        return 2

    def __step2(self):
        """
        Find a zero (Z) in the resulting matrix. If there is no starred
        zero in its row or column, star Z. Repeat for each element in the
        matrix. Go to Step 3.
        """
        n = self.n
        for i in range(n):
            for j in range(n):
                if (self.C[i][j] == 0) and \
                   (not self.col_covered[j]) and \
                   (not self.row_covered[i]):
                    self.marked[i][j] = 1
                    self.col_covered[j] = True
                    self.row_covered[i] = True

        self.__clear_covers()
        return 3

    def __step3(self):
        """
        Cover each column containing a starred zero. If K columns are
        covered, the starred zeros describe a complete set of unique
        assignments. In this case, Go to DONE, otherwise, Go to Step 4.
        """
        n = self.n
        count = 0
        for i in range(n):
            for j in range(n):
                if self.marked[i][j] == 1:
                    self.col_covered[j] = True
                    count += 1

        if count >= n:
            step = 7  # done
        else:
            step = 4

        return step

    def __step4(self):
        """
        Find a noncovered zero and prime it. If there is no starred zero
        in the row containing this primed zero, Go to Step 5. Otherwise,
        cover this row and uncover the column containing the starred
        zero. Continue in this manner until there are no uncovered zeros
        left. Save the smallest uncovered value and Go to Step 6.
        """
        step = 0
        done = False
        row = -1
        col = -1
        star_col = -1
        while not done:
            (row, col) = self.__find_a_zero()
            if row < 0:
                done = True
                step = 6
            else:
                self.marked[row][col] = 2
                star_col = self.__find_star_in_row(row)
                if star_col >= 0:
                    col = star_col
                    self.row_covered[row] = True
                    self.col_covered[col] = False
                else:
                    done = True
                    self.Z0_r = row
                    self.Z0_c = col
                    step = 5

        return step

    def __step5(self):
        """
        Construct a series of alternating primed and starred zeros as
        follows. Let Z0 represent the uncovered primed zero found in Step 4.
        Let Z1 denote the starred zero in the column of Z0 (if any).
        Let Z2 denote the primed zero in the row of Z1 (there will always
        be one). Continue until the series terminates at a primed zero
        that has no starred zero in its column. Unstar each starred zero
        of the series, star each primed zero of the series, erase all
        primes and uncover every line in the matrix. Return to Step 3
        """
        count = 0
        path = self.path
        path[count][0] = self.Z0_r
        path[count][1] = self.Z0_c
        done = False
        while not done:
            row = self.__find_star_in_col(path[count][1])
            if row >= 0:
                count += 1
                path[count][0] = row
                path[count][1] = path[count - 1][1]
            else:
                done = True

            if not done:
                col = self.__find_prime_in_row(path[count][0])
                count += 1
                path[count][0] = path[count - 1][0]
                path[count][1] = col

        self.__convert_path(path, count)
        self.__clear_covers()
        self.__erase_primes()
        return 3

    def __step6(self):
        """
        Add the value found in Step 4 to every element of each covered
        row, and subtract it from every element of each uncovered column.
        Return to Step 4 without altering any stars, primes, or covered
        lines.
        """
        minval = self.__find_smallest()
        for i in range(self.n):
            for j in range(self.n):
                if self.row_covered[i]:
                    self.C[i][j] += minval
                if not self.col_covered[j]:
                    self.C[i][j] -= minval
        return 4

    def __find_smallest(self):
        """Find the smallest uncovered value in the matrix."""
        minval = sys.maxsize
        for i in range(self.n):
            for j in range(self.n):
                if (not self.row_covered[i]) and (not self.col_covered[j]):
                    if minval > self.C[i][j]:
                        minval = self.C[i][j]
        return minval

    def __find_a_zero(self):
        """Find the first uncovered element with value 0"""
        row = -1
        col = -1
        i = 0
        n = self.n
        done = False

        while not done:
            j = 0
            while True:
                if (self.C[i][j] == 0) and \
                   (not self.row_covered[i]) and \
                   (not self.col_covered[j]):
                    row = i
                    col = j
                    done = True
                j += 1
                if j >= n:
                    break
            i += 1
            if i >= n:
                done = True

        return (row, col)

    def __find_star_in_row(self, row):
        """
        Find the first starred element in the specified row. Returns
        the column index, or -1 if no starred element was found.
        """
        col = -1
        for j in range(self.n):
            if self.marked[row][j] == 1:
                col = j
                break

        return col

    def __find_star_in_col(self, col):
        """
        Find the first starred element in the specified column. Returns
        the row index, or -1 if no starred element was found.
        """
        row = -1
        for i in range(self.n):
            if self.marked[i][col] == 1:
                row = i
                break

        return row

    def __find_prime_in_row(self, row):
        """
        Find the first primed element in the specified row. Returns
        the column index, or -1 if no primed element was found.
        """
        col = -1
        for j in range(self.n):
            if self.marked[row][j] == 2:
                col = j
                break

        return col

    def __convert_path(self, path, count):
        for i in range(count + 1):
            if self.marked[path[i][0]][path[i][1]] == 1:
                self.marked[path[i][0]][path[i][1]] = 0
            else:
                self.marked[path[i][0]][path[i][1]] = 1

    def __clear_covers(self):
        """Clear all covered matrix cells"""
        for i in range(self.n):
            self.row_covered[i] = False
            self.col_covered[i] = False

    def __erase_primes(self):
        """Erase all prime markings"""
        for i in range(self.n):
            for j in range(self.n):
                if self.marked[i][j] == 2:
                    self.marked[i][j] = 0
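A minimal usage sketch (hypothetical, not shown in the snippet): the steps
above implement the classic Munkres/Hungarian assignment algorithm, normally
driven through a public compute(cost_matrix) method as in the well-known
munkres package.

cost_matrix = [[5, 9, 1],
               [10, 3, 2],
               [8, 7, 4]]
m = Munkres()  # assumed name of the enclosing class
indexes = m.compute(cost_matrix)   # e.g. [(0, 2), (1, 1), (2, 0)]
total = sum(cost_matrix[r][c] for r, c in indexes)  # 12, the lowest total cost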
Example #51
class PluginRegister(object):
    """
    PluginRegister is a Singleton which holds plugin data

    .. attribute:: stable_only
        Bool, include stable plugins only or not. Default True
    """
    __instance = None
    
    def get_instance():
        """ Use this function to get the instance of the PluginRegister """
        if PluginRegister.__instance is None:
            PluginRegister.__instance = 1 # Set to 1 for __init__()
            PluginRegister.__instance = PluginRegister()
        return PluginRegister.__instance
    get_instance = staticmethod(get_instance)
            
    def __init__(self):
        """ This function should only be run once by get_instance() """
        if PluginRegister.__instance != 1:
            raise Exception("This class is a singleton. "
                            "Use the get_instance() method")
        self.stable_only = True
        if __debug__:
            self.stable_only = False
        self.__plugindata  = []

    def add_plugindata(self, plugindata):
        self.__plugindata.append(plugindata)
        
    def scan_dir(self, dir):
        """
        The directory will be scanned for plugin registration code, which will
        be loaded into :class:`PluginData` objects if they satisfy some checks.
        
        :returns: A list with :class:`PluginData` objects
        """
        # if the directory does not exist, do nothing
        if not (os.path.isdir(dir) or os.path.islink(dir)):
            return []
        
        ext = r".gpr.py"
        extlen = -len(ext)
        pymod = re.compile(r"^(.*)\.py$")
        
        for filename in os.listdir(dir):
            name = os.path.split(filename)[1]
            if name[extlen:] != ext:
                continue
            lenpd = len(self.__plugindata)
            full_filename = os.path.join(dir, filename)
            if sys.version_info[0] < 3:
                fd = open(full_filename, "r")
            else:
                fd = io.open(full_filename, "r", encoding='utf-8')
            stream = fd.read()
            fd.close()
            if os.path.exists(os.path.join(os.path.dirname(full_filename),
                                           'locale')):
                try:
                    local_gettext = glocale.get_addon_translator(full_filename).gettext
                except ValueError:
                    print(_('WARNING: Plugin %(plugin_name)s has no translation'
                            ' for any of your configured languages, using US'
                            ' English instead') %
                          {'plugin_name' : filename.split('.')[0] })
                    local_gettext = glocale.translation.gettext
            else:
                local_gettext = glocale.translation.gettext
            try:
                #execfile(full_filename,
                exec (compile(stream, filename, 'exec'),
                      make_environment(_=local_gettext), {})
            except ValueError as msg:
                print(_('ERROR: Failed reading plugin registration %(filename)s') % \
                            {'filename' : filename})
                print(msg)
                self.__plugindata = self.__plugindata[:lenpd]
            except:
                print(_('ERROR: Failed reading plugin registration %(filename)s') % \
                            {'filename' : filename})
                print("".join(traceback.format_exception(*sys.exc_info())))
                self.__plugindata = self.__plugindata[:lenpd]
            #check if: 
            #  1. plugin exists, if not remove, otherwise set module name
            #  2. plugin not stable, if stable_only=True, remove
            #  3. TOOL_DEBUG only if __debug__ True
            rmlist = []
            ind = lenpd-1
            for plugin in self.__plugindata[lenpd:]:
                ind += 1
                plugin.directory = dir
                if not valid_plugin_version(plugin.gramps_target_version):
                    print(_('ERROR: Plugin file %(filename)s has a version of '
                            '"%(gramps_target_version)s" which is invalid for Gramps '
                            '"%(gramps_version)s".') %
                          {'filename': os.path.join(dir, plugin.fname),
                           'gramps_version': GRAMPSVERSION,
                           'gramps_target_version': plugin.gramps_target_version,
                           })
                    rmlist.append(ind)
                    continue
                if plugin.status != STABLE and self.stable_only:
                    rmlist.append(ind)
                    continue
                if plugin.ptype == TOOL and plugin.category == TOOL_DEBUG \
                and not __debug__:
                    rmlist.append(ind)
                    continue
                if plugin.fname is None:
                    continue
                match = pymod.match(plugin.fname)
                if not match:
                    rmlist.append(ind)
                    print(_('ERROR: Wrong python file %(filename)s in register file '
                            '%(regfile)s')  % {
                               'filename': os.path.join(dir, plugin.fname),
                               'regfile': os.path.join(dir, filename)
                            })
                    continue
                if not os.path.isfile(os.path.join(dir, plugin.fname)):
                    rmlist.append(ind)
                    print(_('ERROR: Python file %(filename)s in register file '
                            '%(regfile)s does not exist')  % {
                               'filename': os.path.join(dir, plugin.fname),
                               'regfile': os.path.join(dir, filename)
                            })
                    continue
                module = match.groups()[0]
                plugin.mod_name = module
                plugin.fpath = dir
            rmlist.reverse()
            for ind in rmlist:
                del self.__plugindata[ind]

    def get_plugin(self, id):
        """
        Return the :class:`PluginData` for the plugin with id
        """
        matches = [x for x in self.__plugindata if x.id == id]
        matches.sort(key=lambda x: version(x.version))
        if len(matches) > 0:
            return matches[-1]
        return None

    def type_plugins(self, ptype):
        """
        Return a list of :class:`PluginData` that are of type ptype
        """
        return [self.get_plugin(id) for id in 
                set([x.id for x in self.__plugindata if x.ptype == ptype])]

    def report_plugins(self, gui=True):
        """
        Return a list of gui or cli :class:`PluginData` that are of type REPORT

        :param gui: bool, if True then gui plugin, otherwise cli plugin
        """
        if gui:
            return [x for x in self.type_plugins(REPORT) if REPORT_MODE_GUI
                                        in x.report_modes]
        else:
            return [x for x in self.type_plugins(REPORT) if REPORT_MODE_CLI
                                        in x.report_modes]

    def tool_plugins(self, gui=True):
        """
        Return a list of :class:`PluginData` that are of type TOOL
        """
        if gui:
            return [x for x in self.type_plugins(TOOL) if TOOL_MODE_GUI
                                        in x.tool_modes]
        else:
            return [x for x in self.type_plugins(TOOL) if TOOL_MODE_CLI
                                        in x.tool_modes]

    
    def bookitem_plugins(self):
        """
        Return a list of REPORT :class:`PluginData` that can be used as a
        bookitem
        """
        return [x for x in self.type_plugins(REPORT) if REPORT_MODE_BKI
                                        in x.report_modes]

    def quickreport_plugins(self):
        """
        Return a list of :class:`PluginData` that are of type QUICKREPORT
        """
        return self.type_plugins(QUICKREPORT)

    def import_plugins(self):
        """
        Return a list of :class:`PluginData` that are of type IMPORT
        """
        return self.type_plugins(IMPORT)

    def export_plugins(self):
        """
        Return a list of :class:`PluginData` that are of type EXPORT
        """
        return self.type_plugins(EXPORT)

    def docgen_plugins(self):
        """
        Return a list of :class:`PluginData` that are of type DOCGEN
        """
        return self.type_plugins(DOCGEN)

    def general_plugins(self, category=None):
        """
        Return a list of :class:`PluginData` that are of type GENERAL
        """
        plugins = self.type_plugins(GENERAL)
        if category:
            return [plugin for plugin in plugins 
                    if plugin.category == category]
        return plugins

    def mapservice_plugins(self):
        """
        Return a list of :class:`PluginData` that are of type MAPSERVICE
        """
        return self.type_plugins(MAPSERVICE)

    def view_plugins(self):
        """
        Return a list of :class:`PluginData` that are of type VIEW
        """
        return self.type_plugins(VIEW)

    def relcalc_plugins(self):
        """
        Return a list of :class:`PluginData` that are of type RELCALC
        """
        return self.type_plugins(RELCALC)

    def gramplet_plugins(self):
        """
        Return a list of :class:`PluginData` that are of type GRAMPLET
        """
        return self.type_plugins(GRAMPLET)
        
    def sidebar_plugins(self):
        """
        Return a list of :class:`PluginData` that are of type SIDEBAR
        """
        return self.type_plugins(SIDEBAR)

    def filter_load_on_reg(self):
        """
        Return a list of :class:`PluginData` that have load_on_reg == True
        """
        return [self.get_plugin(id) for id in
                set([x.id for x in self.__plugindata
                     if x.load_on_reg])]
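A short usage sketch (the directory path is an assumption), exercising only
the methods defined above:

preg = PluginRegister.get_instance()
preg.scan_dir('/usr/share/gramps/plugins')
for pdata in preg.report_plugins(gui=False):
    print(pdata.id, pdata.version)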
Example #52
class numpyHisto:
	"""The numpyHisto class behaves the same and has the same interface as
cviewHisto but has a different implementation that uses the
numpy module.  The numpy module uses C-arrays for storing the
histogram data and consequently uses about a third less memory.
The trade-off is that numpyHisto is just a touch slower because
of the overhead in translating strings to integer indices.
	"""
	groups = {}

	def __init__(self, group = '.', desc='None', rate='None',
										isCumul=False, isSharedY=False):
		self.group = group # the group this histo instance belongs to
		if group not in numpyHisto.groups:
			numpyHisto.groups[group] = []
		numpyHisto.groups[group].append(self) # add self to group

		self.desc = desc # desc is the title of the histogram
		self.rate = rate # rate is the quantity label for the z-axis values
		self.xTicks = {} # tick => numpy index
		self.yTicks = {}
		self.xTickSort = cmp
		self.yTickSort = cmp # default sort function is 'cmp'
		self.isSharedY = isSharedY # don't share yTicks by default
		self.isCumulative = isCumul # default histo is not cumulative

		self.datapoints = numpy.zeros((10, 10), float)
		self.c_datapoints = None # gets set to a numpy array if isCumul

	def __sync__(self):
		# make sure number of data rows/columns are synced with
		#  other histos from group

		# synchronize xticks and yticks w/ group
		for histo in numpyHisto.groups[self.group]:
			for xTick in histo.xTicks.keys():
				if xTick not in self.xTicks:
					self.xTicks[xTick] = len(self.xTicks)
					if len(self.xTicks) > self.datapoints.shape[1]:
						self.__extendColumns__()

			if self.isSharedY:
				for yTick in histo.yTicks.keys():
					if yTick not in self.yTicks:
						self.yTicks[yTick] = len(self.yTicks)
						if len(self.yTicks) > self.datapoints.shape[0]:
							self.__extendRows__()

		# create array for storing cumulative data
		if self.isCumulative:
			self.c_datapoints = numpy.array(self.datapoints)
			for (t, col) in sorted(self.xTicks.items(), self.xTickSort):
				cumul = 0
				for (t, row) in sorted(self.yTicks.items(), self.yTickSort):
					cumul += self.datapoints[(row, col)]
					self.c_datapoints[(row, col)] = cumul

	def __str__(self): # print out histogram data
		self.__sync__()
		if self.isCumulative:
			dp = self.c_datapoints
		else:
			dp = self.datapoints
		return dp.__str__()

	def __getIndex__(self, xTick, yTick):
		# remember x-axis runs horizontally and so spans across columns
		# of the numpy array, y-axis spans across the rows of the array
		(rowIndex, colIndex) = (None, None)
		try:
			colIndex = self.xTicks[xTick]
		except KeyError:
			pass
		try:
			rowIndex = self.yTicks[yTick]
		except KeyError:
			pass

		return (rowIndex, colIndex)

	def __extendColumns__(self):
		numRows = self.datapoints.shape[0]
		colsToAdd = self.datapoints.shape[1] # double number of columns
		newColumns = numpy.zeros((numRows, colsToAdd), float)
		self.datapoints = numpy.hstack((self.datapoints, newColumns))

	def __extendRows__(self):
		numColumns = self.datapoints.shape[1]
		rowsToAdd = self.datapoints.shape[0] # double number of rows
		newRows = numpy.zeros((rowsToAdd, numColumns), float)
		self.datapoints = numpy.vstack((self.datapoints, newRows))

	def __addTick__(self, xTick, yTick):
		if xTick is not None:
			self.xTicks[xTick] = len(self.xTicks)
			if len(self.xTicks) > self.datapoints.shape[1]:
				self.__extendColumns__()
		if yTick is not None:
			self.yTicks[yTick] = len(self.yTicks)
			if len(self.yTicks) > self.datapoints.shape[0]:
				self.__extendRows__()

	def __initialize__(self, xTick, yTick):
		(rowIndex, colIndex) = self.__getIndex__(xTick, yTick)
		if rowIndex is None and colIndex is None:
			self.__addTick__(xTick, yTick)
			# since tick was just appended, index = list.len - 1
			rowIndex = len(self.yTicks) - 1
			colIndex = len(self.xTicks) - 1
		elif rowIndex is None:
			self.__addTick__(None, yTick)
			rowIndex = len(self.yTicks) - 1
		elif colIndex is None:
			self.__addTick__(xTick, None)
			colIndex = len(self.xTicks) - 1
		return (rowIndex, colIndex)

	def setxTickSort(self, callable):
		self.xTickSort = callable # accessed through class name

	def setyTickSort(self, callable):
		self.yTickSort = callable

	def writeToFiles():
		for dir, histos in numpyHisto.groups.items():
			# sync all histos in group and create index list
			indexList = []
			for histo in histos:
				histo.__sync__()
				indexList.append(histo.desc)

			# index and xtick files need written only once per dir of histos
			indexList.sort()
			f = open(dir + '/index', 'w')
			for index in indexList:
				f.write(index + '\n')
			f.close()

			xTicksList = sorted(histos[0].xTicks.keys(), histos[0].xTickSort)
			f = open(dir + '/xtick', 'w')
			for xTick in xTicksList:
				f.write(struct.pack('32s', str(xTick)))
			f.close()

			# must write out all histos for a given directory group
			for histo in histos:
				pathPrefix = dir + '/' + histo.desc
				open(pathPrefix + '.rate', 'w').write(histo.rate)
				open(pathPrefix + '.desc', 'w').write(histo.desc)

				yTickItems = sorted(histo.yTicks.items(), histo.yTickSort)

				f = open(pathPrefix + '.ytick', 'w')
				for (yTick, index) in yTickItems:
					f.write(struct.pack('32s', str(yTick)))
				f.close()

				if histo.isCumulative:
					dp = histo.c_datapoints
				else:
					dp = histo.datapoints

				f = open(pathPrefix + '.data', 'w')
				for (x, col) in sorted(histo.xTicks.items(), histo.xTickSort):
					for (y, row) in yTickItems:
						f.write(struct.pack('f', dp[(row, col)]))
				f.close()

	writeToFiles = staticmethod(writeToFiles) # creates static method

	def getZ(self, xTick, yTick):
		index = self.__getIndex__(xTick, yTick)
		if index[0] is None or index[1] is None:
			return None
		else:
			return self.datapoints[index]

	def merge(self, otherHisto): # will merge another histo object w/ current
		for (x, col) in otherHisto.xTicks.items():
			for (y, row) in otherHisto.yTicks.items():
				(rowIndex, colIndex) = self.__getIndex__(x, y)
				if colIndex is None:
					colIndex = self.xTicks[x] = len(self.xTicks)
					xTicksCount = len(self.xTicks)
					if xTicksCount > self.datapoints.shape[1]:
						self.__extendColumns__()
				if rowIndex is None:
					rowIndex = self.yTicks[y] = len(self.yTicks)
					yTicksCount = len(self.yTicks)
					if yTicksCount > self.datapoints.shape[0]:
						self.__extendRows__()

				index = (rowIndex, colIndex)
				self.datapoints[index] += otherHisto.datapoints[(row, col)]

	def incr(self, xTick, yTick, zValue = 1):
		index = self.__initialize__(xTick, yTick)
		self.datapoints[index] += zValue

	def set(self, xTick, yTick, zValue):
		index = self.__initialize__(xTick, yTick)
		self.datapoints[index] = zValue
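A brief usage sketch (hypothetical ticks and values; like the class itself,
it assumes a Python 2 runtime):

h = numpyHisto(group='/tmp/histos', desc='jobs', rate='count')
h.incr('2016-01', 'queued')           # bump a cell by 1
h.set('2016-01', 'running', 5)        # set a cell outright
print(h.getZ('2016-01', 'running'))   # -> 5.0
numpyHisto.writeToFiles()             # dump every registered group to disk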
Example #53
class UpdateDependencies(BaseSalesforceMetadataApiTask):
    api_class = ApiDeploy
    name = "UpdateDependencies"
    task_options = {
        "dependencies": {
            "description":
            "List of dependencies to update. Defaults to project__dependencies. "
            "Each dependency is a dict with either 'github' set to a github repository URL "
            "or 'namespace' set to a Salesforce package namespace. "
            "Github dependencies may include 'tag' to install a particular git ref. "
            "Package dependencies may include 'version' to install a particular version."
        },
        "ignore_dependencies": {
            "description":
            "List of dependencies to be ignored, including if they are present as transitive "
            "dependencies. Dependencies can be specified using the 'github' or 'namespace' keys (all other keys "
            "are not used). Note that this can cause installations to fail if required prerequisites are not available."
        },
        "namespaced_org": {
            "description":
            "If True, the changes namespace token injection on any dependencies so tokens %%%NAMESPACED_ORG%%% and ___NAMESPACED_ORG___ will get replaced with the namespace.  The default is false causing those tokens to get stripped and replaced with an empty string.  Set this if deploying to a namespaced scratch org or packaging org."
        },
        "purge_on_delete": {
            "description":
            "Sets the purgeOnDelete option for the deployment. Defaults to True"
        },
        "include_beta": {
            "description":
            "Install the most recent release, even if beta. Defaults to False."
        },
        "allow_newer": {
            "description":
            "If the org already has a newer release, use it. Defaults to True."
        },
        "allow_uninstalls": {
            "description":
            "Allow uninstalling a beta release or newer final release "
            "in order to install the requested version. Defaults to False. "
            "Warning: Enabling this may destroy data."
        },
        "security_type": {
            "description":
            "Which users to install packages for (FULL = all users, NONE = admins only)"
        },
    }
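    # An illustrative "dependencies" value (hypothetical repository URL and
    # namespaces) in the shape the option descriptions above imply:
    #
    #   [{"github": "https://github.com/SalesforceFoundation/NPSP", "tag": "rel/3.163"},
    #    {"namespace": "npe01", "version": "3.11"}]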

    def _init_options(self, kwargs):
        super(UpdateDependencies, self)._init_options(kwargs)
        self.options["purge_on_delete"] = process_bool_arg(
            self.options.get("purge_on_delete", True))
        self.options["namespaced_org"] = process_bool_arg(
            self.options.get("namespaced_org", False))
        self.options["include_beta"] = process_bool_arg(
            self.options.get("include_beta", False))
        self.options["dependencies"] = (
            self.options.get("dependencies")
            or self.project_config.project__dependencies)
        self.options["allow_newer"] = process_bool_arg(
            self.options.get("allow_newer", True))
        self.options["allow_uninstalls"] = process_bool_arg(
            self.options.get("allow_uninstalls", False))
        self.options["security_type"] = self.options.get(
            "security_type", "FULL")
        if self.options["security_type"] not in ("FULL", "NONE", "PUSH"):
            raise TaskOptionsError(
                f"Unsupported value for security_type: {self.options['security_type']}"
            )

        if "ignore_dependencies" in self.options:
            if any("github" not in dep and "namespace" not in dep
                   for dep in self.options["ignore_dependencies"]):
                raise TaskOptionsError(
                    "An invalid dependency was specified for ignore_dependencies."
                )

    def _run_task(self):
        if not self.options["dependencies"]:
            self.logger.info("Project has no dependencies, doing nothing")
            return

        if self.options["include_beta"] and not self.org_config.scratch:
            raise TaskOptionsError(
                "Target org must be a scratch org when `include_beta` is true."
            )

        self.logger.info("Preparing static dependencies map")
        dependencies = self.project_config.get_static_dependencies(
            self.options["dependencies"],
            include_beta=self.options["include_beta"],
            ignore_deps=self.options.get("ignore_dependencies"),
        )

        self.installed = None
        self.uninstall_queue = []
        self.install_queue = []

        self.logger.info("Dependencies:")
        for line in self.project_config.pretty_dependencies(dependencies):
            self.logger.info(line)

        self._process_dependencies(dependencies)

        # Reverse the uninstall queue
        self.uninstall_queue.reverse()

        self._uninstall_dependencies()
        self._install_dependencies()
        self.org_config.reset_installed_packages()

    def _process_dependencies(self, dependencies):
        for dependency in dependencies:
            # Process child dependencies
            dependency_uninstalled = False
            subdependencies = dependency.get("dependencies")
            if subdependencies:
                count_uninstall = len(self.uninstall_queue)
                self._process_dependencies(subdependencies)
                if count_uninstall != len(self.uninstall_queue):
                    dependency_uninstalled = True

            # Process namespace dependencies (managed packages)
            if "namespace" in dependency:
                self._process_namespace_dependency(dependency,
                                                   dependency_uninstalled)
            else:
                # zip_url or repo dependency
                self.install_queue.append(dependency)

        if self.uninstall_queue and not self.options["allow_uninstalls"]:
            raise TaskOptionsError(
                "Updating dependencies would require uninstalling these packages "
                "but uninstalls are not enabled: {}".format(", ".join(
                    dep["namespace"] for dep in self.uninstall_queue)))

    def _process_namespace_dependency(self,
                                      dependency,
                                      dependency_uninstalled=None):
        dependency_version = str(dependency["version"])

        if self.installed is None:
            self.installed = self._get_installed()

        if dependency["namespace"] in self.installed:
            # Some version is installed, check what to do
            installed_version = self.installed[dependency["namespace"]]
            required_version = LooseVersion(dependency_version)
            installed_version = LooseVersion(installed_version)

            if installed_version > required_version and self.options[
                    "allow_newer"]:
                # Avoid downgrading if allow_newer = True
                required_version = installed_version

            if required_version == installed_version and not dependency_uninstalled:
                self.logger.info("  {}: version {} already installed".format(
                    dependency["namespace"], dependency_version))
                return

            if "Beta" in installed_version.vstring:
                # Always uninstall Beta versions if required is different
                self.uninstall_queue.append(dependency)
                self.logger.info("  {}: Uninstall {} to upgrade to {}".format(
                    dependency["namespace"],
                    installed_version,
                    dependency["version"],
                ))
            elif dependency_uninstalled:
                # If a dependency of this one needs to be uninstalled, always uninstall the package
                self.uninstall_queue.append(dependency)
                self.logger.info(
                    "  {}: Uninstall and Reinstall to allow downgrade of dependency"
                    .format(dependency["namespace"]))
            elif required_version < installed_version:
                # Uninstall to downgrade
                self.uninstall_queue.append(dependency)
                self.logger.info(
                    "  {}: Downgrade from {} to {} (requires uninstall/install)"
                    .format(
                        dependency["namespace"],
                        installed_version,
                        dependency["version"],
                    ))
            else:
                self.logger.info("  {}: Upgrade from {} to {}".format(
                    dependency["namespace"],
                    installed_version,
                    dependency["version"],
                ))
            self.install_queue.append(dependency)
        else:
            # Just a regular install
            self.logger.info("  {}: Install version {}".format(
                dependency["namespace"], dependency["version"]))
            self.install_queue.append(dependency)

    def _get_installed(self):
        self.logger.info("Retrieving list of packages from target org")
        api = ApiRetrieveInstalledPackages(self)
        return api()

    def _uninstall_dependencies(self):
        for dependency in self.uninstall_queue:
            self._uninstall_dependency(dependency)

    def _install_dependencies(self):
        for dependency in self.install_queue:
            self._install_dependency(dependency)

    # hooks for tests
    _download_extract_github = staticmethod(download_extract_github)
    _download_extract_zip = staticmethod(download_extract_zip)

    def _install_dependency(self, dependency):
        package_zip = None
        if "zip_url" in dependency or "repo_name" in dependency:
            if "zip_url" in dependency:
                self.logger.info(
                    "Deploying unmanaged metadata from /{} of {}".format(
                        dependency["subfolder"], dependency["zip_url"]))
                package_zip = self._download_extract_zip(
                    dependency["zip_url"],
                    subfolder=dependency.get("subfolder"))
            elif "repo_name" in dependency:
                self.logger.info(
                    "Deploying unmanaged metadata from /{} of {}/{}".format(
                        dependency["subfolder"],
                        dependency["repo_owner"],
                        dependency["repo_name"],
                    ))
                gh_for_repo = self.project_config.get_github_api(
                    dependency["repo_owner"], dependency["repo_name"])
                package_zip = self._download_extract_github(
                    gh_for_repo,
                    dependency["repo_owner"],
                    dependency["repo_name"],
                    dependency["subfolder"],
                    ref=dependency.get("ref"),
                )

            if package_zip:
                if dependency.get("namespace_tokenize"):
                    self.logger.info(
                        "Replacing namespace prefix {}__ in files and filenames with namespace token strings"
                        .format("{}__".format(
                            dependency["namespace_tokenize"])))
                    package_zip = process_text_in_zipfile(
                        package_zip,
                        functools.partial(
                            tokenize_namespace,
                            namespace=dependency["namespace_tokenize"],
                            logger=self.logger,
                        ),
                    )

                if dependency.get("namespace_inject"):
                    self.logger.info(
                        "Replacing namespace tokens with {}".format(
                            "{}__".format(dependency["namespace_inject"])))
                    package_zip = process_text_in_zipfile(
                        package_zip,
                        functools.partial(
                            inject_namespace,
                            namespace=dependency["namespace_inject"],
                            managed=not dependency.get("unmanaged"),
                            namespaced_org=self.options["namespaced_org"],
                            logger=self.logger,
                        ),
                    )

                if dependency.get("namespace_strip"):
                    self.logger.info(
                        "Removing namespace prefix {}__ from all files and filenames"
                        .format("{}__".format(dependency["namespace_strip"])))
                    package_zip = process_text_in_zipfile(
                        package_zip,
                        functools.partial(
                            strip_namespace,
                            namespace=dependency["namespace_strip"],
                            logger=self.logger,
                        ),
                    )

                package_zip = ZipfilePackageZipBuilder(package_zip)()

            elif "namespace" in dependency:
                self.logger.info("Installing {} version {}".format(
                    dependency["namespace"], dependency["version"]))
                package_zip = InstallPackageZipBuilder(
                    dependency["namespace"],
                    dependency["version"],
                    securityType=self.options["security_type"],
                )()
        if not package_zip:
            raise TaskOptionsError(f"Could not find package for {dependency}")
        api = self.api_class(self,
                             package_zip,
                             purge_on_delete=self.options["purge_on_delete"])
        return api()

    def _uninstall_dependency(self, dependency):
        self.logger.info("Uninstalling {}".format(dependency["namespace"]))
        package_zip = UninstallPackageZipBuilder(
            dependency["namespace"],
            self.project_config.project__package__api_version)
        api = self.api_class(self,
                             package_zip(),
                             purge_on_delete=self.options["purge_on_delete"])
        return api()

    def freeze(self, step):
        ui_options = self.task_config.config.get("ui_options", {})
        dependencies = self.project_config.get_static_dependencies(
            self.options["dependencies"],
            include_beta=self.options["include_beta"],
            ignore_deps=self.options.get("ignore_dependencies"),
        )
        steps = []
        for i, dependency in enumerate(self._flatten(dependencies), start=1):
            name = dependency.pop("name", None)
            if "namespace" in dependency:
                kind = "managed"
                name = name or "Install {} {}".format(dependency["namespace"],
                                                      dependency["version"])
            else:
                kind = "metadata"
                name = name or "Deploy {}".format(dependency["subfolder"])
            task_config = {
                "options": self.options.copy(),
                "checks": self.task_config.checks or [],
            }
            task_config["options"]["dependencies"] = [dependency]
            ui_step = {"name": name, "kind": kind, "is_required": True}
            ui_step.update(ui_options.get(i, {}))
            ui_step.update({
                "path": "{}.{}".format(step.path, i),
                "step_num": "{}.{}".format(step.step_num, i),
                "task_class": self.task_config.class_path,
                "task_config": task_config,
                "source": step.project_config.source.frozenspec,
            })
            steps.append(ui_step)
        return steps

    def _flatten(self, dependencies):
        result = []
        for dependency in dependencies:
            subdeps = dependency.pop("dependencies", [])
            for subdep in self._flatten(subdeps):
                if subdep not in result:
                    result.append(subdep)
            if dependency not in result:
                result.append(dependency)
        return result
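A toy illustration (hypothetical data) of what _flatten produces: nested
dependencies are emitted depth-first, so subdependencies always precede the
package that requires them, and duplicates are dropped.

deps = [
    {"namespace": "base", "version": "1.1",
     "dependencies": [{"namespace": "util", "version": "2.0"}]},
]
# _flatten(deps) -> [{'namespace': 'util', 'version': '2.0'},
#                    {'namespace': 'base', 'version': '1.1'}]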
Example #54
    def test_metrics(self):
        session_factory = self.replay_flight_data("test_lambda_policy_metrics")
        from c7n.policy import PolicyCollection

        self.patch(
            PolicyCollection,
            "session_factory",
            staticmethod(lambda x=None: session_factory),
        )

        yaml_file = self.write_policy_file({
            "policies": [{
                "name":
                "ec2-tag-compliance-v6",
                "resource":
                "ec2",
                "mode": {
                    "type": "ec2-instance-state",
                    "events": ["running"]
                },
                "filters": [
                    {
                        "tag:custodian_status": "absent"
                    },
                    {
                        "or": [
                            {
                                "tag:App": "absent"
                            },
                            {
                                "tag:Env": "absent"
                            },
                            {
                                "tag:Owner": "absent"
                            },
                        ]
                    },
                ],
            }]
        })

        end = datetime.utcnow()
        start = end - timedelta(14)
        period = 24 * 60 * 60 * 14

        out = self.get_output([
            "custodian",
            "metrics",
            "--start",
            str(start),
            "--end",
            str(end),
            "--period",
            str(period),
            yaml_file,
        ])

        self.assertEqual(
            json.loads(out),
            {
                "ec2-tag-compliance-v6": {
                    u"Durations": [],
                    u"Errors": [{
                        u"Sum": 0.0,
                        u"Timestamp": u"2016-05-30T10:50:00+00:00",
                        u"Unit": u"Count",
                    }],
                    u"Invocations": [{
                        u"Sum": 4.0,
                        u"Timestamp": u"2016-05-30T10:50:00+00:00",
                        u"Unit": u"Count",
                    }],
                    u"ResourceCount": [{
                        u"Average": 1.0,
                        u"Sum": 2.0,
                        u"Timestamp": u"2016-05-30T10:50:00+00:00",
                        u"Unit": u"Count",
                    }],
                    u"Throttles": [{
                        u"Sum": 0.0,
                        u"Timestamp": u"2016-05-30T10:50:00+00:00",
                        u"Unit": u"Count",
                    }],
                }
            },
        )
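The staticmethod wrapper in the patch above is what keeps the lambda from
being bound on attribute access (a general Python note, not specific to this
test): a plain function assigned to a class attribute becomes a method, so
instances would pass themselves as the first argument.

class C(object):
    pass

C.factory = lambda x=None: "session"
C().factory()      # the instance is silently bound to x
C.factory = staticmethod(lambda x=None: "session")
C().factory()      # x keeps its default of None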
Example #55
def apply(*args, **kwargs):
    ApplyPatchAction.is_applicable = staticmethod(
        lambda *args: 'ApplyPatchAction cannot be applied.')
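# Note (an assumption about the surrounding project's convention): coala's
# is_applicable() returns True when an action can run and an explanatory
# string otherwise, so patching it with a staticmethod lambda disables
# ApplyPatchAction for every result.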
Example #56
import collections

from pulp_rpm.plugins.distributors.yum import configuration as yum_config

from pulp_integrity import validator


class BrokenSymlinkError(validator.ValidationError):
    """A broken symlink error."""


class MissingSymlinkError(validator.ValidationError):
    """A missing symlink error."""

# the link can be None
SymlinkFailure = collections.namedtuple('SymlinkFailure',
                                        validator.ValidationFailure._fields + ('link',))
SymlinkFailure.__nonzero__ = staticmethod(lambda: False)
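# __nonzero__ is the Python 2 truthiness hook; the Python 3 spelling (an
# assumption, if this module were ported) would be:
#     SymlinkFailure.__bool__ = staticmethod(lambda: False)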


class YumDistributorValidatorMixin(object):
    applicable_types = set(['rpm', 'srpm', 'drpm'])

    def __init__(self):
        # the count of repositories&distributors is small compared to the count of units
        # therefore the cost of caching the repositories is amortized in a singleton validator
        self.repo_cache = {}

    def applicable(self, unit):
        """Check applicability of this validator.

        Only the self.applicable_types are relevant.
Example #57
class StepListPrinter(MonitorListener):
    """
    This class takes care of printing DeepSea execution in the terminal as a
    list of steps. It uses its own thread so that a running time counter can
    be printed alongside each step.
    """
    HAS_UTF8_SUPPORT = check_terminal_utf8_support()
    # pylint: disable=C0103
    OK = PP.green(PP.bold(u"\u2713")) if HAS_UTF8_SUPPORT else PP.green("OK")
    FAIL = PP.red(u"\u274C") if HAS_UTF8_SUPPORT else PP.red("Fail")
    WAITING = PP.orange(u"\u23F3") if HAS_UTF8_SUPPORT else PP.orange(
        "Running")

    STAGE = staticmethod(PP.magenta)
    INFO = staticmethod(PP.dark_yellow)
    RUNNER = staticmethod(PP.blue)
    STATE = staticmethod(PP.orange)
    MINION = staticmethod(PP.cyan)
    STATE_RES = staticmethod(PP.grey)
    SUCCESS = staticmethod(PP.dark_green)
    FAILURE = staticmethod(PP.red)
    TIME = staticmethod(PP.purple)

    def print_step(self, step, depth=0):
        """
        Prints a single step
        Args:
            step (StepListPrinter.Step): the step object
            depth (int): the step depth, if depth > 0 it's a substep
        """
        step_order_width = 9
        step_desc_width = 60
        indent = 2

        if depth == 0:
            # root step
            if step.step.order > 0:
                step_order = "[{}/{}]".format(step.step.order,
                                              self.total_steps)
            else:
                step_order = "[init]"

            rest = step_order_width - len(step_order)
            rest = 0 if rest < 0 else rest
            offset = len(step_order) + rest + 1

        else:
            prefix_indent = step_order_width + indent * depth
            if depth > 1:
                prefix_indent += 3
            offset = prefix_indent + 4

        desc_width = step_desc_width - (offset - step_order_width)

        if not step.reprint:
            step.reprint = True
        elif depth == 0:
            step.clean(desc_width)

        if depth == 0:
            PP.print(PP.bold("{}{} ".format(step_order, " " * rest)))
        else:
            PP.print("{} |_ ".format(" " * prefix_indent))

        step.print(offset, desc_width, depth)

    @staticmethod
    def format_desc(desc, width):
        """
        Breaks the string into an array of strings of max length width
        """
        result = []
        while len(desc) > width:
            idx = desc[:width].rfind(' ')
            if idx != -1:
                result.append(desc[0:idx])
                desc = desc[idx + 1:]
            else:
                idx = desc[width - 1:].find(' ')
                if idx != -1:
                    idx = idx + (width - 1)
                    result.append(desc[:idx])
                    desc = desc[idx + 1:]
                else:
                    break
        result.append(desc)
        return result
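    # Worked example (hypothetical input): format_desc("one two three four", 9)
    # consumes the string in chunks of at most 9 characters, preferring to
    # break at spaces, and returns ['one two', 'three', 'four'].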

    class Step(object):
        def __init__(self, printer, step):
            self.printer = printer
            self.step = step
            self.finished = False
            self.reprint = False
            self.substeps = OrderedDict()
            self.args = step.args_str
            if step.start_event:
                self.start_ts = datetime.datetime.strptime(
                    step.start_event.stamp, "%Y-%m-%dT%H:%M:%S.%f")
            else:
                self.start_ts = None

            if step.skipped:
                self.finished = True

        def _find_running_substep(self):
            for substep in self.substeps.values():
                if not substep.finished:
                    return substep
            return self

        def start_runner_substep(self, step):
            substep = self._find_running_substep()
            substep.substeps[step.jid] = SP.Runner(self.printer, step)

        def start_state_substep(self, step):
            substep = self._find_running_substep()
            substep.substeps[step.jid] = SP.State(self.printer, step)

        def finish_substep(self, step):
            if step.jid in self.substeps:
                self.substeps[step.jid].finished = True
                return True
            for substep in self.substeps.values():
                if substep.finish_substep(step):
                    return True
            return False

        # pylint: disable=W0613
        def print(self, offset, desc_width, depth):
            """
            Prints the status of a step
            """
            # if not self.reprint:
            #     self.reprint = True
            # else:
            #     self.clean(desc_width)

        def clean(self, desc_width):
            """
            Prepare for re-print of step
            """
            raise NotImplementedError()

        @staticmethod
        def ftime(tr):
            if tr.seconds > 0:
                return "{}s".format(
                    int(round(tr.seconds + tr.microseconds / 1000000.0)))
            else:
                return "{}s".format(
                    round(tr.seconds + tr.microseconds / 1000000.0, 1))

    class Runner(Step):
        def __init__(self, printer, step):
            super(SP.Runner, self).__init__(printer, step)

        def clean(self, desc_width):
            for substep in self.substeps.values():
                if substep.reprint:
                    substep.clean(desc_width - 5)
            PP.print("\x1B[A\x1B[K")
            if self.args and len(self.step.name) + len(
                    self.args) + 2 >= desc_width:
                PP.print("\x1B[A\x1B[K" *
                         len(SP.format_desc(self.args, desc_width)))

        def print(self, offset, desc_width, depth):
            super(SP.Runner, self).print(offset, desc_width, depth)

            if len(self.step.name) + len(self.args) + 2 < desc_width:
                if self.args:
                    desc_length = len(self.step.name) + len(self.args) + 2
                    PP.print(
                        SP.RUNNER("{}({})".format(self.step.name, self.args)))
                else:
                    desc_length = len(self.step.name)
                    PP.print(SP.RUNNER("{}".format(self.step.name)))
                PP.print(
                    SP.RUNNER("{} ".format("." * (desc_width - desc_length))))
                print_args = False
            else:
                desc_length = len(self.step.name)
                PP.print(SP.RUNNER("{}".format(self.step.name)))
                PP.print(
                    SP.RUNNER("{} ".format("." * (desc_width - desc_length))))
                print_args = True

            if self.finished:
                if self.step.skipped:
                    PP.println(PP.grey("skipped"))
                else:
                    PP.print(SP.OK if self.step.success else SP.FAIL)
                    ts = datetime.datetime.strptime(self.step.end_event.stamp,
                                                    "%Y-%m-%dT%H:%M:%S.%f")
                    PP.println(" ({})".format(SP.Step.ftime(ts -
                                                            self.start_ts)))
            else:
                ts = datetime.datetime.utcnow()
                PP.print(SP.WAITING)
                PP.println(" ({})".format(SP.Step.ftime(ts - self.start_ts)))

            if self.args and print_args:
                lines = StepListPrinter.format_desc(self.args, desc_width - 2)
                lines[-1] += ")"
                first = True
                for line in lines:
                    PP.print(" " * offset)
                    if first:
                        PP.println(SP.RUNNER("({}".format(line)))
                        first = False
                    else:
                        PP.println(SP.RUNNER(" {}".format(line)))

            for substep in self.substeps.values():
                self.printer.print_step(substep, depth + 1)

    class State(Step):
        def __init__(self, printer, step):
            super(SP.State, self).__init__(printer, step)

        def clean(self, desc_width):
            if self.args and len(self.step.name) + len(
                    self.args) + 5 >= desc_width:
                PP.print("\x1B[A\x1B[K" *
                         len(SP.format_desc(self.args, desc_width)))

            if self.step.skipped:
                PP.print("\x1B[A\x1B[K")
            else:
                for substep in self.substeps.values():
                    if substep.reprint:
                        substep.clean(desc_width - 5)

                for target in self.step.targets.values():
                    PP.print("\x1B[A\x1B[K" * (len(target['states']) + 1))
                PP.print("\x1B[A\x1B[K")

        def print(self, offset, desc_width, depth):
            super(SP.State, self).print(offset, desc_width, depth)

            if len(self.step.name) + len(self.args) + 2 < desc_width:
                if self.args:
                    desc_length = len(self.step.name) + len(self.args) + 2
                    PP.print(
                        SP.STATE("{}({})".format(self.step.name, self.args)))
                else:
                    desc_length = len(self.step.name)
                    PP.print(SP.STATE("{}".format(self.step.name)))
                if not self.step.skipped:
                    PP.print(SP.STATE(" on"))
                print_args = False
            else:
                desc_length = len(self.step.name)
                PP.print(SP.STATE("{}".format(self.step.name)))
                print_args = True

            if self.step.skipped:
                PP.print(
                    SP.STATE("{} ".format("." * (desc_width - desc_length))))
                PP.println(PP.grey('skipped'))
            else:
                PP.println()

            if self.args and print_args:
                lines = SP.format_desc(self.args, desc_width - 2)
                lines[-1] += ")"
                if not self.step.skipped:
                    lines[-1] += " on"
                first = True
                for line in lines:
                    PP.print(" " * offset)
                    if first:
                        PP.println(SP.STATE("({}".format(line)))
                        first = False
                    else:
                        PP.println(SP.STATE(" {}".format(line)))

            if self.step.skipped:
                return

            for substep in self.substeps.values():
                self.printer.print_step(substep, depth + 1)

            for target, data in self.step.targets.items():
                PP.print(" " * offset)
                PP.print(SP.MINION(target))
                PP.print(
                    SP.MINION("{} ".format("." * (desc_width - len(target)))))
                if data['finished']:
                    PP.print(SP.OK if data['success'] else SP.FAIL)
                    ts = datetime.datetime.strptime(data['event'].stamp,
                                                    "%Y-%m-%dT%H:%M:%S.%f")
                    PP.println(" ({})".format(SP.Step.ftime(ts -
                                                            self.start_ts)))
                else:
                    ts = datetime.datetime.utcnow()
                    PP.print(SP.WAITING)
                    PP.println(" ({})".format(SP.Step.ftime(ts -
                                                            self.start_ts)))

                for state_res in data['states']:
                    msg = state_res.step.pretty_string()
                    PP.print(" " * offset)
                    PP.print(SP.STATE_RES("  |_ {}".format(msg)))
                    msg_rest = desc_width - (len(msg) + 3) - 2
                    msg_rest = 0 if msg_rest < 0 else msg_rest
                    PP.print(SP.STATE_RES("{} ".format("." * msg_rest)))
                    if state_res.finished:
                        if state_res.success:
                            PP.println(u"{}".format(SP.OK))
                        else:
                            PP.println(u"{}".format(SP.FAIL))
                    else:
                        PP.println(SP.WAITING)

    class PrinterThread(threading.Thread):
        def __init__(self, printer):
            super(StepListPrinter.PrinterThread, self).__init__()
            self.printer = printer
            self.daemon = True
            self.running = True

        def stop(self):
            self.running = False
            self.join()

        def run(self):
            self.running = True
            PP.print("\x1B[?25l")  # hides cursor
            while self.running:
                time.sleep(0.5)
                with self.printer.print_lock:
                    if self.printer.step:
                        self.printer.print_step(self.printer.step)

            PP.print("\x1B[?25h")  # shows cursor

    def __init__(self, clear_screen=True):
        super(StepListPrinter, self).__init__()
        self._clear_screen = clear_screen
        self.stage_name = None
        self.stage = None
        self.total_steps = None
        self.errors = None
        self.step = None
        self.thread = None
        self.print_lock = threading.Lock()
        self.init_output = None
        self.init_output_printed = False

    def stage_started(self, stage_name):
        if self._clear_screen:
            os.system('clear')
        PP.p_bold("Starting stage: ")
        PP.println(SP.STAGE(stage_name))

        self.stage_name = stage_name
        self.errors = OrderedDict()
        self.stage = None
        self.total_steps = None

    def stage_parsing_started(self, stage_name):
        PP.print(SP.INFO("Parsing {} steps... ".format(stage_name)))
        PP.println(SP.WAITING)

    def stage_parsing_finished(self, stage, output, exception):
        PP.print("\x1B[A\x1B[K")
        PP.print(SP.INFO("Parsing {} steps... ".format(self.stage_name)))
        if exception:
            PP.println(SP.FAIL)
            PP.println()
            if isinstance(exception, StateRenderingException):
                PP.println(
                    PP.bold(
                        "An error occurred when rendering one of the following "
                        "states:"))
                for state in exception.states:
                    PP.print(PP.cyan("    - {}".format(state)))
                    PP.println(" ({})".format("/srv/salt/{}".format(
                        state.replace(".", "/"))))
            else:
                PP.println(
                    PP.bold(
                        "An error occurred while rendering the stage file:"))
                PP.println(PP.cyan("    {}".format(exception.stage_file)))
            PP.println()
            PP.println(PP.bold("Error description:"))
            PP.println(PP.red(exception.pretty_error_desc_str()))
            return
        PP.println(SP.OK)
        PP.println()
        self.init_output = output.strip()

        self.stage = stage
        self.total_steps = stage.total_steps()

        self.thread = SP.PrinterThread(self)
        self.thread.start()

    def stage_finished(self, stage):
        self.step = None
        self.thread.stop()
        self.thread = None

        PP.println("\x1B[K")

        if not self.init_output_printed and self.init_output:
            PP.println(PP.bold("Stage initialization output:"))
            PP.println(self.init_output)
            PP.println()

        if not self.errors and not stage.success:
            PP.println(PP.bold("Stage execution failed: "))
            ret = stage.end_event.raw_event['data']['return']
            if isinstance(ret, dict):
                for data in stage.end_event.raw_event['data']['return'][
                        'data'].values():
                    for state in data.values():
                        if not state['result']:
                            PP.println(PP.red("  - {}".format(
                                state['__id__'])))
            elif isinstance(ret, str):
                for line in ret.split('\n'):
                    PP.println(SP.FAILURE("  {}".format(line)))
            else:
                PP.println(SP.FAILURE("  Unknown Error"))

            return

        PP.p_bold("Ended stage: ")
        PP.print(SP.STAGE("{} ".format(self.stage.name)))
        succeeded = stage.current_step - len(self.errors)
        PP.print(
            SP.SUCCESS("succeeded={}/{}".format(succeeded, self.total_steps)))
        if self.errors:
            PP.print(
                SP.FAILURE(" failed={}/{}".format(len(self.errors),
                                                  self.total_steps)))

        start_ts = datetime.datetime.strptime(stage.start_event.stamp,
                                              "%Y-%m-%dT%H:%M:%S.%f")
        end_ts = datetime.datetime.strptime(stage.end_event.stamp,
                                            "%Y-%m-%dT%H:%M:%S.%f")
        PP.print(
            SP.TIME(" time={}s".format(
                round((end_ts - start_ts).total_seconds(), 1))))
        PP.println()

        if self.errors:
            PP.println()
            PP.println(PP.bold("Failures summary:\n"))
            for step, error in self.errors.items():
                if isinstance(error, dict):
                    step_dir_path = "/srv/salt/{}".format(
                        step.replace('.', '/'))
                    if os.path.exists(step_dir_path):
                        PP.println(
                            PP.orange("{} ({}):".format(step, step_dir_path)))
                    else:
                        PP.println(PP.orange("{}:".format(step)))
                    for minion, event in error.items():
                        PP.println(SP.MINION("  {}:".format(minion)))
                        ret_data = event.raw_event['data']['return']
                        if isinstance(ret_data, list):
                            ret_data = dict([(None, val) for val in ret_data])
                        if isinstance(ret_data, dict):
                            for key, substep in ret_data.items():
                                if isinstance(substep, dict):
                                    if not substep['result']:
                                        if '__id__' not in substep:
                                            match = re.match(
                                                r".*\|-(.*)_\|-.*", str(key))
                                            if match:
                                                substep_id = match.group(1)
                                            else:
                                                substep_id = None
                                        else:
                                            substep_id = substep['__id__']
                                        if substep_id:
                                            PP.println("    {}: {}".format(
                                                PP.info(substep_id),
                                                PP.red(substep['comment'])))
                                        else:
                                            PP.println("    {}".format(
                                                PP.red(substep['comment'])))
                                        if 'changes' in substep:
                                            changes = substep['changes']
                                            if 'stdout' in changes:
                                                PP.println(
                                                    "        stdout: {}".format(
                                                        PP.red(changes['stdout'])))
                                            if 'stderr' in changes:
                                                PP.println(
                                                    "        stderr: {}".format(
                                                        PP.red(changes['stderr'])))
                                else:
                                    PP.println("    {}".format(
                                        PP.red(substep)))
                        elif isinstance(ret_data, str):
                            # pylint: disable=E1101
                            for line in ret_data.split('\n'):
                                PP.println("    {}".format(PP.red(line)))
                        else:
                            PP.println("    {}".format(PP.red(ret_data)))
                        logger.debug("state error in minion '%s':\n%s", minion,
                                     event.raw_event)
                else:
                    step_file_path = "/srv/modules/runners/{}.py".format(
                        step[:step.find('.')])
                    if os.path.exists(step_file_path):
                        PP.println(
                            PP.orange("{} ({}):".format(step, step_file_path)))
                    else:
                        PP.println(PP.orange("{}:".format(step)))
                    traceback = error.raw_event['data']['return']
                    for line in traceback.split('\n'):
                        PP.println(PP.red("  {}".format(line)))

                    logger.debug("runner error:\n%s", error.raw_event)

    def step_runner_started(self, step):
        with self.print_lock:
            if self.step:
                # substep starting
                self.step.start_runner_substep(step)
            else:
                self.step = SP.Runner(self, step)
                if step.order == 1:
                    PP.println()
                    # first step, need to output initialization stdout
                    if self.init_output:
                        PP.println(PP.bold("Stage initialization output:"))
                        PP.println(self.init_output)
                    self.init_output_printed = True
                    PP.println()
                elif step.order > 1:
                    PP.println()
            self.print_step(self.step)

    def step_runner_finished(self, step):
        if not step.success:
            if step.name not in self.errors:
                self.errors[step.name] = step.end_event

        with self.print_lock:
            if self.step and self.step.step.jid != step.jid:
                # maybe it's a substep
                if not self.step.finish_substep(step):
                    logger.error("substep jid=%s not found: event=\n%s",
                                 step.jid, step.end_event)
            elif self.step:
                self.step.finished = True
                self.print_step(self.step)
            if self.step and self.step.step.jid == step.jid:
                self.step = None

    def step_runner_skipped(self, step):
        # the step_runner_started already handles skipped steps
        self.step_runner_started(step)
        self.step = None

    def step_state_started(self, step):
        with self.print_lock:
            if self.step:
                self.step.start_state_substep(step)
            else:
                self.step = SP.State(self, step)
                if step.order == 1:
                    PP.println()
                    # first step, need to output initialization stdout
                    if self.init_output:
                        PP.println(PP.bold("Stage initialization output:"))
                        PP.println(self.init_output)
                    self.init_output_printed = True
                    PP.println()
                elif step.order > 1:
                    PP.println()
            self.print_step(self.step)

    def step_state_minion_finished(self, step, minion):
        if not step.targets[minion]['success']:
            if step.name not in self.errors:
                self.errors[step.name] = OrderedDict()
            self.errors[step.name][minion] = step.targets[minion]['event']

        with self.print_lock:
            if self.step and self.step.step.jid != step.jid:
                # maybe it's a substep
                if not self.step.finish_substep(step):
                    logger.error("substep jid=%s not found: event=\n%s",
                                 step.jid, step.end_event)
            elif self.step:
                self.print_step(self.step)

    def step_state_finished(self, step):
        with self.print_lock:
            if self.step and self.step.step.jid == step.jid:
                self.step.finished = True
                self.step = None

    def step_state_result(self, step, event):
        with self.print_lock:
            assert self.step
            assert isinstance(self.step, StepListPrinter.State)
            self.print_step(self.step)

    def step_state_skipped(self, step):
        # the step_state_started already handles skipped steps
        self.step_state_started(step)
        self.step = None
class DisassemblerView(QAbstractScrollArea):
    statusUpdated = pyqtSignal(QWidget, name="statusUpdated")

    def __init__(self, data, filename, view, parent):
        super(DisassemblerView, self).__init__(parent)

        self.status = ""
        self.view = view

        self.data = data
        for fmt in ExeFormats:
            exe = fmt(data)
            if exe.valid:
                self.data = exe
                self.view.exe = exe
                break

        # Create analysis and start it in another thread
        self.analysis = Analysis(self.data)
        self.analysis_thread = threading.Thread(None, self.analysis_thread_proc)
        self.analysis_thread.daemon = True
        self.analysis_thread.start()

        # Start disassembly view at the entry point of the binary
        if hasattr(self.data, "entry"):
            self.function = self.data.entry()
        else:
            self.function = None
        self.update_id = None
        self.ready = False
        self.desired_pos = None
        self.highlight_token = None
        self.cur_instr = None
        self.scroll_mode = False
        self.blocks = {}
        self.show_il = False
        self.simulation = None

        # Create timer to automatically refresh view when it needs to be updated
        self.updateTimer = QTimer()
        self.updateTimer.setInterval(100)
        self.updateTimer.setSingleShot(False)
        self.updateTimer.timeout.connect(self.updateTimerEvent)
        self.updateTimer.start()

        self.initFont()

        # Initialize scroll bars
        self.width = 0
        self.height = 0
        self.setHorizontalScrollBarPolicy(Qt.ScrollBarAsNeeded)
        self.setVerticalScrollBarPolicy(Qt.ScrollBarAsNeeded)
        self.horizontalScrollBar().setSingleStep(int(self.charWidth))  # charWidth is a float from QFontMetricsF
        self.verticalScrollBar().setSingleStep(self.charHeight)
        areaSize = self.viewport().size()
        self.adjustSize(areaSize.width(), areaSize.height())

        # Setup navigation
        self.view.register_navigate("disassembler", self, self.navigate)
        self.view.register_navigate("make_proc", self, self.make_proc)

        self.search_regex = None
        self.last_search_type = FindDialog.SEARCH_HEX

    def initFont(self):
        # Get font and compute character sizes
        self.font = getMonospaceFont()
        self.baseline = int(QFontMetricsF(self.font).ascent())
        self.charWidth = QFontMetricsF(self.font).width('X')
        self.charHeight = int(QFontMetricsF(self.font).height()) + getExtraFontSpacing()
        self.charOffset = getFontVerticalOffset()

    def adjustSize(self, width, height):
        # Recompute size information
        self.renderWidth = self.width
        self.renderHeight = self.height
        self.renderXOfs = 0
        self.renderYOfs = 0
        if self.renderWidth < width:
            self.renderXOfs = int((width - self.renderWidth) / 2)
            self.renderWidth = width
        if self.renderHeight < height:
            self.renderYOfs = int((height - self.renderHeight) / 2)
            self.renderHeight = height

        # Update scroll bar information
        self.horizontalScrollBar().setPageStep(width)
        self.horizontalScrollBar().setRange(0, self.renderWidth - width)
        self.verticalScrollBar().setPageStep(height)
        self.verticalScrollBar().setRange(0, self.renderHeight - height)

    def resizeEvent(self, event):
        # Window was resized, adjust scroll bar
        self.adjustSize(event.size().width(), event.size().height())

    def get_cursor_pos(self):
        if self.cur_instr is None:
            return self.function
        return self.cur_instr

    def set_cursor_pos(self, addr):
        if not self.view.navigate("disassembler", addr):
            self.view_in_hex_editor(addr)

    def get_selection_range(self):
        return (self.get_cursor_pos(), self.get_cursor_pos())

    def set_selection_range(self, begin, end):
        self.set_cursor_pos(begin)

    def write(self, data):
        pos = self.get_cursor_pos()
        if pos is None:
            return False
        return self.data.write(pos, data) == len(data)

    def copy_address(self):
        clipboard = QApplication.clipboard()
        clipboard.clear()
        mime = QMimeData()
        mime.setText("0x%x" % self.get_cursor_pos())
        clipboard.setMimeData(mime)

    def analysis_thread_proc(self):
        self.analysis.analyze()

    def closeRequest(self):
        # Stop analysis when closing tab
        self.analysis.stop()
        return True

    def paintEvent(self, event):
        # Initialize painter
        p = QPainter(self.viewport())
        p.setFont(self.font)

        xofs = self.horizontalScrollBar().value()
        yofs = self.verticalScrollBar().value()

        if not self.ready:
            # Analysis for the current function is not yet complete, paint loading screen
            gradient = QLinearGradient(QPointF(0, 0), QPointF(self.viewport().size().width(), self.viewport().size().height()))
            gradient.setColorAt(0, QColor(232, 232, 232))
            gradient.setColorAt(1, QColor(192, 192, 192))
            p.setPen(QColor(0, 0, 0, 0))
            p.setBrush(QBrush(gradient))
            p.drawRect(0, 0, self.viewport().size().width(), self.viewport().size().height())

            if self.function is None:
                text = "No function selected"
            else:
                text = "Loading..."
            p.setPen(Qt.black)
            p.drawText((self.viewport().size().width() / 2) - ((len(text) * self.charWidth) / 2),
                (self.viewport().size().height() / 2) + self.charOffset + self.baseline - (self.charHeight / 2), text)
            return

        # Render background
        gradient = QLinearGradient(QPointF(-xofs, -yofs), QPointF(self.renderWidth - xofs, self.renderHeight - yofs))
        gradient.setColorAt(0, QColor(232, 232, 232))
        gradient.setColorAt(1, QColor(192, 192, 192))
        p.setPen(QColor(0, 0, 0, 0))
        p.setBrush(QBrush(gradient))
        p.drawRect(0, 0, self.viewport().size().width(), self.viewport().size().height())

        p.translate(self.renderXOfs - xofs, self.renderYOfs - yofs)

        # Render each node
        for block in list(self.blocks.values()):
            # Render shadow
            p.setPen(QColor(0, 0, 0, 0))
            p.setBrush(QColor(0, 0, 0, 128))
            p.drawRect(block.x + self.charWidth + 4, block.y + self.charWidth + 4,
                block.width - (4 + 2 * self.charWidth), block.height - (4 + 2 * self.charWidth))

            # Render node background
            gradient = QLinearGradient(QPointF(0, block.y + self.charWidth),
                QPointF(0, block.y + block.height - self.charWidth))
            gradient.setColorAt(0, QColor(255, 255, 252))
            gradient.setColorAt(1, QColor(255, 255, 232))
            p.setPen(Qt.black)
            p.setBrush(QBrush(gradient))
            p.drawRect(block.x + self.charWidth, block.y + self.charWidth,
                block.width - (4 + 2 * self.charWidth), block.height - (4 + 2 * self.charWidth))

            if self.cur_instr is not None:
                y = block.y + (2 * self.charWidth) + (len(block.block.header_text.lines) * self.charHeight)
                for instr in block.block.instrs:
                    if instr.addr == self.cur_instr:
                        p.setPen(QColor(0, 0, 0, 0))
                        p.setBrush(QColor(255, 255, 128, 128))
                        p.drawRect(block.x + self.charWidth + 3, y, block.width - (10 + 2 * self.charWidth),
                            len(instr.text.lines) * self.charHeight)
                    y += len(instr.text.lines) * self.charHeight

            if self.highlight_token:
                # Render highlighted tokens
                x = block.x + (2 * self.charWidth)
                y = block.y + (2 * self.charWidth)
                for line in block.block.header_text.tokens:
                    for token in line:
                        if token[2:] == self.highlight_token:
                            p.setPen(QColor(0, 0, 0, 0))
                            p.setBrush(QColor(192, 0, 0, 64))
                            p.drawRect(x + token[0] * self.charWidth, y,
                                token[1] * self.charWidth, self.charHeight)
                    y += self.charHeight
                for instr in block.block.instrs:
                    for line in instr.text.tokens:
                        for token in line:
                            if token[2:] == self.highlight_token:
                                p.setPen(QColor(0, 0, 0, 0))
                                p.setBrush(QColor(192, 0, 0, 64))
                                p.drawRect(x + token[0] * self.charWidth, y,
                                    token[1] * self.charWidth, self.charHeight)
                        y += self.charHeight

            # Render node text
            x = block.x + (2 * self.charWidth)
            y = block.y + (2 * self.charWidth)
            for line in block.block.header_text.lines:
                partx = x
                for part in line:
                    p.setPen(part[1])
                    p.drawText(partx, y + self.charOffset + self.baseline, part[0])
                    partx += len(part[0]) * self.charWidth
                y += self.charHeight
            for instr in block.block.instrs:
                for line in instr.text.lines:
                    partx = x
                    for part in line:
                        p.setPen(part[1])
                        p.drawText(partx, y + self.charOffset + self.baseline, part[0])
                        partx += len(part[0]) * self.charWidth
                    y += self.charHeight

            # Render edges
            for edge in block.edges:
                p.setPen(edge.color)
                p.setBrush(edge.color)
                p.drawPolyline(QPolygon(edge.polyline))
                p.drawConvexPolygon(QPolygon(edge.arrow))

    def isMouseEventInBlock(self, event):
        # Convert coordinates to system used in blocks
        xofs = self.horizontalScrollBar().value()
        yofs = self.verticalScrollBar().value()
        x = event.x() + xofs - self.renderXOfs
        y = event.y() + yofs - self.renderYOfs

        # Check each block for hits
        for block in list(self.blocks.values()):
            # Compute coordinate relative to text area in block
            blockx = x - (block.x + (2 * self.charWidth))
            blocky = y - (block.y + (2 * self.charWidth))
            # Check to see if click is within bounds of block
            if (blockx < 0) or (blockx > (block.width - 4 * self.charWidth)):
                continue
            if (blocky < 0) or (blocky > (block.height - 4 * self.charWidth)):
                continue
            return True

        return False

    def getInstrForMouseEvent(self, event):
        # Convert coordinates to system used in blocks
        xofs = self.horizontalScrollBar().value()
        yofs = self.verticalScrollBar().value()
        x = event.x() + xofs - self.renderXOfs
        y = event.y() + yofs - self.renderYOfs

        # Check each block for hits
        for block in list(self.blocks.values()):
            # Compute coordinate relative to text area in block
            blockx = x - (block.x + (2 * self.charWidth))
            blocky = y - (block.y + (2 * self.charWidth))
            # Check to see if click is within bounds of block
            if (blockx < 0) or (blockx > (block.width - 4 * self.charWidth)):
                continue
            if (blocky < 0) or (blocky > (block.height - 4 * self.charWidth)):
                continue
            # Compute row within text
            row = int(blocky / self.charHeight)
            # Determine instruction for this row
            cur_row = len(block.block.header_text.lines)
            if row < cur_row:
                return block.block.entry
            for instr in block.block.instrs:
                if row < cur_row + len(instr.text.lines):
                    return instr.addr
                cur_row += len(instr.text.lines)

        return None

    def getTokenForMouseEvent(self, event):
        # Convert coordinates to system used in blocks
        xofs = self.horizontalScrollBar().value()
        yofs = self.verticalScrollBar().value()
        x = event.x() + xofs - self.renderXOfs
        y = event.y() + yofs - self.renderYOfs

        # Check each block for hits
        for block in list(self.blocks.values()):
            # Compute coordinate relative to text area in block
            blockx = x - (block.x + (2 * self.charWidth))
            blocky = y - (block.y + (2 * self.charWidth))
            # Check to see if click is within bounds of block
            if (blockx < 0) or (blockx > (block.width - 4 * self.charWidth)):
                continue
            if (blocky < 0) or (blocky > (block.height - 4 * self.charWidth)):
                continue
            # Compute row and column within text
            col = int(blockx / self.charWidth)
            row = int(blocky / self.charHeight)
            # Check tokens to see if one was clicked
            cur_row = 0
            for line in block.block.header_text.tokens:
                if cur_row == row:
                    for token in line:
                        if (col >= token[0]) and (col < (token[0] + token[1])):
                            # Clicked on a token
                            return token
                cur_row += 1
            for instr in block.block.instrs:
                for line in instr.text.tokens:
                    if cur_row == row:
                        for token in line:
                            if (col >= token[0]) and (col < (token[0] + token[1])):
                                # Clicked on a token
                                return token
                    cur_row += 1

        return None

    def find_instr(self, addr):
        for block in list(self.blocks.values()):
            for instr in block.block.instrs:
                if instr.addr == addr:
                    return instr
        return None

    def nop_out(self, addr):
        instr = self.find_instr(addr)
        if instr is not None:
            self.view.begin_undo()
            instr.patch_to_nop(self.data)
            self.view.commit_undo()

    def always_branch(self, addr):
        instr = self.find_instr(addr)
        if instr is not None:
            self.view.begin_undo()
            instr.patch_to_always_branch(self.data)
            self.view.commit_undo()

    def invert_branch(self, addr):
        instr = self.find_instr(addr)
        if instr is not None:
            self.view.begin_undo()
            instr.patch_to_invert_branch(self.data)
            self.view.commit_undo()

    def skip_and_return_zero(self, addr):
        instr = self.find_instr(addr)
        if instr is not None:
            self.view.begin_undo()
            instr.patch_to_zero_return(self.data)
            self.view.commit_undo()

    def skip_and_return_value(self, addr):
        instr = self.find_instr(addr)
        if instr is not None:
            value, ok = QInputDialog.getText(self, "Skip and Return Value", "Return value:", QLineEdit.Normal)
            if not ok:
                return
            try:
                value = int(value, 0)
            except ValueError:
                QMessageBox.critical(self, "Error", "Expected numerical value")
                return

            self.view.begin_undo()
            instr.patch_to_fixed_return_value(self.data, value)
            self.view.commit_undo()

    def view_in_hex_editor(self, addr):
        if not self.view.navigate("exe", addr):
            self.view.navigate("hex", addr)

    def show_address(self):
        # Toggle the address column in the disassembly view
        self.analysis.set_address_view("address" not in self.analysis.options)

    def context_menu(self, addr):
        popup = QMenu()
        view_in_hex = popup.addAction("View in &hex editor")
        view_in_hex.triggered.connect(lambda: self.view_in_hex_editor(addr))
        view_in_hex.setShortcut(QKeySequence(Qt.Key_H))
        popup.addAction("Copy address", self.copy_address)
        enter_name_action = popup.addAction("Re&name symbol", self.enter_name)
        enter_name_action.setShortcut(QKeySequence(Qt.Key_N))
        undefine_name_action = popup.addAction("&Undefine symbol", self.undefine_name)
        undefine_name_action.setShortcut(QKeySequence(Qt.Key_U))
        show_address_action = popup.addAction("Show &address", self.show_address)
        show_address_action.setCheckable(True)
        show_address_action.setChecked("address" in self.analysis.options)
        popup.addSeparator()

        patch = popup.addMenu("&Patch")
        patch.addAction("Convert to NOP").triggered.connect(lambda : self.nop_out(addr))
        instr = self.find_instr(addr)
        if instr:
            if instr.is_patch_branch_allowed():
                patch.addAction("Never branch").triggered.connect(lambda : self.nop_out(addr))
                patch.addAction("Always branch").triggered.connect(lambda : self.always_branch(addr))
                patch.addAction("Invert branch").triggered.connect(lambda : self.invert_branch(addr))
            if instr.is_patch_to_zero_return_allowed():
                patch.addAction("Skip and return zero").triggered.connect(lambda : self.skip_and_return_zero(addr))
            if instr.is_patch_to_fixed_return_value_allowed():
                patch.addAction("Skip and return value...").triggered.connect(lambda : self.skip_and_return_value(addr))

        popup.exec_(QCursor.pos())

    def mousePressEvent(self, event):
        if (event.button() != Qt.LeftButton) and (event.button() != Qt.RightButton):
            return

        if not self.isMouseEventInBlock(event):
            # Click outside any block, enter scrolling mode
            self.scroll_base_x = event.x()
            self.scroll_base_y = event.y()
            self.scroll_mode = True
            self.viewport().grabMouse()
            return

        # Check for click on a token and highlight it
        token = self.getTokenForMouseEvent(event)
        if token:
            self.highlight_token = token[2:]
        else:
            self.highlight_token = None

        # Update current instruction
        instr = self.getInstrForMouseEvent(event)
        self.cur_instr = instr

        self.viewport().update()

        if (instr is not None) and (event.button() == Qt.RightButton):
            self.context_menu(instr)

    def mouseMoveEvent(self, event):
        if self.scroll_mode:
            x_delta = self.scroll_base_x - event.x()
            y_delta = self.scroll_base_y - event.y()
            self.scroll_base_x = event.x()
            self.scroll_base_y = event.y()
            self.horizontalScrollBar().setValue(self.horizontalScrollBar().value() + x_delta)
            self.verticalScrollBar().setValue(self.verticalScrollBar().value() + y_delta)

    def mouseReleaseEvent(self, event):
        if event.button() != Qt.LeftButton:
            return

        if self.scroll_mode:
            self.scroll_mode = False
            self.viewport().releaseMouse()

    def mouseDoubleClickEvent(self, event):
        token = self.getTokenForMouseEvent(event)
        if token and (token[2] == "ptr"):
            self.analysis.lock.acquire()
            if token[3] not in self.analysis.functions:
                # Not a function or not analyzed, go to address in hex editor
                addr = token[3]
                self.analysis.lock.release()
                self.view_in_hex_editor(addr)
            else:
                self.view.add_history_entry()
                self.function = token[3]
                self.ready = False
                self.desired_pos = None
                self.cur_instr = None
                self.highlight_token = None
                self.viewport().update()
                self.analysis.lock.release()

    def go_to_address(self):
        addr_str, ok = QInputDialog.getText(self, "Go To Address", "Address:", QLineEdit.Normal)
        if ok:
            try:
                addr = int(addr_str, 16)
                if (addr < self.data.start()) or (addr > self.data.end()):
                    if hasattr(self.data, "symbols_by_name") and (addr_str in self.data.symbols_by_name):
                        addr = self.data.symbols_by_name[addr_str]
                    else:
                        QMessageBox.critical(self, "Error", "Address out of range")
                        return
            except ValueError:
                if hasattr(self.data, "symbols_by_name") and (addr_str in self.data.symbols_by_name):
                    addr = self.data.symbols_by_name[addr_str]
                elif addr_str.startswith('@') and hasattr(self.data, "symbols_by_name") and (addr_str[1:] in self.data.symbols_by_name):
                    addr = self.data.symbols_by_name[addr_str[1:]]
                else:
                    QMessageBox.critical(self, "Error", "Invalid address or symbol")
                    return

            # Try navigating within disassembly, if it isn't within a function then
            # navigate to the hex editor
            if not self.view.navigate("disassembler", addr):
                self.view_in_hex_editor(addr)

    def enter_name(self):
        # A symbol must be selected
        if (self.highlight_token is None) or (self.highlight_token[0] != "ptr"):
            QMessageBox.critical(self, "Error", "No symbol selected.")
            return

        addr = self.highlight_token[1]
        name = self.highlight_token[2]

        # Ask for new name
        new_name, ok = QInputDialog.getText(self, "Rename Symbol", "Symbol name:", QLineEdit.Normal, name)
        if ok:
            self.analysis.create_symbol(addr, new_name)

    def undefine_name(self):
        # A symbol must be selected
        if (self.highlight_token is None) or (self.highlight_token[0] != "ptr"):
            QMessageBox.critical(self, "Error", "No symbol selected.")
            return

        addr = self.highlight_token[1]
        name = self.highlight_token[2]

        # Remove the symbol definition
        self.analysis.undefine_symbol(addr, name)

    def navigate_for_find(self, addr):
        func, instr = self.analysis.find_instr(addr, True)
        if func is not None:
            self.navigate(addr)
        else:
            self.make_proc(addr)
            self.cur_instr = addr
            self.desired_pos = None

    def perform_find(self, dlg):
        self.search_regex = dlg.search_regex()
        if self.cur_instr is not None:
            self.search_start = self.cur_instr
        else:
            if self.function is None:
                return
            self.search_start = self.function

        found_loc = self.data.find(self.search_regex, self.search_start)
        if found_loc != -1:
            self.view.add_history_entry()
            self.navigate_for_find(found_loc)
            self.search_pos = found_loc + 1
            return

        found_loc = self.data.find(self.search_regex, self.data.start())
        if (found_loc != -1) and (found_loc < self.search_start):
            self.view.add_history_entry()
            self.navigate_for_find(found_loc)
            self.search_pos = found_loc + 1
            return

        QMessageBox.information(self, "Not Found", "Search string not found.")

    def find(self):
        dlg = FindDialog(self.last_search_type, self)
        if dlg.exec_() == QDialog.Accepted:
            self.last_search_type = dlg.search_type()
            self.perform_find(dlg)

    def find_next(self):
        if self.search_regex is None:
            QMessageBox.critical(self, "Error", "No active search")
            return

        found_loc = self.data.find(self.search_regex, self.search_pos)
        if self.search_pos >= self.search_start:
            if found_loc != -1:
                self.view.add_history_entry()
                self.navigate_for_find(found_loc)
                self.search_pos = found_loc + 1
                return
            self.search_pos = 0
        else:
            if (found_loc != -1) and (found_loc < self.search_start):
                self.view.add_history_entry()
                self.navigate_for_find(found_loc)
                self.search_pos = found_loc + 1
                return

            QMessageBox.information(self, "End of Search", "No additional matches found.")
            self.search_pos = self.search_start
            return

        found_loc = self.data.find(self.search_regex, self.search_pos)
        if (found_loc != -1) and (found_loc < self.search_start):
            self.view.add_history_entry()
            self.navigate_for_find(found_loc)
            self.search_pos = found_loc + 1
            return

        QMessageBox.information(self, "End of Search", "No additional matches found.")
        self.search_pos = self.search_start

    def keyPressEvent(self, event):
        if event.key() == Qt.Key_H:
            if self.cur_instr is not None:
                self.view_in_hex_editor(self.cur_instr)
            else:
                if self.function is not None:
                    self.view_in_hex_editor(self.function)
        elif event.key() == Qt.Key_G:
            self.go_to_address()
        elif event.key() == Qt.Key_N:
            self.enter_name()
        elif event.key() == Qt.Key_U:
            self.undefine_name()
        elif event.key() == Qt.Key_Slash:
            dlg = FindDialog(FindDialog.SEARCH_REGEX, self)
            if dlg.exec_() == QDialog.Accepted:
                self.perform_find(dlg)
        else:
            super(DisassemblerView, self).keyPressEvent(event)

    def prepareGraphNode(self, block):
        # Compute size of node in pixels
        width = 0
        height = 0
        for line in block.block.header_text.lines:
            chars = 0
            for part in line:
                chars += len(part[0])
            if chars > width:
                width = chars
            height += 1
        for instr in block.block.instrs:
            for line in instr.text.lines:
                chars = 0
                for part in line:
                    chars += len(part[0])
                if chars > width:
                    width = chars
                height += 1
        block.width = (width + 4) * self.charWidth + 4
        block.height = (height * self.charHeight) + (4 * self.charWidth) + 4

    def adjustGraphLayout(self, block, col, row):
        block.col += col
        block.row += row
        for edge in block.new_exits:
            self.adjustGraphLayout(self.blocks[edge], col, row)

    def computeGraphLayout(self, block):
        # Compute child node layouts and arrange them horizontally
        col = 0
        row_count = 1
        for edge in block.new_exits:
            self.computeGraphLayout(self.blocks[edge])
            self.adjustGraphLayout(self.blocks[edge], col, 1)
            col += self.blocks[edge].col_count
            if (self.blocks[edge].row_count + 1) > row_count:
                row_count = self.blocks[edge].row_count + 1

        block.row = 0
        if col >= 2:
            # Place this node centered over the child nodes
            block.col = int((col - 2) / 2)
            block.col_count = col
        else:
            # No child nodes, set single node's width (nodes are 2 columns wide to allow
            # centering over a branch)
            block.col = 0
            block.col_count = 2
        block.row_count = row_count

    def isEdgeMarked(self, edges, row, col, index):
        if index >= len(edges[row][col]):
            return False
        return edges[row][col][index]

    def markEdge(self, edges, row, col, index):
        while len(edges[row][col]) <= index:
            edges[row][col] += [False]
        edges[row][col][index] = True

    def findHorizEdgeIndex(self, edges, row, min_col, max_col):
        # Find a valid index
        i = 0
        while True:
            valid = True
            for col in range(min_col, max_col + 1):
                if self.isEdgeMarked(edges, row, col, i):
                    valid = False
                    break
            if valid:
                break
            i += 1

        # Mark chosen index as used
        for col in range(min_col, max_col + 1):
            self.markEdge(edges, row, col, i)
        return i

    def findVertEdgeIndex(self, edges, col, min_row, max_row):
        # Find a valid index
        i = 0
        while True:
            valid = True
            for row in range(min_row, max_row + 1):
                if self.isEdgeMarked(edges, row, col, i):
                    valid = False
                    break
            if valid:
                break
            i += 1

        # Mark chosen index as used
        for row in range(min_row, max_row + 1):
            self.markEdge(edges, row, col, i)
        return i

    def routeEdge(self, horiz_edges, vert_edges, edge_valid, start, end, color):
        edge = DisassemblerEdge(color, end)

        # Find edge index for initial outgoing line
        i = 0
        while True:
            if not self.isEdgeMarked(vert_edges, start.row + 1, start.col + 1, i):
                break
            i += 1
        self.markEdge(vert_edges, start.row + 1, start.col + 1, i)
        edge.addPoint(start.row + 1, start.col + 1)
        edge.start_index = i
        horiz = False

        # Find valid column for moving vertically to the target node
        if end.row < (start.row + 1):
            min_row = end.row
            max_row = start.row + 1
        else:
            min_row = start.row + 1
            max_row = end.row
        col = start.col + 1
        if min_row != max_row:
            ofs = 0
            while True:
                col = start.col + 1 - ofs
                if col >= 0:
                    valid = True
                    for row in range(min_row, max_row + 1):
                        if not edge_valid[row][col]:
                            valid = False
                            break
                    if valid:
                        break

                col = start.col + 1 + ofs
                if col < len(edge_valid[min_row]):
                    valid = True
                    for row in range(min_row, max_row + 1):
                        if not edge_valid[row][col]:
                            valid = False
                            break
                    if valid:
                        break

                ofs += 1

        if col != (start.col + 1):
            # Not in same column, need to generate a line for moving to the correct column
            if col < (start.col + 1):
                min_col = col
                max_col = start.col + 1
            else:
                min_col = start.col + 1
                max_col = col
            index = self.findHorizEdgeIndex(horiz_edges, start.row + 1, min_col, max_col)
            edge.addPoint(start.row + 1, col, index)
            horiz = True

        if end.row != (start.row + 1):
            # Not in same row, need to generate a line for moving to the correct row
            index = self.findVertEdgeIndex(vert_edges, col, min_row, max_row)
            edge.addPoint(end.row, col, index)
            horiz = False

        if col != (end.col + 1):
            # Not in ending column, need to generate a line for moving to the correct column
            if col < (end.col + 1):
                min_col = col
                max_col = end.col + 1
            else:
                min_col = end.col + 1
                max_col = col
            index = self.findHorizEdgeIndex(horiz_edges, end.row, min_col, max_col)
            edge.addPoint(end.row, end.col + 1, index)
            horiz = True

        # If last line was horizontal, choose the ending edge index for the incoming edge
        if horiz:
            index = self.findVertEdgeIndex(vert_edges, end.col + 1, end.row, end.row)
            edge.points[len(edge.points) - 1][2] = index

        return edge

    def renderFunction(self, func):
        # Create render nodes
        self.blocks = {}
        for block in list(func.blocks.values()):
            self.blocks[block.entry] = DisassemblerBlock(block)
            self.prepareGraphNode(self.blocks[block.entry])

        # Populate incoming lists
        for block in list(self.blocks.values()):
            for edge in block.block.exits:
                self.blocks[edge].incoming += [block.block.entry]

        # Construct acyclic graph where each node is used as an edge exactly once
        block = func.blocks[func.entry]
        visited = [func.entry]
        queue = [self.blocks[func.entry]]
        changed = True

        while changed:
            changed = False

            # First pick nodes that have single entry points
            while len(queue) > 0:
                block = queue.pop()

                for edge in block.block.exits:
                    if edge in visited:
                        continue

                    # If node has no more unseen incoming edges, add it to the graph layout now
                    if len(self.blocks[edge].incoming) == 1:
                        self.blocks[edge].incoming.remove(block.block.entry)
                        block.new_exits += [edge]
                        queue += [self.blocks[edge]]
                        visited += [edge]
                        changed = True

            # No more nodes satisfy constraints, pick a node to continue constructing the graph
            best = None
            for block in list(self.blocks.values()):
                if block.block.entry not in visited:
                    continue
                for edge in block.block.exits:
                    if edge in visited:
                        continue
                    if (best is None) or (len(self.blocks[edge].incoming) < best_edges) or \
                            ((len(self.blocks[edge].incoming) == best_edges) and (edge < best)):
                        best = edge
                        best_edges = len(self.blocks[edge].incoming)
                        best_parent = block

            if best is not None:
                self.blocks[best].incoming.remove(best_parent.block.entry)
                best_parent.new_exits += [best]
                visited += [best]
                changed = True

        # Compute graph layout from bottom up
        self.computeGraphLayout(self.blocks[func.entry])

        # Prepare edge routing
        horiz_edges = [None] * (self.blocks[func.entry].row_count + 1)
        vert_edges = [None] * (self.blocks[func.entry].row_count + 1)
        edge_valid = [None] * (self.blocks[func.entry].row_count + 1)
        for row in range(0, self.blocks[func.entry].row_count + 1):
            horiz_edges[row] = [None] * (self.blocks[func.entry].col_count + 1)
            vert_edges[row] = [None] * (self.blocks[func.entry].col_count + 1)
            edge_valid[row] = [True] * (self.blocks[func.entry].col_count + 1)
            for col in range(0, self.blocks[func.entry].col_count + 1):
                horiz_edges[row][col] = []
                vert_edges[row][col] = []
        for block in list(self.blocks.values()):
            edge_valid[block.row][block.col + 1] = False

        # Perform edge routing
        for block in list(self.blocks.values()):
            start = block
            for edge in block.block.exits:
                end = self.blocks[edge]
                color = Qt.black
                if edge == block.block.true_path:
                    color = QColor(0, 144, 0)
                elif edge == block.block.false_path:
                    color = QColor(144, 0, 0)
                start.edges += [self.routeEdge(horiz_edges, vert_edges, edge_valid, start, end, color)]

        # Compute edge counts for each row and column
        col_edge_count = [0] * (self.blocks[func.entry].col_count + 1)
        row_edge_count = [0] * (self.blocks[func.entry].row_count + 1)
        for row in range(0, self.blocks[func.entry].row_count + 1):
            for col in range(0, self.blocks[func.entry].col_count + 1):
                if len(horiz_edges[row][col]) > row_edge_count[row]:
                    row_edge_count[row] = len(horiz_edges[row][col])
                if len(vert_edges[row][col]) > col_edge_count[col]:
                    col_edge_count[col] = len(vert_edges[row][col])

        # Compute row and column sizes
        col_width = [0] * (self.blocks[func.entry].col_count + 1)
        row_height = [0] * (self.blocks[func.entry].row_count + 1)
        for block in list(self.blocks.values()):
            if (int(block.width / 2)) > col_width[block.col]:
                col_width[block.col] = int(block.width / 2)
            if (int(block.width / 2)) > col_width[block.col + 1]:
                col_width[block.col + 1] = int(block.width / 2)
            if int(block.height) > row_height[block.row]:
                row_height[block.row] = int(block.height)

        # Compute row and column positions
        col_x = [0] * self.blocks[func.entry].col_count
        row_y = [0] * self.blocks[func.entry].row_count
        self.col_edge_x = [0] * (self.blocks[func.entry].col_count + 1)
        self.row_edge_y = [0] * (self.blocks[func.entry].row_count + 1)
        x = 16
        for i in range(0, self.blocks[func.entry].col_count):
            self.col_edge_x[i] = x
            x += 8 * col_edge_count[i]
            col_x[i] = x
            x += col_width[i]
        y = 16
        for i in range(0, self.blocks[func.entry].row_count):
            self.row_edge_y[i] = y
            y += 8 * row_edge_count[i]
            row_y[i] = y
            y += row_height[i]
        self.col_edge_x[self.blocks[func.entry].col_count] = x
        self.row_edge_y[self.blocks[func.entry].row_count] = y
        self.width = x + 16 + (8 * col_edge_count[self.blocks[func.entry].col_count])
        self.height = y + 16 + (8 * row_edge_count[self.blocks[func.entry].row_count])

        # Compute node positions
        for block in list(self.blocks.values()):
            block.x = int((col_x[block.col] + col_width[block.col] + 4 * col_edge_count[block.col + 1]) - (block.width / 2))
            if (block.x + block.width) > (col_x[block.col] + col_width[block.col] + col_width[block.col + 1] + 8 * col_edge_count[block.col + 1]):
                block.x = int((col_x[block.col] + col_width[block.col] + col_width[block.col + 1] + 8 * col_edge_count[block.col + 1]) - block.width)
            block.y = row_y[block.row]

        # Precompute coordinates for edges
        for block in list(self.blocks.values()):
            for edge in block.edges:
                start = edge.points[0]
                start_row = start[0]
                start_col = start[1]
                last_index = edge.start_index
                last_pt = QPoint(self.col_edge_x[start_col] + (8 * last_index) + 4,
                    block.y + block.height + 4 - (2 * self.charWidth))
                pts = [last_pt]

                for i in range(0, len(edge.points)):
                    end = edge.points[i]
                    end_row = end[0]
                    end_col = end[1]
                    last_index = end[2]
                    if start_col == end_col:
                        new_pt = QPoint(last_pt.x(), self.row_edge_y[end_row] + (8 * last_index) + 4)
                    else:
                        new_pt = QPoint(self.col_edge_x[end_col] + (8 * last_index) + 4, last_pt.y())
                    pts += [new_pt]
                    last_pt = new_pt
                    start_col = end_col

                new_pt = QPoint(last_pt.x(), edge.dest.y + self.charWidth - 1)
                pts += [new_pt]
                edge.polyline = pts

                pts = [QPoint(new_pt.x() - 3, new_pt.y() - 6), QPoint(new_pt.x() + 3, new_pt.y() - 6), new_pt]
                edge.arrow = pts

        # Adjust scroll bars for new size
        areaSize = self.viewport().size()
        self.adjustSize(areaSize.width(), areaSize.height())

        if self.desired_pos:
            # There was a position saved, navigate to it
            self.horizontalScrollBar().setValue(self.desired_pos[0])
            self.verticalScrollBar().setValue(self.desired_pos[1])
        elif self.cur_instr is not None:
            self.show_cur_instr()
        else:
            # Ensure start node is visible
            start_x = self.blocks[func.entry].x + self.renderXOfs + int(self.blocks[func.entry].width / 2)
            self.horizontalScrollBar().setValue(start_x - int(areaSize.width() / 2))
            self.verticalScrollBar().setValue(0)

        self.update_id = func.update_id
        self.ready = True
        self.viewport().update(0, 0, areaSize.width(), areaSize.height())

    def updateTimerEvent(self):
        status = self.analysis.status
        if status != self.status:
            self.status = status
            self.statusUpdated.emit(self)

        if self.function is None:
            return

        if self.ready:
            # Check for updated code
            self.analysis.lock.acquire()
            if self.update_id != self.analysis.functions[self.function].update_id:
                self.renderFunction(self.analysis.functions[self.function])
            self.analysis.lock.release()
            return

        # View not up to date, check to see if active function is ready
        self.analysis.lock.acquire()
        if self.function in self.analysis.functions:
            if self.analysis.functions[self.function].ready:
                # Active function now ready, generate graph
                self.renderFunction(self.analysis.functions[self.function])
        self.analysis.lock.release()

    def show_cur_instr(self):
        for block in list(self.blocks.values()):
            row = len(block.block.header_text.lines)
            for instr in block.block.instrs:
                if self.cur_instr == instr.addr:
                    x = block.x + int(block.width / 2)
                    y = block.y + (2 * self.charWidth) + int((row + 0.5) * self.charHeight)
                    self.horizontalScrollBar().setValue(x + self.renderXOfs -
                        int(self.horizontalScrollBar().pageStep() / 2))
                    self.verticalScrollBar().setValue(y + self.renderYOfs -
                        int(self.verticalScrollBar().pageStep() / 2))
                    return
                row += len(instr.text.lines)

    def navigate(self, addr):
        # Check to see if address is within current function
        for block in list(self.blocks.values()):
            row = len(block.block.header_text.lines)
            for instr in block.block.instrs:
                if (addr >= instr.addr) and (addr < (instr.addr + len(instr.opcode))):
                    self.cur_instr = instr.addr
                    self.show_cur_instr()
                    self.viewport().update()
                    return True
                row += len(instr.text.lines)

        # Check other functions for this address
        func, instr = self.analysis.find_instr(addr)
        if func is not None:
            self.function = func
            self.cur_instr = instr
            self.highlight_token = None
            self.ready = False
            self.desired_pos = None
            self.viewport().update()
            return True

        return False

    def make_proc(self, addr):
        # Create a procedure at the requested address if one does not already exist
        if self.data.architecture() is None:
            # Architecture not defined yet, ask the user and set it now
            arch_dlg = ArchitectureDialog(self)
            if arch_dlg.exec_() == QDialog.Rejected:
                return False
            self.data.default_arch = arch_dlg.result

        self.analysis.lock.acquire()
        if addr not in self.analysis.functions:
            self.analysis.queue.append(addr)
        self.analysis.lock.release()

        self.function = addr
        self.cur_instr = None
        self.highlight_token = None
        self.ready = False
        self.desired_pos = None
        self.viewport().update()
        return True

    def navigate_to_history_entry(self, entry):
        self.function = entry.function
        self.ready = False
        self.desired_pos = [entry.scroll_x, entry.scroll_y]
        self.cur_instr = entry.cur_instr
        self.highlight_token = entry.highlight_token
        self.viewport().update()

    def get_history_entry(self):
        return DisassemblerHistoryEntry(self)

    def fontChanged(self):
        self.initFont()

        if self.ready:
            # Rerender function to update layout
            self.analysis.lock.acquire()
            self.renderFunction(self.analysis.functions[self.function])
            self.analysis.lock.release()

    def getPriority(data, ext):
        if Analysis.isPreferredForFile(data):
            return 80
        return 0
    getPriority = staticmethod(getPriority)

    def getViewName():
        return "Disassembler"
    getViewName = staticmethod(getViewName)

    def getShortViewName():
        return "Disassembler"
    getShortViewName = staticmethod(getShortViewName)

    def handlesNavigationType(name):
        return (name == "disassembler") or (name == "make_proc")
    handlesNavigationType = staticmethod(handlesNavigationType)
Beispiel #59
class MutablePolyDenseMatrix(MutableDenseMatrix):
    """
    A mutable matrix of objects from the poly module, or of objects that can
    operate with them.

    Examples
    ========

    >>> from sympy.polys.polymatrix import PolyMatrix
    >>> from sympy import Symbol, Poly, ZZ
    >>> x = Symbol('x')
    >>> pm1 = PolyMatrix([[Poly(x**2, x), Poly(-x, x)], [Poly(x**3, x), Poly(-1 + x, x)]])
    >>> v1 = PolyMatrix([[1, 0], [-1, 0]])
    >>> pm1*v1
    Matrix([
    [    Poly(x**2 + x, x, domain='ZZ'), Poly(0, x, domain='ZZ')],
    [Poly(x**3 - x + 1, x, domain='ZZ'), Poly(0, x, domain='ZZ')]])

    >>> pm1.ring
    ZZ[x]

    >>> v1*pm1
    Matrix([
    [ Poly(x**2, x, domain='ZZ'), Poly(-x, x, domain='ZZ')],
    [Poly(-x**2, x, domain='ZZ'),  Poly(x, x, domain='ZZ')]])

    >>> pm2 = PolyMatrix([[Poly(x**2, x, domain='QQ'), Poly(0, x, domain='QQ'), Poly(1, x, domain='QQ'), \
            Poly(x**3, x, domain='QQ'), Poly(0, x, domain='QQ'), Poly(-x**3, x, domain='QQ')]])
    >>> v2 = PolyMatrix([1, 0, 0, 0, 0, 0], ring=ZZ)
    >>> v2.ring
    ZZ
    >>> pm2*v2
    Matrix([[Poly(x**2, x, domain='QQ')]])

    """
    _class_priority = 10
    # we don't want to sympify the elements of PolyMatrix
    _sympify = staticmethod(lambda x: x)

    def __init__(self, *args, **kwargs):
        # if any non-Poly element is given as input then
        # 'ring' defaults to 'EX'
        ring = kwargs.get('ring', EX)
        if all(isinstance(p, Poly) for p in self._mat) and self._mat:
            domain = tuple([p.domain[p.gens] for p in self._mat])
            ring = domain[0]
            for i in range(1, len(domain)):
                ring = ring.unify(domain[i])
        self.ring = ring

    def _eval_matrix_mul(self, other):
        self_cols = self.cols
        other_rows, other_cols = other.rows, other.cols
        other_len = other_rows * other_cols
        new_mat_rows = self.rows
        new_mat_cols = other.cols

        new_mat = [0] * new_mat_rows * new_mat_cols

        if self.cols != 0 and other.rows != 0:
            mat = self._mat
            other_mat = other._mat
            for i in range(len(new_mat)):
                row, col = i // new_mat_cols, i % new_mat_cols
                row_indices = range(self_cols * row, self_cols * (row + 1))
                col_indices = range(col, other_len, other_cols)
                vec = (mat[a] * other_mat[b]
                       for a, b in zip(row_indices, col_indices))
                # 'Add' shouldn't be used here
                new_mat[i] = sum(vec)

        return self.__class__(new_mat_rows, new_mat_cols, new_mat, copy=False)

    def _eval_scalar_mul(self, other):
        mat = [Poly(a.as_expr() * other, *a.gens) if isinstance(a, Poly)
               else a * other
               for a in self._mat]
        return self.__class__(self.rows, self.cols, mat, copy=False)

    def _eval_scalar_rmul(self, other):
        mat = [Poly(other * a.as_expr(), *a.gens) if isinstance(a, Poly)
               else other * a
               for a in self._mat]
        return self.__class__(self.rows, self.cols, mat, copy=False)
Beispiel #60
    def test_print_result(self):
        print_result(self.console_printer,
                     None,
                     self.file_diff_dict,
                     'illegal value',
                     {})

        with simulate_console_inputs(0):
            print_result(self.console_printer,
                         self.section,
                         self.file_diff_dict,
                         Result('origin', 'msg', diffs={}),
                         {})

        with make_temp() as testfile_path:
            file_dict = {
                testfile_path: ['1\n', '2\n', '3\n'],
                'f_b': ['1', '2', '3']
            }
            diff = Diff(file_dict[testfile_path])
            diff.delete_line(2)
            diff.change_line(3, '3\n', '3_changed\n')

            ApplyPatchAction.is_applicable = staticmethod(
                lambda *args: True)

            # The interaction must be closed by the user with `0` if it is
            # not supplied as a param
            with simulate_console_inputs('INVALID',
                                         -1,
                                         1,
                                         0,
                                         3) as input_generator:
                curr_section = Section('')
                print_section_beginning(self.console_printer, curr_section)
                print_result(self.console_printer,
                             curr_section,
                             self.file_diff_dict,
                             Result('origin', 'msg', diffs={
                                    testfile_path: diff}),
                             file_dict)
                self.assertEqual(input_generator.last_input, 3)

                self.file_diff_dict.clear()

                with open(testfile_path) as f:
                    self.assertEqual(f.readlines(), ['1\n', '3_changed\n'])

                os.remove(testfile_path + '.orig')

                name, section = get_action_info(curr_section,
                                                TestAction().get_metadata(),
                                                failed_actions=set())
                self.assertEqual(input_generator.last_input, 4)
                self.assertEqual(str(section), " {param : '3'}")
                self.assertEqual(name, 'TestAction')

        # Check if the user is asked for the parameter only the first time.
        # Use OpenEditorAction that needs this parameter (editor command).
        with simulate_console_inputs(1, 'test_editor', 0, 1, 0) as generator:
            OpenEditorAction.is_applicable = staticmethod(lambda *args: True)

            patch_result = Result('origin', 'msg', diffs={testfile_path: diff})
            patch_result.file = 'f_b'

            print_result(self.console_printer,
                         curr_section,
                         self.file_diff_dict,
                         patch_result,
                         file_dict)
            # choose action, choose editor, choose no action (-1 -> 2)
            self.assertEqual(generator.last_input, 2)

            # It shouldn't ask for the parameter again
            print_result(self.console_printer,
                         curr_section,
                         self.file_diff_dict,
                         patch_result,
                         file_dict)
            self.assertEqual(generator.last_input, 4)