class Operation(object):
  """An operation representing the live version of a work item specification.

  An operation can have one or more outputs and for each output it can have
  one or more receiver operations that will take that as input.
  """

  def __init__(self, operation_name, spec, counter_factory, state_sampler):
    """Initializes a worker operation instance.

    Args:
      operation_name: The system name assigned by the runner for this
        operation.
      spec: An operation_specs.Worker* instance.
      counter_factory: The CounterFactory to use for our counters.
      state_sampler: The StateSampler for the current operation.
    """
    self.operation_name = operation_name
    self.spec = spec
    self.counter_factory = counter_factory
    self.consumers = collections.defaultdict(list)

    # These are overwritten in the legacy harness.
    self.step_name = operation_name
    self.metrics_container = MetricsContainer(self.step_name)
    self.scoped_metrics_container = ScopedMetricsContainer(
        self.metrics_container)

    self.state_sampler = state_sampler
    self.scoped_start_state = self.state_sampler.scoped_state(
        self.operation_name, 'start')
    self.scoped_process_state = self.state_sampler.scoped_state(
        self.operation_name, 'process')
    self.scoped_finish_state = self.state_sampler.scoped_state(
        self.operation_name, 'finish')
    # TODO(ccy): the '-abort' state can be added when the abort is supported in
    # Operations.
    self.receivers = []

  def start(self):
    """Start operation."""
    self.debug_logging_enabled = logging.getLogger().isEnabledFor(
        logging.DEBUG)
    # Everything except WorkerSideInputSource, which is not a
    # top-level operation, should have output_coders
    if getattr(self.spec, 'output_coders', None):
      self.receivers = [ConsumerSet(self.counter_factory, self.step_name,
                                    i, self.consumers[i], coder)
                        for i, coder in enumerate(self.spec.output_coders)]

  def finish(self):
    """Finish operation."""
    pass

  def process(self, o):
    """Process element in operation."""
    pass

  def output(self, windowed_value, output_index=0):
    """Routes windowed_value to the receivers for the given output index."""
    cython.cast(Receiver, self.receivers[output_index]).receive(windowed_value)

  def add_receiver(self, operation, output_index=0):
    """Adds a receiver operation for the specified output."""
    self.consumers[output_index].append(operation)

  def progress_metrics(self):
    return beam_fn_api_pb2.Metrics.PTransform(
        processed_elements=beam_fn_api_pb2.Metrics.PTransform.ProcessedElements(
            measured=beam_fn_api_pb2.Metrics.PTransform.Measured(
                total_time_spent=(
                    self.scoped_start_state.sampled_seconds()
                    + self.scoped_process_state.sampled_seconds()
                    + self.scoped_finish_state.sampled_seconds()),
                # Multi-output operations should override this.
                output_element_counts=(
                    # If there is exactly one output, we can unambiguously
                    # fix its name later, which we do.
                    # TODO(robertwb): Plumb the actual name here.
                    {'ONLY_OUTPUT': self.receivers[0].opcounter
                                    .element_counter.value()}
                    if len(self.receivers) == 1
                    else None))),
        user=self.metrics_container.to_runner_api())

  def __str__(self):
    """Generates a useful string for this object.

    Compactly displays interesting fields.  In particular, pickled
    fields are not displayed.  Note that we collapse the fields of the
    contained Worker* object into this object, since there is a 1-1
    mapping between Operation and operation_specs.Worker*.

    Returns:
      Compact string representing this object.
    """
    return self.str_internal()

  def str_internal(self, is_recursive=False):
    """Internal helper for __str__ that supports recursion.

    When recursing on receivers, keep the output short.

    Args:
      is_recursive: whether to omit some details, particularly receivers.
    Returns:
      Compact string representing this object.
    """
    printable_name = self.__class__.__name__
    if hasattr(self, 'step_name'):
      printable_name += ' %s' % self.step_name
      if is_recursive:
        # If we have a step name, stop here, no more detail needed.
        return '<%s>' % printable_name

    if self.spec is None:
      printable_fields = []
    else:
      printable_fields = operation_specs.worker_printable_fields(self.spec)

    if not is_recursive and getattr(self, 'receivers', []):
      printable_fields.append('receivers=[%s]' % ', '.join([
          str(receiver) for receiver in self.receivers]))

    return '<%s %s>' % (printable_name, ', '.join(printable_fields))
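
A minimal wiring sketch for the snippet above. The spec, counter_factory, state_sampler, and wv objects are hypothetical stand-ins (none are defined here); the point is only to show that receivers are registered before start() builds the ConsumerSets:

# Hypothetical usage sketch; 'spec', 'counter_factory', 'state_sampler'
# and 'wv' are illustrative, not defined in the snippet above.
class PassThroughOperation(Operation):
  """Toy subclass that forwards each element to its first output."""

  def process(self, o):
    with self.scoped_process_state:
      self.output(o)  # delivered through self.receivers[0]

producer = PassThroughOperation('producer', spec, counter_factory,
                                state_sampler)
consumer = PassThroughOperation('consumer', spec, counter_factory,
                                state_sampler)

producer.add_receiver(consumer)  # route output 0 into the consumer
producer.start()                 # builds receivers from spec.output_coders
consumer.start()
producer.process(wv)             # wv flows on to consumer.process(wv)
producer.finish()
consumer.finish()
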
Example #2
class Operation(object):
    """An operation representing the live version of a work item specification.

  An operation can have one or more outputs and for each output it can have
  one or more receiver operations that will take that as input.
  """
    def __init__(
            self,
            name_context,  # type: Union[str, common.NameContext]
            spec,
            counter_factory,
            state_sampler  # type: StateSampler
    ):
        """Initializes a worker operation instance.

    Args:
      name_context: A NameContext instance or string (deprecated), with the
        name information for this operation.
      spec: An operation_specs.Worker* instance.
      counter_factory: The CounterFactory to use for our counters.
      state_sampler: The StateSampler for the current operation.
    """
        if isinstance(name_context, common.NameContext):
            # TODO(BEAM-4028): Clean this up once it's completely migrated.
            # We use the specific operation name that is used for metrics and state
            # sampling.
            self.name_context = name_context
        else:
            self.name_context = common.NameContext(name_context)

        self.spec = spec
        self.counter_factory = counter_factory
        self.execution_context = None  # type: Optional[ExecutionContext]
        self.consumers = collections.defaultdict(
            list)  # type: DefaultDict[int, List[Operation]]

        # These are overwritten in the legacy harness.
        self.metrics_container = MetricsContainer(
            self.name_context.metrics_name())

        self.state_sampler = state_sampler
        self.scoped_start_state = self.state_sampler.scoped_state(
            self.name_context,
            'start',
            metrics_container=self.metrics_container)
        self.scoped_process_state = self.state_sampler.scoped_state(
            self.name_context,
            'process',
            metrics_container=self.metrics_container)
        self.scoped_finish_state = self.state_sampler.scoped_state(
            self.name_context,
            'finish',
            metrics_container=self.metrics_container)
        # TODO(ccy): the '-abort' state can be added when the abort is supported in
        # Operations.
        self.receivers = []  # type: List[ConsumerSet]
        # Legacy workers cannot call setup() until after setting additional state
        # on the operation.
        self.setup_done = False
        self.step_name = None  # type: Optional[str]

    def setup(self):
        # type: () -> None
        """Set up operation.

    This must be called before any other methods of the operation."""
        with self.scoped_start_state:
            self.debug_logging_enabled = logging.getLogger().isEnabledFor(
                logging.DEBUG)
            # Everything except WorkerSideInputSource, which is not a
            # top-level operation, should have output_coders
            # TODO(pabloem): Clarify which step name should be used here.
            if getattr(self.spec, 'output_coders', None):
                self.receivers = [
                    ConsumerSet.create(self.counter_factory,
                                       self.name_context.logging_name(), i,
                                       self.consumers[i], coder)
                    for i, coder in enumerate(self.spec.output_coders)
                ]
        self.setup_done = True

    def start(self):
        # type: () -> None
        """Start operation."""
        if not self.setup_done:
            # For legacy workers.
            self.setup()

    def process(self, o):
        # type: (WindowedValue) -> None
        """Process element in operation."""
        pass

    def finalize_bundle(self):
        # type: () -> None
        """Finalizes the bundle; base operations have nothing to finalize."""
        pass

    def needs_finalization(self):
        """Returns True if finalize_bundle() must be called for this bundle."""
        return False

    def try_split(self, fraction_of_remainder):
        # type: (...) -> Optional[Any]
        """Attempts to split remaining work; base operations cannot split."""
        return None

    def current_element_progress(self):
        """Returns progress of the current element, or None if unknown."""
        return None

    def finish(self):
        # type: () -> None
        """Finish operation."""
        pass

    def teardown(self):
        # type: () -> None
        """Tear down operation.

    No other methods of this operation should be called after this."""
        pass

    def reset(self):
        # type: () -> None
        """Resets per-bundle metrics so the operation can be reused."""
        self.metrics_container.reset()

    def output(self, windowed_value, output_index=0):
        # type: (WindowedValue, int) -> None
        """Routes windowed_value to the receivers for the given output index."""
        cython.cast(Receiver,
                    self.receivers[output_index]).receive(windowed_value)

    def add_receiver(self, operation, output_index=0):
        # type: (Operation, int) -> None
        """Adds a receiver operation for the specified output."""
        self.consumers[output_index].append(operation)

    def progress_metrics(self):
        # type: () -> beam_fn_api_pb2.Metrics.PTransform
        return beam_fn_api_pb2.Metrics.PTransform(
            processed_elements=beam_fn_api_pb2.Metrics.PTransform.
            ProcessedElements(
                measured=beam_fn_api_pb2.Metrics.PTransform.Measured(
                    total_time_spent=(
                        self.scoped_start_state.sampled_seconds() +
                        self.scoped_process_state.sampled_seconds() +
                        self.scoped_finish_state.sampled_seconds()),
                    # Multi-output operations should override this.
                    output_element_counts=(
                        # If there is exactly one output, we can unambiguously
                        # fix its name later, which we do.
                        # TODO(robertwb): Plumb the actual name here.
                        {
                            'ONLY_OUTPUT':
                            self.receivers[0].opcounter.element_counter.value(
                            )
                        } if len(self.receivers) == 1 else None))),
            user=self.metrics_container.to_runner_api())

    def monitoring_infos(self, transform_id):
        # type: (str) -> Dict[FrozenSet, metrics_pb2.MonitoringInfo]
        """Returns the list of MonitoringInfos collected by this operation."""
        all_monitoring_infos = self.execution_time_monitoring_infos(
            transform_id)
        all_monitoring_infos.update(
            self.pcollection_count_monitoring_infos(transform_id))
        all_monitoring_infos.update(self.user_monitoring_infos(transform_id))
        return all_monitoring_infos

    def pcollection_count_monitoring_infos(self, transform_id):
        """Returns the element count MonitoringInfo collected by this operation."""
        if len(self.receivers) == 1:
            # If there is exactly one output, we can unambiguously
            # fix its name later, which we do.
            # TODO(robertwb): Plumb the actual name here.
            elem_count_mi = monitoring_infos.int64_counter(
                monitoring_infos.ELEMENT_COUNT_URN,
                self.receivers[0].opcounter.element_counter.value(),
                ptransform=transform_id,
                tag='ONLY_OUTPUT' if len(self.receivers) == 1 else str(None),
            )

            # mean_byte_counter.value() returns (mean, sum, count, min, max);
            # unpack into names that do not shadow Python built-ins.
            (unused_mean, sum_value, count, min_value, max_value) = (
                self.receivers[0].opcounter.mean_byte_counter.value())
            metric = metrics_pb2.Metric(
                distribution_data=metrics_pb2.DistributionData(
                    int_distribution_data=metrics_pb2.IntDistributionData(
                        count=count, sum=sum_value, min=min_value,
                        max=max_value)))
            sampled_byte_count = monitoring_infos.int64_distribution(
                monitoring_infos.SAMPLED_BYTE_SIZE_URN,
                metric,
                ptransform=transform_id,
                tag='ONLY_OUTPUT' if len(self.receivers) == 1 else str(None),
            )
            return {
                monitoring_infos.to_key(elem_count_mi): elem_count_mi,
                monitoring_infos.to_key(sampled_byte_count): sampled_byte_count
            }
        return {}

    def user_monitoring_infos(self, transform_id):
        """Returns the user MonitoringInfos collected by this operation."""
        return self.metrics_container.to_runner_api_monitoring_infos(
            transform_id)

    def execution_time_monitoring_infos(self, transform_id):
        # type: (str) -> Dict[FrozenSet, metrics_pb2.MonitoringInfo]
        total_time_spent_msecs = (
            self.scoped_start_state.sampled_msecs_int() +
            self.scoped_process_state.sampled_msecs_int() +
            self.scoped_finish_state.sampled_msecs_int())
        mis = [
            monitoring_infos.int64_counter(
                monitoring_infos.START_BUNDLE_MSECS_URN,
                self.scoped_start_state.sampled_msecs_int(),
                ptransform=transform_id),
            monitoring_infos.int64_counter(
                monitoring_infos.PROCESS_BUNDLE_MSECS_URN,
                self.scoped_process_state.sampled_msecs_int(),
                ptransform=transform_id),
            monitoring_infos.int64_counter(
                monitoring_infos.FINISH_BUNDLE_MSECS_URN,
                self.scoped_finish_state.sampled_msecs_int(),
                ptransform=transform_id),
            monitoring_infos.int64_counter(monitoring_infos.TOTAL_MSECS_URN,
                                           total_time_spent_msecs,
                                           ptransform=transform_id),
        ]
        return {monitoring_infos.to_key(mi): mi for mi in mis}

    def __str__(self):
        """Generates a useful string for this object.

    Compactly displays interesting fields.  In particular, pickled
    fields are not displayed.  Note that we collapse the fields of the
    contained Worker* object into this object, since there is a 1-1
    mapping between Operation and operation_specs.Worker*.

    Returns:
      Compact string representing this object.
    """
        return self.str_internal()

    def str_internal(self, is_recursive=False):
        """Internal helper for __str__ that supports recursion.

    When recursing on receivers, keep the output short.

    Args:
      is_recursive: whether to omit some details, particularly receivers.
    Returns:
      Compact string representing this object.
    """
        printable_name = self.__class__.__name__
        if hasattr(self, 'step_name'):
            printable_name += ' %s' % self.name_context.logging_name()
            if is_recursive:
                # If we have a step name, stop here, no more detail needed.
                return '<%s>' % printable_name

        if self.spec is None:
            printable_fields = []
        else:
            printable_fields = operation_specs.worker_printable_fields(
                self.spec)

        if not is_recursive and getattr(self, 'receivers', []):
            printable_fields.append(
                'receivers=[%s]' %
                ', '.join([str(receiver) for receiver in self.receivers]))

        return '<%s %s>' % (printable_name, ', '.join(printable_fields))
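
The second variant adds an explicit lifecycle around each bundle. A hedged sketch of the expected call order, assuming an already-constructed operation op and an iterable bundle of WindowedValues (both hypothetical):

# Hypothetical lifecycle sketch; 'op' and 'bundle' are not defined above.
op.setup()                   # once, before any other call
op.start()                   # begin a bundle (calls setup() for legacy workers)
for wv in bundle:
    op.process(wv)
op.finish()                  # end the bundle
if op.needs_finalization():  # deferred work after the bundle commits
    op.finalize_bundle()
op.reset()                   # clear per-bundle metrics before reuse
op.teardown()                # once, when the operation is retired
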
Example #3
class Operation(object):
    """An operation representing the live version of a work item specification.

  An operation can have one or more outputs and for each output it can have
  one or more receiver operations that will take that as input.
  """
    def __init__(self, name_context, spec, counter_factory, state_sampler):
        """Initializes a worker operation instance.

    Args:
      name_context: A NameContext instance or string (deprecated), with the
        name information for this operation.
      spec: An operation_specs.Worker* instance.
      counter_factory: The CounterFactory to use for our counters.
      state_sampler: The StateSampler for the current operation.
    """
        if isinstance(name_context, common.NameContext):
            # TODO(BEAM-4028): Clean this up once it's completely migrated.
            # We use the specific operation name that is used for metrics and state
            # sampling.
            self.name_context = name_context
        else:
            self.name_context = common.NameContext(name_context)

        self.spec = spec
        self.counter_factory = counter_factory
        self.consumers = collections.defaultdict(list)

        # These are overwritten in the legacy harness.
        self.metrics_container = MetricsContainer(
            self.name_context.metrics_name())

        self.state_sampler = state_sampler
        self.scoped_start_state = self.state_sampler.scoped_state(
            self.name_context,
            'start',
            metrics_container=self.metrics_container)
        self.scoped_process_state = self.state_sampler.scoped_state(
            self.name_context,
            'process',
            metrics_container=self.metrics_container)
        self.scoped_finish_state = self.state_sampler.scoped_state(
            self.name_context,
            'finish',
            metrics_container=self.metrics_container)
        # TODO(ccy): the '-abort' state can be added when the abort is supported in
        # Operations.
        self.receivers = []

    def start(self):
        """Start operation."""
        self.debug_logging_enabled = logging.getLogger().isEnabledFor(
            logging.DEBUG)
        # Everything except WorkerSideInputSource, which is not a
        # top-level operation, should have output_coders
        # TODO(pabloem): Clarify which step name should be used here.
        if getattr(self.spec, 'output_coders', None):
            self.receivers = [
                ConsumerSet(self.counter_factory,
                            self.name_context.logging_name(), i,
                            self.consumers[i], coder)
                for i, coder in enumerate(self.spec.output_coders)
            ]

    def process(self, o):
        """Process element in operation."""
        pass

    def finish(self):
        """Finish operation."""
        pass

    def output(self, windowed_value, output_index=0):
        """Routes windowed_value to the receivers for the given output index."""
        cython.cast(Receiver,
                    self.receivers[output_index]).receive(windowed_value)

    def add_receiver(self, operation, output_index=0):
        """Adds a receiver operation for the specified output."""
        self.consumers[output_index].append(operation)

    def progress_metrics(self):
        return beam_fn_api_pb2.Metrics.PTransform(
            processed_elements=beam_fn_api_pb2.Metrics.PTransform.
            ProcessedElements(
                measured=beam_fn_api_pb2.Metrics.PTransform.Measured(
                    total_time_spent=(
                        self.scoped_start_state.sampled_seconds() +
                        self.scoped_process_state.sampled_seconds() +
                        self.scoped_finish_state.sampled_seconds()),
                    # Multi-output operations should override this.
                    output_element_counts=(
                        # If there is exactly one output, we can unambiguously
                        # fix its name later, which we do.
                        # TODO(robertwb): Plumb the actual name here.
                        {
                            'ONLY_OUTPUT':
                            self.receivers[0].opcounter.element_counter.value(
                            )
                        } if len(self.receivers) == 1 else None))),
            user=self.metrics_container.to_runner_api())

    def __str__(self):
        """Generates a useful string for this object.

    Compactly displays interesting fields.  In particular, pickled
    fields are not displayed.  Note that we collapse the fields of the
    contained Worker* object into this object, since there is a 1-1
    mapping between Operation and operation_specs.Worker*.

    Returns:
      Compact string representing this object.
    """
        return self.str_internal()

    def str_internal(self, is_recursive=False):
        """Internal helper for __str__ that supports recursion.

    When recursing on receivers, keep the output short.

    Args:
      is_recursive: whether to omit some details, particularly receivers.
    Returns:
      Compact string representing this object.
    """
        printable_name = self.__class__.__name__
        if hasattr(self, 'step_name'):
            printable_name += ' %s' % self.name_context.logging_name()
            if is_recursive:
                # If we have a step name, stop here, no more detail needed.
                return '<%s>' % printable_name

        if self.spec is None:
            printable_fields = []
        else:
            printable_fields = operation_specs.worker_printable_fields(
                self.spec)

        if not is_recursive and getattr(self, 'receivers', []):
            printable_fields.append(
                'receivers=[%s]' %
                ', '.join([str(receiver) for receiver in self.receivers]))

        return '<%s %s>' % (printable_name, ', '.join(printable_fields))
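
A small sketch of consuming the progress proto that progress_metrics() builds above; op stands for a running single-output operation and is assumed rather than defined here:

# Hypothetical sketch; 'op' is an illustrative, already-started operation.
metrics = op.progress_metrics()
measured = metrics.processed_elements.measured
print('total seconds spent:', measured.total_time_spent)
# For a single-output operation this map holds {'ONLY_OUTPUT': <count>}.
print('output element counts:', dict(measured.output_element_counts))
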
Example #4
class Operation(object):
  """An operation representing the live version of a work item specification.

  An operation can have one or more outputs and for each output it can have
  one or more receiver operations that will take that as input.
  """

  def __init__(self, name_context, spec, counter_factory, state_sampler):
    """Initializes a worker operation instance.

    Args:
      name_context: A NameContext instance or string (deprecated), with the
        name information for this operation.
      spec: An operation_specs.Worker* instance.
      counter_factory: The CounterFactory to use for our counters.
      state_sampler: The StateSampler for the current operation.
    """
    if isinstance(name_context, common.NameContext):
      # TODO(BEAM-4028): Clean this up once it's completely migrated.
      # We use the specific operation name that is used for metrics and state
      # sampling.
      self.name_context = name_context
    else:
      self.name_context = common.NameContext(name_context)

    self.spec = spec
    self.counter_factory = counter_factory
    self.execution_context = None
    self.consumers = collections.defaultdict(list)

    # These are overwritten in the legacy harness.
    self.metrics_container = MetricsContainer(self.name_context.metrics_name())

    self.state_sampler = state_sampler
    self.scoped_start_state = self.state_sampler.scoped_state(
        self.name_context, 'start', metrics_container=self.metrics_container)
    self.scoped_process_state = self.state_sampler.scoped_state(
        self.name_context, 'process', metrics_container=self.metrics_container)
    self.scoped_finish_state = self.state_sampler.scoped_state(
        self.name_context, 'finish', metrics_container=self.metrics_container)
    # TODO(ccy): the '-abort' state can be added when the abort is supported in
    # Operations.
    self.receivers = []
    # Legacy workers cannot call setup() until after setting additional state
    # on the operation.
    self.setup_done = False

  def setup(self):
    with self.scoped_start_state:
      self.debug_logging_enabled = logging.getLogger().isEnabledFor(
          logging.DEBUG)
      # Everything except WorkerSideInputSource, which is not a
      # top-level operation, should have output_coders
      # TODO(pabloem): Clarify which step name should be used here.
      if getattr(self.spec, 'output_coders', None):
        self.receivers = [
            ConsumerSet.create(
                self.counter_factory,
                self.name_context.logging_name(),
                i,
                self.consumers[i], coder)
            for i, coder in enumerate(self.spec.output_coders)]
    self.setup_done = True

  def start(self):
    """Start operation."""
    if not self.setup_done:
      # For legacy workers.
      self.setup()

  def process(self, o):
    """Process element in operation."""
    pass

  def try_split(self, fraction_of_remainder):
    """Attempts to split remaining work; base operations cannot split."""
    return None

  def finish(self):
    """Finish operation."""
    pass

  def reset(self):
    """Resets per-bundle metrics so the operation can be reused."""
    self.metrics_container.reset()

  def output(self, windowed_value, output_index=0):
    """Routes windowed_value to the receivers for the given output index."""
    cython.cast(Receiver, self.receivers[output_index]).receive(windowed_value)

  def add_receiver(self, operation, output_index=0):
    """Adds a receiver operation for the specified output."""
    self.consumers[output_index].append(operation)

  def progress_metrics(self):
    return beam_fn_api_pb2.Metrics.PTransform(
        processed_elements=beam_fn_api_pb2.Metrics.PTransform.ProcessedElements(
            measured=beam_fn_api_pb2.Metrics.PTransform.Measured(
                total_time_spent=(
                    self.scoped_start_state.sampled_seconds()
                    + self.scoped_process_state.sampled_seconds()
                    + self.scoped_finish_state.sampled_seconds()),
                # Multi-output operations should override this.
                output_element_counts=(
                    # If there is exactly one output, we can unambiguously
                    # fix its name later, which we do.
                    # TODO(robertwb): Plumb the actual name here.
                    {'ONLY_OUTPUT': self.receivers[0].opcounter
                                    .element_counter.value()}
                    if len(self.receivers) == 1
                    else None))),
        user=self.metrics_container.to_runner_api())

  def monitoring_infos(self, transform_id):
    """Returns the list of MonitoringInfos collected by this operation."""
    all_monitoring_infos = self.execution_time_monitoring_infos(transform_id)
    all_monitoring_infos.update(
        self.element_count_monitoring_infos(transform_id))
    all_monitoring_infos.update(self.user_monitoring_infos(transform_id))
    return all_monitoring_infos

  def element_count_monitoring_infos(self, transform_id):
    """Returns the element count MonitoringInfo collected by this operation."""
    if len(self.receivers) == 1:
      # If there is exactly one output, we can unambiguously
      # fix its name later, which we do.
      # TODO(robertwb): Plumb the actual name here.
      mi = monitoring_infos.int64_counter(
          monitoring_infos.ELEMENT_COUNT_URN,
          self.receivers[0].opcounter.element_counter.value(),
          ptransform=transform_id,
          tag='ONLY_OUTPUT' if len(self.receivers) == 1 else str(None),
      )
      return {monitoring_infos.to_key(mi) : mi}
    return {}

  def user_monitoring_infos(self, transform_id):
    """Returns the user MonitoringInfos collected by this operation."""
    return self.metrics_container.to_runner_api_monitoring_infos(transform_id)

  def execution_time_monitoring_infos(self, transform_id):
    total_time_spent_msecs = (
        self.scoped_start_state.sampled_msecs_int()
        + self.scoped_process_state.sampled_msecs_int()
        + self.scoped_finish_state.sampled_msecs_int())
    mis = [
        monitoring_infos.int64_counter(
            monitoring_infos.START_BUNDLE_MSECS_URN,
            self.scoped_start_state.sampled_msecs_int(),
            ptransform=transform_id
        ),
        monitoring_infos.int64_counter(
            monitoring_infos.PROCESS_BUNDLE_MSECS_URN,
            self.scoped_process_state.sampled_msecs_int(),
            ptransform=transform_id
        ),
        monitoring_infos.int64_counter(
            monitoring_infos.FINISH_BUNDLE_MSECS_URN,
            self.scoped_finish_state.sampled_msecs_int(),
            ptransform=transform_id
        ),
        monitoring_infos.int64_counter(
            monitoring_infos.TOTAL_MSECS_URN,
            total_time_spent_msecs,
            ptransform=transform_id
        ),
    ]
    return {monitoring_infos.to_key(mi) : mi for mi in mis}

  def __str__(self):
    """Generates a useful string for this object.

    Compactly displays interesting fields.  In particular, pickled
    fields are not displayed.  Note that we collapse the fields of the
    contained Worker* object into this object, since there is a 1-1
    mapping between Operation and operation_specs.Worker*.

    Returns:
      Compact string representing this object.
    """
    return self.str_internal()

  def str_internal(self, is_recursive=False):
    """Internal helper for __str__ that supports recursion.

    When recursing on receivers, keep the output short.

    Args:
      is_recursive: whether to omit some details, particularly receivers.
    Returns:
      Compact string representing this object.
    """
    printable_name = self.__class__.__name__
    if hasattr(self, 'step_name'):
      printable_name += ' %s' % self.name_context.logging_name()
      if is_recursive:
        # If we have a step name, stop here, no more detail needed.
        return '<%s>' % printable_name

    if self.spec is None:
      printable_fields = []
    else:
      printable_fields = operation_specs.worker_printable_fields(self.spec)

    if not is_recursive and getattr(self, 'receivers', []):
      printable_fields.append('receivers=[%s]' % ', '.join([
          str(receiver) for receiver in self.receivers]))

    return '<%s %s>' % (printable_name, ', '.join(printable_fields))
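
Finally, a hedged sketch of collecting the MonitoringInfos this variant exposes; op and the transform id 'transform-1' are illustrative assumptions, not defined above:

# Hypothetical sketch; 'op' and 'transform-1' are stand-ins. The call merges
# execution-time, element-count and user metrics into one dict keyed by
# monitoring_infos.to_key(mi).
infos = op.monitoring_infos('transform-1')
for mi in infos.values():
  print(mi.urn)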