Beispiel #1
0
    def stream_update(self, position, timeout=3600.0):
        """Yield update_stream.StreamEvent objects from the server stream.

        Args:
            position: replication position to start streaming from.
            timeout: RPC timeout in seconds, forwarded to the gRPC stub.

        Raises:
            dbexceptions.OperationalError: wraps any gRPC abortion error.
        """
        try:
            request = binlogdata_pb2.StreamUpdateRequest(position=position)
            for response in self.stub.StreamUpdate(request, timeout):
                event = response.stream_event
                fields = []
                rows = []
                if event.primary_key_fields:
                    # Field names and their value-conversion callables are
                    # derived from the same repeated proto field.
                    fields = [f.name for f in event.primary_key_fields]
                    conversions = [
                        field_types_proto3.conversions.get(f.type)
                        for f in event.primary_key_fields]
                    rows = [tuple(_make_row(pk, conversions))
                            for pk in event.primary_key_values]

                yield update_stream.StreamEvent(
                    category=int(event.category),
                    table_name=event.table_name,
                    fields=fields,
                    rows=rows,
                    sql=event.sql,
                    timestamp=event.timestamp,
                    transaction_id=event.transaction_id)
        except face.AbortionError as e:
            # FIXME(alainjobart) These exceptions don't print well, so raise
            # one that will.  The real fix is to define a set of exceptions
            # for this library and raise that, but it's more work.
            raise dbexceptions.OperationalError(e.details, e)
Beispiel #2
0
    def stream_update(self, position, timeout=3600.0):
        """Yield update_stream.StreamEvent objects from the server stream.

        Note this implementation doesn't honor the timeout.

        Args:
            position: replication position to start streaming from.
            timeout: accepted for interface compatibility; not honored here.

        Raises:
            dbexceptions.DatabaseError: on an application-level RPC error.
            dbexceptions.OperationalError: on a transport-level RPC error.
        """
        # Map wire category names to StreamEvent constants; anything
        # unrecognized falls back to ERR, matching the old if/elif chain.
        category_map = {
            'DML': update_stream.StreamEvent.DML,
            'DDL': update_stream.StreamEvent.DDL,
            'POS': update_stream.StreamEvent.POS,
        }
        try:
            self.client.stream_call('UpdateStream.ServeUpdateStream',
                                    {'Position': position})
            while True:
                response = self.client.stream_next()
                if response is None:
                    break
                reply = response.reply

                category = category_map.get(
                    reply['Category'], update_stream.StreamEvent.ERR)

                fields = []
                rows = []
                if reply['PrimaryKeyFields']:
                    conversions = []
                    for field in reply['PrimaryKeyFields']:
                        fields.append(field['Name'])
                        conversions.append(
                            field_types.conversions.get(field['Type']))

                    for pk_list in reply['PrimaryKeyValues']:
                        # Skip empty rows rather than emitting empty tuples.
                        if not pk_list:
                            continue
                        rows.append(tuple(_make_row(pk_list, conversions)))

                yield update_stream.StreamEvent(
                    category=category,
                    table_name=reply['TableName'],
                    fields=fields,
                    rows=rows,
                    sql=reply['Sql'],
                    timestamp=reply['Timestamp'],
                    transaction_id=reply['TransactionID'])
        except gorpc.AppError as e:
            raise dbexceptions.DatabaseError(*e.args)
        except gorpc.GoRpcError as e:
            raise dbexceptions.OperationalError(*e.args)
Beispiel #3
0
  def stream_update(self, position, timeout=3600.0):
    """Yield update_stream.StreamEvent objects streamed from the server.

    Args:
      position: replication position to start streaming from.
      timeout: RPC timeout in seconds, forwarded to the gRPC stub.

    Note: unlike other variants, RPC errors are not translated here; they
    propagate from the underlying iterator as-is.
    """
    req = binlogdata_pb2.StreamUpdateRequest(position=position)

    # The stub is used as a context manager so the RPC channel is torn
    # down when the block exits (normally or via cancellation below).
    with self.stub as stub:
      it = stub.StreamUpdate(req, timeout)
      for response in it:
        stream_event = response.stream_event
        fields = []
        rows = []
        if stream_event.primary_key_fields:
          conversions = []
          for field in stream_event.primary_key_fields:
            fields.append(field.name)
            conversions.append(field_types.conversions.get(field.type))

          # Each primary_key_values entry is one row; convert each cell
          # with the conversion matching its field type.
          for r in stream_event.primary_key_values:
            row = tuple(_make_row(r.values, conversions))
            rows.append(row)

        try:
          yield update_stream.StreamEvent(
              category=int(stream_event.category),
              table_name=stream_event.table_name,
              fields=fields,
              rows=rows,
              sql=stream_event.sql,
              timestamp=stream_event.timestamp,
              transaction_id=stream_event.transaction_id)
        except GeneratorExit:
          # if the loop is interrupted for any reason, we need to
          # cancel the iterator, so we close the RPC connection,
          # and the with __exit__ statement is executed.

          # FIXME(alainjobart) this is flaky. It sometimes doesn't stop
          # the iterator, and we don't get out of the 'with'.
          # Sending a Ctrl-C to the process then works for some reason.
          it.cancel()
          break