Code example #1
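The add_stop_event method below builds a restore-sink stop audit event (timestamp, acting user, source bucket/node, target bucket) and, if a data connection is available, sends it with the Memcached client's audit call; audit failures are only logged.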
    def add_stop_event(self, conn: Optional[cb_bin_client.MemcachedClient]) -> couchbaseConstants.PUMP_ERROR:
        # Record who ran the transfer and which source bucket/node and target
        # bucket were involved, then emit the audit event over the data connection.
        sasl_user = str(self.source_bucket.get("name", pump.get_username(self.opts.username)))
        event = {"timestamp": self.get_timestamp(),
                 "real_userid": {"source": "internal",
                                 "user": pump.return_string(sasl_user)},
                 "source_bucket": pump.return_string(self.source_bucket['name']),
                 "source_node": pump.return_string(self.source_node['hostname']),
                 "target_bucket": pump.return_string(self.sink_map['buckets'][0]['name'])}
        if conn:
            try:
                conn.audit(couchbaseConstants.AUDIT_EVENT_RESTORE_SINK_STOP, json.dumps(event))
            except Exception as e:
                # Auditing problems are logged as warnings and never fail the transfer.
                logging.warning(f'auditing error: {e}')
        return 0
Code example #2
File: pump_cb.py, project: simon-dew/couchbase-cli
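CBSink.check validates the destination cluster for a transfer: it fetches the cluster map over REST, resolves the source and sink bucket names, and narrows sink_map['buckets'] down to the single target bucket, optionally applying recovery vbucket remapping.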
    def check(opts, spec: str, source_map: Dict[str, Any]) -> Tuple[couchbaseConstants.PUMP_ERROR,
                                                                    Optional[Dict[str, Any]]]:
        # Contact the destination cluster and narrow sink_map['buckets'] to the
        # single target bucket. Follows the pump convention of returning
        # (error, result), where an error of 0 means success.
        rv, sink_map = pump.rest_couchbase(opts, spec,
                                           opts.username_dest is not None and opts.password_dest is not None)
        if rv != 0:
            return rv, None

        if sink_map is None:
            return rv, None

        rv, source_bucket_name = pump.find_source_bucket_name(opts, source_map)
        if rv != 0:
            return rv, None
        rv, sink_bucket_name = pump.find_sink_bucket_name(opts, source_bucket_name)
        if rv != 0:
            return rv, None

        # Adjust sink_map['buckets'] to have only our sink_bucket.
        sink_buckets = [bucket for bucket in sink_map['buckets']
                        if pump.return_string(bucket['name']) == pump.return_string(sink_bucket_name)]
        if not sink_buckets:
            return f'error: missing bucket-destination: {sink_bucket_name} at destination: {spec};' \
                f' perhaps your username/password is missing or incorrect', None
        if len(sink_buckets) != 1:
            return f'error: multiple buckets with name:{sink_bucket_name} at destination: {spec}', None
        sink_map['buckets'] = sink_buckets
        if opts.extra.get("allow_recovery_vb_remap", 0) == 1:
            error = CBSink.map_recovery_buckets(sink_map, sink_bucket_name, opts.vbucket_list)
            if error is not None:
                return error, None

        return 0, sink_map
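The early returns above follow pump's (error, result) convention: 0 means success, anything else is the error itself. A minimal caller sketch, assuming opts, spec and source_map come from the surrounding pump framework (those names are illustrative, not part of the example):

    rv, sink_map = CBSink.check(opts, spec, source_map)   # CBSink as defined in pump_cb.py
    if rv != 0:
        print(f'check failed: {rv}')                      # rv carries the error text
    else:
        # After a successful check, sink_map['buckets'] holds exactly one bucket.
        print(sink_map['buckets'][0]['name'])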
Code example #3
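add_start_event is the counterpart to the stop event above: it audits the start of a backup, recording the acting user, the transfer mode, and the source bucket and node being read.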
    def add_start_event(self, conn: Optional[cb_bin_client.MemcachedClient]) -> couchbaseConstants.PUMP_ERROR:
        # Audit the start of a backup: the acting user, the transfer mode, and
        # the source bucket/node being read.
        sasl_user = str(self.source_bucket.get("name"))
        event = {"timestamp": self.get_timestamp(),
                 "real_userid": {"source": "internal",
                                 "user": pump.return_string(sasl_user)},
                 "mode": pump.return_string(getattr(self.opts, "mode", "diff")),
                 "source_bucket": pump.return_string(self.source_bucket['name']),
                 "source_node": pump.return_string(self.source_node['hostname'])}
        if conn:
            try:
                conn.audit(couchbaseConstants.AUDIT_EVENT_BACKUP_START, json.dumps(event))
            except Exception as e:
                # As with the stop event, audit failures only produce a warning.
                logging.warning(f'auditing error: {e}')
        return 0
Code example #4
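CSVSink.consume_batch_async writes a batch of mutations out as CSV. On the first batch it chooses the output (stdout or a .csv file) and the column set: for a CSV_JSON_SCHEME spec the columns come from the first document's JSON keys, otherwise a fixed set of metadata columns is used. Deletions and GET responses are skipped.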
    def consume_batch_async(self, batch: pump.Batch) -> Tuple[couchbaseConstants.PUMP_ERROR,
                                                               Optional[pump.SinkBatchFuture]]:
        if not self.writer:
            # Lazily set up the CSV writer on the first batch; default to stdout
            # unless the spec names a .csv file.
            self.csvfile = sys.stdout
            if self.spec.startswith(CSVSink.CSV_JSON_SCHEME):
                if len(batch.msgs) <= 0:
                    future = pump.SinkBatchFuture(self, batch)
                    self.future_done(future, 0)
                    return 0, future

                # Derive the column names from the first document's JSON keys.
                cmd, vbucket_id, key, flg, exp, cas, meta, val_bytes = batch.msgs[0][:8]
                doc = json.loads(val_bytes)
                self.fields = sorted(doc.keys())
                if 'id' not in self.fields:
                    self.fields = ['id'] + self.fields
                if self.spec.endswith(".csv"):
                    filename = self.get_csvfile(self.spec[len(CSVSink.CSV_JSON_SCHEME):])
                    try:
                        self.csvfile = open(filename, "w", encoding='utf-8')
                    except IOError:
                        return f'error: could not write csv to file: {filename}', None
                self.writer = csv.writer(self.csvfile)
                self.writer.writerow(self.fields)
            else:
                # Plain CSV scheme: a fixed set of columns describing each mutation.
                if self.spec.endswith(".csv"):
                    filename = self.get_csvfile(self.spec[len(CSVSink.CSV_SCHEME):])
                    try:
                        self.csvfile = open(filename, "w", encoding='utf-8')
                    except IOError:
                        return f'error: could not write csv to file: {filename}', None
                self.writer = csv.writer(self.csvfile)
                self.writer.writerow(['id', 'flags', 'expiration', 'cas', 'value',
                                      'rev', 'vbid', 'dtype'])
        msg_tuple_format = 0
        for msg in batch.msgs:
            cmd, vbucket_id, key, flg, exp, cas, meta, val_bytes = msg[:8]
            if self.skip(key, vbucket_id):
                continue
            if not msg_tuple_format:
                msg_tuple_format = len(msg)
            seqno = dtype = nmeta = 0
            if msg_tuple_format > 8:
                # Extended message tuples also carry sequence number, datatype,
                # metadata size and conflict-resolution info.
                seqno, dtype, nmeta, conf_res = msg[8:12]
            if dtype > 2:
                # A datatype above 2 may indicate a snappy-compressed value;
                # fall back to the raw bytes if decompression fails.
                try:
                    val_bytes = snappy.uncompress(val_bytes)
                except Exception:
                    pass
            try:
                if cmd in [couchbaseConstants.CMD_TAP_MUTATION,
                           couchbaseConstants.CMD_DCP_MUTATION]:
                    if self.fields:
                        # JSON mode: emit one row per document, in the field
                        # order captured from the first document.
                        if val_bytes and len(val_bytes) > 0:
                            try:
                                row = []
                                doc = json.loads(val_bytes)
                                if type(doc) == dict:
                                    for field in self.fields:
                                        if field == 'id':
                                            row.append(pump.return_string(key))
                                        else:
                                            row.append(doc[field])
                                    self.writer.writerow(row)
                            except ValueError:
                                pass
                    else:
                        self.writer.writerow([pump.return_string(key), flg, exp, cas,
                                              val_bytes, meta, vbucket_id, dtype])
                elif cmd in [couchbaseConstants.CMD_TAP_DELETE,
                             couchbaseConstants.CMD_DCP_DELETE]:
                    # Deletions are not written to the CSV output.
                    pass
                elif cmd == couchbaseConstants.CMD_GET:
                    pass
                else:
                    return f'error: CSVSink - unknown cmd: {cmd!s}', None
            except IOError:
                return "error: could not write csv to stdout", None

        future = pump.SinkBatchFuture(self, batch)
        self.future_done(future, 0)
        return 0, future