Exemple #1
0
    def finish_batch(self, batch_info, dst_curs):
        """Called when batch finishes.

        Flushes the rows collected in self.rows into the destination
        queue with one bulk insert.  No-op when no destination queue
        is configured.
        """
        # Nothing to flush when no destination queue is configured.
        # (Two-line guard per PEP 8 instead of one-line `if: return`.)
        if self.dst_queue_name is None:
            return

        # Field order must match the order of values in self.rows.
        fields = ['type', 'data',
                  'extra1', 'extra2', 'extra3', 'extra4', 'time']
        pgq.bulk_insert_events(dst_curs, self.rows, fields, self.dst_queue_name)
Exemple #2
0
    def finish_batch(self, batch_info, dst_curs):
        """Called when batch finishes."""
        # Flush the collected rows only when a destination queue is set up.
        if self.dst_queue_name is not None:
            # field order must match the composed rows
            fields = ['type', 'data', 'extra1', 'extra2',
                      'extra3', 'extra4', 'time']
            pgq.bulk_insert_events(dst_curs, self.rows, fields,
                                   self.dst_queue_name)
Exemple #3
0
    def process_remote_batch(self, db, batch_id, ev_list, dst_db):
        """Compose one row per event and bulk-insert them into the destination queue."""
        # 'fields' must stay in the same order as the row layout below
        fields = ['type', 'data', 'extra1', 'extra2', 'extra3', 'extra4', 'time']
        rows = [[ev.type, ev.data, ev.extra1, ev.extra2,
                 ev.extra3, ev.extra4, ev.time]
                for ev in ev_list]

        # hand everything over in a single bulk insert
        pgq.bulk_insert_events(dst_db.cursor(), rows, fields, self.dst_queue_name)
Exemple #4
0
    def process_remote_batch(self, db, batch_id, ev_list, dst_db):
        """Copy the batch events into the destination queue, tagging each as done."""
        # must stay in the same order as the rows composed below
        fields = ["type", "data", "extra1", "extra2", "extra3", "extra4", "time"]

        # plain loop rather than a comprehension: tag_done() is a
        # per-event side effect that has to run for every event
        rows = []
        for event in ev_list:
            rows.append([event.type, event.data, event.extra1, event.extra2,
                         event.extra3, event.extra4, event.time])
            event.tag_done()

        # push everything over in one bulk insert
        cursor = dst_db.cursor()
        pgq.bulk_insert_events(cursor, rows, fields, self.dst_queue_name)
Exemple #5
0
    def process_remote_batch(self, db, batch_id, ev_list, dst_db):
        """Bulk-copy all events of the batch into the destination queue."""
        # row layout; 'fields' below must list the columns in the same order
        rows = [
            [ev.type, ev.data, ev.extra1, ev.extra2, ev.extra3, ev.extra4, ev.time]
            for ev in ev_list
        ]
        fields = ['type', 'data', 'extra1', 'extra2', 'extra3', 'extra4', 'time']

        # single round-trip insert into the target queue
        pgq.bulk_insert_events(dst_db.cursor(), rows, fields, self.dst_queue_name)
    def process_remote_batch(self, db, batch_id, ev_list, dst_db):
        """Split incoming events into per-queue groups and bulk-insert each group.

        The destination queue of an event is taken from the event field
        named by the 'queue_field' config option (default: 'extra1').
        """
        cache = {}
        queue_field = self.cf.get('queue_field', 'extra1')
        for ev in ev_list:
            row = [ev.type, ev.data, ev.extra1, ev.extra2, ev.extra3, ev.extra4, ev.time]
            # getattr() instead of calling ev.__getattr__() directly:
            # the dunder is only a miss-hook and direct calls fail for
            # attributes found through normal lookup
            queue = getattr(ev, queue_field)
            cache.setdefault(queue, []).append(row)

        # should match the composed row
        fields = ['type', 'data', 'extra1', 'extra2', 'extra3', 'extra4', 'time']

        # now send them to right queues
        curs = dst_db.cursor()
        for queue, rows in cache.items():
            pgq.bulk_insert_events(curs, rows, fields, queue)
    def process_remote_batch(self, db, batch_id, ev_list, dst_db):
        """Route events into per-queue buckets and bulk-insert each bucket.

        The target queue name is read from the event attribute named by
        the 'queue_field' config option (default: 'extra1').
        """
        cache = {}
        queue_field = self.cf.get('queue_field', 'extra1')
        for ev in ev_list:
            row = [
                ev.type, ev.data, ev.extra1, ev.extra2, ev.extra3, ev.extra4,
                ev.time
            ]
            # getattr() is the supported way to look up an attribute by
            # name; calling ev.__getattr__() directly bypasses normal
            # attribute lookup and breaks for plain instance attributes
            queue = getattr(ev, queue_field)
            cache.setdefault(queue, []).append(row)

        # should match the composed row
        fields = [
            'type', 'data', 'extra1', 'extra2', 'extra3', 'extra4', 'time'
        ]

        # now send them to right queues
        curs = dst_db.cursor()
        for queue, rows in cache.items():
            pgq.bulk_insert_events(curs, rows, fields, queue)