Example #1
    def test_batch_error(self):
        fobj = self.find_group(self.gid)

        con0 = GCContact(fobj)
        con0.set_firstname('Namo Narayananaya')
        gce0 = con0.get_gce()

        con = GCContact(fobj)
        con.set_firstname('Ayeshwarya')
        # Deliberately malformed birthday; this entry should come back
        # with a non-2xx batch_status in the response below.
        con.set_birthday('abcd"ef')
        # con.set_anniv('1978-05-31 %s est né il y a %d ans')
        # con.set_birthday('1980-08-10')
        gce = con.get_gce()

        feed = self.pimdb.new_feed()
        # Two inserts reuse the same entry and batch id; the third is the
        # contact with the malformed birthday.
        feed.add_insert(entry=gce0, batch_id_string="DeadBeef")
        feed.add_insert(entry=gce0, batch_id_string="DeadBeef")
        feed.add_insert(entry=gce, batch_id_string="DeadBeef")

        b = BatchState(1, feed, op='insert', sync_tag="asynk:testgcex:ex")

        print 'Request: ', utils.pretty_xml(str(feed))
        rr = self.pimdb.exec_batch(feed)
        print 'Response: ', utils.pretty_xml(str(rr))

        for entry in rr.entry:
            print entry.batch_status
            if entry.batch_status:
                print 'Code: ', entry.batch_status.code
                print 'Reason: ', entry.batch_status.reason
            else:
                self.handle_interrupted_feed(feed, str(rr))
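
The loop above only prints each batch_status; a small helper can partition the response entries instead, which makes the outcome assertable in a test. The following is a minimal sketch with stubbed entry objects: it assumes only the entry.batch_status.code / .reason shape visible above, and summarize_batch_response plus the namedtuple stand-ins are hypothetical, not part of ASynK or gdata.

from collections import namedtuple

# Hypothetical stand-ins mirroring the response shape used above:
# each entry may carry a batch_status with .code and .reason.
BatchStatus = namedtuple('BatchStatus', ['code', 'reason'])
Entry = namedtuple('Entry', ['batch_status'])

def summarize_batch_response(entries):
    """Partition response entries into (successes, failures, interrupted);
    entries without a batch_status correspond to the
    handle_interrupted_feed() branch in the test above."""
    successes, failures, interrupted = [], [], []
    for entry in entries:
        st = entry.batch_status
        if st is None:
            interrupted.append(entry)
        elif 200 <= int(st.code) < 300:
            successes.append(st)
        else:
            failures.append(st)
    return successes, failures, interrupted

# Usage with stubbed entries: one created, one rejected.
rr_entries = [Entry(BatchStatus('201', 'Created')),
              Entry(BatchStatus('400', 'Invalid birthday'))]
ok, bad, broken = summarize_batch_response(rr_entries)
print('ok=%d bad=%d interrupted=%d' % (len(ok), len(bad), len(broken)))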
Example #2
    def batch_create(self, src_sl, src_dbid, items):
        """See the documentation in folder.Folder"""

        my_dbid = self.get_dbid()
        c = self.get_config()
        pname = src_sl.get_pname()

        src_sync_tag = c.make_sync_label(src_sl.get_pname(), src_dbid)
        dst_sync_tag = c.make_sync_label(src_sl.get_pname(), my_dbid)

        f = self.get_db().new_feed()
        stats = BatchState(1, f, 'insert', sync_tag=dst_sync_tag)

        success = True
        for item in items:
            con_itemid = item.get_itemid_from_synctags(pname, 'gc')
            gc = GCContact(self, con=item, con_itemid=con_itemid)
            bid = item.get_itemid()
            gc.update_sync_tags(src_sync_tag, bid)

            gce = gc.get_gce()

            stats.add_con(bid, new=gc, orig=item)
            f.add_insert(entry=gce, batch_id_string=bid)
            stats.incr_cnt()

            if stats.get_cnt() % self.get_batch_size() == 0:
                # Feeds have to be less than 1MB. We can push this some
                # more. FIXME.
                logging.debug(
                    'Uploading new batch # %02d to Google. ' +
                    'Count: %3d. Size: %6.2fK', stats.get_bnum(),
                    stats.get_cnt(), stats.get_size())
                rf = self.get_db().exec_batch(f)
                succ, cons = stats.process_batch_response(rf)
                success = success and succ

                f = self.get_db().new_feed()
                stats = BatchState(stats.get_bnum() + 1,
                                   f,
                                   'insert',
                                   sync_tag=dst_sync_tag)

        # Upload any leftovers
        if stats.get_cnt() > 0:
            logging.debug('New Batch # %02d. Count: %3d. Size: %5.2fK',
                          stats.get_bnum(), stats.get_cnt(), stats.get_size())
            rf = self.get_db().exec_batch(f)
            succ, cons = stats.process_batch_response(rf)
            success = success and succ

        return success
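
Stripped of the gdata plumbing, batch_create is the classic flush-every-N pattern: rotate the feed once get_batch_size() entries have accumulated, then flush any leftovers. Below is a hedged sketch of just that control flow, where make_feed, add_entry and exec_batch are hypothetical callables standing in for new_feed(), add_insert() and exec_batch().

def flush_in_batches(items, batch_size, make_feed, add_entry, exec_batch):
    """Queue items onto a feed, uploading and rotating the feed every
    batch_size items, with one final upload for any leftovers."""
    feed, count, ok = make_feed(), 0, True
    for item in items:
        add_entry(feed, item)
        count += 1
        if count % batch_size == 0:
            ok = exec_batch(feed) and ok
            feed = make_feed()          # fresh feed, like new_feed()
    if count % batch_size != 0:         # upload any leftovers
        ok = exec_batch(feed) and ok
    return ok

# Usage with plain lists in place of feeds: 7 items, batches of 3.
uploaded = []
def fake_exec(feed):
    uploaded.append(list(feed))
    return True
flush_in_batches(range(7), 3, make_feed=list,
                 add_entry=lambda f, i: f.append(i),
                 exec_batch=fake_exec)
print(uploaded)   # [[0, 1, 2], [3, 4, 5], [6]]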
Example #3
    def batch_update (self, sync_list, src_dbid, items):
        """See the documentation in folder.Folder"""

        # Updates and deletes on Google require not just the entryid but
        # also its correct etag, which is a version identifier. This is to
        # ensure two apps do not overwrite each other's work without even
        # knowing about it. So we need to approach this in two steps: (a)
        # fetch the ContactEntries for all the items we are interested in;
        # the returned entry objects have all the required info, including
        # the latest etag. (b) Modify the same entry with the local updates
        # and send it back.

        my_dbid = self.get_dbid()
        c       = self.get_config()
        pname   = sync_list.get_pname()

        src_sync_tag = c.make_sync_label(pname, src_dbid)
        dst_sync_tag = c.make_sync_label(pname, my_dbid)

        tags  = [item.get_sync_tags(dst_sync_tag)[0] for item in items]
        gcids = [val for (tag, val) in tags]

        logging.debug('Refreshing etags for modified entries...')

        success, ces   = self._fetch_gc_entries(gcids)
        etags = [copy.deepcopy(ce.etag) for ce in ces]
        f     = self.get_db().new_feed()
        stats = BatchState(1, f, 'update', sync_tag=dst_sync_tag)

        for item, etag in zip(items, etags):
            gc  = GCContact(self, con=item)
            bid = item.get_itemid()
            gc.update_sync_tags(src_sync_tag, bid)

            gce = gc.get_gce()
            gce.etag = etag

            stats.add_con(bid, new=gc, orig=item)
            f.add_update(entry=gce, batch_id_string=bid)
            stats.incr_cnt()
            
            if stats.get_cnt() % self.get_batch_size() == 0:
                # Feeds have to be less than 1MB. We can push this some
                # more. FIXME.
                logging.debug('Uploading mod batch # %02d to Google. ' +
                              'Count: %3d. Size: %6.2fK',
                              stats.get_bnum(), stats.get_cnt(),
                              stats.get_size())
                rf = self.get_db().exec_batch(f)
                succ, cons = stats.process_batch_response(rf)
                success = success and succ

                f = self.get_db().new_feed()
                stats = BatchState(stats.get_bnum()+1, f, 'update',
                                   sync_tag=dst_sync_tag)
           
        # Upload any leftovers
        if stats.get_cnt() > 0:
            logging.debug('Mod Batch # %02d. Count: %3d. Size: %5.2fK',
                          stats.get_bnum(), stats.get_cnt(),
                          stats.get_size())
            rf = self.get_db().exec_batch(f)
            succ, cons = stats.process_batch_response(rf)
            success = success and succ

        return success
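
The comment at the top of batch_update describes Google's optimistic-concurrency contract; the failure mode it guards against is easy to reproduce with a toy in-memory store. FakeContactStore below is hypothetical (not part of ASynK or gdata) and only illustrates why step (a), re-fetching the latest etag, has to precede step (b), sending the modified entry back.

class EtagMismatch(Exception):
    pass

class FakeContactStore(object):
    """Toy store that, like Google, accepts a write only when it
    carries the etag of the currently stored copy."""
    def __init__(self):
        self._data, self._etags = {}, {}

    def put(self, cid, value, etag=None):
        if cid in self._data and etag != self._etags[cid]:
            raise EtagMismatch('stale write to %s' % cid)
        self._data[cid] = value
        self._etags[cid] = self._etags.get(cid, 0) + 1
        return self._etags[cid]      # the fresh etag, as a re-fetch would see

store = FakeContactStore()
etag1 = store.put('c1', 'v1')        # create; server assigns the first etag
store.put('c1', 'v2', etag1)         # update with the fresh etag: accepted
try:
    store.put('c1', 'v3', etag1)     # etag1 is now stale
except EtagMismatch as e:
    print('rejected: %s' % e)        # rejected: stale write to c1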