def subscribe_to(self, url):
    """Queue an asynchronous 'quickadd' request subscribing to *url*.

    The response is handled in :meth:`on_quickadd`; nothing is returned.
    """
    endpoint = api_method('subscription/quickadd')
    # Form body carries the edit token plus the feed/site URL to add.
    form = urlencode({'T': self.auth.edit_token, 'quickadd': url})
    request = self.auth.message('POST', endpoint)
    request.set_request('application/x-www-form-urlencoded',
                        Soup.MemoryUse.COPY, form, len(form))
    session.queue_message(request, self.on_quickadd, None)
def on_sync_ids(self, job, success):
    """Request item contents for every item id the SQLite job returned.

    Each request increments ``sync-status``; completion is tracked via the
    ``notify::sync-status`` signal connected at the end. Emits 'sync-done'
    immediately when there is nothing to fetch.
    """
    if not success:
        logger.error('Could not get data from SQLite')
        return
    if len(job.result) == 0:
        logger.debug("Items doesn't need synchronization")
        GLib.idle_add(self.emit, 'sync-done')
        return
    # Somewhy when streaming items and asking more than 512 returns 400.
    # Asking anything in between 250 and 512 returns exactly 250 items.
    id_chunks = split_chunks((('i', i) for i, in job.result), 250, ('', ''))
    # Asynchronous job queue for items parsing.
    # We will, unless we have a bug, have only one synchronization at a
    # time, thus it is safe to initialize it like that.
    executor = JobExecutor()
    executor.start()
    endpoint = api_method('stream/items/contents')
    form_type = 'application/x-www-form-urlencoded'
    for chunk in id_chunks:
        self.sync_status += 1
        payload = urlencode(chunk)
        request = self.auth.message('POST', endpoint)
        request.set_request(form_type, Soup.MemoryUse.COPY, payload,
                            len(payload))
        session.queue_message(request, self.on_response, executor)
    self.connect('notify::sync-status', self.on_sync_status, executor)
def on_data(self, job, success, data):
    """Push locally changed flags for items back to the server.

    ``data`` is a ``(flag, st)`` pair: ``flag`` names the tag/category to
    edit and ``st`` picks the action — truthy removes (``'r'``), falsy
    adds (``'a'``). Emits 'sync-done' when nothing remains in flight.
    """
    flag, st = data
    self.sync_status -= 1
    if not success:
        logger.error('Could not get data from SQLite correctly')
        return
    # Idiomatic truthiness check instead of `not len(...) == 0`.
    if job.result:
        uri = api_method('edit-tag')
        req_type = 'application/x-www-form-urlencoded'
        post = (
            ('r' if st else 'a', flag),
            ('T', self.auth.edit_token),
        )
        # Chunk ids so each request stays reasonably sized; None pads the
        # final chunk and is filtered out below.
        for chunk in split_chunks(job.result, 250, None):
            iids, ids = zip(*filter(lambda x: x is not None, chunk))
            iids = tuple(zip(itertools.repeat('i'), iids))
            payload = urlencode(iids + post)
            msg = self.auth.message('POST', uri)
            msg.set_request(req_type, Soup.MemoryUse.COPY, payload,
                            len(payload))
            session.queue_message(msg, self.on_response, ids)
            self.sync_status += 1
    if self.sync_status == 0:
        # In case we didn't have any flags to synchronize
        logger.debug('There were no flags to synchronize')
        GLib.idle_add(self.emit, 'sync-done')
def on_credentials(self):
    """Start Google ClientLogin authentication with stored credentials.

    When the keyring holds no credentials the status is marked ABORTED
    and 'status' is notified; otherwise the credentials are POSTed and
    the flow continues in :meth:`on_login`.
    """
    if not self.keyring.has_credentials:
        self.status.update({'ABORTED': True, 'PROGRESS': False})
        self.notify('status')
        return
    uri = 'https://www.google.com/accounts/ClientLogin'
    # BUGFIX: urlencode() percent-escapes the username and password. The
    # previous str.format() interpolation produced a corrupt form body
    # whenever either value contained characters special to
    # application/x-www-form-urlencoded ('&', '=', '%', non-ASCII, ...).
    data = urlencode({
        'service': 'reader',
        'accountType': 'GOOGLE',
        'Email': self.keyring.username,
        'Passwd': self.keyring.password,
    })
    message = Message('POST', uri)
    req_type = 'application/x-www-form-urlencoded'
    message.set_request(req_type, Soup.MemoryUse.COPY, data, len(data))
    session.queue_message(message, self.on_login, None)
def on_login(self, session, message, data):
    """Handle the ClientLogin reply; on success fetch an edit token.

    On HTTP failure the status dict is updated (BAD_CREDENTIALS on 403)
    and 'status' is notified. On success the ``Auth`` line of the body
    becomes ``login_token`` and a token request is queued.
    """
    code = message.status_code
    if 200 <= code < 400:
        # Login was likely successful
        for line in message.response_body.data.splitlines():
            if line.startswith('Auth'):
                # Skip the "Auth=" prefix (5 characters).
                self.login_token = line[5:]
                token_msg = self.message('GET', api_method('token'))
                session.queue_message(token_msg, self.on_token, None)
                break
    else:
        logger.error('Authentication failed (HTTP {0})'.format(code))
        self.status.update({'OK': False, 'PROGRESS': False,
                            'BAD_CREDENTIALS': code == 403})
        self.notify('status')
def on_site_uris(self, job, success):
    """Queue favicon downloads for each site URI in ``job.result``.

    Skips non-HTTP URIs and, for icons we already have, resyncs only a
    ~0.5% random sample (1 in 201), since icons rarely change or appear.
    Emits 'sync-done' at once when nothing was queued.
    """
    uri = 'https://getfavicon.appspot.com/{0}?defaulticon=none'
    for site_uri, in job.result:
        if site_uri is None or not site_uri.startswith('http'):
            continue
        # Idiomatic `!= 0` instead of `not ... == 0`; guard clauses
        # instead of an elif after continue.
        if self.has_icon(site_uri) and random.randint(0, 200) != 0:
            continue
        msg = Message('GET', uri.format(quote(site_uri)))
        session.queue_message(msg, self.on_response, site_uri)
        self.sync_status += 1
    if self.sync_status == 0:
        logger.debug('Favicons synchronization completed')
        GLib.idle_add(self.emit, 'sync-done')
def sync(self):
    """Start synchronizing item ids for every tracked read/star state.

    Returns ``False`` (without doing anything) when a synchronization is
    already running; otherwise queues one id-list request per state.
    """
    if self.sync_status.get('synchronizing', False):
        logger.error('IDs are already being synchronized')
        return False
    self.sync_status['synchronizing'] = True
    item_limit = settings.settings['cache-items']
    for name, state in self.states.items():
        getargs = state + [('n', item_limit)]
        request = self.auth.message(
            'GET', api_method('stream/items/ids', getargs))
        session.queue_message(request, self.on_response, name)
    # Initially mark everything as deletable and unflag all items.
    # Later in the process, items that are still important will be
    # unmarked and reflagged again.
    sqlite.execute(
        'UPDATE items SET to_delete=1, unread=0, starred=0, to_sync=0')
def set_item_label(self, vals, label_id, value):
    """Add (*value* truthy) or remove a label on a subscription.

    Returns ``False`` when *vals* does not describe a subscription;
    otherwise queues the edit request, handled in :meth:`on_sub_edit`.
    """
    if vals[0] != SubscriptionType.SUBSCRIPTION:
        logger.error('Adding label to non-subscription!')
        return False
    endpoint = api_method('subscription/edit')
    # 'a' adds the label, 'r' removes it.
    action = 'a' if value else 'r'
    form = urlencode({
        'T': self.auth.edit_token,
        's': split_id(vals[1])[1],
        'ac': 'edit',
        action: 'user/-/{0}'.format(label_id),
    })
    request = self.auth.message('POST', endpoint)
    request.set_request('application/x-www-form-urlencoded',
                        Soup.MemoryUse.COPY, form, len(form))
    session.queue_message(request, self.on_sub_edit, None)
def sync(self):
    """Queue a request for the full subscription list."""
    request = self.auth.message('GET', api_method('subscription/list'))
    session.queue_message(request, self.on_response, None)