def __init__(self, code):
    """Build an error object for a bad HTTP status code.

    A 404 gets its own friendlier description; every other code goes
    through the generic HTTPError constructor with the code
    interpolated into the message.
    """
    if code == 404:
        self.friendlyDescription = _("File not found")
        self.longDescription = _("Got 404 status code")
    else:
        message = _("Host returned bad status code: %(code)s",
                    {"code": util.unicodify(code)})
        HTTPError.__init__(self, message)
    self.code = code
def parse(url_file_stream_or_string):
    """Parse a feed.

    Runs the feed data through feedparser.parse, then unicodifies the
    result and applies provider-specific fixups (the Yahoo hack) before
    returning it.
    """
    result = util.unicodify(feedparser.parse(url_file_stream_or_string))
    _yahoo_hack(result['entries'])
    return result
def calc_filename(self, redirected_url):
    """Choose a filename for the downloaded data.

    Prefers the name carried in the Content-Disposition header when
    present; otherwise derives a cleaned filename from the final
    (post-redirect) URL.
    """
    try:
        disposition = self.headers['content-disposition']
    except KeyError:
        disposition = None
    if disposition is not None:
        header_name = self.find_value_from_header(disposition, 'filename')
        if header_name is not None:
            return download_utils.clean_filename(header_name)
    return download_utils.filename_from_url(
        util.unicodify(redirected_url), clean=True)
def test_unicodify(self):
    """unicodify() passes None and ints through unchanged, and converts
    strings -- including strings nested in lists and dicts -- to unicode."""
    self.assertEqual(None, util.unicodify(None))
    # Int: left untouched, type preserved
    self.assertEqualWithType(5, int, util.unicodify(5))
    # String
    self.assertEqualWithType('abc', unicode, util.unicodify('abc'))
    # List
    converted = util.unicodify(['abc', '123'])
    for expected, actual in zip(['abc', '123'], converted):
        self.assertEqualWithType(expected, unicode, actual)
    # Dict
    converted = util.unicodify({'a': 'abc', 'b': '123'})
    self.assertEqualWithType('abc', unicode, converted['a'])
    self.assertEqualWithType('123', unicode, converted['b'])
    # List of dicts
    converted = util.unicodify([{'a': 'abc', 'b': '$$$'},
                                {'y': u'25', 'z': '28'}])
    self.assertEqualWithType('abc', unicode, converted[0]['a'])
    self.assertEqualWithType('$$$', unicode, converted[0]['b'])
    self.assertEqualWithType('25', unicode, converted[1]['y'])
    self.assertEqualWithType('28', unicode, converted[1]['z'])
def test_unicodify(self):
    """Exercise util.unicodify over scalars and nested containers."""
    # None and ints pass through unchanged.
    self.assertEqual(None, util.unicodify(None))
    self.assertEqualWithType(5, int, util.unicodify(5))
    # A plain byte string becomes unicode.
    self.assertEqualWithType('abc', unicode, util.unicodify('abc'))
    # Lists are converted element-wise.
    items = util.unicodify(['abc', '123'])
    self.assertEqualWithType('abc', unicode, items[0])
    self.assertEqualWithType('123', unicode, items[1])
    # Dict values are converted.
    mapping = util.unicodify({'a': 'abc', 'b': '123'})
    self.assertEqualWithType('abc', unicode, mapping['a'])
    self.assertEqualWithType('123', unicode, mapping['b'])
    # Lists of dicts are converted recursively; already-unicode values
    # stay unicode.
    nested = util.unicodify([{'a': 'abc', 'b': '$$$'},
                             {'y': u'25', 'z': '28'}])
    self.assertEqualWithType('abc', unicode, nested[0]['a'])
    self.assertEqualWithType('$$$', unicode, nested[0]['b'])
    self.assertEqualWithType('25', unicode, nested[1]['y'])
    self.assertEqualWithType('28', unicode, nested[1]['z'])
def parse_url(url, split_path=False):
    """Split *url* into its components.

    Returns (scheme, host, port, full_path) by default, or
    (scheme, host, port, path, params, query) when split_path is True.
    Scheme and host are lower-cased; a missing/invalid port falls back
    to the scheme's default.  Also repairs some malformed inputs:
    duplicated ports (http://foo.bar:123:123/baz, see #441) and
    Windows-style "file:///C|\\foo" drive paths.
    """
    url = fix_file_urls(url)
    (scheme, host, path, params, query, fragment) = \
        util.unicodify(list(urlparse(url)))
    # Filter invalid URLs with duplicated ports
    # (http://foo.bar:123:123/baz) which seem to be part of #441.
    if host.count(':') > 1:
        host = host[0:host.rfind(':')]
    if scheme == '' and util.chatter:
        logging.warn("%r has no scheme", url)
    if ':' in host:
        host, port = host.split(':')
        try:
            port = int(port)
        except ValueError:
            # Fix: the original used a bare ``except`` here (after
            # re-raising SystemExit/KeyboardInterrupt).  int() on a
            # string can only raise ValueError, so catch exactly that.
            logging.warn("invalid port for %r", url)
            port = default_port(scheme)
    else:
        port = default_port(scheme)
    host = host.lower()
    scheme = scheme.lower()
    # Windows drive names are often specified as "C|\foo\bar"
    path = path.replace('|', ':')
    if path == '' or not path.startswith('/'):
        path = '/' + path
    elif scheme.startswith("file") and re.match(r'/[a-zA-Z]:', path):
        # fixes "file:///C:/foo" paths
        path = path[1:]
    full_path = path
    if split_path:
        return scheme, host, port, full_path, params, query
    if params:
        full_path += ';%s' % params
    if query:
        full_path += '?%s' % query
    return scheme, host, port, full_path
def parse_url(url, split_path=False):
    """Break *url* into (scheme, host, port, path[, params, query]).

    Lower-cases scheme and host, resolves the port (falling back to the
    scheme default when absent or unparsable), and repairs duplicated
    ports (#441) plus Windows "C|\\foo" drive notation in file: URLs.
    """
    url = fix_file_urls(url)
    pieces = util.unicodify(list(urlparse(url)))
    scheme, host, path, params, query, fragment = pieces
    # Filter invalid URLs with duplicated ports
    # (http://foo.bar:123:123/baz) which seem to be part of #441.
    if host.count(':') > 1:
        host = host[:host.rfind(':')]
    if scheme == '' and util.chatter:
        logging.warn("%r has no scheme", url)
    if ':' in host:
        host, port_text = host.split(':')
        try:
            port = int(port_text)
        except ValueError:
            logging.warn("invalid port for %r", url)
            port = default_port(scheme)
    else:
        port = default_port(scheme)
    host = host.lower()
    scheme = scheme.lower()
    # Windows drive names are often specified as "C|\foo\bar"
    path = path.replace('|', ':')
    if not path.startswith('/'):
        # Covers the empty-path case too: '' never starts with '/'.
        path = '/' + path
    elif scheme.startswith("file") and re.match(r'/[a-zA-Z]:', path):
        # fixes "file:///C:/foo" paths
        path = path[1:]
    full_path = path
    if split_path:
        return scheme, host, port, full_path, params, query
    if params:
        full_path += ';%s' % params
    if query:
        full_path += '?%s' % query
    return scheme, host, port, full_path
def update_icon_cache(self, url, info):
    """Handle the HTTP response for an icon fetch and refresh the cache.

    ``info`` appears to be a dict-like HTTP result with at least
    'status', and for 200 responses 'filename' and 'body' -- TODO
    confirm against the caller.  A 304 means our cached copy is still
    valid.  On success the body is written via a temp file, renamed to a
    uniquely randomized name, and the etag/modified/url bookkeeping is
    updated.
    """
    self.dbItem.confirm_db_thread()
    if self.removed:
        # Item disappeared while the request was in flight; just
        # release the updater slot.
        iconCacheUpdater.update_finished()
        return
    needs_save = False
    needsChange = False
    if info == None or (info['status'] != 304 and info['status'] != 200):
        self.error_callback(url, "bad response")
        return
    try:
        # Our cache is good. Hooray!
        if info['status'] == 304:
            return
        needsChange = True
        # We have to update it, and if we can't write to the file, we
        # should pick a new filename.
        if self.filename and not fileutil.access(self.filename,
                                                 os.R_OK | os.W_OK):
            self.filename = None
        cachedir = app.config.get(prefs.ICON_CACHE_DIRECTORY)
        try:
            fileutil.makedirs(cachedir)
        except OSError:
            # Directory probably exists already; if it truly cannot be
            # created the write below fails and we bail out there.
            pass
        try:
            # Write to a temp file.
            if self.filename:
                tmp_filename = self.filename + ".part"
            else:
                tmp_filename = os.path.join(cachedir,
                                            info["filename"]) + ".part"
            tmp_filename, output = next_free_filename(tmp_filename)
            output.write(info["body"])
            output.close()
        except IOError:
            self.remove_file(tmp_filename)
            return
        if self.filename:
            self.remove_file(self.filename)
        # Create a new filename always to avoid browser caching in case a
        # file changes.
        # Add a random unique id
        parts = unicodify(info["filename"]).split('.')
        uid = u"%08d" % random.randint(0, 99999999)
        if len(parts) == 1:
            parts.append(uid)
        else:
            # Insert the uid just before the extension.
            parts[-1:-1] = [uid]
        self.filename = u'.'.join(parts)
        self.filename = unicode_to_filename(self.filename, cachedir)
        self.filename = os.path.join(cachedir, self.filename)
        # NOTE(review): next_free_filename can raise ValueError which is
        # unhandled here -- later revisions of this method guard it.
        self.filename, fp = next_free_filename(self.filename)
        needs_save = True
        try:
            fileutil.rename(tmp_filename, self.filename)
        except OSError:
            self.filename = None
            needs_save = True
        # NOTE(review): fp is closed only after the rename; on platforms
        # where renaming an open file fails this ordering matters --
        # confirm intended.
        fp.close()
        etag = unicodify(info.get("etag"))
        modified = unicodify(info.get("modified"))
        if self.etag != etag:
            needs_save = True
            self.etag = etag
        if self.modified != modified:
            needs_save = True
            self.modified = modified
        if self.url != url:
            needs_save = True
            self.url = url
    finally:
        # Runs on every exit from the try block (including the 304
        # early return): notify listeners and release the updater slot.
        if needsChange:
            self.icon_changed(needs_save=needs_save)
        self.updating = False
        if self.needsUpdate:
            self.needsUpdate = False
            self.request_update(True)
        iconCacheUpdater.update_finished()
def get_from_environ(key, default):
    """Return the environment variable *key* as unicode, or *default*
    (unmodified) when the variable is not set."""
    try:
        value = os.environ[key]
    except KeyError:
        return default
    return util.unicodify(value)
def update_status(cls, data, cmd_done=False):
    """Apply a status update sent by the downloader process.

    ``data`` is a dict of status fields; everything except filename-ish
    fields and metainfo is unicodified.  The target downloader instance
    is looked up via data['dlid'].  Returns False when the update is
    judged stale and discarded, True when applied (implicitly None when
    no downloader matches the dlid).
    """
    for field in data:
        if field not in ['filename', 'short_filename', 'metainfo']:
            data[field] = unicodify(data[field])
    self = get_downloader_by_dlid(dlid=data['dlid'])
    # FIXME: how do we get all of the possible bit torrent
    # activity strings into gettext? --NN
    if data.has_key('activity') and data['activity']:
        data['activity'] = _(data['activity'])
    if self is not None:
        now = time.time()
        # NOTE(review): last_update is read here but never used in this
        # variant -- looks vestigial.
        last_update = self.last_update
        state = self.get_state()
        new_state = data.get('state', u'downloading')
        # If this item was marked as pending update, then any update
        # which comes in now which does not have cmd_done set is void.
        if not cmd_done and self.status_updates_frozen:
            logging.debug('self = %s, '
                          'saved state = %s '
                          'downloader state = %s. '
                          'Discard.', self, state, new_state)
            # treat as stale
            return False
        # If the state is one which we set and was meant to be passed
        # through to the downloader (valid_states), and the downloader
        # replied with something that was a response to a previous
        # download command, and state was also a part of valid_states,
        # but the saved state and the new state do not match
        # then it means the message is stale.
        #
        # Have a think about why this is true: when you set a state,
        # which is authoritative, to the downloader you expect it
        # to reply with that same state.  If they do not match then it
        # means the message is stale.
        #
        # The exception to this rule is if the downloader replies with
        # an error state, or if downloading has transitioned to finished
        # state.
        #
        # This also does not apply to any state which we set on the
        # downloader via a restore command.  A restore command before
        # a pause/resume/cancel will work as intended, and no special
        # trickery is required.  A restore command which happens after
        # a pause/resume/cancel is void, so no work is required.
        #
        # I hope this makes sense and is clear!
        valid_states = (u'downloading', u'paused', u'stopped',
                        u'uploading-paused', u'finished')
        if (cmd_done and state in valid_states
                and new_state in valid_states and state != new_state):
            # downloading -> finished is a legitimate spontaneous
            # transition, so it is exempt from the staleness check.
            if not (state == u'downloading' and new_state == u'finished'):
                logging.debug('self = %s STALE. '
                              'Saved state %s, got state %s. Discarding.',
                              self, state, new_state)
                return False
        # We are updating! Reset the status_updates_frozen flag.
        self.status_updates_frozen = False
        # We have something to update: update the last updated timestamp.
        self.last_update = now
        was_finished = self.is_finished()
        old_filename = self.get_filename()
        self.before_changing_rates()
        self.update_status_attributes(data)
        self.after_changing_rates()
        # Store the time the download finished
        finished = self.is_finished() and not was_finished
        name_changed = self.get_filename() != old_filename
        file_migrated = (self.is_finished() and name_changed)
        # Stop seeding once the configured upload ratio is exceeded
        # (unless the user started the upload manually).
        if ((self.get_state() == u'uploading'
             and not self.manualUpload
             and (app.config.get(prefs.LIMIT_UPLOAD_RATIO)
                  and self.get_upload_ratio() >
                  app.config.get(prefs.UPLOAD_RATIO)))):
            self.stop_upload()
        self.signal_change()
        self.update_item_list(finished, file_migrated)
        return True
def update_status(cls, data, cmd_done=False):
    """Apply a status update from the downloader process.

    Like the sibling variants in this file, but passes the old filename
    through to update_item_list so file migrations can be handled there.
    Returns False for discarded (stale) updates, True when applied.
    """
    for field in data:
        if field not in ['filename', 'short_filename', 'metainfo']:
            data[field] = unicodify(data[field])
    self = get_downloader_by_dlid(dlid=data['dlid'])
    # FIXME: how do we get all of the possible bit torrent
    # activity strings into gettext? --NN
    if data.has_key('activity') and data['activity']:
        data['activity'] = _(data['activity'])
    if self is not None:
        now = time.time()
        # NOTE(review): last_update is read but never used in this
        # variant.
        last_update = self.last_update
        state = self.get_state()
        new_state = data.get('state', u'downloading')
        # If this item was marked as pending update, then any update
        # which comes in now which does not have cmd_done set is void.
        if not cmd_done and self.status_updates_frozen:
            logging.debug('self = %s, '
                          'saved state = %s '
                          'downloader state = %s. '
                          'Discard.', self, state, new_state)
            # treat as stale
            return False
        # If the state is one which we set and was meant to be passed
        # through to the downloader (valid_states), and the downloader
        # replied with something that was a response to a previous
        # download command, and state was also a part of valid_states,
        # but the saved state and the new state do not match
        # then it means the message is stale.
        #
        # Have a think about why this is true: when you set a state,
        # which is authoritative, to the downloader you expect it
        # to reply with that same state.  If they do not match then it
        # means the message is stale.
        #
        # The exception to this rule is if the downloader replies with
        # an error state, or if downloading has transitioned to finished
        # state.
        #
        # This also does not apply to any state which we set on the
        # downloader via a restore command.  A restore command before
        # a pause/resume/cancel will work as intended, and no special
        # trickery is required.  A restore command which happens after
        # a pause/resume/cancel is void, so no work is required.
        #
        # I hope this makes sense and is clear!
        valid_states = (u'downloading', u'paused', u'stopped',
                        u'uploading-paused', u'finished')
        if (cmd_done and state in valid_states
                and new_state in valid_states and state != new_state):
            # downloading -> finished is a legitimate spontaneous
            # transition, exempt from the staleness check.
            if not (state == u'downloading' and new_state == u'finished'):
                logging.debug('self = %s STALE. '
                              'Saved state %s, got state %s. Discarding.',
                              self, state, new_state)
                return False
        # We are updating! Reset the status_updates_frozen flag.
        self.status_updates_frozen = False
        # We have something to update: update the last updated timestamp.
        self.last_update = now
        was_finished = self.is_finished()
        old_filename = self.get_filename()
        self.before_changing_rates()
        self.update_status_attributes(data)
        self.after_changing_rates()
        # Store the time the download finished
        finished = self.is_finished() and not was_finished
        name_changed = self.get_filename() != old_filename
        file_migrated = (self.is_finished() and name_changed)
        # Stop seeding once the configured upload ratio is exceeded
        # (unless the upload was started manually).
        if ((self.get_state() == u'uploading'
             and not self.manualUpload
             and (app.config.get(prefs.LIMIT_UPLOAD_RATIO)
                  and self.get_upload_ratio() >
                  app.config.get(prefs.UPLOAD_RATIO)))):
            self.stop_upload()
        self.signal_change()
        self.update_item_list(finished, file_migrated, old_filename)
        return True
def update_icon_cache(self, url, info):
    """Handle the HTTP response for an icon fetch and refresh the cache.

    ``info`` appears to be a dict-like HTTP result with at least
    'status' plus 'filename'/'body' for 200 responses -- TODO confirm
    against the caller.  304 means the cached copy is still good.
    Unlike the older variant of this method, next_free_filename
    ValueErrors are caught, and the temp file is closed before being
    renamed into place.
    """
    self.dbItem.confirm_db_thread()
    if self.removed:
        # Item disappeared while the request was in flight; just
        # release the updater slot.
        app.icon_cache_updater.update_finished()
        return
    needs_save = False
    needsChange = False
    if info == None or (info['status'] != 304 and info['status'] != 200):
        self.error_callback(url, "bad response")
        return
    try:
        # Our cache is good. Hooray!
        if info['status'] == 304:
            return
        needsChange = True
        # We have to update it, and if we can't write to the file, we
        # should pick a new filename.
        if ((self.filename and not fileutil.access(self.filename,
                                                   os.R_OK | os.W_OK))):
            self.filename = None
        cachedir = app.config.get(prefs.ICON_CACHE_DIRECTORY)
        try:
            fileutil.makedirs(cachedir)
        except OSError:
            # Directory probably exists already.
            pass
        try:
            # Write to a temp file.
            if self.filename:
                tmp_filename = self.filename + ".part"
            else:
                tmp_filename = os.path.join(cachedir,
                                            info["filename"]) + ".part"
            tmp_filename, output = next_free_filename(tmp_filename)
            output.write(info["body"])
            output.close()
        except IOError:
            self.remove_file(tmp_filename)
            return
        except ValueError:
            logging.warn('update_icon_cache: next_free_filename failed '
                         '#1, candidate = %r', tmp_filename)
            return
        # Build the final cache filename from the server-provided name.
        filename = unicode(info["filename"])
        filename = unicode_to_filename(filename, cachedir)
        filename = os.path.join(cachedir, filename)
        needs_save = True
        try:
            filename, fp = next_free_filename(filename)
        except ValueError:
            logging.warn('update_icon_cache: next_free_filename failed '
                         '#2, candidate = %r', filename)
            return
        if self.filename:
            # Reuse the existing cache filename: drop the fresh
            # candidate and overwrite the old file in place.
            filename = self.filename
            self.filename = None
            self.remove_file(filename)
        # we need to move the file here--so we close the file
        # pointer and then move the file.
        fp.close()
        try:
            self.remove_file(filename)
            fileutil.rename(tmp_filename, filename)
        except (IOError, OSError):
            logging.exception("iconcache: fileutil.move failed")
            filename = None
        self.filename = filename
        etag = unicodify(info.get("etag"))
        modified = unicodify(info.get("modified"))
        if self.etag != etag:
            needs_save = True
            self.etag = etag
        if self.modified != modified:
            needs_save = True
            self.modified = modified
        if self.url != url:
            needs_save = True
            self.url = url
    finally:
        # Runs on every exit from the try block (including the 304
        # early return): notify listeners and release the updater slot.
        if needsChange:
            self.icon_changed(needs_save=needs_save)
        self.updating = False
        if self.needsUpdate:
            self.needsUpdate = False
            self.request_update(True)
        app.icon_cache_updater.update_finished()
def update_status(cls, data):
    """Apply a status dict from the downloader process to the matching
    RemoteDownloader (looked up via data['dlid']).

    Skips the write entirely when nothing changed; otherwise updates
    status/metainfo, recalculates state, signals listeners, and fires
    finished/migration handling.
    """
    for field in data:
        if field not in ['filename', 'shortFilename', 'channelName',
                         'metainfo']:
            data[field] = unicodify(data[field])
    self = get_downloader_by_dlid(dlid=data['dlid'])
    if self is not None:
        # FIXME - this should get fixed.
        metainfo = data.pop('metainfo', self.metainfo)
        # For metainfo, the downloader process doesn't send the
        # keys if they haven't changed.  Therefore, use our
        # current values if the key isn't present.
        current = (self.status, self.metainfo)
        new = (data, metainfo)
        if current == new:
            return
        was_finished = self.is_finished()
        old_filename = self.get_filename()
        self.before_changing_status()
        # FIXME: how do we get all of the possible bit torrent
        # activity strings into gettext? --NN
        if data.has_key('activity') and data['activity']:
            data['activity'] = _(data['activity'])
        # only set attributes if something's changed.  This makes our
        # UPDATE statements contain less data
        if data != self.status:
            self.status = data
        if metainfo != self.metainfo:
            self.metainfo = metainfo
        self._recalc_state()
        # Store the time the download finished
        finished = self.is_finished() and not was_finished
        file_migrated = (self.is_finished()
                         and self.get_filename() != old_filename)
        needs_signal_item = not (finished or file_migrated)
        self.after_changing_status()
        # Stop seeding once the configured upload ratio is exceeded
        # (unless the upload was started manually).
        if ((self.get_state() == u'uploading'
             and not self.manualUpload
             and (app.config.get(prefs.LIMIT_UPLOAD_RATIO)
                  and self.get_upload_ratio() >
                  app.config.get(prefs.UPLOAD_RATIO)))):
            self.stop_upload()
        self.signal_change(needs_signal_item=needs_signal_item,
                           needs_save=False)
        if self.changed_attributes == set(('status',)):
            # if we just changed status, then we can wait a while
            # to store things to disk.  Since we go through
            # update_status() often, this results in a fairly
            # large performance gain and alleviates #12101
            self._save_later()
        else:
            self.signal_change()
        if finished:
            for item in self.item_list:
                item.on_download_finished()
        elif file_migrated:
            self._file_migrated(old_filename)
def __init__(self, errorMessage):
    """Describe a network connection failure to the user.

    Sets a short friendly description plus a longer one that embeds the
    underlying error message (unicodified for safe interpolation).
    """
    self.friendlyDescription = _("Can't connect")
    details = {"msg": util.unicodify(errorMessage)}
    self.longDescription = _("Connection Error: %(msg)s", details)
def update_status(cls, data, cmd_done=False):
    """Apply a status update from the downloader process, with stale
    detection and UI rate limiting.

    Returns False when the update is discarded as stale, True when it
    is applied or is a no-op (and implicitly None when no downloader
    matches data['dlid']).
    """
    for field in data:
        if field not in [
            'filename', 'shortFilename', 'channelName', 'metainfo'
        ]:
            data[field] = unicodify(data[field])
    self = get_downloader_by_dlid(dlid=data['dlid'])
    if self is not None:
        now = time.time()
        last_update = self.last_update
        rate_limit = False
        state = self.get_state()
        new_state = data.get('state', u'downloading')
        # If this item was marked as pending update, then any update
        # which comes in now which does not have cmd_done set is void.
        if not cmd_done and self.status_updates_frozen:
            logging.debug(
                'self = %s, '
                'saved state = %s '
                'downloader state = %s. '
                'Discard.', self, state, new_state)
            # treat as stale
            return False
        # If the timing between the status updates is too narrow,
        # try to skip it because it makes the UI jerky otherwise.
        if now < last_update:
            logging.debug('time.time() gone backwards last = %s now = %s',
                          last_update, now)
        else:
            diff = now - last_update
            if diff < self.MIN_STATUS_UPDATE_SPACING:
                logging.debug(
                    'Rate limit: '
                    'self = %s, now - last_update = %s, '
                    'MIN_STATUS_UPDATE_SPACING = %s.',
                    self, diff, self.MIN_STATUS_UPDATE_SPACING)
                rate_limit = True
        # If the state is one which we set and was meant to be passed
        # through to the downloader (valid_states), and the downloader
        # replied with something that was a response to a previous
        # download command, and state was also a part of valid_states,
        # but the saved state and the new state do not match
        # then it means the message is stale.
        #
        # Have a think about why this is true: when you set a state,
        # which is authoritative, to the downloader you expect it
        # to reply with that same state.  If they do not match then it
        # means the message is stale.
        #
        # The exception to this rule is if the downloader replies with
        # an error state, or if downloading has transitioned to finished
        # state.
        #
        # This also does not apply to any state which we set on the
        # downloader via a restore command.  A restore command before
        # a pause/resume/cancel will work as intended, and no special
        # trickery is required.  A restore command which happens after
        # a pause/resume/cancel is void, so no work is required.
        #
        # I hope this makes sense and is clear!
        valid_states = (u'downloading', u'paused', u'stopped',
                        u'uploading-paused', u'finished')
        if (cmd_done and state in valid_states
                and new_state in valid_states and state != new_state):
            # downloading -> finished is a legitimate spontaneous
            # transition, exempt from the staleness check.
            if not (state == u'downloading' and new_state == u'finished'):
                logging.debug(
                    'self = %s STALE. '
                    'Saved state %s, got state %s. Discarding.',
                    self, state, new_state)
                return False
        # We are updating! Reset the status_updates_frozen flag.
        self.status_updates_frozen = False
        # FIXME - this should get fixed.
        metainfo = data.pop('metainfo', self.metainfo)
        # For metainfo, the downloader process doesn't send the
        # keys if they haven't changed.  Therefore, use our
        # current values if the key isn't present.
        current = (self.status, self.metainfo)
        new = (data, metainfo)
        if current == new:
            return True
        # We have something to update: update the last updated timestamp.
        self.last_update = now
        was_finished = self.is_finished()
        old_filename = self.get_filename()
        self.before_changing_status()
        # FIXME: how do we get all of the possible bit torrent
        # activity strings into gettext? --NN
        if data.has_key('activity') and data['activity']:
            data['activity'] = _(data['activity'])
        # only set attributes if something's changed.  This makes our
        # UPDATE statements contain less data
        if data != self.status:
            self.status = data
        if metainfo != self.metainfo:
            self.metainfo = metainfo
        self._recalc_state()
        # Store the time the download finished
        finished = self.is_finished() and not was_finished
        file_migrated = (self.is_finished()
                         and self.get_filename() != old_filename)
        # Suppress per-item signalling for rate-limited updates too.
        needs_signal_item = not (finished or file_migrated or rate_limit)
        self.after_changing_status()
        # Stop seeding once the configured upload ratio is exceeded
        # (unless the upload was started manually).
        if ((self.get_state() == u'uploading'
             and not self.manualUpload
             and (app.config.get(prefs.LIMIT_UPLOAD_RATIO)
                  and self.get_upload_ratio() > app.config.get(
                      prefs.UPLOAD_RATIO)))):
            self.stop_upload()
        if self.changed_attributes == set(('status', )):
            # if we just changed status, then we can wait a while
            # to store things to disk.  Since we go through
            # update_status() often, this results in a fairly
            # large performance gain and alleviates #12101
            self._save_later()
            self.signal_change(needs_signal_item=needs_signal_item,
                               needs_save=False)
        else:
            self.signal_change()
        if finished:
            for item in self.item_list:
                item.on_download_finished()
        elif file_migrated:
            self._file_migrated(old_filename)
        return True
def update_status(cls, data):
    """Apply a status dict from the downloader process, including
    torrent fastResumeData, to the matching RemoteDownloader.

    Skips the write when nothing changed; otherwise updates status,
    metainfo and fast-resume data, recalculates state, signals
    listeners, and fires finished/migration handling.
    """
    for field in data:
        if field not in ['filename', 'shortFilename', 'channelName',
                         'metainfo', 'fastResumeData']:
            data[field] = unicodify(data[field])
    self = get_downloader_by_dlid(dlid=data['dlid'])
    # print data
    if self is not None:
        # FIXME - this should get fixed.
        metainfo = data.pop('metainfo', self.metainfo)
        fast_resume_data = data.pop('fastResumeData',
                                    self.fast_resume_data)
        # for metainfo and fast_resume_data, the downloader process
        # doesn't send the keys if they haven't changed.  Therefore, use
        # our current values if the key isn't present
        current = (self.status, self.metainfo, self.fast_resume_data)
        new = (data, metainfo, fast_resume_data)
        try:
            if current == new:
                return
        except Exception:
            # This is a known bug with the way we used to save
            # fast resume data
            logging.exception("RemoteDownloader.update_status: exception when comparing status")
        was_finished = self.is_finished()
        old_filename = self.get_filename()
        self.before_changing_status()
        # FIXME: how do we get all of the possible bit torrent
        # activity strings into gettext? --NN
        if data.has_key('activity') and data['activity']:
            data['activity'] = _(data['activity'])
        # only set attributes if something's changed.  This makes our
        # UPDATE statements contain less data
        if data != self.status:
            self.status = data
        if metainfo != self.metainfo:
            self.metainfo = metainfo
        if fast_resume_data != self.fast_resume_data:
            self.fast_resume_data = fast_resume_data
        self._recalc_state()
        # Store the time the download finished
        finished = self.is_finished() and not was_finished
        file_migrated = (self.is_finished()
                         and self.get_filename() != old_filename)
        needs_signal_item = not (finished or file_migrated)
        self.after_changing_status()
        # Stop seeding once the configured upload ratio is exceeded
        # (unless the upload was started manually).
        if ((self.get_state() == u'uploading'
             and not self.manualUpload
             and (app.config.get(prefs.LIMIT_UPLOAD_RATIO)
                  and self.get_upload_ratio() >
                  app.config.get(prefs.UPLOAD_RATIO)))):
            self.stop_upload()
        self.signal_change(needs_signal_item=needs_signal_item,
                           needs_save=False)
        if self.changed_attributes == set(('status',)):
            # if we just changed status, then we can wait a while
            # to store things to disk.  Since we go through
            # update_status() often, this results in a fairly
            # large performance gain and alleviates #12101
            self._save_later()
        else:
            self.signal_change()
        if finished:
            for item in self.item_list:
                item.on_download_finished()
        elif file_migrated:
            self._file_migrated(old_filename)
def update_status(cls, data, cmd_done=False):
    """Apply a status update from the downloader process, with stale
    detection and UI rate limiting (see MIN_STATUS_UPDATE_SPACING).

    Returns False when the update is discarded as stale, True when it
    is applied or is a no-op (implicitly None when no downloader
    matches data['dlid']).
    """
    for field in data:
        if field not in ['filename', 'shortFilename', 'channelName',
                         'metainfo']:
            data[field] = unicodify(data[field])
    self = get_downloader_by_dlid(dlid=data['dlid'])
    if self is not None:
        now = time.time()
        last_update = self.last_update
        rate_limit = False
        state = self.get_state()
        new_state = data.get('state', u'downloading')
        # If this item was marked as pending update, then any update
        # which comes in now which does not have cmd_done set is void.
        if not cmd_done and self.status_updates_frozen:
            logging.debug('self = %s, '
                          'saved state = %s '
                          'downloader state = %s. '
                          'Discard.', self, state, new_state)
            # treat as stale
            return False
        # If the timing between the status updates is too narrow,
        # try to skip it because it makes the UI jerky otherwise.
        if now < last_update:
            logging.debug('time.time() gone backwards last = %s now = %s',
                          last_update, now)
        else:
            diff = now - last_update
            if diff < self.MIN_STATUS_UPDATE_SPACING:
                logging.debug('Rate limit: '
                              'self = %s, now - last_update = %s, '
                              'MIN_STATUS_UPDATE_SPACING = %s.',
                              self, diff, self.MIN_STATUS_UPDATE_SPACING)
                rate_limit = True
        # If the state is one which we set and was meant to be passed
        # through to the downloader (valid_states), and the downloader
        # replied with something that was a response to a previous
        # download command, and state was also a part of valid_states,
        # but the saved state and the new state do not match
        # then it means the message is stale.
        #
        # Have a think about why this is true: when you set a state,
        # which is authoritative, to the downloader you expect it
        # to reply with that same state.  If they do not match then it
        # means the message is stale.
        #
        # The exception to this rule is if the downloader replies with
        # an error state, or if downloading has transitioned to finished
        # state.
        #
        # This also does not apply to any state which we set on the
        # downloader via a restore command.  A restore command before
        # a pause/resume/cancel will work as intended, and no special
        # trickery is required.  A restore command which happens after
        # a pause/resume/cancel is void, so no work is required.
        #
        # I hope this makes sense and is clear!
        valid_states = (u'downloading', u'paused', u'stopped',
                        u'uploading-paused', u'finished')
        if (cmd_done and state in valid_states
                and new_state in valid_states and state != new_state):
            # downloading -> finished is a legitimate spontaneous
            # transition, exempt from the staleness check.
            if not (state == u'downloading' and new_state == u'finished'):
                logging.debug('self = %s STALE. '
                              'Saved state %s, got state %s. Discarding.',
                              self, state, new_state)
                return False
        # We are updating! Reset the status_updates_frozen flag.
        self.status_updates_frozen = False
        # FIXME - this should get fixed.
        metainfo = data.pop('metainfo', self.metainfo)
        # For metainfo, the downloader process doesn't send the
        # keys if they haven't changed.  Therefore, use our
        # current values if the key isn't present.
        current = (self.status, self.metainfo)
        new = (data, metainfo)
        if current == new:
            return True
        # We have something to update: update the last updated timestamp.
        self.last_update = now
        was_finished = self.is_finished()
        old_filename = self.get_filename()
        self.before_changing_status()
        # FIXME: how do we get all of the possible bit torrent
        # activity strings into gettext? --NN
        if data.has_key('activity') and data['activity']:
            data['activity'] = _(data['activity'])
        # only set attributes if something's changed.  This makes our
        # UPDATE statements contain less data
        if data != self.status:
            self.status = data
        if metainfo != self.metainfo:
            self.metainfo = metainfo
        self._recalc_state()
        # Store the time the download finished
        finished = self.is_finished() and not was_finished
        file_migrated = (self.is_finished()
                         and self.get_filename() != old_filename)
        # Suppress per-item signalling for rate-limited updates too.
        needs_signal_item = not (finished or file_migrated or rate_limit)
        self.after_changing_status()
        # Stop seeding once the configured upload ratio is exceeded
        # (unless the upload was started manually).
        if ((self.get_state() == u'uploading'
             and not self.manualUpload
             and (app.config.get(prefs.LIMIT_UPLOAD_RATIO)
                  and self.get_upload_ratio() >
                  app.config.get(prefs.UPLOAD_RATIO)))):
            self.stop_upload()
        if self.changed_attributes == set(('status',)):
            # if we just changed status, then we can wait a while
            # to store things to disk.  Since we go through
            # update_status() often, this results in a fairly
            # large performance gain and alleviates #12101
            self._save_later()
            self.signal_change(needs_signal_item=needs_signal_item,
                               needs_save=False)
        else:
            self.signal_change()
        if finished:
            for item in self.item_list:
                item.on_download_finished()
        elif file_migrated:
            self._file_migrated(old_filename)
        return True
def update_icon_cache(self, url, info):
    """Handle the HTTP response for an icon fetch and refresh the cache.

    Same logic as the app.icon_cache_updater variant in this file, but
    uses the module-level icon_cache_updater.  ``info`` appears to be a
    dict-like HTTP result with 'status' plus 'filename'/'body' for 200
    responses -- TODO confirm against the caller; 304 means the cached
    copy is still good.
    """
    self.dbItem.confirm_db_thread()
    if self.removed:
        # Item disappeared while the request was in flight; just
        # release the updater slot.
        icon_cache_updater.update_finished()
        return
    needs_save = False
    needsChange = False
    if info == None or (info['status'] != 304 and info['status'] != 200):
        self.error_callback(url, "bad response")
        return
    try:
        # Our cache is good. Hooray!
        if info['status'] == 304:
            return
        needsChange = True
        # We have to update it, and if we can't write to the file, we
        # should pick a new filename.
        if ((self.filename and not fileutil.access(self.filename,
                                                   os.R_OK | os.W_OK))):
            self.filename = None
        cachedir = app.config.get(prefs.ICON_CACHE_DIRECTORY)
        try:
            fileutil.makedirs(cachedir)
        except OSError:
            # Directory probably exists already.
            pass
        try:
            # Write to a temp file.
            if self.filename:
                tmp_filename = self.filename + ".part"
            else:
                tmp_filename = os.path.join(cachedir,
                                            info["filename"]) + ".part"
            tmp_filename, output = next_free_filename(tmp_filename)
            output.write(info["body"])
            output.close()
        except IOError:
            self.remove_file(tmp_filename)
            return
        except ValueError:
            logging.warn('update_icon_cache: next_free_filename failed '
                         '#1, candidate = %r', tmp_filename)
            return
        # Build the final cache filename from the server-provided name.
        filename = unicode(info["filename"])
        filename = unicode_to_filename(filename, cachedir)
        filename = os.path.join(cachedir, filename)
        needs_save = True
        try:
            filename, fp = next_free_filename(filename)
        except ValueError:
            logging.warn('update_icon_cache: next_free_filename failed '
                         '#2, candidate = %r', filename)
            return
        if self.filename:
            # Reuse the existing cache filename: drop the fresh
            # candidate and overwrite the old file in place.
            filename = self.filename
            self.filename = None
            self.remove_file(filename)
        # we need to move the file here--so we close the file
        # pointer and then move the file.
        fp.close()
        try:
            self.remove_file(filename)
            fileutil.rename(tmp_filename, filename)
        except (IOError, OSError):
            logging.exception("iconcache: fileutil.move failed")
            filename = None
        self.filename = filename
        etag = unicodify(info.get("etag"))
        modified = unicodify(info.get("modified"))
        if self.etag != etag:
            needs_save = True
            self.etag = etag
        if self.modified != modified:
            needs_save = True
            self.modified = modified
        if self.url != url:
            needs_save = True
            self.url = url
    finally:
        # Runs on every exit from the try block (including the 304
        # early return): notify listeners and release the updater slot.
        if needsChange:
            self.icon_changed(needs_save=needs_save)
        self.updating = False
        if self.needsUpdate:
            self.needsUpdate = False
            self.request_update(True)
        icon_cache_updater.update_finished()