def download_thumbnail(self, danbooru_item):
    """Retrieve a thumbnail for a specific Danbooru item.

    KIO.storedGet is used for asynchronous download. Jobs are scheduled
    to prevent server overload.

    :param danbooru_item: An instance of
        :class:`DanbooruItem <danbooru.api.containers.DanbooruItem>`

    """

    image_url = kdecore.KUrl(danbooru_item.preview_url)
    flags = KIO.JobFlags(KIO.HideProgressInfo)

    pixmap = QtGui.QPixmap()
    name = image_url.fileName()

    # No need to download if in cache
    if self.cache is not None:
        if self.cache.find(name, pixmap):
            danbooru_item.pixmap = pixmap
            self.postRetrieved.emit(danbooru_item)
            return

    job = KIO.storedGet(image_url, KIO.NoReload, flags)
    job.setProperty("danbooru_item", QtCore.QVariant(danbooru_item))

    # Schedule: we don't want to overload servers
    KIO.Scheduler.setJobPriority(job, 1)

    job.result.connect(self.__slot_download_thumbnail)
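# A minimal sketch of the result slot connected above. The body is assumed
# for illustration (the original handler is not shown here): it recovers the
# DanbooruItem stored as a job property, decodes the downloaded bytes into a
# QPixmap and, assuming the cache behaves like KPixmapCache, stores the pixmap
# under the same key so the early-return branch in download_thumbnail can find
# it next time.
def __slot_download_thumbnail(self, job):

    if job.error():
        return

    danbooru_item = job.property("danbooru_item").toPyObject()

    pixmap = QtGui.QPixmap()
    pixmap.loadFromData(job.data())

    if self.cache is not None:
        # Same key used by the cache lookup in download_thumbnail
        self.cache.insert(kdecore.KUrl(danbooru_item.preview_url).fileName(),
                          pixmap)

    danbooru_item.pixmap = pixmap
    self.postRetrieved.emit(danbooru_item)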
def all_tags(self):
    """Fetch all the current tags."""

    parameters = dict(limit=0, order="name")
    flags = KIO.JobFlags(KIO.HideProgressInfo)

    request_url = utils.danbooru_request_url(self.url, TAG_URL, parameters,
                                             self.username, self.password)

    job = KIO.storedGet(request_url, KIO.Reload, flags)
    job.result.connect(self.__slot_download_all_tags)
def batch_download(self, ok):
    "Download images in batch."

    selected_items = self.thumbnailarea.selected_images()

    if not selected_items:
        return

    start_url = KUrl("kfiledialog:///danbooru")
    caption = i18n("Select a directory to save the images to")
    directory = KFileDialog.getExistingDirectoryUrl(start_url, self, caption)

    if directory.isEmpty():
        return

    for item in selected_items:
        file_url = item.url_label.url()
        tags = item.data.tags

        # Make a local copy to append paths to, as addPath works in-place
        destination = KUrl(directory)

        file_name = KUrl(file_url).fileName()
        destination.addPath(file_name)

        job = KIO.file_copy(KUrl(file_url), destination, -1)
        job.setProperty("tags", QVariant(tags))
        job.result.connect(self.batch_download_slot)
def get_related_tags(self, tags=None, tag_type=None, blacklist=None):
    """Get tags that are related to a user-supplied list.

    :param tags: The tags to check for relation
    :param tag_type: Either :const:`None`, or a specific tag type picked
        from ``general``, ``artist``, ``copyright``, or ``character``
    :param blacklist: A blacklist of tags to be excluded from posts

    """

    if tags is None:
        return

    tags = "+".join(tags)
    parameters = dict(tags=tags)

    if tag_type is not None:
        parameters["type"] = tag_type

    request_url = utils.danbooru_request_url(self.url, RELATED_TAG_URL,
                                             parameters, self.username,
                                             self.password)

    job = KIO.storedGet(request_url, KIO.NoReload, KIO.HideProgressInfo)
    job.setProperty("tag_blacklist", QtCore.QVariant(blacklist))
    job.result.connect(self.__slot_process_related_tag_list)
def get_linter(linter_name, callback):
    """tries to retrieve a linter and calls `callback` on it on success"""

    if linter_name in LINTERS:
        callback(LINTERS[linter_name])
        return

    if linter_name not in NEEDS_LICENSE:
        showError(i18nc('@info:status', 'No acceptable linter named %1!',
                        linter_name))
        return

    license, objname, url = NEEDS_LICENSE[linter_name]
    cache_path = p.join(CACHE_DIR, linter_name + '.js')

    def success():
        """store newly created linter and “return” it"""
        LINTERS[linter_name] = JSModule(JS_ENGINE, cache_path, objname)
        callback(LINTERS[linter_name])

    if p.exists(cache_path):
        success()
        return

    # the user doesn’t have the file. ask to accept its license
    if not license_accepted(license):
        return

    download = KIO.file_copy(KUrl(url), KUrl.fromPath(cache_path))

    @download.result.connect
    def _call(job):
        if job.error():
            showError(i18nc('@info:status', 'Download failed'))
        else:
            success()

    download.start()
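# Hypothetical usage of get_linter above: 'jshint' and lint_current_document
# are made-up names for illustration. The point is that the linter object is
# always delivered through the callback, possibly only after an asynchronous
# KIO download of the module has finished.
get_linter('jshint', lambda linter: lint_current_document(linter))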
def start_download(self):
    """Slot invoked by clicking on downloadButton.

    It first shows a message box informing the user of what will be done,
    then downloads the data using KIO, connecting the result signal to the
    handle_download slot."""

    kdeui.KMessageBox.information(self.parent(),
                                  "Now data will be retrieved from "
                                  "www.kde.org using KIO")

    # KIO wants KUrls
    data_url = kdecore.KUrl("http://www.kde.org")

    # Here we set some basic job properties. The call is made by passing the
    # URL, then the LoadType (usually NoReload), then the JobFlags. When a
    # single flag is used, it can be passed directly, as done here to
    # suppress the progress information. Otherwise, the flags have to be
    # combined with a binary OR (|).
    # KIO.storedGet returns the job handling the operation.
    retrieve_job = KIO.storedGet(data_url, KIO.NoReload,
                                 KIO.HideProgressInfo)

    # To actually obtain the data, we connect the result signal, which is
    # emitted at the end of the operation. Notice that KIO.storedGet keeps
    # the result in memory.
    retrieve_job.result.connect(self.handle_download)
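# A minimal sketch of the handle_download slot named in the docstring above;
# its body is assumed here for illustration. Because KIO.storedGet keeps the
# whole payload in memory, job.data() returns the complete QByteArray once
# the result signal fires.
def handle_download(self, job):

    if job.error():
        kdeui.KMessageBox.error(self.parent(), job.errorString())
        return

    data = job.data()
    kdeui.KMessageBox.information(self.parent(),
                                  "Retrieved %d bytes from %s" %
                                  (data.size(), job.url().prettyUrl()))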
def rename(self, songs):
    # TODO: parametrize the main music collection
    mainColl = self.collaggr.collections[0]
    base = mainColl.path
    d = QDir()

    for song in songs:
        dstPath = self.songPath(base, song)
        dstDir = os.path.dirname(dstPath)

        # TODO: QDir is not network transparent; try to make sub jobs
        # creating the missing path
        if d.mkpath(dstDir):
            # HINT: KUrl because KIO.* uses KUrl
            # src = KUrl(song.filepath)
            src = KUrl(utils.path2qurl(song.filepath))
            # BUG: Renamer.rename()
            # PyQt4.QtCore.QUrl(u'file:///home/mdione/media/music/new/bandidos rurales/05 - uruguay, uruguay.mp3') ->
            # PyQt4.QtCore.QUrl(u'file:///home/mdione/media/music/Le\xf3n Gieco/2001 - Bandidos rurales/05 - Uruguay, Uruguay.mp3')
            # ^^^^
            dst = KUrl(dstPath)
            logger.info("Renamer.rename()", src, "->", dst)

            # TODO: do not launch them all in parallel
            job = KIO.file_move(src, dst)
            # TODO: emit a finished signal.
            # print "Renamer.rename()", job
            job.result.connect(self.jobFinished)
            # print "Renamer.rename(): connected"
            self.jobs.append(job)
            # print "Renamer.rename(): next!"
        else:
            logger.info("Renamer.rename(): failed to create", dstDir,
                        ", skipping", dstPath)
def download(self, url):
    """Download the package from the given KUrl."""
    self.progress.setRange(0, 100)
    self.status.setText(i18n("Downloading %1...", url.fileName()))
    dest = KGlobal.dirs().saveLocation('tmp')
    self.job = KIO.copy(url, KUrl(dest),
                        KIO.JobFlags(KIO.Overwrite | KIO.Resume |
                                     KIO.HideProgressInfo))
    QObject.connect(self.job, SIGNAL("percent(KJob*, unsigned long)"),
                    self.slotPercent)
    QObject.connect(self.job, SIGNAL("result(KJob*)"),
                    self.slotResult, Qt.QueuedConnection)
    self.job.start()
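# A minimal sketch of the slots connected above; the names match the
# connections, but the bodies are assumed for illustration. percent() is
# emitted by the job with the current progress, result() once the copy ends.
def slotPercent(self, job, percent):
    self.progress.setValue(percent)

def slotResult(self, job):
    if job.error():
        self.status.setText(job.errorString())
    else:
        self.status.setText(i18n("Download finished."))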
def kio_get(self, url_string):
    " asynchronous, multi-protocol, resumable, error-checked download "
    downloader_thread = KIO.storedGet(KUrl(str(url_string).strip()))

    def kio_job_data_or_error(job):
        " retrieve job data or job error "
        print((job.data() if not job.error() else job.error()))
        return job.data() if not job.error() else job.error()

    downloader_thread.result.connect(kio_job_data_or_error)
def do_convert(self):
    print "CurrencyConverter::do_convert"
    print "CurrencyConverter::do_convert. Update interval:", self.update_interval
    print "Convert from:", self.def_from, "to", self.def_to

    url = "http://quote.yahoo.com/d/quotes.csv?s=%s%s=X&f=l1&e=.csv" % \
          (self.def_from, self.def_to)
    print url

    self.applet.setBusy(True)

    job = KIO.get(KUrl(url), KIO.NoReload, KIO.HideProgressInfo)
    job.warning.connect(self.job_warning)
    job.data.connect(self.job_received)
    job.result.connect(self.job_done)

    self.timer.start()  # restart the timer on activity
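# A minimal sketch of the slots connected above; the bodies are assumed for
# illustration. Unlike KIO.storedGet, KIO.get streams the payload through the
# data signal in QByteArray chunks, so the handler has to accumulate them.
def job_received(self, job, data):
    self.buffer += str(data)   # self.buffer is assumed to start as ""

def job_done(self, job):
    self.applet.setBusy(False)
    if not job.error():
        print "Conversion rate:", self.buffer.strip()
    self.buffer = ""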
def get_post_list(self, page=None, tags=None, limit=100, rating="Safe",
                  blacklist=None):
    """
    Retrieve posts from the Danbooru board.

    There is a fixed limit of 100 posts at a time, imposed by the Danbooru
    API: larger numbers are ignored. The limitation can be worked around by
    specifying the *page* to view, as in the web version.

    If the *tags* parameter is set, only posts with those tags will be
    retrieved. Likewise, setting *blacklist* will skip posts whose tags are
    contained in such a blacklist. Ratings can be controlled with the
    *rating* parameter.

    :param page: The page to view (default: 0)
    :param tags: A list of tags to include (if None, use all tags)
    :param limit: The maximum number of items to retrieve (up to 100)
    :param rating: The maximum allowed rating for items, chosen from
        "Safe", "Questionable", and "Explicit"
    :param blacklist: A blacklist of tags used to exclude posts
    """

    if limit > 100:
        limit = 100

    if tags is None:
        tags = ""
    else:
        self._current_tags = tags
        tags = "+".join(tags)

    parameters = dict(tags=tags, limit=limit)

    if page is not None:
        parameters["page"] = page

    request_url = utils.danbooru_request_url(self.url, POST_URL, parameters,
                                             self.username, self.password)

    job = KIO.storedGet(request_url, KIO.NoReload, KIO.HideProgressInfo)
    job.setProperty("ratings", QtCore.QVariant(rating))
    job.setProperty("blacklisted_tags", QtCore.QVariant(blacklist))
    job.result.connect(self.__slot_process_post_list)
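# Hypothetical call to get_post_list above: "api" and the tag names are
# made-up for illustration. The call returns immediately; the posts arrive
# asynchronously through the slot connected to the job's result signal.
api.get_post_list(tags=["landscape"], limit=20, rating="Safe",
                  blacklist=["spoilers"])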
def get_pool_list(self, page=None):
    """Get a list of available pools.

    :param page: The page of the list to browse

    """

    if page is not None:
        parameters = dict(page=page)
    else:
        parameters = None

    request_url = utils.danbooru_request_url(self.url, POOL_URL, parameters,
                                             self.username, self.password)

    job = KIO.storedGet(request_url, KIO.NoReload, KIO.HideProgressInfo)
    job.result.connect(self.__slot_process_pool_list)
def get_tag_list(self, limit=10, name="", blacklist=None):
    """Get a list of tags.

    If *name* is supplied, a list of tags including the exact name of the
    tag is fetched from Danbooru, otherwise the most recent tags are
    retrieved.

    :param limit: The number of tags to retrieve
    :param name: The name of the tag to retrieve, or an empty string
    :param blacklist: If not :const:`None`, a list of tags to exclude from
        searches.

    """

    parameters = dict(name=name, limit=limit)
    request_url = utils.danbooru_request_url(self.url, TAG_URL, parameters,
                                             self.username, self.password)

    job = KIO.storedGet(request_url, KIO.NoReload, KIO.HideProgressInfo)
    job.setProperty("tag_blacklist", QtCore.QVariant(blacklist))
    job.result.connect(self.__slot_process_tag_list)
def query_tag(self, tagNames):
    soprano_term_uri = Soprano.Vocabulary.NAO.hasTag()
    nepomuk_property = Nepomuk.Types.Property(soprano_term_uri)

    tag = Nepomuk.Tag(tagNames[0])
    if tag.uri() == "":
        print "Tag \"%s\" does not exist" % tagNames[0]
        return False

    comparison_term = Nepomuk.Query.ComparisonTerm(
        nepomuk_property, Nepomuk.Query.ResourceTerm(tag))

    if self.options.filesOnly:
        query = Nepomuk.Query.FileQuery(comparison_term)
    else:
        query = Nepomuk.Query.Query(comparison_term)

    search_url = query.toSearchUrl()
    search_job = KIO.listDir(kdecore.KUrl(search_url), KIO.HideProgressInfo)
    search_job.entries.connect(self.search_slot)
    search_job.result.connect(self.result)
    search_job.start()

    return search_job
def get_pool(self, pool_id, page=None, rating="Safe", blacklist=None):
    """Download all the posts associated with a specific pool.

    :param pool_id: The pool ID to retrieve posts from
    :param page: The page of the pool
    :param rating: The maximum allowed rating for items
    :param blacklist: A blacklist of tags used to exclude posts

    """

    parameters = dict(id=pool_id)

    if page is not None:
        parameters["page"] = page

    request_url = utils.danbooru_request_url(self.url, POOL_DATA_URL,
                                             parameters, self.username,
                                             self.password)

    job = KIO.storedGet(request_url, KIO.NoReload, KIO.HideProgressInfo)
    job.setProperty("ratings", QtCore.QVariant(rating))
    job.setProperty("blacklisted_tags", QtCore.QVariant(blacklist))

    # We get a list of posts, which we can handle normally
    job.result.connect(self.__slot_process_post_list)
def query_string(self, url):
    search_job = KIO.listDir(kdecore.KUrl('nepomuksearch:/?query=' + url),
                             KIO.HideProgressInfo)
    search_job.entries.connect(self.search_slot)
    search_job.result.connect(self.result)
    search_job.start()

    return search_job
def statusChanged(self):
    data_url = KUrl(self.getImageUrl())
    retrieve_job = KIO.storedGet(data_url, KIO.NoReload,
                                 KIO.HideProgressInfo)
    retrieve_job.result.connect(self.handleDownloadedImage)
def fetchHackerspaceStatus(self):
    # TODO: configurable hackerspace url
    data_url = KUrl("http://it-syndikat.org/status.php")
    retrieve_job = KIO.storedGet(data_url, KIO.Reload, KIO.HideProgressInfo)
    retrieve_job.result.connect(self.handleDownloadedJson)
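# A minimal sketch of the slot connected above; the body, the attribute name
# and the layout of the returned JSON are assumed for illustration, not taken
# from the original applet.
def handleDownloadedJson(self, job):

    if job.error():
        return

    import json
    status = json.loads(str(job.data()))
    # Keep the parsed document around; how it is displayed is up to the applet
    self.hackerspace_status = status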