def get_related_tags(self, tags=None, tag_type=None, blacklist=None):
    """Get tags that are related to a user-supplied list.

    :param tags: The tags to check for relation
    :param tag_type: Either :const:`None`, or a specific tag type picked
        from ``general``, ``artist``, ``copyright``, or ``character``
    :param blacklist: A blacklist of tags to be excluded from posts
    """

    if tags is None:
        return

    tags = "+".join(tags)
    parameters = dict(tags=tags)

    if tag_type is not None:
        parameters["type"] = tag_type

    request_url = utils.danbooru_request_url(self.url, RELATED_TAG_URL,
                                             parameters, self.username,
                                             self.password)

    job = KIO.storedGet(request_url, KIO.NoReload, KIO.HideProgressInfo)
    job.setProperty("tag_blacklist", QtCore.QVariant(blacklist))
    job.result.connect(self.__slot_process_related_tag_list)
def start_download(self):
    """Slot invoked by clicking on downloadButton.

    It first shows a message box informing the user of what will be done,
    then downloads the data using KIO, connecting the result signal to
    the handle_download slot."""

    kdeui.KMessageBox.information(self.parent(),
                                  "Now data will be retrieved from "
                                  "www.kde.org using KIO")

    # KIO wants KUrls
    data_url = kdecore.KUrl("http://www.kde.org")

    # Here we set some basic job properties. The call is made by passing
    # the URL, then the LoadType (usually NoReload), then the JobFlags.
    # If we use a single flag, we can pass it directly, as we do here to
    # suppress the progress information; otherwise the flags have to be
    # combined with a binary OR (|).
    # KIO.storedGet returns the job handling the operation.
    retrieve_job = KIO.storedGet(data_url, KIO.NoReload,
                                 KIO.HideProgressInfo)

    # To actually obtain the data, we connect the result signal, which is
    # emitted at the end of the operation. Notice that KIO.storedGet keeps
    # the result in memory.
    retrieve_job.result.connect(self.handle_download)
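The handle_download slot connected above is not part of this snippet. The sketch below shows what such a result slot could look like, assuming the job delivered is a KIO.StoredTransferJob and that kdeui is imported as in the function above; the method name matches the connection, but the body is illustrative only.

def handle_download(self, job):
    """Illustrative result slot: check for errors, then read the data
    that KIO.storedGet kept in memory (a sketch, not the original code)."""
    if job.error():
        # KJob reports failures through error()/errorString()
        kdeui.KMessageBox.error(self.parent(), job.errorString())
        return
    page_data = job.data()  # QByteArray with the downloaded page
    kdeui.KMessageBox.information(self.parent(),
                                  "Retrieved %d bytes from www.kde.org"
                                  % page_data.size())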
def download_thumbnail(self, danbooru_item):
    """Retrieve a thumbnail for a specific Danbooru item.

    KIO.storedGet is used for asynchronous download. Jobs are scheduled
    to prevent server overload.

    :param danbooru_item: An instance of :class:`DanbooruItem
        <danbooru.api.containers.DanbooruItem>`
    """

    image_url = kdecore.KUrl(danbooru_item.preview_url)
    flags = KIO.JobFlags(KIO.HideProgressInfo)

    pixmap = QtGui.QPixmap()
    name = image_url.fileName()

    # No need to download if the thumbnail is already in the cache
    if self.cache is not None:
        if self.cache.find(name, pixmap):
            danbooru_item.pixmap = pixmap
            self.postRetrieved.emit(danbooru_item)
            return

    job = KIO.storedGet(image_url, KIO.NoReload, flags)
    job.setProperty("danbooru_item", QtCore.QVariant(danbooru_item))

    # Schedule: we don't want to overload servers
    KIO.Scheduler.setJobPriority(job, 1)

    job.result.connect(self.__slot_download_thumbnail)
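The private result slot is not shown here. The sketch below gives one plausible implementation, assuming self.cache is a KPixmapCache-style object with an insert(name, pixmap) method and that the item stored on the job can be recovered with property().toPyObject(); treat it as a sketch, not the original code.

def __slot_download_thumbnail(self, job):
    """Illustrative slot: turn the downloaded bytes into a pixmap,
    cache it, and emit the enriched item (assumed implementation)."""
    if job.error():
        return
    danbooru_item = job.property("danbooru_item").toPyObject()
    pixmap = QtGui.QPixmap()
    if not pixmap.loadFromData(job.data()):
        return
    if self.cache is not None:
        # Cache under the same file name used for the lookup above
        self.cache.insert(kdecore.KUrl(danbooru_item.preview_url).fileName(),
                          pixmap)
    danbooru_item.pixmap = pixmap
    self.postRetrieved.emit(danbooru_item)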
def kio_get(self, url_string):
    """Multi-protocol, resumable, error-checked asynchronous download."""
    downloader_job = KIO.storedGet(KUrl(str(url_string).strip()))

    def kio_job_data_or_error(job):
        """Return the job data, or the error code if the job failed."""
        result = job.data() if not job.error() else job.error()
        print(result)
        return result

    downloader_job.result.connect(kio_job_data_or_error)
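Because the job runs asynchronously, kio_get only schedules the transfer; the connected function fires once an event loop is running. One possible way to drive it from a small script is sketched below. Downloader is a hypothetical class hosting kio_get, and the KAboutData values are placeholders.

import sys
from PyKDE4.kdecore import KAboutData, KCmdLineArgs, ki18n
from PyKDE4.kdeui import KApplication

if __name__ == "__main__":
    about = KAboutData("kio_get_demo", "", ki18n("kio_get demo"), "0.1")
    KCmdLineArgs.init(sys.argv, about)
    app = KApplication()
    Downloader().kio_get("http://www.kde.org")  # hypothetical host class
    app.exec_()  # the result signal is delivered from this event loop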
def all_tags(self):
    """Fetch all the current tags."""

    parameters = dict(limit=0, order="name")
    flags = KIO.JobFlags(KIO.HideProgressInfo)

    request_url = utils.danbooru_request_url(self.url, TAG_URL, parameters,
                                             self.username, self.password)

    job = KIO.storedGet(request_url, KIO.Reload, flags)
    job.result.connect(self.__slot_download_all_tags)
def get_post_list(self, page=None, tags=None, limit=100, rating="Safe",
                  blacklist=None):
    """Retrieve posts from the Danbooru board.

    There is a fixed limit of 100 posts at a time, imposed by the
    Danbooru API: larger numbers are ignored. The limitation can be
    worked around by specifying the *page* to view, as in the web
    version.

    If the *tags* parameter is set, only posts with these tags will be
    retrieved. Likewise, setting *blacklist* will skip posts whose tags
    are contained in such a blacklist. Ratings can be controlled with
    the *rating* parameter.

    :param page: The page to view (default: 0)
    :param tags: A list of tags to include (if None, use all tags)
    :param limit: The maximum number of items to retrieve (up to 100)
    :param rating: The maximum allowed rating for items, between "Safe",
        "Questionable", and "Explicit"
    :param blacklist: A blacklist of tags used to exclude posts
    """

    if limit > 100:
        limit = 100

    if tags is None:
        tags = ""
    else:
        self._current_tags = tags
        tags = "+".join(tags)

    parameters = dict(tags=tags, limit=limit)

    if page is not None:
        parameters["page"] = page

    request_url = utils.danbooru_request_url(self.url, POST_URL, parameters,
                                             self.username, self.password)

    job = KIO.storedGet(request_url, KIO.NoReload, KIO.HideProgressInfo)
    job.setProperty("ratings", QtCore.QVariant(rating))
    job.setProperty("blacklisted_tags", QtCore.QVariant(blacklist))
    job.result.connect(self.__slot_process_post_list)
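The __slot_process_post_list handler is not included in this snippet. The sketch below shows how the properties stored on the job might be read back in the slot; the JSON decoding and the filtering logic are assumptions, not the original implementation.

import json

def __slot_process_post_list(self, job):
    """Illustrative slot: recover the rating and blacklist attached to
    the job, then filter the decoded post list (a sketch)."""
    if job.error():
        return
    rating = str(job.property("ratings").toString())
    blacklist = job.property("blacklisted_tags").toPyObject() or []
    posts = json.loads(str(job.data()))  # the board is assumed to reply with JSON
    allowed = []
    for post in posts:
        if set(post.get("tags", "").split()) & set(blacklist):
            continue
        allowed.append(post)
    # Filtering against `rating` and model updates would go here
    return allowed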
def get_pool_list(self, page=None):
    """Get a list of available pools.

    :param page: The page of the list to browse
    """

    if page is not None:
        parameters = dict(page=page)
    else:
        parameters = None

    request_url = utils.danbooru_request_url(self.url, POOL_URL, parameters,
                                             self.username, self.password)

    job = KIO.storedGet(request_url, KIO.NoReload, KIO.HideProgressInfo)
    job.result.connect(self.__slot_process_pool_list)
def get_tag_list(self, limit=10, name="", blacklist=None):
    """Get a list of tags.

    If *name* is supplied, a list of tags including the exact name
    supplied is fetched from Danbooru; otherwise the most recent tags
    are retrieved.

    :param limit: The number of tags to retrieve
    :param name: The name of the tag to retrieve, or an empty string
    :param blacklist: If not :const:`None`, a list of tags to exclude
        from searches
    """

    parameters = dict(name=name, limit=limit)

    request_url = utils.danbooru_request_url(self.url, TAG_URL, parameters,
                                             self.username, self.password)

    job = KIO.storedGet(request_url, KIO.NoReload, KIO.HideProgressInfo)
    job.setProperty("tag_blacklist", QtCore.QVariant(blacklist))
    job.result.connect(self.__slot_process_tag_list)
def get_pool(self, pool_id, page=None, rating="Safe", blacklist=None):
    """Download all the posts associated with a specific pool.

    :param pool_id: The pool ID to retrieve posts from
    :param page: The page of the pool
    :param rating: The maximum allowed rating for items
    :param blacklist: A blacklist of tags used to exclude posts
    """

    parameters = dict(id=pool_id)

    if page is not None:
        parameters["page"] = page

    request_url = utils.danbooru_request_url(self.url, POOL_DATA_URL,
                                             parameters, self.username,
                                             self.password)

    job = KIO.storedGet(request_url, KIO.NoReload, KIO.HideProgressInfo)
    job.setProperty("ratings", QtCore.QVariant(rating))
    job.setProperty("blacklisted_tags", QtCore.QVariant(blacklist))

    # We get a list of posts, which we can handle normally
    job.result.connect(self.__slot_process_post_list)
def statusChanged(self):
    """Fetch the current image asynchronously whenever the status changes."""
    data_url = KUrl(self.getImageUrl())
    retrieve_job = KIO.storedGet(data_url, KIO.NoReload,
                                 KIO.HideProgressInfo)
    retrieve_job.result.connect(self.handleDownloadedImage)
def fetchHackerspaceStatus(self):
    """Download the hackerspace status page with KIO."""
    # TODO: configurable hackerspace url
    data_url = KUrl("http://it-syndikat.org/status.php")
    retrieve_job = KIO.storedGet(data_url, KIO.Reload, KIO.HideProgressInfo)
    retrieve_job.result.connect(self.handleDownloadedJson)
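The handleDownloadedJson slot is not part of this snippet. The sketch below shows one way it could decode the payload, assuming the status endpoint returns JSON; updateStatusDisplay is a hypothetical helper on the same widget, not something taken from the source.

import json

def handleDownloadedJson(self, job):
    """Illustrative slot: decode the status JSON kept in memory by
    KIO.storedGet and hand it to the widget (a sketch)."""
    if job.error():
        print(job.errorString())
        return
    status = json.loads(str(job.data()))
    self.updateStatusDisplay(status)  # hypothetical helper, not in the source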