def get_files_list(path, titles=None, widget_id=None):
    """Return a cleaned list of items for *path*, skipping known titles.

    Reads the cached listing for the path (blocking on a fresh fetch when
    nothing is cached yet), drops entries whose title is in *titles*, strips
    empty/placeholder values from each item, and cleans artwork URLs.

    Args:
        path: plugin/library path whose listing should be fetched.
        titles: iterable of titles to exclude from the result; defaults to
            no exclusions.
        widget_id: id of the widget the cache read is attributed to.

    Returns:
        list: cleaned item dicts. An empty list is returned when the path
        yields no items or the cached response contains an error (the
        original returned ``None`` here, which forced every caller to
        None-check before iterating).
    """
    if not titles:
        titles = []
    path_hash = utils.path2hash(path)
    _, files, _ = utils.cache_expiry(path_hash, widget_id)
    if files is None:
        # We had no old content so have to block and get it now
        utils.log("Blocking cache path read: {}".format(path_hash[:5]), "info")
        files, _ = utils.cache_files(path, widget_id)

    new_files = []
    if 'error' not in files:
        # .get with a default avoids an AttributeError when the response
        # has no 'error' key but is also missing 'result'.
        files = files.get('result', {}).get('files')
        if not files:
            utils.log('No items found for {}'.format(path))
            return []
        filtered_files = [x for x in files if x['title'] not in titles]
        for file in filtered_files:
            # Drop empty/placeholder values so widgets don't render blanks.
            new_file = {
                k: v
                for k, v in file.items() if v not in [None, '', -1, [], {}]
            }
            if 'art' in new_file:
                for art in new_file['art']:
                    new_file['art'][art] = utils.clean_artwork_url(
                        file['art'][art])
            new_files.append(new_file)
    return new_files
def get_files_list(path, widget_id=None):
    """Fetch the cached listing for ``path`` and return cleaned item dicts.

    Blocks on a fresh cache read when no cached content exists yet. Each
    returned dict has empty/placeholder values stripped and its artwork
    URLs cleaned. Returns ``None`` when the listing is empty; when the
    cached response carries an error payload the function falls through
    and returns ``None`` implicitly.
    """
    path_hash = utils.path2hash(path)
    _, files, _ = utils.cache_expiry(path_hash, widget_id)
    if files is None:
        # We had no old content so have to block and get it now
        utils.log("Blocking cache path read: {}".format(path_hash[:5]), "info")
        files, changed = utils.cache_files(path, widget_id)

    new_files = []
    if "error" not in files:
        files = files.get("result").get("files")
        if not files:
            utils.log("No items found for {}".format(path))
            return
        for item in files:
            cleaned = {}
            for key, value in item.items():
                # Skip empty/placeholder values so widgets don't render
                # blank fields.
                if value in [None, "", -1, [], {}]:
                    continue
                cleaned[key] = value
            if "art" in cleaned:
                for art_type in cleaned["art"]:
                    cleaned["art"][art_type] = utils.clean_artwork_url(
                        item["art"][art_type])
            new_files.append(cleaned)
        return new_files
def cache_and_update(widget_ids):
    """Refresh every queued path of the given widgets.

    A widget might have many paths. Ensure each path that is queued for an
    update is refreshed now. Refreshing a path can affect other widgets that
    share it, so the full set of affected widget ids is collected and
    returned so the caller can refresh those widgets too (already-updated
    paths are skipped on later passes because their queue entry is removed).

    Args:
        widget_ids: non-empty iterable of widget ids to process.

    Returns:
        set: ids of every widget that uses one of the processed paths,
        including widgets that were not originally queued.
    """
    assert widget_ids
    effected_widgets = set()
    for widget_id in widget_ids:
        widget_def = manage.get_widget_by_id(widget_id)
        if not widget_def:
            continue
        changed = False
        widget_path = widget_def.get("path", {})
        utils.log(
            "trying to update {} with widget def {}".format(
                widget_id, widget_def),
            "inspect",
        )
        # isinstance is the idiomatic (and subclass-safe) type check.
        if not isinstance(widget_path, list):
            widget_path = [widget_path]
        for path in widget_path:
            if isinstance(path, dict):
                # Newer widget defs wrap the path in a labelled dict; a
                # KeyError here signals a malformed definition.
                _label = path["label"]
                path = path["file"]["file"]
            path_hash = utils.path2hash(path)
            # TODO: we might be updating paths used by widgets that weren't
            # initially queued. We need to return those and ensure they get
            # refreshed also.
            effected_widgets = effected_widgets.union(
                utils.widgets_for_path(path))
            if utils.is_cache_queue(path_hash):
                # we need to update this path regardless
                new_files, files_changed = utils.cache_files(path, widget_id)
                changed = changed or files_changed
                utils.remove_cache_queue(path_hash)
            # NOTE(review): a previous revision also re-checked expiry here
            # for paths not in the queue; that dead code has been removed.
        # TODO: only need to do this if a path has changed, which we can
        # tell from the history
        if changed:
            _update_strings(widget_def)
    return effected_widgets
def get_files_list(path, widget_id=None):
    """Return ``(items, path_hash)`` for the cached listing of *path*.

    Blocks on a fresh cache read when no cached content exists yet. Each
    item dict has ``None`` values stripped, artwork URLs cleaned, and cast
    thumbnails cleaned.

    Args:
        path: plugin/library path whose listing should be fetched.
        widget_id: id of the widget the cache read is attributed to.

    Returns:
        tuple: ``(new_files, path_hash)`` on success, ``([], path_hash)``
        when the listing is empty, and ``(None, path_hash)`` when the
        cached response held an error payload (its cache file is deleted)
        or was otherwise unusable.
    """
    path_hash = utils.path2hash(path)
    _, files, _ = utils.cache_expiry(path_hash, widget_id)
    if files is None:
        # We had no old content so have to block and get it now
        utils.log("Blocking cache path read: {}".format(path_hash[:5]), "info")
        files, _ = utils.cache_files(path, widget_id)

    new_files = []
    if "result" in files:
        files = files.get("result", {}).get("files", [])
        if not files:
            utils.log("No items found for {}".format(path))
            return [], path_hash
        for file in files:
            new_file = {k: v for k, v in file.items() if v is not None}
            if "art" in new_file:
                for art in new_file["art"]:
                    new_file["art"][art] = utils.clean_artwork_url(
                        file["art"][art])
            if "cast" in new_file:
                for idx, cast in enumerate(new_file["cast"]):
                    new_file["cast"][idx][
                        "thumbnail"] = utils.clean_artwork_url(
                            cast.get("thumbnail", ""))
            new_files.append(new_file)
        return new_files, path_hash
    elif "error" in files:
        # The call behind this cache entry failed; drop the cache file so
        # the next read forces a fresh fetch. Tolerate the file already
        # having been removed (e.g. by a concurrent refresh).
        try:
            os.remove(os.path.join(_addon_data, "{}.cache".format(path_hash)))
        except OSError:
            pass
        utils.log("Invalid cache file removed for {}".format(path_hash))
        return None, path_hash
    else:
        utils.log("Error processing {}".format(path_hash), "error")
        return None, path_hash