def add_group(target, group_name=""):
    """Prompt the user for a group name and create a new .group file.

    Args:
        target: Group type, e.g. "widget" or "shortcut"; selects default art.
        group_name: Optional text pre-filled in the input dialog.

    Returns:
        The new group's id, or "" if the user cancelled / entered nothing.
    """
    dialog = xbmcgui.Dialog()
    group_name = dialog.input(heading=utils.get_string(30023), defaultt=group_name)

    group_id = ""
    if not group_name:
        # Cancelled or empty input: notify and bail out without writing.
        dialog.notification("AutoWidget", utils.get_string(30024))
        del dialog
        return group_id

    group_id = utils.get_unique_id(group_name)
    filename = os.path.join(_addon_data, "{}.group".format(group_id))
    group_def = {
        "label": group_name,
        "type": target,
        "paths": [],
        "id": group_id,
        "art": folder_sync if target == "widget" else folder_shortcut,
        "version": settings.get_addon_info("version"),
        "content": None,
    }
    utils.write_json(filename, group_def)

    del dialog
    return group_id
def save_playback_history(media_type, playback_percentage):
    """Append a (timestamp, media_type) record to the playback history file.

    Recorded in JSON to help predict which widgets will change after playback.
    The percentage threshold below is deliberately disabled.
    """
    # if playback_percentage < 0.7:
    #     return
    history = utils.read_json(_playback_history_path, default={})
    history.setdefault("plays", []).append((time.time(), media_type))
    utils.write_json(_playback_history_path, history)
def write_path(group_def, path_def=None, update=""):
    """Persist a group definition, optionally adding or updating one path.

    Args:
        group_def: Group dict to write; must contain an "id" key.
        path_def: Optional path dict to store in group_def["paths"].
        update: When non-empty, the id of an existing path entry to replace
            with path_def; otherwise path_def is appended.
    """
    filename = os.path.join(utils._addon_path, "{}.group".format(group_def["id"]))
    if path_def:
        if update:
            # Fix: the original mutated the list while iterating it and used
            # an O(n) list.index() per match. enumerate gives the index
            # directly; break after the first match (ids are unique).
            for i, path in enumerate(group_def["paths"]):
                if path["id"] == update:
                    group_def["paths"][i] = path_def
                    break
        else:
            group_def["paths"].append(path_def)
    utils.write_json(filename, group_def)
def write_path(group_def, path_def=None, update=''):
    """Persist a group definition, optionally adding or updating one path.

    Args:
        group_def: Group dict to write; must contain an 'id' key.
        path_def: Optional path dict to store in group_def['paths'].
        update: When non-empty, the id of an existing path entry to replace
            with path_def; otherwise path_def is appended.
    """
    filename = os.path.join(_addon_path, '{}.group'.format(group_def['id']))
    if path_def:
        if update:
            # Fix: the original mutated the list while iterating it and used
            # an O(n) list.index() per match. enumerate gives the index
            # directly; break after the first match (ids are unique).
            for i, path in enumerate(group_def['paths']):
                if path['id'] == update:
                    group_def['paths'][i] = path_def
                    break
        else:
            group_def['paths'].append(path_def)
    utils.write_json(filename, group_def)
def read_history(path, create_if_missing=True):
    """Read the .history file for *path*, optionally creating an empty one.

    Args:
        path: Plugin path whose history to look up (hashed into a filename).
        create_if_missing: When True and no history file exists, seed and
            write an empty structure; when False, return None instead.

    Returns:
        The history dict (with "history" and "widgets" lists), or None when
        the file is missing and create_if_missing is False.
    """
    # Renamed from `hash` to avoid shadowing the builtin.
    path_hash = path2hash(path)
    history_path = os.path.join(_addon_data, "{}.history".format(path_hash))

    if os.path.exists(history_path):
        return utils.read_json(history_path)
    if not create_if_missing:
        return None

    # Seed a fresh structure so later callers can append to both lists.
    # (The original bound the setdefault results to unused locals.)
    cache_data = {"history": [], "widgets": []}
    utils.write_json(history_path, cache_data)
    return cache_data
def save_path_details(params, _id=''):
    """Write a widget's path parameters to '<id>.widget' and return them.

    Returns None (writing nothing) when any param value ends with ',return)'
    or when no widget id can be determined.
    """
    # Skip entries whose values look like built-in navigation actions
    # (ending in ',return)').
    if any(str(value).endswith(',return)') for value in params.values()):
        return

    widget_id = _id or params.get('id')
    if not widget_id:
        return

    params['version'] = _addon_version
    if 'refresh' not in params:
        params['refresh'] = utils.get_setting_float('service.refresh_duration')

    path_to_saved = os.path.join(_addon_path, '{}.widget'.format(widget_id))
    utils.write_json(path_to_saved, params)
    return params
def add_group(target, group_name=''):
    """Prompt for a group name and write a new .group definition file.

    Args:
        target: Group type, e.g. 'widget' or 'shortcut'; selects default art.
        group_name: Optional text pre-filled in the input dialog.

    Returns:
        The new group's id, or '' if the user cancelled / entered nothing.
    """
    # Fix: `dialog` was referenced without being created in this function —
    # a NameError unless a module-level `dialog` exists (not visible here).
    # Instantiate it locally, as the other add_group variants in this file do.
    dialog = xbmcgui.Dialog()
    group_name = dialog.input(heading=utils.get_string(32037), defaultt=group_name)
    group_id = ''
    if group_name:
        group_id = utils.get_unique_id(group_name)
        filename = os.path.join(utils._addon_path, '{}.group'.format(group_id))
        group_def = {'label': group_name,
                     'type': target,
                     'paths': [],
                     'id': group_id,
                     'art': folder_sync if target == 'widget' else folder_shortcut,
                     'version': utils._addon_version}
        utils.write_json(filename, group_def)
    else:
        dialog.notification('AutoWidget', utils.get_string(32038))
    return group_id
def push_cache_queue(path, widget_id=None):
    """Queue *path* for a cache refresh, keeping its history file current."""
    path_hash = path2hash(path)
    queue_path = os.path.join(_addon_data, "{}.queue".format(path_hash))
    history = read_history(path, create_if_missing=True)  # ensure it exists

    dirty = False
    if widget_id is not None and widget_id not in history["widgets"]:
        history["widgets"].append(widget_id)
        dirty = True
    if history.get("path", "") != path:
        history["path"] = path
        dirty = True
    if dirty:
        history_path = os.path.join(_addon_data, "{}.history".format(path_hash))
        utils.write_json(history_path, history)

    # If a queue entry already exists, leave its original modification date
    # untouched so the older request keeps its higher priority.
    if not os.path.exists(queue_path):
        utils.write_json(
            queue_path, {"hash": path_hash, "path": path, "widget_id": widget_id}
        )
def add_group(target, group_name=""):
    """Prompt for a group name and write a new .group definition file.

    Args:
        target: Group type, e.g. "widget" or "shortcut"; selects default art.
        group_name: Optional text pre-filled in the input dialog.

    Returns:
        The new group's id, or "" if the user cancelled / entered nothing.
    """
    # Fix: `dialog` was referenced without being created in this function —
    # a NameError unless a module-level `dialog` exists (not visible here).
    # Instantiate it locally, as the other add_group variants in this file do.
    dialog = xbmcgui.Dialog()
    group_name = dialog.input(heading=utils.get_string(32037), defaultt=group_name)
    group_id = ""
    if group_name:
        group_id = utils.get_unique_id(group_name)
        filename = os.path.join(utils._addon_path, "{}.group".format(group_id))
        group_def = {
            "label": group_name,
            "type": target,
            "paths": [],
            "id": group_id,
            "art": folder_sync if target == "widget" else folder_shortcut,
            "version": utils._addon_version,
        }
        utils.write_json(filename, group_def)
    else:
        dialog.notification("AutoWidget", utils.get_string(32038))
    return group_id
def add_group(target):
    """Ask the user to name a new group, write its .group file, and refresh.

    Args:
        target: Group type, e.g. 'widget' or 'shortcut'; selects default art.

    Returns:
        The new group's id, or '' if the user cancelled / entered nothing.
    """
    dialog = xbmcgui.Dialog()
    group_name = dialog.input(heading=utils.get_string(32037))

    if not group_name:
        dialog.notification('AutoWidget', utils.get_string(32038))
        return ''

    group_id = utils.get_unique_id(group_name)
    group_def = {
        'label': group_name,
        'type': target,
        'paths': [],
        'id': group_id,
        'info': {},
        'art': folder_sync if target == 'widget' else folder_shortcut,
        'version': _addon_version,
    }
    utils.write_json(
        os.path.join(_addon_path, '{}.group'.format(group_id)), group_def
    )
    utils.update_container()
    return group_id
def save_path_details(params):
    """Persist widget path parameters to '<id>.widget' and return them.

    Assumes params contains an "id" key (raises KeyError otherwise).
    """
    target = os.path.join(utils._addon_path, "{}.widget".format(params["id"]))
    utils.write_json(target, params)
    return params
def cache_expiry(path, widget_id, add=None, background=True):
    """Read or write the cache entry for *path* and predict its expiry.

    When *add* is given it is a fresh plugin result to write to the cache;
    otherwise the existing cache file is read, queueing a refresh as needed.

    Args:
        path: Plugin path whose cache entry to handle.
        widget_id: Widget accessing the path; recorded in the history file.
        add: Optional fresh listing (JSON-RPC-style dict) to cache.
        background: When True, missing/invalid caches return a holding
            placeholder and queue a background refresh.

    Returns:
        (expiry, contents, changed): predicted expiry timestamp, the cached
        listing (or placeholder, or None), and whether content changed since
        the previous write.
    """
    # Predict how long to cache for, with a min of 5 min so updates don't
    # loop.
    # TODO: find a better way to prevent loops so users can manually refresh.
    # TODO: manage the cache files to remove any too old or no longer used.
    # TODO: update paths on autowidget refresh based on predicted update
    #       frequency, e.g. plugins with random paths should update when
    #       autowidget updates.
    path_hash = path2hash(path)
    cache_path = os.path.join(_addon_data, "{}.cache".format(path_hash))

    # Re-read the history file on every call, as we might be called from
    # multiple processes.
    history_path = os.path.join(_addon_data, "{}.history".format(path_hash))
    cache_data = utils.read_json(history_path) if os.path.exists(history_path) else None
    if cache_data is None:
        cache_data = {}
        since_read = 0
    else:
        since_read = time.time() - last_read(path_hash)

    history = cache_data.setdefault("history", [])
    widgets = cache_data.setdefault("widgets", [])
    if widget_id not in widgets:
        widgets.append(widget_id)

    expiry = time.time() - 20
    contents = None
    changed = True
    size = 0

    if add is not None:
        cache_json = json.dumps(add)
        if not add or not cache_json.strip():
            result = "Invalid Write"
        elif "error" in add or not add.get("result", {}).get("files"):
            # In this case we don't want to cache a bad result.
            result = "Error"
            # TODO: do we schedule a new update? or put dummy content up even
            # if we have good cached content?
        else:
            utils.write_json(cache_path, add)
            contents = add
            size = len(cache_json)
            content_hash = path2hash(cache_json)
            changed = history[-1][1] != content_hash if history else True
            history.append((time.time(), content_hash))
            if cache_data.get("path") != path:
                cache_data["path"] = path
            utils.write_json(history_path, cache_data)
            # expiry = history[-1][0] + DEFAULT_CACHE_TIME
            pred_dur = predict_update_frequency(history)
            # Expire a little before the prediction so it keeps up to date.
            expiry = history[-1][0] + pred_dur * 0.75
            result = "Wrote"
    else:
        # Write any updated widget_ids so we know what to update when we
        # dequeue. Also important as we use the last-modified time of
        # .history as the accessed time.
        utils.write_json(history_path, cache_data)
        if not os.path.exists(cache_path):
            result = "Empty"
            if background:
                contents = utils.make_holding_path(utils.get_string(30145), "refresh")
                push_cache_queue(path)
        else:
            contents = utils.read_json(cache_path, log_file=True)
            if contents is None:
                result = "Invalid Read"
                if background:
                    contents = utils.make_holding_path(
                        utils.get_string(30139).format(path_hash), "alert"
                    )
                    push_cache_queue(path)
            else:
                # Touch .history again so its modification time reflects this
                # successful access.
                utils.write_json(history_path, cache_data)
                size = len(json.dumps(contents))
                if history:
                    expiry = history[-1][0] + predict_update_frequency(history)
                # queue_len = len(list(iter_queue()))
                if expiry > time.time():
                    result = "Read"
                elif not background:
                    result = "Skip already updated"
                # elif queue_len > 3:
                #     # Try to give system more breathing space by returning
                #     # empty cache but ensuring refresh. Better way is to just
                #     # do this the first X accessed after startup, or by how
                #     # many were accessed in the last 30s.
                #     push_cache_queue(hash)
                #     result = "Skip (queue={})".format(queue_len)
                #     contents = dict(result=dict(files=[]))
                else:
                    push_cache_queue(path)
                    result = "Read and queue"

    # TODO: some metric that tells us how long until the first and last
    # widgets become visible and then get updated. How to measure the delay
    # between the cache read and it appearing on screen? Is the first cache
    # read always the top visible widget?
    utils.log(
        "{} cache {}B (exp:{}, last:{}): {} {}".format(
            result,
            size,
            utils.ft(expiry - time.time()),
            utils.ft(since_read),
            path_hash[:5],
            widgets,
        ),
        "notice",
    )
    return expiry, contents, changed