def actions(self):
    '''
    Construct a dict of dicts: action names are keys and
    dictionaries of attributes/values are values. Cached too.

    Returns None if the RA meta-data cannot be loaded.
    '''
    cache_id = "ra_actions-%s" % self.ra_string()
    if cache.is_cached(cache_id):
        return cache.retrieve(cache_id)
    if self.mk_ra_node() is None:
        return None
    d = {}
    for c in self.ra_elem.xpath("//actions/action"):
        name = c.get("name")
        if not name or name in self.skip_ops:
            continue
        if name == "monitor":
            # monitor ops get a derived name (role/interval suffix)
            name = monitor_name_node(c)
        d[name] = {}
        for a in c.attrib.keys():
            if a in self.skip_op_attr:
                continue
            v = c.get(a)
            if v:
                d[name][a] = v
    # Add monitor ops without role, if they don't already exist:
    # monitor_<role>_<rest> also registers as monitor_<rest>.
    d2 = {}
    for op in d:
        if re.match("monitor_[^0-9]", op):
            norole_op = re.sub(r'monitor_[^0-9_]+_(.*)', r'monitor_\1', op)
            if norole_op not in d:
                d2[norole_op] = d[op]
    d.update(d2)
    return cache.store(cache_id, d)
def params(self):
    '''
    Construct a dict of dicts: parameter names are keys and
    dictionaries of attributes/values are values. Cached too.

    Returns None if the RA meta-data cannot be loaded.
    '''
    cache_id = "ra_params-%s" % self.ra_string()
    if cache.is_cached(cache_id):
        return cache.retrieve(cache_id)
    if self.mk_ra_node() is None:
        return None
    d = {}
    for c in self.ra_elem.xpath("//parameters/parameter"):
        name = c.get("name")
        if not name:
            continue
        # param_type avoids shadowing the builtin `type`
        param_type, default = self.param_type_default(c)
        d[name] = {
            "required": c.get("required"),
            "unique": c.get("unique"),
            "type": param_type,
            "default": default,
        }
    return cache.store(cache_id, d)
def search():
    '''
    Handle a search request over HTTP.

    Reads the query and ranking options from the request arguments,
    either runs the query (caching the full result list and its stats
    under a fresh query_id) or serves a page from the cache, and returns
    a JSON payload — JSONP-wrapped when a 'callback' arg is supplied.
    '''
    callback = request.args.get('callback', '')
    query_string = str(request.args.get('query', ''))
    # print as a function: identical output for one argument, and
    # valid in both Python 2 and 3 (the statement form is 2-only)
    print(query_string)
    stemming = request.args.get('stemming', 'Y') == 'Y'
    stop_words = request.args.get('stop_words', 'N') == 'Y'
    scoring_method = request.args.get('scoring_method', 'tf')
    start_rank = int(request.args.get('start_rank', 0))
    num_results = int(request.args.get('num_results', 10))
    query_id = request.args.get('query_id', None)
    if (query_id is None) or (not cache.is_cached(query_id)):
        # Fresh query: rank, cache the full list and its stats under a
        # new id, then slice out the requested page.
        query_id = str(uuid.uuid4())
        st = time.time()
        rank_list = query_parser.get_rank(query_string, scoring_method,
                                          stop_words, stemming)
        en = time.time()
        results_length = len(rank_list)
        cache.cache_result(query_id, rank_list)
        processing_time = en - st
        cache.cache_result_stats(query_id, results_length, processing_time)
        rank_list = rank_list[start_rank: start_rank + num_results]
    else:
        # Known query: serve the requested page and stats from cache.
        rank_list = cache.get_cached(query_id, start_rank, num_results)
        stats = cache.get_cached_stats(query_id)
        results_length = stats['results_length']
        processing_time = stats['processing_time']
    results = json_utils.generate_json(query_id, rank_list, query_string,
                                       scoring_method, processing_time,
                                       results_length, start_rank)
    print("done")
    if callback == '':
        return results
    return callback + '(' + results + ')'
def ra_providers(ra_type, ra_class="ocf"):
    '''
    List of providers for a class:type. Cached.
    '''
    # cache_id avoids shadowing the builtin `id`
    cache_id = "ra_providers-%s-%s" % (ra_class, ra_type)
    if cache.is_cached(cache_id):
        return cache.retrieve(cache_id)
    providers = sorted(ra_if().providers(ra_type, ra_class))
    return cache.store(cache_id, providers)
def ra_classes():
    '''
    List of RA classes. Cached under the "ra_classes" key.
    '''
    if cache.is_cached("ra_classes"):
        return cache.retrieve("ra_classes")
    classes = sorted(ra_if().classes())
    return cache.store("ra_classes", classes)
def ra_providers_all(ra_class="ocf"):
    '''
    List of providers for a class. Cached.
    '''
    key = "ra_providers_all-%s" % ra_class
    if cache.is_cached(key):
        return cache.retrieve(key)
    resource_dir = os.path.join(os.environ["OCF_ROOT"], "resource.d")
    if not os.path.isdir(resource_dir):
        return []
    providers = [entry for entry in os.listdir(resource_dir)
                 if os.path.isdir(os.path.join(resource_dir, entry))]
    return cache.store(key, sorted(providers))
def ra_providers_all(ra_class="ocf"):
    '''
    List of providers for a class. Cached.
    '''
    # cache_id avoids shadowing the builtin `id`
    cache_id = "ra_providers_all-%s" % ra_class
    if cache.is_cached(cache_id):
        return cache.retrieve(cache_id)
    # os.path.join instead of manual "%s/%s" concatenation, and guard
    # against a missing resource.d directory rather than letting
    # os.listdir raise (consistent with the sibling implementations
    # of this function in this file).
    root = os.path.join(os.environ["OCF_ROOT"], "resource.d")
    if not os.path.isdir(root):
        return []
    providers = sorted(s for s in os.listdir(root)
                       if os.path.isdir(os.path.join(root, s)))
    return cache.store(cache_id, providers)
def meta(self):
    '''
    RA meta-data as raw xml. Cached; returns None when no
    meta-data could be obtained.
    '''
    sid = "ra_meta-%s" % self.ra_string()
    if cache.is_cached(sid):
        return cache.retrieve(sid)
    # Some classes ship a dedicated meta-data program; the rest go
    # through the generic RA interface.
    if self.ra_class in constants.meta_progs:
        xml = prog_meta(self.ra_class)
    else:
        xml = ra_if().meta(self.ra_class, self.ra_type, self.ra_provider)
    if not xml:
        return None
    self.debug("read and cached meta-data")
    return cache.store(sid, xml)
def ra_providers_all(ra_class="ocf"):
    '''
    List of providers for a class. Cached.
    '''
    cache_key = "ra_providers_all-%s" % ra_class
    if cache.is_cached(cache_key):
        return cache.retrieve(cache_key)
    ocf_dir = os.path.join(os.environ["OCF_ROOT"], "resource.d")
    if not os.path.isdir(ocf_dir):
        return []
    providers = []
    for entry in os.listdir(ocf_dir):
        if os.path.isdir(os.path.join(ocf_dir, entry)):
            providers.append(entry)
    providers.sort()
    return cache.store(cache_key, providers)
def ra_types(ra_class="ocf", ra_provider=""):
    '''
    List of RA types for a class, optionally restricted to those
    served by ra_provider. Cached.
    '''
    if not ra_class:
        ra_class = "ocf"
    # cache_id avoids shadowing the builtin `id`; a set replaces the
    # original list-with-membership-test dedup (result is sorted anyway)
    cache_id = "ra_types-%s-%s" % (ra_class, ra_provider)
    if cache.is_cached(cache_id):
        return cache.retrieve(cache_id)
    types = set()
    for ra in ra_if().types(ra_class):
        if not ra_provider or ra_provider in ra_providers(ra, ra_class):
            types.add(ra)
    return cache.store(cache_id, sorted(types))
for name in files: if paths.split_path_ext(name)[2] not in paths.img_extns: continue abs = os.path.join(root, name) #if there is no preview, I don't cache the preview thumbnail. if name == ".preview.jpeg" or name.lower() == "preview.jpg": check_sizes = [preview_size] else: check_sizes = cache_sizes for size in check_sizes: rel = paths.abs_to_rel(abs) abs_cachedir = os.path.join(gallery_config.cache_prefix, size) abs_cachefile = os.path.join(abs_cachedir, rel) if cache.is_cached(abs, abs_cachefile, config): print abs + '(' + size + ") is up to date" else: dims = size.split("x") width = int(dims[0]) if len(dims) > 1: height = int(dims[1]) else: height = width print "caching " + rel + "(" + str(width) + ", " + str( height) + ")" cache.cache_img(rel, width, height, abs_cachedir, abs_cachefile, 0) #print "calling cache_img(\"" + str(rel) + "\", \"" + str(width) + "\", \"" + str(height) + "\", \"" + str(abs_cachedir) + "\", \"" + str(abs_cachefile) + "\", 0)" total_created = total_created + 1 time.sleep(1) if total_created > 20: