def applyOverrides(self, filters):
    """Apply configured price/state overrides to a list of compiled filters.

    For every compiled filter ``cf``:
      * its ``price_max`` is recompiled against either the matching entry in
        ``self.filter_price_overrides`` or ``self.default_fprice_override``;
      * its enabled state is derived from the configured price threshold and
        may be overridden by ``self.filter_state_overrides``;
      * if a default minimum price is configured, ``price_min`` is set from it.

    Override keys are compared case-insensitively against ``cf.fltr.id``.
    """
    # NOTE(review): these two sets are populated but never read in this
    # method — presumably intended for reporting unused overrides; confirm.
    used_price_overrides = set()
    used_state_overrides = set()
    # price threshold (e.g. "5 chaos") split into amount + currency short name
    tamount, tshort = cm.priceFromString(self.price_threshold)
    min_val = cm.convert(float(tamount), tshort)
    # pre-compile the global minimum price once, if one is configured
    min_price = cm.compilePrice(
        fm.default_min_price) if self.default_min_price else None
    for cf in filters:
        # title = re.match('(.+) \(.+\)', fltr.title.lower()).group(1)
        override = self.default_fprice_override
        # first matching price override wins (note the break)
        for id in self.filter_price_overrides:
            if id.lower() == cf.fltr.id:
                override = self.filter_price_overrides[id]
                used_price_overrides.add(id)
                break
        # filter stays enabled only if its price cap meets the threshold
        state = cf.enabled and cf.comp.get('price_max', 0) >= min_val
        # NOTE(review): no break here, so the LAST matching state override
        # wins — unlike the price loop above; confirm this is intentional.
        for id in self.filter_state_overrides:
            if id.lower() == cf.fltr.id:
                state = self.filter_state_overrides[id]
                used_state_overrides.add(id)
        cf.enabled = state
        # recompile price_max relative to the filter's current price_max
        cf.comp['price_max'] = cm.compilePrice(override, cf.comp['price_max'])
        if min_price is not None:
            cf.comp['price_min'] = cm.compilePrice(fm.default_min_price)
def _price_key(self, key): if key == '': return None # this means it will be ignored while sorting try: return cm.compilePrice(key, base_price=0) except Exception: return 0
def applyItemPriceOverrides(self, filters):
    """Recompile each filter's ``price_max`` with any matching item override.

    Override keys in ``self.price_overrides`` are matched case-insensitively
    against ``cf.fltr.id``; the first match wins, otherwise
    ``self.default_price_override`` is used.
    """
    matched_ids = set()
    for compiled in filters:
        price_expr = self.default_price_override
        for key, value in self.price_overrides.items():
            if key.lower() == compiled.fltr.id:
                price_expr = value
                matched_ids.add(key)
                break
        compiled.comp['price_max'] = cm.compilePrice(
            price_expr, compiled.comp['price_max'])
def compileFilter(self, fltr, path=None):
    """Compile ``fltr`` into its runtime criteria dict, resolving its base chain.

    Recursively compiles the filter's base filter (looked up first among the
    current user/auto filters, then among the previously compiled active
    filters) and layers this filter's criteria on top.

    Args:
        fltr: the filter object to compile.
        path: list of filter ids already visited on this compile chain; used
            for cycle detection. Callers normally omit it.

    Returns:
        dict: the compiled criteria.

    Raises:
        AppException: if a circular base-filter reference is detected.
        CompileException: if the base filter cannot be found anywhere.
    """
    if path is None:
        path = []
    # revisiting an id on the same chain means a base-filter cycle
    if fltr.id in path:
        raise AppException(
            "Circular reference detected while compiling filters: {}".
            format(path))
    path.append(fltr.id)
    if not fltr.baseId or fltr.baseId == fltr.id:
        # no base (or self-based): start from an empty compilation
        baseComp = {}
    else:
        baseFilter = self.getFilterById(
            fltr.baseId, itertools.chain(self.userFilters, self.autoFilters))
        if baseFilter is None:
            # try using last compilation
            compiledFilter = self.getFilterById(
                fltr.baseId, self.activeFilters, lambda x, y: x.fltr.id == y)
            if compiledFilter is None:
                raise CompileException(
                    "Base filter '{}' not found.".format(fltr.baseId))
                # return None
            baseComp = self.compileFilter(compiledFilter.fltr, path)
        else:
            baseComp = self.compileFilter(baseFilter, path)
    # if baseComp is None:
    #     return None
    comp = fltr.compile(baseComp)
    # underscore-prefixed ids get the per-item price override applied on top
    if fltr.id.startswith('_'):
        val_override = self.price_overrides.get(
            fltr.id, self.default_price_override)
        comp['price_max'] = cm.compilePrice(val_override, comp['price_max'])
    # return fltr.compile(baseComp)
    return comp
def compile(self, base=None):
    """Compile this filter's raw criteria into a runtime criteria dict.

    Args:
        base: optional pre-compiled dict from the base filter; its entries
            seed the result and are overridden by this filter's own criteria.
            Defaults to an empty dict.

    Returns:
        dict: compiled criteria — rarity names mapped through
        ``_NAME_TO_TYPE``, ``iclass`` resolved to an ``ItemClass`` member,
        item names lower-cased, prices compiled relative to the base value,
        filter groups instantiated with their non-pseudo mod-filter
        expressions pre-compiled to regex objects; all other keys are
        copied through unchanged.
    """
    crit = self.criteria
    # 'base={}' mutable-default replaced with a None sentinel; behavior is
    # identical for every existing call pattern.
    comp = dict(base) if base is not None else {}
    for key in crit:
        if key == 'rarity':
            # human-readable rarity names -> internal rarity type ids
            comp[key] = [_NAME_TO_TYPE[itype] for itype in crit[key]]
        elif key == 'iclass':
            comp[key] = ItemClass[crit[key]]
        elif key == 'name':
            # item names are matched case-insensitively downstream
            comp['name'] = [name.lower() for name in crit[key]]
        elif key in ('price_min', 'price_max'):
            # compile the price expression relative to any inherited value
            comp[key] = cm.compilePrice(crit[key], comp.get(key, None))
        elif key == 'fgs':
            fgs = [FilterGroupFactory.create(FilterGroupType(fg['type']), fg)
                   for fg in crit[key]]
            for fg in fgs:
                for mf in fg.mfs:
                    # pseudo mods keep their raw expression; everything else
                    # is pre-compiled to a regex for fast matching
                    if mf.type != ModFilterType.Pseudo:
                        mf.expr = re.compile(mf.expr)
            comp[key] = fgs
        else:
            # all other criteria pass through unchanged
            comp[key] = crit[key]
    return comp
def run(self):
    """Worker loop: pull raw stash-API payloads off the queue and parse them.

    Runs until ``self._finished`` is set, a ``None`` sentinel is dequeued,
    or no filters remain active. Each payload is JSON-decoded, parsed in
    parallel against a snapshot of the active filters/currency state, and
    parse timing stats are accumulated. On any parsing error the offending
    payload is dumped to a JSON file for later inspection.
    """
    # pr = cProfile.Profile()
    # pr.enable()
    with Pool(processes=self.num_workers) as pool:
        data = None
        request_id = None
        msgr.send_msg('Parser started..', logging.INFO)
        while not self._finished:
            try:
                item = self.queue.get()
                # None is the shutdown sentinel
                if item is None:
                    break
                request_id, b = item
                msgr.send_update_id(request_id)
                last_parse = time.time()
                data = json.loads(b.getvalue().decode())
                # snapshot filters and currency information
                # (held under compile_lock so the snapshot is consistent)
                with cm.compile_lock:
                    filters = fm.getActiveFilters()
                    c_budget = cm.compilePrice(
                        fm.budget) if fm.budget else None
                    ccm = cm.toCCM()
                if not len(filters):
                    # nothing to match against — request a stop and bail out
                    msgr.send_msg("No filters are active. Stopping..")
                    self.signal_stop = True
                    break
                # pr.enable()
                tabs, league_tabs, items = parse_stashes_parallel(
                    data, filters, ccm, self.league, c_budget,
                    self.stateMgr, self.resultHandler, self.num_workers,
                    pool)
                # pr.disable()
                # parse_next_id(data, self.stateMgr)
                # accumulate timing stats; clamp to avoid div-by-zero
                parse_time = time.time() - last_parse
                speed = items / max(parse_time, 0.001)
                self.parse_speed.append(speed)
                self.parse_times.append(parse_time)
                msgr.send_msg(
                    "Parse: {:.3f}s, Tabs: {}, League tabs: {}, Items: {}".
                    format(parse_time, tabs, league_tabs, items),
                    logging.DEBUG)
            # except Empty:
            #     pass
            except Exception as e:
                # broad catch keeps the worker alive; the payload that
                # triggered the failure is dumped to disk for debugging
                msgr.send_msg(
                    "Unexpected error occurred while parsing: {}. Error details logged to file. ID: {}"
                    .format(e, request_id), logging.ERROR)
                logexception()
                if data:
                    fname = os.path.join(
                        JSON_ERROR_DIR, JSON_ERROR_FNAME.format(request_id))
                    with open(fname, "w") as f:
                        json.dump(data, f, indent=4, separators=(',', ': '))
    msgr.send_msg('Parser stopped', logging.INFO)