def compileFilters(self, force_validation=False):
    """Validate (when required) and compile all filters under the compile lock.

    On any failure the compiled state is cleared so stale filters are never
    used. A FiltersInfo snapshot is always broadcast, success or not.

    :param force_validation: re-run validation even if not flagged as required
    """
    with self.compile_lock:
        try:
            if self.validation_required or force_validation:
                start = time.time()
                valid = self.validateFilters()
                end = time.time() - start
                msgr.send_msg(
                    'Filters validation time {:.4f}s'.format(end),
                    logging.DEBUG)
                if not valid:
                    raise AppException('Filter validation failed.')

            self._compileFilters()

            # hoisted: previously getActiveFilters() was called twice
            active = self.getActiveFilters()
            msg = 'Filters compiled successfully.'
            if active:
                msg += ' {} are active.'.format(len(active))
            msgr.send_msg(msg, logging.INFO)
        except Exception as e:
            # compile failed — clear compiled state so nothing stale is used
            self.compiledFilters = []
            self.activeFilters = []
            self.compiled_item_prices = {}
            self.compiled_filter_prices = {}

            if isinstance(e, AppException):
                msgr.send_msg(e, logging.ERROR)
            else:
                logexception()
                msgr.send_msg(
                    'Unexpected error while compiling filters: {}'.format(e),
                    logging.ERROR)
        finally:
            # always publish current filter info to listeners
            msgr.send_object(FiltersInfo())
def start(self):
    """Run the scan loop, then perform an orderly shutdown of all workers.

    Teardown order matters: downloader and parser are stopped and joined
    before the state manager is closed so pending saves complete first.
    """
    try:
        self.scan()
    except AppException as e:
        msgr.send_msg(e, logging.ERROR)
    except BaseException as e:
        # BaseException (not Exception) so KeyboardInterrupt/SystemExit are
        # also reported before the cleanup below runs
        msgr.send_msg('Unexpected error occurred: {}. Error details logged to file.'.format(e), logging.ERROR)
        logexception()
    finally:
        # signal all workers to stop before joining any of them
        self.notifier.stop()
        self.updater.stop()
        if self.downloader:
            self.downloader.stop()
        if self.parser:
            self.parser.stop()
        if self.downloader:
            self.downloader.join()
        if self.parser:
            self.parser.join()
        self.stateMgr.close()  # after parser is closed so we don't interfere with pending saves
        if self.updater.is_alive():
            self.updater.join()
        if self.notifier.is_alive():
            self.notifier.join()
        msgr.send_msg("Scanning stopped")
        msgr.send_stopped()
def update(self, force_update=False, accept_empty=False):
    """Refresh currency rates from the currency API endpoints.

    Only rates whose trade count meets ``self.confidence_level`` are kept.
    Results are committed via ``self.compile``; the last-update timestamp is
    only advanced when at least one rate was obtained.

    :param force_update: update even if ``self.needUpdate`` is False
    :param accept_empty: tolerate an empty server response instead of raising
    :raises AppException: on connection, parsing, or unexpected errors
    """
    if not (force_update or self.needUpdate):
        return

    try:
        shorts = self.shorts
        rates = {}

        def get_count(currency):
            # highest of receive/pay counts; missing sides count as zero
            receive = currency['receive']['count'] if currency['receive'] else 0
            pay = currency['pay']['count'] if currency['pay'] else 0
            return max(receive, pay)

        for url in CurrencyManager.CURRENCY_API:
            data = getJsonFromURL(url.format(config.league))
            if data is None and not accept_empty:
                raise AppException(
                    "Currency update failed. Empty response from server.")
            if data:
                for currency in data["lines"]:
                    if get_count(currency) >= self.confidence_level:
                        rates[currency['currencyTypeName']] = \
                            float(currency['chaosEquivalent'])

        # only stamp an update time when we actually received rates
        self.compile(shorts, rates,
                     last_update=datetime.utcnow() if rates else None)
    except pycurl.error as e:
        raise AppException(
            "Currency update failed. Connection error: {}".format(e))
    except AppException:
        raise
    except (KeyError, ValueError) as e:
        raise AppException(
            "Currency update failed. Parsing error: {}".format(e))
    except Exception as e:
        logexception()
        raise AppException(
            "Currency update failed. Unexpected error: {}".format(e))
def loadUserFilters(self, validate=True):
    """Load user filters from disk, seeding defaults on first run.

    :param validate: validate the filters while loading
    :raises AppException: on any load failure other than a missing file
    """
    try:
        # the file's last-update timestamp is intentionally discarded for
        # user filters (only auto filters track it)
        self.userFilters, _ = FilterManager.loadFiltersFromFile(
            _USER_FILTERS_FNAME, validate)
    except FileNotFoundError:
        # first run: install the defaults and persist them
        self._loadDefaultFilters()
        self.saveUserFilters()
    except AppException:
        raise
    except Exception as e:
        logexception()
        raise AppException(
            "Loading user filters failed. Unexpected error: {}".format(e))
def loadAutoFilters(self, validate=True):
    """Load the generated (auto) filters and refresh cached item prices.

    :param validate: validate the filters while loading
    :raises AppException: if the file is missing or loading fails
    """
    try:
        self.autoFilters, self.last_update = FilterManager.loadFiltersFromFile(
            _AUTO_FILTERS_FNAME, validate)
        self.item_prices = self.getPrices(self.autoFilters)
    except FileNotFoundError as e:
        # BUG FIX: the filename was passed as a second constructor argument
        # instead of being formatted into the message
        raise AppException(
            "Loading generated filters failed. Missing file {}".format(
                e.filename))
    except AppException:
        raise
    except Exception as e:
        logexception()
        raise AppException(
            "Loading generated filters failed. Unexpected error: {}".
            format(e))
def loadConfig(self):
    """Load filter configuration from disk, validate it, and persist it back.

    A missing config file falls back to defaults for every field.

    :raises AppException: when validation fails, or on any unexpected error
    """
    try:
        try:
            with self.config_file_lock:
                with open(FILTERS_CFG_FNAME, encoding="utf-8",
                          errors="replace") as f:
                    data = json.load(f)
        except FileNotFoundError:
            data = {}  # no config yet — every get() below uses its default

        self.disabled_categories = data.get('disabled_categories', [])
        self.price_threshold = data.get('price_threshold',
                                        self.DEFAULT_PRICE_THRESHOLD)
        self.budget = data.get('budget', self.DEFAULT_BUDGET)
        self.default_min_price = data.get('default_min_price',
                                          self.DEFAULT_MIN_PRICE)
        self.default_price_override = data.get('default_price_override',
                                               self.DEFAULT_PRICE_OVERRIDE)
        self.default_fprice_override = data.get(
            'default_fprice_override', self.DEFAULT_FPRICE_OVERRIDE)
        self.price_overrides = data.get('price_overrides', {})
        self.filter_price_overrides = data.get('filter_price_overrides', {})
        self.filter_state_overrides = data.get('filter_state_overrides', {})
        self.confidence_level = data.get('confidence_level',
                                         self.DEFAULT_CONFIDENCE_LEVEL)
        self.enable_5l_filters = data.get('enable_5l_filters',
                                          self.DEFAULT_ENABLE_5L_FILTERS)

        try:
            self.validateConfig()
        except AppException as e:
            raise AppException(
                'Failed validating filters configuration. {}'.format(e))

        # write back so newly-introduced defaults are persisted
        self.saveConfig()
    except AppException:
        # BUG FIX: previously the generic handler below caught the validation
        # AppException too, re-wrapping it as "Unexpected error" and logging
        # a spurious exception; let deliberate AppExceptions propagate as-is
        raise
    except Exception as e:
        logexception()
        raise AppException(
            'Failed loading filters configuration. Unexpected error: {}'.
            format(e))
def applyChanges(self, event=None):
    """Commit the currency overrides edited in the table and recompile."""
    if not self.bvar_modified.get() or not cm.initialized:
        return

    # single pass over the table: collect both the override values and the
    # full set of currency names shown
    overrides = {}
    shown_names = set()
    for row in self.tree.get_children():
        name = self.tree.item(row, 'text')
        shown_names.add(name)
        value = self.tree.set(row, CurrencyColumn.Override.name)
        if value:
            overrides[name] = value

    # keep existing overrides for currencies not present in the table
    for name in set(cm.overrides) - shown_names:
        overrides[name] = cm.overrides[name]

    cm.confidence_level = self.entry_confidence_lvl.get()

    try:
        cm.compile(overrides=overrides)
        if fm.initialized:
            # recompiling filters is slow — do it off the UI thread
            threading.Thread(target=fm.compileFilters).start()
        self.bvar_modified.set(False)
    except AppException as e:
        messagebox.showerror('Update error', e,
                             parent=self.winfo_toplevel())
    except Exception as e:
        logexception()
        messagebox.showerror(
            'Update error',
            'Failed to apply changes, unexpected error:\n{}'.format(e),
            parent=self.winfo_toplevel())
def handleResult(self, item, stash, fltr):
    """Report a matched item: optional desktop notification plus a message.

    :param item: the matched item
    :param stash: the stash the item was found in
    :param fltr: the filter that matched
    """
    whisper_msg = item.get_whisper_msg(stash)

    if config.notify:
        price = item.get_price_raw(get_stash_price_raw(stash)) or ''
        size_str = "" if item.stacksize == 1 else "{}x".format(item.stacksize)
        msg = "{} {}\n{}".format(size_str, item.name, price).strip()
        # reuse the whisper message computed above instead of building it twice
        self.notifier.send((fltr.getDisplayTitle(), msg, whisper_msg))

    try:
        item_info = ItemResult(item, stash, getBaseUrl(self.poe_api_url), fltr)
    except (KeyError, IndexError):
        # malformed item data — report without details and dump the item
        # payload for offline diagnosis
        msgr.send_msg(
            "Unexpected error while processing item {}. Item details will not be provided.".format(item.name),
            logging.WARN)
        item_info = None
        logexception()
        with open(ITEM_ERROR_FNAME, mode='w') as f:
            json.dump(item, f, indent=4, separators=(',', ': '))

    msgr.send_msg(whisper_msg, tag=item_info)
def run(self):
    """Periodic updater: refresh currency rates and filters every interval.

    Each failure is reported independently; filters are recompiled whenever
    at least one of the two updates succeeded.
    """
    while not self.evt_stop.wait(self.interval):
        currency_updated = False
        filters_updated = False
        try:
            try:
                cm.update()
                currency_updated = True
            except AppException as e:
                msgr.send_msg(e, logging.ERROR)

            try:
                self.fm.fetchFromAPI()
                filters_updated = True
            except AppException as e:
                msgr.send_msg(e, logging.ERROR)

            if currency_updated or filters_updated:
                self.fm.compileFilters()

            if currency_updated and filters_updated:
                msgr.send_msg("Scheduled update completed successfully.",
                              logging.INFO)
            elif not (currency_updated or filters_updated):
                # BUG FIX: this condition used `and`, which made it true for
                # every non-success case and left the partial-success branch
                # below unreachable
                msgr.send_msg(
                    "Scheduled currency and filters update failed. Retrying in {} seconds.."
                    .format(self.interval), logging.WARN)
            else:
                msgr.send_msg("Scheduled update was partially successful.",
                              logging.WARN)
        except Exception as e:
            msgr.send_msg("Unexpected error while updating: {}".format(e),
                          logging.ERROR)
            logexception()
def run(self):
    """Downloader main loop driving a pycurl multi-handle.

    Repeats four phases until stopped:
      1. submit queued requests to free curl handles, rate-limited by
         config.request_delay, and pump the multi-handle;
      2. collect finished transfers, peeking the next change-id from each
         successful response and resubmitting failures;
      3. drain finished requests in order, pushing their buffers to the
         result queue (unless in skip-ahead mode);
      4. evaluate skip-ahead progress and wait on the multi socket set.
    """
    try:
        msgr.send_msg('Downloader started', logging.INFO)
        if self.skip_ahead:
            msgr.send_msg('Skipping ahead.. please wait..')
        self.stat_thread.start()
        start_time = time.time()
        while not self.evt_stop.is_set():
            # --- phase 1: feed pending requests into free handles ---
            while not self.evt_stop.is_set():
                with self.req_queue_lock:
                    if self.req_queue and self.free_handles and \
                            (not self.last_request or
                             time.time() - self.last_request >= config.request_delay):
                        # get unfinished request with minimal ID; assumes any
                        # request in the queue was registered via add_request
                        # first, so its position in self.requests is its order
                        req_sorted = sorted(
                            self.req_queue,
                            key=lambda r: self.requests.index(r))
                        req = req_sorted[0]
                        self.req_queue.remove(req)
                        handle = self.free_handles.pop(0)
                        self._prepare_handle(handle, req)
                        if self.last_request:
                            delta = time.time() - self.last_request
                        else:
                            delta = 0
                        # time the request spent waiting in the queue
                        add_delay = time.time() - req.submit_time
                        self.queue_time.append(add_delay)
                        self.m.add_handle(handle)
                        self.last_request = time.time()
                        req.start_time = self.last_request
                        self.req_delay_time.append(delta)
                ret, num_handles = self.m.perform()
                if ret != pycurl.E_CALL_MULTI_PERFORM:
                    break
            # --- phase 2: check for curl objects which have terminated, and
            # add them back to the freelist ---
            while not self.evt_stop.is_set():
                num_q, ok_list, err_list = self.m.info_read()
                for c in ok_list:
                    self.m.remove_handle(c)
                    cinfo = info(c)
                    if cinfo['http-code'] == 200:
                        if not c.req.submitted_next:
                            # next change-id was not found during streaming;
                            # do a full peek of the response body
                            next_id = c.req.peek_id()
                            if next_id:
                                msgr.send_msg(
                                    'Full peek was required for ID: {}'.format(
                                        c.req.req_id), logging.INFO)
                                self.add_request(next_id)
                                c.req.submitted_next = next_id
                                c.req.peek_time = time.time() - c.req.start_time
                        if c.req.submitted_next:
                            c.req.finished = True
                        else:
                            # HTTP 200 but no next id — treat as failed
                            msgr.send_msg(
                                'Request successful for ID {}, but next ID was not found. Redownloading..'
                                .format(c.req.req_id), logging.INFO)
                    else:
                        # non-200: try to surface the API's structured error,
                        # fall back to the raw body
                        try:
                            err_data = json.loads(
                                c.req.buffer.getvalue().decode())['error']
                            msgr.send_msg(
                                'Request for ID {} failed. {} - Code: {}, Message: {}'
                                .format(c.req.req_id, cinfo['http-code'],
                                        err_data['code'], err_data['message']),
                                logging.WARN)
                        except Exception:
                            msgr.send_msg(
                                'Request for ID {} failed. {} - {}'.format(
                                    c.req.req_id, cinfo['http-code'],
                                    c.req.buffer.getvalue().decode()),
                                logging.WARN)
                    self.free_handles.append(c)
                    if not c.req.finished:
                        # retry with a fresh buffer/state
                        c.req.reset()
                        self._submit(c.req)
                    else:
                        self.req_time.append(cinfo['total-time'])
                        self.peek_time.append(c.req.peek_time)
                for c, errno, errmsg in err_list:
                    self.m.remove_handle(c)
                    self.free_handles.append(c)
                    if not c.req.finished:
                        msgr.send_msg(
                            "Failed: {} - Code: {}, {}. Redownloading..".
                            format(c.req.req_id, errno, errmsg), logging.INFO)
                        c.req.reset()
                        self._submit(c.req)
                    else:
                        cinfo = info(c)
                        self.req_time.append(cinfo['total-time'])
                        self.peek_time.append(c.req.peek_time)
                if num_q == 0:
                    break
            # --- phase 3: hand completed requests to the consumer in order ---
            with self.requests_lock:
                while self.requests and self.requests[0].finished:
                    req = self.requests.pop(0)
                    delta = get_delta(req.req_id, req.submitted_next)
                    self.dl_deltas.append(delta)
                    with self.delta_lock:
                        self.delta_count += delta
                    if not req.skip_data:
                        try:
                            self.res_queue.put((req.req_id, req.buffer),
                                               timeout=1)
                        except Full:
                            msgr.send_msg(
                                'Result queue is full.. waiting for consumer to free slots..',
                                logging.WARN)
                            # blocking put — back-pressure on the downloader
                            self.res_queue.put((req.req_id, req.buffer))
            # --- phase 4: skip-ahead bookkeeping ---
            if self.skip_ahead:
                # wait for the rolling delta window to fill, then check if the
                # average change-id delta is small enough to start processing
                if self.dl_deltas and len(
                        self.dl_deltas) == self.dl_deltas.maxlen:
                    avg_delta = sum(self.dl_deltas) / len(self.dl_deltas)
                    if avg_delta <= SUFFICIENT_DELTA:
                        msgr.send_msg(
                            "Sufficient delta reached ({}) after {:.3f} seconds. Data processing started."
                            .format(avg_delta, time.time() - start_time))
                        self.skip_ahead = False
            if self.skip_ahead:
                passed = time.time() - start_time
                if passed > self.skip_timeout:
                    msgr.send_msg(
                        "Skip ahead timed out after {:.3f} seconds. Data processing started."
                        .format(passed), logging.WARN)
                    self.skip_ahead = False
            self.m.select(1.0)
            if not len(self.requests):
                # Should never happen, since there is always a next id.
                # If it does, parent thread will end up restarting this from
                # the last saved point
                msgr.send_msg('No requests left.. stopping..', logging.WARN)
                self.stop()
    except Exception as e:
        msgr.send_msg(
            "Unexpected error occurred while downloading: {}. Error details logged to file."
            .format(e), logging.ERROR)
        logexception()
    finally:
        self._close()
        msgr.send_msg('Downloader stopped', logging.INFO)
def run(self):
    """Parser main loop: consume downloaded buffers and parse stashes.

    Pulls (request_id, buffer) pairs from the queue (a None item is the
    shutdown sentinel), snapshots filter/currency state under cm.compile_lock
    so a concurrent recompile cannot tear the view, and fans parsing out to a
    worker pool. On a parse error the offending JSON is dumped to disk.
    """
    with Pool(processes=self.num_workers) as pool:
        data = None
        request_id = None
        msgr.send_msg('Parser started..', logging.INFO)
        while not self._finished:
            try:
                item = self.queue.get()
                if item is None:
                    # sentinel — producer asked us to stop
                    break
                request_id, b = item
                msgr.send_update_id(request_id)
                last_parse = time.time()
                data = json.loads(b.getvalue().decode())

                # snapshot filters and currency information atomically
                with cm.compile_lock:
                    filters = fm.getActiveFilters()
                    c_budget = cm.compilePrice(
                        fm.budget) if fm.budget else None
                    ccm = cm.toCCM()

                if not len(filters):
                    msgr.send_msg("No filters are active. Stopping..")
                    self.signal_stop = True
                    break

                tabs, league_tabs, items = parse_stashes_parallel(
                    data, filters, ccm, self.league, c_budget,
                    self.stateMgr, self.resultHandler, self.num_workers,
                    pool)

                parse_time = time.time() - last_parse
                # floor the denominator to avoid division by ~zero
                speed = items / max(parse_time, 0.001)
                self.parse_speed.append(speed)
                self.parse_times.append(parse_time)

                msgr.send_msg(
                    "Parse: {:.3f}s, Tabs: {}, League tabs: {}, Items: {}".
                    format(parse_time, tabs, league_tabs, items),
                    logging.DEBUG)
            except Exception as e:
                msgr.send_msg(
                    "Unexpected error occurred while parsing: {}. Error details logged to file. ID: {}"
                    .format(e, request_id), logging.ERROR)
                logexception()
                if data:
                    # keep the payload that broke the parser for diagnosis
                    fname = os.path.join(
                        JSON_ERROR_DIR, JSON_ERROR_FNAME.format(request_id))
                    with open(fname, "w") as f:
                        json.dump(data, f, indent=4, separators=(',', ': '))
    msgr.send_msg('Parser stopped', logging.INFO)
def fetchFromAPI(self, force_update=False, accept_empty=False):
    """Rebuild the auto-generated filters from the price-overview API.

    For every item line above the confidence threshold, builds a disabled
    Filter whose criteria pin name/rarity/links/tier and cap the price at the
    item's chaos value. Filter ids are made unique via get_unique_id.

    :param force_update: fetch even if ``self.needUpdate`` is False
    :param accept_empty: tolerate an empty server response instead of raising
    :raises AppException: on connection, parsing, or unexpected errors
    """
    if not force_update and not self.needUpdate:
        return
    try:
        filter_ids = []
        filters = []

        def name_to_id(name):
            # normalize a display name into an id token
            return '_' + name.lower().replace(' ', '_')

        def get_unique_id(title, name, category, links):
            # try progressively more specific ids, finally falling back to a
            # numeric suffix on the title id
            title_id = name_to_id(title)
            if title_id not in filter_ids:
                return title_id
            name_id = name_to_id('{}{}'.format(
                name, ' {}L'.format(links) if links else ''))
            if name_id not in filter_ids:
                return name_id
            category_id = name_to_id(title + ' ' + category)
            if category_id not in filter_ids:
                return category_id
            id = title_id
            n = 2
            while id in filter_ids:
                id = '{}{}'.format(title_id, n)
                n += 1
            return id

        # one reused curl handle for all endpoint fetches
        c = pycurl.Curl()
        for url in _URLS:
            furl = url.format(config.league)
            data = getJsonFromURL(furl, handle=c, max_attempts=3)
            if data is None and not accept_empty:
                raise AppException(
                    "Filters update failed. Empty response from server")
            if data:
                # category name is embedded in the endpoint URL
                # (e.g. ".../GetUniqueArmourOverview" -> "uniquearmour")
                category = re.match(".*Get(.*)Overview",
                                    furl).group(1).lower()
                for item in data['lines']:
                    # skip low-confidence price data
                    if item['count'] < self.confidence_level:
                        continue
                    priority = FilterPriority.AutoBase
                    crit = {}
                    crit['price_max'] = "{} chaos".format(
                        float(item.get('chaosValue', 0)))
                    # essences have no meaningful base type
                    base = item['baseType'] if category not in (
                        'essence', ) else None
                    name = item['name']
                    if base:
                        name += ' ' + base
                    crit['name'] = ['"{}"'.format(name)]
                    try:
                        rarity = ItemRarity(item['itemClass'])
                        crit['rarity'] = [_ITEM_TYPE[rarity]]
                    except ValueError:
                        # unknown item class — leave rarity unconstrained
                        rarity = None
                    crit['buyout'] = True
                    if category in ('uniquearmour', 'uniqueweapon'):
                        crit['corrupted'] = False
                    links = item['links']
                    title = "{} {} {}".format(
                        'Legacy' if rarity == ItemRarity.Relic else '',
                        item['name'],
                        item['variant']
                        if item['variant'] is not None else '').strip()
                    if links:
                        title = '{} {}L'.format(title, links)
                        crit['links_min'] = links
                        # high-link filters get bumped priority
                        if links == 5:
                            priority += 1
                        elif links == 6:
                            priority += 2
                    tier = item['mapTier']
                    if tier:
                        # maps: pin the exact tier
                        crit['level_min'] = tier
                        crit['level_max'] = tier
                    id = get_unique_id(title, name, category, links)
                    filter_ids.append(id)
                    fltr = Filter(title, crit, False, category, id=id,
                                  priority=priority)
                    if item['variant'] is not None:
                        if item['variant'] not in _VARIANTS:
                            msgr.send_msg(
                                "Unknown variant {} in item {}".format(
                                    item['variant'], item['name']),
                                logging.WARN)
                        else:
                            # translate the variant into explicit-mod filters
                            mfs = _VARIANTS[item['variant']]
                            if mfs:
                                fg = AllFilterGroup()
                                for expr in _VARIANTS[item['variant']]:
                                    fg.addModFilter(
                                        ModFilter(ModFilterType.Explicit,
                                                  expr))
                                fltr.criteria['fgs'] = [fg.toDict()]
                    fltr.validate()
                    filters.append(fltr)
        self.autoFilters = filters
        self.item_prices = self.getPrices(self.autoFilters)
        self.saveAutoFilters()
        # only advance the timestamp when something was actually fetched
        self.last_update = datetime.utcnow() if filters else None
    except pycurl.error as e:
        raise AppException(
            "Filters update failed. Connection error: {}".format(e))
    except (KeyError, ValueError) as e:
        raise AppException(
            "Filters update failed. Parsing error: {}".format(e))
    except AppException:
        raise
    except Exception as e:
        logexception()
        raise AppException(
            "Filters update failed. Unexpected error: {}".format(e))
def applyChanges(self, event=None):
    """Collect the edited price/state overrides from the form and commit them.

    Reads scalar settings from the entry widgets and per-item overrides from
    the tree, preserves overrides for items not shown in the tree, and pushes
    everything to fm.updateConfig. On success, filters are recompiled in a
    background thread and the form state is reset.
    """
    if not self.bvar_modified.get() or not fm.initialized:
        return
    if not self._validateForm():
        return
    price_threshold = self.entry_threshold.get()
    default_price_override = self.entry_price_override.get()
    default_fprice_override = self.entry_fprice_override.get()
    budget = self.entry_budget.get()
    min_price = self.entry_min_price.get()
    confidence_lvl = self.entry_confidence_lvl.get(
    ) or fm.DEFAULT_CONFIDENCE_LEVEL
    enable_5l_filters = self.var_5l_filters.get()
    price_overrides = {}
    filter_price_overrides = {}
    filter_state_overrides = {}
    for iid in self.tree.get_children():
        id = self.tree.set(iid, PricesColumn.ID.name)
        iprice = self.tree.set(iid, PricesColumn.Override.name)
        if iprice:
            price_overrides[id] = iprice
        fprice = self.tree.set(iid, PricesColumn.FilterOverride.name)
        if fprice:
            filter_price_overrides[id] = fprice
        fstate = self.tree.set(iid, PricesColumn.FilterStateOverride.name)
        try:
            filter_state_overrides[id] = FilterStateOption[fstate].value
        except KeyError:
            # blank/unknown state cell means "no override"
            pass
    ids = set([
        self.tree.set(iid, PricesColumn.ID.name)
        for iid in self.tree.get_children()
    ])
    # preserve configuration for ids not shown in the tree
    for key in (set(fm.price_overrides) - ids):
        price_overrides[key] = fm.price_overrides[key]
    for key in (set(fm.filter_price_overrides) - ids):
        filter_price_overrides[key] = fm.filter_price_overrides[key]
    for key in (set(fm.filter_state_overrides) - ids):
        filter_state_overrides[key] = fm.filter_state_overrides[key]
    try:
        fm.updateConfig(default_price_override, default_fprice_override,
                        price_threshold, budget, min_price, price_overrides,
                        filter_price_overrides, filter_state_overrides,
                        int(confidence_lvl), enable_5l_filters)
    except AppException as e:
        messagebox.showerror(
            'Validation error',
            'Failed to update configuration:\n{}'.format(e),
            parent=self.winfo_toplevel())
    except Exception as e:
        logexception()
        messagebox.showerror(
            'Update error',
            'Failed to apply changes, unexpected error:\n{}'.format(e),
            parent=self.winfo_toplevel())
    else:
        # SHOULD always work since config is valid, main console will report
        # any failures; background thread because schema validating takes a
        # bit of time
        threading.Thread(target=fm.compileFilters).start()
        self._initFormState()