Example #1
    def _get_latest_id(self, is_beta):
        latest_id = None
        failed_attempts = 0
        sleep_time = 0

        if is_beta:
            ninja_api_nextid_field = 'next_beta_change_id'
        else:
            ninja_api_nextid_field = 'next_change_id'

        while not self._stop.wait(sleep_time) and not latest_id:
            try:
                data = getJsonFromURL(NINJA_API)
                if data is None:
                    msgr.send_msg("Error retrieving latest id from API, bad response", logging.WARN)
                elif ninja_api_nextid_field not in data:
                    raise AppException(
                        "Error retrieving latest id from API, missing {} key".format(ninja_api_nextid_field))
                else:
                    latest_id = data[ninja_api_nextid_field]
                    break
            except pycurl.error as e:
                errno, msg = e.args
                msgr.send_tmsg("Connection error {}: {}".format(errno, msg), logging.WARN)
            finally:
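                # back off exponentially between attempts, capped at 30 seconds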
                failed_attempts += 1
                sleep_time = min(2 ** failed_attempts, 30)

        return latest_id
Example #2
    def compileFilters(self, force_validation=False):
        with self.compile_lock:
            try:
                if self.validation_required or force_validation:
                    start = time.time()
                    valid = self.validateFilters()
                    end = time.time() - start
                    msgr.send_msg(
                        'Filters validation time {:.4f}s'.format(end),
                        logging.DEBUG)
                    if not valid:
                        raise AppException('Filter validation failed.')

                self._compileFilters()
                msg = 'Filters compiled successfully.'
                active_count = len(self.getActiveFilters())
                if active_count:
                    msg += ' {} are active.'.format(active_count)
                msgr.send_msg(msg, logging.INFO)
            except Exception as e:
                # configuration is valid yet compile failed, stop
                self.compiledFilters = []
                self.activeFilters = []
                self.compiled_item_prices = {}
                self.compiled_filter_prices = {}
                if isinstance(e, AppException):
                    msgr.send_msg(e, logging.ERROR)
                else:
                    logexception()
                    msgr.send_msg(
                        'Unexpected error while compiling filters: {}'.format(
                            e), logging.ERROR)
            finally:
                msgr.send_object(FiltersInfo())
Example #3
    def validateFilters(self):
        valid = True

        verrors = {}
        for fltr in self.getRawFilters():
            try:
                fltr.validate()
            except AppException as e:
                verrors[fltr] = e
                valid = False

        for fltr, e in verrors.items():
            msgr.send_msg('{}: {}'.format(fltr.title or fltr.id, e),
                          logging.ERROR)

        for fltr in self.userFilters:
            if fltr.id.startswith('_'):
                msgr.send_msg(
                    '{}: Invalid ID {}, underscore prefix is reserved for generated filters'
                    .format(fltr.title, fltr.id), logging.ERROR)
                valid = False

        filters = list(self.getRawFilters())
        for fid in self.getFilterIds():
            matches = self.getFiltersById(fid, filters)
            if len(matches) > 1:
                msgr.send_msg(
                    'Duplicate ID {} detected for filters: {}'.format(
                        fid, ', '.join(fltr.title or fltr.id for fltr in matches)),
                    logging.ERROR)
                valid = False

        if valid:
            self.validation_required = False
            msgr.send_msg('Filters passed validation.', logging.INFO)
        return valid
Example #4
    def start(self):
        try:
            self.scan()
        except AppException as e:
            msgr.send_msg(e, logging.ERROR)
        except BaseException as e:
            msgr.send_msg('Unexpected error occurred: {}. Error details logged to file.'.format(e), logging.ERROR)
            logexception()
        finally:
            self.notifier.stop()
            self.updater.stop()

            if self.downloader:
                self.downloader.stop()
            if self.parser:
                self.parser.stop()

            if self.downloader:
                self.downloader.join()

            if self.parser:
                self.parser.join()

            self.stateMgr.close()  # after parser is closed so we don't interfere with pending saves

            if self.updater.is_alive():
                self.updater.join()

            if self.notifier.is_alive():
                self.notifier.join()

            msgr.send_msg("Scanning stopped")
            msgr.send_stopped()
Example #5
    def _compileFilters(self):
        filters = []

        for fltr in self.autoFilters:
            try:
                comp = self.compileFilter(fltr)
                cf = CompiledFilter(fltr, comp)
                cf.enabled = fltr.category not in self.disabled_categories
                if not self.enable_5l_filters:
                    cf.enabled = cf.enabled and '_5l' not in cf.fltr.id
                filters.append(cf)
            except CompileException as e:
                msgr.send_msg(
                    'Failed compiling filter {}: {}'.format(fltr.title, e),
                    logging.WARN)

        item_prices = self.getPrices(self.autoFilters)
        # self.applyItemPriceOverrides(filters)
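        # compiled item prices are captured before overrides; compiled filter
        # prices are captured again after applyOverrides runs below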
        compiled_item_prices = self.getCompiledPrices(filters)

        user_filters = []

        for fltr in self.userFilters:
            try:
                comp = self.compileFilter(fltr)
                cf = CompiledFilter(fltr, comp)
                cf.enabled = fltr.enabled and fltr.category not in self.disabled_categories and fltr.criteria
                user_filters.append(cf)
            except CompileException as e:
                msgr.send_msg(
                    'Failed compiling filter {}: {}'.format(fltr.title, e),
                    logging.WARN)

        # apply filter overrides only after user filters are compiled
        self.applyOverrides(filters)

        compiled_filter_prices = self.getCompiledPrices(filters)

        filters = user_filters + filters

        for cf in filters:
            if cf.enabled and 'price_max' in cf.comp and cf.comp[
                    'price_max'] <= 0:
                cf.enabled = False
                msgr.send_msg(
                    'Filter disabled: {}. price max must be higher than zero.'.
                    format(cf.getDisplayTitle()), logging.DEBUG)

        active_filters = [cf for cf in filters if cf.enabled]
        active_filters.sort(key=lambda cf: cf.fltr.priority, reverse=True)

        for cf in filters:
            cf.finalize()

        self.activeFilters = active_filters
        self.compiledFilters = filters

        self.item_prices = item_prices
        self.compiled_item_prices = compiled_item_prices
        self.compiled_filter_prices = compiled_filter_prices
Example #6
    def handleResult(self, item, stash, fltr):
        whisper_msg = item.get_whisper_msg(stash)

        if config.notify:
            price = item.get_price_raw(get_stash_price_raw(stash)) or ''
            size_str = "" if item.stacksize == 1 else "{}x".format(item.stacksize)

            msg = "{} {}\n{}".format(size_str, item.name, price).strip()
            self.notifier.send((fltr.getDisplayTitle(), msg, item.get_whisper_msg(stash)))

        try:
            item_info = ItemResult(item, stash, getBaseUrl(self.poe_api_url), fltr)
        except (KeyError, IndexError) as e:
            msgr.send_msg(
                "Unexpected error while processing item {}. Item details will not be provided.".format(item.name),
                logging.WARN)
            item_info = None
            logexception()
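            # dump the offending item so the failure can be inspected later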
            with open(ITEM_ERROR_FNAME, mode='w') as f:
                json.dump(item, f, indent=4, separators=(',', ': '))

        msgr.send_msg(whisper_msg, tag=item_info)
Example #7
    def run(self):
        while not self._finished:
            item = self.ntfy_queue.get()
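            # a None item signals shutdown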
            if item is None:
                break
            title, msg, whisperMsg = item

            if config.notify:
                if config.notify_copy_msg:
                    pyperclip.copy(whisperMsg)

                delay = float(config.notification_duration)

                if delay > 0 and self.ntfy_queue.qsize():
                    title = "{} ({} more)".format(title, self.ntfy_queue.qsize())

                if config.notify_play_sound:
                    try:
                        winsound.PlaySound(ALERT_FNAME, winsound.SND_ASYNC | winsound.SND_FILENAME)
                    except RuntimeError:
                        pass  # failed to play sound (probably because of excessive notifications)
                    except Exception as e:
                        msgr.send_msg("Error playing sound: {}".format(e), logging.ERROR)

                try:
                    if not self.registered:
                        self.registered = self.growl.register()

                    if self.registered:
                        self.growl.notify(noteType="Item Alert",
                                          title=title,
                                          description=msg)
                    else:
                        msgr.send_msg('Failed to register with Growl, Notifications will not work, '
                                      'please check your settings.', logging.WARN)
                except gntp.errors.NetworkError as e:
                    msgr.send_msg('Failed to send notification. Make sure Growl is running.', logging.WARN)

                time.sleep(delay)

            self.ntfy_queue.task_done()
        self.ntfy_queue.task_done()
Example #8
    def scan(self):
        msgr.send_msg("Scan initializing..")
        os.makedirs('tmp', exist_ok=True)
        os.makedirs('log', exist_ok=True)

        is_beta = config.league.lower().startswith('beta ')
        if is_beta:
            self.poe_api_url = POE_BETA_API
            self.league = re.sub('beta ', '', config.league, flags=re.IGNORECASE)
        else:
            self.poe_api_url = POE_API
            self.league = config.league

        # assertions
        if not cm.initialized:
            raise AppException("Currency information must be initialized before starting a scan.")
        if not fm.initialized:
            raise AppException("Filters information must be initialized before starting a scan.")

        if cm.needUpdate:
            try:
                cm.update()
                msgr.send_msg("Currency rates updated successfully.")
            except AppException as e:
                msgr.send_msg(e, logging.ERROR)
                if cm.initialized:
                    msgr.send_msg('Using currency information from a local copy..', logging.WARN)

        if fm.needUpdate:
            try:
                msgr.send_msg("Generating filters from API..")
                fm.fetchFromAPI()
            except AppException as e:
                # filterFallback = True
                msgr.send_msg(e, logging.ERROR)

        msgr.send_msg('Compiling filters..', logging.INFO)
        fm.compileFilters(force_validation=True)

        filters = fm.getActiveFilters()

        if not len(filters):
            raise AppException("No filters are active. Stopping..")

        self.stateMgr.loadState()
        if self.stateMgr.getChangeId() == "" or str(config.scan_mode).lower() == "latest":
            msgr.send_msg("Fetching latest id from API..")
            latest_id = self._get_latest_id(is_beta)

            if latest_id:
                if not self.stateMgr.getChangeId() or get_delta(self.stateMgr.getChangeId(), latest_id) > 0:
                    self.stateMgr.saveState(latest_id)
                else:
                    msgr.send_msg('Saved ID is more recent, continuing..')
            elif not self._stop.is_set():
                raise AppException("Failed retrieving latest ID from API")

        self.updater.start()
        self.notifier.start()

        get_next = True

        msgr.send_msg("Scanning started")
        msgr.send_update_id(self.stateMgr.getChangeId())
        while not self._stop.is_set():
            if self.downloader is None or not self.downloader.is_alive():
                if self.downloader:
                    msgr.send_msg("Download thread ended abruptly. Restarting it..", logging.WARN)
                self.downloader = Downloader(self.stateMgr.getChangeId(), conns=config.max_conns)
                self.downloader.start()

            if self.parser is None or (not self.parser.is_alive() and not self.parser.signal_stop):
                if self.parser:
                    msgr.send_msg("Parser thread ended abruptly. Restarting it..", logging.WARN)
                if config.num_workers > 0:
                    workers = config.num_workers
                else:
                    workers = max((os.cpu_count() or 1) - 1, 1)

                self.parser = ParserThread(workers, self.league, self.stateMgr, self.handleResult)
                self.parser.start()

            try:
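                # hand the downloaded page to the parser; on a Full queue, keep
                # the current page and retry it on the next iteration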
                if get_next:
                    req_id, resp = self.downloader.get(timeout=0.5)
                    get_next = False

                self.parser.put(req_id, resp, timeout=0.5)
                get_next = True
            except Full:
                msgr.send_msg("Parser queue is full.. waiting for parser..", logging.WARN)
            except Empty:
                continue
Example #9
    def run(self):
        try:
            msgr.send_msg('Downloader started', logging.INFO)
            if self.skip_ahead:
                msgr.send_msg('Skipping ahead.. please wait..')
            # self.skip_ahead = False

            self.stat_thread.start()

            start_time = time.time()

            while not self.evt_stop.is_set():
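                # dispatch pending requests to free curl handles, honoring the
                # configured delay between consecutive requests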
                while not self.evt_stop.is_set():
                    with self.req_queue_lock:
                        if self.req_queue and self.free_handles and \
                                (not self.last_request or time.time() - self.last_request >= config.request_delay):

                            # get unfinished request with minimal ID
                            # assumes any request in queue was properly registered with add_request first
                            req_sorted = sorted(
                                self.req_queue,
                                key=lambda r: self.requests.index(r))
                            req = req_sorted[0]
                            self.req_queue.remove(req)

                            # req = self.req_queue.pop(0)  # TODO: choose min ID (only needed if we resubmit failed attempts)
                            handle = self.free_handles.pop(0)
                            self._prepare_handle(handle, req)

                            if self.last_request:
                                delta = time.time() - self.last_request
                            else:
                                delta = 0

                            add_delay = time.time() - req.submit_time
                            self.queue_time.append(add_delay)

                            self.m.add_handle(handle)
                            self.last_request = time.time()
                            req.start_time = self.last_request

                            self.req_delay_time.append(delta)

                            # msgr.send_msg('Added: {}, delta: {:.3f}s, add delay: {:.3f}s'.format(req.req_id, delta, add_delay), logging.DEBUG)

                    ret, num_handles = self.m.perform()
                    if ret != pycurl.E_CALL_MULTI_PERFORM:
                        break

                # Check for curl objects which have terminated, and add them to the freelist
                while not self.evt_stop.is_set():
                    num_q, ok_list, err_list = self.m.info_read()
                    for c in ok_list:
                        self.m.remove_handle(c)
                        cinfo = info(c)
                        # print("Success: {} - {}: total: {:.2f}s, speed: {} KB/s, size: {} KB, "
                        #       "start-transfer: {:.2f}s, pre-transfer: {:.2f}s"
                        #       .format(c.req.req_id, c.getinfo(pycurl.HTTP_CODE), cinfo['total-time'],
                        #               round(cinfo['speed-download']/1024), round(cinfo['size-download']/1024),
                        #               cinfo['starttransfer-time'], cinfo['pretransfer-time']))

                        if cinfo['http-code'] == 200:
                            if not c.req.submitted_next:
                                next_id = c.req.peek_id()
                                if next_id:
                                    msgr.send_msg(
                                        'Full peek was required for ID: {}'.
                                        format(c.req.req_id), logging.INFO)
                                    self.add_request(next_id)
                                    c.req.submitted_next = next_id
                                    c.req.peek_time = time.time() - c.req.start_time

                            if c.req.submitted_next:
                                c.req.finished = True
                            else:
                                msgr.send_msg(
                                    'Request successful for ID {}, but next ID was not found. Redownloading..'
                                    .format(c.req.req_id), logging.INFO)
                        else:
                            try:
                                err_data = json.loads(
                                    c.req.buffer.getvalue().decode())['error']
                                msgr.send_msg(
                                    'Request for ID {} failed. {} - Code: {}, Message: {}'
                                    .format(c.req.req_id, cinfo['http-code'],
                                            err_data['code'],
                                            err_data['message']), logging.WARN)
                            except Exception:
                                msgr.send_msg(
                                    'Request for ID {} failed. {} - {}'.format(
                                        c.req.req_id, cinfo['http-code'],
                                        c.req.buffer.getvalue().decode()),
                                    logging.WARN)

                        # if c.req.finished:
                        #     self.free_handles.append(c)
                        # else:
                        #     c.req.reset()
                        #     self.m.add_handle(c)
                        self.free_handles.append(c)

                        if not c.req.finished:
                            c.req.reset()
                            self._submit(c.req)
                        else:
                            self.req_time.append(cinfo['total-time'])
                            self.peek_time.append(c.req.peek_time)

                    for c, errno, errmsg in err_list:
                        self.m.remove_handle(c)

                        # if c.req.finished:  # for skips?
                        #     self.free_handles.append(c)
                        # else:
                        #     c.req.reset()
                        #     self.m.add_handle(c)
                        self.free_handles.append(c)

                        if not c.req.finished:
                            msgr.send_msg(
                                "Failed: {} - Code: {}, {}. Redownloading..".
                                format(c.req.req_id, errno,
                                       errmsg), logging.INFO)
                            c.req.reset()
                            self._submit(c.req)
                        else:
                            cinfo = info(c)
                            self.req_time.append(cinfo['total-time'])
                            self.peek_time.append(c.req.peek_time)

                    if num_q == 0:
                        break

                with self.requests_lock:
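                    # pop completed requests in submission order and push their
                    # payloads to the result queue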
                    while self.requests and self.requests[0].finished:
                        req = self.requests.pop(0)

                        delta = get_delta(req.req_id, req.submitted_next)
                        self.dl_deltas.append(delta)
                        with self.delta_lock:
                            self.delta_count += delta

                        if not req.skip_data:
                            try:
                                self.res_queue.put((req.req_id, req.buffer),
                                                   timeout=1)
                            except Full:
                                msgr.send_msg(
                                    'Result queue is full.. waiting for consumer to free slots..',
                                    logging.WARN)
                                self.res_queue.put((req.req_id, req.buffer))

                if self.skip_ahead:
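                    # start processing data once the average change-id delta is
                    # small enough or the skip-ahead timeout expires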
                    if self.dl_deltas and len(self.dl_deltas) == self.dl_deltas.maxlen:
                        avg_delta = sum(self.dl_deltas) / len(self.dl_deltas)
                        # msgr.send_msg("Delta avg: {}".format(avg_delta), logging.DEBUG)
                        if avg_delta <= SUFFICIENT_DELTA:
                            msgr.send_msg(
                                "Sufficient delta reached ({}) after {:.3f} seconds. Data processing started."
                                .format(avg_delta,
                                        time.time() - start_time))
                            self.skip_ahead = False

                    if self.skip_ahead:
                        passed = time.time() - start_time
                        if passed > self.skip_timeout:
                            msgr.send_msg(
                                "Skip ahead timed out after {:.3f} seconds. Data processing started."
                                .format(passed), logging.WARN)
                            self.skip_ahead = False

                self.m.select(1.0)

                if not len(self.requests):
                    # Should never happen, since there is always a next id.
                    # If it does, parent thread will end up restarting this from last saved point
                    msgr.send_msg('No requests left.. stopping..',
                                  logging.WARN)
                    self.stop()
        except Exception as e:
            msgr.send_msg(
                "Unexpected error occurred while downloading: {}. Error details logged to file."
                .format(e), logging.ERROR)
            logexception()
        finally:
            self._close()
            msgr.send_msg('Downloader stopped', logging.INFO)
Example #10
    def run(self):
        # pr = cProfile.Profile()
        # pr.enable()

        with Pool(processes=self.num_workers) as pool:

            data = None
            request_id = None

            msgr.send_msg('Parser started..', logging.INFO)
            while not self._finished:
                try:
                    item = self.queue.get()
                    if item is None:
                        break

                    request_id, b = item
                    msgr.send_update_id(request_id)

                    last_parse = time.time()
                    data = json.loads(b.getvalue().decode())

                    # snapshot filters and currency information
                    with cm.compile_lock:
                        filters = fm.getActiveFilters()
                        c_budget = cm.compilePrice(
                            fm.budget) if fm.budget else None
                        ccm = cm.toCCM()

                    if not len(filters):
                        msgr.send_msg("No filters are active. Stopping..")
                        self.signal_stop = True
                        break

                    # pr.enable()
                    tabs, league_tabs, items = parse_stashes_parallel(
                        data, filters, ccm, self.league, c_budget,
                        self.stateMgr, self.resultHandler, self.num_workers,
                        pool)

                    # pr.disable()

                    # parse_next_id(data, self.stateMgr)

                    parse_time = time.time() - last_parse
                    speed = items / max(parse_time, 0.001)
                    self.parse_speed.append(speed)
                    self.parse_times.append(parse_time)

                    msgr.send_msg(
                        "Parse: {:.3f}s, Tabs: {}, League tabs: {}, Items: {}".
                        format(parse_time, tabs, league_tabs,
                               items), logging.DEBUG)
                # except Empty:
                #     pass
                except Exception as e:
                    msgr.send_msg(
                        "Unexpected error occurred while parsing: {}. Error details logged to file. ID: {}"
                        .format(e, request_id), logging.ERROR)
                    logexception()
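                    # save the offending payload for post-mortem inspection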
                    if data:
                        fname = os.path.join(
                            JSON_ERROR_DIR,
                            JSON_ERROR_FNAME.format(request_id))
                        with open(fname, "w") as f:
                            json.dump(data,
                                      f,
                                      indent=4,
                                      separators=(',', ': '))

            msgr.send_msg('Parser stopped', logging.INFO)
Example #11
    def run(self):
        while not self.evt_stop.wait(self.interval):
            currency_updated = False
            filters_updated = False

            try:
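                # attempt both updates independently; recompile filters if either succeeded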
                try:
                    cm.update()
                    # msgr.send_msg("Currency rates updated successfully.", logging.INFO)
                    currency_updated = True
                except AppException as e:
                    msgr.send_msg(e, logging.ERROR)

                try:
                    self.fm.fetchFromAPI()
                    # msgr.send_msg("Filters updated successfully.", logging.INFO)
                    filters_updated = True
                except AppException as e:
                    msgr.send_msg(e, logging.ERROR)

                if currency_updated or filters_updated:
                    self.fm.compileFilters()

                if currency_updated and filters_updated:
                    msgr.send_msg("Scheduled update completed successfully.",
                                  logging.INFO)
                elif not (currency_updated or filters_updated):
                    msgr.send_msg(
                        "Scheduled currency and filters update failed. Retrying in {} seconds.."
                        .format(self.interval), logging.WARN)
                else:
                    msgr.send_msg("Scheduled update was partially successful.",
                                  logging.WARN)
            except Exception as e:
                msgr.send_msg("Unexpected error while updating: {}".format(e),
                              logging.ERROR)
                logexception()
Example #12
    def fetchFromAPI(self, force_update=False, accept_empty=False):
        if not force_update and not self.needUpdate:
            return

        # print('updating filters..')

        try:
            filter_ids = []
            filters = []

            def name_to_id(name):
                return '_' + name.lower().replace(' ', '_')

            def get_unique_id(title, name, category, links):
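                # prefer the title-based id; fall back to name (+links), then
                # title+category, and finally a numeric suffix until unique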
                title_id = name_to_id(title)
                if title_id not in filter_ids:
                    return title_id

                name_id = name_to_id('{}{}'.format(
                    name, ' {}L'.format(links) if links else ''))
                if name_id not in filter_ids:
                    # print('id {} was taken, using name id {} instead'.format(title_id, name_id))
                    return name_id

                category_id = name_to_id(title + ' ' + category)
                if category_id not in filter_ids:
                    # print('id {} was taken, using category id {} instead'.format(title_id, category_id))
                    return category_id

                id = title_id
                n = 2
                while id in filter_ids:
                    id = '{}{}'.format(title_id, n)
                    n += 1
                # if n > 2:
                #     print('id {} was taken, using {} instead'.format(title_id, id))

                return id

            c = pycurl.Curl()
            for url in _URLS:
                furl = url.format(config.league)
                data = getJsonFromURL(furl, handle=c, max_attempts=3)
                if data is None and not accept_empty:
                    raise AppException(
                        "Filters update failed. Empty response from server")

                if data:
                    category = re.match(".*Get(.*)Overview",
                                        furl).group(1).lower()

                    for item in data['lines']:
                        if item['count'] < self.confidence_level:
                            continue
                        priority = FilterPriority.AutoBase
                        crit = {}
                        # crit['price_max'] = "{} exalted".format(float(item.get('exaltedValue', 0)))
                        crit['price_max'] = "{} chaos".format(
                            float(item.get('chaosValue', 0)))
                        base = item['baseType'] if category not in (
                            'essence', ) else None
                        name = item['name']
                        if base:
                            name += ' ' + base
                        crit['name'] = ['"{}"'.format(name)]

                        try:
                            rarity = ItemRarity(item['itemClass'])
                            crit['rarity'] = [_ITEM_TYPE[rarity]]
                        except ValueError:
                            rarity = None

                        crit['buyout'] = True

                        if category in ('uniquearmour', 'uniqueweapon'):
                            crit['corrupted'] = False

                        links = item['links']
                        title = "{} {} {}".format(
                            'Legacy' if rarity == ItemRarity.Relic else '',
                            item['name'],
                            item['variant'] if item['variant'] is not None else '').strip()

                        if links:
                            title = '{} {}L'.format(title, links)
                            crit['links_min'] = links
                            if links == 5:
                                priority += 1
                            elif links == 6:
                                priority += 2

                        tier = item['mapTier']
                        if tier:
                            crit['level_min'] = tier
                            crit['level_max'] = tier

                        id = get_unique_id(title, name, category, links)
                        filter_ids.append(id)

                        fltr = Filter(title,
                                      crit,
                                      False,
                                      category,
                                      id=id,
                                      priority=priority)

                        if item['variant'] is not None:
                            if item['variant'] not in _VARIANTS:
                                msgr.send_msg(
                                    "Unknown variant {} in item {}".format(
                                        item['variant'], item['name']),
                                    logging.WARN)
                            else:
                                # crit['explicit'] = {'mods': [{'expr': _VARIANTS[item['variant']]}]}
                                mfs = _VARIANTS[item['variant']]
                                if mfs:
                                    fg = AllFilterGroup()
                                    for expr in _VARIANTS[item['variant']]:
                                        fg.addModFilter(
                                            ModFilter(ModFilterType.Explicit,
                                                      expr))

                                    fltr.criteria['fgs'] = [fg.toDict()]

                        fltr.validate()
                        filters.append(fltr)

            self.autoFilters = filters
            self.item_prices = self.getPrices(self.autoFilters)
            self.saveAutoFilters()
            self.last_update = datetime.utcnow() if filters else None
        except pycurl.error as e:
            raise AppException(
                "Filters update failed. Connection error: {}".format(e))
        except (KeyError, ValueError) as e:
            raise AppException(
                "Filters update failed. Parsing error: {}".format(e))
        except AppException:
            raise
        except Exception as e:
            logexception()
            raise AppException(
                "Filters update failed. Unexpected error: {}".format(e))