def get(self):
    """Handle GET ?q=<indicator>: return the indicator's predicted score.

    Returns:
        dict: ``{'data': '<score>'}`` where score is ``'0'`` for empty,
        whitelisted, or unsupported indicators, otherwise the model's
        prediction expressed as a percentage rounded to 2 decimal places.

    Aborts with HTTP 422 when the indicator type cannot be resolved.
    """
    parser = reqparse.RequestParser()
    parser.add_argument('q')
    args = parser.parse_args()

    if not args.q:
        return {'data': '0'}

    # Whitelisted indicators always score 0 — return directly instead of
    # falling through to the prediction-unpacking code below.
    if self.is_whitelisted(args.q):
        return {'data': '0'}

    itype = resolve_itype(args.q)
    if not itype:
        api.abort(422)
        return

    if itype == 'fqdn':
        p = predict_domain(args.q)
    elif itype == 'url':
        p = predict_url(args.q)
    else:
        # No model for this itype — score 0.
        return {'data': '0'}

    # BUG FIX: the original set p = 0 (an int) on the whitelisted and
    # unsupported-itype paths and then unconditionally did p[0][0], raising
    # TypeError ('int' object is not subscriptable). Those paths now return
    # early, so only genuine model output (indexable as [0][0]) reaches here.
    # Also removed the redundant double str() conversion.
    return {'data': str(round(p[0][0] * 100, 2))}
def predict_urls(indicators):
    """Score every 'url' indicator in *indicators* with the URL model.

    Args:
        indicators: iterable of indicator records; non-url entries pass
            through untouched.

    Returns:
        The indicators with model predictions merged in, or *indicators*
        unchanged when it contains no url entries.
    """
    urls = _filter_indicators(indicators, 'url')
    # Idiomatic emptiness test (instead of len(urls) == 0); also avoids
    # invoking the model on an empty batch.
    if not urls:
        return indicators
    predictions = predict_url([u[0] for u in urls])
    return _normalize_predictions(indicators, urls, predictions)
def predict_urls(self, indicators):
    """Attach a percentage probability to each 'url' indicator in place.

    Args:
        indicators: iterable of indicator objects exposing ``.indicator``
            and ``.itype``; consumed into a list.

    Returns:
        list: the indicators, with ``probability`` set (rounded percentage,
        2 decimals) on every url entry; other entries are untouched.
    """
    indicators = list(indicators)
    # Remember each url's position in `indicators` so predictions can be
    # written back to the right record.
    urls = [(i.indicator, idx) for idx, i in enumerate(indicators)
            if i.itype == 'url']
    # ROBUSTNESS FIX: the original called predict_url([]) even when there
    # were no url indicators — skip the model entirely, matching the
    # module-level predict_urls helper.
    if not urls:
        return indicators
    predict = predict_url([u[0] for u in urls])
    for pos, (_, idx) in enumerate(urls):
        indicators[idx].probability = round(predict[pos][0] * 100, 2)
    return indicators
def _predict_indicators(itype, indicators):
    """Score all indicators of type *itype* and set their ``probability``.

    Args:
        itype: indicator type to score; 'url' uses the URL model, anything
            else falls through to the fqdn model (callers pass 'fqdn').
        indicators: one indicator or an iterable; normalized via _to_list.

    Returns:
        list: the indicators, with ``probability`` set to the raw model
        output (float) on matching entries.
    """
    _load()  # ensure models are loaded before predicting
    indicators = _to_list(indicators)
    # Pair each matching indicator value with its index for write-back.
    matches = [(i.indicator, idx) for idx, i in enumerate(indicators)
               if i.itype == itype]
    # ROBUSTNESS FIX: skip the model call entirely on an empty batch
    # (the original always invoked predict_url/predict_fqdn, even with []).
    if not matches:
        return indicators
    values = [m[0] for m in matches]
    if itype == 'url':
        predict = predict_url(values)
    else:
        predict = predict_fqdn(values)
    for pos, (_, idx) in enumerate(matches):
        indicators[idx].probability = float(predict[pos][0])
    return indicators