def test_search_newest_empty(self):
    # A random hex token is almost certainly unknown to urlscan,
    # so the search is expected to come back empty.
    url = token_hex(25)
    info("Requested URL: {}".format(url))
    r, date = search_newest(url)
    assert_type(r, dict, "Check if returned search data is a valid dict")
    assert_empty(r, "Check if returned search data is empty")
    assert_none(date, "Check if returned date is None")
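The tests in these examples lean on small assertion helpers (assert_type, assert_empty, assert_not_empty, assert_none) and an info logger that are never defined here. A minimal sketch of what they might look like, assuming each helper logs its message and wraps a plain assert; the project's real versions may differ:

# Sketch only: bodies are assumptions, not the project's actual helpers.
from logging import info  # stand-in for the project's own info() helper

def assert_type(value, expected_type, message):
    info(message)
    assert isinstance(value, expected_type), message

def assert_empty(value, message):
    info(message)
    assert len(value) == 0, message

def assert_not_empty(value, message):
    info(message)
    assert len(value) > 0, message

def assert_none(value, message):
    info(message)
    assert value is None, message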
def get_urlscan_details(url_body):
    # Validate the request body against the expected JSON schema.
    try:
        jsonschema.validate(url_body, details_url_schema)
    except jsonschema.exceptions.ValidationError as exc:
        raise BadRequest(exc.message)

    URL = url_body.get('url')
    # Reuse the most recent historic scan if it is fresh enough.
    historic_search, when_performed = urlscan.search_newest(URL)
    found = False
    if when_performed and when_performed > datetime.utcnow() - timedelta(days=Const.DAY):
        results = urlscan.results(historic_search.get('_id'))
        if results:
            found = True

    # No usable historic scan: submit a fresh one and poll for its results.
    if not found:
        try:
            url_id = urlscan.submit(URL)
        except urlscan.ApiKeyException as exc:
            raise Unauthorized(exc.message)
        except urlscan.UrlscanException as exc:
            raise BadRequest(exc.message)

        results = urlscan.results(url_id, wait_time=60)
        if not results:
            return _no_data_response()

    response_text = {
        "details": results
    }
    return Response(
        json.dumps(response_text, default=_default_json_model),
        200, mimetype="application/json")
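get_urlscan_details calls two private helpers that do not appear in these examples. A minimal sketch of plausible implementations, assuming a Flask Response (the BadRequest/Unauthorized exceptions suggest Flask/Werkzeug) and that _default_json_model exists to make the datetime values in urlscan results JSON-serializable; both bodies are guesses, not the project's actual code:

# Sketch only: the real project defines these elsewhere; bodies are assumptions.
import json
from datetime import datetime
from flask import Response  # assumption: handler style suggests Flask

def _no_data_response():
    # Assumed behavior: tell the caller the scan produced no results yet.
    return Response(json.dumps({"details": None}), 200,
                    mimetype="application/json")

def _default_json_model(obj):
    # json.dumps() invokes this for objects it cannot serialize natively;
    # urlscan results carry datetimes, so render them as ISO-8601 strings.
    if isinstance(obj, datetime):
        return obj.isoformat()
    raise TypeError("Object of type {} is not JSON serializable".format(type(obj).__name__))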
def test_search_newest(self, urlscan_data):
    url = urlscan_data[0]
    info("Requested URL: {}".format(url))
    r, date = search_newest(url)
    assert_type(r, dict, "Check if returned search data is a valid dict")
    assert_not_empty(r, "Check if returned search data is not empty")
    assert_type(date, datetime, "Check if returned date is a valid datetime object")
def verify_urlscan(URL, passive=False, urlscan_wait_time=Const.URLSCAN_WAIT_SECONDS):
    historic_search, when_performed = urlscan.search_newest(URL)

    # Passive mode: only reuse a historic scan that is at most three days old.
    if passive and when_performed and when_performed > datetime.utcnow() - timedelta(days=3 * Const.DAY):
        results = urlscan.results(historic_search.get('_id'))
        if results and results.get('malicious'):
            return True, Const.URLSCAN_FINISHED_MESSAGE
        else:
            return False, Const.URLSCAN_FINISHED_MESSAGE
    # Active mode: always submit a fresh scan and wait for its results.
    elif not passive:
        log.debug('[URL_VERIFY] Force scanning URL')
        try:
            url_id = urlscan.submit(URL)
        except urlscan.UrlscanException:
            return False, Const.URLSCAN_NOT_FINISHED_ERROR
        results = urlscan.results(url_id, wait_time=urlscan_wait_time)
        if results:
            if results.get('malicious'):
                return True, Const.URLSCAN_FINISHED_MESSAGE
            else:
                return False, Const.URLSCAN_FINISHED_MESSAGE
        else:
            return False, Const.URLSCAN_NOT_FINISHED_ERROR
    # Passive mode with no fresh historic scan available.
    else:
        return False, Const.URLSCAN_NOT_FINISHED_ERROR
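A hedged sketch of call sites for verify_urlscan, assuming only the (is_malicious, message) return shape visible above; the URL and wait time are illustrative, not values from the project:

# Passive check: reuse a recent historic scan, never trigger a new one.
is_malicious, message = verify_urlscan("http://example.com/login", passive=True)
if is_malicious:
    log.warning("[URL_VERIFY] URL flagged as malicious: %s", message)

# Active check with a shorter poll window (value is illustrative).
is_malicious, message = verify_urlscan("http://example.com/login",
                                       urlscan_wait_time=30)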
@pytest.fixture
def urlscan_data():
    # Consumed by test_search_newest via its urlscan_data parameter.
    search_url = "example.com"
    wait(5)
    info("Requested URL - {}".format(search_url))
    uid = submit(search_url)
    info("Submitted search with id {}".format(uid), pre=True)
    r = results(uid, wait_time=120)
    newest = search_newest(search_url)
    yield search_url, uid, r, newest
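The fixture yields a (search_url, uid, results, newest) tuple, but test_search_newest only unpacks the URL. A hypothetical test consuming the rest of the tuple (the name and assertions are illustrative, built only from the helpers already used above):

def test_submitted_scan_is_searchable(self, urlscan_data):
    search_url, uid, r, newest = urlscan_data
    info("Submitted scan id: {}".format(uid))
    assert_not_empty(r, "Check if results for the submitted scan are not empty")
    search_data, date = newest  # search_newest returns a (data, date) pair
    assert_type(search_data, dict, "Check if returned search data is a valid dict")
    assert_type(date, datetime, "Check if returned date is a valid datetime object")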