Code example #1
0
 def build_search_results(
         username: str, password: str, aliases: Tuple[Alias, ...],
         search_cache: LRUCache) -> Tuple[List[OeciCase], List[str]]:
     """Search OECI for every alias, returning (cases, error messages).

     A cached result for the exact alias tuple is returned as-is; otherwise
     each alias is searched in its own session, and the combined result is
     cached only when no per-alias error occurred.
     """
     cached = search_cache[aliases]
     if cached:
         return cached
     failure_messages: List[str] = []
     found_cases: List[OeciCase] = []
     for alias in aliases:
         session = requests.Session()
         try:
             login_response = Crawler.attempt_login(
                 session, username, password)
             found_cases += Crawler.search(
                 session,
                 login_response,
                 alias.first_name,
                 alias.last_name,
                 alias.middle_name,
                 alias.birth_date,
             )
         except InvalidOECIUsernamePassword as e:
             # Bad credentials abort the whole request with a 401.
             error(401, str(e))
         except OECIUnavailable as e:
             # Upstream site down aborts the whole request with a 404.
             error(404, str(e))
         except Exception as e:
             # Any other per-alias failure is collected and reported.
             failure_messages.append(str(e))
         finally:
             session.close()
     if not failure_messages:
         # Only clean results are worth caching.
         search_cache[aliases] = found_cases, failure_messages
     return found_cases, failure_messages
Code example #2
0
File: search.py  Project: Jessba2/recordexpungPDX
    def post(self):
        """Search OECI for every alias in the request and return a summarized,
        expunger-analyzed record.

        Expects a JSON body of the form ``{"names": [{"first_name", "last_name",
        "middle_name", "birth_date"}, ...]}`` and an encrypted ``oeci_token``
        cookie holding the OECI credentials. Aborts with 400 on missing/invalid
        body and 401 on missing or rejected credentials.
        """
        request_data = request.get_json()

        if request_data is None or not request_data.get("names"):
            error(400, "No json data in request body")

        for alias in request_data["names"]:
            check_data_fields(
                alias,
                ["first_name", "last_name", "middle_name", "birth_date"])

        cipher = DataCipher(key=current_app.config.get("SECRET_KEY"))

        if "oeci_token" not in request.cookies:
            error(401, "Missing login credentials to OECI.")

        decrypted_credentials = cipher.decrypt(request.cookies["oeci_token"])

        crawler = Crawler()

        login_result = crawler.login(decrypted_credentials["oeci_username"],
                                     decrypted_credentials["oeci_password"],
                                     close_session=False)

        if login_result is False:
            error(401, "Attempted login to OECI failed")

        cases: List[Case] = []
        for alias in request_data["names"]:
            cases += crawler.search(
                alias["first_name"],
                alias["last_name"],
                alias["middle_name"],
                alias["birth_date"],
            ).cases
        # BUG FIX: itertools.groupby only merges *consecutive* items with the
        # same key, so the cases must be sorted by case_number first or
        # non-adjacent duplicates would survive deduplication.
        cases_with_unique_case_number = [
            next(group)
            for key, group in groupby(
                sorted(cases, key=lambda case: case.case_number),
                lambda case: case.case_number)
        ]
        record = Record(cases_with_unique_case_number)

        expunger = Expunger(record)
        expunger.run()

        # Saving the search result is best-effort: a storage failure must not
        # break the user's search response.
        try:
            save_result(request_data, record)
        except Exception as ex:
            logging.error("Saving search result failed with exception: %s" %
                          ex,
                          stack_info=True)

        record_summary = RecordSummarizer.summarize(record)
        response_data = {"data": {"record": record_summary}}

        current_app.json_encoder = ExpungeModelEncoder

        return response_data  # Json-encoding happens automatically here
Code example #3
0
    def post(self):
        """Search OECI for a single name and return the expunger-analyzed
        record.

        Expects a JSON body with ``first_name``, ``last_name``,
        ``middle_name`` and ``birth_date`` fields, plus an encrypted
        ``oeci_token`` cookie holding the OECI credentials. Aborts with 400 on
        a missing body and 401 on missing or rejected credentials.
        """
        request_data = request.get_json()

        if request_data is None:
            error(400, "No json data in request body")

        check_data_fields(
            request_data,
            ["first_name", "last_name", "middle_name", "birth_date"])

        cipher = DataCipher(key=current_app.config.get("SECRET_KEY"))

        # Idiomatic membership test ("x not in d") instead of
        # "not x in d.keys()".
        if "oeci_token" not in request.cookies:
            error(401, "Missing login credentials to OECI.")

        decrypted_credentials = cipher.decrypt(request.cookies["oeci_token"])

        crawler = Crawler()

        login_result = crawler.login(decrypted_credentials["oeci_username"],
                                     decrypted_credentials["oeci_password"],
                                     close_session=False)

        if login_result is False:
            error(401, "Attempted login to OECI failed")

        record = crawler.search(request_data["first_name"],
                                request_data["last_name"],
                                request_data["middle_name"],
                                request_data["birth_date"])

        expunger = Expunger(record)
        expunger.run()

        # Saving the search result is best-effort: a storage failure must not
        # break the user's search response.
        try:
            save_result(request_data, record)
        except Exception as ex:
            logging.error("Saving search result failed with exception: %s" %
                          ex,
                          stack_info=True)

        response_data = {"data": {"record": record}}

        current_app.json_encoder = ExpungeModelEncoder

        return response_data  # Json-encoding happens automatically here
Code example #4
0
    def build_record(
        username: str, password: str, aliases: List[Dict[str, str]]
    ) -> Tuple[Record, AmbiguousRecord, Dict[str, Question]]:
        """Crawl OECI for every alias and build the (possibly ambiguous)
        combined record.

        Returns ``(record, ambiguous_record, questions)`` where ``questions``
        maps each ambiguous charge id to its disambiguation Question. If any
        alias search raised, an error-only Record is returned instead and the
        questions dict is empty.
        """
        ambiguous_cases_accumulator: List[AmbiguousCase] = []
        questions_accumulator: List[Question] = []
        errors = []
        for alias in aliases:
            crawler = Crawler()
            login_result = crawler.login(username,
                                         password,
                                         close_session=False)
            if login_result is False:
                error(401, "Attempted login to OECI failed")

            try:
                search_result = crawler.search(
                    alias["first_name"],
                    alias["last_name"],
                    alias["middle_name"],
                    alias["birth_date"],
                )
                ambiguous_cases, questions = search_result
                ambiguous_cases_accumulator += ambiguous_cases
                questions_accumulator += questions
            except Exception as e:
                # A failed alias search doesn't abort the others; the message
                # is carried back to the caller inside the Record.
                errors.append(str(e))
        if errors:
            record = Record((), tuple(errors))
            ambiguous_record = [record]
            return record, ambiguous_record, {}
        else:
            ambiguous_record: AmbiguousRecord = []  # type: ignore
            # Each element of the cartesian product is one possible concrete
            # set of cases for the ambiguous record.
            for cases in product(*ambiguous_cases_accumulator):
                # groupby requires its input pre-sorted on the grouping key;
                # next(group) takes the first case per unique case number.
                cases_with_unique_case_number: List[Case] = [
                    next(group) for key, group in groupby(
                        sorted(cases, key=lambda case: case.case_number),
                        lambda case: case.case_number)
                ]
                ambiguous_record.append(
                    Record(tuple(cases_with_unique_case_number)))
            record = RecordCreator.analyze_ambiguous_record(ambiguous_record)
            questions_as_dict = {
                q.ambiguous_charge_id: q
                for q in questions_accumulator
            }
            return record, ambiguous_record, questions_as_dict
Code example #5
0
File: search.py  Project: darenschaad/recordexpungPDX
    def post(self):
        """Search OECI for a single name and return the expunger-analyzed
        record.

        Expects a JSON body with ``first_name``, ``last_name``,
        ``middle_name`` and ``birth_date`` fields, plus an encrypted
        ``oeci_token`` cookie holding the OECI credentials. Aborts with 400 on
        a missing body and 401 on missing or rejected credentials.
        """
        request_data = request.get_json()

        if request_data is None:
            error(400, "No json data in request body")

        check_data_fields(request_data, ["first_name", "last_name",
                                 "middle_name", "birth_date"])

        cipher = DataCipher(
            key=current_app.config.get("JWT_SECRET_KEY"))

        # ROBUSTNESS FIX: without this guard a missing cookie raised a
        # KeyError (HTTP 500) instead of a clean 401, unlike the other search
        # endpoints in this project.
        if "oeci_token" not in request.cookies:
            error(401, "Missing login credentials to OECI.")

        decrypted_credentials = cipher.decrypt(request.cookies["oeci_token"])

        crawler = Crawler()

        login_result = crawler.login(
            decrypted_credentials["oeci_username"],
            decrypted_credentials["oeci_password"],
            close_session=False)

        if login_result is False:
            error(401, "Attempted login to OECI failed")

        record = crawler.search(
            request_data["first_name"],
            request_data["last_name"],
            request_data["middle_name"],
            request_data["birth_date"])

        expunger = Expunger(record)
        expunger.run()

        response_data = {
            "data": {
                "record": record
            }
        }

        current_app.json_encoder = ExpungeModelEncoder

        return response_data  # Json-encoding happens automatically here
Code example #6
0
class TestCrawler(unittest.TestCase):
    """Exercise Crawler.search against mocked OECI pages."""

    def setUp(self):
        # Log in once against a mocked login endpoint so every test starts
        # with an authenticated crawler.
        self.crawler = Crawler()
        with requests_mock.Mocker() as m:
            m.post(URL.login_url(), text=PostLoginPage.POST_LOGIN_PAGE)
            self.crawler.login('username', 'password')

    def test_search_function(self):
        root = 'https://publicaccess.courts.oregon.gov/PublicAccessLogin/'
        with requests_mock.Mocker() as m:
            # First POST returns the search page, second the result record.
            m.post(root + 'Search.aspx?ID=100',
                   [{'text': SearchPageResponse.RESPONSE},
                    {'text': JohnDoe.RECORD}])

            detail = root + 'CaseDetail.aspx'
            m.get(detail + '?CaseID=X0001', text=CaseDetails.CASE_X1)
            m.get(detail + '?CaseID=X0002',
                  text=CaseDetails.CASE_WITHOUT_FINANCIAL_SECTION)
            m.get(detail + '?CaseID=X0003',
                  text=CaseDetails.CASE_WITHOUT_DISPOS)

            self.crawler.search('John', 'Doe')

        cases = self.crawler.result.cases
        assert len(cases) == 3
        assert [len(case.charges) for case in cases] == [3, 1, 3]

        # Case 0: three charges, all disposed on the same day.
        expected_case0 = [('Convicted - Failure to Appear', '06/12/2017'),
                          ('Dismissed', '06/12/2017'),
                          ('Hmmmm', '06/12/2017')]
        for charge, (ruling, date) in zip(cases[0].charges, expected_case0):
            assert charge.disposition.ruling == ruling
            assert charge.disposition.date == date

        # Case 1: a single dismissed charge.
        assert cases[1].charges[0].disposition.ruling == 'Dismissed'
        assert cases[1].charges[0].disposition.date == '04/30/1992'

        # Case 2: no disposition information on any charge.
        for charge in cases[2].charges:
            assert charge.disposition.ruling is None
            assert charge.disposition.date is None

    def test_a_blank_search_response(self):
        root = 'https://publicaccess.courts.oregon.gov/PublicAccessLogin/'
        with requests_mock.Mocker() as m:
            m.post(root + 'Search.aspx?ID=100',
                   [{'text': SearchPageResponse.RESPONSE},
                    {'text': JohnDoe.BLANK_RECORD}])
            self.crawler.search('John', 'Doe')

        # A blank record yields no cases at all.
        assert len(self.crawler.result.cases) == 0

    def test_single_charge_conviction(self):
        root = 'https://publicaccess.courts.oregon.gov/PublicAccessLogin/'
        with requests_mock.Mocker() as m:
            m.post(root + 'Search.aspx?ID=100',
                   [{'text': SearchPageResponse.RESPONSE},
                    {'text': JohnDoe.SINGLE_CASE_RECORD}])
            m.get(root + 'CaseDetail.aspx' + '?CaseID=CASEJD1',
                  text=CaseDetails.CASEJD1)

            self.crawler.search('John', 'Doe')

        cases = self.crawler.result.cases
        assert len(cases) == 1
        assert len(cases[0].charges) == 1

        charge = cases[0].charges[0]
        assert charge.name == 'Loading Zone'
        assert charge.statute == '29'
        assert charge.level == 'Violation Unclassified'
        assert charge.date == '09/04/2008'
        assert charge.disposition.ruling == 'Convicted'
        assert charge.disposition.date == '11/18/2008'