    def setUp(self):
        pacer_session = PacerSession()

        if pacer_credentials_are_defined():
            # Use a real, authenticated session when credentials are
            # available (CAND chosen at random as the test court).
            pacer_session = get_pacer_session()
            pacer_session.login()

        with open(os.path.join(JURISCRAPER_ROOT, "pacer/courts.json")) as j:
            self.courts = get_courts_from_json(json.load(j))

        path = os.path.join(TESTS_ROOT_EXAMPLES_PACER,
                            "dates/valid_free_opinion_dates.json")
        with open(path) as j:
            self.valid_dates = json.load(j)

        self.reports = {}
        for court in self.courts:
            court_id = get_court_id_from_url(court["court_link"])
            self.reports[court_id] = FreeOpinionReport(court_id, pacer_session)
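setUp relies on two helpers that aren't shown here. A minimal sketch of plausible implementations, assuming credentials come from environment variables; the variable names and function bodies below are assumptions, not juriscraper's actual code:

import os

def pacer_credentials_are_defined():
    # Hypothetical: credentials count as defined when both env vars are set.
    return bool(os.environ.get("PACER_USERNAME") and
                os.environ.get("PACER_PASSWORD"))

def get_pacer_session():
    # Hypothetical: build an authenticated session from those env vars.
    return PacerSession(
        username=os.environ["PACER_USERNAME"],
        password=os.environ["PACER_PASSWORD"],
    )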

    def run_parsers_on_path(self, path):
        """Test all the parsers on a given local path

        :param path: The path where you can find the files
        """
        file_paths = glob.glob(path)
        file_paths.sort()
        path_max_len = max(len(path) for path in file_paths) + 2
        for i, path in enumerate(file_paths):
            sys.stdout.write("%s. Doing %s" % (i, path.ljust(path_max_len)))
            t1 = time.time()
            dirname, filename = os.path.split(path)
            filename_sans_ext = filename.split(".")[0]
            json_path = os.path.join(dirname,
                                     "%s_result.json" % filename_sans_ext)

            lasc = LASCSearch(session=None)
            with open(path, "rb") as f:
                data = json.load(f)
                clean_data = lasc._parse_case_data(data)

            if not os.path.isfile(json_path):
                # First time testing this docket
                bar = "*" * 50
                print("\n\n%s\nJSON FILE DID NOT EXIST. CREATING IT AT:"
                      "\n\n  %s\n\n"
                      "Please test the data in this file before assuming "
                      "everything worked.\n%s\n" % (bar, json_path, bar))
                with open(json_path, "w") as f:
                    json.dump(clean_data, f, indent=2, sort_keys=True)
                continue

            with open(json_path) as f:
                j = json.load(f)
                self.assertEqual(j, clean_data)

            t2 = time.time()
            duration = t2 - t1
            warn_or_crash_slow_parser(duration, max_duration=1)
            sys.stdout.write("✓ - %0.1fs\n" % (t2 - t1))
Example #3
def check_project_file(pfile, create=False, date=False):
    """Load a JSON project file, creating a default skeleton if asked."""
    if os.path.isfile(pfile):
        with open(pfile, 'r') as f:
            project = json.load(f)
    else:
        if create:
            project = {'name': '',
                       'invoice': '001',
                       'rate': 0,
                       'start_date': date or datetime.date.today(),
                       'work_log': []}
        else:
            sys.exit('No {0} found.'.format(pfile))
    return project
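Typical usage (the file name is illustrative). Note that the freshly created skeleton holds a datetime.date, which json.dump cannot serialize, so it needs converting before the project is written back out:

project = check_project_file('worklog.json', create=True)
project['name'] = 'ACME consulting'
project['start_date'] = str(project['start_date'])  # make the date JSON-safe
with open('worklog.json', 'w') as f:
    json.dump(project, f, indent=2)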
    def parse_files(self, path_root, file_ext, test_class):
        """Can we do a simple query and parse?"""
        paths = []
        for root, dirnames, filenames in os.walk(path_root):
            for filename in fnmatch.filter(filenames, file_ext):
                paths.append(os.path.join(root, filename))
        paths.sort()
        path_max_len = max(len(path) for path in paths) + 2
        for i, path in enumerate(paths):
            t1 = time.time()
            sys.stdout.write("%s. Doing %s" % (i, path.ljust(path_max_len)))
            dirname, filename = os.path.split(path)
            filename_sans_ext = filename.split(".")[0]
            json_path = os.path.join(dirname, "%s.json" % filename_sans_ext)

            court = filename_sans_ext.split("_")[0]
            report = test_class(court)
            with open(path, "r") as f:
                report._parse_text(f.read())

            # Does the metadata property work too? It usually, but not
            # always, gets called by report.data.
            try:
                _ = report.metadata
            except AttributeError:
                # Some reports don't have this method.
                pass
            data = report.data
            if not os.path.exists(json_path):
                with open(json_path, "w") as f:
                    print("Creating new file at %s" % json_path)
                    json.dump(data, f, indent=2, sort_keys=True)
                continue
            # Round-trip the parsed data through JSON so Python-only types
            # (tuples, OrderedDicts) normalize to what json.load returns.
            data = json.loads(json.dumps(data, sort_keys=True))
            with open(json_path) as f:
                j = json.load(f)
                with self.subTest("Parsing PACER",
                                  file=filename,
                                  klass=test_class):
                    self.assertEqual(j, data)
            t2 = time.time()
            duration = t2 - t1
            warn_or_crash_slow_parser(duration, max_duration=2)

            sys.stdout.write("✓\n")

    def run_parsers_on_path(
        self,
        path_root,
        required_fields=("date_filed", "case_name", "docket_number"),
    ):
        """Test all the parsers, faking the network query."""
        paths = []
        for root, dirnames, filenames in os.walk(path_root):
            for filename in fnmatch.filter(filenames, "*.html"):
                paths.append(os.path.join(root, filename))
        paths.sort()
        path_max_len = max(len(path) for path in paths) + 2
        for i, path in enumerate(paths):
            sys.stdout.write("%s. Doing %s" % (i, path.ljust(path_max_len)))
            t1 = time.time()
            dirname, filename = os.path.split(path)
            filename_sans_ext = filename.split(".")[0]
            json_path = os.path.join(dirname, "%s.json" % filename_sans_ext)
            court = filename_sans_ext.split("_")[0]

            report = DocketReport(court)
            with open(path, "rb") as f:
                report._parse_text(f.read().decode("utf-8"))
            data = report.data

            if data != {}:
                # If the docket is a valid docket, make sure some required
                # fields are populated.
                for field in required_fields:
                    self.assertTrue(
                        data[field],
                        msg="Unable to find truthy value for field %s" % field,
                    )

                self.assertEqual(data["court_id"], court)

                # Party-specific tests...
                for party in data["parties"]:
                    self.assertTrue(
                        party.get("name", False),
                        msg="Every party must have a name attribute. Did not "
                        "get a value for:\n\n%s" % party,
                    )
                    # Protect against malformed adversary proceeding cases
                    # that don't parse properly. See: cacb, 2:08-ap-01570-BB
                    self.assertNotIn("----", party["name"])

            if not os.path.isfile(json_path):
                bar = "*" * 50
                print(
                    "\n\n%s\nJSON FILE DID NOT EXIST. CREATING IT AT:"
                    "\n\n  %s\n\n"
                    "Please test the data in this file before assuming "
                    "everything worked.\n%s\n" % (bar, json_path, bar)
                )
                with open(json_path, "w") as f:
                    json.dump(data, f, indent=2, sort_keys=True)
                continue

            with open(json_path) as f:
                j = json.load(f)
                if j != {}:
                    # Compare docket entries and parties first, for easier
                    # debugging, then compare whole objects to be sure.
                    self.assertEqual(
                        j["docket_entries"], data["docket_entries"]
                    )
                    self.assertEqual(j["parties"], data["parties"])
                self.assertEqual(j, data)
            t2 = time.time()

            duration = t2 - t1
            warn_or_crash_slow_parser(duration, max_duration=1)
            sys.stdout.write("✓ - %0.1fs\n" % (t2 - t1))
Example #6
    def test_dump_datetime_roundtrips(self):
        orig_dict = dict(created_at=datetime.datetime(2011, 1, 1))
        fileobj = StringIO()
        jsondate3.dump(orig_dict, fileobj)
        fileobj.seek(0)
        self.assertEqual(orig_dict, jsondate3.load(fileobj))
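For reference, jsondate3 makes this round-trip work by dumping dates and datetimes as ISO-8601 strings and reviving matching strings on load; a quick look at the wire format (the printed value is what ISO-8601 output would look like):

fileobj = StringIO()
jsondate3.dump({'created_at': datetime.date(2011, 1, 1)}, fileobj)
print(fileobj.getvalue())  # e.g. '{"created_at": "2011-01-01"}'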