Example 1
    def test_feature_generation_list_to_json(self):
        """Round-trip a two-entry file list through featurization.

        Feeds the same processed cycler run twice (run ids 0 and 1),
        then reloads the first output and spot-checks a known feature
        value.
        """
        structure_path = os.path.join(TEST_FILE_DIR,
                                      PROCESSED_CYCLER_FILE)
        with ScratchDir('.'):
            os.environ['BEEP_PROCESSING_DIR'] = os.getcwd()

            # Request payload: the same structured run listed twice,
            # tagged with distinct run ids.
            payload = json.dumps({
                "mode": self.events_mode,
                "file_list": [structure_path, structure_path],
                'run_list': [0, 1]
            })

            result = json.loads(process_file_list_from_json(
                payload, processed_dir=os.getcwd()))

            # Output entries should at minimum be path strings.
            self.assertIsInstance(result['file_list'][-1], str)

            # Reload the first feature object and verify a known value.
            features = loadfn(result['file_list'][0])
            self.assertIsInstance(features, DeltaQFastCharge)
            self.assertEqual(
                features.X.loc[0, 'nominal_capacity_by_median'],
                1.0628421000000001)
Example 2
    def test_insufficient_data_file(self):
        """A run with too little data should be reported 'incomplete'.

        Also verifies that the workflow wrote a results summary file to
        the temp directory describing the failed featurization attempt.
        """
        run_path = os.path.join(
            TEST_FILE_DIR, self.processed_cycler_file_insuf)
        with ScratchDir("."):
            os.environ["BEEP_PROCESSING_DIR"] = os.getcwd()

            payload = json.dumps({
                "file_list": [run_path],
                "run_list": [1],
            })

            result = json.loads(process_file_list_from_json(
                payload, processed_dir=os.getcwd()))
            self.assertEqual(result["result_list"][0], "incomplete")
            self.assertEqual(
                result["message_list"][0]["comment"],
                "Insufficient or incorrect data for featurization",
            )

            # Workflow output: a JSON summary written to the temp dir.
            results_path = Path(tempfile.gettempdir()) / "results.json"
            self.assertTrue(results_path.exists())

            summary = json.loads(results_path.read_text())[0]

            self.assertEqual(result["file_list"][0],
                             summary["filename"])
            self.assertEqual(os.path.getsize(summary["filename"]),
                             summary["size"])
            self.assertEqual(1, summary["run_id"])
            self.assertEqual("featurizing", summary["action"])
            self.assertEqual("incomplete", summary["status"])
Example 3
    def test_feature_generation_list_to_json(self):
        """Featurize a truncated PreDiag structure file listed twice.

        Checks that the first output is a DeltaQFastCharge object and
        the last a DiagnosticProperties object, each with known values.
        """
        structure_path = os.path.join(
            TEST_FILE_DIR, 'PreDiag_000240_000227_truncated_structure.json')
        with ScratchDir('.'):
            os.environ['BEEP_PROCESSING_DIR'] = TEST_FILE_DIR

            # Request payload: same file twice, distinct run ids.
            payload = json.dumps({
                "mode": self.events_mode,
                "file_list": [structure_path, structure_path],
                'run_list': [0, 1]
            })

            result = json.loads(process_file_list_from_json(
                payload, processed_dir=os.getcwd()))

            # Output entries should at minimum be path strings.
            self.assertIsInstance(result['file_list'][-1], str)

            # First output: DeltaQFastCharge with a known feature value.
            features = loadfn(result['file_list'][0])
            self.assertIsInstance(features, DeltaQFastCharge)
            self.assertEqual(
                features.X.loc[0, 'nominal_capacity_by_median'],
                0.07114775279999999)

            # Last output: DiagnosticProperties with a known row.
            features = loadfn(result['file_list'][-1])
            self.assertIsInstance(features, DiagnosticProperties)
            self.assertListEqual(
                list(features.X.iloc[2, :]),
                [143, 0.9753520623934744, 'rpt_0.2C', 'discharge_energy'])
Example 4
    def test_feature_generation_list_to_json(self):
        """Featurize a truncated PreDiag structure file listed twice.

        Spot-checks a DeltaQFastCharge output and the final
        DiagnosticProperties output, then verifies the workflow summary
        file written to the temp directory.
        """
        structure_path = os.path.join(
            TEST_FILE_DIR, "PreDiag_000240_000227_truncated_structure.json")
        with ScratchDir("."):
            os.environ["BEEP_PROCESSING_DIR"] = TEST_FILE_DIR

            # Request payload: same file twice, distinct run ids.
            payload = json.dumps({
                "mode": self.events_mode,
                "file_list": [structure_path, structure_path],
                "run_list": [0, 1],
            })

            result = json.loads(process_file_list_from_json(
                payload, processed_dir=os.getcwd()))

            # Output entries should at minimum be path strings.
            self.assertIsInstance(result["file_list"][-1], str)

            # Index 4 is expected to hold a DeltaQFastCharge output
            # (several featurizer outputs are produced per input file).
            features = loadfn(result["file_list"][4])
            self.assertIsInstance(features, DeltaQFastCharge)
            self.assertEqual(
                features.X.loc[0, "nominal_capacity_by_median"],
                0.07114775279999999,
            )

            # Last output: DiagnosticProperties with a known row.
            features = loadfn(result["file_list"][-1])
            self.assertIsInstance(features, DiagnosticProperties)
            self.assertListEqual(
                list(features.X.iloc[2, :]),
                [
                    141, 0.9859837086597274, 91.17758004259996,
                    2.578137278917377, 'reset', 'discharge_energy'
                ],
            )

            # Workflow output: a JSON summary written to the temp dir.
            results_path = Path(tempfile.gettempdir()) / "results.json"
            self.assertTrue(results_path.exists())

            summary = json.loads(results_path.read_text())[0]

            self.assertEqual(result["file_list"][0], summary["filename"])
            self.assertEqual(os.path.getsize(summary["filename"]),
                             summary["size"])
            self.assertEqual(0, summary["run_id"])
            self.assertEqual("featurizing", summary["action"])
            self.assertEqual("success", summary["status"])
Example 5
    def test_insufficient_data_file(self):
        """A run with too little data should be reported 'incomplete'."""
        run_path = os.path.join(TEST_FILE_DIR,
                                processed_cycler_file_insuf)

        payload = json.dumps({
            "mode": self.events_mode,
            "file_list": [run_path],
            'run_list': [1]
        })

        result = json.loads(process_file_list_from_json(
            payload, processed_dir=TEST_FILE_DIR))
        self.assertEqual(result['result_list'][0], 'incomplete')
        self.assertEqual(result['message_list'][0]['comment'],
                         'Insufficient data for featurization')
Example 6
    def test_insufficient_data_file(self):
        """A run with too little data should be reported 'incomplete'."""
        run_path = os.path.join(TEST_FILE_DIR, PROCESSED_CYCLER_FILE_INSUF)
        with ScratchDir('.'):
            os.environ['BEEP_ROOT'] = os.getcwd()

            payload = json.dumps({
                "mode": self.events_mode,
                "file_list": [run_path],
                'run_list': [1]
            })

            result = json.loads(
                process_file_list_from_json(payload,
                                            processed_dir=os.getcwd()))
            self.assertEqual(result['result_list'][0], 'incomplete')
            self.assertEqual(result['message_list'][0]['comment'],
                             'Insufficient or incorrect data for featurization')
Example 7
    def test_python(self):
        """Python script for end to end test.

        Runs the full pipeline — collate -> validate -> structure ->
        featurize -> predict — injecting the test event mode and a
        sequential run_list between each stage, then checks the result
        files on disk.
        """
        # Copy / rename raw files.
        mapped = self._with_run_metadata(collate.process_files_json())

        # Validation
        validated = self._with_run_metadata(
            validate.validate_file_list_from_json(mapped))

        # Data structuring
        structured = self._with_run_metadata(
            structure.process_file_list_from_json(validated))

        # Featurization
        featurized = self._with_run_metadata(
            featurize.process_file_list_from_json(structured))

        # Prediction; its output is verified via the result files below,
        # so the returned JSON string is intentionally not kept.
        run_model.process_file_list_from_json(
            featurized, model_dir=MODEL_DIR)

        # Validate output files
        self._check_result_file_validity()

    def _with_run_metadata(self, json_string):
        """Return *json_string* with run metadata injected.

        Sets 'mode' to the test's event mode (run|test|events_off) and
        'run_list' to sequential ids, one per entry in 'file_list'.
        """
        payload = json.loads(json_string)
        payload['mode'] = self.events_mode
        payload['run_list'] = list(range(len(payload['file_list'])))
        return json.dumps(payload)
Example 8
    def test_features_on_list(self):
        """Smoke test: featurize each PredictionDiagnostics structure file.

        Any exception raised while processing a file fails the test; no
        per-file assertions are made on the output.
        """
        # NOTE(review): "PredictionDiagnostics_000163_000022_structure.json"
        # appears twice in this list — confirm whether that is intentional.
        files = [
            "PredictionDiagnostics_000102_0001B1_structure.json",
            "PredictionDiagnostics_000103_0001B3_structure.json",
            "PredictionDiagnostics_000114_00003C_structure.json",
            "PredictionDiagnostics_000117_00003E_structure.json",
            "PredictionDiagnostics_000120_000041_structure.json",
            "PredictionDiagnostics_000122_000043_structure.json",
            "PredictionDiagnostics_000124_000049_structure.json",
            "PredictionDiagnostics_000130_000044_structure.json",
            "PredictionDiagnostics_000133_00004D_structure (2).json",
            "PredictionDiagnostics_000136_00002D_structure (1).json",
            "PredictionDiagnostics_000139_000034_structure.json",
            "PredictionDiagnostics_000144_00002E_structure.json",
            "PredictionDiagnostics_000148_000038_structure.json",
            "PredictionDiagnostics_000150_00003B_structure.json",
            "PredictionDiagnostics_000156_000023_structure.json",
            "PredictionDiagnostics_000160_000251_structure.json",
            "PredictionDiagnostics_000163_000022_structure.json",
            "PredictionDiagnostics_000163_000022_structure.json",
            "PredictionDiagnostics_000164_000239_structure.json",
            "PredictionDiagnostics_000167_000255_structure.json",
            "PredictionDiagnostics_000168_000253_structure.json",
            "PredictionDiagnostics_000175_000247_structure.json",
            "PredictionDiagnostics_000178_00023B_structure.json",
            "PredictionDiagnostics_000181_00023A_structure.json",
            "PredictionDiagnostics_000184_000244_structure.json",
            "PredictionDiagnostics_000186_00024E_structure.json",
            "PredictionDiagnostics_000194_000242_structure.json",
        ]
        with ScratchDir("."):
            os.environ["BEEP_PROCESSING_DIR"] = TEST_FILE_DIR
            for filename in files:
                payload = json.dumps({
                    "file_list": [os.path.join(TEST_FILE_DIR, filename)],
                    "run_list": [0],
                })
                # Return value is not inspected; only a clean run matters.
                process_file_list_from_json(
                    payload, processed_dir=os.getcwd())
Example 9
    def test_raw_to_features(self):
        """Full pipeline from raw Maccor file to features.

        Downloads the raw cycler file from S3, structures it, runs
        featurization on the structured output, and spot-checks one
        fitted parameter value.
        """
        os.environ["BEEP_PROCESSING_DIR"] = TEST_FILE_DIR

        # Fetch the raw input file used by the pipeline.
        download_s3_object(bucket=self.maccor_file_w_parameters_s3["bucket"],
                           key=self.maccor_file_w_parameters_s3["key"],
                           destination_path=self.maccor_file_w_parameters)

        with ScratchDir("."):
            os.environ["BEEP_PROCESSING_DIR"] = TEST_FILE_DIR
            dp = MaccorDatapath.from_file(self.maccor_file_w_parameters)
            dp.autostructure()
            processed_run_path = os.path.join(TEST_FILE_DIR,
                                              "processed_diagnostic.json")
            # Persist the structured datapath so featurization can load it.
            dumpfn(dp, processed_run_path)

            # Create dummy json obj
            json_obj = {
                "file_list": [processed_run_path],
                "run_list": [0],
            }
            json_string = json.dumps(json_obj)

            newjsonpaths = process_file_list_from_json(
                json_string, processed_dir=os.getcwd())

            reloaded = json.loads(newjsonpaths)

            # All seven featurizers should succeed on this file.
            self.assertEqual(reloaded['result_list'], ['success'] * 7)
            rpt_df = loadfn(reloaded['file_list'][0])
            self.assertEqual(
                np.round(rpt_df.X['m0_Amp_rpt_0.2C_1'].iloc[0], 6), 0.867371)
Example 10
    def test_feature_generation_list_to_json(self):
        """Round-trip a two-entry file list through featurization.

        Reloads the first output as a DegradationPredictor and checks
        its nominal capacity against a known value.
        """
        run_path = os.path.join(TEST_FILE_DIR, processed_cycler_file)

        # Request payload: same run twice, distinct run ids.
        payload = json.dumps({
            "mode": self.events_mode,
            "file_list": [run_path, run_path],
            'run_list': [0, 1]
        })

        result = json.loads(process_file_list_from_json(
            payload, processed_dir=TEST_FILE_DIR))

        # Output entries should at minimum be path strings.
        self.assertIsInstance(result['file_list'][-1], str)

        # Reload the first output and verify a known attribute value.
        predictor = loadfn(result['file_list'][0])
        self.assertIsInstance(predictor, DegradationPredictor)
        self.assertEqual(predictor.nominal_capacity,
                         1.0628421000000001)