# Example #1
 def test_create_web3_client_private_key(self, method_call_mock):
     """
     Test that private key is instantiated correctly when creating web3 client
     """
     expected_key_hex = "0xc2fd94c5216e754d3eb8f4f34017120fef318c50780ce408b54db575b120229f"
     provider = self.config_utils.create_eth_provider(
         "EthereumTesterProvider", {})
     keystore_file = io_utils.fetch_file(resource_uri("mykey.json"))
     web3_client, account, decrypted_key = self.config_utils.create_web3_client(
         provider, "abc123ropsten", keystore_file, 2)
     self.assertEqual(expected_key_hex, Web3.toHex(decrypted_key),
                      "Private key was not decrypted correctly")
     # None ETH provider will make this fail
     try:
         web3_client, account, decrypted_key = self.config_utils.create_web3_client(
             provider, "incorrect-passphrase",
             io_utils.fetch_file(resource_uri("mykey.json")), 2)
     except ConfigurationException as e:
         # Expected path: decrypting with a wrong passphrase must fail.
         self.assertTrue("MAC mismatch" in str(e),
                         "Expected the MAC mismatch exception")
     else:
         self.fail(
             "No exception was thrown even though the private key isn't correct"
         )
    def test_compress_and_decode_full_report(self):
        """
        Ensures that a typical, complete report is compressed properly.
        """
        full_report = load_json(
            fetch_file(resource_uri("reports/DAOBug.json")))
        # Round-trip: compress and immediately decode the same report.
        round_tripped = self.decode_report(self.compress_report(full_report))
        expected = load_json(
            fetch_file(resource_uri("reports/DAOBugDecompressed.json")))
        self.__compare_json(round_tripped, expected)
 def setUp(self):
     """
     Sets up fresh database for each test.
     """
     schema_script = fetch_file(resource_uri('evt/createdb.sql', is_main=True))
     self.worker = Sqlite3Worker(TestSqlLite3Worker.db_file)
     self.worker.execute_script(schema_script)
# Example #4
    def test_timeout_on_complex_file(self):
        """
        Tests if the analyzer throttles the execution and generates error message.

        The per-analyzer timeouts are temporarily lowered so that auditing a
        complex contract (kyber.sol) exceeds them; the resulting report and
        database row are then asserted to reflect the error audit state.
        """

        # rewiring configs: remember each analyzer's timeout, then lower it
        # so the audit of the complex contract is guaranteed to time out
        original_timeouts = []
        for i in range(0, len(self.__config.analyzers)):
            # Sanity-check that the wrapper's (name-mangled) timeout matches
            # the value recorded in the analyzers' config before tampering.
            analyzer_name = self.__config.analyzers[i].wrapper.analyzer_name
            self.assertEqual(
                self.__audit_node.config.analyzers[i].wrapper._Wrapper__timeout_sec,
                self.__audit_node.config._Config__analyzers_config[i][analyzer_name][
                    'timeout_sec'])
            original_timeouts.append(
                self.__audit_node.config.analyzers[i].wrapper._Wrapper__timeout_sec)
            # NOTE(review): the wrapper gets 6s while the config records 3s —
            # presumably deliberate to exercise both limits; confirm.
            self.__audit_node.config.analyzers[i].wrapper._Wrapper__timeout_sec = 6
            self.__audit_node.config._Config__analyzers_config[i][analyzer_name][
                'timeout_sec'] = 3

        contract = resource_uri("kyber.sol")
        self.__request_audit(contract, self.__PRICE)

        # Blocks until the node submits its report.
        self.__evt_wait_loop(self.__submitReport_filter)

        self.__send_done_message(self.__REQUEST_ID)
        self.__assert_audit_request_report(self.__REQUEST_ID,
                                           report_file_path="reports/kyber.json")
        # setting back the configurations saved above
        for i in range(0, len(original_timeouts)):
            self.__audit_node.config.analyzers[i].wrapper._Wrapper__timeout_sec = \
                original_timeouts[i]
            analyzer_name = self.__config.analyzers[i].wrapper.analyzer_name
            self.__audit_node.config._Config__analyzers_config[i][
                analyzer_name]['timeout_sec'] = original_timeouts[i]

        compressed_report = self.__compress_report("reports/kyber.json")

        # Asserts the database content; "IGNORE" marks fields whose values
        # are run-dependent and therefore excluded from the comparison.
        expected_row = {"request_id": 1,
                        "requestor": self.__config.account,
                        "contract_uri": contract,
                        "evt_name": "LogAuditAssigned",
                        'assigned_block_nbr': "IGNORE",
                        "submission_block_nbr": "IGNORE",
                        "fk_status": "DN",
                        "fk_type": "AU",
                        "price": str(self.__PRICE),
                        "status_info": "Report successfully submitted",
                        "tx_hash": "IGNORE",
                        "submission_attempts": 1,
                        "is_persisted": 1,
                        "audit_uri": "IGNORE",
                        "audit_hash": "IGNORE",
                        "audit_state": self.__AUDIT_STATE_ERROR,
                        "full_report": "IGNORE",
                        "compressed_report": compressed_report
                        }
        self.assert_event_table_contains(self.__config, [expected_row],
                                            ignore_keys=[key for key in expected_row if expected_row[key] == "IGNORE"])
# Example #5
    def test_successful_contract_audit_request_dockerhub_fail_multiple_analyzers(self):
        """
        Tests that a report is generated when the dockerhub fails.
        """
        # Replace analyzers with a single dockerhub fail analyzer
        faulty_wrapper = Wrapper(
            wrappers_dir="{0}/tests/resources/wrappers".format(project_root()),
            analyzer_name="dockerhub_fail",
            args="",
            storage_dir="/tmp/{}{}".format(time(), random()),
            timeout_sec=60,
            prefetch=False
        )
        analyzer = Analyzer(faulty_wrapper)
        # Bug fix: the previous code bound the *same* list objects
        # (original_analyzers = config._Config__analyzers) and then mutated
        # index 1 — the saved reference aliased the mutated list, so the
        # later "restore" was a no-op. Save the replaced entries instead.
        original_analyzer = self.__audit_node.config._Config__analyzers[1]
        original_analyzer_config = self.__audit_node.config._Config__analyzers_config[1]
        self.__audit_node.config._Config__analyzers[1] = analyzer
        # NOTE(review): storing the Analyzer object as the config entry looks
        # inconsistent with the dict-of-settings shape used elsewhere — confirm.
        self.__audit_node.config._Config__analyzers_config[1] = {"dockerhub_fail": analyzer}

        # since we're mocking the smart contract, we should explicitly call its internals
        buggy_contract = resource_uri("DAOBug.sol")
        self.__request_audit(buggy_contract, self.__PRICE)

        self.__evt_wait_loop(self.__submitReport_filter)

        # NOTE: if the audit node later requires the stubbed fields, this will have to change a bit
        self.__send_done_message(self.__REQUEST_ID)
        self.__assert_audit_request_report(self.__REQUEST_ID,
                                           report_file_path="reports/DockerhubFailAllAnalyzers.json")
        self.__assert_all_analyzers(self.__REQUEST_ID)

        # set the replaced entries back in place
        self.__audit_node.config._Config__analyzers[1] = original_analyzer
        self.__audit_node.config._Config__analyzers_config[1] = original_analyzer_config

        compressed_report = self.__compress_report("reports/DockerhubFailAllAnalyzers.json")

        # asserting the database content; "IGNORE" marks run-dependent
        # fields that are excluded from the comparison below
        expected_row = {"request_id": 1,
                        "requestor": self.__config.account,
                        "contract_uri": buggy_contract,
                        "evt_name": "LogAuditAssigned",
                        'assigned_block_nbr': "IGNORE",
                        "submission_block_nbr": "IGNORE",
                        "fk_status": "DN",
                        "fk_type": "AU",
                        "price": str(self.__PRICE),
                        "status_info": "Report successfully submitted",
                        "tx_hash": "IGNORE",
                        "submission_attempts": 1,
                        "is_persisted": 1,
                        "audit_uri": "IGNORE",
                        "audit_hash": "IGNORE",
                        "audit_state": 5,
                        "full_report": "IGNORE",
                        "compressed_report": compressed_report
                        }
        self.assert_event_table_contains(self.__config, [expected_row],
                                          ignore_keys=[key for key in expected_row if expected_row[key] == "IGNORE"])
    def __copy_yaml_setup(self):
        """
        Loads the test_config.yaml fixture and dumps it into a fresh
        temporary file.

        Returns:
            (tmp, cfg): the NamedTemporaryFile holding the dumped YAML and
            the parsed configuration dict. The file is created with
            delete=False, so the caller is responsible for removing it.
        """
        test_config = fetch_file(resource_uri('test_config.yaml'))
        with open(test_config) as yaml_file:
            # safe_load avoids arbitrary object construction; plain
            # yaml.load without an explicit Loader is deprecated and unsafe.
            cfg = yaml.safe_load(yaml_file)
        tmp = NamedTemporaryFile(mode='w+t', delete=False)
        yaml.dump(cfg, tmp, default_flow_style=False)

        return tmp, cfg
 def test_load_config(self):
     """
     Tests that utils are able to load a configuration dictionary from yaml file.
     """
     loaded = self.config_utils.load_config(
         resource_uri("test_config.yaml"), "dev")
     self.assertIsNotNone(loaded, "Configuration dictionary was not loaded")
     self.assertTrue("evt_db_path" in loaded,
                     "Key evt_db_path is missing from loaded data")
 def test_encode_decode_idempotence(self):
     """
     Ensures that encode(decode(report)) == encode(decode(encode(decode(report))))
     """
     original = load_json(fetch_file(resource_uri("reports/DAOBug.json")))
     # One full round-trip, then a second one on its own output.
     once = self.decode_report(self.compress_report(original))
     twice = self.decode_report(self.compress_report(once))
     self.__compare_json(once, twice)
    def test_create_set_from_compressed_report(self):
        """
        Tests whether vulnerability sets for compressed reports match those
        from their corresponding uncompressed ones, for every report fixture
        in the reports/ resource directory.
        """
        for report in os.listdir(fetch_file(resource_uri("reports/"))):
            # Bug fix: the loop variable was previously ignored and every
            # iteration re-loaded DAOBug.json; exercise each listed report.
            uncompressed_report = load_json(
                fetch_file(resource_uri("reports/" + report)))
            expected_set = VulnerabilitiesSet.from_uncompressed_report(
                uncompressed_report)

            request_id = uncompressed_report['request_id']

            # Compress then decompress; the vulnerability set must survive
            # the round trip unchanged.
            encoder = ReportEncoder()
            compressed_report = encoder.compress_report(
                uncompressed_report, request_id)
            decompressed_report = encoder.decode_report(
                compressed_report, request_id)
            found_set = VulnerabilitiesSet.from_uncompressed_report(
                decompressed_report)

            # assertEquals is a deprecated alias of assertEqual
            self.assertEqual(expected_set, found_set)
    def test_inserting_duplicates_events(self):
        """
        Tests that the worker does not propagate raised exception when two records with the same
        primary key are inserted in the database. Also tests that if such an insert is invoked, the
        existing values remain the same.
        """
        # First insert: must succeed without any warning being logged,
        # either by the Sqlite3Worker or by the event pool manager.
        with mock.patch.object(self.worker,
                               'logger') as sql3liteworker_logger_mock:
            with mock.patch('evt.evt_pool_manager.logger'
                            ) as evt_pool_manager_logger_mock:
                self.assertFalse(sql3liteworker_logger_mock.warning.called)

                self.worker.execute_script(
                    fetch_file(
                        resource_uri('evt/add_evt_to_be_assigned.sql',
                                     is_main=True)),
                    values=(1, 'x', 'x', 'x', 10, 'x', 'x', 12),
                    error_handler=EventPoolManager.insert_error_handler)

                self.assertFalse(sql3liteworker_logger_mock.warning.called)
                self.assertFalse(evt_pool_manager_logger_mock.warning.called)

        # Second insert with the very same primary key: the error handler is
        # expected to downgrade the constraint violation to a warning — no
        # error may be logged and no exception may propagate.
        with mock.patch.object(self.worker,
                               'logger') as sql3liteworker_logger_mock:
            with mock.patch('evt.evt_pool_manager.logger'
                            ) as evt_pool_manager_logger_mock:
                self.worker.execute_script(
                    fetch_file(
                        resource_uri('evt/add_evt_to_be_assigned.sql',
                                     is_main=True)),
                    values=(1, 'x', 'x', 'x', 10, 'x', 'x', 12),
                    error_handler=EventPoolManager.insert_error_handler)
                # Ensure that threads were merged before assertions
                self.worker.close()

                self.assertFalse(sql3liteworker_logger_mock.error.called)
                self.assertFalse(evt_pool_manager_logger_mock.error.called)

                self.assertTrue(evt_pool_manager_logger_mock.warning.called)
                # The fourth positional warning argument is expected to carry
                # the raised apsw.ConstraintError instance.
                args, _ = evt_pool_manager_logger_mock.warning.call_args
                self.assertTrue(isinstance(args[3], apsw.ConstraintError))
    def test_old_pragma_with_caret(self):
        """
        Tests whether no exception is raised upon calling the analyzer
        with a contract locking an old version of Solidity with caret.
        """

        old_contract = fetch_file(resource_uri("DAOBugOld-Caret.sol"))
        analyzer = TestAnalyzerSecurify.__new_analyzer()
        request_id = 15
        report = analyzer.check(old_contract, request_id,
                                "DAOBugOld-Caret.sol")

        # Bug fix: assertTrue(x, msg) only checks truthiness of x — the
        # second argument is the failure message, so the status was never
        # actually compared to 'success'.
        self.assertEqual('success', report['status'])
        # assertEquals is a deprecated alias of assertEqual
        self.assertEqual(6, len(report['potential_vulnerabilities']))
# Example #12
 def compare_json(self,
                  audit_file,
                  report_file_path,
                  json_loaded=False,
                  ignore_id=False):
     """
     Compares an audit report against an expected report fixture, ignoring
     environment-dependent fields (accounts, paths, timestamps, traces).
     """
     actual_json = audit_file if json_loaded else load_json(audit_file)
     expected_json = load_json(fetch_file(resource_uri(report_file_path)))
     if ignore_id:
         expected_json['request_id'] = actual_json['request_id']
     # Top-level fields that legitimately differ between runs.
     excluded_paths = {
         "root['contract_uri']",
         "root['version']",
         # There is no keystore used for testing. Accounts
         # are dynamic and therefore cannot be compared
         "root['auditor']",
         "root['requestor']",
         # Path is different depending on whether running inside Docker
         "root['timestamp']",
         "root['start_time']",
         "root['end_time']",
     }
     # Per-analyzer fields that differ between runs, for the three
     # analyzer report entries.
     per_analyzer_suffixes = (
         "['analyzer']['command']",
         "['coverages'][0]['file']",
         "['potential_vulnerabilities'][0]['file']",
         "['start_time']",
         "['end_time']",
         # Once scripts are either executed or skipped. The traces at position 1 differ.
         "['trace']",
     )
     for index in range(3):
         for suffix in per_analyzer_suffixes:
             excluded_paths.add(
                 "root['analyzers_reports'][{0}]{1}".format(index, suffix))
     diff = DeepDiff(actual_json, expected_json, exclude_paths=excluded_paths)
     pprint(diff)
     self.assertEqual(diff, {})
     self.assertEqual(ntpath.basename(actual_json['contract_uri']),
                      ntpath.basename(expected_json['contract_uri']))
# Example #13
 def test_analyzer_produces_metadata_for_errors(self):
     """
     Tests that analyzers produce their metadata even when failure occurs
     """
     contract_uri = resource_uri("BasicToken.sol")
     contract_file = fetch_file(contract_uri)
     # directly calling this function to avoid compilation checks;
     # this will cause error states for the analyzers
     report = self.__thread.get_audit_report_from_analyzers(
         contract_file, "0x7E5F4552091A69125d5DfCb7b8C2659029395Bdf",
         contract_uri, 1)
     self.compare_json(report,
                       "reports/BasicTokenErrorWithMetadata.json",
                       json_loaded=True)
# Example #14
    def test_old_pragma(self):
        """
        Tests whether an exception is raised upon calling the analyzer
        with a contract locking an old version of Solidity.
        """

        old_contract = fetch_file(resource_uri("DAOBugOld.sol"))
        analyzer = TestAnalyzerOyente.__new_analyzer()
        request_id = 15
        report = analyzer.check(old_contract, request_id, "DAOBugOld.sol")

        # Bug fix: assertTrue(x, msg) only checks truthiness of x — the
        # second argument is the failure message, so neither the status nor
        # the error count was actually compared. Use assertEqual instead.
        self.assertEqual('error', report['status'])
        self.assertEqual(1, len(report['errors']))
        self.assertTrue("Source file requires different compiler version" in
                        report['errors'][0])
    def test_successful_empty_contract_audit_request(self):
        """
        Tests the entire flow of a successful audit request, from a request
        to the production of a report and its submission.

        The target contract is empty (Empty.sol); the report fixture and
        the resulting database row are asserted at the end of the flow.
        """
        # since we're mocking the smart contract, we should explicitly call its internals
        empty_contract = resource_uri("Empty.sol")
        self.__request_audit(empty_contract, self.__PRICE)

        # If the report is not submitted, this should timeout the test
        self.__evt_wait_loop(self.__submitReport_filter)

        # NOTE: if the audit node later requires the stubbed fields, this will have to change a bit
        self.__send_done_message(self.__REQUEST_ID)

        self.__assert_audit_request_report(
            self.__REQUEST_ID, report_file_path="reports/Empty.json")
        self.__assert_all_analyzers(self.__REQUEST_ID)

        compressed_report = TestQSPAuditNode.__compress_report(
            "reports/Empty.json")

        # asserting the database content; "IGNORE" marks run-dependent
        # fields that are excluded from the comparison below
        expected_row = {
            "request_id": 1,
            "requestor": self.__config.account,
            "contract_uri": empty_contract,
            "evt_name": "LogAuditAssigned",
            'assigned_block_nbr': "IGNORE",
            "submission_block_nbr": "IGNORE",
            "fk_status": "DN",
            "fk_type": "AU",
            "price": str(self.__PRICE),
            "status_info": "Report successfully submitted",
            "tx_hash": "IGNORE",
            "submission_attempts": 1,
            "is_persisted": 1,
            "audit_uri": "IGNORE",
            "audit_hash": "IGNORE",
            # NOTE(review): a "successful" empty-contract audit ends in the
            # error audit state — presumably because there is nothing to
            # analyze; confirm against the node's state machine.
            "audit_state": self.__AUDIT_STATE_ERROR,
            "full_report": "IGNORE",
            "compressed_report": compressed_report
        }
        self.assert_event_table_contains(self.__config, [expected_row],
                                         ignore_keys=[
                                             key for key in expected_row
                                             if expected_row[key] == "IGNORE"
                                         ])
 def test_load_config(self):
     """
     Loads the dev test configuration and checks that every expected
     attribute is populated with the expected value.
     """
     config = ConfigFactory.create_from_file(
         resource_uri("test_config.yaml"), "dev",
         validate_contract_settings=False)
     # All of these must have been materialized by the factory.
     for attribute in (config.eth_provider, config.web3_client,
                       config.account, config.analyzers,
                       config.upload_provider,
                       config.metric_collection_destination_endpoint):
         self.assertIsNotNone(attribute)
     self.assertEqual(5, config.min_price_in_qsp)
     self.assertEqual(0, config.gas_price_wei)
     self.assertEqual(5, config.evt_polling)
     self.assertEqual(2, len(config.analyzers))
     self.assertEqual(5, config.start_n_blocks_in_the_past)
     self.assertEqual(1, config.block_discard_on_restart)
     self.assertFalse(config.enable_police_audit_polling)
# Example #17
 def test_initialize(self):
     """
     Initializes log streaming from the dev test configuration and checks
     the resulting module state; re-initializing without force must raise.
     """
     config = ConfigFactory.create_from_file(
         resource_uri("test_config.yaml"), "dev",
         validate_contract_settings=False)
     streaming_config = config_value(config, "/logging/streaming", {})
     log_streaming.initialize("account", streaming_config, force=True)
     self.assertEqual(get_config(), {})
     self.assertEqual(get_account(), "account")
     self.assertEqual(get_loggers(), {})
     try:
         log_streaming.initialize("account", {})
         self.fail("An exception was expected")
     except Exception:
         # expected: a second initialization without force must fail
         pass
# Example #18
    def test_old_pragma(self):
        """
        Tests whether an exception is raised upon calling the analyzer
        with a contract locking an old version of Solidity.
        """

        old_contract = fetch_file(resource_uri("DAOBugOld.sol"))
        analyzer = TestAnalyzerMythril.__new_analyzer()
        request_id = 15
        report = analyzer.check(old_contract, request_id, "DAOBugOld.sol")
        # Bug fix: assertTrue(x, msg) treats its second argument as the
        # failure message, so the status was never compared to 'error'.
        self.assertEqual('error', report['status'])

        self.assertTrue(len(report['errors']) > 0)
        # assertEquals is a deprecated alias of assertEqual
        self.assertEqual(12, len(report['trace']))
        self.assertTrue(
            "Error: Source file requires different compiler version" in
            ''.join(err + '\n' for err in report['errors']))
    def test_old_pragma(self):
        """
        Tests whether an exception is raised upon calling the analyzer
        with a contract locking an old version of Solidity.
        """

        old_contract = fetch_file(resource_uri("DAOBugOld.sol"))
        analyzer = TestAnalyzerSecurify.__new_analyzer()
        request_id = 15
        report = analyzer.check(old_contract, request_id, "DAOBugOld.sol")
        # Bug fix: assertTrue(x, msg) treats its second argument as the
        # failure message, so the status was never compared to 'error'.
        self.assertEqual('error', report['status'])

        self.assertTrue(len(report['errors']) > 0)
        # assertEquals is a deprecated alias of assertEqual
        self.assertEqual(11, len(report['trace']))
        self.assertTrue(
            "ch.securify.CompilationHelpers.compileContracts" in ''.join(
                err + '\n' for err in report['errors']))
    def test_target_contract_in_non_raw_text_file(self):
        """
        Tests the entire flow of an audit request of a non-raw text file contract (e.g., HTML), from
        a request to the production of a report and its submission.
        """
        buggy_contract = resource_uri("DappBinWallet.sol")
        self.__request_audit(buggy_contract, self.__PRICE)

        # Blocks until the node submits its report (or the test times out).
        self.__evt_wait_loop(self.__submitReport_filter)

        # NOTE: if the audit node later requires the stubbed fields, this will have to change a bit
        self.__send_done_message(self.__REQUEST_ID)

        self.__assert_audit_request_report(
            self.__REQUEST_ID, report_file_path="reports/DappBinWallet.json")

        compressed_report = TestQSPAuditNode.__compress_report(
            "reports/DappBinWallet.json")

        # Asserts the database content; "IGNORE" marks run-dependent fields
        # that are excluded from the comparison below.
        expected_row = {
            "request_id": 1,
            "requestor": self.__config.account,
            "contract_uri": buggy_contract,
            "evt_name": "LogAuditAssigned",
            'assigned_block_nbr': "IGNORE",
            "submission_block_nbr": "IGNORE",
            "fk_status": "DN",
            "fk_type": "AU",
            "price": str(self.__PRICE),
            "status_info": "Report successfully submitted",
            "tx_hash": "IGNORE",
            "submission_attempts": 1,
            "is_persisted": 1,
            "audit_uri": "IGNORE",
            "audit_hash": "IGNORE",
            # NOTE(review): a non-raw text contract is expected to land in
            # the error audit state — confirm against the node's state machine.
            "audit_state": self.__AUDIT_STATE_ERROR,
            "full_report": "IGNORE",
            "compressed_report": compressed_report
        }
        self.assert_event_table_contains(self.__config, [expected_row],
                                         ignore_keys=[
                                             key for key in expected_row
                                             if expected_row[key] == "IGNORE"
                                         ])
# Example #21
    def test_old_pragma_with_caret(self):
        """
        Tests whether an exception is raised upon calling the analyzer with a contract locking an
        old version of Solidity. This SHOULD invoke dockerhub fail.
        """

        old_contract = fetch_file(resource_uri("DAOBugOld-Caret.sol"))

        analyzer = TestAnalyzerDockerhubFail.__new_analyzer()
        request_id = 15
        report = analyzer.check(old_contract, request_id,
                                "DAOBugOld-Caret.sol")

        # Bug fix: assertTrue(x, msg) treats its second argument as the
        # failure message, so the status was never compared to 'error'.
        self.assertEqual('error', report['status'])
        # assertEquals is a deprecated alias of assertEqual
        self.assertEqual(2, len(report['trace']))
        self.assertEqual(1, len(report['errors']))
        msg = "Error response from daemon: pull access denied for qspprotocol/" \
              "does-not-exist-0.4.25, repository does not exist or may require 'docker login'\n"
        self.assertEqual(msg, report['errors'][0])
    def test_inject_token_auth(self):
        """
        Checks that an auth token passed to the factory is injected into the
        endpoint URI of the resulting configuration.
        """
        auth_token = "abc123456"
        target_env = "dev"
        expected_endpoint = "https://test.com/?token={0}".format(auth_token)

        # Sets the dictionary to be returned by a call to load_config
        config_yaml = load_yaml(
            fetch_file(resource_uri("test_config_with_auth_token.yaml")))
        dummy_utils = ConfigUtilsDummy({'load_config': config_yaml[target_env]})

        config = ConfigFactory.create_from_file(
            environment=target_env,
            config_file_uri="some dummy uri",
            auth_token=auth_token,
            validate_contract_settings=False,
            config_utils=dummy_utils,
        )
        self.assertEqual(config.auth_token, auth_token)
        self.assertEqual(config.eth_provider_args['endpoint_uri'],
                         expected_endpoint)
    def test_report_creation(self):
        """
        Tests whether a report is created upon calling the analyzer
        on a buggy contract
        """
        analyzer = TestAnalyzerSecurify.__new_analyzer()

        buggy_contract = fetch_file(resource_uri("DAOBug.sol"))
        request_id = 15
        report = analyzer.check(buggy_contract, request_id, "DAOBug.sol")

        # Asserts some result produced
        self.assertTrue(report)

        print(json.dumps(report, indent=2))

        # Bug fix: assertTrue(x, msg) treats its second argument as the
        # failure message, so the status was never compared to 'success'.
        self.assertEqual('success', report['status'])
        self.assertIsNotNone(report['potential_vulnerabilities'])
        # assertEquals is a deprecated alias of assertEqual
        self.assertEqual(6, len(report['potential_vulnerabilities']))
# Example #24
    def test_report_creation(self):
        """
        Tests whether a report is created upon calling the analyzer on a buggy contract. This SHOULD
        invoke dockerhub fail.
        """
        analyzer = TestAnalyzerDockerhubFail.__new_analyzer()

        buggy_contract = fetch_file(resource_uri("DAOBug.sol"))
        request_id = 15
        report = analyzer.check(buggy_contract, request_id, "DAOBug.sol")

        # Asserts some result produced
        self.assertTrue(report)

        # Bug fix: assertTrue(x, msg) treats its second argument as the
        # failure message, so the status was never compared to 'error'.
        self.assertEqual('error', report['status'])

        # assertEquals is a deprecated alias of assertEqual
        self.assertEqual(2, len(report['trace']))
        self.assertEqual(1, len(report['errors']))
        msg = "Error response from daemon: pull access denied for qspprotocol/" \
              "does-not-exist-0.4.25, repository does not exist or may require 'docker login'\n"
        self.assertEqual(msg, report['errors'][0])
    def test_restricting_local_max_assigned(self):
        """
        Tests if the limitation on the local maximum assigned requests is in effect.

        While the node is saturated (assigned count == max), the poll thread
        must not ask for another audit request; once the count drops back to
        zero, bidding is expected to resume.
        """

        # Flag flipped by the mock if the node (incorrectly) polls for a new
        # request while it is already at its assignment limit.
        mocked__get_next_audit_request_called = [False]

        def mocked__get_next_audit_request():
            # this should be unreachable when the limit is reached
            mocked__get_next_audit_request_called[0] = True

        self.assertEqual(int(self.__config.max_assigned_requests), 1)
        # Make sure anyAvailableRequest returns the ready state
        self.__set_any_request_available(1)

        # Saturate the node: mark one request as already assigned.
        self.__config.web3_client.eth.waitForTransactionReceipt(
            self.__set_assigned_request_count(1))

        buggy_contract = resource_uri("DappBinWallet.sol")
        self.__request_assign_and_emit(self.__REQUEST_ID, buggy_contract,
                                       self.__PRICE, 1)
        with mock.patch(
                'audit.threads.poll_requests_thread.PollRequestsThread.'
                '_PollRequestsThread__get_next_audit_request',
                side_effect=mocked__get_next_audit_request):
            # Make sure there is enough time for mining poll to call QSPAuditNode.__check_then_bid_audit
            # request
            sleep(self.__config.block_mined_polling + 1)
            self.__evt_wait_loop(self.__submitReport_filter)
            # Free the slot again so the node may resume bidding.
            self.__config.web3_client.eth.waitForTransactionReceipt(
                self.__set_assigned_request_count(0))
            self.__send_done_message(self.__REQUEST_ID)

        # This is a critical line to be called as the node did all it audits and starts bidding
        # again
        self.__evt_wait_loop(self.__getNextAuditRequest_filter)
        self.__set_any_request_available(0)
        # an extra call to get_next_audit_request must not have been accepted
        self.assertFalse(mocked__get_next_audit_request_called[0])
 def __load_report(self, report_file_path):
     """Loads a JSON report fixture given its resource-relative path."""
     report_file = fetch_file(resource_uri(report_file_path))
     return load_json(report_file)
 def test_is_report_deemed_correct_in_case_of_incorrect_encoding(self):
     """A garbage (undecodable) auditor report must be deemed incorrect."""
     police_uri = resource_uri("reports/DAOBug.json")
     self.__test_auditor_report_correctness(
         auditor_compressed_report=b'garbage',
         police_report=police_uri,
         deemed_correct=False)
# Example #28
    def test_successful_police_audit(self):
        """
        Tests the flow of a police check: the node, acting as a police
        officer, re-audits a previously submitted report, and the
        corresponding event row must reach the submitted ('SB') police
        ('PC') state in the database.
        """
        uncompressed_report = load_json(
            fetch_file(resource_uri("reports/DAOBug.json")))
        request_id = uncompressed_report['request_id']

        encoder = ReportEncoder()
        compressed_report = encoder.compress_report(uncompressed_report,
                                                    request_id)

        # Creates a mocked method for retrieving the audit result from the blockchain.
        submit_report_instance = SubmitReportThread(self.__config)
        submit_report_instance._SubmitReportThread__get_report_in_blockchain = MagicMock(
        )
        submit_report_instance._SubmitReportThread__get_report_in_blockchain.return_value = \
            compressed_report
        replace_thread(self.__audit_node, SubmitReportThread,
                       submit_report_instance)

        # Adds a police event to the database to trigger the flow of a police
        # check. Since no other thread should be writing to the DB at this
        # point, the write can be performed without a lock.
        poll_requests_instance = PollRequestsThread(
            self.__config, self.__block_mined_polling_thread)
        poll_requests_instance._PollRequestsThread__add_evt_to_db(
            request_id=request_id,
            requestor=self.__audit_node.config.audit_contract_address,
            price=100,
            uri=resource_uri("reports/DAOBug.json"),
            assigned_block_nbr=100,
            is_audit=False)
        replace_thread(self.__audit_node, PollRequestsThread,
                       poll_requests_instance)

        # Disables the claim rewards threading from continuously running ahead;
        # negate the default mocking behaviour of always having rewards
        # available
        claim_rewards_instance = ClaimRewardsThread(self.__config)
        claim_rewards_instance._ClaimRewardsThread__has_available_rewards = MagicMock(
        )
        claim_rewards_instance._ClaimRewardsThread__has_available_rewards.return_value = False
        replace_thread(self.__audit_node, ClaimRewardsThread,
                       claim_rewards_instance)

        # Sets the node as a police officer.
        self.__audit_node.is_police_officer = MagicMock()
        self.__audit_node.is_police_officer.return_value = True

        # Sets the audit report value itself to be returned by the audit node.
        self.__audit_node.audit = MagicMock()
        self.__audit_node.audit.return_value = {
            'audit_state': uncompressed_report['audit_state'],
            'audit_uri': 'http://some-url.com',
            'audit_hash': 'some-hash',
            'full_report': json.dumps(uncompressed_report),
            'compressed_report': compressed_report
        }

        self.__run_audit_node()

        sql3lite_worker = self.__audit_node.config.event_pool_manager.sql3lite_worker
        result_found = False

        # Waits till the record moves from assigned status to submitted.
        sql = "select * from audit_evt where request_id = {0} and fk_status == 'SB' and fk_type='PC'"
        while not result_found:
            rows = sql3lite_worker.execute(sql.format(request_id))
            if len(rows) == 0:
                # Poll until the police-check thread has submitted.
                sleep(0.1)
                continue

            # NOTE(review): assertTrue(len(rows), 1) only checks truthiness —
            # the 1 is the failure message, not an expected value; confirm
            # whether assertEqual(1, len(rows)) was intended.
            self.assertTrue(len(rows), 1)
            result_found = True
# Example #29
 def setUpClass(cls):
     """
     Points the test class at the configured event database file and
     deletes any stale copy so each run starts from a clean database.
     """
     cfg = load_yaml(fetch_file(resource_uri("test_config.yaml")))
     TestEvtPoolManager.db_file = config_value(cfg, '/dev/evt_db_path')
     # Robustness: on a first/clean run the database file does not exist
     # yet; remove() would otherwise raise FileNotFoundError.
     try:
         remove(TestEvtPoolManager.db_file)
     except FileNotFoundError:
         pass
# Example #30
 def __compress_report(self, report_path_uri):
     """Loads a report fixture, stamps the node version, and compresses it."""
     report = load_json(fetch_file(resource_uri(report_path_uri)))
     report['version'] = self.__config.node_version
     return ReportEncoder().compress_report(report, report['request_id'])