    def setUp(self):
        self.whale = WhaleAlert(working_directory=TEST_WORKING_DIR)
        self.writer = Writer(self.whale.get_status(), self.whale.get_database())
        self.reader = Reader(self.whale.get_status(), self.whale.get_database(),
                             self.whale.get_configuration())
class WritingLastCallStats(unittest.TestCase):
    def setUp(self):
        self.whale = WhaleAlert(working_directory=TEST_WORKING_DIR)
        self.writer = Writer(self.whale.get_status(), self.whale.get_database())

    def tearDown(self):
        cleanup_working_directories()

    def reload_status(self):
        target_file = os.path.join(TEST_WORKING_DIR, settings.data_file_directory,
                                   settings.status_file_name)
        self.writer.get_status_object().set_configuration_file(target_file)

    def get_status_value(self, section, key):
        return self.writer.get_status_object().get_value(section, key)

    def test_successful_call_written_ok(self):
        status = successful_call_with_trans
        success = self.writer.write_status(status)
        self.reload_status()
        self.assertEqual(
            self.get_status_value(settings.status_file_last_good_call_section_name,
                                  settings.status_file_option_timeStamp),
            successful_call_with_trans['timestamp'])
        self.assertEqual(
            self.get_status_value(settings.status_file_last_good_call_section_name,
                                  settings.status_file_option_transaction_count),
            successful_call_with_trans['transaction_count'])
        self.assertEqual(success, True)

    def test_error_call_written_ok(self):
        status = failed_call
        success = self.writer.write_status(status)
        self.reload_status()
        self.assertEqual(
            self.get_status_value(settings.status_file_last_failed_secion_name,
                                  settings.status_file_option_timeStamp),
            failed_call['timestamp'])
        self.assertEqual(
            self.get_status_value(settings.status_file_last_failed_secion_name,
                                  settings.status_file_option_error_code),
            failed_call['error_code'])
        self.assertEqual(
            self.get_status_value(settings.status_file_last_failed_secion_name,
                                  settings.status_file_option_error_message),
            failed_call['error_message'])
        self.assertEqual(success, True)
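# The fixtures used above (successful_call_with_trans, failed_call) are assumed to follow
# the status dictionary shape that Writer.write_status() expects, as documented in
# WhaleAlert.write_custom_status(). A minimal sketch of such a fixture, with illustrative
# placeholder values only:
#
#     example_status = {
#         'timestamp': '2020-05-07T18:00:14+00:00',  # ISO 8601 timestamp of the call
#         'error_code': 200,                         # 200 = success, anything else = failure
#         'error_message': 'No error',               # human-readable description
#         'transaction_count': 1,                    # transactions returned by the call
#     }
#     writer.write_status(example_status)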
class HandlingErrors(unittest.TestCase):
    def setUp(self):
        self.whale = WhaleAlert(working_directory=TEST_WORKING_DIR)
        self.writer = Writer(self.whale.get_status(), self.whale.get_database())

    def tearDown(self):
        cleanup_working_directories()

    def test_status_key_error(self):
        success = self.writer.write_status(bad_status)
        self.assertIs(success, False)

    def test_status_handle_missing_status_file(self):
        cleanup_working_directories()
        success = self.writer.write_status(successful_call_with_trans)
        self.assertIs(success, False)

    def test_handle_bad_type(self):
        writer = Writer(None, None)
        success = writer.write_status(successful_call_with_trans)
        self.assertIs(success, False)
class GettingLoggerStatus(unittest.TestCase):
    def setUp(self):
        self.whale = WhaleAlert(working_directory=TEST_WORKING_DIR)
        self.writer = Writer(self.whale.get_status(), self.whale.get_database())
        self.reader = Reader(self.whale.get_status(), self.whale.get_database(),
                             self.whale.get_configuration())

    def tearDown(self):
        cleanup_working_directories()

    def write_good_status(self, unix_timestamp):
        status = dict()
        status['timestamp'] = Reader.to_local_time(unix_timestamp)
        status['error_code'] = 200
        status['error_message'] = 'A dud message'
        status['transaction_count'] = 0
        self.writer.write_status(status)

    def write_bad_status(self, unix_timestamp):
        status = dict()
        status['timestamp'] = Reader.to_local_time(unix_timestamp)
        status['error_code'] = 1
        status['error_message'] = 'A dud message'
        status['transaction_count'] = 0
        self.writer.write_status(status)

    def test_getting_status_stdout(self):
        last_good_call_seconds_ago = 300
        last_good_call_minutes = int(round(last_good_call_seconds_ago / 60, 0))
        health = round(
            100 * (settings.health_list_length - 1) / settings.health_list_length, 1)
        self.write_bad_status(int(time.time()) - 200)
        self.write_good_status(int(time.time()) - last_good_call_seconds_ago)
        output = self.reader.status_request()
        expected_std_status = "Last successful call {} minutes ago, health {}%".format(
            last_good_call_minutes, health)
        self.assertEqual(expected_std_status, output)

    def test_getting_status_dict(self):
        last_good_call_seconds_ago = 100
        last_good_call_minutes = int(round(last_good_call_seconds_ago / 60, 0))
        health = round(
            100 * (settings.health_list_length - 1) / settings.health_list_length, 1)
        self.write_bad_status(int(time.time()) - 200)
        self.write_good_status(int(time.time()) - last_good_call_seconds_ago)
        output = self.reader.status_request(as_dict=True)
        expected_status = {
            'last_call': last_good_call_minutes,
            'health': health,
            'status': 'Ok'
        }
        self.assertEqual(expected_status, output)

    def test_getting_status_dict_logger_not_ok(self):
        last_good_call_seconds_ago = 700
        last_good_call_minutes = int(round(last_good_call_seconds_ago / 60, 0))
        health = round(
            100 * (settings.health_list_length - 1) / settings.health_list_length, 1)
        self.write_bad_status(int(time.time()) - 200)
        self.write_good_status(int(time.time()) - last_good_call_seconds_ago)
        output = self.reader.status_request(as_dict=True)
        expected_status = {
            'last_call': last_good_call_minutes,
            'health': health,
            'status': 'Error'
        }
        self.assertEqual(expected_status, output)

    def test_request_status_with_no_working_directory_returns_none(self):
        reader = Reader()
        expected_status = None
        output = reader.status_request()
        self.assertEqual(expected_status, output)
class RequestingStatusByExchange(unittest.TestCase):
    def setUp(self):
        self.whale = WhaleAlert(working_directory=TEST_WORKING_DIR)
        self.writer = Writer(self.whale.get_status(), self.whale.get_database())
        self.reader = Reader(self.whale.get_status(), self.whale.get_database())

    def tearDown(self):
        cleanup_working_directories()

    def add_call_to_database(self, data):
        self.writer.write_transactions(data)

    def test_single_blockchain_single_symbol_request_one_result(self):
        self.add_call_to_database(test_good_data)
        request = dict(settings.request_format)
        request[settings.request_blockchain] = ['bitcoin']
        request[settings.request_symbols] = ['BTC']
        request[settings.request_maximum_results] = 1
        request[settings.request_from_time] = 0
        data = test_good_data[0]
        expected_output = format_output(data)
        expected_output = expected_output[:-1]
        output = self.reader.data_request(request)
        self.assertEqual(output, expected_output)

    def test_single_blockchain_multiple_symbols_request_one_result(self):
        self.add_call_to_database(test_good_data)
        request = dict(settings.request_format)
        request[settings.request_blockchain] = ['ethereum']
        request[settings.request_symbols] = ['USDT', "ETH"]
        request[settings.request_maximum_results] = 1
        request[settings.request_from_time] = 0
        data = test_good_data[4]
        expected_output = format_output(data)
        expected_output = expected_output[:-1]
        output = self.reader.data_request(request)
        self.assertEqual(output, expected_output)

    def test_single_blockchain_multiple_symbols_request_multiple_results(self):
        self.add_call_to_database(test_good_data)
        request = dict(settings.request_format)
        request[settings.request_blockchain] = ['ethereum']
        request[settings.request_symbols] = ['USDT', "ETH"]
        request[settings.request_maximum_results] = 2
        request[settings.request_from_time] = 0
        data = test_good_data[1]
        expected_output = format_output(data)
        data = test_good_data[4]
        expected_output = expected_output + format_output(data)
        expected_output = expected_output[:-1]
        output = self.reader.data_request(request)
        self.assertEqual(output, expected_output)

    def test_multiple_blockchain_multiple_symbols_request_multiple_results(self):
        self.add_call_to_database(test_good_data)
        request = dict(settings.request_format)
        request[settings.request_blockchain] = ['ethereum', 'tron']
        request[settings.request_symbols] = ['USDT', "ETH"]
        request[settings.request_maximum_results] = 5
        request[settings.request_from_time] = 0
        data = test_good_data[1]
        expected_output = format_output(data)
        data = test_good_data[2]
        expected_output = expected_output + format_output(data)
        data = test_good_data[4]
        expected_output = expected_output + format_output(data)
        expected_output = expected_output[:-1]
        output = self.reader.data_request(request)
        self.assertEqual(output, expected_output)

    def test_multiple_blockchain_multiple_symbols_request_one_result(self):
        self.add_call_to_database(test_good_data)
        request = dict(settings.request_format)
        request[settings.request_blockchain] = ['ethereum', 'tron']
        request[settings.request_symbols] = ['USDT', "ETH"]
        request[settings.request_maximum_results] = 1
        request[settings.request_from_time] = 0
        data = test_good_data[4]
        expected_output = format_output(data)
        expected_output = expected_output[:-1]
        output = self.reader.data_request(request)
        self.assertEqual(output, expected_output)

    def test_invalid_blockchain_is_ignored(self):
        self.add_call_to_database(test_good_data)
        request = dict(settings.request_format)
        request[settings.request_blockchain] = ['ethereum', 'not_a_blockchain']
        request[settings.request_symbols] = ['USDT', "ETH"]
        request[settings.request_maximum_results] = 5
        request[settings.request_from_time] = 0
        data = test_good_data[1]
        expected_output = format_output(data)
        data = test_good_data[4]
        expected_output = expected_output + format_output(data)
        expected_output = expected_output[:-1]
        output = self.reader.data_request(request)
        self.assertEqual(output, expected_output)

    def test_invalid_symbols_are_ignored(self):
        self.add_call_to_database(test_good_data)
        request = dict(settings.request_format)
        request[settings.request_blockchain] = ['ethereum', 'tron']
        request[settings.request_symbols] = ["ETH"]
        request[settings.request_maximum_results] = 5
        request[settings.request_from_time] = 0
        data = test_good_data[4]
        expected_output = format_output(data)
        expected_output = expected_output[:-1]
        output = self.reader.data_request(request)
        self.assertEqual(output, expected_output)

    def test_values_only_taken_after_timestamp(self):
        self.add_call_to_database(test_good_data)
        request = dict(settings.request_format)
        request[settings.request_blockchain] = ['ethereum', 'tron']
        request[settings.request_symbols] = ['USDT', "ETH"]
        request[settings.request_maximum_results] = 5
        request[settings.request_from_time] = 1488874451
        data = test_good_data[4]
        expected_output = format_output(data)
        expected_output = expected_output[:-1]
        output = self.reader.data_request(request)
        self.assertEqual(output, expected_output)

    def test_blockchain_wildcard_gets_all_blockchain(self):
        self.add_call_to_database(test_good_data)
        request = dict(settings.request_format)
        request[settings.request_blockchain] = ['*']
        request[settings.request_symbols] = ['USDT']
        request[settings.request_maximum_results] = 5
        request[settings.request_from_time] = 0
        data = test_good_data[1]
        expected_output = format_output(data)
        data = test_good_data[2]
        expected_output = expected_output + format_output(data)
        expected_output = expected_output[:-1]
        output = self.reader.data_request(request)
        self.assertEqual(output, expected_output)

    def test_symbol_wildcard_gets_all_symbols(self):
        self.add_call_to_database(test_good_data)
        request = dict(settings.request_format)
        request[settings.request_blockchain] = ['ethereum']
        request[settings.request_symbols] = ['*']
        request[settings.request_maximum_results] = 5
        request[settings.request_from_time] = 0
        data = test_good_data[1]
        expected_output = format_output(data)
        data = test_good_data[4]
        expected_output = expected_output + format_output(data)
        expected_output = expected_output[:-1]
        output = self.reader.data_request(request)
        self.assertEqual(output, expected_output)

    def test_both_wildcards_return_all_results(self):
        self.add_call_to_database(test_good_data)
        request = dict(settings.request_format)
        request[settings.request_blockchain] = ['*']
        request[settings.request_symbols] = ['*']
        request[settings.request_maximum_results] = 5
        request[settings.request_from_time] = 0
        data = test_good_data[0]
        expected_output = format_output(data)
        data = test_good_data[1]
        expected_output = expected_output + format_output(data)
        data = test_good_data[2]
        expected_output = expected_output + format_output(data)
        data = test_good_data[3]
        expected_output = expected_output + format_output(data)
        data = test_good_data[4]
        expected_output = expected_output + format_output(data)
        expected_output = expected_output[:-1]
        output = self.reader.data_request(request)
        self.assertEqual(output, expected_output)

    def test_both_wildcards_return_all_results_as_dict(self):
        self.add_call_to_database(test_good_data)
        request = dict(settings.request_format)
        request[settings.request_blockchain] = ['*']
        request[settings.request_symbols] = ['*']
        request[settings.request_maximum_results] = 1
        request[settings.request_from_time] = 0
        data = test_good_data[4]
        expected_output = format_output(data, as_dict=True)
        output = self.reader.data_request(request, as_dict=True)
        self.assertEqual(output, [expected_output])

    def test_single_result_pretty_format(self):
        self.add_call_to_database(test_good_data)
        request = dict(settings.request_format)
        request[settings.request_blockchain] = ['*']
        request[settings.request_symbols] = ['*']
        request[settings.request_maximum_results] = 1
        request[settings.request_from_time] = 0
        output = self.reader.data_request(request, pretty=True)
        self.assertEqual([output], [expected_pretty_output])

    def test_single_result_pretty_format_medium_sum(self):
        self.add_call_to_database(test_good_data)
        request = dict(settings.request_format)
        request[settings.request_blockchain] = ['neo']
        request[settings.request_symbols] = ['*']
        request[settings.request_maximum_results] = 1
        request[settings.request_from_time] = 0
        output = self.reader.data_request(request, pretty=True)
        self.assertEqual([output], [expected_pretty_large_sum])

    def test_single_pretty_format_huge_sum(self):
        self.add_call_to_database(test_good_data)
        request = dict(settings.request_format)
        request[settings.request_blockchain] = ['tron']
        request[settings.request_symbols] = ['*']
        request[settings.request_maximum_results] = 1
        request[settings.request_from_time] = 0
        output = self.reader.data_request(request, pretty=True)
        self.assertEqual([output], [expected_pretty_huge_sum])

    def test_calling_without_database_returns_empty_string(self):
        reader = Reader()
        request = dict(settings.request_format)
        request[settings.request_blockchain] = ['*']
        request[settings.request_symbols] = ['*']
        request[settings.request_maximum_results] = 5
        request[settings.request_from_time] = 0
        output = reader.data_request(request)
        self.assertEqual(output, '')

    def test_calling_with_empty_database_returns_empty_list(self):
        request = dict(settings.request_format)
        request[settings.request_blockchain] = ['*']
        request[settings.request_symbols] = ['*']
        request[settings.request_maximum_results] = 5
        request[settings.request_from_time] = 0
        output = self.reader.data_request(request, as_dict=True)
        self.assertEqual(output, [])

    def test_calling_with_empty_database_returns_empty_dataFrame(self):
        request = dict(settings.request_format)
        request[settings.request_blockchain] = ['*']
        request[settings.request_symbols] = ['*']
        request[settings.request_maximum_results] = 5
        request[settings.request_from_time] = 0
        output = self.reader.data_request(request, as_df=True)
        self.assertIs(output.empty, True)

    def test_bad_data_request_returns_empty_string(self):
        bad_request = {
            'bad_key': 'sfd',
            'another_bad_key': 'sfd',
        }
        output = self.reader.data_request(bad_request)
        self.assertEqual(output, '')

    def test_getting_all_data_as_df(self):
        self.add_call_to_database(test_good_data)
        request = dict(settings.request_format)
        request[settings.request_blockchain] = ['*']
        request[settings.request_symbols] = ['*']
        request[settings.request_maximum_results] = 5
        request[settings.request_from_time] = 0
        expected_output = pd.DataFrame(flattern_transactions(test_good_data))
        expected_output = expected_output.reindex(sorted(expected_output.columns), axis=1)
        output = self.reader.data_request(request, as_df=True)
        pd.testing.assert_frame_equal(expected_output, output)
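# The request-building pattern above repeats in every test. A small helper along the lines
# sketched below (hypothetical, not part of the library or the test fixtures) could collapse
# it; only the settings.request_* keys and Reader.data_request() used here come from the
# code above.
#
#     def build_request(blockchains=('*',), symbols=('*',), max_results=5, from_time=0):
#         request = dict(settings.request_format)
#         request[settings.request_blockchain] = list(blockchains)
#         request[settings.request_symbols] = list(symbols)
#         request[settings.request_maximum_results] = max_results
#         request[settings.request_from_time] = from_time
#         return request
#
#     output = reader.data_request(build_request(['ethereum'], ['ETH'], max_results=1))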
class WhaleAlert():
    """ Python wrapper for the Whale Alert API."""

    def __init__(self, working_directory=None, log_level=logging.WARNING):
        if working_directory is not None:
            self.__make_directories_as_needed(working_directory)
            self.__setup_logging(working_directory, log_level)
            self.__config = self.__generate_configuration(working_directory)
            self.__database = self.__setup_database(working_directory)
            self.__status = self.__setup_status_file(working_directory)
            self.__writer = Writer(self.__status, self.__database)
            self.__reader = Reader(self.__status, self.__database, self.__config)
            self.__last_data_request_time = int(time.time())
        else:
            self.__config = None
            self.__status = None
            self.__database = None
        log.debug("Started new Whale Alert API wrapper.")
        self.transactions = Transactions()

    def __setup_logging(self, working_directory, log_level):
        logging_file = os.path.join(working_directory, settings.data_file_directory,
                                    settings.log_file_name)
        try:
            logging.basicConfig(format='%(asctime)s %(levelname)s %(module)s: %(message)s',
                                datefmt='%m/%d/%Y %I:%M:%S%p',
                                level=log_level,
                                filename=logging_file)
        except Exception as e_r:
            print("Failed to create logging file. Exception '{}'".format(e_r), file=sys.stderr)
            raise

    def __setup_status_file(self, working_directory):
        status = ConfigChecker()
        status.set_expectation(settings.status_file_last_good_call_section_name,
                               settings.status_file_option_timeStamp, str, '')
        status.set_expectation(settings.status_file_last_good_call_section_name,
                               settings.status_file_option_transaction_count, int, 0)
        status.set_expectation(settings.status_file_last_failed_secion_name,
                               settings.status_file_option_timeStamp, str, '')
        status.set_expectation(settings.status_file_last_failed_secion_name,
                               settings.status_file_option_error_code, int, 0)
        status.set_expectation(settings.status_file_last_failed_secion_name,
                               settings.status_file_option_error_message, str, '')
        status.set_expectation(settings.status_file_current_session_section_name,
                               settings.status_file_option_successful_calls, int, 0)
        status.set_expectation(settings.status_file_current_session_section_name,
                               settings.status_file_option_failed_calls, int, 0)
        status.set_expectation(settings.status_file_current_session_section_name,
                               settings.status_file_option_success_rate, float, 0.0)
        status.set_expectation(settings.status_file_current_session_section_name,
                               settings.status_file_option_health, float, 100.0)
        status.set_expectation(settings.status_file_all_time_section_name,
                               settings.status_file_option_successful_calls, int, 0)
        status.set_expectation(settings.status_file_all_time_section_name,
                               settings.status_file_option_failed_calls, int, 0)
        status.set_expectation(settings.status_file_all_time_section_name,
                               settings.status_file_option_success_rate, float, 0.0)
        target_file = os.path.join(working_directory, settings.data_file_directory,
                                   settings.status_file_name)
        status.set_configuration_file(target_file)
        status.write_configuration_file(target_file)
        return status

    def __setup_database(self, working_directory):
        file_location = os.path.join(working_directory, settings.data_file_directory,
                                     settings.database_file_name)
        database = SQHelper(file_location)
        if not database.exists():
            log.critical("Failed to create required database file, exiting")
            raise RuntimeError("Failed to create required database file")
        return database

    def __generate_configuration(self, working_directory):
        self.__make_directories_as_needed(working_directory)
        config = ConfigChecker()
        config.set_expectation(settings.API_section_name, settings.API_option_private_key,
                               str, settings.API_option_privatate_key_default)
        config.set_expectation(settings.API_section_name, settings.API_option_interval,
                               int, settings.API_option_interval_default)
        config.set_expectation(settings.API_section_name, settings.API_option_minimum_value,
                               int, settings.API_option_minimum_value_default)
        config.set_expectation(settings.API_section_name, settings.API_option_historical_limit,
                               int, settings.API_option_historical_limit_default)
        target_directory = os.path.join(working_directory, settings.input_configuation_filename)
        config.set_configuration_file(target_directory)
        config.write_configuration_file(target_directory)
        return config

    def __make_directories_as_needed(self, working_directory):
        target_directory = os.path.join(working_directory, settings.data_file_directory)
        self.__make_dir(target_directory)

    def __make_dir(self, target_directory):
        if not os.path.exists(target_directory):
            try:
                log.debug("Creating new directory '{}'".format(target_directory))
                os.makedirs(target_directory)
            except OSError as e_r:
                log.error("Cannot create directory '{}'. Exception '{}'".format(
                    target_directory, e_r))
                raise

    def get_configuration(self):
        """ Get the configuration used.

        Note:
            This function always returns None if a working_directory was not supplied when
            the class object was created.

        Returns:
            config (ConfigChecker): A valid configuration, if one exists.
            None: No configuration exists.
        """
        return self.__config

    def get_status(self):
        """ Get the status file used.

        Note:
            This function always returns None if a working_directory was not supplied when
            the class object was created.

        Returns:
            status (ConfigChecker): A valid status file, if one exists.
            None: No status file exists.
        """
        return self.__status

    def get_database(self):
        """ Get the database file used.

        Note:
            This function always returns None if a working_directory was not supplied when
            the class object was created.

        Returns:
            database (SQHelper): A valid database, if one is connected.
            None: No database is connected.
        """
        return self.__database

    def get_transactions(self, start_time, end_time=None, api_key=None, cursor=None,
                         min_value=500000, limit=100):
        """ Use the Whale Alert API to get the latest transactions for a given time period.

        Parameters:
            start_time (int): A unix timestamp representing the start time to get transactions (exclusive).
            end_time (int): A unix timestamp representing the end time to get transactions (inclusive).
            api_key (str): Key to use for the call. Must be supplied if running with no configuration file.
            cursor (str): The pagination key from the previous call. (optional)
            min_value (int): The minimum value transaction to return (the free API has a 500000 minimum).
            limit (int): The maximum number of transactions to return (maximum 100).

        Returns:
            success (bool): True if a successful call was made (return code of 200), False otherwise.
            transactions (list or None):
                - success = True: A list containing a dictionary for each transaction.
                - success = False: None
            status (dict): A dictionary containing the timestamp, error_code and error_message for the call.
        """
        if type(start_time) is not int:
            raise ValueError("Start time must be a unix time stamp integer")
        if end_time is not None and type(end_time) is not int:
            raise ValueError("End time must be a unix time stamp integer")
        if self.__config is None and api_key is None:
            raise ValueError("An API key needs to be supplied to get latest transactions")
        if self.__config is not None and api_key is None:
            api_key = self.__config.get_value(settings.API_section_name,
                                              settings.API_option_private_key)
        if self.__config is not None and min_value == 500000:
            min_value = self.__config.get_value(settings.API_section_name,
                                                settings.API_option_minimum_value)
        success, transactions, status = self.transactions.get_transactions(
            start_time, end_time, api_key, cursor, min_value, limit)
        return success, transactions, status

    def write_custom_status(self, status):
        """ Write a custom status to the status file.

        Parameters:
            status (dict): A python dictionary with four keys:
                - timestamp: An ISO 8601 timestamp
                - error_code: An integer error code. 200 represents success, other values are failures
                - error_message: A description of the error code
                - transaction_count: The number of transactions for this status (leave at zero if not relevant)

        Returns:
            True: The status was written.
            False: An error occurred. Errors are sent to the logging module.
        """
        if self.__status is None:
            return False
        return self.__writer.write_status(status)

    def fetch_and_store_data(self, start_time, end_time=None, api_key=None, cursor=None,
                             min_value=500000, limit=100):
        """ Use the Whale Alert API to get the latest transactions for a given time period
        and store the result in the given database. The call and write status is written to
        the status file.

        Parameters:
            start_time (int): A unix timestamp representing the start time to get transactions (exclusive).
            end_time (int): A unix timestamp representing the end time to get transactions (inclusive).
            api_key (str): Key to use for the call. Must be supplied if running with no configuration file.
            cursor (str): The pagination key from the previous call. (optional)
            min_value (int): The minimum value transaction to return (the free API has a 500000 minimum).
            limit (int): The maximum number of transactions to return (maximum 100).

        Returns:
            success (bool): True if a successful call was made and the data was stored, False otherwise.
        """
        if self.__database is None:
            log.error("Trying to fetch data without an API key or working directory "
                      "(cannot store data).")
            return False
        elif self.__database is not None and api_key is None:
            api_key = self.__config.get_value(settings.API_section_name,
                                              settings.API_option_private_key)
            log.debug("Using configuration supplied API key")
        else:
            log.debug("Using overridden API key")

        success, transactions, status = self.transactions.get_transactions(
            start_time, end_time, api_key, cursor, min_value, limit)

        if success is True and len(transactions) > 0:
            written_ok = self.__writer.write_transactions(transactions)
            if written_ok is False:
                status['error_code'] = 20
                status['error_message'] = "Failed to write transactions to database"
                success = False
        elif success is True and len(transactions) == 0:
            success = False

        self.__writer.write_status(status)
        return success

    def start_daemon(self, force=False, print_output=False):
        """ Start logging transactions to the database based on configuration file values."""
        if daemon_already_running() and force is False:
            return
        if self.__database is None:
            log.error("Trying to start daemon with no working directory")
            return

        api_key = self.__config.get_value(settings.API_section_name,
                                          settings.API_option_private_key)
        historical_limit = self.__config.get_value(settings.API_section_name,
                                                   settings.API_option_historical_limit)
        request_interval = self.__config.get_value(settings.API_section_name,
                                                   settings.API_option_interval)

        if request_interval <= 0:
            log.error("Request interval cannot be less than or equal to zero, daemon not starting")
            return
        if historical_limit <= 0:
            log.error("Historical limit cannot be less than or equal to zero, daemon not starting")
            return

        self.__status.set_value(settings.status_file_current_session_section_name,
                                settings.status_file_option_successful_calls, 0)
        self.__status.set_value(settings.status_file_current_session_section_name,
                                settings.status_file_option_failed_calls, 0)
        self.__status.set_value(settings.status_file_current_session_section_name,
                                settings.status_file_option_success_rate, 100.0)
        self.__status.set_value(settings.status_file_current_session_section_name,
                                settings.status_file_option_health, 100.0)
        self.__status.write_configuration_file()

        while True:
            if self.transactions.get_last_cursor() is None:
                start_time = self.__find_latest_timestamp()
            else:
                start_time = 0
            end_time = int(time.time())
            if (end_time - start_time) > historical_limit:
                start_time = end_time - historical_limit
            success = self.fetch_and_store_data(start_time,
                                                end_time=end_time,
                                                api_key=api_key,
                                                cursor=self.transactions.get_last_cursor())
            if success is True and print_output:
                print(self.get_new_transaction(pretty=True))
            time.sleep(request_interval)

    def __find_latest_timestamp(self):
        tables = self.__database.get_table_names()
        latest_timestamp = 0
        for table in tables:
            last = self.__database.get_last_time_entry(table)
            try:
                if last['timestamp'] > latest_timestamp:
                    latest_timestamp = last['timestamp']
            except KeyError:
                pass
        if latest_timestamp >= time.time():
            latest_timestamp = time.time()
        return int(latest_timestamp)

    def kill(self):
        """ Kill any running daemon process."""
        subprocess.Popen(['killall', PROCESS_NAME])

    def to_excel(self, output_file='whaleAlert.xlsx'):
        """ Write the entire database to an excel file."""
        if self.__database is None:
            return False
        tables = self.__database.get_table_names()
        if len(tables) == 0:
            return False
        df_list = []
        for table in tables:
            df_list.append(self.__database.table_to_df(table))
        writer = pd.ExcelWriter(output_file, engine='openpyxl')
        _ = [
            A.to_excel(writer, sheet_name="{0}".format(tables[i]))
            for i, A in enumerate(df_list)
        ]
        writer.save()
        return True

    def data_request(self, start=0, blockchain=None, symbols=None, max_results=20,
                     pretty=False, as_df=False, as_dict=False):
        """ Retrieve data from the transaction database.

        Parameters:
            start (int): Unix timestamp from where to start returning transactions.
            blockchain (list): Return transactions from specific blockchains, defaults to all.
            symbols (list): Return transactions for specific symbols (tags), defaults to all.
            max_results (int): The maximum number of results to return.
            pretty (bool): Use ASCII colour codes to format the output.
            as_df (bool): Return the results as a Pandas DataFrame.
            as_dict (bool): Return the results as a dictionary.
        """
        request = dict(settings.request_format)
        if blockchain is not None:
            request[settings.request_blockchain] = blockchain
        else:
            request[settings.request_blockchain] = ['*']
        if symbols is not None:
            request[settings.request_symbols] = symbols
        else:
            request[settings.request_symbols] = ['*']
        request[settings.request_from_time] = start
        request[settings.request_maximum_results] = max_results
        return self.__reader.data_request(request, pretty, as_df, as_dict)

    def get_new_transaction(self, pretty=False, as_df=False, as_dict=False):
        """ Get the transactions returned since the last call to this method.

        WARNING: If start_daemon is called with print_output=True, this method will not work
        as expected.

        Parameters:
            pretty (bool): Use ASCII colour codes to format the output.
            as_df (bool): Return all transactions as a DataFrame.
            as_dict (bool): Return as a {'timestamp': '', 'text': ''} dictionary. Pretty output is also applied.

        as_df takes precedence over as_dict.

        Returns:
            Formatted output depending on the passed parameters.
        """
        if as_df is True:
            return self.__writer.get_last_written()
        return self.__reader.dataframe_to_transaction_output(self.__writer.get_last_written(),
                                                             pretty=pretty,
                                                             as_dict=as_dict)

    def dataframe_to_transaction_output(self, df: pd.DataFrame, pretty: bool, as_dict: bool):
        """ Directly turn a transaction DataFrame into transaction strings (or dictionaries).

        Parameters:
            df: A pandas DataFrame acquired from data_request or get_new_transaction.
            pretty (bool): Use ASCII colour codes to format the output.
            as_dict (bool): Return as a {'timestamp': '', 'text': ''} dictionary. Pretty output is also applied.

        Returns:
            Formatted output depending on the passed parameters.
        """
        return self.__reader.dataframe_to_transaction_output(df, pretty=pretty, as_dict=as_dict)

    def status_request(self, as_dict=False):
        """ Get the current status of the running logger."""
        return self.__reader.status_request(as_dict=as_dict)
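# A minimal usage sketch of the public API defined above, assuming a working directory is
# supplied; the directory path and API key shown here are placeholders, not real values.
#
#     whale = WhaleAlert(working_directory='/tmp/whale_alert_demo')
#     success, transactions, status = whale.get_transactions(
#         start_time=int(time.time()) - 600, api_key='your-api-key')
#     if success:
#         whale.write_custom_status(status)
#     print(whale.data_request(max_results=5, pretty=True))
#     print(whale.status_request())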
class WritingData(unittest.TestCase):
    def setUp(self):
        self.whale = WhaleAlert(working_directory=TEST_WORKING_DIR)
        self.writer = Writer(self.whale.get_status(), self.whale.get_database())

    def tearDown(self):
        cleanup_working_directories()

    def test_writing_new_data_adds_tables_to_database(self):
        transactions = test_good_data
        success = self.writer.write_transactions(transactions)
        database = self.writer.get_database()
        tables = database.get_table_names()
        expected_tables = ['bitcoin', 'ethereum', 'tron', 'neo']
        self.assertEqual(success, True)
        self.assertEqual(tables, expected_tables)

    def test_writing_new_data_adds_correct_columns_to_table(self):
        transactions = test_good_data
        success = self.writer.write_transactions(transactions)
        database = self.writer.get_database()
        columns = database.get_column_names('bitcoin')
        expected_columns = sorted(list(settings.database_columns.keys()))
        self.assertEqual(success, True)
        self.assertEqual(expected_columns, columns)

    def test_writing_new_data_last_time_entry_is_correct(self):
        transactions = test_good_data
        success = self.writer.write_transactions(transactions)
        database = self.writer.get_database()
        last_entry = database.get_last_time_entry('bitcoin')
        expected_entry = {
            'blockchain': 'bitcoin',
            'symbol': 'btc',
            'id': '662472177',
            'transaction_type': 'transfer',
            'hash': '8d5ae34805f70d0a412964dca4dbd3f48bc103700686035a61b293cb91fe750d',
            'from_address': 'f2103b01cd7957f3a9d9726bbb74c0ccd3f355d3',
            'from_owner': 'binance',
            'from_owner_type': 'exchange',
            'to_address': '3f5ce5fbfe3e9af3971dd833d26ba9b5c936f0be',
            'to_owner': 'binance',
            'to_owner_type': 'exchange',
            'timestamp': 1588874414,
            'amount': 3486673,
            'amount_usd': 3508660.2,
            'transaction_count': 1
        }
        self.assertEqual(success, True)
        self.assertEqual(expected_entry, last_entry)

    def test_writing_new_data_extra_key_returns_false(self):
        transaction = test_extra_key
        success = self.writer.write_transactions(transaction)
        self.assertEqual(success, False)

    def test_writing_empty_transactions_returns_false(self):
        transactions = []
        success = self.writer.write_transactions(transactions)
        self.assertEqual(success, False)

    def test_writing_with_a_bad_key_returns_false(self):
        transactions = test_bad_key
        success = self.writer.write_transactions(transactions)
        self.assertEqual(success, False)

    def test_writing_with_a_bad_dictionary_returns_false(self):
        transactions = dict(test_bad_key[0])
        success = self.writer.write_transactions(transactions)
        self.assertEqual(success, False)

    def test_writing_successive_entries(self):
        transactions = test_good_data
        success = self.writer.write_transactions(transactions)
        success = self.writer.write_transactions(transactions)
        success = self.writer.write_transactions(transactions)
        database = self.writer.get_database()
        df = database.table_to_df('bitcoin')
        self.assertEqual(len(df), 3)
        # There are two ethereum transactions in the good data
        df = database.table_to_df('ethereum')
        self.assertEqual(len(df), 6)
        self.assertEqual(success, True)

    def test_writing_wrong_data_type(self):
        transactions = 'A string'
        success = self.writer.write_transactions(transactions)
        self.assertEqual(success, False)
class WritingCurrentAndAllTimeStatus(unittest.TestCase):
    def setUp(self):
        self.whale = WhaleAlert(working_directory=TEST_WORKING_DIR)
        self.writer = Writer(self.whale.get_status(), self.whale.get_database())

    def tearDown(self):
        cleanup_working_directories()

    def reload_status(self):
        target_file = os.path.join(TEST_WORKING_DIR, settings.data_file_directory,
                                   settings.status_file_name)
        self.writer.get_status_object().set_configuration_file(target_file)

    def get_status_value(self, section, key):
        return self.writer.get_status_object().get_value(section, key)

    def test_writing_status_with_no_working_directory_handled_ok(self):
        writer = Writer(self.whale.get_status(), self.whale.get_database())
        writer.write_status(successful_call_no_trans)

    def test_writing_a_successful_call_updates_success_no_trans(self):
        previous_value_all_time = self.get_status_value(
            settings.status_file_all_time_section_name,
            settings.status_file_option_successful_calls)
        previous_value_current = self.get_status_value(
            settings.status_file_current_session_section_name,
            settings.status_file_option_successful_calls)
        success = self.writer.write_status(successful_call_no_trans)
        self.reload_status()
        new_value_all_time = self.get_status_value(
            settings.status_file_all_time_section_name,
            settings.status_file_option_successful_calls)
        new_value_current = self.get_status_value(
            settings.status_file_current_session_section_name,
            settings.status_file_option_successful_calls)
        self.assertEqual(success, True)
        self.assertEqual(new_value_all_time, previous_value_all_time + 1)
        self.assertEqual(new_value_current, previous_value_current + 1)

    def test_writing_a_successful_call_updates_success_with_trans(self):
        previous_value_all_time = self.get_status_value(
            settings.status_file_all_time_section_name,
            settings.status_file_option_successful_calls)
        previous_value_current = self.get_status_value(
            settings.status_file_current_session_section_name,
            settings.status_file_option_successful_calls)
        success = self.writer.write_status(successful_call_with_trans)
        self.reload_status()
        new_value_all_time = self.get_status_value(
            settings.status_file_all_time_section_name,
            settings.status_file_option_successful_calls)
        new_value_current = self.get_status_value(
            settings.status_file_current_session_section_name,
            settings.status_file_option_successful_calls)
        self.assertEqual(success, True)
        self.assertEqual(new_value_all_time, previous_value_all_time + 1)
        self.assertEqual(new_value_current, previous_value_current + 1)

    def test_writing_a_successful_call_doesnt_change_failed_calls(self):
        previous_value_all_time = self.get_status_value(
            settings.status_file_all_time_section_name,
            settings.status_file_option_failed_calls)
        previous_value_current = self.get_status_value(
            settings.status_file_current_session_section_name,
            settings.status_file_option_failed_calls)
        success = self.writer.write_status(successful_call_no_trans)
        self.reload_status()
        new_value_all_time = self.get_status_value(
            settings.status_file_all_time_section_name,
            settings.status_file_option_failed_calls)
        new_value_current = self.get_status_value(
            settings.status_file_current_session_section_name,
            settings.status_file_option_failed_calls)
        self.assertEqual(success, True)
        self.assertEqual(new_value_all_time, previous_value_all_time)
        self.assertEqual(new_value_current, previous_value_current)

    def test_writing_a_successful_call_doesnt_change_failed_calls_with_trans(self):
        previous_value_all_time = self.get_status_value(
            settings.status_file_all_time_section_name,
            settings.status_file_option_failed_calls)
        previous_value_current = self.get_status_value(
            settings.status_file_current_session_section_name,
            settings.status_file_option_failed_calls)
        success = self.writer.write_status(successful_call_with_trans)
        self.reload_status()
        new_value_all_time = self.get_status_value(
            settings.status_file_all_time_section_name,
            settings.status_file_option_failed_calls)
        new_value_current = self.get_status_value(
            settings.status_file_current_session_section_name,
            settings.status_file_option_failed_calls)
        self.assertEqual(success, True)
        self.assertEqual(new_value_all_time, previous_value_all_time)
        self.assertEqual(new_value_current, previous_value_current)

    def test_writing_a_failed_call_updates_failed(self):
        previous_value_all_time = self.get_status_value(
            settings.status_file_all_time_section_name,
            settings.status_file_option_failed_calls)
        previous_value_current = self.get_status_value(
            settings.status_file_current_session_section_name,
            settings.status_file_option_failed_calls)
        success = self.writer.write_status(failed_call)
        self.reload_status()
        new_value_all_time = self.get_status_value(
            settings.status_file_all_time_section_name,
            settings.status_file_option_failed_calls)
        new_value_current = self.get_status_value(
            settings.status_file_current_session_section_name,
            settings.status_file_option_failed_calls)
        self.assertEqual(success, True)
        self.assertEqual(new_value_all_time, previous_value_all_time + 1)
        self.assertEqual(new_value_current, previous_value_current + 1)

    def test_writing_a_failed_call_doesnt_change_successful_calls(self):
        previous_value_all_time = self.get_status_value(
            settings.status_file_all_time_section_name,
            settings.status_file_option_successful_calls)
        previous_value_current = self.get_status_value(
            settings.status_file_current_session_section_name,
            settings.status_file_option_successful_calls)
        success = self.writer.write_status(failed_call)
        self.reload_status()
        new_value_all_time = self.get_status_value(
            settings.status_file_all_time_section_name,
            settings.status_file_option_successful_calls)
        new_value_current = self.get_status_value(
            settings.status_file_current_session_section_name,
            settings.status_file_option_successful_calls)
        self.assertEqual(success, True)
        self.assertEqual(new_value_all_time, previous_value_all_time)
        self.assertEqual(new_value_current, previous_value_current)

    def test_writing_failed_and_successful_calls_calculates_percent(self):
        self.writer.get_status_object().set_value(
            settings.status_file_all_time_section_name,
            settings.status_file_option_success_rate, 0)
        self.writer.get_status_object().set_value(
            settings.status_file_current_session_section_name,
            settings.status_file_option_success_rate, 0)
        self.writer.write_status(successful_call_no_trans)
        self.writer.write_status(successful_call_no_trans)
        self.writer.write_status(successful_call_no_trans)
        self.writer.write_status(failed_call)
        success = self.writer.write_status(failed_call)
        self.reload_status()
        percent_all_time = self.get_status_value(
            settings.status_file_all_time_section_name,
            settings.status_file_option_success_rate)
        percent_current = self.get_status_value(
            settings.status_file_current_session_section_name,
            settings.status_file_option_success_rate)
        self.assertEqual(percent_all_time, 60.00)
        self.assertEqual(percent_current, 60.00)
        self.assertEqual(success, True)

    def test_writing_health(self):
        health_length = settings.health_list_length
        self.assertGreater(health_length, 10)
        self.writer.write_status(failed_call)
        success = self.writer.write_status(failed_call)
        expected_health = round(100 * (health_length - 2) / health_length, 1)
        self.reload_status()
        current_health = self.get_status_value(
            settings.status_file_current_session_section_name,
            settings.status_file_option_health)
        self.assertEqual(current_health, expected_health)
        self.assertEqual(success, True)