def __init__(self):
    super(CusRepo, self).__init__()
    self.log = MyLogger(self.__class__.__name__)
    self.git_clean()
    self.checkout_branch()
    self.log.info("create repo done")
def __init__(self):
    super(myRepo, self).__init__()
    self.log = MyLogger(self.__class__.__name__)
    self.update()
    self.git_init()
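# MyLogger is referenced throughout but not defined in this section. Below is a
# minimal sketch of the class-name flavour used by the repo classes
# (MyLogger(self.__class__.__name__) with .info()/.error()); the crawler code
# constructs it as MyLogger(logs_directory, category) with msg_logger()/
# error_logger(), so the real project presumably ships its own implementation(s).
# Everything here is an assumption, not the project's actual logger.
import logging

class MyLogger(object):
    def __init__(self, name):
        self._log = logging.getLogger(name)
        if not self._log.handlers:
            handler = logging.StreamHandler()
            handler.setFormatter(logging.Formatter('%(asctime)s %(name)s %(levelname)s %(message)s'))
            self._log.addHandler(handler)
        self._log.setLevel(logging.INFO)

    def info(self, msg):
        self._log.info(msg)

    def error(self, msg):
        self._log.error(msg)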
def callback(ch, method, properties, body):
    """
    Called every time there is a new element in the queue (var : queue)
    :param ch:
    :param method:
    :param properties:
    :param body:
    :return:
    """
    # Logger and defaults live outside the try block so the except path
    # can never hit a NameError
    obj_logger = MyLogger(logs_directory, category)
    notification_url = notification_params = None
    try:
        obj_logger.msg_logger('#' * 100)
        obj_logger.msg_logger('Getting Data : %s' % (datetime.datetime.now()))

        # Data from the queue
        data = json.loads(body)
        notification_url = data.pop('notification_url')
        notification_params = data
        obj_logger.msg_logger('>>>>>>>>>> Sending Notification : %s || %s' % (notification_url, notification_params))

        # Send the notification
        requests.post(notification_url, data=json.dumps(notification_params), headers=headers)
        obj_logger.msg_logger('>>>>>>>>>> Notification Success : %s || %s' % (notification_url, notification_params))

        # Insert in DB
        insert_sql(logger=obj_logger, table_name='notification_logs', data={
            'tx_hash': notification_params['tx_hash'],
            'notification_url': notification_url,
            'params': str(notification_params),
            'timestamp': datetime.datetime.now(),
            'Status': 'Success',
        })
    except Exception as e:
        # On failure, push the notification to the exception queue, which is handled manually
        obj_logger.error_logger('>>>>>>>>>> Notification Failure : %s || %s || %s' % (e, notification_url, notification_params))
        obj_logger.msg_logger('>>>>>>>>>> Pushing to Exception Queue : %s' % (exception_queue))
        send_notification(obj_logger, notification_url, notification_params, queue=exception_queue)
    finally:
        obj_logger.msg_logger("#" * 100)
        # ACK in both cases: on success the message is done, and on failure it has
        # been moved to the exception queue, so either way it can be removed from
        # the main queue
        ch.basic_ack(delivery_tag=method.delivery_tag)
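# send_notification() is called above but not defined in this section. A minimal
# sketch, assuming RabbitMQ via pika and that the consumer pops 'notification_url'
# back out of the payload (as callback() does); the real helper may differ.
import json
import pika

def send_notification(obj_logger, notification_url, notification_params, queue):
    payload = dict(notification_params)
    payload['notification_url'] = notification_url  # consumer pops this key back out
    connection = pika.BlockingConnection(pika.ConnectionParameters(host='localhost'))
    channel = connection.channel()
    channel.queue_declare(queue=queue, durable=True)
    channel.basic_publish(
        exchange='',
        routing_key=queue,
        body=json.dumps(payload),
        properties=pika.BasicProperties(delivery_mode=2),  # persist across broker restarts
    )
    connection.close()
    obj_logger.msg_logger('Queued notification for %s on %s' % (notification_url, queue))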
def main(): """ Scheduling :return: """ try: sched = BlockingScheduler(timezone='Asia/Kolkata') sched.add_job(mempool_crawler, 'interval', id='erc_mempool_crawler', seconds=3) sched.start() except Exception as e: obj_logger = MyLogger(logs_directory,category) obj_logger.error_logger('Main : %s'%(e))
def __init__(self):
    super(DownloadToolController, self).__init__()
    self.root_dir = os.getcwd()
    self.download_tool = None
    self.log = MyLogger(self.__class__.__name__)
    self.download_tool_dict = {}
    self.release_download_tool_name = None
    self.download_tool_release_zip_dir = ""
    self.zip_tool = zipTool()
    self.update()
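# zipTool is instantiated above (as zip_tool / decompress_tool) but not defined
# in this section. A minimal sketch over the standard zipfile module; the method
# names compress()/decompress() are assumptions inferred from the attribute names.
import os
import zipfile

class zipTool(object):
    def compress(self, src_dir, zip_path):
        # Store every file under src_dir with a path relative to it
        with zipfile.ZipFile(zip_path, 'w', zipfile.ZIP_DEFLATED) as zf:
            for root, _, files in os.walk(src_dir):
                for name in files:
                    full = os.path.join(root, name)
                    zf.write(full, os.path.relpath(full, src_dir))

    def decompress(self, zip_path, dest_dir):
        with zipfile.ZipFile(zip_path, 'r') as zf:
            zf.extractall(dest_dir)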
def __init__(self, version_log="", root_path=""):
    self.root_dir = os.getcwd()
    self.release_dist_dir = ''
    self.build_root_dir = ''
    self.git_root_dir = root_path
    self._storage_list = []
    self.CUR_DATE = 0
    self.version_log = version_log
    self.menifest_xml = ''
    self.cp_version = None
    self.dsp_version = None
    self.log = MyLogger(self.__class__.__name__)
    self.verion_name = None
def __init__(self):
    super(GitPushDspBase, self).__init__()
    self.log = MyLogger(self.__class__.__name__)
    self.decompress_tool = zipTool()
    self.release_dsp_bin = ''
    self.dsp_version = ''
    self.git_push_dsp_rf_list = []
    self.rf_type = "PM813"
    self.update()
    self.create_git()
def callback(ch, method, properties, body):
    """
    Called every time there is a new element in the exception queue (var : queue)
    :param ch:
    :param method:
    :param properties:
    :param body:
    :return:
    """
    # Logger and defaults live outside the try block so the except path
    # can never hit a NameError
    obj_logger = MyLogger(logs_directory, category)
    notification_url = notification_params = None
    try:
        obj_logger.msg_logger('#' * 100)
        obj_logger.msg_logger('In Exception Queue : %s' % (queue))
        obj_logger.msg_logger('Getting Data : %s' % (datetime.datetime.now()))

        # Data from the queue
        data = json.loads(body)
        notification_url = data.pop('notification_url')
        notification_params = data
        obj_logger.msg_logger('>>>>>>>>>> Sending Notification : %s || %s' % (notification_url, notification_params))

        # Send the notification
        requests.post(notification_url, data=json.dumps(notification_params), headers=headers)
        obj_logger.msg_logger('>>>>>>>>>> Notification Success : %s || %s' % (notification_url, notification_params))

        # Insert in DB
        insert_sql(logger=obj_logger, table_name='notification_logs', data={
            'tx_hash': notification_params['tx_hash'],
            'notification_url': notification_url,
            'params': str(notification_params),
            'timestamp': datetime.datetime.now(),
            'Status': 'Success',
        })

        # ACK only on success: a failed message stays in the exception queue
        # so it can be retried or handled manually
        ch.basic_ack(delivery_tag=method.delivery_tag)
    except Exception as e:
        obj_logger.error_logger('>>>>>>>>>> Notification Failure : %s || %s || %s' % (e, notification_url, notification_params))
    finally:
        obj_logger.msg_logger("#" * 100)
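# The callback(ch, method, properties, body) signature matches a pika consumer.
# A minimal sketch of the wiring that would drive the callbacks above, assuming
# RabbitMQ via pika and a module-level `queue` name; the real entry point may differ.
import pika

def consume():
    connection = pika.BlockingConnection(pika.ConnectionParameters(host='localhost'))
    channel = connection.channel()
    channel.queue_declare(queue=queue, durable=True)
    channel.basic_qos(prefetch_count=1)  # deliver one unacked message at a time
    channel.basic_consume(queue=queue, on_message_callback=callback)
    channel.start_consuming()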
def __init__(self):
    self.root_dir = os.getcwd()
    self.version_log = ""
    self.log = MyLogger(self.__class__.__name__)
    self.verion_name = ''
    self.cur_version = ''
    self.chip_ID = None
    self.OS_type = None
    self.ps_mode = None
    self.decompress_tool = zipTool()
    # Matches a CRANE_/CRANEG_ build tag followed by an HH:MM:SS timestamp
    self.dsp_version_pattern = re.compile(
        r"(CRANE_.*?,.*?[0-9][0-9]:[0-9][0-9]:[0-9][0-9]"
        r"|CRANEG_.*?,.*?[0-9][0-9]:[0-9][0-9]:[0-9][0-9])"
    )
    self.version_pattern = None
    self.cp_version = None
    self.dsp_version = None
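# Illustration of what dsp_version_pattern matches: a CRANE_/CRANEG_ build tag
# followed (after a comma) by an HH:MM:SS timestamp. The sample string below is
# hypothetical.
import re

pattern = re.compile(r"(CRANE_.*?,.*?[0-9][0-9]:[0-9][0-9]:[0-9][0-9]|CRANEG_.*?,.*?[0-9][0-9]:[0-9][0-9]:[0-9][0-9])")
sample = "DSP image: CRANE_A0_1.023,Jan 01 2024 12:34:56 (release)"
match = pattern.search(sample)
if match:
    print(match.group(1))  # CRANE_A0_1.023,Jan 01 2024 12:34:56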
def __init__(self):
    super(gitPushSDKBase, self).__init__()
    self.log = MyLogger(self.__class__.__name__)
    self.zip_tool = zipTool()
    self.cp_sdk_version = None
    self.cp_sdk = None
    self.cp_sdk_root_dir = None
    self.dsp_rf_root_dir = ''
    self.git_push_dsp_dir = ''
    self.git_push_dsp_rf_list = []
    self.dsp_bin = None
    self.rf_bin = None
    self.update()
    self.create_git()
def __init__(self):
    super(CraneAliOSRepo, self).__init__()
    self.log = MyLogger(self.__class__.__name__)
    self.OS_type = "ALIOS"
    self.log.info("create repo done")
def mempool_crawler(): """ Mempool Process :return: """ obj_logger = MyLogger(logs_directory, category) obj_logger.msg_logger('#' * 100) obj_logger.msg_logger('Getting Mempool Data') # Get Mempool Data mempool_transaction_data = rpc_request(obj_logger, 'eth_getBlockByNumber', ['pending', True]).get( 'result', {}).get('transactions', []) obj_logger.msg_logger('Crawling Mempool Starts') for tx in mempool_transaction_data: tx_hash = tx['hash'] to_address = tx['to'] # Redis Check if (not redis_conn.sismember('eth_eth_zct_set', tx_hash)) and ( redis_conn.sismember('eth_eth_aw_set', to_address)): obj_logger.msg_logger( '>>>>>>>> Transaction Found in Mempool : %s' % (tx_hash)) from_address = tx['from'] value = int(tx['value'], 16) bid_id = -2 confirmations = 0 block_number = -1 flag = 'eth_incoming' sys_timestamp = datetime.datetime.now() # Insert in DB result = insert_sql(logger=obj_logger, table_name='eth_transactions', data={ 'from_address': from_address, 'to_address': to_address, 'tx_hash': tx_hash, 'bid_id': bid_id, 'confirmations': confirmations, 'block_number': block_number, 'value': value, 'flag': flag, 'sys_timestamp': sys_timestamp, }) if result: notif_url = find_sql_join( logger=obj_logger, table_names=['user_master', 'address_master'], filters={'address_master.address': to_address}, on_conditions={ 'user_master.user_name': 'address_master.user_name' }, columns=['user_master.notification_url' ])[0]['notification_url'] notif_params = { 'from_address': from_address, 'to_address': to_address, 'tx_hash': tx_hash, 'bid_id': bid_id, 'confirmations': confirmations, 'block_number': block_number, 'value': value, 'flag': flag } send_notification(obj_logger, notif_url, notif_params, queue=hook_queue) obj_logger.msg_logger( '>>>>>>>> Adding to eth_eth_zct_set : %s' % (tx_hash)) redis_conn.sadd( 'eth_eth_zct_set', tx_hash.encode('utf-8') ) # To cross check in Block Crawler and not to send multiple notification obj_logger.msg_logger('Crawling Mempool Ends') obj_logger.msg_logger('#' * 100)
def __init__(self):
    super(gitPushR2RCSDK, self).__init__()
    self.log = MyLogger(self.__class__.__name__)
    self.branch_name = "master"
def __init__(self):
    super(gitPushCraneDCXODsp, self).__init__()
    self.log = MyLogger(self.__class__.__name__)
def __init__(self):
    super(gitPushCraneMDsp, self).__init__()
    self.log = MyLogger(self.__class__.__name__)
    self.rf_type = "PM803"
def __init__(self):
    super(CusMasterSDK009Repo, self).__init__()
    self.log = MyLogger(self.__class__.__name__)
def __init__(self):
    super(CraneExternalSRepo, self).__init__()
    self.log = MyLogger(self.__class__.__name__)
    self.log.info("create repo done")
def __init__(self):
    self.root_dir = os.getcwd()
    self.log = MyLogger(self.__class__.__name__)
    self.git_push_root_dir = None
    self.branch_name = "master"
def __init__(self):
    super(cusR1Repo, self).__init__()
    self.log = MyLogger(self.__class__.__name__)
def __init__(self):
    super(craneCR2RCRepo, self).__init__()
    self.log = MyLogger(self.__class__.__name__)
    self.chip_ID = "craneC"
    self.log.info("create repo done")
def __init__(self):
    super(cusCraneGRepo, self).__init__()
    self.log = MyLogger(self.__class__.__name__)
    self.chip_ID = "craneG"
    self.ps_mode = "LWG"
def block_crawler():
    """
    Block crawling process
    :return:
    """
    obj_logger = MyLogger(logs_directory, category)
    obj_logger.msg_logger('Getting Block Numbers.....')

    # Get current block from RPC
    current_block = int(rpc_request(obj_logger, 'eth_blockNumber', []).get('result', 0), 16)
    crawled_blocks = int(redis_conn.get('eth_eth_blocks_crawled') or 0)

    obj_logger.msg_logger('Crawled Block Number : %s' % (crawled_blocks))
    obj_logger.msg_logger('Current Block Number : %s' % (current_block))
    obj_logger.msg_logger('Pending : %s' % (current_block - crawled_blocks))

    if current_block > crawled_blocks:
        for block_number in range(crawled_blocks + 1, current_block + 1):
            obj_logger.msg_logger('#' * 100)
            obj_logger.msg_logger('Crawling Block : %s || Current Block : %s' % (block_number, current_block))
            obj_logger.msg_logger('Pending : %s' % (current_block - block_number))
            obj_logger.msg_logger('Start :%s' % (datetime.datetime.now()))

            # Increment confirmations for transactions whose first confirmation was already sent
            for tx_hash in redis_conn.smembers('eth_eth_pct_set'):
                tx_hash = tx_hash.decode('utf-8')
                data = find_sql_join(
                    logger=obj_logger,
                    table_names=['user_master', 'address_master', 'eth_transactions'],
                    filters={'eth_transactions.tx_hash': tx_hash},
                    on_conditions={
                        'user_master.user_name': 'address_master.user_name',
                        'address_master.address': 'eth_transactions.to_address',
                    },
                )
                if not data:
                    obj_logger.error_logger('>>>>>>>>>>> Data not found in SQL for tx_hash : %s' % (tx_hash))
                    continue

                confirmations = data[0]['confirmations']
                notif_url = data[0]['notification_url']

                if confirmations < confirmation_threshold:
                    increment_sql(obj_logger, 'eth_transactions', {'tx_hash': tx_hash}, 'confirmations')
                    notif_params = {
                        'from_address': data[0]['from_address'],
                        'to_address': data[0]['to_address'],
                        'tx_hash': tx_hash,
                        'bid_id': -1,
                        'confirmations': confirmations + 1,
                        'block_number': data[0]['block_number'],
                        'value': data[0]['value'],
                        'flag': 'eth_incoming',
                    }
                    obj_logger.msg_logger('>>>>>>>> Sending Confirmation : %s || %s' % (confirmations + 1, tx_hash))
                    send_notification(obj_logger, notif_url, notif_params, queue=hook_queue)
                else:
                    obj_logger.msg_logger('>>>>>>>> %s Confirmation Sent : %s' % (confirmation_threshold, tx_hash))
                    obj_logger.msg_logger('>>>>>>>> Removing from eth_eth_pct_set : %s' % (tx_hash))
                    redis_conn.srem('eth_eth_pct_set', tx_hash)

            # Crawl the block's transactions
            block_info = rpc_request(obj_logger, 'eth_getBlockByNumber', [hex(int(block_number)), True])
            if block_info:
                block_transactions = block_info.get('result', {}).get('transactions', [])
            else:
                block_transactions = []
                obj_logger.error_logger('Data not found for block number : %s' % str(block_number))

            for tx in block_transactions:
                to_address = tx['to']
                if redis_conn.sismember('eth_eth_aw_set', to_address):
                    tx_hash = tx['hash']
                    obj_logger.msg_logger('>>>>>>>> Transaction Found in Block : %s : %s' % (block_number, tx_hash))
                    confirmations = 1
                    block_number = int(tx['blockNumber'], 16)

                    # Skip if the first confirmation was already sent by the mempool
                    # crawler (such hashes are in eth_eth_pct_set)
                    if not redis_conn.sismember('eth_eth_pct_set', tx_hash):
                        from_address = tx['from']
                        value = int(tx['value'], 16)
                        bid_id = -1
                        flag = 'eth_incoming'
                        sys_timestamp = datetime.datetime.now()

                        # If a 0-confirmation notification was sent by the mempool crawler,
                        # the hash is in eth_eth_zct_set: only update the existing DB row
                        if redis_conn.sismember('eth_eth_zct_set', tx_hash):
                            update_sql(obj_logger, 'eth_transactions', {'tx_hash': tx_hash},
                                       updated_values={'confirmations': confirmations, 'block_number': block_number})
                        else:
                            # Missed in mempool - send the first confirmation and add to eth_eth_pct_set
                            obj_logger.msg_logger('>>>>>>>> Transaction Missed from mempool. Sending %s confirmation : %s' % (confirmations, str(tx_hash)))
                            data = {
                                'from_address': from_address,
                                'to_address': to_address,
                                'tx_hash': tx_hash,
                                'bid_id': bid_id,
                                'confirmations': confirmations,
                                'block_number': block_number,
                                'value': value,
                                'flag': flag,
                                'sys_timestamp': sys_timestamp,
                            }
                            insert_sql(obj_logger, 'eth_transactions', data)

                            notif_url = find_sql_join(
                                logger=obj_logger,
                                table_names=['user_master', 'address_master'],
                                filters={'address_master.address': to_address},
                                on_conditions={'user_master.user_name': 'address_master.user_name'},
                                columns=['user_master.notification_url'],
                            )[0]['notification_url']

                            notif_params = {
                                'from_address': from_address,
                                'to_address': to_address,
                                'tx_hash': tx_hash,
                                'bid_id': -1,
                                'confirmations': confirmations,
                                'block_number': block_number,
                                'value': value,
                                'flag': flag,
                            }
                            obj_logger.msg_logger('>>>>>>>> Sending Confirmation : %s || %s' % (confirmations, tx_hash))
                            send_notification(obj_logger, notif_url, notif_params, queue=hook_queue)

                        # In either branch the hash now has one confirmation accounted
                        # for, so track it for the increment loop above
                        obj_logger.msg_logger('>>>>>>>> Adding to eth_eth_pct_set : %s' % (tx_hash))
                        redis_conn.sadd('eth_eth_pct_set', tx_hash.encode('utf-8'))

            # Mark this block as crawled
            redis_conn.set('eth_eth_blocks_crawled', block_number)
            obj_logger.msg_logger('Ends :%s' % (datetime.datetime.now()))
            obj_logger.msg_logger('#' * 100)
    else:
        obj_logger.msg_logger('#' * 100)
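# find_sql_join(), insert_sql(), update_sql() and increment_sql() form the DB layer
# used by the crawlers but are not shown in this section. Minimal sketches of two
# of them, assuming a module-level mysql.connector connection `db_conn` and
# dict-style result rows; the real helpers live in a shared module and may differ.
def increment_sql(logger, table_name, filters, column):
    where = ' AND '.join('%s = %%s' % k for k in filters)
    sql = 'UPDATE %s SET %s = %s + 1 WHERE %s' % (table_name, column, column, where)
    try:
        cursor = db_conn.cursor()
        cursor.execute(sql, list(filters.values()))
        db_conn.commit()
        return True
    except Exception as e:
        logger.error_logger('increment_sql : %s' % (e))
        return False

def find_sql_join(logger, table_names, filters, on_conditions, columns=None):
    cols = ', '.join(columns) if columns else '*'
    joins = ' AND '.join('%s = %s' % pair for pair in on_conditions.items())
    where = ' AND '.join('%s = %%s' % k for k in filters)
    sql = 'SELECT %s FROM %s WHERE %s AND %s' % (cols, ', '.join(table_names), joins, where)
    try:
        cursor = db_conn.cursor(dictionary=True)  # rows come back as dicts
        cursor.execute(sql, list(filters.values()))
        return cursor.fetchall()
    except Exception as e:
        logger.error_logger('find_sql_join : %s' % (e))
        return []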
def __init__(self):
    super(gitPushR2RCSDK008, self).__init__()
    self.log = MyLogger(self.__class__.__name__)
    self.branch_name = "r2_rc_sdk_1.008"
def __init__(self):
    super(craneGRepo, self).__init__()
    self.log = MyLogger(self.__class__.__name__)
    self.chip_ID = "craneG"
    self.ps_mode = "LWG"
    self.log.info("create repo done")
def mempool_crawler(): """ Mempool Process :return: """ obj_logger = MyLogger(logs_directory,category) obj_logger.msg_logger('#'*100) obj_logger.msg_logger('Getting Mempool Data') # Get Mempool Data mempool_transaction_data = rpc_request(obj_logger, 'eth_getBlockByNumber', ['pending', True]).get('result',{}).get('transactions',[]) obj_logger.msg_logger('Crawling Mempool Starts') for tx in mempool_transaction_data: tx_hash = tx['hash'] contract_address = tx['to'] # To address in ERC 20 Transaction is Contract Address # TODO - if tx hashes are not matching in redis, then we need to encode/decode utf-8 # Redis Check if (not redis_conn.sismember('eth_erc_zct_set',tx_hash)) and (redis_conn.sismember('eth_erc_aw_set',contract_address)): obj_logger.msg_logger('>>>>>>>> Transaction Found in Mempool : %s'%(tx_hash)) from_address = tx['from'] bid_id = -1 confirmations = 0 block_number = -1 flag = 'erc20' sys_timestamp = datetime.datetime.now() # Decoding Inputs input = tx['input'] with open(abi_file, 'r') as abi_definition: abi = json.load(abi_definition) contract_obj = web3.Web3().eth.contract(address=web3.Web3().toChecksumAddress(contract_address), abi=abi) params = contract_obj.decode_function_input(input) to_address = params[1].get('_to') value = params[1].get('_value') # Insert in DB result = insert_sql( logger=obj_logger, table_name= 'erc_transactions', data={ 'from_address': from_address, 'to_address': to_address, 'contract_address': contract_address, 'tx_hash': tx_hash, 'bid_id': bid_id, 'confirmations': confirmations, 'block_number': block_number, 'value': value, 'flag': flag, 'sys_timestamp': sys_timestamp, } ) if result: notif_url = find_sql_join(logger=obj_logger, table_names=['user_master', 'erc_address_master'], filters={'erc_address_master.address': to_address}, on_conditions={'user_master.user_name': 'erc_address_master.user_name'}, columns=['user_master.notification_url'] )[0]['notification_url'] notif_params = { 'from_address': from_address, 'to_address': to_address, 'contract_address': contract_address, 'tx_hash': tx_hash, 'bid_id': bid_id, 'confirmations': confirmations, 'block_number': block_number, 'value': value, 'flag': flag } send_notification(obj_logger,notif_url,notif_params,queue=hook_main) obj_logger.msg_logger('>>>>>>>> Adding to eth_erc_zct_set : %s' % (tx_hash)) redis_conn.sadd('eth_erc_zct_set', tx_hash.encode('utf-8')) # To cross check in Block Crawler and not to send multiple notification obj_logger.msg_logger('Crawling Mempool Ends') obj_logger.msg_logger('#' * 100)
def __init__(self):
    super(gitPushCraneFTR2RCSDK, self).__init__()
    self.log = MyLogger(self.__class__.__name__)
    self.branch_name = "crane_ft"