コード例 #1
0
    def run(self):
        """Poll the product page until the item is available, then buy it.

        Blocks until ``_check_if_available()`` returns True, refreshing the
        page every ``self.product_polling_seconds`` seconds, then walks the
        add-to-cart / checkout flow and sends a notification on success.
        """
        # Poll until the product page reports the item as purchasable.
        while not self._check_if_available():
            print("Item not available")
            time.sleep(self.product_polling_seconds)
            self.driver.refresh()
        send_notification('Item available for purchase!')

        self._find_by_id_and_click('add-to-cart-button')
        time.sleep(0.1)  # brief pause so the cart update registers
        self._find_by_id_and_click('hlb-ptc-btn-native')

        self._login_if_required()

        time.sleep(0.25)
        self._wait_for_spinner_to_leave()

        # The last radio label is taken as the fastest delivery option --
        # NOTE(review): assumes the page lists slowest-to-fastest; confirm.
        delivery_options = self.driver.find_elements_by_class_name(
            'a-radio-label')
        fastest_delivery_option = delivery_options[-1]
        fastest_delivery_option.click()

        self._wait_for_spinner_to_leave()

        place_order = WebDriverWait(self.driver, 20).until(
            expected_conditions.element_to_be_clickable(
                (By.CLASS_NAME, "place-your-order-button")))
        place_order.click()
        time.sleep(2)

        # BUG FIX: find_elements_by_class_name (plural) returns a possibly
        # empty list and never raises NoSuchElementException, so the
        # original try/except always reported success. Check the list
        # instead so the notification only fires when the success banner
        # is actually present.
        if self.driver.find_elements_by_class_name('a-color-success'):
            send_notification("Purchase successful!")
コード例 #2
0
def callback(ch, method, properties, body):
    """Handle one message from the notification queue.

    Parses the queued JSON payload, POSTs it to the consumer's
    ``notification_url``, and records the attempt in ``notification_logs``.
    On any failure the payload is re-queued on the exception queue for
    manual handling. The message is always ACKed (see ``finally``).

    :param ch: pika channel the message arrived on (used for the ACK)
    :param method: pika delivery metadata (supplies the delivery tag)
    :param properties: pika message properties (unused)
    :param body: JSON-encoded payload containing 'notification_url' plus
        the notification parameters
    :return: None
    """
    # BUG FIX: build the logger BEFORE the try block -- the original
    # created it as the first statement inside the try, so a failure there
    # (or anywhere before the assignment) made the except/finally handlers
    # raise UnboundLocalError on obj_logger.
    obj_logger = MyLogger(logs_directory, category)

    # Pre-initialize so the failure log line is well-defined even when
    # json.loads or the key lookup is what raised.
    notification_url = None
    notification_params = None

    try:
        obj_logger.msg_logger('#' * 100)
        obj_logger.msg_logger('Getting Data : %s' % (datetime.datetime.now()))

        # Split the payload into the target URL and the parameters to send.
        data = json.loads(body)
        notification_url = data.pop('notification_url')
        notification_params = data

        obj_logger.msg_logger('>>>>>>>>>> Sending Notification : %s || %s' %
                              (notification_url, notification_params))
        # Send Notification
        requests.post(notification_url,
                      data=json.dumps(notification_params),
                      headers=headers)
        obj_logger.msg_logger('>>>>>>>>>> Notification Success : %s || %s' %
                              (notification_url, notification_params))

        # Insert in DB
        # NOTE(review): the 'notification_url ' key below carries a trailing
        # space in the original -- kept as-is in case the DB column name
        # really contains it; verify against the notification_logs schema.
        insert_sql(logger=obj_logger,
                   table_name='notification_logs',
                   data={
                       'tx_hash': notification_params['tx_hash'],
                       'notification_url ': notification_url,
                       'params': str(notification_params),
                       'timestamp': datetime.datetime.now(),
                       'Status': 'Success'
                   })
    except Exception as e:
        # If there is an Exception , Send the Notification to Exception Queue - which will be handled manually
        obj_logger.error_logger(
            '>>>>>>>>>> Notification Failure : %s || %s || %s' %
            (e, notification_url, notification_params))
        obj_logger.msg_logger('>>>>>>>>>> Pushing to Exception Queue : %s' %
                              (exception_queue))
        send_notification(obj_logger,
                          notification_url,
                          notification_params,
                          queue=exception_queue)

    finally:
        obj_logger.msg_logger("#" * 100)
        # We are ACK in both the case of success or failure because if there is no error then its ok
        # But if there is an error then we are sending it to Exception Queue . So in both the case we can delete this from main queue
        ch.basic_ack(delivery_tag=method.delivery_tag)
コード例 #3
0
def mempool_crawler():
    """Crawl the ETH mempool for incoming ERC-20 token transfers.

    For every pending transaction addressed to a watched contract
    (``eth_erc_aw_set``) whose hash has not yet been notified
    (``eth_erc_zct_set``), decodes the transfer arguments, records the
    transfer in ``erc_transactions``, sends a 0-confirmation notification
    and marks the hash in Redis so the block crawler does not re-notify.

    :return: None
    """

    obj_logger = MyLogger(logs_directory,category)
    obj_logger.msg_logger('#'*100)
    obj_logger.msg_logger('Getting Mempool Data')

    # Get Mempool Data
    mempool_transaction_data = rpc_request(obj_logger, 'eth_getBlockByNumber', ['pending', True]).get('result',{}).get('transactions',[])

    # PERF FIX: load the contract ABI once per crawl -- the original
    # re-opened and re-parsed the file inside the per-transaction loop.
    with open(abi_file, 'r') as abi_definition:
        abi = json.load(abi_definition)

    obj_logger.msg_logger('Crawling Mempool Starts')

    for tx in mempool_transaction_data:

        tx_hash = tx['hash']
        contract_address = tx['to'] # To address in ERC 20 Transaction is Contract Address

        # TODO - if tx hashes are not matching in redis, then we need to encode/decode utf-8
        # Redis Check: notify at most once per hash, watched contracts only.
        if (not redis_conn.sismember('eth_erc_zct_set',tx_hash)) and (redis_conn.sismember('eth_erc_aw_set',contract_address)):
            obj_logger.msg_logger('>>>>>>>> Transaction Found in Mempool : %s'%(tx_hash))

            from_address = tx['from']
            bid_id = -1
            confirmations = 0       # still in mempool
            block_number = -1       # not mined yet
            flag = 'erc20'
            sys_timestamp = datetime.datetime.now()

            # Decode the transfer(_to, _value) arguments from the raw call
            # data (renamed from 'input' to avoid shadowing the builtin).
            tx_input = tx['input']
            contract_obj = web3.Web3().eth.contract(address=web3.Web3().toChecksumAddress(contract_address), abi=abi)
            params = contract_obj.decode_function_input(tx_input)
            to_address = params[1].get('_to')
            value = params[1].get('_value')

            # Insert in DB
            result = insert_sql(
                logger=obj_logger,
                table_name= 'erc_transactions',
                data={
                'from_address': from_address,
                'to_address': to_address,
                'contract_address': contract_address,
                'tx_hash': tx_hash,
                'bid_id': bid_id,
                'confirmations': confirmations,
                'block_number': block_number,
                'value': value,
                'flag': flag,
                'sys_timestamp': sys_timestamp,
                }
            )

            if result:
                # Look up the subscriber's webhook for the recipient address.
                notif_url = find_sql_join(logger=obj_logger,
                    table_names=['user_master', 'erc_address_master'],
                    filters={'erc_address_master.address': to_address},
                    on_conditions={'user_master.user_name': 'erc_address_master.user_name'},
                    columns=['user_master.notification_url']
                )[0]['notification_url']

                notif_params = {
                    'from_address': from_address,
                    'to_address': to_address,
                    'contract_address': contract_address,
                    'tx_hash': tx_hash,
                    'bid_id': bid_id,
                    'confirmations': confirmations,
                    'block_number': block_number,
                    'value': value,
                    'flag': flag
                }
                send_notification(obj_logger,notif_url,notif_params,queue=hook_main)
                obj_logger.msg_logger('>>>>>>>> Adding to eth_erc_zct_set : %s' % (tx_hash))
                redis_conn.sadd('eth_erc_zct_set', tx_hash.encode('utf-8')) # To cross check in Block Crawler and not to send multiple notification

    obj_logger.msg_logger('Crawling Mempool Ends')
    obj_logger.msg_logger('#' * 100)
コード例 #4
0
	# NOTE(review): this fragment is the tail of a function whose 'def'
	# line lies outside this chunk; code below is unchanged, comments only.
	try:
		# Fetch profile data for every username not yet resolved and not
		# known to be suspended.
		for username in usernames:
			if not username in username_to_user_data.keys() and username not in suspended_users:
				try:
					username_to_user_data[username] = get_user_data_from_username(username)
				except tweepy.RateLimitError:
					# Twitter rate limit hit: persist progress, then wait
					# out the standard 15-minute window before retrying.
					dump()
					count_down(minutes=15)
				except tweepy.error.TweepError as ex:
					# Any other Tweepy error (e.g. suspended or deleted
					# account): remember the name so it is skipped later.
					print username
					suspended_users.add(username)
		print_update()
		dump()
	except KeyboardInterrupt:
		# Manual stop: show progress, save what we have, and exit.
		print_update()
		dump()
		quit()

# Python 2 script driver: keep calling run() until user data has been
# gathered for every username that is not suspended.
print len(usernames)
print len(suspended_users)
while len(username_to_user_data) < (len(usernames) - len(suspended_users)):
	try:
		run()
	except KeyboardInterrupt:
		# Manual stop: show progress, persist, and exit cleanly.
		print_update()
		dump()
		quit()


send_notification('All user data gathered.')
コード例 #5
0
def block_crawler():
    """Crawl newly mined blocks for incoming ETH deposits.

    For each block between the last crawled checkpoint (Redis key
    ``eth_eth_blocks_crawled``) and the current chain head:

    1. Bump the confirmation count (and notify) for every tx hash still
       pending confirmations in ``eth_eth_pct_set``, dropping hashes that
       have reached ``confirmation_threshold``.
    2. Scan the block's transactions for transfers to watched addresses
       (``eth_eth_aw_set``); insert/update ``eth_transactions`` and send
       the first confirmation when the mempool crawler missed the tx.

    :return: None
    """

    obj_logger = MyLogger(logs_directory, category)
    obj_logger.msg_logger('Getting Block Numbers.....')

    # Get Current Block from RPC (hex string -> int)
    current_block = int(
        rpc_request(obj_logger, 'eth_blockNumber', []).get('result', 0), 16)
    crawled_blocks = int(redis_conn.get('eth_eth_blocks_crawled') or 0)

    obj_logger.msg_logger('Crawled Block Number : %s' % (crawled_blocks))
    obj_logger.msg_logger('Current Block Number : %s' % (current_block))
    obj_logger.msg_logger('Pending : %s' % (current_block - crawled_blocks))

    if current_block > crawled_blocks:

        for block_number in range(crawled_blocks + 1, current_block + 1):

            obj_logger.msg_logger('#' * 100)
            obj_logger.msg_logger('Crawling Block : %s || Current Block : %s' %
                                  (block_number, current_block))
            obj_logger.msg_logger('Pending : %s' %
                                  (current_block - block_number))
            obj_logger.msg_logger('Start :%s' % (datetime.datetime.now()))

            # Increment Confirmations for tx_id whose 1 confirmation is already sent
            for tx_hash in redis_conn.smembers('eth_eth_pct_set'):
                tx_hash = tx_hash.decode('utf-8')
                data = find_sql_join(
                    logger=obj_logger,
                    table_names=[
                        'user_master', 'address_master', 'eth_transactions'
                    ],
                    filters={'eth_transactions.tx_hash': tx_hash},
                    on_conditions={
                        'user_master.user_name': 'address_master.user_name',
                        'address_master.address': 'eth_transactions.to_address'
                    },
                )

                if not data:
                    obj_logger.error_logger(
                        '>>>>>>>>>>> Data not found in SQL for tx_hash : %s' %
                        (tx_hash))
                    continue

                confirmations = data[0]['confirmations']
                notif_url = data[0]['notification_url']

                if confirmations < confirmation_threshold:
                    # Still below the threshold: bump the DB counter and
                    # notify the subscriber of confirmation N+1.
                    increment_sql(obj_logger, 'eth_transactions',
                                  {'tx_hash': tx_hash}, 'confirmations')
                    notif_params = {
                        'from_address': data[0]['from_address'],
                        'to_address': data[0]['to_address'],
                        'tx_hash': tx_hash,
                        'bid_id': -1,
                        'confirmations': confirmations + 1,
                        'block_number': data[0]['block_number'],
                        'value': data[0]['value'],
                        'flag': 'eth_incoming'
                    }
                    obj_logger.msg_logger(
                        '>>>>>>>> Sending Confirmation : %s || %s' %
                        (confirmations + 1, tx_hash))
                    send_notification(obj_logger,
                                      notif_url,
                                      notif_params,
                                      queue=hook_queue)
                else:
                    # Threshold reached: stop tracking this hash.
                    obj_logger.msg_logger(
                        '>>>>>>>> %s Confirmation Sent : %s' %
                        (confirmation_threshold, tx_hash))
                    obj_logger.msg_logger(
                        '>>>>>>>> Removing from eth_eth_pct_set : %s' %
                        (tx_hash))
                    redis_conn.srem('eth_eth_pct_set', tx_hash)

            # Crawling Blocks
            block_info = rpc_request(obj_logger, 'eth_getBlockByNumber',
                                     [hex(int(block_number)), True])
            if block_info:
                block_transactions = block_info.get('result', {}).get(
                    'transactions', [])
            else:
                block_transactions = []
                obj_logger.error_logger(
                    'Data not found for block number : %s' % str(block_number))

            for tx in block_transactions:

                to_address = tx['to']
                if (redis_conn.sismember('eth_eth_aw_set', to_address)):

                    tx_hash = tx['hash']
                    obj_logger.msg_logger(
                        '>>>>>>>> Transaction Found in Block : %s : %s' %
                        (block_number, tx_hash))

                    confirmations = 1
                    # BUG FIX: the original rebound the outer loop variable
                    # 'block_number' here, which corrupted the
                    # 'eth_eth_blocks_crawled' checkpoint written at the end
                    # of the loop. Use a separate name for the transaction's
                    # own block number.
                    tx_block_number = int(tx['blockNumber'], 16)

                    # Check if 1 Confirmation is sent from mempool crawler - Should be found in eth_eth_pct_set
                    if not redis_conn.sismember('eth_eth_pct_set', tx_hash):

                        from_address = tx['from']
                        value = int(tx['value'], 16)
                        bid_id = -1
                        flag = 'eth_incoming'
                        sys_timestamp = datetime.datetime.now()

                        # Check if 0 Confirmation is sent from mempool crawler - Should be found in eth_eth_zct_set
                        if redis_conn.sismember('eth_eth_zct_set', tx_hash):
                            update_sql(obj_logger,
                                       'eth_transactions',
                                       {'tx_hash': tx_hash},
                                       updated_values={
                                           'confirmations': confirmations,
                                           'block_number': tx_block_number
                                       })
                        else:  # Missed in Mempool - Send 1 Confirmation and add in eth_eth_pct_set
                            obj_logger.msg_logger(
                                '>>>>>>>> Transaction Missed from mempool. Sending %s confirmation : %s'
                                % (confirmations, str(tx_hash)))
                            data = {
                                'from_address': from_address,
                                'to_address': to_address,
                                'tx_hash': tx_hash,
                                'bid_id': bid_id,
                                'confirmations': confirmations,
                                'block_number': tx_block_number,
                                'value': value,
                                'flag': flag,
                                'sys_timestamp': sys_timestamp,
                            }
                            insert_sql(obj_logger, 'eth_transactions', data)

                        # Look up the subscriber webhook for the recipient.
                        notif_url = find_sql_join(
                            logger=obj_logger,
                            table_names=['user_master', 'address_master'],
                            filters={'address_master.address': to_address},
                            on_conditions={
                                'user_master.user_name':
                                'address_master.user_name'
                            },
                            columns=['user_master.notification_url'
                                     ])[0]['notification_url']

                        notif_params = {
                            'from_address': from_address,
                            'to_address': to_address,
                            'tx_hash': tx_hash,
                            'bid_id': -1,
                            'confirmations': confirmations,
                            'block_number': tx_block_number,
                            'value': value,
                            'flag': flag
                        }
                        obj_logger.msg_logger(
                            '>>>>>>>> Sending Confirmation : %s || %s' %
                            (confirmations, tx_hash))
                        send_notification(obj_logger,
                                          notif_url,
                                          notif_params,
                                          queue=hook_queue)
                        obj_logger.msg_logger(
                            '>>>>>>>> Adding to eth_eth_pct_set : %s' %
                            (tx_hash))
                        redis_conn.sadd('eth_eth_pct_set',
                                        tx_hash.encode('utf-8'))

            # Increment Redis Blocks Crawled (now guaranteed to be the loop's
            # block number thanks to the shadowing fix above).
            redis_conn.set('eth_eth_blocks_crawled', block_number)
            obj_logger.msg_logger('Ends :%s' % (datetime.datetime.now()))
            obj_logger.msg_logger('#' * 100)
    else:
        obj_logger.msg_logger('#' * 100)
コード例 #6
0
def mempool_crawler():
    """Crawl the ETH mempool and send 0-confirmation deposit notifications.

    Every pending transaction whose recipient is a watched address
    (``eth_eth_aw_set``) and whose hash has not been notified yet
    (``eth_eth_zct_set``) is recorded in ``eth_transactions``, pushed to
    the subscriber's webhook queue, and marked in Redis so the block
    crawler does not notify it a second time.

    :return: None
    """

    logger = MyLogger(logs_directory, category)
    logger.msg_logger('#' * 100)
    logger.msg_logger('Getting Mempool Data')

    # Ask the node for the pending block; fall back to an empty tx list.
    pending_block = rpc_request(logger, 'eth_getBlockByNumber',
                                ['pending', True])
    transactions = pending_block.get('result', {}).get('transactions', [])

    logger.msg_logger('Crawling Mempool Starts')

    for transaction in transactions:

        txid = transaction['hash']
        recipient = transaction['to']

        # Guard clauses (same short-circuit order as the original check):
        # skip hashes already notified, then skip unwatched recipients.
        if redis_conn.sismember('eth_eth_zct_set', txid):
            continue
        if not redis_conn.sismember('eth_eth_aw_set', recipient):
            continue

        logger.msg_logger(
            '>>>>>>>> Transaction Found in Mempool : %s' % (txid))

        # One row describing the pending (unmined) deposit.
        row = {
            'from_address': transaction['from'],
            'to_address': recipient,
            'tx_hash': txid,
            'bid_id': -2,
            'confirmations': 0,
            'block_number': -1,
            'value': int(transaction['value'], 16),
            'flag': 'eth_incoming',
            'sys_timestamp': datetime.datetime.now(),
        }

        # Persist first; only notify when the insert succeeded.
        inserted = insert_sql(logger=logger,
                              table_name='eth_transactions',
                              data=row)
        if not inserted:
            continue

        # Resolve the subscriber's webhook for this deposit address.
        url = find_sql_join(
            logger=logger,
            table_names=['user_master', 'address_master'],
            filters={'address_master.address': recipient},
            on_conditions={
                'user_master.user_name': 'address_master.user_name'
            },
            columns=['user_master.notification_url'
                     ])[0]['notification_url']

        # Notification payload is the DB row minus the timestamp.
        payload = dict(row)
        del payload['sys_timestamp']
        send_notification(logger, url, payload, queue=hook_queue)

        logger.msg_logger(
            '>>>>>>>> Adding to eth_eth_zct_set : %s' % (txid))
        # Cross-checked by the block crawler so we never notify twice.
        redis_conn.sadd('eth_eth_zct_set', txid.encode('utf-8'))

    logger.msg_logger('Crawling Mempool Ends')
    logger.msg_logger('#' * 100)
コード例 #7
0
		    sys.stdout.flush()
	except KeyboardInterrupt:
		# NOTE(review): the matching 'try:' lies above this chunk. On a
		# manual stop: show progress, persist the follower map, and exit.
		print_update()
		json.dump(users_to_followers, open(OUTPUT_FILENAME, 'w'))
		quit()

# Build the worklist: every user whose followers have not been fetched
# yet, ordered by ascending follower count (cheapest first).
users = []
for user in user_data:
	if user not in users_to_followers.keys():
		users.append(user)
users.sort(key=lambda uname:user_data[uname]['followers_count'])
protected_users = json.load(open('data/protected_users.json'))
while (len(users) > 0):
	try:
		# Recompute the remaining users on every pass; run() consumes them.
		users = []
		for user in user_data:
			if user not in users_to_followers.keys():
				users.append(user)
			# NOTE(review): this sort sits INSIDE the for loop (re-sorting
			# after every append), unlike the initial build above -- looks
			# like an indentation slip; kept as-is since it only affects
			# speed, not the final ordering.
			users.sort(key=lambda uname:user_data[uname]['followers_count'])
		print len(users)
		run()
	except KeyboardInterrupt:
		# Manual stop: persist progress and exit.
		dump()
		quit()
	# except:
	# 	dump()
	# 	send_notification('Error encountered. Quiting.')
	# 	quit()

send_notification('\nAll followers collected.')
コード例 #8
0
	except KeyboardInterrupt:
		# NOTE(review): the matching 'try:' lies above this chunk. On a
		# manual stop: show progress, persist the friends map, and exit.
		print_update()
		json.dump(users_to_friends, open(OUTPUT_FILENAME, 'w'))
		quit()

# Build the worklist: every user whose friends (followed accounts) have
# not been fetched yet, ordered by ascending friend count.
users = []
for user in user_data:
	if user not in users_to_friends.keys():
		users.append(user)
users.sort(key=lambda uname:user_data[uname]['friends_count'])
protected_users = json.load(open('data/protected_users.json'))

while len(users) > 0:
	try:
		# Recompute the remaining users on every pass; run() consumes them.
		users = []
		for user in user_data:
			if user not in users_to_friends.keys():
				users.append(user)
			# NOTE(review): this sort sits INSIDE the for loop (re-sorting
			# after every append), unlike the initial build above -- looks
			# like an indentation slip; kept as-is since it only affects
			# speed, not the final ordering.
			users.sort(key=lambda uname:user_data[uname]['friends_count'])
		print len(users)
		run()
	except KeyboardInterrupt:
		# Manual stop: persist progress and exit.
		dump()
		quit()
	# except:
	# 	dump()
	# 	send_notification('Error encountered. Quiting.')
	# 	quit()

send_notification('\nAll friends collected.')