Example #1
class myRepo(object):
    def __init__(self, version_log="", root_path=""):
        self.root_dir = os.getcwd()
        self.release_dist_dir = ''
        self.build_root_dir = ''
        self.git_root_dir = root_path
        self._storage_list = []
        self.CUR_DATE = 0
        self.version_log = version_log
        self.menifest_xml = ''
        self.cp_version = None
        self.dsp_version = None
        self.log = MyLogger(self.__class__.__name__)

        self.verion_name = None

    def git_init(self):
        for storage in self._storage_list:
            _path = os.path.join(self.git_root_dir, storage)
            _repo = git.Repo(_path)
            _git = _repo.git
            try:
                # _git.config("--global", "user.name", "binwu")
                # _git.config("--global", "user.email", "*****@*****.**")
                _git.status()
            except Exception as e:
                self.log.error(e)
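
Example #1's git_init relies on GitPython: git.Repo(path) opens an existing clone and repo.git exposes the raw git command wrapper. A minimal standalone sketch of the same pattern, with hypothetical paths (assumes GitPython is installed):

import os
import git  # GitPython

def check_repos(git_root_dir, storage_list):
    """Run `git status` in each sub-repository and report failures."""
    for storage in storage_list:
        path = os.path.join(git_root_dir, storage)
        try:
            repo = git.Repo(path)       # raises InvalidGitRepositoryError if path is not a clone
            print(repo.git.status())    # same wrapper call the class uses
        except Exception as e:
            print("repo check failed for %s: %s" % (path, e))

# check_repos("/data/build", ["cp_sdk", "dsp", "tools"])  # hypothetical layout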
Example #2
    def __init__(self):
        super(CusRepo, self).__init__()
        self.log = MyLogger(self.__class__.__name__)

        self.git_clean()
        self.checkout_branch()
        self.log.info("create repo done")
Example #3
class CraneExternalSRepo(RepoBase):
    def __init__(self):
        super(CraneExternalSRepo, self).__init__()
        self.log = MyLogger(self.__class__.__name__)
        self.log.info("create repo done")

    def get_config(self):
        json_file = os.path.join(self.root_dir, "json", "repo.json")
        json_str = load_json(json_file)
        self.config_d = json_str["crane_external_info"]
        self.branch_name = "master"
Example #4
class craneCR2RCRepo(CusRepo):
    def __init__(self):
        super(craneCR2RCRepo, self).__init__()
        self.log = MyLogger(self.__class__.__name__)
        self.chip_ID = "craneC"
        self.log.info("create repo done")

    def get_config(self):
        json_file = os.path.join(self.root_dir, "json", "repo.json")
        json_str = load_json(json_file)
        self.config_d = json_str["cus_cranec_info"]
        self.branch_name = "r2_rc"
Example #5
    def __init__(self):
        super(DownloadToolController, self).__init__()
        self.root_dir = os.getcwd()
        self.download_tool = None

        self.log = MyLogger(self.__class__.__name__)
        self.download_tool_dict = {}
        self.release_download_tool_name = None
        self.download_tool_release_zip_dir = ""
        self.zip_tool = zipTool()

        self.update()
Example #6
def main():
    """
    Scheduling
    :return:
    """
    try:
        sched = BlockingScheduler(timezone='Asia/Kolkata')
        sched.add_job(mempool_crawler, 'interval', id='erc_mempool_crawler', seconds=3)
        sched.start()
    except Exception as e:
        obj_logger = MyLogger(logs_directory,category)
        obj_logger.error_logger('Main : %s'%(e))
Example #7
class craneGRepo(RepoBase):
    def __init__(self):
        super(craneGRepo, self).__init__()
        self.log = MyLogger(self.__class__.__name__)
        self.chip_ID = "craneG"
        self.ps_mode = "LWG"
        self.log.info("create repo done")

    def get_config(self):
        json_file = os.path.join(self.root_dir, "json", "repo.json")
        json_str = load_json(json_file)
        self.config_d = json_str["craneg_info"]
        self.branch_name = "master"
Example #8
class gitPushR2RCSDK(gitPushSDKBase):
    def __init__(self):
        super(gitPushR2RCSDK, self).__init__()
        self.log = MyLogger(self.__class__.__name__)

        self.branch_name = "r2_rc"
        # self.git.checkout(self.branch_name)

    def get_config(self):
        json_file = os.path.join(self.root_dir, "json", "git_push.json")
        json_str = load_json(json_file)
        self.config_d = json_str["cus_r2_rc_sdk"]

    def get_dsp_rf_dir(self, root_dir):
        for root, dirs, files in os.walk(root_dir, topdown=False):
            if "DSP" in dirs:
                self.dsp_rf_root_dir = os.path.join(root, "DSP")
                self.git_push_dsp_dir = os.path.dirname(self.target_dist_dir)

                dsp_bin = os.path.join(self.dsp_rf_root_dir, "CRANE",
                                       "CAT1GSM", "dsp.bin")
                rf_bin = os.path.join(self.dsp_rf_root_dir, "CRANE", "CAT1GSM",
                                      "rf.bin")

                crane_dsp_rf_dir = os.path.join(self.git_push_dsp_dir, "cus",
                                                "evb", "images")
                self.git_push_dsp_rf_list.append((dsp_bin, crane_dsp_rf_dir))
                self.git_push_dsp_rf_list.append((rf_bin, crane_dsp_rf_dir))

                dsp_bin = os.path.join(self.dsp_rf_root_dir, "CRANEG",
                                       "CAT1WCDMAGSM", "dsp.bin")
                rf_bin = os.path.join(self.dsp_rf_root_dir, "CRANEG",
                                      "CAT1WCDMAGSM", "rf.bin")

                craneg_a0_dir = os.path.join(self.git_push_dsp_dir, "cus",
                                             "evb_g_a0", "images")
                self.git_push_dsp_rf_list.append((dsp_bin, craneg_a0_dir))
                self.git_push_dsp_rf_list.append((rf_bin, craneg_a0_dir))

                dsp_bin = os.path.join(self.dsp_rf_root_dir, "CRANEG",
                                       "CAT1GSM", "dsp.bin")
                rf_bin = os.path.join(self.dsp_rf_root_dir, "CRANEG",
                                      "CAT1GSM", "rf.bin")

                cranec_dir = os.path.join(self.git_push_dsp_dir, "cus",
                                          "evb_c", "images")
                self.git_push_dsp_rf_list.append((dsp_bin, cranec_dir))
                self.git_push_dsp_rf_list.append((rf_bin, cranec_dir))

                self.log.info(self.git_push_dsp_rf_list)
                break
Example #9
    def __init__(self, version_log="", root_path=""):
        self.root_dir = os.getcwd()
        self.release_dist_dir = ''
        self.build_root_dir = ''
        self.git_root_dir = root_path
        self._storage_list = []
        self.CUR_DATE = 0
        self.version_log = version_log
        self.menifest_xml = ''
        self.cp_version = None
        self.dsp_version = None
        self.log = MyLogger(self.__class__.__name__)

        self.verion_name = None
Example #10
    def __init__(self):
        super(GitPushDspBase, self).__init__()
        self.log = MyLogger(self.__class__.__name__)

        self.decompress_tool = zipTool()

        self.release_dsp_bin = ''

        self.dsp_version = ''

        self.git_push_dsp_rf_list = []
        self.rf_type = "PM813"

        self.update()
        self.create_git()
Example #11
    def __init__(self):
        super(RepoBase, self).__init__()
        super(myRepo, self).__init__()
        self.log = MyLogger(self.__class__.__name__)

        self.update()
        self.git_init()
Example #12
    def __init__(self):
        self.root_dir = os.getcwd()
        self.version_log = ""
        self.log = MyLogger(self.__class__.__name__)
        self.verion_name = ''
        self.cur_version = ''
        self.chip_ID = None
        self.OS_type = None
        self.ps_mode = None
        self.decompress_tool = zipTool()

        self.dsp_version_pattern = re.compile(
            r"(CRANE_.*?,.*?[0-9][0-9]:[0-9][0-9]:[0-9][0-9]|CRANEG_.*?,.*?[0-9][0-9]:[0-9][0-9]:[0-9][0-9])"
        )
        self.version_pattern = None

        self.cp_version = None
        self.dsp_version = None
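
The dsp_version_pattern above is meant to pull a DSP build stamp such as "CRANE_CAT1GSM_L1_1.043.000 , Dec 13 2019 03:30:56" (see the docstring in Example #22) out of raw text. A short demonstration of the same pattern on a made-up sample string:

import re

dsp_version_pattern = re.compile(
    r"(CRANE_.*?,.*?[0-9][0-9]:[0-9][0-9]:[0-9][0-9]|CRANEG_.*?,.*?[0-9][0-9]:[0-9][0-9]:[0-9][0-9])"
)

sample = "header CRANE_CAT1GSM_L1_1.043.000 , Dec 13 2019 03:30:56 trailing bytes"
match = dsp_version_pattern.findall(sample)
print(match[0] if match else "no dsp version found")
# prints: CRANE_CAT1GSM_L1_1.043.000 , Dec 13 2019 03:30:56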
Example #13
    def __init__(self):
        super(gitPushSDKBase, self).__init__()
        self.log = MyLogger(self.__class__.__name__)

        self.zip_tool = zipTool()

        self.cp_sdk_version = None
        self.cp_sdk = None
        self.cp_sdk_root_dir = None
        self.dsp_rf_root_dir = ''
        self.git_push_dsp_dir = ''

        self.git_push_dsp_rf_list = []

        self.dsp_bin = None
        self.rf_bin = None

        self.update()
        self.create_git()
Example #14
def block_crawler():
    """
    Block Crawling process
    :return:
    """

    obj_logger = MyLogger(logs_directory, category)
    obj_logger.msg_logger('Getting Block Numbers.....')

    # Get Current Block from RPC
    current_block = int(
        rpc_request(obj_logger, 'eth_blockNumber', []).get('result', 0), 16)
    crawled_blocks = int(redis_conn.get('eth_eth_blocks_crawled') or 0)

    obj_logger.msg_logger('Crawled Block Number : %s' % (crawled_blocks))
    obj_logger.msg_logger('Current Block Number : %s' % (current_block))
    obj_logger.msg_logger('Pending : %s' % (current_block - crawled_blocks))

    if current_block > crawled_blocks:

        for block_number in range(crawled_blocks + 1, current_block + 1):

            obj_logger.msg_logger('#' * 100)
            obj_logger.msg_logger('Crawling Block : %s || Current Block : %s' %
                                  (block_number, current_block))
            obj_logger.msg_logger('Pending : %s' %
                                  (current_block - block_number))
            obj_logger.msg_logger('Start :%s' % (datetime.datetime.now()))

            # Increment confirmations for tx hashes whose first confirmation has already been sent
            for tx_hash in redis_conn.smembers('eth_eth_pct_set'):
                tx_hash = tx_hash.decode('utf-8')
                data = find_sql_join(
                    logger=obj_logger,
                    table_names=[
                        'user_master', 'address_master', 'eth_transactions'
                    ],
                    filters={'eth_transactions.tx_hash': tx_hash},
                    on_conditions={
                        'user_master.user_name': 'address_master.user_name',
                        'address_master.address': 'eth_transactions.to_address'
                    },
                )

                if not data:
                    obj_logger.error_logger(
                        '>>>>>>>>>>> Data not found in SQL for tx_hash : %s' %
                        (tx_hash))
                    continue

                confirmations = data[0]['confirmations']
                notif_url = data[0]['notification_url']

                if confirmations < confirmation_threshold:
                    increment_sql(obj_logger, 'eth_transactions',
                                  {'tx_hash': tx_hash}, 'confirmations')
                    notif_params = {
                        'from_address': data[0]['from_address'],
                        'to_address': data[0]['to_address'],
                        'tx_hash': tx_hash,
                        'bid_id': -1,
                        'confirmations': confirmations + 1,
                        'block_number': data[0]['block_number'],
                        'value': data[0]['value'],
                        'flag': 'eth_incoming'
                    }
                    obj_logger.msg_logger(
                        '>>>>>>>> Sending Confirmation : %s || %s' %
                        (confirmations + 1, tx_hash))
                    send_notification(obj_logger,
                                      notif_url,
                                      notif_params,
                                      queue=hook_queue)
                else:
                    obj_logger.msg_logger(
                        '>>>>>>>> %s Confirmation Sent : %s' %
                        (confirmation_threshold, tx_hash))
                    obj_logger.msg_logger(
                        '>>>>>>>> Removing from eth_eth_pct_set : %s' %
                        (tx_hash))
                    redis_conn.srem('eth_eth_pct_set', tx_hash)

            # Crawling Blocks
            block_info = rpc_request(obj_logger, 'eth_getBlockByNumber',
                                     [hex(int(block_number)), True])
            if block_info:
                block_transactions = block_info.get('result', {}).get(
                    'transactions', [])
            else:
                block_transactions = []
                obj_logger.error_logger(
                    'Data not found for block number : %s' % str(block_number))

            for tx in block_transactions:

                to_address = tx['to']
                if (redis_conn.sismember('eth_eth_aw_set', to_address)):

                    tx_hash = tx['hash']
                    obj_logger.msg_logger(
                        '>>>>>>>> Transaction Found in Block : %s : %s' %
                        (block_number, tx_hash))

                    confirmations = 1
                    block_number = int(tx['blockNumber'], 16)

                    # Check if 1 Confirmation is sent from mempool crawler - Should be found in eth_eth_pct_set
                    if not redis_conn.sismember('eth_eth_pct_set', tx_hash):

                        from_address = tx['from']
                        value = int(tx['value'], 16)
                        bid_id = -1
                        flag = 'eth_incoming'
                        sys_timestamp = datetime.datetime.now()

                        # Check if 0 Confirmation is sent from mempool crawler - Should be found in eth_eth_zct_set
                        if redis_conn.sismember('eth_eth_zct_set', tx_hash):
                            update_sql(obj_logger,
                                       'eth_transactions',
                                       {'tx_hash': tx_hash},
                                       updated_values={
                                           'confirmations': confirmations,
                                           'block_number': block_number
                                       })
                        else:  # Missed in Mempool - Send 1 Confirmation and add in eth_eth_pct_set
                            obj_logger.msg_logger(
                                '>>>>>>>> Transaction Missed from mempool. Sending %s confirmation : %s'
                                % (confirmations, str(tx_hash)))
                            data = {
                                'from_address': from_address,
                                'to_address': to_address,
                                'tx_hash': tx_hash,
                                'bid_id': bid_id,
                                'confirmations': confirmations,
                                'block_number': block_number,
                                'value': value,
                                'flag': flag,
                                'sys_timestamp': sys_timestamp,
                            }
                            insert_sql(obj_logger, 'eth_transactions', data)

                        notif_url = find_sql_join(
                            logger=obj_logger,
                            table_names=['user_master', 'address_master'],
                            filters={'address_master.address': to_address},
                            on_conditions={
                                'user_master.user_name':
                                'address_master.user_name'
                            },
                            columns=['user_master.notification_url'
                                     ])[0]['notification_url']

                        notif_params = {
                            'from_address': from_address,
                            'to_address': to_address,
                            'tx_hash': tx_hash,
                            'bid_id': -1,
                            'confirmations': confirmations,
                            'block_number': block_number,
                            'value': value,
                            'flag': flag
                        }
                        obj_logger.msg_logger(
                            '>>>>>>>> Sending Confirmation : %s || %s' %
                            (confirmations, tx_hash))
                        send_notification(obj_logger,
                                          notif_url,
                                          notif_params,
                                          queue=hook_queue)
                        obj_logger.msg_logger(
                            '>>>>>>>> Adding to eth_eth_pct_set : %s' %
                            (tx_hash))
                        redis_conn.sadd('eth_eth_pct_set',
                                        tx_hash.encode('utf-8'))

            # Increment Redis Blocks Crawled
            redis_conn.set('eth_eth_blocks_crawled', block_number)
            obj_logger.msg_logger('Ends :%s' % (datetime.datetime.now()))
            obj_logger.msg_logger('#' * 100)
    else:
        obj_logger.msg_logger('#' * 100)
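
block_crawler leans on several project helpers that are not shown in this listing: rpc_request, find_sql_join, insert_sql, update_sql, increment_sql and send_notification. For orientation only, here is a minimal sketch of what an rpc_request-style JSON-RPC helper could look like; the node URL, timeout and error handling are assumptions, not the project's actual implementation:

import requests

ETH_NODE_URL = "http://127.0.0.1:8545"  # hypothetical Ethereum node endpoint

def rpc_request(logger, method, params):
    """POST a JSON-RPC call to the node and return the parsed response dict."""
    payload = {"jsonrpc": "2.0", "method": method, "params": params, "id": 1}
    try:
        response = requests.post(ETH_NODE_URL, json=payload, timeout=10)
        return response.json()
    except Exception as e:
        logger.error_logger("rpc_request failed for %s : %s" % (method, e))
        return {}

# rpc_request(obj_logger, 'eth_blockNumber', [])  ->  {'jsonrpc': '2.0', 'id': 1, 'result': '0x...'}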
Example #15
class DownloadToolController(object):
    def __init__(self):
        super(DownloadToolController, self).__init__()
        self.root_dir = os.getcwd()
        self.download_tool = None

        self.log = MyLogger(self.__class__.__name__)
        self.download_tool_dict = {}
        self.release_download_tool_name = None
        self.download_tool_release_zip_dir = ""
        self.zip_tool = zipTool()

        self.update()

    def update(self):
        json_file = os.path.join(self.root_dir,"json","download_tool.json")
        json_str = load_json(json_file)

        self.tmp = os.path.join(self.root_dir, json_str["tmp"])
        self.download_tool_dir = os.path.join(self.root_dir, json_str["download_tool_dir"])
        self.partition_config = os.path.join(self.root_dir, json_str["partition_config"])
        self.template_config = os.path.join(self.root_dir, json_str["template_config"])

        self.download_tool_release_dir = json_str["download_tool_release_dir"]
        self.download_tool_file_name = json_str["download_tool_file_name"]
        self.win_type = json_str["win_type"]



    def unzip_download_tool(self,zip_file,extract_dir=None):
        self.zip_tool.unpack_archive(os.path.join(self.download_tool_dir,zip_file),extract_dir)

    def update_download_tool(self):
        tools_list = [_file for _file in os.listdir(self.download_tool_release_dir) if
                      self.win_type in _file and "aboot-tools" in _file and _file.endswith(".exe")]
        tools_list.sort(key=lambda fn: os.path.getmtime(os.path.join(self.download_tool_release_dir, fn)))
        self.log.debug(tools_list)
        if not self.download_tool_file_name:
            self.download_tool_file_name = tools_list[-1]
        file_name = self.download_tool_file_name
        self.download_tool_release_zip_dir = os.path.join(self.download_tool_dir,
                                                          self.download_tool_file_name.replace(".exe",""))
        # self.log.info(file_name)
        file_name_l = [file_name,file_name.replace("x64","x86")]
        self.download_tool = []
        for file_name in file_name_l:
            zip_file = os.path.join(self.download_tool_dir,file_name)
            # self.log.info(zip_file)
            download_tool_fname = zip_file.replace(".exe","")
            if not os.path.exists(zip_file):
                shutil.copy2(os.path.join(self.download_tool_release_dir,file_name),self.download_tool_dir)
                time.sleep(3)
                self.unzip_download_tool(zip_file)
            elif not os.path.exists(download_tool_fname):
                time.sleep(3)
                self.unzip_download_tool(zip_file)
            self.download_tool.append(download_tool_fname)
        for _tool in self.download_tool:
            self.log.info(_tool)

    def prepare_download_tool(self,images,borad="crane_evb_z2"):
        """borad : crane_evb_z2, bird_phone, crane_evb_dual_sim"""
        self.log.debug("\n".join(images))
        self.log.debug("\n".join(self.download_tool))
        for download_tool_dir in self.download_tool:
            if not os.path.exists(download_tool_dir):
                self.log.warning("%s not exists" % download_tool_dir)
                continue
            dist_dir = os.path.join(download_tool_dir,"images")
            dist_bin_l = [os.path.join(dist_dir,os.path.basename(_file)) for _file in images]
            for _file in dist_bin_l:
                if os.path.exists(_file):
                    os.remove(_file)
            for src_bin,dist_bin in zip(images,dist_bin_l):
                if os.path.exists(src_bin):
                    if os.path.exists(dist_bin):
                        os.remove(dist_bin)
                    shutil.copy2(src_bin,dist_bin)
                else:
                    self.log.warning("%s not exists" % src_bin)
            if os.path.isdir(self.partition_config):
                for _file in os.listdir(self.partition_config):
                    shutil.copy2(os.path.join(self.partition_config,_file),
                                 os.path.join(download_tool_dir,"config","partition",_file))
            elif os.path.isfile(self.partition_config):
                shutil.copy2(self.partition_config,os.path.join(download_tool_dir,"config","partition",
                                                                os.path.basename(self.partition_config)))
            else:
                self.log.error("self.partition_config:%s error" % self.partition_config)

            if os.path.isdir(self.template_config):
                for _file in os.listdir(self.template_config):
                    shutil.copy2(os.path.join(self.template_config,_file),
                                 os.path.join(download_tool_dir,"config","template",_file))
            elif os.path.isfile(self.template_config):
                shutil.copy2(self.template_config,
                             os.path.join(download_tool_dir,"config","template",os.path.basename(self.template_config)))
            else:
                self.log.error("self.template_config:%s error" % self.template_config)

    def release_zip(self, dist_dir, project_type = "CRANE_A0_16MB", zip_name = None):
        assert os.path.exists(
            self.download_tool_release_zip_dir)," can not find download tool %s " % self.download_tool_release_zip_dir
        os.chdir(self.download_tool_release_zip_dir)
        if "craneg_evb" in dist_dir:
            project_type = "CRANEG_Z1_32+8MB"
        if zip_name:
            zip_file = zip_name
        else:
            zip_file = "ASR_CRANE_EVB_%s.zip" % project_type
        zip_file = os.path.join(dist_dir,zip_file)
        release_log = os.path.join(self.tmp,"release_log.txt")
        self.log.info("create zip %s" % zip_file)
        release_cmd = "arelease.exe -c . -g -p ASR_CRANE_EVB -v %s %s > %s" % (
            project_type, zip_file, release_log)
        os.system(release_cmd)

    def release_download_tool(self,release_name,borad="crane_evb_z2",dist_dir=None, download_tool_l=[]):
        """borad : crane_evb_z2, bird_phone, crane_evb_dual_sim"""
        import time
        date = time.strftime("%Y%m%d_%H%M%S")
        release_file_name = "%s_%s_DOWNLOAD_TOOL" % (release_name.upper(),borad.upper())
        release_dir = os.path.join(self.download_tool_dir,release_file_name)
        os.mkdir(release_dir) if not os.path.exists(release_dir) else None
        self.log.info(release_file_name)
        # self.log.info(release_dir)
        if download_tool_l:
            download_tool_src = download_tool_l[:]
        else:
            download_tool_src = self.download_tool
        for _tool in download_tool_src:
            dist_file = os.path.join(release_dir,os.path.basename(_tool))
            # self.log.info(dist_file)
            if not os.path.exists(dist_file):
                shutil.copytree(_tool,dist_file)

        dist = os.path.join(dist_dir,release_file_name)
        self.zip_tool.make_archive_e(dist,"zip",release_dir)
        shutil.rmtree(release_dir)
        self.release_download_tool_name = dist + ".zip"
        self.download_tool_dict[borad] = self.release_download_tool_name
        # self.log.info(self.download_tool_dict)
        return self.release_download_tool_name
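
A hedged usage sketch for DownloadToolController, using only the methods defined above; the image paths, release name and dist directory are hypothetical:

controller = DownloadToolController()      # __init__ loads json/download_tool.json via update()
controller.update_download_tool()          # copy and unzip the newest aboot-tools package
controller.prepare_download_tool(
    images=[r"D:\build\out\cp.bin", r"D:\build\out\dsp.bin"],  # hypothetical image list
    borad="crane_evb_z2")
zip_path = controller.release_download_tool(
    "crane_a0", borad="crane_evb_z2", dist_dir=r"D:\release")  # hypothetical dist dir
print(zip_path)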
Example #16
def mempool_crawler():
    """
    Mempool Process
    :return:
    """

    obj_logger = MyLogger(logs_directory, category)
    obj_logger.msg_logger('#' * 100)
    obj_logger.msg_logger('Getting Mempool Data')

    # Get Mempool Data
    mempool_transaction_data = rpc_request(obj_logger, 'eth_getBlockByNumber',
                                           ['pending', True]).get(
                                               'result',
                                               {}).get('transactions', [])

    obj_logger.msg_logger('Crawling Mempool Starts')

    for tx in mempool_transaction_data:

        tx_hash = tx['hash']
        to_address = tx['to']

        # Redis Check
        if (not redis_conn.sismember('eth_eth_zct_set', tx_hash)) and (
                redis_conn.sismember('eth_eth_aw_set', to_address)):
            obj_logger.msg_logger(
                '>>>>>>>> Transaction Found in Mempool : %s' % (tx_hash))

            from_address = tx['from']
            value = int(tx['value'], 16)
            bid_id = -2
            confirmations = 0
            block_number = -1
            flag = 'eth_incoming'
            sys_timestamp = datetime.datetime.now()

            # Insert in DB
            result = insert_sql(logger=obj_logger,
                                table_name='eth_transactions',
                                data={
                                    'from_address': from_address,
                                    'to_address': to_address,
                                    'tx_hash': tx_hash,
                                    'bid_id': bid_id,
                                    'confirmations': confirmations,
                                    'block_number': block_number,
                                    'value': value,
                                    'flag': flag,
                                    'sys_timestamp': sys_timestamp,
                                })

            if result:
                notif_url = find_sql_join(
                    logger=obj_logger,
                    table_names=['user_master', 'address_master'],
                    filters={'address_master.address': to_address},
                    on_conditions={
                        'user_master.user_name': 'address_master.user_name'
                    },
                    columns=['user_master.notification_url'
                             ])[0]['notification_url']

                notif_params = {
                    'from_address': from_address,
                    'to_address': to_address,
                    'tx_hash': tx_hash,
                    'bid_id': bid_id,
                    'confirmations': confirmations,
                    'block_number': block_number,
                    'value': value,
                    'flag': flag
                }
                send_notification(obj_logger,
                                  notif_url,
                                  notif_params,
                                  queue=hook_queue)
                obj_logger.msg_logger(
                    '>>>>>>>> Adding to eth_eth_zct_set : %s' % (tx_hash))
                redis_conn.sadd(
                    'eth_eth_zct_set', tx_hash.encode('utf-8')
                )  # cross-checked in the block crawler so duplicate notifications are not sent

    obj_logger.msg_logger('Crawling Mempool Ends')
    obj_logger.msg_logger('#' * 100)
Example #17
    def __init__(self):
        super(gitPushR2RCSDK, self).__init__()
        self.log = MyLogger(self.__class__.__name__)

        self.branch_name = "master"
Example #18
    def __init__(self):
        super(gitPushR2RCSDK008, self).__init__()
        self.log = MyLogger(self.__class__.__name__)

        self.branch_name = "r2_rc_sdk_1.008"
Example #19
    def __init__(self):
        super(gitPushCraneMDsp, self).__init__()
        self.log = MyLogger(self.__class__.__name__)

        self.rf_type = "PM803"
Example #20
    def __init__(self):
        super(cusR1Repo, self).__init__()
        self.log = MyLogger(self.__class__.__name__)
Example #21
class gitPushSDKBase(GitPushBase):
    def __init__(self):
        super(gitPushSDKBase, self).__init__()
        self.log = MyLogger(self.__class__.__name__)

        self.zip_tool = zipTool()

        self.cp_sdk_version = None
        self.cp_sdk = None
        self.cp_sdk_root_dir = None
        self.dsp_rf_root_dir = ''
        self.git_push_dsp_dir = ''

        self.git_push_dsp_rf_list = []

        self.dsp_bin = None
        self.rf_bin = None

        self.update()
        self.create_git()

        # self.print_info()

    def get_config(self):
        self.config_d = {}

    def update(self):
        self.get_config()
        self.cp_sdk_release_dir = os.path.normpath(
            self.config_d["release_dir"])
        self.git_push_cp_dir = os.path.normpath(
            self.config_d["git_push_root_dir"])
        self.git_push_root_dir = self.git_push_cp_dir
        self.target_dist_dir = os.path.normpath(self.config_d["target_dir"])
        self.cp_sdk_dir = os.path.normpath(
            os.path.join(self.root_dir, self.config_d["local_dir"]))
        self.cp_version_file = self.config_d["verson_file"]
        self.release_target = self.config_d["release_target"]

        self.push_cmd = self.config_d["git_push_cmd"]

    def get_dsp_rf_dir(self, root_dir):
        for root, dirs, files in os.walk(root_dir, topdown=False):
            if "DSP" in dirs:
                self.dsp_rf_root_dir = os.path.join(root, "DSP")
                self.dsp_bin = os.path.join(self.dsp_rf_root_dir, "dsp.bin")
                self.rf_bin = os.path.join(self.dsp_rf_root_dir, "rf.bin")
                self.git_push_dsp_dir = os.path.dirname(self.target_dist_dir)
                self.git_push_dsp_dir = os.path.join(self.git_push_dsp_dir,
                                                     "cus", "evb", "images")
                self.git_push_dsp_rf_list.append(
                    (self.dsp_bin, self.git_push_dsp_dir))
                self.git_push_dsp_rf_list.append(
                    (self.rf_bin, self.git_push_dsp_dir))
                break

    def find_new_cp_sdk(self):
        """Find the newest SDK archive, e.g. ASR3603_MINIGUI_20200603_SDK.zip"""
        cp_sdk_list = [
            _file for _file in os.listdir(self.cp_sdk_release_dir)
            if (_file.endswith(".zip") or _file.endswith(".7z"))
            and _file.startswith(self.release_target)
        ]
        cp_sdk_list.sort(key=lambda fn: os.path.getmtime(
            os.path.join(self.cp_sdk_release_dir, fn)))
        assert cp_sdk_list, "can not find sdk"
        self.cp_sdk = cp_sdk_list[-1]
        self.log.debug("newest sdk: %s" % self.cp_sdk)

    def clean_git_push_cp(self):
        self.git_clean()
        for _file in os.listdir(self.target_dist_dir):
            if _file in [".git", "X.bat"]:
                continue
            _file = os.path.join(self.target_dist_dir, _file)
            if os.path.isfile(_file):
                os.remove(_file)
            else:
                shutil.rmtree(_file)
        self.log.info("clean cp done")

    def copy_sdk(self):
        self.log.info("copy %s..." % self.cp_sdk)
        shutil.copy2(os.path.join(self.cp_sdk_release_dir, self.cp_sdk),
                     self.cp_sdk_dir)
        time.sleep(3)
        self.log.info("copy done.")

    def copy_sdk_to_git_push_cp(self, cp_sdk):
        try:
            root_dir = os.path.join(self.cp_sdk_dir, cp_sdk)
            for _file in os.listdir(root_dir):
                fname = os.path.join(root_dir, _file)
                if os.path.isfile(fname):
                    shutil.copy2(fname,
                                 os.path.join(self.target_dist_dir, _file))
                elif os.path.isdir(fname):
                    shutil.copytree(fname,
                                    os.path.join(self.target_dist_dir, _file))
                else:
                    self.log.warning("%s" % fname)

            self.log.info("%s" % self.dsp_rf_root_dir)
            if os.path.exists(self.dsp_rf_root_dir):
                dir_path = os.path.dirname(self.dsp_rf_root_dir)
                self.log.info("%s" % dir_path)
                dist_dir = os.path.join(self.target_dist_dir,
                                        os.path.basename(dir_path))
                self.log.info("%s" % dist_dir)
                if os.path.exists(dist_dir):
                    shutil.rmtree(dist_dir)
                shutil.copytree(dir_path, dist_dir)
                while self.git_push_dsp_rf_list:
                    src_file, dist = self.git_push_dsp_rf_list.pop()
                    self.log.info(src_file)
                    self.log.info(dist)
                    if os.path.exists(dist):
                        if os.path.isfile(src_file):
                            shutil.copy2(src_file, dist)
                        else:
                            self.log.warning("%s" % src_file)

            self.log.info("copy_sdk_to_git_push_cp done.")
        except Exception as e:
            self.log.error(e)
            self.log.error("copy_sdk_to_git_push_cp error")
            raise Exception("copy_sdk_to_git_push_cp error")
Example #22
class GitPushDspBase(GitPushBase):
    def __init__(self):
        super(GitPushDspBase, self).__init__()
        self.log = MyLogger(self.__class__.__name__)

        self.decompress_tool = zipTool()

        self.release_dsp_bin = ''

        self.dsp_version = ''

        self.git_push_dsp_rf_list = []
        self.rf_type = "PM813"

        self.update()
        self.create_git()

        # self.print_info()

    def get_config(self):
        self.config_d = {}

    def update(self):
        self.get_config()
        self.release_dir = os.path.normpath(self.config_d["release_dir"])
        self.target_dist_dir = os.path.normpath(self.config_d["target_dir"])
        self.git_push_root_dir = os.path.normpath(
            self.config_d["git_push_root_dir"])
        self.local_dsp_bin = os.path.normpath(self.config_d["verson_file"])
        self.push_cmd = self.config_d["git_push_cmd"]

        self.dsp_version_pattern = re.compile(self.config_d["version_pattern"])
        self.release_target_file = self.config_d["release_target"]

    def update_git_push_dsp_rf_list(self):
        root_dir = os.path.dirname(self.release_dsp_bin)

        self.git_push_dsp_rf_list.append(
            (self.release_dsp_bin, self.local_dsp_bin))

        rf_bin = os.path.join(root_dir, self.rf_type, "rf.bin")
        self.git_push_dsp_rf_list.append((rf_bin, self.target_dist_dir))

        rf_verson_file = os.path.join(root_dir, self.rf_type, "RF_Version.txt")
        self.git_push_dsp_rf_list.append(
            (rf_verson_file, self.target_dist_dir))

        rf_excel_file = os.path.join(root_dir, self.rf_type, "rf.xlsm")
        self.git_push_dsp_rf_list.append((rf_excel_file, self.target_dist_dir))

    def get_dsp_version(self, dsp_bin=None):
        """CRANE_CAT1GSM_L1_1.043.000 , Dec 13 2019 03:30:56"""
        if not dsp_bin:
            dsp_bin = self.local_dsp_bin
        dsp_version_file = os.path.join(self.root_dir, "tmp",
                                        "dsp_version_tmp.bin")
        self.decompress_tool.decompress_bin(dsp_bin, dsp_version_file)
        assert os.path.exists(dsp_version_file), "can not find {}".format(
            dsp_version_file)
        with open(dsp_version_file, "rb") as fob:
            text = fob.read()
        match = self.dsp_version_pattern.findall(text)
        if match:
            self.log.debug(match[0])
            version_info = match[0]
        else:
            self.log.error("cannot find dsp version information")
            version_info = None
        # os.remove(dsp_version_file)
        return version_info

    def get_release_dsp_rf(self):
        dsp_release_bin_l = []
        release_dir_list = [os.path.join(self.release_dir,_dir) for _dir in os.listdir(self.release_dir) \
                            if os.path.isdir(os.path.join(self.release_dir,_dir))]
        release_dir_list.sort(key=lambda fn: os.path.getmtime(fn))
        self.log.debug("release_dir_list len: %s" % len(release_dir_list))
        self.log.debug(release_dir_list[-10:])
        release_files = [
            os.path.normpath(os.path.join(_dir, self.release_target_file))
            for _dir in release_dir_list[-10:]
        ]
        for _file in release_files:
            if os.path.exists(_file):
                root = os.path.dirname(_file)
                rf = os.path.join(root, self.rf_type, "rf.bin")
                if os.path.exists(rf):
                    dsp_release_bin_l.append(_file)
        dsp_release_bin_l.sort(key=lambda fn: os.path.getmtime(fn))
        # self.log.info("\n".join(dsp_release_bin_l))
        self.release_dsp_bin = dsp_release_bin_l[-1]

    def condition(self):
        self.update()
        self.git_clean()
        local_dsp_version = self.get_dsp_version()
        self.get_release_dsp_rf()
        release_dsp_version = self.get_dsp_version(self.release_dsp_bin)
        if local_dsp_version == release_dsp_version:
            return False
        else:
            self.log.info("local_dsp_version  : %s" % local_dsp_version)
            self.log.info("release_dsp_version: %s" % release_dsp_version)
            self.dsp_version = release_dsp_version
            return True

    def close_push(self):
        to_address = ",".join(["*****@*****.**"])
        subject = "%s auto push" % self.dsp_version
        msg = r"Hi %s, %s auto push done!" % (to_address.split("@")[0],
                                              self.dsp_version)
        send_email_tool(to_address, subject.upper(), msg)

    def git_push_start(self):
        if not self.condition():
            time.sleep(10)
            return

        self.log.info("wait for dsp copy...")
        time.sleep(30)
        self.update_git_push_dsp_rf_list()
        while self.git_push_dsp_rf_list:
            src_file, dist = self.git_push_dsp_rf_list.pop()
            self.log.info(src_file)
            self.log.info(dist)
            if os.path.exists(dist):
                if os.path.isfile(src_file):
                    shutil.copy2(src_file, dist)
                else:
                    self.log.warning("%s" % src_file)

        self.log.info("=" * 50)
        self.log.info("git push dsp...")
        try:
            self.git_add()
            match = re.findall(self.dsp_version_pattern, self.dsp_version)
            if match:
                dsp_version = match[0]
            else:
                dsp_version = str(time.asctime())
            commit_info = "update dsp dailybuild %s" % dsp_version
            self.git_commit(commit_info)
            self.git_push()
            self.close_push()
            return True
        except Exception as e:
            self.log.error(e)
            self.log.error("git push error")
            self.git_clean()
            return None
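
get_dsp_version above decompresses the DSP image and scans the raw content with the regex loaded from the JSON config. Note that under Python 3 the file is read as bytes, so the pattern would have to be a bytes pattern as well; a minimal sketch of that detail (the file path is illustrative and the pattern mirrors Example #12):

import re

def read_version_stamp(bin_path):
    """Scan a decompressed DSP image for an embedded CRANE/CRANEG version stamp."""
    # bytes pattern, because the image is opened in binary mode
    pattern = re.compile(
        rb"(CRANE_.*?,.*?[0-9][0-9]:[0-9][0-9]:[0-9][0-9]"
        rb"|CRANEG_.*?,.*?[0-9][0-9]:[0-9][0-9]:[0-9][0-9])")
    with open(bin_path, "rb") as fob:
        blob = fob.read()
    match = pattern.findall(blob)
    return match[0].decode("ascii", "replace") if match else None

# read_version_stamp("tmp/dsp_version_tmp.bin")  # hypothetical path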
Example #23
    def __init__(self):
        super(gitPushCraneFTR2RCSDK, self).__init__()
        self.log = MyLogger(self.__class__.__name__)
        self.branch_name = "crane_ft"
Example #24
class GitPushBase(object):
    def __init__(self):
        self.root_dir = os.getcwd()
        self.log = MyLogger(self.__class__.__name__)
        self.git_push_root_dir = None
        self.branch_name = "master"

    def create_git(self):
        self.git = git.Repo(self.git_push_root_dir).git
        # self.git.config("--global","core.autocrlf","false")
        # self.git.config("--global","user.name","binwu")
        # self.git.config("--global","user.email","*****@*****.**")

    def git_add(self, *file_name_l):
        self.log.info("git add...")
        if file_name_l:
            self.git.add(*file_name_l)
        else:
            self.git.add("--all")
        self.log.info("git add done")

    def git_commit(self, commit_info):
        self.log.info("git commit...")
        self.log.info("commit info: %s" % commit_info)
        self.git.commit("-m", commit_info)
        self.log.info("git commit done")

    def git_push(self):
        self.log.info("git push...")
        self.git.push(*self.push_cmd)
        self.log.info("git push done")

    def git_clean(self):
        try:
            self.git.clean("-xdf")
            self.git.reset("--hard", "origin/%s" % self.branch_name)
            self.git.pull()
        except Exception as e:
            self.log.error(e)
            self.git.reset("--hard", "origin/%s" % self.branch_name)
            raise Exception("git_clean error")
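
GitPushBase leaves the repository path, branch and push_cmd to its subclasses, which fill them in from json/git_push.json-style configs (see Examples #8, #21 and #22). A minimal hedged sketch of how a subclass would drive the add/commit/push helpers; the class name, repo path and push arguments are hypothetical:

class DemoPush(GitPushBase):  # hypothetical subclass for illustration only
    def __init__(self):
        super(DemoPush, self).__init__()
        self.git_push_root_dir = r"D:\repos\demo"   # assumed local clone
        self.push_cmd = ["origin", "master"]        # arguments handed to `git push`
        self.create_git()

# pusher = DemoPush()
# pusher.git_clean()
# pusher.git_add()
# pusher.git_commit("update daily build")
# pusher.git_push()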
Example #25
def callback(ch, method, properties, body):
    """
    This method is called every time there is a new element in queue (var : queue)
    :param ch:
    :param method:
    :param properties:
    :param body:
    :return:
    """
    try:

        # Logger
        obj_logger = MyLogger(logs_directory, category)
        obj_logger.msg_logger('#'*100)
        obj_logger.msg_logger('In Exception Queue : %s'%(queue))
        obj_logger.msg_logger('Getting Data : %s'%(datetime.datetime.now()))

        # Data from Queue
        data = json.loads(body)
        notification_url = data['notification_url']
        data.pop('notification_url')
        notification_params = data

        obj_logger.msg_logger('>>>>>>>>>> Sending Notification : %s || %s' % (notification_url, notification_params))
        # Send Notification
        requests.post(notification_url, data=json.dumps(notification_params), headers=headers)
        obj_logger.msg_logger('>>>>>>>>>> Notification Success : %s || %s' % (notification_url, notification_params))
        # Insert in DB
        insert_sql(logger=obj_logger, table_name='notification_logs', data={
            'tx_hash': notification_params['tx_hash'],
            'notification_url ': notification_url,
            'params': str(notification_params),
            'timestamp': datetime.datetime.now(),
            'Status': 'Success'
        })
        ch.basic_ack(delivery_tag=method.delivery_tag)
    except Exception as e:
        obj_logger.error_logger('>>>>>>>>>> Notification Failure : %s || %s || %s' % (e, notification_url, notification_params))
    finally:
        obj_logger.msg_logger("#" * 100)
Example #26
    def __init__(self):
        super(gitPushCraneDCXODsp, self).__init__()
        self.log = MyLogger(self.__class__.__name__)
Example #27
def callback(ch, method, properties, body):
    """
    This method is called every time there is a new element in queue (var : queue)
    :param ch:
    :param method:
    :param properties:
    :param body:
    :return:
    """
    try:

        # Logs
        obj_logger = MyLogger(logs_directory, category)
        obj_logger.msg_logger('#' * 100)
        obj_logger.msg_logger('Getting Data : %s' % (datetime.datetime.now()))

        # Data from Queue
        data = json.loads(body)
        notification_url = data['notification_url']
        data.pop('notification_url')
        notification_params = data

        obj_logger.msg_logger('>>>>>>>>>> Sending Notification : %s || %s' %
                              (notification_url, notification_params))
        # Send Notification
        requests.post(notification_url,
                      data=json.dumps(notification_params),
                      headers=headers)
        obj_logger.msg_logger('>>>>>>>>>> Notification Success : %s || %s' %
                              (notification_url, notification_params))

        # Insert in DB
        insert_sql(logger=obj_logger,
                   table_name='notification_logs',
                   data={
                       'tx_hash': notification_params['tx_hash'],
                       'notification_url ': notification_url,
                       'params': str(notification_params),
                       'timestamp': datetime.datetime.now(),
                       'Status': 'Success'
                   })
    except Exception as e:
        # If there is an exception, send the notification to the exception queue, which is handled manually
        obj_logger.error_logger(
            '>>>>>>>>>> Notification Failure : %s || %s || %s' %
            (e, notification_url, notification_params))
        obj_logger.msg_logger('>>>>>>>>>> Pushing to Exception Queue : %s' %
                              (exception_queue))
        send_notification(obj_logger,
                          notification_url,
                          notification_params,
                          queue=exception_queue)

    finally:
        obj_logger.msg_logger("#" * 100)
        # ACK in both the success and failure case: on success the notification was delivered,
        # and on failure it has been pushed to the exception queue, so either way the message
        # can be removed from the main queue
        ch.basic_ack(delivery_tag=method.delivery_tag)
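
The (ch, method, properties, body) signature and ch.basic_ack(delivery_tag=method.delivery_tag) match the pika RabbitMQ client. Assuming pika is indeed the consumer library (the connection details and queue name below are hypothetical), the callback would be registered roughly like this:

import pika

connection = pika.BlockingConnection(pika.ConnectionParameters(host="localhost"))
channel = connection.channel()
channel.queue_declare(queue="eth_hook_queue", durable=True)  # hypothetical queue name
channel.basic_consume(queue="eth_hook_queue", on_message_callback=callback)
channel.start_consuming()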
Example #28
class GitPushDMSDK(gitPushR2RCSDK):
    def __init__(self):
        super(gitPushR2RCSDK, self).__init__()
        self.log = MyLogger(self.__class__.__name__)

        self.branch_name = "master"
        # self.git.checkout(self.branch_name)

    def get_config(self):
        json_file = os.path.join(self.root_dir, "json", "git_push.json")
        json_str = load_json(json_file)
        self.config_d = json_str["dm_sdk"]

    def get_dsp_rf_dir(self, root_dir):
        self.git_push_dsp_dir = os.path.dirname(self.target_dist_dir)
        for root, dirs, files in os.walk(root_dir, topdown=False):
            if "DSP" in dirs:
                self.dsp_rf_root_dir = os.path.join(root, "DSP")

                dsp_bin = os.path.join(self.dsp_rf_root_dir, "CRANEM",
                                       "CAT1GSM", "dsp.bin")
                rf_bin = os.path.join(self.dsp_rf_root_dir, "CRANEM",
                                      "CAT1GSM", "PM803", "rf.bin")

                cranem_dm_dir = os.path.join(self.git_push_dsp_dir, "cus",
                                             "evb_dm", "images")
                self.git_push_dsp_rf_list.append((dsp_bin, cranem_dm_dir))
                self.git_push_dsp_rf_list.append((rf_bin, cranem_dm_dir))

            if "CP" in dirs:
                cp_root_dir = os.path.join(root, "CP")

                boot33_bin = os.path.join(cp_root_dir, "CAT1GSM", "boot33.bin")
                apn_bin = os.path.join(cp_root_dir, "CAT1GSM", "apn.bin")

                cranem_dm_dir = os.path.join(self.git_push_dsp_dir, "cus",
                                             "evb_dm", "images")
                self.git_push_dsp_rf_list.append((boot33_bin, cranem_dm_dir))
                self.git_push_dsp_rf_list.append((apn_bin, cranem_dm_dir))

                break
        cranem_dm_dir = os.path.join(self.git_push_dsp_dir, "cus", "evb_dm",
                                     "build")
        Crane_DS_miniPhone_h = os.path.join(
            cranem_dm_dir, "Crane_DS_miniPhone_16M_Ram_8M_Flash_XIP_CIPSRAM.h")
        Crane_DS_miniPhone_sct = os.path.join(
            cranem_dm_dir,
            "Crane_DS_miniPhone_16M_Ram_8M_Flash_XIP_CIPSRAM.sct")
        dist_dir = os.path.join(self.target_dist_dir, "csw", "platform",
                                "dev_plat", "build")
        self.git_push_dsp_rf_list.append((Crane_DS_miniPhone_h, dist_dir))
        self.git_push_dsp_rf_list.append((Crane_DS_miniPhone_sct, dist_dir))
        for _item in self.git_push_dsp_rf_list:
            self.log.info(_item)

    def find_new_cp_sdk(self):
        cp_sdk_list = [
            _file for _file in os.listdir(self.cp_sdk_release_dir)
            if _file.startswith(self.release_target)
        ]
        cp_sdk_list.sort(key=lambda fn: os.path.getmtime(
            os.path.join(self.cp_sdk_release_dir, fn)))
        assert cp_sdk_list, "can not find sdk"
        self.cp_sdk = cp_sdk_list[-1]
        self.log.debug("newest sdk: %s" % self.cp_sdk)

    def copy_sdk(self):
        copy(os.path.join(self.cp_sdk_release_dir, self.cp_sdk),
             os.path.join(self.cp_sdk_dir, self.cp_sdk))
        time.sleep(3)

    def unzip_sdk(self):
        fname, _ = os.path.splitext(self.cp_sdk)
        root_dir = os.path.join(self.cp_sdk_dir, self.cp_sdk, "SDK")
        sdk_zip = [
            os.path.join(root_dir, _file) for _file in os.listdir(root_dir)
            if _file.startswith(self.release_target)
        ]
        assert sdk_zip, "can not find %s" % root_dir
        sdk_zip = sdk_zip[-1]
        while True:
            try:
                with ziptool_mutex:
                    self.zip_tool.unpack_archive(sdk_zip, root_dir)
                assert os.path.exists(root_dir), "can not find %s" % root_dir
                for root, dirs, files in os.walk(root_dir, topdown=False):
                    if "3g_ps" in dirs:
                        self.cp_sdk_root_dir = root
                        break
                assert os.path.exists(
                    self.cp_sdk_root_dir
                ), "can not find %s" % self.cp_sdk_root_dir
                self.get_dsp_rf_dir(os.path.join(self.cp_sdk_dir, self.cp_sdk))
                return
            except Exception as e:
                time.sleep(10)
                self.log.error(e)
Example #29
def mempool_crawler():
    """
    Mempool Process
    :return:
    """

    obj_logger = MyLogger(logs_directory,category)
    obj_logger.msg_logger('#'*100)
    obj_logger.msg_logger('Getting Mempool Data')

    # Get Mempool Data
    mempool_transaction_data = rpc_request(obj_logger, 'eth_getBlockByNumber', ['pending', True]).get('result',{}).get('transactions',[])

    obj_logger.msg_logger('Crawling Mempool Starts')

    for tx in mempool_transaction_data:

        tx_hash = tx['hash']
        contract_address = tx['to']  # the 'to' address of an ERC-20 transaction is the token contract address

        # TODO - if tx hashes are not matching in redis, then we need to encode/decode utf-8
        # Redis Check
        if (not redis_conn.sismember('eth_erc_zct_set',tx_hash)) and (redis_conn.sismember('eth_erc_aw_set',contract_address)):
            obj_logger.msg_logger('>>>>>>>> Transaction Found in Mempool : %s'%(tx_hash))

            from_address = tx['from']
            bid_id = -1
            confirmations = 0
            block_number = -1
            flag = 'erc20'
            sys_timestamp = datetime.datetime.now()

            # Decoding Inputs
            input = tx['input']
            with open(abi_file, 'r') as abi_definition:
                abi = json.load(abi_definition)
            contract_obj = web3.Web3().eth.contract(address=web3.Web3().toChecksumAddress(contract_address), abi=abi)
            params = contract_obj.decode_function_input(input)
            to_address = params[1].get('_to')
            value = params[1].get('_value')

            # Insert in DB
            result = insert_sql(
                logger=obj_logger,
                table_name= 'erc_transactions',
                data={
                'from_address': from_address,
                'to_address': to_address,
                'contract_address': contract_address,
                'tx_hash': tx_hash,
                'bid_id': bid_id,
                'confirmations': confirmations,
                'block_number': block_number,
                'value': value,
                'flag': flag,
                'sys_timestamp': sys_timestamp,
                }
            )

            if result:
                notif_url = find_sql_join(logger=obj_logger,
                    table_names=['user_master', 'erc_address_master'],
                    filters={'erc_address_master.address': to_address},
                    on_conditions={'user_master.user_name': 'erc_address_master.user_name'},
                    columns=['user_master.notification_url']
                )[0]['notification_url']

                notif_params = {
                    'from_address': from_address,
                    'to_address': to_address,
                    'contract_address': contract_address,
                    'tx_hash': tx_hash,
                    'bid_id': bid_id,
                    'confirmations': confirmations,
                    'block_number': block_number,
                    'value': value,
                    'flag': flag
                }
                send_notification(obj_logger,notif_url,notif_params,queue=hook_main)
                obj_logger.msg_logger('>>>>>>>> Adding to eth_erc_zct_set : %s' % (tx_hash))
                redis_conn.sadd('eth_erc_zct_set', tx_hash.encode('utf-8'))  # cross-checked in the block crawler so duplicate notifications are not sent

    obj_logger.msg_logger('Crawling Mempool Ends')
    obj_logger.msg_logger('#' * 100)
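
The input-decoding step above uses web3.py: a contract object built from the token ABI turns the raw transaction input into the called function plus its arguments, from which _to and _value are read. A self-contained sketch of just that step; the ABI file name is a placeholder, the _to/_value parameter names depend on the ABI, and newer web3 versions rename toChecksumAddress to to_checksum_address:

import json
from web3 import Web3

def decode_erc20_transfer(contract_address, tx_input, abi_file):
    """Decode an ERC-20 transfer() input into (recipient, token amount)."""
    with open(abi_file, "r") as abi_definition:
        abi = json.load(abi_definition)
    contract = Web3().eth.contract(
        address=Web3.toChecksumAddress(contract_address), abi=abi)
    func, params = contract.decode_function_input(tx_input)
    return params.get("_to"), params.get("_value")

# decode_erc20_transfer(tx['to'], tx['input'], 'erc20_abi.json')  # placeholder ABI path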
Example #30
    def __init__(self):
        self.root_dir = os.getcwd()
        self.log = MyLogger(self.__class__.__name__)
        self.git_push_root_dir = None
        self.branch_name = "master"