Example #1
def getProductDescription(productId):
    logger.info("----------getDaumKeyword()----------")
    logger.debug("get Source Datas")
    headers = {
        'User-Agent':
        'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.122 Safari/537.36'
    }
    #source1 = requests.get(target_url['keyword_cpn'],headers=headers).text
    #source2 = requests.get(target_url['keyword_cpn2'],headers=headers).text
    source3 = requests.get("https://www.coupang.com/vp/products/" + productId,
                           headers=headers).text
    #source1 = requests.get("https://www.naver.com").text
    #soup1 = BeautifulSoup(source1, 'html.parser')
    #soup2 = BeautifulSoup(source2, 'html.parser')
    soup3 = BeautifulSoup(source3, 'html.parser')

    print(soup3)

    #print(source1)
    #print(source2)
    #print(soup3)
    #elem_list1 = soup1.select(".subType-IMAGE")
    #elem_list_good = soup2.select(".sdp-review__highlight__positive__article__content")
    #elem_list_bad = soup2.select(".sdp-review__highlight__critical__article__content")
    elem_list_review = soup3.select(".prod-description ul li")

    datas = []
    for i, v in enumerate(elem_list_review):
        print(v.get_text())
        # collect the description lines rather than returning an empty list
        datas.append(str(v.get_text()).strip())
    return datas
Example #2
 def get_debug_asir_result(self):
     loadname = 'ASIR_' + self.loadname
     logger.debug('loadname is: %s ', loadname)
     try:
         dic = get_jenkins_data(loadname)
         if dic:
             list_temp = []
             dic_pci = self.get_target_value('pci', dic, list_temp)
             list_temp2 = []
             dic_children = self.get_target_value(
                 'children', self.get_key_value('name', 'crt', dic_pci),
                 list_temp2)
             dic_debug = self.get_key_value('name', 'debug', dic_children)
             if dic_debug.get('cases'):
                 dic_cases = dic_debug['cases'][0]
                 debug_status = dic_cases['result']
                 if debug_status == 'PASS':
                     debug_status = 'Yes'
                 if debug_status == 'FAIL':
                     debug_status = 'No'
                 return debug_status
             else:
                 debug_status = 'NULL'
         else:
             debug_status = 'NULL'
         return debug_status
     except Exception as e:
         logger.error('error:%s', e)
Example #3
async def files(request: web.Request):
    """
    Paginated list of installable APK files
    :param request:
    :return:
    """
    logger.debug(request.url)

    sort = request.query.get("sort", "")
    page = int(request.query["page"])
    # per_page = int(request.query["per_page"])
    start = (page - 1) * 5
    end = start + 5
    _list = await file_service.query_install_file(0, start, 5, sort)
    total = await file_service.query_all_install_file()
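    # a possible refinement: int(total / 5) + 1 yields an extra page when total is an exact
    # multiple of 5; math.ceil(total / 5) (or (total + 4) // 5) gives the exact page count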
    last_page = int(total / 5) + 1
    # logger.debug(str(page) + " ------- " + str(per_page) + ">>>>>> " + str(list))
    if page < last_page:
        next_page_url = "http://172.17.2.233:8000/files?page=" + str((page + 1))
        prev_page_url = "http://172.17.2.233:8000/files?page=" + str(page)
        if page > 1:
            prev_page_url = "http://172.17.2.233:8000/files?page=" + str((page - 1))
    else:
        next_page_url = "http://172.17.2.233:8000/files?page=" + str(page)
        prev_page_url = "http://172.17.2.233:8000/files?page=" + str((page - 1))

    result = {"total": total, "per_page": 5, "current_page": page, "last_page": last_page,
              "next_page_url": next_page_url, "prev_page_url": prev_page_url, "from": start, "to": end, "data": _list}
    # logger.debug(result)
    return web.json_response(text=json.dumps(result))
Example #4
    def _update_database(self):
        self.getSelfUsersFollowing()
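        # LastJson holds the parsed response of the most recent API call (here, the following list)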
        followings = self.LastJson["users"].copy()

        self._clear_unfollowed_users(followings)

        for user in followings:
            self.searchUsername(user["username"])
            user = self.LastJson["user"]

            mean_like_count, mean_comment_count = self.get_user_info(
                user["pk"], check_on=100)

            user_in_db = User(
                user["pk"],
                user["username"],
                calendar.timegm(time.gmtime()),
                user["follower_count"],
                mean_like_count,
                mean_comment_count,
                "General"
            )
            is_user_exists = len(
                self._db.select(User, User.id == user_in_db.id)
            ) == 1

            if is_user_exists:
                self._db.update(user_in_db)
            else:
                self._db.insert(user_in_db)
        logger.debug("Database updated")
Example #5
 def on_epoch_end(self, epoch, logs={}):
     logger.debug("Epoch %d End " % epoch)
     self.state['epochs'] -= 1
     loss = logs['loss']
     acc = logs['acc']
     valloss = logs['val_loss']
     valacc = logs['val_acc']
     # Sample Content
     # {'CIDEr': 0.11325126353463148, 'Bleu_4': 0.1706107390467726, 'Bleu_3': 0.27462591349020055, 'Bleu_2': 0.4157995334621001, 'Bleu_1': 0.6064295446876932, 'ROUGE_L': 0.40471970665189977, 'METEOR': 0.17162570735633326}
     coco_json = self.framework.eval_onvalidation()
     cider = coco_json['CIDEr']
     bleu4 = coco_json['Bleu_4']
     rouge = coco_json['ROUGE_L']
     meteor = coco_json['METEOR']
     ename = "%.3f_Cider%.3f_Blue%.3f_Rouge%.3f_Meteor%.3f" % (
         valloss, cider, bleu4, rouge, meteor)
     self.elogs.add(
         [epoch, loss, acc, valloss, valacc, cider, bleu4, rouge, meteor])
     self.elogs.flush()
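     # the "or True" makes this branch run every epoch, so a checkpoint is saved regardless of val_loss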
     if valloss < self.bestlossepoch or True:
         to_rm = self.last_epochmodel
         self.last_epochmodel = self.framework.save(
             epoch=("%03d_loss_%s" % (self.state['epochs'], ename)))
         self.bestlossepoch = valloss
         if to_rm is not None:
             pass
             # os.remove(to_rm)
     return
Example #6
 def switch_page_handle(self, page_name, keyword):
     """
     switch WebView handle
     :param page_name:
     :param keyword:
     :return:
     """
     handles = self.get_handles
     try:
         for handle in handles:
             self.driver.switch_to.window(handle)
             logger.debug(f"Page: {page_name} Action:[switch_page_handle]\n"
                          f"Msg: Switch to handle:{handle}")
             if keyword in self.get_page_source:
                 break
         if keyword not in self.get_page_source:
             raise exceptions.NoSuchElementException(
                 "No such keyword:{} in page source:{}".format(
                     keyword, self.get_page_source))
     except exceptions.NoSuchElementException as e:
         handle = self.get_handle
         self.capture_screen_shot(page_name)
         logger.error(
             f"Page: {page_name} Action:[switch_page_handle]\n"
             f"Msg: Switch to handle:{handle}, keyword not found! Track:{e}"
         )
         raise e
     else:
         handle = self.get_handle
         logger.info(f"Page: {page_name} Action:[switch_page_handle]\n"
                     f"Msg: Switch to handle:{handle} keyword found")
         return self
Example #7
    def request_query(self, category, keyword, page=1):
        logger.debug("Request Query to G9")
        self.item_dict = None
        self.item_del_dict = None
        self.item_del_dict = dict()
        http = urllib3.PoolManager()

        page_info = page
        search_keyword = keyword
        target_category = category

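        # Hand-rolled percent-encoding of the keyword: str(keyword.encode('utf-8')) yields a string
        # like "b'\xea\xb0...'"; the b, quotes and 'x' characters are stripped and the re.sub below
        # turns the remaining backslashes into '%', leaving the UTF-8 bytes URL-encoded (%ea%b0...).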
        raw_keyword = str(search_keyword.encode('utf-8'))
        raw_keyword = raw_keyword[1:]
        raw_keyword = raw_keyword.replace("'", "")
        raw_keyword = raw_keyword.replace("x", "")

        encode_keyword = re.sub("[/\\\:?\"]", "%", raw_keyword)

        query_url = 'http://www.g9.co.kr/Display/Category/' + \
                    str(target_category) + '?page=' + str(page_info) + \
                    "&sort=latest&viewType=B&searchQuery=%20" + encode_keyword

        print(query_url)
        req = http.request('GET', query_url, preload_content=False)
        decoded_html = (req.data).decode('utf-8')

        self.parsing_html = decoded_html
        req.release_conn()
Example #8
 def _get_job_builds_id(self, url, job):
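     # url and job are interpolated straight into the SQL string; fine for trusted callers,
     # but a parameterized query would be safer against SQL injection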
     sql = "SELECT build_id FROM crt_load_testline_status_page \
            WHERE url = '{}' AND job = '{}' ORDER BY CAST(build_id as unsigned) DESC".format(url, job)
     logger.debug(sql)
     results = self.db.get_DB(sql)
     logger.debug(results)
     return results
Example #9
    def _wait_with_log(self, update_on):
        "Waits given seconds with a log."
        seconds = get_realtime_setting(update_on, int)

        # Log every `WAIT_SECS` seconds
        wait_secs = get_realtime_setting("WAIT_SECS", int, 10)
        spin_count = seconds // wait_secs
        leap = seconds % wait_secs
        time.sleep(leap)

        total_waited_time = leap
        while spin_count > 0 and self.is_active:
            wait_secs = get_realtime_setting("WAIT_SECS", int, 10)
            remaining_time = spin_count * wait_secs

            logger.debug("Waiting... %d seconds remained.", remaining_time)

            time.sleep(wait_secs)

            total_waited_time += wait_secs
            wait_time_s = get_realtime_setting(update_on, int)

            if wait_time_s != seconds:
                logger.warning("Time has been updated!")
                if total_waited_time >= wait_time_s:
                    break
                spin_count = (wait_time_s - total_waited_time) // wait_secs
                seconds = wait_time_s
                continue

            spin_count -= 1

        if self.is_active:
            logger.debug("Time is up.")
Example #10
def get_loadnames(mode):
    """
    :param mode: FZM FDD = FLF
                 FZM TDD = TLF
                 CFZC FDD = FLC
                 CFZC TDD = TLC
    :return loadname list
    example: get_loadnames('TLF')
    """
    crt_type = str(mode) + '%'
    logger.debug('Type is: %s', mode)
    sql_str = '''
        select enb_build
        from test_results 
        where enb_build !='Null' and enb_build !='' and enb_build not like '%MF%' and crt_type='CRT1_DB' 
        and enb_release like("''' + crt_type + '''")
        GROUP BY enb_build 
        order by time_epoch_start desc limit 30
        '''
    try:
        data = mysqldb.get_DB(sql_str)
        results = []
        for row in data:
            loadname = row[0]
            results.append(loadname)
        return results
    except Exception as e:
        logger.error('error: get_loadnames %s', e)
Example #11
def getDaumNews():
    logger.info("----------getDaumNews()----------")
    source = requests.get(target_url['news_daum']).text
    soup = BeautifulSoup(source, 'html.parser')
    elem_list_title = soup.select("div.cont_thumb .tit_thumb a")
    elem_list_desc = soup.select("div.cont_thumb .desc_thumb span")

    titles = []
    descs = []
    datas = []
    url = 'https://search.daum.net/search?w=news&nil_search=btn&DA=NTB&enc=utf8&cluster=y&cluster_page=1&q='
    for i, v in enumerate(elem_list_title):
        titles.append(v.text + '\t' + v.attrs['href'])

    for i, v in enumerate(elem_list_desc):
        descs.append(v.text.strip())
    fn.aryLenSync(titles, descs)
    for i, v in enumerate(titles):
        data = '%s\t%s\t%s\t%s\t%s\n' % (
            target_code['news_daum'], createTime, fn.getStrNo(i + 1),
            fn.getConvData(titles[i].split('\t')[0]),
            url + fn.getEncodeUrl(titles[i].split('\t')[0]))
        datas.append(data)
        logger.debug(data)

    return datas
Example #12
    def run(self):
        input_queue = self.input_queue
        queue_endian = self.queue_endian
        overwrite = self.overwrite

        while True:
            if input_queue is not None:
                __get_queue = input_queue.get()
                if __get_queue == queue_endian:
                    logger.debug("ImageDownloader Done")
                    break
                else:
                    __get_data = __get_queue
                    # print(__get_data)
                    save_path = (__get_data[0]).strip()
                    goodscode = (__get_data[1]).strip()
                    if save_path is not None and goodscode is not None:
                        __flag = url_image_download(save_path, goodscode, overwrite)
                        if __flag is False:
                            logger.warning("Image Donwload Error " + goodscode)
                    else:
                        logger.warning(str(os.getpid()) + " / Warning with Queue " + str(__get_queue))
            else:
                logger.critical("Queue is Empty")
                break
Example #13
def call_back_toservice(task, query):
    logger.debug('[callback a verifytask by rabbitmq]')
    try:
        credentials = pika.PlainCredentials(username=task.master_info['spider_mq_user']
                                            , password=task.master_info['spider_mq_passwd'])
        connection = pika.BlockingConnection(
            pika.ConnectionParameters(
                host=task.master_info['spider_mq_host'], virtual_host=task.master_info['spider_mq_vhost'], credentials=credentials
            )
        )
        channel = connection.channel()

        msg = json.dumps({
            'qid': task.req_qid, 'type': task.callback_type,
            'uid': task.req_uid, 'query': json.dumps(query)
        })

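        # delivery_mode=2 marks the message as persistent so RabbitMQ stores it on disk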
        res = channel.basic_publish(
            exchange=task.master_info['spider_mq_exchange'],
            routing_key=task.master_info['spider_mq_routerKey'],
            properties=pika.BasicProperties(delivery_mode=2),
            body=msg,
        )
        connection.close()
        if not res:
            raise Exception('RabbitMQ Result False')
        logger.debug('[callback a verifytask done]')
    except Exception as exc:
        logger.exception("callback a task fail. error = {0}".format(traceback.format_exc()))
Example #14
def ImageResultSum(result_queue, queue_endian, threads_num):
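    # queue_endian is the end-of-work sentinel; the loop exits only after every worker thread has sent it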
    __counter = 0
    __check_endian = 0

    null_image_list = list()

    while True:
        if result_queue is not None:
            __get_queue = result_queue.get()

            if __get_queue == queue_endian:
                __check_endian = __check_endian + 1
                logger.debug("Get Endian")
                if __check_endian == threads_num:
                    logger.debug("ImageResultSum Done")
                    break
            else:
                __data = __get_queue
                goodscode = __data[0]
                exist = __data[1]
                if exist is False:
                    print("Null Goodscode " + str(goodscode))
                    null_image_list.append(goodscode)
        else:
            logger.critical("Queue is Empty")
            break

    return null_image_list
Example #15
 def find_one(self, sql, expected=None, times=None) -> tuple:
     """
     Query all results
     :param times: loop times
     :param sql: Execute database expression -> str
     :param expected:
     :return: results -> tuple
     """
     res = None
     if not times:
         times = 20
     try:
         logger.info("Model: fetchone, SQL: 【{}】".format(sql))
         for i in range(times):
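             # pymysql's execute() returns the number of rows in the result; 0 means nothing matched yet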
             row = self.cur.execute(sql)
             logger.debug("row: {}".format(row))
             if not row:
                 time.sleep(6)
                 self.con.commit()
                 continue
             res = self.cur.fetchone()
             logger.info("result: {}".format(res))
             if not expected or res[0] == expected:
                 return res
             time.sleep(6)
             self.con.commit()
         return res
     except pymysql.err.InterfaceError as e:
         self.con.ping(reconnect=True)
         logger.warning(f"Database connection failed: {e}")
         return self.find_one(sql, expected, times)
     except (pymysql.err.Error, TypeError) as e:
         logger.error("Database error rolling back: {}".format(e))
         self.con.rollback()
         raise e
Example #16
 def _report(self):
     if self._error == 1:
         logger.debug(f"The error flag: {self._error}. Exit the function.")
         return ""
     logger.info("Try to report in the alternate method.")
     param = parse.parse_qs(parse.urlparse(str(self._navigation_url)).query)
     url = f"{self._host}/project_add.asp"
     payload = {
         "id": param["id"][0],
         "province": "四川省",
         "city": "成都市",
         "area": "龙泉驿区",
         "wuhan": "否",
         "fare": "否",
         "wls": "否",
         "kesou": "否",
         "zhengduan": "",
         "Submit": "提交",
         "action": "add",
         "adds": "undefined",
         "addsxy": "undefined"
     }
     res = self._session.post(url=url, headers=self._headers, data=payload)
     logger.debug(
         f"URL:{url}. Payload:{payload}. Status code:{res.status_code}")
     if res.status_code != 200:
         logger.error(
             f"Failed:GET request. URL:{url}. Status code:{res.status_code}"
         )
     res.encoding = "utf-8"
     return res.text
Example #17
    def run(self):
        input_queue = self.input_queue
        result_queue = self.result_queue
        queue_endian = self.queue_endian

        while True:
            if input_queue is not None:
                __get_queue = input_queue.get()
                if __get_queue == queue_endian:
                    logger.debug("ImageCheckWorker Done")
                    result_queue.put(queue_endian)
                    break
                else:
                    __get_data = __get_queue
                    # print(__get_data)
                    goodscode = __get_data.strip()
                    if goodscode is not None:
                        __img_exist = exist_check(goodscode)
                        __ret_data = [goodscode, __img_exist]
                        result_queue.put(__ret_data)
                    else:
                        logger.warning(
                            str(os.getpid()) + " / Warning with Queue " +
                            str(__get_queue))
            else:
                logger.critical("Queue is Empty")
                break
Example #18
    def datas_from_ids(self, idlst):
        logger.debug("\n Loading Video/Captions for ids : %s" % str(idlst))
        afeatures = []
        vids   = []
        capIn  = []
        capOut = []
        for _id in idlst:
            vccs = self.get_video_caption(_id, just_one_caption = True)
            if vccs is None:
                continue
            for vcc in vccs:
                _afeature, _vid, _capIn, _capOut = vcc
                afeatures.append(_afeature)
                vids.append(_vid)
                capIn.append(_capIn)
                capOut.append(_capOut)
        afeatures  = np.asarray(afeatures)
        capIn  = np.asarray(capIn)
        capOut = np.asarray(capOut)
        vids   = np.asarray(vids)

        logger.debug("Shape vids   %s [max distinct %d]" % (str(np.shape(vids)),len(idlst)))
        logger.debug("Shape afeatures  %s" % str(np.shape(afeatures)))
        logger.debug("Shape CapIn  %s" % str(np.shape(capIn)))
        logger.debug("Shape CapOut %s" % str(np.shape(capOut)))


        return [[capIn,afeatures,vids],capOut]
Example #19
 def go_scf_wechat(uid, title, message, api, sendkey, userid):
     now = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
     url = f'{api}/{sendkey}'
     ps = ""
     msg = " " * 10 + title + "\n\n" + uid + ":\n" + " " * 4 + message + f"\n{ps}\n\n{now}"
     payload = {
         "sendkey": sendkey,
         "msg_type": "text",
         "msg": msg,
         "to_user": userid
     }
     # go_scf requires the POST body to be JSON; see its documentation
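     # (requests' json=payload parameter would serialize the body and set the Content-Type header automatically)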
     res = requests.post(url=url, data=json.dumps(payload))
     logger.debug(f"URL:{url}. Status code:{res.status_code}")
     res.encoding = "utf-8"
     logger.debug(f"Response:{res.text}")
     dict_res = json.loads(res.text)
     if res.status_code != 200:
         logger.error(
             f"Failed to push the WeChat message. Status code:{res.status_code}."
         )
         logger.error("Retry to push the WeChat message.")
         raise Exception("Failed to push the WeChat message.")
     elif dict_res["code"] != 0:
         logger.error(
             f'Failed to push the WeChat message. [{dict_res["msg"]}].')
         logger.error("Retry to push the WeChat message.")
         raise Exception("Failed to push the WeChat message.")
     else:
         logger.info("Successful to push the WeChat message.")
Example #20
    def process(self):
        data = []
        builds = self.impl.require_data(DB_UNFINISHED_BUILD_ID,
                                        url=self.url, job=self.job_name)
        for build_id in builds:
            build = self.jenkins.get_build(build_id)
            if build is None:
                continue
            info = _parse_build_data(build.json_data, build.inner.get_params())
            logger.debug(info)
            data.append(info)

        db_last_build_id = self.impl.require_data(DB_LATEST_BUILD_ID,
                                                  url=self.url, job=self.job_name)
        last_build_id = self.jenkins.get_last_buildnumber()
        logger.info("{}-{}: db_last_build_id {}, jenkins last_build_id {}.".format(self.url, self.job_name,
                                                                                   db_last_build_id,
                                                                                   last_build_id))
        if db_last_build_id is None:
            filters = ['id', 'result', 'displayName', 'timestamp', 'url']
            json_data = self.jenkins.get_all_builds(filters)
            return data + _parse_builds_data(json_data)

        build_id = int(db_last_build_id)
        while build_id < int(last_build_id):
            build_id += 1
            build = self.jenkins.get_build(build_id)
            info = _parse_build_data(build.json_data, build.inner.get_params())
            data.append(info)

        logger.debug(data)
        return data
Example #21
def get_asir_loadnames(mode):
    """
    get asir loadnames
    :param mode: ASIR FDD = FL
                 ASIR TDD = TL
    :return: loadname list
    """
    crt_type = str(mode) + '%'
    logger.debug('Type is: %s', mode)
    sql_str = '''
        select enb_build
        from test_results 
        where enb_build !='Null' and enb_build !='' and enb_build not like '%MF%' and crt_type='CRT1_DB' 
        and enb_release like("''' + crt_type + '''") and enb_hw_type='AIRSCALE' 
        GROUP BY enb_build 
        order by time_epoch_start desc limit 30
        '''
    try:
        data = mysqldb.get_DB(sql_str)
        results = []
        for row in data:
            loadname = row[0]
            results.append(loadname)
        return results
    except Exception as e:
        logger.error('error: get_asir_loadnames %s', e)
Example #22
 def loadWordEmbedding(self, glove_file):
     self.wordEmbedding = loadFromPickleIfExists(WORD_EMBEDDED_CACHE)
     if self.wordEmbedding:
         logger.debug("Embedding Loaded")
         return False
     else:
         self.wordEmbedding = dict()
         with open(glove_file, 'r') as f:
             for i,line in enumerate(f):
                 tokens = line.split()
                 tokens = [tok.__str__() for tok in tokens]
                 word = tokens[0]
                 self.wordEmbedding[word] = np.asarray(tokens[1:], dtype='float32')
         minVal = float('inf')
         maxVal = -minVal
         for v in self.wordEmbedding.values():
             for x in v:
                 minVal = min(minVal,x)
                 maxVal = max(maxVal,x)
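         # scipy's interp1d builds a linear map from [minVal, maxVal] onto [-1, 1]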
         mapper = interp1d([minVal,maxVal],[-1,1])
         logger.info("Mapping minVal[%f], maxVal[%f] to [-1,1]  " % (minVal,maxVal))
         for w in self.wordEmbedding:
             self.wordEmbedding[w] = mapper(self.wordEmbedding[w])
         print("Cross Check")
         print(self.wordEmbedding['good'])
         self.saveEmbedding()
         return True
Example #23
 def get_unexecuted_count(self):
     branch = self.get_release()
     if branch is None:
         raise ValueError("Invalid branch: None. Load name: %s" %
                          (self.loadname, ))
     logger.debug('branch is  %s :', branch)
     logger.debug('loadname is  %s :', self.loadname)
     sql_str = '''                
         SELECT count(*)
         FROM (SELECT *
               FROM (SELECT crt_testcase_name.casename
                     FROM crt_testcase_release
                            INNER JOIN crt_testcase_name ON crt_testcase_name.id = crt_testcase_release.case_id
                     WHERE crt_testcase_release.release_id = "''' + branch + '''") AS t1
               WHERE t1.casename NOT IN (SELECT test_case_name
                                         FROM test_results
                                         WHERE enb_build = "''' + self.loadname + '''"
                                           AND record_valid = 1
                                           AND crt_type = 'CRT1_DB')) AS t2                             
      '''
     try:
         data = mysqldb.get_DB(sql_str)
         results = data[0][0]
         return results
     except Exception as e:
         logger.error('error: get_unexecuted_count %s', e)
Example #24
 def _login(self, uid, password):
     if self._error == 1:
         logger.debug(f"The error flag: {self._error}. Exit the function.")
         return
     url = f"{self._host}/weblogin.asp"
     payload = {
         "username": uid,
         "userpwd": password,
         "code": self._captcha_code,
         "login": "******",
         "checkcode": "1",
         "rank": "0",
         "action": "login",
         "m5": "1",
     }
     res = self._session.post(url=url, headers=self._headers, data=payload)
     logger.debug(
         f"URL:{url}. Payload:{payload}. Status code:{res.status_code}")
     res.encoding = "utf-8"
     if res.status_code != 200:
         logger.error(
             f"Failed:POST request. URL:{url}. Status code:{res.status_code}"
         )
         self._set_error(2, 1)
     elif "alert" in res.text:
         logger.error(
             "Failed to login the ISP.[Incorrect username, password or captcha code]"
         )
         self._set_error(2, 1)
     else:
         logger.info("Successful to login the ISP.")
Example #25
 def data_generator_random(self, batch_size, start=0, typeSet = 0):
     if typeSet == 0:
         ids = self.vHandler.getTrainingIds()
     elif typeSet == 1:
         ids = self.vHandler.getValidationIds()
     elif typeSet == 2:
         ids = self.vHandler.getTestIds()
     else:
         assert False
     random.shuffle(ids)
     count = (len(ids) + batch_size - 1)//batch_size
     assert count > 0
     if start == -1:
         start = random.randint(0,count)
     logger.debug("Max Batches of type %d : %d " % (typeSet, count))
     #start = start % count
     while True:
         bs = batch_size
         if bs>len(ids):
             bs=len(ids)
             logger.debug("FORCE Reducing Batch Size to %d from %d",bs,batch_size)
         idlst = random.sample(ids,bs)
         data = self.datas_from_ids(idlst)
         ndata = []
         for d in data:
             if d is not None:
                 ndata.append(d)
         if len(ndata) > 0:
             yield ndata
Example #26
 def send(self, uid, title, msg, receiver: list):
     logger.debug(f"Email receiver:{receiver[0]}.")
     if not self._is_login:
         logger.error("Failed to send the email.[Email not login]")
         self.login()
         raise Exception("Failed to send the email.")
     else:
         now = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
         mail_msg = self._mail_payload.format(uid=uid,
                                              msg=msg,
                                              mail_name=self._mail_name,
                                              time=now)
         message = MIMEText(mail_msg, "html", "utf-8")
         message['Subject'] = title
         message['From'] = f"{self._mail_name} <{self._mail_user}>"
         message['To'] = receiver[0]
         try:
             self.smtp.sendmail(self._mail_user, receiver,
                                message.as_string())
             logger.info("Successful to send the email.")
         except Exception as e:
             logger.error(f"Failed to send the email.[{e}]")
             logger.error("Retry to send the email.")
             self._is_login = False
             self.login()
             raise Exception("Failed to send the email.")
Example #27
def getProductReview(productId):
    logger.info("----------getDaumKeyword()----------")
    logger.debug("get Source Datas")
    headers = {
        'User-Agent':
        'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.122 Safari/537.36'
    }
    #source1 = requests.get(target_url['keyword_cpn'],headers=headers).text
    #source2 = requests.get(target_url['keyword_cpn2'],headers=headers).text
    source3 = requests.get(
        "https://www.coupang.com/vp/product/reviews?productId=" + productId +
        "&size=20&sortBy=ORDER_SCORE_ASC",
        headers=headers).text
    #source1 = requests.get("https://www.naver.com").text
    #soup1 = BeautifulSoup(source1, 'html.parser')
    #soup2 = BeautifulSoup(source2, 'html.parser')
    soup3 = BeautifulSoup(source3, 'html.parser')

    #print(source1)
    #print(source2)
    #print(soup3)
    #elem_list1 = soup1.select(".subType-IMAGE")
    #elem_list_good = soup2.select(".sdp-review__highlight__positive__article__content")
    #elem_list_bad = soup2.select(".sdp-review__highlight__critical__article__content")
    elem_list_review = soup3.select(
        ".sdp-review__article__list__review__content")

    datas = []
    for i, v in enumerate(elem_list_review):
        datas.append({
            "productId": str(productId),
            "review": str(v.get_text()).strip()
        })

    return datas


#
# base_dir = os.path.dirname( os.path.abspath( __file__ ) ) +"/files/"
# file_cupang  = base_dir+"result.xlsx"
# file_cupang2  = base_dir+"result2.xlsx"
# res1 = pd.read_excel(file_cupang,sheet_name='cupang')
#
# arry = res1['productId']
#
# info = pd.DataFrame(
#         columns=['productId', 'review'])
#
# len =len(arry)
# for i, v in enumerate(arry):
#     print(len - i)
#     time.sleep(5)
#     data = getProductReview(str(v))
#     for ii,vv in enumerate(data):
#         info = info.append({'productId': vv['productId'] , 'review': vv['review'] }, ignore_index=True)
#         print(vv)
#
# file2 = file_cupang
# info.to_excel(file_cupang2, sheet_name='cupang',engine='xlsxwriter')
Example #28
async def installfile(request: web.Request):
    """
    APK installation
    :param request:
    :return:
    """
    logger.debug(request.url)
    return {}
Example #29
 def get_item_dict(self, items_dict, item_remove_dict):
     logger.debug("Check Item from remove dict")
     self.download_item_dict = None
     if item_remove_dict:
         for remove_item in item_remove_dict.keys():
             if remove_item in items_dict:
                 del items_dict[remove_item]
     self.download_item_dict = items_dict
Example #30
def send_message(_, kwargs):
    logger.debug(f'Scheduler started at PID:{os.getpid()}')
    data = kwargs["data"]
    client.send_message(data["contact"], data["message"], data["type"])
    scheduled_time = convert_to_local_ts(data["timeInfo"]["schedule_time"],
                                         format_date=True)
    current_time = pendulum.now().to_datetime_string()
    logger.info(f'scheduled at {scheduled_time} sent at {current_time}')