Example #1
def write_recording():
    record_option = result['record_option']

    new_record_option = []
    for item in record_option:
        finish = False
        item['wb'] = list(item['wb'])
        item['aaa_mode'] = list(item['aaa_mode'])
        for new_item in new_record_option:
            if cmp(new_item['data']['wb'], item['wb']) == 0 and cmp(
                    new_item['data']['aaa_mode'], item['aaa_mode']) == 0:
                new_item['count'] += 1
                finish = True
                break
        if not finish:
            new_record_option.append({'data': item, 'count': 1})

    new_record_option.sort(cmp=list_cmp)

    content = ''
    content += '每一次录像: name 值为camera._startRecording,和 camera._stopRecording。过程出现的参数:\n\n'
    for item in new_record_option:
        content += json.dumps(item['data']) + ' :\t' + str(
            item['count']) + '\n'
    write_file('output/result3.txt', content)
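Note: cmp() and list.sort(cmp=...) exist only in Python 2, so the grouping loop above will not run under Python 3. A minimal Python 3 sketch of the same counting idea, assuming each option item carries 'wb' and 'aaa_mode' lists as above (count_option_combinations is a hypothetical helper, not part of the original project):

from collections import Counter

def count_option_combinations(record_option):
    # Count how often each (wb, aaa_mode) combination occurs.
    counts = Counter(
        (tuple(item['wb']), tuple(item['aaa_mode'])) for item in record_option
    )
    # Sort by the combination itself so the report order is stable.
    return sorted(counts.items())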
Example #2
def write_recording1():
    record_option = result['record_option']

    new_record_option = {'wb': {}, 'aaa_mode': {}}
    for item in record_option:
        item['wb'] = list(item['wb'])
        item['aaa_mode'] = list(item['aaa_mode'])
        for wb in item['wb']:
            if new_record_option['wb'].has_key(wb):
                new_record_option['wb'][wb] += 1
            else:
                new_record_option['wb'][wb] = 1

        for aaa_mode in item['aaa_mode']:
            if new_record_option['aaa_mode'].has_key(aaa_mode):
                new_record_option['aaa_mode'][aaa_mode] += 1
            else:
                new_record_option['aaa_mode'][aaa_mode] = 1

    content = ''
    content += '每一次录像: name 值为camera._startRecording,和 camera._stopRecording。过程出现的参数:\n\n'
    for index in new_record_option['wb']:
        content += 'wb = ' + str(index) + ':\t' + str(
            new_record_option['wb'][index]) + '\n'

    content += '\n\n' + '————————————————' + '\n\n'

    for index in new_record_option['aaa_mode']:
        content += 'aaa_mode = ' + str(index) + ':\t' + str(
            new_record_option['aaa_mode'][index]) + '\n'

    write_file('output/result3.txt', content)
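dict.has_key() was removed in Python 3; the per-value tallies above can be expressed with collections.Counter instead. A rough sketch under the same assumptions about the item structure (count_field_values is a hypothetical name):

from collections import Counter

def count_field_values(options, field):
    # Tally every value that appears under `field` across all option items.
    counter = Counter()
    for item in options:
        counter.update(item[field])
    return counter

# e.g. count_field_values(record_option, 'wb'), count_field_values(record_option, 'aaa_mode')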
Example #3
def write_live1():
    live_option = result['live_option']

    new_live_option = {'wb': {}, 'aaa_mode': {}}
    for item in live_option:
        item['wb'] = list(item['wb'])
        item['aaa_mode'] = list(item['aaa_mode'])
        for wb in item['wb']:
            if new_live_option['wb'].has_key(wb):
                new_live_option['wb'][wb] += 1
            else:
                new_live_option['wb'][wb] = 1

        for aaa_mode in item['aaa_mode']:
            if new_live_option['aaa_mode'].has_key(aaa_mode):
                new_live_option['aaa_mode'][aaa_mode] += 1
            else:
                new_live_option['aaa_mode'][aaa_mode] = 1

    content = ''
    content += '每一次直播: name 值为camera._startLive,和 camera._stopLive。过程出现的参数:\n\n'
    for index in new_live_option['wb']:
        content += 'wb = ' + str(index) + ':\t' + str(
            new_live_option['wb'][index]) + '\n'

    content += '\n\n' + '————————————————' + '\n\n'

    for index in new_live_option['aaa_mode']:
        content += 'aaa_mode = ' + str(index) + ':\t' + str(
            new_live_option['aaa_mode'][index]) + '\n'
    write_file('output/result4.txt', content)
Example #4
def write_live():
    live_option = result['live_option']
    new_live_option = []

    for item in live_option:
        finish = False
        item['wb'] = list(item['wb'])
        item['aaa_mode'] = list(item['aaa_mode'])
        for new_item in new_live_option:
            if cmp(new_item['data']['wb'], item['wb']) == 0 and cmp(
                    new_item['data']['aaa_mode'], item['aaa_mode']) == 0:
                new_item['count'] += 1
                finish = True
                break
        if not finish:
            new_live_option.append({'data': item, 'count': 1})

    new_live_option.sort(cmp=list_cmp)

    content = ''
    content += '每一次直播: name 值为camera._startLive,和 camera._stopLive。过程出现的参数:\n\n'
    for item in new_live_option:
        content += json.dumps(item['data']) + ' :\t' + str(
            item['count']) + '\n'
    write_file('output/result4.txt', content)
Example #5
 def audit(self):
     try:
         print "the tmp filename is :" + self.filename
         write_file(self.filename, self.dreq.request)
         slow_cmd = "python sqlmap.py -r %s --dbms=Mysql --batch" % self.filename
         quick_cmd = "python sqlmap.py -r %s --dbms=Mysql --smart  --batch" % self.filename
         cmd = slow_cmd if len(self.dreq.url) < 150 else quick_cmd
         result = self.psqlmap(cmd)
         m = re.search(r"Place[\w\W]+Parameter[\w\W]+Payload", result,
                       re.IGNORECASE)
         if m:
             method = re.findall(r"Place:(.*)", result,
                                 re.IGNORECASE)[0].strip()
             parameter = re.findall(r"Parameter:(.*)", result,
                                    re.IGNORECASE)[0].strip()
             payload = '\r\n'.join(
                 re.findall(r"Payload:(.*)", result, re.IGNORECASE))
             self.dlog.debug(
                 "[ SqlmapAudit Vul Result ] method: %s, parameter: %s, payload: %s"
                 % (method, parameter, payload))
             vul = scanIssue(rid=self.dreq.rid,
                             type=vulTypes.sql,
                             host=self.dreq.host,
                             url=self.dreq.url,
                             method=method,
                             parameters=parameter,
                             payload=payload)
             self.insertVul(vul)
     except Exception, e:
         elog.exception(e)
Example #6
def run_process(q_recv, q_send, in_folder, out_folder, failed_extractions_file,
                max_tries, use_diffbot):
    """
    Tries 'max_tries' times to extract text using the Diffbot API or boilerpipe.
    At the end, if using diffbot, tries one last time with boilerpipe
    """

    texts, trec_ids = [], []

    def retrieve_texts_from_html(html, use_diffbot=False):
        """ Use the Diffbot API/Boilerpipe to retrieve texts from HTML """

        if use_diffbot:
            dummy_url = 'https://www.diffbot.com/dev/analytics/'
            url_api = "https://api.diffbot.com/v3/article?token=%s" \
                      "&discussion=false&url=%s" % (DIFFBOT_TOKEN, dummy_url)
            headers = {'Content-type': 'text/html'}
            content = json.loads(
                requests.post(url_api, data=html, headers=headers).text)

            text = content["objects"][0]["text"]
            title = content["objects"][0]["title"]

            text = '\n'.join([title, text])
        else:
            text = Extractor(extractor='ArticleExtractor', html=html).getText()

        return text

    while True:
        trec_id = q_recv.get()

        # Check end condition
        if trec_id is None:
            break

        # Check if file exists
        if not os.path.isfile("%s/%s" % (in_folder, trec_id)):
            continue

        # Read HTML
        html = read_file("%s/%s" % (in_folder, trec_id), encoding='latin1')

        i = 0
        while i != max_tries:
            try:
                texts.append(
                    retrieve_texts_from_html(html, use_diffbot=use_diffbot))
                trec_ids.append(trec_id)
                break
            except Exception as e:  # Extraction failed
                # print(e)
                i += 1

        if i == max_tries:
            write_file("%s\n" % trec_id, failed_extractions_file, 'a')

    q_send.put((texts, trec_ids))
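run_process is a queue-driven worker: it pulls TREC ids from q_recv until it receives a None sentinel, then pushes its accumulated (texts, trec_ids) pair onto q_send. A minimal wiring sketch, assuming the queues are multiprocessing queues and the remaining arguments match the signature above (folder names, file name, and the TREC id are illustrative only):

import multiprocessing as mp

if __name__ == '__main__':
    q_recv, q_send = mp.Queue(maxsize=1000), mp.Queue()
    workers = [
        mp.Process(target=run_process,
                   args=(q_recv, q_send, 'html_in', 'text_out',
                         'failed_extractions.txt', 3, False))
        for _ in range(mp.cpu_count())
    ]
    for w in workers:
        w.start()
    q_recv.put('clueweb12-0000tw-00-00000')  # illustrative TREC id
    for _ in workers:
        q_recv.put(None)                     # one sentinel per worker
    results = [q_send.get() for _ in workers]
    for w in workers:
        w.join()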
Example #7
def write_timelapse1():
    timelapse_option = result['timelapse_option']
    new_timelapse_option = {
        'wb': {},
        'aaa_mode': {},
        'long_shutter': {},
        'interval': collections.OrderedDict()
    }

    for item in timelapse_option['option']:
        item['wb'] = list(item['wb'])
        item['aaa_mode'] = list(item['aaa_mode'])
        for wb in item['wb']:
            if new_timelapse_option['wb'].has_key(wb):
                new_timelapse_option['wb'][wb] += 1
            else:
                new_timelapse_option['wb'][wb] = 1

        for aaa_mode in item['aaa_mode']:
            if new_timelapse_option['aaa_mode'].has_key(aaa_mode):
                new_timelapse_option['aaa_mode'][aaa_mode] += 1
            else:
                new_timelapse_option['aaa_mode'][aaa_mode] = 1

        for long_shutter in item['long_shutter']:
            if new_timelapse_option['long_shutter'].has_key(long_shutter):
                new_timelapse_option['long_shutter'][long_shutter] += item[
                    'long_shutter'][long_shutter]
            else:
                new_timelapse_option['long_shutter'][long_shutter] = item[
                    'long_shutter'][long_shutter]

    sorted_key_list = sorted(timelapse_option['interval'])

    for key in sorted_key_list:
        new_timelapse_option['interval'][key] = timelapse_option['interval'][
            key]

    content = ''
    content += '每一次Timelapse。过程出现的参数:\n\n'
    for index in new_timelapse_option['wb']:
        content += 'wb = ' + str(index) + ':\t' + str(
            new_timelapse_option['wb'][index]) + '\n'

    content += '\n\n' + '————————————————' + '\n\n'

    for index in new_timelapse_option['aaa_mode']:
        content += 'aaa_mode = ' + str(index) + ':\t' + str(
            new_timelapse_option['aaa_mode'][index]) + '\n'

    content += '\n\n' + '————————————————' + '\n\n'
    for index in new_timelapse_option['interval']:
        content += 'interval = ' + str(index) + ':\t' + str(
            new_timelapse_option['interval'][index]) + '\n'

    write_file('output/result5.txt', content)
Example #8
 def __task_new(self):
     try:
         req = urllib2.Request( self.server+ '/task/new')
         resp = json.load(urllib2.urlopen(req))
         if resp['success'] == True:
             self.sqlitaskid = resp['taskid']
             self.request_file = self.tmp_dir+"\\"+self.sqlitaskid
             utils.write_file(self.request_file, self.request)
             print 'Created SQLMap Task: ' + self.sqlitaskid + '\n'
         else:
             print 'SQLMap task creation failed\n'
     except:
         print 'Except SQLMap task creation failed\n'
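sqlmap's REST API server (sqlmapapi.py) answers GET /task/new with a JSON body containing 'success' and 'taskid'; the code above drives it with Python 2's urllib2 and print statements. A rough Python 3 equivalent using requests, assuming the same server URL and response shape (create_sqlmap_task is a hypothetical name):

import requests

def create_sqlmap_task(server):
    # Ask the sqlmap API server for a new task id.
    resp = requests.get(server + '/task/new').json()
    if resp.get('success'):
        return resp['taskid']
    raise RuntimeError('SQLMap task creation failed')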
Example #9
 def __task_new(self):
     try:
         req = urllib2.Request(self.server + '/task/new')
         resp = json.load(urllib2.urlopen(req))
         if resp['success'] == True:
             self.sqlitaskid = resp['taskid']
             self.request_file = self.tmp_dir + "\\" + self.sqlitaskid
             utils.write_file(self.request_file, self.request)
             print 'Created SQLMap Task: ' + self.sqlitaskid + '\n'
         else:
             print 'SQLMap task creation failed\n'
     except:
         print 'Except SQLMap task creation failed\n'
Example #10
def write_timelapse():
    timelapse_option = result['timelapse_option']
    new_timelapse_option = {
        'option': [],
        'interval': collections.OrderedDict()
    }

    for item in timelapse_option['option']:
        finish = False
        item['wb'] = list(item['wb'])
        item['aaa_mode'] = list(item['aaa_mode'])
        for new_item in new_timelapse_option['option']:
            if cmp(new_item['data']['wb'], item['wb']) == 0 and cmp(
                    new_item['data']['aaa_mode'],
                    item['aaa_mode']) == 0 and cmp(
                        new_item['data']['long_shutter'],
                        item['long_shutter']) == 0:
                new_item['count'] += 1
                finish = True
                break
        if not finish:
            temp = collections.OrderedDict()
            temp['wb'] = item['wb']
            temp['aaa_mode'] = item['aaa_mode']
            temp['long_shutter'] = item['long_shutter']
            new_timelapse_option['option'].append({'data': temp, 'count': 1})

    new_timelapse_option['option'].sort(cmp=list_cmp1)

    sorted_key_list = sorted(timelapse_option['interval'])

    for key in sorted_key_list:
        new_timelapse_option['interval'][key] = timelapse_option['interval'][
            key]

    print new_timelapse_option['interval']

    content = ''
    content += '每一次Timelapse。过程出现的参数:\n\n'
    for item in new_timelapse_option['option']:
        content += json.dumps(item['data']) + ' :\t' + str(
            item['count']) + '\n'

    content += '\n\n' + '————————————————' + '\n\n'
    for index in new_timelapse_option['interval']:
        content += 'interval = ' + str(index) + ':\t' + str(
            new_timelapse_option['interval'][index]) + '\n'

    write_file('output/result5.txt', content)
Example #11
def get_data_list(threadName):
    try:
        while not temp_queue.empty():
            curstart = temp_queue.get()
            info = "线程名:%s,没有获取到数据还有 %d个" % (threadName, temp_queue.qsize())
            logging.info(info)
            print(info)

            option = webdriver.ChromeOptions()
            option.add_argument("headless")
            browser = webdriver.Chrome(chrome_options=option)
            url = data_list_url
            url = url.format(26, curstart)

            print(url)
            logging.info(url)

            browser.get(url)  # Load page
            # time.sleep(1)     # sleep 1 second, mainly to keep the target server from treating this as an attack; effectiveness unknown

            # fetch the page source
            data = browser.page_source
            file_name = DATA_LIST_PATH + "/" + str(curstart) + ".html"
            if utils.write_file(file_name, data):
                info = file_name + "写入文件成功"
                logging.info(info)
                print(info)

            # browser.close()
            browser.quit()
    except BaseException as e:
        traceback.print_exc()  # print the exception directly
        logging.error(traceback.format_exc())  # format_exc() returns a string, which is written to the log file
Example #12
 def audit(self):
     try:
         print "the tmp filename is :"+ self.filename
         write_file(self.filename, self.dreq.request)
         slow_cmd = "python sqlmap.py -r %s --dbms=Mysql --batch"%self.filename
         quick_cmd = "python sqlmap.py -r %s --dbms=Mysql --smart  --batch"%self.filename
         cmd = slow_cmd if len(self.dreq.url) < 150 else quick_cmd
         result = self.psqlmap(cmd)
         m = re.search(r"Place[\w\W]+Parameter[\w\W]+Payload", result, re.IGNORECASE)
         if m:
             method = re.findall(r"Place:(.*)", result, re.IGNORECASE)[0].strip()
             parameter = re.findall(r"Parameter:(.*)", result, re.IGNORECASE)[0].strip()
             payload = '\r\n'.join(re.findall(r"Payload:(.*)", result, re.IGNORECASE))
             self.dlog.debug("[ SqlmapAudit Vul Result ] method: %s, parameter: %s, payload: %s"%(method, parameter, payload))
             vul = scanIssue(rid=self.dreq.rid, type= vulTypes.sql, host= self.dreq.host, url= self.dreq.url, method=method, parameters=parameter, payload= payload)
             self.insertVul(vul)
     except Exception,e:
         elog.exception(e)
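Example #13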
def read_and_write_html_from_warc(q, file2ids, output_folder):

    while True:
        info = q.get()

        # Check end condition
        if info is None:
            break

        file, file_id = info

        # Check if file exists
        if not os.path.isfile(file):
            continue

        warc_trec_ids = set(file2ids[file_id])

        i = 0
        while warc_trec_ids and i != MAX_TRIES:
            fp = WARCFile(file, "rb")

            first_record = True
            for record in fp:
                if not first_record:
                    if record['warc-trec-id'] in warc_trec_ids:  # Found record
                        # Encode
                        try:
                            text = record.payload.encode('utf-8')
                        except UnicodeDecodeError:
                            text = record.payload

                        write_file(text,
                                   '%s/%s' %
                                   (output_folder, record['warc-trec-id']),
                                   encoding=None)
                        warc_trec_ids.remove(record['warc-trec-id'])
                else:
                    first_record = False
            i += 1
        if warc_trec_ids:
            write_file(warc_trec_ids, 'failed_extracted_files.txt', mode='a')
Example #14
    def build_poc(self, poc_file, device_name, abi, sdk):
        poc_bin = poc_file[:-2]
        application_mk = APPLICATION_MK_TEMPLATE % (str(abi), str(sdk))
        android_mk = ANDROID_MK_TEMPLATE % (poc_file, poc_bin)
        original_src = "%s/%s/%s" % (consts.IDF_HOME, consts.POC_CODE_PATH,
                                     poc_file)
        work_dir = "%s/%s/%s_%s_%d/" % (consts.IDF_HOME, consts.TEMP_PATH,
                                        poc_bin, device_name, int(time.time()))
        build_dir = work_dir + "jni/"
        # mkdir for work
        os.system("mkdir -p %s" % build_dir)

        utils.write_file(build_dir + "Application.mk", application_mk)
        utils.write_file(build_dir + "Android.mk", android_mk)

        # copy src file
        os.system("cp %s %s" % (original_src, build_dir))
        # ndk-build
        os.system("cd %s; ndk-build &> /dev/null" % build_dir)

        return "%s/libs/%s/%s" % (work_dir, abi, poc_bin)
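build_poc renders two NDK build files from string templates and then shells out to ndk-build. The templates themselves are not part of the excerpt; the following is only an assumption about what APPLICATION_MK_TEMPLATE and ANDROID_MK_TEMPLATE could expand to, shown to make the two %-substitutions above concrete:

# Hypothetical template contents (not from the original project).
APPLICATION_MK_TEMPLATE = (
    "APP_ABI := %s\n"          # e.g. armeabi-v7a
    "APP_PLATFORM := android-%s\n"
)
ANDROID_MK_TEMPLATE = (
    "LOCAL_PATH := $(call my-dir)\n"
    "include $(CLEAR_VARS)\n"
    "LOCAL_SRC_FILES := %s\n"  # the PoC source file
    "LOCAL_MODULE := %s\n"     # the PoC binary name
    "include $(BUILD_EXECUTABLE)\n"
)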
Example #15
def write_basic():
    content = '发起拍照: name 值为 camera._takePicture' + '\n\n'
    content += ''
    for index, item in enumerate(result['take_count']):
        content += remark[0][index] + ':\t' + str(item) + '\n'

    content += '\n\n' + '————————————————' + '\n\n'

    content += '录像 name 值为 camera._startRecording' + '\n\n'
    content += ''
    for index, item in enumerate(result['record_count']):
        content += remark[1][index] + ':\t' + str(item) + '\n'

    content += '\n\n' + '————————————————' + '\n\n'

    content += '直播 name 值为 camera._startLive' + '\n\n'
    content += ''
    for index, item in enumerate(result['live_count']):
        content += remark[2][index] + ':\t' + str(item) + '\n'

    content += '\n\n' + '————————————————' + '\n\n'

    write_file('output/result1.txt', content)

    #####
    content = ''
    content += '发起拍照: name 值为 camera._takePicture wb、aaa_mode频次统计\n\n'

    for index in result['option_count']['wb']:
        content += 'wb = ' + str(index) + ':\t' + str(
            result['option_count']['wb'][index]) + '\n'

    content += '\n\n' + '————————————————' + '\n\n'

    for index in result['option_count']['aaa_mode']:
        content += 'aaa_mode = ' + str(index) + ':\t' + str(
            result['option_count']['aaa_mode'][index]) + '\n'

    write_file('output/result2.txt', content)
Example #16
    def start_analyze(self):
        try:
            with open('data/result_save', 'r') as f:
                self.result_list.extend(pickle.load(f))
        except:
            serialize_result('data/result_save', [])
        file_list, file_used = get_file_list()
        for file in file_list:
            try:
                self.analyze_log(file)
            except:
                write_file('data/file_used.json', json.dumps(file_used))
                serialize_result('data/result_save', self.result_list)
                print 'Not finished'
                exit()
            file_used.append(file)
            write_file('data/file_used.json', json.dumps(file_used))

        serialize_result('data/result_save', self.result_list)
        result = self.get_sum()
        self.print_export(result)
        serialize_result('data/result', result)
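serialize_result and get_file_list are project helpers that are not shown; given the pickle.load in the try block, serialize_result presumably just pickles its second argument to the given path so an interrupted run can be resumed. A hypothetical minimal version, stated purely as an assumption:

import pickle

def serialize_result(path, data):
    # Persist intermediate results so a crashed run can be resumed later.
    with open(path, 'wb') as f:
        pickle.dump(data, f)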
Example #17
def parse_exact_room_async(url, short_id):
    apartment = None

    request_data = get(url)
    try:
        soup = BeautifulSoup(request_data.text, 'html.parser')
        apartment = soup.find("div", class_="adtxt_box")
        current_flat_temp = flat.Flat()
        title = apartment.find("p", class_="title")
        current_flat_temp.sourceURL = url
        current_flat_temp.shortId = short_id
        current_flat_temp.price = title.find("span").contents[0]
        current_flat_temp.name = title.find("span").parent.contents[-1]
        current_flat_temp.author = apartment.find(
            "span", class_="phones").parent.contents[1]
        current_flat_temp.description = (''.join(
            str(p.contents[0]).replace('<span class="rooms">', '').replace(
                '</span>', '').strip()
            for p in apartment.findAll("p", class_=None)[:-1])).replace(
                '"', '', 100).replace("'", "", 100)
        current_flat_temp.phoneImgURL = apartment.find(
            "span", class_="phones").find("img")['src']
        current_flat_temp.creationDate = datetime.utcnow().strftime(
            "%Y-%m-%d %H:%M:%S.%f")
        current_flat_temp.actualToDate = (
            datetime.now() +
            timedelta(days=90)).strftime("%Y-%m-%d %H:%M:%S.%f")
        current_flat_temp.parsingSource = 1
        current_flat_temp.isActive = 1
        current_flat_temp.clientId = 1

        __photosListUrls = ""
        create_directory_if_not_exist(IMAGES_COMMON_PATH +
                                      current_flat_temp.shortId)

        for image in soup.findAll("a", rel="pics"):
            write_file(
                IMAGES_COMMON_PATH + current_flat_temp.shortId + "\\" +
                image["href"].split("/")[-1],
                get(KVARTIRANT_URL + image["href"]).content)
            __photosListUrls = __photosListUrls + image["href"].split(
                "/")[-1] + ";"

        current_flat_temp.photosListUrls = __photosListUrls

        if current_flat_temp.phoneImgURL:
            write_file(
                IMAGES_COMMON_PATH + current_flat_temp.shortId + "\\" +
                current_flat_temp.phoneImgURL.split("/")[-1],
                get(KVARTIRANT_URL + "/" +
                    current_flat_temp.phoneImgURL).content)

        new_db = database.ApartmentsDb()
        new_db.add_apartment(current_flat_temp)
        new_db.con.close()

    except Exception as ex:
        broken_flat = UnparsedFlat()
        broken_flat.URL = str(url)
        broken_flat.HTML = str(apartment).replace("'", '"')
        broken_flat.Exception = str(ex).replace("'", '"')
        broken_flat.ErrorDate = datetime.now()
        new_db = database.ApartmentsDb()
        new_db.add_unpased_apartment(broken_flat)
        new_db.con.close()
Example #18
    def __write_result(self, result: ExecuteResult):
        formatted_output = """
--host--: {host},
--username--: {username},
--password--: {password},
--protocol--: {protocol},
--login_success--: {login_success},
--connect_success--: {connect_success},
--command_ok--: {command_ok},
--ssh_port--: {ssh_port},
--telnet_port--: {telnet_port},
--ssh_connect_success--: {ssh_connect_success},
--telnet_connect_success--: {telnet_connect_success},
--command_list--:{command_list},
--err--: {err},
--result--:
{result}

""".format(host=result.host,
           username=result.username,
           password=result.password
           if self.cos.output_password else "<DO NOT SHOW PASSWORD>",
           protocol=result.protocol,
           login_success=str(result.login_success),
           connect_success=str(result.detail.ssh_connect_success
                               or result.detail.telnet_connect_success),
           command_ok=str(result.command_ok),
           ssh_port=self.cos.ssh_port,
           telnet_port=self.cos.telnet_port,
           ssh_connect_success=result.detail.ssh_connect_success,
           telnet_connect_success=result.detail.telnet_connect_success,
           command_list=" ".join(self.cos.command_list),
           err=result.err,
           result=result.output)
        if self.cos.output == "file":
            # besides writing to the file, also write to stdout so the caller can process the output
            utils.stdout_write(formatted_output, flush=True)

            filename = self.gen_filename(
                host=result.host,
                protocol=result.protocol,
                username=result.username,
                login_ok=result.login_success,
                connect_ok=result.detail.ssh_connect_success
                or result.detail.telnet_connect_success,
            )
            filename += ".txt"

            a, b = utils.write_file(dir=self.cos.output_dir,
                                    filename=filename,
                                    content=formatted_output)
            filename_full_path = filename if not self.cos.output_dir else self.cos.output_dir + "/" + filename
            if not a and not b:
                debug_print("write file: {} failed".format(filename_full_path))
                return
            debug_print("write to file: {} success".format(filename_full_path))
        else:  # if not writing to a file, send everything to stdout
            # lst = [
            # 	"\n--host--: " + result.host,
            # 	"--username--: " + result.username,
            # 	# "--password--: " + result.password,
            # 	"--protocol--: " + result.protocol,
            # 	"--login_success--: " + str(result.login_success),
            # 	"--connect_success--: " + str(result.connect_success),
            # 	"--command_ok--: " + str(result.command_ok),
            # 	"--err--: " + str(result.err),
            # 	"--result--:\n" + result.output,
            # ]
            # utils.stdout_write('\n'.join(lst), flush=True)
            utils.stdout_write(formatted_output, flush=True)
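Example #19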
                        results_votazione = run_query(sparql_senato, query_votazione,query_delay,Json=True)

                        if results_votazione!=-1:
                            total_result['votazioni'][votazione['votazione']] = results_votazione

                        else:
                            error_messages.append("Connection refused for query vot: %s" % query_votazione)

                else:
                    error_messages.append("Connection refused for query seduta vot: %s" % query_seduta_votazioni)


                write_file(output_path+
                           seduta_file_pattern+
                           seduta['numero']+".json",
                           total_result,
                           fields=None,
                           print_metadata=False,
                           Json=True
                )

            else:
                error_messages.append("Connection refused for query seduta: %s" % query_seduta)


    else:
        print "nessuna nuova seduta"
        exit(1)
else:
    error_messages.append("Connection refused for query sedute: %s" % query_sedute)

# if any errors occurred, send a single email containing all of them
Example #20
    for trec_id in tqdm(trec_ids, desc='Transforming HTMLs'):
        q_process_recv.put(trec_id)  # blocks until q below its max size

    # Tell workers we're done
    for _ in range(mp.cpu_count()):
        q_process_recv.put(None)

    # Receive info
    pbar = tqdm(total=len(trec_ids),
                ncols=100,
                leave=True,
                desc='Writing texts')
    time.sleep(0.1)  # It's how tqdm works...
    for _ in range(mp.cpu_count()):
        for text, trec_id in zip(*q_process_send.get()):
            write_file(text, '%s/%s' % (args.out_folder, trec_id))
            pbar.update(1)

    # Close pool
    pool.close()
    pool.join()

    # for trec_id in tqdm(trec_ids, total=len(trec_ids)):
    #     # Check if file exists
    #     if not os.path.isfile("%s/%s" % (args.in_folder, trec_id)):
    #         continue
    #     # Read HTML
    #     html = read_file("%s/%s" % (args.in_folder, trec_id), encoding='latin1')
    #
    #     i = 0
    #     while i != args.max_tries:
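Example #21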
def install_linux_check(params):
    """check guest status after installation, including network ping,
       read/write option in guest. return value: 0 - ok; 1 - bad
    """
    global logger
    logger = params['logger']
    params.pop('logger')

    guestname = params.get('guestname')
    virt_type = params.get('virt_type')

    logger.info("the name of guest is %s" % guestname)

    # Connect to local hypervisor connection URI
    hypervisor = utils.get_hypervisor()

    logger.info("the type of hypervisor is %s" % hypervisor)

    conn = sharedmod.libvirtobj['conn']
    domobj = conn.lookupByName(guestname)
    state = domobj.info()[0]

    if(state == libvirt.VIR_DOMAIN_SHUTOFF):
        logger.info("guest is shutoff, if u want to run this case, \
                     guest must be started")
        return 1

    logger.info("get the mac address of vm %s" % guestname)
    mac = utils.get_dom_mac_addr(guestname)
    logger.info("the mac address of vm %s is %s" % (guestname, mac))

    timeout = 300
    while timeout:
        ipaddr = utils.mac_to_ip(mac, 180)
        if not ipaddr:
            logger.info(str(timeout) + "s left")
            time.sleep(10)
            timeout -= 10
        else:
            logger.info("the ip address of vm %s is %s" % (guestname, ipaddr))
            break

    if timeout == 0:
        logger.info("vm %s fail to get ip address" % guestname)
        return 1

    time.sleep(120)

    logger.info("Now checking guest health after installation")

    domain_name=guestname
    blk_type=params['hddriver']
    nic_type=params['nicdriver']
    Test_Result = 0

    # Ping guest from host
    logger.info("check point1: ping guest from host")
    if utils.do_ping(ipaddr, 20) == 1:
        logger.info("ping current guest successfull")
    else:
        logger.error("Error: can't ping current guest")
        Test_Result = 1
        return Test_Result

    # Create file and read file in guest.
    logger.info("check point2: create and read directory/file in guest")
    if utils.create_dir(ipaddr, "root", "redhat") == 0:
        logger.info("create dir - /tmp/test successfully")
        if utils.write_file(ipaddr, "root", "redhat") == 0:
            logger.info("write and read file: /tmp/test/test.log successfully")
        else:
            logger.error("Error: fail to write/read file - /tmp/test/test.log")
            Test_Result = 1
            return Test_Result
    else:
        logger.error("Error: fail to create dir - /tmp/test")
        Test_Result = 1
        return Test_Result

    # Check whether vcpu equals the value set in geust config xml
    logger.info("check point3: check cpu number in guest equals to \
                 the value set in domain config xml")
    vcpunum_expect = int(utils.get_num_vcpus(domain_name))
    logger.info("vcpu number in domain config xml - %s is %s" % \
                 (domain_name, vcpunum_expect))
    vcpunum_actual = int(utils.get_remote_vcpus(ipaddr, "root", "redhat"))
    logger.info("The actual vcpu number in guest - %s is %s" %
                 (domain_name, vcpunum_actual))
    if vcpunum_expect == vcpunum_actual:
        logger.info("The actual vcpu number in guest is \
                     equal to the setting your domain config xml")
    else:
        logger.error("Error: The actual vcpu number in guest is \
                      NOT equal to the setting your domain config xml")
        Test_Result = 1
        return Test_Result

    # Check whether mem in guest is equal to the value set in domain config xml
    logger.info("check point4: check whether mem in guest is equal to \
                 the value set in domain config xml")
    mem_expect = utils.get_size_mem(domain_name)
    logger.info("current mem size in domain config xml - %s is %s" %
                 (domain_name, mem_expect))
    mem_actual = utils.get_remote_memory(ipaddr, "root", "redhat")
    logger.info("The actual mem size in guest - %s is %s" %
                (domain_name, mem_actual))
    diff_range = int(mem_expect) * 0.07
    diff = int(mem_expect) - int(mem_actual)
    if int(math.fabs(diff)) < int(diff_range):
        logger.info("The actual mem size in guest is almost equal to \
                    the setting your domain config xml")
    else:
        logger.error("Error: The actual mem size in guest is NOT equal to \
                      the setting your domain config xml")
        Test_Result = 1
        return Test_Result

    # Check app works fine in guest, such as: wget
    logger.info("check point5: check app works fine in guest, such as: wget")
    logger.info("get system environment information")
    envfile = os.path.join(HOME_PATH, 'global.cfg')
    logger.info("the environment file is %s" % envfile)

    envparser = env_parser.Envparser(envfile)
    file_url = envparser.get_value("other", "wget_url")

    if utils.run_wget_app(ipaddr, "root", "redhat", file_url, logger) == 0:
        logger.info("run wget successfully in guest.")
    else:
        logger.error("Error: fail to run wget in guest")
        Test_Result = 1
        return Test_Result

    # Check nic and blk driver in guest
    if 'kvm' in virt_type or 'xenfv' in virt_type:
        logger.info("check point6: check nic and blk driver in guest is \
                     expected as your config:")
        if utils.validate_remote_nic_type(ipaddr, "root", "redhat",
           nic_type, logger) == 0 and \
           utils.validate_remote_blk_type(ipaddr, "root", "redhat",
                                        blk_type, logger) == 0:
            logger.info("nic type - %s and blk type - %s check successfully" %
                       (nic_type, blk_type))
        else:
            logger.error("Error: nic type - %s or blk type - %s check failed" %
                        (nic_type, blk_type))
            Test_Result = 1
            return Test_Result

    return Test_Result
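Example #22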
        # "Nome",
        # "Voto"

        voti_all = vne_sheet.worksheet("Voti dei Parlamentari").get_all_values()
        # drop the columns beyond the 4th from the votes sheet

        for voto in voti_all[1:]:
            # strip any leading/trailing whitespace from the vote value
            voto[4] = voto[4].strip()
            voti.append(voto[:4])

        vne_dict = {'metadati': dati_generali, 'voti' : voti}

        write_file(output_path+
                   ramo+"_vne_" +
                   dati_generali['N. seduta'] +
                   "_"+
                   dati_generali['Titolo votazione'][:10]+".json",
                   vne_dict,
                   fields=None,
                   print_metadata=False,
                   Json=True
            )

    close_procedure(script_name, smtp_server, notification_system, notification_list, error_mail_body)