Example #1
	def analysis_report(self, url_id, content):
		"""Parse the VirusTotal detection report and store the result; return False otherwise."""
		# json, MysqlConnection and createfield are assumed to be imported/defined at module level
		conn = MysqlConnection().return_conn()
		cursor = conn.cursor()

		report = json.loads(content)
		response_code = report['response_code']
		if response_code == 0:                 # different response_code values describe different situations
			print self.url + ', this site is not present in VirusTotal\'s database'
		elif response_code == -2:
			print self.url + ', the requested item is still queued for analysis'
		elif response_code == 1:
			url = report['url']                # the URL that was queried
			scan_date = report['scan_date']    # time of the scan
			positives = report['positives']    # number of engines that flagged the URL as malicious
			total = report['total']            # total number of engines used
			scans = report['scans']

			# keep one {engine: verdict} entry per engine that detected the URL
			result_list = []
			for key in scans.keys():
				if scans[key]['detected'] == True:
					result_list.append({str(key): str(scans[key]['result'])})
			i = len(result_list)

			# build the INSERT statement with one extra column/placeholder per detecting engine
			sql = 'INSERT INTO virustotal_details(id,url,subtime,total,positives' + createfield(i) + ')'
			placeholders = ',"%s"' * i
			sql = sql + ' VALUES ("%s","%s","%s","%s","%s"' + placeholders + ')'

			value = (str(url_id), str(url), str(scan_date), str(total), str(positives)) + tuple(result_list)
			sql = sql % value
			print sql

			cursor.execute(sql)
			conn.commit()
			cursor.close()
			conn.close()
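
For reference, here is a minimal report payload carrying only the fields that analysis_report reads; the engine names and verdicts are illustrative placeholders, not real scan data:

    sample_content = json.dumps({
        'response_code': 1,                  # 1 means a report is available
        'url': 'http://example.com/',
        'scan_date': '2015-01-01 00:00:00',
        'positives': 1,                      # engines that flagged the URL
        'total': 60,                         # engines consulted
        'scans': {
            'EngineA': {'detected': True, 'result': 'malicious site'},
            'EngineB': {'detected': False, 'result': 'clean site'},
        },
    })
    # calling analysis_report(url_id, sample_content) on an instance would insert one row
    # with a single detail column, since only detecting engines are stored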
Example #2
class File2Sql:

    def __init__(self, file_source='ip_block_source/anhui.txt'):
        '''Initialise the importer.'''

        self.conn = MysqlConnection().return_conn()  # connect to the database
        self.file_source = file_source               # path to the txt file holding the IP ranges
        
    def file2sql(self, num=500):
        '''Import the IP ranges from the file into the database.'''

        sql = 'INSERT INTO ip_block (start_ip,end_ip,region_id,state) VALUES (%s,%s,"1","0")'
        cursor = self.conn.cursor()
        ip_block = open(self.file_source, mode='r')        # open the source file
        ip_block_lines = ip_block.readlines()              # read every IP range
        for line in ip_block_lines:
            list_ip = line.strip().split('\t')             # split "start_ip<TAB>end_ip" into a list
            long_start_ip = ip2long(str(list_ip[0]))
            long_end_ip = ip2long(str(list_ip[1]))
            row_count = long_end_ip - long_start_ip + 1
            counts = row_count // num                      # number of chunks for this range
            for count in range(counts):
                change_ip = long_start_ip + num
                if change_ip < long_end_ip:
                    print long2ip(long_start_ip), long2ip(change_ip)
                    cursor.execute(sql, (long2ip(long_start_ip), long2ip(change_ip)))
                    long_start_ip = change_ip + 1
                else:
                    # last chunk: cap at the real end of the range
                    print long2ip(long_start_ip), long2ip(long_end_ip)
                    cursor.execute(sql, (long2ip(long_start_ip), long2ip(long_end_ip)))
                    break
            self.conn.commit()

        ip_block.close()
        cursor.close()
        self.conn.close()
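
A minimal usage sketch for File2Sql, assuming the MysqlConnection, ip2long and long2ip helpers from the snippet are importable and the input file holds one start_ip<TAB>end_ip pair per line:

    # ip_block_source/anhui.txt (tab-separated, illustrative addresses):
    # 1.2.3.0	1.2.3.255

    importer = File2Sql(file_source='ip_block_source/anhui.txt')
    importer.file2sql(num=500)   # insert the range in chunks of about 500 addresses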
Example #3
    lock.acquire()  # thread lock: result_urls and URL_COUNT are shared across worker threads

    result_urls.append({'url': checkurl, 'http_code': http_code, 'dir_url': dir_url})
    URL_COUNT += 1

    if URL_COUNT % RESULTS_NUM == 0 or URL_COUNT == rowcount:   # flush the results to the database every RESULTS_NUM URLs, or once the last URL is reached
        print result_urls

        sql = 'UPDATE url_detail_info SET online = %s WHERE url = %s '
        conn = MysqlConnection().return_conn()
        cursor = conn.cursor()
        for url in result_urls:
            cursor.execute(sql, (url['http_code'], url['url']))

        conn.commit()   # commit the updates
        cursor.close()  # close the cursor and connection
        conn.close()

        del result_urls[:]      # clear the probe results that have already been written
        if URL_COUNT == rowcount:
            print 'end'

    lock.release()  # release the lock
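
The block above relies on shared module-level state; a minimal sketch of how it might be initialised (the names come from the snippet, but the values, notably RESULTS_NUM, are assumptions):

    import threading

    lock = threading.Lock()   # guards result_urls and URL_COUNT across worker threads
    result_urls = []          # buffered probe results waiting to be flushed
    URL_COUNT = 0             # number of URLs probed so far
    RESULTS_NUM = 50          # flush to the database every RESULTS_NUM results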


def main():

    sql = 'SELECT url from url_detail_info limit 1000'
    conn = MysqlConnection().return_conn()
    cursor = conn.cursor()