Example #1
 def run(self):
     print(banner)
     dt = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
     print(f'[*] Starting OneForAll @ {dt}\n')
     logger.log('INFOR', f'开始运行OneForAll')
     self.domains = utils.get_domains(self.target)
     if self.domains:
         for self.domain in self.domains:
             collect = Collect(self.domain, export=False)
             collect.run()
             if self.brute:
                 # Brute forcing issues a large number of DNS resolution requests; running it concurrently often makes network requests in other tasks time out
                 brute = AIOBrute(self.domain)
                 brute.run()
             table_name = self.domain.replace('.', '_')
             db_conn = database.connect_db()
             self.datas = database.get_data(db_conn, table_name).as_dict()
             loop = asyncio.get_event_loop()
             asyncio.set_event_loop(loop)
             self.datas = loop.run_until_complete(resolve.bulk_query_a(self.datas))
             self.datas = loop.run_until_complete(request.bulk_get_request(self.datas, self.port))
             loop.run_until_complete(asyncio.sleep(0.25))  # short delay before closing the event loop so underlying connections can finish closing
             loop.close()
             database.clear_table(db_conn, table_name)
             database.save_db(db_conn, table_name, self.datas)
             # Export from the database
             if not self.path:
                 self.path = config.result_save_path.joinpath(f'{self.domain}.{self.format}')
             dbexport.export(table_name, db_conn, self.valid, self.path, self.format, self.output)
             db_conn.close()
     else:
         logger.log('FATAL', f'获取域名失败')
     logger.log('INFOR', f'结束运行OneForAll')
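A minimal driver sketch for the run() method above. It assumes the enclosing class is named OneForAll, that its constructor accepts the target shown in self.target, and that the class is exposed on the command line via python-fire (as in the upstream project); the module path is an assumption.

from oneforall import OneForAll  # assumed module and class names
import fire

if __name__ == '__main__':
    # Exposes constructor arguments (e.g. --target example.com) and methods (run) on the CLI.
    fire.Fire(OneForAll)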
Example #2
    def main(self):
        if self.brute is None:
            self.brute = config.enable_brute_module
        if self.verify is None:
            self.verify = config.enable_verify_subdomain
        rename_table = self.domain + '_last'
        collect = Collect(self.domain, export=False)
        collect.run()
        if self.brute:
            # Brute forcing issues a large number of DNS resolution requests; running it concurrently may break network requests in other tasks
            brute = AIOBrute(self.domain, export=False)
            brute.run()

        db = Database()
        db.copy_table(self.domain, self.domain + '_ori')
        db.remove_invalid(self.domain)
        db.deduplicate_subdomain(self.domain)
        # Case where subdomains are not verified
        if not self.verify:
            # Export from the database
            self.valid = None
            dbexport.export(self.domain,
                            valid=self.valid,
                            format=self.format,
                            show=self.show)
            db.drop_table(rename_table)
            db.rename_table(self.domain, rename_table)
            return
        # Start verifying subdomains
        self.datas = db.get_data(self.domain).as_dict()
        loop = asyncio.get_event_loop()
        asyncio.set_event_loop(loop)

        # Resolve the domain names
        task = resolve.bulk_query_a(self.datas)
        self.datas = loop.run_until_complete(task)

        # Save resolution results
        resolve_table = self.domain + '_res'
        db.drop_table(resolve_table)
        db.create_table(resolve_table)
        db.save_db(resolve_table, self.datas, 'resolve')

        # Request the domains over HTTP
        task = request.bulk_get_request(self.datas, self.port)
        self.datas = loop.run_until_complete(task)
        # Short delay before closing the event loop so underlying connections can finish closing
        loop.run_until_complete(asyncio.sleep(0.25))

        db.clear_table(self.domain)
        db.save_db(self.domain, self.datas)

        # Export from the database
        dbexport.export(self.domain,
                        valid=self.valid,
                        format=self.format,
                        show=self.show)
        db.drop_table(rename_table)
        db.rename_table(self.domain, rename_table)
        db.close()
Example #3
    def main(self):
        """
        OneForAll main process

        :return: subdomain results
        :rtype: list
        """
        self.old_table = self.domain + '_old_result'
        self.new_table = self.domain + '_now_result'
        self.origin_table = self.domain + '_origin_result'
        self.resolve_table = self.domain + '_resolve_result'

        collect = Collect(self.domain, export=False)
        collect.run()
        if self.brute:
            # Brute forcing issues a large number of DNS resolution requests,
            # which may cause network requests in other tasks to fail
            brute = Brute(self.domain, word=True, export=False)
            brute.check_env = False
            brute.quite = True
            brute.run()

        # Database processing
        self.deal_db()
        # Mark newly discovered subdomains
        self.data = self.mark()

        # Export results without resolve
        if not self.dns:
            return self.export(self.domain)

        # Resolve subdomains
        self.data = resolve.run_resolve(self.domain, self.data)
        # Save resolve results
        resolve.save_data(self.resolve_table, self.data)

        # Export results without HTTP request
        if not self.req:
            return self.export(self.resolve_table)

        # HTTP request
        self.data = request.run_request(self.domain, self.data, self.port)
        # Save HTTP request result
        request.save_data(self.domain, self.data)

        # Add the final result list to the total data list
        self.datas.extend(self.data)

        # Export
        self.export(self.domain)

        # Scan subdomain takeover
        if self.takeover:
            subdomains = utils.get_subdomains(self.data)
            takeover = Takeover(subdomains)
            takeover.run()
        return self.data
Example #4
    def main(self):
        """
        OneForAll实际运行主流程

        :return: 子域结果
        :rtype: list
        """
        self.old_table = self.domain + '_old_result'
        self.new_table = self.domain + '_now_result'
        self.origin_table = self.domain + '_origin_result'
        self.resolve_table = self.domain + '_resolve_result'

        collect = Collect(self.domain, export=False)
        collect.run()
        if self.brute:
            # Brute forcing issues a large number of DNS resolution requests; running it concurrently may break network requests in other tasks
            brute = Brute(self.domain, word=True, export=False)
            brute.check_env = False
            brute.run()

        # Database processing
        self.deal_db()
        # Mark newly discovered subdomains
        self.data = self.mark()

        # Export results without resolving subdomains
        if not self.dns:
            return self.export(self.domain)

        # Resolve subdomains
        self.data = resolve.run_resolve(self.domain, self.data)
        # Save resolution results
        resolve.save_data(self.resolve_table, self.data)

        # Export results without making HTTP requests
        if not self.req:
            return self.export(self.resolve_table)

        # Request subdomains over HTTP
        self.data = request.run_request(self.domain, self.data, self.port)
        # Save request results
        request.save_data(self.domain, self.data)

        # Add the final result list to the overall data list
        self.datas.extend(self.data)

        # Export from the database
        self.export(self.domain)

        # Check for subdomain takeover
        if self.takeover:
            subdomains = utils.get_subdomains(self.data)
            takeover = Takeover(subdomains)
            takeover.run()
        return self.data
Example #5
    def main(self):
        collect = Collect(self.domain, export=False)
        collect.run()
        if self.brute:
            # Brute forcing issues a large number of DNS resolution requests; running it concurrently may break network requests in other tasks
            brute = AIOBrute(self.domain, export=False)
            brute.run()

        db = Database()
        db.copy_table(self.domain, self.domain+'_ori')
        db.remove_invalid(self.domain)
        db.deduplicate_subdomain(self.domain)
        self.datas = db.get_data(self.domain).as_dict()
        loop = asyncio.get_event_loop()
        asyncio.set_event_loop(loop)

        # Resolve the domain names
        task = resolve.bulk_query_a(self.datas)
        self.datas = loop.run_until_complete(task)

        # Save resolution results
        resolve_table = self.domain + '_res'
        db.drop_table(resolve_table)
        db.create_table(resolve_table)
        db.save_db(resolve_table, self.datas, 'resolve')

        # Request the domains over HTTP
        task = request.bulk_get_request(self.datas, self.port)
        self.datas = loop.run_until_complete(task)
        # Short delay before closing the event loop so underlying connections can finish closing
        loop.run_until_complete(asyncio.sleep(0.25))
        loop.close()

        db.clear_table(self.domain)
        db.save_db(self.domain, self.datas)

        # Export from the database
        if not self.path:
            name = f'{self.domain}.{self.format}'
            self.path = config.result_save_path.joinpath(name)
        dbexport.export(self.domain, db.conn, self.valid, self.path,
                        self.format, self.output)

        rename_table = self.domain + '_last'
        db.drop_table(rename_table)
        db.rename_table(self.domain, rename_table)
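The examples above all insert a short asyncio.sleep(0.25) before loop.close(). The sketch below isolates that pattern in a self-contained script; aiohttp is an assumption about the HTTP client the bulk request helpers wrap, and the URL is illustrative.

import asyncio
import aiohttp

async def fetch_status(url):
    # One request per session, just to keep the sketch small.
    async with aiohttp.ClientSession() as session:
        async with session.get(url) as response:
            return response.status

loop = asyncio.get_event_loop()
status = loop.run_until_complete(fetch_status('https://example.com'))
# Short delay so underlying transports can finish closing before the loop is torn down.
loop.run_until_complete(asyncio.sleep(0.25))
loop.close()
print(status)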
Example #6
    def picture(self, t_name):
        picture = list()
        tweet = Tweet()
        status_media = {}
        value = {}
        collect = Collect()

        api = collect.api()
        status_media = tweet.get_timeline(api, t_name)

        for media in status_media:
            try:
                if (hasattr(status_media[media], "extended_entities")):
                    value = status_media[media].extended_entities
                    for key in value["media"]:
                        picture.append(key["media_url"])
            except:
                pass

        return picture
Example #7
    def username(self, t_name):
        username = list()
        tweet = Tweet()
        status_media = {}
        value = {}
        collect = Collect()

        api = collect.api()
        status_media = tweet.get_timeline(api, t_name)

        for media in status_media:
            try:
                if (hasattr(status_media[media], "extended_entities")):
                    value = status_media[media].extended_entities
                    for key in value["media"]:
                        username.append(status_media[media].user._json["name"])
            except:
                pass

        return username
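A hypothetical caller for the two helpers above. The enclosing class is not shown in the examples, so the name Media is a placeholder, as is the screen name passed in.

media = Media()                    # placeholder for the class defining picture() and username()
names = media.username('twitter')  # screen name is illustrative
urls = media.picture('twitter')
for name, url in zip(names, urls):
    print(name, url)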
Example #8
	def handleDiscovery(self, dev, isNewDev, isNewData):
		if (dev.addr in self.macAddress and isNewData and dev.getValueText(22) != None):
			datas = dev.getValueText(22)
			bat = int(datas[20:22],16)
			hum = int(datas[28:32],16)/100
			temp = int(datas[24:28],16)/100
			#testData(bat, self.dataBat)
			#testData(hum, self.dataHum)
			#testData(temp, self.dataTemp)
			caption = CaptionDAO.getByMacAddress(dev.addr)
			print("----------------------------")
			print(caption.name)
			print("Batterie : ", bat)
			print("Humidite : ", hum)
			print("Temperature : ", temp)
			print("----------------------------")
			collectHum = Collect(0, self.dataHum, caption, datetime.datetime.now(),hum)
			collectBat = Collect(0, self.dataBat, caption, datetime.datetime.now(),bat)
			collectTemp = Collect(0, self.dataTemp, caption, datetime.datetime.now(),temp)
			CollectDAO.create(collectHum)
			CollectDAO.create(collectBat)
			CollectDAO.create(collectTemp)
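handleDiscovery() above is the callback signature of a bluepy scan delegate. A minimal sketch of how such a delegate is normally wired up follows; the MAC filtering and payload parsing from the example are omitted.

from bluepy.btle import DefaultDelegate, Scanner

class SensorDelegate(DefaultDelegate):
    def handleDiscovery(self, dev, isNewDev, isNewData):
        # AD type 22 (0x16) is Service Data, the field parsed in the example above.
        service_data = dev.getValueText(22)
        if isNewData and service_data is not None:
            print(dev.addr, service_data)

scanner = Scanner().withDelegate(SensorDelegate())
scanner.scan(10.0)  # scan for 10 seconds; the callback fires for each advertisement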
Example #9
from usidpatch import usidpatch
from nextday import N3R
from models import Connection
from xignite import Fundamentals

conn, c = Connection()
import sqlite3
import pandas as pd  # used below for pd.DataFrame and to_sql

z = Prep()
z.get_df(3000)
master_df = pd.DataFrame()
for i in z.tuple_list:
    a, b, c = i[0], i[1], i[2]
    # print(a,b,c)
    try:
        y = Collect(a, b, c, thresh=.075, n=22)
        if len(y.df_sub) > 0:
            print('Ticker: ', a, '\n Updating: ', len(y.df_sub), ' records')
            master_df = master_df.append(y.df_sub)
        else:
            pass
        y.update_d1Table()
    except:
        y = Collect_Error(a)
        print('Deleted: ', a)
usid = usidpatch(master_df)
usid.df = usid.df.reset_index()
usid.df = usid.df.set_index('USID')
usid.df.to_sql('stockDT',
               conn,
               if_exists='append')
Example #10
 def add_product(self, product):
     return Collect.create({'collection_id': self.id, 'product_id': product.id})
Example #11
 def remove_product(self, product):
     collect = Collect.find_first(collection_id=self.id, product_id=product.id)
     if collect:
         collect.destroy()
Example #12
File: extract.py  Project: sunweipeng/edm
	def task_extract_time(self):
		"""
		定时任务
		:return:
		"""
		send_count = 0
		send_report = 0
		while True:
			self.logger.info("=============================浮屠长生 开始第[%s]定时任务==================================" % send_count)
			"""获取发送时间段"""
			send_min_max_time = BusiBaseConfService().search_key_return_value((busi_config.SEND_MIN_MAX_TIME), "9|20")
			"""数据分割"""
			send_time = send_min_max_time.split("|")
			"""获取休眠时间"""
			batch_sleep_time = BusiBaseConfService().search_key_return_value((busi_config.BUSI_BATCH_SLEEP_TIME), "5|8")
			sleep_time = batch_sleep_time.split("|")
			"""获取允许重发的ip"""
			allow_resend_server_ip = BusiBaseConfService().search_key_return_value((busi_config.ALLOW_RESEND_SERVER_IP))

			"""休眠时长"""
			time_out = random.randint(int(sleep_time[0]), int(sleep_time[1]))
			"""获取当前时间"""
			hour = int(time.strftime('%H', time.localtime(time.time())))
			if hour > int(send_time[1]):
				"""
				收取邮件
				"""
				Collect().instance_server_ip()
				self.logger.info("当前时间未在发送时间段内容,进行休眠,休眠时长[%s]分钟" % time_out)
				time.sleep(60 * time_out)
			elif hour < int(send_time[0]):
				"""判断是否在发送时间段内,如果不存在的话跳过 0~9"""
				"""若标志位为0"""
				if send_report == 0 and allow_resend_server_ip == self.public_ip and self.get_task_list():
					send_report = 1
					"""更新账号"""
					SendEmailAccountService().update_all_status_valid()
				send_count = 0
				self.logger.info("当前时间未在发送时间段内容,进行休眠,休眠时长[%s]分钟" % time_out)
				time.sleep(60 * time_out)
			elif not self.search_task_code_by_server_ip():
				self.logger.info("【提取数据】不存在发送中的任务,流程结束")
				time.sleep(60 * time_out)
			elif send_count != 0 and not self.get_task_list():
				"""如果不是当天第一次发送,且缓存记录为空"""
				"""发送次数"""
				send_count += 1
				send_report = 0
				"""
				收取邮件
				"""
				Collect().instance_server_ip()
				"""
				数据二次营销 大于5次就不再营销
				"""
				if allow_resend_server_ip == self.public_ip:
					self.resend_instance()
				self.instance()
				"""休眠"""
				self.logger.info("当前第[%s]次发送,提取完成,进行休眠,休眠时长[%s]分钟" % (send_count, time_out))
				time.sleep(60 * time_out)
			elif self.get_task_list():
				"""如果缓存记录不为空,则休眠"""
				send_report = 0
				self.logger.info("当前第[%s]次发送,缓存信息不为空,进行休眠,休眠时长[%s]分钟" % (send_count, time_out))
				time.sleep(60 * time_out)
			else:
				"""如果缓存记录为空,进行消费生产"""
				send_count += 1
				send_report = 0
				"""生产"""
				self.instance()
				self.logger.info("当前第[%s]次发送,生成缓存信息,进行休眠,休眠时长[%s]分钟" % (send_count, time_out))
				time.sleep(60*time_out)
			self.logger.info("=============================浮屠长生 结束本次定时任务==================================")
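The loop above mostly gates work on a configured hour window and sleeps a random number of minutes otherwise. A stripped-down sketch of just that gating logic follows; the "9|20" window and "5|8" sleep range are the defaults from the example, everything else is illustrative.

import random
import time

def within_window(window="9|20"):
    # "9|20" means: only send between 09:00 and 20:59.
    start, end = (int(part) for part in window.split("|"))
    return start <= int(time.strftime('%H')) <= end

if within_window():
    print("inside the sending window: extract and send")
else:
    low, high = (int(part) for part in "5|8".split("|"))
    minutes = random.randint(low, high)
    print("outside the sending window, sleeping for %s minutes" % minutes)
    time.sleep(60 * minutes)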
Example #13
 def add_product(self, product):
     return Collect.create({'collection_id': self.id, 'product_id': product.id})
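Hypothetical usage of the helper above. Collect looks like an ORM join model between a collection and a product, so Collection, Product, and their lookup methods here are placeholders rather than part of the example.

collection = Collection.find(1)             # hypothetical lookup on the parent model
product = Product.find(42)                  # hypothetical product
collect = collection.add_product(product)   # creates the Collect row linking the two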
Example #14
    def main(self):
        if self.brute is None:
            self.brute = config.enable_brute_module
        if self.verify is None:
            self.verify = config.enable_verify_subdomain
        old_table = self.domain + '_last'
        new_table = self.domain + '_now'
        collect = Collect(self.domain, export=False)
        collect.run()
        if self.brute:
            # Brute forcing issues a large number of DNS resolution requests; running it concurrently may break network requests in other tasks
            brute = AIOBrute(self.domain, export=False)
            brute.run()

        db = Database()
        db.copy_table(self.domain, self.domain+'_ori')
        db.remove_invalid(self.domain)
        db.deduplicate_subdomain(self.domain)

        old_data = []
        # Database preprocessing when this is not the first collection run for the domain
        if db.exist_table(new_table):
            db.drop_table(old_table)  # drop the previous result table if it exists
            db.rename_table(new_table, old_table)  # rename the new table to the old table
            old_data = db.get_data(old_table).as_dict()

        # Case where subdomains are not verified
        if not self.verify:
            # Export from the database
            self.valid = None
            dbexport.export(self.domain, valid=self.valid,
                            format=self.format, show=self.show)
            db.drop_table(new_table)
            db.rename_table(self.domain, new_table)
            return
        # Start verifying subdomains
        self.data = db.get_data(self.domain).as_dict()

        # Mark newly discovered subdomains
        self.data = utils.mark_subdomain(old_data, self.data)

        loop = asyncio.get_event_loop()
        asyncio.set_event_loop(loop)

        # Resolve the domain names
        task = resolve.bulk_query_a(self.data)
        self.data = loop.run_until_complete(task)

        # Save resolution results
        resolve_table = self.domain + '_res'
        db.drop_table(resolve_table)
        db.create_table(resolve_table)
        db.save_db(resolve_table, self.data, 'resolve')

        # Request the domains over HTTP
        task = request.bulk_get_request(self.data, self.port)
        self.data = loop.run_until_complete(task)
        # Short delay before closing the event loop so underlying connections can finish closing
        loop.run_until_complete(asyncio.sleep(0.25))

        db.clear_table(self.domain)
        db.save_db(self.domain, self.data)

        # Export from the database
        dbexport.export(self.domain, valid=self.valid,
                        format=self.format, show=self.show)
        db.drop_table(new_table)
        db.rename_table(self.domain, new_table)
        db.close()

        # Check for subdomain takeover
        if self.takeover:
            subdomains = set(map(lambda x: x.get('subdomain'), self.data))
            takeover = Takeover(subdomains)
            takeover.run()
Example #15
def collect():
    user_socket = request.environ.get("wsgi.websocket")
    if user_socket:
        while True:
            message = json.loads(user_socket.receive())
            if message.get("msg") == "exit":
                break
            try:
                # start of the upload
                msg = {
                    "msg": "开始上传",
                    "percent": 0,
                }
                user_socket.send(json.dumps(msg))

                msg_re = message.get("msg").split("\n")
                cu = message.get("cu")
                merchants = message.get("merchants")
                print(msg_re, cu)
                brands = CsBrand.query.all()
                print(brands[0].brand_name)
                co = Collect(user_socket, cu, msg_re, merchants, brands)
                co.execute()

                # end of the upload
                msg = {
                    "msg": "上传完成",
                    "percent": 100,
                }
                user_socket.send(json.dumps(msg))

                # update the page counters
                if 1 <= int(cu) <= 7:
                    new_pages = []
                    for i, items in enumerate(session['pages'].split()):
                        if i == int(cu):
                            new_pages.append(str(int(msg_re[0]) + 1))
                        else:
                            new_pages.append(items)
                    session['pages'] = ' '.join(new_pages)

                    # and update the pages file
                    filename = "config/" + session['username'] + ".txt"
                    with open(filename, 'w') as f:
                        f.write(session['pages'])

                    # update the merchant IDs
                    new_merchants_list = []
                    for i, items in enumerate(session['merchants_list'].split()):
                        if i == int(cu):
                            new_merchants_list.append(merchants)
                        else:
                            new_merchants_list.append(items)
                    session['merchants_list'] = ' '.join(new_merchants_list)

                    # and update the merchants file
                    filename = "config/" + session['username'] + "-mise.txt"
                    with open(filename, 'w') as f:
                        f.write(session['merchants_list'])

            except:
                continue
Example #16
from flask import Flask, jsonify, request, render_template
from answer import Answer
from collect import Collect
import re
regex = re.compile(
    r'^(?:http|ftp)s?://'  # http:// or https://
    r'(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}\.?)|'  #domain...
    r'localhost|'  #localhost...
    r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})'  # ...or ip
    r'(?::\d+)?'  # optional port
    r'(?:/?|[/?]\S+)$',
    re.IGNORECASE)

answers = Answer()
collecting = Collect()
app = Flask(__name__)


@app.route('/', methods=['POST'])
def home():
    if (request.method == 'POST'):
        data = request.get_json()
        reply = answers.getReply(data['q'])
        return jsonify({'answer': reply})


@app.route("/page")
def page():
    return render_template("page.html")
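A small way to exercise the service above: run the app and POST a question to '/'. The host, port, and payload value are illustrative; the 'q' key comes from the handler.

if __name__ == '__main__':
    app.run(host='127.0.0.1', port=5000)

# From another terminal:
#   curl -X POST http://127.0.0.1:5000/ -H 'Content-Type: application/json' -d '{"q": "hello"}'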

Example #17
    def main(self):
        if self.brute is None:
            self.brute = config.enable_brute_module
        if self.dns is None:
            self.dns = config.enable_dns_resolve
        if self.req is None:
            self.req = config.enable_http_request
        old_table = self.domain + '_last_result'
        new_table = self.domain + '_now_result'
        collect = Collect(self.domain, export=False)
        collect.run()
        if self.brute:
            # Brute forcing issues a large number of DNS resolution requests; running it concurrently may break network requests in other tasks
            brute = AIOBrute(self.domain, export=False)
            brute.run()

        db = Database()
        original_table = self.domain + '_original_result'
        db.copy_table(self.domain, original_table)
        db.remove_invalid(self.domain)
        db.deduplicate_subdomain(self.domain)

        old_data = []
        # Database preprocessing when this is not the first collection run for the domain
        if db.exist_table(new_table):
            db.drop_table(old_table)  # drop the previous result table if it exists
            db.rename_table(new_table, old_table)  # rename the new table to the old table
            old_data = db.get_data(old_table).as_dict()

        # Export results without resolving subdomains
        if not self.dns:
            # Export from the database
            dbexport.export(self.domain,
                            valid=self.valid,
                            format=self.format,
                            show=self.show)
            db.drop_table(new_table)
            db.rename_table(self.domain, new_table)
            db.close()
            return

        self.data = db.get_data(self.domain).as_dict()

        # Mark newly discovered subdomains
        self.data = utils.mark_subdomain(old_data, self.data)

        # Get the event loop
        loop = asyncio.get_event_loop()
        asyncio.set_event_loop(loop)

        # Resolve subdomains
        task = resolve.bulk_resolve(self.data)
        self.data = loop.run_until_complete(task)

        # Save resolution results
        resolve_table = self.domain + '_resolve_result'
        db.drop_table(resolve_table)
        db.create_table(resolve_table)
        db.save_db(resolve_table, self.data, 'resolve')

        # Export results without making HTTP requests
        if not self.req:
            # Export from the database
            dbexport.export(resolve_table,
                            valid=self.valid,
                            format=self.format,
                            show=self.show)
            db.drop_table(new_table)
            db.rename_table(self.domain, new_table)
            db.close()
            return

        # Request subdomains over HTTP
        task = request.bulk_request(self.data, self.port)
        self.data = loop.run_until_complete(task)
        self.datas.extend(self.data)
        # Short delay before closing the event loop so underlying connections can finish closing
        loop.run_until_complete(asyncio.sleep(0.25))
        count = utils.count_valid(self.data)
        logger.log('INFOR', f'经验证{self.domain}有效子域{count}个')

        # Save request results
        db.clear_table(self.domain)
        db.save_db(self.domain, self.data, 'request')

        # Export from the database
        dbexport.export(self.domain,
                        valid=self.valid,
                        format=self.format,
                        show=self.show)
        db.drop_table(new_table)
        db.rename_table(self.domain, new_table)
        db.close()

        # Check for subdomain takeover
        if self.takeover:
            subdomains = set(map(lambda x: x.get('subdomain'), self.data))
            takeover = Takeover(subdomains)
            takeover.run()
Example #18
 def remove_product(self, product):
     collect = Collect.find_first(collection_id=self.id, product_id=product.id)
     if collect:
         collect.destroy()