Example #1
*Need the ZMQ_PubSub_Line_Q Module running to be able to work properly.

"""
import time
from packages import Paste
from pubsublogger import publisher

from Helper import Process

if __name__ == "__main__":
    publisher.port = 6380
    publisher.channel = "Script"

    config_section = 'Attributes'

    p = Process(config_section)

    # FUNCTIONS #
    publisher.info("Attribute is Running")

    while True:
        try:
            message = p.get_from_set()

            if message is not None:
                PST = Paste.Paste(message)
            else:
                publisher.debug("Script Attribute is idling 1s")
                print('sleeping')
                time.sleep(1)
                continue
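            # hedged sketch: the excerpt is truncated here. Processing of PST
            # would normally follow at this point; forwarding the message to the
            # module's output queue via populate_set_out() is an assumption
            # about the Helper.Process API and is not shown in this example.
            p.populate_set_out(message)
        except KeyboardInterrupt:
            # also assumed: stop the loop cleanly on interrupt
            publisher.info("Script Attribute stopped")
            break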
Example #2
File: QueueOut.py  Project: Mrnmap/ALLInfo
def run(config_section):
    p = Process(config_section)
    if not p.publish():
        print(config_section, 'has no publisher.')
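As a usage sketch (a hypothetical driver, not part of the original file), run() could be invoked once per configured queue section:

# hypothetical driver: call run() for each queue section; the section names
# reused here are the ones appearing elsewhere in these examples
for section in ['Global', 'Attributes', 'Curve']:
    run(section)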
Example #3
        def __init__(self, type, crawler_options, date, requested_mode, url,
                     domain, port, original_item, *args, **kwargs):
            self.type = type
            self.requested_mode = requested_mode
            self.original_item = original_item
            self.root_key = None
            self.start_urls = url
            self.domains = [domain]
            self.port = str(port)
            date_str = '{}/{}/{}'.format(date['date_day'][0:4],
                                         date['date_day'][4:6],
                                         date['date_day'][6:8])
            self.full_date = date['date_day']
            self.date_month = date['date_month']
            self.date_epoch = int(date['epoch'])

            self.arg_crawler = {
                'html': crawler_options['html'],
                'wait': 10,
                'render_all': 1,
                'har': crawler_options['har'],
                'png': crawler_options['png']
            }

            config_section = 'Crawler'
            self.p = Process(config_section)

            self.r_cache = redis.StrictRedis(
                host=self.p.config.get("Redis_Cache", "host"),
                port=self.p.config.getint("Redis_Cache", "port"),
                db=self.p.config.getint("Redis_Cache", "db"),
                decode_responses=True)

            self.r_serv_log_submit = redis.StrictRedis(
                host=self.p.config.get("Redis_Log_submit", "host"),
                port=self.p.config.getint("Redis_Log_submit", "port"),
                db=self.p.config.getint("Redis_Log_submit", "db"),
                decode_responses=True)

            self.r_serv_metadata = redis.StrictRedis(
                host=self.p.config.get("ARDB_Metadata", "host"),
                port=self.p.config.getint("ARDB_Metadata", "port"),
                db=self.p.config.getint("ARDB_Metadata", "db"),
                decode_responses=True)

            self.r_serv_onion = redis.StrictRedis(
                host=self.p.config.get("ARDB_Onion", "host"),
                port=self.p.config.getint("ARDB_Onion", "port"),
                db=self.p.config.getint("ARDB_Onion", "db"),
                decode_responses=True)

            self.crawler_path = os.path.join(
                self.p.config.get("Directories", "crawled"), date_str)

            self.crawled_paste_filemame = os.path.join(
                os.environ['AIL_HOME'],
                self.p.config.get("Directories", "pastes"),
                self.p.config.get("Directories", "crawled"), date_str)

            self.crawled_har = os.path.join(
                os.environ['AIL_HOME'],
                self.p.config.get("Directories", "crawled_screenshot"),
                date_str)
            self.crawled_screenshot = os.path.join(
                os.environ['AIL_HOME'],
                self.p.config.get("Directories", "crawled_screenshot"))
Example #4
                        action='store')

    parser.add_argument('date',
                        type=str,
                        default="now",
                        help='''month %Y%m, example: 201810''',
                        action='store')

    args = parser.parse_args()

    path = os.path.join(os.environ['AIL_HOME'], 'doc',
                        'statistics')  # save path

    config_section = 'ARDB_Statistics'

    p = Process(config_section, False)

    # ARDB #
    server_statistics = redis.StrictRedis(
        host=p.config.get("ARDB_Statistics", "host"),
        port=p.config.getint("ARDB_Statistics", "port"),
        db=p.config.getint("ARDB_Statistics", "db"),
        decode_responses=True)

    if args.date == 'now' or len(args.date) != 6:
        date = datetime.datetime.now().strftime("%Y%m")
    else:
        date = args.date

    if args.type == 0:
        create_pie_chart(args.country, 'credential_by_tld:', date,
Example #5
====================

This simple module can be used to clean all queues.

Requirements:
-------------


"""
import redis
import os
import time
from packages import Paste
from pubsublogger import publisher
from Helper import Process

if __name__ == "__main__":
    publisher.port = 6380
    publisher.channel = "Script"

    config_section = ['Global', 'Duplicates', 'Indexer', 'Attributes', 'Lines', 'DomClassifier', 'Tokenize', 'Curve', 'Categ', 'CreditCards', 'Mail', 'Onion', 'DumpValidOnion', 'Web',  'WebStats', 'Release', 'Credential', 'Cve', 'Phone', 'SourceCode', 'Keys']
    # NOTE: this reassignment overrides the full list above; as written, only
    # the 'Curve' queue is actually cleaned
    config_section = ['Curve']

    for queue in config_section:
        print('dropping: ' + queue)
        p = Process(queue)
        while True:
            message = p.get_from_set()
            if message is None:
                break
Example #6
def run(config_section):
    p = Process(config_section)
    p.populate_set_in()
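A minimal sketch combining populate_set_in() with the consumer calls shown in the earlier examples (assuming the same Helper.Process API, where get_from_set() returns None once the in-queue is empty):

# hedged sketch: load a section's in-queue, then drain it, using only the
# Process methods already shown in these examples
def drain(config_section):
    p = Process(config_section)
    p.populate_set_in()
    while True:
        message = p.get_from_set()
        if message is None:
            break
    print(config_section, 'drained')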