Example #1
# Assumed setup (not shown in this excerpt, mirroring the config handling in the
# celery_worker example below): create the parser and locate the 'api' config file.
api_config_object = configparser.SafeConfigParser()
api_config_object.optionxform = str
api_config_file = multiscanner.common.get_config_path(multiscanner.CONFIG, 'api')
api_config_object.read(api_config_file)
if not api_config_object.has_section('api') or not os.path.isfile(api_config_file):
    # Write default config
    api_config_object.add_section('api')
    for key in DEFAULTCONF:
        api_config_object.set('api', key, str(DEFAULTCONF[key]))
    conffile = codecs.open(api_config_file, 'w', 'utf-8')
    api_config_object.write(conffile)
    conffile.close()
api_config = multiscanner.common.parse_config(api_config_object)

# celery_worker reads the API config at import time, so these imports must come
# after the config handling above
from celery_worker import multiscanner_celery, ssdeep_compare_celery
from ssdeep_analytics import SSDeepAnalytic

db = database.Database(config=api_config.get('Database'))
# To run under Apache, we need to set up the DB outside of __main__
db.init_db()
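# Sketch (assumption, not part of the original file): under Apache/mod_wsgi this
# module is imported rather than executed, so a minimal entry point could be
#     # wsgi.py
#     from api import app as application   # 'api' module name and 'app' Flask object are assumed
# Importing the module runs the module-level setup above (config parsing and
# db.init_db()) before the first request is served.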

storage_conf = multiscanner.common.get_config_path(multiscanner.CONFIG, 'storage')
storage_handler = multiscanner.storage.StorageHandler(configfile=storage_conf)
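# Keep a reference to the Elasticsearch handler: after the break, the loop
# variable 'handler' still points at it (this assumes one is configured).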
for handler in storage_handler.loaded_storage:
    if isinstance(handler, elasticsearch_storage.ElasticSearchStorage):
        break

Example #2

ms_config_object = configparser.SafeConfigParser()
ms_config_object.optionxform = str
ms_configfile = multiscanner.CONFIG
ms_config_object.read(ms_configfile)
ms_config = common.parse_config(ms_config_object)
# Assumed reconstruction: this block mirrors the 'api' default-config handling
# in the previous example, applied here to the 'celery' section.
config_object = configparser.SafeConfigParser()
config_object.optionxform = str
configfile = common.get_config_path(multiscanner.CONFIG, 'api')
config_object.read(configfile)
if not config_object.has_section('celery') or not os.path.isfile(configfile):
    # Write default config
    config_object.add_section('celery')
    for key in DEFAULTCONF:
        config_object.set('celery', key, str(DEFAULTCONF[key]))
    conffile = codecs.open(configfile, 'w', 'utf-8')
    config_object.write(conffile)
    conffile.close()
config = common.parse_config(config_object)
api_config = config.get('api')
worker_config = config.get('celery')
db_config = config.get('Database')

app = Celery(broker='{0}://{1}:{2}@{3}/{4}'.format(
    worker_config.get('protocol'),
    worker_config.get('user'),
    worker_config.get('password'),
    worker_config.get('host'),
    worker_config.get('vhost'),
))
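# With hypothetical settings (protocol 'pyamqp', user and password 'guest',
# host 'localhost', vhost 'multiscanner'), the URL built above would render as
# 'pyamqp://guest:guest@localhost/multiscanner'.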
app.conf.timezone = worker_config.get('tz')
db = database.Database(config=db_config)


@app.on_after_configure.connect
def setup_periodic_tasks(sender, **kwargs):
    # Executes every morning at 2:00 a.m.
    sender.add_periodic_task(
        crontab(hour=2, minute=0),
        ssdeep_compare_celery.s(),
    )
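
# Note (assumption): the periodic task above only fires when a beat scheduler
# runs alongside the worker, e.g.
#     celery -A celery_worker worker --beat --loglevel=info
# An equivalent declarative form would be:
#     app.conf.beat_schedule = {
#         'ssdeep-compare-nightly': {
#             'task': ssdeep_compare_celery.name,
#             'schedule': crontab(hour=2, minute=0),
#         },
#     }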


def celery_task(files, config=multiscanner.CONFIG):
    '''
    Run multiscanner on the given files and store the results in the storage
    handler(s) specified in the storage configuration file.
    '''
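    # Usage sketch (assumption): if this function is registered as a Celery task
    # with @app.task (the decorator is not shown in this excerpt), the API side
    # would enqueue work with something like
    #     celery_task.delay(['/tmp/sample.exe'])
    # and the worker would run MultiScanner and pass the report to the loaded
    # storage handlers.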
Example #3
INVALID_REQUEST = {'Message': 'Invalid request parameters'}
UPLOAD_FOLDER = 'tmp/'

BATCH_SIZE = 100
WAIT_SECONDS = 60  # Number of seconds to wait for additional files
# submitted to the create/ API
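# Presumably, multiscanner_process (below) flushes a batch once BATCH_SIZE files
# have accumulated or WAIT_SECONDS have elapsed since the first one arrived
# (assumption: the batching logic itself is outside this excerpt).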

HTTP_OK = 200
HTTP_CREATED = 201
HTTP_BAD_REQUEST = 400
HTTP_NOT_FOUND = 404

# FULL_DB_PATH = os.path.join(MS_WD, 'sqlite.db')

app = Flask(__name__)
db = database.Database()
storage_conf = multiscanner.common.get_storage_config_path(multiscanner.CONFIG)
storage_handler = multiscanner.storage.StorageHandler(configfile=storage_conf)
for handler in storage_handler.loaded_storage:
    if isinstance(handler, elasticsearch_storage.ElasticSearchStorage):
        break
work_queue = multiprocessing.Queue()
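# Files submitted through the API are expected to be pushed onto work_queue;
# multiscanner_process (below) drains it in a separate process.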


def multiscanner_process(work_queue, exit_signal):
    metadata_list = []
    time_stamp = None
    while True:
        time.sleep(1)
        try:
            metadata_list.append(work_queue.get_nowait())