    def __init__(self, logger=None):
        if not logger:
            from assemblyline.common import log as al_log
            al_log.init_logging('suricata_importer')
            logger = logging.getLogger('assemblyline.suricata_importer')
            logger.setLevel(logging.INFO)

        self.ds = forge.get_datastore()
        self.classification = forge.get_classification()
        self.log = logger
Example No. 2
    def __init__(self, al_client, logger=None):
        if not logger:
            from assemblyline.common import log as al_log
            al_log.init_logging('sigma_importer')
            logger = logging.getLogger('assemblyline.sigma_importer')
            logger.setLevel(logging.INFO)

        self.update_client = al_client

        self.classification = forge.get_classification()
        self.log = logger
Example No. 3
    def __init__(self, importer_type, al_client, logger=None):
        if not logger:
            from assemblyline.common import log as al_log
            al_log.init_logging('yara_importer')
            logger = logging.getLogger('assemblyline.yara_importer')
            logger.setLevel(logging.INFO)

        self.importer_type = importer_type
        self.update_client = al_client
        self.parser = Plyara()
        self.classification = forge.get_classification()
        self.log = logger
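All three importers above share the same fallback-logger pattern: when the caller does not supply a logger, bootstrap Assemblyline's logging and build one under the 'assemblyline.' namespace. A minimal sketch of that pattern in isolation (get_fallback_logger is a hypothetical helper, not part of Assemblyline):

import logging

def get_fallback_logger(name: str, level: int = logging.INFO) -> logging.Logger:
    # Mirrors the importers above: initialize AL logging, then return
    # a namespaced logger at the requested level.
    from assemblyline.common import log as al_log
    al_log.init_logging(name)
    logger = logging.getLogger(f'assemblyline.{name}')
    logger.setLevel(level)
    return logger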
Example No. 4
    def __init__(self, logger: logging.Logger = None,
                 shutdown_timeout: float = None, config: Config = None,
                 datastore: AssemblylineDatastore = None,
                 redis: RedisType = None, redis_persist: RedisType = None,
                 default_pattern=".*"):

        self.updater_type = os.environ['SERVICE_PATH'].split('.')[-1].lower()
        self.default_pattern = default_pattern

        if not logger:
            al_log.init_logging(f'updater.{self.updater_type}', log_level=os.environ.get('LOG_LEVEL', "WARNING"))
            logger = logging.getLogger(f'assemblyline.updater.{self.updater_type}')

        super().__init__(f'assemblyline.{SERVICE_NAME}_updater', logger=logger, shutdown_timeout=shutdown_timeout,
                         config=config, datastore=datastore, redis=redis,
                         redis_persist=redis_persist)

        self.update_data_hash = Hash(f'service-updates-{SERVICE_NAME}', self.redis_persist)
        self._update_dir = None
        self._update_tar = None
        self._time_keeper = None
        self._service: Optional[Service] = None
        self.event_sender = EventSender('changes.services',
                                        host=self.config.core.redis.nonpersistent.host,
                                        port=self.config.core.redis.nonpersistent.port)

        self.service_change_watcher = EventWatcher(self.redis, deserializer=ServiceChange.deserialize)
        self.service_change_watcher.register(f'changes.services.{SERVICE_NAME}', self._handle_service_change_event)

        self.signature_change_watcher = EventWatcher(self.redis, deserializer=SignatureChange.deserialize)
        self.signature_change_watcher.register(f'changes.signatures.{SERVICE_NAME.lower()}',
                                               self._handle_signature_change_event)

        # An event flag that gets set when an update should be run for
        # reasons other than the regular interval (e.g. a change in signatures)
        self.source_update_flag = threading.Event()
        self.local_update_flag = threading.Event()
        self.local_update_start = threading.Event()

        # Load threads
        self._internal_server = None
        self.expected_threads = {
            'Sync Service Settings': self._sync_settings,
            'Outward HTTP Server': self._run_http,
            'Internal HTTP Server': self._run_internal_http,
            'Run source updates': self._run_source_updates,
            'Run local updates': self._run_local_updates,
        }
        # Only used by updater with 'generates_signatures: false'
        self.latest_updates_dir = os.path.join(UPDATER_DIR, 'latest_updates')
        if not os.path.exists(self.latest_updates_dir):
            os.makedirs(self.latest_updates_dir)
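The expected_threads map above pairs a display name with a bound method; a supervisor loop can iterate it to spawn one named thread per entry. A sketch of that pattern (start_threads is hypothetical, not the updater's actual scheduler):

import threading

def start_threads(expected_threads):
    # One daemon thread per named entry point; the name shows up in
    # thread dumps and logs, which is the point of the mapping.
    started = {}
    for name, entry_point in expected_threads.items():
        thread = threading.Thread(target=entry_point, name=name, daemon=True)
        thread.start()
        started[name] = thread
    return started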
Example No. 5
    def __init__(self, externals=None, logger=None):
        if not logger:
            from assemblyline.common import log as al_log
            al_log.init_logging('YaraValidator')
            logger = logging.getLogger('assemblyline.yara_validator')
            logger.setLevel(logging.WARNING)
        if not externals:
            externals = {'dummy': ''}
        self.log = logger
        self.externals = externals
        # Match rule headers, allowing 'global'/'private' modifiers in any order
        self.rulestart = re.compile(
            r'^(?:global |private )*rule ', re.MULTILINE)
        self.rulename = re.compile('rule ([^{^:]+)')
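A quick illustration of what the two compiled patterns match, assuming this constructor belongs to a class named YaraValidator (the class header is not shown above):

source = '''
global rule ExampleRule {
    condition:
        true
}
'''
validator = YaraValidator()
print(len(validator.rulestart.findall(source)))        # 1 rule header found
print(validator.rulename.findall(source)[0].strip())   # ExampleRule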
Example No. 6
    def __init__(self, task: ServiceTask):
        # Initialize logging
        al_log.init_logging(f'{task.service_name.lower()}',
                            log_level=logging.INFO)
        self.log = logging.getLogger(
            f'assemblyline.service.{task.service_name.lower()}')

        tags = {}
        for t in task.tags:
            tags.setdefault(t.type, [])
            tags[t.type].append(t.value)

        self._classification: Classification = forge.get_classification()
        self._service_completed: Optional[str] = None
        self._service_started: Optional[str] = None
        self._working_directory: Optional[str] = None
        self.deep_scan = task.deep_scan
        self.depth = task.depth
        self.drop_file: bool = False
        self.error_message: Optional[str] = None
        self.error_status: Optional[str] = None
        self.error_type: str = 'EXCEPTION'
        self.extracted: List[Dict[str, str]] = []
        self.file_name = task.filename
        self.file_type = task.fileinfo.type
        self.file_size = task.fileinfo.size
        self.ignore_filtering = task.ignore_filtering
        self.min_classification = task.min_classification.value
        self.max_extracted = task.max_files
        self.metadata = task.metadata
        self.md5: str = task.fileinfo.md5
        self.mime: Optional[str] = task.fileinfo.mime or None
        self.result: Optional[Result] = None
        self.safelist_config: Dict[str, Any] = task.safelist_config
        self.service_config: Dict[str, Any] = dict(task.service_config)
        self.service_context: Optional[str] = None
        self.service_debug_info: Optional[str] = None
        self.service_default_result_classification = None
        self.service_name: str = task.service_name
        self.service_tool_version: Optional[str] = None
        self.service_version: Optional[str] = None
        self.sha1: str = task.fileinfo.sha1
        self.sha256: str = task.fileinfo.sha256
        self.sid: str = task.sid
        self.supplementary: List[Dict[str, str]] = []
        self.tags = tags
        self.temp_submission_data: Dict[str, Any] = {
            row.name: row.value
            for row in task.temporary_submission_data
        }
        self.type: str = task.fileinfo.type
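A side note on the tag grouping at the top of this example: the setdefault loop builds a type-to-values map, which reads slightly cleaner with collections.defaultdict. An equivalent sketch:

from collections import defaultdict

tags = defaultdict(list)
for t in task.tags:        # same `task` object as above
    tags[t.type].append(t.value)
tags = dict(tags)          # freeze back into a plain dict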
Example No. 7
    def __init__(self, task: Task) -> None:
        # Initialize logging for the service
        al_log.init_logging(f'{task.service_name}', log_level=logging.INFO)
        self.log = logging.getLogger(f'assemblyline.service.{task.service_name.lower()}')

        self._working_directory = None
        self.deep_scan = task.deep_scan
        self.extracted = task.extracted
        self.file_name = task.file_name
        self.file_type = task.file_type
        self.max_extracted = task.max_extracted
        self.md5 = task.md5
        self.sha1 = task.sha1
        self.sha256 = task.sha256
        self.sid = task.sid
        self.task = task
Example No. 8
    def __init__(self, config: Optional[Dict] = None) -> None:
        # Load the service attributes from the service manifest
        self.service_attributes = helper.get_service_attributes()

        # Start with default service parameters and override with anything provided
        self.config = self.service_attributes.config
        if config:
            self.config.update(config)

        # Initialize logging for the service
        log.init_logging(f'{self.service_attributes.name}', log_level=logging.INFO)
        self.log = logging.getLogger(f'assemblyline.service.{self.service_attributes.name.lower()}')

        self._task = None

        self._working_directory = None
Example No. 9
    def __init__(self,
                 component_name: str,
                 logger: logging.Logger = None,
                 shutdown_timeout: float = SHUTDOWN_SECONDS_LIMIT,
                 config=None):
        super().__init__(name=component_name)
        al_log.init_logging(component_name)
        self.config = config or forge.get_config()

        self.running = None
        self.log = logger or logging.getLogger(component_name)
        self._exception = None
        self._traceback = None
        self._shutdown_timeout = shutdown_timeout if shutdown_timeout is not None else SHUTDOWN_SECONDS_LIMIT
        self._old_sigint = None
        self._old_sigterm = None
        self._stopped = False
        self._last_heartbeat = 0
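A sketch of how such a component is typically specialized, assuming this is the constructor of a ServerBase-style class whose start machinery eventually calls try_run (that contract is not visible in the snippet):

import time

class DemoComponent(ServerBase):
    def try_run(self):
        # self.running is managed by the base class start/stop machinery
        while self.running:
            self.log.info("DemoComponent heartbeat")
            time.sleep(5)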
Example No. 10
def run_task_handler():
    al_log.init_logging("assemblyline.service.process_handler",
                        log_level=LOG_LEVEL)

    log = logging.getLogger("assemblyline.service.process_handler")

    # Start the two processes
    rs_p = Popen(['python3', '-m', 'assemblyline_v4_service.run_service'])
    th_p = Popen(['python3', '-m', 'assemblyline_service_client.task_handler'])

    def forward_signal(signal_number, _frame):
        th_p.send_signal(signal_number)

    signal.signal(signal.SIGUSR1, forward_signal)
    signal.signal(signal.SIGUSR2, forward_signal)
    signal.signal(signal.SIGTERM, forward_signal)

    while True:
        check_processes(rs_p, th_p, log)

        # Wait 2 seconds before polling process status
        time.sleep(2)
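check_processes is called here but not defined in this snippet; the fragment in Example No. 15 below shows its core logic. A sketch consistent with that fragment, assuming both handles are subprocess.Popen objects and `signal` is imported as in the surrounding code:

def check_processes(service_process, task_handler_process, log):
    # poll() returns None while a process is still running
    rs_rc = service_process.poll()
    th_rc = task_handler_process.poll()

    # If the task handler exited, take the service down with it
    if th_rc is not None:
        log.info(f"task_handler: exit({th_rc})")
        service_process.terminate()
        exit(th_rc)

    # If the service crashed, tell the task handler so it can stop cleanly
    if rs_rc is not None:
        log.error(f"The service has crashed with exit code: {rs_rc}. "
                  "The container will be stopped...")
        task_handler_process.send_signal(signal.SIGUSR1)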
Example No. 11
    def __init__(self, config: Optional[Dict] = None) -> None:
        # Load the service attributes from the service manifest
        self.service_attributes = helper.get_service_attributes()

        # Start with default service parameters and override with anything provided
        self.config = self.service_attributes.config
        if config:
            self.config.update(config)

        self.name = self.service_attributes.name.lower()
        # Initialize logging for the service
        log.init_logging(f'{self.service_attributes.name}',
                         log_level=LOG_LEVEL)
        self.log = logging.getLogger(f'assemblyline.service.{self.name}')

        # Replace warning/error methods with our own patched version
        self._log_warning = self.log.warning
        self._log_error = self.log.error

        self.log.warning = self._warning
        self.log.error = self._error

        self._task = None

        self._working_directory = None

        # Initialize interface for interacting with system safelist
        self._api_interface = None

        self.dependencies = self._get_dependencies_info()
        self.ontologies: Optional[Dict] = None

        # Updater-related
        self.rules_directory: Optional[str] = None
        self.rules_list: list = []
        self.update_time: Optional[int] = None
        self.rules_hash: Optional[str] = None
Example No. 12
from elasticapm.contrib.flask import ElasticAPM
from flask import Flask
from flask.logging import default_handler

from assemblyline.common import forge, log as al_log
from assemblyline_service_server.api.v1.file import file_api
from assemblyline_service_server.api.v1.service import service_api
from assemblyline_service_server.api.v1.task import task_api
from assemblyline_service_server.api.v1.safelist import safelist_api
from assemblyline_service_server.healthz import healthz

config = forge.get_config()

# Prepare the logger
al_log.init_logging('svc')
LOGGER = logging.getLogger('assemblyline.svc_server')
LOGGER.info("Service server ready to receive connections...")

# Prepare the app
app = Flask('svc-server')
app.config['SECRET_KEY'] = config.ui.secret_key

app.register_blueprint(healthz)
app.register_blueprint(file_api)
app.register_blueprint(service_api)
app.register_blueprint(task_api)
app.register_blueprint(safelist_api)

# Setup logging
app.logger.setLevel(LOGGER.getEffectiveLevel())
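This module only assembles the Flask app; nothing here starts it. A development-only sketch of serving it (the port is an assumption; production deployments would sit behind a proper WSGI server such as gunicorn):

if __name__ == '__main__':
    # Development only: Flask's built-in server, assumed port
    app.run(host='0.0.0.0', port=5003)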
Example No. 13
import logging
import os

from flask import Flask
from flask_socketio import SocketIO

from assemblyline_ui.sio.alert import AlertMonitoringNamespace
from assemblyline_ui.sio.live_submission import LiveSubmissionNamespace
from assemblyline_ui.sio.status import SystemStatusNamespace
from assemblyline_ui.sio.submission import SubmissionMonitoringNamespace
from assemblyline.common import forge, log as al_log

config = forge.get_config()

# Prepare the logger
al_log.init_logging("ui")
LOGGER = logging.getLogger('assemblyline.ui.socketio')
LOGGER.info("SocketIO server ready to receive connections...")

# Prepare the app
app = Flask('socketio')
app.config['SECRET_KEY'] = config.ui.secret_key

# If the environment says we should prefix our app by something, do so
if 'APPLICATION_ROOT' in os.environ:
    LOGGER.info(
        f"Flask application root changing: {os.environ['APPLICATION_ROOT']}")
    app.config['APPLICATION_ROOT'] = os.environ['APPLICATION_ROOT']
    app.config['SESSION_COOKIE_PATH'] = '/'

# NOTE: we need to run in threading mode while debugging; otherwise, use gevent
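The snippet is cut off right after this note, but it hints at how the SocketIO instance is created. A sketch of what that choice might look like (using the `config` and `SocketIO` imports already in scope; the exact wiring is an assumption):

socketio = SocketIO(app, async_mode='threading' if config.ui.debug else 'gevent')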
Example No. 14
from __future__ import annotations

import json
import logging
from typing import List, Union, Optional, Dict, Any

from assemblyline.common import forge
from assemblyline.common import log as al_log
from assemblyline.common.attack_map import attack_map, software_map, group_map, revoke_map
from assemblyline.common.dict_utils import unflatten
from assemblyline.common.str_utils import StringTable, safe_str
from assemblyline_v4_service.common.helper import get_service_attributes, get_heuristics

al_log.init_logging('service.result')
log = logging.getLogger('assemblyline.service.result')

Classification = forge.get_classification()
SERVICE_ATTRIBUTES = get_service_attributes()

BODY_FORMAT = StringTable(
    'BODY_FORMAT',
    [
        ('TEXT', 0),
        ('MEMORY_DUMP', 1),
        ('GRAPH_DATA', 2),
        ('URL', 3),
        ('JSON', 4),
        ('KEY_VALUE', 5),
        ('PROCESS_TREE', 6),
        ('TABLE', 7),
        ('IMAGE', 8),
Example No. 15
    if th_rc is not None:
        log.info(f"task_handler: exit({th_rc})")
        service_process.terminate()
        exit(th_rc)

    # If the service process has crashed tell the task handler something is wrong,
    # then wait for it to exit voluntarily
    if rs_rc is not None:
        log.error(
            f"The service has crashed with exit code: {rs_rc}. The container will be stopped..."
        )
        task_handler_process.send_signal(signal.SIGUSR1)


if __name__ == '__main__':
    al_log.init_logging("assemblyline.service.process_handler",
                        log_level=LOG_LEVEL)

    log = logging.getLogger("assemblyline.service.process_handler")
    # Start the two processes
    rs_p = Popen(['python3', '-m', 'assemblyline_v4_service.run_service'])
    th_p = Popen(['python3', '-m', 'assemblyline_service_client.task_handler'])

    def forward_signal(signal_number, _frame):
        th_p.send_signal(signal_number)

    signal.signal(signal.SIGUSR1, forward_signal)
    signal.signal(signal.SIGUSR2, forward_signal)
    signal.signal(signal.SIGTERM, forward_signal)

    while True:
        check_processes(rs_p, th_p)
Example No. 16
import logging
import os
import shutil
import time

from copy import deepcopy

import certifi
import requests
import yaml

from assemblyline.common import log as al_log
from assemblyline.common.digests import get_sha256_for_file
from assemblyline.common.isotime import iso_to_epoch

al_log.init_logging('service_updater')

LOGGER = logging.getLogger('assemblyline.updater.service')

UPDATE_CONFIGURATION_PATH = os.environ.get('UPDATE_CONFIGURATION_PATH', None)
UPDATE_OUTPUT_PATH = os.environ.get('UPDATE_OUTPUT_PATH',
                                    "/tmp/updater_output")


def test_file(_):
    return True


def url_update(test_func=test_file) -> None:
    """
    Using an update configuration file as an input, which contains a list of sources, download all the file(s) which
Example No. 17
DEBUG = config.ui.debug
VERSION = os.environ.get(
    'ASSEMBLYLINE_VERSION',
    f"{FRAMEWORK_VERSION}.{SYSTEM_VERSION}.{BUILD_MINOR}.dev0")
AUTH_KEY = os.environ.get('SERVICE_API_KEY',
                          'ThisIsARandomAuthKey...ChangeMe!')

RATE_LIMITER = Counters(prefix="quota", host=redis, track_counters=True)

# End of Configuration
#################################################################

#################################################################
# Prepare loggers
config.logging.log_to_console = config.logging.log_to_console or DEBUG
al_log.init_logging('svc', config=config)

LOGGER = logging.getLogger('assemblyline.svc')

LOGGER.debug('Logger ready!')

# End of prepare logger
#################################################################

#################################################################
# Global instances

STORAGE = forge.get_datastore(config=config)
FILESTORE = forge.get_filestore(config=config)
LOCK = threading.Lock()
TASKING_CLIENT = TaskingClient(datastore=STORAGE,
Example No. 18
    if th_rc is not None:
        log.info(f"task_handler: exit({th_rc})")
        service_process.terminate()
        exit(th_rc)

    # If the service process has crashed tell the task handler something is wrong,
    # then wait for it to exit voluntarily
    if rs_rc is not None:
        log.error(
            f"The service has crashed with exit code: {rs_rc}. The container will be stopped..."
        )
        task_handler_process.send_signal(signal.SIGUSR1)


if __name__ == '__main__':
    al_log.init_logging("assemblyline.service.process_handler")

    log = logging.getLogger("assemblyline.service.process_handler")
    # Start the two processes
    rs_p = Popen(['python3', '-m', 'assemblyline_v4_service.run_service'])
    th_p = Popen(['python3', '-m', 'assemblyline_service_client.task_handler'])

    def forward_signal(signal_number, _frame):
        th_p.send_signal(signal_number)

    signal.signal(signal.SIGUSR1, forward_signal)
    signal.signal(signal.SIGUSR2, forward_signal)
    signal.signal(signal.SIGTERM, forward_signal)

    while True:
        check_processes(rs_p, th_p)
Example No. 19
from yaml.composer import ComposerError
from pysigma.exceptions import UnsupportedFeature

import certifi
import requests
import yaml
from assemblyline_client import get_client
from git import Repo

from assemblyline.common import log as al_log, forge
from assemblyline.common.digests import get_sha256_for_file
from assemblyline.common.isotime import iso_to_epoch
from sigma_importer import SigmaImporter
from pysigma.pysigma import val_file

al_log.init_logging('updater.sigma')
classification = forge.get_classification()

LOGGER = logging.getLogger('assemblyline.updater.sigma')

UPDATE_CONFIGURATION_PATH = os.environ.get('UPDATE_CONFIGURATION_PATH',
                                           "/tmp/sigma_updater_config.yaml")
UPDATE_OUTPUT_PATH = os.environ.get('UPDATE_OUTPUT_PATH',
                                    "/tmp/sigma_updater_output")
UPDATE_DIR = os.path.join(tempfile.gettempdir(), 'sigma_updates')


def add_cacert(cert: str):
    # Add certificate to requests
    cafile = certifi.where()
    with open(cafile, 'a') as ca_editor:
        # Append the PEM-formatted certificate to the trusted CA bundle
        ca_editor.write(f"\n{cert}")
Example No. 20
import yaml

from tempfile import gettempdir

from assemblyline.common import forge, log as al_log
from assemblyline.common.backupmanager import DistributedBackup
from assemblyline.common.security import get_totp_token, generate_random_secret
from assemblyline.common.uid import get_random_id
from assemblyline.odm.models.signature import RULE_STATUSES
from assemblyline.remote.datatypes.hash import Hash

warnings.filterwarnings("ignore")

config = forge.get_config()
config.logging.log_to_console = False
al_log.init_logging('cli')

PROCESSES_COUNT = 50
COUNT_INCREMENT = 500
DATASTORE = None
t_count = 0
t_last = time.time()


class NullLogger(object):
    @staticmethod
    def info(msg, *args, **kwargs):
        pass

    @staticmethod
    def warning(msg, *args, **kwargs):
        pass
Example No. 21
def core(request, redis, filestore, config):
    from assemblyline.common import log as al_log
    al_log.init_logging("simulation")

    fields = CoreSession()
    fields.redis = redis
    fields.ds = ds = forge.get_datastore()

    fields.config = config
    forge.config_singletons[False, None] = fields.config

    fields.filestore = filestore
    threads: List[ServerBase] = [
        # Start the ingester components
        IngesterInput(datastore=ds, redis=redis, persistent_redis=redis),
        IngesterSubmitter(datastore=ds, redis=redis, persistent_redis=redis),
        IngesterInternals(datastore=ds, redis=redis, persistent_redis=redis),

        # Start the dispatcher
        FileDispatchServer(datastore=ds, redis=redis, redis_persist=redis),
        SubmissionDispatchServer(datastore=ds,
                                 redis=redis,
                                 redis_persist=redis),

        # Start plumber
        Plumber(datastore=ds, redis=redis, redis_persist=redis, delay=0.5),
    ]

    stages = get_service_stage_hash(redis)
    ingester_input_thread: IngesterInput = threads[0]
    fields.ingest = ingester_input_thread
    fields.ingest_queue = ingester_input_thread.ingester.ingest_queue

    ds.ds.service = MockCollection(Service)
    ds.ds.service_delta = MockCollection(Service)
    ds.service.save('pre_0', dummy_service('pre', 'EXTRACT'))
    ds.service_delta.save('pre', dummy_service('pre', 'EXTRACT'))
    stages.set('pre', ServiceStage.Running)

    threads.append(MockService('pre', ds, redis, filestore))
    fields.pre_service = threads[-1]
    ds.service.save('core-a_0', dummy_service('core-a', 'CORE'))
    ds.service_delta.save('core-a', dummy_service('core-a', 'CORE'))
    stages.set('core-a', ServiceStage.Running)

    threads.append(MockService('core-a', ds, redis, filestore))
    ds.service.save('core-b_0', dummy_service('core-b', 'CORE'))
    ds.service_delta.save('core-b', dummy_service('core-b', 'CORE'))
    threads.append(MockService('core-b', ds, redis, filestore))
    stages.set('core-b', ServiceStage.Running)

    ds.service.save('finish_0', dummy_service('finish', 'POST'))
    ds.service_delta.save('finish', dummy_service('finish', 'POST'))
    threads.append(MockService('finish', ds, redis, filestore))
    stages.set('finish', ServiceStage.Running)

    for t in threads:
        t.daemon = True
        t.start()

    def stop_core():
        for tr in threads:
            tr.close()
        for tr in threads:
            tr.stop()
        for tr in threads:
            tr.raising_join()

    request.addfinalizer(stop_core)
    return fields
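A hypothetical test consuming this fixture; pytest injects it by parameter name (length() is assumed to be the queue's size accessor):

def test_core_starts_clean(core):
    # The fixture exposes the ingester's input queue on the returned fields
    assert core.ingest_queue.length() == 0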
Example No. 22

def get_signup_queue(key):
    return ExpiringSet(f"signup_id_{key}",
                       host=config.core.redis.nonpersistent.host,
                       port=config.core.redis.nonpersistent.port,
                       ttl=60 * 15)
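ttl=60 * 15 keeps signup entries alive for fifteen minutes. A hypothetical usage sketch, assuming ExpiringSet exposes add() and members():

signup_set = get_signup_queue("user@example.com")
signup_set.add("one-time-token")   # expires on its own after 15 minutes
print(signup_set.members())        # ['one-time-token'] until the TTL lapses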


# End of Configuration
#################################################################

#################################################################
# Prepare loggers
config.logging.log_to_console = config.logging.log_to_console or DEBUG
al_log.init_logging("ui", config=config)

AUDIT_KW_TARGET = [
    "sid", "sha256", "copy_sid", "filter", "query", "username", "group", "rev",
    "wq_id", "bucket", "cache_key", "alert_key", "alert_id", "url", "q", "fq",
    "file_hash", "heuristic_id", "error_key", "mac", "vm_type", "vm_name",
    "config_name", "servicename", "vm"
]

AUDIT_LOG = logging.getLogger('assemblyline.ui.audit')
LOGGER = logging.getLogger('assemblyline.ui')

if AUDIT:
    AUDIT_LOG.setLevel(logging.INFO)

if DEBUG:
            cur_logger.info("An update is available for download from the datastore")

            if not os.path.exists(update_output_path):
                os.makedirs(update_output_path)

            temp_zip_file = os.path.join(update_output_path, 'temp.zip')
            al_client.signature.download(output=temp_zip_file,
                                         query=f"type:{updater_type} AND (status:NOISY OR status:DEPLOYED)")

            if os.path.exists(temp_zip_file):
                with ZipFile(temp_zip_file, 'r') as zip_f:
                    zip_f.extractall(update_output_path)

                os.remove(temp_zip_file)

            # Create the response yaml
            with open(os.path.join(update_output_path, 'response.yaml'), 'w') as yml_fh:
                yaml.safe_dump(dict(hash=json.dumps(files_sha256)), yml_fh)

            cur_logger.info(f"New ruleset successfully downloaded and ready to use")

        cur_logger.info(f"{updater_type.upper()} updater completed successfully")
    except Exception:
        cur_logger.exception("Updater ended with an exception!")


if __name__ == '__main__':
    al_log.init_logging('updater.yara')
    logger = logging.getLogger('assemblyline.updater.yara')
    yara_update("yara", UPDATE_CONFIGURATION_PATH, UPDATE_OUTPUT_PATH, UPDATE_DIR, YARA_EXTERNALS, logger)