Code Example #1
def perform_alert_update(datastore, logger, alert):
    alert_id = alert.get('alert_id')

    with Lock(f"alert-update-{alert_id}", 5):
        old_alert = datastore.alert.get(alert_id, as_obj=False)
        if old_alert is None:
            raise KeyError(f"{alert_id} is missing from the alert collection.")

        # Merge fields...
        merged = {
            x: list(
                set(old_alert.get('al', {}).get(x, [])).union(
                    set(alert['al'].get(x, []))))
            for x in AL_FIELDS
        }

        # Sanity check.
        if not all([
                old_alert.get(x, None) == alert.get(x, None)
                for x in config.core.alerter.constant_alert_fields
        ]):
            raise ValueError("Constant alert field changed. (%s, %s)" %
                             (str(old_alert), str(alert)))

        old_alert = recursive_update(old_alert, alert)
        old_alert['al'] = recursive_update(old_alert['al'], merged)

        datastore.alert.save(alert_id, old_alert)

    logger.info(f"Alert {alert_id} has been updated.")
Code Example #2
@contextmanager  # from contextlib; required so this generator can be used in a `with` block
def temporary_api_key(ds: AssemblylineDatastore, user_name: str, permissions=('R', 'W')):
    """Creates a context where a temporary API key is available."""
    with Lock(f'user-{user_name}', timeout=10):
        name = ''.join(random.choices(string.ascii_lowercase, k=20))
        random_pass = get_random_password(length=48)
        user = ds.user.get(user_name)
        user.apikeys[name] = {
            "password": bcrypt.hash(random_pass),
            "acl": permissions
        }
        ds.user.save(user_name, user)

    try:
        yield f"{name}:{random_pass}"
    finally:
        with Lock(f'user-{user_name}', timeout=10):
            user = ds.user.get(user_name)
            user.apikeys.pop(name)
            ds.user.save(user_name, user)
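With the `@contextmanager` decorator in place, a usage sketch might look like the following; the datastore instance and user name are assumptions, and the key is removed again when the block exits.

# Hypothetical usage; `ds` is an AssemblylineDatastore and 'admin' an existing user.
with temporary_api_key(ds, 'admin', permissions=('R',)) as key:
    key_name, key_password = key.split(':', 1)
    print(f"temporary key {key_name} is only valid inside this block")
# On exit, the finally branch re-acquires the lock and deletes the key.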
Code Example #3
    def save_or_freshen_file(self,
                             sha256,
                             fileinfo,
                             expiry,
                             classification,
                             cl_engine=forge.get_classification(),
                             redis=None):
        with Lock(f'save-or-freshen-file-{sha256}', 5, host=redis):
            current_fileinfo = self.ds.file.get(
                sha256, as_obj=False, force_archive_access=True) or {}

            # Remove control fields from file info and update current file info
            for x in ['classification', 'expiry_ts', 'seen', 'archive_ts']:
                fileinfo.pop(x, None)
            current_fileinfo.update(fileinfo)

            current_fileinfo['archive_ts'] = now_as_iso(days_until_archive *
                                                        24 * 60 * 60)

            # Update expiry time
            if isinstance(expiry, datetime):
                expiry = expiry.strftime(DATEFORMAT)
            current_expiry = current_fileinfo.get('expiry_ts', expiry)
            if current_expiry and expiry:
                current_fileinfo['expiry_ts'] = max(current_expiry, expiry)
            else:
                current_fileinfo['expiry_ts'] = None

            # Update seen counters
            now = now_as_iso()
            current_fileinfo['seen'] = seen = current_fileinfo.get('seen', {})
            seen['count'] = seen.get('count', 0) + 1
            seen['last'] = now
            seen['first'] = seen.get('first', now)

            # Update Classification
            classification = cl_engine.min_classification(
                str(current_fileinfo.get('classification', classification)),
                str(classification))
            current_fileinfo['classification'] = classification
            self.ds.file.save(sha256,
                              current_fileinfo,
                              force_archive_access=True)
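A hypothetical call sketch; the sha256, fileinfo fields, and classification below are placeholders, and `files` stands in for whatever object exposes save_or_freshen_file.

from datetime import datetime, timedelta

# Hypothetical call; control fields such as 'seen' or 'expiry_ts' would be stripped anyway.
files.save_or_freshen_file(
    sha256='0' * 64,
    fileinfo={'md5': 'd41d8cd98f00b204e9800998ecf8427e', 'size': 0, 'type': 'unknown'},
    expiry=datetime.utcnow() + timedelta(days=15),
    classification='TLP:W')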
Code Example #4
    def _update(self, key, operations):
        with Lock(f'collection-{self.name}-update-{key}', 5):
            data = self.get(key, as_obj=False)

            for op, doc_key, value in operations:
                obj, cur_key = get_object(data, doc_key)
                if op == self.UPDATE_SET:
                    obj[cur_key] = value
                elif op == self.UPDATE_DELETE:
                    obj[cur_key].pop(value)
                elif op == self.UPDATE_APPEND:
                    obj[cur_key].append(value)
                elif op == self.UPDATE_REMOVE:
                    obj[cur_key].remove(value)
                elif op == self.UPDATE_INC:
                    obj[cur_key] += value
                elif op == self.UPDATE_DEC:
                    obj[cur_key] -= value

            return self._save(key, data)
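The operations argument is a list of (op, key, value) tuples, where the key appears to be a dotted document path resolved by get_object. A hypothetical sketch of what a caller might pass (the document fields and values are made up):

# Hypothetical operations list; the constant names come from the class above.
operations = [
    (collection.UPDATE_INC, 'seen.count', 1),          # increment a counter
    (collection.UPDATE_SET, 'status', 'TRIAGE'),        # overwrite a scalar field
    (collection.UPDATE_APPEND, 'labels', 'phishing'),   # append to a list field
]
collection._update('alert_id_1', operations)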
Code Example #5
def locked_execution(next_thread=None):
    with Lock('test', 10):
        if next_thread:
            next_thread.start()
        time.sleep(2)
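This helper looks like a test fixture: each call starts the next thread while still holding the lock, then sleeps. A sketch of how it might be driven (the threading setup below is an assumption, not part of the source): if the lock serializes the two workers, the total runtime should be roughly the sum of the two sleeps.

import threading
import time

# Hypothetical driver: t2 is started by t1 while t1 still holds the lock,
# so t2 should block on Lock('test', 10) until t1's sleep finishes.
t2 = threading.Thread(target=locked_execution)
t1 = threading.Thread(target=locked_execution, kwargs={'next_thread': t2})
start = time.time()
t1.start()
t1.join()
t2.join()
assert time.time() - start > 3.9  # roughly two serialized 2-second sleeps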
Code Example #6
File: signature.py  Project: rodadmin/assemblyline-ui
def download_signatures(**kwargs):
    """
    Download signatures from the system.

    Variables:
    None

    Arguments:
    query       => Query used to filter the signatures
                   Default: All deployed signatures

    Data Block:
    None

    Result example:
    <A zip file containing all signatures files from the different sources>
    """
    user = kwargs['user']
    query = request.args.get('query', 'status:DEPLOYED')

    access = user['access_control']
    last_modified = STORAGE.get_signature_last_modified()

    query_hash = sha256(
        f'{query}.{access}.{last_modified}'.encode('utf-8')).hexdigest()

    with forge.get_cachestore('al_ui.signature') as signature_cache:
        response = _get_cached_signatures(signature_cache, query_hash)
        if response:
            return response

        with Lock(f"al_signatures_{query_hash[:7]}.zip", 30):
            response = _get_cached_signatures(signature_cache, query_hash)
            if response:
                return response

            output_files = {}

            keys = [
                k['id'] for k in STORAGE.signature.stream_search(
                    query, fl="id", access_control=access, as_obj=False)
            ]
            signature_list = sorted(
                STORAGE.signature.multiget(keys, as_dictionary=False, as_obj=False),
                key=lambda x: x['order'])

            for sig in signature_list:
                out_fname = f"{sig['type']}/{sig['source']}"
                output_files.setdefault(out_fname, [])
                output_files[out_fname].append(sig['data'])

            output_zip = InMemoryZip()
            for fname, data in output_files.items():
                output_zip.append(fname, "\n\n".join(data))

            rule_file_bin = output_zip.read()

            signature_cache.save(query_hash,
                                 rule_file_bin,
                                 ttl=DEFAULT_CACHE_TTL)

            return make_file_response(rule_file_bin,
                                      f"al_signatures_{query_hash[:7]}.zip",
                                      len(rule_file_bin),
                                      content_type="application/zip")
Code Example #7
    def service_finished(self,
                         sid: str,
                         result_key: str,
                         result: Result,
                         temporary_data: Optional[Dict[str, Any]] = None):
        """Notifies the dispatcher of service completion, and possible new files to dispatch."""
        # Make sure the dispatcher knows we were working on this task
        task_key = ServiceTask.make_key(
            sid=sid,
            service_name=result.response.service_name,
            sha=result.sha256)
        task = self.running_tasks.pop(task_key)
        if not task:
            self.log.warning(
                f"[{sid}/{result.sha256}] {result.response.service_name} could not find the specified "
                f"task in its set of running tasks while processing successful results."
            )
            return
        task = ServiceTask(task)

        # Check if the service is a candidate for dynamic recursion prevention
        if not task.ignore_dynamic_recursion_prevention:
            service_info = self.service_data.get(result.response.service_name,
                                                 None)
            if service_info and service_info.category == "Dynamic Analysis":
                # TODO: This should be done in Lua because the current approach can introduce a race
                #       condition in the future, but it will remain this way while we confirm it works as expected
                submission = self.active_submissions.get(sid)
                submission['submission']['params']['services'][
                    'runtime_excluded'].append(result.response.service_name)
                self.active_submissions.set(sid, submission)

        # Save or freshen the result. The CONTENT of the result shouldn't change, but we need to keep the
        # most distant expiry time to prevent pulling it out from under another submission too early.
        if result.is_empty():
            # Empty results will not be archived, therefore result.archive_ts drives their deletion
            self.ds.emptyresult.save(result_key,
                                     {"expiry_ts": result.archive_ts})
        else:
            with Lock(f"lock-{result_key}", 5, self.redis):
                old = self.ds.result.get(result_key)
                if old:
                    if old.expiry_ts and result.expiry_ts:
                        result.expiry_ts = max(result.expiry_ts, old.expiry_ts)
                    else:
                        result.expiry_ts = None
                self.ds.result.save(result_key, result)

        # Let the logs know we have received a result for this task
        if result.drop_file:
            self.log.debug(
                f"[{sid}/{result.sha256}] {task.service_name} succeeded. "
                f"Result will be stored in {result_key} but processing will stop after this service."
            )
        else:
            self.log.debug(
                f"[{sid}/{result.sha256}] {task.service_name} succeeded. "
                f"Result will be stored in {result_key}")

        # Store the result object and mark the service finished in the global table
        process_table = DispatchHash(task.sid, self.redis)
        remaining, duplicate = process_table.finish(
            task.fileinfo.sha256, task.service_name, result_key,
            result.result.score, result.classification, result.drop_file)
        self.timeout_watcher.clear(f'{task.sid}-{task.key()}')
        if duplicate:
            self.log.warning(
                f"[{sid}/{result.sha256}] {result.response.service_name}'s current task was already "
                f"completed in the global processing table.")
            return

        # Push the result tags into redis
        new_tags = []
        for section in result.result.sections:
            new_tags.extend(tag_dict_to_list(section.tags.as_primitives()))
        if new_tags:
            tag_set = ExpiringSet(
                get_tag_set_name(sid=task.sid, file_hash=task.fileinfo.sha256),
                host=self.redis)
            tag_set.add(*new_tags)

        # Update the temporary data table for this file
        temp_data_hash = ExpiringHash(
            get_temporary_submission_data_name(sid=task.sid, file_hash=task.fileinfo.sha256),
            host=self.redis)
        for key, value in (temporary_data or {}).items():
            temp_data_hash.set(key, value)

        # Send the extracted files to the dispatcher
        depth_limit = self.config.submission.max_extraction_depth
        new_depth = task.depth + 1
        if new_depth < depth_limit:
            # Prepare the temporary data from the parent to build the temporary data table for
            # these newly extracted files
            parent_data = dict(temp_data_hash.items())

            for extracted_data in result.response.extracted:
                if not process_table.add_file(
                        extracted_data.sha256,
                        task.max_files,
                        parent_hash=task.fileinfo.sha256):
                    if parent_data:
                        child_hash_name = get_temporary_submission_data_name(
                            task.sid, extracted_data.sha256)
                        ExpiringHash(child_hash_name,
                                     host=self.redis).multi_set(parent_data)

                    self._dispatching_error(
                        task, process_table,
                        Error({
                            'archive_ts': result.archive_ts,
                            'expiry_ts': result.expiry_ts,
                            'response': {
                                'message': f"Too many files extracted for submission {task.sid} "
                                           f"{extracted_data.sha256} extracted by "
                                           f"{task.service_name} will be dropped",
                                'service_name': task.service_name,
                                'service_tool_version': result.response.service_tool_version,
                                'service_version': result.response.service_version,
                                'status': 'FAIL_NONRECOVERABLE'
                            },
                            'sha256': extracted_data.sha256,
                            'type': 'MAX FILES REACHED'
                        }))
                    continue
                file_data = self.files.get(extracted_data.sha256)
                self.file_queue.push(
                    FileTask(
                        dict(sid=task.sid,
                             min_classification=task.min_classification.max(
                                 extracted_data.classification).value,
                             file_info=dict(
                                 magic=file_data.magic,
                                 md5=file_data.md5,
                                 mime=file_data.mime,
                                 sha1=file_data.sha1,
                                 sha256=file_data.sha256,
                                 size=file_data.size,
                                 type=file_data.type,
                             ),
                             depth=new_depth,
                             parent_hash=task.fileinfo.sha256,
                             max_files=task.max_files)).as_primitives())
        else:
            for extracted_data in result.response.extracted:
                self._dispatching_error(
                    task, process_table,
                    Error({
                        'archive_ts': result.archive_ts,
                        'expiry_ts': result.expiry_ts,
                        'response': {
                            'message': f"{task.service_name} has extracted a file "
                                       f"{extracted_data.sha256} beyond the depth limits",
                            'service_name': result.response.service_name,
                            'service_tool_version': result.response.service_tool_version,
                            'service_version': result.response.service_version,
                            'status': 'FAIL_NONRECOVERABLE'
                        },
                        'sha256': extracted_data.sha256,
                        'type': 'MAX DEPTH REACHED'
                    }))

        # If the global table said that this was the last outstanding service,
        # send a message to the dispatchers.
        if remaining <= 0:
            self.file_queue.push(
                FileTask(
                    dict(sid=task.sid,
                         min_classification=task.min_classification.value,
                         file_info=task.fileinfo,
                         depth=task.depth,
                         max_files=task.max_files)).as_primitives())

        # Send the result key to any watching systems
        msg = {'status': 'OK', 'cache_key': result_key}
        for w in self._get_watcher_list(task.sid).members():
            NamedQueue(w).push(msg)
Code Example #8
def add_or_update_hash(**kwargs):
    """
    Add a hash to the safelist if it does not exist, or update its list of sources if it does

    Arguments:
    None

    Data Block:
    {
     "classification": "TLP:W",    # Classification of the safe hash (Computed for the mix of sources) - Optional
     "enabled": true,              # Is the safe hash enabled or not
     "file": {                     # Information about the file  - Only used in file mode
       "name": ["file.txt"]            # Possible names for the file
       "size": 12345,                  # Size of the file
       "type": "document/text"},       # Type of the file
     },
     "hashes": {                   # Information about the safe hash - At least one hash required
       "md5": "123...321",             # MD5 hash of the safe hash
       "sha1": "1234...4321",          # SHA1 hash of the safe hash
       "sha256": "12345....54321",     # SHA256 of the safe hash
     "sources": [                  # List of sources for why the file is safelisted, dedupped on name - Required
       {"classification": "TLP:W",     # Classification of the source (default: TLP:W) - Optional
        "name": "NSRL",                # Name of external source or user who safelisted it - Required
        "reason": [                    # List of reasons why the source is safelisted - Required
          "Found as test.txt on default windows 10 CD",
          "Found as install.txt on default windows XP CD"
        ],
        "type": "external"},           # Type or source (external or user) - Required
       {"classification": "TLP:W",
        "name": "admin",
        "reason": ["We've seen this file many times and it leads to False positives"],
        "type": "user"}
     ],
     "signature": {               # Signature information  - Only used in signature mode
       "name": "Avira.Eicar",         # Name of signature
     },
     "tag": {                     # Tag information  - Only used in tag mode
         "type": "network.url",        # Type of tag
         "value": "google.ca"          # Value of the tag
     },
     "type": "tag"                # Type of safelist hash (tag or file)
    }

    Result example:
    {
     "success": true,         # Was the hash successfully added
     "op": "add"              # Was it added to the system or updated
    }
    """
    # Load data
    data = request.json
    user = kwargs['user']

    # Set defaults
    data.setdefault('classification', CLASSIFICATION.UNRESTRICTED)
    data.setdefault('hashes', {})
    if data['type'] == 'tag':
        tag_data = data.get('tag', None)
        if tag_data is None or 'type' not in tag_data or 'value' not in tag_data:
            return make_api_response(None, "Tag data not found", 400)

        hashed_value = f"{tag_data['type']}: {tag_data['value']}".encode(
            'utf8')
        data['hashes']['md5'] = hashlib.md5(hashed_value).hexdigest()
        data['hashes']['sha1'] = hashlib.sha1(hashed_value).hexdigest()
        data['hashes']['sha256'] = hashlib.sha256(hashed_value).hexdigest()
        data.pop('file', None)
        data.pop('signature', None)

    elif data['type'] == 'signature':
        sig_data = data.get('signature', None)
        if sig_data is None or 'name' not in sig_data:
            return make_api_response(None, "Signature data not found", 400)

        hashed_value = f"signature: {sig_data['name']}".encode('utf8')
        data['hashes']['md5'] = hashlib.md5(hashed_value).hexdigest()
        data['hashes']['sha1'] = hashlib.sha1(hashed_value).hexdigest()
        data['hashes']['sha256'] = hashlib.sha256(hashed_value).hexdigest()
        data.pop('tag', None)
        data.pop('file', None)

    elif data['type'] == 'file':
        data.pop('tag', None)
        data.pop('signature', None)
        data.setdefault('file', {})

    data['added'] = data['updated'] = now_as_iso()

    # Find the best hash to use for the key
    qhash = data['hashes'].get(
        'sha256', data['hashes'].get('sha1', data['hashes'].get('md5', None)))
    # Make sure at least one hash was provided
    if not qhash:
        return make_api_response(None, "No valid hash found", 400)

    # Validate sources
    src_map = {}
    for src in data['sources']:
        if src['type'] == 'user':
            if src['name'] != user['uname']:
                return make_api_response(
                    {},
                    f"You cannot add a source for another user. {src['name']} != {user['uname']}",
                    400)
        else:
            if 'signature_importer' not in user['type']:
                return make_api_response(
                    {},
                    "You do not have sufficient priviledges to add an external source.",
                    403)

        src_cl = src.get('classification', None)
        if src_cl:
            data['classification'] = CLASSIFICATION.max_classification(
                data['classification'], src_cl)

        src_map[src['name']] = src

    with Lock(f'add_or_update-safelist-{qhash}', 30):
        old = STORAGE.safelist.get_if_exists(qhash, as_obj=False)
        if old:
            try:
                # Save data to the DB
                STORAGE.safelist.save(qhash, _merge_safe_hashes(data, old))
                return make_api_response({'success': True, "op": "update"})
            except InvalidSafehash as e:
                return make_api_response({}, str(e), 400)
        else:
            try:
                data['sources'] = list(src_map.values())  # materialize the deduplicated sources
                STORAGE.safelist.save(qhash, data)
                return make_api_response({'success': True, "op": "add"})
            except Exception as e:
                return make_api_response({},
                                         f"Invalid data provided: {str(e)}",
                                         400)
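A hypothetical request payload for the tag mode described in the docstring; the values mirror the docstring's own examples and only the keys the handler actually reads are included.

# Hypothetical payload posted as JSON to this endpoint (exact route omitted).
payload = {
    "type": "tag",
    "tag": {"type": "network.url", "value": "google.ca"},
    "sources": [{
        "name": "admin",                 # must match the calling user when type is 'user'
        "type": "user",
        "reason": ["We've seen this file many times and it leads to False positives"],
    }],
}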