def get_file_hex(srl, **kwargs):
    """
    Returns the file hex representation

    Variables:
    srl       => A resource locator for the file (sha256)

    Arguments:
    None

    Data Block:
    None

    API call example:
    /api/v3/file/hex/123456...654321/

    Result example:
    <THE FILE HEX REPRESENTATION>
    """
    user = kwargs['user']
    file_obj = STORAGE.get_file(srl)

    # Unknown SRL: nothing to show.
    if not file_obj:
        return make_api_response({}, "The file was not found in the system.", 404)

    # The caller must be allowed to see the file's classification.
    if not user or not Classification.is_accessible(user['classification'], file_obj['classification']):
        return make_api_response({}, "You are not allowed to view this file.", 403)

    with forge.get_filestore() as f_transport:
        data = f_transport.get(srl)

    # Record exists but the payload is gone from the filestore.
    if not data:
        return make_api_response({}, "This file was not found in the system.", 404)

    return make_api_response(hexdump(data))
def delete_submission(sid, **kwargs):
    """
    INCOMPLETE
    Delete a submission as well as all related files, results and errors

    Variables:
    sid         => Submission ID to be deleted

    Arguments:
    None

    Data Block:
    None

    Result example:
    {success: true}
    """
    user = kwargs['user']
    submission = STORAGE.get_submission(sid)

    # The caller must exist, be allowed to see the submission's classification,
    # and either be the original submitter or an administrator.
    if submission and user \
            and Classification.is_accessible(user['classification'], submission['classification']) \
            and (submission['submission']['submitter'] == user['uname'] or user['is_admin']):
        with forge.get_filestore() as f_transport:
            STORAGE.delete_submission_tree(sid, transport=f_transport)
        STORAGE.commit_index('submission')
        return make_api_response({"success": True})
    else:
        # Fixed typo in the user-facing error message ("Your are" -> "You are").
        return make_api_response("", "You are not allowed to delete this submission.", 403)
def get_file_raw(srl, **kwargs):
    """
    Return the raw values for a file where non-utf8 chars are replaced by DOTs.

    Variables:
    srl       => A resource locator for the file (sha256)

    Arguments:
    None

    Data Block:
    None

    Result example:
    <THE RAW FILE>
    """
    user = kwargs['user']
    file_obj = STORAGE.get_file(srl)

    # Unknown SRL: nothing to show.
    if not file_obj:
        return make_api_response({}, "The file was not found in the system.", 404)

    # The caller must be allowed to see the file's classification.
    if not user or not Classification.is_accessible(user['classification'], file_obj['classification']):
        return make_api_response({}, "You are not allowed to view this file.", 403)

    with forge.get_filestore() as f_transport:
        data = f_transport.get(srl)

    # Record exists but the payload is gone from the filestore.
    if not data:
        return make_api_response({}, "This file was not found in the system.", 404)

    # FILTER_RAW maps non-printable bytes to dots.
    return make_api_response(data.translate(FILTER_RAW))
def download_file(srl, **kwargs):
    """
    Download the file using the default encoding method.
    This api will force the browser in download mode.

    Variables:
    srl       => A resource locator for the file (sha256)

    Arguments:
    name      => Name of the file to download
    format    => Format to encode the file in
    password  => Password of the password protected zip

    Data Block:
    None

    API call example:
    /api/v3/file/download/123456...654321/

    Result example:
    <THE FILE BINARY ENCODED IN SPECIFIED FORMAT>
    """
    user = kwargs['user']
    file_obj = STORAGE.get_file(srl)

    if not file_obj:
        return make_api_response({}, "The file was not found in the system.", 404)

    # The caller must be allowed to see the file's classification.
    if not user or not Classification.is_accessible(user['classification'], file_obj['classification']):
        return make_api_response({}, "You are not allowed to download this file.", 403)

    params = load_user_settings(user)

    # Download name defaults to the SRL; strip any directory component.
    name = request.args.get('name', srl)
    if name == "":
        name = srl
    else:
        name = basename(name)
    name = safe_str(name)

    # Encoding defaults to the user's configured download encoding.
    file_format = request.args.get('format', params['download_encoding'])
    if file_format == "raw" and not ALLOW_RAW_DOWNLOADS:
        return make_api_response({}, "RAW file download has been disabled by administrators.", 403)

    password = request.args.get('password', None)

    with forge.get_filestore() as f_transport:
        data = f_transport.get(srl)

    if not data:
        return make_api_response({}, "The file was not found in the system.", 404)

    data, error, already_encoded = encode_file(data, file_format, name, password)
    if error:
        return make_api_response({}, error['text'], error['code'])

    # Tag the encoding onto the filename unless the payload is raw/pre-encoded.
    if file_format != "raw" and not already_encoded:
        name = "%s.%s" % (name, file_format)

    return make_file_response(data, name, len(data))
def check_srl_exists(*args, **kwargs):
    """
    Check if the provided Resource locators exist in the system or not.

    Variables:
    None

    Arguments:
    None

    Data Block (REQUIRED):
    ["SRL1", SRL2]          # List of SRLs (SHA256)

    Result example:
    {
     "existing": [],  # List of existing SRLs
     "missing": []    # List of missing SRLs
    }
    """
    srls_to_check = request.json

    # isinstance is the idiomatic check (replaces `type(x) != list`) and also
    # accepts list subclasses.
    if not isinstance(srls_to_check, list):
        return make_api_response("", "Expecting a list of SRLs", 403)

    with forge.get_filestore() as f_transport:
        check_results = SubmissionWrapper.check_exists(f_transport, srls_to_check)

    return make_api_response(check_results)
def start_submission(*args, **kwargs):
    """
    Submit a batch of files at the same time. This assumes that the
    presubmit API was called first to verify if the files are indeed already
    on the system and that the missing files where uploaded using the given
    transport and upload location returned by the presubmit API.

    Variables:
    None

    Arguments:
    None

    Data Block (REQUIRED):
    {
     "1":                         # File ID
       {"sha256": "982...077",    # SHA256 of the file
        "path": "/local/file/path",  # Path of the file
        "KEYWORD": ARG, },        # Any other KWARGS for the submission block
     ...
    }

    Result example:
    {
     "1":                          # File ID
       "submission":{},            # Submission Block
       "request": {},              # Request Block
       "times": {},                # Timing Block
       "state": "submitted",       # Submission state
       "services": {},             # Service selection Block
       "fileinfo": {}              # File information Block
       },
     ...
    }
    """
    user = kwargs['user']
    submit_requests = request.json
    check_submission_quota(user, len(submit_requests))
    submit_results = {}
    user_params = load_user_settings(user)

    # Open the filestore once for the whole batch instead of opening and
    # closing a transport for every single file (loop-invariant hoisted).
    with forge.get_filestore() as f_transport:
        for key, submit in submit_requests.iteritems():
            submit['submitter'] = user['uname']
            submit['quota_item'] = True
            path = submit.get('path', './path/missing')
            # Fill missing fields from the user's saved defaults.
            if 'classification' not in submit:
                submit['classification'] = user_params['classification']
            if 'groups' not in submit:
                submit['groups'] = user['groups']
            if 'description' not in submit:
                submit['description'] = "Inspection of file: %s" % path
            if 'selected' not in submit:
                submit['selected'] = simplify_services(user_params["services"])

            submit_results[key] = SubmissionWrapper.submit(f_transport, STORAGE, **submit)

    return make_api_response(submit_results)
def __init__(self, server_url=None, datastore=None):
    # Fall back to the configured submissions URL when none is supplied.
    self.server_url = server_url or config.submissions.url
    self.transport = forge.get_filestore()
    self.datastore = datastore
    self.is_unix = (os.name == "posix")

    if not self.is_unix:
        # Non-posix hosts go through the remote assemblyline client instead
        # of a local datastore.
        from assemblyline_client import Client
        self.client = Client(self.server_url, auth=SUBMISSION_AUTH)
    elif self.datastore is None:
        self.datastore = forge.get_datastore()
def format_result(r):
    # Best effort: any unexpected result shape just falls through and the
    # original result is returned untouched.
    try:
        first_title = r['result']['sections'][0]['title_text']
        if first_title.startswith('Result exceeded max size.'):
            # Oversized results stash the full payload as the last
            # supplementary file; load and return that instead.
            sha256 = r['response']['supplementary'][-1][1]
            with forge.get_filestore() as transport:
                oversized = json.loads(transport.get(sha256))
            oversized['oversized'] = True
            return oversized
    except:  # pylint:disable=W0702
        pass
    return r
def resubmit_submission_for_analysis(sid, *args, **kwargs):
    """
    Resubmit a submission for analysis with the exact same parameters as before

    Variables:
    sid         => Submission ID to re-submit

    Arguments:
    None

    Data Block:
    None

    Result example:
    {
     "submission":{},      # Submission Block
     "request": {},        # Request Block
     "times": {},          # Timing Block
     "state": "submitted", # Submission state
     "services": {},       # Service selection Block
     "fileinfo": {}        # File information Block
    }
    """
    user = kwargs['user']
    submission = STORAGE.get_submission(sid)

    if not submission:
        # Fixed grammar in user-facing error message ("does not exists").
        return make_api_response({}, "Submission %s does not exist." % sid, status_code=404)

    if not Classification.is_accessible(user['classification'], submission['classification']):
        return make_api_response("", "You are not allowed to re-submit a submission that you don't have access to",
                                 403)

    # Rebuild the task from the stored submission, dropping UI-only keys.
    task = {k: v for k, v in submission['submission'].iteritems() if k not in STRIP_KW}
    task.update({k: v for k, v in submission['services'].iteritems() if k not in STRIP_KW})
    task['classification'] = submission['classification']
    task['submitter'] = user['uname']
    if 'priority' not in task:
        task['priority'] = 500

    # Comprehension replaces the manual append loop.
    names = [name for name, _ in submission["files"]]
    task['description'] = "Resubmit %s for analysis" % ", ".join(names)

    with forge.get_filestore() as f_transport:
        return make_api_response(
            SubmissionWrapper.submit_multi(STORAGE, f_transport, submission["files"], **task))
def format_result(user_classification, r, min_classification):
    """
    Sanitize a result block for a given user.

    Drops the sections and tags the user cannot access, raises every
    remaining classification to at least `min_classification`, and stamps
    the access-control parts onto the result.

    Returns the sanitized result, or None when the user cannot access
    `min_classification` at all.
    """
    if not CLASSIFICATION.is_accessible(user_classification, min_classification):
        return None

    try:
        title = r['result']['sections'][0]['title_text']
        if title.startswith('Result exceeded max size.'):
            # Oversized results stash the real payload as the last
            # supplementary file; fetch it and format that one recursively.
            sha256 = r['response']['supplementary'][-1][1]
            with forge.get_filestore() as transport:
                oversized = json.loads(transport.get(sha256))
            oversized['oversized'] = True
            r = format_result(user_classification, oversized, min_classification)
    except:  # pylint:disable=W0702
        pass

    # Drop sections user does not have access and set others to at least min classification
    max_classification, r['result']['sections'] = _section_recurse(
        r['result']['sections'], user_classification, min_classification)

    # Drop tags user does not have access and set others to at least min classification
    tags = [
        t for t in r['result']['tags'] if CLASSIFICATION.is_accessible(
            user_classification, t['classification'])
    ]
    for tag in tags:
        tag['classification'] = CLASSIFICATION.max_classification(
            tag['classification'], min_classification)
        # Track the highest classification seen across sections and tags.
        max_classification = CLASSIFICATION.max_classification(
            tag['classification'], max_classification)
    r['result']['tags'] = tags

    # Set result classification to at least min but no more then viewable result classification
    r['result']['classification'] = CLASSIFICATION.max_classification(
        max_classification, min_classification)
    r['classification'] = CLASSIFICATION.max_classification(
        max_classification, min_classification)
    parts = CLASSIFICATION.get_access_control_parts(r['classification'])
    r.update(parts)

    # Nothing visible is left: neutralize the score and extracted file list.
    if len(r['result']['sections']) == 0 and len(r['result']['tags']) == 0:
        r['result']['score'] = 0
        r['response']['extracted'] = []

    return r
def pre_submission(*args, **kwargs):
    """
    Perform a presubmit of a list of local files. This is the first
    stage for a batch submit of files.

    Variables:
    None

    Arguments:
    None

    Data Block (REQUIRED):
    {
     "1":                        # File ID
       {"sha256": "982...077",   # SHA256 of the file
        "path": "/local/file/path", },  # Path of the file
     ...
    }

    Result example:
    {
     "1":                               # File ID
       {"exists": false,                # Does the file already exist?
        "succeeded": true,              # Is the result for this file accurate?
        "filestore": "TransportFTP:transport.al",  # File Transport method/url
        "kwargs":                       # Extra (** kwargs)
          {"path": "/local/file path"}, # Path to the file
        "upload_path": "/remote/upload/path",  # Where to upload if missing
        "sha256": "982...077"},         # SHA256 of the file
    }
    """
    presubmit_requests = request.json
    presubmit_results = {}

    for key, presubmit in presubmit_requests.iteritems():
        outcome = {}
        ok = True
        try:
            with forge.get_filestore() as f_transport:
                outcome = SubmissionWrapper.presubmit(f_transport, **presubmit)
        except Exception as e:
            # A failed presubmit is reported per-file, not for the batch.
            ok = False
            outcome['error'] = 'Failed to presubmit for {0}:{1}'.format(key, e)

        outcome['succeeded'] = ok
        presubmit_results[key] = outcome

    return make_api_response(presubmit_results)
def get_file_strings(srl, **kwargs):
    """
    Return all strings in a given file

    Variables:
    srl       => A resource locator for the file (sha256)

    Arguments:
    len       => Minimum length for a string

    Data Block:
    None

    Result example:
    <THE LIST OF STRINGS>
    """
    user = kwargs['user']
    hlen = request.args.get('len', "6")
    file_obj = STORAGE.get_file(srl)

    if not file_obj:
        return make_api_response({}, "The file was not found in the system.", 404)

    # The caller must be allowed to see the file's classification.
    if not user or not Classification.is_accessible(user['classification'], file_obj['classification']):
        return make_api_response({}, "You are not allowed to view this file.", 403)

    with forge.get_filestore() as f_transport:
        data = f_transport.get(srl)

    if not data:
        return make_api_response({}, "This file was not found in the system.", 404)

    # Runs of printable characters of at least `hlen` length.
    pattern = "[\x1f-\x7e]{%s,}" % hlen

    # Ascii strings
    string_list = re.findall(pattern, data)

    # UTF-16 strings
    try:
        string_list += re.findall(pattern, data.decode("utf-16", errors="ignore"))
    except UnicodeDecodeError:
        pass

    return make_api_response("\n".join(string_list))
def identify_supplementary_files(*args, **kwargs):
    """
    Ask the UI to create file entries for supplementary files.

    Variables:
    None

    Arguments:
    None

    Data Block (REQUIRED):
    {
     "1":                                # File ID
       {"sha256": "982...077",           # SHA256 of the file
        "classification": "UNRESTRICTED",  # Other KW args passed to the identify call
        "ttl": 30},                      # Days to live for the file
     ...
    }

    Result example:
    {
     "1": {                       # File ID
       "status": "succeeded",     # Status for the file ("succeeded", "failed")
       "fileinfo": {}             # File information Block
       },
     ...
    }
    """
    user = kwargs['user']
    submit_requests = request.json
    submit_results = {}
    user_params = load_user_settings(user)

    for key, submit in submit_requests.iteritems():
        submit['submitter'] = user['uname']
        # Default the classification from the user's saved settings.
        if 'classification' not in submit:
            submit['classification'] = user_params['classification']

        with forge.get_filestore() as f_transport:
            file_info = SubmissionWrapper.identify(f_transport, STORAGE, **submit)

        if file_info:
            submit_results[key] = {"status": "succeeded", "fileinfo": file_info}
        else:
            submit_results[key] = {"status": "failed", "fileinfo": {}}

    return make_api_response(submit_results)
# Pull every deployed or noisy signature and compile each one individually,
# so a single bad rule does not abort the whole run.
sig_keys = DS.list_filtered_signature_keys(
    "meta.al_status:DEPLOYED OR meta.al_status:NOISY")
signature_list = DS.get_signatures(sig_keys)
rule_set = []
for s in signature_list:
    rule_val = YP().dump_rule_file([s])
    try:
        rule_set.append((s['name'], yara.compile(source=rule_val)))
    except KeyboardInterrupt:
        raise
    except:
        # Keep going on a compile failure, but dump the offending rule source.
        traceback.print_exc()
        print rule_val

# Replay every compiled rule against the file of each errored item to find
# which rule/file combination triggers a yara error.
# NOTE(review): Error_list is presumably a prior search result with an
# "items" list of error records carrying an "srl" — confirm against caller.
for item in Error_list.get("items", []):
    srl = item.get("srl")
    if srl is not None:
        with forge.get_filestore() as f_transport:
            data = f_transport.get(srl)
        if data is None:
            continue
        for name, rule in rule_set:
            try:
                rule.match(data=data)
            except KeyboardInterrupt:
                raise
            except yara.Error as e:
                print "Error in", name, e.message
import json import os import subprocess import time import uuid import shutil from assemblyline.al.common import forge config = forge.get_config() Classification = forge.get_classification() FILESTORE = forge.get_filestore() MAX_RETRY = 10 STORAGE = forge.get_datastore() WORK_DIR = "/tmp/bundling" class BundlingException(Exception): pass class SubmissionNotFound(Exception): pass class IncompleteBundle(Exception): pass class SubmissionAlreadyExist(Exception):
def submit_file(*args, **kwargs):
    """
    Submit a single file inline

    Variables:
    None

    Arguments:
    None

    Data Block (REQUIRED):
    {
     "name": "file.exe",     # Name of the file
     "binary": "A24AB..==",  # Base64 encoded file binary
     "params": {             # Submission parameters
         "key": val,             # Key/Value pair for params that different then defaults
     },                          # Default params can be fetch at /api/v3/user/submission_params/<user>/
     "srv_spec": {           # Service specifics parameters
         "Extract": {
             "password": "******"
         },
     }
    }

    Result example:
    {
     "submission":{},       # Submission Block
     "times": {},           # Timing Block
     "state": "submitted",  # Submission state
     "services": {},        # Service selection Block
     "fileinfo": {}         # File information Block
     "files": []            # List of submitted files
     "request": {}          # Request detail block
    }
    """
    user = kwargs['user']
    check_submission_quota(user)

    # Each inline submission gets its own scratch directory.
    out_dir = os.path.join(TEMP_SUBMIT_DIR, uuid4().get_hex())

    try:
        data = request.json
        if not data:
            return make_api_response({}, "Missing data block", 400)

        name = data.get("name", None)
        if not name:
            return make_api_response({}, "Filename missing", 400)

        out_file = os.path.join(out_dir, os.path.basename(name))

        binary = data.get("binary", None)
        if not binary:
            return make_api_response({}, "File binary missing", 400)
        else:
            try:
                os.makedirs(out_dir)
            except:
                # Directory may already exist; best-effort creation.
                pass
            # Decode the base64 payload into the scratch file.
            with open(out_file, "wb") as my_file:
                my_file.write(base64.b64decode(binary))

        # Create task object
        task = STORAGE.get_user_options(user['uname'])
        if not task:
            task = get_default_user_settings(user)

        # Caller-supplied params override the saved/default settings.
        task.update(data.get("params", {}))
        if 'groups' not in task:
            task['groups'] = user['groups']
        task["params"] = data.get("srv_spec", {})
        if 'services' in task and "selected" not in task:
            task["selected"] = task["services"]
        task['quota_item'] = True
        task['submitter'] = user['uname']
        task['sid'] = str(uuid4())
        if not task['description']:
            task['description'] = "Inspection of file: %s" % name

        with forge.get_filestore() as f_transport:
            result = SubmissionWrapper.submit_inline(STORAGE, f_transport, [out_file],
                                                     **remove_ui_specific_options(task))

        # Sanity check: the dispatcher must echo back the SID we generated.
        if result['submission']['sid'] != task['sid']:
            raise Exception('ID does not match what was returned by the dispatcher. Cancelling request...')

        return make_api_response(result)
    finally:
        # Best-effort cleanup of the scratch file and directory.
        try:
            # noinspection PyUnboundLocalVariable
            os.unlink(out_file)
        except:
            pass
        try:
            shutil.rmtree(out_dir, ignore_errors=True)
        except:
            pass
def __init__(self):
    # Wire up the datastore and filestore backends for this service.
    self.storage = forge.get_datastore()
    self.transport = forge.get_filestore()
    startup_msg = "Submission service instantiated. Transport::{0}".format(self.transport)
    log.info(startup_msg)
def ingest_single_file(**kwargs):
    """
    Ingest a single file in the system

        Note:
            Binary and sha256 fields are optional but at least one of them has to be there
            notification_queue, notification_threshold and generate_alert fields are optional

        Note 2:
            The ingest API uses the user's default settings to submit files to the system
            unless these settings are overridden in the 'params' field. Although, there are
            exceptions to that rule. Fields deep_scan, ignore_filtering, ignore_cache and
            ignore_tag are reset to False because they lead to dangerous behavior in the
            system.

    Variables:
    None

    Arguments:
    None

    Data Block:
    {
     "name": "file.exe",             # Name of the file
     "binary": "A24AB..==",          # Base64 encoded file binary
     "metadata": {                   # Submission Metadata
         "key": val,                     # Key/Value pair for metadata parameters
     },
     "params": {                     # Submission parameters
         "key": val,                     # Key/Value pair for params that differ from the user's defaults
     },                                  # DEFAULT: /api/v3/user/submission_params/<user>/
     "sha256": "1234...CDEF"         # SHA256 hash of the file
     "srv_spec": {                   # Service specifics parameters
         "Extract": {
             "password": "******"
         },
     },
     "type": "SUBMISSION_TYPE"       # Required type field,
     "notification_queue": None,     # Name of the notification queue
     "notification_threshold": None, # Threshold for notification
     "generate_alert": False         # Generate an alert in our alerting system or not
    }

    Result example:
    { "success": true }
    """
    user = kwargs['user']

    # Each ingest call gets its own scratch directory.
    out_dir = os.path.join(TEMP_SUBMIT_DIR, uuid4().get_hex())

    with forge.get_filestore() as f_transport:
        try:
            data = request.json
            if not data:
                return make_api_response({}, "Missing data block", 400)

            # User-visible notification queues are namespaced with "nq-".
            notification_queue = data.get('notification_queue', None)
            if notification_queue:
                notification_queue = "nq-%s" % notification_queue

            notification_threshold = data.get('notification_threshold', None)
            if not isinstance(notification_threshold, int) and notification_threshold:
                return make_api_response({}, "notification_threshold should be and int", 400)

            generate_alert = data.get('generate_alert', False)
            if not isinstance(generate_alert, bool):
                return make_api_response({}, "generate_alert should be a boolean", 400)

            name = data.get("name", None)
            if not name:
                return make_api_response({}, "Filename missing", 400)

            ingest_msg_type = data.get("type", None)
            if not ingest_msg_type:
                return make_api_response({}, "Required type field missing", 400)

            out_file = os.path.join(out_dir, os.path.basename(name))
            try:
                os.makedirs(out_dir)
            except:
                # Directory may already exist; best-effort creation.
                pass

            # Materialize the file locally: either decode the inline binary or
            # pull an already-known sha256 down from the filestore.
            binary = data.get("binary", None)
            if not binary:
                sha256 = data.get('sha256', None)
                if sha256:
                    if f_transport.exists(sha256):
                        f_transport.download(sha256, out_file)
                    else:
                        return make_api_response({}, "SHA256 does not exist in our datastore", 404)
                else:
                    return make_api_response({}, "Both file binary and sha256 missing", 400)
            else:
                with open(out_file, "wb") as my_file:
                    my_file.write(base64.b64decode(binary))

            # Start from the user's saved options, force the dangerous flags
            # off, then layer on the caller's params and the fixed overrides.
            overrides = STORAGE.get_user_options(user['uname'])
            overrides['selected'] = overrides['services']
            overrides.update({
                'deep_scan': False,
                "priority": 150,
                "ignore_cache": False,
                "ignore_filtering": False,
                "ignore_tag": False,
            })
            overrides.update(data.get("params", {}))
            overrides.update({
                'description': "[%s] Inspection of file: %s" % (ingest_msg_type, name),
                'generate_alert': generate_alert,
                'max_extracted': 100,
                'max_supplementary': 100,
                'params': data.get("srv_spec", {}),
                'submitter': user['uname'],
            })
            if notification_queue:
                overrides.update({
                    "notification_queue": notification_queue,
                    "notification_threshold": notification_threshold
                })

            # Cap the priority even if the caller asked for a higher one.
            overrides['priority'] = min(overrides.get("priority", 150), 250)

            metadata = data.get("metadata", {})
            metadata['type'] = ingest_msg_type
            if 'ts' not in metadata:
                metadata['ts'] = now_as_iso()

            # Hash the local file and upload it if the filestore lacks it.
            digests = identify.get_digests_for_file(out_file)
            digests.pop('path', None)
            sha256 = digests['sha256']
            if not f_transport.exists(sha256):
                f_transport.put(out_file, sha256, location='far')

            msg = {
                "priority": overrides['priority'],
                "type": ingest_msg_type,
                "overrides": remove_ui_specific_options(overrides),
                "metadata": metadata
            }
            msg.update(digests)

            ingest.push(forge.determine_ingest_queue(sha256), msg)

            return make_api_response({"success": True})
        finally:
            # Best-effort cleanup of the scratch file and directory.
            try:
                # noinspection PyUnboundLocalVariable
                os.unlink(out_file)
            except:
                pass
            try:
                shutil.rmtree(out_dir, ignore_errors=True)
            except:
                pass
def resubmit_for_dynamic(srl, *args, **kwargs):
    """
    Resubmit a file for dynamic analysis

    Variables:
    srl         => Resource locator (SHA256)

    Arguments (Optional):
    copy_sid    => Mimic the attributes of this SID.
    name        => Name of the file for the submission

    Data Block:
    None

    Result example:
    {
     "submission":{},      # Submission Block
     "request": {},        # Request Block
     "times": {},          # Timing Block
     "state": "submitted", # Submission state
     "services": {},       # Service selection Block
     "fileinfo": {}        # File information Block
    }
    """
    user = kwargs['user']
    copy_sid = request.args.get('copy_sid', None)
    name = request.args.get('name', srl)

    if copy_sid:
        submission = STORAGE.get_submission(copy_sid)
    else:
        submission = None

    if submission:
        # Mimic an existing submission: reuse its parameters, minus UI-only keys.
        if not Classification.is_accessible(user['classification'], submission['classification']):
            return make_api_response("",
                                     "You are not allowed to re-submit a submission that you don't have access to",
                                     403)
        task = {k: v for k, v in submission['submission'].iteritems() if k not in STRIP_KW}
        task.update({k: v for k, v in submission['services'].iteritems() if k not in STRIP_KW})
        task['classification'] = submission['classification']
    else:
        # No template submission: build the task from the user's saved options.
        params = STORAGE.get_user_options(user['uname'])
        task = {k: v for k, v in params.iteritems() if k not in STRIP_KW}
        task['selected'] = params["services"]
        task['classification'] = params['classification']

    task['sha256'] = srl

    with forge.get_filestore() as f_transport:
        # The payload must still be in the filestore to be resubmitted.
        if not f_transport.exists(srl):
            return make_api_response({},
                                     "File %s cannot be found on the server therefore it cannot be resubmitted."
                                     % srl,
                                     status_code=404)

        task['path'] = name
        task['submitter'] = user['uname']
        if 'priority' not in task:
            task['priority'] = 500
        task['description'] = "Resubmit %s for Dynamic Analysis" % name

        # Force the Dynamic Analysis service into the selection.
        if "Dynamic Analysis" not in task['selected']:
            task['selected'].append("Dynamic Analysis")

        submit_result = SubmissionWrapper.submit(f_transport, STORAGE, **task)
        return make_api_response(submit_result)
def start_ui_submission(ui_sid, **kwargs):
    """
    Start UI submission. Starts processing after files were uploaded to the server.

    Variables:
    ui_sid     => UUID for the current UI file upload

    Arguments:
    None

    Data Block (REQUIRED):
    Dictionary of user options obtained by calling 'GET /api/v3/user/settings/<username>/'

    Result example:
    {
     'started': True,                    # Has the submission started processing?
     'sid' : "c7668cfa-...-c4132285142e" # Submission ID
    }
    """
    user = kwargs['user']
    check_submission_quota(user)

    task = request.json
    task['groups'] = kwargs['user']['groups']
    task['quota_item'] = True

    if not Classification.is_accessible(user['classification'], task['classification']):
        # Fixed grammar in user-facing error message ("then" -> "than").
        return make_api_response({"started": False, "sid": None},
                                 "You cannot start a scan with higher "
                                 "classification than you're allowed to see", 403)

    request_files = []
    request_dirs = []
    fnames = []
    try:
        # All upload directories for this UI session share the ui_sid prefix.
        flist = glob.glob(TEMP_DIR + ui_sid + "*")
        if len(flist) > 0:
            # Generate file list
            for fpath in flist:
                request_dirs.append(fpath)
                files = os.listdir(fpath)
                for myfile in files:
                    request_files.append(os.path.join(fpath, myfile))
                    if myfile not in fnames:
                        fnames.append(myfile)

            if not task['description']:
                task['description'] = "Inspection of file%s: %s" % (
                    {True: "s", False: ""}[len(fnames) > 1], ", ".join(fnames))

            # Submit to dispatcher
            dispatch_task = ui_to_dispatch_task(task, kwargs['user']['uname'], str(uuid.uuid4()))
            with forge.get_filestore() as f_transport:
                result = SubmissionWrapper.submit_inline(
                    STORAGE, f_transport, request_files, **dispatch_task)

            # Sanity check: the dispatcher must echo back the SID we generated.
            if result['submission']['sid'] != dispatch_task['sid']:
                raise Exception(
                    'ID does not match what was returned by the dispatcher. Cancelling request...')

            return make_api_response({"started": True, "sid": result['submission']['sid']})
        else:
            # Fixed grammar in user-facing error message ("where" -> "were").
            return make_api_response({"started": False, "sid": None},
                                     "No files were found for ID %s. "
                                     "Try again..." % ui_sid, 404)
    finally:
        # Remove files
        for myfile in request_files:
            try:
                os.unlink(myfile)
            except:
                pass
        # Remove dirs
        for fpath in request_dirs:
            try:
                os.rmdir(fpath)
            except:
                pass
def main():
    """
    Expiry worker loop: drains the per-bucket delete queues and removes the
    corresponding records from the datastore (and, for files, optionally the
    stored payload from the filestore). Runs forever.
    """
    ds = forge.get_datastore()
    fs = forge.get_filestore()

    submission_queue = queue.NamedQueue('d-submission', db=DATABASE_NUM)
    result_queue = queue.NamedQueue('d-result', db=DATABASE_NUM)
    file_queue = queue.NamedQueue('d-file', db=DATABASE_NUM)
    error_queue = queue.NamedQueue('d-error', db=DATABASE_NUM)
    dynamic_queue = queue.NamedQueue('d-dynamic', db=DATABASE_NUM)
    alert_queue = queue.NamedQueue('d-alert', db=DATABASE_NUM)
    filescore_queue = queue.NamedQueue('d-filescore', db=DATABASE_NUM)
    emptyresult_queue = queue.NamedQueue('d-emptyresult', db=DATABASE_NUM)

    log.info("Ready!")

    # NOTE(review): dynamic_queue ('d-dynamic') has no matching branch in the
    # dispatch below, so its messages hit the "Unknown message" warning —
    # confirm whether a handler is missing.
    queues = [
        submission_queue, result_queue, file_queue, error_queue,
        dynamic_queue, alert_queue, filescore_queue, emptyresult_queue
    ]

    while True:
        queue_name, key = queue.select(*queues)
        try:
            rewrite = False
            expiry = None
            # A tuple/list message carries (key, rewrite, expiry) instead of a bare key.
            if isinstance(key, tuple) or isinstance(key, list):
                key, rewrite, expiry = key
            if rewrite:
                # Write the new expiry timestamp back into the bucket named after
                # the queue (queue_name minus its "d-" prefix) before deleting.
                # noinspection PyProtectedMember
                ds._save_bucket_item(ds.get_bucket(queue_name[2:]), key, {"__expiry_ts__": expiry})
            if queue_name == "d-submission":
                ds.delete_submission(key)
                log.debug("Submission %s (DELETED)" % key)
            elif queue_name == "d-result":
                ds.delete_result(key)
                log.debug("Result %s (DELETED)" % key)
            elif queue_name == "d-error":
                ds.delete_error(key)
                log.debug("Error %s (DELETED)" % key)
            elif queue_name == "d-file":
                ds.delete_file(key)
                # Only drop the stored payload when configured to do so.
                if config.core.expiry.delete_storage and fs.exists(
                        key, location='far'):
                    fs.delete(key, location='far')
                log.debug("File %s (DELETED)" % key)
            elif queue_name == "d-alert":
                ds.delete_alert(key)
                log.debug("Alert %s (DELETED)" % key)
            elif queue_name == "d-filescore":
                ds.delete_filescore(key)
                log.debug("FileScore %s (DELETED)" % key)
            elif queue_name == "d-emptyresult":
                ds.delete_result(key)
                log.debug("EmptyResult %s (DELETED)" % key)
            else:
                log.warning("Unknown message: %s (%s)" % (key, queue_name))
        except:
            log.exception("Failed deleting key %s from bucket %s:", key, queue_name)

        # Rotate the queue list so every queue gets a turn at the front.
        queues = queues[1:] + queues[0:1]