def __init__(self, datastore: AssemblylineDatastore = None, filestore: FileStore = None,
             config=None, redis=None, redis_persist=None, identify=None):
    """Set up the tasking client and its supporting services.

    Any dependency passed as None is created on demand from the global
    forge factories using the (possibly cached) configuration.
    """
    self.log = logging.getLogger('assemblyline.tasking_client')
    self.config = config or forge.CachedObject(forge.get_config)
    self.datastore = datastore or forge.get_datastore(self.config)
    self.dispatch_client = DispatchClient(self.datastore, redis=redis, redis_persist=redis_persist)
    self.event_sender = EventSender('changes.services', redis)
    self.filestore = filestore or forge.get_filestore(self.config)
    self.heuristic_handler = HeuristicHandler(self.datastore)
    self.heuristics = {heuristic.heur_id: heuristic
                       for heuristic in self.datastore.list_all_heuristics()}
    self.status_table = ExpiringHash(SERVICE_STATE_HASH, ttl=60 * 30, host=redis)
    self.tag_safelister = forge.CachedObject(
        forge.get_tag_safelister,
        kwargs=dict(log=self.log, config=config, datastore=self.datastore),
        refresh=300)
    # Only tear the identify helper down later if we created it here;
    # a caller-supplied instance stays under the caller's control.
    self.cleanup = not identify
    self.identify = identify or forge.get_identify(
        config=self.config, datastore=self.datastore, use_cache=True)
def __init__(self, datastore: AssemblylineDatastore = None, filestore: FileStore = None,
             config=None, redis=None, identify=None):
    """Set up the submission client.

    Any dependency passed as None is created on demand from the global
    forge factories using the (possibly cached) configuration.
    """
    self.log = logging.getLogger('assemblyline.submission_client')
    self.config = config or forge.CachedObject(forge.get_config)
    self.datastore = datastore or forge.get_datastore(self.config)
    self.filestore = filestore or forge.get_filestore(self.config)
    self.redis = redis
    # Only tear the identify helper down later if we created it here;
    # a caller-supplied instance stays under the caller's control.
    self.cleanup = not identify
    self.identify = identify or forge.get_identify(
        config=self.config, datastore=self.datastore, use_cache=True)
    # A client for interacting with the dispatcher.
    # BUG FIX: use the resolved self.datastore rather than the raw
    # `datastore` argument — the argument may be None (in which case the
    # dispatcher previously received no datastore at all), whereas
    # self.datastore is always populated above.
    self.dispatcher = DispatchClient(self.datastore, redis=self.redis)
def test_id_file_base():
    """Every sample under id_file_base/ must be identified as the type
    recorded for it in the id_file_base.json expectations map."""
    base_dir = os.path.join(os.path.dirname(__file__), "id_file_base")
    expectations_name = "id_file_base.json"
    with open(os.path.join(base_dir, expectations_name), "r") as handle:
        expected_types = loads(handle.read())
    with forge.get_identify(use_cache=False) as identify:
        for _dirpath, _dirnames, names in os.walk(base_dir):
            for name in names:
                # The expectations map itself is not a sample.
                if name == expectations_name:
                    continue
                info = identify.fileinfo(os.path.join(base_dir, name))
                assert info.get("type", "") == expected_types[name]
def test_identify():
    """Round-trip a known payload through cart packing and check that
    identify reports the correct type and sha256 for the cart file."""
    with forge.get_identify(use_cache=False) as identify:
        # Test payload and its expected digest.
        aaaa = b"A" * 10000
        sha256 = hashlib.sha256(aaaa).hexdigest()

        # Prep temp file. BUG FIX: close the descriptor mkstemp returns so
        # the file handle is not leaked for the rest of the test run.
        fd, input_path = tempfile.mkstemp()
        os.close(fd)
        output_path = f"{input_path}.cart"

        try:
            # Write temp file
            with open(input_path, 'wb') as oh:
                oh.write(aaaa)

            # Create a cart file
            with open(output_path, 'wb') as oh:
                with open(input_path, 'rb') as ih:
                    pack_stream(ih, oh, {'name': 'test_identify.a'})

            # Validate the cart file created
            meta = get_metadata_only(output_path)
            assert meta.get("sha256", None) == sha256

            # Validate identify file detection
            info = identify.fileinfo(output_path)
            assert info.get("type", None) == "archive/cart"

            # Validate identify hashing against an independent hashlib
            # computation. (Replaces the previous `sha256sum` subprocess,
            # which required an external binary and was not portable.)
            with open(output_path, 'rb') as fh:
                output_sha256 = hashlib.sha256(fh.read()).hexdigest()
            assert info.get("sha256", None) == output_sha256
        finally:
            # Cleanup both temp files regardless of assertion outcome.
            for path in (output_path, input_path):
                if os.path.exists(path):
                    os.unlink(path)
def ready_body(core, body=None):
    """Build a unique JSON payload, register it in the filestore and
    datastore, and return its (sha256 hex digest, byte length)."""
    # A random salt guarantees a unique document even for identical bodies.
    payload = {'salt': get_random_id()}
    payload.update(body or {})
    raw = json.dumps(payload).encode()
    digest = hashlib.sha256(raw).hexdigest()
    core.filestore.put(digest, raw)

    with NamedTemporaryFile() as tmp:
        tmp.write(raw)
        tmp.flush()
        with forge.get_identify(use_cache=False) as identify:
            fileinfo = identify.fileinfo(tmp.name)
        core.ds.save_or_freshen_file(digest, fileinfo, now_as_iso(500), 'U',
                                     redis=core.redis)

    return digest, len(raw)
def test_identify_samples(sample):
    """Each sample is a (path, expected_type) pair; identify must report
    the expected type for the file at that path."""
    path, expected_type = sample
    with forge.get_identify(use_cache=False) as identify:
        assert identify.fileinfo(path)["type"] == expected_type
def __init__(self):
    """Start with empty state; a fresh (uncached) identify instance is the
    only dependency created up front."""
    self.identify = forge.get_identify(use_cache=False)
    # These are populated later, outside the constructor.
    self.service = None
    self.service_class = None
    self.submission_params = None
    self.file_dir = None
import pytest
from assemblyline.common import forge
from assemblyline.common.dict_utils import flatten
from assemblyline.odm.messages.task import Task as ServiceTask
from assemblyline_v4_service.common.request import ServiceRequest
from assemblyline_v4_service.common.task import Task
from cart import unpack_file
from vipermonkey_ import ViperMonkey

# NOTE(review): this chunk also uses `os`, `errno`, and `pathlib.Path`
# below, but their imports are not visible here — confirm they are imported
# elsewhere in the file.

# Resolve test/sample locations, overridable through the environment.
TEST_DIR = os.path.dirname(os.path.abspath(__file__))
ROOT_DIR = os.path.dirname(TEST_DIR)
SELF_LOCATION = os.environ.get("FULL_SELF_LOCATION", ROOT_DIR)
SAMPLES_LOCATION = os.environ.get("FULL_SAMPLES_LOCATION", None)

# Module-level identify instance shared by the tests (caching disabled).
identify = forge.get_identify(use_cache=False)


def find_sample(locations, sample):
    """Locate the carted copy of `sample` under one of `locations`.

    Returns the first location's unique match; locations with zero or
    multiple matches are skipped. Raises FileNotFoundError if no location
    yields exactly one match.
    """
    # Assume samples are carted
    sample = f"{sample}.cart"
    for location in locations:
        p = [path for path in Path(location).rglob(sample)]
        if len(p) == 1:
            return p[0]
    raise FileNotFoundError(errno.ENOENT, os.strerror(errno.ENOENT), sample)


def list_results(location):