def get_resources(pe):
    """Walk a PE file's resource directory and summarize every resource.

    For each language-level resource entry, collects
    [count, name, offset, md5, sha256, size, filetype, entropy,
     language, sublanguage, language_desc] and appends it to the
    returned list. Optionally dumps the raw resource bytes to disk.

    :param pe: a parsed ``pefile.PE`` instance.
    :return: list of per-resource rows (lists) as described above.

    NOTE(review): this function references ``self.dump`` / ``self.pe`` /
    ``self.sha256`` but is declared with only ``pe`` — presumably it was
    extracted from a class method; confirm the enclosing scope.
    """
    resources = []
    if hasattr(pe, 'DIRECTORY_ENTRY_RESOURCE'):
        count = 1
        for resource_type in pe.DIRECTORY_ENTRY_RESOURCE.entries:
            try:
                resource = {}
                # Prefer the named resource type; fall back to the
                # well-known numeric type table, then the raw id.
                if resource_type.name is not None:
                    name = str(resource_type.name)
                else:
                    name = str(pefile.RESOURCE_TYPE.get(resource_type.struct.Id, "UNKNOWN"))
                # NOTE(review): name is always a str here, so this branch
                # looks unreachable — confirm intended.
                if name is None:
                    name = str(resource_type.struct.Id)
                if hasattr(resource_type, 'directory'):
                    for resource_id in resource_type.directory.entries:
                        if hasattr(resource_id, 'directory'):
                            for resource_lang in resource_id.directory.entries:
                                # Raw resource payload bytes.
                                data = pe.get_data(resource_lang.data.struct.OffsetToData, resource_lang.data.struct.Size)
                                entropy = get_entropy(data)
                                filetype = get_type(data)
                                md5 = get_md5(data)
                                sha256 = get_sha256(data)
                                language = pefile.LANG.get(resource_lang.data.lang, None)
                                language_desc = LCID.get(resource_lang.id, 'unknown language')
                                sublanguage = pefile.get_sublang_name_for_lang(
                                    resource_lang.data.lang, resource_lang.data.sublang)
                                # Hex-format then strip the left-justify padding.
                                offset = ('%-8s' % hex(resource_lang.data.struct.OffsetToData)).strip()
                                size = ('%-8s' % hex(resource_lang.data.struct.Size)).strip()
                                resource = [
                                    count, name, offset, md5, sha256, size,
                                    filetype, entropy, language, sublanguage,
                                    language_desc
                                ]
                                # Dump resources if requested
                                if self.dump and pe == self.pe:
                                    # NOTE(review): this inner else is dead —
                                    # the outer condition already requires
                                    # self.dump to be truthy; confirm.
                                    if self.dump:
                                        folder = self.dump
                                    else:
                                        folder = tempfile.mkdtemp()
                                    resource_path = path.join(folder, '{0}_{1}_{2}'.format(
                                        self.sha256, offset, name))
                                    resource.append(resource_path)
                                    with open(resource_path, 'wb') as resource_handle:
                                        resource_handle.write(data)
                                resources.append(resource)
                                count += 1
            except Exception as e:
                # Best-effort: log and continue with the next resource type.
                log.error(e)
                continue
    return resources
def process_dir(read_dir, out_dir, mode='FCG'):
    """Convert a directory subtree of APK files into pickled graph objects.

    Walks ``read_dir`` (appended to the CWD), builds a graph for every
    file found and writes it to ``out_dir`` as ``<sha256>.pz`` via
    ``nx.write_gpickle``. Files that fail to load are copied into a
    subdirectory of ``read_dir`` named after the exception class.

    :param read_dir: input directory, appended to ``os.getcwd()``.
    :param out_dir: output directory, appended to ``os.getcwd()``;
        falls back to ``read_dir`` when empty.
    :param mode: ``'FCG'`` (function call graph) or ``'CFG'``
        (control flow graph).
    :raises ValueError: if ``mode`` is neither ``'FCG'`` nor ``'CFG'``.
    """
    import shutil  # local import: only needed for the error-copy path

    read_dir = os.getcwd() + read_dir
    out_dir = os.getcwd() + out_dir
    # Pickling large graphs can recurse deeply.
    sys.setrecursionlimit(100000)

    files = []
    # check if pdg doesnt exist yet and mark the file to be processed
    for dir_name, _subdirs, file_list in os.walk(read_dir):
        for f in file_list:
            files.append(os.path.join(dir_name, f))

    # loop through .apk files and save them in .pdg.pz format
    print("\nProcessing {} APK files in dir {}".format(len(files), read_dir))
    for f in tqdm(files):
        f = os.path.realpath(f)
        print('[] Loading {0}'.format(f))
        try:
            # BUG FIX: the original used "mode is 'FCG'", which tests
            # object identity, not string equality, and is not guaranteed
            # to match even for equal strings.
            if mode == 'FCG':
                graph = FCG(f).get_fcg()
            elif mode == 'CFG':
                graph = CFG(f).get_cg()
            else:
                # Previously an unknown mode left 'graph' unbound and
                # crashed later with a confusing NameError.
                raise ValueError("Unsupported mode: {}".format(mode))
        except ValueError:
            raise
        # if an exception happens, save the .apk in the corresponding dir
        except Exception as e:
            err = e.__class__.__name__
            d = os.path.join(read_dir, err + "/")
            if not os.path.exists(d):
                os.makedirs(d)
            # Portable, injection-safe copy instead of os.system("cp ...").
            shutil.copy(f, d)
            print("[*] {} error loading {}".format(err, f))
            continue

        h = get_sha256(f)
        if not out_dir:
            out_dir = read_dir
        if not os.path.exists(out_dir):
            os.mkdir(out_dir)
        fnx = os.path.join(out_dir, "{}".format(h))
        nx.write_gpickle(graph, fnx + '.pz')
        print("[*] Saved {}\n".format(fnx))
    print("Done.")
async def put(self, token=''):
    """Store an uploaded file, deduplicating by (filename, sha256).

    If a document with the same name and content hash already exists,
    its id is reused; otherwise the file metadata is inserted into the
    database and the content is uploaded to AWS under the new key.
    Responds with ``{'key': <document id>}`` as JSON.

    :param token: unused here; kept for route signature compatibility.
    """
    upload = self.request.files['file'][0]
    file_name = upload['filename']
    file_content = upload['body']
    hash_str = get_sha256(file_content)

    db_client = MongoWrapper()
    file_doc = await db_client.get_file_by_name_and_hash(
        file_name, hash_str)
    if file_doc:
        # Already stored: reuse the existing document id.
        key = str(file_doc['_id'])
    else:
        # BUG FIX: 'key' was previously passed to AWSWrapper before it was
        # ever assigned, raising UnboundLocalError on every new upload.
        # Insert the DB record first to obtain the key, then upload.
        key = await db_client.insert_file(file_name, hash_str)
        # NOTE(review): 'loop' is not defined in this scope — presumably a
        # module-level event loop; confirm against the enclosing module.
        aws_client = AWSWrapper(key=key, file_name=file_name, loop=loop)
        await aws_client.upload_file(file_content)

    resp = {'key': key}
    self.write(escape.json_encode(resp))
def _is_blacklisted(self, username):
    """Return True when *username* is blacklisted for Twitter.

    The username is hashed before the lookup so the raw handle is
    never handed to the blacklist backend.

    :param: username (string) the username to check.
    :return: true if the username is blacklisted, false otherwise.
    """
    digest = utils.get_sha256(username)
    try:
        self.bl.is_blacklisted(
            digest, 'Twitter', self.bl_max_request, self.bl_wait_time)
    except blacklist.BlacklistError:
        return True
    return False
def _is_blacklisted(self, account):
    """Return True when *account* is blacklisted for XMPP.

    The account address is anonymized (hashed) before being checked.

    :param: account (string) the account address to check.
    :return: true if the address is blacklisted, false otherwise.
    """
    hashed = utils.get_sha256(account)
    try:
        self.bl.is_blacklisted(hashed, 'XMPP', self.bl_max_req,
                               self.bl_wait_time)
    except blacklist.BlacklistError:
        return True
    return False
def _is_blacklisted(self, account):
    """Check whether *account* appears on the XMPP blacklist.

    The address is hashed first so only its anonymized form reaches
    the blacklist backend.

    :param: account (string) the account address to check.
    :return: true if the address is blacklisted, false otherwise.
    """
    anon = utils.get_sha256(account)
    try:
        self.bl.is_blacklisted(anon, 'XMPP', self.bl_max_req, self.bl_wait_time)
        return False
    except blacklist.BlacklistError:
        return True
def process(self, event):
    """Handle a filesystem event: stage the new file in /tmp and upload it.

    Skips directories, vanished paths, dotfiles and empty files. The
    file is copied to ``/tmp/<name>.<random>`` (6-char alphanumeric
    suffix from a CSPRNG so concurrent events don't collide), hashed,
    and handed to ``utils.upload``.

    :param event: a watchdog-style filesystem event with
        ``src_path`` and ``is_directory`` attributes.
    """
    import shutil  # local import: portable file copy

    file_name = os.path.basename(event.src_path)
    # Guard clauses: ignore directories, vanished files, dotfiles and
    # empty files (replaces the original single compound condition).
    if event.is_directory or not os.path.exists(event.src_path):
        return
    if file_name.startswith('.') or os.path.getsize(event.src_path) == 0:
        return

    rand = ''.join(
        random.SystemRandom().choice(string.ascii_uppercase + string.digits)
        for _ in range(6))
    tmp_path = '/tmp/' + file_name + '.' + rand
    # Cleanup: copy with shutil instead of spawning 'cp' via subprocess;
    # the unused 'args' and 'p' locals are dropped.
    shutil.copy(event.src_path, tmp_path)
    sha256 = utils.get_sha256(tmp_path)
    # Parenthesized single-argument print is valid on Python 2 and 3
    # (the original bare print statement was Python-2-only).
    print('[!] Sending ' + event.src_path + '.' + rand +
          ' to Viper\n[!] sha256: ' + sha256)
    utils.upload(tmp_path)
def _is_blacklisted(self, account):
    """Return True when *account* is blacklisted for XMPP.

    The address is anonymized (hashed) before the check; the lookup
    and any blacklist hit are logged.

    :param: account (string) the account address to check.
    :return: true if the address is blacklisted, false otherwise.
    """
    anon_acc = utils.get_sha256(account)
    bl = blacklist.Blacklist(self.bl)
    self.log.debug("Checking if address %s is blacklisted" % anon_acc)
    try:
        bl.is_blacklisted(anon_acc, 'XMPP', self.bl_max_req,
                          self.bl_wait_time)
    except blacklist.BlacklistError as e:
        self.log.info("Blacklisted address %s. Reason: %s" % (anon_acc, e))
        return True
    return False
def _is_blacklisted(self, username):
    """Check whether *username* appears on the Twitter blacklist.

    Only the SHA-256 of the username is passed to the backend.

    :param: username (string) the username to check.
    :return: true if the username is blacklisted, false otherwise.
    """
    hashed = utils.get_sha256(username)
    try:
        self.bl.is_blacklisted(hashed, 'Twitter',
                               self.bl_max_request, self.bl_wait_time)
        return False
    except blacklist.BlacklistError:
        return True
def analyseAPK(apk_file):
    """Statically analyse an APK and collect security-relevant features.

    :param apk_file: path to the APK to analyse.
    :return: 4-tuple ``(sha256, permissions, sensitive_apis, tpls)``
        where ``sensitive_apis`` is the list of Pscout-listed APIs
        invoked anywhere in the dex code and ``tpls`` the detected
        third-party libraries.
    """
    digest = get_sha256(apk_file)
    apk, dex_list, analysis = AnalyzeAPK(apk_file)

    # ============== extract permissions ===============
    permissions = apk.get_permissions()

    # ============== extract sensitiveApis ===============
    # Union of all Pscout APIs invoked across every basic block.
    sensitive_apis = set()
    for dex in dex_list:
        for method in dex.get_methods():
            method_analysis = analysis.get_method(method)
            for block in method_analysis.get_basic_blocks().get():
                code = BasicBlockAttrBuilder.GetBasicBlockDalvikCode(block)
                sensitive_apis.update(
                    BasicBlockAttrBuilder.GetInvokedPscoutApis(code))

    # ============== extract third-party-libraries ===========
    tpls = getThirdPartyLibrary(apk_file, digest)
    return digest, permissions, list(sensitive_apis), tpls
def create_hash(self):
    """Recompute this block's hash from its contents and store it.

    The digest covers the block number, nonce, previous hash,
    serialized transactions and timestamp, concatenated in that order.
    """
    fields = (self.block_no, self.nonce, self.previous_hash,
              self.get_transaction_str(), self.timestamp)
    self.hash = get_sha256(''.join(str(field) for field in fields))
def process_email(self, raw_msg):
    """Process the email received.

    Create an email object from the string received. The processing
    flow is as following:

        - check for blacklisted address.
        - parse the email.
        - check the type of request.
        - send reply.

    :param: raw_msg (string) the email received.

    :raise: InternalError if something goes wrong while asking for the
            links to the Core module.
    """
    self.log.debug("Processing email")
    parsed_msg = email.message_from_string(raw_msg)
    content = self._get_content(parsed_msg)
    from_addr = parsed_msg['From']
    to_addr = parsed_msg['To']
    bogus_request = False
    status = ''
    req = None
    try:
        # two ways for a request to be bogus: address malformed or
        # blacklisted
        try:
            self.log.debug("Normalizing address...")
            norm_from_addr = self._get_normalized_address(from_addr)
        except AddressError as e:
            bogus_request = True
            self.log.info('invalid; none; none')
            # NOTE(review): norm_from_addr is left unbound on this path,
            # so the 'if norm_from_addr:' test below would raise
            # NameError for a malformed address — confirm intended.

        if norm_from_addr:
            # Only the hashed address is handed to the blacklist check.
            anon_addr = utils.get_sha256(norm_from_addr)
            if self._is_blacklisted(anon_addr):
                bogus_request = True
                self.log.info('blacklist; none; none')

        if not bogus_request:
            # try to figure out what the user is asking
            self.log.debug("Request seems legit; parsing it...")
            req = self._parse_email(content, to_addr)
            # our address should have the locale requested
            our_addr = "gettor+%s@%s" % (req['lc'], self.our_domain)

            # possible options: help, links, mirrors
            if req['type'] == 'help':
                self.log.debug("Trying to send help...")
                self.log.info('help; none; %s' % req['lc'])
                # make sure we can send emails
                try:
                    self._send_help('en', our_addr, norm_from_addr)
                except SendEmailError as e:
                    self.log.debug("FAILED: %s" % str(e))
                    raise InternalError("Something's wrong with the SMTP "
                                        "server: %s" % str(e))
            elif req['type'] == 'mirrors':
                self.log.debug("Trying to send the mirrors...")
                self.log.info('mirrors; none; %s' % req['lc'])
                # make sure we can send emails
                try:
                    self._send_mirrors('en', our_addr, norm_from_addr)
                except SendEmailError as e:
                    self.log.debug("FAILED: %s" % str(e))
                    raise SendEmailError("Something's wrong with the SMTP "
                                         "server: %s" % str(e))
            elif req['type'] == 'links':
                self.log.debug("Trying to obtain the links...")
                self.log.info('links; %s; %s' % (req['os'], req['lc']))
                try:
                    links = self.core.get_links(
                        'SMTP', req['os'], req['lc']
                    )
                # if core fails, we fail too
                except (core.InternalError, core.ConfigError) as e:
                    self.log.debug("FAILED: %s" % str(e))
                    # something went wrong with the core
                    raise InternalError("Error obtaining the links")
                # make sure we can send emails
                self.log.debug("Trying to send the links...")
                try:
                    self._send_links(links, req['lc'], req['os'], our_addr,
                                     norm_from_addr)
                except SendEmailError as e:
                    self.log.debug("FAILED: %s" % str(e))
                    raise SendEmailError("Something's wrong with the SMTP "
                                         "server: %s" % str(e))
    finally:
        self.log.debug("Request processed")
def process_email(self, raw_msg):
    """Process the email received.

    Create an email object from the string received. The processing
    flow is as following:

        - Check for blacklisted address.
        - Parse the email.
        - Check the type of request.
        - Send reply.

    :param: raw_msg (string) the email received.

    :raise: InternalError if something goes wrong while asking for the
            links to the Core module.
    """
    parsed_msg = email.message_from_string(raw_msg)
    content = self._get_content(parsed_msg)
    from_addr = parsed_msg['From']
    to_addr = parsed_msg['To']
    bogus_request = False
    logfile = ''
    status = ''
    req = None
    try:
        # two ways for a request to be bogus: address malformed or
        # blacklisted
        try:
            norm_from_addr = self._get_normalized_address(from_addr)
        except AddressError as e:
            status = 'malformed'
            bogus_request = True
            # it might be interesting to know what triggered this
            logfile = self._log_email('malformed', content)
            # NOTE(review): norm_from_addr stays unbound on this path, so
            # the 'if norm_from_addr:' below would raise NameError —
            # confirm intended.

        if norm_from_addr:
            # Only the hashed address is handed to the blacklist check.
            anon_addr = utils.get_sha256(norm_from_addr)
            if self._is_blacklisted(anon_addr):
                status = 'blacklisted'
                bogus_request = True
                # it might be interesting to know extra info
                logfile = self._log_email(anon_addr, content)

        if not bogus_request:
            # try to figure out what the user is asking
            req = self._parse_email(content, to_addr)
            # our address should have the locale requested
            our_addr = "gettor+%s@%s" % (req['lc'], self.our_domain)

            # two possible options: asking for help or for the links
            self.log.info("New request for %s" % req['type'])
            if req['type'] == 'help':
                # make sure we can send emails
                try:
                    self._send_help(req['lc'], our_addr, norm_from_addr)
                except SendEmailError as e:
                    status = 'internal_error'
                    raise InternalError("Something's wrong with the SMTP "
                                        "server: %s" % str(e))
            elif req['type'] == 'links':
                try:
                    self.log.info("Asking core for links in %s for %s" %
                                  (req['lc'], req['os']))
                    links = self.core.get_links('SMTP', req['os'],
                                                req['lc'])
                except core.UnsupportedLocaleError as e:
                    self.log.info("Request for unsupported locale: %s (%s)"
                                  % (req['lc'], str(e)))
                    # if we got here, the address of the sender should
                    # be valid so we send him/her a message about the
                    # unsupported locale
                    status = 'unsupported_lc'
                    self._send_unsupported_lc(req['lc'], req['os'],
                                              our_addr, norm_from_addr)
                    return
                # if core fails, we fail too
                except (core.InternalError, core.ConfigurationError) as e:
                    status = 'core_error'
                    self.log.error("Something went wrong with Core: %s" %
                                   str(e))
                    raise InternalError("Error obtaining the links.")
                # make sure we can send emails
                try:
                    self._send_links(links, req['lc'], req['os'], our_addr,
                                     norm_from_addr, req['pt'])
                except SendEmailError as e:
                    status = 'internal_error'
                    raise SendEmailError("Something's wrong with the SMTP "
                                         "server: %s" % str(e))
            status = 'success'
    finally:
        # keep stats
        if req:
            self.core.add_request_to_db('SMTP', req['type'], req['os'],
                                        req['lc'], req['pt'], status,
                                        logfile)
        else:
            # invalid request, so no info about it
            # logfiles were created for this
            self.core.add_request_to_db('SMTP', '', '', '', '', status,
                                        logfile)
def process_objects(self, expanded_objects=None):
    """
    process_objects(expanded_objects)

    Given a list of objects, determines if uploadable (binary), and
    then creates, per object, a dictionary of:

        sha1_hash
        sha256_hash
        modified_time
        file_size

    Hashes are only recomputed on first upload, when the cli flag
    --checksum forces it, or when modified time and file size changed;
    otherwise the cached values from the '.pitem' sidecar are reused.

    :param expanded_objects: paths to candidate objects; defaults to an
        empty list (the previous mutable default ``[]`` was replaced
        with the None sentinel).
    :return: dict mapping object name -> metadata dict.
    """
    if expanded_objects is None:
        expanded_objects = []
    objects_metadata = {}
    for obj in expanded_objects:
        # Process if object is uploadable
        if self.uploadable_object(obj):
            # Object name in metadata file. Replace \\ with / to remain
            # consistent across platforms
            object_name = utils.unix_path(
                os.path.relpath(obj, self.paths['shelves']))
            # Determine paths
            object_path = os.path.abspath(obj)
            object_metadata_file = '%s.pitem' % object_path
            # Add object to gitignore
            self.add_object_to_gitignore(obj)
            object_mtime = utils.get_modified_time(obj)
            object_file_size = utils.get_file_size(obj)
            # Use cached checksum since checksum hashing is cpu intensive
            # and file size and modified times are quicker. Checksums are
            # forced using cli flag --checksum.
            if (not self.options['checksum']
                    and os.path.exists(object_metadata_file)):
                with open(object_metadata_file) as json_file:
                    cached_metadata = json.load(json_file)
                    # Use cached hash if filesize and mtime are the same
                    if (object_file_size ==
                            cached_metadata[object_name]['file_size']
                            and object_mtime ==
                            cached_metadata[object_name]['modified_time']):
                        object_sha1_hash = cached_metadata[object_name][
                            'sha1_hash']
                        # BUG FIX: this key was checked as 'sha26_hash'
                        # (typo), so the cached sha256 was never reused.
                        if 'sha256_hash' in cached_metadata[object_name]:
                            object_sha256_hash = cached_metadata[object_name][
                                'sha256_hash']
                        else:
                            object_sha256_hash = utils.get_sha256(obj)
                    else:
                        object_sha1_hash = utils.get_sha1(obj)
                        object_sha256_hash = utils.get_sha256(obj)
            else:
                # Generate hashes if cached metadata is not present
                object_sha1_hash = utils.get_sha1(obj)
                object_sha256_hash = utils.get_sha256(obj)
            # TODO remove sha1 check as its not needed.
            # Add object to metadata dictionary
            objects_metadata[object_name] = {
                'sha1_hash': object_sha1_hash,
                'sha256_hash': object_sha256_hash,
                'modified_time': object_mtime,
                'file_size': object_file_size,
            }
    return objects_metadata
def process_objects(self, expanded_objects=None):
    """
    process_objects(expanded_objects)

    Given a list of objects, determines if uploadable (binary), and
    then creates, per object, a dictionary of:

        sha1_hash
        sha256_hash
        modified_time
        file_size

    Hashes are only recomputed on first upload, when the cli flag
    --checksum forces it, or when modified time and file size changed;
    otherwise the cached values from the '.pitem' sidecar are reused.

    :param expanded_objects: paths to candidate objects; defaults to an
        empty list (the previous mutable default ``[]`` was replaced
        with the None sentinel).
    :return: dict mapping object name -> metadata dict.
    """
    if expanded_objects is None:
        expanded_objects = []
    objects_metadata = {}
    for obj in expanded_objects:
        # Process if object is uploadable
        if self.uploadable_object(obj):
            # Object name in metadata file. Replace \\ with / to remain
            # consistent across platforms
            object_name = utils.unix_path(
                os.path.relpath(obj, self.paths['shelves'])
            )
            # Determine paths
            object_path = os.path.abspath(obj)
            object_metadata_file = '%s.pitem' % object_path
            # Add object to gitignore
            self.add_object_to_gitignore(obj)
            object_mtime = utils.get_modified_time(obj)
            object_file_size = utils.get_file_size(obj)
            # Use cached checksum since checksum hashing is cpu intensive
            # and file size and modified times are quicker. Checksums are
            # forced using cli flag --checksum.
            if (
                not self.options['checksum']
                and os.path.exists(object_metadata_file)
            ):
                with open(object_metadata_file) as json_file:
                    cached_metadata = json.load(json_file)
                    # Use cached hash if filesize and mtime are the same
                    if (
                        object_file_size == cached_metadata[object_name]['file_size']
                        and object_mtime == cached_metadata[object_name]['modified_time']
                    ):
                        object_sha1_hash = cached_metadata[object_name]['sha1_hash']
                        # BUG FIX: this key was checked as 'sha26_hash'
                        # (typo), so the cached sha256 was never reused.
                        if 'sha256_hash' in cached_metadata[object_name]:
                            object_sha256_hash = cached_metadata[object_name]['sha256_hash']
                        else:
                            object_sha256_hash = utils.get_sha256(obj)
                    else:
                        object_sha1_hash = utils.get_sha1(obj)
                        object_sha256_hash = utils.get_sha256(obj)
            else:
                # Generate hashes if cached metadata is not present
                object_sha1_hash = utils.get_sha1(obj)
                object_sha256_hash = utils.get_sha256(obj)
            # TODO remove sha1 check as its not needed.
            # Add object to metadata dictionary
            objects_metadata[object_name] = {
                'sha1_hash': object_sha1_hash,
                'sha256_hash': object_sha256_hash,
                'modified_time': object_mtime,
                'file_size': object_file_size,
            }
    return objects_metadata