def policy_enumerate(args, access_key, secret_key, token):
    """Enumerate and print the permissions attached to the instance role.

    Resolves the instance-profile role through the EC2 metadata service,
    lists both attached (managed) and inline role policies, expands them
    into (service, action, resource, effect, policy name) rows and prints
    the filtered result as a table.

    :param args: parsed CLI arguments, forwarded to filter_results()
    :param access_key: AWS access key ID
    :param secret_key: AWS secret access key
    :param token: AWS session token
    """
    session = boto3.Session(aws_access_key_id=access_key,
                            aws_secret_access_key=secret_key,
                            aws_session_token=token)
    iam = session.client('iam')
    iamres = session.resource('iam')
    # Ask the instance metadata service which role this instance runs
    # under.  The timeout keeps the call from hanging forever when the
    # code is not running on EC2 (link-local address unreachable).
    r = requests.get('http://169.254.169.254/latest/meta-data/iam/info',
                     timeout=5)
    role_arn = json.loads(r.text)['InstanceProfileArn']
    # ARN format: arn:aws:iam::<account>:instance-profile/<name>
    role = role_arn.split('/')[1]
    response1 = None
    response2 = None
    try:
        response1 = iam.list_attached_role_policies(RoleName=role)
        response2 = iam.list_role_policies(RoleName=role)
    except ClientError as error:
        common.exception(error, 'List role policy failed.')
    print('\nThe following permissions belong to the role {}: \n'.format(role))
    values = []
    values += attached_policy_enum(iam, iamres, response1)
    values += managed_policy_enum(iamres, response2, role)
    values_to_print = filter_results(values, args)
    common.print_table(
        values_to_print,
        ["Service", "Action", "Resource", "Effect", "Policy name"])
def attached_policy_enum(iam, iamres, response1):
    """Expand attached (managed) role policies into permission rows.

    For every policy in response1['AttachedPolicies'], fetches the default
    policy version and flattens each statement into one row per action via
    compose_value().

    :param iam: boto3 IAM client
    :param iamres: boto3 IAM resource
    :param response1: result of iam.list_attached_role_policies()
    :return: list of [service, action, resource, effect, policy name] rows
    """
    values = []
    for attached_policy in response1['AttachedPolicies']:
        role_policy1 = iamres.Policy(attached_policy['PolicyArn'])
        policy_version = None
        try:
            policy = iam.get_policy(PolicyArn=role_policy1.arn)
            policy_version = iam.get_policy_version(
                PolicyArn=role_policy1.arn,
                VersionId=policy['Policy']['DefaultVersionId'])
        except ClientError as error:
            common.exception(error, 'Get role policy failed.')
        policy_name = role_policy1.arn.split('/')[-1]
        statements = policy_version['PolicyVersion']['Document']['Statement']
        # A policy document with a single statement may deliver 'Statement'
        # as a plain dict instead of a list; normalize so the loop below
        # never iterates over dict keys.
        if isinstance(statements, dict):
            statements = [statements]
        for statement in statements:
            resource = statement['Resource']
            effect = statement['Effect']
            actions = statement['Action']
            # 'Action' is either a single string or a list of strings.
            if not isinstance(actions, list):
                actions = [actions]
            for action in actions:
                values.append(
                    compose_value(action, resource, effect, policy_name))
    return values
def __encrypt_file(self):
    """
        Encrypt the input file using a freshly built or existing key file.
    """
    input_size = common.get_file_size(self.__file_input)
    output_size = input_size + self.__obfuscate_enc

    # The reverse-byte window cannot exceed the data we actually have.
    if self.__reverse_bytes > input_size:
        common.exception("The reverse byte value must not be greater " \
                         "than the input file size.")

    if self.__existing_key:
        key_size = common.get_file_size(self.__file_key)
        if (key_size - self.__obfuscate_key) < \
                (input_size - self.__obfuscate_enc):
            common.exception("The given key file is too small.")
    else:
        key_size = input_size + self.__obfuscate_key

    common.build_task_file(self.__task_id, self.__file_input, input_size,
                           self.__file_key, key_size, self.__file_output,
                           output_size, "encryption")

    keyfile.build_file_key(self.__file_key, input_size, self.__buffer_size,
                           self.__obfuscate_key, self.__existing_key,
                           self.__dev_random, self.__fortuna,
                           self.__overwrite, 1)

    self.__file_size = input_size
    self.__erfr_core()

    # Optionally pad the ciphertext with random bytes to hide its size.
    if int(self.__obfuscate_enc) > 0:
        obfuscator.add_random_bytes(self.__file_output, self.__buffer_size,
                                    self.__obfuscate_enc, self.__dev_random,
                                    self.__fortuna)
def dependency(self, arg_name, arg_value, dependency):
    """
        Check the dependency of a command-line argument.

        Raises (via common.exception) when 'dependency' is set but the
        dependent argument value is missing or empty.
    """
    # Does not make sense, yet.
    if dependency is not None:
        # Identity comparison with None is the Python idiom; the previous
        # '== None' form invoked __eq__ unnecessarily.
        if arg_value is None or str(arg_value) == "":
            common.exception("The '%s' argument depends on '%s'." % \
                             (arg_name, dependency))
def base64_key(task_id, base64_string):
    """
        Convert a Base64 key string into a temporary key file.

        Writes the decoded bytes to "erfr_<task id>.key" inside the system
        temp directory and returns that file path.
    """
    if task_id is None:
        common.exception("A task ID is required to convert a Base64 string " \
                         "into a temporary key file.")
    base64_string = str(base64_string)
    if base64_string == "":
        common.exception("The Base64 string must not be empty.")
    try:
        # binascii.Error (raised on malformed input) is a ValueError
        # subclass, so invalid characters are caught here.
        data_key = bytearray(base64.b64decode(base64_string))
    except ValueError:
        common.exception("The given Base64 key string contains invalid " \
                         "characters.")
    except Exception:
        common.exception("The given Base64 key string is not valid.")
    file_key = os.path.join(tempfile.gettempdir(),
                            "erfr_" + str(task_id).rjust(4, "0") + ".key")
    # Context manager closes the handle even if the write fails.
    with open(file_key, "wb") as fh_key:
        fh_key.write(data_key)
    return file_key
def get_prng(dev_random=False, fortuna=False, fortuna_seed_bytes=None,
             fortuna_reseed_period=None):
    """
        Create an instance of the requested pseudo-random number generator.

        :param dev_random:            read entropy from /dev/random instead
                                      of the default urandom source
        :param fortuna:               wrap the base PRNG in a Fortuna CSPRNG
        :param fortuna_seed_bytes:    seed size (config fallback, then 16)
        :param fortuna_reseed_period: reseed period (config fallback, then 8)
    """
    fallback = bool(
        int(
            common.global_config(["KeyGenerator", "General"], ["fallback"],
                                 "0")))
    prng = None
    try:
        if dev_random:
            prng = __DevRandom()
        else:
            prng = __Urandom()
        if fortuna:
            if fortuna_seed_bytes is None:
                try:
                    fortuna_seed_bytes = \
                        int(common.global_config(["Fortuna"],
                                                 ["seed_bytes"], 16))
                except Exception:
                    fortuna_seed_bytes = 16
            if fortuna_reseed_period is None:
                try:
                    fortuna_reseed_period = \
                        int(common.global_config(["Fortuna"],
                                                 ["reseed_period"], 8))
                except Exception:
                    fortuna_reseed_period = 8
            # A seed smaller than one byte makes no sense.
            if fortuna_seed_bytes < 1:
                fortuna_seed_bytes = 1
            return __Fortuna(prng, fortuna_seed_bytes, fortuna_reseed_period)
        else:
            # The parameters for the seed bytes as well as the reseed period
            # are not relevant here and will be ignored.
            return prng
    except Exception as ex:
        if fallback:
            return __Urandom()
        else:
            # str(ex) works on Python 2 and 3; 'ex.message' was removed
            # in Python 3.
            common.exception(str(ex))
def search_episode(info):
    """
    Search the provider site for a torrent matching the given episode.

    Returns a list of result dicts (name/uri/seeds/peers); empty when the
    search fails or nothing matches.
    """
    # NOTE(review): calling common.exception() on the title looks like a
    # mis-named helper (a title-normalizing function is expected here) --
    # verify against the 'common' module.
    info['title'] = common.exception(info['title'])
    if info['absolute_number'] == 0:
        query = info['title'] + ' %sx%02d'% (info['season'], info['episode']) # define query
    else:
        query = info['title'] + ' %02d' % info['absolute_number'] # define query anime
    query = query.encode('utf-8')
    filters.title = query
    if settings.time_noti > 0:
        provider.notify(message="Searching: " + query.title() + '...',
                        header=None, time=settings.time_noti,
                        image=settings.icon)
    query = provider.quote_plus(query)
    url_search = "%s/buscar.php?busqueda=%s" % (settings.url,query)
    provider.log.info(url_search)
    # Bug fix: 'results' was only assigned inside the success branch, so a
    # failed browser.open() raised NameError at 'return results'.
    results = []
    if browser.open(url_search):
        data = browser.content
        search_serie = re.search('/series/(.*?)/" title', data)
        if search_serie is not None:
            # Jump to the dedicated series page and search there instead.
            url_search = '%s/series/%s/' % (settings.url,
                                            search_serie.group(1))
            browser.open(url_search)
            data = browser.content
        cont = 0
        lname = re.search(filters.title.replace(' ', '.') + '(.*?).torrent',
                          data, re.IGNORECASE)
        if lname is not None:
            torrent = '%s/torrents_tor/%s' % (settings.url, lname.group())
            name = lname.group().replace('.torrent', '') + ' S%02dE%02d'% (info['season'], info['episode']) + ' - ' + settings.name_provider #find name in the torrent
            results.append({"name": name, "uri": torrent, "seeds": 10000, "peers": 5000}) # return le torrent
            cont = 1
        provider.log.info('>>>>>> ' + str(cont) + ' torrents sent to Pulsar<<<<<<<')
    else:
        provider.log.error('>>>>>>>%s<<<<<<<' % browser.status)
        provider.notify(message=browser.status, header=None, time=5000,
                        image=settings.icon)
    return results
def split_key(file_input, parts, buffer_size=4096, overwrite=False):
    """
        Split a key file in a user-defined number of parts.

        Writes the parts as "<file_input>.001" ... "<file_input>.NNN"; the
        last part also receives the division remainder.
    """
    if not overwrite:
        pv.path(file_input, "key", True, True)
        pv.path(file_input + ".001", "key part", True, False)
    else:
        pv.path(file_input, "key", True, True)
    buffer_size = int(buffer_size)
    parts = int(parts)
    if parts < 2:
        common.exception("The number of key parts must be greater than 1.")
    elif parts > 999:
        common.exception("The number of key parts must be less than 1000.")
    file_size = common.get_file_size(file_input)
    part_id = 0
    part_size = int(file_size / parts)
    part_last = file_size - (part_size * (parts - 1))
    # Context managers guarantee the handles are closed even when a write
    # fails part-way through (the original left them open on error).
    with open(file_input, "rb") as fh_input:
        for part in range(parts):
            part_id += 1
            file_key = file_input + "." + str(part_id).rjust(3, "0")
            if part_id < parts:
                file_size = int(part_size)
            else:
                file_size = int(part_last)
            with open(file_key, "wb") as fh_output:
                byte_blocks = int(file_size / buffer_size)
                byte_remainder = file_size % buffer_size
                for block in range(byte_blocks):
                    fh_output.write(fh_input.read(buffer_size))
                if byte_remainder > 0:
                    fh_output.write(fh_input.read(byte_remainder))
def __decrypt_file(self):
    """
        Decrypt the input file with the configured key file.
    """
    input_size = common.get_file_size(self.__file_input)
    key_size = common.get_file_size(self.__file_key)
    output_size = input_size - self.__obfuscate_enc

    # The reverse-byte window cannot exceed the data we actually have.
    if self.__reverse_bytes > input_size:
        common.exception("The reverse byte value must not be greater " \
                         "than the input file size.")

    common.build_task_file(self.__task_id, self.__file_input, input_size,
                           self.__file_key, key_size, self.__file_output,
                           output_size, "decryption")

    self.__file_size = output_size
    self.__erfr_core()
def scan_queue(queue_name, sqs):
    """
    Drain messages from the named SQS queue and return their bodies.

    Stops once a receive call comes back empty or more than 100 bodies
    have been collected.
    """
    queue = None
    try:
        queue = sqs.create_queue(QueueName=queue_name)
    except EndpointConnectionError as error:
        print('The requested queue could not be reached. \n{}'.format(error))
        sys.exit()
    except ClientError as error:
        common.exception(error,
                         'Queue could not be reached. \n{}'.format(error))
    # get messages
    msgs = []
    while True:
        batch = queue.receive_messages(VisibilityTimeout=120,
                                       WaitTimeSeconds=20)
        msgs.extend(message.body for message in batch)
        if not batch or len(msgs) > 100:
            break
    return msgs
def search_episode(info):
    """
    Search the provider site for a torrent matching the given episode.

    Returns a list of result dicts (name/uri/seeds/peers); empty when the
    search fails or nothing matches.
    """
    # NOTE(review): calling common.exception() on the title looks like a
    # mis-named helper (a title-normalizing function is expected here) --
    # verify against the 'common' module.
    info['title'] = common.exception(info['title'])
    if info['absolute_number'] == 0:
        query = info['title'] + ' %sx%02d' % (info['season'], info['episode']
                                              )  # define query
    else:
        query = info['title'] + ' %02d' % info[
            'absolute_number']  # define query anime
    query = query.encode('utf-8')
    filters.title = query
    if settings.time_noti > 0:
        provider.notify(message="Searching: " + query.title() + '...',
                        header=None,
                        time=settings.time_noti,
                        image=settings.icon)
    query = provider.quote_plus(query)
    url_search = "%s/buscar.php?busqueda=%s" % (settings.url, query)
    provider.log.info(url_search)
    # Bug fix: 'results' was only assigned inside the success branch, so a
    # failed browser.open() raised NameError at 'return results'.
    results = []
    if browser.open(url_search):
        data = browser.content
        search_serie = re.search('/series/(.*?)/" title', data)
        if search_serie is not None:
            # Jump to the dedicated series page and search there instead.
            url_search = '%s/series/%s/' % (settings.url,
                                            search_serie.group(1))
            browser.open(url_search)
            data = browser.content
        cont = 0
        lname = re.search(
            filters.title.replace(' ', '.') + '(.*?).torrent', data,
            re.IGNORECASE)
        if lname is not None:
            torrent = '%s/torrents_tor/%s' % (settings.url, lname.group())
            name = lname.group(
            ).replace('.torrent', '') + ' S%02dE%02d' % (
                info['season'], info['episode']
            ) + ' - ' + settings.name_provider  #find name in the torrent
            results.append({
                "name": name,
                "uri": torrent,
                "seeds": 10000,
                "peers": 5000
            })  # return le torrent
            cont = 1
        provider.log.info('>>>>>> ' + str(cont) +
                          ' torrents sent to Pulsar<<<<<<<')
    else:
        provider.log.error('>>>>>>>%s<<<<<<<' % browser.status)
        provider.notify(message=browser.status,
                        header=None,
                        time=5000,
                        image=settings.icon)
    return results
def enum_resources(arn, services):
    """
    Enumerate all AWS resources reachable via skew for the given services.

    :param arn: skew ARN object; its service pattern is rewritten per service
    :param services: iterable of AWS service names to scan
    :return: list of [service, region, resource name] rows
    """
    print('Enumerating all resources in the following services: ' +
          ', '.join(services) + '\n')
    values = []
    for service in services:
        # Point the shared ARN template at the next service.
        arn.service.pattern = service
        try:
            instances = skew.scan('{}/*'.format(arn))
            if instances:
                for instance in instances:
                    # ARN layout: arn:aws:<service>:<region>:...:<type>/<name>
                    region = str(instance).split(':')[3]
                    resource_name = str(instance).split('/')[1]
                    values.append([service, region, resource_name])
        except ClientError as error:
            resp = error.response['Error']['Code']
            if resp == 'ExpiredTokenException':
                print('AWS token has expired: \n{}'.format(error))
            else:
                common.exception(error, 'Failed to access resource.')
            # Any ClientError aborts the whole enumeration.
            sys.exit()
    return values
def scan_table(table, dynamo):
    """
    Scan a DynamoDB table and return every item, following pagination.
    """
    try:
        response = dynamo.scan(TableName=table)
    except EndpointConnectionError as error:
        print('The requested table could not be reached. \n{}'.format(error))
        sys.exit()
    except ClientError as error:
        if error.response['Error']['Code'] == 'ResourceNotFoundException':
            print('Requested table not found.')
            print(error)
            sys.exit()
        else:
            common.exception(error, 'Scan dynamodb table failed.')
    print('Scanning the table...')
    items = response['Items']
    # A single scan call returns at most one page; keep following the
    # LastEvaluatedKey cursor until the final page has been fetched.
    while 'LastEvaluatedKey' in response:
        response = dynamo.scan(TableName=table,
                               ExclusiveStartKey=response['LastEvaluatedKey'])
        items.extend(response['Items'])
    return items
def list_and_save(logs_client, start_time, stop_time):
    """
    Download CloudWatch log events for every log group/stream to files.

    :param logs_client: boto3 CloudWatch Logs client
    :param start_time: earliest event timestamp (ms epoch) to fetch
    :param stop_time: latest event timestamp (ms epoch) to fetch
    :return: (list of written file names, set of log group names)
    """
    groups = []
    try:
        groups = logs_client.describe_log_groups()['logGroups']
    except EndpointConnectionError as error:
        print('Error: {}'.format(error))
        sys.exit()
    except ClientError as error:
        common.exception(error, 'Describe log groups failed.')
    values = []
    filenames = []
    # Raw string fixes the invalid '\w' escape warning; compiled once
    # instead of re-parsing the pattern for every stream.
    sanitize = re.compile(r'[^\w\s-]')
    for group in groups:
        group_name = group['logGroupName']
        streams = logs_client.describe_log_streams(
            logGroupName=group_name)['logStreams']
        for stream in streams:
            stream_name = stream['logStreamName']
            values.append(str(group_name))
            log_events = logs_client.get_log_events(logGroupName=group_name,
                                                    logStreamName=stream_name,
                                                    startTime=start_time,
                                                    endTime=stop_time)
            events = log_events['events']
            # Strip characters that are unsafe in file names.
            groupname = sanitize.sub('', group_name)
            streamname = sanitize.sub('', stream_name)
            gr_st = groupname + '--' + streamname
            filenames += write_events(events, gr_st)
    print('Files downloaded to $currentpath/cw_logs folder.')
    values = set(values)
    return filenames, values
def get_bytes(self, amount):
    """
        Get a certain amount of pseudo-random bytes.

        Reseeds the Fortuna generator on first use and whenever the
        configured reseed period has elapsed.
    """
    pv.intvalue(amount, "random bytes", True, False, False)
    amount = int(amount)
    if self.__fortuna_reseed_count == 0 or \
            self.__fortuna_reseed_count >= self.__fortuna_reseed_period:
        try:
            self.__fortuna.reseed(
                self.__randgen.get_bytes(self.__fortuna_seed_bytes))
        except TypeError:
            common.exception("Version mismatch?")
    # Bug fix: the original mutated the (constant) reseed *period* instead
    # of advancing the reseed *counter*, so the generator reseeded on every
    # call while the period grew without bound.
    if self.__fortuna_reseed_count >= self.__fortuna_reseed_period:
        self.__fortuna_reseed_count = 1
    else:
        self.__fortuna_reseed_count += 1
    bytes_random = \
        bytearray(b"" + self.__fortuna.pseudo_random_data(amount))
    return bytes_random
def __init__(self, prng, seed_bytes, reseed_period):
    """
        Set up the Fortuna CSPRNG (PyCrypto) seeded by the given base PRNG.

        :param prng:          base generator used to produce seed material
        :param seed_bytes:    number of seed bytes per reseed
        :param reseed_period: number of get_bytes() calls between reseeds
    """
    try:
        import Crypto
        self.__crypto_version = "%s.%s.%s" % (Crypto.version_info[0],
                                              Crypto.version_info[1],
                                              Crypto.version_info[2])
    except ImportError:
        common.exception("The PyCrypto library does not seem to be " + \
                         "installed. Due to this, the Fortuna pseudo-" + \
                         "random number generator is unavailable.")
    try:
        from Crypto import Random
        self.__fortuna = Random.Fortuna.FortunaGenerator.AESGenerator()
    except ImportError:
        common.exception("The PyCrypto library seems to be " + \
                         "installed, but certain components cannot " + \
                         "be accessed. Please ensure that the " + \
                         "PyCrypto library (version 2.1.0 or higher) " + \
                         "has been installed properly.")
    self.__fortuna_seed_bytes = int(seed_bytes)
    self.__fortuna_reseed_period = int(reseed_period)
    # Bug fix: the reseed counter is read by get_bytes() but was never
    # initialised here, causing an AttributeError on the first call.
    self.__fortuna_reseed_count = 0
    self.__randgen = prng
def search_episode(info):
    """
    Search the provider site for a torrent matching the given episode.

    Returns a list of result dicts (name/uri/seeds/peers); empty when the
    search fails or nothing matches.
    """
    # NOTE(review): calling common.exception() on the title looks like a
    # mis-named helper (a title-normalizing function is expected here) --
    # verify against the 'common' module.
    info["title"] = common.exception(info["title"])
    if info["absolute_number"] == 0:
        query = info["title"] + " %sx%02d" % (info["season"], info["episode"])  # define query
    else:
        query = info["title"] + " %02d" % info["absolute_number"]  # define query anime
    query = query.encode("utf-8")
    filters.title = query
    if settings.time_noti > 0:
        provider.notify(
            message="Searching: " + query.title() + "...",
            header=None,
            time=settings.time_noti,
            image=settings.icon,
        )
    query = provider.quote_plus(query)
    url_search = "%s/buscar.php?busqueda=%s" % (settings.url, query)
    provider.log.info(url_search)
    # Bug fix: 'results' was only assigned inside the success branch, so a
    # failed browser.open() raised NameError at 'return results'.
    results = []
    if browser.open(url_search):
        data = browser.content
        search_serie = re.search('/series/(.*?)/" title', data)
        if search_serie is not None:
            # Jump to the dedicated series page and search there instead.
            url_search = "%s/series/%s/" % (settings.url, search_serie.group(1))
            browser.open(url_search)
            data = browser.content
        cont = 0
        lname = re.search(filters.title.replace(" ", ".") + "(.*?).torrent", data, re.IGNORECASE)
        if lname is not None:
            torrent = "%s/torrents_tor/%s" % (settings.url, lname.group())
            name = (
                lname.group().replace(".torrent", "")
                + " S%02dE%02d" % (info["season"], info["episode"])
                + " - "
                + settings.name_provider
            )  # find name in the torrent
            results.append({"name": name, "uri": torrent, "seeds": 10000, "peers": 5000})  # return le torrent
            cont = 1
        provider.log.info(">>>>>> " + str(cont) + " torrents sent to Pulsar<<<<<<<")
    else:
        provider.log.error(">>>>>>>%s<<<<<<<" % browser.status)
        provider.notify(message=browser.status, header=None, time=5000, image=settings.icon)
    return results
def build_file_key(file_path, file_size, buffer_size=4096, bytes_random=0,
                   use_existing_key=False, dev_random=False, fortuna=False,
                   overwrite=False, parts=1):
    """
        Build a key file which contains the random values which are
        required to encrypt the input file.

        When parts > 1 the key is written as "<file_path>.001" ... NNN,
        with the last part receiving the division remainder.
    """
    delay = common.get_delay()
    if use_existing_key:
        pv.path(file_path, "key", True, True)
    else:
        if not overwrite:
            pv.path(file_path, "key", True, False)
            pv.path(file_path + ".001", "key part", True, False)
    file_path = os.path.abspath(file_path)
    pv.intvalue(file_size, "key file size", True, False, False)
    pv.intvalue(buffer_size, "buffer size", True, False, False)
    pv.intvalue(bytes_random, "random bytes", True, True, False)
    pv.intvalue(parts, "key file parts", True, False, False)
    file_size = int(file_size)
    buffer_size = int(buffer_size)
    bytes_random = int(bytes_random)
    parts = int(parts)
    if parts > (file_size + bytes_random):
        common.exception("The file is too small for given number of key " \
                         "parts.")
    elif parts > 999:
        common.exception("The number of key parts must be less than 1000.")
    if not use_existing_key:
        key_path = file_path
        part_id = 0
        part_temp = int(file_size + bytes_random)
        # Bug fix: '//' keeps the part sizes integral; plain '/' yields a
        # float under Python 3 (sibling split_key() already truncates).
        part_size = part_temp // parts
        part_last = part_temp - (part_size * (parts - 1))
        for part in range(parts):
            if parts > 1:
                part_id += 1
                key_path = file_path + "." + str(part_id).rjust(3, "0")
                if part_id < parts:
                    file_size = part_size
                else:
                    file_size = part_last
            byte_blocks = int(file_size / buffer_size)
            byte_remainder = file_size % buffer_size
            prng = randgen.get_prng(dev_random, fortuna)
            # Context manager guarantees the handle is closed on errors.
            with open(key_path, "wb") as fh_key:
                for block in range(byte_blocks):
                    fh_key.write(prng.get_bytes(buffer_size))
                    time.sleep(delay)
                if byte_remainder > 0:
                    fh_key.write(prng.get_bytes(byte_remainder))
                    time.sleep(delay)
        if parts == 1:
            if bytes_random > 0:
                # NOTE(review): unlike the encryption path, no 'fortuna'
                # argument is forwarded here -- verify against
                # obfuscator.add_random_bytes().
                obfuscator.add_random_bytes(file_path, buffer_size,
                                            bytes_random, dev_random)
def __init__(self):
    """
        Ensure the "/dev/random" entropy device exists on this platform.
    """
    # /dev/random only exists on Unix-like systems; bail out elsewhere.
    if not os.path.exists("/dev/random"):
        common.exception("The device \"/dev/random\" is available on " \
                         "Unix-like systems, only.")
def __monitor_file(task_file, file_path, file_size, description, delay,
                   progress):
    """
        Monitor the file size of the given file.

        Prints the file info header and, when 'progress' is set, polls the
        file until it reaches 'file_size', drawing a spinner/percentage.
        NOTE: written for Python 2 (print statements).
    """
    file_name = os.path.basename(file_path)
    file_dir = __remove_duplicate_chars( \
        file_path.rstrip(file_name).rstrip(os.path.sep), os.path.sep)
    file_size = int(file_size)
    file_size_init = 0
    file_size_current = 0
    file_size_perc = 0
    # Spinner characters per monitor state: running, stalled, file missing.
    chars_running = ["-", "\\", "|", "/"]
    chars_stalled = ["?", " "]
    chars_missing = ["X", " "]
    delay_running = 0.1
    delay_stalled = 0.6
    progress_chars = chars_running
    progress_count = 0
    stalled = False
    wait = delay_running
    display_file_info = \
        bool(int(common.global_config(["KeyGenerator", "Monitor"],
                                      ["display_file_info"], "1")))
    if display_file_info:
        print("%s:" % description).ljust(16, " ") + file_name
        print("File path:").ljust(16, " ") + file_dir
    else:
        print "%s" % description
    if file_size < 1000:
        print("File size:").ljust(16, " ") + ("%s bytes total" % file_size)
    else:
        size_round = __format_size(file_size)
        print ("File size:").ljust(16, " ") + \
              ("%s (%s bytes total)" % (size_round, file_size))
    if not progress:
        return
    try:
        file_size_init = file_size
        file_size_current = common.get_file_size(file_path)
        file_size_perc = int((file_size_current * 100) / file_size)
    except:
        pass
    count = 0
    # Poll until the monitored file reaches its expected final size.
    while file_size_current < file_size:
        try:
            file_size_current = common.get_file_size(file_path)
        except:
            pass
        if file_size_current == file_size:
            break
        file_exists_task = common.file_exists(task_file)
        file_exists_input = common.file_exists(file_path)
        # Task or input file gone -> switch to the stalled/missing spinner.
        if not file_exists_task or not file_exists_input:
            if not file_exists_input:
                progress_chars = chars_missing
            else:
                progress_chars = chars_stalled
            stalled = True
            wait = delay_stalled
        else:
            progress_chars = chars_running
            wait = delay_running
        if stalled:
            # Re-read the task file to detect a different task reusing it.
            dict_contents = __read_content(task_file)
            if not int(dict_contents["file_input_size"]) == \
                    file_size_init:
                print "-" * 78
                common.exception("Task mismatch. Process cancelled.")
            stalled = False
        progress_count += 1
        if progress_count >= len(progress_chars):
            progress_count = 0
        if delay == 0:
            # Single-shot mode: print the current percentage and leave.
            __progress(file_size_perc, None, True)
            return
        if delay > 0:
            if file_size_perc < 100:
                __progress( \
                    file_size_perc, progress_chars[progress_count], False)
        time.sleep(wait)
        # Only refresh the measured size every 'delay' seconds.
        if count < delay:
            count += 0.1
            continue
        else:
            count = 0
        try:
            file_size_current = common.get_file_size(file_path)
            if not stalled:
                file_size_perc = int((file_size_current * 100) / file_size)
        except:
            pass
    __progress(100, " ", True)
def get_status(task_id, delay=0):
    """
        Get the status of the Erfr process with the given task ID.

        Reads the task file, validates the process type and monitors the
        input/key/output files via __monitor_file().
        NOTE: written for Python 2 (print statements).
    """
    task_file = common.get_task_file(task_id)
    pv.intrange(task_id, "task ID", 1, common.get_max_tasks(), False)
    pv.intvalue(delay, "delay", True, True, False)
    delay = int(delay)
    task_id = int(task_id)
    progress_key = True
    process_type = ""
    process_type_list = ["encryption", "decryption", "key generation"]
    file_input_path = ""
    file_input_size = 0
    file_key_path = ""
    file_key_size = 0
    file_output_path = ""
    file_output_size = 0
    valid_type = False
    if not common.file_exists(task_file):
        common.exception("No process is running with the given task ID.")
    dict_contents = __read_content(task_file)
    process_type = dict_contents["process_type"]
    if process_type == "":
        common.exception("The process type cannot be empty.")
    for item in process_type_list:
        if process_type == item:
            valid_type = True
    if not valid_type:
        common.exception("The process type '%s' is not supported." \
                         % process_type)
    file_input_path = dict_contents["file_input_path"]
    file_input_size = dict_contents["file_input_size"]
    # Key/output file details only exist for en-/decryption tasks.
    if "crypt" in process_type:
        file_key_path = dict_contents["file_key_path"]
        file_key_size = dict_contents["file_key_size"]
        file_output_path = dict_contents["file_output_path"]
        file_output_size = dict_contents["file_output_size"]
        if process_type == "decryption":
            # Existing keys may be larger than needed; no key progress bar.
            progress_key = False
    print
    print "Monitoring Erfr %s process with task ID %s." % \
          (process_type, task_id)
    if delay > 0:
        if delay == 1:
            print "Refreshing the process status every second."
        else:
            print "Refreshing the process status every %s seconds." % \
                  str(delay)
    print
    print "-" * 78
    if file_key_path == "" and file_output_path == "":
        # Key generation only involves a single file.
        __monitor_file(task_file, file_input_path, file_input_size,
                       "File name", delay, True)
    else:
        __monitor_file(task_file, file_input_path, file_input_size,
                       "Input file", delay, False)
        print
        __monitor_file(task_file, file_key_path, file_key_size, "Key file",
                       delay, progress_key)
        print
        __monitor_file(task_file, file_output_path, file_output_size,
                       "Output file", delay, True)
    print "-" * 78
    print
    if delay > 0:
        print "Process finished."