def get_temp(self):
    """Walk the temp directory and archive every file found into tmp.zip."""
    # NOTE(review): os.path.join(start_fs, '/tmp') evaluates to '/tmp' whenever
    # the second argument is absolute, so start_fs is effectively ignored here
    # (same as in the sibling collectors) — confirm this is intentional.
    collected = set()
    for root, _subdirs, names in os.walk(os.path.join(start_fs, '/tmp')):
        for name in names:
            collected.add(os.path.join(root, name))
    files = list(collected)
    self.args['logger'].info('Zip tmp.zip with %s ' % files)
    utils.zip_file(files, 'tmp.zip', self.args['output_dir'],
                   self.args['logger'])
def collect_ssh_profile(self):
    """Zip each user's ~/.ssh/known_hosts file (when present) into know_hosts.zip."""
    self.args['logger'].info('Collect Know Hosts')
    known_hosts = []
    for home_dir in self._homes:
        if not os.path.exists(home_dir):
            continue
        pattern = os.path.join(start_fs,
                               os.path.join(home_dir, '.ssh/known_hosts'))
        known_hosts.extend(glob.glob(pattern))
    # Only produce the archive when at least one known_hosts file was found.
    if known_hosts:
        utils.zip_file(known_hosts, 'know_hosts.zip',
                       self.args['output_dir'], self.args['logger'])
def collect_log(self):
    """Zip every file under /var/log smaller than ``size_max_log`` bytes.

    Oversized logs are skipped so the resulting var_log.zip stays small.
    """
    self.args['logger'].info('Zip of /var/log')
    files_to_zip = []
    for dir_name, _subdirs, file_names in os.walk(
            os.path.join(start_fs, '/var/log')):
        for fname in file_names:
            absolute_path = os.path.join(dir_name, fname)
            # Size filter: only archive logs strictly below the cap.
            if os.stat(absolute_path).st_size < size_max_log:
                files_to_zip.append(absolute_path)
    # The previous implementation also built a size-sorted copy of this list
    # but never used it, and passed the paths through a redundant dict copy;
    # both were dead code and are removed (archive contents are unchanged —
    # os.walk yields each file exactly once, in the same order as before).
    utils.zip_file(files_to_zip, 'var_log.zip', self.args['output_dir'],
                   self.args['logger'])
    self.args['logger'].info('Zip of /var log is finished')
def autorun(self):
    """Collect autorun/startup configuration (init and cron files) into autorun.zip."""

    def _files_under(root):
        # Recursively list every file below ``root``.
        found = []
        for dir_name, _subdirs, file_names in os.walk(root):
            found.extend(os.path.join(dir_name, f) for f in file_names)
        return found

    file_to_zip = []
    dir_collect = glob.glob(etc_folder_d)
    cron_dir = glob.glob(etc_cron_rep)
    self.args['logger'].info('Collect %s ' % dir_collect)
    for d in dir_collect:
        file_to_zip.extend(_files_under(d))
    self.args['logger'].info('Collect %s ' % cron_dir)
    for d in cron_dir:
        file_to_zip.extend(_files_under(d))
    # The main crontab file is always included, whether or not it exists on
    # disk (unchanged from the original behavior).
    file_to_zip.append(etc_cron)
    self.args['logger'].info('Zip file autorun.zip')
    # The duplicated walk-and-collect loop was factored into _files_under;
    # the resulting file set is identical.
    utils.zip_file(list(set(file_to_zip)), 'autorun.zip',
                   self.args['output_dir'], self.args['logger'])
def collect_users(self):
    """Zip account databases, shell config files and users' home dotfiles."""
    self.args['logger'].info('Collect users')
    to_archive = []
    # System-wide account / shell configuration files, when present on disk.
    # Order matters only for archive layout; it matches the original sequence.
    for system_file in (etc_passwd, etc_shadow, etc_bashrc, etc_profile):
        if os.path.isfile(system_file):
            to_archive.append(system_file)
    # Per-user dotfiles (plain files only) from each existing home directory.
    for home_dir in self._homes:
        if os.path.exists(home_dir):
            pattern = os.path.join(start_fs, os.path.join(home_dir, '.*'))
            to_archive.extend(
                p for p in glob.glob(pattern) if os.path.isfile(p))
    utils.zip_file(to_archive, 'users_home.zip', self.args['output_dir'],
                   self.args['logger'])
def create_or_update_function(self, name, runtime, handler,
                              description=None, zip_filename=None,
                              s3_filename=None, local_filename=None,
                              otherfiles=None):
    """Deploy code to an AWS Lambda function, updating it or creating it.

    The code payload is resolved from exactly one of three sources, checked
    in this priority order:

    1. ``zip_filename`` — a ready-made zip archive, uploaded inline.
    2. ``local_filename`` — a local file, zipped here via ``utils.zip_file``
       (together with ``otherfiles``) and uploaded inline.
    3. ``s3_filename`` — used by S3 bucket/key reference when it already
       points at a ``.zip`` object; otherwise downloaded, zipped locally and
       uploaded inline.

    :param name: Lambda function name; also used as the module part of the
        handler string (see ``_handler`` below).
    :param runtime: Lambda runtime identifier passed to AWS.
    :param handler: function name inside the module; combined into
        ``'<name>.<handler>'``.
    :param description: optional description; falls back to ``name``.
    :param zip_filename: path to an existing zip archive.
    :param s3_filename: S3 URL of the source code.
    :param local_filename: path to a local source file to be zipped.
    :param otherfiles: extra files passed to ``utils.zip_file``.
    :returns: the ARN of the updated or newly created function.
    :raises MissingSourceCodeFileError: when none of the three source
        arguments is provided.
    """
    if zip_filename:
        # Source 1: caller supplied a ready zip — read its bytes for upload.
        zip_blob = utils.get_zip_contents(zip_filename)
        code = {'ZipFile': zip_blob}
        log.debug('source=zip, file=%s' % zip_filename)
    elif local_filename:
        # Source 2: zip the local file (plus otherfiles) before upload.
        zip_filename = utils.zip_file(local_filename, otherfiles=otherfiles)
        zip_blob = utils.get_zip_contents(zip_filename)
        code = {'ZipFile': zip_blob}
        log.debug('source=local, file=%s' % local_filename)
    elif s3_filename:
        # Source 3: split the S3 URL into bucket and key.
        bucket, key = utils.get_host(s3_filename), utils.get_path(
            s3_filename)
        if key.endswith('.zip'):
            # Already a zip object in S3 — let Lambda fetch it directly.
            code = {'S3Bucket': bucket, 'S3Key': key}
        else:
            # Not a zip: download, zip locally, and upload inline instead.
            filename = utils.get_resource(s3_filename)
            local_filename = self.download_from_s3(bucket, key, filename)
            zip_filename = utils.zip_file(local_filename,
                                          otherfiles=otherfiles)
            zip_blob = utils.get_zip_contents(zip_filename)
            code = {'ZipFile': zip_blob}
        log.debug('source=s3, file=%s' % s3_filename)
    else:
        log.error('Missing source')
        raise MissingSourceCodeFileError(
            "Must provide either zip_filename, s3_filename or local_filename"
        )
    try:
        # Handler is namespaced by the function name — this assumes the
        # deployed module file is named after the function; TODO confirm.
        _handler = '%s.%s' % (name, handler)
        # Optimistic path: assume the function exists and update its
        # configuration first, then its code.
        self.awslambda\
            .update_function_configuration(FunctionName=name,
                                           Role=self.role_arn,
                                           Handler=_handler,
                                           Description=description or name,
                                           Timeout=self.timeout_time,
                                           Runtime=runtime,
                                           VpcConfig={
                                               'SubnetIds': self.subnet_ids,
                                               'SecurityGroupIds':
                                               self.security_group_ids
                                           })
        if zip_filename or local_filename:
            # Inline upload of the zip bytes built above.
            function = self.awslambda \
                .update_function_code(FunctionName=name,
                                      ZipFile=code['ZipFile'],
                                      Publish=True)
        else:
            # S3-hosted zip: reference it by bucket and key.
            function = self.awslambda \
                .update_function_code(FunctionName=name,
                                      S3Bucket=code['S3Bucket'],
                                      S3Key=code['S3Key'],
                                      Publish=True)
        log.info("Lambda updated, lambda=%s" % name)
    except botocore.exceptions.ClientError as ex:
        if ex.response['Error']['Code'] == 'ResourceNotFoundException':
            # Function does not exist yet — fall back to creating it.
            _handler = '%s.%s' % (name, handler)
            # Amazon needs a few seconds to replicate the new role through
            # all regions. So creating a Lambda function just after the role
            # creation would sometimes result in botocore.exceptions.ClientError:
            # An error occurred (InvalidParameterValueException) when calling
            # the CreateFunction operation: The role defined for the function
            # cannot be assumed by Lambda.
            # Hence the retry loop below: up to 9 attempts, 3 s apart.
            lambda_created = False
            last_ex = None
            for i in range(1, 10):
                try:
                    function = self.awslambda \
                        .create_function(FunctionName=name,
                                         Runtime=runtime,
                                         Role=self.role_arn,
                                         Handler=_handler,
                                         Description=description or name,
                                         Timeout=self.timeout_time,
                                         Publish=True,
                                         Code=code,
                                         VpcConfig={
                                             'SubnetIds': self.subnet_ids,
                                             'SecurityGroupIds':
                                             self.security_group_ids
                                         })
                    log.info("Lambda created, lambda=%s" % name)
                    lambda_created = True
                    break
                except botocore.exceptions.ClientError as exx:
                    if exx.response['Error'][
                            'Code'] == 'InvalidParameterValueException':
                        # Role not yet visible to Lambda — wait and retry.
                        log.info(
                            'Retrying to create lambda, lambda=%s ...' %
                            name)
                        time.sleep(3)
                        last_ex = exx
                    else:
                        # Any other client error is not retryable here.
                        raise exx
            if not lambda_created:
                # All retries exhausted — surface the last AWS error.
                raise last_ex
        else:
            # Update failed for a reason other than "function missing".
            raise ex
    function_arn = function['FunctionArn']
    return function_arn