def cleanup_chrome_processes_and_tmp_files():
    """Remove leftover headless_shell core dumps from /tmp and signal their child processes.

    For every core file named `/tmp/core.headless_shell.<pid>`, sends SIGTERM to the
    children of that pid via pkill, then deletes the core file itself.
    """
    core_dumps = Files.find('/tmp/core.headless_shell.*')
    for core_dump in core_dumps:
        # the trailing dotted component of the core file name is the crashed process's pid
        crashed_pid = core_dump.split('.')[-1]
        # NOTE(review): `pkill -P` signals the *children* of crashed_pid, not the process
        # itself, and a "<defunct>" (zombie) process cannot be killed by any signal —
        # only its parent reaping it removes the entry. That is presumably why the
        # original author observed this not working; confirm intent before changing.
        Process.run('pkill', ['-TERM', '-P', str(crashed_pid)])
        Files.delete(core_dump)
def run(event, context):
    """AWS Lambda handler: write a small text file into /tmp and list the folder.

    Reads `file_name` from the invocation event, creates that file under the
    Lambda's ephemeral /tmp storage with fixed contents, and returns the list
    of files currently matching /tmp/*.* .
    """
    # NOTE(review): event.get('file_name') may be None or contain path
    # separators — presumably callers pass a plain file name; verify.
    file_name = event.get('file_name')
    tmp_path = '/tmp'                                    # Lambda's writable temp folder
    target_file = Files.path_combine(tmp_path, file_name)
    Files.write(target_file, 'some text')                # create the file with fixed contents
    return Files.find(tmp_path + '/*.*')                 # everything currently in the temp folder
def test_upload(self): tmp_folder = Temp_Folder_Code(self.lambda_name) (self.aws_lambda.set_s3_bucket (self.s3_bucket ) .set_s3_key (self.s3_key ) .set_folder_code(tmp_folder.folder )) #self.aws_lambda.upload() #assert tmp_folder.s3_file_exists() is True downloaded_file = self.aws_lambda.s3().file_download(self.s3_bucket, self.s3_key) # download file uploaded assert Files.exists(downloaded_file) unzip_location = tmp_folder.folder + '_unzipped' Files.unzip_file(downloaded_file,unzip_location) # unzip it assert Files.contents(Files.find(unzip_location+'/*').pop()) == tmp_folder.lambda_code # confirm unzipped file's contents self.aws_lambda.s3().file_delete(self.s3_bucket, self.s3_key)
def csv_files(self):
    """Return a dict mapping each file name in the data folder to its full path."""
    return {
        Files.file_name(path): path
        for path in Files.find(self.data_folder + '/*')
    }