def create_job_items_from_directory(job_item, dir_path):
    items_log.info("Creating job items from directory")
    fileslist = []
    # Walk the directory tree and collect every file with its full local path.
    for (dirpath, dirnames, filenames) in os.walk(dir_path):
        for f in filenames:
            fileslist.append({
                "filename": f,
                "file_path": os.path.join(dirpath, f),
            })
    # Upload each file to the working input bucket, then queue a job item for it.
    for f in fileslist:
        s3.upload_file(f['filename'], f['file_path'], S3_WORKING_INPUT_BUCKET)
        create_job_item(job_item['job_id'], f['filename'], sqs.get_queue(SQS_JOB_ITEMS_QUEUE))
def upload_log_file(output_dir, output_filename):
    # Upload the job's log file to the output bucket under a "logs/" prefix.
    log_file_path = os.path.abspath(output_filename + USER_JOB_LOG_EXT)
    s3_key = output_dir + "/logs/" + output_filename + USER_JOB_LOG_EXT
    s3.upload_file(s3_key, log_file_path, S3_OUTPUT_BUCKET)
    return log_file_path
def upload_output_file(output_dir, output_filename):
    # Upload a result file to the output bucket under the job's output directory.
    output_file_path = os.path.abspath(output_filename)
    s3_key = output_dir + "/" + output_filename
    s3.upload_file(s3_key, output_file_path, S3_OUTPUT_BUCKET)
    return output_file_path