Example 1
 def wait_job(self, name, namespace, delete=False, sleep=5):
     jobs_path = self.jobs_path.format(namespace)
     url = 'https://{0}:{1}{2}/{3}'.format(self.kubernetes_service_host,
                                           self.kubernetes_service_port,
                                           jobs_path,
                                           name)
     while True:
         try:
             r = requests.get(url,
                              verify=self.cert_verify,
                              headers=self.auth_header)
             if r.status_code != 200:
                 raise Exception(f'Error obtaining {name} info - {str(r.status_code)}\n{str(r.content)}')
             job = r.json()
             if (utils.is_value_in_dict(job['status'], 'succeeded') and
                     utils.is_value_in_dict(job['spec'], 'completions')):
                 if job['status']['succeeded'] >= job['spec']['completions']:
                     # Delete succeeded jobs if delete=True
                     if delete:
                         self.delete_job(name, namespace)
                     break
             if (utils.is_value_in_dict(job['status'], 'failed') and
                     utils.is_value_in_dict(job['spec'], 'backoffLimit')):
                 if job['status']['failed'] >= job['spec']['backoffLimit']:
                     logger.error(f'{name} failed! See pod logs for details')
                     break
             time.sleep(sleep)
         except Exception as e:
             logger.error(e)
             break
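All of the examples on this page gate dictionary access through utils.is_value_in_dict. The helper itself is not reproduced here, so the following is only a minimal sketch of the semantics the call sites imply: the key must exist and its value must be truthy.

def is_value_in_dict(dictionary, key):
    # Sketch only (assumed semantics): True when the key is present
    # and its value is non-empty / non-zero, as the call sites above expect.
    return key in dictionary and bool(dictionary[key])

# Illustrative usage:
# is_value_in_dict({'script': 'IyEvYmlu'}, 'script')  -> True
# is_value_in_dict({'Records': []}, 'Records')        -> False (present but empty)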
Example 2
 def add_s3_environment_vars(self):
     if utils.is_value_in_dict(self.aws_properties, 's3'):
         s3_props = self.aws_properties['s3']
         if utils.is_value_in_dict(s3_props, 'input_bucket'):
             self.add_lambda_environment_variable('INPUT_BUCKET',
                                                  s3_props['input_bucket'])
         if utils.is_value_in_dict(s3_props, 'output_bucket'):
             self.add_lambda_environment_variable('OUTPUT_BUCKET',
                                                  s3_props['output_bucket'])
         if utils.is_value_in_dict(s3_props, 'output_folder'):
             self.add_lambda_environment_variable('OUTPUT_FOLDER',
                                                  s3_props['output_folder'])
Example 3
 def save_tmp_udocker_env(cls):
     # Avoid overriding global variables
     if utils.is_value_in_dict(os.environ, 'UDOCKER_TARBALL'):
         cls.udocker_tarball = os.environ['UDOCKER_TARBALL']
     if utils.is_value_in_dict(os.environ, 'UDOCKER_DIR'):
         cls.udocker_dir = os.environ['UDOCKER_DIR']
     # Set temporary global vars
     udocker_tarball = utils.resource_path(
         utils.join_paths(cls.lambda_code_files_path, "udocker",
                          "udocker-1.1.3.tar.gz"))
     utils.set_environment_variable('UDOCKER_TARBALL', udocker_tarball)
     utils.set_environment_variable(
         'UDOCKER_DIR', utils.join_paths(cls.scar_temporal_folder,
                                         "udocker"))
Example 4
 def create_command(self):
     self.add_container_volumes()
     self.add_container_environment_variables()
     # Container running script
     if utils.is_value_in_dict(lambda_instance.event, 'script'): 
         self.add_script_as_entrypoint()
     # Container with args
     elif utils.is_value_in_dict(lambda_instance.event, 'cmd_args'):
         self.add_args()
     # Script to be executed every time (if defined)
     elif utils.is_variable_in_environment('INIT_SCRIPT_PATH'):
         self.add_init_script()
     # Only container
     else:
         self.cmd_container_execution += [self.container_name]
Example 5
 def get_s3_record(self):
     if len(lambda_instance.event['Records']) > 1:
         logger.warning("Multiple records detected. Only processing the first one.")
         
     record = lambda_instance.event['Records'][0]
     if utils.is_value_in_dict(record, 's3'):
         return record['s3']
Example 6
 def __init__(self):
     if utils.is_value_in_dict(lambda_instance.event, 'Records'):
         self.record = self.get_s3_record()
         self.input_bucket = self.record['bucket']['name']
         self.file_key = unquote_plus(self.record['object']['key'])
         self.file_name = os.path.basename(self.file_key).replace(' ', '')
         self.file_download_path = '{0}/{1}'.format(lambda_instance.input_folder, self.file_name) 
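Examples 5, 6, 12 and 13 all read the standard S3 event notification that AWS delivers to Lambda. A trimmed-down payload containing just the fields these methods touch looks like this (bucket and key values are illustrative):

s3_event = {
    'Records': [{
        'eventSource': 'aws:s3',
        's3': {
            'bucket': {'name': 'my-input-bucket'},
            'object': {'key': 'images/cat%20photo.jpg'}
        }
    }]
}
# get_s3_record() returns s3_event['Records'][0]['s3']; unquote_plus turns
# 'cat%20photo.jpg' into 'cat photo.jpg', and the space is then stripped
# from file_name by replace(' ', '').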
Example 7
 def invoke_function_batch(self, scar_input_file, variables):
     self.register_job_definition(lambda_instance.function_name,
                                  utils.get_environment_variable("IMAGE_ID"))
     data = ""
     # The user script may come from INIT_SCRIPT_PATH or, with precedence, from the event
     if utils.is_variable_in_environment('INIT_SCRIPT_PATH'):
         with open(str(os.environ['INIT_SCRIPT_PATH'])) as myfile:
             data = myfile.read()
     if utils.is_value_in_dict(lambda_instance.event, 'script'):
         data = utils.base64_to_utf8_string(lambda_instance.event['script'])
     if lambda_instance.has_input_bucket():
         self.bucket_name_input = lambda_instance.input_bucket
     if lambda_instance.has_output_bucket():
         self.bucket_name_output = lambda_instance.output_bucket

     if data != "":
         # Script provided: chain INIT -> MED -> FINISH, passing each jobId on via idJob
         self.register_job_definition("scarfiles", self.scarfile_id_image)
         response = self.submit_job("scarfiles", 1, "INIT", data,
                                    self.bucket_name_input, self.bucket_name_output,
                                    "script.sh", scar_input_file, variables)
         idJob = self.submit_job(lambda_instance.function_name, 1, "MED", data,
                                 self.bucket_name_input, self.bucket_name_output,
                                 lambda_instance.input_folder + "/script.sh",
                                 scar_input_file, variables,
                                 idJob=response["jobId"])["jobId"]
         response = self.submit_job("scarfiles", 1, "FINISH", data,
                                    self.bucket_name_input, self.bucket_name_output,
                                    "script.sh", scar_input_file, variables,
                                    idJob=idJob)
     elif self.bucket_name_output != "NO" or self.bucket_name_input != "NO":
         # No script but buckets configured: same three-job chain without a script file
         self.register_job_definition("scarfiles", self.scarfile_id_image)
         response = self.submit_job("scarfiles", 1, "INIT", data,
                                    self.bucket_name_input, self.bucket_name_output,
                                    "", scar_input_file, variables)
         idJob = self.submit_job(lambda_instance.function_name, 1, "MED", data,
                                 self.bucket_name_input, self.bucket_name_output,
                                 "", scar_input_file, variables,
                                 idJob=response["jobId"])["jobId"]
         response = self.submit_job("scarfiles", 1, "FINISH", data,
                                    self.bucket_name_input, self.bucket_name_output,
                                    "", scar_input_file, variables, idJob=idJob)
     else:
         # Neither script nor buckets: submit a single MED job
         idJob = self.submit_job(lambda_instance.function_name, 1, "MED", data,
                                 self.bucket_name_input, self.bucket_name_output,
                                 "", scar_input_file, variables)["jobId"]
     return idJob
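Example 7 chains three AWS Batch jobs (INIT, MED, FINISH) by feeding each submission the previous jobId. submit_job is SCAR's own wrapper and is not shown on this page; in plain boto3 the underlying mechanism is the dependsOn parameter of Batch's submit_job call. A hedged sketch (queue and definition names are made up) would be:

import boto3

batch = boto3.client('batch')

def submit_chained_job(name, definition, queue, depends_on=None):
    # dependsOn makes Batch run this job only after the referenced job has completed
    kwargs = {'jobName': name, 'jobQueue': queue, 'jobDefinition': definition}
    if depends_on:
        kwargs['dependsOn'] = [{'jobId': depends_on}]
    return batch.submit_job(**kwargs)['jobId']

init_id = submit_chained_job('scar-init', 'scarfiles', 'scar-queue')
med_id = submit_chained_job('scar-med', 'my-function', 'scar-queue', init_id)
submit_chained_job('scar-finish', 'scarfiles', 'scar-queue', med_id)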
Example 8
 def get_user_script(self):
     script = ""
     if utils.is_variable_in_environment('INIT_SCRIPT_PATH'):
         file_content = utils.read_file(utils.get_environment_variable('INIT_SCRIPT_PATH'), 'rb')
         script = utils.utf8_to_base64_string(file_content)        
     if utils.is_value_in_dict(self.lambda_instance.event, 'script'):
         script = self.lambda_instance.event['script']
     return script
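Examples 7, 8 and 10 move the user script around base64-encoded via utils.utf8_to_base64_string and utils.base64_to_utf8_string. Judging only from their names and the 'rb' read above, a minimal sketch with the standard base64 module might be:

import base64

def utf8_to_base64_string(value):
    # Sketch: accept bytes (a file read in 'rb' mode) or str, return base64 text
    if isinstance(value, str):
        value = value.encode('utf-8')
    return base64.b64encode(value).decode('utf-8')

def base64_to_utf8_string(value):
    # Sketch: decode base64 text back into a UTF-8 string
    return base64.b64decode(value).decode('utf-8')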
Example 9
 def set_required_environment_variables(self):
     self.add_lambda_environment_variable(
         'TIMEOUT_THRESHOLD', str(self.properties['timeout_threshold']))
     self.add_lambda_environment_variable('LOG_LEVEL',
                                          self.properties['log_level'])
     if utils.is_value_in_dict(self.properties, 'image'):
         self.add_lambda_environment_variable('IMAGE_ID',
                                              self.properties['image'])
     self.add_s3_environment_vars()
     if 'api_gateway' in self.aws_properties:
         self.add_lambda_environment_variable(
             'API_GATEWAY_ID', self.aws_properties['api_gateway']['id'])
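add_lambda_environment_variable, used throughout Examples 2, 9 and 11, is not listed on this page. Assuming the class accumulates the dictionary that boto3's Lambda API expects under Environment -> Variables, a speculative sketch could be:

def add_lambda_environment_variable(self, key, value):
    # Hypothetical: self.environment mirrors the structure later passed to
    # create_function(..., Environment={'Variables': {...}}); only non-empty
    # pairs are recorded.
    if key and value:
        self.environment['Variables'][key] = value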
Example 10
 def create_command(self):
     self.add_container_volumes()
     self.add_container_environment_variables()
     # Container running script
     if utils.is_value_in_dict(self.lambda_instance.event, 'script'): 
         # Add script in memory as entrypoint
         script_path = "{0}/script.sh".format(self.lambda_instance.temporal_folder)
         script_content = utils.base64_to_utf8_string(self.lambda_instance.event['script'])
         utils.create_file_with_content(script_path, script_content)
         self.cmd_container_execution += ["--entrypoint={0} {1}".format(self.script_exec, script_path), self.container_name]
     # Container with args
     elif utils.is_value_in_dict(self.lambda_instance.event, 'cmd_args'):
         # Add args
         self.cmd_container_execution += [self.container_name]
         self.cmd_container_execution += json.loads(self.lambda_instance.event['cmd_args'])
     # Script to be executed every time (if defined)
     elif utils.is_variable_in_environment('INIT_SCRIPT_PATH'):
         # Add init script
         init_script_path = "{0}/init_script.sh".format(self.lambda_instance.temporal_folder)
         shutil.copyfile(utils.get_environment_variable("INIT_SCRIPT_PATH"), init_script_path)    
         self.cmd_container_execution += ["--entrypoint={0} {1}".format(self.script_exec, init_script_path), self.container_name]
     # Only container
     else:
         self.cmd_container_execution += [self.container_name]
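For reference, when Example 10 takes the 'script' branch, the two items appended to cmd_container_execution can be reproduced in isolation (all values below are illustrative):

script_exec = '/bin/sh'                    # illustrative
script_path = '/tmp/request-id/script.sh'  # illustrative temporal_folder path
container_name = 'mycontainer'             # illustrative

cmd = []
cmd += ["--entrypoint={0} {1}".format(script_exec, script_path), container_name]
# cmd == ['--entrypoint=/bin/sh /tmp/request-id/script.sh', 'mycontainer']
# Note that the exec binary and the script path travel in a single argument.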
Example 11
 def set_required_environment_variables(self):
     self.add_lambda_environment_variable(
         'TIMEOUT_THRESHOLD', str(self.properties['timeout_threshold']))
     self.add_lambda_environment_variable('LOG_LEVEL',
                                          self.properties['log_level'])
     self.add_lambda_environment_variable('EXECUTION_MODE',
                                          self.properties['execution_mode'])
     self.add_lambda_environment_variable(
         'ROLE', self.aws_properties['iam']['role'])
     self.add_lambda_environment_variable('MEMORY',
                                          str(self.properties['memory']))
     if utils.is_value_in_dict(self.properties, 'image'):
         self.add_lambda_environment_variable('IMAGE_ID',
                                              self.properties['image'])
     self.add_s3_environment_vars()
     if 'api_gateway' in self.aws_properties:
         self.add_lambda_environment_variable(
             'API_GATEWAY_ID', self.aws_properties['api_gateway']['id'])
Example 12
 def is_s3_event(self):
     if utils.is_value_in_dict(self.lambda_instance.event, 'Records'):
         return self.lambda_instance.event['Records'][0]['eventSource'] == "aws:s3"
     return False
Example 13
 def is_s3_event(self):
     if utils.is_value_in_dict(lambda_instance.event, 'Records'):
         # Check if the event is an S3 event
         return lambda_instance.event['Records'][0]['eventSource'] == "aws:s3"
     return False