Ejemplo n.º 1
0
    def deploy(self, user_job_request):
        """Provision Greengrass resources for a validated job request and
        stage the job artifacts (S3 + DynamoDB) for a manual deployment.

        Args:
            user_job_request: the raw job request dict submitted by the user.

        Returns:
            1 on success, 0 on any failure. Every failure after the
            Greengrass definitions are created rolls them back via
            ``connector.delete_gg_definitions()``.
        """
        job_name = utils.get_metadata("name", user_job_request, 0)
        job_version = utils.get_metadata("version", user_job_request, 0)
        control = utils.get_metadata("control", user_job_request, 0)
        # NOTE(review): these four values were retrieved but never used in the
        # original method; the calls are kept in case get_metadata has
        # validation side effects — confirm before deleting them entirely.
        utils.get_metadata("process", user_job_request, 0)
        utils.get_metadata("site-name", user_job_request, 0)
        utils.get_metadata("area", user_job_request, 0)
        utils.get_metadata("machine-name", user_job_request, 0)

        # Guard clause: reject invalid requests up front.
        if not ddb.valid_job_request(job_name, job_version, control):
            return 0

        logger.info("Setting greengrass group")
        connector = gg.generate_greengrass_resources(user_job_request)
        if not connector.create_gg_definitions():
            return 0

        def abort():
            # Roll back the Greengrass definitions created above, then fail.
            connector.delete_gg_definitions()
            return 0

        # write raw job to s3
        logger.info("Writing raw job to s3")
        if not s3.write(user_job_request, "raw"):
            return abort()
        # build job as per protocol
        logger.info("Processing job request")
        processed_job_request = build.job(user_job_request)
        if not processed_job_request:
            return abort()
        # write processed job to s3
        logger.info("Writing processed job to s3")
        if not s3.write(processed_job_request, "json"):
            return abort()
        # update DynamoDB as all other jobs on this gg group id will stop
        # during manual deployment
        logger.info("Updating database to stop all jobs for the group id")
        if not ddb.update(job_name, job_version, control, user_job_request):
            return abort()
        # create an entry in DynamoDB
        logger.info("Creating database entry")
        if not ddb.write(user_job_request):
            return abort()
        # collect metrics (opt-in via environment)
        if os.environ["MET_ENB"].lower() == "true":
            metrics.get_metrics(user_job_request)
        # prompt user to deploy via the console
        post.to_user(job_name, job_version, "info", var.m2c2_user_deploy_request)
        return 1
def to_lambda(job_name, message):
    """Publish *message* to the job's submit IoT topic.

    An anonymous metric is recorded first when the deployment opted in
    (environment variable SEND_ANONYMOUS_METRIC set to "Yes").
    """
    if os.environ["SEND_ANONYMOUS_METRIC"] == "Yes":
        metrics.get_metrics(message)
    destination = "m2c2/job/" + job_name + "/submit"
    serialized = json.dumps(message)
    iot_client.publish(topic=destination, qos=1, payload=serialized)
def to_lambda(job_name, message):
    """Send *message* to the m2c2 submit topic for *job_name* over IoT.

    Metrics are collected first when the MET_ENB environment variable is
    set to "true" (case-insensitive).
    """
    if os.environ["MET_ENB"].lower() == "true":
        metrics.get_metrics(message)
    iot_client.publish(
        topic="m2c2/job/" + job_name + "/submit",
        qos=1,
        payload=json.dumps(message),
    )
 def deploy(self, user_job_request):
     """Create Greengrass resources for a valid job request, stage the job,
     deploy the GG group, then start the new job and restart sibling jobs
     that were stopped by the deployment.

     Args:
         user_job_request: the raw job request dict submitted by the user.

     Returns:
         1 on success, 0 on failure. Failures between resource creation and
         the DynamoDB write roll back the Greengrass definitions.
     """
     job_name = utils.get_metadata("name", user_job_request, 0)
     protocol = utils.get_metadata("protocol", user_job_request, 0)
     if not ddb.valid_job_request(job_name):
         return 0
     logger.info("Setting greengrass group")
     connector = gg.generate_greengrass_resources(user_job_request)
     if not connector.create_gg_definitions():
         return 0
     logger.info("Processing job request")
     processed_job_request = build.job(user_job_request)
     if not processed_job_request:
         connector.delete_gg_definitions()
         return 0
     # write process job to s3 (only done for the slmp protocol)
     if protocol == 'slmp':
         logger.info("Writing processed job to s3")
         if not s3.write(processed_job_request, "json"):
             connector.delete_gg_definitions()
             return 0
     # create an entry in DynamoDB
     logger.info("Creating database entry")
     # BUG FIX: the original applied str() before the falsy test, so the
     # rollback branch could never trigger (str(0) == "0" and str(None) ==
     # "None" are both truthy). Test the raw return value first.
     raw_job_version = ddb.write(user_job_request)
     if not raw_job_version:
         connector.delete_gg_definitions()
         return 0
     new_job_version = str(raw_job_version)
     # collect metrics
     if os.environ["SEND_ANONYMOUS_METRIC"] == "Yes":
         metrics.get_metrics(user_job_request)
     # prompt user to deploy via the console
     post.to_user(job_name, new_job_version, "info",
                  var.m2c2_user_deploy_request)
     logger.info("Deploying to GG edge")
     if not connector.deploy_gg_group():
         return 0
     # was logging.info in the original — use the module logger consistently
     logger.info("Starting jobs")
     user_job_request['job']['control'] = "start"
     user_job_request['job']['properties'][0]['version'] = new_job_version
     # NOTE(review): `topic` is not defined anywhere in this method —
     # presumably a module-level constant (the error message below suggests
     # "m2c2/job/request"); confirm it exists at module scope.
     logger.info(
         "Sending the following to the IoT topic {0}: {1}".format(
             topic, str(user_job_request)))
     try:
         iot_client.publish(
             topic=topic,
             qos=0,
             payload=json.dumps(user_job_request))
     except Exception as err:
         logger.error(
             "There was an issue publishing start to the IoT topic m2c2/job/request:"
             + str(err))
         post.to_user(
             job_name, new_job_version, "error",
             var.m2c2_publish_to_topic % (user_job_request))
     # All jobs on this GG group stopped during deployment; fetch the set to
     # bring back up.
     jobs_to_restart = ddb.start_jobs_after_gg_deploy(user_job_request)
     for entry in jobs_to_restart:
         for item in entry['job']['properties']:
             if item['name'] == user_job_request['job'][
                     'properties'][0]['name']:
                 # NOTE(review): this sets a top-level 'control' key while
                 # the start above writes entry['job']['control'] — confirm
                 # which location the consumer reads.
                 entry['control'] = 'stop'
     logger.info("Restarting jobs")
     if jobs_to_restart:
         try:
             for entry in jobs_to_restart:
                 logger.info(
                     "Sending the following to the IoT topic {0}: {1}"
                     .format(topic, str(entry)))
                 iot_client.publish(
                     topic=topic, qos=0, payload=json.dumps(entry))
                 # throttle restarts so the edge is not flooded
                 time.sleep(5)
         except Exception as err:
             logger.error(
                 "There was an issue publishing start to the IoT topic m2c2/job/request:"
                 + str(err))
             post.to_user(job_name, new_job_version, "error",
                          var.m2c2_publish_to_topic % (entry))
     return 1