Code example #1
0
    def __init__(self, name, job_def, job_queue, log_backend,
                 container_overrides):
        """
        A job executed in a Docker container, running on its own thread.

        :param name: Job Name
        :param job_def: Job definition
        :type job_def: JobDefinition
        :param job_queue: Job Queue
        :param log_backend: Log backend
        :type log_backend: moto.logs.models.LogsBackend
        :param container_overrides: per-job overrides of the container
            properties (may be None)
        """
        threading.Thread.__init__(self)
        DockerModel.__init__(self)

        # Identity and definition.
        self.job_id = str(uuid.uuid4())
        self.job_name = name
        self.job_definition = job_def
        self.container_overrides = container_overrides or {}

        # Lifecycle state and queue membership; the job registers itself
        # on its queue immediately.
        # One of SUBMITTED | PENDING | RUNNABLE | STARTING | RUNNING | SUCCEEDED | FAILED
        self.job_state = "SUBMITTED"
        self.job_queue = job_queue
        self.job_queue.jobs.append(self)

        # Timestamps default to the Unix epoch until the job actually runs.
        epoch = datetime.datetime(1970, 1, 1)
        self.job_started_at = epoch
        self.job_stopped_at = epoch
        self.job_stopped = False
        self.job_stopped_reason = None

        # Flag polled by the worker to request cancellation.
        self.stop = False

        # Thread configuration: daemonized so it never blocks interpreter exit.
        self.daemon = True
        self.name = "MOTO-BATCH-" + self.job_id

        self._log_backend = log_backend
        self.log_stream_name = None
Code example #2
0
File: models.py  Project: zatarra/moto
    def __init__(self, spec, region, validate_s3=True, version=1):
        """
        Build a Lambda function model from a CreateFunction-style spec dict.

        :param spec: function properties; "Code", "FunctionName", "Handler",
            "Role" and "Runtime" are required keys
        :param region: AWS region name this function lives in
        :param validate_s3: kept for interface compatibility
            (NOTE(review): not read in this body — confirm against callers)
        :param version: function version number
        """
        DockerModel.__init__(self)

        # --- required properties ---
        self.region = region
        self.code = spec["Code"]
        self.function_name = spec["FunctionName"]
        self.handler = spec["Handler"]
        self.role = spec["Role"]
        self.run_time = spec["Runtime"]
        self.logs_backend = logs_backends[self.region]
        environment = spec.get("Environment", {})
        self.environment_vars = environment.get("Variables", {})
        self.policy = None
        self.state = "Active"
        self.reserved_concurrency = spec.get("ReservedConcurrentExecutions", None)

        # --- optional properties with defaults ---
        self.description = spec.get("Description", "")
        self.memory_size = spec.get("MemorySize", 128)
        self.publish = spec.get("Publish", False)  # this is ignored currently
        self.timeout = spec.get("Timeout", 3)
        self.layers = self._get_layers_data(spec.get("Layers", []))

        # Every function gets a dedicated CloudWatch Logs group.
        self.logs_group_name = "/aws/lambda/{}".format(self.function_name)
        self.logs_backend.ensure_log_group(self.logs_group_name, [])

        # this isn't finished yet. it needs to find out the VpcId value
        default_vpc_config = {"SubnetIds": [], "SecurityGroupIds": []}
        self._vpc_config = spec.get("VpcConfig", default_vpc_config)

        # --- auto-generated properties ---
        self.version = version
        self.last_modified = datetime.datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S")

        if "ZipFile" in self.code:
            # Inline zip: decode/measure/hash via the shared helper.
            (self.code_bytes,
             self.code_size,
             self.code_sha_256) = _zipfile_content(self.code["ZipFile"])

            # TODO: we should be putting this in a lambda bucket
            self.code["UUID"] = str(uuid.uuid4())
            self.code["S3Key"] = "{}-{}".format(self.function_name, self.code["UUID"])
        else:
            # Code lives in S3; helper returns None when validation is skipped.
            key = _validate_s3_bucket_and_key(self.code)
            if key:
                self.code_bytes = key.value
                self.code_size = key.size
                self.code_sha_256 = hashlib.sha256(key.value).hexdigest()

        self.function_arn = make_function_arn(self.region, ACCOUNT_ID, self.function_name)

        self.tags = dict()
Code example #3
0
    def __init__(self, spec, region, validate_s3=True, version=1):
        """
        Build a Lambda function model from a CreateFunction-style spec dict.

        :param spec: function properties; "Code", "FunctionName", "Handler",
            "Role" and "Runtime" are required keys
        :param region: AWS region name this function lives in
        :param validate_s3: kept for interface compatibility
            (NOTE(review): not read here — `do_validate_s3()` decides instead;
            confirm against callers)
        :param version: function version number
        :raises InvalidParameterValueException: when the referenced S3 bucket
            or key does not exist and S3 validation is enabled
        """
        DockerModel.__init__(self)
        # required
        self.region = region
        self.code = spec["Code"]
        self.function_name = spec["FunctionName"]
        self.handler = spec["Handler"]
        self.role = spec["Role"]
        self.run_time = spec["Runtime"]
        self.logs_backend = logs_backends[self.region]
        self.environment_vars = spec.get("Environment",
                                         {}).get("Variables", {})
        self.policy = None
        self.state = "Active"
        self.reserved_concurrency = spec.get("ReservedConcurrentExecutions",
                                             None)

        # optional
        self.description = spec.get("Description", "")
        self.memory_size = spec.get("MemorySize", 128)
        self.publish = spec.get("Publish", False)  # this is ignored currently
        self.timeout = spec.get("Timeout", 3)

        # Every function gets a dedicated CloudWatch Logs group.
        self.logs_group_name = "/aws/lambda/{}".format(self.function_name)
        self.logs_backend.ensure_log_group(self.logs_group_name, [])

        # this isn't finished yet. it needs to find out the VpcId value
        self._vpc_config = spec.get("VpcConfig", {
            "SubnetIds": [],
            "SecurityGroupIds": []
        })

        # auto-generated
        self.version = version
        self.last_modified = datetime.datetime.utcnow().strftime(
            "%Y-%m-%d %H:%M:%S")

        if "ZipFile" in self.code:
            # ZipFile may arrive as str (py3) or bytes/unicode (py2);
            # try the str path first, fall back to decoding the value as-is.
            try:
                to_unzip_code = base64.b64decode(
                    bytes(self.code["ZipFile"], "utf-8"))
            except Exception:
                to_unzip_code = base64.b64decode(self.code["ZipFile"])

            self.code_bytes = to_unzip_code
            self.code_size = len(to_unzip_code)
            self.code_sha_256 = hashlib.sha256(to_unzip_code).hexdigest()

            # TODO: we should be putting this in a lambda bucket
            self.code["UUID"] = str(uuid.uuid4())
            self.code["S3Key"] = "{}-{}".format(self.function_name,
                                                self.code["UUID"])
        else:
            # validate s3 bucket and key
            key = None
            try:
                # FIXME: does not validate bucket region
                key = s3_backend.get_object(self.code["S3Bucket"],
                                            self.code["S3Key"])
            except MissingBucket:
                if do_validate_s3():
                    raise InvalidParameterValueException(
                        "Error occurred while GetObject. S3 Error Code: NoSuchBucket. S3 Error Message: The specified bucket does not exist"
                    )
            except MissingKey:
                if do_validate_s3():
                    # Fix: raise the same exception type as the NoSuchBucket
                    # branch above. Previously this raised a bare
                    # ValueError("InvalidParameterValueException", ...), which
                    # callers catching InvalidParameterValueException would
                    # miss entirely.
                    raise InvalidParameterValueException(
                        "Error occurred while GetObject. S3 Error Code: NoSuchKey. S3 Error Message: The specified key does not exist."
                    )
            # If validation was skipped and the object was missing, `key`
            # stays None and the code_* attributes are left unset (pre-existing
            # behavior; callers tolerate this).
            if key:
                self.code_bytes = key.value
                self.code_size = key.size
                self.code_sha_256 = hashlib.sha256(key.value).hexdigest()

        self.function_arn = make_function_arn(self.region, ACCOUNT_ID,
                                              self.function_name)

        self.tags = dict()