Example #1
 def description(self):
     if self.is_logging:
         self.latest_delivery_time = datetime2int(datetime.utcnow())
         self.latest_delivery_attempt = iso_8601_datetime_without_milliseconds(
             datetime.utcnow())
     desc = {
         "IsLogging": self.is_logging,
         "LatestDeliveryAttemptTime": self.latest_delivery_attempt,
         "LatestNotificationAttemptTime": "",
         "LatestNotificationAttemptSucceeded": "",
         "LatestDeliveryAttemptSucceeded": "",
         "TimeLoggingStarted": "",
         "TimeLoggingStopped": "",
     }
     if self.started:
         desc["StartLoggingTime"] = datetime2int(self.started)
         desc["TimeLoggingStarted"] = iso_8601_datetime_without_milliseconds(
             self.started
         )
         desc["LatestDeliveryTime"] = self.latest_delivery_time
     if self.stopped:
         desc["StopLoggingTime"] = datetime2int(self.stopped)
         desc["TimeLoggingStopped"] = iso_8601_datetime_without_milliseconds(
             self.stopped
         )
     return desc
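For orientation, every example on this page calls the helper iso_8601_datetime_without_milliseconds. A minimal sketch of the behaviour these snippets assume (the actual moto helper may differ in detail):

    from datetime import datetime

    def iso_8601_datetime_without_milliseconds(value):
        # Assumed behaviour: render a datetime as an ISO-8601 timestamp with
        # second precision and a trailing "Z", e.g. "2021-01-01T12:23:34Z";
        # pass None through unchanged so unset timestamps stay unset.
        if value is None:
            return None
        return value.strftime("%Y-%m-%dT%H:%M:%SZ")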
Example #2
 def to_dict(self):
     key_dict = {
         "KeyMetadata": {
             "AWSAccountId":
             self.account_id,
             "Arn":
             self.arn,
             "CreationDate":
             iso_8601_datetime_without_milliseconds(datetime.now()),
             "Description":
             self.description,
             "Enabled":
             self.enabled,
             "KeyId":
             self.id,
             "KeyUsage":
             self.key_usage,
             "KeyState":
             self.key_state,
         }
     }
     if self.key_state == "PendingDeletion":
         key_dict["KeyMetadata"][
             "DeletionDate"] = iso_8601_datetime_without_milliseconds(
                 self.deletion_date)
     return key_dict
Example #3
    def describe_image_scan_findings(self, registry_id, repository_name, image_id):
        repo = self._get_repository(repository_name, registry_id)

        image = repo._get_image(image_id.get("imageTag"), image_id.get("imageDigest"))

        if not image.last_scan:
            image_id_rep = "{{imageDigest:'{0}', imageTag:'{1}'}}".format(
                image_id.get("imageDigest") or "null",
                image_id.get("imageTag") or "null",
            )
            raise ScanNotFoundException(
                image_id=image_id_rep,
                repository_name=repository_name,
                registry_id=repo.registry_id,
            )

        return {
            "registryId": repo.registry_id,
            "repositoryName": repository_name,
            "imageId": {
                "imageDigest": image.image_digest,
                "imageTag": image.image_tag,
            },
            "imageScanStatus": {
                "status": "COMPLETE",
                "description": "The scan was completed successfully.",
            },
            "imageScanFindings": {
                "imageScanCompletedAt": iso_8601_datetime_without_milliseconds(
                    image.last_scan
                ),
                "vulnerabilitySourceUpdatedAt": iso_8601_datetime_without_milliseconds(
                    datetime.utcnow()
                ),
                "findings": [
                    {
                        "name": "CVE-9999-9999",
                        "uri": "https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-9999-9999",
                        "severity": "HIGH",
                        "attributes": [
                            {"key": "package_version", "value": "9.9.9"},
                            {"key": "package_name", "value": "moto_fake"},
                            {
                                "key": "CVSS2_VECTOR",
                                "value": "AV:N/AC:L/Au:N/C:P/I:P/A:P",
                            },
                            {"key": "CVSS2_SCORE", "value": "7.5"},
                        ],
                    }
                ],
                "findingSeverityCounts": {"HIGH": 1},
            },
        }
Example #4
 def start_logging(self):
     self.is_logging = True
     self.started = datetime.utcnow()
     self.latest_delivery_time = datetime2int(datetime.utcnow())
     self.latest_delivery_attempt = iso_8601_datetime_without_milliseconds(
         datetime.utcnow()
     )
Example #5
 def to_dict(self):
     key_dict = {
         "KeyMetadata": {
             "AWSAccountId":
             self.account_id,
             "Arn":
             self.arn,
             #"CreationDate": "2015-01-01 00:00:00",
             "CreationDate":
             datetime.strftime(datetime.utcnow(), "%Y-%m-%dT%H:%M:%SZ"),
             "Description":
             self.description,
             "Enabled":
             self.enabled,
             "KeyId":
             self.id,
             "KeyUsage":
             self.key_usage,
             "KeyState":
             self.key_state,
         }
     }
     if self.key_state == 'PendingDeletion':
         key_dict['KeyMetadata']['DeletionDate'] = iso_8601_datetime_without_milliseconds(
             self.deletion_date
         )
     return key_dict
Example #6
    def __init__(
        self,
        name,
        container_provider,
        client_token,
        region_name,
        aws_partition,
        tags=None,
        virtual_cluster_id=None,
    ):
        self.id = virtual_cluster_id or random_cluster_id()

        self.name = name
        self.client_token = client_token
        self.arn = VIRTUAL_CLUSTER_ARN_TEMPLATE.format(
            partition=aws_partition,
            region=region_name,
            virtual_cluster_id=self.id)
        self.state = VIRTUAL_CLUSTER_STATUS
        self.container_provider = container_provider
        self.container_provider_id = container_provider["id"]
        self.namespace = container_provider["info"]["eksInfo"]["namespace"]
        self.creation_date = iso_8601_datetime_without_milliseconds(
            datetime.today().replace(hour=0, minute=0, second=0,
                                     microsecond=0))
        self.tags = tags
Example #7
    def __init__(
        self,
        cluster_identifier,
        database,
        db_user,
        query_parameters,
        query_string,
        secret_arn,
    ):
        now = iso_8601_datetime_without_milliseconds(datetime.now())

        self.id = str(uuid.uuid4())
        self.cluster_identifier = cluster_identifier
        self.created_at = now
        self.database = database
        self.db_user = db_user
        self.duration = 0
        self.has_result_set = False
        self.query_parameters = query_parameters
        self.query_string = query_string
        self.redshift_pid = random.randint(0, 99999)
        self.redshift_query_id = random.randint(0, 99999)
        self.result_rows = -1
        self.result_size = -1
        self.secret_arn = secret_arn
        self.status = "STARTED"
        self.sub_statements = []
        self.updated_at = now
Example #8
    def cancel_job_run(self, job_id, virtual_cluster_id):

        if not re.match(r"[a-z,A-Z,0-9]{19}", job_id):
            raise ValidationException("Invalid job run short id")

        if job_id not in self.jobs.keys():
            raise ResourceNotFoundException(f"Job run {job_id} doesn't exist.")

        if virtual_cluster_id != self.jobs[job_id].virtual_cluster_id:
            raise ResourceNotFoundException(f"Job run {job_id} doesn't exist.")

        if self.jobs[job_id].state in [
                "FAILED",
                "CANCELLED",
                "CANCEL_PENDING",
                "COMPLETED",
        ]:
            raise ValidationException(
                f"Job run {job_id} is not in a cancellable state")

        job = self.jobs[job_id]
        job.state = "CANCELLED"
        job.finished_at = iso_8601_datetime_without_milliseconds(
            datetime.today().replace(hour=0, minute=1, second=0,
                                     microsecond=0))
        job.state_details = "JobRun CANCELLED successfully."

        return job
Example #9
 def __init__(self, arn, name, definition, roleArn, tags=None):
     self.creation_date = iso_8601_datetime_without_milliseconds(datetime.now())
     self.arn = arn
     self.name = name
     self.definition = definition
     self.roleArn = roleArn
     self.tags = tags
Example #10
    def __init__(
        self,
        cluster_name,
        fargate_profile_name,
        pod_execution_role_arn,
        selectors,
        region_name,
        aws_partition,
        client_request_token=None,
        subnets=None,
        tags=None,
    ):
        if subnets is None:
            subnets = list()
        if tags is None:
            tags = dict()

        self.created_at = iso_8601_datetime_without_milliseconds(datetime.now())
        self.uuid = str(uuid4())
        self.fargate_profile_arn = FARGATE_PROFILE_ARN_TEMPLATE.format(
            partition=aws_partition,
            region=region_name,
            cluster_name=cluster_name,
            fargate_profile_name=fargate_profile_name,
            uuid=self.uuid,
        )

        self.status = ACTIVE_STATUS
        self.cluster_name = cluster_name
        self.fargate_profile_name = fargate_profile_name
        self.pod_execution_role_arn = pod_execution_role_arn
        self.client_request_token = client_request_token
        self.selectors = selectors
        self.subnets = subnets
        self.tags = tags
Example #11
 def schedule_key_deletion(self, key_id, pending_window_in_days):
     if key_id in self.keys:
         if 7 <= pending_window_in_days <= 30:
             self.keys[key_id].enabled = False
             self.keys[key_id].key_state = 'PendingDeletion'
             self.keys[key_id].deletion_date = datetime.now() + timedelta(days=pending_window_in_days)
             return iso_8601_datetime_without_milliseconds(self.keys[key_id].deletion_date)
Example #12
    def get_metric_data(self,
                        queries,
                        start_time,
                        end_time,
                        scan_by="TimestampAscending"):

        period_data = [
            md for md in self.metric_data
            if start_time <= md.timestamp <= end_time
        ]

        results = []
        for query in queries:
            period_start_time = start_time
            query_ns = query["metric_stat._metric._namespace"]
            query_name = query["metric_stat._metric._metric_name"]
            delta = timedelta(seconds=int(query["metric_stat._period"]))
            result_vals = []
            timestamps = []
            stat = query["metric_stat._stat"]
            while period_start_time <= end_time:
                period_end_time = period_start_time + delta
                period_md = [
                    period_md for period_md in period_data if
                    period_start_time <= period_md.timestamp < period_end_time
                ]

                query_period_data = [
                    md for md in period_md
                    if md.namespace == query_ns and md.name == query_name
                ]

                metric_values = [m.value for m in query_period_data]

                if len(metric_values) > 0:
                    if stat == "Average":
                        result_vals.append(
                            sum(metric_values) / len(metric_values))
                    elif stat == "Minimum":
                        result_vals.append(min(metric_values))
                    elif stat == "Maximum":
                        result_vals.append(max(metric_values))
                    elif stat == "Sum":
                        result_vals.append(sum(metric_values))
                    timestamps.append(
                        iso_8601_datetime_without_milliseconds(
                            period_start_time))
                period_start_time += delta
            if scan_by == "TimestampDescending" and len(timestamps) > 0:
                timestamps.reverse()
                result_vals.reverse()
            label = query["metric_stat._metric._metric_name"] + " " + stat
            results.append({
                "id": query["id"],
                "label": label,
                "vals": result_vals,
                "timestamps": timestamps,
            })
        return results
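A small, self-contained illustration of the flattened query shape that get_metric_data reads above (namespace, metric name, and values are placeholders):

    from datetime import datetime, timedelta

    end_time = datetime.utcnow()
    start_time = end_time - timedelta(minutes=10)
    queries = [{
        "id": "q1",
        "metric_stat._metric._namespace": "AWS/EC2",
        "metric_stat._metric._metric_name": "CPUUtilization",
        "metric_stat._period": "300",
        "metric_stat._stat": "Average",
    }]
    # Passed to get_metric_data(queries, start_time, end_time), each query yields
    # a dict with "id", "label", "vals" and "timestamps"; the timestamps are
    # formatted via iso_8601_datetime_without_milliseconds.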
Example #13
 def __init__(self, region_name, account_id, state_machine_name, execution_name, state_machine_arn):
     execution_arn = 'arn:aws:states:{}:{}:execution:{}:{}'
     execution_arn = execution_arn.format(region_name, account_id, state_machine_name, execution_name)
     self.execution_arn = execution_arn
     self.name = execution_name
     self.start_date = iso_8601_datetime_without_milliseconds(datetime.now())
     self.state_machine_arn = state_machine_arn
     self.status = 'RUNNING'
     self.stop_date = None
Example #14
def test_send_to_cw_log_group():
    # given
    client_events = boto3.client("events", "eu-central-1")
    client_logs = boto3.client("logs", region_name="eu-central-1")
    log_group_name = "/test-group"
    rule_name = "test-rule"
    client_logs.create_log_group(logGroupName=log_group_name)
    client_events.put_rule(
        Name=rule_name,
        EventPattern=json.dumps({"account": [ACCOUNT_ID]}),
        State="ENABLED",
    )
    client_events.put_targets(
        Rule=rule_name,
        Targets=[
            {
                "Id": "logs",
                "Arn": "arn:aws:logs:eu-central-1:{0}:log-group:{1}".format(
                    ACCOUNT_ID, log_group_name
                ),
            }
        ],
    )

    # when
    event_time = datetime(2021, 1, 1, 12, 23, 34)
    client_events.put_events(
        Entries=[
            {
                "Time": event_time,
                "Source": "source",
                "DetailType": "type",
                "Detail": json.dumps({"key": "value"}),
            }
        ],
    )

    # then
    response = client_logs.filter_log_events(logGroupName=log_group_name)
    response["events"].should.have.length_of(1)
    event = response["events"][0]
    event["logStreamName"].should_not.be.empty
    event["timestamp"].should.be.a(float)
    event["ingestionTime"].should.be.a(int)
    event["eventId"].should_not.be.empty

    message = json.loads(event["message"])
    message["version"].should.equal("0")
    message["id"].should_not.be.empty
    message["detail-type"].should.equal("type")
    message["source"].should.equal("source")
    message["time"].should.equal(iso_8601_datetime_without_milliseconds(event_time))
    message["region"].should.equal("eu-central-1")
    message["resources"].should.be.empty
    message["detail"].should.equal({"key": "value"})
Example #15
    def _send_to_sqs_queue(self, resource_id, event):
        from moto.sqs import sqs_backends

        event_copy = copy.deepcopy(event)
        event_copy["time"] = iso_8601_datetime_without_milliseconds(
            datetime.utcfromtimestamp(event_copy["time"])
        )

        sqs_backends[self.region_name].send_message(
            queue_name=resource_id, message_body=json.dumps(event_copy)
        )
Example #16
def test_create_nodegroup_generates_valid_nodegroup_created_timestamp(NodegroupBuilder):
    _, generated_test_data = NodegroupBuilder()

    result_time = iso_8601_datetime_without_milliseconds(
        generated_test_data.nodegroup_describe_output[NodegroupAttributes.CREATED_AT]
    )

    if settings.TEST_SERVER_MODE:
        RegExTemplates.ISO8601_FORMAT.match(result_time).should.be.true
    else:
        result_time.should.equal(FROZEN_TIME)
Example #17
    def response_object(self):
        response_object = self.gen_response_object()

        response_object["registryId"] = self.registry_id
        response_object["repositoryArn"] = self.arn
        response_object["repositoryName"] = self.name
        response_object["repositoryUri"] = self.uri
        response_object["createdAt"] = iso_8601_datetime_without_milliseconds(
            self.created_at)
        del response_object["arn"], response_object["name"], response_object[
            "images"]
        return response_object
Example #18
def test_create_fargate_profile_generates_valid_created_timestamp(
    FargateProfileBuilder,
):
    _, generated_test_data = FargateProfileBuilder()

    result_time = iso_8601_datetime_without_milliseconds(
        generated_test_data.fargate_describe_output[FargateProfileAttributes.CREATED_AT]
    )

    if settings.TEST_SERVER_MODE:
        RegExTemplates.ISO8601_FORMAT.match(result_time).should.be.true
    else:
        result_time.should.equal(FROZEN_TIME)
Example #19
    def __init__(
        self,
        name,
        role_arn,
        resources_vpc_config,
        region_name,
        aws_partition,
        version=None,
        kubernetes_network_config=None,
        logging=None,
        client_request_token=None,
        tags=None,
        encryption_config=None,
    ):
        if encryption_config is None:
            encryption_config = []
        if tags is None:
            tags = dict()

        self.nodegroups = dict()
        self.nodegroup_count = 0

        self.fargate_profiles = dict()
        self.fargate_profile_count = 0

        self.arn = CLUSTER_ARN_TEMPLATE.format(partition=aws_partition,
                                               region=region_name,
                                               name=name)
        self.certificateAuthority = {"data": random_string(1400)}
        self.creation_date = iso_8601_datetime_without_milliseconds(
            datetime.now())
        self.identity = {
            "oidc": {
                "issuer": ISSUER_TEMPLATE.format(region=region_name)
            }
        }
        self.endpoint = ENDPOINT_TEMPLATE.format(region=region_name)

        self.kubernetes_network_config = (kubernetes_network_config
                                          or DEFAULT_KUBERNETES_NETWORK_CONFIG)
        self.logging = logging or DEFAULT_LOGGING
        self.platformVersion = DEFAULT_PLATFORM_VERSION
        self.status = ACTIVE_STATUS
        self.version = version or DEFAULT_KUBERNETES_VERSION

        self.client_request_token = client_request_token
        self.encryption_config = encryption_config
        self.name = name
        self.resources_vpc_config = resources_vpc_config
        self.role_arn = role_arn
        self.tags = tags
Example #20
    def get_lifecycle_policy(self, registry_id, repository_name):
        repo = self._get_repository(repository_name, registry_id)

        if not repo.lifecycle_policy:
            raise LifecyclePolicyNotFoundException(repository_name, repo.registry_id)

        return {
            "registryId": repo.registry_id,
            "repositoryName": repository_name,
            "lifecyclePolicyText": repo.lifecycle_policy,
            "lastEvaluatedAt": iso_8601_datetime_without_milliseconds(
                datetime.utcnow()
            ),
        }
Example #21
def test_send_to_sqs_queue():
    # given
    client_events = boto3.client("events", "eu-central-1")
    client_sqs = boto3.client("sqs", region_name="eu-central-1")
    rule_name = "test-rule"
    queue_url = client_sqs.create_queue(QueueName="test-queue")["QueueUrl"]
    queue_arn = client_sqs.get_queue_attributes(
        QueueUrl=queue_url,
        AttributeNames=["QueueArn"])["Attributes"]["QueueArn"]
    client_events.put_rule(
        Name=rule_name,
        EventPattern=json.dumps({"account": [ACCOUNT_ID]}),
        State="ENABLED",
    )
    client_events.put_targets(
        Rule=rule_name,
        Targets=[{
            "Id": "sqs",
            "Arn": queue_arn
        }],
    )

    # when
    event_time = datetime(2021, 1, 1, 12, 23, 34)
    client_events.put_events(Entries=[{
        "Time": event_time,
        "Source": "source",
        "DetailType": "type",
        "Detail": json.dumps({"key": "value"}),
    }])

    # then
    response = client_sqs.receive_message(QueueUrl=queue_url)
    response["Messages"].should.have.length_of(1)
    message = response["Messages"][0]
    message["MessageId"].should_not.be.empty
    message["ReceiptHandle"].should_not.be.empty
    message["MD5OfBody"].should_not.be.empty

    body = json.loads(message["Body"])
    body["version"].should.equal("0")
    body["id"].should_not.be.empty
    body["detail-type"].should.equal("type")
    body["source"].should.equal("source")
    body["time"].should.equal(
        iso_8601_datetime_without_milliseconds(event_time))
    body["region"].should.equal("eu-central-1")
    body["resources"].should.be.empty
    body["detail"].should.equal({"key": "value"})
Example #22
 def to_dict(self):
     key_dict = {
         "KeyMetadata": {
             "AWSAccountId": self.account_id,
             "Arn": self.arn,
             "CreationDate": "2015-01-01 00:00:00",
             "Description": self.description,
             "Enabled": self.enabled,
             "KeyId": self.id,
             "KeyUsage": self.key_usage,
             "KeyState": self.key_state,
         }
     }
     if self.key_state == 'PendingDeletion':
         key_dict['KeyMetadata']['DeletionDate'] = iso_8601_datetime_without_milliseconds(self.deletion_date)
     return key_dict
Example #23
 def to_dict(self):
     key_dict = {
         "KeyMetadata": {
             "AWSAccountId": self.account_id,
             "Arn": self.arn,
             "CreationDate": datetime.strftime(datetime.utcnow(), "%Y-%m-%dT%H:%M:%SZ"),
             "Description": self.description,
             "Enabled": self.enabled,
             "KeyId": self.id,
             "KeyUsage": self.key_usage,
             "KeyState": self.key_state,
         }
     }
     if self.key_state == 'PendingDeletion':
         key_dict['KeyMetadata']['DeletionDate'] = iso_8601_datetime_without_milliseconds(self.deletion_date)
     return key_dict
Example #24
    def update_application(
        self,
        application_id,
        initial_capacity,
        maximum_capacity,
        auto_start_configuration,
        auto_stop_configuration,
        network_configuration,
    ):
        if application_id not in self.applications.keys():
            raise ResourceNotFoundException(application_id)

        if self.applications[application_id].state not in [
                "CREATED", "STOPPED"
        ]:
            raise ValidationException(
                f"Application {application_id} must be in one of the following statuses [CREATED, STOPPED]. "
                f"Current status: {self.applications[application_id].state}")

        if initial_capacity:
            self.applications[application_id].initial_capacity = initial_capacity

        if maximum_capacity:
            self.applications[application_id].maximum_capacity = maximum_capacity

        if auto_start_configuration:
            self.applications[application_id].auto_start_configuration = auto_start_configuration

        if auto_stop_configuration:
            self.applications[application_id].auto_stop_configuration = auto_stop_configuration

        if network_configuration:
            self.applications[application_id].network_configuration = network_configuration

        self.applications[application_id].updated_at = iso_8601_datetime_without_milliseconds(
            datetime.today().replace(hour=0, minute=0, second=0, microsecond=0)
        )

        return self.applications[application_id].to_dict()
Example #25
    def _send_to_cw_log_group(self, name, event):
        from moto.logs import logs_backends

        event_copy = copy.deepcopy(event)
        event_copy["time"] = iso_8601_datetime_without_milliseconds(
            datetime.utcfromtimestamp(event_copy["time"]))

        log_stream_name = str(uuid4())
        log_events = [{
            "timestamp": unix_time(datetime.utcnow()),
            "message": json.dumps(event_copy),
        }]

        logs_backends[self.region_name].create_log_stream(
            name, log_stream_name)
        logs_backends[self.region_name].put_log_events(name, log_stream_name,
                                                       log_events, None)
Example #26
    def __init__(self,
                 region_name,
                 dataset_arns,
                 dataset_group_name,
                 domain,
                 tags=None):
        self.creation_date = iso_8601_datetime_without_milliseconds(
            datetime.now())
        self.modified_date = self.creation_date

        self.arn = ("arn:aws:forecast:" + region_name + ":" + str(ACCOUNT_ID) +
                    ":dataset-group/" + dataset_group_name)
        self.dataset_arns = dataset_arns if dataset_arns else []
        self.dataset_group_name = dataset_group_name
        self.domain = domain
        self.tags = tags
        self._validate()
Example #27
 def to_dict(self):
     key_dict = {
         "KeyMetadata": {
             "AWSAccountId": self.account_id,
             "Arn": self.arn,
             "CreationDate": "%d" % unix_time(),
             "Description": self.description,
             "Enabled": self.enabled,
             "KeyId": self.id,
             "KeyUsage": self.key_usage,
             "KeyState": self.key_state,
         }
     }
     if self.key_state == 'PendingDeletion':
         key_dict['KeyMetadata']['DeletionDate'] = iso_8601_datetime_without_milliseconds(
             self.deletion_date
         )
     return key_dict
Example #28
    def _send_to_sqs_queue(self, resource_id, event, group_id=None):
        from moto.sqs import sqs_backends

        event_copy = copy.deepcopy(event)
        event_copy["time"] = iso_8601_datetime_without_milliseconds(
            datetime.utcfromtimestamp(event_copy["time"]))

        if group_id:
            queue_attr = sqs_backends[self.region_name].get_queue_attributes(
                queue_name=resource_id,
                attribute_names=["ContentBasedDeduplication"])
            if queue_attr["ContentBasedDeduplication"] == "false":
                warnings.warn(
                    "To let EventBridge send messages to your SQS FIFO queue, you must enable content-based deduplication."
                )
                return

        sqs_backends[self.region_name].send_message(
            queue_name=resource_id,
            message_body=json.dumps(event_copy),
            group_id=group_id,
        )
Example #29
    def __init__(
        self,
        name,
        release_label,
        application_type,
        client_token,
        region_name,
        initial_capacity,
        maximum_capacity,
        tags,
        auto_start_configuration,
        auto_stop_configuration,
        network_configuration,
    ):
        # Provided parameters
        self.name = name
        self.release_label = release_label
        self.application_type = application_type.capitalize()
        self.client_token = client_token
        self.initial_capacity = initial_capacity
        self.maximum_capacity = maximum_capacity
        self.auto_start_configuration = (auto_start_configuration
                                         or default_auto_start_configuration())
        self.auto_stop_configuration = (auto_stop_configuration
                                        or default_auto_stop_configuration())
        self.network_configuration = network_configuration
        self.tags = tags or {}

        # Service-generated-parameters
        self.id = random_appplication_id()
        self.arn = APPLICATION_ARN_TEMPLATE.format(partition="aws",
                                                   region=region_name,
                                                   application_id=self.id)
        self.state = APPLICATION_STATUS
        self.state_details = ""
        self.created_at = iso_8601_datetime_without_milliseconds(
            datetime.today().replace(hour=0, minute=0, second=0,
                                     microsecond=0))
        self.updated_at = self.created_at
Example #30
 def __init__(
     self,
     name,
     virtual_cluster_id,
     client_token,
     execution_role_arn,
     release_label,
     job_driver,
     configuration_overrides,
     region_name,
     aws_partition,
     tags,
 ):
     self.id = random_job_id()
     self.name = name
     self.virtual_cluster_id = virtual_cluster_id
     self.arn = JOB_ARN_TEMPLATE.format(
         partition=aws_partition,
         region=region_name,
         virtual_cluster_id=self.virtual_cluster_id,
         job_id=self.id,
     )
     self.state = JOB_STATUS
     self.client_token = client_token
     self.execution_role_arn = execution_role_arn
     self.release_label = release_label
     self.job_driver = job_driver
     self.configuration_overrides = configuration_overrides
     self.created_at = iso_8601_datetime_without_milliseconds(
         datetime.today().replace(hour=0, minute=0, second=0,
                                  microsecond=0))
     self.created_by = None
     self.finished_at = None
     self.state_details = None
     self.failure_reason = None
     self.tags = tags
Example #31
 def enabled_iso_8601(self):
     return iso_8601_datetime_without_milliseconds(self.enable_date)
Example #32
 def last_used_iso_8601(self):
     return iso_8601_datetime_without_milliseconds(self.last_used)
Example #33
 def created_iso_8601(self):
     return iso_8601_datetime_without_milliseconds(self.created)