Example #1
    def test_clean_branch_name(self):
        self.assertIsNone(utils.clean_branch_name(None))
        self.assertEqual("", utils.clean_branch_name(""))
        self.assertEqual("master", utils.clean_branch_name("master"))
        self.assertEqual("master", utils.clean_branch_name("local/master"))
        self.assertEqual("for-next", utils.clean_branch_name("local/for-next"))
        self.assertEqual("linux-4.4.y",
                         utils.clean_branch_name("local/linux-4.4.y"))
Example #2
    def test_clean_branch_name(self):
        self.assertIsNone(utils.clean_branch_name(None))
        self.assertEqual("", utils.clean_branch_name(""))
        self.assertEqual("master", utils.clean_branch_name("master"))
        self.assertEqual("master", utils.clean_branch_name("local/master"))
        self.assertEqual("for-next", utils.clean_branch_name("local/for-next"))
        self.assertEqual(
            "linux-4.4.y", utils.clean_branch_name("local/linux-4.4.y"))
Example #3
def convert_git_branch_field(collection_name):
    connection = get_db_connection()
    database = connection["kernel-ci"]

    for doc in database[collection_name].find():
        git_branch = doc.get(models.GIT_BRANCH_KEY)
        if git_branch and "local" in git_branch:
            git_branch = utils.clean_branch_name(git_branch)

            ret_val = utils.db.update2(
                database, collection_name, {models.ID_KEY: doc[models.ID_KEY]},
                {"$set": {
                    models.GIT_BRANCH_KEY: git_branch
                }})

            if ret_val == 500:
                log_error_update(collection_name, doc)

    connection.close()
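
The migration above walks a whole collection and rewrites every git_branch value that still contains "local"; log_error_update is a helper defined elsewhere in the same script. A hedged driver for it, assuming which collections are affected, might be:

# Sketch only: the collection names below are assumptions, not taken
# from the original migration script.
if __name__ == "__main__":
    for name in ("job", "build"):
        convert_git_branch_field(name)
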
Example #4
    def _post(self, *args, **kwargs):
        response = hresponse.HandlerResponse()

        obj = kwargs["json_obj"]

        job = obj.get(models.JOB_KEY)
        kernel = obj.get(models.KERNEL_KEY)
        git_branch = utils.clean_branch_name(obj.get(models.GIT_BRANCH_KEY))
        status = obj.get(models.STATUS_KEY, None)

        if not status:
            status = models.PASS_STATUS

        if (status in models.VALID_JOB_STATUS):
            ret_val = utils.db.find_and_update(
                self.collection,
                {
                    models.GIT_BRANCH_KEY: git_branch,
                    models.JOB_KEY: job,
                    models.KERNEL_KEY: kernel
                },
                {models.STATUS_KEY: status}
            )

            if ret_val == 404:
                response.status_code = 404
                response.reason = JOB_NOT_FOUND % (job, kernel, git_branch)
            elif ret_val == 500:
                response.status_code = 500
                response.reason = INTERNAL_ERROR % (job, kernel, git_branch)
            else:
                response.reason = \
                    JOB_UPDATED % (job, kernel, git_branch, status)
                # Create the build logs summary file.
                taskb.create_build_logs_summary.apply_async(
                    [job, kernel, git_branch])
        else:
            response.status_code = 400
            response.reason = \
                INVALID_STATUS % (status, str(models.VALID_JOB_STATUS))

        return response
Example #5
    def _post(self, *args, **kwargs):
        response = hresponse.HandlerResponse()

        obj = kwargs["json_obj"]

        job = obj.get(models.JOB_KEY)
        kernel = obj.get(models.KERNEL_KEY)
        git_branch = utils.clean_branch_name(obj.get(models.GIT_BRANCH_KEY))
        status = obj.get(models.STATUS_KEY, None)

        if not status:
            status = models.PASS_STATUS

        if (status in models.VALID_JOB_STATUS):
            ret_val = utils.db.find_and_update(
                self.collection, {
                    models.GIT_BRANCH_KEY: git_branch,
                    models.JOB_KEY: job,
                    models.KERNEL_KEY: kernel
                }, {models.STATUS_KEY: status})

            if ret_val == 404:
                response.status_code = 404
                response.reason = JOB_NOT_FOUND % (job, kernel, git_branch)
            elif ret_val == 500:
                response.status_code = 500
                response.reason = INTERNAL_ERROR % (job, kernel, git_branch)
            else:
                response.reason = \
                    JOB_UPDATED % (job, kernel, git_branch, status)
                # Create the build logs summary file.
                taskb.create_build_logs_summary.apply_async(
                    [job, kernel, git_branch])
        else:
            response.status_code = 400
            response.reason = \
                INVALID_STATUS % (status, str(models.VALID_JOB_STATUS))

        return response
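
Both variants of this handler read the same keys from the incoming JSON body. A request payload could therefore look roughly like the dictionary below; the key constants come from models as used in the handler, while the concrete values are invented for illustration:

# Illustrative payload only; values are invented.
json_obj = {
    models.JOB_KEY: "next",
    models.KERNEL_KEY: "next-20170101",
    models.GIT_BRANCH_KEY: "local/master",   # cleaned to "master"
    models.STATUS_KEY: models.PASS_STATUS,   # defaults to PASS when omitted
}
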
Example #6
    def _post(self, *args, **kwargs):
        response = hresponse.HandlerResponse(202)
        json_obj = kwargs["json_obj"]
        j_get = json_obj.get

        # Mandatory keys
        job = j_get(models.JOB_KEY)
        kernel = j_get(models.KERNEL_KEY)
        branch = utils.clean_branch_name(j_get(models.GIT_BRANCH_KEY))

        # Optional keys
        report_type = j_get(models.REPORT_TYPE_KEY)
        countdown = j_get(models.DELAY_KEY)
        if countdown is None:
            countdown = self.settings["senddelay"]

        # Deprecated - ToDo: use report_type only in client code
        if j_get(models.SEND_BOOT_REPORT_KEY):
            report_type = 'boot'
        elif j_get(models.SEND_BUILD_REPORT_KEY):
            report_type = 'build'

        report_keys = REPORT_TYPE_KEYS.get(report_type)
        if not report_keys:
            response.status_code = 400
            response.reason = (
                "Invalid report type: {}.  Valid values are: {}".format(
                    report_type, ", ".join(REPORT_TYPE_KEYS.keys())))
            return response
        report_data = {k: j_get(k) for k in report_keys}

        email_format = j_get(models.EMAIL_FORMAT_KEY, None)
        email_format, email_errors = _check_email_format(email_format)
        response.errors = email_errors
        schedule_errors = None

        try:
            countdown = int(countdown)
            if countdown < 0:
                countdown = abs(countdown)
                response.errors = (
                    "Negative value specified for the '%s' key, "
                    "its positive value will be used instead (%ds)" %
                    (models.DELAY_KEY, countdown))

            if countdown > MAX_DELAY:
                response.errors = (
                    "Delay value specified out of range (%ds), "
                    "maximum delay permitted (%ds) will be used instead" %
                    (countdown, MAX_DELAY))
                countdown = MAX_DELAY

            when = (datetime.datetime.now(tz=bson.tz_util.utc) +
                    datetime.timedelta(seconds=countdown))

            def j_get_list(key):
                value = j_get(key)
                if value is None:
                    value = []
                elif not isinstance(value, list):
                    value = [value]
                return value

            email_opts = {
                "to": j_get_list(models.REPORT_SEND_TO_KEY),
                "cc": j_get_list(models.REPORT_CC_KEY),
                "bcc": j_get_list(models.REPORT_BCC_KEY),
                "in_reply_to": j_get(models.IN_REPLY_TO_KEY),
                "subject": j_get(models.SUBJECT_KEY),
                "format": email_format,
            }

            report_type_or_plan = j_get(models.PLAN_KEY, report_type)

            self.log.info(TRIGGER_RECEIVED,
                          self.request.remote_ip, job, branch, kernel,
                          datetime.datetime.utcnow(), report_type_or_plan)

            hashable_str = ''.join(
                str(x) for x in [
                    job,
                    branch,
                    kernel,
                    email_opts["to"],
                    email_opts["cc"],
                    email_opts["bcc"],
                    email_opts["in_reply_to"],
                    email_opts["subject"],
                    report_type_or_plan,
                    str(email_format),
                ])
            schedule_hash = hashlib.sha1(hashable_str).hexdigest()

            try:
                lock_key = '-'.join(
                    ['email', report_type, job, branch, kernel])

                with redis.lock.Lock(self.redisdb, lock_key, timeout=2):
                    if not self.redisdb.exists(schedule_hash):
                        self.redisdb.set(schedule_hash, "schedule", ex=86400)

                        schedule_method = getattr(
                            self, "_schedule_{}_report".format(report_type))

                        errors, response.errors = schedule_method(
                            report_data, email_opts, countdown)

                        response.reason, response.status_code = \
                            _check_status(report_type, errors, when)
                    else:
                        self.log.warn(TRIGGER_RECEIVED_ALREADY, job, branch,
                                      kernel, report_type_or_plan)
                        taskq.send_multiple_emails_error.apply_async([
                            job, branch, kernel,
                            datetime.datetime.utcnow(), email_format,
                            report_type, email_opts
                        ])
                        response.status_code = 409
                        response.reason = ERR_409_MESSAGE
            except redis.lock.LockError:
                # Probably only reached during the unit tests.
                pass
        except (TypeError, ValueError):
            response.status_code = 400
            response.reason = ("Wrong value specified for 'delay': %s" %
                               countdown)

        return response
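
This handler needs the mandatory job, kernel and branch keys, a report type and, optionally, a delay and e-mail settings. A sketch of such a trigger payload, with invented values and the key constants from models, might be:

# Illustrative trigger payload only; values are invented and the exact
# accepted format of the e-mail related fields may differ.
json_obj = {
    models.JOB_KEY: "mainline",
    models.KERNEL_KEY: "v4.9-rc1",
    models.GIT_BRANCH_KEY: "local/master",
    models.REPORT_TYPE_KEY: "build",
    models.DELAY_KEY: 3600,  # seconds before the report task runs
    models.REPORT_SEND_TO_KEY: ["reports@example.com"],
}
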
Example #7
    def _post(self, *args, **kwargs):
        response = hresponse.HandlerResponse(202)

        json_obj = kwargs["json_obj"]

        j_get = json_obj.get
        job = j_get(models.JOB_KEY)
        kernel = j_get(models.KERNEL_KEY)
        branch = utils.clean_branch_name(j_get(models.GIT_BRANCH_KEY))
        lab_name = j_get(models.LAB_NAME_KEY, None)

        countdown = j_get(models.DELAY_KEY, self.settings["senddelay"])
        if countdown is None:
            countdown = self.settings["senddelay"]

        try:
            send_boot = bool(j_get(models.SEND_BOOT_REPORT_KEY, False))
            send_build = bool(j_get(models.SEND_BUILD_REPORT_KEY, False))

            email_format = j_get(models.EMAIL_FORMAT_KEY, None)
            email_format, email_errors = _check_email_format(email_format)
            response.errors = email_errors

            boot_errors = False
            build_errors = False

            if send_boot or send_build:
                countdown = int(countdown)
                if countdown < 0:
                    countdown = abs(countdown)
                    response.errors = (
                        "Negative value specified for the '%s' key, "
                        "its positive value will be used instead (%ds)" %
                        (models.DELAY_KEY, countdown))

                if countdown > MAX_DELAY:
                    response.errors = (
                        "Delay value specified out of range (%ds), "
                        "maximum delay permitted (%ds) will be used instead" %
                        (countdown, MAX_DELAY))
                    countdown = MAX_DELAY

                when = (datetime.datetime.now(tz=bson.tz_util.utc) +
                        datetime.timedelta(seconds=countdown))

                schedule_data = {
                    "countdown":
                    countdown,
                    "boot_emails":
                    j_get(models.BOOT_REPORT_SEND_TO_KEY, None),
                    "boot_cc_emails":
                    j_get(models.BOOT_REPORT_SEND_CC_KEY, None),
                    "boot_bcc_emails":
                    j_get(models.BOOT_REPORT_SEND_BCC_KEY, None),
                    "build_emails":
                    j_get(models.BUILD_REPORT_SEND_TO_KEY, None),
                    "build_cc_emails":
                    j_get(models.BUILD_REPORT_SEND_CC_KEY, None),
                    "build_bcc_emails":
                    j_get(models.BUILD_REPORT_SEND_BCC_KEY, None),
                    "generic_emails":
                    j_get(models.REPORT_SEND_TO_KEY, None),
                    "generic_cc_emails":
                    j_get(models.REPORT_CC_KEY, None),
                    "generic_bcc_emails":
                    j_get(models.REPORT_BCC_KEY, None),
                    "in_reply_to":
                    j_get(models.IN_REPLY_TO_KEY, None),
                    "subject":
                    j_get(models.SUBJECT_KEY, None),
                    "db_options":
                    self.settings["dboptions"],
                }

                email_type = []
                if send_boot:
                    email_type.append("boot")

                if send_build:
                    email_type.append("build")

                self.log.info(TRIGGER_RECEIVED, self.request.remote_ip, job,
                              branch, kernel, datetime.datetime.utcnow(),
                              str(email_type))

                hashable_str = "{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}".format(
                    job, branch, kernel, str(schedule_data["boot_emails"]),
                    str(schedule_data["boot_cc_emails"]),
                    str(schedule_data["boot_bcc_emails"]),
                    str(schedule_data["build_emails"]),
                    str(schedule_data["build_cc_emails"]),
                    str(schedule_data["build_bcc_emails"]),
                    str(schedule_data["generic_emails"]),
                    str(schedule_data["generic_cc_emails"]),
                    str(schedule_data["generic_bcc_emails"]),
                    schedule_data["in_reply_to"], schedule_data["subject"],
                    str(email_type), str(email_format))
                schedule_hash = hashlib.sha1(hashable_str).hexdigest()

                try:
                    lock_key = \
                        "email-{}-{}-{}-{}".format(
                            str(email_type), job, branch, kernel)

                    with redis.lock.Lock(self.redisdb, lock_key, timeout=2):
                        if not self.redisdb.exists(schedule_hash):
                            self.redisdb.set(schedule_hash,
                                             "schedule",
                                             ex=86400)

                            if send_boot:
                                email_type.append("boot")
                                boot_errors, response.errors = \
                                    self._schedule_boot_report(
                                        job,
                                        branch,
                                        kernel,
                                        lab_name, email_format, schedule_data)

                            if send_build:
                                build_errors, response.errors = \
                                    self._schedule_build_report(
                                        job,
                                        branch,
                                        kernel, email_format, schedule_data)

                            response.reason, response.status_code = \
                                _check_status(
                                    send_boot,
                                    send_build,
                                    boot_errors, build_errors, when)
                        else:
                            self.log.warn(TRIGGER_RECEIVED_ALREADY, job,
                                          branch, kernel, str(email_type))
                            taskq.send_multiple_emails_error.apply_async([
                                job, branch, kernel,
                                datetime.datetime.utcnow(), email_format,
                                email_type, schedule_data
                            ])
                            response.status_code = 409
                            response.reason = ERR_409_MESSAGE
                except redis.lock.LockError:
                    # Probably only reached during the unit tests.
                    pass
            else:
                response.status_code = 400
                response.reason = (
                    "Don't know which report to send: either specify "
                    " '%s' or '%s'" % (models.SEND_BOOT_REPORT_KEY,
                                       models.SEND_BUILD_REPORT_KEY))
        except (TypeError, ValueError):
            response.status_code = 400
            response.reason = ("Wrong value specified for 'delay': %s" %
                               countdown)

        return response
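
This older variant of the handler relies on the explicit boot/build flags instead of a report type, together with the boot- and build-specific address keys. A hedged example payload, again with invented values, could be:

# Illustrative payload only; values are invented.
json_obj = {
    models.JOB_KEY: "stable",
    models.KERNEL_KEY: "v4.4.40",
    models.GIT_BRANCH_KEY: "local/linux-4.4.y",
    models.SEND_BOOT_REPORT_KEY: True,
    models.SEND_BUILD_REPORT_KEY: False,
    models.BOOT_REPORT_SEND_TO_KEY: ["reports@example.com"],
    models.DELAY_KEY: 600,
}
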
Example #8
def import_single_build(json_obj, db_options, base_path=utils.BASE_PATH):
    """Import a single build from the file system.

    :param json_obj: The json object containing the necessary data.
    :type json_obj: dictionary
    :param db_options: The database connection options.
    :type db_options: dictionary
    :param base_path: The base path on the file system where to look for.
    :type base_path: string
    :return The defconfig ID, the job ID and the errors data structure.
    """
    errors = {}
    job_id = None
    build_doc = None
    build_id = None
    j_get = json_obj.get

    arch = j_get(models.ARCHITECTURE_KEY)
    job = j_get(models.JOB_KEY)
    kernel = j_get(models.KERNEL_KEY)
    defconfig = j_get(models.DEFCONFIG_KEY)
    git_branch = j_get(models.GIT_BRANCH_KEY)
    build_environment = j_get(models.BUILD_ENVIRONMENT_KEY)
    defconfig_full = j_get(models.DEFCONFIG_FULL_KEY, None)

    # Clean up the branch name so we don't have "local/*" anymore.
    git_branch = utils.clean_branch_name(git_branch)

    if (utils.valid_name(job) and utils.valid_name(kernel)):
        # New directory structure:
        # $job/$branch/$kernel/$arch/$defconfig/$environment

        parent_dir = os.path.join(base_path, job, git_branch, kernel, arch)
        build_dir = os.path.join(parent_dir, defconfig_full or defconfig,
                                 build_environment)

        if os.path.isdir(build_dir):
            try:
                database = utils.db.get_db_connection(db_options)

                ret_val, job_doc, job_id = _get_or_create_job(
                    job, kernel, git_branch, database, db_options)
                if ret_val != 201 and job_id is None:
                    err_msg = (
                        "Error saving/finding job document '%s-%s-%s' for "
                        "'%s-%s' might not be linked to its job")
                    utils.LOG.error(err_msg, job, kernel, git_branch, arch,
                                    defconfig, build_environment)
                    ERR_ADD(
                        errors, ret_val,
                        err_msg % (job, kernel, git_branch, arch, defconfig,
                                   build_environment))

                build_doc = _traverse_build_dir(build_dir, job_doc, errors,
                                                database)

                ret_val = _update_job_doc(job_doc, job_id, job_doc.status,
                                          build_doc, database)
                if ret_val != 201:
                    err_msg = (
                        "Error updating job document '%s-%s-%s' with values "
                        "from build doc")
                    utils.LOG.error(err_msg, job, git_branch, kernel)
                    ERR_ADD(errors, ret_val,
                            err_msg % (job, git_branch, kernel))
                if build_doc:
                    ret_val, build_id = utils.db.save(database,
                                                      build_doc,
                                                      manipulate=True)
                if ret_val != 201:
                    err_msg = "Error saving build document '%s-%s-%s-%s-%s-%s'"
                    utils.LOG.error(err_msg, job, git_branch, kernel, arch,
                                    defconfig, build_environment)
                    ERR_ADD(
                        errors, ret_val,
                        err_msg % (job, git_branch, kernel, arch, defconfig,
                                   build_environment))
            except pymongo.errors.ConnectionFailure, ex:
                utils.LOG.exception(ex)
                utils.LOG.error("Error getting database connection")
                utils.LOG.warn(
                    "Build for '%s-%s-%s-%s-%s-%s' will not be imported", job,
                    git_branch, kernel, arch, defconfig, build_environment)
                ERR_ADD(
                    errors, 500,
                    "Internal server error: build for '%s-%s-%s-%s-%s-%s' "
                    "will not be imported" % (job, git_branch, kernel, arch,
                                              defconfig, build_environment))
        else:
            err_msg = ("No build directory found for '%s-%s-%s-%s-%s-%s': "
                       "has everything been uploaded?")
            utils.LOG.error(err_msg, job, git_branch, kernel, arch, defconfig,
                            build_environment)
            ERR_ADD(
                errors, 500, err_msg %
                (job, git_branch, kernel, arch, defconfig, build_environment))
Example #9
def import_single_build(json_obj, db_options, base_path=utils.BASE_PATH):
    """Import a single build from the file system.

    :param json_obj: The json object containing the necessary data.
    :type json_obj: dictionary
    :param db_options: The database connection options.
    :type db_options: dictionary
    :param base_path: The base path on the file system where to look for.
    :type base_path: string
    :return The defconfig ID, the job ID and the errors data structure.
    """
    errors = {}
    job_id = None
    build_doc = None
    build_id = None
    j_get = json_obj.get

    arch = j_get(models.ARCHITECTURE_KEY)
    job = j_get(models.JOB_KEY)
    kernel = j_get(models.KERNEL_KEY)
    defconfig = j_get(models.DEFCONFIG_KEY)
    git_branch = j_get(models.GIT_BRANCH_KEY)
    build_environment = j_get(models.BUILD_ENVIRONMENT_KEY)
    defconfig_full = j_get(models.DEFCONFIG_FULL_KEY, None)

    # Clean up the branch name so we don't have "local/*" anymore.
    git_branch = utils.clean_branch_name(git_branch)

    if (utils.valid_name(job) and utils.valid_name(kernel)):
        # New directory structure:
        # $job/$branch/$kernel/$arch/$defconfig/$environment

        parent_dir = os.path.join(base_path, job, git_branch, kernel, arch)
        build_dir = os.path.join(
            parent_dir, defconfig_full or defconfig, build_environment)

        if os.path.isdir(build_dir):
            try:
                database = utils.db.get_db_connection(db_options)

                ret_val, job_doc, job_id = _get_or_create_job(
                    job, kernel, git_branch, database, db_options)
                if ret_val != 201 and job_id is None:
                    err_msg = (
                        "Error saving/finding job document '%s-%s-%s' for "
                        "'%s-%s' might not be linked to its job")
                    utils.LOG.error(
                        err_msg, job, kernel, git_branch, arch, defconfig,
                        build_environment)
                    ERR_ADD(
                        errors,
                        ret_val,
                        err_msg % (job, kernel, git_branch, arch, defconfig,
                                   build_environment)
                    )

                build_doc = _traverse_build_dir(
                    build_dir, job_doc, errors, database)

                ret_val = _update_job_doc(
                    job_doc,
                    job_id, job_doc.status, build_doc, database)
                if ret_val != 201:
                    err_msg = (
                        "Error updating job document '%s-%s-%s' with values "
                        "from build doc")
                    utils.LOG.error(err_msg, job, git_branch, kernel)
                    ERR_ADD(
                        errors, ret_val, err_msg % (job, git_branch, kernel))
                if build_doc:
                    ret_val, build_id = utils.db.save(
                        database, build_doc, manipulate=True)
                if ret_val != 201:
                    err_msg = "Error saving build document '%s-%s-%s-%s-%s-%s'"
                    utils.LOG.error(
                        err_msg, job, git_branch, kernel, arch, defconfig,
                        build_environment)
                    ERR_ADD(
                        errors,
                        ret_val, err_msg % (
                            job, git_branch, kernel, arch, defconfig,
                            build_environment))
            except pymongo.errors.ConnectionFailure, ex:
                utils.LOG.exception(ex)
                utils.LOG.error("Error getting database connection")
                utils.LOG.warn(
                    "Build for '%s-%s-%s-%s-%s-%s' will not be imported",
                    job, git_branch, kernel, arch, defconfig,
                    build_environment)
                ERR_ADD(
                    errors, 500,
                    "Internal server error: build for '%s-%s-%s-%s-%s-%s' "
                    "will not be imported" % (
                        job, git_branch, kernel, arch, defconfig,
                        build_environment)
                )
        else:
            err_msg = (
                "No build directory found for '%s-%s-%s-%s-%s-%s': "
                "has everything been uploaded?")
            utils.LOG.error(
                err_msg, job, git_branch, kernel, arch, defconfig,
                build_environment)
            ERR_ADD(errors, 500, err_msg % (
                job, git_branch, kernel, arch, defconfig, build_environment))
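
Both copies of import_single_build derive the on-disk path from the cleaned branch name, following the $job/$branch/$kernel/$arch/$defconfig/$environment layout mentioned in the comments. With a payload like the sketch below (values invented, key constants from models as used above), the build would be expected in the indicated directory under base_path:

# Illustrative payload only; values are invented.
json_obj = {
    models.ARCHITECTURE_KEY: "arm64",
    models.JOB_KEY: "next",
    models.KERNEL_KEY: "next-20170101",
    models.DEFCONFIG_KEY: "defconfig",
    models.DEFCONFIG_FULL_KEY: "defconfig",
    models.GIT_BRANCH_KEY: "local/master",   # cleaned to "master"
    models.BUILD_ENVIRONMENT_KEY: "gcc-7",
}
# Expected build directory, relative to base_path:
#   next/master/next-20170101/arm64/defconfig/gcc-7
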
Example #10
    def _post(self, *args, **kwargs):
        response = hresponse.HandlerResponse(202)
        json_obj = kwargs["json_obj"]
        j_get = json_obj.get

        # Mandatory keys
        job = j_get(models.JOB_KEY)
        kernel = j_get(models.KERNEL_KEY)
        branch = utils.clean_branch_name(j_get(models.GIT_BRANCH_KEY))

        # Optional keys
        report_type = j_get(models.REPORT_TYPE_KEY)
        countdown = j_get(models.DELAY_KEY)
        if countdown is None:
            countdown = self.settings["senddelay"]

        # Deprecated - ToDo: use report_type only in client code
        if j_get(models.SEND_BOOT_REPORT_KEY):
            report_type = 'boot'
        elif j_get(models.SEND_BUILD_REPORT_KEY):
            report_type = 'build'

        report_keys = REPORT_TYPE_KEYS.get(report_type)
        if not report_keys:
            response.status_code = 400
            response.reason = (
                "Invalid report type: {}.  Valid values are: {}"
                .format(report_type, ", ".join(REPORT_TYPE_KEYS.keys()))
            )
            return response
        report_data = {k: j_get(k) for k in report_keys}

        email_format = j_get(models.EMAIL_FORMAT_KEY, None)
        email_format, email_errors = _check_email_format(email_format)
        response.errors = email_errors
        schedule_errors = None

        try:
            countdown = int(countdown)
            if countdown < 0:
                countdown = abs(countdown)
                response.errors = (
                    "Negative value specified for the '%s' key, "
                    "its positive value will be used instead (%ds)" %
                    (models.DELAY_KEY, countdown)
                )

            if countdown > MAX_DELAY:
                response.errors = (
                    "Delay value specified out of range (%ds), "
                    "maximum delay permitted (%ds) will be used instead" %
                    (countdown, MAX_DELAY)
                )
                countdown = MAX_DELAY

            when = (
                datetime.datetime.now(tz=bson.tz_util.utc) +
                datetime.timedelta(seconds=countdown))

            def j_get_list(key):
                value = j_get(key)
                if value is None:
                    value = []
                elif not isinstance(value, list):
                    value = [value]
                return value

            email_opts = {
                "to": j_get_list(models.REPORT_SEND_TO_KEY),
                "cc": j_get_list(models.REPORT_CC_KEY),
                "bcc": j_get_list(models.REPORT_BCC_KEY),
                "in_reply_to": j_get(models.IN_REPLY_TO_KEY),
                "subject": j_get(models.SUBJECT_KEY),
                "format": email_format,
            }

            report_type_or_plan = j_get(models.PLAN_KEY, report_type)

            self.log.info(
                TRIGGER_RECEIVED,
                self.request.remote_ip,
                job,
                branch,
                kernel,
                datetime.datetime.utcnow(),
                report_type_or_plan
            )

            hashable_str = ''.join(str(x) for x in [
                job,
                branch,
                kernel,
                email_opts["to"],
                email_opts["cc"],
                email_opts["bcc"],
                email_opts["in_reply_to"],
                email_opts["subject"],
                report_type_or_plan,
                str(email_format),
            ])
            schedule_hash = hashlib.sha1(hashable_str).hexdigest()

            try:
                lock_key = '-'.join([
                    'email', report_type, job, branch, kernel])

                with redis.lock.Lock(self.redisdb, lock_key, timeout=2):
                    if not self.redisdb.exists(schedule_hash):
                        self.redisdb.set(
                            schedule_hash, "schedule", ex=86400)

                        schedule_method = getattr(
                            self, "_schedule_{}_report".format(report_type))

                        errors, response.errors = schedule_method(
                            report_data, email_opts, countdown)

                        response.reason, response.status_code = \
                            _check_status(report_type, errors, when)
                    else:
                        self.log.warn(
                            TRIGGER_RECEIVED_ALREADY,
                            job, branch, kernel, report_type_or_plan
                        )
                        taskq.send_multiple_emails_error.apply_async(
                            [
                                job,
                                branch,
                                kernel,
                                datetime.datetime.utcnow(),
                                email_format,
                                report_type,
                                email_opts
                            ]
                        )
                        response.status_code = 409
                        response.reason = ERR_409_MESSAGE
            except redis.lock.LockError:
                # Probably only reached during the unit tests.
                pass
        except (TypeError, ValueError):
            response.status_code = 400
            response.reason = (
                "Wrong value specified for 'delay': %s" % countdown)

        return response