def taskcluster_upload(self):
        auth = os.path.join(os.getcwd(), self.config['taskcluster_credentials_file'])
        credentials = {}
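        # execfile() (Python 2) runs the credentials file and copies its
        # global names, including the two keys read below, into the dict.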
        execfile(auth, credentials)
        client_id = credentials.get('taskcluster_clientId')
        access_token = credentials.get('taskcluster_accessToken')
        if not client_id or not access_token:
            self.warning('Skipping S3 file upload: No taskcluster credentials.')
            return

        # We need to activate the virtualenv so that we can import taskcluster
        # (and its dependent modules, like requests and hawk).  Normally we
        # could create the virtualenv as an action, but there are some odd
        # dependencies: query_build_env() is called from build(), which has to
        # run before the virtualenv can be created.
        self.disable_mock()
        self.create_virtualenv()
        self.enable_mock()
        self.activate_virtualenv()

        # Enable Taskcluster debug logging, so at least we get some debug
        # messages while we are testing uploads.
        logging.getLogger('taskcluster').setLevel(logging.DEBUG)

        branch = self.config['branch']
        platform = self.config['platform']
        revision = self._query_revision()
        tc = Taskcluster(branch,
                         self.query_pushdate(),
                         client_id,
                         access_token,
                         self.log_obj,
                         )

        index = self.config.get('taskcluster_index', 'index.garbage.staging')
        # TODO: Bug 1165980 - these should be in tree. Note the '.l10n' suffix.
        routes = [
            "%s.buildbot.branches.%s.%s.l10n" % (index, branch, platform),
            "%s.buildbot.revisions.%s.%s.%s.l10n" % (index, revision, branch, platform),
        ]

        task = tc.create_task(routes)
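        # Claiming the task makes this process its worker, so it can attach
        # artifacts and later report completion.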
        tc.claim_task(task)

        self.info("Uploading files to S3: %s" % self.upload_files)
        for upload_file in self.upload_files:
            # Create an S3 artifact for each file that gets uploaded. We also
            # check the uploaded file against the property conditions so that we
            # can set the buildbot config with the correct URLs for package
            # locations.
            tc.create_artifact(task, upload_file)
        tc.report_completed(task)
Example 2
    def taskcluster_upload(self):
        auth = os.path.join(os.getcwd(), self.config['taskcluster_credentials_file'])
        credentials = {}
        execfile(auth, credentials)
        client_id = credentials.get('taskcluster_clientId')
        access_token = credentials.get('taskcluster_accessToken')
        if not client_id or not access_token:
            self.warning('Skipping S3 file upload: No taskcluster credentials.')
            return

        # We need to activate the virtualenv so that we can import taskcluster
        # (and its dependent modules, like requests and hawk).  Normally we
        # could create the virtualenv as an action, but there are some odd
        # dependencies: query_build_env() is called from build(), which has to
        # run before the virtualenv can be created.
        self.disable_mock()
        self.create_virtualenv()
        self.enable_mock()
        self.activate_virtualenv()

        branch = self.config['branch']
        platform = self.config['platform']
        revision = self._query_revision()
        repo = self.query_l10n_repo()
        if not repo:
            self.fatal("Unable to determine repository for querying the push info.")
        pushinfo = self.vcs_query_pushinfo(repo, revision, vcs='hgtool')
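        # Render the push timestamp (seconds since the epoch, UTC) as
        # YYYYMMDDHHMMSS; slices of it feed the route templates below.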
        pushdate = time.strftime('%Y%m%d%H%M%S', time.gmtime(pushinfo.pushdate))

        routes_json = os.path.join(self.query_abs_dirs()['abs_mozilla_dir'],
                                   'testing/taskcluster/routes.json')
        with open(routes_json) as f:
            contents = json.load(f)
            templates = contents['l10n']

        for locale, files in self.upload_files.iteritems():
            self.info("Uploading files to S3 for locale '%s': %s" % (locale, files))
            routes = []
            for template in templates:
                fmt = {
                    'index': self.config.get('taskcluster_index', 'index.garbage.staging'),
                    'project': branch,
                    'head_rev': revision,
                    'pushdate': pushdate,
                    'year': pushdate[0:4],
                    'month': pushdate[4:6],
                    'day': pushdate[6:8],
                    'build_product': self.config['stage_product'],
                    'build_name': self.query_build_name(),
                    'build_type': self.query_build_type(),
                    'locale': locale,
                }
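                # buildid_to_dict() splits the buildid timestamp into
                # additional per-component fields for the templates.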
                fmt.update(self.buildid_to_dict(self._query_buildid()))
                routes.append(template.format(**fmt))

            self.info('Using routes: %s' % routes)
            tc = Taskcluster(branch,
                             pushinfo.pushdate, # Use pushdate as the rank
                             client_id,
                             access_token,
                             self.log_obj,
                             )
            task = tc.create_task(routes)
            tc.claim_task(task)

            for upload_file in files:
                # Create an S3 artifact for each file that gets uploaded. We also
                # check the uploaded file against the property conditions so that we
                # can set the buildbot config with the correct URLs for package
                # locations.
                tc.create_artifact(task, upload_file)
            tc.report_completed(task)
Example 3
    def taskcluster_upload(self):
        auth = os.path.join(os.getcwd(), self.config['taskcluster_credentials_file'])
        credentials = {}
        execfile(auth, credentials)
        client_id = credentials.get('taskcluster_clientId')
        access_token = credentials.get('taskcluster_accessToken')
        if not client_id or not access_token:
            self.warning('Skipping S3 file upload: No taskcluster credentials.')
            return

        self.activate_virtualenv()

        dirs = self.query_abs_dirs()
        locales = self.query_locales()
        make = self.query_exe("make")
        upload_env = self.query_upload_env()
        cwd = dirs['abs_locales_dir']
        branch = self.config['branch']
        revision = self.query_revision()
        repo = self.query_l10n_repo()
        pushinfo = self.vcs_query_pushinfo(repo, revision, vcs='hg')
        pushdate = time.strftime('%Y%m%d%H%M%S', time.gmtime(pushinfo.pushdate))
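        # routes.json holds lists of index route templates; the 'l10n'
        # templates contain {placeholders} filled in below via str.format.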
        routes_json = os.path.join(self.query_abs_dirs()['abs_mozilla_dir'],
                                   'testing/mozharness/configs/routes.json')
        with open(routes_json) as routes_file:
            contents = json.load(routes_file)
            templates = contents['l10n']

        for locale in locales:
            output = self.get_output_from_command_m(
                "%s echo-variable-UPLOAD_FILES AB_CD=%s" % (make, locale),
                cwd=cwd,
                env=upload_env,
            )
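            # make prints the UPLOAD_FILES variable for this locale;
            # shlex.split() honours any shell quoting in the file names.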
            files = shlex.split(output)
            abs_files = [os.path.abspath(os.path.join(cwd, f)) for f in files]

            routes = []
            fmt = {
                'index': self.config.get('taskcluster_index', 'index.garbage.staging'),
                'project': branch,
                'head_rev': revision,
                'pushdate': pushdate,
                'year': pushdate[0:4],
                'month': pushdate[4:6],
                'day': pushdate[6:8],
                'build_product': self.config['stage_product'],
                'build_name': self.query_build_name(),
                'build_type': self.query_build_type(),
                'locale': locale,
            }
            for template in templates:
                routes.append(template.format(**fmt))

            self.info('Using routes: %s' % routes)
            tc = Taskcluster(branch,
                             pushinfo.pushdate, # Use pushdate as the rank
                             client_id,
                             access_token,
                             self.log_obj,
                             )
            task = tc.create_task(routes)
            tc.claim_task(task)

            for upload_file in abs_files:
                tc.create_artifact(task, upload_file)
            tc.report_completed(task)
Example 4
    def taskcluster_upload(self):
        auth = os.path.join(os.getcwd(), self.config['taskcluster_credentials_file'])
        credentials = {}
        execfile(auth, credentials)
        client_id = credentials.get('taskcluster_clientId')
        access_token = credentials.get('taskcluster_accessToken')
        if not client_id or not access_token:
            self.warning('Skipping S3 file upload: No taskcluster credentials.')
            return

        # We need to activate the virtualenv so that we can import taskcluster
        # (and its dependent modules, like requests and hawk).  Normally we
        # could create the virtualenv as an action, but there are some odd
        # dependencies: query_build_env() is called from build(), which has to
        # run before the virtualenv can be created.
        self.disable_mock()
        self.create_virtualenv()
        self.enable_mock()
        self.activate_virtualenv()

        # Enable Taskcluster debug logging, so at least we get some debug
        # messages while we are testing uploads.
        logging.getLogger('taskcluster').setLevel(logging.DEBUG)

        branch = self.config['branch']
        platform = self.config['platform']
        revision = self._query_revision()

        routes_json = os.path.join(self.query_abs_dirs()['abs_mozilla_dir'],
                                   'testing/taskcluster/routes.json')
        with open(routes_json) as f:
            contents = json.load(f)
            templates = contents['l10n']

        for locale, files in self.upload_files.iteritems():
            self.info("Uploading files to S3 for locale '%s': %s" % (locale, files))
            routes = []
            for template in templates:
                fmt = {
                    # TODO: Bug 1133074
                    #index = self.config.get('taskcluster_index', 'index.garbage.staging')
                    'index': 'index.garbage.staging.mshal-testing',
                    'project': branch,
                    'head_rev': revision,
                    'build_product': self.config['stage_product'],
                    'build_name': self.query_build_name(),
                    'build_type': self.query_build_type(),
                    'locale': locale,
                }
                fmt.update(self.buildid_to_dict(self._query_buildid()))
                routes.append(template.format(**fmt))

            self.info('Using routes: %s' % routes)

            tc = Taskcluster(branch,
                             self.query_pushdate(),
                             client_id,
                             access_token,
                             self.log_obj,
                             )
            task = tc.create_task(routes)
            tc.claim_task(task)

            for upload_file in files:
                # Create an S3 artifact for each file that gets uploaded. We also
                # check the uploaded file against the property conditions so that we
                # can set the buildbot config with the correct URLs for package
                # locations.
                tc.create_artifact(task, upload_file)
            tc.report_completed(task)
Example 5
    def taskcluster_upload(self):
        auth = os.path.join(os.getcwd(), self.config["taskcluster_credentials_file"])
        credentials = {}
        execfile(auth, credentials)
        client_id = credentials.get("taskcluster_clientId")
        access_token = credentials.get("taskcluster_accessToken")
        if not client_id or not access_token:
            self.warning("Skipping S3 file upload: No taskcluster credentials.")
            return

        self.activate_virtualenv()

        dirs = self.query_abs_dirs()
        locales = self.query_locales()
        make = self.query_exe("make")
        upload_env = self.query_upload_env()
        cwd = dirs["abs_locales_dir"]
        branch = self.config["branch"]
        revision = self.query_revision()
        repo = self.query_l10n_repo()
        pushinfo = self.vcs_query_pushinfo(repo, revision, vcs="hgtool")
        pushdate = time.strftime("%Y%m%d%H%M%S", time.gmtime(pushinfo.pushdate))
        routes_json = os.path.join(self.query_abs_dirs()["abs_mozilla_dir"], "testing/taskcluster/routes.json")
        with open(routes_json) as f:
            contents = json.load(f)
            templates = contents["l10n"]

        for locale in locales:
            output = self.get_output_from_command_m(
                "%s echo-variable-UPLOAD_FILES AB_CD=%s" % (make, locale), cwd=cwd, env=upload_env
            )
            files = shlex.split(output)
            abs_files = [os.path.abspath(os.path.join(cwd, f)) for f in files]

            routes = []
            fmt = {
                "index": self.config.get("taskcluster_index", "index.garbage.staging"),
                "project": branch,
                "head_rev": revision,
                "pushdate": pushdate,
                "year": pushdate[0:4],
                "month": pushdate[4:6],
                "day": pushdate[6:8],
                "build_product": self.config["stage_product"],
                "build_name": self.query_build_name(),
                "build_type": self.query_build_type(),
                "locale": locale,
            }
            for template in templates:
                routes.append(template.format(**fmt))

            self.info("Using routes: %s" % routes)
            tc = Taskcluster(
                branch,
                pushinfo.pushdate,  # Use pushdate as the rank
                client_id,
                access_token,
                self.log_obj,
            )
            task = tc.create_task(routes)
            tc.claim_task(task)

            for upload_file in abs_files:
                tc.create_artifact(task, upload_file)
            tc.report_completed(task)
Example 6
    def taskcluster_upload(self):
        auth = os.path.join(os.getcwd(),
                            self.config['taskcluster_credentials_file'])
        credentials = {}
        execfile(auth, credentials)
        client_id = credentials.get('taskcluster_clientId')
        access_token = credentials.get('taskcluster_accessToken')
        if not client_id or not access_token:
            self.warning(
                'Skipping S3 file upload: No taskcluster credentials.')
            return

        # We need to activate the virtualenv so that we can import taskcluster
        # (and its dependent modules, like requests and hawk).  Normally we
        # could create the virtualenv as an action, but there are some odd
        # dependencies: query_build_env() is called from build(), which has to
        # run before the virtualenv can be created.
        self.disable_mock()
        self.create_virtualenv()
        self.enable_mock()
        self.activate_virtualenv()

        branch = self.config['branch']
        platform = self.config['platform']
        revision = self._query_revision()
        repo = self.query_l10n_repo()
        if not repo:
            self.fatal(
                "Unable to determine repository for querying the push info.")
        pushinfo = self.vcs_query_pushinfo(repo, revision, vcs='hgtool')
        pushdate = time.strftime('%Y%m%d%H%M%S',
                                 time.gmtime(pushinfo.pushdate))

        routes_json = os.path.join(self.query_abs_dirs()['abs_mozilla_dir'],
                                   'testing/taskcluster/routes.json')
        with open(routes_json) as f:
            contents = json.load(f)
            templates = contents['l10n']

        for locale, files in self.upload_files.iteritems():
            self.info("Uploading files to S3 for locale '%s': %s" %
                      (locale, files))
            routes = []
            for template in templates:
                fmt = {
                    'index': self.config.get('taskcluster_index',
                                             'index.garbage.staging'),
                    'project': branch,
                    'head_rev': revision,
                    'pushdate': pushdate,
                    'year': pushdate[0:4],
                    'month': pushdate[4:6],
                    'day': pushdate[6:8],
                    'build_product': self.config['stage_product'],
                    'build_name': self.query_build_name(),
                    'build_type': self.query_build_type(),
                    'locale': locale,
                }
                fmt.update(self.buildid_to_dict(self._query_buildid()))
                routes.append(template.format(**fmt))

            self.info('Using routes: %s' % routes)
            tc = Taskcluster(
                branch,
                pushinfo.pushdate,  # Use pushdate as the rank
                client_id,
                access_token,
                self.log_obj,
            )
            task = tc.create_task(routes)
            tc.claim_task(task)

            for upload_file in files:
                # Create an S3 artifact for each file that gets uploaded. We also
                # check the uploaded file against the property conditions so that we
                # can set the buildbot config with the correct URLs for package
                # locations.
                tc.create_artifact(task, upload_file)
            tc.report_completed(task)
Example 7
    def taskcluster_upload(self):
        auth = os.path.join(os.getcwd(), self.config["taskcluster_credentials_file"])
        credentials = {}
        execfile(auth, credentials)
        client_id = credentials.get("taskcluster_clientId")
        access_token = credentials.get("taskcluster_accessToken")
        if not client_id or not access_token:
            self.warning("Skipping S3 file upload: No taskcluster credentials.")
            return

        # We need to activate the virtualenv so that we can import taskcluster
        # (and its dependent modules, like requests and hawk).  Normally we
        # could create the virtualenv as an action, but there are some odd
        # dependencies: query_build_env() is called from build(), which has to
        # run before the virtualenv can be created.
        self.disable_mock()
        self.create_virtualenv()
        self.enable_mock()
        self.activate_virtualenv()

        # Enable Taskcluster debug logging, so at least we get some debug
        # messages while we are testing uploads.
        logging.getLogger("taskcluster").setLevel(logging.DEBUG)

        branch = self.config["branch"]
        platform = self.config["platform"]
        revision = self._query_revision()
        repo = self.query_l10n_repo()
        if not repo:
            self.fatal("Unable to determine repository for querying the push info.")
        pushinfo = self.vcs_query_pushinfo(repo, revision, vcs="hgtool")

        routes_json = os.path.join(self.query_abs_dirs()["abs_mozilla_dir"], "testing/taskcluster/routes.json")
        with open(routes_json) as f:
            contents = json.load(f)
            templates = contents["l10n"]

        for locale, files in self.upload_files.iteritems():
            self.info("Uploading files to S3 for locale '%s': %s" % (locale, files))
            routes = []
            for template in templates:
                fmt = {
                    "index": self.config.get("taskcluster_index", "index.garbage.staging"),
                    "project": branch,
                    "head_rev": revision,
                    "build_product": self.config["stage_product"],
                    "build_name": self.query_build_name(),
                    "build_type": self.query_build_type(),
                    "locale": locale,
                }
                fmt.update(self.buildid_to_dict(self._query_buildid()))
                routes.append(template.format(**fmt))

            self.info("Using routes: %s" % routes)
            tc = Taskcluster(
                branch,
                pushinfo.pushdate,  # Use pushdate as the rank
                client_id,
                access_token,
                self.log_obj,
            )
            task = tc.create_task(routes)
            tc.claim_task(task)

            for upload_file in files:
                # Create an S3 artifact for each file that gets uploaded. We also
                # check the uploaded file against the property conditions so that we
                # can set the buildbot config with the correct URLs for package
                # locations.
                tc.create_artifact(task, upload_file)
            tc.report_completed(task)
Example 8
    def taskcluster_upload(self):
        auth = os.path.join(os.getcwd(),
                            self.config['taskcluster_credentials_file'])
        credentials = {}
        execfile(auth, credentials)
        client_id = credentials.get('taskcluster_clientId')
        access_token = credentials.get('taskcluster_accessToken')
        if not client_id or not access_token:
            self.warning(
                'Skipping S3 file upload: No taskcluster credentials.')
            return

        self.activate_virtualenv()

        dirs = self.query_abs_dirs()
        locales = self.query_locales()
        make = self.query_exe("make")
        upload_env = self.query_upload_env()
        cwd = dirs['abs_locales_dir']
        branch = self.config['branch']
        revision = self.query_revision()
        repo = self.query_l10n_repo()
        pushinfo = self.vcs_query_pushinfo(repo, revision, vcs='hgtool')
        pushdate = time.strftime('%Y%m%d%H%M%S',
                                 time.gmtime(pushinfo.pushdate))
        routes_json = os.path.join(self.query_abs_dirs()['abs_mozilla_dir'],
                                   'testing/taskcluster/routes.json')
        with open(routes_json) as f:
            contents = json.load(f)
            templates = contents['l10n']

        for locale in locales:
            output = self.get_output_from_command_m(
                "%s echo-variable-UPLOAD_FILES AB_CD=%s" % (make, locale),
                cwd=cwd,
                env=upload_env,
            )
            files = shlex.split(output)
            abs_files = [os.path.abspath(os.path.join(cwd, f)) for f in files]

            routes = []
            fmt = {
                'index': self.config.get('taskcluster_index',
                                         'index.garbage.staging'),
                'project': branch,
                'head_rev': revision,
                'pushdate': pushdate,
                'year': pushdate[0:4],
                'month': pushdate[4:6],
                'day': pushdate[6:8],
                'build_product': self.config['stage_product'],
                'build_name': self.query_build_name(),
                'build_type': self.query_build_type(),
                'locale': locale,
            }
            for template in templates:
                routes.append(template.format(**fmt))

            self.info('Using routes: %s' % routes)
            tc = Taskcluster(
                branch,
                pushinfo.pushdate,  # Use pushdate as the rank
                client_id,
                access_token,
                self.log_obj,
            )
            task = tc.create_task(routes)
            tc.claim_task(task)

            for upload_file in abs_files:
                tc.create_artifact(task, upload_file)
            tc.report_completed(task)
Example 9
    def taskcluster_upload(self):
        auth = os.path.join(os.getcwd(), self.config["taskcluster_credentials_file"])
        credentials = {}
        execfile(auth, credentials)
        client_id = credentials.get("taskcluster_clientId")
        access_token = credentials.get("taskcluster_accessToken")
        if not client_id or not access_token:
            self.warning("Skipping S3 file upload: No taskcluster credentials.")
            return

        # We need to activate the virtualenv so that we can import taskcluster
        # (and its dependent modules, like requests and hawk).  Normally we
        # could create the virtualenv as an action, but there are some odd
        # dependencies: query_build_env() is called from build(), which has to
        # run before the virtualenv can be created.
        self.disable_mock()
        self.create_virtualenv()
        self.enable_mock()
        self.activate_virtualenv()

        branch = self.config["branch"]
        revision = self._query_revision()
        repo = self.query_l10n_repo()
        if not repo:
            self.fatal("Unable to determine repository for querying the push info.")
        pushinfo = self.vcs_query_pushinfo(repo, revision, vcs="hgtool")
        pushdate = time.strftime("%Y%m%d%H%M%S", time.gmtime(pushinfo.pushdate))

        routes_json = os.path.join(self.query_abs_dirs()["abs_mozilla_dir"], "testing/taskcluster/routes.json")
        with open(routes_json) as f:
            contents = json.load(f)
            templates = contents["l10n"]

        # Release promotion creates a special task to accumulate all artifacts
        # under the same task
        artifacts_task = None
        self.read_buildbot_config()
        if "artifactsTaskId" in self.buildbot_config.get("properties", {}):
            artifacts_task_id = self.buildbot_config["properties"]["artifactsTaskId"]
            artifacts_tc = Taskcluster(
                branch=branch,
                rank=pushinfo.pushdate,
                client_id=client_id,
                access_token=access_token,
                log_obj=self.log_obj,
                task_id=artifacts_task_id,
            )
            artifacts_task = artifacts_tc.get_task(artifacts_task_id)
            artifacts_tc.claim_task(artifacts_task)

        for locale, files in self.upload_files.iteritems():
            self.info("Uploading files to S3 for locale '%s': %s" % (locale, files))
            routes = []
            for template in templates:
                fmt = {
                    "index": self.config.get("taskcluster_index", "index.garbage.staging"),
                    "project": branch,
                    "head_rev": revision,
                    "pushdate": pushdate,
                    "year": pushdate[0:4],
                    "month": pushdate[4:6],
                    "day": pushdate[6:8],
                    "build_product": self.config["stage_product"],
                    "build_name": self.query_build_name(),
                    "build_type": self.query_build_type(),
                    "locale": locale,
                }
                fmt.update(self.buildid_to_dict(self._query_buildid()))
                routes.append(template.format(**fmt))

            self.info("Using routes: %s" % routes)
            tc = Taskcluster(
                branch,
                pushinfo.pushdate,  # Use pushdate as the rank
                client_id,
                access_token,
                self.log_obj,
            )
            task = tc.create_task(routes)
            tc.claim_task(task)

            for upload_file in files:
                # Create an S3 artifact for each file that gets uploaded. We also
                # check the uploaded file against the property conditions so that we
                # can set the buildbot config with the correct URLs for package
                # locations.
                artifact_url = tc.create_artifact(task, upload_file)
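                # Mirror the artifact into the accumulation task as a
                # reference pointing at the uploaded copy's URL, rather
                # than uploading the file a second time.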
                if artifacts_task:
                    artifacts_tc.create_reference_artifact(artifacts_task, upload_file, artifact_url)

            tc.report_completed(task)

        if artifacts_task:
            if not self.query_failed_locales():
                artifacts_tc.report_completed(artifacts_task)
            else:
                # If some locales fail, we want to mark the artifacts
                # task failed, so a retry can reuse the same task ID
                artifacts_tc.report_failed(artifacts_task)
Example 10
    def taskcluster_upload(self):
        auth = os.path.join(os.getcwd(), self.config['taskcluster_credentials_file'])
        credentials = {}
        execfile(auth, credentials)
        client_id = credentials.get('taskcluster_clientId')
        access_token = credentials.get('taskcluster_accessToken')
        if not client_id or not access_token:
            self.warning('Skipping S3 file upload: No taskcluster credentials.')
            return

        self.activate_virtualenv()

        # Enable Taskcluster debug logging, so at least we get some debug
        # messages while we are testing uploads.
        logging.getLogger('taskcluster').setLevel(logging.DEBUG)

        branch = self.config['branch']
        platform = self.config['platform']
        revision = self._query_revision()
        repo = self.query_l10n_repo()
        if not repo:
            self.fatal("Unable to determine repository for querying the push info.")
        pushinfo = self.vcs_query_pushinfo(repo, revision, vcs='hgtool')

        routes_json = os.path.join(self.query_abs_dirs()['abs_mozilla_dir'],
                                   'testing/taskcluster/routes.json')
        with open(routes_json) as f:
            contents = json.load(f)
            templates = contents['l10n']

        for locale, files in self.upload_files.iteritems():
            self.info("Uploading files to S3 for locale '%s': %s" % (locale, files))
            routes = []
            for template in templates:
                fmt = {
                    'index': self.config.get('taskcluster_index', 'index.garbage.staging'),
                    'project': branch,
                    'head_rev': revision,
                    'build_product': self.config['stage_product'],
                    'build_name': self.query_build_name(),
                    'build_type': self.query_build_type(),
                    'locale': locale,
                }
                fmt.update(self.buildid_to_dict(self._query_buildid()))
                routes.append(template.format(**fmt))

            self.info('Using routes: %s' % routes)
            tc = Taskcluster(branch,
                             pushinfo.pushdate, # Use pushdate as the rank
                             client_id,
                             access_token,
                             self.log_obj,
                             )
            task = tc.create_task(routes)
            tc.claim_task(task)

            for upload_file in files:
                # Create an S3 artifact for each file that gets uploaded. We also
                # check the uploaded file against the property conditions so that we
                # can set the buildbot config with the correct URLs for package
                # locations.
                tc.create_artifact(task, upload_file)
            tc.report_completed(task)
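
Note: all of the variants above are Python 2 (execfile(), dict.iteritems()).
As a minimal sketch, assuming the credentials file simply assigns
taskcluster_clientId and taskcluster_accessToken at module level, the
credential-loading step could be ported to Python 3 like this:

    def load_taskcluster_credentials(path):
        # Python 3 replacement for the execfile() call used above: execute
        # the credentials file and collect its global names in a dict.
        credentials = {}
        with open(path) as f:
            exec(compile(f.read(), path, 'exec'), credentials)
        return credentials

    # Hypothetical usage; in the snippets above the actual path comes from
    # self.config['taskcluster_credentials_file'].
    credentials = load_taskcluster_credentials('taskcluster_credentials.py')
    client_id = credentials.get('taskcluster_clientId')
    access_token = credentials.get('taskcluster_accessToken')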