Code Example #1
    def __init__(self, *, jenkins_host, job_name, jmdb,
                          aggregator_plugins=None,
                          postprocessor_plugins=None):
        """
        Initializer.

        Required parameters:
            job_name:   Jenkins job name
            jmdb:       JenkinsMongoDB.jenkins_db() instance

        Optional parameters:
            aggregator_plugins: custom aggregator plug-in classes
            postprocessor_plugins: custom post-process plug-in classes
        """
        self.logger = logging.getLogger(__name__)
        self.jenkins_host = jenkins_host
        self.job_name = job_name
        self.aggregator_plugins = aggregator_plugins
        self.postprocessor_plugins = postprocessor_plugins

        cfg = EnvConfiguration(JenkinsJobAggregators.ENV_PARAMS)
        self.builds_max = cfg.get('JENKINS_AGGREGATOR_UPDATE_BUILDS_MAX')

        # XXXrs - This is presently unused.  Want to stash the time of
        #         last update, and refuse to run again until sufficient
        #         time has passed.
        self.freq_sec = cfg.get('JENKINS_AGGREGATOR_UPDATE_FREQ_SEC')

        self.job_data_coll = JenkinsJobDataCollection(job_name=job_name, jmdb=jmdb)
        self.job_meta_coll = JenkinsJobMetaCollection(job_name=job_name, jmdb=jmdb)
        self.alljob_idx = JenkinsAllJobIndex(jmdb=jmdb)
        self.japi = JenkinsApi(host=self.jenkins_host)
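
A minimal instantiation sketch for the class above (its full definition appears
in Code Example #8). The direct JenkinsMongoDB() construction mirrors the other
examples on this page; the host name is a made-up placeholder.

jmdb = JenkinsMongoDB()
aggregators = JenkinsJobAggregators(jenkins_host="jenkins.example.com",  # placeholder
                                    job_name="XCEFuncTest",
                                    jmdb=jmdb)
aggregators.update_builds()  # poll Jenkins, aggregate and store pending builds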
Code Example #2
    def __init__(self):
        self.logger = logging.getLogger(__name__)
        cfg = EnvConfiguration(XCEFuncTestCoverageData.ENV_PARAMS)
        job_name = cfg.get("XCE_FUNC_TEST_JOB_NAME")

        # XXXrs - This should NOT communicate directly with the DB, but
        #         should go through a REST client.
        jmdb = JenkinsMongoDB()
        self.data = JenkinsJobDataCollection(job_name=job_name, jmdb=jmdb)
        self.meta = JenkinsJobMetaCollection(job_name=job_name, jmdb=jmdb)

        # XXXrs - TEMPORARY (!?!) initialize every time with static configuration.
        #         Eventually, this configuration should be managed elsewhere.

        self.file_groups = FileGroups(meta=self.meta.coll)
        self.file_groups.reset()
        for name, files in XCEFuncTestCoverageData.XCE_FUNC_TEST_FILE_GROUPS.items():
            self.file_groups.append_group(name=name, files=files)
Code Example #3
    def __init__(self, *, job_name):
        """
        Initializer

        Required parameters:
            job_name:   Jenkins job name
        """
        self.logger = logging.getLogger(__name__)
        self.job_name = job_name
        jmdb = JenkinsMongoDB()
        self.data = JenkinsJobDataCollection(job_name=self.job_name, jmdb=jmdb)
        self.meta = JenkinsJobMetaCollection(job_name=self.job_name, jmdb=jmdb)
        self.results_cache = {}
Code Example #4
File: __init__.py  Project: xcalar/xcalar-infra
    def __init__(self):
        """
        Initializer

        Environment parameters:
            UBM_PERF_JOB_NAME:  Jenkins job name.
        """
        self.logger = logging.getLogger(__name__)
        cfg = EnvConfiguration(UbmPerfResultsData.ENV_PARAMS)
        self.job_name = cfg.get("UBM_PERF_JOB_NAME")
        jmdb = JenkinsMongoDB()
        self.data = JenkinsJobDataCollection(job_name=self.job_name, jmdb=jmdb)
        self.meta = JenkinsJobMetaCollection(job_name=self.job_name, jmdb=jmdb)
        self.results_cache = {}
        self.jresults_cache = {}
Code Example #5
class XCEFuncTestCoverageData(object):

    ENV_PARAMS = {"XCE_FUNC_TEST_JOB_NAME": {"default": "XCEFuncTest"}}

    # XXXrs - temporary static config.
    XCE_FUNC_TEST_FILE_GROUPS = \
            {"Critical Files": ["liboperators/GlobalOperators.cpp",
                                "liboperators/LocalOperators.cpp",
                                "liboperators/XcalarEval.cpp",
                                "liboptimizer/Optimizer.cpp",
                                "libxdb/Xdb.cpp",
                                "libruntime/Runtime.cpp",
                                "libquerymanager/QueryManager.cpp",
                                "libqueryeval/QueryEvaluate.cpp",
                                "libmsg/TwoPcFuncDefs.cpp"]}

    def __init__(self):
        self.logger = logging.getLogger(__name__)
        cfg = EnvConfiguration(XCEFuncTestCoverageData.ENV_PARAMS)
        job_name = cfg.get("XCE_FUNC_TEST_JOB_NAME")

        # XXXrs - This should NOT communicate directly with the DB, but
        #         should go through a REST client.
        jmdb = JenkinsMongoDB()
        self.data = JenkinsJobDataCollection(job_name=job_name, jmdb=jmdb)
        self.meta = JenkinsJobMetaCollection(job_name=job_name, jmdb=jmdb)

        # XXXrs - TEMPORARY (!?!) initialize every time with static configuration.
        #         Eventually, this configuration should be managed elsewhere.

        self.file_groups = FileGroups(meta=self.meta.coll)
        self.file_groups.reset()
        for name, files in XCEFuncTestCoverageData.XCE_FUNC_TEST_FILE_GROUPS.items():
            self.file_groups.append_group(name=name, files=files)

    def xce_versions(self):
        """
        Return available XCE versions for which we have data.
        XXXrs - version/branch :|
        """
        return self.meta.branches(repo='XCE')

    def _get_coverage_data(self, *, bnum):
        data = self.data.get_data(bnum=bnum)
        if not data:
            return None
        return data.get('coverage', None)

    def builds(self,
               *,
               xce_versions=None,
               first_bnum=None,
               last_bnum=None,
               reverse=False):
        rtn = []
        for bnum in self.meta.find_builds(repo='XCE',
                                          branches=xce_versions,
                                          first_bnum=first_bnum,
                                          last_bnum=last_bnum,
                                          reverse=reverse):
            if self._get_coverage_data(bnum=bnum):
                # Only builds with coverage data please
                rtn.append(bnum)
        return rtn

    def filenames(self, *, bnum, group_name=None):
        coverage = self._get_coverage_data(bnum=bnum)
        if not coverage:
            return None

        rawnames = []
        do_sort = False
        if group_name is not None and group_name != "All Files":
            rawnames = self.file_groups.expand(name=group_name)
        else:
            # Load all file names available in coverage
            do_sort = True
            rawnames = coverage.keys()

        # Reduce to just final two path components
        filenames = []
        have_total = False
        for key in rawnames:
            name = MongoDB.decode_key(key)
            if name == 'totals':
                have_total = True
                continue
            fields = name.split('/')
            if len(fields) < 2:
                raise Exception("Incomprehensible: {}".format(name))
            filename = "{}/{}".format(fields[-2], fields[-1])
            if filename in filenames:
                raise Exception("Duplicate: {}".format(filename))
            filenames.append(filename)
        if do_sort:
            filenames.sort()
        if have_total:
            filenames.insert(0, "Total")
        return filenames

    def coverage(self, *, bnum, filename):
        """
        XXXrs - FUTURE - extend to return other than "lines" percentage.
        """
        if filename == "Total":
            filename = "totals"
        coverage = self._get_coverage_data(bnum=bnum)
        if not coverage:
            return None
        for key, data in coverage.items():
            name = MongoDB.decode_key(key)
            if filename in name:
                return coverage[key].get('lines', {}).get('percent', None)
        return None
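
A short usage sketch of the query surface above. The job name comes from the
XCE_FUNC_TEST_JOB_NAME environment parameter (default "XCEFuncTest"); the file
name below is taken from the "Critical Files" group for illustration.

cov = XCEFuncTestCoverageData()
versions = cov.xce_versions()               # branches for which data exist
builds = cov.builds(xce_versions=versions)  # only builds with coverage data
if builds:
    names = cov.filenames(bnum=builds[0], group_name="Critical Files")
    # coverage() matches by substring, so the two-component name is enough
    pct = cov.coverage(bnum=builds[0], filename="liboperators/XcalarEval.cpp")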
Code Example #6
File: reparse_builds.py  Project: xcalar/xcalar-infra
    jmdb = JenkinsMongoDB()

    if len(args.builds):
        if len(args.jobs) != 1:
            raise ValueError("If --bnum only one --job allowed")

        # Re-parse only specific builds from a job

        # validate the job/builds
        job_name = args.jobs[0]
        active_jobs = jmdb.active_jobs()
        if job_name not in active_jobs:
            raise ValueError("{} is not an active job".format(job_name))

        meta_coll = JenkinsJobMetaCollection(job_name=job_name, jmdb=jmdb)
        all_builds = meta_coll.all_builds()
        for bnum in args.builds:
            if bnum not in all_builds:
                raise ValueError("{} is not a valid build number".format(bnum))

        # See if user wants to proceed
        if not args.force:
            foo = input("Proceed (y/N): ")
            if foo != 'y':
                sys.exit(0)

        print("proceeding...")

        # Flag builds for re-parse
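
        # Sketch of the step the comment above announces, modeled on the
        # reparse() call in Code Example #9; the keep-alive process lock used
        # there is omitted here for brevity.
        meta_coll.reparse(builds=args.builds)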
Code Example #7
class XDUnitTestCoverageData(object):

    ENV_PARAMS = {"XD_UNIT_TEST_JOB_NAME": {"default": "XDUnitTest"}}

    # XXXrs - temporary static config.
    FILE_GROUPS = {"Critical Files": [
       "/ts/components/workbook/workbookManager.js"
       "/ts/components/dag/DagGraph.js",
       "/ts/components/dag/DagGraphExecutor.js",
       "/ts/components/dag/DagLineage.js",
       "/ts/components/dag/DagList.js",
       "/ts/components/dag/DagNodeExecutor.js",
       "/ts/components/dag/DagNodeMenu.js",
       "/ts/components/dag/DagQueryConverter.js",
       "/ts/components/dag/DagSubGraph.js",
       "/ts/components/dag/DagTab.js",
       "/ts/components/dag/DagTabManager.js",
       "/ts/components/dag/DagTabUser.js",
       "/ts/components/dag/DagTable.js",
       "/ts/components/dag/DagView.js",
       "/ts/components/dag/DagViewManager.js",
       "/ts/components/dag/node/DagNode.js",
       "/ts/components/worksheet/oppanel/SQLOpPanel.js"
       "/ts/components/sql/SQLDagExecutor.js",
       "/ts/components/sql/SQLEditor.js",
       "/ts/components/sql/SQLExecutor.js",
       "/ts/components/sql/SQLSnippet.js",
       "/ts/components/sql/sqlQueryHistory.js",
       "/ts/components/sql/workspace/SQLEditorSpace.js",
       "/ts/components/sql/workspace/SQLResultSpace.js",
       "/ts/components/sql/workspace/SQLTable.js",
       "/ts/components/sql/workspace/SQLTableLister.js",
       "/ts/components/sql/workspace/SQLTableSchema.js",
       "/ts/components/sql/workspace/SQLWorkSpace.js" ]}

    def __init__(self):

        self.logger = logging.getLogger(__name__)
        cfg = EnvConfiguration(XDUnitTestCoverageData.ENV_PARAMS)
        job_name = cfg.get("XD_UNIT_TEST_JOB_NAME")

        # XXXrs - This should NOT communicate directly with the DB, but
        #         should go through a REST client.
        jmdb = JenkinsMongoDB()
        self.data = JenkinsJobDataCollection(job_name=job_name, jmdb=jmdb)
        self.meta = JenkinsJobMetaCollection(job_name=job_name, jmdb=jmdb)

        # XXXrs - TEMPORARY (!?!) initialize every time with static configuration.
        #         Eventually, this configuration should be managed elsewhere.

        self.file_groups = FileGroups(meta=self.meta.coll)
        self.file_groups.reset()
        for name, files in XDUnitTestCoverageData.FILE_GROUPS.items():
            self.file_groups.append_group(name=name, files=files)

    def xd_versions(self):
        """
        Return available XD versions for which we have data.
        XXXrs - version/branch :|
        """
        return self.meta.branches(repo='XD')

    def _get_coverage_data(self, *, bnum):
        data = self.data.get_data(bnum=bnum)
        if not data:
            return None
        return data.get('coverage', None)

    def builds(self, *, xd_versions=None,
                        first_bnum=None,
                        last_bnum=None,
                        reverse=False):

        rtn = []
        for bnum in self.meta.find_builds(repo='XD',
                                          branches=xd_versions,
                                          first_bnum=first_bnum,
                                          last_bnum=last_bnum,
                                          reverse=reverse):
            if self._get_coverage_data(bnum=bnum):
                # Only builds with coverage data please
                rtn.append(bnum)
        return rtn

    def filenames(self, *, bnum, group_name=None):
        coverage = self._get_coverage_data(bnum=bnum)
        if not coverage:
            return None

        rawnames = []
        do_sort = False
        if group_name is not None and group_name != "All Files":
            rawnames = self.file_groups.expand(name=group_name)
        else:
            do_sort = True
            rawnames = sorted(coverage.keys())

        have_total = False
        # Reduce a URL to just a filename
        filenames = []
        for key in rawnames:
            url = MongoDB.decode_key(key)
            if url == 'Total':
                have_total = True
                continue
            fields = url.split('/')
            if len(fields) < 2:
                raise Exception("Incomprehensible: {}".format(url))
            filename = "{}/{}".format(fields[-2], fields[-1])
            if filename in filenames:
                raise Exception("Duplicate: {}".format(filename))
            filenames.append(filename)
        if do_sort:
            filenames.sort()
        if have_total:
            filenames.insert(0, "Total")
        return filenames

    def coverage(self, *, bnum, filename):
        coverage = self._get_coverage_data(bnum=bnum)
        if not coverage:
            return None
        for key, data in coverage.items():
            url = MongoDB.decode_key(key)
            if filename.lower() in url.lower():
                return coverage[key].get('covered_pct', None)
        return None
Code Example #8
class JenkinsJobAggregators(object):
    """
    Controller class for set of aggregators for a job.
    Handles aggregator execution, storing of returned data, retries.
    """
    ENV_PARAMS = {'JENKINS_AGGREGATOR_UPDATE_BUILDS_MAX':
                    {'required': True,
                     'type': EnvConfiguration.NUMBER,
                     'default': 25},
                  'JENKINS_AGGREGATOR_UPDATE_FREQ_SEC':
                    {'required': True,
                     'type': EnvConfiguration.NUMBER,
                     'default': 300} }

    def __init__(self, *, jenkins_host, job_name, jmdb,
                          aggregator_plugins=None,
                          postprocessor_plugins=None):
        """
        Initializer.

        Required parameters:
            job_name:   Jenkins job name
            jmdb:       JenkinsMongoDB.jenkins_db() instance

        Optional parameters:
            aggregator_plugins: custom aggregator plug-in classes
            postprocessor_plugins: custom post-process plug-in classes
        """
        self.logger = logging.getLogger(__name__)
        self.jenkins_host = jenkins_host
        self.job_name = job_name
        self.aggregator_plugins = aggregator_plugins
        self.postprocessor_plugins = postprocessor_plugins
        self.jmdb = jmdb  # retained for the per-host collection in _update_build

        cfg = EnvConfiguration(JenkinsJobAggregators.ENV_PARAMS)
        self.builds_max = cfg.get('JENKINS_AGGREGATOR_UPDATE_BUILDS_MAX')

        # XXXrs - This is presently unused.  Want to stash the time of
        #         last update, and refuse to run again until sufficient
        #         time has passed.
        self.freq_sec = cfg.get('JENKINS_AGGREGATOR_UPDATE_FREQ_SEC')

        self.job_data_coll = JenkinsJobDataCollection(job_name=job_name, jmdb=jmdb)
        self.job_meta_coll = JenkinsJobMetaCollection(job_name=job_name, jmdb=jmdb)
        self.alljob_idx = JenkinsAllJobIndex(jmdb=jmdb)
        self.japi = JenkinsApi(host=self.jenkins_host)

    def _update_build(self, *, bnum, is_reparse=False, test_mode=False, test_data_path=None):
        """
        Call all aggregators on the build.  Consolidate results
        and store to the DB.  All or nothing.  All aggregators
        must run successfully or we bail and try again in the
        future (if allowed).

        If test_mode is set it is allowed to pass a job name
        and build number unknown to Jenkins so that an aggregator
        in development can be triggered.  In this case, a
        JenkinsBuildInfo will be constructed with "faked" data.
        """
        self.logger.info("process bnum: {}".format(bnum))

        is_done = False
        try:
            jbi = self.japi.get_build_info(job_name=self.job_name, build_number=bnum)
            # Track whether or not the build is complete.
            # For incomplete builds, record basic build information but do not call
            # plug-in aggregators until the build finishes since that was the original
            # semantic.
            is_done = jbi.is_done()

        except Exception as e:
            if not test_mode:
                self.logger.exception("exception processing bnum: {}".format(bnum))
                if not is_reparse and not self.job_meta_coll.schedule_retry(bnum=bnum):
                    self.job_meta_coll.index_data(bnum=bnum, data=None,
                                                  is_done=True, is_reparse=False)
                    self.job_data_coll.store_data(bnum=bnum, data=None,
                                                  is_done=True, is_reparse=False)
                return False

            # TEST_MODE -----

            self.logger.info("get_build_info exception in test mode")
            self.logger.info("test_data_path: {}".format(test_data_path))

            # If the test job doesn't exist on Jenkins, we'll end up here.
            # In this case, we "fake up" a JenkinsBuildInfo using
            # data either defined in an external file (possibly keyed
            # by build number so that multiple builds can have different
            # "fake" data) or defined below as a static blob.

            fake_data = {'building': False,
                         "actions": [ {"parameters": [{"name": "XCE_GIT_BRANCH", "value": "trunk"},
                                                      {"name": "XD_GIT_BRANCH", "value": "trunk"},
                                                      {"name": "INFRA_GIT_BRANCH", "value": "master"}]}],
                         'builtOn': 'fakehost.somecompany.com',
                         'timestamp': int((time.time()*1000))-3600,
                         'duration': 600,
                         'result': 'SUCCESS'}

            test_data = None
            if test_data_path:
                with open(test_data_path) as json_file:
                    data = json.load(json_file)
                if bnum in data:
                    # keyed by build
                    test_data = data[bnum]
                else:
                    # same data no matter what build
                    test_data = data
            if not test_data:
                test_data = fake_data

            self.logger.info("test_data: {}".format(test_data))
            try:
                jbi = self.japi.get_build_info(job_name=self.job_name,
                                               build_number=bnum,
                                               test_data=test_data)
                is_done = jbi.is_done()
            except Exception as e:
                self.logger.exception("exception processing bnum: {}".format(bnum))
                if not is_reparse and not self.job_meta_coll.schedule_retry(bnum=bnum):
                    self.job_meta_coll.index_data(bnum=bnum, data=None,
                                                  is_done=True, is_reparse=False)
                    self.job_data_coll.store_data(bnum=bnum, data=None,
                                                  is_done=True, is_reparse=False)
                return False

        # Everybody gets the default aggregator
        aggregators = [JenkinsJobInfoAggregator(jenkins_host=self.jenkins_host, job_name=self.job_name)]

        if is_done:
            # Add any custom aggregator plugin(s) registered for the job.
            #
            # N.B.: We only run custom aggregators on completed builds to
            # preserve earlier semantics.
            if self.aggregator_plugins:
                aggregators.extend(self.aggregator_plugins)

        send_log = False
        for aggregator in aggregators:
            if aggregator.send_log_to_update:
                send_log = True
                break

        console_log = None
        if send_log:
            try:
                self.logger.info("get log")
                console_log = jbi.console()
            except Exception as e:
                self.logger.exception("exception processing bnum: {}".format(bnum))
                if not is_reparse and not self.job_meta_coll.schedule_retry(bnum=bnum):
                    self.job_meta_coll.index_data(bnum=bnum, data=None,
                                                  is_done=True, is_reparse=False)
                    self.job_data_coll.store_data(bnum=bnum, data=None,
                                                  is_done=True, is_reparse=False)
                return False

        merged_data = {}
        for agg in aggregators:
            try:
                params = {'jbi': jbi,
                          'log': None,
                          'is_reparse': is_reparse,
                          'test_mode': test_mode}
                if agg.send_log_to_update:
                    params['log'] = console_log
                self.logger.info('calling aggregator: {}'.format(agg.agg_name))
                data = agg.update_build(**params) or {}

            except JenkinsAggregatorDataUpdateTemporaryError as e:
                # Subclass update_build() encountered a temporary error
                # while trying to gather build information. 
                # Bail, and try again in a bit (if we can).
                self.logger.exception("exception processing bnum: {}".format(bnum))
                if not is_reparse and not self.job_meta_coll.schedule_retry(bnum=bnum):
                    self.job_meta_coll.index_data(bnum=bnum, data=None,
                                                  is_done=True, is_reparse=False)
                    self.job_data_coll.store_data(bnum=bnum, data=None,
                                                  is_done=True, is_reparse=False)
                return False

            for k, v in data.items():
                if k in merged_data:
                    raise Exception("duplicate key: {}".format(k))
                merged_data[k] = v

        if not merged_data and not is_reparse:
            self.logger.info("no data")
            # Make an entry indicating there are no data for this build.
            self.job_meta_coll.index_data(bnum=bnum, data=None,
                                          is_done=is_done, is_reparse=is_reparse)
            self.job_data_coll.store_data(bnum=bnum, data=None,
                                          is_done=is_done, is_reparse=is_reparse)
            return False

        # index_data may side-effect merged_data by extracting "private" stuff
        # (e.g. "commands" like "_add_to_meta_list") so call it first!
        #
        # XXXrs - CONSIDER - this "private" "command" hints that
        #         might want custom post-aggregation "indexers" that
        #         are paired with the "aggregators".
        #

        self.job_meta_coll.index_data(bnum=bnum, data=merged_data,
                                      is_done=is_done, is_reparse=is_reparse)
        self.job_data_coll.store_data(bnum=bnum, data=merged_data,
                                      is_done=is_done, is_reparse=is_reparse)
        self.alljob_idx.index_data(job_name=self.job_name,
                                   bnum=bnum, data=merged_data,
                                   is_done=is_done, is_reparse=is_reparse)

        host_data_coll = JenkinsHostDataCollection(jmdb=self.jmdb,
                                                   host_name=merged_data['built_on'])
        host_data_coll.store_data(job_name=self.job_name, bnum=bnum, data=merged_data,
                                  is_done=is_done, is_reparse=is_reparse)
        self.logger.debug("end")
        return True

    def _postprocess_job(self, *, test_mode=False, default_only=False):
        postprocessors = [JenkinsJobPostprocessor(jenkins_host=self.jenkins_host, job_name=self.job_name)]
        if not default_only:
            if self.postprocessor_plugins:
                postprocessors.extend(self.postprocessor_plugins)
        for pproc in postprocessors:
            try:
                data = pproc.update_job(test_mode=test_mode)
                self.job_meta_coll.store_data(key=pproc.name, data=data)
            except Exception as e:
                self.logger.exception("exception post-processing job: {}"
                                      .format(self.job_name))

    def _do_updates(self, *, builds, test_mode=False,
                    test_data_path=None,
                    force_default_job_update=False,
                    is_reparse=False):
        self.logger.info("builds: {}".format(builds))
        self.logger.info("test_mode: {}".format(test_mode))
        self.logger.info("test_data_path: {}".format(test_data_path))
        self.logger.info("is_reparse: {}".format(is_reparse))
        updated = 0
        for bnum in builds:
            if self._update_build(bnum=bnum, test_mode=test_mode,
                                  test_data_path=test_data_path,
                                  is_reparse=is_reparse):
                updated += 1
        if updated:
            self.logger.debug("{} builds updated, call postprocessors".format(updated))
            self._postprocess_job(test_mode=test_mode)
        elif force_default_job_update:
            self._postprocess_job(test_mode=test_mode, default_only=True)
        return updated


    def update_builds(self, *, test_builds=None,
                               test_data_path=None,
                               force_default_job_update=False):
        self.logger.info("start")

        if test_builds:
            self._do_updates(builds=test_builds,
                             test_mode=True,
                             test_data_path=test_data_path)
            return

        jobinfo = self.japi.get_job_info(job_name=self.job_name)
        jenkins_first = jobinfo.first_build_number()
        jenkins_last = jobinfo.last_build_number()
        if not jenkins_first or not jenkins_last:
            self.logger.error("missing first or last build for job {}".format(self.job_name))
            return

        pending = self.job_meta_coll.pending_builds(first=jenkins_first, last=jenkins_last)
        updated = self._do_updates(builds=pending[:self.builds_max],
                                   force_default_job_update=force_default_job_update)

        extra = self.builds_max - updated

        # We can do up to "extra" reparse.
        if extra > 0:
            reparse = self.job_meta_coll.reparse(rtnmax=extra)
            if reparse:
                self._do_updates(builds=reparse, is_reparse=True)
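
The XXXrs note in __init__ leaves freq_sec unused. A sketch of the throttle it
describes follows: store_data(key=..., data=...) mirrors the call in
_postprocess_job, but get_data(key=...) on the meta collection and the
'_last_update_ms' key are assumptions made for this sketch, not part of the
original API.

import time

def should_update(job_meta_coll, freq_sec):
    # Refuse to run again until freq_sec seconds have passed since the
    # last recorded update (timestamps kept in milliseconds).
    last_ms = job_meta_coll.get_data(key='_last_update_ms') or 0
    return (time.time() * 1000) - last_ms >= freq_sec * 1000

def mark_updated(job_meta_coll):
    job_meta_coll.store_data(key='_last_update_ms',
                             data=int(time.time() * 1000))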
Code Example #9
                    if broken:
                        print("{} {}: key {} corefile_name {}".format(
                            job, doc['_id'], key, corefile_name))
                        job_to_builds.setdefault(job, []).append(doc['_id'])

    for job_name, builds in job_to_builds.items():
        print("{} ===== {}\n{}".format(job_name, len(builds), builds))

    # See if user wants to proceed
    foo = input("Schedule reparse (y/N): ")
    if foo != 'y':
        sys.exit(0)

    print("scheduling...")

    # Flag matching builds for re-parse

    for job_name, builds in job_to_builds.items():
        process_lock_name = "{}_process_lock".format(job_name)
        process_lock_meta = {"reason": "locked by broken_corefile_name.py"}
        process_lock = MongoDBKeepAliveLock(db=jmdb.jenkins_db(),
                                            name=process_lock_name)
        try:
            process_lock.lock(meta=process_lock_meta)
        except MongoDBKALockTimeout as e:
            raise Exception("timeout acquiring {}".format(process_lock_name))

        meta_coll = JenkinsJobMetaCollection(job_name=job_name, jmdb=jmdb)
        meta_coll.reparse(builds=builds)
        process_lock.unlock()
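
As written, unlock() is skipped whenever reparse() raises, which would leave
the keep-alive lock held. A variant of the tail of the loop above with the
unlock guaranteed:

        try:
            meta_coll = JenkinsJobMetaCollection(job_name=job_name, jmdb=jmdb)
            meta_coll.reparse(builds=builds)
        finally:
            process_lock.unlock()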