def fix_analyses_storage_instrument(portal):
    """Walks through all Analyses not yet verified and if they belong to the
    Storage requisition category, remove the instrument assignment
    """
    logger.info("Sanitizing 'Storage instrument' from analyses")
    cat_query = dict(portal_type="AnalysisCategory",
                     title="Storage requisition")
    categories = api.search(cat_query, "bika_setup_catalog")
    if not categories:
        logger.warn("Category 'Storage requisition' not found [SKIP]")
        return

    category_uid = api.get_uid(categories[0])

    # Detach the instrument from the analysis services of this category first
    service_query = dict(portal_type="AnalysisService",
                         getCategoryUID=category_uid)
    for service_brain in api.search(service_query, "bika_setup_catalog"):
        service = api.get_object(service_brain)
        if service.getInstrument():
            service.setInstrument(None)
            service.reindexObject()

    # And now detach the instrument from the analyses themselves, except
    # those that cannot be modified anymore
    skip_states = ['published', 'rejected', 'invalid']
    analyses_query = dict(getCategoryUID=category_uid, )
    for analysis_brain in api.search(analyses_query, CATALOG_ANALYSIS_LISTING):
        if analysis_brain.review_state in skip_states:
            continue
        if not analysis_brain.getInstrumentUID:
            continue
        analysis = api.get_object(analysis_brain)
        analysis.setInstrument(None)
        analysis.reindexObject()
    logger.info("Sanitizing 'Storage instrument' from analyses [DONE]")
def sanitize_ranges_calculation_from_analyses(portal):
    """Walks through all Analyses not yet verified and remove the calculation
    if is Ranges Calculation set
    """
    logger.info("Sanitizing 'Ranges Calculation' from analyses")
    calc_query = dict(portal_type="Calculation", title="Ranges calculation")
    calcs = api.search(calc_query, "bika_setup_catalog")
    if not calcs:
        logger.warn("Calculation 'Ranges calculation' not found! [SKIP]")
        return
    calc_uid = api.get_uid(api.get_object(calcs[0]))

    # Detach the calculation from the analysis services first
    service_query = dict(portal_type="AnalysisService",
                         getCalculationUID=calc_uid)
    for service_brain in api.search(service_query, "bika_setup_catalog"):
        service = api.get_object(service_brain)
        service.setCalculation(None)
        service.reindexObject()

    # And then from the analyses. Note all analyses are fetched and the
    # calculation uid is compared afterwards, brain by brain
    for analysis_brain in api.search(dict(), CATALOG_ANALYSIS_LISTING):
        if analysis_brain.getCalculationUID != calc_uid:
            continue
        analysis = api.get_object(analysis_brain)
        analysis.setCalculation(None)
        analysis.reindexObject()
    logger.info("Sanitizing 'Ranges Calculation' from analyses [DONE]")
Beispiel #3
0
    def _attach_to_ar(pdf, ar_brain_or_obj):
        """Creates an Attachment object that holds the delivery pdf and
        links it to the given Analysis Request.

        NOTE(review): ``pdf_fn`` (path to the pdf file) and ``filename``
        (base name used to rename the attachment) are free variables taken
        from the enclosing scope; the ``pdf`` argument itself is not used
        inside this function.

        :param pdf: generated pdf contents (unused here)
        :param ar_brain_or_obj: Analysis Request object, brain or UID
        """
        ar = api.get_object(ar_brain_or_obj)

        # Attach the pdf to the Analysis Request
        # NOTE(review): the file handle opened below is never explicitly
        # closed
        attid = ar.aq_parent.generateUniqueId('Attachment')
        att = _createObjectByType(
            "Attachment", ar.aq_parent, attid)
        att.setAttachmentFile(open(pdf_fn))
        att.setReportOption('i')  # Ignore in report

        # Try to assign the Requisition Attachment Type
        query = dict(portal_type='AttachmentType', title='Delivery')
        brains = api.search(query, 'bika_setup_catalog')
        if brains:
            att_type = api.get_object(brains[0])
            att.setAttachmentType(att_type)

        # Awkward workaround to rename the file
        attf = att.getAttachmentFile()
        attf.filename = '%s.pdf' % filename
        att.setAttachmentFile(attf)
        att.unmarkCreationFlag()
        renameAfterCreation(att)
        # Append the new attachment to those the AR already has, as uids
        atts = ar.getAttachment() + [att] if ar.getAttachment() else [att]
        atts = [a.UID() for a in atts]
        ar.setAttachment(atts)
Beispiel #4
0
def after_no_sampling_workflow(analysis_request):
    """ Event fired for no_sampling_workflow that makes the status of the
    Analysis request or Sample to become sample_ordered
    """
    if not analysis_request.isPartition():
        # Generate the delivery pdf
        generate_requisition_pdf(analysis_request)

    # Set specifications by default
    sample_type = analysis_request.getSampleType()
    specs = api.get_field_value(
        sample_type, "DefaultAnalysisSpecifications", None)
    if specs:
        analysis_request.setSpecification(api.get_object(specs))
    else:
        # No default specs set. Find a suitable one by Sample Type name
        specs_title = "{} - calculated".format(sample_type.Title())
        spec_query = dict(portal_type="AnalysisSpec", title=specs_title)
        matches = api.search(spec_query, 'bika_setup_catalog')
        if matches:
            analysis_request.setSpecification(api.get_object(matches[0]))

    if analysis_request.isPartition():
        # Change workflow state to "at_reception"
        wf.changeWorkflowState(analysis_request,
                               wf_id="bika_ar_workflow",
                               state_id="sample_at_reception")
Beispiel #5
0
def after_submit(analysis):
    """Actions to be done after a submit transition for an analysis takes place
    """
    analysis = api.get_object(analysis)
    if not IRequestAnalysis.providedBy(analysis):
        return
    # Stamp the submission date of this analysis as the AR's Assay Date
    request = analysis.getRequest()
    set_field_value(request, "AssayDate", analysis.getDateSubmitted())
Beispiel #6
0
    def folderitem(self, obj, item, index):
        """Service triggered each time an item is iterated in folderitems.

        The use of this service prevents the extra-loops in child objects.

        :obj: the instance of the class to be foldered
        :item: dict containing the properties of the object to be used by
            the template
        :index: current index of the item
        """
        folder_item = BaseView.folderitem(self, obj, item, index)
        service = api.get_object(obj)

        # Defaults, in case there are no stored specs for this keyword
        folder_item["minpanic"] = ""
        folder_item["maxpanic"] = ""
        folder_item["calculation"] = ""

        keyword = service.getKeyword()
        if keyword in self.specsresults:
            # Fill the columns with the stored specification values
            stored = self.specsresults[keyword]
            folder_item["minpanic"] = stored.get("minpanic", "")
            folder_item["maxpanic"] = stored.get("maxpanic", "")
            folder_item["calculation"] = stored.get("calculation", "")
            for grade in GRADES_KEYS:
                folder_item[grade] = stored.get(grade, "")

        # Panic values, grades and calculation are user-editable
        folder_item["choices"]["calculation"] = self.get_calculations_choices()
        folder_item["allow_edit"].extend(
            ["minpanic", "maxpanic", "calculation"])
        folder_item["allow_edit"].extend(list(GRADES_KEYS))

        return folder_item
def fix_analysis_requests_assay_date(portal):
    """Backfills the AssayDate field of already-processed Analysis Requests
    with the most recent result capture date among their analyses
    """
    logger.info("Updating Assay Date for old Analysis Requests ...")
    query = dict(
        portal_type="AnalysisRequest",
        review_state=["published", "to_be_verified", "verified", "invalid"])
    brains = api.search(query, CATALOG_ANALYSIS_REQUEST_LISTING)
    total = len(brains)
    valid_states = ["to_be_verified", "published", "verified"]
    for num, brain in enumerate(brains):
        # Progress feedback and periodic commits to bound transaction size
        if num % 100 == 0:
            logger.info(
                "Updating Assay Date for old Analysis Requests: {}/{}".format(
                    num, total))
        if num % TRANSACTION_THERESHOLD == 0:
            commit_transaction(portal)

        request = api.get_object(brain)
        if api.get_field_value(request, "AssayDate", None):
            # Already has an Assay Date set, nothing to do
            continue
        analyses = request.getAnalyses(review_state=valid_states)
        captures = sorted([an.getResultCaptureDate for an in analyses])
        if captures:
            # The latest capture date becomes the Assay Date
            api.set_field_value(request, "AssayDate", captures[-1])
            request.reindexObject()
    commit_transaction(portal)
    logger.info("Updating Assay Date for old Analysis Requests [DONE]")
def update_role_mappings(portal):
    """Updates the role mappings (workflow permissions) of the objects
    returned by each of the queries defined in ROLE_MAPPINGS.

    Each ROLE_MAPPINGS entry is indexed as (workflow id, catalog query,
    catalog name). A query may carry a 'not_review_state' key, which is not
    a real catalog index: objects in those states are filtered out manually.

    :param portal: portal object (unused, kept for upgrade-step signature)
    """
    logger.info("Updating role mappings ...")
    # The workflow tool is loop-invariant: fetch it once
    wf_tool = api.get_tool("portal_workflow")
    # uids already processed, grouped per workflow id. Sets give O(1)
    # membership tests (the original used a list, O(n) per brain)
    processed = dict()
    for rm_query in ROLE_MAPPINGS:
        wf_id = rm_query[0]
        workflow = wf_tool.getWorkflowById(wf_id)

        query = rm_query[1].copy()
        # Extract our custom 'not_review_state' filter before searching
        exclude_states = []
        if 'not_review_state' in query:
            exclude_states = query.get('not_review_state', [])
            del query['not_review_state']

        done = processed.setdefault(wf_id, set())
        brains = api.search(query, rm_query[2])
        total = len(brains)
        for num, brain in enumerate(brains):
            if num % 100 == 0:
                logger.info("Updating role mappings '{0}': {1}/{2}".format(
                    wf_id, num, total))
            uid = api.get_uid(brain)
            if uid in done:
                # Already processed, skip
                continue

            if api.get_workflow_status_of(brain) in exclude_states:
                # We explicitely want to exclude objs in these states
                continue

            workflow.updateRoleMappingsFor(api.get_object(brain))
            done.add(uid)
    logger.info("Updating role mappings [DONE]")
Beispiel #9
0
def _folder_item_specifications(self, analysis_brain, item):
    """Set the results range to the item passed in"""
    # Everyone can see valid-ranges
    item['Specification'] = ''
    results_range = api.get_object(analysis_brain).getResultsRange()
    if not results_range:
        return

    # Display the specification interval
    item["Specification"] = get_formatted_interval(results_range, "")

    # Show an icon if out of range
    out_range, out_shoulders = is_out_of_range(analysis_brain)
    if not out_range:
        return

    # Out of range: pick the icon depending on whether the result falls
    # beyond the shoulders or only within the shoulder region
    if out_shoulders:
        img = get_image("exclamation.png", title=_("Result out of range"))
    else:
        img = get_image("warning.png", title=_("Result in shoulder range"))
    self._append_html_element(item, "Result", img)

    # Append the grade indicator next to the result, if any
    grade = api.get_grade_number(analysis_brain)
    if grade:
        span = "&nbsp;<span class='small grade_{}'>G{}</span>".format(
            grade, grade)
        self._append_html_element(item, "Result", span)
Beispiel #10
0
def disable_autopartitioning(portal):
    """Turns off the automatic partitioning flag on every AR Template
    """
    logger.info("Disabling auto-partitioning for Templates ...")
    brains = api.search(dict(portal_type="ARTemplate"), "portal_catalog")
    for brain in brains:
        template = api.get_object(brain)
        template.setAutoPartition(False)
        template.reindexObject()
    logger.info("Disabling auto-partitioning for Templates [DONE]")
Beispiel #11
0
 def get_sortable_title(self, analysis):
     """Returns a string suitable for sorting: the zero-padded sort key of
     the analysis followed by its sortable title
     """
     obj = api.get_object(analysis)
     sort_key = obj.getSortKey()
     if sort_key is None:
         # No explicit sort key: push the analysis to the end
         sort_key = 999999
     title = sortable_title(obj)
     if callable(title):
         title = title()
     return "{:010.3f}{}".format(sort_key, title)
 def get_recipient(self, contact):
     """Returns a dict with the uid, name and email of the given contact,
     or None if no contact or no email address is set
     """
     if not contact:
         return None
     contact_obj = api.get_object(contact)
     email = contact_obj.getEmailAddress()
     if not email:
         # A recipient without an email address is of no use
         return None
     return dict(uid=api.get_uid(contact_obj),
                 name=contact_obj.Title(),
                 email=email)
Beispiel #13
0
def apply_specifications_to_all_sampletypes(portal):
    """Injects the result ranges defined in the xlsx resource into every
    existing "calculated" Analysis Specification, binding each keyword to
    the "Ranges calculation" calculation.

    NOTE(review): relies on Python 2 semantics — ``filter`` must return a
    list (it is appended to right after), so this breaks on Python 3.

    :param portal: portal object (unused, kept for upgrade-step signature)
    """
    logger.info("Applying specs to all sample types ...")

    def set_xlsx_specs(senaite_spec):
        # Adds (or replaces) one ResultsRange entry per keyword found in
        # the xlsx resource to the given AnalysisSpec
        logger.info("Applying specs to {}".format(senaite_spec.Title()))
        query = dict(portal_type="Calculation", title="Ranges calculation")
        calc = api.search(query, "bika_setup_catalog")
        # Require exactly one "Ranges calculation" object
        if len(calc) == 0 or len(calc) > 1:
            logger.info("No calculation found [SKIP]")
            return
        calc_uid = api.get_uid(calc[0])
        keywords = list()
        raw_specifications = get_xls_specifications()
        for spec in raw_specifications:
            keyword = spec.get("keyword")
            if keyword not in keywords:
                # Validate the keyword maps to exactly one Analysis Service
                query = dict(portal_type="AnalysisService", getKeyword=keyword)
                brains = api.search(query, "bika_setup_catalog")
                if len(brains) == 0 or len(brains) > 1:
                    logger.info(
                        "No service found for {} [SKIP]".format(keyword))
                    continue
                keywords.append(keyword)

            # Placeholder 0-0 range bound to the calculation; presumably the
            # real values are resolved at runtime by the calculation —
            # TODO confirm against bhp.lims.specscalculations
            specs_dict = {
                'keyword': keyword,
                'min_operator': 'geq',
                'min': '0',
                'max_operator': 'lt',
                'max': '0',
                'minpanic': '',
                'maxpanic': '',
                'warn_min': '',
                'warn_max': '',
                'hidemin': '',
                'hidemax': '',
                'rangecomments': '',
                'calculation': calc_uid,
            }
            grades_dict = {grade: "" for grade in GRADES_KEYS}
            specs_dict.update(grades_dict)
            # Replace any previous entry for this keyword with the new one
            ranges = api.get_field_value(senaite_spec, 'ResultsRange', [{}])
            ranges = filter(lambda val: val.get('keyword') != keyword, ranges)
            ranges.append(specs_dict)
            senaite_spec.setResultsRange(ranges)

    # Existing AnalysisSpec?
    query = dict(portal_type='AnalysisSpec')
    senaite_specs = api.search(query, 'bika_setup_catalog')
    for senaite_spec in senaite_specs:
        senaite_spec = api.get_object(senaite_spec)
        # Only specs maintained by the importer ("... - calculated") apply
        if not senaite_spec.Title().endswith("calculated"):
            continue
        set_xlsx_specs(senaite_spec)
    logger.info("Applying specs to all sample types [DONE]")
Beispiel #14
0
 def reindex(query, catalog_name, job_num):
     """Reindexes every object returned by the given catalog query.

     NOTE(review): ``portal``, ``commit_transaction``, ``logger`` and
     ``TRANSACTION_THERESHOLD`` are free variables resolved from the
     enclosing scope.

     :param query: catalog query that selects the objects to reindex
     :param catalog_name: name of the catalog to search against
     :param job_num: identifier used only for progress logging
     """
     brains = api.search(query, catalog_name)
     total = len(brains)
     for num, brain in enumerate(brains):
         # Progress feedback every 100 items
         if num % 100 == 0:
             logger.info("Reindexing objects (job {}): {}/{}".format(
                 job_num, num, total))
         # Commit periodically to keep the transaction size bounded
         if num % TRANSACTION_THERESHOLD == 0:
             commit_transaction(portal)
         obj = api.get_object(brain)
         obj.reindexObject()
     commit_transaction(portal)
Beispiel #15
0
def fix_analysis_requests_without_specifications(portal):
    """Walks through all Analysis Requests not yet published and assigns the
    suitable specification
    """
    logger.info("Updating Specifications for Analysis Requests")
    brains = api.search(dict(portal_type="AnalysisRequest"),
                        CATALOG_ANALYSIS_REQUEST_LISTING)
    skip_states = ['published', 'rejected', 'invalid']
    for brain in brains:
        if brain.review_state in skip_states:
            continue
        ar = api.get_object(brain)
        if ar.getSpecification():
            # Already has a specification assigned, leave it intact
            continue

        # Look for the calculated specification of this AR's sample type
        specs_title = "{} - calculated".format(ar.getSampleType().Title())
        spec_query = dict(portal_type="AnalysisSpec", title=specs_title)
        specs = api.search(spec_query, 'bika_setup_catalog')
        if specs:
            ar.setSpecification(api.get_object(specs[0]))
    logger.info("Updating Specifications for Analysis Requests [DONE]")
Beispiel #16
0
def import_specifications(portal):
    """Creates (or updates) dynamic specifications from
    resources/results_ranges.xlsx
    """
    logger.info("Importing specifications ...")

    # One import pass per existing sample type
    brains = api.search(dict(portal_type='SampleType'), 'bika_setup_catalog')
    for brain in brains:
        import_specifications_for_sample_type(portal, api.get_object(brain))

    apply_specifications_to_all_sampletypes(portal)

    logger.info("Importing specifications [DONE]")
Beispiel #17
0
    def folder_referral_lab(self, obj, item, index):
        """Adds the column Referral Lab to the item
        """
        editable = self.listing.is_analysis_edition_allowed(obj)
        obj = api.get_object(obj)
        ref_lab = api.get_field_value(obj, "ReferralLab", None)
        if not editable:
            # Read-only: display the title of the referral lab, if any
            item["ReferralLab"] = api.get_title(ref_lab) if ref_lab else ""
            return item

        # Referral Laboratory is editable: render a selection list
        item["ReferralLab"] = api.get_uid(ref_lab) if ref_lab else ""
        item["choices"]["ReferralLab"] = self.get_referral_labs()
        item['allow_edit'].append('ReferralLab')
        return item
Beispiel #18
0
def generate_requisition_pdf(ar_or_sample):
    """Generates the requisition form pdf for the given Analysis Request and
    attaches it (tagged with the 'Requisition' attachment type when that
    type exists). If a Sample is passed in, a requisition pdf is generated
    for each of its Analysis Requests instead.

    :param ar_or_sample: AnalysisRequest or Sample object
    """
    if not ar_or_sample:
        logger.warn("No Analysis Request or Sample provided")
        return
    if ISample.providedBy(ar_or_sample):
        # Recurse: one requisition pdf per Analysis Request of this Sample
        for ar in ar_or_sample.getAnalysisRequests():
            generate_requisition_pdf(ar)
        return
    elif not IAnalysisRequest.providedBy(ar_or_sample):
        logger.warn("Type not supported: {}".format(repr(ar_or_sample)))
        return

    html = RequisitionFormPdf(ar_or_sample, ar_or_sample.REQUEST).template()
    html = safe_unicode(html).encode('utf-8')
    filename = '%s-requisition' % ar_or_sample.id
    # NOTE: mktemp only reserves a name (race-prone); kept because createPdf
    # needs a plain output path
    pdf_fn = tempfile.mktemp(suffix=".pdf")
    pdf = createPdf(htmlreport=html, outfile=pdf_fn)
    if not pdf:
        logger.warn(
            "Unable to generate the PDF of requisition form for {}".format(
                ar_or_sample.id))
        return

    # Attach the pdf to the Analysis Request
    attid = ar_or_sample.aq_parent.generateUniqueId('Attachment')
    att = _createObjectByType("Attachment", ar_or_sample.aq_parent, attid)
    # Close the handle right away: the original leaked it, which can also
    # make the os.remove below fail on some platforms. setAttachmentFile
    # consumes the file contents during the call.
    with open(pdf_fn) as pdf_file:
        att.setAttachmentFile(pdf_file)
    att.setReportOption('i')  # Ignore in report

    # Try to assign the Requisition Attachment Type
    query = dict(portal_type='AttachmentType', title='Requisition')
    brains = api.search(query, 'bika_setup_catalog')
    if brains:
        att_type = api.get_object(brains[0])
        att.setAttachmentType(att_type)

    # Awkward workaround to rename the file
    attf = att.getAttachmentFile()
    attf.filename = '%s.pdf' % filename
    att.setAttachmentFile(attf)
    att.unmarkCreationFlag()
    renameAfterCreation(att)
    # Append the new attachment to those the AR already has, as uids
    atts = ar_or_sample.getAttachment() + [att] if \
        ar_or_sample.getAttachment() else [att]
    atts = [a.UID() for a in atts]
    ar_or_sample.setAttachment(atts)
    os.remove(pdf_fn)
Beispiel #19
0
    def create_printer(folder, name, values):
        """Creates a BarcodePrinter with the given name and values, or
        updates the existing one when a printer with that title is already
        registered.

        :param folder: container where the new printer is created
        :param name: title of the printer
        :param values: dict with FileName, PrinterPath and Template keys
        :return: the created or updated BarcodePrinter
        """
        query = dict(portal_type="BarcodePrinter", Title=name)
        brains = api.search(query, "bika_setup_catalog")
        if brains:
            # Update the existing printer in place
            printer = api.get_object(brains[0])
            printer.FileName = values["FileName"]
            printer.PrinterPath = values["PrinterPath"]
            printer.Template = values["Template"]
            return printer

        # Create a new Barcode Printer
        obj = _createObjectByType("BarcodePrinter", folder, tmpID())
        obj.edit(title=name,
                 FileName=values["FileName"],
                 PrinterPath=values["PrinterPath"],
                 Template=values["Template"])
        obj.unmarkCreationFlag()
        renameAfterCreation(obj)
        # Return the new printer too, for consistency with the update
        # branch (the original implicitly returned None here)
        return obj
Beispiel #20
0
def update_priorities(portal):
    """Reset the priorities of created ARs to those defined for BHP
    1: Urgent, 3: Routine, 5: STAT
    """
    logger.info("Restoring Priorities ...")
    # Old priority value --> new BHP priority value
    mapping = {'2': 1,   # High --> Urgent
               '4': 5}   # Low --> STAT
    brains = api.search(dict(portal_type='AnalysisRequest'),
                        CATALOG_ANALYSIS_REQUEST_LISTING)
    for brain in brains:
        obj = api.get_object(brain)
        new_priority = mapping.get(obj.getPriority())
        if new_priority is not None:
            obj.setPriority(new_priority)
            obj.reindexObject()
    logger.info("Restoring Priorities [DONE]")
Beispiel #21
0
    def get_bs_object(xlsx_row, xlsx_keyword, portal_type, criteria):
        """Returns the single setup object of the given portal type whose
        ``criteria`` index matches the value stored in the xlsx row under
        ``xlsx_keyword``. Returns None (with a warning) when the value is
        missing or when zero or multiple objects match.
        """
        text_value = xlsx_row.get(xlsx_keyword, None)
        if not text_value:
            logger.warn("Value not set for keyword {}".format(xlsx_keyword))
            return None

        matches = api.search(
            {"portal_type": portal_type, criteria: text_value},
            'bika_setup_catalog')
        if not matches:
            logger.warn("No objects found for type {} and {} '{}'".format(
                portal_type, criteria, text_value))
            return None
        if len(matches) > 1:
            logger.warn(
                "More than one object found for type {} and {} '{}'".format(
                    portal_type, criteria, text_value))
            return None

        return api.get_object(matches[0])
Beispiel #22
0
def fix_i233(portal):
    """Set the date the Sample was received at the lab, not at point of testing
    https://github.com/bhp-lims/bhp.lims/issues/233
    """
    logger.info("Reseting Date Received (#233) ...")
    brains = api.search({}, CATALOG_ANALYSIS_REQUEST_LISTING)
    total = len(brains)
    for num, brain in enumerate(brains):
        # Progress feedback and periodic commits to bound transaction size
        if num % 100 == 0:
            logger.info("Reseting Date Received: {}/{}".format(num, total))
        if num % TRANSACTION_THERESHOLD == 0:
            commit_transaction(portal)

        sample = api.get_object(brain)
        # The date of the "deliver" transition is the real reception date
        date_received = getTransitionDate(sample, "deliver", True)
        if not date_received:
            continue
        if date_received == sample.getDateReceived():
            continue
        sample.setDateReceived(date_received)
        sample.reindexObject(idxs=["getDateReceived", "is_received"])

    logger.info("Reseting Date Received (#233) [DONE]")
Beispiel #23
0
def setup_attachment_types(portal):
    """Creates two attachment types. One for requisition and another one for
    the checklist delivery report
    """
    logger.info("Creating custom Attachment Types ...")
    folder = portal.bika_setup.bika_attachmenttypes

    # Figure out which of the new types are still missing
    pending = list(NEW_ATTACHMENT_TYPES)
    for attachment in folder.values():
        title = attachment.Title()
        if title in pending:
            pending.remove(title)

    # And create them
    for title in pending:
        obj = _createObjectByType("AttachmentType", folder, tmpID())
        obj.edit(
            title=title,
            description="Attachment type for {} files".format(title))
        obj.unmarkCreationFlag()
        renameAfterCreation(obj)

    logger.info("Assign Attachment Types to requisition and rejection")
    # Map each known attachment type title to its AttachmentType object
    types_by_title = dict.fromkeys(NEW_ATTACHMENT_TYPES)
    for attachment in folder.values():
        title = attachment.Title()
        if title in types_by_title:
            types_by_title[title] = attachment

    brains = api.search(dict(portal_type='AnalysisRequest'),
                        CATALOG_ANALYSIS_REQUEST_LISTING)
    for brain in brains:
        obj = api.get_object(brain)
        for attachment in obj.getAttachment():
            if attachment.getAttachmentType():
                # Already typed, leave it intact
                continue
            filename = attachment.getAttachmentFile().filename
            for key, val in types_by_title.items():
                if key.lower() in filename:
                    attachment.setAttachmentType(val)
                    attachment.setReportOption('i')  # Ignore in report
                    break
    logger.info("Creating custom Attachment Types [DONE]")
Beispiel #24
0
def import_specifications_for_sample_type(portal, sample_type):
    """Creates (or updates) the "calculated" Analysis Specification of the
    given sample type from the xlsx resource, binding each valid keyword to
    the "Ranges calculation" calculation (created on demand).

    NOTE(review): relies on Python 2 semantics — ``filter`` must return a
    list (it is appended to right after), so this breaks on Python 3.
    """
    logger.info("Importing specs for {}".format(sample_type.Title()))

    def get_bs_object(xlsx_row, xlsx_keyword, portal_type, criteria):
        # Returns the single setup object of the given portal_type whose
        # `criteria` index matches the value stored in the row under
        # `xlsx_keyword`; None when zero or multiple objects match
        text_value = xlsx_row.get(xlsx_keyword, None)
        if not text_value:
            logger.warn("Value not set for keyword {}".format(xlsx_keyword))
            return None

        query = {"portal_type": portal_type, criteria: text_value}
        brain = api.search(query, 'bika_setup_catalog')
        if not brain:
            logger.warn("No objects found for type {} and {} '{}'".format(
                portal_type, criteria, text_value))
            return None
        if len(brain) > 1:
            logger.warn(
                "More than one object found for type {} and {} '{}'".format(
                    portal_type, criteria, text_value))
            return None

        return api.get_object(brain[0])

    raw_specifications = get_xls_specifications()
    for spec in raw_specifications:

        # Valid Analysis Service?
        service = get_bs_object(spec, "keyword", "AnalysisService",
                                "getKeyword")
        if not service:
            continue

        # The calculation exists?
        # NOTE: a plain dict stands in for the xlsx row so get_bs_object
        # can be reused to look the calculation up by title
        calc_title = "Ranges calculation"
        query = dict(calculation=calc_title)
        calc = get_bs_object(query, "calculation", "Calculation", "title")
        if not calc:
            # Create a new one
            folder = portal.bika_setup.bika_calculations
            _id = folder.invokeFactory("Calculation", id=tmpID())
            calc = folder[_id]
            calc.edit(title=calc_title,
                      PythonImports=[{
                          "module": "bhp.lims.specscalculations",
                          "function": "get_specification_for"
                      }],
                      Formula="get_specification_for($spec)")
            calc.unmarkCreationFlag()
            renameAfterCreation(calc)

        # Existing AnalysisSpec?
        specs_title = "{} - calculated".format(sample_type.Title())
        query = dict(portal_type='AnalysisSpec', title=specs_title)
        aspec = api.search(query, 'bika_setup_catalog')
        if not aspec:
            # Create the new AnalysisSpecs object!
            folder = portal.bika_setup.bika_analysisspecs
            _id = folder.invokeFactory('AnalysisSpec', id=tmpID())
            aspec = folder[_id]
            aspec.edit(title=specs_title)
            aspec.unmarkCreationFlag()
            renameAfterCreation(aspec)
        elif len(aspec) > 1:
            # Ambiguous: refuse to guess which spec should be updated
            logger.warn(
                "More than one Analysis Specification found for {}".format(
                    specs_title))
            continue
        else:
            aspec = api.get_object(aspec[0])
        aspec.setSampleType(sample_type)

        # Set the analysis keyword and bind it to the calculation to use
        # Placeholder 0-0 range: presumably the real values are resolved at
        # runtime by the bound calculation — TODO confirm against
        # bhp.lims.specscalculations
        keyword = service.getKeyword()
        specs_dict = {
            'keyword': keyword,
            'min_operator': 'geq',
            'min': '0',
            'max_operator': 'lt',
            'max': '0',
            'minpanic': '',
            'maxpanic': '',
            'warn_min': '',
            'warn_max': '',
            'hidemin': '',
            'hidemax': '',
            'rangecomments': '',
            'calculation': api.get_uid(calc),
        }
        grades_dict = {grade: "" for grade in GRADES_KEYS}
        specs_dict.update(grades_dict)
        # Replace any previous ranges entry for this keyword
        ranges = api.get_field_value(aspec, 'ResultsRange', [{}])
        ranges = filter(lambda val: val.get('keyword') != keyword, ranges)
        ranges.append(specs_dict)
        aspec.setResultsRange(ranges)
Beispiel #25
0
def setup_catalogs(portal):
    """Setup Plone catalogs

    Registers the catalogs each portal type must be tracked by (per
    CATALOGS_BY_TYPE), adds the indexes from INDEXES and the metadata
    columns from COLUMNS, and reindexes whatever changed.

    :param portal: portal object (unused, kept for upgrade-step signature)
    """
    logger.info("Setup Catalogs ...")

    # Setup catalogs by type
    for type_name, catalogs in CATALOGS_BY_TYPE:
        at = api.get_tool("archetype_tool")
        # get the current registered catalogs
        current_catalogs = at.getCatalogsByType(type_name)
        # get the desired catalogs this type should be in
        desired_catalogs = map(api.get_tool, catalogs)
        # check if the catalogs changed for this portal_type
        if set(desired_catalogs).difference(current_catalogs):
            # fetch the brains to reindex
            brains = api.search({"portal_type": type_name})
            # updated the catalogs
            at.setCatalogsByType(type_name, catalogs)
            logger.info("Assign '%s' type to Catalogs %s" %
                        (type_name, catalogs))
            # reindex the existing objects into their new catalogs
            for brain in brains:
                obj = api.get_object(brain)
                logger.info("Reindexing '%s'" % repr(obj))
                obj.reindexObject()

    # Setup catalog indexes
    # NOTE: indexes are added first and only indexed afterwards, once all
    # of them are registered
    to_index = []
    for catalog, name, meta_type in INDEXES:
        c = api.get_tool(catalog)
        indexes = c.indexes()
        if name in indexes:
            logger.info("Index '%s' already in Catalog [SKIP]" % name)
            continue

        logger.info("Adding Index '%s' for field '%s' to catalog '%s" %
                    (meta_type, name, catalog))
        # ZCTextIndex needs a lexicon, so it has its own helper
        if meta_type == "ZCTextIndex":
            addZCTextIndex(c, name)
        else:
            c.addIndex(name, meta_type)
        to_index.append((c, name))
        logger.info("Added Index '%s' for field '%s' to catalog [DONE]" %
                    (meta_type, name))

    # Index the newly added indexes
    for catalog, name in to_index:
        logger.info("Indexing new index '%s' ..." % name)
        catalog.manage_reindexIndex(name)
        logger.info("Indexing new index '%s' [DONE]" % name)

    # Setup catalog metadata columns
    for catalog, name in COLUMNS:
        c = api.get_tool(catalog)
        if name not in c.schema():
            logger.info("Adding Column '%s' to catalog '%s' ..." %
                        (name, catalog))
            c.addColumn(name)
            logger.info("Added Column '%s' to catalog '%s' [DONE]" %
                        (name, catalog))
        else:
            logger.info("Column '%s' already in catalog '%s'  [SKIP]" %
                        (name, catalog))
            continue
    logger.info("Setup Catalogs [DONE]")