Example #1
def upgrade(tool):
    # Hack to prevent out-of-date upgrading
    # Related: PR #1484
    # https://github.com/bikalabs/Bika-LIMS/pull/1484
    from bika.lims.upgrade import skip_pre315
    if skip_pre315(aq_parent(aq_inner(tool))):
        return True

    portal = aq_parent(aq_inner(tool))
    setup = portal.portal_setup
    typestool = getToolByName(portal, 'portal_types')

    # Remove the folder if it already exists, then re-create it in bika_setup
    try:
        portal.bika_setup.manage_delObjects('bika_arpriorities')
    except BadRequest:
        logger.info("Folder doesn't exist")

    try:
        typestool.constructContent(type_name="ARPriorities",
                                   container=portal.bika_setup,
                                   id='bika_arpriorities',
                                   title='AR Priorities')
        obj = portal.bika_setup.bika_arpriorities
        obj.unmarkCreationFlag()
        obj.reindexObject()
    except BadRequest:
        # folder already exists
        pass

    return True
Example #2
def upgrade(tool):
    """Upgrade step required for Bika LIMS 3.2.0
    """
    portal = aq_parent(aq_inner(tool))

    qi = portal.portal_quickinstaller
    ufrom = qi.upgradeInfo('bika.lims')['installedVersion']
    logger.info("Upgrading Bika LIMS: %s -> %s" % (ufrom, '3.1.11'))

    """Updated profile steps
    list of the generic setup import step names: portal.portal_setup.getSortedImportSteps() <---
    if you want more metadata use this: portal.portal_setup.getImportStepMetadata('jsregistry') <---
    important info about upgrade steps in
    http://stackoverflow.com/questions/7821498/is-there-a-good-reference-list-for-the-names-of-the-genericsetup-import-steps
    """
    setup = portal.portal_setup
    setup.runImportStepFromProfile('profile-bika.lims:default', 'typeinfo')
    setup.runImportStepFromProfile('profile-bika.lims:default', 'jsregistry')
    setup.runImportStepFromProfile('profile-bika.lims:default', 'cssregistry')
    setup.runImportStepFromProfile('profile-bika.lims:default', 'workflow-csv')
    setup.runImportStepFromProfile('profile-bika.lims:default', 'factorytool')
    setup.runImportStepFromProfile('profile-bika.lims:default', 'controlpanel')
    setup.runImportStepFromProfile('profile-bika.lims:default', 'catalog')
    setup.runImportStepFromProfile('profile-bika.lims:default', 'propertiestool')
    setup.runImportStepFromProfile('profile-bika.lims:default', 'skins')
    # Creating all the sampling coordinator roles, permissions and indexes
    create_samplingcoordinator(portal)
    """Update workflow permissions
    """
    wf = getToolByName(portal, 'portal_workflow')
    wf.updateRoleMappings()
    return True
Example #3
def upgrade(tool):

    portal = aq_parent(aq_inner(tool))
    setup = portal.portal_setup
    wf = getToolByName(portal, 'portal_workflow')

    mp = portal.manage_permission
    mp(AddPatient, ['Manager', 'LabManager', 'LabClerk'], 1)
    mp(EditPatient, ['Manager', 'LabManager', 'LabClerk'], 1)
    mp(ViewPatients, ['Manager', 'LabManager', 'Owner', 'LabClerk', 'Doctor', 'RegulatoryInspector'], 1)
    portal.bika_setup.laboratory.reindexObject()

    mp = portal.patients.manage_permission
    mp(CancelAndReinstate, ['Manager', 'LabManager', 'LabClerk'], 0)
    mp(EditPatient, ['Manager', 'LabManager', 'LabClerk'], 0)
    mp(View, ['Manager', 'LabManager', 'LabClerk', 'RegulatoryInspector', 'Doctor'], 0)
    mp(AccessContentsInformation, ['Manager', 'LabManager', 'LabClerk', 'RegulatoryInspector', 'Doctor'], 0)
    mp(ListFolderContents, ['Manager', 'LabManager', 'LabClerk', 'RegulatoryInspector', 'Doctor'], 0)
    mp(ModifyPortalContent, ['Manager', 'LabManager', 'LabClerk', 'RegulatoryInspector', 'Doctor'], 0)
    portal.patients.reindexObject()
    
    setup.runImportStepFromProfile('profile-bika.health:default', 'typeinfo')
    setup.runImportStepFromProfile('profile-bika.health:default', 'workflow')
    setup.runImportStepFromProfile('profile-bika.health:default', 'workflow-csv')
    setup.runImportStepFromProfile('profile-bika.health:default', 'controlpanel')
    setup.runImportStepFromProfile('profile-bika.health:default', 'jsregistry')
    logger.info("Updating workflow role/permission mappings")
    wf.updateRoleMappings()
Example #4
    def _unlinkUser(self):
        """Remove the UID of the current Contact in the User properties and
        update all relevant own properties.
        """
        KEY = "linked_contact_uid"

        # Nothing to do if no user is linked
        if not self.hasUser():
            return False

        user = self.getUser()
        username = user.getId()

        # Unset the UID from the User Property
        user.setMemberProperties({KEY: ""})
        logger.info("Unlinked Contact UID from User {}".format(user.getProperty(KEY, "")))

        # Unset the Username
        self.setUsername(None)

        # Unset the Email
        self.setEmailAddress(None)

        # Revoke local Owner role
        self._delLocalOwnerRole(username)

        # Remove user from "Clients" group
        self._delUserFromGroup(username, group="Clients")

        # somehow the `getUsername` index gets out of sync
        self.reindexObject()

        return True
Example #5
def upgrade(tool):
    """Upgrade step required for Bika LIMS 3.1.10
    """
    portal = aq_parent(aq_inner(tool))
    setup = portal.portal_setup
    # Updated profile steps
    # list of the generic setup import step names: portal.portal_setup.getSortedImportSteps() <---
    # if you want more metadata use this: portal.portal_setup.getImportStepMetadata('jsregistry') <---
    setup.runImportStepFromProfile('profile-bika.lims:default', 'typeinfo')
    setup.runImportStepFromProfile('profile-bika.lims:default', 'jsregistry')
    setup.runImportStepFromProfile('profile-bika.lims:default', 'cssregistry')
    setup.runImportStepFromProfile('profile-bika.lims:default', 'workflow-csv')
    setup.runImportStepFromProfile('profile-bika.lims:default', 'factorytool')
    setup.runImportStepFromProfile('profile-bika.lims:default', 'controlpanel')
    setup.runImportStepFromProfile('profile-bika.lims:default', 'catalog')
    setup.runImportStepFromProfile('profile-bika.lims:default', 'propertiestool')
    # important info about upgrade steps in
    # http://stackoverflow.com/questions/7821498/is-there-a-good-reference-list-for-the-names-of-the-genericsetup-import-steps
    setup.runImportStepFromProfile('profile-bika.lims:default', 'skins')
    # Update workflow permissions
    wf = getToolByName(portal, 'portal_workflow')
    wf.updateRoleMappings()

    qi = portal.portal_quickinstaller
    ufrom = qi.upgradeInfo('bika.lims')['installedVersion']
    logger.info("Upgrading Bika LIMS: %s -> %s" % (ufrom, '319'))

    # Migrations
    WINE119SupplyOrderPermissions(portal)

    return True
Example #6
 def delColumn(self, catalog, column):
     cat = self._getCatalog(catalog)
     if column not in cat.schema():
         return
     cat.delColumn(column)
     logger.info('Deleted column {0} from catalog {1}.'.format(
         column, cat.id))
Example #7
 def cleanAndRebuildCatalog(self, catid):
     catalog = getToolByName(self.portal, catid)
     # manage_catalogRebuild does the same as clearFindAndRebuild,
     # but it also logs CPU and time.
     catalog.manage_catalogRebuild()
     logger.info('Catalog {0} cleaned and rebuilt'.format(catid))
     transaction.commit()
Example #8
def upgrade(tool):
    """ Sort by Type in instruments
    """
    # Hack to prevent out-of-date upgrading
    # Related: PR #1484
    # https://github.com/bikalabs/Bika-LIMS/pull/1484
    from bika.lims.upgrade import skip_pre315
    if skip_pre315(aq_parent(aq_inner(tool))):
        return True


    portal = aq_parent(aq_inner(tool))
    bsc = getToolByName(portal, 'bika_setup_catalog', None)

    if 'getInstrumentType' not in bsc.indexes():
        bsc.addIndex('getInstrumentType', 'FieldIndex')
        bsc.addColumn('getInstrumentType')

        bsc.addIndex('getInstrumentTypeName', 'FieldIndex')
        bsc.addColumn('getInstrumentTypeName')

    # Delete the old 'getType' index; it is no longer used.
    if 'getType' in bsc.indexes():
        bsc.delIndex('getType')
    if 'getType' in bsc.schema():
        bsc.delColumn('getType')

    setup = portal.portal_setup

    logger.info("Reindex added indexes in bika_setup_catalog")
    bsc.manage_reindexIndex(
        ids=['getInstrumentType', 'getInstrumentTypeName', ])

    return True
Example #9
def LIMS1558(portal):
    """Setting Sampling rounds stuff
    """
    # Setting departments and ARtemplates to portal_catalog
    at = getToolByName(portal, 'archetype_tool')
    at.setCatalogsByType('Department', ['bika_setup_catalog', "portal_catalog", ])
    at.setCatalogsByType('ARTemplate', ['bika_setup_catalog', 'portal_catalog'])
    for obj in portal.bika_setup.bika_departments.objectValues():
        obj.unmarkCreationFlag()
        obj.reindexObject()
    for obj in portal.bika_setup.bika_artemplates.objectValues():
        obj.unmarkCreationFlag()
        obj.reindexObject()
    # If Sampling rounds folder is not created yet, we should create it
    typestool = getToolByName(portal, 'portal_types')
    qi = portal.portal_quickinstaller
    if not portal['bika_setup'].get('bika_samplingrounds'):
        typestool.constructContent(type_name="SamplingRounds",
                                   container=portal['bika_setup'],
                                   id='bika_samplingrounds',
                                   title='Sampling Rounds')
    obj = portal['bika_setup']['bika_samplingrounds']
    obj.unmarkCreationFlag()
    obj.reindexObject()
    if not portal['bika_setup'].get('bika_samplingrounds'):
        logger.info("SamplingRounds not created")
    # Install Products.DataGridField
    qi.installProducts(['Products.DataGridField'])
    # add new types not to list in nav
    # SamplingRound
    portal_properties = getToolByName(portal, 'portal_properties')
    ntp = getattr(portal_properties, 'navtree_properties')
    types = list(ntp.getProperty('metaTypesNotToList'))
    types.append("SamplingRound")
    ntp.manage_changeProperties(metaTypesNotToList=types)
Example #10
    def process_form(self):

        # We want to first know the total number of analyses to be created
        num_analyses = 0
        uids_arr = [ar.get('Analyses', []) for ar in self.valid_states.values()]
        for arr in uids_arr:
            num_analyses += len(arr)

        if num_analyses < 50:
            # Do not process asynchronously
            return AnalysisRequestSubmit.process_form(self)

        # Only load asynchronously if queue ar-create is available
        task_queue = queryUtility(ITaskQueue, name='ar-create')
        if task_queue is None:
            # ar-create queue not registered. Proceed synchronously
            logger.info("SYNC: total = %s" % num_analyses)
            return AnalysisRequestSubmit.process_form(self)
        else:
            # ar-create queue registered, create asynchronously
            logger.info("[A]SYNC: total = %s" % num_analyses)
            path = self.request.PATH_INFO
            path = path.replace('_submit_async', '_submit')
            task_queue.add(path, method='POST')
            msg = _('One job added to the Analysis Request creation queue')
            self.context.plone_utils.addPortalMessage(msg, 'info')
            return json.dumps({'success': 'With taskqueue'})
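The dispatch above follows a simple rule: small jobs run inline; large jobs go to the queue when one is registered, and fall back to synchronous processing otherwise. A minimal sketch of that rule in plain Python (the 50-item threshold mirrors the snippet; do_work and the list-based queue are illustrative stand-ins, not part of bika.lims):

def do_work(item):
    return item  # stand-in for the real per-item processing

def process(items, queue=None, threshold=50):
    # Small job, or no queue registered: process synchronously
    if len(items) < threshold or queue is None:
        return [do_work(i) for i in items]
    # Big job and a queue exists: defer the whole job
    queue.append(list(items))  # stand-in for task_queue.add(path, ...)
    return "queued"

jobs = []
print(process([1, 2, 3], queue=jobs))         # processed inline
print(process(list(range(100)), queue=jobs))  # "queued"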
Example #11
def ObjectRemovedEventHandler(analysis, event):

    # This handler fires for DuplicateAnalysis because
    # DuplicateAnalysis also provides IAnalysis.
    # DuplicateAnalysis doesn't have analysis_workflow.
    if analysis.portal_type == "DuplicateAnalysis":
        return

    logger.info("ObjectRemoved: %s" % analysis)

    # May need to promote the AR's review_state
    #  if all other analyses are at a higher state than this one was.
    wf = getToolByName(analysis, 'portal_workflow')
    ar = analysis.aq_parent
    ar_UID = ar.UID()
    can_submit = True
    can_attach = True
    can_verify = True
    can_publish = True

    for a in ar.getAnalyses():
        a_state = a.review_state
        if a_state in \
           ('sample_due', 'sample_received',):
            can_submit = False
        if a_state in \
           ('sample_due', 'sample_received', 'attachment_due',):
            can_attach = False
        if a_state in \
           ('sample_due', 'sample_received', 'attachment_due', 'to_be_verified',):
            can_verify = False
        if a_state in \
           ('sample_due', 'sample_received', 'attachment_due', 'to_be_verified', 'verified',):
            can_publish = False

    # Note: AR adds itself to the skiplist so we have to take it off again
    #       to allow multiple promotions (maybe by more than one deleted analysis).
    if can_submit and wf.getInfoFor(ar, 'review_state') == 'sample_received':
        wf.doActionFor(ar, 'submit')
        analysis.REQUEST["workflow_skiplist"].remove(ar_UID)
    if can_attach and wf.getInfoFor(ar, 'review_state') == 'attachment_due':
        wf.doActionFor(ar, 'attach')
        analysis.REQUEST["workflow_attach_skiplist"].remove(ar_UID)
    if can_verify and wf.getInfoFor(ar, 'review_state') == 'to_be_verified':
        analysis.REQUEST["workflow_skiplist"].append('verify all analyses')
        wf.doActionFor(ar, 'verify')
        analysis.REQUEST["workflow_skiplist"].remove(ar_UID)
    if can_publish and wf.getInfoFor(ar, 'review_state') == 'verified':
        analysis.REQUEST["workflow_skiplist"].append('publish all analyses')
        wf.doActionFor(ar, 'publish')
        analysis.REQUEST["workflow_skiplist"].remove(ar_UID)

    ar_ws_state = wf.getInfoFor(ar, 'worksheetanalysis_review_state')
    if ar_ws_state == 'unassigned':
        if not ar.getAnalyses(worksheetanalysis_review_state = 'unassigned'):
            if ar.getAnalyses(worksheetanalysis_review_state = 'assigned'):
                wf.doActionFor(ar, 'assign')
                analysis.REQUEST["workflow_skiplist"].remove(ar_UID)

    return
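Each can_* flag above asks the same question: is every sibling analysis already at or past a given state? A standalone sketch of that check, assuming the chronological state order implied by the snippet:

# Workflow states in the chronological order implied by the snippet
ORDER = ["sample_due", "sample_received", "attachment_due",
         "to_be_verified", "verified", "published"]

def can_promote(states, target):
    """True when no analysis is in a state strictly before `target`."""
    blockers = ORDER[:ORDER.index(target)]
    return all(state not in blockers for state in states)

# can_verify analogue: blocked while one analysis is still pending
print(can_promote(["verified", "verified"], "verified"))        # True
print(can_promote(["attachment_due", "verified"], "verified"))  # False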
Example #12
def analyses_creation_date_recover():
    """
    This function walks over all Analysis Request objects, obtains their
    associated analyses and checks whether their creation date is older than
    the Analysis Request's one. If this condition is met, the system sets the
    analyses' creation date to the Analysis Request's one.
    :return: Boolean. True if the process succeeds, and False otherwise.
    """

    ar_catalog = get_tool(CATALOG_ANALYSIS_REQUEST_LISTING)
    ans_catalog = get_tool(CATALOG_ANALYSIS_LISTING)
    # Getting all analysis requests to walk through
    ar_brains = ar_catalog()
    total_ars = len(ar_brains)
    total_iterated = 0
    logger.info("Analysis Requests to walk over: {}".format(total_ars))
    total_modified = 0
    for ar_brain in ar_brains:
        ans_brains = ans_catalog({"getRequestUID": ar_brain.UID})
        analyses_modified = set_correct_created_date(
            ar_brain.created, ans_brains)
        total_modified += analyses_modified
        total_iterated = commit_action(
            total_ars, total_iterated, total_modified)
    transaction.commit()
    logger.info("Analyses creation date sanitized.")
    return True
Example #13
def get_search_results(portal_type=None, uid=None, **kw):
    """Search the catalog and return the results

    :returns: Catalog search results
    :rtype: iterable
    """

    # If we have a UID, return the object immediately
    if uid is not None:
        logger.info("UID '%s' found, returning the object immediately" % uid)
        return u.to_list(get_object_by_uid(uid))

    # allow searching for the Plone Site by portal_type
    include_portal = False
    if u.to_string(portal_type) == "Plone Site":
        include_portal = True

    # The request may contain a list of portal_types, e.g.
    # `?portal_type=Document&portal_type=Plone Site`
    if "Plone Site" in u.to_list(req.get("portal_type")):
        include_portal = True

    # Build and execute a catalog query
    results = search(portal_type=portal_type, uid=uid, **kw)

    if include_portal:
        results = list(results) + u.to_list(get_portal())

    return results
Example #14
 def delIndex(self, catalog, index):
     cat = self._getCatalog(catalog)
     if index not in cat.indexes():
         return
     cat.delIndex(index)
     logger.info('Deleted index {0} from catalog {1}'.format(
         index, cat.id))
Example #15
def AfterTransitionEventHandler(sample, event):

    # creation doesn't have a 'transition'
    if not event.transition:
        return

    if not sample.REQUEST.has_key('workflow_skiplist'):
        sample.REQUEST['workflow_skiplist'] = [sample.UID(), ]
    else:
        if sample.UID() in sample.REQUEST['workflow_skiplist']:
            logger.info("SM Skip")
            return
        else:
            sample.REQUEST["workflow_skiplist"].append(sample.UID())

    logger.info("Starting: %s on %s" % (event.transition.id, sample))

    workflow = getToolByName(sample, 'portal_workflow')

    if event.transition.id == "receive":
        if sample.getDateSampled() > DateTime():
            raise WorkflowException("Sampling date is in the future")
        sample.setDateReceived(DateTime())
        sample.reindexObject(idxs = ["review_state", "getDateReceived", ])
        # when a sample is received, all associated
        # AnalysisRequests are also transitioned
        for ar in sample.getAnalysisRequests():
            if not ar.UID() in sample.REQUEST['workflow_skiplist']:
                workflow.doActionFor(ar, "receive")

    elif event.transition.id == "expire":
        sample.setDateExpired(DateTime())
        sample.reindexObject(idxs = ["review_state", "getDateExpired", ])

    #---------------------
    # Secondary workflows:
    #---------------------

    elif event.transition.id == "reinstate":
        sample.reindexObject(idxs = ["cancellation_state", ])
        # reinstate all ARs for this sample.
        ars = sample.getAnalysisRequests()
        for ar in ars:
            if not ar.UID() in sample.REQUEST['workflow_skiplist']:
                ar_state = workflow.getInfoFor(ar, 'cancellation_state')
                if ar_state == 'cancelled':
                    workflow.doActionFor(ar, 'reinstate')

    elif event.transition.id == "cancel":
        sample.reindexObject(idxs = ["cancellation_state", ])
        # cancel all ARs for this sample.
        ars = sample.getAnalysisRequests()
        for ar in ars:
            if not ar.UID() in sample.REQUEST['workflow_skiplist']:
                ar_state = workflow.getInfoFor(ar, 'cancellation_state')
                if ar_state == 'active':
                    workflow.doActionFor(ar, 'cancel')

    return
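The workflow_skiplist handling above is an idempotency guard: each object records its UID on the request so cascaded transitions don't re-enter the handler. The same guard in isolation, with a plain dict standing in for REQUEST:

def first_visit(request, uid):
    """Return True and record the uid on the first call; False after."""
    seen = request.setdefault("workflow_skiplist", [])
    if uid in seen:
        return False
    seen.append(uid)
    return True

request = {}
print(first_visit(request, "sample-1"))  # True  -> run the handler
print(first_visit(request, "sample-1"))  # False -> "SM Skip"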
Example #16
def doActionFor(instance, action_id):
    workflow = getToolByName(instance, "portal_workflow")
    if not skip(instance, action_id, peek=True):
        try:
            workflow.doActionFor(instance, action_id)
        except WorkflowException as e:
            logger.info("Could not do action %s for %s: %s" % (action_id, instance.Title(), str(e)))
Example #17
def upgrade_attachments_to_blobs(portal):
    """get/set the attachment file fields to migrate existing fields to blob
    """
    logger.info("Upgrading Attachments to Blobs")

    pc = api.get_tool("portal_catalog")
    attachments = map(api.get_object, pc({"portal_type": "Attachment"}))
    for attachment in attachments:
        attachment.setAttachmentFile(attachment.getAttachmentFile())
Example #18
def get_generated_number(context, config, variables, **kw):
    """Generate a new persistent number with the number generator for the
    sequence type "Generated"
    """

    # separator where to split the ID
    separator = kw.get('separator', '-')

    # allow portal_type override
    portal_type = kw.get("portal_type") or api.get_portal_type(context)

    # The ID format for string interpolation, e.g. WS-{seq:03d}
    id_template = config.get("form", "")

    # The split length defines where the variable part of the ID template begins
    split_length = config.get("split_length", 1)

    # The prefix template is the static part of the ID
    prefix_template = slice(id_template, separator=separator, end=split_length)

    # get the number generator
    number_generator = getUtility(INumberGenerator)

    # generate the key for the number generator storage
    prefix = prefix_template.format(**variables)

    # normalize out any unicode characters like Ö, É, etc. from the prefix
    prefix = api.normalize_filename(prefix)

    # The key used for the storage
    key = make_storage_key(portal_type, prefix)

    # Handle flushed storage
    if key not in number_generator:
        max_num = 0
        existing = get_ids_with_prefix(portal_type, prefix)
        numbers = map(lambda id: get_seq_number_from_id(id, id_template, prefix), existing)
        # figure out the highest number in the sequence
        if numbers:
            max_num = max(numbers)
        # set the number generator
        logger.info("*** SEEDING Prefix '{}' to {}".format(prefix, max_num))
        number_generator.set_number(key, max_num)

    if not kw.get("dry_run", False):
        # Generate a new number
        # NOTE Even when the number exceeds the given ID sequence format,
        #      it will overflow gracefully, e.g.
        #      >>> {sampleId}-R{seq:03d}'.format(sampleId="Water", seq=999999)
        #      'Water-R999999'
        number = number_generator.generate_number(key=key)
    else:
        # => This allows us to "preview" the next generated ID in the UI
        # TODO Show the user the next generated number somewhere in the UI
        number = number_generator.get(key, 1)
    return number
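The interplay of prefix extraction and sequence seeding can be shown without the bika helpers (slice, make_storage_key and get_ids_with_prefix from the snippet are not used here); a sketch with plain string operations:

def prefix_of(id_template, separator="-", split_length=1):
    """Static part of an ID template, e.g. 'WS-{seq:03d}' -> 'WS'."""
    return separator.join(id_template.split(separator)[:split_length])

def seed_from_existing(existing_ids, separator="-"):
    """Highest sequence number already in use, to re-seed a flushed storage."""
    numbers = [int(i.split(separator)[-1]) for i in existing_ids]
    return max(numbers) if numbers else 0

print(prefix_of("WS-{seq:03d}"))                 # WS
print(seed_from_existing(["WS-007", "WS-012"]))  # 12
print("WS-{seq:03d}".format(seq=13))             # WS-013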
Example #19
def batch_receive(instance):
    """When a batch is recieved, all associated ARs must be received.
      (normal AR workflow cascade will transition samples, analyses, etc)
    """
    workflow = getToolByName(instance, 'portal_workflow')
    for ar in instance.getAnalysisRequests():
        try:
            workflow.doActionFor(ar, 'receive')
        except WorkflowException:
            logger.info("batch_receive: could not execute receive on %s" % ar.id)
Example #20
def upgrade(tool):

    # Adding bika.health.analysisrequest.ar_add_health_standard.js
    portal = aq_parent(aq_inner(tool))
    setup = portal.portal_setup
    qi = portal.portal_quickinstaller
    ufrom = qi.upgradeInfo('bika.health')['installedVersion']    
    logger.info("Upgrading Bika Health: %s -> %s" % (ufrom, '318'))


    return True
Example #21
 def create_attachment(self, ws, infile):
     attuid = self.create_mime_attachmenttype()
     attachment = None
     if attuid and infile:
         attachment = _createObjectByType("Attachment", ws, tmpID())
         logger.info("Creating %s in %s" % (attachment, ws))
         attachment.edit(
             AttachmentFile=infile,
             AttachmentType=attuid,
             AttachmentKeys='Results, Automatic import')
         attachment.reindexObject()
     return attachment
Example #22

    def __call__(self, result=None, specification=None, **kwargs):
        searchTerm = _c(self.request.get('searchTerm', '')).lower()
        force_all = self.request.get('force_all', 'true')
        searchFields = 'search_fields' in self.request \
            and json.loads(_u(self.request.get('search_fields', '[]'))) \
            or ('Title',)
        # lookup objects from ZODB
        catalog_name = _c(self.request.get('catalog_name', 'portal_catalog'))
        catalog = getToolByName(self.context, catalog_name)
        base_query = json.loads(_c(self.request['base_query']))
        search_query = json.loads(_c(self.request.get('search_query', "{}")))

        # first with all queries
        contentFilter = dict((k, self.to_utf8(v)) for k, v in base_query.items())
        contentFilter.update(dict((k, self.to_utf8(v)) for k, v in search_query.items()))
        try:
            brains = catalog(contentFilter)
        except:
            from bika.lims import logger
            logger.info(contentFilter)
            raise
        if brains and searchTerm:
            _brains = []
            if len(searchFields) == 0 \
                    or (len(searchFields) == 1 and searchFields[0] == 'Title'):
                _brains = [p for p in brains
                           if p.Title.lower().find(searchTerm) > -1]
            else:
                for p in brains:
                    for fieldname in searchFields:
                        value = getattr(p, fieldname, None)
                        if not value:
                            instance = p.getObject()
                            schema = instance.Schema()
                            if fieldname in schema:
                                value = schema[fieldname].get(instance)
                        if callable(value):
                            value = value()
                        if value and value.lower().find(searchTerm) > -1:
                            _brains.append(p)
                            break

            brains = _brains
        # Then just base_query alone ("show all if no match")
        if not brains and force_all.lower() == 'true':
            if search_query:
                brains = catalog(base_query)
                if brains and searchTerm:
                    _brains = [p for p in brains
                               if p.Title.lower().find(searchTerm) > -1]
                    if _brains:
                        brains = _brains
        return brains
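The control flow of Example #22 is a two-stage search: filter by the combined query plus search term first, then, if nothing matched and force_all is set, fall back to the base query alone ("show all if no match"). A compact sketch of that fallback over plain lists:

def two_stage_search(titles, term, force_all=True):
    """Term-filtered result first; the unfiltered set as a fallback."""
    hits = [t for t in titles if term.lower() in t.lower()]
    if not hits and force_all:
        return titles
    return hits

titles = ["Calcium", "Magnesium", "Iron"]
print(two_stage_search(titles, "magn"))  # ['Magnesium']
print(two_stage_search(titles, "zzz"))   # all three (fallback)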
Example #23
    def setup_data_load(self):
        transaction.commit()
        login(self.portal.aq_parent, SITE_OWNER_NAME)  # again

        # load test data
        self.request.form['setupexisting'] = 1
        self.request.form['existing'] = "bika.lims:test"
        lsd = LoadSetupData(self.portal, self.request)
        logger.info('Loading data...')
        lsd()
        logger.info('Loading data finished...')
        logout()
Example #24
def upgrade(tool):
    """Upgrade step required for Bika LIMS 3.3.0
    """
    portal = aq_parent(aq_inner(tool))

    qi = portal.portal_quickinstaller
    ufrom = qi.upgradeInfo('bika.lims')['installedVersion']
    logger.info("Upgrading Bika LIMS: %s -> %s" % (ufrom, '3.3.0'))

    upgrade_attachments_to_blobs(portal)

    return True
Example #25

def AfterTransitionEventHandler(instance, event):

    # creation doesn't have a 'transition'
    if not event.transition:
        return

    debug_mode = App.config.getConfiguration().debug_mode
    if not debug_mode:
        return

    if not skip(instance, event.transition.id, peek=True):
        logger.info("Started transition %s on %s" %
                    (event.transition.id, instance))
Example #26
 def addColumn(self, catalog, column):
     cat = self._getCatalog(catalog)
     if column in cat.schema():
         return
     cat.addColumn(column)
     logger.info('Added column {0} to catalog {1}'.format(
         column, cat.id))
     if cat.id not in self.refreshcatalog:
         logger.info("{} to refresh because col {} added".format(
             catalog, column
         ))
         self.refreshcatalog.append(cat.id)
     transaction.commit()
Example #27
def AfterTransitionEventHandler(instance, event):
    """ This event is executed after each transition and delegates further
    actions to 'after_x_transition_event' function if exists in the instance
    passed in, where 'x' is the id of the event's transition.

    If the passed-in instance does not have a function with the
    aforementioned signature, or if there is no transition for the state
    change (like the 'creation' state), or if the same transition has already
    been run for the passed-in instance during the current server request,
    then the function does nothing.

    :param instance: the instance that has been transitioned
    :type instance: ATContentType
    :param event: event that holds the transition performed
    :type event: IObjectEvent
    """
    # there is no transition for the state change (creation doesn't have a
    # 'transition')
    if not event.transition:
        return

    # Set the request variable preventing cascade's from re-transitioning.
    if skip(instance, event.transition.id):
        return

    clazzname = instance.__class__.__name__
    currstate = getCurrentState(instance)
    msg = "Transition '{0}' finished: '{1}' '{2}' ({3})".format(
        event.transition.id,  clazzname, instance.getId(), currstate)
    logger.info(msg)

    # At this point the object has already been transitioned, but further
    # actions are probably still needed, so make sure it is reindexed
    # before going forward.
    instance.reindexObject()

    key = 'after_{0}_transition_event'.format(event.transition.id)
    after_event = getattr(instance, key, False)
    if not after_event:
        # TODO Workflow. this conditional is only for backwards compatibility,
        # to be removed when all workflow_script_* methods in contents are
        # replaced by the more explicit signature 'after_*_transition_event'
        key = 'workflow_script_' + event.transition.id
        after_event = getattr(instance, key, False)

    if not after_event:
        return

    msg = "AfterTransition: '{0}.{1}'".format(clazzname, key)
    logger.info(msg)
    after_event()
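The delegation scheme — look up after_<transition>_transition_event on the instance and fall back to the legacy workflow_script_<transition> name — is plain getattr dispatch. A self-contained sketch:

class Sample(object):
    def after_submit_transition_event(self):
        print("custom submit logic")

def fire_after_event(instance, transition_id):
    """Try the modern handler name first, then the legacy one."""
    for key in ("after_%s_transition_event" % transition_id,
                "workflow_script_%s" % transition_id):
        handler = getattr(instance, key, None)
        if callable(handler):
            return handler()

fire_after_event(Sample(), "submit")  # prints "custom submit logic"
fire_after_event(Sample(), "verify")  # no handler -> does nothing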
Example #28
def upgrade(tool):
    portal = aq_parent(aq_inner(tool))

    bc = getToolByName(portal, 'bika_catalog')
    bac = getToolByName(portal, 'bika_analysis_catalog')
    pc = getToolByName(portal, 'portal_catalog')
    setup = portal.portal_setup
    wf = getToolByName(portal, 'portal_workflow')

    # Update all tools in which changes have been made
    setup.runImportStepFromProfile('profile-bika.lims:default', 'propertiestool')
    setup.runImportStepFromProfile('profile-bika.lims:default', 'typeinfo')
    setup.runImportStepFromProfile('profile-bika.lims:default', 'repositorytool')
    setup.runImportStepFromProfile('profile-bika.lims:default', 'workflow')
    setup.runImportStepFromProfile('profile-bika.lims:default', 'factorytool')
    setup.runImportStepFromProfile('profile-bika.lims:default', 'jsregistry')
    setup.runImportStepFromProfile('profile-bika.lims:default', 'portlets', run_dependencies=False)
    setup.runImportStepFromProfile('profile-bika.lims:default', 'viewlets')
    setup.runImportStepFromProfile('profile-plone.app.jquery:default', 'jsregistry')

    # Re-import the default permission maps
    gen = BikaGenerator()
    gen.setupPermissions(portal)

    logger.info("Updating workflow role/permission mappings")
    wf.updateRoleMappings()
    logger.info("Rebuilding portal_catalog")
    pc.clearFindAndRebuild()
    logger.info("Rebuilding bika_analysis_catalog")
    bac.clearFindAndRebuild()
    logger.info("Rebuilding bika_catalog")
    bc.clearFindAndRebuild()

    return True
Example #29
 def addIndex(self, catalog, index, indextype):
     cat = self._getCatalog(catalog)
     if index in cat.indexes():
         return
     if indextype == 'ZCTextIndex':
         addZCTextIndex(cat, index)
     else:
         cat.addIndex(index, indextype)
     logger.info('Catalog index %s added.' % index)
     indexes = self.reindexcatalog.get(cat.id, [])
     indexes.append(index)
     indexes = list(set(indexes))
     self.reindexcatalog[cat.id] = indexes
     transaction.commit()
Example #30
    def __call__(self):
        form = self.request.form
        portal = getSite()
        workbook = None

        if 'setupexisting' in form and 'existing' in form and form['existing']:
            fn = form['existing'].split(":")
            self.dataset_project = fn[0]
            self.dataset_name = fn[1]
            path = 'setupdata/%s/%s.xlsx' % \
                (self.dataset_name, self.dataset_name)
            filename = resource_filename(self.dataset_project, path)
            workbook = load_workbook(filename=filename)  # , use_iterators=True)
        elif 'setupfile' in form and 'file' in form and form['file'] \
                and 'projectname' in form and form['projectname']:
            self.dataset_project = form['projectname']
            tmp = tempfile.mktemp()
            file_content = form['file'].read()
            with open(tmp, 'wb') as f:
                f.write(file_content)
            workbook = load_workbook(filename=tmp)  # , use_iterators=True)
            self.dataset_name = 'uploaded'

        assert(workbook is not None)

        adapters = [[name, adapter]
                    for name, adapter
                    in list(getAdapters((self.context, ), ISetupDataImporter))]
        for sheetname in workbook.get_sheet_names():
            transaction.savepoint()
            ad_name = sheetname.replace(" ", "_")
            if ad_name in [a[0] for a in adapters]:
                adapter = [a[1] for a in adapters if a[0] == ad_name][0]
                adapter(self, workbook, self.dataset_project, self.dataset_name)
                adapters = [a for a in adapters if a[0] != ad_name]
        for name, adapter in adapters:
            transaction.savepoint()
            adapter(self, workbook, self.dataset_project, self.dataset_name)

        check = len(self.deferred)
        while len(self.deferred) > 0:
            new = self.solve_deferred()
            logger.info("solved %s of %s deferred references" % (
                check - new, check))
            if new == check:
                raise Exception("%s unsolved deferred references: %s" % (
                    len(self.deferred), self.deferred))
            check = new

        logger.info("Rebuilding bika_setup_catalog")
        bsc = getToolByName(self.context, 'bika_setup_catalog')
        bsc.clearFindAndRebuild()
        logger.info("Rebuilding bika_catalog")
        bc = getToolByName(self.context, 'bika_catalog')
        bc.clearFindAndRebuild()
        logger.info("Rebuilding bika_analysis_catalog")
        bac = getToolByName(self.context, 'bika_analysis_catalog')
        bac.clearFindAndRebuild()

        message = PMF("Changes saved.")
        self.context.plone_utils.addPortalMessage(message)
        self.request.RESPONSE.redirect(portal.absolute_url())
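The deferred-reference loop at the end retries unresolved references and aborts as soon as a full pass makes no progress. The convergence logic in miniature (how a reference becomes resolvable is invented here for illustration):

def solve(deferred, known):
    """One pass: drop every reference whose target is now known."""
    return [ref for ref in deferred if ref not in known]

deferred, known = ["a", "b", "c"], set()
while deferred:
    check = len(deferred)
    known.add(deferred[0])  # stand-in: each pass resolves something
    deferred = solve(deferred, known)
    print("solved %s of %s deferred references" % (check - len(deferred), check))
    if len(deferred) == check:
        raise Exception("%s unsolved deferred references" % check)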
Example #31
def fix_javascript_registry(portal):
    """Fix JS registry
    """

    portal_javascripts = portal.portal_javascripts

    to_remove = [
        "++resource++senaite.lims.bika.static/js/bika.lims.common.js",
        "++resource++senaite.lims.bika.static/js/bika.lims.analysisrequest.js",
        "++resource++senaite.lims.jquery.js/jquery-1.12.4.min.js",
    ]

    for res_id in to_remove:
        logger.info("Removing JS resource {}".format(res_id))
        portal_javascripts.unregisterResource(res_id)
Example #32

 def attach_attachment(self, analysis, attachment):
     if attachment:
         an_atts = analysis.getAttachment()
         attachments = []
         for an_att in an_atts:
             if an_att.getAttachmentFile().filename != \
                     attachment.getAttachmentFile().filename:
                 logger.info("Attaching %s to %s" %
                             (attachment.UID(), analysis))
                 attachments.append(attachment.UID())
                 analysis.setAttachment(attachments)
                 break
         else:
             self.warn("Attachment %s was not linked to analysis %s" %
                       (attachment, analysis))
Example #33
    def translate_review_state(self, state, portal_type):
        """Translates the review state to the current set language

        :param state: Review state title
        :type state: basestring
        :returns: Translated review state title
        """
        ts = api.get_tool("translation_service")
        wf = api.get_tool("portal_workflow")
        state_title = wf.getTitleForStateOnType(state, portal_type)
        translated_state = ts.translate(
            _(state_title or state), context=self.request)
        logger.info(u"ListingView:translate_review_state: {} -> {} -> {}"
                    .format(state, state_title, translated_state))
        return translated_state
Example #34
 def addColumn(self, catalog, column):
     cat = self._getCatalog(catalog)
     if column in cat.schema():
         return
     try:
         cat.addColumn(column)
         logger.info('Added column {0} to catalog {1}'.format(
             column, cat.id))
         if cat.id not in self.refreshcatalog:
             self.refreshcatalog.append(cat.id)
         transaction.commit()
     except:
         logger.error('Unable to add column {0} to catalog {1}'.format(
             column, cat.id))
         raise
Example #35
def get_role_mappings_candidates(portal):
    logger.info("Getting candidates for role mappings ...")
    candidates = list()
    # Analysis workflow
    candidates.extend(get_rm_candidates_for_analysisworkfklow(portal))
    # Duplicate analysis workflow
    candidates.extend(get_rm_candidates_for_duplicateanalysisworkflow(portal))
    # Reference Analysis Workflow
    candidates.extend(get_rm_candidates_for_referenceanalysisworkflow(portal))
    # Analysis Request workflow
    candidates.extend(get_rm_candidates_for_ar_workflow(portal))
    # Worksheet workflow
    candidates.extend(get_rm_candidates_for_worksheet_workflow(portal))

    return candidates
Example #36
def migrate_attachment_report_options(portal):
    """Migrate Attachments with the report option "a" (attach in report)
       to the option to "i" (ignore in report)
    """
    attachments = api.search({"portal_type": "Attachment"})
    total = len(attachments)
    logger.info("Migrating 'Attach to Report' -> 'Ingore in Report' "
                "for %d attachments" % total)
    for num, attachment in enumerate(attachments):
        obj = api.get_object(attachment)

        if obj.getReportOption() in ["a", ""]:
            obj.setReportOption("i")
            obj.reindexObject()
            logger.info("Migrated Attachment %s" % obj.getTextTitle())
Example #37
 def publish(self, ar):
     """Set status to prepublished/published/republished
     """
     wf = api.get_tool("portal_workflow")
     status = wf.getInfoFor(ar, "review_state")
     transitions = {"verified": "publish",
                    "published": "republish"}
     transition = transitions.get(status, "prepublish")
     logger.info("AR Transition: {} -> {}".format(status, transition))
     try:
         wf.doActionFor(ar, transition)
         return True
     except WorkflowException as e:
         logger.debug(e)
         return False
Example #38
def cleanup_worksheet_catalog(portal):
    """Removes stale indexes and metadata from worksheet_catalog.
    """
    cat_id = CATALOG_WORKSHEET_LISTING
    logger.info("Cleaning up indexes and metadata from {} ...".format(cat_id))
    indexes_to_remove = []
    metadata_to_remove = [
        "getLayout",
    ]
    for index in indexes_to_remove:
        del_index(portal, cat_id, index)

    for metadata in metadata_to_remove:
        del_metadata(portal, cat_id, metadata)
    commit_transaction(portal)
Example #39
def RemoveVersionableTypes():
    # Remove versionable types
    logger.info("Removing versionable types...")
    portal_repository = get_tool('portal_repository')
    non_versionable = ['AnalysisSpec',
                       'ARPriority',
                       'Method',
                       'SamplePoint',
                       'SampleType',
                       'StorageLocation',
                       'WorksheetTemplate', ]
    versionable = list(portal_repository.getVersionableContentTypes())
    vers = [ver for ver in versionable if ver not in non_versionable]
    portal_repository.setVersionableContentTypes(vers)
    logger.info("Versionable types updated: {0}".format(', '.join(vers)))
Example #40
def reindex_sortable_title(portal):
    """Reindex sortable_title from some catalogs
    """
    catalogs = [
        "bika_catalog",
        "bika_setup_catalog",
        "portal_catalog",
    ]
    for catalog_name in catalogs:
        logger.info(
            "Reindexing sortable_title for {} ...".format(catalog_name))
        handler = ZLogHandler(steps=100)
        catalog = api.get_tool(catalog_name)
        catalog.reindexIndex("sortable_title", None, pghandler=handler)
        commit_transaction(portal)
Example #41
def fix_client_permissions(portal):
    """Fix client permissions
    """
    wfs = get_workflows()

    start = time.time()
    clients = portal.clients.objectValues()
    total = len(clients)
    for num, client in enumerate(clients):
        logger.info("Fixing permission for client {}/{} ({})".format(
            num, total, client.getName()))
        update_role_mappings(client, wfs=wfs)
    end = time.time()
    logger.info("Fixing client permissions took %.2fs" % float(end - start))
    transaction.commit()
Example #42
 def get_services(self):
     """returns all services
     """
     catalog = api.get_tool("bika_setup_catalog")
     query = self.contentFilter.copy()
      # The contentFilter query gets changed by the listing view to show only
     # services in a category. This update ensures that the sorting is kept
     # correct and that no inactive services are displayed.
     query.update({
         "inactive_state": "active",
         "sort_on": self.sort_on,
         "sort_order": self.sort_order,
     })
     logger.info("AnalysisSpecificationWidget::query=%r" % query)
     return catalog(query)
Example #43
    def clearFindAndRebuild(self):
        # Empties catalog, then finds all contentish objects (i.e. objects
        # with an indexObject method), and reindexes them.
        # This may take a long time.

        # The Catalog ID
        cid = self.getId()

        # The catalog indexes
        idxs = list(self.indexes())

        # Types to consider for this catalog
        obj_metatypes = self.get_mapped_types()

        def indexObject(obj, path):
            __traceback_info__ = path

            # skip non-indexable types
            if not self.is_indexable(obj):
                return

            # skip types that are not mapped to this catalog
            if self.get_portal_type(obj) not in obj_metatypes:
                return

            self.counter += 1

            try:
                obj.reindexObject(idxs=idxs)
            except TypeError:
                # Catalogs have 'indexObject' as well, but they
                # take different args, and will fail
                pass

            if self.counter % 100 == 0:
                logger.info(
                    "Progress: {} objects have been cataloged for {}.".format(
                        self.counter, cid))
                transaction.savepoint(optimistic=True)

        logger.info("Cleaning and rebuilding catalog '{}'...".format(cid))
        self.counter = 0
        self.manage_catalogClear()
        portal = aq_parent(aq_inner(self))
        portal.ZopeFindAndApply(portal,
                                search_sub=True,
                                apply_func=indexObject)
        logger.info("Catalog '{}' cleaned and rebuilt".format(cid))
Example #44
def add_partitioning_indexes(portal):
    """Adds the indexes for partitioning
    """
    logger.info("Adding partitioning indexes")

    add_index(portal,
              catalog_id=CATALOG_ANALYSIS_LISTING,
              index_name="getAncestorsUIDs",
              index_attribute="getAncestorsUIDs",
              index_metatype="KeywordIndex")

    add_index(portal,
              catalog_id=CATALOG_ANALYSIS_REQUEST_LISTING,
              index_name="isRootAncestor",
              index_attribute="isRootAncestor",
              index_metatype="BooleanIndex")
Example #45

def setup_form_controller_actions(portal):
    """Setup custom CMF Form actions
    """
    logger.info("*** Setup Form Controller custom actions ***")
    fc_tool = api.get_tool("portal_form_controller")

    # Redirect the user to Worksheets listing view after the "remove" action
    # from inside Worksheet context is pressed
    # https://github.com/senaite/senaite.core/pull/1480
    fc_tool.addFormAction(
        object_id="content_status_modify",
        status="success",
        context_type="Worksheet",
        button=None,
        action_type="redirect_to",
        action_arg="python:object.aq_inner.aq_parent.absolute_url()")
Example #46
def post_install(portal_setup):
    """Runs after the last import step of the *default* profile

    This handler is registered as a *post_handler* in the generic setup profile

    :param portal_setup: SetupTool
    """
    logger.info("SENAITE POST-INSTALL handler [BEGIN]")

    # https://docs.plone.org/develop/addons/components/genericsetup.html#custom-installer-code-setuphandlers-py
    profile_id = PROFILE_ID

    context = portal_setup._getImportContext(profile_id)
    portal = context.getSite()  # noqa

    logger.info("SENAITE POST-INSTALL handler [DONE]")
Example #47
def _change_inactive_state(service, new_state):
    msg = "Upgrade v1.2.1: Updating status of {} to '{}'".\
        format(service.getKeyword(), new_state)
    logger.info(msg)
    wtool = api.get_tool('portal_workflow')
    workflow = wtool.getWorkflowById('bika_inactive_workflow')
    wf_state = {
        'action': None,
        'actor': None,
        'comments': msg,
        'inactive_state': new_state,
        'time': DateTime(),
    }
    wtool.setStatusOf('bika_inactive_workflow', service, wf_state)
    workflow.updateRoleMappingsFor(service)
    service.reindexObject(idxs=['allowedRolesAndUsers', 'inactive_state'])
Example #48
    def getAnalysis(self):
        """Return the primary analysis this attachment is linked
        """
        analysis = None
        ans = self.getLinkedAnalyses()

        if len(ans) > 1:
            # Attachment is assigned to more than one Analysis. This might
            # happen when the AR was invalidated
            an_ids = ", ".join(map(api.get_id, ans))
            logger.info("Attachment assigned to more than one Analysis: [{}]. "
                        "The first Analysis will be returned".format(an_ids))

        if len(ans) >= 1:
            analysis = ans[0]

        return analysis
Example #49
def remove_object_workflow_states_metadata(portal):
    """Removes the getObjectWorkflowStates metadata from catalogs
    """
    logger.info("Removing getObjectWorkflowStates metadata ...")
    catalogs = [
        BIKA_CATALOG,
        CATALOG_ANALYSIS_LISTING,
        CATALOG_ANALYSIS_REQUEST_LISTING,
        CATALOG_AUTOIMPORTLOGS_LISTING,
        CATALOG_REPORT_LISTING,
        CATALOG_WORKSHEET_LISTING,
        SETUP_CATALOG
    ]
    for catalog in catalogs:
        del_metadata(catalog, "getObjectWorkflowStates")

    logger.info("Removing getObjectWorkflowStates metadata [DONE]")
Example #50
def after_retract(analysis):
    """Function triggered after a 'retract' transition for the analysis passed
    in is performed. The analysis transitions to "retracted" state and a new
    copy of the analysis is created. The copy initial state is "unassigned",
    unless the the retracted analysis was assigned to a worksheet. In such case,
    the copy is transitioned to 'assigned' state too
    """

    # Retract our dependents (analyses that depend on this analysis)
    cascade_to_dependents(analysis, "retract")

    # Retract our dependencies (analyses this analysis depends on)
    promote_to_dependencies(analysis, "retract")

    # Rename the analysis to make way for its successor.
    # Support multiple retractions by renaming to *-0, *-1, etc
    parent = analysis.aq_parent
    keyword = analysis.getKeyword()

    # Get only those that are analyses and with same keyword as the original
    analyses = parent.getAnalyses(full_objects=True)
    analyses = filter(lambda an: an.getKeyword() == keyword, analyses)
    # TODO This needs to be managed by the ID server in the near future!
    new_id = '{}-{}'.format(keyword, len(analyses))

    # Create a copy of the retracted analysis
    an_uid = api.get_uid(analysis)
    new_analysis = create_analysis(parent,
                                   analysis,
                                   id=new_id,
                                   RetestOf=an_uid)
    new_analysis.setResult("")
    new_analysis.setResultCaptureDate(None)
    new_analysis.reindexObject()
    logger.info("Retest for {} ({}) created: {}".format(
        keyword, api.get_id(analysis), api.get_id(new_analysis)))

    # Assign the new analysis to this same worksheet, if any.
    worksheet = analysis.getWorksheet()
    if worksheet:
        worksheet.addAnalysis(new_analysis)

    # Try to rollback the Analysis Request
    if IRequestAnalysis.providedBy(analysis):
        doActionFor(analysis.getRequest(), "rollback_to_receive")
        reindex_request(analysis)
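The interim naming rule derives the retest ID from the keyword plus the count of sibling analyses sharing that keyword. In isolation (keywords as plain strings):

def next_retest_id(keyword, sibling_keywords):
    """'<keyword>-<n>' where n counts analyses sharing the keyword."""
    n = sum(1 for k in sibling_keywords if k == keyword)
    return "%s-%s" % (keyword, n)

print(next_retest_id("Ca", ["Ca"]))              # Ca-1 (first retest)
print(next_retest_id("Ca", ["Ca", "Ca", "Mg"]))  # Ca-2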
Example #51
def rollback_to_receive_inconsistent_ars(portal):
    logger.info("Rolling back inconsistent Analysis Requests ...")
    review_states = ["to_be_verified"]
    query = dict(portal_type="AnalysisRequest", review_state=review_states)
    brains = api.search(query, CATALOG_ANALYSIS_LISTING)
    total = len(brains)
    for num, brain in enumerate(brains):
        request = api.get_object(brain)
        if not isTransitionAllowed(request, "rollback_to_receive"):
            total -= 1
            continue

        if num % 100 == 0:
            logger.info("Rolling back inconsistent AR '{}': {}/{}".format(
                request.getId(), num, total))

        do_action_for(request, "rollback_to_receive")
Example #52
def update_reflexrules_workflow_state(portal):
    """
    Updates the Reflex Rules' inactive_state, since they don't get one by
    default.
    :param portal: Portal object
    :return: None
    """
    wf_tool = getToolByName(portal, 'portal_workflow')
    logger.info("Updating Reflex Rules' 'inactive_state's...")
    wf = wf_tool.getWorkflowById("bika_inactive_workflow")
    uc = api.get_tool('portal_catalog')
    r_rules = uc(portal_type='ReflexRule')
    for rr in r_rules:
        obj = rr.getObject()
        wf.updateRoleMappingsFor(obj)
        obj.reindexObject()
    logger.info("Reflex Rules' 'inactive_state's were updated.")
Example #53

 def get_service_by_keyword(self, keyword, default=None):
     """Get a service by keyword
     """
     logger.info("Get service by keyword={}".format(keyword))
     bsc = api.get_tool("bika_setup_catalog")
     results = bsc(portal_type='AnalysisService', getKeyword=keyword)
     if not results:
         logger.exception("No Analysis Service found for Keyword '{}'. "
                          "Related: LIMS-1614".format(keyword))
         return default
     elif len(results) > 1:
         logger.exception(
             "More than one Analysis Service found for Keyword '{}'. ".
             format(keyword))
         return default
     else:
         return api.get_object(results[0])
Example #54
 def _extractObjects(self):
     fragment = self._doc.createDocumentFragment()
     objects = self.context.objectValues()
     if not IOrderedContainer.providedBy(self.context):
         objects = list(objects)
         objects.sort(lambda x, y: cmp(x.getId(), y.getId()))
     for obj in objects:
         # Check if the object can be exported
         if not can_export(obj):
             logger.info("Skipping {}".format(repr(obj)))
             continue
         exporter = queryMultiAdapter((obj, self.environ), INode)
         if exporter:
             node = exporter.node
             if node is not None:
                 fragment.appendChild(node)
     return fragment
Example #55

    def add_analysis(self, instance, service, **kwargs):
        service_uid = api.get_uid(service)

        # Ensure we have suitable parameters
        specs = kwargs.get("specs") or {}

        # Get the hidden status for the service
        hidden = kwargs.get("hidden") or []
        hidden = filter(lambda d: d.get("uid") == service_uid, hidden)
        hidden = hidden and hidden[0].get("hidden") or service.getHidden()

        # Get the price for the service
        prices = kwargs.get("prices") or {}
        price = prices.get(service_uid) or service.getPrice()

        # Gets the analysis or creates the analysis for this service
        # Note this returns a list, because it is possible to have multiple
        # partitions with the same analysis
        analyses = self.resolve_analyses(instance, service)
        if not analyses:
            # Create the analysis
            keyword = service.getKeyword()
            logger.info("Creating new analysis '{}'".format(keyword))
            analysis = create_analysis(instance, service)
            analyses.append(analysis)

        skip = ["cancelled", "retracted", "rejected"]
        for analysis in analyses:
            # Skip analyses that are better left unmodified
            if api.get_review_status(analysis) in skip:
                continue

            # Set the hidden status
            analysis.setHidden(hidden)

            # Set the price of the Analysis
            analysis.setPrice(price)

            # Set the internal use status
            parent_sample = analysis.getRequest()
            analysis.setInternalUse(parent_sample.getInternalUse())

            # Set the result range to the analysis
            analysis_rr = specs.get(service_uid) or analysis.getResultsRange()
            analysis.setResultsRange(analysis_rr)
            analysis.reindexObject()
Example #56
def del_at_refs(relation):
    # Remove this relation from at_references
    rc = get_tool(REFERENCE_CATALOG)
    refs = rc(relationship=relation)
    removed = 0
    if refs:
        logger.info("Found %s refs for %s" % (len(refs), relation))
        ref_dict = {ref[0]: ref.getObject() for ref in refs}
        for ref_id, ref_obj in ref_dict.items():
            if ref_obj is not None:
                ref_obj.aq_parent.manage_delObjects([ref_id])
                removed += 1
    if removed:
        logger.info("Performed %s deletions" % removed)
    return removed
Example #57
def upgrade(tool):
    """Upgrade step required for Bika LIMS 3.2.0
    """
    portal = aq_parent(aq_inner(tool))

    qi = portal.portal_quickinstaller
    ufrom = qi.upgradeInfo('bika.lims')['installedVersion']
    logger.info("Upgrading Bika LIMS: %s -> %s" % (ufrom, '3.2.0'))
    """Updated profile steps
    list of the generic setup import step names: portal.portal_setup.getSortedImportSteps() <---
    if you want more metadata use this: portal.portal_setup.getImportStepMetadata('jsregistry') <---
    important info about upgrade steps in
    http://stackoverflow.com/questions/7821498/is-there-a-good-reference-list-for-the-names-of-the-genericsetup-import-steps
    """
    setup = portal.portal_setup
    setup.runImportStepFromProfile('profile-bika.lims:default', 'typeinfo')
    setup.runImportStepFromProfile('profile-bika.lims:default', 'jsregistry')
    setup.runImportStepFromProfile('profile-bika.lims:default', 'cssregistry')
    setup.runImportStepFromProfile('profile-bika.lims:default', 'workflow-csv')
    setup.runImportStepFromProfile('profile-bika.lims:default', 'factorytool')
    setup.runImportStepFromProfile('profile-bika.lims:default', 'controlpanel')
    setup.runImportStepFromProfile('profile-bika.lims:default', 'catalog')
    setup.runImportStepFromProfile('profile-bika.lims:default',
                                   'propertiestool')
    setup.runImportStepFromProfile('profile-bika.lims:default', 'skins')
    setup.runImportStepFromProfile('profile-bika.lims:default',
                                   'portlets',
                                   run_dependencies=False)
    # Creating all the sampling coordinator roles, permissions and indexes
    create_samplingcoordinator(portal)
    departments(portal)
    # Migrate Instrument Locations
    migrate_instrument_locations(portal)
    """Update workflow permissions
    """
    wf = getToolByName(portal, 'portal_workflow')
    wf.updateRoleMappings()
    # Updating Verifications of Analysis field from integer to String.
    multi_verification(portal)

    # Clean and rebuild affected catalogs
    cleanAndRebuildIfNeeded(portal)

    return True
Example #58
def delete_orphaned_attachments(portal):
    """Delete attachments where the Analysis was removed
       https://github.com/senaite/senaite.core/issues/1025
    """
    attachments = api.search({"portal_type": "Attachment"})
    total = len(attachments)
    logger.info("Integrity checking %d attachments" % total)
    for num, attachment in enumerate(attachments):
        obj = api.get_object(attachment)
        # The method `getRequest` from the attachment tries to get the AR
        # either directly or from one of the linked Analyses. If it returns
        # `None`, we can be sure that the attachment is neither assigned
        # directly to an AR nor to an Analysis.
        ar = obj.getRequest()
        if ar is None:
            obj_id = api.get_id(obj)
            api.get_parent(obj).manage_delObjects(obj_id)
            logger.info("Deleted orphaned Attachment {}".format(obj_id))
Example #59
 def publish(self, sample):
     """Set status to prepublished/published/republished
     """
     wf = api.get_tool("portal_workflow")
     status = wf.getInfoFor(sample, "review_state")
     transitions = {"verified": "publish", "published": "republish"}
     transition = transitions.get(status, "prepublish")
     logger.info("Transitioning sample {}: {} -> {}".format(
         api.get_id(sample), status, transition))
     try:
         # Manually update the view on the database to avoid conflict errors
         sample.getClient()._p_jar.sync()
         # Perform WF transition
         wf.doActionFor(sample, transition)
         # Commit the changes
         transaction.commit()
     except WorkflowException as e:
         logger.error(e)
Example #60
def mark_analyses_transitions(portal):
    logger.info("Marking Analyses with ISubmitted and IVerified ...")
    statuses = ["to_be_verified", "verified", "published"]
    query = dict(review_state=statuses)
    brains = api.search(query, CATALOG_ANALYSIS_LISTING)
    total = len(brains)
    for num, brain in enumerate(brains):
        if num % 100 == 0:
            logger.info("Marking Analyses with ISubmitted and IVerified: {}/{}"
                        .format(num, total))

        an = api.get_object(brain)
        alsoProvides(an, ISubmitted)
        if brain.review_state in ["verified", "published"]:
            alsoProvides(an, IVerified)

        if num % 1000 == 0 and num > 0:
            commit_transaction(portal)
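Markers applied with alsoProvides can later be tested with providedBy, which is what listing and workflow code queries against. A minimal zope.interface sketch (the interface and class names mirror the snippet):

from zope.interface import Interface, alsoProvides

class ISubmitted(Interface):
    """Marker for analyses whose results were submitted."""

class Analysis(object):
    pass

an = Analysis()
alsoProvides(an, ISubmitted)
print(ISubmitted.providedBy(an))  # True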