def make_ar(self, services):
     sampletypes = [p.getObject() for p in self.bsc(portal_type="SampleType")]
     samplepoints = [p.getObject() for p in self.bsc(portal_type="SamplePoint")]
     contacts = [c for c in self.client.objectValues() if c.portal_type == 'Contact']
     sample_id = self.client.invokeFactory(type_name='Sample', id=tmpID())
     sample = self.client[sample_id]
     sample.edit(
         SampleID=sample_id,
         SampleType=random.choice(sampletypes).Title(),
         SamplePoint=random.choice(samplepoints).Title(),
         ClientReference=chr(random.randint(70, 90)) * 3,
         ClientSampleID=chr(random.randint(70, 90)) * 3,
         SamplingDate=DateTime()
     )
     sample.processForm()
     ar_id = self.client.invokeFactory("AnalysisRequest", id=tmpID())
     ar = self.client[ar_id]
     ar.edit(
         RequestID=ar_id,
         Contact=contacts[0],
         CCContact=contacts[1],
         CCEmails="",
         Sample=sample,
         ClientOrderNumber=chr(random.randint(70, 90)) * 3
     )
     ar.processForm()
     prices = dict([(s.UID(), '10.00') for s in services])
     ar.setAnalyses([s.UID() for s in services], prices=prices)
     return ar
Example #2
    def test_default_stickers(self):
        """https://jira.bikalabs.com/browse/WINE-44: display SampleID or
        SamplePartition ID depending on bikasetup.ShowPartitions value
        """

        folder = self.portal.bika_setup.bika_analysisservices
        services = [_createObjectByType("AnalysisService", folder, tmpID()),
                    _createObjectByType("AnalysisService", folder, tmpID())]
        services[0].processForm()
        services[1].processForm()
        services[0].edit(title="Detect Dust")
        services[1].edit(title="Detect water")
        service_uids = [s.UID() for s in services]
        folder = self.portal.clients
        client = _createObjectByType("Client", folder, tmpID())
        client.processForm()
        folder = self.portal.clients.objectValues("Client")[0]
        contact = _createObjectByType("Contact", folder, tmpID())
        contact.processForm()
        contact.edit(Firstname="Bob", Surname="Dobbs", email="*****@*****.**")
        folder = self.portal.bika_setup.bika_sampletypes
        sampletype = _createObjectByType("SampleType", folder, tmpID())
        sampletype.processForm()
        sampletype.edit(title="Air", Prefix="AIR")

        values = {'Client': client.UID(),
                  'Contact': contact.UID(),
                  'SamplingDate': '2015-01-01',
                  'SampleType': sampletype.UID()}

        for size in ["large", "small"]:

            # create and receive AR
            ar = create_analysisrequest(client, {}, values, service_uids)
            ar.bika_setup.setShowPartitions(False)
            doActionFor(ar, 'receive')
            self.assertEquals(ar.portal_workflow.getInfoFor(ar, 'review_state'), 'sample_received')
            # check sticker text
            ar.REQUEST['items'] = ar.getId()
            ar.REQUEST['template'] = "bika.lims:sticker_%s.pt"%size
            sticker = Sticker(ar, ar.REQUEST)()
            pid = ar.getSample().objectValues("SamplePartition")[0].getId()
            self.assertNotIn(pid, sticker, "Sticker must not contain partition ID %s"%pid)

            # create and receive AR
            ar = create_analysisrequest(client, {}, values, service_uids)
            ar.bika_setup.setShowPartitions(True)
            doActionFor(ar, 'receive')
            self.assertEquals(ar.portal_workflow.getInfoFor(ar, 'review_state'), 'sample_received')
            # check sticker text
            ar.REQUEST['items'] = ar.getId()
            ar.REQUEST['template'] = "bika.lims:sticker_%s.pt"%size
            sticker = Sticker(ar, ar.REQUEST)()
            pid = ar.getSample().objectValues("SamplePartition")[0].getId()
            self.assertIn(pid, sticker, "Sticker must contain partition ID %s"%pid)
Example #3
    def __call__(self):

        if "viewlet_submitted" in self.request.form:
            data = {}
            try:
                data = self.validate_form_inputs()
            except ValidationError as e:
                self.form_error(e.message)
                return

            from Products.CMFPlone.utils import _createObjectByType
            from bika.lims.utils import tmpID
            instance = _createObjectByType('InvoiceBatch', self.context, tmpID(), title=data['title'])
            instance.unmarkCreationFlag()
            instance.edit(
                Project=data['project_uid'],
                Services=data['services'],
                BatchStartDate=data['start_date'],
                BatchEndDate=data['end_date']
            )
            renameAfterCreation(instance)
            instance.processForm()
            msg = u'Invoice for period "%s" to "%s" created.' % (data['start_date'], data['end_date'])
            self.context.plone_utils.addPortalMessage(msg)
            self.request.response.redirect(self.context.absolute_url())
Example #4
 def create_methods(self, methods_data):
     """
     Creates a set of methods to be used in the tests
     :methods_data: [{
             'title':'xxx',
             'description':'xxx',
             'Instructions':'xxx',
             'MethodID':'xxx',
             'Accredited':'False/True'},
         ...]
     """
     folder = self.portal.bika_setup.methods
     methods_list = []
     for meth_d in methods_data:
         _id = folder.invokeFactory('Method', id=tmpID())
         meth = folder[_id]
         meth.edit(
             title=meth_d['title'],
             description=meth_d.get('description', ''),
             Instructions=meth_d.get('Instructions', ''),
             MethodID=meth_d.get('MethodID', ''),
             Accredited=meth_d.get('Accredited', True),
             )
         meth.unmarkCreationFlag()
         renameAfterCreation(meth)
         methods_list.append(meth)
     return methods_list
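A minimal usage sketch for the helper above; the titles and method IDs are illustrative placeholders, not fixtures from the original suite:

 # Hypothetical call from inside the same test case
 methods = self.create_methods([
     {'title': 'Method One',
      'description': 'First example method',
      'Instructions': 'Mix, then measure',
      'MethodID': 'M-001',
      'Accredited': True},
     {'title': 'Method Two',
      'description': 'Second example method',
      'Instructions': 'Dilute, then measure',
      'MethodID': 'M-002',
      'Accredited': False},
 ])
 assert len(methods) == 2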
Example #5
 def workflow_script_receive(self):
     """ receive order """
     products = self.aq_parent.objectValues('Product')
     items = self.order_lineitems
     for item in items:
         quantity = int(item['Quantity'])
         if quantity < 1:
             continue
         product = [p for p in products if p.getId() == item['Product']][0]
         folder = self.bika_setup.bika_stockitems
         for i in range(quantity):
             pi = _createObjectByType('StockItem', folder, tmpID())
             pi.setProduct(product)
             pi.setOrderId(self.getId())
             pi.setDateReceived(DateTime())
             pi.unmarkCreationFlag()
             renameAfterCreation(pi)
             # Manually reindex stock item in catalog
             self.bika_setup_catalog.reindexObject(pi)
         product.setQuantity(product.getQuantity() + quantity)
     self.setDateReceived(DateTime())
     self.reindexObject()
     # Print stock item stickers if opted for
     if self.bika_setup.getAutoPrintInventoryStickers():
         # TODO: Use better method to redirect after transition
         self.REQUEST.response.write(
             "<script>window.location.href='%s'</script>" % (
                 self.absolute_url() + '/stickers/?items=' + self.getId()))
Example #6
    def test_MultiVerificationType(self):
        # Testing when the same user can verify multiple times
        self.portal.bika_setup.setNumberOfRequiredVerifications(4)
        self.portal.bika_setup.setTypeOfmultiVerification('self_multi_enabled')

        client = self.portal.clients['client-1']
        sampletype = self.portal.bika_setup.bika_sampletypes['sampletype-1']
        values = {'Client': client.UID(),
                  'Contact': client.getContacts()[0].UID(),
                  'SamplingDate': '2016-12-12',
                  'SampleType': sampletype.UID()}
        ar = _createObjectByType("AnalysisRequest", client, tmpID())
        servs = self.portal.bika_setup.bika_analysisservices
        service=servs['analysisservice-3']
        service.setSelfVerification(True)
        an = create_analysis(ar, service)
        member = self.portal.portal_membership.getMemberById('admin')
        an.setVerificators(member.getUserName())
        an.setNumberOfRequiredVerifications(4)
        self.assertEquals(an.isUserAllowedToVerify(member), True)

        # Testing when the same user can verify multiple times but not consecutively
        self.portal.bika_setup.setTypeOfmultiVerification('self_multi_not_cons')
        self.assertEquals(an.isUserAllowedToVerify(member), False)

        # Testing when the same user cannot verify more than once
        self.portal.bika_setup.setTypeOfmultiVerification('self_multi_disabled')
        self.assertEquals(an.isUserAllowedToVerify(member), False)

        an.addVerificator(TEST_USER_NAME)
        self.portal.bika_setup.setTypeOfmultiVerification('self_multi_not_cons')
        self.assertEquals(an.isUserAllowedToVerify(member), True)

        self.portal.bika_setup.setTypeOfmultiVerification('self_multi_disabled')
        self.assertEquals(an.isUserAllowedToVerify(member), False)
Example #7
def create_sample(client, request, values):
    """Creates a sample for the passed in client
    """
    # Retrieve the required tools
    uc = getToolByName(client, 'uid_catalog')
    # Create sample or refer to existing for secondary analysis request
    if values.get('Sample_uid', ''):
        sample = uc(UID=values['Sample'])[0].getObject()
    else:
        sample = _createObjectByType('Sample', client, tmpID())
        # Determine if the sampling workflow is enabled
        workflow_enabled = client.bika_setup.getSamplingWorkflowEnabled()
        sample.setSamplingWorkflowEnabled(workflow_enabled)
        # Specifically set the sample type
        sample.setSampleType(values['SampleType'])
        # Specifically set the sample point
        if 'SamplePoint' in values:
            sample.setSamplePoint(values['SamplePoint'])
        # Specifically set the storage location
        if 'StorageLocation' in values:
            sample.setStorageLocation(values['StorageLocation'])
        # Specifically set the DateSampled
        if 'DateSampled' in values:
            sample.setDateSampled(values['DateSampled'])
        # Update the created sample with indicated values
        sample.processForm(REQUEST=request, values=values)
        # Set the SampleID
        sample.edit(SampleID=sample.getId())
    # Return the newly created sample
    return sample
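A hedged sketch of a call site for this function; client and request are assumed to come from the calling view, and the UIDs in the values dict are placeholders:

    # Hypothetical call site; the keys mirror the lookups performed above
    values = {
        'SampleType': sampletype_uid,      # placeholder UID, required
        'SamplePoint': samplepoint_uid,    # placeholder UID, optional
        'DateSampled': '2015-01-01',       # optional
    }
    sample = create_sample(client, request, values)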
Example #8
    def addReferenceAnalysis(self, service_uid, reference_type):
        """ add an analysis to the sample """
        rc = getToolByName(self, REFERENCE_CATALOG)
        service = rc.lookupObject(service_uid)

        analysis = _createObjectByType("ReferenceAnalysis", self, tmpID())
        calculation = service.getCalculation()
        interim_fields = calculation and calculation.getInterimFields() or []
        maxtime = service.getMaxTimeAllowed() and service.getMaxTimeAllowed() \
            or {'days':0, 'hours':0, 'minutes':0}
        starttime = DateTime()
        max_days = float(maxtime.get('days', 0)) + \
                 (
                     (float(maxtime.get('hours', 0)) * 3600 + \
                      float(maxtime.get('minutes', 0)) * 60)
                     / 86400
                 )
        duetime = starttime + max_days

        analysis.edit(
            ReferenceAnalysisID = analysis.id,
            ReferenceType = reference_type,
            Service = service,
            Unit = service.getUnit(),
            Calculation = calculation,
            InterimFields = interim_fields,
            ServiceUID = service.UID(),
            MaxTimeAllowed = maxtime,
            DueDate = duetime,
        )

        analysis.processForm()
        return analysis.UID()
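The due date above is computed by converting MaxTimeAllowed into a fractional number of days and adding it to a Zope DateTime (adding a float adds days). A small standalone illustration of the same arithmetic, with a made-up maxtime:

        # Illustration only: reproduces the max_days calculation above
        maxtime = {'days': 1, 'hours': 6, 'minutes': 30}   # example value
        max_days = float(maxtime.get('days', 0)) + \
            (float(maxtime.get('hours', 0)) * 3600 +
             float(maxtime.get('minutes', 0)) * 60) / 86400
        # 1 + (6*3600 + 30*60) / 86400 = 1.2708..., so DateTime() + max_days
        # falls 1 day, 6 hours and 30 minutes after the start time.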
Example #9
def create_sample(context, request, values):
    # Retrieve the required tools
    uc = getToolByName(context, 'uid_catalog')
    # Determine if the sampling workflow is enabled
    workflow_enabled = context.bika_setup.getSamplingWorkflowEnabled()
    # Create sample or refer to existing for secondary analysis request
    if values.get('Sample_uid', ''):
        sample = uc(UID=values['Sample'])[0].getObject()
    else:
        sample = _createObjectByType('Sample', context, tmpID())
        # Specifically set the sample type
        sample.setSampleType(values['SampleType'])
        # Specifically set the sample point
        if 'SamplePoint' in values:
            sample.setSamplePoint(values['SamplePoint'])
        # Specifically set the storage location
        if 'StorageLocation' in values:
            sample.setStorageLocation(values['StorageLocation'])
        # Update the created sample with indicated values
        sample.processForm(REQUEST=request, values=values)
        # Perform the appropriate workflow action
        workflow_action = 'sampling_workflow' if workflow_enabled \
            else 'no_sampling_workflow'
        context.portal_workflow.doActionFor(sample, workflow_action)
        # Set the SampleID
        sample.edit(SampleID=sample.getId())
    # Return the newly created sample
    return sample
Example #10
 def create_analysisservices(self, as_data):
     """
      Creates a set of analysis services to be used in the tests
     :as_data: [{
             'title':'xxx',
             'ShortTitle':'xxx',
             'Keyword': 'xxx',
             'PointOfCapture': 'Lab',
             'Category':category object,
             'Methods': [methods object,],
             },
         ...]
     """
     folder = self.portal.bika_setup.bika_analysisservices
     ans_list = []
     for as_d in as_data:
         _id = folder.invokeFactory('AnalysisService', id=tmpID())
         ans = folder[_id]
         ans.edit(
             title=as_d['title'],
             ShortTitle=as_d.get('ShortTitle', ''),
             Keyword=as_d.get('Keyword', ''),
             PointOfCapture=as_d.get('PointOfCapture', 'Lab'),
             Category=as_d.get('Category', ''),
             Methods=as_d.get('Methods', []),
             )
         ans.unmarkCreationFlag()
         renameAfterCreation(ans)
         ans_list.append(ans)
     return ans_list
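A minimal usage sketch, assuming a Category object and Method objects were created earlier in the test (for instance with create_methods); the titles and keywords are illustrative:

 # Hypothetical call, reusing objects built by other helpers
 services = self.create_analysisservices([
     {'title': 'Calcium',
      'ShortTitle': 'Ca',
      'Keyword': 'Ca',
      'PointOfCapture': 'Lab',
      'Category': category,    # a Category object created elsewhere
      'Methods': [method]},    # Method objects created elsewhere
 ])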
Example #11
 def __call__(self):
     request = self.request
     form = request.form
     CheckAuthenticator(form)
     if form.get('submitted'):
         # Validate form submission
          csvfile = form.get('csvfile')
          if not csvfile:
              addStatusMessage(request, _("No file selected"))
              return self.template()
          data = csvfile.read()
          lines = data.splitlines()
          filename = csvfile.filename
          if len(lines) < 3:
              addStatusMessage(request, _("Too few lines in CSV file"))
              return self.template()
         # Create the arimport object
         arimport = _createObjectByType("ARImport", self.context, tmpID())
         arimport.processForm()
         arimport.setTitle(self.mkTitle(filename))
         arimport.schema['OriginalFile'].set(arimport, data)
         # Save all fields from the file into the arimport schema
         arimport.save_header_data()
         arimport.save_sample_data()
         # immediate batch creation if required
         arimport.create_or_reference_batch()
         # Attempt first validation
         try:
             workflow = getToolByName(self.context, 'portal_workflow')
             workflow.doActionFor(arimport, 'validate')
         except WorkflowException:
             self.request.response.redirect(arimport.absolute_url() +
                                            "/edit")
     else:
         return self.template()
Example #12
    def addReferenceAnalysis(self, service_uid, reference_type):
        """ add an analysis to the sample """
        rc = getToolByName(self, REFERENCE_CATALOG)
        service = rc.lookupObject(service_uid)

        analysis = _createObjectByType("ReferenceAnalysis", self, tmpID())
        analysis.unmarkCreationFlag()

        calculation = service.getCalculation()
        interim_fields = calculation and calculation.getInterimFields() or []
        renameAfterCreation(analysis)

        # maxtime = service.getMaxTimeAllowed() and service.getMaxTimeAllowed() \
        #     or {'days':0, 'hours':0, 'minutes':0}
        # starttime = DateTime()
        # max_days = float(maxtime.get('days', 0)) + \
        #          (
        #              (float(maxtime.get('hours', 0)) * 3600 + \
        #               float(maxtime.get('minutes', 0)) * 60)
        #              / 86400
        #          )
        # duetime = starttime + max_days

        analysis.setReferenceType(reference_type)
        analysis.setService(service_uid)
        analysis.setInterimFields(interim_fields)
        return analysis.UID()
Example #13
 def create_or_reference_batch(self):
     """Save reference to batch, if existing batch specified
     Create new batch, if possible with specified values
     """
     client = self.aq_parent
     batch_headers = self.get_batch_header_values()
     if not batch_headers:
         return False
     # if the Batch's Title is specified and exists, no further
     # action is required. We will just set the Batch field to
     # use the existing object.
     batch_title = batch_headers.get('title', False)
     if batch_title:
         existing_batch = [x for x in client.objectValues('Batch')
                           if x.title == batch_title]
         if existing_batch:
             self.setBatch(existing_batch[0])
             return existing_batch[0]
     # If the batch title is specified but does not exist,
      # we will attempt to create the batch now.
     if 'title' in batch_headers:
         if 'id' in batch_headers:
             del (batch_headers['id'])
         if '' in batch_headers:
             del (batch_headers[''])
         batch = _createObjectByType('Batch', client, tmpID())
         batch.processForm()
         batch.edit(**batch_headers)
         self.setBatch(batch)
Example #14
 def is_import_allowed(self):
     # Checking if auto-import enabled in bika setup. Return False if not.
     interval = self.portal.bika_setup.getAutoImportInterval()
     if interval < 10:
         return False
     caches = self.portal.listFolderContents(contentFilter={
                                             "portal_type": 'BikaCache'})
     cache = None
     for c in caches:
         if c and c.getKey() == 'LastAutoImport':
             cache = c
     now = DateTime.strftime(DateTime(), '%Y-%m-%d %H:%M:%S')
     if not cache:
         _id = self.portal.invokeFactory("BikaCache", id=tmpID(),
                                         Key='LastAutoImport',
                                         Value=now)
         item = self.portal[_id]
         item.markCreationFlag()
         return True
     else:
         last_import = cache.getValue()
         diff = datetime.now() - datetime.strptime(last_import,
                                                   '%Y-%m-%d %H:%M:%S')
         if diff.seconds < interval * 60:
             return False
         cache.edit(Value=now)
         return True
Example #15
    def __call__(self):
        form = self.request.form
        bsc = getToolByName(self.context, 'bika_setup_catalog')

        # find and remove existing specs
        cs = bsc(portal_type='AnalysisSpec',
                 getClientUID=self.context.UID())
        if cs:
            self.context.manage_delObjects([s.id for s in cs])

        # find and duplicate lab specs
        ls = bsc(portal_type='AnalysisSpec',
                 getClientUID=self.context.bika_setup.bika_analysisspecs.UID())
        ls = [s.getObject() for s in ls]
        for labspec in ls:
            clientspec = _createObjectByType(
                "AnalysisSpec", self.context, tmpID())
            clientspec.processForm()
            clientspec.edit(
                SampleType=labspec.getSampleType(),
                ResultsRange=labspec.getResultsRange(),
            )
        translate = self.context.translate
        message = _("Analysis specifications reset to lab defaults.")
        self.context.plone_utils.addPortalMessage(message, 'info')
        self.request.RESPONSE.redirect(self.context.absolute_url() +
                                       "/analysisspecs")
        return
Example #16
    def Import(self):
        folder = self.context.patients
        rows = self.get_rows(3)
        for row in rows:
            if not row['Firstname'] or not row['PrimaryReferrer']:
                continue
            pc = getToolByName(self.context, 'portal_catalog')
            client = pc(portal_type='Client', Title=row['PrimaryReferrer'])
            if len(client) == 0:
                raise IndexError("Primary referrer invalid: '%s'" % row['PrimaryReferrer'])

            client = client[0].getObject()
            _id = folder.invokeFactory('Patient', id=tmpID())
            obj = folder[_id]
            obj.unmarkCreationFlag()
            renameAfterCreation(obj)
            Fullname = (row['Firstname'] + " " + row.get('Surname', '')).strip()
            obj.edit(title=Fullname,
                     ClientPatientID=row.get('ClientPatientID', ''),
                     Salutation=row.get('Salutation', ''),
                     Firstname=row.get('Firstname', ''),
                     Surname=row.get('Surname', ''),
                     PrimaryReferrer=client.UID(),
                     Gender=row.get('Gender', 'dk'),
                     Age=row.get('Age', ''),
                     BirthDate=row.get('BirthDate', ''),
                     BirthDateEstimated=self.to_bool(row.get('BirthDateEstimated', 'False')),
                     BirthPlace=row.get('BirthPlace', ''),
                     Ethnicity=row.get('Ethnicity', ''),
                     Citizenship=row.get('Citizenship', ''),
                     MothersName=row.get('MothersName', ''),
                     CivilStatus=row.get('CivilStatus', ''),
                     Anonymous=self.to_bool(row.get('Anonymous', 'False'))
                     )
            self.fill_contactfields(row, obj)
            self.fill_addressfields(row, obj)
            if 'Photo' in row and row['Photo']:
                try:
                    path = resource_filename("bika.lims",
                                             "setupdata/%s/%s" \
                                             % (self.dataset_name, row['Photo']))
                    file_data = open(path, "rb").read()
                    obj.setPhoto(file_data)
                except:
                    logger.error("Unable to load Photo %s"%row['Photo'])

            if 'Feature' in row and row['Feature']:
                try:
                    path = resource_filename("bika.lims",
                                             "setupdata/%s/%s" \
                                             % (self.dataset_name, row['Feature']))
                    file_data = open(path, "rb").read()
                    obj.setFeature(file_data)
                except:
                    logger.error("Unable to load Feature %s"%row['Feature'])

            obj.unmarkCreationFlag()
            renameAfterCreation(obj)
Example #17
    def create_reflex_rule(self, title, method, rules_data):
        """
        Given a list of dicts with reflex rule data, creates a ReflexRule object
        :title: a string with the title
        :method: a method object
        :rules_data: a list of rule dicts; an example follows
        [{'actions': [{'act_row_idx': 0,
                       'action': 'repeat',
                       'an_result_id': 'rep-1',
                       'analyst': '',
                       'otherWS': 'current',
                       'setresultdiscrete': '',
                       'setresulton': 'original',
                       'setresultvalue': '',
                       'worksheettemplate': ''}],
          'conditions': [{'analysisservice': '52853cf7d5114b5aa8c159afad2f3da1',
                          'and_or': 'no',
                          'cond_row_idx': 0,
                          'discreteresult': '',
                          'range0': '11',
                          'range1': '12'}],
          'mother_service_uid': '52853cf7d5114b5aa8c159afad2f3da1',
          'rulenumber': '0',
          'trigger': 'submit'},
         {'actions': [{'act_row_idx': 0,
                       'action': 'repeat',
                       'an_result_id': 'rep-2',
                       'analyst': '',
                       'otherWS': 'current',
                       'setresultdiscrete': '',
                       'setresulton': 'original',
                       'setresultvalue': '',
                       'worksheettemplate': ''}],
          'conditions': [{'analysisservice': 'rep-1',
                          'and_or': 'no',
                          'cond_row_idx': 0,
                          'discreteresult': '',
                          'range0': '12',
                          'range1': '12'},],
          'mother_service_uid': '52853cf7d5114b5aa8c159afad2f3da1',
          'rulenumber': '2',
          'trigger': 'submit'}]

        """
        # Creating a rule
        rules_list = []
        folder = self.portal.bika_setup.bika_reflexrulefolder
        _id = folder.invokeFactory('ReflexRule', id=tmpID())
        rule = folder[_id]
        rule.edit(
            title=title,
            )
        rule.setMethod(method.UID())
        if rules_data:
            rule.setReflexRules(rules_data)
        rule.unmarkCreationFlag()
        renameAfterCreation(rule)
        return rule
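A usage sketch, assuming a Method object and a rules_data list shaped like the docstring example above; the rule title is arbitrary:

        # Hypothetical call from a test
        rule = self.create_reflex_rule('Repeat out-of-range results', method,
                                       rules_data)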
Example #18
 def Import(self):
     folder = self.context.bika_setup.bika_winetypes
     for row in self.get_rows(3):
         if 'title' in row and row['title']:
             _id = folder.invokeFactory('WineType', id=tmpID())
             obj = folder[_id]
             obj.edit(title=row['title'],
                      description=row['description'])
             obj.processForm()
Example #19
 def Import(self):
     folder = self.context.bika_setup.bika_identifiertypes
     for row in self.get_rows(3):
         obj = _createObjectByType('IdentifierType', folder, tmpID())
         if row['title']:
             obj.edit(title=row['title'],
                      description=row.get('description', ''),)
             obj.unmarkCreationFlag()
             renameAfterCreation(obj)
Example #20
 def Import(self):
     folder = self.context.bika_setup.bika_casesyndromicclassifications
     for row in self.get_rows(3):
         obj = _createObjectByType('CaseSyndromicClassification', folder, tmpID())
         if row['title']:
             obj.edit(title=row['title'],
                      description=row.get('description', ''),)
             obj.unmarkCreationFlag()
             renameAfterCreation(obj)
Example #21
 def create_service(self, src_uid, dst_title, dst_keyword):
     folder = self.context.bika_setup.bika_analysisservices
     dst_service = _createObjectByType("AnalysisService", folder, tmpID())
     # manually set keyword and title
     dst_service.setKeyword(dst_keyword)
     dst_service.setTitle(dst_title)
     dst_service.unmarkCreationFlag()
     _id = renameAfterCreation(dst_service)
     dst_service = folder[_id]
     return dst_service
Example #22
 def Import(self):
     folder = self.context.bika_setup.bika_cultivars
     for row in self.get_rows(3):
         if 'title' in row and row['title']:
             _id = folder.invokeFactory('Cultivar', id=tmpID())
             obj = folder[_id]
             obj.edit(title=row['title'],
                      description=row['description'])
             obj.unmarkCreationFlag()
             renameAfterCreation(obj)
Example #23
 def Import(self):
     folder = self.context.bika_setup.bika_cultivars
     for row in self.get_rows(3):
         if 'title' in row and row['title']:
             _id = folder.invokeFactory('Cultivar', id=tmpID())
             obj = folder[_id]
             obj.edit(Code=row.get('code', ''),
                      title=row['title'],
                      description=row.get('description', ''),
                      )
             obj.processForm()
Example #24
 def __call__(self):
     wf = getToolByName(self.context, 'portal_workflow')
     part = _createObjectByType("SamplePartition", self.context, tmpID())
     part.processForm()
     SamplingWorkflowEnabled = part.bika_setup.getSamplingWorkflowEnabled()
     ## We force the object to have the same state as the parent
     sample_state = wf.getInfoFor(self.context, 'review_state')
     changeWorkflowState(part, "bika_sample_workflow", sample_state)
     self.request.RESPONSE.redirect(self.context.absolute_url() +
                                    "/partitions")
     return
Example #25
 def Import(self):
     folder = self.context.bika_setup.bika_ethnicities
     rows = self.get_rows(3)
     for row in rows:
         _id = folder.invokeFactory('Ethnicity', id=tmpID())
         obj = folder[_id]
         if row.get('Title', None):
             obj.edit(title=row['Title'],
                      description=row.get('Description', ''))
             obj.unmarkCreationFlag()
             renameAfterCreation(obj)
Example #26
 def Import(self):
     folder = self.context.bika_setup.bika_casestatuses
     rows = self.get_rows(3)
     for row in rows:
         if row['title']:
             _id = folder.invokeFactory('CaseStatus', id=tmpID())
             obj = folder[_id]
             obj.edit(title=row['title'],
                      description=row.get('description', ''))
             obj.unmarkCreationFlag()
             renameAfterCreation(obj)
Example #27
 def create_attachment(self, ws, infile):
     attuid = self.create_mime_attachmenttype()
     attachment = None
     if attuid and infile:
         attachment = _createObjectByType("Attachment", ws, tmpID())
         logger.info("Creating %s in %s" % (attachment, ws))
         attachment.edit(
             AttachmentFile=infile,
             AttachmentType=attuid,
             AttachmentKeys='Results, Automatic import')
         attachment.reindexObject()
     return attachment
Example #28
 def Import(self):
     print "EOOO"
     s_t = ''
     c_t = 'lab'
     bucket = {}
     pc = getToolByName(self.context, 'portal_catalog')
     bsc = getToolByName(self.context, 'bika_setup_catalog')
     # collect up all values into the bucket
     for row in self.get_rows(3):
         c_t = row['Client_title'] if row['Client_title'] else 'lab'
         if c_t not in bucket:
             bucket[c_t] = {}
         s_t = row['SampleType_title'] if row['SampleType_title'] else s_t
         if s_t not in bucket[c_t]:
             bucket[c_t][s_t] = []
         service = bsc(portal_type='AnalysisService', title=row['service'])
         if not service:
             service = bsc(portal_type='AnalysisService',
                           getKeyword=row['service'])
         try:
             service = service[0].getObject()
              bucket[c_t][s_t].append({
                  'keyword': service.getKeyword(),
                  'min': row.get('min', '0'),
                  'max': row.get('max', '0'),
                  'minpanic': row.get('minpanic', '0'),
                  'maxpanic': row.get('maxpanic', '0'),
                  'error': row.get('error', '0'),
              })
         except IndexError:
             warning = "Error with service name %s on sheet %s. Service not uploaded."
             logger.warning(warning, row.get('service', ''), self.sheetname)
     # write objects.
     for c_t in bucket:
         if c_t == 'lab':
             folder = self.context.bika_setup.bika_analysisspecs
         else:
             folder = pc(portal_type='Client', title=c_t)
             if (not folder or len(folder) != 1):
                 logger.warn("Client %s not found. Omiting client specifications." % c_t)
                 continue
             folder = folder[0].getObject()
         for s_t in bucket[c_t]:
             resultsrange = bucket[c_t][s_t]
             sampletype = bsc(portal_type='SampleType', title=s_t)[0]
             _id = folder.invokeFactory('AnalysisSpec', id=tmpID())
             obj = folder[_id]
             obj.edit(
                 title=sampletype.Title,
                 ResultsRange=resultsrange)
             obj.setSampleType(sampletype.UID)
             obj.unmarkCreationFlag()
             renameAfterCreation(obj)
Example #29
 def Import(self):
     folder = self.context.bika_setup.bika_diseases
     rows = self.get_rows(3)
     for row in rows:
         _id = folder.invokeFactory('Disease', id=tmpID())
         obj = folder[_id]
         if row['Title']:
             obj.edit(ICDCode=row.get('ICDCode', ''),
                      title=row['Title'],
                      description=row.get('Description', ''))
             obj.unmarkCreationFlag()
             renameAfterCreation(obj)
Example #30
 def workflow_action_save_partitions_button(self):
     form = self.request.form
     # Sample Partitions or AR Manage Analyses: save Partition Table
     sample = self.context.portal_type == 'Sample' and self.context or\
         self.context.getSample()
     part_prefix = sample.getId() + "-P"
     nr_existing = len(sample.objectIds())
     nr_parts = len(form['PartTitle'][0])
     # add missing parts
     if nr_parts > nr_existing:
         for i in range(nr_parts - nr_existing):
             part = _createObjectByType("SamplePartition", sample, tmpID())
              part.setDateReceived(DateTime())
             part.processForm()
     # remove excess parts
     if nr_existing > nr_parts:
         for i in range(nr_existing - nr_parts):
             part = sample['%s%s' % (part_prefix, nr_existing - i)]
             for a in part.getBackReferences("AnalysisSamplePartition"):
                 a.setSamplePartition(None)
             sample.manage_delObjects(['%s%s' % (part_prefix, nr_existing - i), ])
     # modify part container/preservation
     for part_uid, part_id in form['PartTitle'][0].items():
         part = sample["%s%s" % (part_prefix, part_id.split(part_prefix)[1])]
         part.edit(
             Container=form['getContainer'][0][part_uid],
             Preservation=form['getPreservation'][0][part_uid],
         )
         part.reindexObject()
         # Adding the Security Seal Intact checkbox's value to the container object
         container_uid = form['getContainer'][0][part_uid]
         uc = getToolByName(self.context, 'uid_catalog')
         cbr = uc(UID=container_uid)
         if cbr and len(cbr) > 0:
             container_obj = cbr[0].getObject()
         else:
             continue
         value = form.get('setSecuritySealIntact', {}).get(part_uid, '') == 'on'
         container_obj.setSecuritySealIntact(value)
     objects = WorkflowAction._get_selected_items(self)
     if not objects:
         message = _("No items have been selected")
         self.context.plone_utils.addPortalMessage(message, 'info')
         if self.context.portal_type == 'Sample':
              # in samples this table is on the 'Partitions' tab
             self.destination_url = self.context.absolute_url() +\
                 "/partitions"
         else:
             # in ar context this table is on 'ManageAnalyses' tab
             self.destination_url = self.context.absolute_url() +\
                 "/analyses"
         self.request.response.redirect(self.destination_url)
Example #31
 def addthing(self, folder, portal_type, **kwargs):
     thing = _createObjectByType(portal_type, folder, tmpID())
     thing.unmarkCreationFlag()
     thing.edit(**kwargs)
     thing._renameAfterCreation()
     return thing
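A sketch of how this generic factory helper might be used; the portal types and field values are illustrative, not part of the original test:

      # Hypothetical usage: create a Client, then a Contact inside it
      client = self.addthing(self.portal.clients, 'Client',
                             title='Happy Hills', ClientID='HH')
      contact = self.addthing(client, 'Contact',
                              Firstname='Rita', Surname='Mohale')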
Example #32
    def create(self, context, request):
        """/@@API/create: Create new object.

        Required parameters:

            - obj_type = portal_type of new object.
            - obj_path = path of new object, from plone site root.  Not
              required for obj_type=AnalysisRequest.

        Optionally:

            - obj_id = ID of new object.

        All other parameters in the request are matched against the object's
        Schema.  If a matching field is found in the schema, then the value is
        taken from the request and sent to the field's mutator.

        Reference fields may have their target value(s) specified with a
        delimited string query syntax, containing the portal_catalog search:

            <FieldName>=index1:value1|index2:value2

        eg to set the Client of a batch:

            ...@@API/update?obj_path=<path>...
            ...&Client=title:<client_title>&...

        And, to set a multi-valued reference, these both work:

            ...@@API/update?obj_path=<path>...
            ...&InheritedObjects:list=title:AR1...
            ...&InheritedObjects:list=title:AR2...

            ...@@API/update?obj_path=<path>...
            ...&InheritedObjects[]=title:AR1...
            ...&InheritedObjects[]=title:AR2...

        The Analysis_Specification parameter is special: it mimics the
        format of the analysis specification dicts used internally, and
        only the service Keyword can be used to reference services.  Even
        if the Keyword is not actively required, it must be supplied:

            <service_keyword>:min:max:error tolerance

        The function returns a dictionary as a json string:

        {
            runtime: Function running time.
            error: true or string(message) if error. false if no error.
            success: true or string(message) if success. false if no success.
        }

        >>> portal = layer['portal']
        >>> portal_url = portal.absolute_url()
        >>> from plone.app.testing import SITE_OWNER_NAME
        >>> from plone.app.testing import SITE_OWNER_PASSWORD

        Simple AR creation, no obj_path parameter is required:

        >>> browser = layer['getBrowser'](portal, loggedIn=True, username=SITE_OWNER_NAME, password=SITE_OWNER_PASSWORD)
        >>> browser.open(portal_url+"/@@API/create", "&".join([
        ... "obj_type=AnalysisRequest",
        ... "Client=portal_type:Client|id:client-1",
        ... "SampleType=portal_type:SampleType|title:Apple Pulp",
        ... "Contact=portal_type:Contact|getFullname:Rita Mohale",
        ... "Services:list=portal_type:AnalysisService|title:Calcium",
        ... "Services:list=portal_type:AnalysisService|title:Copper",
        ... "Services:list=portal_type:AnalysisService|title:Magnesium",
        ... "SamplingDate=2013-09-29",
        ... "Specification=portal_type:AnalysisSpec|title:Apple Pulp",
        ... ]))
        >>> browser.contents
        '{..."success": true...}'

        If parameters are specified that do not match existing fields or
        properties of the created instance, the create should fail:

        >>> browser = layer['getBrowser'](portal, loggedIn=True, username=SITE_OWNER_NAME, password=SITE_OWNER_PASSWORD)
        >>> browser.open(portal_url+"/@@API/create?", "&".join([
        ... "obj_type=Batch",
        ... "obj_path=/batches",
        ... "title=Test",
        ... "Thing=Fish"
        ... ]))
        >>> browser.contents
        '{...The following request fields were not used: ...Thing...}'

        Now we test that AR creation also fails if some fields are spelled wrong:

        >>> browser = layer['getBrowser'](portal, loggedIn=True, username=SITE_OWNER_NAME, password=SITE_OWNER_PASSWORD)
        >>> browser.open(portal_url+"/@@API/create", "&".join([
        ... "obj_type=AnalysisRequest",
        ... "thing=Fish",
        ... "Client=portal_type:Client|id:client-1",
        ... "SampleType=portal_type:SampleType|title:Apple Pulp",
        ... "Contact=portal_type:Contact|getFullname:Rita Mohale",
        ... "Services:list=portal_type:AnalysisService|title:Calcium",
        ... "Services:list=portal_type:AnalysisService|title:Copper",
        ... "Services:list=portal_type:AnalysisService|title:Magnesium",
        ... "SamplingDate=2013-09-29"
        ... ]))
        >>> browser.contents
        '{...The following request fields were not used: ...thing...}'

        """
        savepoint = transaction.savepoint()
        self.context = context
        self.request = request
        self.unused = [x for x in self.request.form.keys()]
        self.used("form.submitted")
        self.used("__ac_name")
        self.used("__ac_password")
        # always require obj_type
        self.require("obj_type")
        obj_type = self.request['obj_type']
        self.used("obj_type")
        # AnalysisRequest shortcut: creates Sample, Partition, AR, Analyses.
        if obj_type == "AnalysisRequest":
            try:
                return self._create_ar(context, request)
            except:
                savepoint.rollback()
                raise
        # Other object types require an explicit obj_path to locate their parent
        self.require("obj_path")
        obj_path = self.request['obj_path']
        if not obj_path.startswith("/"):
            obj_path = "/" + obj_path
        self.used("obj_path")
        site_path = request['PATH_INFO'].replace("/@@API/create", "")
        parent = context.restrictedTraverse(str(site_path + obj_path))
        # normal permissions still apply for this user
        if not getSecurityManager().checkPermission(AccessJSONAPI, parent):
            msg = "You don't have the '{0}' permission on {1}".format(
                AccessJSONAPI, parent.absolute_url())
            raise Unauthorized(msg)

        obj_id = request.get("obj_id", "")
        _renameAfterCreation = False
        if not obj_id:
            _renameAfterCreation = True
            obj_id = tmpID()
        self.used("obj_id")

        ret = {
            "url": router.url_for("create", force_external=True),
            "success": True,
            "error": False,
        }

        try:
            obj = _createObjectByType(obj_type, parent, obj_id)
            obj.unmarkCreationFlag()
            if _renameAfterCreation:
                renameAfterCreation(obj)
            ret['obj_id'] = obj.getId()
            used_fields = set_fields_from_request(obj, request)
            for field in used_fields:
                self.used(field)
            obj.reindexObject()
            obj.aq_parent.reindexObject()
            event.notify(ObjectInitializedEvent(obj))
            obj.at_post_create_script()
        except:
            savepoint.rollback()
            raise

        if self.unused:
            raise BadRequest("The following request fields were not used: %s.  Request aborted." % self.unused)

        return ret
Example #33
    def _create_ar(self, context, request):
        """Creates AnalysisRequest object, with supporting Sample, Partition
        and Analysis objects.  The client is retrieved from the obj_path
        key in the request.

        Required request parameters:

            - Contact: One client contact Fullname.  The contact must exist
              in the specified client.  The first Contact with the specified
              value in its Fullname field will be used.

            - SampleType_<index> - Must be an existing sample type.

        Optional request parameters:

        - CCContacts: A list of contact Fullnames, which will be copied on
          all messages related to this AR and its sample or results.

        - CCEmails: A list of email addresses to include as above.

        - Sample_id: Create a secondary AR with an existing sample.  If
          unspecified, a new sample is created.

        - Specification: a lookup to set Analysis specs default values
          for all analyses

        - Analysis_Specification: specs (or overrides) per analysis, using
          a special lookup format.

            &Analysis_Specification:list=<Keyword>:min:max:error&...


        """

        wftool = getToolByName(context, 'portal_workflow')
        bc = getToolByName(context, 'bika_catalog')
        bsc = getToolByName(context, 'bika_setup_catalog')
        pc = getToolByName(context, 'portal_catalog')
        ret = {
            "url": router.url_for("create", force_external=True),
            "success": True,
            "error": False,
        }
        SamplingWorkflowEnabled = context.bika_setup.getSamplingWorkflowEnabled()
        for field in [
            'Client',
            'SampleType',
            'Contact',
            'SamplingDate',
            'Services']:
            self.require(field)
            self.used(field)

        try:
            client = resolve_request_lookup(context, request, 'Client')[0].getObject()
        except IndexError:
            raise Exception("Client not found")

        # Sample_id
        if 'Sample' in request:
            try:
                sample = resolve_request_lookup(context, request, 'Sample')[0].getObject()
            except IndexError:
                raise Exception("Sample not found")
        else:
            # Primary AR
            sample = _createObjectByType("Sample", client, tmpID())
            sample.unmarkCreationFlag()
            fields = set_fields_from_request(sample, request)
            for field in fields:
                self.used(field)
            sample._renameAfterCreation()
            sample.setSampleID(sample.getId())
            event.notify(ObjectInitializedEvent(sample))
            sample.at_post_create_script()

            if SamplingWorkflowEnabled:
                wftool.doActionFor(sample, 'sampling_workflow')
            else:
                wftool.doActionFor(sample, 'no_sampling_workflow')

        ret['sample_id'] = sample.getId()

        parts = [{'services': [],
                  'container': [],
                  'preservation': '',
                  'separate': False}]

        specs = self.get_specs_from_request()
        ar = _createObjectByType("AnalysisRequest", client, tmpID())
        ar.unmarkCreationFlag()
        fields = set_fields_from_request(ar, request)
        for field in fields:
            self.used(field)
        ar.setSample(sample.UID())
        ar._renameAfterCreation()
        ret['ar_id'] = ar.getId()
        brains = resolve_request_lookup(context, request, 'Services')
        service_uids = [p.UID for p in brains]
        new_analyses = ar.setAnalyses(service_uids, specs=specs)
        ar.setRequestID(ar.getId())
        ar.reindexObject()
        event.notify(ObjectInitializedEvent(ar))
        ar.at_post_create_script()

        # Create sample partitions
        parts_and_services = {}
        for _i in range(len(parts)):
            p = parts[_i]
            part_prefix = sample.getId() + "-P"
            if '%s%s' % (part_prefix, _i + 1) in sample.objectIds():
                parts[_i]['object'] = sample['%s%s' % (part_prefix, _i + 1)]
                parts_and_services['%s%s' % (part_prefix, _i + 1)] = p['services']
                part = parts[_i]['object']
            else:
                part = _createObjectByType("SamplePartition", sample, tmpID())
                parts[_i]['object'] = part
                container = None
                preservation = p['preservation']
                parts[_i]['prepreserved'] = False
                part.edit(
                    Container=container,
                    Preservation=preservation,
                )
                part.processForm()
                if SamplingWorkflowEnabled:
                    wftool.doActionFor(part, 'sampling_workflow')
                else:
                    wftool.doActionFor(part, 'no_sampling_workflow')
                parts_and_services[part.id] = p['services']

        if SamplingWorkflowEnabled:
            wftool.doActionFor(ar, 'sampling_workflow')
        else:
            wftool.doActionFor(ar, 'no_sampling_workflow')

        # Add analyses to sample partitions
        # XXX jsonapi create AR: right now, all new analyses are linked to the first samplepartition
        if new_analyses:
            analyses = list(part.getAnalyses())
            analyses.extend(new_analyses)
            part.edit(
                Analyses=analyses,
            )
            for analysis in new_analyses:
                analysis.setSamplePartition(part)

        # If Preservation is required for some partitions,
        # and the SamplingWorkflow is disabled, we need
        # to transition to to_be_preserved manually.
        if not SamplingWorkflowEnabled:
            to_be_preserved = []
            sample_due = []
            lowest_state = 'sample_due'
            for p in sample.objectValues('SamplePartition'):
                if p.getPreservation():
                    lowest_state = 'to_be_preserved'
                    to_be_preserved.append(p)
                else:
                    sample_due.append(p)
            for p in to_be_preserved:
                doActionFor(p, 'to_be_preserved')
            for p in sample_due:
                doActionFor(p, 'sample_due')
            doActionFor(sample, lowest_state)
            for analysis in ar.objectValues('Analysis'):
                doActionFor(analysis, lowest_state)
            doActionFor(ar, lowest_state)

        # receive secondary AR
        if request.get('Sample_id', ''):
            doActionFor(ar, 'sampled')
            doActionFor(ar, 'sample_due')
            not_receive = ['to_be_sampled', 'sample_due', 'sampled',
                           'to_be_preserved']
            sample_state = wftool.getInfoFor(sample, 'review_state')
            if sample_state not in not_receive:
                doActionFor(ar, 'receive')
            for analysis in ar.getAnalyses(full_objects=1):
                doActionFor(analysis, 'sampled')
                doActionFor(analysis, 'sample_due')
                if sample_state not in not_receive:
                    doActionFor(analysis, 'receive')

        if self.unused:
            raise BadRequest("The following request fields were not used: %s.  Request aborted." % self.unused)

        return ret
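For illustration, a hedged sketch (in the same doctest style as the create() examples above) of a request that exercises the Analysis_Specification format described in this docstring; the client, sample type, service and keyword values are placeholders:

        >>> browser.open(portal_url+"/@@API/create", "&".join([
        ... "obj_type=AnalysisRequest",
        ... "Client=portal_type:Client|id:client-1",
        ... "SampleType=portal_type:SampleType|title:Apple Pulp",
        ... "Contact=portal_type:Contact|getFullname:Rita Mohale",
        ... "Services:list=portal_type:AnalysisService|title:Calcium",
        ... "SamplingDate=2013-09-29",
        ... "Analysis_Specification:list=Ca:8:12:10",
        ... ]))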