Example #1
    def _submit_arimport_p(self):
        """ load the profiles import layout """

        ars = []
        samples = []
        valid_batch = True  # cleared by the validation checks below
        client = self.aq_parent
        contact_obj = None
        cc_contact_obj = None

        # validate contact
        for contact in client.objectValues('Contact'):
            if contact.getUsername() == self.getContactID():
                contact_obj = contact
            if self.getCCContactID() is None:
                if contact_obj is not None:
                    break
            else:
                if contact.getUsername() == self.getCCContactID():
                    cc_contact_obj = contact
                    if contact_obj is not None:
                        break

        if contact_obj is None:
            valid_batch = False

        # get Keyword to ServiceId Map
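        # values are "UID:price" strings; the analyses list handed to
        # _add_services_to_ar below is built from these entries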
        services = {}
        service_uids = {}

        for service in self.bika_setup_catalog(
                portal_type = 'AnalysisService'):
            obj = service.getObject()
            keyword = obj.getKeyword()
            if keyword:
                services[keyword] = '%s:%s' % (obj.UID(), obj.getPrice())
            service_uids[obj.UID()] = '%s:%s' % (obj.UID(), obj.getPrice())

        samplepoints = self.bika_setup_catalog(
            portal_type = 'SamplePoint',
            Title = self.getSamplePoint())
        if not samplepoints:
            valid_batch = False

        profiles = {}
        aritems = self.objectValues('ARImportItem')

        request = self.REQUEST
        title = 'Submitting AR Import'
        bar = ProgressBar(
                self, request, title, description='')
        event.notify(InitialiseProgressBar(bar))

        row_count = 0
        item_count = len(aritems)
        prefix = 'Sample'
        for aritem in aritems:
            # set up analyses
            ar_profile = None
            analyses = []
            row_count += 1

            for profilekey in aritem.getAnalysisProfile():
                this_profile = None
                if profilekey not in profiles:
                    profiles[profilekey] = []
                    # there is no profilekey index, so look for the
                    # profile in the client folder first
                    l_prox = self._findProfileKey(profilekey)
                    if l_prox:
                        profiles[profilekey] = \
                                [s.UID() for s in l_prox.getService()]
                        this_profile = l_prox
                    else:
                        # TODO this will not find it: the catalog has no
                        # getProfileKey index either
                        c_prox = self.bika_setup_catalog(
                                    portal_type = 'AnalysisProfile',
                                    getClientUID = client.UID(),
                                    getProfileKey = profilekey)
                        if c_prox:
                            obj = c_prox[0].getObject()
                            profiles[profilekey] = \
                                    [s.UID() for s in obj.getService()]
                            this_profile = obj

                if ar_profile is None:
                    # a single profile on the row becomes the AR profile
                    ar_profile = this_profile
                else:
                    # several profiles on one row: the AR gets no profile
                    ar_profile = None
                profile = profiles[profilekey]
                for analysis in profile:
                    if analysis not in service_uids:
                        # resolve the service by UID and cache its
                        # "UID:price" entry under both UID and keyword
                        service = getToolByName(
                            self, 'reference_catalog').lookupObject(analysis)
                        keyword = service.getKeyword()
                        service_uids[service.UID()] = '%s:%s' % (
                            service.UID(), service.getPrice())
                        if keyword:
                            services[keyword] = '%s:%s' % (
                                service.UID(), service.getPrice())

                    if analysis in service_uids:
                        if service_uids[analysis] not in analyses:
                            analyses.append(service_uids[analysis])
                    else:
                        valid_batch = False

            for analysis in aritem.getAnalyses(full_objects=True):
                if analysis not in services:
                    for service in self.bika_setup_catalog(
                            portal_type = 'AnalysisService',
                            getKeyword = analysis):
                        obj = service.getObject()
                        services[analysis] = '%s:%s' % (obj.UID(), obj.getPrice())
                        service_uids[obj.UID()] = '%s:%s' % (obj.UID(), obj.getPrice())

                if analysis in services:
                    analyses.append(services[analysis])
                else:
                    valid_batch = False

            sampletypes = self.portal_catalog(
                portal_type = 'SampleType',
                sortable_title = aritem.getSampleType().lower(),
                )
            if not sampletypes:
                # unknown sample type: give up on the whole import
                valid_batch = False
                return
            sampletypeuid = sampletypes[0].getObject().UID()

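            # profile layout dates arrive as mm/dd/yyyy; DateTime takes
            # (year, month, day)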
            if aritem.getSampleDate():
                date_items = aritem.getSampleDate().split('/')
                sample_date = DateTime(
                    int(date_items[2]), int(date_items[0]), int(date_items[1]))
            else:
                sample_date = None

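            # create the Sample under the client with a temporary id;
            # _renameAfterCreation assigns the final one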
            sample_id = '%s-%s' % (prefix, tmpID())
            sample = _createObjectByType("Sample", client, sample_id)
            sample.unmarkCreationFlag()
            sample.edit(
                SampleID = sample_id,
                ClientReference = aritem.getClientRef(),
                ClientSampleID = aritem.getClientSid(),
                SampleType = aritem.getSampleType(),
                DateSampled = sample_date,
                SamplingDate = sample_date,
                DateReceived = DateTime(),
                Remarks = aritem.getClientRemarks(),
                )
            sample._renameAfterCreation()
            sample.setSamplePoint(self.getSamplePoint())
            sample.setSampleID(sample.getId())
            event.notify(ObjectInitializedEvent(sample))
            sample.at_post_create_script()
            sample_uid = sample.UID()
            samples.append(sample_id)
            aritem.setSample(sample_uid)

            priorities = self.bika_setup_catalog(
                portal_type = 'ARPriority',
                sortable_title = aritem.Priority.lower(),
                )
            if len(priorities) < 1:
                logger.warning(
                    'Invalid Priority: validation should have prevented this')
                priority = ''
            else:
                priority = priorities[0].getObject()

            ar_id = tmpID()
            ar = _createObjectByType("AnalysisRequest", client, ar_id)
            report_dry_matter = False

            ar.unmarkCreationFlag()
            ar.edit(
                RequestID = ar_id,
                Contact = self.getContact(),
                CCContact = self.getCCContact(),
                CCEmails = self.getCCEmailsInvoice(),
                ClientOrderNumber = self.getOrderID(),
                ReportDryMatter = report_dry_matter,
                Profile = ar_profile,
                Analyses = analyses,
                Remarks = aritem.getClientRemarks(),
                Priority = priority,
                )
            ar.setSample(sample_uid)
            sample = ar.getSample()
            ar.setSampleType(sampletypeuid)
            ar_uid = ar.UID()
            aritem.setAnalysisRequest(ar_uid)
            ars.append(ar_id)
            ar._renameAfterCreation()
            progress_index = float(row_count)/float(item_count)*100.0
            progress = ProgressState(request, progress_index)
            event.notify(UpdateProgressEvent(progress))
            self._add_services_to_ar(ar, analyses)

        self.setDateApplied(DateTime())
        self.reindexObject()
Example #2
    def _submit_arimport_c(self):
        """ load the classic import layout """

        ars = []
        samples = []
        valid_batch = True
        client = self.aq_parent
        contact_obj = None
        cc_contact_obj = None

        # validate contact
        for contact in client.objectValues('Contact'):
            if contact.getUsername() == self.getContactID():
                contact_obj = contact
            if self.getCCContactID() is None:
                if contact_obj is not None:
                    break
            else:
                if contact.getUsername() == self.getCCContactID():
                    cc_contact_obj = contact
                    if contact_obj is not None:
                        break

        if contact_obj is None:
            valid_batch = False

        # get Keyword to ServiceId Map
        services = {}
        for service in self.bika_setup_catalog(
                portal_type = 'AnalysisService'):
            obj = service.getObject()
            keyword = obj.getKeyword()
            if keyword:
                services[keyword] = '%s:%s' % (obj.UID(), obj.getPrice())

        samplepoints = self.bika_setup_catalog(
            portal_type = 'SamplePoint',
            Title = self.getSamplePoint())
        if not samplepoints:
            valid_batch = False

        aritems = self.objectValues('ARImportItem')
        request = self.REQUEST
        title = 'Submitting AR Import'
        bar = ProgressBar(
                self, request, title, description='')
        event.notify(InitialiseProgressBar(bar))

        SamplingWorkflowEnabled = \
            self.bika_setup.getSamplingWorkflowEnabled()
        row_count = 0
        item_count = len(aritems)
        prefix = 'Sample'
        for aritem in aritems:
            row_count += 1
            # set up analyses
            analyses = []
            for analysis in aritem.getAnalyses(full_objects=True):
                if analysis in services:
                    analyses.append(services[analysis])
                else:
                    valid_batch = False

            sampletypes = self.portal_catalog(
                portal_type = 'SampleType',
                sortable_title = aritem.getSampleType().lower(),
                )
            if not sampletypes:
                # unknown sample type: give up on the whole import
                valid_batch = False
                return
            sampletypeuid = sampletypes[0].getObject().UID()
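            # classic layout dates arrive as dd/mm/yyyy; DateTime takes
            # (year, month, day)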
            if aritem.getSampleDate():
                date_items = aritem.getSampleDate().split('/')
                sample_date = DateTime(
                    int(date_items[2]), int(date_items[1]), int(date_items[0]))
            else:
                sample_date = None

            sample_id = '%s-%s' % (prefix, tmpID())
            sample = _createObjectByType("Sample", client, sample_id)
            sample.unmarkCreationFlag()
            sample.edit(
                SampleID = sample_id,
                ClientReference = aritem.getClientRef(),
                ClientSampleID = aritem.getClientSid(),
                SampleType = aritem.getSampleType(),
                DateSampled = sample_date,
                SamplingDate = sample_date,
                DateReceived = DateTime(),
                )
            sample._renameAfterCreation()
            #sp_id = client.invokeFactory('SamplePoint', id=tmpID())
            #sp = client[sp_id]
            #sp.edit(title=self.getSamplePoint())
            sample.setSamplePoint(self.getSamplePoint())
            sample.setSampleID(sample.getId())
            event.notify(ObjectInitializedEvent(sample))
            sample.at_post_create_script()
            sample_uid = sample.UID()
            samples.append(sample_id)
            aritem.setSample(sample_uid)

            priorities = self.bika_setup_catalog(
                portal_type = 'ARPriority',
                sortable_title = aritem.Priority.lower(),
                )
            if len(priorities) < 1:
                logger.warning(
                    'Invalid Priority: validation should have prevented this')
                priority = ''
            else:
                priority = priorities[0].getObject()

            #Create AR
            ar_id = tmpID()
            ar = _createObjectByType("AnalysisRequest", client, ar_id)
            if aritem.getReportDryMatter().lower() == 'y':
                report_dry_matter = True
            else:
                report_dry_matter = False
            ar.unmarkCreationFlag()
            ar.edit(
                RequestID = ar_id,
                Contact = self.getContact(),
                CCContact = self.getCCContact(),
                CCEmails = self.getCCEmailsInvoice(),
                ClientOrderNumber = self.getOrderID(),
                ReportDryMatter = report_dry_matter,
                Analyses = analyses,
                Priority = priority,
                )
            ar.setSample(sample_uid)
            sample = ar.getSample()
            ar.setSampleType(sampletypeuid)
            ar_uid = ar.UID()
            aritem.setAnalysisRequest(ar_uid)
            ars.append(ar_id)
            ar._renameAfterCreation()

            self._add_services_to_ar(ar, analyses)

            progress_index = float(row_count)/float(item_count)*100.0
            progress = ProgressState(request, progress_index)
            event.notify(UpdateProgressEvent(progress))
            #TODO Remove for production - just to look pretty
            #time.sleep(1)
        self.setDateApplied(DateTime())
        self.reindexObject()
Example #3
    def workflow_script_import(self):
        """Create objects from valid ARImport
        """
        bsc = getToolByName(self, 'bika_setup_catalog')
        workflow = getToolByName(self, 'portal_workflow')
        client = self.aq_parent

        title = _('Submitting AR Import')
        description = _('Creating and initialising objects')
        bar = ProgressBar(self, self.REQUEST, title, description)
        notify(InitialiseProgressBar(bar))

        gridrows = self.schema['SampleData'].get(self)
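        # each grid row is a dict of Sample/AR field values; work on a
        # copy so the rows stored on the ARImport stay untouched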
        row_cnt = 0
        for therow in gridrows:
            row = therow.copy()
            row_cnt += 1
            # Create Sample
            sample = _createObjectByType('Sample', client, tmpID())
            sample.unmarkCreationFlag()
            # First convert all row values into something the field can take
            sample.edit(**row)
            sample._renameAfterCreation()
            event.notify(ObjectInitializedEvent(sample))
            sample.at_post_create_script()
            swe = self.bika_setup.getSamplingWorkflowEnabled()
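            # fire the initial workflow transition for the sample; which
            # one depends on the site-wide sampling workflow setting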
            if swe:
                workflow.doActionFor(sample, 'sampling_workflow')
            else:
                workflow.doActionFor(sample, 'no_sampling_workflow')
            part = _createObjectByType('SamplePartition', sample, 'part-1')
            part.unmarkCreationFlag()
            if swe:
                workflow.doActionFor(part, 'sampling_workflow')
            else:
                workflow.doActionFor(part, 'no_sampling_workflow')
            # Container is special... it could be a containertype.
            container = self.get_row_container(row)
            if container:
                if container.portal_type == 'ContainerType':
                    containers = container.getContainers()
                else:
                    # a single Container object was given directly
                    containers = [container]
                # XXX And so we must calculate the best container for this partition
                part.edit(Container=containers[0])
            # Profiles are titles, convert them to UIDs.
            newprofiles = []
            for title in row['Profiles']:
                brains = bsc(portal_type='AnalysisProfile', title=title)
                for brain in brains:
                    newprofiles.append(brain.UID)
            row['Profiles'] = newprofiles
            # BBB in bika.lims < 3.1.9, only one profile is permitted
            # on an AR.  The services are all added, but only first selected
            # profile name is stored.
            row['Profile'] = newprofiles[0] if newprofiles else None

            # Same for analyses
            newanalyses = set(
                self.get_row_services(row) +
                self.get_row_profile_services(row))
            row['Analyses'] = []
            # get batch
            batch = self.schema['Batch'].get(self)
            if batch:
                row['Batch'] = batch
            # Create AR
            ar = _createObjectByType("AnalysisRequest", client, tmpID())
            ar.setSample(sample)
            ar.unmarkCreationFlag()
            ar.edit(**row)
            ar._renameAfterCreation()
            for analysis in ar.getAnalyses(full_objects=True):
                analysis.setSamplePartition(part)
            ar.at_post_create_script()
            if swe:
                workflow.doActionFor(ar, 'sampling_workflow')
            else:
                workflow.doActionFor(ar, 'no_sampling_workflow')
            ar.setAnalyses(list(newanalyses))
            progress_index = float(row_cnt) / len(gridrows) * 100
            progress = ProgressState(self.REQUEST, progress_index)
            notify(UpdateProgressEvent(progress))
        # document has been written to, and redirect() fails here
        self.REQUEST.response.write(
            '<script>document.location.href="%s"</script>' %
            (self.absolute_url()))
Example #4
    def workflow_script_import(self):
        """Create objects from valid ARImport
        """
        def convert_date_string(datestr):
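            # normalise date separators, e.g. '2015-01-31' -> '2015/01/31'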
            return datestr.replace('-', '/')

        def lookup_sampler_uid(import_user):
            #Lookup sampler's uid
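            # exact userid matches win; otherwise fall back to a unique
            # full-name match, returning '' if ambiguous or not found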
            found = False
            userid = None
            user_ids = []
            users = getUsers(self, ['LabManager', 'Sampler']).items()
            for (samplerid, samplername) in users:
                if import_user == samplerid:
                    found = True
                    userid = samplerid
                    break
                if import_user == samplername:
                    user_ids.append(samplerid)
            if found:
                return userid
            if len(user_ids) == 1:
                return user_ids[0]
            if len(user_ids) > 1:
                #raise ValueError('Sampler %s is ambiguous' % import_user)
                return ''
            #Otherwise
            #raise ValueError('Sampler %s not found' % import_user)
            return ''

        bsc = getToolByName(self, 'bika_setup_catalog')
        workflow = getToolByName(self, 'portal_workflow')
        client = self.aq_parent

        title = _('Submitting AR Import')
        description = _('Creating and initialising objects')
        bar = ProgressBar(self, self.REQUEST, title, description)
        notify(InitialiseProgressBar(bar))

        profiles = [x.getObject() for x in bsc(portal_type='AnalysisProfile')]

        gridrows = self.schema['SampleData'].get(self)
        row_cnt = 0
        for therow in gridrows:
            row = therow.copy()
            row_cnt += 1
            # Create Sample
            sample = _createObjectByType('Sample', client, tmpID())
            sample.unmarkCreationFlag()
            # First convert all row values into something the field can take
            sample.edit(**row)
            sample._renameAfterCreation()
            event.notify(ObjectInitializedEvent(sample))
            sample.at_post_create_script()
            swe = self.bika_setup.getSamplingWorkflowEnabled()
            if swe:
                workflow.doActionFor(sample, 'sampling_workflow')
            else:
                workflow.doActionFor(sample, 'no_sampling_workflow')
            part = _createObjectByType('SamplePartition', sample, 'part-1')
            part.unmarkCreationFlag()
            renameAfterCreation(part)
            if swe:
                workflow.doActionFor(part, 'sampling_workflow')
            else:
                workflow.doActionFor(part, 'no_sampling_workflow')
            # Container is special... it could be a containertype.
            container = self.get_row_container(row)
            if container:
                if container.portal_type == 'ContainerType':
                    containers = container.getContainers()
                else:
                    # a single Container object was given directly
                    containers = [container]
                # XXX And so we must calculate the best container for this partition
                part.edit(Container=containers[0])

            # Profiles are titles, profile keys, or UIDS: convert them to UIDs.
            newprofiles = []
            for title in row['Profiles']:
                objects = [
                    x for x in profiles
                    if title in (x.getProfileKey(), x.UID(), x.Title())
                ]
                for obj in objects:
                    newprofiles.append(obj.UID())
            row['Profiles'] = newprofiles

            # BBB in bika.lims < 3.1.9, only one profile is permitted
            # on an AR.  The services are all added, but only first selected
            # profile name is stored.
            row['Profile'] = newprofiles[0] if newprofiles else None

            # Same for analyses
            newanalyses = set(
                self.get_row_services(row) +
                self.get_row_profile_services(row))
            row['Analyses'] = []
            # get batch
            batch = self.schema['Batch'].get(self)
            if batch:
                row['Batch'] = batch
            # Add AR fields from schema into this row's data
            row['ClientReference'] = self.getClientReference()
            row['ClientOrderNumber'] = self.getClientOrderNumber()
            row['Contact'] = self.getContact()
            row['DateSampled'] = convert_date_string(row['DateSampled'])
            if row['Sampler']:
                row['Sampler'] = lookup_sampler_uid(row['Sampler'])

            # Create AR
            ar = _createObjectByType("AnalysisRequest", client, tmpID())
            ar.setSample(sample)
            ar.unmarkCreationFlag()
            ar.edit(**row)
            ar._renameAfterCreation()
            ar.setAnalyses(list(newanalyses))
            for analysis in ar.getAnalyses(full_objects=True):
                analysis.setSamplePartition(part)
            ar.at_post_create_script()
            if swe:
                workflow.doActionFor(ar, 'sampling_workflow')
            else:
                workflow.doActionFor(ar, 'no_sampling_workflow')
            progress_index = float(row_cnt) / len(gridrows) * 100
            progress = ProgressState(self.REQUEST, progress_index)
            notify(UpdateProgressEvent(progress))
        # document has been written to, and redirect() fails here
        self.REQUEST.response.write(
            '<script>document.location.href="%s"</script>' %
            (self.aq_parent.absolute_url()))
Example #5
    def workflow_script_import(self):
        """Create objects from valid ARImport
        """
        bsc = getToolByName(self, 'bika_setup_catalog')
        client = self.aq_parent

        title = _('Submitting AR Import')
        description = _('Creating and initialising objects')
        bar = ProgressBar(self, self.REQUEST, title, description)
        notify(InitialiseProgressBar(bar))

        profiles = [x.getObject() for x in bsc(portal_type='AnalysisProfile')]
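        # wake all AnalysisProfile objects up front so each row can match
        # titles, profile keys or UIDs without further catalog queries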

        gridrows = self.schema['SampleData'].get(self)
        row_cnt = 0
        for therow in gridrows:
            row = deepcopy(therow)
            row_cnt += 1

            # Profiles are titles, profile keys, or UIDS: convert them to UIDs.
            newprofiles = []
            for title in row['Profiles']:
                objects = [x for x in profiles
                           if title in (x.getProfileKey(), x.UID(), x.Title())]
                for obj in objects:
                    newprofiles.append(obj.UID())
            row['Profiles'] = newprofiles

            # Same for analyses
            newanalyses = set(self.get_row_services(row) +
                              self.get_row_profile_services(row))
            # get batch
            batch = self.schema['Batch'].get(self)
            if batch:
                row['Batch'] = batch.UID()
            # Add AR fields from schema into this row's data
            row['ClientReference'] = self.getClientReference()
            row['ClientOrderNumber'] = self.getClientOrderNumber()
            contact_uid =\
                self.getContact().UID() if self.getContact() else None
            row['Contact'] = contact_uid
            # Creating analysis request from gathered data
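            # create_analysisrequest builds the AR (and its sample and
            # partitions) in one call, replacing the manual setup used by
            # the older variants above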
            ar = create_analysisrequest(
                client,
                self.REQUEST,
                row,
                analyses=list(newanalyses),
                partitions=None,)

            # Container is special... it could be a containertype.
            container = self.get_row_container(row)
            if container:
                if container.portal_type == 'ContainerType':
                    containers = container.getContainers()
                else:
                    # a single Container object was given directly
                    containers = [container]
                # TODO: Since containers don't work as expected, keep the
                # old AR import logic here for now...
                part = ar.getPartitions()[0]
                # XXX And so we must calculate the best container for this partition
                part.edit(Container=containers[0])

            # progress marker update
            progress_index = float(row_cnt) / len(gridrows) * 100
            progress = ProgressState(self.REQUEST, progress_index)
            notify(UpdateProgressEvent(progress))

        # document has been written to, and redirect() fails here
        self.REQUEST.response.write(
            '<script>document.location.href="%s"</script>' % (
                self.absolute_url()))
Example #6
    def _import_file(self, importoption, csvfile, client_id):
        fullfilename = csvfile.filename
        fullfilename = fullfilename.split('/')[-1]
        filename = fullfilename.split('.')[0]
        log = []
        r = self.portal_catalog(portal_type='Client', id=client_id)
        if len(r) == 0:
            #This is not a user input issue - client_id is added to template
            log.append('   Could not find Client %s' % client_id)
            return None, '\n'.join(log)

        client = r[0].getObject()
        updateable_states = ['sample_received', 'assigned']
        reader = csv.reader(csvfile.readlines())
        samples = []
        sample_headers = None
        batch_headers = None
        batch_remarks = []
        row_count = 0
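        # expected layout: row 1 'Header', row 2 the batch header (cell
        # B2 must read 'Import'), row 3 analysis keywords from column K
        # onward, rows 4-6 padding, and one sample per remaining row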
        for row in reader:
            row_count = row_count + 1
            if not row:
                continue
            # a new batch starts
            if row_count == 1:
                if row[0] == 'Header':
                    continue
                else:
                    msg = '%s invalid batch header' % row
                    transaction_note(msg)
                    return None, msg
            elif row_count == 2:
                msg = None
                if row[1] != 'Import':
                    msg = 'Invalid batch header - Import required in cell B2'
                    transaction_note(msg)
                    return None, msg
                if not row[2] or filename.lower() != row[2].lower():
                    msg = 'Filename, %s, does not match entered filename, %s' \
                            % (filename, row[2])
                    transaction_note(msg)
                    return None, msg

                batch_headers = row[0:]
                arimport_id = tmpID()
                title = filename
                idx = 1
                while title in [i.Title() for i in client.objectValues()]:
                    title = '%s-%s' % (filename, idx)
                    idx += 1
                arimport = _createObjectByType("ARImport",
                                               client,
                                               arimport_id,
                                               title=title)
                arimport.unmarkCreationFlag()
                continue
            elif row_count == 3:
                sample_headers = row[10:]
                continue
            elif row_count in [4, 5, 6]:
                continue

            #otherwise add to list of sample
            samples.append(row)
        if row_count < 2:
            # empty file or missing batch header: no ARImport was created
            msg = 'Invalid batch header'
            transaction_note(msg)
            return None, msg

        pad = 8192 * ' '
        request = self.request

        title = 'Importing file'
        bar = ProgressBar(self.context, self.request, title, description='')
        notify(InitialiseProgressBar(bar))

        sample_count = len(samples)
        row_count = 0
        for sample in samples:
            next_num = tmpID()
            row_count = row_count + 1
            item_remarks = []
            progress_index = float(row_count) / float(sample_count) * 100.0
            progress = ProgressState(self.request, progress_index)
            notify(UpdateProgressEvent(progress))
            #TODO Remove for production - just to look pretty
            #time.sleep(1)
            analyses = []
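            # columns K onward carry a '1' for each requested analysis;
            # map them back to the keywords captured from row 3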
            for i in range(10, len(sample)):
                if sample[i] != '1':
                    continue
                analyses.append(sample_headers[(i - 10)])
            if len(analyses) > 0:
                aritem_id = '%s_%s' % ('aritem', (str(next_num)))
                aritem = _createObjectByType("ARImportItem", arimport,
                                             aritem_id)
                aritem.edit(
                    SampleName=sample[0],
                    ClientRef=sample[1],
                    SampleDate=sample[2],
                    SampleType=sample[3],
                    PickingSlip=sample[4],
                    ContainerType=sample[5],
                    ReportDryMatter=sample[6],
                    Priority=sample[7],
                )

                aritem.setRemarks(item_remarks)
                if importoption == 'c':
                    aritem.setAnalyses(analyses)
                elif importoption == 'p':
                    aritem.setAnalysisProfile(analyses)

        cc_names_report = ','.join(
                [i.strip() for i in batch_headers[6].split(';')]) \
                if (batch_headers and len(batch_headers) > 7) else ""
        cc_emails_report = ','.join(
                [i.strip() for i in batch_headers[7].split(';')]) \
                if batch_headers and len(batch_headers) > 8 else ""
        cc_emails_invoice = ','.join(
                [i.strip() for i in batch_headers[8].split(';')]) \
                if batch_headers and len(batch_headers) > 9 else ""

        try:
            numOfSamples = int(batch_headers[12])
        except (IndexError, TypeError, ValueError):
            numOfSamples = 0
        arimport.edit(
            ImportOption=importoption,
            FileName=batch_headers[2],
            OriginalFile=csvfile,
            ClientTitle=batch_headers[3],
            ClientID=batch_headers[4],
            ContactID=batch_headers[5],
            CCNamesReport=cc_names_report,
            CCEmailsReport=cc_emails_report,
            CCEmailsInvoice=cc_emails_invoice,
            OrderID=batch_headers[9],
            QuoteID=batch_headers[10],
            SamplePoint=batch_headers[11],
            NumberSamples=numOfSamples,
            Remarks=batch_remarks,
            Analyses=sample_headers,
            DateImported=DateTime(),
        )
        arimport._renameAfterCreation()

        # run the field validators over the fully populated ARImport
        valid = arimport.validateIt()
        return arimport, msg