Example #1
def upgrade(tool):
    """Upgrade step required for Bika LIMS 3.1.9
    """
    portal = aq_parent(aq_inner(tool))
    # Add the new feature: multiple profiles per Analysis Request
    multipleAnalysisProfiles(portal)
    setup = portal.portal_setup
    # Updated profile steps
    setup.runImportStepFromProfile('profile-bika.lims:default', 'typeinfo')
    setup.runImportStepFromProfile('profile-bika.lims:default', 'jsregistry')
    setup.runImportStepFromProfile('profile-bika.lims:default', 'workflow-csv')
    # Reference list for GenericSetup import step names:
    # http://stackoverflow.com/questions/7821498/is-there-a-good-reference-list-for-the-names-of-the-genericsetup-import-steps
    setup.runImportStepFromProfile('profile-bika.lims:default', 'skins')
    # Update workflow permissions
    wf = getToolByName(portal, 'portal_workflow')
    wf.updateRoleMappings()

    qi = portal.portal_quickinstaller
    ufrom = qi.upgradeInfo('bika.lims')['installedVersion']
    logger.info("Upgrading Bika LIMS: %s -> %s" % (ufrom, '319'))

    # Migrations

    return True
Example #2
def upgrade(tool):
    """ Adding getRawSamplePoint/Type idx to obtain Sample's uid easly
    """
    # Hack to prevent out-of-date upgrading
    # Related: PR #1484
    # https://github.com/bikalabs/Bika-LIMS/pull/1484
    from lims.upgrade import skip_pre315
    if skip_pre315(aq_parent(aq_inner(tool))):
        return True

    portal = aq_parent(aq_inner(tool))
    bsc = getToolByName(portal, 'bika_setup_catalog', None)

    if 'getRawSamplePoints' not in bsc.indexes():
        bsc.addIndex('getRawSamplePoints', 'KeywordIndex')
    if 'getRawSampleTypes' not in bsc.indexes():
        bsc.addIndex('getRawSampleTypes', 'KeywordIndex')

    logger.info("Reindex added indexes in bika_setup_catalog")
    bsc.manage_reindexIndex(ids=[
        'getRawSamplePoints',
        'getRawSampleTypes',
    ])

    return True
Example #3
def upgrade(tool):
    """ Sort by Type in instruments
    """
    # Hack to prevent out-of-date upgrading
    # Related: PR #1484
    # https://github.com/bikalabs/Bika-LIMS/pull/1484
    from lims.upgrade import skip_pre315
    if skip_pre315(aq_parent(aq_inner(tool))):
        return True

    portal = aq_parent(aq_inner(tool))
    bsc = getToolByName(portal, 'bika_setup_catalog', None)

    if 'getInstrumentType' not in bsc.indexes():
        bsc.addIndex('getInstrumentType', 'FieldIndex')
        bsc.addColumn('getInstrumentType')

        bsc.addIndex('getInstrumentTypeName', 'FieldIndex')
        bsc.addColumn('getInstrumentTypeName')

    # Delete the old "getType" index; it is no longer used
    if 'getType' in bsc.indexes():
        bsc.delIndex('getType')
    if 'getType' in bsc.schema():
        bsc.delColumn('getType')

    logger.info("Reindex added indexes in bika_setup_catalog")
    bsc.manage_reindexIndex(ids=[
        'getInstrumentType',
        'getInstrumentTypeName',
    ])

    return True
Example #4
def upgrade(tool):
    # Hack to prevent out-of-date upgrading
    # Related: PR #1484
    # https://github.com/bikalabs/Bika-LIMS/pull/1484
    from lims.upgrade import skip_pre315
    if skip_pre315(aq_parent(aq_inner(tool))):
        return True

    portal = aq_parent(aq_inner(tool))
    setup = portal.portal_setup
    typestool = getToolByName(portal, 'portal_types')

    # Remove any stale bika_arpriorities folder before re-creating it below
    try:
        portal.bika_setup.manage_delObjects('bika_arpriorities')
    except BadRequest:
        logger.info("Folder doesn't exist")

    try:
        typestool.constructContent(type_name="ARPriorities",
                                   container=portal.bika_setup,
                                   id='bika_arpriorities',
                                   title='AR Priorities')
        obj = portal.bika_setup.bika_arpriorities
        obj.unmarkCreationFlag()
        obj.reindexObject()
    except BadRequest:
        # folder already exists
        pass

    return True
Example #5
def upgrade(tool):
    """ Sort by Type in instruments
    """
    # Hack to prevent out-of-date upgrading
    # Related: PR #1484
    # https://github.com/bikalabs/Bika-LIMS/pull/1484
    from lims.upgrade import skip_pre315
    if skip_pre315(aq_parent(aq_inner(tool))):
        return True

    portal = aq_parent(aq_inner(tool))
    bsc = getToolByName(portal, 'bika_setup_catalog', None)

    if 'getInstrumentType' not in bsc.indexes():
        bsc.addIndex('getInstrumentType', 'FieldIndex')
        bsc.addColumn('getInstrumentType')

        bsc.addIndex('getInstrumentTypeName', 'FieldIndex')
        bsc.addColumn('getInstrumentTypeName')

    # Delete the old "getType" index; it is no longer used
    if 'getType' in bsc.indexes():
        bsc.delIndex('getType')
    if 'getType' in bsc.schema():
        bsc.delColumn('getType')

    logger.info("Reindex added indexes in bika_setup_catalog")
    bsc.manage_reindexIndex(
        ids=['getInstrumentType', 'getInstrumentTypeName', ])

    return True
Example #6
def upgrade(tool):
    """Upgrade step required for Bika LIMS 3.1.9
    """
    portal = aq_parent(aq_inner(tool))
    # Add the new feature: multiple profiles per Analysis Request
    multipleAnalysisProfiles(portal)
    setup = portal.portal_setup
    # Updated profile steps
    setup.runImportStepFromProfile('profile-bika.lims:default', 'typeinfo')
    setup.runImportStepFromProfile('profile-bika.lims:default', 'jsregistry')
    setup.runImportStepFromProfile('profile-bika.lims:default', 'workflow-csv')
    # Reference list for GenericSetup import step names:
    # http://stackoverflow.com/questions/7821498/is-there-a-good-reference-list-for-the-names-of-the-genericsetup-import-steps
    setup.runImportStepFromProfile('profile-bika.lims:default', 'skins')
    # Update workflow permissions
    wf = getToolByName(portal, 'portal_workflow')
    wf.updateRoleMappings()

    qi = portal.portal_quickinstaller
    ufrom = qi.upgradeInfo('bika.lims')['installedVersion']
    logger.info("Upgrading Bika LIMS: %s -> %s" % (ufrom, '319'))

    # Migrations

    return True
Example #7
    def __call__(self, result=None, specification=None, **kwargs):
        searchTerm = _c(self.request.get('searchTerm', '')).lower()
        force_all = self.request.get('force_all', 'true')
        searchFields = 'search_fields' in self.request \
            and json.loads(_u(self.request.get('search_fields', '[]'))) \
            or ('Title',)
        # lookup objects from ZODB
        catalog_name = _c(self.request.get('catalog_name', 'portal_catalog'))
        catalog = getToolByName(self.context, catalog_name)
        base_query = json.loads(_c(self.request['base_query']))
        search_query = json.loads(_c(self.request.get('search_query', "{}")))
        # first with all queries
        contentFilter = dict((k, v) for k, v in base_query.items())
        contentFilter.update(search_query)
        try:
            brains = catalog(contentFilter)
        except Exception:
            from lims import logger
            logger.info(contentFilter)
            raise
        if brains and searchTerm:
            _brains = []
            if len(searchFields) == 0 \
                    or (len(searchFields) == 1 and searchFields[0] == 'Title'):
                _brains = [
                    p for p in brains if p.Title.lower().find(searchTerm) > -1
                ]
            else:
                for p in brains:
                    for fieldname in searchFields:
                        value = getattr(p, fieldname, None)
                        if not value:
                            instance = p.getObject()
                            schema = instance.Schema()
                            if fieldname in schema:
                                value = schema[fieldname].get(instance)
                        if value and value.lower().find(searchTerm) > -1:
                            _brains.append(p)
                            break

            brains = _brains
        # Then just base_query alone ("show all if no match")
        if not brains and force_all.lower() == 'true':
            if search_query:
                brains = catalog(base_query)
                if brains and searchTerm:
                    _brains = [
                        p for p in brains
                        if p.Title.lower().find(searchTerm) > -1
                    ]
                    if _brains:
                        brains = _brains
        return brains
Example #8
    def __call__(self, result=None, specification=None, **kwargs):
        searchTerm = _c(self.request.get('searchTerm', '')).lower()
        force_all = self.request.get('force_all', 'true')
        searchFields = 'search_fields' in self.request \
            and json.loads(_u(self.request.get('search_fields', '[]'))) \
            or ('Title',)
        # lookup objects from ZODB
        catalog_name = _c(self.request.get('catalog_name', 'portal_catalog'))
        catalog = getToolByName(self.context, catalog_name)
        base_query = json.loads(_c(self.request['base_query']))
        search_query = json.loads(_c(self.request.get('search_query', "{}")))
        # first with all queries
        contentFilter = dict((k, v) for k, v in base_query.items())
        contentFilter.update(search_query)
        try:
            brains = catalog(contentFilter)
        except Exception:
            from lims import logger
            logger.info(contentFilter)
            raise
        if brains and searchTerm:
            _brains = []
            if len(searchFields) == 0 \
                    or (len(searchFields) == 1 and searchFields[0] == 'Title'):
                _brains = [p for p in brains
                           if p.Title.lower().find(searchTerm) > -1]
            else:
                for p in brains:
                    for fieldname in searchFields:
                        value = getattr(p, fieldname, None)
                        if not value:
                            instance = p.getObject()
                            schema = instance.Schema()
                            if fieldname in schema:
                                value = schema[fieldname].get(instance)
                        if value and value.lower().find(searchTerm) > -1:
                            _brains.append(p)
                            break

            brains = _brains
        # Then just base_query alone ("show all if no match")
        if not brains and force_all.lower() == 'true':
            if search_query:
                brains = catalog(base_query)
                if brains and searchTerm:
                    _brains = [p for p in brains
                               if p.Title.lower().find(searchTerm) > -1]
                    if _brains:
                        brains = _brains
        return brains
Example #9
    def check_new_version(self):
        """Look for new releases on PyPI
        """
        self.current_version = self.versions['bika.lims']
        if not self.current_version:
            self.has_new_version = False
            return
        url = "https://pypi.python.org/pypi/bika.lims/json"
        try:
            jsonstr = urllib.urlopen(url).read()
            self.pypi = json.loads(jsonstr)
            v = self.new_version = self.pypi['info']['version']
            self.new_date = \
                self.pypi['releases'][v][0]['upload_time'].split('T')[0]
        except Exception as e:
            logger.info("Failed to retrieve new version info: %s" % e)
            v = self.current_version
            self.new_date = ""
        # Compare parsed versions, not raw strings: lexicographically,
        # "3.1.10" sorts before "3.1.9" and the check would misfire
        from distutils.version import LooseVersion
        self.has_new_version = \
            LooseVersion(v) > LooseVersion(self.current_version)
Example #10
def upgrade(tool):
    """Upgrade step required for Bika LIMS 3.1.7
    """
    portal = aq_parent(aq_inner(tool))
    setup = portal.portal_setup
    qi = portal.portal_quickinstaller
    ufrom = qi.upgradeInfo('bika.lims')['installedVersion']
    logger.info("Upgrading Bika LIMS: %s -> %s" % (ufrom, '317'))

    # Updated profile steps

    setup.runImportStepFromProfile('profile-bika.lims:default', 'jsregistry')
    setup.runImportStepFromProfile('profile-bika.lims:default', 'typeinfo')
    setup.runImportStepFromProfile('profile-bika.lims:default', 'workflow-csv')
    setup.runImportStepFromProfile('profile-bika.lims:default', 'plone.app.registry')

    # Migrations

    LIMS1519(portal)

    return True
Example #11
def upgrade(tool):
    """Upgrade step required for Bika LIMS 3.1.8
    """
    portal = aq_parent(aq_inner(tool))
    setup = portal.portal_setup
    qi = portal.portal_quickinstaller
    ufrom = qi.upgradeInfo('bika.lims')['installedVersion']
    logger.info("Upgrading Bika LIMS: %s -> %s" % (ufrom, '318'))

    # Updated profile steps
    setup.runImportStepFromProfile('profile-bika.lims:default', 'jsregistry')
    setup.runImportStepFromProfile('profile-bika.lims:default', 'typeinfo')
    setup.runImportStepFromProfile('profile-bika.lims:default', 'workflow-csv')

    # Adding Multifile content type
    at = getToolByName(portal, 'archetype_tool')
    at.setCatalogsByType('Multifile', [
        'bika_setup_catalog',
    ])

    # Adding indexes
    bsc = getToolByName(portal, 'bika_setup_catalog', None)
    if 'getMethodID' not in bsc.indexes():
        bsc.addIndex('getMethodID', 'FieldIndex')
    if 'getDocumentID' not in bsc.indexes():
        bsc.addIndex('getDocumentID', 'FieldIndex')

    # Define permissions for Multifile
    mp = portal.manage_permission
    mp(AddMultifile, ['Manager', 'Owner', 'LabManager', 'LabClerk'], 1)
    # Update workflow permissions
    wf = getToolByName(portal, 'portal_workflow')
    wf.updateRoleMappings()

    # Migrations
    HEALTH245(portal)

    return True
Example #12
def upgrade(tool):
    """ Adding getRawSamplePoint/Type idx to obtain Sample's uid easly
    """
    # Hack to prevent out-of-date upgrading
    # Related: PR #1484
    # https://github.com/bikalabs/Bika-LIMS/pull/1484
    from lims.upgrade import skip_pre315
    if skip_pre315(aq_parent(aq_inner(tool))):
        return True

    portal = aq_parent(aq_inner(tool))
    bsc = getToolByName(portal, 'bika_setup_catalog', None)

    if 'getRawSamplePoints' not in bsc.indexes():
        bsc.addIndex('getRawSamplePoints', 'KeywordIndex')
    if 'getRawSampleTypes' not in bsc.indexes():
        bsc.addIndex('getRawSampleTypes', 'KeywordIndex')

    logger.info("Reindex added indexes in bika_setup_catalog")
    bsc.manage_reindexIndex(ids=['getRawSamplePoints', 'getRawSampleTypes'])

    return True
Example #13
def upgrade(tool):
    """Upgrade step required for Bika LIMS 3.1.8
    """
    portal = aq_parent(aq_inner(tool))
    setup = portal.portal_setup
    qi = portal.portal_quickinstaller
    ufrom = qi.upgradeInfo('bika.lims')['installedVersion']
    logger.info("Upgrading Bika LIMS: %s -> %s" % (ufrom, '318'))

    # Updated profile steps
    setup.runImportStepFromProfile('profile-bika.lims:default', 'jsregistry')
    setup.runImportStepFromProfile('profile-bika.lims:default', 'typeinfo')
    setup.runImportStepFromProfile('profile-bika.lims:default', 'workflow-csv')

    # Adding Multifile content type
    at = getToolByName(portal, 'archetype_tool')
    at.setCatalogsByType('Multifile', ['bika_setup_catalog', ])

    # Adding indexes
    bsc = getToolByName(portal, 'bika_setup_catalog', None)
    if 'getMethodID' not in bsc.indexes():
        bsc.addIndex('getMethodID', 'FieldIndex')
    if 'getDocumentID' not in bsc.indexes():
        bsc.addIndex('getDocumentID', 'FieldIndex')

    # Define permissions for Multifile
    mp = portal.manage_permission
    mp(AddMultifile, ['Manager', 'Owner', 'LabManager', 'LabClerk'], 1)
    # Update workflow permissions
    wf = getToolByName(portal, 'portal_workflow')
    wf.updateRoleMappings()

    # Migrations
    HEALTH245(portal)

    return True
Example #14
def upgrade(tool):
    # Hack to prevent out-of-date upgrading
    # Related: PR #1484
    # https://github.com/bikalabs/Bika-LIMS/pull/1484
    from lims.upgrade import skip_pre315
    if skip_pre315(aq_parent(aq_inner(tool))):
        return True

    portal = aq_parent(aq_inner(tool))

    at = getToolByName(portal, 'archetype_tool')
    bc = getToolByName(portal, 'bika_catalog')
    bac = getToolByName(portal, 'bika_analysis_catalog')
    bsc = getToolByName(portal, 'bika_setup_catalog')
    pc = getToolByName(portal, 'portal_catalog')
    portal_catalog = getToolByName(portal, 'portal_catalog')
    portal_groups = portal.portal_groups
    setup = portal.portal_setup
    typestool = getToolByName(portal, 'portal_types')
    wf = getToolByName(portal, 'portal_workflow')

    # Update all tools in which changes have been made
    setup.runImportStepFromProfile('profile-bika.lims:default', 'propertiestool')
    setup.runImportStepFromProfile('profile-bika.lims:default', 'typeinfo')
    setup.runImportStepFromProfile('profile-bika.lims:default', 'repositorytool')
    setup.runImportStepFromProfile('profile-bika.lims:default', 'workflow')
    setup.runImportStepFromProfile('profile-bika.lims:default', 'workflow-csv')
    setup.runImportStepFromProfile('profile-bika.lims:default', 'factorytool')
    setup.runImportStepFromProfile('profile-bika.lims:default', 'jsregistry')
    setup.runImportStepFromProfile('profile-bika.lims:default', 'portlets', run_dependencies=False)
    setup.runImportStepFromProfile('profile-bika.lims:default', 'viewlets')
    setup.runImportStepFromProfile('profile-plone.app.jquery:default', 'jsregistry')

    # Add RegulatoryInspectors group and RegulatoryInspector role.
    # Fix permissions: LabClerks don't see analysis results
    role = 'RegulatoryInspector'
    group = 'RegulatoryInspectors'
    if role not in portal.acl_users.portal_role_manager.listRoleIds():
        portal.acl_users.portal_role_manager.addRole(role)
        portal._addRole(role)

    if group not in portal_groups.listGroupIds():
        portal_groups.addGroup('RegulatoryInspectors',
                           title="Regulatory Inspectors",
                           roles=['Member', 'RegulatoryInspector', ])
    else:
        portal_groups.setRolesForGroup('RegulatoryInspectors',
                                       ['Member', 'RegulatoryInspector', ])

    # Add SampleConditions
    at.setCatalogsByType('SampleCondition', ['bika_setup_catalog'])
    if not portal['bika_setup'].get('bika_sampleconditions'):
        typestool.constructContent(type_name="SampleConditions",
                                   container=portal['bika_setup'],
                                   id='bika_sampleconditions',
                                   title='Sample Conditions')
    obj = portal['bika_setup']['bika_sampleconditions']
    obj.unmarkCreationFlag()
    obj.reindexObject()
    # Add SampleCondition to all Sample objects
    proxies = portal_catalog(portal_type="Sample")
    samples = (proxy.getObject() for proxy in proxies)
    for sample in samples:
        sample.setSampleCondition(None)

    # Some catalog indexes were added or modified
    if 'getSampleTypeTitle' in bc.indexes():
        bc.delIndex('getSampleTypeTitle')
    if 'getSamplePointTitle' in bc.indexes():
        bc.delIndex('getSamplePointTitle')
    bc.addIndex('getSampleTypeTitle', 'KeywordIndex')
    bc.addIndex('getSamplePointTitle', 'KeywordIndex')

    if 'getClientSampleID' not in pc.indexes():
        pc.addIndex('getClientSampleID', 'FieldIndex')
        pc.addColumn('getClientSampleID')
    if 'getParentUID' not in pc.indexes():
        pc.addIndex('getParentUID', 'FieldIndex')
        pc.addColumn('getParentUID')
    if 'getReferenceAnalysesGroupID' not in bac.indexes():
        bac.addIndex('getReferenceAnalysesGroupID', 'FieldIndex')
        bac.addColumn('getReferenceAnalysesGroupID')

    # Fix broken template partition containers
    for p in bsc(portal_type='ARTemplate'):
        o = p.getObject()
        parts = o.getPartitions()
        for i, part in enumerate(parts):
            if 'container_uid' in part:
                container = bsc(portal_type='Container',
                                UID=part['container_uid'])
                if container:
                    container = container[0].getObject()
                    parts[i]['Container'] = container.Title()
            if 'preservation_uid' in part:
                preservation = bsc(portal_type='Preservation',
                                   UID=part['preservation_uid'])
                if preservation:
                    preservation = preservation[0].getObject()
                    parts[i]['Preservation'] = preservation.Title()

    # Populate ReferenceAnalysesGroupIDs for ReferenceAnalyses
    # https://github.com/bikalabs/Bika-LIMS/issues/931
    wss = bc(portal_type='Worksheet')
    for ws in wss:
        ws = ws.getObject()
        wsangroups = {}
        codes = {}

        # Reference analyses and their duplicates
        refanalyses = [an for an in ws.getAnalyses()
                       if an.portal_type == 'ReferenceAnalysis'
                       or an.portal_type == 'DuplicateAnalysis']
        layout = ws.getLayout()
        for lay in layout:
            for an in refanalyses:
                if lay['analysis_uid'] == an.UID():
                    position = lay['position']
                    if position not in wsangroups.keys():
                        wsangroups[position] = []
                    wsangroups[position].append(an)

        for position, wsgroup in wsangroups.iteritems():
            analysis = wsgroup[0]
            if analysis.portal_type == 'ReferenceAnalysis':
                refsampleid = wsgroup[0].aq_parent.id
            else:
                # Duplicate
                _analysis = wsgroup[0].getAnalysis()
                if _analysis.portal_type == 'ReferenceAnalysis':
                    refsampleid = _analysis.aq_parent.id
                else:
                    refsampleid = wsgroup[0].getSamplePartition().id
            codre = refsampleid
            codws = '%s_%s' % (refsampleid, ws.UID())
            codgr = '%s_%s_%s' % (refsampleid, ws.UID(), position)
            if codgr in codes.keys():
                postfix = codes[codgr]
            elif codws in codes.keys():
                postfix = codes[codws]
                codes[codgr] = postfix
                codes[codws] = postfix + 1
            elif codre in codes.keys():
                postfix = codes[codre]
                codes[codgr] = postfix
                codes[codws] = postfix + 1
                codes[codre] = postfix + 1
            else:
                postfix = 1
                codes[codre] = postfix + 1

            for an in wsgroup:
                if an.portal_type == 'DuplicateAnalysis':
                    postfix = str(postfix).zfill(2)
                    refgid = '%s-D%s' % (refsampleid, postfix)
                else:
                    postfix = str(postfix).zfill(3)
                    refgid = '%s-%s' % (refsampleid, postfix)
                an.setReferenceAnalysesGroupID(refgid)

    # Re-import the default permission maps
    gen = BikaGenerator()
    gen.setupPermissions(portal)

    logger.info("Updating workflow role/permission mappings")
    wf.updateRoleMappings()

    logger.info("Reindex added indexes in portal_catalog")
    pc.manage_reindexIndex(ids=['getClientSampleID', 'getParentUID',])

    logger.info("Reindex added indexes in bika_analysis_catalog")
    bac.manage_reindexIndex(ids=['getReferenceAnalysesGroupID',])

    logger.info("Reindex added indexes in bika_catalog")
    bc.manage_reindexIndex(ids=['getSampleTypeTitle', 'getSamplePointTitle',])

    return True
Example #15
def read(context, request):
    tag = AuthenticatorView(context, request).authenticator()
    pattern = r'<input .*name="(\w+)".*value="(\w+)"'
    _authenticator = re.match(pattern, tag).groups()[1]

    ret = {
        "url": router.url_for("read", force_external=True),
        "success": True,
        "error": False,
        "objects": [],
        "_authenticator": _authenticator,
    }
    debug_mode = True  # TODO: restore App.config.getConfiguration().debug_mode
    catalog_name = request.get("catalog_name", "portal_catalog")
    if not catalog_name:
        raise ValueError("bad or missing catalog_name: " + catalog_name)
    catalog = getToolByName(context, catalog_name)
    indexes = catalog.indexes()

    contentFilter = {}
    for index in indexes:
        if index in request:
            if index == 'review_state' and "{" in request[index]:
                continue
            contentFilter[index] = safe_unicode(request[index])
        if "%s[]"%index in request:
            value = request["%s[]"%index]
            contentFilter[index] = [safe_unicode(v) for v in value]

    if 'limit' in request:
        try:
            contentFilter['sort_limit'] = int(request["limit"])
        except ValueError:
            pass
    sort_on = request.get('sort_on', 'id')
    contentFilter['sort_on'] = sort_on
    # sort order
    sort_order = request.get('sort_order', '')
    if sort_order:
        contentFilter['sort_order'] = sort_order
    else:
        sort_order = 'ascending'
        contentFilter['sort_order'] = 'ascending'

    include_fields = get_include_fields(request)
    if debug_mode:
        logger.info("contentFilter: " + str(contentFilter))

    # Get matching objects from catalog
    proxies = catalog(**contentFilter)

    # batching items
    page_nr = int(request.get("page_nr", 0))
    try:
        page_size = int(request.get("page_size", 10))
    except ValueError:
        page_size = 10
    # page_size == 0: show all
    if page_size == 0:
        page_size = len(proxies)
    first_item_nr = page_size * page_nr
    if first_item_nr > len(proxies):
        first_item_nr = 0
    page_proxies = proxies[first_item_nr:first_item_nr + page_size]
    for proxy in page_proxies:
        obj_data = {}

        # Place all proxy attributes into the result.
        obj_data.update(load_brain_metadata(proxy, include_fields))

        # Place all schema fields into the result.
        obj = proxy.getObject()
        obj_data.update(load_field_values(obj, include_fields))

        obj_data['path'] = "/".join(obj.getPhysicalPath())

        # call any adapters that care to modify this data.
        adapters = getAdapters((obj, ), IJSONReadExtender)
        for name, adapter in adapters:
            adapter(request, obj_data)

        ret['objects'].append(obj_data)

    ret['total_objects'] = len(proxies)
    ret['first_object_nr'] = first_item_nr
    last_object_nr = first_item_nr + len(page_proxies)
    if last_object_nr > ret['total_objects']:
        last_object_nr = ret['total_objects']
    ret['last_object_nr'] = last_object_nr

    if debug_mode:
        logger.info("{0} objects returned".format(len(ret['objects'])))
    return ret
Example #16
def upgrade(tool):
    # Hack to prevent out-of-date upgrading
    # Related: PR #1484
    # https://github.com/bikalabs/Bika-LIMS/pull/1484
    from lims.upgrade import skip_pre315
    if skip_pre315(aq_parent(aq_inner(tool))):
        return True

    portal = aq_parent(aq_inner(tool))

    at = getToolByName(portal, 'archetype_tool')
    bc = getToolByName(portal, 'bika_catalog')
    bac = getToolByName(portal, 'bika_analysis_catalog')
    bsc = getToolByName(portal, 'bika_setup_catalog')
    pc = getToolByName(portal, 'portal_catalog')
    portal_catalog = getToolByName(portal, 'portal_catalog')
    portal_groups = portal.portal_groups
    setup = portal.portal_setup
    typestool = getToolByName(portal, 'portal_types')
    wf = getToolByName(portal, 'portal_workflow')

    # Update all tools in which changes have been made
    setup.runImportStepFromProfile('profile-bika.lims:default',
                                   'propertiestool')
    setup.runImportStepFromProfile('profile-bika.lims:default', 'typeinfo')
    setup.runImportStepFromProfile('profile-bika.lims:default',
                                   'repositorytool')
    setup.runImportStepFromProfile('profile-bika.lims:default', 'workflow')
    setup.runImportStepFromProfile('profile-bika.lims:default', 'workflow-csv')
    setup.runImportStepFromProfile('profile-bika.lims:default', 'factorytool')
    setup.runImportStepFromProfile('profile-bika.lims:default', 'jsregistry')
    setup.runImportStepFromProfile('profile-bika.lims:default',
                                   'portlets',
                                   run_dependencies=False)
    setup.runImportStepFromProfile('profile-bika.lims:default', 'viewlets')
    setup.runImportStepFromProfile('profile-plone.app.jquery:default',
                                   'jsregistry')

    # Add RegulatoryInspectors group and RegulatoryInspector role.
    # Fix permissions: LabClerks don't see analysis results
    role = 'RegulatoryInspector'
    group = 'RegulatoryInspectors'
    if role not in portal.acl_users.portal_role_manager.listRoleIds():
        portal.acl_users.portal_role_manager.addRole(role)
        portal._addRole(role)

    if group not in portal_groups.listGroupIds():
        portal_groups.addGroup('RegulatoryInspectors',
                               title="Regulatory Inspectors",
                               roles=[
                                   'Member',
                                   'RegulatoryInspector',
                               ])
    else:
        portal_groups.setRolesForGroup('RegulatoryInspectors', [
            'Member',
            'RegulatoryInspector',
        ])

    # Add SampleConditions
    at.setCatalogsByType('SampleCondition', ['bika_setup_catalog'])
    if not portal['bika_setup'].get('bika_sampleconditions'):
        typestool.constructContent(type_name="SampleConditions",
                                   container=portal['bika_setup'],
                                   id='bika_sampleconditions',
                                   title='Sample Conditions')
    obj = portal['bika_setup']['bika_sampleconditions']
    obj.unmarkCreationFlag()
    obj.reindexObject()
    # Add SampleCondition to all Sample objects
    proxies = portal_catalog(portal_type="Sample")
    samples = (proxy.getObject() for proxy in proxies)
    for sample in samples:
        sample.setSampleCondition(None)

    # Some catalog indexes were added or modified
    if 'getSampleTypeTitle' in bc.indexes():
        bc.delIndex('getSampleTypeTitle')
    if 'getSamplePointTitle' in bc.indexes():
        bc.delIndex('getSamplePointTitle')
    bc.addIndex('getSampleTypeTitle', 'KeywordIndex')
    bc.addIndex('getSamplePointTitle', 'KeywordIndex')

    if 'getClientSampleID' not in pc.indexes():
        pc.addIndex('getClientSampleID', 'FieldIndex')
        pc.addColumn('getClientSampleID')
    if 'getParentUID' not in pc.indexes():
        pc.addIndex('getParentUID', 'FieldIndex')
        pc.addColumn('getParentUID')
    if 'getReferenceAnalysesGroupID' not in bac.indexes():
        bac.addIndex('getReferenceAnalysesGroupID', 'FieldIndex')
        bac.addColumn('getReferenceAnalysesGroupID')

    # Fix broken template partition containers
    for p in bsc(portal_type='ARTemplate'):
        o = p.getObject()
        parts = o.getPartitions()
        for i, part in enumerate(parts):
            if 'container_uid' in part:
                container = bsc(portal_type='Container',
                                UID=part['container_uid'])
                if container:
                    container = container[0].getObject()
                    parts[i]['Container'] = container.Title()
            if 'preservation_uid' in part:
                preservation = bsc(portal_type='Preservation',
                                   UID=part['preservation_uid'])
                if preservation:
                    preservation = preservation[0].getObject()
                    parts[i]['Preservation'] = preservation.Title()

    # Populate ReferenceAnalysesGroupIDs for ReferenceAnalyses
    # https://github.com/bikalabs/Bika-LIMS/issues/931
    wss = bc(portal_type='Worksheet')
    for ws in wss:
        ws = ws.getObject()
        wsangroups = {}
        codes = {}

        # Reference analyses and their duplicates
        refanalyses = [
            an for an in ws.getAnalyses()
            if an.portal_type == 'ReferenceAnalysis'
            or an.portal_type == 'DuplicateAnalysis'
        ]
        layout = ws.getLayout()
        for lay in layout:
            for an in refanalyses:
                if lay['analysis_uid'] == an.UID():
                    position = lay['position']
                    if position not in wsangroups.keys():
                        wsangroups[position] = []
                    wsangroups[position].append(an)

        for position, wsgroup in wsangroups.iteritems():
            analysis = wsgroup[0]
            if analysis.portal_type == 'ReferenceAnalysis':
                refsampleid = wsgroup[0].aq_parent.id
            else:
                # Duplicate
                _analysis = wsgroup[0].getAnalysis()
                if _analysis.portal_type == 'ReferenceAnalysis':
                    refsampleid = _analysis.aq_parent.id
                else:
                    refsampleid = wsgroup[0].getSamplePartition().id
            codre = refsampleid
            codws = '%s_%s' % (refsampleid, ws.UID())
            codgr = '%s_%s_%s' % (refsampleid, ws.UID(), position)
            if codgr in codes.keys():
                postfix = codes[codgr]
            elif codws in codes.keys():
                postfix = codes[codws]
                codes[codgr] = postfix
                codes[codws] = postfix + 1
            elif codre in codes.keys():
                postfix = codes[codre]
                codes[codgr] = postfix
                codes[codws] = postfix + 1
                codes[codre] = postfix + 1
            else:
                postfix = 1
                codes[codre] = postfix + 1

            for an in wsgroup:
                if an.portal_type == 'DuplicateAnalysis':
                    postfix = str(postfix).zfill(2)
                    refgid = '%s-D%s' % (refsampleid, postfix)
                else:
                    postfix = str(postfix).zfill(3)
                    refgid = '%s-%s' % (refsampleid, postfix)
                an.setReferenceAnalysesGroupID(refgid)

    # Re-import the default permission maps
    gen = BikaGenerator()
    gen.setupPermissions(portal)

    logger.info("Updating workflow role/permission mappings")
    wf.updateRoleMappings()

    logger.info("Reindex added indexes in portal_catalog")
    pc.manage_reindexIndex(ids=[
        'getClientSampleID',
        'getParentUID',
    ])

    logger.info("Reindex added indexes in bika_analysis_catalog")
    bac.manage_reindexIndex(ids=[
        'getReferenceAnalysesGroupID',
    ])

    logger.info("Reindex added indexes in bika_catalog")
    bc.manage_reindexIndex(ids=[
        'getSampleTypeTitle',
        'getSamplePointTitle',
    ])

    return True
Example #17
    def __call__(self):
        form = self.request.form
        portal = getSite()
        workbook = None

        if 'setupexisting' in form and 'existing' in form and form['existing']:
            fn = form['existing'].split(":")
            self.dataset_project = fn[0]
            self.dataset_name = fn[1]
            path = 'setupdata/%s/%s.xlsx' % \
                (self.dataset_name, self.dataset_name)
            filename = resource_filename(self.dataset_project, path)
            try:
                workbook = load_workbook(
                    filename=filename)  # , use_iterators=True)
            except AttributeError:
                print ""
                print traceback.format_exc()
                print "Error while loading ", path

        elif 'setupfile' in form and 'file' in form and form[
                'file'] and 'projectname' in form and form['projectname']:
            self.dataset_project = form['projectname']
            # Write the upload to a named temporary file so load_workbook
            # can open it from disk (tempfile.mktemp is race-prone)
            with tempfile.NamedTemporaryFile(suffix='.xlsx',
                                             delete=False) as f:
                f.write(form['file'].read())
            workbook = load_workbook(filename=f.name)  # , use_iterators=True)
            self.dataset_name = 'uploaded'

        assert (workbook is not None)

        adapters = [[name, adapter] for name, adapter in list(
            getAdapters((self.context, ), ISetupDataImporter))]
        for sheetname in workbook.get_sheet_names():
            transaction.savepoint()
            ad_name = sheetname.replace(" ", "_")
            if ad_name in [a[0] for a in adapters]:
                adapter = [a[1] for a in adapters if a[0] == ad_name][0]
                adapter(self, workbook, self.dataset_project,
                        self.dataset_name)
                adapters = [a for a in adapters if a[0] != ad_name]
        for name, adapter in adapters:
            transaction.savepoint()
            adapter(self, workbook, self.dataset_project, self.dataset_name)

        check = len(self.deferred)
        while len(self.deferred) > 0:
            new = self.solve_deferred()
            logger.info("solved %s of %s deferred references" %
                        (check - new, check))
            if new == check:
                raise Exception("%s unsolved deferred references: %s" %
                                (len(self.deferred), self.deferred))
            check = new

        logger.info("Rebuilding bika_setup_catalog")
        bsc = getToolByName(self.context, 'bika_setup_catalog')
        bsc.clearFindAndRebuild()
        logger.info("Rebuilding bika_catalog")
        bc = getToolByName(self.context, 'bika_catalog')
        bc.clearFindAndRebuild()
        logger.info("Rebuilding bika_analysis_catalog")
        bac = getToolByName(self.context, 'bika_analysis_catalog')
        bac.clearFindAndRebuild()

        message = PMF("Changes saved.")
        self.context.plone_utils.addPortalMessage(message)
        self.request.RESPONSE.redirect(portal.absolute_url())
Example #18
    def set(self, instance, value, **kwargs):
        """ Mutator. """

        rc = getToolByName(instance, REFERENCE_CATALOG)
        targetUIDs = [ref.targetUID for ref in
                      rc.getReferences(instance, self.relationship)]

        # empty value
        if not value:
            value = ()
        # list with one empty item
        if isinstance(value, (list, tuple)) and len(value) == 1 \
                and not value[0]:
            value = ()

        if not value and not targetUIDs:
            return

        if not isinstance(value, (list, tuple)):
            value = (value,)
        elif not self.multiValued and len(value) > 1:
            raise ValueError("Multiple values given for single valued field %r" % self)

        ts = getToolByName(instance, "translation_service").translate

        # Convert objects to UIDs and UIDs to objects; build a
        # uid -> target object mapping along the way
        uids = []
        targets = {}
        for v in value:
            if isinstance(v, basestring):
                uids.append(v)
                targets[v] = rc.lookupObject(v)
            elif hasattr(v, 'UID'):
                target_uid = callable(v.UID) and v.UID() or v.UID
                uids.append(target_uid)
                targets[target_uid] = v
            else:
                logger.info("Target has no UID: %s/%s" % (v, value))

        sub = [t for t in targetUIDs if t not in uids]
        add = [v for v in uids if v and v not in targetUIDs]

        newuids = [t for t in list(targetUIDs) + list(uids) if t not in sub]
        for uid in newuids:
            # update version_id of all existing references that aren't
            # about to be removed anyway (contents of sub)
            version_id = hasattr(targets[uid], 'version_id') and \
                targets[uid].version_id or None
            if version_id is None:
                # attempt initial save of unversioned targets
                pr = getToolByName(instance, 'portal_repository')
                if pr.isVersionable(targets[uid]):
                    pr.save(obj=targets[uid],
                            comment=to_utf8(ts(_("Initial revision"))))
            if not hasattr(instance, 'reference_versions'):
                instance.reference_versions = {}
            if not hasattr(targets[uid], 'version_id'):
                targets[uid].version_id = None
            instance.reference_versions[uid] = targets[uid].version_id

        # tweak keyword arguments for addReference
        addRef_kw = kwargs.copy()
        addRef_kw.setdefault('referenceClass', self.referenceClass)
        if 'schema' in addRef_kw:
            del addRef_kw['schema']
        for uid in add:
            __traceback_info__ = (instance, uid, value, targetUIDs)
            # throws IndexError if uid is invalid
            rc.addReference(instance, uid, self.relationship, **addRef_kw)

        for uid in sub:
            rc.deleteReference(instance, uid, self.relationship)

        if self.referencesSortable:
            if not hasattr(aq_base(instance), 'at_ordered_refs'):
                instance.at_ordered_refs = {}

            instance.at_ordered_refs[self.relationship] = \
                tuple(filter(None, uids))

        if self.callStorageOnSet:
            # If this option is set, the reference field's values are also
            # written to the storage, even if the field itself never uses
            # it (e.g. to store the reference UIDs in an SQL column)
            ObjectField.set(self, instance, self.getRaw(instance), **kwargs)
Example #19
    def _process_request(self):
        """Scan request for parameters and configure class attributes
        accordingly.  Setup AdvancedQuery or catalog contentFilter.

        Request parameters:
        <form_id>_sort_on:          list items are sorted on this key
        <form_id>_manual_sort_on:   no index - sort with python
        <form_id>_pagesize:         number of items
        <form_id>_pagenumber:       page number
        <form_id>_filter:           A string, will be regex matched against
                                    indexes in <form_id>_filter_indexes
        <form_id>_filter_indexes:   list of index names which will be searched
                                    for the value of <form_id>_filter

        <form_id>_<index_name>:     Any index name can be used after <form_id>_.

            any request variable named ${form_id}_{index_name} will pass its
            value to that index in self.contentFilter.

            All conditions using ${form_id}_{index_name} are searched with AND.

            The parameter value will be matched with regexp if a FieldIndex or
            TextIndex.  Else, AdvancedQuery.Generic is used.
        """
        form_id = self.form_id
        form = self.request.form
        workflow = getToolByName(self.context, 'portal_workflow')
        catalog = getToolByName(self.context, self.catalog)

        # Some ajax calls duplicate form values (cause unknown); keep only
        # the first value
        if self.request.form:
            for key, value in self.request.form.items():
                if isinstance(value, list):
                    self.request.form[key] = self.request.form[key][0]

        # If table_only specifies another form_id, then we abort.
        # this way, a single table among many can request a redraw,
        # and only its content will be rendered.
        if form_id not in self.request.get('table_only', form_id):
            return ''

        # contentFilter is expected in every self.review_state.
        for k, v in self.review_state['contentFilter'].items():
            self.contentFilter[k] = v
        # sort on
        self.sort_on = self.request.get(form_id + '_sort_on', None)
        # manual_sort_on: only sort the current batch of items
        # this is a compromise for sorting without column indexes
        self.manual_sort_on = None
        if self.sort_on \
           and self.sort_on in self.columns.keys() \
           and self.columns[self.sort_on].get('index', None):
            idx = self.columns[self.sort_on].get('index', self.sort_on)
            self.contentFilter['sort_on'] = idx
        else:
            if self.sort_on:
                self.manual_sort_on = self.sort_on
                if 'sort_on' in self.contentFilter:
                    del self.contentFilter['sort_on']

        # sort order
        self.sort_order = self.request.get(form_id + '_sort_order', '')
        if self.sort_order:
            self.contentFilter['sort_order'] = self.sort_order
        else:
            if 'sort_order' not in self.contentFilter:
                self.sort_order = 'ascending'
                self.contentFilter['sort_order'] = 'ascending'
                self.request.set(form_id+'_sort_order', 'ascending')
            else:
                self.sort_order = self.contentFilter['sort_order']
        if self.manual_sort_on:
            del self.contentFilter['sort_order']

        # pagesize
        pagesize = self.request.get(form_id + '_pagesize', self.pagesize)
        if isinstance(pagesize, (list, tuple)):
            pagesize = pagesize[0]
        try:
            pagesize = int(pagesize)
        except (TypeError, ValueError):
            pagesize = self.pagesize = 10
        self.pagesize = pagesize
        # Plone's batching wants this variable:
        self.request.set('pagesize', self.pagesize)
        # and we want to make our choice remembered in bika_listing also
        self.request.set(self.form_id + '_pagesize', self.pagesize)

        # pagenumber
        self.pagenumber = int(
            self.request.get(form_id + '_pagenumber', self.pagenumber))
        # Plone's batching wants this variable:
        self.request.set('pagenumber', self.pagenumber)

        # index filters.
        self.And = []
        self.Or = []
        ##logger.info("contentFilter: %s"%self.contentFilter)
        for k, v in self.columns.items():
            if 'index' not in v \
               or v['index'] == 'review_state' \
               or v['index'] in self.filter_indexes:
                continue
            self.filter_indexes.append(v['index'])
        ##logger.info("Filter indexes: %s"%self.filter_indexes)

        # any request variable named ${form_id}_{index_name}
        # will pass its value to that index in self.contentFilter.
        # all conditions using ${form_id}_{index_name} are searched with AND
        for index in self.filter_indexes:
            idx = catalog.Indexes.get(index, None)
            if not idx:
                logger.debug("index named '%s' not found in %s.  "
                             "(Perhaps the index is still empty)." %
                            (index, self.catalog))
                continue
            request_key = "%s_%s" % (form_id, index)
            value = self.request.get(request_key, '')
            if len(value) > 1:
                ##logger.info("And: %s=%s"%(index, value))
                if idx.meta_type in ('ZCTextIndex', 'FieldIndex'):
                    self.And.append(MatchRegexp(index, value))
                elif idx.meta_type == 'DateIndex':
                    logger.info("Unhandled DateIndex search on '%s'" % index)
                    continue
                else:
                    self.Or.append(Generic(index, value))

        # if there's a ${form_id}_filter in request, then all indexes
        # are searched for its value.
        # ${form_id}_filter is searched with OR against all indexes
        request_key = "%s_filter" % form_id
        value = self.request.get(request_key, '')
        if type(value) in (list, tuple):
            value = value[0]
        if len(value) > 1:
            for index in self.filter_indexes:
                idx = catalog.Indexes.get(index, None)
                if not idx:
                    logger.debug("index named '%s' not found in %s.  "
                                 "(Perhaps the index is still empty)." %
                                 (index, self.catalog))
                    continue
                ##logger.info("Or: %s=%s"%(index, value))
                if idx.meta_type in ('ZCTextIndex', 'FieldIndex'):
                    self.Or.append(MatchRegexp(index, value))
                    self.expand_all_categories = True
                    # https://github.com/bikalabs/Bika-LIMS/issues/1069
                    vals = value.split('-')
                    if len(vals) > 2:
                        valroot = vals[0]
                        for i in range(1, len(vals)):
                            valroot = '%s-%s' % (valroot, vals[i])
                            self.Or.append(MatchRegexp(index, valroot+'-*'))
                            self.expand_all_categories = True
                elif idx.meta_type == 'DateIndex':
                    if type(value) in (list, tuple):
                        value = value[0]
                    if value.find(":") > -1:
                        try:
                            lohi = [DateTime(x) for x in value.split(":")]
                        except:
                            logger.info("Error (And, DateIndex='%s', term='%s')"%(index,value))
                        self.Or.append(Between(index, lohi[0], lohi[1]))
                        self.expand_all_categories = True
                    else:
                        try:
                            self.Or.append(Eq(index, DateTime(value)))
                            self.expand_all_categories = True
                        except Exception:
                            logger.info("Error (Or, DateIndex='%s', "
                                        "term='%s')" % (index, value))
                else:
                    self.Or.append(Generic(index, value))
                    self.expand_all_categories = True
            self.Or.append(MatchRegexp('review_state', value))

        # get toggle_cols cookie value
        # and modify self.columns[]['toggle'] to match.
        toggle_cols = self.get_toggle_cols()
        for col in self.columns.keys():
            if col in toggle_cols:
                self.columns[col]['toggle'] = True
            else:
                self.columns[col]['toggle'] = False
Example #20
def read(context, request):
    tag = AuthenticatorView(context, request).authenticator()
    pattern = r'<input .*name="(\w+)".*value="(\w+)"'
    _authenticator = re.match(pattern, tag).groups()[1]

    ret = {
        "url": router.url_for("read", force_external=True),
        "success": True,
        "error": False,
        "objects": [],
        "_authenticator": _authenticator,
    }
    debug_mode = True  # TODO: restore App.config.getConfiguration().debug_mode
    catalog_name = request.get("catalog_name", "portal_catalog")
    if not catalog_name:
        raise ValueError("bad or missing catalog_name: " + catalog_name)
    catalog = getToolByName(context, catalog_name)
    indexes = catalog.indexes()

    contentFilter = {}
    for index in indexes:
        if index in request:
            if index == 'review_state' and "{" in request[index]:
                continue
            contentFilter[index] = safe_unicode(request[index])
        if "%s[]" % index in request:
            value = request["%s[]" % index]
            contentFilter[index] = [safe_unicode(v) for v in value]

    if 'limit' in request:
        try:
            contentFilter['sort_limit'] = int(request["limit"])
        except ValueError:
            pass
    sort_on = request.get('sort_on', 'id')
    contentFilter['sort_on'] = sort_on
    # sort order
    sort_order = request.get('sort_order', '')
    if sort_order:
        contentFilter['sort_order'] = sort_order
    else:
        sort_order = 'ascending'
        contentFilter['sort_order'] = 'ascending'

    include_fields = get_include_fields(request)
    if debug_mode:
        logger.info("contentFilter: " + str(contentFilter))

    # Get matching objects from catalog
    proxies = catalog(**contentFilter)

    # batching items
    page_nr = int(request.get("page_nr", 0))
    try:
        page_size = int(request.get("page_size", 10))
    except ValueError:
        page_size = 10
    # page_size == 0: show all
    if page_size == 0:
        page_size = len(proxies)
    first_item_nr = page_size * page_nr
    if first_item_nr > len(proxies):
        first_item_nr = 0
    page_proxies = proxies[first_item_nr:first_item_nr + page_size]
    for proxy in page_proxies:
        obj_data = {}

        # Place all proxy attributes into the result.
        obj_data.update(load_brain_metadata(proxy, include_fields))

        # Place all schema fields into the result.
        obj = proxy.getObject()
        obj_data.update(load_field_values(obj, include_fields))

        obj_data['path'] = "/".join(obj.getPhysicalPath())

        # call any adapters that care to modify this data.
        adapters = getAdapters((obj, ), IJSONReadExtender)
        for name, adapter in adapters:
            adapter(request, obj_data)

        ret['objects'].append(obj_data)

    ret['total_objects'] = len(proxies)
    ret['first_object_nr'] = first_item_nr
    last_object_nr = first_item_nr + len(page_proxies)
    if last_object_nr > ret['total_objects']:
        last_object_nr = ret['total_objects']
    ret['last_object_nr'] = last_object_nr

    if debug_mode:
        logger.info("{0} objects returned".format(len(ret['objects'])))
    return ret
Example #21
    def set(self, instance, value, **kwargs):
        """ Mutator. """

        rc = getToolByName(instance, REFERENCE_CATALOG)
        targetUIDs = [
            ref.targetUID
            for ref in rc.getReferences(instance, self.relationship)
        ]

        # empty value
        if not value:
            value = ()
        # list with one empty item
        if isinstance(value, (list, tuple)) and len(value) == 1 \
                and not value[0]:
            value = ()

        if not value and not targetUIDs:
            return

        if not isinstance(value, (list, tuple)):
            value = (value,)
        elif not self.multiValued and len(value) > 1:
            raise ValueError(
                "Multiple values given for single valued field %r" % self)

        ts = getToolByName(instance, "translation_service").translate

        # Convert objects to UIDs and UIDs to objects; build a
        # uid -> target object mapping along the way
        uids = []
        targets = {}
        for v in value:
            if isinstance(v, basestring):
                uids.append(v)
                targets[v] = rc.lookupObject(v)
            elif hasattr(v, 'UID'):
                target_uid = callable(v.UID) and v.UID() or v.UID
                uids.append(target_uid)
                targets[target_uid] = v
            else:
                logger.info("Target has no UID: %s/%s" % (v, value))

        sub = [t for t in targetUIDs if t not in uids]
        add = [v for v in uids if v and v not in targetUIDs]

        newuids = [t for t in list(targetUIDs) + list(uids) if t not in sub]
        for uid in newuids:
            # update version_id of all existing references that aren't
            # about to be removed anyway (contents of sub)
            version_id = hasattr(targets[uid], 'version_id') and \
                targets[uid].version_id or None
            if version_id is None:
                # attempt initial save of unversioned targets
                pr = getToolByName(instance, 'portal_repository')
                if pr.isVersionable(targets[uid]):
                    pr.save(obj=targets[uid],
                            comment=to_utf8(ts(_("Initial revision"))))
            if not hasattr(instance, 'reference_versions'):
                instance.reference_versions = {}
            if not hasattr(targets[uid], 'version_id'):
                targets[uid].version_id = None
            instance.reference_versions[uid] = targets[uid].version_id

        # tweak keyword arguments for addReference
        addRef_kw = kwargs.copy()
        addRef_kw.setdefault('referenceClass', self.referenceClass)
        if 'schema' in addRef_kw:
            del addRef_kw['schema']
        for uid in add:
            __traceback_info__ = (instance, uid, value, targetUIDs)
            # throws IndexError if uid is invalid
            rc.addReference(instance, uid, self.relationship, **addRef_kw)

        for uid in sub:
            rc.deleteReference(instance, uid, self.relationship)

        if self.referencesSortable:
            if not hasattr(aq_base(instance), 'at_ordered_refs'):
                instance.at_ordered_refs = {}

            instance.at_ordered_refs[self.relationship] = \
                tuple(filter(None, uids))

        if self.callStorageOnSet:
            # If this option is set, the reference field's values are also
            # written to the storage, even if the field itself never uses
            # it (e.g. to store the reference UIDs in an SQL column)
            ObjectField.set(self, instance, self.getRaw(instance), **kwargs)
Example #22
    def __call__(self):
        form = self.request.form
        portal = getSite()
        workbook = None

        if 'setupexisting' in form and 'existing' in form and form['existing']:
            fn = form['existing'].split(":")
            self.dataset_project = fn[0]
            self.dataset_name = fn[1]
            path = 'setupdata/%s/%s.xlsx' % \
                (self.dataset_name, self.dataset_name)
            filename = resource_filename(self.dataset_project, path)
            try:
                workbook = load_workbook(
                    filename=filename)  # , use_iterators=True)
            except AttributeError:
                print ""
                print traceback.format_exc()
                print "Error while loading ", path

        elif 'setupfile' in form and 'file' in form and form['file'] \
                and 'projectname' in form and form['projectname']:
            self.dataset_project = form['projectname']
            # Write the upload to a named temporary file so load_workbook
            # can open it from disk (tempfile.mktemp is race-prone)
            with tempfile.NamedTemporaryFile(suffix='.xlsx',
                                             delete=False) as f:
                f.write(form['file'].read())
            workbook = load_workbook(filename=f.name)  # , use_iterators=True)
            self.dataset_name = 'uploaded'

        assert(workbook is not None)

        adapters = [[name, adapter]
                    for name, adapter
                    in list(getAdapters((self.context, ), ISetupDataImporter))]
        for sheetname in workbook.get_sheet_names():
            transaction.savepoint()
            ad_name = sheetname.replace(" ", "_")
            if ad_name in [a[0] for a in adapters]:
                adapter = [a[1] for a in adapters if a[0] == ad_name][0]
                adapter(self, workbook, self.dataset_project, self.dataset_name)
                adapters = [a for a in adapters if a[0] != ad_name]
        for name, adapter in adapters:
            transaction.savepoint()
            adapter(self, workbook, self.dataset_project, self.dataset_name)

        check = len(self.deferred)
        while len(self.deferred) > 0:
            new = self.solve_deferred()
            logger.info("solved %s of %s deferred references" % (
                check - new, check))
            if new == check:
                raise Exception("%s unsolved deferred references: %s" % (
                    len(self.deferred), self.deferred))
            check = new

        logger.info("Rebuilding bika_setup_catalog")
        bsc = getToolByName(self.context, 'bika_setup_catalog')
        bsc.clearFindAndRebuild()
        logger.info("Rebuilding bika_catalog")
        bc = getToolByName(self.context, 'bika_catalog')
        bc.clearFindAndRebuild()
        logger.info("Rebuilding bika_analysis_catalog")
        bac = getToolByName(self.context, 'bika_analysis_catalog')
        bac.clearFindAndRebuild()

        message = PMF("Changes saved.")
        self.context.plone_utils.addPortalMessage(message)
        self.request.RESPONSE.redirect(portal.absolute_url())