Example #1
# Module-level imports this handler relies on. The module paths below are the
# standard zope.component / GenericSetup / EEA faceted navigation locations and
# are assumed here, since the original module header is not part of the snippet.
import logging

from zope.component import getMultiAdapter, getUtility
from eea.facetednavigation.layout.interfaces import IFacetedLayout
from Products.GenericSetup.context import SnapshotImportContext
from Products.GenericSetup.interfaces import IBody
from org.bccvl.site import defaults

LOG = logging.getLogger(__name__)


def setupFacets(context, logger=None):
    if logger is None:
        logger = LOG
    logger.info('BCCVL site facet setup handler')

    # only run for this product
    if context.readDataFile('org.bccvl.site.marker.txt') is None:
        return
    portal = context.getSite()

    from org.bccvl.site.faceted.interfaces import IFacetConfigUtility
    from org.bccvl.site.faceted.tool import import_facet_config

    def _setup_facets(content, config, layout=None):
        # enable faceting
        subtyper = getMultiAdapter((content, content.REQUEST),
                                   name=u'faceted_subtyper')
        subtyper.enable()
        # update default layout if requested
        if layout:
            IFacetedLayout(content).update_layout(layout)
        # load facet config
        widgets = getMultiAdapter((content, content.REQUEST),
                                  name=config)
        xml = widgets()
        environ = SnapshotImportContext(content, 'utf-8')
        importer = getMultiAdapter((content, environ), IBody)
        # assigning the XML to .body triggers the GenericSetup import of the facet config
        importer.body = xml

    # setup datasets search facets
    datasets = portal[defaults.DATASETS_FOLDER_ID]
    _setup_facets(datasets, 'datasets_default.xml', 'faceted-table-items')

    # go through all facet setups in portal_facetconfig and update them as well
    facet_tool = getUtility(IFacetConfigUtility)
    for obj in facet_tool.types(proxy=False):
        import_facet_config(obj)
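
setupFacets is a GenericSetup import-step handler, so it normally runs as part of
importing the package's profile rather than being called directly. Below is a
minimal sketch of triggering it from a Zope debug shell; the site id 'bccvl' and
the profile id 'profile-org.bccvl.site:default' are assumptions, not taken from
the snippet.

from zope.component.hooks import setSite

# `app` is the Zope application root provided by `bin/instance debug`
portal = app['bccvl']          # assumed site id
setSite(portal)                # make local component lookups use this site

setup = portal.portal_setup
# running the full profile executes all registered import steps, including setupFacets
setup.runAllImportStepsFromProfile('profile-org.bccvl.site:default')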
Example #2
# Module-level names this upgrade step relies on. plone.api, zope.component and
# logging are standard; PROFILE_ID is assumed to point at the package's default
# GenericSetup profile, since its actual value is not shown in the snippet.
import logging

from plone import api
from zope.component import getUtility

LOG = logging.getLogger(__name__)
PROFILE_ID = 'profile-org.bccvl.site:default'  # assumed profile id


def upgrade_220_230_1(context, logger=None):
    if logger is None:
        logger = LOG
    # Run GS steps
    portal = api.portal.get()
    setup = api.portal.get_tool('portal_setup')
    setup.runImportStepFromProfile(PROFILE_ID, 'org.bccvl.site.content')
    setup.runImportStepFromProfile(PROFILE_ID, 'plone.app.registry')
    setup.runImportStepFromProfile(PROFILE_ID, 'actions')
    pc = api.portal.get_tool('portal_catalog')

    # search all experiments and update each job object with info from the experiment
    # -> delete job info on experiment
    LOG.info('Migrating job data for experiments')
    EXP_TYPES = ['org.bccvl.content.sdmexperiment',
                 'org.bccvl.content.projectionexperiment',
                 'org.bccvl.content.biodiverseexperiment',
                 'org.bccvl.content.ensemble',
                 'org.bccvl.content.speciestraitsexperiment'
                 ]

    from org.bccvl.site.job.interfaces import IJobTracker
    import json
    for brain in pc.searchResults(portal_type=EXP_TYPES):
        # Update job with process statistic i.e. rusage
        for result in brain.getObject().values():
            if 'pstats.json' not in result:
                continue

            jt = IJobTracker(result)
            job = None
            try:
                job = jt.get_job()
            except Exception as e:
                LOG.info('Could not resolve %s: %s', result, e)
            if not job:
                continue

            pstats = result['pstats.json']
            if hasattr(pstats, 'file'):
                job.rusage = json.loads(pstats.file.data)
                del result['pstats.json']

    # Set up session cookie settings
    sess = portal.acl_users.session
    sess.manage_changeProperties(
        mod_auth_tkt=True,
        secure=True
    )

    # update facet configurations
    from org.bccvl.site.faceted.interfaces import IFacetConfigUtility
    from org.bccvl.site.faceted.tool import import_facet_config
    fct = getUtility(IFacetConfigUtility)
    for cfgobj in fct.types():
        LOG.info("Import facet config for %s", cfgobj.id)
        import_facet_config(cfgobj)

    # set cookie secret from celery configuration
    from org.bccvl.tasks.celery import app
    cookie_cfg = app.conf.get('bccvl', {}).get('cookie', {})
    if cookie_cfg.get('secret', None):
        sess._shared_secret = cookie_cfg.get('secret').encode('utf-8')
        sess.manage_changeProperties(
            mod_auth_tkt=True,
            secure=cookie_cfg.get('secure', True)
        )
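
upgrade_220_230_1 is shaped like a GenericSetup upgrade step, which receives the
portal_setup tool as its context when run from the ZMI or the add-ons control
panel. Running it by hand from a debug shell would look roughly like the sketch
below; the site id and logger name are assumptions.

import logging
import transaction
from zope.component.hooks import setSite

portal = app['bccvl']  # `app` is the Zope root from `bin/instance debug`
setSite(portal)        # needed so api.portal.get() resolves the site

# upgrade steps are passed the portal_setup tool as `context`
upgrade_220_230_1(portal.portal_setup, logger=logging.getLogger('bccvl.upgrade'))

# persist the catalog, session and registry changes made by the step
transaction.commit()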