# Ejemplo n.º 1
def setupTools(context, logger=None):
    """Install BCCVL-specific site tools.

    GenericSetup import handler: sets up the job catalog and the
    user-annotation storage utility on the portal.  Runs only when the
    imported profile carries the BCCVL marker file.

    :param context: GenericSetup import context
    :param logger: optional logger; falls back to the module LOG
    """
    log = LOG if logger is None else logger
    log.info('BCCVL site tools handler')
    # Guard: bail out unless this profile is ours (marker file present).
    if context.readDataFile('org.bccvl.site.marker.txt') is None:
        return
    site = context.getSite()

    # Install the job catalog on the portal.
    from org.bccvl.site.job.catalog import setup_job_catalog
    setup_job_catalog(site)

    # Register the user-annotation storage utility.
    from org.bccvl.site.userannotation.utility import init_user_annotation
    init_user_annotation()
# Ejemplo n.º 2
def upgrade_200_210_1(context, logger=None):
    """Upgrade step 2.0.0 -> 2.1.0 (part 1).

    Performs, in order:
      1. registry cleanup of stale add-on records,
      2. re-import of selected GS profile steps,
      3. back-fill of the ``function`` job param on old projection experiments,
      4. migration of per-object job annotations (datasets and experiments)
         into first-class job objects via ``IJobUtility``,
      5. normalisation of layer metadata on projection output datasets.

    Error-log filtering is disabled for the duration of the step so
    failures become visible, and restored at the end.

    :param context: upgrade-step context (unused directly; portal is
        resolved via ``api.portal.get()``)
    :param logger: optional logger; falls back to the module LOG
    """
    if logger is None:
        logger = LOG
    # Run GS steps
    portal = api.portal.get()

    # Do some registry cleanup: drop records left behind by removed add-ons.
    registry = getUtility(IRegistry)
    # Iterate over a snapshot of the keys — we delete while scanning.
    for key in list(registry.records.keys()):
        if key.startswith(('plone.app.folderui',
                           'dexterity.membrane',
                           'collective.embedly')):
            del registry.records[key]

    setup = api.portal.get_tool('portal_setup')
    setup.runImportStepFromProfile(PROFILE_ID, 'propertiestool')
    setup.runImportStepFromProfile(PROFILE_ID, 'typeinfo')
    setup.runImportStepFromProfile(PROFILE_ID, 'toolset')
    setup.runImportStepFromProfile(PROFILE_ID, 'controlpanel')
    setup.runImportStepFromProfile(PROFILE_ID, 'org.bccvl.site.content')
    setup.runImportStepFromProfile(PROFILE_ID, 'plone.app.registry')
    setup.runImportStepFromProfile(PROFILE_ID, 'workflow')

    # Make error logs visible while the upgrade runs; restored at the end.
    ignored_exceptions = portal.error_log._ignored_exceptions
    portal.error_log._ignored_exceptions = ()
    from org.bccvl.site.job.catalog import setup_job_catalog
    setup_job_catalog(portal)

    pc = api.portal.get_tool('portal_catalog')
    # Update job_params with algorithm used for Climate Change Experiments
    LOG.info('Updating job params of old projection experiments')
    for brain in pc.searchResults(portal_type='org.bccvl.content.projectionexperiment'):
        # go through all results
        for result in brain.getObject().values():
            if 'function' in result.job_params:
                continue
            # Add algorithm to job_params if missing: copy it from the
            # SDM dataset's parent experiment.
            try:
                sdmds = uuidToObject(
                    result.job_params['species_distribution_models'])
                algorithm = sdmds.__parent__.job_params['function']
                if algorithm:
                    result.job_params['function'] = algorithm
            except Exception as e:
                # best effort — a broken reference must not abort the upgrade
                LOG.warning("Can't add algorithm id to %s: %s", result, e)

    from org.bccvl.site.job.interfaces import IJobUtility
    jobtool = getUtility(IJobUtility)
    # search all datasets and create job object with infos from dataset
    # -> delete job info on dataset
    LOG.info('Migrating job data for datasets')
    DS_TYPES = ['org.bccvl.content.dataset',
                'org.bccvl.content.remotedataset']
    for brain in pc.searchResults(portal_type=DS_TYPES):
        job = jobtool.find_job_by_uuid(brain.UID)
        if job:
            # already processed ... skip
            continue
        try:
            ds = brain.getObject()
        except Exception as e:
            # stale catalog entry — skip, don't abort the whole migration
            LOG.warning('Could not resolve %s: %s', brain.getPath(), e)
            continue
        annots = IAnnotations(ds)
        old_job = annots.get('org.bccvl.state', None)
        if not old_job:
            # no job state here ... skip it
            continue
        # Copy the annotation-based job state onto a new job object.
        job = jobtool.new_job()
        job.created = ds.created()
        job.message = old_job['progress']['message']
        job.progress = old_job['progress']['state']
        job.state = old_job['state']
        job.title = old_job['name']
        job.taskid = old_job['taskid']
        job.userid = ds.getOwner().getId()
        job.content = IUUID(ds)
        job.type = brain.portal_type

        jobtool.reindex_job(job)
        # Drop the old annotation so a re-run treats this dataset as done.
        del annots['org.bccvl.state']

    # search all experiments and create job object with infos from experiment
    # -> delete job info on experiment
    LOG.info('Migrating job data for experiments')
    EXP_TYPES = ['org.bccvl.content.sdmexperiment',
                 'org.bccvl.content.projectionexperiment',
                 'org.bccvl.content.biodiverseexperiment',
                 'org.bccvl.content.ensemble',
                 'org.bccvl.content.speciestraitsexperiment'
                 ]
    for brain in pc.searchResults(portal_type=EXP_TYPES):
        # go through all results
        for result in brain.getObject().values():
            job = None
            try:
                job = jobtool.find_job_by_uuid(IUUID(result))
            except Exception as e:
                LOG.info('Could not resolve %s: %s', result, e)
                continue
            if job:
                # already processed ... skip
                continue
            annots = IAnnotations(result)
            old_job = annots.get('org.bccvl.state', None)
            if not old_job:
                # no job state here ... skip it
                continue
            job = jobtool.new_job()
            job.created = result.created()
            job.message = old_job['progress']['message']
            job.progress = old_job['progress']['state']
            job.state = old_job['state']
            job.title = old_job['name']
            job.taskid = old_job['taskid']
            job.userid = result.getOwner().getId()
            job.content = IUUID(result)
            job.type = brain.portal_type
            # BUGFIX: was ``result.job_paramsi`` (typo) which raised
            # AttributeError for every experiment result migrated here.
            job.function = result.job_params.get('function')
            if job.function:
                job.toolkit = IUUID(
                    portal[defaults.TOOLKITS_FOLDER_ID][job.function])

            jobtool.reindex_job(job)
            del annots['org.bccvl.state']

    LOG.info('Updating layer metadata for projection outputs')
    from org.bccvl.site.interfaces import IBCCVLMetadata
    for brain in pc.searchResults(BCCDataGenre=('DataGenreCP', 'DataGenreCP_ENVLOP', 'DataGenreFP', 'DataGenreClampingMask')):
        ds = brain.getObject()
        md = IBCCVLMetadata(ds)
        # md['layers'][ds.file.filename] ... there should be only one key
        # BUGFIX: dict views are not subscriptable in Python 3 — materialise
        # the keys before indexing keys[0].
        keys = list(md['layers'])
        if len(keys) != 1:
            LOG.warning(
                'Found multiple layer keys; do not know what to do: %s', ds.absolute_url())
            continue
        layermd = md['layers'][keys[0]]
        if 'layer' in layermd:
            # already converted
            continue
        if md['genre'] == 'DataGenreClampingMask':
            layerid = 'clamping_mask'
        else:  # DataGenreCP and DataGenreFP
            algorithm = ds.__parent__.job_params['function']
            if algorithm in ('circles', 'convhull', 'voronoiHull'):
                layerid = 'projection_binary'
            elif algorithm in ('maxent',):
                layerid = 'projection_suitability'
            else:
                layerid = 'projection_probability'
        layermd['layer'] = layerid
        md['layers'] = {layerid: layermd}

    # restore error_log filter
    portal.error_log._ignored_exceptions = ignored_exceptions
    LOG.info('Upgrade step finished')