Example 1
    def __call__(self, batch=1000, dryrun=False):
        """ find all btree-based folder below the context, potentially
            migrate them & provide some logging and statistics doing so """
        log = self.mklog()
        log('migrating btree-based folders from %r:' % self.context)
        real = timer()          # real time
        lap = timer()           # real lap time (for intermediate commits)
        cpu = timer(clock)      # cpu time
        processed = 0

        def checkPoint():
            msg = 'intermediate commit '\
                  '(%d objects processed, last batch in %s)...'
            log(msg % (processed, next(lap)))
            trx = get()
            trx.note(u'migrated %d btree-folders' % processed)
            trx.savepoint()

        cpi = checkpointIterator(checkPoint, batch)
        for path, obj in findObjects(self.context):
            if isinstance(obj, BTreeFolder):
                if self.migrate(obj):
                    processed += 1
                    next(cpi)
            self.postprocess(obj)

        checkPoint()                # commit last batch
        if dryrun:
            get().abort()           # abort on test-run...
        msg = 'processed %d object(s) in %s (%s cpu time).'
        msg = msg % (processed, next(real), next(cpu))
        log(msg)
        logger.info(msg)
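
The timer and checkpointIterator helpers consumed via next(...) above are generators; the names match the utilities in plone.app.folder.utils, which also provides findObjects, so that is the likely import source. A minimal sketch of how they plausibly work, for orientation only, not the library's exact code:

from time import time


def timer(func=time):
    """Generator yielding the formatted time elapsed since the previous
    next() call (or since creation, on the first call).

    Sketch only, assumed to approximate plone.app.folder.utils.timer;
    passing func=time.clock, as in cpu = timer(clock) above, measures
    CPU time instead of wall-clock time.
    """
    last = func()
    while True:
        now = func()
        elapsed, last = now - last, now
        yield '%.3fs' % elapsed


def checkpointIterator(function, interval=100):
    """Generator that invokes `function` once every `interval` next()
    calls; used above to trigger an intermediate commit per batch.

    Sketch only, assumed to approximate
    plone.app.folder.utils.checkpointIterator.
    """
    count = 0
    while True:
        count += 1
        if count % interval == 0:
            function()
        yield

Note that the example still calls checkPoint() once more after the loop, so the final, partial batch gets committed as well.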
Example 2
    def load_ars(self):
        # number of analysis requests to create per ARProfile
        counts = {'Digestible Energy': 10,
                  'Micro-Bio check': 10,
                  'Micro-Bio counts': 10,
                  'Trace Metals': 10}

        sampletypes = [p.getObject() for p in self.portal_catalog(portal_type="SampleType")]
        samplepoints = [p.getObject() for p in self.portal_catalog(portal_type="SamplePoint")]
        for client in self.context.clients.objectValues():
            contacts = [c for c in client.objectValues() if c.portal_type == 'Contact']
            for profile, count_ars in counts.items():
                profile = self.portal_catalog(portal_type='ARProfile',
                                              Title=profile)[0].getObject()
                profile_services = profile.getService()

                _ars = []
                t = timer()
                for i in range(1, count_ars+1):
                    sample_id = client.generateUniqueId('Sample')
                    client.invokeFactory(id=sample_id, type_name='Sample')
                    sample = client[sample_id]
                    sample.edit(
                        SampleID=sample_id,
                        SampleType=random.choice(sampletypes).Title(),
                        SamplePoint=random.choice(samplepoints).Title(),
                        ClientReference="".join([chr(random.randint(70, 90)) for r in range(5)]),
                        ClientSampleID="".join([chr(random.randint(70, 90)) for r in range(5)]),
                        LastARNumber=1,
                        DateSubmitted=DateTime(),
                        DateSampled=DateTime(),
                        SubmittedByUser=sample.current_user()
                    )
                    sample.unmarkCreationFlag()
                    ar_id = client.generateARUniqueId("AnalysisRequest", sample_id, 1)
                    client.invokeFactory("AnalysisRequest", ar_id)
                    ar = client[ar_id]
                    _ars.append(ar)
                    ar.edit(
                        RequestID=ar_id,
                        DateRequested=DateTime(),
                        Contact=contacts[0],
                        CCContact=contacts[1],  # assumes each demo client has at least two contacts
                        CCEmails="",
                        Sample=sample,
                        Profile=profile,
                        ClientOrderNumber="".join([chr(random.randint(70, 90)) for r in range(10)]),
                    )
                    ar.unmarkCreationFlag()
                    prices = {}
                    service_uids = []
                    for service in profile_services:
                        service_uids.append(service.UID())
                        prices[service.UID()] = service.getPrice()
                    ar.setAnalyses(service_uids, prices=prices)
                for i in range(5):
                    self.portal_workflow.doActionFor(_ars[i], 'receive')
                print(next(t))  # elapsed time for this profile's batch
                transaction.get().commit()
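
The "".join([chr(random.randint(70, 90)) for r in range(5)]) idiom above is repeated four times; a small helper would make the intent, a random run of uppercase letters, explicit. Hypothetical refactoring, not part of the original code:

import random
import string


def random_reference(length=5):
    """Return `length` random uppercase letters for demo reference fields.

    Hypothetical helper; note the original's chr(random.randint(70, 90))
    draws from the narrower range 'F'..'Z' rather than the full alphabet.
    """
    return "".join(random.choice(string.ascii_uppercase)
                   for _ in range(length))

The edit calls would then read, e.g., ClientReference=random_reference() and ClientOrderNumber=random_reference(10).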
Example 3
    def __call__(self):
        log = self.mklog()
        real = timer()

        self.install_folderish_types()
        log("collective.folderishtypes installed.")

        changed_base_classes = [
            "plone.app.contenttypes.content.Document",
            "plone.app.contenttypes.content.NewsItem",
            "plone.app.contenttypes.content.Event",
        ]

        catalog = api.portal.get_tool("portal_catalog")
        migrated = []
        not_migrated = []
        for brain in catalog():
            obj = brain.getObject()
            old_class_name = dxmigration.get_old_class_name_string(obj)
            if old_class_name in changed_base_classes:
                if dxmigration.migrate_base_class_to_new_class(
                    obj, migrate_to_folderish=True
                ):
                    migrated.append(obj)
                else:
                    not_migrated.append(obj)

        if migrated:
            log("{0} objects have been migrated.".format(len(migrated)))
        if not_migrated:
            log(
                "{0} objects have NOT been migrated.".format(len(not_migrated)),
                level="warn",
            )

        catalog.clearFindAndRebuild()
        log("Portal catalog has been rebuilt.")

        msg = "Processed folderish types migration in {0}.".format(real.next())
        log(msg)
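
The self.mklog() call at the top returns a logging callable that streams progress to the browser response while the long-running migration executes. A minimal sketch, assuming a browser-view context and modeled loosely on the mklog helper used by Plone's btree-folder migration views; the real implementation may differ:

from time import strftime


def mklog(request):
    """Return a log(msg, level=..., timestamp=...) callable that writes
    timestamped lines straight to the response.

    Sketch only, under the assumptions stated above; note that newer
    Zope response objects expect bytes rather than text.
    """
    write = request.RESPONSE.write

    def log(msg, level="info", timestamp=True):
        if level != "info":
            msg = "%s: %s" % (level.upper(), msg)
        if timestamp:
            msg = strftime("%Y/%m/%d-%H:%M:%S ") + msg
        write(msg + "\n")

    return log

A view's self.mklog() would then be a thin wrapper passing self.request.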
Example 4
def reset_scales(app, args):
    parser = argparse.ArgumentParser(
        description='Reset all scales in the application')
    parser.add_argument('--site', help='Add the site id', required=True)
    parser.add_argument(
        '--regenerate',
        help='Scale(s) you want to regenerate, multiple allowed',
        action='append')
    parser.add_argument('-c', help='ignored; absorbs the -c flag zopectl adds')
    args = parser.parse_args(args)
    site_name = args.site

    root = makerequest.makerequest(app)
    site = root.get(site_name, None)

    logger = getLogger(__name__)
    log = mklog(root.REQUEST)
    if site is None:
        msg = "No site called `%s` found in the database." % site_name
        log(msg)
        logger.info(msg)
        sys.exit(1)

    # Set up local site manager
    setHooks()
    setSite(site)

    # Set up security
    uf = app.acl_users
    user = uf.getUserById("admin")
    newSecurityManager(None, user)
    catalog = site.portal_catalog

    log('resetting all scales from %r:' % site)
    real = timer()  # real time
    lap = timer()  # real lap time (for intermediate commits)
    cpu = timer(clock)  # cpu time
    processed = 0

    def checkPoint():
        msg = 'intermediate commit '\
            '(%d objects processed, last batch in %s)...'
        log(msg % (processed, next(lap)))
        trx = get()
        trx.note(u'reset scales for %d object(s)' % processed)
        trx.savepoint()

    cpi = checkpointIterator(checkPoint, 1000)
    for item in catalog(object_provides=IDexterityItem.__identifier__):
        o = item.getObject()
        storage = AnnotationStorage(o)
        storage.clear()
        msg = "Cleared storage for %s" % (item.getURL())
        log(msg)

        try:
            if args.regenerate is not None:
                for scale in args.regenerate:
                    if hasattr(o, 'image'):
                        scaler = o.unrestrictedTraverse('@@images')
                        if scaler.scale(fieldname='image',
                                        scale=scale) is not None:
                            log("regenerated scale %s" % scale)
                        else:
                            log("error regenerating scale %s" % scale)
        except (AttributeError, IOError):
            continue
        processed += 1
        next(cpi)
    checkPoint()
    msg = 'processed %d object(s) in %s (%s cpu time).'
    msg = msg % (processed, next(real), next(cpu))
    log(msg)
    logger.info(msg)
    transaction.commit()
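
Given the injected app object and the dummy -c argument, this script is evidently meant to be executed through Zope's script runner. A hypothetical entry point, assuming an invocation like bin/instance run reset_scales.py --site Plone:

if __name__ == '__main__':
    import sys
    # `app` is injected into the script's globals by "bin/instance run";
    # hypothetical wiring, adjust filename and arguments to your setup.
    reset_scales(app, sys.argv[1:])  # noqa: F821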
Example 5
    def __call__(self):
        log = self.mklog()
        real = timer()

        self.install_folderish_types()
        log("collective.folderishtypes installed.")

        catalog = api.portal.get_tool("portal_catalog")
        catalog.clearFindAndRebuild()  # rebuild up front so the catalog walk below sees fresh brains
        log("Portal catalog has been rebuilt.")

        changed_base_classes = [
            "plone.app.contenttypes.content.Document",
            "plone.app.contenttypes.content.NewsItem",
            "plone.app.contenttypes.content.Event",
        ]

        migrated = []
        not_migrated = []
        for brain in catalog():
            obj = brain.getObject()
            old_class_name = dxmigration.get_old_class_name_string(obj)
            if old_class_name in changed_base_classes:
                prevented_delete = prevented_move = False
                obj_id = obj.getId()
                parent = aq_parent(aq_inner(obj))
                if IPreventDelete.providedBy(obj):
                    prevented_delete = True
                    noLongerProvides(obj, IPreventDelete)
                if IPreventMoveOrRename.providedBy(obj):
                    prevented_move = True
                    noLongerProvides(obj, IPreventMoveOrRename)
                position_in_parent = None
                ordered = IOrderedContainer(parent, None)
                if ordered is not None:
                    position_in_parent = ordered.getObjectPosition(obj_id)
                if dxmigration.migrate_base_class_to_new_class(
                        obj, migrate_to_folderish=True):
                    migrated.append(obj)
                    if position_in_parent is not None:
                        ordered.moveObject(obj_id, position_in_parent)
                    if prevented_delete:
                        alsoProvides(obj, IPreventDelete)
                    if prevented_move:
                        alsoProvides(obj, IPreventMoveOrRename)
                else:
                    not_migrated.append(obj)

        if migrated:
            log("{0} objects have been migrated.".format(len(migrated)))
        if not_migrated:
            log(
                "{0} objects have NOT been migrated.".format(
                    len(not_migrated)),
                level="warn",
            )

        catalog.clearFindAndRebuild()
        log("Portal catalog has been rebuilt.")

        msg = "Processed folderish types migration in {0}.".format(real.next())
        log(msg)
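
The save-and-restore handling of IPreventDelete and IPreventMoveOrRename above generalizes to a small context manager. A hypothetical refactoring, not part of the original code:

from contextlib import contextmanager

from zope.interface import alsoProvides, noLongerProvides


@contextmanager
def without_markers(obj, *interfaces):
    """Temporarily drop the given marker interfaces from obj, restoring
    the ones that were actually present once the block finishes.

    Hypothetical helper extracted from the inline logic above; this works
    here because migrate_base_class_to_new_class changes the class of the
    same persistent object in place.
    """
    removed = [iface for iface in interfaces if iface.providedBy(obj)]
    for iface in removed:
        noLongerProvides(obj, iface)
    try:
        yield obj
    finally:
        for iface in removed:
            alsoProvides(obj, iface)

One behavioral difference to be aware of: the context manager restores the markers unconditionally, whereas the original re-applies them only when the migration succeeded.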