def __call__(self):
    self.install_upgrade_profile()
    query = {'portal_type': 'ftw.mail.mail'}
    msg = 'Set correct message contentType for mails.'
    for brain in ProgressLogger(msg, self.catalog_unrestricted_search(query)):
        self.ensure_correct_content_type(brain)

def begin(self):
    self.should_log = IDuringUpgrade.providedBy(getRequest())
    if not self.should_log:
        return
    indexing_queue_length = getQueue().length()
    self.logger = ProgressLogger(
        'Processing indexing queue', indexing_queue_length)

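The `begin()` hook above only builds the logger from the current queue length; advancing it happens elsewhere. A minimal sketch of a companion hook, assuming a hypothetical `processed()` hook name and assuming the ProgressLogger instance can be stepped by calling it directly, as the tests further down do with the value returned by the context manager:

def processed(self):
    # Hypothetical companion hook: advance the progress logger by one
    # queue entry each time an item has been indexed.
    if self.should_log:
        self.logger()
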
def test_accepts_iterable_object(self):
    items = range(5)
    with ProgressLogger('Foo', items, logger=self.logger) as step:
        for _item in items:
            step()

    self.assertEqual(
        ['STARTING Foo', '1 of 5 (20%): Foo', 'DONE Foo'],
        self.read_log())

def __call__(self):
    self.install_upgrade_profile()
    annotations = IAnnotations(self.portal)
    jobs = annotations.get('publisher-queue', ())
    if hasattr(jobs, 'values'):
        jobs = jobs.values()
    queue = annotations['publisher-queue'] = Queue()
    # Use an explicit loop instead of map() for its side effect; under
    # Python 3 map() is lazy and would never actually enqueue the jobs.
    for job in ProgressLogger('Migrate jobs to new queue storage', jobs):
        queue.put(job)

def __call__(self):
    self.install_upgrade_profile()
    service = get_service_v3()
    msg = ('Reindex checksum for documents having one of the following '
           'file-extensions: {}.'.format(', '.join(self.additional_mimetypes)))
    for obj in ProgressLogger(msg, self.objs_to_perform()):
        IBumblebeeDocument(obj).update_checksum()
        service.trigger_storing(obj, deferred=True)

def test_succeeding_logging(self):
    with ProgressLogger('Foo', 5, logger=self.logger, timeout=0.03) as step:
        for i in range(5):
            step()
            sleep(0.0151)

    self.assertEqual(
        ['STARTING Foo',
         '1 of 5 (20%): Foo',
         '3 of 5 (60%): Foo',
         '5 of 5 (100%): Foo',
         'DONE Foo'],
        self.read_log())

def objects(self, catalog_query, message, logger=None):
    """Queries the catalog (unrestricted) and returns an iterator over
    the full objects.

    The iterator configures and calls a ``ProgressLogger`` with the
    passed ``message``.
    """
    objects = self.catalog_unrestricted_search(
        catalog_query, full_objects=True)
    return ProgressLogger(message, objects, logger=logger)

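A typical caller feeds the returned iterator straight into a loop. A minimal sketch, assuming these helpers live on ftw.upgrade's UpgradeStep base class, with a hypothetical upgrade step name, placeholder portal_type and a hypothetical fix_document() helper:

class MigrateDocuments(UpgradeStep):
    """Hypothetical upgrade step illustrating the objects() helper."""

    def __call__(self):
        self.install_upgrade_profile()
        # objects() wraps the unrestricted catalog results in a
        # ProgressLogger, so progress is logged while iterating.
        query = {'portal_type': 'my.package.document'}
        for obj in self.objects(query, 'Fix documents.'):
            self.fix_document(obj)
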
def test_current_item_is_printed_when_logger_exits_unexpectedly(self):
    items = range(5)
    with self.assertRaises(ValueError):
        for item in ProgressLogger('Foo', items, logger=self.logger):
            if item == 4:
                raise ValueError('baz')

    self.assertEqual(
        ['STARTING Foo',
         '1 of 5 (20%): Foo',
         'FAILED Foo (GeneratorExit: ) at 4'],
        self.read_log())

def test_acts_as_iterable_wrapper(self):
    items = list(range(5))
    result = []
    for item in ProgressLogger('Foo', items, logger=self.logger):
        result.append(item)

    self.assertEqual(
        ['STARTING Foo', '1 of 5 (20%): Foo', 'DONE Foo'],
        self.read_log())
    self.assertEqual(
        items, result,
        'Iterating over the progresslogger yields the original items.')

def catalog_reindex_objects(self, query, idxs=None):
    """Reindex all objects found in the catalog with `query`.

    A list of index names can be passed as `idxs` to limit which
    indexes are updated.
    """
    if idxs is None:
        idxs = []
    title = '.'.join((self.__module__, self.__class__.__name__))
    objects = self.catalog_unrestricted_search(query, full_objects=True)
    with ProgressLogger(title, objects) as step:
        for obj in objects:
            obj.reindexObject(idxs=idxs)
            step()

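Called from an upgrade step, this reindexes the matched objects while logging progress under the step's dotted name. A minimal usage sketch, reusing the mail portal_type from above and example index names:

def __call__(self):
    self.install_upgrade_profile()
    # Reindex only the title-related indexes for all mails; other
    # indexes stay untouched because of the idxs argument.
    self.catalog_reindex_objects(
        {'portal_type': 'ftw.mail.mail'},
        idxs=['Title', 'sortable_title'])
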
def __call__(self):
    self.install_upgrade_profile()
    catalog = api.portal.get_tool('portal_catalog')
    brains = catalog.unrestrictedSearchResults(
        {'object_provides': IDocumentSchema.__identifier__})
    for brain in ProgressLogger('Fix content type for Open XML Visio.', brains):
        if brain.getContentType == 'application/octet-stream':
            obj = brain.getObject()
            if not obj.file:
                continue
            filename, ext = splitext(obj.file.filename)
            mime_type = EXTENSION_TO_MIMETYPE.get(ext)
            if not mime_type:
                continue
            obj.file.contentType = mime_type
            obj.reindexObject()

def test_failing_logging(self):
    timeout = 0
    with self.assertRaises(ValueError):
        data = range(5)
        with ProgressLogger('Bar', data, logger=self.logger,
                            timeout=timeout) as step:
            for i in data:
                if i == 3:
                    raise ValueError('baz')
                step()

    self.assertEqual(
        ['STARTING Bar',
         '1 of 5 (20%): Bar',
         '2 of 5 (40%): Bar',
         '3 of 5 (60%): Bar',
         'FAILED Bar (ValueError: baz) at item nr. 3'],
        self.read_log())