def __call__(self, *args, **kwargs):
    """Reindex every row of this table page, then redirect to the edit view.

    The table view's row cache is rebuilt first (``ignore_cache=True``), then
    each stored row is reindexed in the tablepage catalog by its ``__uuid__``.
    A transaction savepoint is taken every 100 rows to keep memory bounded.
    A portal status message reports how many rows were processed.
    """
    context = self.context
    request = self.request
    catalog = getToolByName(context, config.CATALOG_ID)
    storage = IDataStorage(context)
    # now we load the table view and rebuild all rows by using the
    # ignore_cache parameter
    table_view = getMultiAdapter((context, request), name=u'view-table')
    table_view.rows(ignore_cache=True)
    # BUGFIX: pre-bind the loop variable so that an empty storage reports
    # "0 rows" below instead of raising NameError on the unbound name.
    index = -1
    for index, row in enumerate(storage):
        # don't shadow the module-level ``uuid`` module with the row's id
        row_uuid = row.get('__uuid__')
        if not row_uuid:
            # this should not happen
            logger.warning(
                "Row without an uuid! index %d, document at %s"
                % (index, context.absolute_url_path()))
            continue
        catalog.reindex_rows(context, row_uuid, storage)
        if index and index % 100 == 0:
            logger.info("Refreshing catalog (%d)" % index)
            transaction.savepoint()
    logger.info("Refreshing catalog and caches: done")
    getToolByName(context, 'plone_utils').addPortalMessage(
        _('reindex_performed_message',
          u'$count rows has been updated',
          mapping={'count': index + 1}))
    request.response.redirect('%s/edit-table' % context.absolute_url())
def addCatalogIndex(portal, name, type="FieldIndex"):
    """Add index *name* of the given *type* to the tablepage catalog.

    An already-existing index is left untouched (the resulting
    ``CatalogError`` is swallowed and logged).
    """
    tp_catalog = portal.tablepage_catalog
    logger.info("Adding index %s (%s)" % (name, type))
    try:
        tp_catalog.addIndex(name, type)
    except CatalogError:
        logger.info("... already exists: skipping")
def addCatalogIndex(portal, name, type="FieldIndex"):
    """Register a new *type* index named *name* on the tablepage catalog.

    If the index exists already the ``CatalogError`` is caught and only
    logged, so the call is idempotent.
    """
    logger.info("Adding index %s (%s)" % (name, type))
    try:
        portal.tablepage_catalog.addIndex(name, type)
    except CatalogError:
        logger.info("... already exists: skipping")
def addCatalogColumns(portal, columns):
    """Add every metadata column in *columns* to the tablepage catalog.

    Columns that already exist are skipped (``CatalogError`` is logged).
    """
    tp_catalog = portal.tablepage_catalog
    for column_id in columns:
        logger.info("Adding column %s" % column_id)
        try:
            tp_catalog.addColumn(column_id)
        except CatalogError:
            logger.info("Column %s already exists" % column_id)
def addCatalogColumns(portal, columns):
    """Ensure each name in *columns* is a metadata column of the catalog.

    Existing columns raise ``CatalogError``, which is caught and logged.
    """
    catalog = portal.tablepage_catalog
    for name in columns:
        logger.info("Adding column %s" % name)
        try:
            catalog.addColumn(name)
        except CatalogError:
            logger.info("Column %s already exists" % name)
def migrateTo08b1(context):
    """Upgrade step to 0.8b1: add uuids, new columns, rebuild the catalog."""
    portal = getToolByName(context, 'portal_url').getPortalObject()
    logger.info("Checking rows (or labels) without an uuid")
    _uuid_all(portal)
    logger.info("Adding new catalog columns")
    addCatalogColumns(portal, ['is_label', 'label'])
    logger.info("Now indexing all rows inside Table Page contents")
    portal.tablepage_catalog.clearFindAndRebuild()
    logger.info("...done")
    logger.info("Migrated to 0.8b1")
def migrateTo08b1(context):
    """Run the 0.8b1 migration: uuid backfill, new columns, full reindex."""
    site = getToolByName(context, 'portal_url').getPortalObject()
    logger.info("Checking rows (or labels) without an uuid")
    _uuid_all(site)
    logger.info("Adding new catalog columns")
    addCatalogColumns(site, ['is_label', 'label'])
    logger.info("Now indexing all rows inside Table Page contents")
    site.tablepage_catalog.clearFindAndRebuild()
    logger.info("...done")
    logger.info("Migrated to 0.8b1")
def migrateTo081(context):
    """Upgrade step to 0.8.1: re-run the jsregistry profile.

    The rowGrouping plugin's enabled state is remembered across the profile
    re-import and restored afterwards, then resources are re-cooked.
    """
    portal = getToolByName(context, 'portal_url').getPortalObject()
    setup_tool = getToolByName(context, 'portal_setup')
    portal_javascripts = getToolByName(context, 'portal_javascripts')
    resource_id = ('++resource++collective.tablepage.resources'
                   '/jquery.dataTables.rowGrouping.js')
    resource = portal_javascripts.getResource(resource_id)
    was_enabled = bool(resource and resource.getEnabled())
    setup_tool.runImportStepFromProfile('profile-collective.tablepage:default',
                                        'jsregistry')
    if was_enabled:
        logger.info("rowGroping plugin was enabled - re-activating with new configuration")
        portal_javascripts.getResource(resource_id).setEnabled(True)
        portal_javascripts.cookResources()
    logger.info("Migrated to 0.8.1")
def _uuid_all(context):
    """Assign a ``__uuid__`` to every stored row that is still missing one.

    Walks all ITablePage objects found in the portal catalog and mutates
    their row storage in place.
    """
    logger.info("Generating uuids info for old rows")
    catalog = getToolByName(context, 'portal_catalog')
    brains = catalog(object_provides=ITablePage.__identifier__)
    for brain in brains:
        logger.info("Checking %s" % brain.getPath())
        page = brain.getObject()
        storage = IDataStorage(page)
        for row in storage:
            if not row.get('__uuid__'):
                fresh_uid = str(uuid.uuid4())
                logger.info(fresh_uid)
                row['__uuid__'] = fresh_uid
        logger.info("Done for %s" % brain.getPath())
    logger.info("uuid generation done")
def _uuid_all(context):
    """Backfill ``__uuid__`` on rows created before uuids were introduced.

    Iterates every ITablePage brain from the portal catalog and writes a
    freshly generated uuid4 into each row that lacks one.
    """
    logger.info("Generating uuids info for old rows")
    portal_catalog = getToolByName(context, 'portal_catalog')
    for brain in portal_catalog(object_provides=ITablePage.__identifier__):
        logger.info("Checking %s" % brain.getPath())
        table_page = brain.getObject()
        for row in IDataStorage(table_page):
            if row.get('__uuid__'):
                continue
            new_uid = str(uuid.uuid4())
            logger.info(new_uid)
            row['__uuid__'] = new_uid
        logger.info("Done for %s" % brain.getPath())
    logger.info("uuid generation done")
def migrateTo08b3(context):
    """Upgrade step to 0.8b3: drop the unused allowedRolesAndUsers index."""
    portal = getToolByName(context, 'portal_url').getPortalObject()
    logger.info("Removing useless catalog index allowedRolesAndUsers")
    try:
        portal.tablepage_catalog.delIndex('allowedRolesAndUsers')
        logger.info("Removed!")
    except CatalogError:
        logger.info("...not found: doing nothing")
    logger.info("Migrated to 0.8b3")
def migrateTo08b3(context):
    """0.8b3 migration: the allowedRolesAndUsers index is no longer needed.

    A missing index raises ``CatalogError``, which is treated as already-done.
    """
    site = getToolByName(context, 'portal_url').getPortalObject()
    logger.info("Removing useless catalog index allowedRolesAndUsers")
    try:
        site.tablepage_catalog.delIndex('allowedRolesAndUsers')
    except CatalogError:
        logger.info("...not found: doing nothing")
    else:
        logger.info("Removed!")
    logger.info("Migrated to 0.8b3")
def migrateTo08b2(context):
    """Upgrade step to 0.8b2: add catalog indexes, rebuild row index."""
    portal = getToolByName(context, 'portal_url').getPortalObject()
    logger.info("Adding new catalog indexes")
    addCatalogIndex(portal, 'allowedRolesAndUsers', 'KeywordIndex')
    addCatalogIndex(portal, 'is_label')
    logger.info("Now indexing all rows inside Table Page contents")
    portal.tablepage_catalog.clearFindAndRebuild()
    logger.info("...done")
    logger.info("Migrated to 0.8b2")
def migrateTo08b2(context):
    """0.8b2 migration: new indexes plus a clear-find-and-rebuild pass."""
    site = getToolByName(context, 'portal_url').getPortalObject()
    logger.info("Adding new catalog indexes")
    addCatalogIndex(site, 'allowedRolesAndUsers', 'KeywordIndex')
    addCatalogIndex(site, 'is_label')
    logger.info("Now indexing all rows inside Table Page contents")
    site.tablepage_catalog.clearFindAndRebuild()
    logger.info("...done")
    logger.info("Migrated to 0.8b2")
def migrateTo081(context):
    """0.8.1 migration: reload the jsregistry import step.

    Remembers whether the dataTables rowGrouping resource was enabled before
    the profile re-import; if it was, re-enable it and cook resources again.
    """
    site = getToolByName(context, 'portal_url').getPortalObject()
    setup = getToolByName(context, 'portal_setup')
    js_registry = getToolByName(context, 'portal_javascripts')
    rowgrouping_js = ('++resource++collective.tablepage.resources'
                      '/jquery.dataTables.rowGrouping.js')
    existing = js_registry.getResource(rowgrouping_js)
    previously_enabled = True if (existing and existing.getEnabled()) else False
    setup.runImportStepFromProfile('profile-collective.tablepage:default',
                                   'jsregistry')
    if previously_enabled:
        logger.info(
            "rowGroping plugin was enabled - re-activating with new configuration"
        )
        js_registry.getResource(rowgrouping_js).setEnabled(True)
        js_registry.cookResources()
    logger.info("Migrated to 0.8.1")
def __call__(self, *args, **kwargs):
    """Reindex all rows of the table page and bounce back to edit-table.

    Rebuilds the cached table rows, reindexes each row by ``__uuid__`` in the
    tablepage catalog (savepointing every 100 rows), shows a count message
    and redirects the browser to the edit-table view.
    """
    context = self.context
    request = self.request
    catalog = getToolByName(context, config.CATALOG_ID)
    storage = IDataStorage(context)
    # now we load the table view and rebuild all rows by using the
    # ignore_cache parameter
    table_view = getMultiAdapter((context, request), name=u'view-table')
    table_view.rows(ignore_cache=True)
    # BUGFIX: initialize ``index`` so the count message works on an empty
    # storage (previously ``index + 1`` raised NameError with zero rows).
    index = -1
    for index, row in enumerate(storage):
        # local name chosen so the module-level ``uuid`` module stays visible
        row_uid = row.get('__uuid__')
        if not row_uid:
            # this should not happen
            logger.warning("Row without an uuid! index %d, document at %s"
                           % (index, context.absolute_url_path()))
            continue
        catalog.reindex_rows(context, row_uid, storage)
        if index and index % 100 == 0:
            logger.info("Refreshing catalog (%d)" % index)
            transaction.savepoint()
    logger.info("Refreshing catalog and caches: done")
    getToolByName(context, 'plone_utils').addPortalMessage(
        _('reindex_performed_message', u'$count rows has been updated',
          mapping={'count': index + 1}))
    request.response.redirect('%s/edit-table' % context.absolute_url())
def migrateTo08(context):
    """Upgrade step to 0.8: import profiles, create catalog, backfill uuids."""
    portal = getToolByName(context, 'portal_url').getPortalObject()
    setup_tool = getToolByName(context, 'portal_setup')
    for step in ('rolemap', 'cssregistry'):
        setup_tool.runImportStepFromProfile(
            'profile-collective.tablepage:default', step)
    createCatalog(portal)
    _uuid_all(context)
    logger.info("Now indexing all rows inside Table Page contents")
    portal.tablepage_catalog.clearFindAndRebuild()
    logger.info("...done")
    logger.info("Migrated to 0.8")
def migrateTo08(context):
    """0.8 migration: rolemap/cssregistry steps, catalog bootstrap, reindex."""
    site = getToolByName(context, 'portal_url').getPortalObject()
    setup = getToolByName(context, 'portal_setup')
    setup.runImportStepFromProfile('profile-collective.tablepage:default',
                                   'rolemap')
    setup.runImportStepFromProfile('profile-collective.tablepage:default',
                                   'cssregistry')
    createCatalog(site)
    _uuid_all(context)
    logger.info("Now indexing all rows inside Table Page contents")
    site.tablepage_catalog.clearFindAndRebuild()
    logger.info("...done")
    logger.info("Migrated to 0.8")
def __call__(self):
    """Import rows into the table page from an uploaded CSV file.

    Sniffs the CSV dialect (falling back to 'excel'), maps CSV headers onto
    the page's column configuration (guessing a String-column configuration
    when none exists), optionally validates each cell and optionally skips
    duplicate rows, then stores and catalogs every accepted row.  Status
    messages report skipped/invalid/duplicate lines and the final row count.
    """
    request = self.request
    context = self.context
    csv_file = request.form.get('csv')
    check_duplicate = request.form.get('look_for_duplicate')
    tp_catalog = getToolByName(context, config.CATALOG_ID)
    if csv_file and csv_file.filename:
        try:
            dialect = csv.Sniffer().sniff(csv_file.read(1024),
                                          delimiters=";,")
            if not dialect.delimiter:
                # some stupid Python 2.4 CSV bug may happens
                raise csv.Error
        except csv.Error:
            dialect = 'excel'
        csv_file.seek(0)
        counter = 0
        storage = IDataStorage(context)
        member = getMultiAdapter((context, request),
                                 name=u'plone_portal_state').member()
        reader = csv.reader(csv_file, dialect)
        configuration = self.context.getPageColumns()
        valid_headers = [c['id'] for c in configuration]
        valid_retrievers = [self._getRetrieverAdapter(c['type'])
                            for c in configuration]
        validators = [self._getRetrieveValidators(c['type'])
                      for c in configuration]
        headers = []
        first = True
        putils = getToolByName(context, 'plone_utils')
        for line, row in enumerate(reader):
            logger.info("Importing line %04d" % line)
            if first:
                # Header line: keep only non-empty, known header names
                headers = [h.strip() for h in row if h.strip()]
                if configuration:
                    # CSV row is accessed by index
                    headers = [(h, headers.index(h)) for h in headers
                               if h in valid_headers]
                else:
                    # No configuration. Let's guess a configuration using
                    # CSV headers
                    self.context.setPageColumns([{'id': h,
                                                  'label': h,
                                                  'description': '',
                                                  'type': 'String',
                                                  'vocabulary': '',
                                                  'options': [],
                                                  } for h in headers])
                    headers = [(h, headers.index(h)) for h in headers]
                    configuration = self.context.getPageColumns()
                    valid_retrievers = [self._getRetrieverAdapter(c['type'])
                                        for c in configuration]
                    validators = [self._getRetrieveValidators(c['type'])
                                  for c in configuration]
                first = False
                continue
            tobe_saved = {}
            skip_row = False
            if len(row) < len(headers):
                putils.addPortalMessage(
                    _('error_row_count_dont_match',
                      default=u"Skipping line $line. "
                              u"Found $lrow columns instead of $lheaders",
                      mapping={'line': line + 1,
                               'lrow': len(row),
                               'lheaders': len(headers)}),
                    type="error")
                continue
            for header, hindex in headers:
                skip_cell = False
                if request.form.get('validate'):
                    # NOTE(review): validators/configuration are indexed by
                    # the CSV column position ``hindex``; this assumes CSV
                    # column order matches configuration order — confirm.
                    required_field_validation_failed = False
                    for vname, v in validators[hindex]:
                        msg = v.validate(configuration[hindex],
                                         data=row[hindex])
                        if msg:
                            if vname == u'required':
                                putils.addPortalMessage(
                                    _('warn_invalid_row',
                                      default=u"Line $line can't be imported due to missing "
                                              u"required data",
                                      mapping={'line': line + 1}),
                                    type="warning")
                                required_field_validation_failed = True
                                break
                            putils.addPortalMessage(
                                _('warn_invalid_cell',
                                  default=u"Line $line, cell $cell: can't import data "
                                          u"due to failed validator check",
                                  mapping={'line': line + 1,
                                           'cell': hindex}),
                                type="warning")
                            skip_cell = True
                            break
                    if required_field_validation_failed:
                        skip_row = True
                        break
                # do not spend time to save data if this will be discarded
                if not skip_row and not skip_cell:
                    try:
                        tobe_saved[header] = \
                            valid_retrievers[hindex].data_to_storage(row[hindex])
                    except NotImplementedError:
                        # column is not implementing CSV data load
                        continue
            if not skip_row and tobe_saved:
                if check_duplicate and self._checkDuplicateRow(tobe_saved,
                                                               storage):
                    putils.addPortalMessage(
                        _('warn_duplicate',
                          default=u"Line ${line_number} not added because duplicated "
                                  u"data has been found",
                          mapping={'line_number': line + 1}),
                        type="warning")
                    continue
                tobe_saved['__creator__'] = member.getId()
                tobe_saved['__uuid__'] = str(uuid.uuid4())
                counter += 1
                storage.add(tobe_saved)
                tp_catalog.catalog_row(context, tobe_saved)
        msg = _('count_rows_added',
                default=u'${count} rows added',
                mapping={'count': counter})
        putils.addPortalMessage(msg)
        self._addNewVersion(msg)
    return self.index()
def uninstall(portal, reinstall=False):
    """Run the uninstall profile, but only on a real uninstall.

    Nothing happens when called as part of a reinstall.
    """
    if not reinstall:
        portal.portal_setup.runAllImportStepsFromProfile(
            'profile-collective.tablepage:uninstall')
        logger.info("Uninstalled")
def uninstall(portal, reinstall=False):
    """Apply the uninstall GenericSetup profile unless this is a reinstall."""
    if reinstall:
        # reinstall: keep everything in place
        return
    setup_tool = portal.portal_setup
    setup_tool.runAllImportStepsFromProfile(
        'profile-collective.tablepage:uninstall')
    logger.info("Uninstalled")
def createCatalog(portal):
    """Create the tablepage catalog inside *portal* when it is missing."""
    if not hasattr(portal, config.CATALOG_ID):
        manage_addTablePageCatalog(portal)
        logger.info('Added the catalog')
    else:
        logger.info('Catalog found. Skipping...')
def migrateTo05b2(context):
    """Upgrade step to 0.5b2: run the ``to1210`` profile."""
    setup_tool = getToolByName(context, 'portal_setup')
    setup_tool.runAllImportStepsFromProfile(
        'profile-collective.tablepage:to1210')
    logger.info("Migrated to 0.5b2")
def migrateTo05(context):
    """Upgrade step to 0.5: re-import the jsregistry step."""
    setup_tool = getToolByName(context, 'portal_setup')
    setup_tool.runImportStepFromProfile(
        'profile-collective.tablepage:default', 'jsregistry')
    logger.info("Migrated to 0.5")
def migrateTo05(context):
    """0.5 migration: refresh the javascript registry from the profile."""
    getToolByName(context, 'portal_setup').runImportStepFromProfile(
        'profile-collective.tablepage:default', 'jsregistry')
    logger.info("Migrated to 0.5")
def migrateTo05b2(context):
    """0.5b2 migration: apply the ``to1210`` GenericSetup profile."""
    getToolByName(context, 'portal_setup').runAllImportStepsFromProfile(
        'profile-collective.tablepage:to1210')
    logger.info("Migrated to 0.5b2")
def createCatalog(portal):
    """Add the tablepage catalog to *portal* unless one already exists."""
    if hasattr(portal, config.CATALOG_ID):
        logger.info('Catalog found. Skipping...')
        return
    manage_addTablePageCatalog(portal)
    logger.info('Added the catalog')
def __call__(self):
    """Bulk-load rows into this table page from a posted CSV file.

    Workflow: sniff the dialect ('excel' fallback), read the header line and
    align it with the page column configuration (auto-generating String
    columns when the page has none), then per data line optionally run the
    column validators, optionally reject duplicates, and store + catalog each
    accepted row.  Every anomaly is surfaced as a portal status message.
    """
    request = self.request
    context = self.context
    upload = request.form.get('csv')
    check_duplicate = request.form.get('look_for_duplicate')
    tp_catalog = getToolByName(context, config.CATALOG_ID)
    if upload and upload.filename:
        try:
            dialect = csv.Sniffer().sniff(upload.read(1024), delimiters=";,")
            if not dialect.delimiter:
                # some stupid Python 2.4 CSV bug may happens
                raise csv.Error
        except csv.Error:
            dialect = 'excel'
        upload.seek(0)
        counter = 0
        storage = IDataStorage(context)
        member = getMultiAdapter(
            (context, request), name=u'plone_portal_state').member()
        reader = csv.reader(upload, dialect)
        configuration = self.context.getPageColumns()
        valid_headers = [c['id'] for c in configuration]
        valid_retrievers = [self._getRetrieverAdapter(c['type'])
                            for c in configuration]
        validators = [self._getRetrieveValidators(c['type'])
                      for c in configuration]
        headers = []
        first = True
        putils = getToolByName(context, 'plone_utils')
        for line, row in enumerate(reader):
            logger.info("Importing line %04d" % line)
            if first:
                # First CSV line carries the column headers
                headers = [h.strip() for h in row if h.strip()]
                if configuration:
                    # CSV row is accessed by index
                    headers = [(h, headers.index(h))
                               for h in headers if h in valid_headers]
                else:
                    # No configuration: derive one from the CSV headers,
                    # defaulting every column to the String type
                    self.context.setPageColumns(
                        [{'id': h,
                          'label': h,
                          'description': '',
                          'type': 'String',
                          'vocabulary': '',
                          'options': [],
                          } for h in headers])
                    headers = [(h, headers.index(h)) for h in headers]
                    configuration = self.context.getPageColumns()
                    valid_retrievers = [self._getRetrieverAdapter(c['type'])
                                        for c in configuration]
                    validators = [self._getRetrieveValidators(c['type'])
                                  for c in configuration]
                first = False
                continue
            tobe_saved = {}
            skip_row = False
            if len(row) < len(headers):
                # Short row: refuse the whole line
                putils.addPortalMessage(
                    _('error_row_count_dont_match',
                      default=u"Skipping line $line. "
                              u"Found $lrow columns instead of $lheaders",
                      mapping={'line': line + 1,
                               'lrow': len(row),
                               'lheaders': len(headers)}),
                    type="error")
                continue
            for header, hindex in headers:
                skip_cell = False
                if request.form.get('validate'):
                    # NOTE(review): ``hindex`` (CSV column position) is used
                    # to index validators/configuration, which assumes the
                    # CSV order mirrors the configuration order — verify.
                    required_field_validation_failed = False
                    for vname, v in validators[hindex]:
                        msg = v.validate(configuration[hindex],
                                         data=row[hindex])
                        if not msg:
                            continue
                        if vname == u'required':
                            putils.addPortalMessage(
                                _('warn_invalid_row',
                                  default=u"Line $line can't be imported due to missing "
                                          u"required data",
                                  mapping={'line': line + 1}),
                                type="warning")
                            required_field_validation_failed = True
                            break
                        putils.addPortalMessage(
                            _('warn_invalid_cell',
                              default=u"Line $line, cell $cell: can't import data "
                                      u"due to failed validator check",
                              mapping={'line': line + 1,
                                       'cell': hindex}),
                            type="warning")
                        skip_cell = True
                        break
                    if required_field_validation_failed:
                        skip_row = True
                        break
                # do not spend time to save data if this will be discarded
                if not skip_row and not skip_cell:
                    try:
                        tobe_saved[header] = \
                            valid_retrievers[hindex].data_to_storage(row[hindex])
                    except NotImplementedError:
                        # column is not implementing CSV data load
                        continue
            if not skip_row and tobe_saved:
                if check_duplicate and self._checkDuplicateRow(tobe_saved,
                                                               storage):
                    putils.addPortalMessage(
                        _('warn_duplicate',
                          default=u"Line ${line_number} not added because duplicated "
                                  u"data has been found",
                          mapping={'line_number': line + 1}),
                        type="warning")
                    continue
                tobe_saved['__creator__'] = member.getId()
                tobe_saved['__uuid__'] = str(uuid.uuid4())
                counter += 1
                storage.add(tobe_saved)
                tp_catalog.catalog_row(context, tobe_saved)
        msg = _('count_rows_added',
                default=u'${count} rows added',
                mapping={'count': counter})
        putils.addPortalMessage(msg)
        self._addNewVersion(msg)
    return self.index()