Example 1
 def beforeTearDown(self):
   """Clear everything for next test."""
   self._safeTic()
   for module in [ 'sale_packing_list_module',
                   'organisation_module',
                   'person_module',
                   'currency_module',
                   'product_module',
                   'portal_simulation' ]:
     folder = getattr(self.getPortal(), module, None)
     if folder:
       for x in folder.objectValues():
         x.unindexObject()
       self._safeTic()
       folder.manage_delObjects([x.getId() for x in folder.objectValues()])
   accounting_module = self.getPortal().accounting_module
   for x in accounting_module.objectValues():
     x.cancel()
   accounting_module.manage_delObjects([x.getId() for x in
                                        accounting_module.objectValues()])
   self._safeTic()
   # cancel remaining messages
   activity_tool = self.getPortal().portal_activities
   for message in activity_tool.getMessageList():
     activity_tool.manageCancel(message.object_path, message.method_id)
     _print('\nCancelling active message %s.%s()\n'
            % (message.object_path, message.method_id) )
   transaction.commit()
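The teardown above relies on a _safeTic() helper that is not shown in this listing. A minimal sketch of what such a helper could look like, assuming the test case already provides the standard tic() method (hypothetical, not the original implementation):

 def _safeTic(self):
   """Hypothetical sketch: flush pending activities, swallowing any error
   so that teardown can keep cleaning up even if an activity fails."""
   try:
     self.tic()
   except Exception:
     pass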
Example 2
 def testSkins(self):
     '''Runs the ZChecker on skins'''
     dirs = self.portal.portal_skins.objectValues()
     for dir in dirs:
         results = self.portal.zchecker.checkObjects(dir.objectValues())
         for result in results:
             self._report(result)
     if self.verbose:
         _print('\n')
Example 3
 def testSkins(self):
     '''Runs the ZChecker on skins'''
     dirs = self.portal.portal_skins.objectValues()
     for dir in dirs:
         # filter out certain skin layers
         if self._skinpath(dir) not in ignoredSkinLayers:
             results = self.portal.zchecker.checkObjects(dir.objectValues())
             for result in results:
                 self._report(result)
     if self.verbose:
         _print('\n')
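This variant relies on an ignoredSkinLayers sequence and a _skinpath() helper defined elsewhere in the test module. A minimal sketch, assuming a skin layer is identified by its path relative to the portal_skins tool (the values and bodies below are illustrative, not the original code):

# Illustrative only; the real tuple depends on the site's skin layers.
ignoredSkinLayers = ('custom',)

def _skinpath(self, obj):
    '''Sketch (method of the test case): path of a skin object
    relative to the portal_skins tool.'''
    skins_path = self.portal.portal_skins.getPhysicalPath()
    return '/'.join(obj.getPhysicalPath()[len(skins_path):])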
Example 4
 def _report(self, result):
     msg = result['msg']
     obj = result['obj']
     if msg:
         if self.verbose:
             _print('\n')
         _print('------\n%s\n' % self._skinpath(obj))
         for line in msg:
             _print('%s\n' % line)
     else:
         if self.verbose:
             _print('.')
Example 5
  def test_Archive(self, quiet=quiet, run=1): #run_all_test):
    if not run: return
    if not quiet:
      message = 'Archive'
      _print('\n%s ' % message)
      LOG('Testing... ',0,message)

    portal = self.getPortal()
    portal_category = self.getCategoryTool()
    portal_archive = self.getArchiveTool()
    portal_catalog = self.getCatalogTool()
    inventory_module = portal.getDefaultModule(portal_type = "Inventory Module")
    # Create some objects
    self.base_category = portal_category.newContent(portal_type='Base Category',
                                               title="GreatTitle1")
    module = portal.getDefaultModule('Organisation')
    self.organisation = module.newContent(portal_type='Organisation',
                                     title="GreatTitle2")
    getInventory = self.getSimulationTool().getInventory
    self.mvt = self._makeMovement(quantity=100, stop_date=DateTime("2006/06/06"),
                                  simulation_state='delivered',)
    self.assertEqual(100, getInventory(node_uid=self.node.getUid()))
    self.assertEqual(len(self.folder.searchFolder(portal_type="Dummy Movement")), 1)

    # Create an inventory object
    self.inventory = self._makeInventory(date=DateTime("2006/06/15"))
    self.assertEqual(len(inventory_module.searchFolder(portal_type="Inventory")), 1)

    # Flush message queue
    self.tic()

    # Check that they are properly indexed in the catalog
    self.original_connection_id = 'erp5_sql_connection'
    self.original_deferred_connection_id = 'erp5_sql_deferred_connection'
    path_list = [self.organisation.getRelativeUrl(), self.inventory.getRelativeUrl()]
    self.checkRelativeUrlInSQLPathList(path_list, connection_id=self.original_connection_id)

    # Create new connectors for destination
    addSQLConnection = portal.manage_addProduct['ZMySQLDA'] \
      .manage_addZMySQLConnection
    self.new_connection_id = 'erp5_sql_connection1'
    db1, db2 = getExtraSqlConnectionStringList()[:2]
    addSQLConnection(self.new_connection_id,'', db1)
    new_connection = portal[self.new_connection_id]
    new_connection.manage_open_connection()
    # the deferred one
    self.new_deferred_connection_id = 'erp5_sql_connection2'
    addSQLConnection(self.new_deferred_connection_id,'', db1)
    new_deferred_connection = portal[self.new_deferred_connection_id]
    new_deferred_connection.manage_open_connection()

    # Create new connectors for archive
    self.archive_connection_id = 'erp5_sql_connection3'
    addSQLConnection(self.archive_connection_id,'', db2)
    archive_connection = portal[self.archive_connection_id]
    archive_connection.manage_open_connection()
    # the deferred one
    self.archive_deferred_connection_id = 'erp5_sql_connection4'
    addSQLConnection(self.archive_deferred_connection_id,'', db2)
    archive_deferred_connection = portal[self.archive_deferred_connection_id]
    archive_deferred_connection.manage_open_connection()

    # Create new catalog for destination
    self.original_catalog_id = 'erp5_mysql_innodb'
    self.new_catalog_id = self.original_catalog_id + '_2'
    cp_data = portal_catalog.manage_copyObjects(ids=('erp5_mysql_innodb',))
    new_id = portal_catalog.manage_pasteObjects(cp_data)[0]['new_id']
    new_catalog_id = 'erp5_mysql_innodb_2'
    portal_catalog.manage_renameObject(id=new_id,new_id=new_catalog_id)

    # Create new catalog for archive
    self.archive_catalog_id = self.original_catalog_id + '_archive'
    cp_data = portal_catalog.manage_copyObjects(ids=('erp5_mysql_innodb',))
    archive_id = portal_catalog.manage_pasteObjects(cp_data)[0]['new_id']
    archive_catalog_id = 'erp5_mysql_innodb_archive'
    portal_catalog.manage_renameObject(id=archive_id,new_id=archive_catalog_id)

    # Create an archive
    archive = portal_archive.newContent(portal_type="Archive",
                                        catalog_id=self.archive_catalog_id,
                                        connection_id=self.archive_connection_id,
                                        deferred_connection_id=self.archive_deferred_connection_id,
                                        priority=3,
                                        inventory_method_id='Archive_createAllInventory',
                                        test_method_id='Archive_test',
                                        stop_date_range_min=DateTime("2006/06/01"),
                                        stop_date_range_max=DateTime("2006/07/01"),
                                        )
    archive.ready()
    # Create an archive for destination catalog
    dest = portal_archive.newContent(portal_type="Archive",
                                     catalog_id=self.new_catalog_id,
                                     connection_id=self.new_connection_id,
                                     deferred_connection_id=self.new_deferred_connection_id,
                                     priority=1,
                                     test_method_id='Archive_test',
                                     stop_date_range_min=DateTime("2006/07/01"),
                                     )
    dest.ready()

    # make sure to commit to release any lock on tables
    self.commit()

    # Do archive
    portal_archive.manage_archive(destination_archive_id=dest.getId(),
                                  archive_id=archive.getId(),
                                  update_destination_sql_catalog=True,
                                  update_archive_sql_catalog=True,
                                  clear_destination_sql_catalog=True,
                                  clear_archive_sql_catalog=True)

    self.tic()
    self.assertEqual(portal_catalog.getSQLCatalog().id, self.new_catalog_id)
    self.assertEqual(archive.getValidationState(), 'validated')
    self.assertEqual(dest.getValidationState(), 'validated')
    # Check that the organisation object is indexed
    # in the archive, the current catalog, and the old one
    path_list = [self.organisation.getRelativeUrl()]
    self.checkRelativeUrlInSQLPathList(path_list, connection_id=self.original_connection_id)
    self.checkRelativeUrlInSQLPathList(path_list, connection_id=self.new_connection_id)
    self.checkRelativeUrlInSQLPathList(path_list, connection_id=self.archive_connection_id)
    # Create a new organisation and check it is indexed in the current
    # and archive catalogs, but not in the old one
    self.organisation_1 = module.newContent(portal_type='Organisation',
                                            title="GreatTitle3")
    self.tic()
    path_list = [self.organisation_1.getRelativeUrl()]
    self.checkRelativeUrlNotInSQLPathList(path_list, connection_id=self.original_connection_id)
    self.checkRelativeUrlInSQLPathList(path_list, connection_id=self.new_connection_id)
    self.checkRelativeUrlInSQLPathList(path_list, connection_id=self.archive_connection_id)

    # Check that the movement is indexed
    # in the archive and the old catalog, but not in the current one
    path_list = [self.mvt.getRelativeUrl()]
    self.checkRelativeUrlInSQLPathList(path_list, connection_id=self.original_connection_id)
    self.checkRelativeUrlNotInSQLPathList(path_list, connection_id=self.new_connection_id)
    self.checkRelativeUrlInSQLPathList(path_list, connection_id=self.archive_connection_id)

    # Check that the inventory is indexed
    # in the archive and the old catalog, but not in the current one
    path_list = [self.inventory.getRelativeUrl()]
    self.checkRelativeUrlInSQLPathList(path_list, connection_id=self.original_connection_id)
    self.checkRelativeUrlNotInSQLPathList(path_list, connection_id=self.new_connection_id)
    self.checkRelativeUrlInSQLPathList(path_list, connection_id=self.archive_connection_id)

    # Create a new movement and check it goes only into the new catalog
    self.assertEqual(len(self.folder.searchFolder(portal_type="Dummy Movement")), 0)
    self.assertEqual(100, getInventory(node_uid=self.node.getUid()))
    self.new_mvt = self._makeMovement(quantity=50, stop_date=DateTime("2006/08/06"),
                                      simulation_state='delivered',)
    self.tic()
    self.assertEqual(len(self.folder.searchFolder(portal_type="Dummy Movement")), 1)
    # Check that the new movement is indexed
    # in the current catalog only, not in the archive or the old one
    path_list = [self.new_mvt.getRelativeUrl()]
    self.checkRelativeUrlNotInSQLPathList(path_list, connection_id=self.original_connection_id)
    self.checkRelativeUrlInSQLPathList(path_list, connection_id=self.new_connection_id)
    self.checkRelativeUrlNotInSQLPathList(path_list, connection_id=self.archive_connection_id)
    self.assertEqual(150, getInventory(node_uid=self.node.getUid()))

    # now use a preference to select viewing documents from the archive
    portal_preferences = self.getPreferenceTool()
    self.pref = portal_preferences.newContent(id='user_pref',
                                              portal_type='Preference',
                                              preferred_archive=archive.getRelativeUrl())
    self.tic()
    self.getPreferenceTool().recursiveReindexObject()

    self.portal.portal_workflow.doActionFor(self.pref,
                                            'enable_action',
                                            wf_id='preference_workflow')
    self.assertEqual(self.pref.getPreferenceState(), 'enabled')

    path_list = [self.pref.getRelativeUrl()]
    self.checkRelativeUrlNotInSQLPathList(path_list, connection_id=self.original_connection_id)
    self.checkRelativeUrlInSQLPathList(path_list, connection_id=self.new_connection_id)
    self.checkRelativeUrlInSQLPathList(path_list, connection_id=self.archive_connection_id)

    self.assertEqual(portal_catalog.getPreferredSQLCatalogId(), archive.getCatalogId())
    self.assertEqual(len(self.folder.searchFolder(portal_type="Dummy Movement")), 1)

    # As only the first movement is in the archive, the inventory must be 100
    self.assertEqual(100, getInventory(node=self.node.getRelativeUrl()))

    # switch back to the current catalog
    self.pref.edit(preferred_archive=None)
    self.tic()

    # unindex and reindex the older inventory and check it is reindexed correctly
    self.inventory.unindexObject()
    self.tic()
    path_list = [self.inventory.getRelativeUrl()]
    self.checkRelativeUrlNotInSQLPathList(path_list, connection_id=self.new_connection_id)
    self.checkRelativeUrlNotInSQLPathList(path_list, connection_id=self.archive_connection_id)
    self.inventory.reindexObject()
    self.tic()
    path_list = [self.inventory.getRelativeUrl()]
    self.checkRelativeUrlNotInSQLPathList(path_list, connection_id=self.new_connection_id)
    self.checkRelativeUrlInSQLPathList(path_list, connection_id=self.archive_connection_id)
    # check the inventory computed from the archive now
    self.pref.edit(preferred_archive=archive.getRelativeUrl())
    self.tic()
    self.assertEqual(100, getInventory(node=self.node.getRelativeUrl()))

    # check that when an object is deleted, it is removed from all catalogs:
    module.manage_delObjects([self.organisation_1.id,])
    self.tic()
    path_list = [self.organisation_1.getRelativeUrl()]
    self.checkRelativeUrlNotInSQLPathList(path_list, connection_id=self.new_connection_id)
    self.checkRelativeUrlNotInSQLPathList(path_list, connection_id=self.archive_connection_id)

    # check the current archive
    self.assertEqual(portal_archive.getCurrentArchive(), dest)
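The assertions above rely on checkRelativeUrlInSQLPathList / checkRelativeUrlNotInSQLPathList helpers that query a given SQL connector directly. A minimal sketch, assuming the Zope connector can be called to obtain a database handle exposing query() (as ZMySQLDA connections do) and that the catalog table is named catalog; getSQLPathList is introduced here only for illustration:

  def getSQLPathList(self, connection_id):
    """Sketch: every 'path' stored in the catalog table reachable through
    the given SQL connector (illustrative, not the original helper)."""
    db = self.getPortal()[connection_id]()             # database handle
    columns, rows = db.query("SELECT path FROM catalog")
    return [row[0] for row in rows]

  def checkRelativeUrlInSQLPathList(self, url_list, connection_id=None):
    path_list = self.getSQLPathList(connection_id)
    portal_path = '/'.join(self.getPortal().getPhysicalPath())
    for url in url_list:
      self.assertTrue('%s/%s' % (portal_path, url) in path_list)

  def checkRelativeUrlNotInSQLPathList(self, url_list, connection_id=None):
    path_list = self.getSQLPathList(connection_id)
    portal_path = '/'.join(self.getPortal().getPhysicalPath())
    for url in url_list:
      self.assertTrue('%s/%s' % (portal_path, url) not in path_list)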
Example 6
    def test_Archive(self, quiet=quiet, run=1):  #run_all_test):
        if not run: return
        if not quiet:
            message = 'Archive'
            _print('\n%s ' % message)
            LOG('Testing... ', 0, message)

        portal = self.getPortal()
        portal_category = self.getCategoryTool()
        portal_archive = self.getArchiveTool()
        portal_catalog = self.getCatalogTool()
        inventory_module = portal.getDefaultModule(
            portal_type="Inventory Module")
        # Create some objects
        self.base_category = portal_category.newContent(
            portal_type='Base Category', title="GreatTitle1")
        module = portal.getDefaultModule('Organisation')
        self.organisation = module.newContent(portal_type='Organisation',
                                              title="GreatTitle2")
        getInventory = self.getSimulationTool().getInventory
        self.mvt = self._makeMovement(
            quantity=100,
            stop_date=DateTime("2006/06/06"),
            simulation_state='delivered',
        )
        self.assertEqual(100, getInventory(node_uid=self.node.getUid()))
        self.assertEqual(
            len(self.folder.searchFolder(portal_type="Dummy Movement")), 1)

        # Create an inventory object
        self.inventory = self._makeInventory(date=DateTime("2006/06/15"))
        self.assertEqual(
            len(inventory_module.searchFolder(portal_type="Inventory")), 1)

        # Flush message queue
        self.tic()

        # Check that they are properly indexed in the catalog
        self.original_connection_id = 'erp5_sql_connection'
        self.original_deferred_connection_id = 'erp5_sql_deferred_connection'
        path_list = [
            self.organisation.getRelativeUrl(),
            self.inventory.getRelativeUrl()
        ]
        self.checkRelativeUrlInSQLPathList(
            path_list, connection_id=self.original_connection_id)

        # Create new connectors for destination
        addSQLConnection = portal.manage_addProduct['ZMySQLDA'] \
          .manage_addZMySQLConnection
        self.new_connection_id = 'erp5_sql_connection1'
        db1, db2 = getExtraSqlConnectionStringList()[:2]
        addSQLConnection(self.new_connection_id, '', db1)
        new_connection = portal[self.new_connection_id]
        new_connection.manage_open_connection()
        # the deferred one
        self.new_deferred_connection_id = 'erp5_sql_connection2'
        addSQLConnection(self.new_deferred_connection_id, '', db1)
        new_deferred_connection = portal[self.new_deferred_connection_id]
        new_deferred_connection.manage_open_connection()

        # Create new connectors for archive
        self.archive_connection_id = 'erp5_sql_connection3'
        addSQLConnection(self.archive_connection_id, '', db2)
        archive_connection = portal[self.archive_connection_id]
        archive_connection.manage_open_connection()
        # the deferred one
        self.archive_deferred_connection_id = 'erp5_sql_connection4'
        addSQLConnection(self.archive_deferred_connection_id, '', db2)
        archive_deferred_connection = portal[
            self.archive_deferred_connection_id]
        archive_deferred_connection.manage_open_connection()

        # Create new catalog for destination
        self.original_catalog_id = 'erp5_mysql_innodb'
        self.new_catalog_id = self.original_catalog_id + '_2'
        cp_data = portal_catalog.manage_copyObjects(
            ids=('erp5_mysql_innodb', ))
        new_id = portal_catalog.manage_pasteObjects(cp_data)[0]['new_id']
        new_catalog_id = 'erp5_mysql_innodb_2'
        portal_catalog.manage_renameObject(id=new_id, new_id=new_catalog_id)

        # Create new catalog for archive
        self.archive_catalog_id = self.original_catalog_id + '_archive'
        cp_data = portal_catalog.manage_copyObjects(
            ids=('erp5_mysql_innodb', ))
        archive_id = portal_catalog.manage_pasteObjects(cp_data)[0]['new_id']
        archive_catalog_id = 'erp5_mysql_innodb_archive'
        portal_catalog.manage_renameObject(id=archive_id,
                                           new_id=archive_catalog_id)

        # Create an archive
        archive = portal_archive.newContent(
            portal_type="Archive",
            catalog_id=self.archive_catalog_id,
            connection_id=self.archive_connection_id,
            deferred_connection_id=self.archive_deferred_connection_id,
            priority=3,
            inventory_method_id='Archive_createAllInventory',
            test_method_id='Archive_test',
            stop_date_range_min=DateTime("2006/06/01"),
            stop_date_range_max=DateTime("2006/07/01"),
        )
        archive.ready()
        # Create an archive for destination catalog
        dest = portal_archive.newContent(
            portal_type="Archive",
            catalog_id=self.new_catalog_id,
            connection_id=self.new_connection_id,
            deferred_connection_id=self.new_deferred_connection_id,
            priority=1,
            test_method_id='Archive_test',
            stop_date_range_min=DateTime("2006/07/01"),
        )
        dest.ready()

        # make sure to commit to release any lock on tables
        self.commit()

        # Do archive
        portal_archive.manage_archive(destination_archive_id=dest.getId(),
                                      archive_id=archive.getId(),
                                      update_destination_sql_catalog=True,
                                      update_archive_sql_catalog=True,
                                      clear_destination_sql_catalog=True,
                                      clear_archive_sql_catalog=True)

        self.tic()
        self.assertEqual(portal_catalog.getSQLCatalog().id,
                         self.new_catalog_id)
        self.assertEqual(archive.getValidationState(), 'validated')
        self.assertEqual(dest.getValidationState(), 'validated')
        # Check that the organisation object is indexed
        # in the archive, the current catalog, and the old one
        path_list = [self.organisation.getRelativeUrl()]
        self.checkRelativeUrlInSQLPathList(
            path_list, connection_id=self.original_connection_id)
        self.checkRelativeUrlInSQLPathList(
            path_list, connection_id=self.new_connection_id)
        self.checkRelativeUrlInSQLPathList(
            path_list, connection_id=self.archive_connection_id)
        # Create a new organisation and check it is indexed in the current
        # and archive catalogs, but not in the old one
        self.organisation_1 = module.newContent(portal_type='Organisation',
                                                title="GreatTitle3")
        self.tic()
        path_list = [self.organisation_1.getRelativeUrl()]
        self.checkRelativeUrlNotInSQLPathList(
            path_list, connection_id=self.original_connection_id)
        self.checkRelativeUrlInSQLPathList(
            path_list, connection_id=self.new_connection_id)
        self.checkRelativeUrlInSQLPathList(
            path_list, connection_id=self.archive_connection_id)

        # Check that the movement is indexed
        # in the archive and the old catalog, but not in the current one
        path_list = [self.mvt.getRelativeUrl()]
        self.checkRelativeUrlInSQLPathList(
            path_list, connection_id=self.original_connection_id)
        self.checkRelativeUrlNotInSQLPathList(
            path_list, connection_id=self.new_connection_id)
        self.checkRelativeUrlInSQLPathList(
            path_list, connection_id=self.archive_connection_id)

        # Check that the inventory is indexed
        # in the archive and the old catalog, but not in the current one
        path_list = [self.inventory.getRelativeUrl()]
        self.checkRelativeUrlInSQLPathList(
            path_list, connection_id=self.original_connection_id)
        self.checkRelativeUrlNotInSQLPathList(
            path_list, connection_id=self.new_connection_id)
        self.checkRelativeUrlInSQLPathList(
            path_list, connection_id=self.archive_connection_id)

        # Create a new movement and check it goes only into the new catalog
        self.assertEqual(
            len(self.folder.searchFolder(portal_type="Dummy Movement")), 0)
        self.assertEqual(100, getInventory(node_uid=self.node.getUid()))
        self.new_mvt = self._makeMovement(
            quantity=50,
            stop_date=DateTime("2006/08/06"),
            simulation_state='delivered',
        )
        self.tic()
        self.assertEqual(
            len(self.folder.searchFolder(portal_type="Dummy Movement")), 1)
        # Check that the new movement is indexed
        # in the current catalog only, not in the archive or the old one
        path_list = [self.new_mvt.getRelativeUrl()]
        self.checkRelativeUrlNotInSQLPathList(
            path_list, connection_id=self.original_connection_id)
        self.checkRelativeUrlInSQLPathList(
            path_list, connection_id=self.new_connection_id)
        self.checkRelativeUrlNotInSQLPathList(
            path_list, connection_id=self.archive_connection_id)
        self.assertEqual(150, getInventory(node_uid=self.node.getUid()))

        # now use a preference to select viewing documents from the archive
        portal_preferences = self.getPreferenceTool()
        self.pref = portal_preferences.newContent(
            id='user_pref',
            portal_type='Preference',
            preferred_archive=archive.getRelativeUrl())
        self.tic()
        self.getPreferenceTool().recursiveReindexObject()

        self.portal.portal_workflow.doActionFor(self.pref,
                                                'enable_action',
                                                wf_id='preference_workflow')
        self.assertEqual(self.pref.getPreferenceState(), 'enabled')

        path_list = [self.pref.getRelativeUrl()]
        self.checkRelativeUrlNotInSQLPathList(
            path_list, connection_id=self.original_connection_id)
        self.checkRelativeUrlInSQLPathList(
            path_list, connection_id=self.new_connection_id)
        self.checkRelativeUrlInSQLPathList(
            path_list, connection_id=self.archive_connection_id)

        self.assertEqual(portal_catalog.getPreferredSQLCatalogId(),
                         archive.getCatalogId())
        self.assertEqual(
            len(self.folder.searchFolder(portal_type="Dummy Movement")), 1)

        # As only the first movement is in the archive, the inventory must be 100
        self.assertEqual(100, getInventory(node=self.node.getRelativeUrl()))

        # switch back to the current catalog
        self.pref.edit(preferred_archive=None)
        self.tic()

        # unindex and reindex the older inventory and check it is reindexed correctly
        self.inventory.unindexObject()
        self.tic()
        path_list = [self.inventory.getRelativeUrl()]
        self.checkRelativeUrlNotInSQLPathList(
            path_list, connection_id=self.new_connection_id)
        self.checkRelativeUrlNotInSQLPathList(
            path_list, connection_id=self.archive_connection_id)
        self.inventory.reindexObject()
        self.tic()
        path_list = [self.inventory.getRelativeUrl()]
        self.checkRelativeUrlNotInSQLPathList(
            path_list, connection_id=self.new_connection_id)
        self.checkRelativeUrlInSQLPathList(
            path_list, connection_id=self.archive_connection_id)
        # check the inventory computed from the archive now
        self.pref.edit(preferred_archive=archive.getRelativeUrl())
        self.tic()
        self.assertEqual(100, getInventory(node=self.node.getRelativeUrl()))

        # check that when an object is deleted, it is removed from all catalogs:
        module.manage_delObjects([
            self.organisation_1.id,
        ])
        self.tic()
        path_list = [self.organisation_1.getRelativeUrl()]
        self.checkRelativeUrlNotInSQLPathList(
            path_list, connection_id=self.new_connection_id)
        self.checkRelativeUrlNotInSQLPathList(
            path_list, connection_id=self.archive_connection_id)

        # check the current archive
        self.assertEqual(portal_archive.getCurrentArchive(), dest)