Example #1
    def add_to_path_cache(self, path, entity):
        """
        Adds a path and entity to the path cache sqlite db.

        :param path: Absolute path to add.
        :param entity: Entity dictionary with values for the keys 'id', 'name', and 'type'.
        """

        # fix name/code discrepancy
        if "code" in entity:
            entity["name"] = entity["code"]

        path_cache = tank.path_cache.PathCache(self.tk)

        data = [{"entity": {"id": entity["id"],
                            "type": entity["type"],
                            "name": entity["name"]},
                 "metadata": [],
                 "path": path,
                 "primary": True}]
        path_cache.add_mappings(data, None, [])

        # On Windows the path cache file handle can persist and interfere
        # with teardown, so close it and release the reference.
        path_cache.close()
        del path_cache
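
A hypothetical usage sketch (not part of the original test base): register a path for a Shot and verify, via the _get_path_cache helper shown in the next example, that exactly one row was written. The entity values and the self.project_root attribute are illustrative assumptions.

    def test_add_to_path_cache(self):
        # Hypothetical test sketch; os and tank are already imported at module
        # scope in these test modules. self.project_root is an assumed fixture
        # attribute pointing at the test project sandbox.
        shot_path = os.path.join(self.project_root, "sequences", "seq_001", "shot_010")
        shot_entity = {"id": 1234, "type": "Shot", "name": "shot_010"}  # illustrative values

        rows_before = len(self._get_path_cache())
        self.add_to_path_cache(shot_path, shot_entity)

        # exactly one new row should now be present in the path_cache table
        self.assertEqual(len(self._get_path_cache()), rows_before + 1)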
Example #2
    def _get_path_cache(self):
        """Returns the raw contents of the path_cache table as a list of rows."""
        path_cache = tank.path_cache.PathCache(self.tk)
        c = path_cache._connection.cursor()
        cache = list(c.execute("select * from path_cache"))
        c.close()
        path_cache.close()
        return cache
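
The rows returned above are raw sqlite3 tuples. As a hedged variation (not from the original), the same query can be made self-describing by reading the column names from the cursor description, which avoids assuming the path_cache table schema; the method name below is hypothetical.

    def _dump_path_cache_rows(self):
        """Sketch: print each path_cache row as a dict keyed by column name."""
        path_cache = tank.path_cache.PathCache(self.tk)
        c = path_cache._connection.cursor()
        c.execute("select * from path_cache")
        columns = [d[0] for d in c.description]  # column names reported by sqlite
        for row in c.fetchall():
            print(dict(zip(columns, row)))
        c.close()
        path_cache.close()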
Example #3
    def debug_dump(self):
        """
        Prints out the contents of the mockgun Shotgun database and the path cache.
        """
        print("")
        print(
            "-----------------------------------------------------------------------------"
        )
        print(" Shotgun contents:")

        print(pprint.pformat(self.mockgun._db))
        print("")
        print("")
        print("Path Cache contents:")

        path_cache = tank.path_cache.PathCache(self.tk)
        c = path_cache._connection.cursor()
        for x in list(c.execute("select * from path_cache")):
            print(x)
        c.close()
        path_cache.close()

        print(
            "-----------------------------------------------------------------------------"
        )
        print("")
Example #4
    def test_multiple_projects_eventlog(self):
        """
        Tests that projects don't get their path caches mixed up.

        This tests that the path cache for a project isn't influenced
        or affected by filesystem locations and event logs created
        by other projects.
        """

        # now create folders down to task level
        folder.process_filesystem_structure(self.tk,
                                            self.task["type"],
                                            self.task["id"],
                                            preview=False,
                                            engine=None)

        # simulate an event log entry inserted by another project
        sg_proj = self.tk.shotgun.create("Project", {"name": "other_project"})
        sg_data = {
            'description':
            'Toolkit HEAD: Created folders on disk for Tasks with id: 888',
            'entity': {
                'id': 666,
                'type': 'PipelineConfiguration'
            },
            'event_type': 'Toolkit_Folders_Create',
            'meta': {
                'core_api_version': 'HEAD',
                'sg_folder_ids': [768]
            },
            'project': sg_proj
        }
        for x in range(100):
            self.tk.shotgun.create("EventLogEntry", sg_data)

        # now delete our path cache so that next time, a full sync is done
        path_cache = tank.path_cache.PathCache(self.tk)
        path_cache_location = path_cache._get_path_cache_location()
        path_cache.close()
        os.remove(path_cache_location)

        # now because we deleted our path cache, we will do a full sync
        log = sync_path_cache(self.tk)
        self.assertTrue("Performing a complete Shotgun folder sync" in log)

        # now if we sync again, this should be incremental and the sync
        # should detect that there are no new entries for this project,
        # even though there are new entries for other projects.
        log = sync_path_cache(self.tk)
        self.assertTrue("Path cache syncing not necessary" in log)
Example #5
    def test_incremental_sync(self):
        """Tests that the incremental sync kicks in when possible."""

        # get the location of the path cache
        path_cache = tank.path_cache.PathCache(self.tk)
        pcl = path_cache._get_path_cache_location()
        path_cache.close()

        # now process the sequence level folder creation
        folder.process_filesystem_structure(self.tk,
                                            self.seq["type"],
                                            self.seq["id"],
                                            preview=False,
                                            engine=None)

        # now have project and sequence in the path cache
        self.assertEqual(
            len(self.tk.shotgun.find(tank.path_cache.SHOTGUN_ENTITY, [])), 2)
        self.assertEqual(len(self._get_path_cache()), 2)

        # make a copy of the path cache at this point
        shutil.copy(pcl, "%s.snap1" % pcl)

        # now create folders down to task level
        folder.process_filesystem_structure(self.tk,
                                            self.task["type"],
                                            self.task["id"],
                                            preview=False,
                                            engine=None)

        # now have project / seq / shot / step
        self.assertEqual(
            len(self.tk.shotgun.find(tank.path_cache.SHOTGUN_ENTITY, [])), 4)
        self.assertEqual(len(self._get_path_cache()), 4)

        # now replace our path cache file with snap1
        # so that we have a not-yet-up-to-date path cache file.
        shutil.copy("%s.snap1" % pcl, pcl)
        self.assertEqual(len(self._get_path_cache()), 2)

        # now we run the sync - and this sync should be incremental
        log = sync_path_cache(self.tk)
        # make sure the log mentions an incremental sync
        self.assertTrue("Doing an incremental sync" in log)
        # and make sure the sync generated new records
        self.assertEqual(len(self._get_path_cache()), 4)
Example #6
    def debug_dump(self):
        """
        Prints out the contents of the mockgun shotgun database and the path cache
        """
        print ""
        print "-----------------------------------------------------------------------------"
        print " Shotgun contents:"

        print pprint.pformat(self.tk.shotgun._db)
        print ""
        print ""
        print "Path Cache contents:"

        path_cache = tank.path_cache.PathCache(self.tk)
        c = path_cache._connection.cursor()
        for x in list(c.execute("select * from path_cache" )):
            print x
        c.close()
        path_cache.close()

        print "-----------------------------------------------------------------------------"
        print ""
Example #7
    def test_shot(self):
        """Test full and incremental path cache sync."""

        path_cache = tank.path_cache.PathCache(self.tk)
        pcl = path_cache._get_path_cache_location()
        path_cache.close()

        self.assertEqual(
            len(self.tk.shotgun.find(tank.path_cache.SHOTGUN_ENTITY, [])), 1)
        self.assertEqual(len(self._get_path_cache()), 1)

        folder.process_filesystem_structure(self.tk,
                                            self.seq["type"],
                                            self.seq["id"],
                                            preview=False,
                                            engine=None)

        # now have project / seq
        self.assertEqual(
            len(self.tk.shotgun.find(tank.path_cache.SHOTGUN_ENTITY, [])), 2)
        self.assertEqual(len(self._get_path_cache()), 2)

        # nothing should happen
        sync_path_cache(self.tk)
        self.assertEqual(
            len(self.tk.shotgun.find(tank.path_cache.SHOTGUN_ENTITY, [])), 2)
        self.assertEqual(len(self._get_path_cache()), 2)

        # make a copy of the path cache at this point
        shutil.copy(pcl, "%s.snap1" % pcl)

        # now insert a new path in Shotgun
        folder.process_filesystem_structure(self.tk,
                                            self.task["type"],
                                            self.task["id"],
                                            preview=False,
                                            engine=None)

        # now have project / seq / shot / step
        self.assertEqual(
            len(self.tk.shotgun.find(tank.path_cache.SHOTGUN_ENTITY, [])), 4)
        self.assertEqual(len(self._get_path_cache()), 4)
        path_cache_contents_1 = self._get_path_cache()

        # now replace our path cache with snap1
        shutil.copy(pcl, "%s.snap2" % pcl)
        shutil.copy("%s.snap1" % pcl, pcl)

        # now the path cache has not been synchronized, but Shotgun has the new entries
        self.assertEqual(
            len(self.tk.shotgun.find(tank.path_cache.SHOTGUN_ENTITY, [])), 4)
        self.assertEqual(len(self._get_path_cache()), 2)
        sync_path_cache(self.tk)

        # check that the sync happened
        self.assertEqual(
            len(self.tk.shotgun.find(tank.path_cache.SHOTGUN_ENTITY, [])), 4)
        self.assertEqual(len(self._get_path_cache()), 4)

        # and that the content is the same
        path_cache_contents_2 = self._get_path_cache()
        self.assertEqual(path_cache_contents_2, path_cache_contents_1)

        # now clear the path cache completely. This should trigger a full sync
        os.remove(pcl)
        log = sync_path_cache(self.tk)
        self.assertTrue("Performing a complete Shotgun folder sync" in log)

        # check that the sync happened
        self.assertEqual(
            len(self.tk.shotgun.find(tank.path_cache.SHOTGUN_ENTITY, [])), 4)
        self.assertEqual(len(self._get_path_cache()), 4)

        # and that the content is the same
        path_cache_contents_3 = self._get_path_cache()
        self.assertEqual(path_cache_contents_3, path_cache_contents_1)
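
The snapshot-and-restore dance above (copy the path cache file, create more folders, then copy the snapshot back to simulate a stale cache) could be wrapped in a small helper. A hedged sketch, not part of the original tests; shutil is already imported in these modules.

import contextlib
import shutil

@contextlib.contextmanager
def stale_path_cache(pcl):
    """Rolls the path cache file at `pcl` back to its pre-block state on exit."""
    snapshot = "%s.snap" % pcl
    shutil.copy(pcl, snapshot)  # remember the current state of the cache
    try:
        yield snapshot
    finally:
        shutil.copy(snapshot, pcl)  # restore it, leaving the cache out of date

Used as "with stale_path_cache(pcl): folder.process_filesystem_structure(...)", the cache ends up out of date with respect to Shotgun, so the next sync_path_cache(tk) call should report an incremental sync.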
Example #8
    def test_shot(self):
        """Test full and incremental path cache sync."""
        
        path_cache = tank.path_cache.PathCache(self.tk)
        pcl = path_cache._get_path_cache_location()
        path_cache.close()

        self.assertEqual(len(self.tk.shotgun.find(tank.path_cache.SHOTGUN_ENTITY, [])), 1)
        self.assertEqual(len(self._get_path_cache()), 1)

        folder.process_filesystem_structure(self.tk,
                                            self.seq["type"],
                                            self.seq["id"],
                                            preview=False,
                                            engine=None)

        # now have project / seq
        self.assertEqual(len(self.tk.shotgun.find(tank.path_cache.SHOTGUN_ENTITY, [])), 2)
        self.assertEqual(len(self._get_path_cache()), 2)

        # nothing should happen
        sync_path_cache(self.tk)
        self.assertEqual(len(self.tk.shotgun.find(tank.path_cache.SHOTGUN_ENTITY, [])), 2)
        self.assertEqual(len(self._get_path_cache()), 2)

        # make a copy of the path cache at this point
        shutil.copy(pcl, "%s.snap1" % pcl)

        # now insert a new path in Shotgun
        folder.process_filesystem_structure(self.tk,
                                            self.task["type"],
                                            self.task["id"],
                                            preview=False,
                                            engine=None)

        # now have project / seq / shot / step
        self.assertEqual(len(self.tk.shotgun.find(tank.path_cache.SHOTGUN_ENTITY, [])), 4)
        self.assertEqual(len(self._get_path_cache()), 4)
        path_cache_contents_1 = self._get_path_cache()

        # now replace our path cache with snap1
        shutil.copy(pcl, "%s.snap2" % pcl)
        shutil.copy("%s.snap1" % pcl, pcl)

        # now the path cache has not been synchronized, but Shotgun has the new entries
        self.assertEqual(len(self.tk.shotgun.find(tank.path_cache.SHOTGUN_ENTITY, [])), 4)
        self.assertEqual(len(self._get_path_cache()), 2)
        sync_path_cache(self.tk)

        # check that the sync happened
        self.assertEqual(len(self.tk.shotgun.find(tank.path_cache.SHOTGUN_ENTITY, [])), 4)
        self.assertEqual(len(self._get_path_cache()), 4)

        # and that the content is the same
        path_cache_contents_2 = self._get_path_cache()
        self.assertEqual(path_cache_contents_2, path_cache_contents_1)

        # now clear the path cache completely. This should trigger a full sync
        os.remove(pcl)
        sync_path_cache(self.tk)

        # check that the sync happened
        self.assertEqual(len(self.tk.shotgun.find(tank.path_cache.SHOTGUN_ENTITY, [])), 4)
        self.assertEqual(len(self._get_path_cache()), 4)

        # and that the content is the same
        path_cache_contents_3 = self._get_path_cache()
        self.assertEqual(path_cache_contents_3, path_cache_contents_1)