def sync_path_cache(tk, force_full_sync=False):
    """
    Synchronizes the path cache with Shotgun.

    :param tk: Toolkit API instance whose path cache should be synchronized.
    :param force_full_sync: Force a full sync.
    :returns: log output in a variable
    """
    # capture sync log to string
    stream = StringIO.StringIO()
    handler = logging.StreamHandler(stream)
    log = logging.getLogger("sgtk.core.path_cache")
    # Raising the level mutates a shared logger - remember it so it can be restored.
    previous_level = log.level
    log.setLevel(logging.DEBUG)
    log.addHandler(handler)

    try:
        # Use the path cache to look up all paths associated with this entity
        pc = path_cache.PathCache(tk)
        try:
            pc.synchronize(force_full_sync)
        finally:
            # Release the sqlite connection even if the sync raised.
            pc.close()
    finally:
        # Always detach the capture handler and restore the previous logger
        # level, even on failure, so the shared logger is left untouched.
        log.removeHandler(handler)
        log.setLevel(previous_level)

    # Do not close StringIO here, as on Python 2.5 this will cause some garbage to be printed
    # when the unit tests are complete. The StringIO object will be gc'ed anyway, so it shouldn't
    # be too bad.
    log_contents = stream.getvalue()
    return log_contents
def setUp(self):
    """
    Creates a bunch of entities in Mockgun and adds an entry to the FilesystemLocation.
    """
    super(TestPathCacheDelete, self).setUp()

    # Create a bunch of entities for unit testing.
    self._project_link = self.mockgun.create("Project", {"name": "MyProject"})

    self._shot_entity = self.mockgun.create(
        "Shot", {"code": "MyShot", "project": self._project_link}
    )
    self._shot_entity["name"] = "MyShot"
    self._shot_full_path = os.path.join(self.project_root, "shot")

    self._asset_entity = self.mockgun.create(
        "Asset", {"code": "MyAsset", "project": self._project_link}
    )
    self._asset_entity["name"] = "MyAsset"
    self._asset_full_path = os.path.join(self.project_root, "asset")

    self._pc = path_cache.PathCache(self.tk)

    # Register the asset. This will be our sentinel to make sure we are not deleting too much stuff during
    # the tests.
    add_item_to_cache(self._pc, self._asset_entity, self._asset_full_path)

    # Wrap some methods in a mock so we can track their usage.
    for method_name in (
        "_do_full_sync",
        "_import_filesystem_location_entry",
        "_remove_filesystem_location_entities",
    ):
        setattr(self._pc, method_name, Mock(wraps=getattr(self._pc, method_name)))
def _move_data(path):
    """
    Rename directory to backup name, if backup currently exists replace it.

    :param path: Directory to move aside. It is renamed to a sibling
        "&lt;name&gt;.old" backup; any pre-existing backup is deleted first.
    """
    if path and os.path.exists(path):
        dirname, basename = os.path.split(path)
        new_basename = "%s.old" % basename
        backup_path = os.path.join(dirname, new_basename)
        # Replace any stale backup from a previous run.
        if os.path.exists(backup_path):
            shutil.rmtree(backup_path)
        try:
            os.rename(path, backup_path)
        except WindowsError:
            # On windows intermittent problems with sqlite db file occur
            tk = sgtk.sgtk_from_path(path)
            pc = path_cache.PathCache(tk)
            db_path = pc._get_path_cache_location()
            # Close the cache so its sqlite connection no longer holds the
            # db file open - otherwise the os.remove() below cannot succeed
            # on Windows (the open handle was previously leaked here).
            pc.close()
            if os.path.exists(db_path):
                print('Removing db %s' % db_path)
                # Importing pdb allows the deletion of the sqlite db sometimes...
                import pdb  # noqa
                # try multiple times, waiting longer in between
                for count in range(5):
                    try:
                        os.remove(db_path)
                        break
                    except WindowsError:
                        # Back off progressively (0s, 2s, 4s, ...) before retrying.
                        time.sleep(count * 2)
            os.rename(path, backup_path)
def tearDown(self):
    """
    Cleans up after tests.
    """
    self._tear_down_called = True

    try:
        sgtk.set_authenticated_user(self._authenticated_user)

        if self._do_io:
            # Drop the path cache database from the local ~/.shotgun storage.
            pc = path_cache.PathCache(self.tk)
            cache_file = pc._get_path_cache_location()
            pc.close()
            if os.path.exists(cache_file):
                os.remove(cache_file)

            # Drop the init cache as well.
            init_cache = pipelineconfig_factory._get_cache_location()
            if os.path.exists(init_cache):
                os.remove(init_cache)

            # Move the project scaffold out of the way.
            self._move_project_data()

        # Important to delete this to free memory.
        self.tk = None

        # Clear the global shotgun connection accessor.
        tank.util.shotgun.connection._g_sg_cached_connections = threading.local()
    finally:
        # Restore (or remove) the SHOTGUN_HOME override in all cases.
        if self._old_shotgun_home is None:
            del os.environ[self.SHOTGUN_HOME]
        else:
            os.environ[self.SHOTGUN_HOME] = self._old_shotgun_home
def test_shot(self):
    """Tests paths used in making a shot are as expected."""
    shot_dir = "%s_%s" % (self.shot["code"], self.seq["code"])
    shot_path = os.path.join(self.project_root, shot_dir)
    expected_paths = [self.project_root, shot_path]

    folder.process_filesystem_structure(
        self.tk, self.shot["type"], self.shot["id"], preview=False, engine=None
    )

    assert_paths_to_create(expected_paths)

    # Now verify the path cache: there should be exactly one entry for the
    # shot and one for the sequence.
    cache = path_cache.PathCache(self.pipeline_configuration)
    shot_paths = cache.get_paths("Shot", self.shot["id"], primary_only=False)
    seq_paths = cache.get_paths("Sequence", self.seq["id"], primary_only=False)
    self.assertEquals(len(shot_paths), 1)
    self.assertEquals(len(seq_paths), 1)
    cache.close()

    # The sequence and the shot resolve to the same folder.
    self.assertEquals(shot_paths, seq_paths)
def setUp(self):
    """Builds the fixture entities used by the edge-case folder tests."""
    super(TestFolderCreationEdgeCases, self).setUp()
    self.setup_fixtures()

    self.seq = {
        "type": "Sequence",
        "id": 2,
        "code": "seq_code",
        "project": self.project,
    }
    self.shot = {
        "type": "Shot",
        "id": 1,
        "code": "shot_code",
        "sg_sequence": self.seq,
        "project": self.project,
    }
    self.step = {
        "type": "Step",
        "id": 3,
        "code": "step_code",
        "short_name": "step_short_name",
    }
    self.task = {
        "type": "Task",
        "id": 1,
        "content": "this task",
        "entity": self.shot,
        "step": {"type": "Step", "id": 3},
        "project": self.project,
    }

    # Register the fixture entities with the mocked shotgun database.
    self.add_to_sg_mock_db([self.shot, self.seq, self.step, self.project, self.task])

    self.path_cache = path_cache.PathCache(self.tk)
def tearDown(self): """ Cleans up after tests. """ # get rid of path cache from local ~/.shotgun storage pc = path_cache.PathCache(self.tk) path_cache_file = pc._get_path_cache_location() pc.close() if os.path.exists(path_cache_file): os.remove(path_cache_file) # clear global shotgun accessor tank.util.shotgun.g_sg_cached_connection = None # get rid of init cache if os.path.exists(self.init_cache_location): os.remove(self.init_cache_location) # move project scaffold out of the way self._move_project_data() # important to delete this to free memory self.tk = None self._original_get_cache_location = tank.pipelineconfig_factory._get_cache_location tank.util.shotgun.get_associated_sg_config_data = self._original_get_associated_sg_config_data tank.util.shotgun.get_associated_sg_base_url = self._original_get_associated_sg_base_url tank.util.shotgun.create_sg_connection = self._original_create_sg_connection
def test_db_exists(self):
    """Checks that constructing a PathCache creates the database file."""
    # Start from a clean slate: remove any existing cache database.
    if os.path.exists(self.path_cache_location):
        self.path_cache.close()
        os.remove(self.path_cache_location)
    self.assertFalse(os.path.exists(self.path_cache_location))

    # Instantiating a PathCache should (re)create the database file.
    cache = path_cache.PathCache(self.tk)
    cache.close()
    self.assertTrue(os.path.exists(self.path_cache_location))
def test_db_exists(self):
    """Checks that constructing a PathCache recreates the database file."""
    # Resolve where the pipeline configuration keeps its cache database.
    config = tank.pipelineconfig.from_path(self.project_root)
    db_path = config.get_path_cache_location()

    # Start from a clean slate: remove any existing database file.
    if os.path.exists(db_path):
        self.path_cache.close()
        os.remove(db_path)
    self.assertFalse(os.path.exists(db_path))

    # Instantiating a PathCache should recreate the database on disk.
    cache = path_cache.PathCache(self.pipeline_configuration)
    cache.close()
    self.assertTrue(os.path.exists(db_path))
def sync_path_cache(tk, force_full_sync=False):
    """
    Synchronizes the path cache with Shotgun.

    :param tk: Toolkit API instance whose path cache should be synchronized.
    :param force_full_sync: Force a full sync.
    """
    # Use the path cache to look up all paths associated with this entity
    pc = path_cache.PathCache(tk)
    try:
        pc.synchronize(log=None, full_sync=force_full_sync)
    finally:
        # Release the sqlite connection even if the sync raised.
        pc.close()
def mock_remote_path_cache(self):
    """
    Mocks a remote path cache that can be updated.

    Yields a synchronized PathCache backed by an alternate SHOTGUN_HOME and
    guarantees it is closed on exit, even if synchronization fails.
    """
    # Override the SHOTGUN_HOME so that path cache is read from another location.
    with temp_env_var(SHOTGUN_HOME=os.path.join(self.tank_temp, "other_path_cache_root")):
        pc = path_cache.PathCache(self.tk)
        try:
            # Synchronize inside the try block so the cache is closed even if
            # the initial sync raises (previously this leaked the connection).
            pc.synchronize()
            yield pc
        finally:
            pc.close()
def setUp(self):
    """Creates task-centric fixtures and runs folder creation once."""
    super(TestFolderCreationPathCache, self).setUp()

    # Use a task based fixtures, as task folders generate two path cache entries with same path, one linked
    # to a task as a primary item, and one linked to a step as a secondary item.
    self.setup_fixtures(parameters={"core": "core.override/shotgun_multi_task_core"})

    self.seq = {"type": "Sequence", "id": 2, "code": "seq_code", "project": self.project}
    self.shot = {
        "type": "Shot",
        "id": 1,
        "code": "shot_code",
        "sg_sequence": self.seq,
        "project": self.project,
    }
    self.step = {"type": "Step", "id": 3, "code": "step_code", "short_name": "step_short_name"}
    self.task = {
        "type": "Task",
        "id": 23,
        "entity": self.shot,
        "content": "task1",
        "step": self.step,
        "project": self.project,
    }

    # Register all fixture entities with the mocked shotgun database.
    self.add_to_sg_mock_db([self.shot, self.seq, self.step, self.project, self.task])

    self.path_cache = path_cache.PathCache(self.tk)

    folder.process_filesystem_structure(
        self.tk, self.task["type"], self.task["id"], preview=False, engine=None
    )

    self.db_cursor = self.path_cache._connection.cursor()
def test_task_a(self):
    """Tests paths used in making a shot are as expected."""
    folder.process_filesystem_structure(
        self.tk, self.task["type"], self.task["id"], preview=False, engine=None
    )

    shot_path = os.path.join(
        self.project_root, "%s_%s" % (self.shot["code"], self.seq["code"])
    )
    step_path = os.path.join(
        shot_path, "%s_%s" % (self.task["content"], self.step["short_name"])
    )
    expected_paths = [
        self.project_root,
        shot_path,
        step_path,
        # non-entity folder created alongside the step
        os.path.join(step_path, "images"),
    ]

    assert_paths_to_create(expected_paths)

    # Now check the path cache: there should be two entries, one for the
    # task and one for the step.
    cache = path_cache.PathCache(self.pipeline_configuration)
    step_paths = cache.get_paths("Step", self.step["id"], primary_only=False)
    task_paths = cache.get_paths("Task", self.task["id"], primary_only=False)
    self.assertEquals(len(step_paths), 1)
    self.assertEquals(len(task_paths), 1)
    # it's the same folder for seq and shot
    self.assertEquals(step_paths, task_paths)
    cache.close()

    # Finally check the context resolved from the step folder.
    ctx = self.tk.context_from_path(step_path)
    self.assertEquals(ctx.task["id"], self.task["id"])
    self.assertEquals(ctx.task["type"], self.task["type"])
    # Because of the double entity matching, we should have a step and a task!
    self.assertEquals(ctx.step["id"], self.step["id"])
    self.assertEquals(ctx.step["type"], self.step["type"])
def sync_path_cache(tk, force_full_sync=False):
    """
    Synchronizes the path cache with Shotgun.

    :param tk: Toolkit API instance whose path cache should be synchronized.
    :param force_full_sync: Force a full sync.
    :returns: log output in a variable
    """
    # capture sync log to string
    stream = StringIO.StringIO()
    handler = logging.StreamHandler(stream)
    log = logging.getLogger("sgtk.core.path_cache")
    # Raising the level mutates a shared logger - remember it so it can be restored.
    previous_level = log.level
    log.setLevel(logging.DEBUG)
    log.addHandler(handler)

    try:
        # Use the path cache to look up all paths associated with this entity
        pc = path_cache.PathCache(tk)
        try:
            pc.synchronize(force_full_sync)
        finally:
            # Release the sqlite connection even if the sync raised.
            pc.close()
        log_contents = stream.getvalue()
    finally:
        # Always detach the capture handler, restore the previous logger
        # level and close the stream, even if the synchronize call raised.
        log.removeHandler(handler)
        log.setLevel(previous_level)
        stream.close()

    return log_contents
def setUp(self):
    """Creates the multi-root fixtures and opens a path cache for the tests."""
    super(TestPathCache, self).setUp()
    self.setup_multi_root_fixtures()

    # Open a path cache against the test Toolkit instance and remember
    # where its database lives on disk.
    cache = path_cache.PathCache(self.tk)
    self.path_cache = cache
    self.path_cache_location = cache._get_path_cache_location()
def setUp(self):
    """Creates the multi-root fixtures and opens a path cache for the tests."""
    super(TestPathCache, self).setUp()
    self.setup_multi_root_fixtures()
    # Build the path cache directly from the pipeline configuration.
    cache = path_cache.PathCache(self.pipeline_configuration)
    self.path_cache = cache