def do_show(objID, version):
    """Display one server object identified by (objID, version).

    Opens the repository search file, looks up the object's metadata,
    and hands it to EditObject.show_one_server_object for display.

    Returns True on success, False if the search file could not be
    opened or the metadata lookup failed.
    """
    success, searchFile = ryw.open_search_file(
        'DisplayObject:',
        os.path.join(RepositoryRoot, 'WWW', 'logs'),
        'upload.log',
        os.path.join(RepositoryRoot, 'SearchFile'),
        False)
    if not success:
        return False

    # From here on the search file is open; guarantee it is closed even
    # if the metadata lookup or the display call raises.
    try:
        success, meta = searchFile.get_meta(objID, version)
        if not success or not meta:
            ryw.give_bad_news(
                'DisplayObject.do_show: get_meta failed.',
                logging.critical)
            return False

        EditObject.show_one_server_object(meta, searchFile)
        return True
    finally:
        if searchFile:
            searchFile.done()
def deal_with_stub(localObjRoot, objID, version, metaData, dataDir,
                   auxDir, searchFile=None):
    """Apply a metadata-only (stub) update, refreshing the aux dir if present.

    Updates the object's metadata via EditObject.do_update_metadata (writing
    through to the SearchFile), then, if *auxDir* exists on disc, replaces
    the stored auxiliary directory with a fresh copy of it.

    Parameters:
        localObjRoot -- root of the local object store.
        objID, version -- identify the object being updated.
        metaData -- new metadata to write.
        dataDir -- data directory (unused here; kept for interface parity).
        auxDir -- incoming auxiliary directory; may not exist.
        searchFile -- already-open SearchFile to reuse, or None to open one.
            Reusing avoids re-opening the SearchFile over and over when this
            is called repeatedly; the write-through itself is kept for
            simplicity even though it costs some redundant work.

    Returns (success, searchFile); the caller owns closing searchFile.
    """
    success, searchFile = EditObject.do_update_metadata(
        localObjRoot, objID, version, metaData, searchFile=searchFile)
    if not success:
        ryw.give_bad_news('ProcessDiscs: EditObject failed: ' +
                          objID + '#' + str(version), logging.error)
        return (False, None)

    # Metadata-only update: nothing more to do if no aux dir came in.
    if not os.path.exists(auxDir):
        return (True, searchFile)

    oldPaths = objectstore.name_version_to_paths_aux(
        localObjRoot, objID, version)
    oldAuxDir = oldPaths[2]

    try:
        # Replace the stored aux dir wholesale: remove then recursive copy.
        ryw.force_remove(oldAuxDir, 'deal_with_found_objects')
        shutil.copytree(auxDir, oldAuxDir)
    except Exception:
        # Was a bare except: narrowed so KeyboardInterrupt/SystemExit
        # still propagate; any filesystem failure is logged and reported.
        ryw.give_bad_news('ProcessDiscs: failed to overwrite auxi dirs: ' +
                          auxDir + ' -> ' + oldAuxDir, logging.critical)
        return (False, searchFile)

    return (True, searchFile)