def get_meta(searchFile, objID, version, repositoryRoot):
    """Fetch an object's metadata, preferring the SearchFile index.

    Falls back to reading the metadata straight out of the on-disk
    object store when the SearchFile lookup misses.

    Returns a (meta, objectstore_root) pair:
      (meta, root)  on success,
      (None, None)  when even the fallback object store dir is absent,
      (None, root)  when the store exists but the metadata is unreadable.
    """
    found, metadata = searchFile.get_meta(objID, version)
    if found:
        # The object store root is hard-wired everywhere instead of
        # trusting the 'objectstore' value recorded in the metadata.
        #return (metadata, metadata['objectstore'])
        return (metadata, ryw.hard_wired_objectstore_root())

    logging.warning(
        'ryw_meta.get_meta: not finding it in the SearchFile: ' +
        objID + ' # ' + str(version) + ', but attempting to continue')

    # Fall back to the conventional on-disk object store location.
    objroot = os.path.join(repositoryRoot, 'WWW', 'ObjectStore')
    if not os.path.exists(objroot):
        ryw.give_bad_news(
            'DeleteObject.get_meta: even the hardwired root does not exist: '+
            objroot, logging.critical)
        return (None, None)

    found, metadata = ryw.get_meta(objroot, objID, version)
    if found:
        return (metadata, objroot)

    logging.warning(
        'ryw.get_meta: failed to read metadata from objstore: '+
        objID + ' # ' + str(version))
    return (None, objroot)
def get_objectstore_root(repositoryRoot, meta):
    """Return the object store root directory for *meta*.

    If *meta* is a non-empty mapping that carries an 'objectstore' key,
    the hard-wired root is returned (the recorded value is deliberately
    ignored — see the comment below).  Otherwise a warning is logged and
    the conventional <repositoryRoot>/WWW/ObjectStore path is returned.
    """
    # 'in' replaces the Python-2-only dict.has_key(), which was removed
    # in Python 3; behavior is identical on both versions.
    if meta and 'objectstore' in meta:
        #
        # I'm doing this to hardwire all
        # places of gettting objectstoreroot.
        #
        #return meta['objectstore']
        return ryw.hard_wired_objectstore_root()
    logging.warning('get_objectstore_root: meta has no objstore root! ')
    return os.path.join(repositoryRoot, 'WWW', 'ObjectStore')
def uploadobject(values, data, auxDir, hasVersion = False, cloneVersion=False):
    """Update the metadata file, upload the object into the object
    store, and create the view path.

    values:       attribute mapping for the object; must contain 'id'.
    data:         the object payload handed to add_to_object_store.
    auxDir:       auxiliary directory passed through to the object store.
    hasVersion:   forwarded to add_to_search_file — presumably indicates
                  the caller already assigned a version; confirm there.
    cloneVersion: forwarded to the search file / object store steps;
                  when true, view-path creation is skipped entirely.

    Returns True on success.  On any failed step, the previously
    completed steps are rolled back (best effort) and False is returned.
    """
    logging.debug('UploadObject.uploadobject entered.')
    ryw.db_print2('uploadobject: values are: ' + repr(values), 57)

    ##################################################
    # check attributes.
    ##################################################
    success,resources = check_attributes(values)
    if not success:
        return False
    objectid = values['id']

    #
    # not strictly necessary, but I'm doing this to hardwire all
    # places of gettting objectstoreroot.
    #
    #objectstoreroot = values['objectstore']
    objectstoreroot = ryw.hard_wired_objectstore_root()
    logging.debug('uploadobject: passed check_attributes: '+
                  objectstoreroot + ' ' + objectid)

    ##################################################
    # give metadata to the search server.
    ##################################################
    success,version = add_to_search_file(values, hasVersion, cloneVersion)
    if not success:
        # NOTE(review): cleanup even though the add reported failure —
        # presumably add_to_search_file can partially succeed; confirm.
        remove_from_search_file(objectid, version)
        return False
    else:
        ryw.db_print_info_browser('uploadobject: ' + objectid +
                                  '#' + str(version), 29)

    ##################################################
    # put stuff into the object store.
    ##################################################
    success,parent,datapath,metapath,auxpath,donepath = \
        add_to_object_store(values, data, objectstoreroot, objectid,
                            version, auxDir, cloneVersion)
    if not success:
        # roll back both the search-file entry and any partially
        # created object store files.
        remove_from_search_file(objectid, version)
        remove_from_object_store(parent, datapath, metapath, auxpath,
                                 donepath)
        return False

    ##################################################
    # create view path.
    ##################################################
    # Skipped entirely for cloned versions.
    if not cloneVersion:
        success,path = create_view_path(values, resources, objectid, version)
        #ryw.give_news('uploadobject: introducing error...', logging.debug)
        #success = False
        if not success:
            # full rollback: search file, object store, and the
            # (possibly partial) view path.
            remove_from_search_file(objectid,version)
            remove_from_object_store(parent, datapath, metapath, auxpath,
                                     donepath)
            remove_view_path(path)
            return False
    #    logging.debug('just for testing...')
    #    remove_view_path(path)
    #    return False
    return True
def collect_req_info(reqs, objKB):
    """Collect size and path information for a list of requested objects.

    reqs:  iterable of 'objname#version' request strings.
    objKB: size in KB already committed, used to reject items that
           cannot fit on one disc alongside it.

    Returns (True, reqsize, reqpath, sortedItems) where reqsize and
    reqpath map each usable request string to its size in KB and its
    object-store paths, and sortedItems lists the request strings
    ordered by 'upload_datetime' (oldest first).  Returns
    (False, None, None, None) when the SearchFile cannot be opened.
    Malformed or unusable requests are reported and skipped.
    """
    logging.debug('collect_req_info: entered...')
    success,searchFile = ryw.open_search_file(
        'collect_req_info:',
        os.path.join(RepositoryRoot, 'WWW', 'logs'),
        'upload.log',
        os.path.join(RepositoryRoot, 'SearchFile'),
        False)
    if not success:
        return(False, None, None, None)

    reqsize = {}
    reqpath = {}
    reqList = []

    try:
        for item in reqs:
            logging.debug('collect_req_info: item is: '+item)

            # Requests look like 'objname#version'.
            try:
                objname, version = item.split('#')
                version = int(version)
            except Exception:
                # Was a bare 'except:' — narrowed so KeyboardInterrupt /
                # SystemExit are no longer swallowed.
                ryw.give_bad_news(
                    'collect_req_info: bad format, split failed: '+item,
                    logging.error)
                continue

            logging.debug('collect_req_info, obj, version: ' +
                          objname + ' ' + repr(version))

            success,metaData = searchFile.get_meta(objname, version)
            if not success:
                ryw.give_bad_news(
                    'collect_req_info: failed to get_meta.', logging.error)
                continue

            #
            # I'm doing this to hardwire all
            # places of gettting objectstoreroot.
            #
            #objroot = metaData['objectstore']
            objroot = ryw.hard_wired_objectstore_root()

            try:
                itempath = objectstore.name_version_to_paths_aux(
                    objroot, objname, version)
            except Exception:
                # Was a bare 'except:' — see note above.
                ryw.give_bad_news(
                    'collect_req_info: nameversiontopaths failed: ' +
                    objroot + ' ' + objname + ' ' + repr(version),
                    logging.critical)
                continue

            logging.debug('collect_req_info: after getting itempath...' +
                          repr(itempath))

            if not ryw.good_repo_paths(itempath):
                ryw.give_bad_news(
                    'collect_req_info: check_obj_paths failed.',
                    logging.error)
                continue

            success,itemSize = ryw.get_obj_size(itempath)
            if not success:
                continue

            logging.debug('collect_req_info, size in KB is: ' +
                          repr(itemSize))

            # Reject anything that cannot share a disc with objKB.
            if (itemSize > ryw.maxSizeInKB - objKB):
                ryw.give_bad_news(
                    'collect_req_info: item size too big to fit on one disc: ' +
                    repr(itemSize), logging.error)
                continue

            reqsize[item] = itemSize
            reqpath[item] = itempath
            logging.debug('collect_req_info: size, path: ' +
                          repr(itemSize) + ' ' + itempath[0])

            # Build a list of dicts for sorting by upload time below.
            reqItem = {}
            reqItem['name'] = item
            # 'in' replaces the Python-2-only dict.has_key().
            if 'upload_datetime' in metaData:
                reqItem['upload_datetime'] = metaData['upload_datetime']
            reqList.append(reqItem)
    finally:
        # Release the SearchFile even if a lookup raises mid-loop
        # (previously leaked on unexpected exceptions).
        searchFile.done()

    reqList.sort(key = ryw.datetimesortkey, reverse = False)
    sortedItems = []
    for r in reqList:
        sortedItems.append(r['name'])

    return (True, reqsize, reqpath, sortedItems)