def add_to_search_file(values, hasVersion, cloneVersion=False):
    """Add an object's meta-data to the repository search file.

    Opens the search file with the write lock held, records the
    meta-data in 'values', and obtains the version number for the
    object.

    values       -- meta-data dictionary; 'version' is read when
                    hasVersion is true, and written back on success.
    hasVersion   -- true when 'values' already carries its version.
    cloneVersion -- passed through to SearchFile.add_to_search_file()
                    when a new version number must be assigned.

    Returns (True, version) on success, (False, version-or-None) on
    failure.
    """
    success, searchFile = ryw.open_search_file(
        'add_to_search_file:',
        os.path.join(RepositoryRoot, 'WWW', 'logs'),
        'upload.log',
        os.path.join(RepositoryRoot, 'SearchFile'),
        True)
    if not success:
        return (False, None)

    version = None
    try:
        if hasVersion:
            # NOTE(review): this success flag is never checked; a
            # False return would still be reported as overall success.
            # Confirm whether failures always raise instead.
            success = searchFile.add_this_version_to_search_file(values)
            version = values['version']
        else:
            success, version = searchFile.add_to_search_file(
                values, cloneVersion=cloneVersion)
        searchFile.done()
        values['version'] = version
    except Exception:
        # BUG FIX: was a bare 'except:', which also swallowed
        # SystemExit/KeyboardInterrupt; narrowed to Exception.
        ryw.give_bad_news(
            'fatal_error: failed to add_to_search_file().',
            logging.critical)
        searchFile.done()
        return (False, version)

    logging.debug('add_to_search_file passed: got version: ' +
                  repr(version))
    return (True, version)
def main():
    """CGI entry point for SelectAll: queue every object in the search
    file for download by the requesting user."""
    ryw.check_logging(os.path.join(RepositoryRoot, "WWW", "logs"),
                      "upload.log")
    logging.debug("SelectAll: entered...")
    ryw_view.print_header_logo()

    # REMOTE_USER names the per-user download queue directory.
    name = os.getenv("REMOTE_USER")
    if not name:
        # idiom fix: covers both None (unset) and the empty string,
        # same as the old 'name == "" or name == None'.
        ryw.give_bad_news("SelectAll: no user name given", logging.error)
        ryw_upload.quick_exit(1)
    queue = os.path.join(RepositoryRoot, "QUEUES", name)

    try:
        resources = su.parseKeyValueFile(
            os.path.join(RepositoryRoot, "Resources.txt"))
        searchFileName = resources["searchfile"]
    except Exception:
        # BUG FIX: narrowed from a bare 'except:' so SystemExit and
        # KeyboardInterrupt are no longer swallowed.
        ryw.give_bad_news(
            "SelectAll: failed to get search file name from resources.",
            logging.critical)
        ryw_upload.quick_exit(1)

    success, searchFile = ryw.open_search_file(
        "SelectAll:",
        os.path.join(RepositoryRoot, "WWW", "logs"),
        "upload.log",
        searchFileName,
        False)
    if not success:
        ryw.give_bad_news("SelectAll: failed to open search file.",
                          logging.critical)
        ryw_upload.quick_exit(1)

    if not ProcessDownloadReq.add_all(queue, searchFile):
        ryw.give_bad_news("selectall: addAll failed.", logging.critical)
        ryw_upload.quick_exit(1)

    searchFile.done()
    ryw_upload.quick_exit(0)
def remove_from_search_file(objectID, version):
    """Remove all traces of one object version from the search meta file.

    A missing objectID or version makes this a no-op; a failure inside
    delete() is reported at debug level and otherwise ignored.
    """
    if not objectID or not version:
        return
    logging.debug('remove_from_search_file: entering: ' +
                  objectID + ' ' + str(version))

    opened, handle = ryw.open_search_file(
        'remove_from_search_file:',
        os.path.join(RepositoryRoot, 'WWW', 'logs'),
        'upload.log',
        os.path.join(RepositoryRoot, 'SearchFile'),
        True)
    if not opened:
        return

    # best-effort delete: any exception is logged and swallowed.
    try:
        handle.delete([(objectID, version)])
    except:
        ryw.give_news(
            'remove_from_search_file: delete: exception.',
            logging.debug)
    handle.done()
def copy_search_file(searchFile, repDir):
    """Copy the search file into repDir as 'searchfile' (disc catalog).

    The search file is opened (locked, skipRead) for the duration of
    the raw file copy so nobody mutates it mid-copy.

    Returns True on success, False on open or copy failure.
    """
    ryw.give_news2('copying search file... ', logging.info)
    # place catalog
    # TODO: lock searchfile -> now done
    logging.debug('copy_search_file: ' + searchFile + ' ' + repDir)
    dst = os.path.join(repDir, 'searchfile')

    success, binSearchFile = ryw.open_search_file(
        'copy_search_file:', None, None, searchFile, False,
        skipRead = True)
    if not success:
        return False

    try:
        try:
            shutil.copyfile(searchFile, dst)
        finally:
            # BUG FIX: done() used to be called only on the success
            # path, leaking the search-file handle/lock when copyfile
            # raised; 'finally' releases it on both paths.
            binSearchFile.done()
    except Exception:
        # narrowed from a bare 'except:'.
        ryw.give_bad_news('copy_search_file failed: ' +
                          searchFile + ' ' + dst,
                          logging.critical)
        return False

    logging.debug('copy_search_file: ' + searchFile + ' ' + dst)
    ryw.give_news2('done. ', logging.info)
    ryw.give_news2('<BR>', logging.info)
    return True
def merge_incoming(existingName, incomingName, repositoryRoot,
                   searchFile=None):
    """called by ProcessDiscs.py

    Merge an incoming ReverseLists file (e.g. from a disc) into the
    existing repository ReverseLists file.

    existingName   -- path of the repository's ReverseLists file.
    incomingName   -- path of the incoming ReverseLists file.
    repositoryRoot -- repository root handed to the open/merge helpers.
    searchFile     -- an already-opened SearchFile, or None to have
                      this function open one itself.

    Returns True when there is nothing to merge or the merge succeeds,
    False on any open/merge failure.
    """
    logging.info('merge_incoming: ' + existingName + ' <- ' + incomingName)

    # no incoming file at all: nothing to merge, counts as success.
    if not ryw.is_valid_file(incomingName, 'copy_reverse_lists:'):
        ryw.give_news3('merge_incoming: incoming ReverseLists not found.',
                       logging.info)
        return True

    if not searchFile:
        success,searchFile = ryw.open_search_file(
            'merge_incoming:',
            os.path.join(RepositoryRoot, 'WWW', 'logs'),
            'upload.log',
            os.path.join(RepositoryRoot, 'SearchFile'),
            False)
        if not success:
            if searchFile:
                searchFile.done()
            ryw.give_bad_news('merge_incoming: open search file failed. ',
                              logging.critical)
            return False

    # existing list is opened with the write flag (True) so it can be
    # updated in place by merge().
    success,existingRL = open_reverse_lists('ReverseLists.merge_incoming:',
                                            '', '',
                                            existingName, True,
                                            searchFile = searchFile,
                                            repositoryRoot = repositoryRoot)
    if not success:
        # NOTE(review): searchFile is NOT closed on this failure path
        # (nor on the next one) -- looks like a lock/handle leak;
        # confirm whether callers recover it.
        ryw.give_bad_news('merge_incoming: failed to open existing list.',
                          logging.critical)
        if existingRL:
            existingRL.done()
        return False

    # incoming list is read-only, unlocked (skipLk) and may legally
    # lack an associated search file.
    success,incomingRL = open_reverse_lists('ReverseLists.merge_incoming:',
                                            '', '',
                                            incomingName, False,
                                            skipLk = True,
                                            allowNullSearchFile = True)
    if not success:
        # NOTE(review): existingRL is also left open on this path.
        ryw.give_bad_news('merge_incoming: failed to open incoming list.',
                          logging.error)
        if incomingRL:
            incomingRL.done()
        return False

    success = existingRL.merge(incomingRL, repositoryRoot)
    existingRL.done()
    incomingRL.done()
    if searchFile:
        # NOTE(review): this also closes a CALLER-supplied searchFile,
        # not just one opened above -- confirm callers expect that.
        searchFile.done()
    return success
def open_search_file(RepositoryRoot, grabWriteLock = True, skipLock = False):
    """Open the repository's search file and return its handle.

    Returns the opened SearchFile on success, or None after logging a
    critical message on failure.
    """
    logDir = os.path.join(RepositoryRoot, 'WWW', 'logs')
    searchFilePath = os.path.join(RepositoryRoot, 'SearchFile')
    ok, handle = ryw.open_search_file(
        'ryw_meta.open_search_file:',
        logDir,
        'upload.log',
        searchFilePath,
        grabWriteLock,
        skipLk = skipLock)
    if ok:
        return handle
    ryw.give_bad_news(
        'DeleteObject.open_search_file: failed to open search file.',
        logging.critical)
    return None
def open_searchfile_reverselists(callerStr, searchFileWriteFlag=False,
                                 reverseListsWriteFlag = True,
                                 newReverseListsFileName = None):
    """opens both SearchFile and ReverseLists in preparation for use
    by DisplayObject.  called by all the display guys unless they need
    to open or already have opened these files separately.

    Returns (True, searchFile, reverseLists) on success, or
    (False, None, None) after closing whatever did get opened.

    NOTE(review): reverseListsWriteFlag is currently ignored; the
    ReverseLists file is always opened with write flag True -- confirm
    whether that is intended.
    """
    success, searchFile = ryw.open_search_file(
        callerStr,
        os.path.join(RepositoryRoot, 'WWW', 'logs'),
        'upload.log',
        os.path.join(RepositoryRoot, 'SearchFile'),
        searchFileWriteFlag)
    if not success:
        if searchFile:
            searchFile.done()
        ryw.give_bad_news('open_searchfile_reverselists: ' +
                          'open search file failed. ' +
                          'called by: ' + callerStr,
                          logging.critical)
        return (False, None, None)

    # default ReverseLists location lives under the repository root.
    listsName = newReverseListsFileName or os.path.join(
        RepositoryRoot, 'ReverseLists')

    success, reverseLists = open_reverse_lists(
        callerStr, '', '', listsName, True,
        searchFile = searchFile,
        repositoryRoot = RepositoryRoot)
    if success:
        return (True, searchFile, reverseLists)

    # failure: close both handles before reporting.
    ryw.give_bad_news('open_searchfile_reverselists: ' +
                      'open reverse lists failed. ' +
                      'called by: ' + callerStr,
                      logging.critical)
    if reverseLists:
        reverseLists.done()
    if searchFile:
        searchFile.done()
    return (False, None, None)
def do_show(objID, version):
    """Display one server object identified by (objID, version).

    Returns True when the object was shown, False when the search file
    cannot be opened or the meta-data lookup fails.
    """
    opened, searchFile = ryw.open_search_file(
        'DisplayObject:',
        os.path.join(RepositoryRoot, 'WWW', 'logs'),
        'upload.log',
        os.path.join(RepositoryRoot, 'SearchFile'),
        False)
    if not opened:
        return False

    found, meta = searchFile.get_meta(objID, version)
    if found and meta:
        EditObject.show_one_server_object(meta, searchFile)
        searchFile.done()
        return True

    ryw.give_bad_news(
        'DisplayObject.do_show: get_meta failed.',
        logging.critical)
    if searchFile:
        searchFile.done()
    return False
def main():
    """CGI entry point for CloneVersion: copy an existing object
    version (meta-data, selection file for lists, and the _AUXI
    directory) and upload it to the repository as a new version."""
    name = print_header()
    form = cgi.FieldStorage()
    WebUpload_ryw.setup_logging()
    #
    # get objstr.
    #
    success,objID,version = ryw.get_obj_str2(form)
    if not success:
        ryw.give_bad_news('CloneVersion: failed to get objstr.',
                          logging.critical)
        ryw_upload.quick_exit(1)
    message = 'CloneVersion: ' + objID + '#' + str(version)
    logging.info(message)
    ryw.db_print2("<BR>" + message + "<BR>", 23);
    #
    # open search file.
    # NOTE(review): 'CloneVerson' below looks like a typo for
    # 'CloneVersion' in the caller tag (runtime string, left as-is).
    #
    success,searchFile = ryw.open_search_file(
        'CloneVerson',
        os.path.join(RepositoryRoot, 'WWW', 'logs'),
        'upload.log',
        os.path.join(RepositoryRoot, 'SearchFile'),
        False)
    if not success:
        if searchFile:
            searchFile.done()
        ryw.give_bad_news('CloneVersion: ' +
                          'open search file failed. ',
                          logging.critical)
        ryw_upload.quick_exit(1)
    else:
        ryw.db_print2("search file opened." + "<BR>", 23);
    #
    # get meta and paths.
    #
    success,paths,meta = DisplaySelection.get_all_paths(
        objID, version, skipLock=False,
        searchFile=searchFile,
        allowNullSearchFile=False)
    if success:
        ryw.db_print_info_browser('CloneVersion: paths: ' + repr(paths), 24)
        ryw.db_print_info_browser('CloneVersion: meta: ' + repr(meta), 29)
    else:
        ryw_upload.quick_exit(1)
    if (searchFile):
        searchFile.done()
    #
    # we do want to clone the data if it were a list.
    #
    isList = ryw_meta.isList(meta)
    if isList:
        dataPath = paths[0]
        selName = DisplaySelection.get_sel_name(dataPath)
        if not selName:
            ryw.give_bad_news(
                'CloneVersion: isList but failed to get selection name.',
                logging.error)
            ryw_upload.quick_exit(1)
        selPath = os.path.join(dataPath, selName)
    else:
        selPath,selName = None,None
    #
    # change meta.
    #
    meta = change_meta(meta, name)
    #
    # deal with auxi dir: if the original version has an _AUXI
    # directory, copy it into a fresh tmpdir for the new version.
    #
    originalAuxiDir = paths[2]
    newAuxiDir = None
    tmpdir = None
    if os.path.exists(originalAuxiDir):
        tmpdir = WebUpload_ryw.attempt_make_tmpdir()
        if not tmpdir:
            ryw_upload.quick_exit(1)
        newAuxiDir = os.path.join(tmpdir, '_AUXI')
        message = 'CloneVersion: shutil.copytree(): ' + \
                  originalAuxiDir + ' -> ' + newAuxiDir
        try:
            shutil.copytree(originalAuxiDir, newAuxiDir)
        except:
            ryw.give_bad_news('failed: ' + message, logging.critical)
            ryw_upload.cleanup_and_exit(tmpdir, None, None, 1)
        ryw.db_print_info_browser(message, 29)
    #
    # Now try to put a new object in the repository.
    # note that the version number will be incremented.
    #
    # "selPath" used to be just None.
    # when I added cloning list, I'm just using this to pass in the
    # path name of the selection file.
    #
    if not WebUpload_ryw.try_upload_object(meta, selPath, newAuxiDir,
                                           cloneVersion=True):
        ryw_upload.cleanup_and_exit(tmpdir, None, None, 1)
    searchFile = WebUpload_ryw.show_one_server_object(meta)
    searchFile.done()
    ryw_upload.cleanup_and_exit(tmpdir, None, None, 0,
                                successMessage = 'clone version completed.')
def collect_req_info(reqs, objKB):
    """Collect size and path information for a list of download requests.

    reqs  -- iterable of request strings of the form 'objname#version'.
    objKB -- KB already committed to the disc; items that would exceed
             ryw.maxSizeInKB after adding objKB are skipped.

    Malformed or unresolvable items are logged and skipped rather than
    failing the whole batch.

    Returns (True, reqsize, reqpath, sortedItems) where reqsize and
    reqpath map request strings to size-in-KB and repository paths, and
    sortedItems lists the surviving request strings ordered by
    upload_datetime; returns (False, None, None, None) only when the
    search file cannot be opened.
    """
    logging.debug('collect_req_info: entered...')
    success, searchFile = ryw.open_search_file(
        'collect_req_info:',
        os.path.join(RepositoryRoot, 'WWW', 'logs'),
        'upload.log',
        os.path.join(RepositoryRoot, 'SearchFile'),
        False)
    if not success:
        return (False, None, None, None)

    reqsize = {}
    reqpath = {}
    reqList = []
    for item in reqs:
        logging.debug('collect_req_info: item is: ' + item)
        try:
            objname, version = item.split('#')
            version = int(version)
        except ValueError:
            # BUG FIX: narrowed from a bare 'except:'; split()/int()
            # signal a bad format with ValueError only.
            ryw.give_bad_news(
                'collect_req_info: bad format, split failed: ' + item,
                logging.error)
            continue
        logging.debug('collect_req_info, obj, version: ' +
                      objname + ' ' + repr(version))

        success, metaData = searchFile.get_meta(objname, version)
        if not success:
            ryw.give_bad_news(
                'collect_req_info: failed to get_meta.',
                logging.error)
            continue

        #
        # I'm doing this to hardwire all
        # places of gettting objectstoreroot.
        #
        #objroot = metaData['objectstore']
        objroot = ryw.hard_wired_objectstore_root()
        try:
            itempath = objectstore.name_version_to_paths_aux(objroot,
                                                             objname,
                                                             version)
        except Exception:
            # narrowed from a bare 'except:'.
            ryw.give_bad_news(
                'collect_req_info: nameversiontopaths failed: ' +
                objroot + ' ' + objname + ' ' + repr(version),
                logging.critical)
            continue
        logging.debug('collect_req_info: after getting itempath...' +
                      repr(itempath))

        if not ryw.good_repo_paths(itempath):
            ryw.give_bad_news('collect_req_info: check_obj_paths failed.',
                              logging.error)
            continue

        success, itemSize = ryw.get_obj_size(itempath)
        if not success:
            continue
        logging.debug('collect_req_info, size in KB is: ' + repr(itemSize))
        if itemSize > ryw.maxSizeInKB - objKB:
            ryw.give_bad_news(
                'collect_req_info: item size too big to fit on one disc: ' +
                repr(itemSize),
                logging.error)
            continue

        reqsize[item] = itemSize
        reqpath[item] = itempath
        logging.debug('collect_req_info: size, path: ' +
                      repr(itemSize) + ' ' + itempath[0])

        # build a list for sorting.
        reqItem = {}
        reqItem['name'] = item
        # BUG FIX: 'in' replaces dict.has_key(), which was removed in
        # Python 3; behavior is identical.
        if 'upload_datetime' in metaData:
            reqItem['upload_datetime'] = metaData['upload_datetime']
        reqList.append(reqItem)

    searchFile.done()
    reqList.sort(key = ryw.datetimesortkey, reverse = False)
    sortedItems = [r['name'] for r in reqList]
    return (True, reqsize, reqpath, sortedItems)
def main(logDir, logFile, searchFile, scriptName, resources = None):
    """main function processing search request.

    Parses the CGI search form, matches the query against the search
    file's meta-data, sorts and pages the matches, then prints one
    page of results plus next-page and selection links.
    """
    # initialization.
    # NOTE(review): 'name' is not used below -- print_header() is
    # presumably called for its output side effect.
    name = print_header()
    form = cgi.FieldStorage()
    setup_logging(logDir, logFile)
    ## cgi.print_form(form)

    ## parse the form to get: query, sorting information, start_index
    ## to know which subset of matches/results to return,
    ## search_attributes for the next_page_button
    query, sort_tuple, start_index, search_attributes, error_message = \
        parse_form(form)
    if query is None:
        print '<P> ERROR while parsing form and constructing ' + \
              'search query:', error_message
        sys.exit(1)
    ## print '<HR>Query:', query

    ## read index file to get the list of dictionaries:
    ## one dictionary for each version of each object, contains its meta-data
    #
    # used to open search file without read.
    # because Sobti is doing the read by himself below.
    # I have to change this because the SearchFile will be passed
    # onto the ReverseLists module for more later lookups.
    # this makes it necessary to do a real SearchFile open.
    #
    #success,searchFileLock = ryw.open_search_file(
    #    'Search:', logDir, logFile, searchFile, False, skipRead = True)
    success,searchFileLock = ryw.open_search_file(
        'Search:', logDir, logFile, searchFile, False, skipRead = False)
    if not success:
        ryw.give_bad_news('Search: failed to acquire search file lock: ' +
                          searchFile, logging.critical)
        ryw_upload.quick_exit(1)
    #else:
    #
    # mis-named: it's really not a searchFileLock, but searchFile itself.
    #
    #displayObject.set_search_file(searchFileLock)
    #
    # this is when Sobti used to do his own read, now replaced by mine.
    #
    #metas, error_message = read_index_file_to_get_metas(searchFile)
    metas = searchFileLock.convert_to_sobti_list()
    if metas is None:
        print '<P> ERROR while reading the index file to get ' + \
              'the meta-data dictionaries:', error_message
        searchFileLock.done()
        ryw_upload.quick_exit(1)

    ## build a list of metas that satisfy the given query
    matches = []
    for meta in metas:
        if not ryw_view.should_show_object(meta, resources):
            continue
        if cnf_match.matches_cnf(meta, query):
            matches.append(meta)
            ryw.db_print2('Search:main() ' + repr(meta), 53)
    num_matches = len(matches)

    ## sort the matches by the given sort tuple
    matches, error_message = sort_matches(matches, sort_tuple)
    if matches is None:
        print '<P> ERROR while sorting matches:', error_message
        searchFileLock.done()
        ryw_upload.quick_exit(1)

    #
    # save all current search results in a file for
    # possible inclusion in the current selection.
    #
    matchAllName = save_matches(matches)

    ## Return the start_index..(start_index + N) entries from the top
    if num_matches == 0 or start_index >= num_matches:
        num_items = 0
    else:
        start_index = max(0, start_index)
        assert start_index < num_matches
        # clamp the page window to [0, num_matches - 1].
        end_index = start_index + NUM_OBJECTS_PER_PAGE - 1
        end_index = max(0, end_index)
        end_index = min(num_matches - 1, end_index)
        if not (0 <= start_index <= end_index < num_matches):
            print '<P> ASSERT ERROR: start_index, end_index, ' + \
                  'num_matches', start_index, end_index, num_matches
            searchFileLock.done()
            ryw_upload.quick_exit(1)
        num_items = end_index - start_index + 1

    ## num_items is the number of items that will actually be displayed
    ## num_items <= num_matches
    if num_items == 0:
        print '<P><H3>No objects to display</H3>'
        searchFileLock.done()
        ryw_upload.quick_exit(1)
    else:
        #print '<P><H3>%d objects satisfy the search criteria, ' + \
        #    'displaying %d of them</H3>' % (num_matches, num_items)
        print """
<BR><B><FONT SIZE=2>%d object(s) satisfy the search criteria.</FONT></B>""" % \
            (num_matches)
        print """
<BR><B><FONT SIZE=2>displaying matches %d - 
%d.</FONT></B><BR>""" % \
            (start_index + 1, start_index + num_items)
    print_next_page_button1(search_attributes, end_index + 1,
                            scriptName, num_matches)

    shownMatches = []
    success,reverseLists = ReverseLists.open_reverse_lists(
        'Search:', '', '',
        os.path.join(RepositoryRoot, 'ReverseLists'), True,
        searchFile = searchFileLock,
        repositoryRoot = RepositoryRoot)
    if not (success and reverseLists):
        # NOTE(review): searchFileLock is not closed on this path --
        # confirm whether the caller cleans it up.
        ryw.give_bad_news('Search.main: failed to open ReverseLists.',
                          logging.critical)
        if reverseLists:
            reverseLists.done()
        return False

    displayObject = ryw_view.DisplayObject(
        RepositoryRoot, calledByVillageSide = False,
        missingFileFunc=Browse.reqDownloadFunc,
        searchFile = searchFileLock,
        reverseLists = reverseLists)
    displayObject.begin_print()
    for i in range(start_index, end_index + 1):
        #display_object(matches[i])
        displayObject.show_an_object_compact(matches[i])
        shownMatches.append(matches[i])
    displayObject.end_print()
    reverseLists.done()

    #
    # save search results on this page in a file for
    # possible inclusion in the current selection.
    #
    matchThisPageName = save_matches(shownMatches)
    print_selection_links(matchAllName, matchThisPageName)

    ## Include a next-page button
    print_next_page_button2(search_attributes, end_index + 1,
                            scriptName, num_matches)
    searchFileLock.done()
    ryw_view.print_footer()