def do_update_metadata(objroot, objID, version, meta, searchFile=None):
    """Rewrite an object's on-disk metadata, then update the search file.

    this is also called by merging incoming data in
    ProcessDiscs.deal_with_stub(). only there, we're going to worry
    about the optional incoming SearchFile argument. there, we're
    trying to re-use the searchFile argument without re-opening it
    over and over again.

    Returns (success, searchFile) so a caller in a loop can hand the
    opened searchFile back in on the next iteration.
    """
    if not searchFile:
        ryw.db_print('do_update_metadata: null searchFile', 11)
    else:
        ryw.db_print('do_update_metadata: reusing searchFile', 11)
    # persist the new metadata for this object version first; if that
    # fails there is nothing to reflect into the search file.
    if not ryw_meta.rewrite_meta(objroot, objID, version, meta):
        ryw.give_bad_news('EditObject: rewrite_meta failed.',
                          logging.error)
        return (False, None)
    if not searchFile:
        # opened with the write lock held: we are about to modify it.
        searchFile = ryw_meta.open_search_file(RepositoryRoot,
                                               grabWriteLock = True)
        if not searchFile:
            ryw.give_bad_news('EditObject: failed to open search file.',
                              logging.critical)
            return (False, None)
    searchFile.modify(meta)
    return (True, searchFile)
def free_MB(path):
    """Return the amount of available disk space at path, in MB.

    used to be in ryw.py. Creates path if it doesn't exist yet.
    Returns 0 on any failure (directory cannot be created, or the
    win32 free-space query fails).
    """
    #tested.
    ryw.db_print('ryw_xp:free_MB() entered...', 49)
    path = os.path.normpath(path)
    if not ryw.try_mkdir_ifdoesnt_exist(path, 'free_MB'):
        ryw.give_bad_news(
            'free_MB: try_mkdir_ifdoesnt_exist failed: ' + path,
            logging.critical)
        return 0
    try:
        sectorsPerCluster,bytesPerSector,numFreeClusters,totalNumClusters = \
            win32file.GetDiskFreeSpace(path)
    except Exception:
        # narrowed from a bare except: so KeyboardInterrupt/SystemExit
        # still propagate instead of being reported as a disk error.
        ryw.give_bad_news(
            'fatal_error: failed to determine free disk space: '+path,
            logging.critical)
        return 0
    # promote to long so the product below can't overflow a 32-bit int
    # (Python 2 semantics).
    sectorsPerCluster = long(sectorsPerCluster)
    bytesPerSector = long(bytesPerSector)
    numFreeClusters = long(numFreeClusters)
    totalNumClusters = long(totalNumClusters)
    freeMB = (numFreeClusters * sectorsPerCluster * bytesPerSector) / \
             (1024 * 1024)
    return freeMB
def delete_all(searchSel):
    """Delete every object named in searchSel (a list of objstr's).

    The search-file handle is opened lazily by the first do_delete()
    call and reused across the loop, then released here. Returns True
    only if every deletion succeeded.
    """
    allOk = True
    sFile = None
    for objstr in searchSel:
        ok, objID, version = ryw.split_objstr(objstr)
        if not ok:
            ryw.give_bad_news('DelSearchAll: invalid objstr: ' + objstr,
                              logging.error)
            allOk = False
            continue
        ok, sFile = DeleteObject.do_delete(objID, version,
                                           searchFile=sFile)
        if ok:
            ryw.db_print('DelSearchAll.delete_all: do_delete succeeded.',
                         18)
        else:
            ryw.give_bad_news(
                'DelSearchAll: DeleteObject.do_delete failed.' + objstr,
                logging.error)
            allOk = False
    if sFile:
        sFile.done()
    return allOk
def print_explorer_string(auxiURL, subDir, title): isLan = ryw.is_lan() ryw.db_print('print_explorer_string: isLan: ' + repr(isLan), 46) theStr = """ <BR> <A HREF="%(the_url)s" %(popup)s>%(anchor)s</A> """ dict = {} theURL = ryw_view.glue_auxi_URL(auxiURL, subDir, '', checkIt = False) dict['the_url'] = theURL dict['popup'] = ryw_view.make_explorer_popup_string( theURL, (ryw_view.POPUP_DIR_WIDTH, ryw_view.POPUP_DIR_HEIGHT), pdrive=isLan) anchorStr = """ <IMG SRC="%(iconImg)s" TITLE="%(title)s" BORDER=0>""" d2 = {} d2['title'] = title if isLan: d2['iconImg'] = '/icons/p_folder.gif' else: d2['iconImg'] = '/icons/folder.gif' dict['anchor'] = anchorStr % d2 print theStr % dict
def flock_unlock(flockSelf):
    """Release the byte-range lock taken by flock_lock() and close the
    underlying win32 file handle."""
    #remove locks
    #tested
    ryw.db_print('ryw_xp:flock_unlock() entered...', 49)
    lock = flockSelf
    # unlock the same (0, highbits) byte range that was locked.
    win32file.UnlockFileEx(lock.hfile, 0, lock.highbits, lock.ov)
    lock.hfile.Close()
def unlock(adRotateHandler, file):
    """Release the win32 byte-range lock on file's OS handle, then
    close the handle. used to be in ad.py."""
    ryw.db_print('ryw_xp:unlock() entered...', 49)
    handler = adRotateHandler
    handler.highbits = -0x7fff0000
    handler.ov = pywintypes.OVERLAPPED()
    # map the Python file object onto its underlying OS handle.
    handler.hfile = win32file._get_osfhandle(file.fileno())
    # remove locks
    win32file.UnlockFileEx(handler.hfile, 0, handler.highbits,
                           handler.ov)
    handler.hfile.Close()
def launch_explorer(path):
    """Launch Windows Explorer rooted at path.
    from explorer.launchExplorer()"""
    cmdLine = [r"explorer", "/n,", "/root,", path]
    ryw.db_print('launch_explorer: command is: ' + repr(cmdLine), 46)
    # this code treats an exit status of 1 as the success case.
    if subprocess.call(cmdLine) == 1:
        return True
    ryw.give_bad_news("launch_explorer: Error in launching explorer.",
                      logging.error)
    return False
def __add_many_mappings_memory(self, listName, itemsList): """called by add() and redefine(). returns changed. only touches memory, so the wrapper flushes disk.""" ryw.db_print('add_many_mappings_memory entered: container: ' + listName + ' , containees: ' + repr(itemsList), 17) incomeSet = set(itemsList) if incomeSet == set([]): return False changed = False for containee in incomeSet: success,containeeID,containeeVersion = \ ryw.split_objstr(containee) if not success: logging.warning('ReverseLists.add: failed to split: ' + containee) if self.reverseDict.has_key(containee): del self.reverseDict[containee] changed = True continue # # this deals with the case when a container (list) # contains dead objects (containees). # so if we can't find the containees, # we not only skip adding, but also want to try # to remove the containee from the ReverseLists. # #success,meta,objroot = ryw_meta.get_meta2( # self.repositoryRoot, containeeID, containeeVersion) success,meta = self.searchFile.get_meta( containeeID, containeeVersion) if not success: logging.warning('ReverseLists.add: failed get_meta: '+ containee) if self.reverseDict.has_key(containee): del self.reverseDict[containee] changed = True continue # # now we add the singleton. # changed = True self._ReverseLists__union_one_mapping(containee, [listName]) return changed
def url2path(url, pdrive):
    """Map a DOCUMENT_ROOT-relative URL to a filesystem path,
    optionally rewriting the drive prefix to P: for LAN access."""
    path = os.environ['DOCUMENT_ROOT'] + "/" + url
    # un-escape '#', which shows up in objstr-derived URLs.
    path = path.replace("%23", "#")
    path = os.path.normpath(path)
    if pdrive:
        # replace the local drive prefix (e.g. 'C:') with 'P:'.
        path = 'P:' + path[2:]
        ryw.db_print('url2path: is pdrive, path is: ' + path, 46)
    else:
        ryw.db_print('url2path: is not pdrive, path is: ' + path, 46)
    if not os.path.exists(path):
        #sys.stdout.write('explorer.py: path does not exist: ' +
        #                 path + ' \n')
        # deliberately tolerated: the caller decides what a missing
        # path means.
        pass
    return path
def __minus_one_mapping(self, containee, containers): """helper function called at more than one place. containers is a list. returns whether changes have been made.""" if not containers or containers == []: ryw.db_print('minus_one_mapping: empty containers.', 14) return False if not self.reverseDict.has_key(containee): ryw.db_print('minus_one_mapping: containee not found.', 14) return False minusSet = set(containers) oldSet = self.reverseDict[containee] newSet = oldSet - minusSet changed = False if newSet == set([]): del self.reverseDict[containee] changed = True ryw.db_print('minus_one_mapping: no container left.', 14) elif oldSet != newSet: self.reverseDict[containee] = newSet changed = True ryw.db_print('minus_one_mapping: new set is: '+repr(newSet), 14) return changed
def flock_init(flockSelf, file):
    """Open (or create) the lock file and stash the win32 handle plus
    the high-order lock-range bits on flockSelf for later
    LockFileEx/UnlockFileEx calls."""
    #tested.
    ryw.db_print('ryw_xp:flock_init() entered...', 49)
    lock = flockSelf
    attrs = win32security.SECURITY_ATTRIBUTES()
    attrs.Initialize()
    #high-order 32 bits of byte range to lock
    lock.highbits = 0x7fff0000
    #make a handle with read/write and open or create if doesn't exist
    lock.hfile = win32file.CreateFile(
        lock.file,
        win32con.GENERIC_READ | win32con.GENERIC_WRITE,
        win32con.FILE_SHARE_READ | win32con.FILE_SHARE_WRITE,
        attrs,
        win32con.OPEN_ALWAYS,
        win32con.FILE_ATTRIBUTE_NORMAL,
        0)
def __union_one_mapping(self, containee, containers): """helper function called at more than one place. containers is a list.""" if not containers or containers == []: return if self.reverseDict.has_key(containee): oldSet = self.reverseDict[containee] newSet = oldSet | set(containers) self.reverseDict[containee] = newSet ryw.db_print('union_one_mapping: the new set is: ' + repr(newSet), 11) else: self.reverseDict[containee] = set(containers)
def __write_to_disk(self): tmppath = self.reverseListsFile + '.TMP' try: su.pickdump(self.reverseDict, tmppath) except: ryw.give_bad_news('ReverseLists.add: pickdump failed.', logging.critical) return False success,bakpath = ryw.make_tmp_file_permanent( tmppath, self.reverseListsFile) ryw.cleanup_path(tmppath, '_write_to_disk: cleanup, tmppath:') ryw.cleanup_path(bakpath, '_write_to_disk: cleanup, bakpath:') ryw.db_print('write_to_disk: written to disk...', 11) return success
def get_chapterlist_name(dataPath):
    """Return ChapterList.CHAPTER_LIST_NAME if an entry by that name
    exists in dataPath, else None. Also returns None (after reporting
    bad news) if the directory can't be listed."""
    try:
        names = os.listdir(dataPath)
        if len(names) == 0:
            return None
        for name in names:
            if name == ChapterList.CHAPTER_LIST_NAME:
                ryw.db_print('get_chapterlist_name: found name: ' +
                             name, 38)
                return name
        ryw.db_print('get_chapterlist_name: found no name: ', 38)
        return None
    except Exception:
        # narrowed from a bare except: so KeyboardInterrupt and
        # SystemExit still propagate.
        ryw.give_bad_news(
            'DisplaySelection.get_chapterlist_name: failed to listdir: ' +
            dataPath,
            logging.error)
        return None
def do_delete(objID, version, searchFile=None):
    """Delete one object version: its reverse-list entries, its search
    file entry, and its on-disk paths.

    the optional searchFile argument is for cases of calling this
    thing in a loop: the first time, searchFile is None, so we do an
    open of the searchFile, and this gets returned to the caller, and
    the caller (in a loop) gets to reuse the searchFile argument.

    Returns (success, searchFile).

    NOTE(review): searchFile.done() is called on every exit path yet
    the handle is still returned for reuse — presumably done() only
    releases a lock rather than closing the handle; confirm against
    the SearchFile implementation.
    """
    if not searchFile:
        searchFile = ryw_meta.open_search_file(RepositoryRoot)
        ryw.db_print('DeleteObject.do_delete: opening searchFile.', 18)
    else:
        ryw.db_print('DeleteObject.do_delete: reusing searchFile.', 18)
    if not searchFile:
        return (False, None)
    meta,objroot = ryw_meta.get_meta(searchFile, objID, version,
                                     RepositoryRoot)
    if not meta and not objroot:
        ryw.give_bad_news('do_delete: no meta, no objroot, giving up...',
                          logging.critical)
        searchFile.done()
        return (False, searchFile)
    #
    # try to continue deletion even if we can't get metadata.
    #
    paths = ryw_meta.get_paths(objroot, objID, version, meta,
                               RepositoryRoot)
    if not paths:
        ryw.give_bad_news('do_delete: failed to get paths.',
                          logging.critical)
        searchFile.done()
        return (False, searchFile)
    if meta:
        #
        # hack. I had a bug when the following function crashed
        # with meta=None
        # I didn't bother to understand what's going on.
        #
        process_reverse_lists(objID, version, meta, searchFile)
    remove_from_search_file(searchFile, objID, version)
    remove_paths(paths)
    searchFile.done()
    return (True, searchFile)
def flock_lock(flockSelf):
    """Acquire the byte-range lock described by flockSelf.type
    (LOCK_EX / LOCK_NB flags) via win32 LockFileEx."""
    #tested.
    ryw.db_print('ryw_xp:flock_lock() entered...', 49)
    lock = flockSelf
    # compose the flags additively instead of via nested if/else:
    # exclusive vs. shared, and blocking vs. fail-immediately, are
    # independent choices. shared+blocking yields flags == 0.
    flags = 0
    if lock.type['LOCK_EX']:
        flags |= win32con.LOCKFILE_EXCLUSIVE_LOCK
    if lock.type['LOCK_NB']:
        flags |= win32con.LOCKFILE_FAIL_IMMEDIATELY
    #used to indicate starting region to lock
    lock.ov = pywintypes.OVERLAPPED()
    win32file.LockFileEx(lock.hfile, flags, 0, lock.highbits, lock.ov)
def __delete_containee_object(self, objID, objVersion): """returns whether changes are made. called when an object is deleted. it checks to see whether this guy is a containee. this and the one below called by delete_object().""" containee = objID + '#' + str(objVersion) logging.info('delete_containee_object entered: ' + containee) if not self.reverseDict.has_key(containee): logging.info('delete_containee_object: not in ReverseLists.') return False logging.info('delete_containee_object: found to be a containee.') ryw.db_print('delete_containee_object: before del: ' + repr(self.reverseDict), 4) del self.reverseDict[containee] ryw.db_print('delete_containee_object: after del: ' + repr(self.reverseDict), 4) return True
def __minus_many_mappings(self, containees, container): ryw.db_print('minus_many_mappings: before deletion: ' + repr(self.reverseDict), 17) if not containees or containees == []: return False changed = False for containee in containees: logging.info('minus_many_mappings: deal with containee: '+ containee) if self._ReverseLists__remove_obsolete_containers_memory( containee, [container]): changed = True logging.info('minus_many_mappings: changed made.') ryw.db_print('minus_many_mappings: after deletion: ' + repr(self.reverseDict), 17) return changed
def read_container_file(conID, conVersion, searchFile, repositoryRoot):
    """given a container ID, read its content.
    this is an internal helper function. returns a (possibly empty)
    list of containees."""
    # ShowQueue lives under the repository's cgi-bin, so extend the
    # module search path before importing it.
    sys.path.append(os.path.join(repositoryRoot, 'cgi-bin'))
    import ShowQueue
    ryw.db_print('read_container_file entered: ' + conID + '#' +
                 str(conVersion), 1)
    filePath = get_file_path(conID, conVersion, searchFile,
                             repositoryRoot)
    if not filePath:
        return []
    ryw.db_print('read_container_file: got file path: ' + filePath, 1)
    contents = ShowQueue.read_list(filePath)
    ryw.db_print('read_container_file: got content: ' +
                 repr(contents), 1)
    return contents
def lookup(self, containee): """returns a list of containers, each of which is a triplet: objstr, alias, and title string. write lock should be held because we might need to delete obsolete containers. called by the object display code.""" #logging.debug('ReverseLists.lookup: containee = ' + containee) #logging.debug('ReverseLists.lookup: dict is: ' + # repr(self.reverseDict)) if not self.reverseDict.has_key(containee): #logging.debug('ReverseLists.lookup: containee not found.') return [] containers = self.reverseDict[containee] containerInfo = [] obsoleteContainers = [] for container in containers: #logging.debug('ReverseLists.lookup: container is ' + # container) success,containerID,containerVersion = \ ryw.split_objstr(container) if not success: obsoleteContainers.append(container) logging.warning('ReverseLists.lookup: failed splitting: '+ container) continue if not self.searchFile: ryw.give_bad_news('ReverseLists.lookup: no searchFile.', logging.critical) raise NameError('ReverseLists.lookup: no searchFile.') if not is_in_container_file(containerID, containerVersion, containee, self.searchFile, self.repositoryRoot): obsoleteContainers.append(container) logging.info('ReverseLists.lookup: obsolete mapping: ' + containee + ' -> ' + container) continue # # now get the metadata so we can retrieve its # alias and title. 
# success,meta = self.searchFile.get_meta(containerID, containerVersion) if success: ryw.db_print('ReverseLists.lookup: fast seachFile done!', 10) else: obsoleteContainers.append(container) logging.warning('ReverseLists.lookup: failed searchFile ' + 'lookup: '+ container) continue #success,meta,objroot = ryw_meta.get_meta2( # repositoryRoot, containerID, containerVersion) #if not success: # obsoleteContainers.append(container) # logging.warning('ReverseLists.lookup: failed get_meta2: '+ # container) # continue alias = 'unnamed' title = 'unnamed' if meta.has_key('content_alias'): alias = meta['content_alias'] if meta.has_key('title'): title = meta['title'] containerInfo.append([container, alias, title]) ryw.db_print('ReverseLists.lookup: appending good: ' + repr([container, alias, title]), 10) if obsoleteContainers: self._ReverseLists__remove_obsolete_containers( containee, obsoleteContainers) #logging.debug('ReverseLists.lookup: ' + repr(containerInfo)) return containerInfo
def process_disk_from_peer_repository(dir_name, diskRoot, overwrite=False):
    """Merge objects found on a peer repository's disc into the local
    repository.

    For each object version under dir_name/objects:
      - a stub (metadata-only) for an object already present locally
        has its metadata merged via deal_with_stub();
      - a full object not present locally is uploaded;
      - a full object already present is skipped, or (when overwrite
        is True) deleted and re-uploaded.
    Finally the disc's ReverseLists are merged in. Returns a success
    boolean.
    """
    objectroot = os.path.join(dir_name, 'objects')
    if not os.path.exists(objectroot):
        logging.info(
            'process_disk_from_peer_repository: no objects directory.' +
            objectroot)
        return True
    ## Process all incoming objects
    local_object_root = get_local_object_store_root()
    if local_object_root is None:
        return False
    # mapping used to locate item data on the disc image.
    mapDict = ryw_philips.get_map(diskRoot)
    # opened lazily by deal_with_stub() and reused across iterations.
    searchFile = None
    for objectId,version in objectstore.objectversioniterator(objectroot):
        ryw.give_news3('----------', logging.info)
        if mapDict == None:
            ryw.give_bad_news(
                'process_disk_from_peer_repository: failed to read map file: '+
                diskRoot, logging.error)
            return False
        #
        # We used to just skip objects already present.
        # now we want to do something.
        #
        #if object_version_is_present(local_object_root, objectId, version):
        #    ## object already present
        #    continue
        objectFound = object_version_is_present(local_object_root,
                                                objectId, version)
        paths = objectstore.name_version_to_paths_aux(objectroot,
                                                      objectId, version)
        itemName = objectId + '#' + str(version)
        obdata = get_data_name_mirror(paths[0], diskRoot, mapDict,
                                      itemName)
        if not obdata:
            continue
        success,isStub = is_data_stub(obdata)
        if not success:
            continue
        metadata = get_metadata_mirror(paths[1])
        if not metadata:
            continue
        auxdir = get_aux_name_mirror(paths[2], diskRoot, mapDict,
                                     itemName)
        if isStub and not objectFound:
            # a stub is only useful for an object we already have.
            #ryw.give_bad_news(
            #    'ProcessDiscs: is a stub but not found in database: '+
            #    itemName, logging.error)
            ryw.give_news3(
                'ProcessDiscs error: is a stub but not found in database: '+
                itemName, logging.error)
            continue
        if isStub:
            # stub for an object we have: merge the incoming metadata,
            # reusing (and possibly opening) the cached searchFile.
            success,searchFile = deal_with_stub(
                local_object_root, objectId, version, metadata,
                obdata, auxdir, searchFile = searchFile)
            if success:
                ryw.give_news3(
                    'ProcessDiscs success: meta data processed.',
                    logging.info)
            continue
        if objectFound:
            #ryw.give_bad_news(
            #    'ProcessDiscs: not a stub but found in the database: '+
            #    itemName, logging.error)
            ryw.give_news3(
                'ProcessDiscs: not a stub but found in the database: '+
                itemName, logging.error)
            if not overwrite:
                ryw.give_news3('processing skipped.', logging.error)
                continue
            #
            # I might want to delete the old version here.
            # using the code from DelObject, should be simple.
            #
            ryw.give_news3('deleting it...', logging.error)
            #
            # at one point, I thought I wanted to be clever and
            # tried to reuse searchFile below. But the trouble is
            # that the UploadObject.uploadobject() call below will
            # change the SearchFile beneath me, and if I reuse
            # the searchFile here, I end up flushing the incorrectly
            # cached version back to disk. I actually would have
            # expected a deadlock when UploadObject.uploadobject()
            # tries to lock again but the deadlock somehow
            # didn't happen...
            #
            success,searchFile = DeleteObject.do_delete(
                objectId, version, searchFile=None)
            if not success:
                ryw.give_bad_news(
                    'ProcessDiscs: DeleteObject failed: ' +
                    objectId + '#' + str(version), logging.error)
                continue
            else:
                ryw.db_print('process_disk_from_peer_repository: ' +
                             'do_delete succeeded.', 18)
            #
            # falls through to continue onto adding the object.
            #
        if not UploadObject.uploadobject(metadata, obdata, auxdir,
                                         hasVersion = True):
            ryw.give_bad_news(
                'process_disk_from_peer_repository: ' +
                'UploadObject.uploadobject failed: '+
                repr(metadata) + ' ' + obdata, logging.critical)
            continue
        else:
            ryw.give_news3('ProcessDiscs success: new data uploaded.',
                           logging.info)
            continue
    # merge the disc's reverse lists into the local ones.
    incomingReverseLists = os.path.join(dir_name, 'ReverseLists')
    existingReverseLists = os.path.join(RepositoryRoot, 'ReverseLists')
    ReverseLists.merge_incoming(
        existingReverseLists, incomingReverseLists, RepositoryRoot,
        searchFile = searchFile)
    if searchFile:
        searchFile.done()
    return True
def merge(self, incoming, repositoryRoot): """called by ProcessDiscs.py. to merge an incoming ReverseLists.""" changed = False for containee,containers in incoming.reverseDict.iteritems(): ryw.db_print('ReverseLists.merge: examine ' + containee + ' -> ' + repr(containers), 11) if not is_valid_local_object(containee, repositoryRoot, self.searchFile): ryw.db_print('ReverseLists.merge: this containee skipped: '+ containee, 11) continue ryw.db_print('ReverseLists.merge: good containee: ' + containee, 11) goodContainers = [] for container in containers: if not is_valid_local_object(container, repositoryRoot, self.searchFile): ryw.db_print('ReverseLists.merge: container skipped: ' + container, 11) continue ryw.db_print('ReverseLists.merge: good container: ' + container, 11) goodContainers.append(container) if goodContainers: ryw.db_print('ReverseLists.merge: adding one mapping: ' + containee + ' -> ' + repr(goodContainers), 11) self._ReverseLists__union_one_mapping(containee, goodContainers) changed = True if changed: return self._ReverseLists__write_to_disk()
def make_form_string(self): """called by ChapterListForm() to display the chapter edit form.""" self.re_sort_items() pageStr1 = """ <BR> <FORM ACTION="/cgi-bin/ChapterListFormHandle.py" METHOD="post" ENCTYPE="multipart/form-data"> <INPUT TYPE="submit" VALUE="Save"> <P> <INPUT TYPE="HIDDEN" NAME="objstr" VALUE="%(objstr)s"> <INPUT TYPE="HIDDEN" NAME="selection_length" VALUE="%(selectionLength)s"> %(itemLines)s <BR> <INPUT TYPE="submit" VALUE="Save"> </FORM> """ selLen = len(self.itemList) dict = {} # # this is the objstr of the saved selection. # dict['objstr'] = self.objstr dict['selectionLength'] = str(selLen) itemLines = '' itemLines += '<TABLE CLASS=search>\n\n' # # BGCOLOR="bef7f7" # headerRow = """ <TR> <TD class=search BGCOLOR="c3d9ff"><FONT SIZE=2><B>title</B></FONT></TD> <TD class=search BGCOLOR="c3d9ff"><FONT SIZE=2><B>alias</B></FONT></TD> <TD class=search BGCOLOR="c3d9ff"><FONT SIZE=2><B>chapter</B></FONT></TD> <TD class=search BGCOLOR="c3d9ff" ALIGN=CENTER><FONT SIZE=2> <B>id</B></FONT></TD> </TR>""" itemLines += headerRow # # itemNumber is used to postfix field names. # itemNumber = 0 for itemStr in self.itemList: itemLines += '<TR>\n' # # 5th is the title field. # itemLines += '<TD CLASS=search>\n' itemLines += '<FONT size=1>' titleField = '' if self.chapterDict.has_key(itemStr): title = self.chapterDict[itemStr][1] if title: titleField = title itemLines += titleField + '</FONT>\n' itemLines += '</TD>\n' # # 4th is the alias field. 
# itemLines += '<TD CLASS=search>\n' itemLines += '<FONT size=1>' aliasField = '' if self.chapterDict.has_key(itemStr): alias = self.chapterDict[itemStr][0] if alias: aliasField = alias itemLines += aliasField + '</FONT>\n' itemLines += '</TD>\n' # # 3rd is the text entry box for the chapter number, like: # <INPUT TYPE="TEXT" NAME="chapter_number0" SIZE="10" VALUE=foo> # chapterEntryField = '<INPUT TYPE="TEXT"' + \ 'NAME="%(fieldName)s" SIZE="10" VALUE="%(chapterValue)s">\n' d3 = {} d3['fieldName'] = 'chapter_number' + str(itemNumber) d3['chapterValue'] = '' ryw.db_print('itemStr is: ' + itemStr, 39) if self.chapterDict.has_key(itemStr): defaultChapter = self.chapterDict[itemStr][2] if defaultChapter: ryw.db_print('found default chapter: ' + defaultChapter, 39) d3['chapterValue'] = defaultChapter else: ryw.db_print('found no default chapter', 39) else: ryw.db_print('itemStr ' + itemStr + ' not found.', 39) itemLines += '<TD CLASS=SEARCH>\n' itemLines += chapterEntryField % d3 itemLines += '</TD>\n' itemLines += '\n' # # 1st is the objstr of the item, printed on the page. # # 09/12/20: now it's a link to a popup of the object. # the popup_js() is in ryw_view.py. # url = '/cgi-bin/DisplayObject.py?objstr=' + urllib.quote(itemStr) linkStr = """<a href="%s" onClick="return popup_js('%s', 950, 500);">%s</a>""" % (url, url, itemStr) itemLines += '<TD CLASS=search>\n' itemLines += '<FONT SIZE="1" FACE=COURIER>' + linkStr + '</FONT>' itemLines += '\n' # # 2nd is the hidden field for the objstr, like: # <INPUT TYPE="HIDDEN" NAME="item_objstr0" VALUE=objstr> # hiddenItemStrField = '<INPUT TYPE="HIDDEN" ' + \ 'NAME="%(fieldName)s" VALUE="%(itemStr)s">\n' d2 = {} d2['fieldName'] = 'item_objstr' + str(itemNumber) d2['itemStr'] = itemStr itemLines += hiddenItemStrField % d2 itemLines += '</TD>\n' itemLines += '\n' itemLines += '</TR>\n' itemLines += '\n' itemNumber += 1 itemLines += '\n</TABLE>\n' dict['itemLines'] = itemLines return pageStr1 % dict