def main():
    """CGI entry point: add every object in the search index to the
    requesting user's download queue.

    Resolves the per-user queue directory from REMOTE_USER, reads the
    search-file name out of Resources.txt, opens the search file, and
    hands both to ProcessDownloadReq.add_all().  Exits the CGI process
    via ryw_upload.quick_exit() on any failure.
    """
    ryw.check_logging(os.path.join(RepositoryRoot, "WWW", "logs"),
                      "upload.log")
    logging.debug("SelectAll: entered...")
    ryw_view.print_header_logo()

    name = os.getenv("REMOTE_USER")
    # was: name == "" or name == None -- a single truthiness test covers both.
    if not name:
        ryw.give_bad_news("SelectAll: no user name given", logging.error)
        ryw_upload.quick_exit(1)
    queue = os.path.join(RepositoryRoot, "QUEUES", name)

    try:
        resources = su.parseKeyValueFile(
            os.path.join(RepositoryRoot, "Resources.txt"))
        searchFileName = resources["searchfile"]
    except Exception:
        # narrowed from a bare except: so SystemExit/KeyboardInterrupt
        # still propagate.
        ryw.give_bad_news(
            "SelectAll: failed to get search file name from resources.",
            logging.critical)
        ryw_upload.quick_exit(1)

    success, searchFile = ryw.open_search_file(
        "SelectAll:",
        os.path.join(RepositoryRoot, "WWW", "logs"),
        "upload.log",
        searchFileName,
        False)
    if not success:
        ryw.give_bad_news("SelectAll: failed to open search file.",
                          logging.critical)
        ryw_upload.quick_exit(1)

    if not ProcessDownloadReq.add_all(queue, searchFile):
        ryw.give_bad_news("selectall: addAll failed.", logging.critical)
        ryw_upload.quick_exit(1)

    searchFile.done()
    ryw_upload.quick_exit(0)
def main():
    """Process incoming disc images dropped off by the copy stage, or
    dispatch an autorun-merge request when one is pending."""
    ryw.check_logging(os.path.join(RepositoryRoot, 'WWW', 'logs'),
                      'upload.log')
    logging.debug('ProcessDiscs: entered...')

    ok, workDir, jobFile, wantsMerge, replaceExisting = init_vals()
    if not ok:
        ryw_upload.quick_exit(1)

    logging.debug('ProcessDiscs: tmpdir,jobfile: %s %s' % (workDir, jobFile))
    ryw.give_news('processing incoming disc images located in: ' + workDir,
                  logging.info)

    # An autorun-merge request short-circuits normal disc processing.
    if wantsMerge:
        process_autorun_merge_request(workDir, overwrite=replaceExisting)
        sys.exit(0)

    process_finished_copies(workDir)
    ryw_upload.cleanup_incoming(workDir, jobFile)
    ryw_view.print_footer()
    sys.exit(0)
def main():
    """Drive a disc-erase job: write the robot job file, kick off the
    erase, then wait for completion and clean up."""
    ryw.check_logging(os.path.join(RepositoryRoot, 'WWW', 'logs'),
                      'upload.log')
    logging.debug('eraseDisc.py: attempted...')

    jobsDir = get_resources()
    ryw_view.print_header_logo()
    if not jobsDir:
        ryw_upload.quick_exit(1)
    if not find_scsi_string_for_eraser(jobsDir):
        ryw_upload.quick_exit(1)

    jobCount = 100
    jobFileName = write_job_file(jobCount, jobsDir)
    if not jobFileName:
        ryw_upload.quick_exit(1)

    do_erase(jobFileName, jobCount, jobsDir)
    wait_to_end_and_cleanup(jobFileName, jobCount, jobsDir)
    ryw_view.print_footer()
def main():
    """Clone one version of a repository object into a new version.

    Reads objstr ("objectID#version") from the CGI form, fetches the
    object's metadata and paths, copies the auxiliary directory if one
    exists, carries the selection file along for list objects, rewrites
    the metadata via change_meta(), and uploads the result as a new
    version through WebUpload_ryw.try_upload_object().
    """
    name = print_header()
    form = cgi.FieldStorage()
    WebUpload_ryw.setup_logging()

    #
    # get objstr.
    #
    success, objID, version = ryw.get_obj_str2(form)
    if not success:
        ryw.give_bad_news('CloneVersion: failed to get objstr.',
                          logging.critical)
        ryw_upload.quick_exit(1)
    message = 'CloneVersion: ' + objID + '#' + str(version)
    logging.info(message)
    ryw.db_print2("<BR>" + message + "<BR>", 23)

    #
    # open search file.
    # (log prefix fixed from the original misspelling 'CloneVerson'.)
    #
    success, searchFile = ryw.open_search_file(
        'CloneVersion',
        os.path.join(RepositoryRoot, 'WWW', 'logs'),
        'upload.log',
        os.path.join(RepositoryRoot, 'SearchFile'),
        False)
    if not success:
        if searchFile:
            searchFile.done()
        ryw.give_bad_news('CloneVersion: ' + 'open search file failed. ',
                          logging.critical)
        ryw_upload.quick_exit(1)
    else:
        ryw.db_print2("search file opened." + "<BR>", 23)

    #
    # get meta and paths.
    #
    success, paths, meta = DisplaySelection.get_all_paths(
        objID, version, skipLock=False, searchFile=searchFile,
        allowNullSearchFile=False)
    if success:
        ryw.db_print_info_browser('CloneVersion: paths: ' + repr(paths), 24)
        ryw.db_print_info_browser('CloneVersion: meta: ' + repr(meta), 29)
    else:
        ryw_upload.quick_exit(1)
    if searchFile:
        searchFile.done()

    #
    # we do want to clone the data if it were a list.
    #
    isList = ryw_meta.isList(meta)
    if isList:
        dataPath = paths[0]
        selName = DisplaySelection.get_sel_name(dataPath)
        if not selName:
            ryw.give_bad_news(
                'CloneVersion: isList but failed to get selection name.',
                logging.error)
            ryw_upload.quick_exit(1)
        selPath = os.path.join(dataPath, selName)
    else:
        selPath, selName = None, None

    #
    # change meta.
    #
    meta = change_meta(meta, name)

    #
    # deal with auxi dir: copy it into a fresh tmpdir so the clone gets
    # its own private copy.
    #
    originalAuxiDir = paths[2]
    newAuxiDir = None
    tmpdir = None
    if os.path.exists(originalAuxiDir):
        tmpdir = WebUpload_ryw.attempt_make_tmpdir()
        if not tmpdir:
            ryw_upload.quick_exit(1)
        newAuxiDir = os.path.join(tmpdir, '_AUXI')
        message = 'CloneVersion: shutil.copytree(): ' + \
                  originalAuxiDir + ' -> ' + newAuxiDir
        try:
            shutil.copytree(originalAuxiDir, newAuxiDir)
        except Exception:
            # narrowed from a bare except: keep interpreter exits alive.
            ryw.give_bad_news('failed: ' + message, logging.critical)
            ryw_upload.cleanup_and_exit(tmpdir, None, None, 1)
        ryw.db_print_info_browser(message, 29)

    #
    # Now try to put a new object in the repository.
    # note that the version number will be incremented.
    #
    # "selPath" used to be just None.
    # when I added cloning list, I'm just using this to pass in the
    # path name of the selection file.
    #
    if not WebUpload_ryw.try_upload_object(meta, selPath, newAuxiDir,
                                           cloneVersion=True):
        ryw_upload.cleanup_and_exit(tmpdir, None, None, 1)

    searchFile = WebUpload_ryw.show_one_server_object(meta)
    searchFile.done()
    ryw_upload.cleanup_and_exit(tmpdir, None, None, 0,
                                successMessage='clone version completed.')
# CGI admin page: greets REMOTE_USER and refuses anyone but 'admin',
# then parses Resources.txt into a key/value dictionary.
import os, sys
import su, RunPYProcessDetached as Run, KillXMLRPCServer as Kill
import ryw_view, ryw_upload
import cgi, cgitb

# Show tracebacks in the browser when this CGI script fails.
cgitb.enable()

name = os.getenv("REMOTE_USER")
ryw_view.print_header_logo()
print '<P> Hello!,', name

# Administrator-only page: bail out for any other authenticated user.
if name != 'admin':
    print '<P>Administrator only.'
    ryw_upload.quick_exit(1)

#####################################################################
# Parse Resources.txt ("key = value" per line) into `resources`.
resfile = os.path.join(RepositoryRoot, 'Resources.txt')
resources = {}
for line in open(resfile).readlines():
    line = line.strip()
    # split only on the first '=' so values may themselves contain '='.
    key, value = line.split('=', 1)
    key = key.strip()
    value = value.strip()
    # NOTE(review): the parsed key/value pair is never stored into
    # `resources` within this visible span -- presumably a
    # `resources[key] = value` follows beyond this chunk; confirm
    # against the full file before relying on `resources`.
def main():
    """Process an upload that creates a list (selection) object."""
    # initialization.
    uploader = WebUpload_ryw.print_header()
    form = cgi.FieldStorage()
    WebUpload_ryw.setup_logging()

    if not ryw_upload.check_required_fields(form, checkFile = False):
        ryw_upload.quick_exit(1)

    excerptStuff = ryw_upload.check_local_file(
        form, fieldName = "local_excerpt_filename")

    # check aux file uploads: thumbnails, exerpts...
    ok, auxExists, aux = ryw_upload.check_aux_file_uploads(
        form, localExcerptStuff = excerptStuff)
    if not ok:
        ryw_upload.quick_exit(1)

    tmpdir = WebUpload_ryw.attempt_make_tmpdir()
    if not tmpdir:
        ryw_upload.quick_exit(1)

    ok, auxDir, auxInfo = ryw_upload.read_aux_files(
        form, aux, tmpdir, auxExists, localExcerptStuff = excerptStuff)
    if not ok:
        ryw_upload.cleanup_and_exit(tmpdir, None, None, 1)

    byteCount, uploader = ryw_upload.copy_queue_file(tmpdir, uploader)
    if byteCount == 0:
        ryw_upload.cleanup_and_exit(tmpdir, None, None, 1)
    kiloBytes = math.ceil(byteCount / 1024.0)
    dataName = uploader

    #
    # the rest of this stuff copied straight from WebUpload_ryw.py
    # not nice, but hey.
    #
    meta = ryw_upload.try_process_attributes(uploader, form, dataName,
                                             kiloBytes, byteCount)
    if not meta:
        ryw_upload.cleanup_and_exit(tmpdir, None, None, 1)

    # mark this object as a list in its system attributes.
    ryw_upload.add_set_attrs(meta, 'sys_attrs', 'isList')
    meta = ryw_upload.add_aux_attributes(meta, auxInfo)

    ok, metafile = ryw_upload.write_tmp_metafile(meta, tmpdir)
    if not ok:
        ryw_upload.cleanup_and_exit(tmpdir, metafile, None, 1)

    nameToUpload, extractDir = ryw_upload.try_unzip_file(
        form, tmpdir, dataName, kiloBytes)
    if not nameToUpload:
        ryw_upload.cleanup_and_exit(tmpdir, metafile, extractDir, 1)

    ryw.give_news2('<BR>Storing the list in the repository...', logging.info)
    if not WebUpload_ryw.try_upload_object(meta, nameToUpload, auxDir):
        ryw_upload.cleanup_and_exit(tmpdir, metafile, extractDir, 1)

    #ryw_view.show_server_object(meta)
    searchFile = WebUpload_ryw.show_one_server_object(meta)

    #
    # Ok to do this stuff after the display, because there's no
    # way the newly added selection could be a containee of someone else.
    #
    ReverseLists.add_queue(meta, searchFile, RepositoryRoot)
    searchFile.done()
    ryw_upload.cleanup_and_exit(tmpdir, metafile, extractDir, 0)
def main():
    """Process an upload of a data object, either from a local path on
    the server or from a remotely uploaded file."""
    # initialization.
    uploader = print_header()
    form = cgi.FieldStorage()
    setup_logging()

    okLocal, haveLocal, localPath, localDir = \
        ryw_upload.check_local_file(form)
    if not okLocal:
        ryw.give_bad_news("check_local_file failed.", logging.error)
        ryw_upload.quick_exit(1)

    if not ryw_upload.check_required_fields(form, checkFile=not haveLocal):
        ryw_upload.quick_exit(1)

    if haveLocal:
        buf = None
    else:
        # just read a tiny bit to see if we have an empty upload file.
        buf = ryw_upload.attempt_read_uploaded_file(form, "local_filename")
        if not buf:
            ryw_upload.quick_exit(1)

    excerptStuff = ryw_upload.check_local_file(
        form, fieldName="local_excerpt_filename")

    # check aux file uploads: thumbnails, exerpts...
    ok, auxExists, aux = ryw_upload.check_aux_file_uploads(
        form, localExcerptStuff=excerptStuff)
    if not ok:
        ryw_upload.quick_exit(1)

    tmpdir = attempt_make_tmpdir()
    if not tmpdir:
        ryw_upload.quick_exit(1)

    ok, auxDir, auxInfo = ryw_upload.read_aux_files(
        form, aux, tmpdir, auxExists, localExcerptStuff=excerptStuff)
    if not ok:
        ryw_upload.cleanup_and_exit(tmpdir, None, None, 1)

    dataName = ryw_upload.decide_tmp_data_file_name(
        form, localPath=localPath, isLocalDir=localDir)
    if not dataName:
        ryw_upload.cleanup_and_exit(tmpdir, None, None, 1)

    ok, copied, byteCount = ryw_upload.copy_local_file_for_upload(
        form, tmpdir, dataName, haveLocal, localPath, localDir)
    if not ok:
        ryw_upload.cleanup_and_exit(tmpdir, None, None, 1)
    kiloBytes = math.ceil(byteCount / 1024.0)

    # Not a server-local file: pull the rest of the remote upload.
    if not copied:
        ryw.give_news2("<BR>Copying remote file...", logging.info)
        kiloBytes, byteCount = ryw_upload.read_uploaded_file(
            form, buf, tmpdir, dataName, "local_filename")
        if kiloBytes == 0:
            ryw_upload.cleanup_and_exit(tmpdir, None, None, 1)

    meta = ryw_upload.try_process_attributes(uploader, form, dataName,
                                             kiloBytes, byteCount)
    if not meta:
        ryw_upload.cleanup_and_exit(tmpdir, None, None, 1)
    meta = ryw_upload.add_aux_attributes(meta, auxInfo)

    if not localDir:
        ryw_ffmpeg.try_exec(RepositoryRoot, meta, tmpdir, dataName)

    ok, metafile = ryw_upload.write_tmp_metafile(meta, tmpdir)
    if not ok:
        ryw_upload.cleanup_and_exit(tmpdir, metafile, None, 1)

    # Directories are uploaded as-is; files may need unzipping first.
    if localDir:
        nameToUpload, extractDir = (os.path.join(tmpdir, dataName), None)
    else:
        nameToUpload, extractDir = ryw_upload.try_unzip_file(
            form, tmpdir, dataName, kiloBytes)
    if not nameToUpload:
        ryw_upload.cleanup_and_exit(tmpdir, metafile, extractDir, 1)

    ryw.give_news2("<BR>Storing the data in the repository...", logging.info)
    ryw.db_print2("<BR>" + "meta: " + repr(meta) + "<BR>", 57)
    ryw.db_print2("nameToUpload: " + nameToUpload + "<BR>", 22)
    ryw.db_print2("auxDir: " + repr(auxDir) + "<BR>", 22)

    if not try_upload_object(meta, nameToUpload, auxDir):
        ryw_upload.cleanup_and_exit(tmpdir, metafile, extractDir, 1)

    # ryw_view.show_server_object(meta)
    searchFile = show_one_server_object(meta)
    searchFile.done()
    # cgi.print_form(form)
    ryw_upload.cleanup_and_exit(tmpdir, metafile, extractDir, 0)
def main():
    """Edit an object's metadata from the CGI form, optionally
    re-extracting derived data from the stored file."""
    # initialization.
    print_header()
    form = cgi.FieldStorage()
    setup_logging()
    # cgi.print_form(form)

    ok, objID, version = ryw.get_obj_str2(form)
    if not ok:
        ryw_upload.quick_exit(1)
    logging.debug('EditObject: ' + objID + '#' + str(version))

    if not ryw_meta.check_required_fields(form):
        ryw_upload.quick_exit(1)

    ok, meta, objroot = ryw_meta.get_meta2(RepositoryRoot, objID, version)
    if not ok:
        ryw_upload.quick_exit(1)

    reext = re_extract_or_not(form)
    if reext:
        paths = ryw_meta.get_paths(objroot, objID, version, meta,
                                   RepositoryRoot)
        if not paths:
            ryw.give_bad_news('EditObject: failed to get paths.',
                              logging.critical)
            ryw_upload.quick_exit(1)
        dataPath = paths[0]
        fileName = get_file_name(dataPath)
        if not fileName:
            # no data file found: silently skip re-extraction.
            reext = False

    ok = ryw_meta.process_error_fields(form, meta)
    if not ok:
        ryw_upload.quick_exit(1)
    ryw_meta.process_fields(form, meta)
    logging.debug('EditObjects: ' + repr(meta))
    meta = ryw_upload.get_change_date_time(meta)
    #ryw.give_news(repr(meta), logging.info)

    if reext:
        re_extract(dataPath, fileName, meta)

    ok, searchFile = do_update_metadata(objroot, objID, version, meta)
    if not ok:
        ryw_upload.quick_exit(1)

    show_one_server_object(meta, searchFile)
    ryw.give_good_news('edit completed.', logging.info)
    ryw_view.print_footer()
    if searchFile:
        searchFile.done()
    sys.exit(0)
def main(logDir, logFile, searchFile, scriptName, resources = None):
    """main function processing search request.

    Parses the CGI form into a CNF query plus sort/pagination state,
    matches it against the search index, and prints one page of results
    (NUM_OBJECTS_PER_PAGE items) with next-page and selection links.
    """
    # initialization.
    # NOTE(review): `name` is assigned but never used below.
    name = print_header()
    form = cgi.FieldStorage()
    setup_logging(logDir, logFile)
    ## cgi.print_form(form)

    ## parse the form to get: query, sorting information, start_index
    ## to know which subset of matches/results to return,
    ## search_attributes for the next_page_button
    query, sort_tuple, start_index, search_attributes, error_message = \
        parse_form(form)
    if query is None:
        print '<P> ERROR while parsing form and constructing ' + \
              'search query:', error_message
        sys.exit(1)
    ## print '<HR>Query:', query

    ## read index file to get the list of dictionaries:
    ## one dictionary for each version of each object, contains its meta-data
    #
    # used to open search file without read.
    # because Sobti is doing the read by himself below.
    # I have to change this because the SearchFile will be passed
    # onto the ReverseLists module for more later lookups.
    # this makes it necessary to do a real SearchFile open.
    #
    #success,searchFileLock = ryw.open_search_file(
    #    'Search:', logDir, logFile, searchFile, False, skipRead = True)
    success,searchFileLock = ryw.open_search_file(
        'Search:', logDir, logFile, searchFile, False, skipRead = False)
    if not success:
        ryw.give_bad_news('Search: failed to acquire search file lock: ' +
                          searchFile, logging.critical)
        ryw_upload.quick_exit(1)
    #else:
    #    # mis-named: it's really not a searchFileLock, but searchFile itself.
    #    #displayObject.set_search_file(searchFileLock)

    #
    # this is when Sobti used to do his own read, now replaced by mine.
    #
    #metas, error_message = read_index_file_to_get_metas(searchFile)
    metas = searchFileLock.convert_to_sobti_list()
    if metas is None:
        # NOTE(review): error_message here is stale -- it still holds the
        # value from parse_form() above, not a read error.
        print '<P> ERROR while reading the index file to get ' + \
              'the meta-data dictionaries:', error_message
        searchFileLock.done()
        ryw_upload.quick_exit(1)

    ## build a list of metas that satisfy the given query
    matches = []
    for meta in metas:
        # skip objects hidden by the resources' visibility policy.
        if not ryw_view.should_show_object(meta, resources):
            continue
        if cnf_match.matches_cnf(meta, query):
            matches.append(meta)
            ryw.db_print2('Search:main() ' + repr(meta), 53)
    num_matches = len(matches)

    ## sort the matches by the given sort tuple
    matches, error_message = sort_matches(matches, sort_tuple)
    if matches is None:
        print '<P> ERROR while sorting matches:', error_message
        searchFileLock.done()
        ryw_upload.quick_exit(1)

    #
    # save all current search results in a file for
    # possible inclusion in the current selection.
    #
    matchAllName = save_matches(matches)

    ## Return the start_index..(start_index + N) entries from the top
    if num_matches == 0 or start_index >= num_matches:
        num_items = 0
    else:
        # clamp the window to [0, num_matches-1].
        start_index = max(0, start_index)
        assert start_index < num_matches
        end_index = start_index + NUM_OBJECTS_PER_PAGE - 1
        end_index = max(0, end_index)
        end_index = min(num_matches - 1, end_index)
        if not (0 <= start_index <= end_index < num_matches):
            print '<P> ASSERT ERROR: start_index, end_index, ' + \
                  'num_matches', start_index, end_index, num_matches
            searchFileLock.done()
            ryw_upload.quick_exit(1)
        num_items = end_index - start_index + 1

    ## num_items is the number of items that will actually be displayed
    ## num_items <= num_matches
    if num_items == 0:
        print '<P><H3>No objects to display</H3>'
        searchFileLock.done()
        ryw_upload.quick_exit(1)
    else:
        #print '<P><H3>%d objects satisfy the search criteria, ' + \
        #    'displaying %d of them</H3>' % (num_matches, num_items)
        print """
<BR><B><FONT SIZE=2>%d object(s) satisfy the search criteria.</FONT></B>""" % \
            (num_matches)
        print """
<BR><B><FONT SIZE=2>displaying matches %d -
%d.</FONT></B><BR>""" % \
            (start_index + 1, start_index + num_items)
        print_next_page_button1(search_attributes, end_index + 1,
                                scriptName, num_matches)

        shownMatches = []
        # ReverseLists needs the already-open search file for later lookups.
        success,reverseLists = ReverseLists.open_reverse_lists(
            'Search:', '', '',
            os.path.join(RepositoryRoot, 'ReverseLists'), True,
            searchFile = searchFileLock,
            repositoryRoot = RepositoryRoot)
        if not (success and reverseLists):
            ryw.give_bad_news('Search.main: failed to open ReverseLists.',
                              logging.critical)
            if reverseLists:
                reverseLists.done()
            return False

        displayObject = ryw_view.DisplayObject(
            RepositoryRoot, calledByVillageSide = False,
            missingFileFunc=Browse.reqDownloadFunc,
            searchFile = searchFileLock,
            reverseLists = reverseLists)
        displayObject.begin_print()
        for i in range(start_index, end_index + 1):
            #display_object(matches[i])
            displayObject.show_an_object_compact(matches[i])
            shownMatches.append(matches[i])
        displayObject.end_print()
        reverseLists.done()

        #
        # save search results on this page in a file for
        # possible inclusion in the current selection.
        #
        matchThisPageName = save_matches(shownMatches)
        print_selection_links(matchAllName, matchThisPageName)

        ## Include a next-page button
        print_next_page_button2(search_attributes, end_index + 1,
                                scriptName, num_matches)

    searchFileLock.done()
    ryw_view.print_footer()