def verify_ffmpeg_existence(RepositoryRoot):
    commandPath = os.path.join(RepositoryRoot, FFMPEG_PATH_SUFFIX)
    if ryw.is_valid_file(commandPath, 'verify_ffmpeg_existence'):
        return commandPath
    ryw.give_news('Cannot find ffmpeg. We advise you to install it.',
                  logging.info)
    return commandPath
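# A minimal usage sketch (hypothetical helper, not part of the original
# module): build an ffmpeg command line from the path returned above.
# RepositoryRoot and FFMPEG_PATH_SUFFIX are assumed to be defined by the
# surrounding module, as in verify_ffmpeg_existence().
def example_build_ffmpeg_command(inputFile, outputFile):
    ffmpegPath = verify_ffmpeg_existence(RepositoryRoot)
    if not os.path.exists(ffmpegPath):
        # verify_ffmpeg_existence() already warned the user.
        return None
    # quoting kept deliberately simple; real callers may need escaping.
    return '"%s" -i "%s" "%s"' % (ffmpegPath, inputFile, outputFile)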
def main():
    ryw.check_logging(os.path.join(RepositoryRoot, 'WWW', 'logs'),
                      'upload.log')
    logging.debug('ThisSelToCurrSel: entered...')
    ryw_view.print_header_logo()

    rfpath = get_file_path(allowNullSearchFile = True)
    if not rfpath:
        ryw.give_bad_news(
            'ThisSelToCurrSel: no selection file name found.',
            logging.error)
        DisplaySelection.exit_now(1)

    queueName = DeleteRepReq.get_queue_name()
    if not queueName:
        ryw.give_bad_news(
            'ThisSelToCurrSel: failed to get current selection file name.',
            logging.error)
        DisplaySelection.exit_now(1)

    try:
        shutil.copyfile(rfpath, queueName)
    except:
        ryw.give_bad_news('ThisSelToCurrSel: failed to overwrite: ' +
                          rfpath + ' -> ' + queueName,
                          logging.critical)
        DisplaySelection.exit_now(1)

    ryw.give_news(
        'This selection successfully loaded as the current selection.',
        logging.info)
    ryw.give_news('You may want to reload the page containing '+
                  'the affected current selection.', logging.info)
    ryw_view.print_footer()
def main():
    name = ShowQueue.init_log()
    ryw_view.print_header_logo()

    queueName = DeleteRepReq.get_queue_name()
    if not queueName:
        ryw.give_bad_news(
            'DelQueueData.py: failed to get current selection file name.',
            logging.error)
        DisplaySelection.exit_now(1)

    queueSel = ProcessDownloadReq.get_reqs(queueName)
    if not queueSel or len(queueSel) == 0:
        ryw.give_bad_news(
            'DelQueueData.py: there is nothing in the selection.',
            logging.error)
        DisplaySelection.exit_now(1)

    success = DelSearchAll.delete_all(queueSel)
    if success:
        ryw.give_news('All these objects have been removed.', logging.info)
    else:
        ryw.give_bad_news('DelQueueData.py: an error occurred.',
                          logging.error)
        
    DisplaySelection.exit_now(0)
def open_output_text_file():
    """modeled after setting up temporary output file in rebuildSearchFile.py.
    """

    resourcesPath = os.path.join(RepositoryRoot, "Resources.txt")
    try:
        resources = ryw.get_resources(resourcesPath)
        tmpOutDir = ryw.get_resource_str(resources, "tmpout")
        if not tmpOutDir:
            ryw.give_bad_news("SelectTexts.py: failed to get tmpout resource.", logging.error)
            return None
    except:
        ryw.give_bad_news("SelectTexts.py: failed to get resources.", logging.error)
        return None

    dateTimeRand = ryw.date_time_rand()
    textfilePath = os.path.join(tmpOutDir, "Texts2Trans" + dateTimeRand)
    ryw.give_news("PHP file is generated at: ", logging.info)
    ryw.give_news(textfilePath, logging.info)

    try:
        outf = open(textfilePath, "w")
        outf.write("<?\n")
        outf.write("$trans_source = array(")
    except:
        ryw.give_bad_news("SelectTexts.py: unable to open output text file: " + textfilePath, logging.error)
        return None

    return outf
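# A hedged usage sketch (hypothetical helper): write a few entries into the
# generated PHP $trans_source array and finish the file. The entry format
# and the closing ");" are assumptions based on the header written above;
# the real pipeline finishes the file via close_output_file(), which is
# referenced elsewhere in this excerpt but not shown here.
def example_write_translation_entries(strings):
    outf = open_output_text_file()
    if not outf:
        return False
    first = True
    for s in strings:
        if not first:
            outf.write(",")
        outf.write('\n"%s" => ""' % (s,))
        first = False
    outf.write(");\n?>\n")
    outf.close()
    return True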
def add_all(queueName, searchFile):
    try:
        reqs = set()

        count = 0
        for meta in searchFile.iterator():
            objstr = meta['id'] + '#' + str(meta['version'])
            reqs.add(objstr)
            count += 1
            logging.debug('add_all: ' + objstr)

        ryw.give_news(
            'add_all: number of objects added to the request queue: ' +
            str(count), logging.info)

        success,tmppath,bakpath = write_reqs(queueName, reqs)
        if not success:
            ryw.give_bad_news('add_all: write_reqs failed: ' + queueName,
                              logging.critical)
            return False
        cleanup(tmppath, bakpath)
        return True
    except:
        ryw.give_bad_news('add_all: failed.', logging.critical)
        return False
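# A minimal sketch of how add_all() might be driven (hypothetical helper).
# The ryw.open_search_file() call pattern is copied from
# remove_from_search_file() below; the trailing boolean flag's meaning is
# assumed, and DeleteRepReq is assumed to be importable here as in the
# other snippets.
def example_add_everything_to_queue():
    queueName = DeleteRepReq.get_queue_name()
    if not queueName:
        return False
    success, searchFile = ryw.open_search_file(
        'example_add_everything_to_queue:',
        os.path.join(RepositoryRoot, 'WWW', 'logs'),
        'upload.log',
        os.path.join(RepositoryRoot, 'SearchFile'),
        True)
    if not success:
        return False
    result = add_all(queueName, searchFile)
    searchFile.done()
    return result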
def remove_from_search_file(objectID, version):
    """remove all traces from the search meta file."""
    if not version or not objectID:
        return

    logging.debug('remove_from_search_file: entering: ' + objectID + ' ' +
                  str(version))

    success,searchFile = ryw.open_search_file(
        'remove_from_search_file:',
        os.path.join(RepositoryRoot, 'WWW', 'logs'),
        'upload.log',
        os.path.join(RepositoryRoot, 'SearchFile'),
        True)
    if not success:
        return

    try:
        searchFile.delete([(objectID, version)])
    except:
        ryw.give_news(
            'remove_from_search_file: delete: exception.',
            logging.debug)
        
    searchFile.done()
def extract_texts(reqList, searchFile):
    """modeled after ShowQueue.go_through_list()."""

    metaList = ryw_meta.get_meta_list(reqList, searchFile)
    metaList = ryw.sortmeta_chapter_number(metaList)

    totalCount = 0
    outputFile = open_output_text_file()
    if outputFile is None:
        return False

    numMatches = len(metaList)
    if numMatches <= 0:
        ryw.give_news("SelectTexts: no object selected.<BR>", logging.error)
        success = False
    else:
        success = True
        firstLine = True
        for meta in metaList:
            success, count = output_text(outputFile, meta, firstLine=firstLine)
            totalCount += count
            if not success:
                break
            if count != 0:
                firstLine = False

    close_output_file(outputFile)
    searchFile.done()

    ryw.give_news(str(totalCount) + " strings need to be translated.", logging.info)
    return success
def save_matches(matches):
    if matches is None or len(matches) == 0:
        ryw.give_news('save_matches: no match to save', logging.warning)
        return None

    success,tmpDir,selName = get_save_search_name()
    if not success:
        ryw.give_bad_news(
            'save_matches: failed to determine selection file name',
            logging.error)
        return None

    selList = make_selection_list(matches)

    #
    # copied from ProcessDownloadReq.py
    #
    success,tmppath,bakpath = ProcessDownloadReq.write_reqs(
        os.path.join(tmpDir, selName), set(selList))
    ProcessDownloadReq.cleanup(tmppath, bakpath)

    if not success:
        ryw.give_bad_news(
            'save_matches: ProcessDownloadReq.write_reqs failed: ' +
            os.path.join(tmpDir, selName), logging.error)
        return None

    cleanup_old_search_results(tmpDir, selName)

    return selName
    def add_to_search_file(self, meta, cloneVersion=False):
        """adds one to the existing version number."""
        logging.debug('add_to_search_file: ' + repr(meta))
        if not meta.has_key('id'):
            ryw.give_bad_news('add_to_search_file: missing ID...',
                              logging.critical)
            return (False, None)
        
        success,latestVersion = \
            self._SearchFile__get_latest_version(meta['id'])
        if not success:
            return (False, None)

        if not cloneVersion and \
            self._SearchFile__is_same_as_latest(meta, latestVersion):
            ryw.give_news('add_to_search_file: same version.',
                          logging.warning)
            return (True, latestVersion)

        latestVersion += 1
        meta['version'] = latestVersion

        if not self._SearchFile__append_to_search_file(meta):
            return (False, latestVersion)

        self._SearchFile__add_to_memory_index(meta['id'], latestVersion, meta)
        logging.debug('add_to_search_file: success.')
        return (True, latestVersion)
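    # A hedged usage sketch (hypothetical method): register a new version of
    # an object. add_to_search_file() bumps the version unless the metadata
    # is unchanged; compare add_this_version_to_search_file() further down,
    # which keeps the version already present in the meta dict.
    def example_register_new_version(self, meta):
        success, version = self.add_to_search_file(meta)
        if not success:
            ryw.give_bad_news(
                'example_register_new_version: failed for: ' +
                repr(meta.get('id')), logging.error)
            return None
        return version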
def main():
    ryw.check_logging(os.path.join(RepositoryRoot, 'WWW', 'logs'),
                      'upload.log')
    logging.debug('ThisSelAddedToCurrSel: entered...')
    ryw_view.print_header_logo()

    rfpath = ThisSelToCurrSel.get_file_path(allowNullSearchFile=True)
    if not rfpath:
        ryw.give_bad_news(
            'ThisSelAddedToCurrSel: no selection file name found.',
            logging.error)
        DisplaySelection.exit_now(1)

    savedSel = ProcessDownloadReq.get_reqs(rfpath)
    if (not savedSel) or (len(savedSel) == 0):
        ryw.give_news('ThisSelAddedToCurrSel: this selection is empty.',
                      logging.error)
        DisplaySelection.exit_now(1)

    queueName = DeleteRepReq.get_queue_name()
    if not queueName:
        ryw.give_bad_news(
            'ThisSelAddedToCurrSel: failed to get current selection file name.',
            logging.error)
        DisplaySelection.exit_now(1)

    queueSel = ProcessDownloadReq.get_reqs(queueName)

    AddSearchAll.union_and_write(queueSel, savedSel, queueName)
    ryw.give_news('You may want to reload the page containing '+
                  'the affected current selection.', logging.info)
    DisplaySelection.exit_now(0)
def main():
    name = init_log()
    ryw_view.print_header_logo()
    print '<TITLE>Browsing Selection</TITLE>'

    offsetResult = get_offset()
    success,isAll,offsetNum = offsetResult
    if not success:
        DisplaySelection.exit_now(1)
    
    print_header(name)
    rfpath = os.path.join(RepositoryRoot, 'QUEUES', name)
    reqList = read_list(rfpath)
    if not reqList:
        ryw.give_news('no object selected.', logging.info)
        DisplaySelection.exit_now(0)


    success,searchFile,reverseLists = \
        ReverseLists.open_searchfile_reverselists(
        'ShowQueue.main:')

    if not success:
        DisplaySelection.exit_now(0)

    if go_through_list(reqList, offResult = offsetResult,
                       searchFile = searchFile, reverseLists = reverseLists):
        print_buttons(name)
        
    ryw_view.print_footer()
    searchFile.done()
    reverseLists.done()
def create_view_path(path, objectID, version):
    if os.path.exists(path):
        ryw.give_news('create_view_path: already exists: ' + path + ', ' +
                      'replacing with new version.',
                      logging.warning)
        #return True
        #remove old view path and continue
        if not ryw.cleanup_path(path, 'create_view_path:'):
            ryw.give_news('create_view_path: failed to remove old view path.',
                          logging.error)
            return True
    
    try:
        su.createparentdirpath(path)
    except:
        ryw.give_bad_news('create_view_path: failed to createparentdirpath: '+
                          path, logging.critical)
        return False
    
    try:
        f = open(path, 'w')
        f.write(objectID + '#' + str(version))
        f.close()
    except:
        ryw.give_bad_news('create_view_path: failed to write leaf file: '+path,
                          logging.critical)
        return False
    
    logging.debug('create_view_path: done creating path: '+path)
    return True
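# A minimal caller sketch (hypothetical: the leaf file is assumed to live
# under viewRoot/<objectID>/<version>; the real view-path layout is not
# shown in this excerpt).
def example_record_view(viewRoot, objectID, version):
    leafPath = os.path.join(viewRoot, objectID, str(version))
    if not create_view_path(leafPath, objectID, version):
        ryw.give_bad_news('example_record_view: create_view_path failed: ' +
                          leafPath, logging.error)
        return False
    return True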
    def add_this_version_to_search_file(self, meta):
        """same as above but does not increment version number."""
        logging.debug('add_this_version_to_search_file: ' + repr(meta))
        if not meta.has_key('id') or not meta.has_key('version'):
            ryw.give_bad_news(
                'add_this_version_to_search_file: missing field(s)...',
                logging.critical)
            return False

        objID = meta['id']
        version = meta['version']

        success,existingMeta = self.get_meta(objID, version)
        if success:
            ryw.give_news(
                'warning: add_this_version_to_search_file: already exists: '+
                objID + ' ' + str(version), logging.warning)
            return True

        if not self._SearchFile__append_to_search_file(meta):
            return False

        self._SearchFile__add_to_memory_index(objID, version, meta)
        logging.debug('add_this_version_to_search_file: success.')
        return True
def main():
    name = ShowQueue.init_log()
    ryw_view.print_header_logo()

    rfpath = ThisSelToCurrSel.get_file_path(allowNullSearchFile=True)
    if not rfpath:
        ryw.give_bad_news(
            'DelSelData.py: no selection file name found.', logging.error)
        DisplaySelection.exit_now(1)
    
    selection = ProcessDownloadReq.get_reqs(rfpath)
    if not selection or len(selection) == 0:
        ryw.give_bad_news(
            'DelSelData.py: there is nothing in the selection.',
            logging.error)
        DisplaySelection.exit_now(1)

    success = DelSearchAll.delete_all(selection)
    if success:
        ryw.give_news('All these objects have been removed.', logging.info)
    else:
        ryw.give_bad_news('DelSelData.py: an error occurred.',
                          logging.error)
        
    DisplaySelection.exit_now(0)
def wait_to_end_and_cleanup(filename, count, robotJobsDir):
	rexp = re.compile(regexp % (os.path.basename(filename),))
	try:	
		ryw.give_news("Waiting for job to end...",None)
		while True:
			section = load_section(filename, robotJobsDir, rexp)
			if section.has_key("TimeCompleted") and \
				section["TimeCompleted"].strip() != "":
					break
			ryw.give_news2(" * ",None)
			time.sleep(10)

		section['TimeCompleted'] = section['TimeCompleted'].strip()
		mesg = "Job finished at %(TimeCompleted)s. %(GoodDiscs)s good discs and %(BadDiscs)s bad discs were produced.\n"
		if "JobErrorNumber" in section and section['JobErrorNumber'] != "16":
			mesg += "Job ended with error. Error code = %(JobErrorNumber)s. Error String = %(JobErrorString)s.\n"
		for i in range(0,10):
			if not "DiscErrorIndex%d" % (i,) in section:
				break
			index = section["DiscErrorIndex%d" % (i,)]
			number = section["DiscErrorNumber%d" % (i,)]
			errstr = section["DiscErrorString%d" % (i,)]
			mesg += "Disc %s had error. Error code = %s. Error Message = %s\n" % (index, number, errstr)

		ryw.give_news("<PRE>" + mesg % section + "</PRE>", None)
		if ("JobErrorNumber" in section and section['JobErrorNumber'] != "16") or \
			section['BadDiscs'] != "0" or "DiscErrorIndex0" in section:
			logging.warning("Erase job ended with errors. Job's status dict: " + str(section))
		else:
			logging.debug("Erase job ended with no errors. Job's status dict: " + str(section)) 
		clean_up(filename)
	except:
		ryw.give_bad_news("Error while waiting for job to finish",logging.warning)
		clean_up(filename)
def process_disk(ppp, overWrite = False):
    ryw.check_logging(os.path.join(RepositoryRoot, 'WWW', 'logs'),
                      'upload.log')
    mydir = os.path.join(ppp, 'repository')
    logging.debug('process_disk: attempted: ' + mydir)

    username = get_credential(mydir)
    if not username:
        return False

    ## MIRRORING HACKS
    ## If incoming disk has username equal to this repository's name,
    ## then this is a disk coming from another peer repository, and hence
    ## must be processed with care.
    ## (If the disk was coming from a village, then it would have the username
    ## of that village.)
    if username == self_name():
        ryw.give_news(
            'process_disk: processing data intended for this ' +
            'peer repository: '+ username, logging.info)
        return process_disk_from_peer_repository(mydir, ppp,
                                                 overwrite = overWrite)

    ryw.give_news('process_disk: processing data from village site: '+
                  username, logging.info)
    
    # even if this fails (i.e., we failed to process download
    # requests), we'll keep going.
    process_download_requests(mydir, username)

    process_uploaded_objects(mydir)
            
    return True
def process_autorun_merge_request(discRoot, overwrite = False):
    if not process_disk(discRoot, overWrite = overwrite):
        ryw.give_bad_news('process_autorun_merge_request: failed to ' +
                          'process this disc: ' + discRoot,
                          logging.error)
        redirect_to_on_disk_page(discRoot,redirectDisk)
    else:
        ryw.give_news('done processing discs.', logging.info)
        redirect_to_main_page(redirectWeb)
def locate_burner_prog():
    import _winreg
    a = _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE, "software\\vso")
    pathDir = _winreg.QueryValueEx(a,"copytodvd")[0]
    exefile = os.path.join(pathDir, "copytocd.exe")
    if (os.path.exists(exefile)):
        return exefile
    if (os.path.exists(r"c:/program files/vso/copytodvd/copytocd.exe")):
        return r"c:/program files/vso/copytodvd/copytocd.exe"
    ryw.give_news("copytodvd program not found",logging.error)
    return ""
def main():
    ryw.check_logging(os.path.join(RepositoryRoot, 'WWW', 'logs'),
                      'upload.log')
    logging.debug('CurrSelAddedToThisSel: entered...')
    ryw_view.print_header_logo()


    success,objID,version = ryw.get_obj_str()
    if not success:
        ryw.give_bad_news('CurrSelAddedToThisSel: failed to get objstr.',
                          logging.critical)
        DisplaySelection.exit_now(1)
    
    success,searchFile,reverseLists = \
        ReverseLists.open_searchfile_reverselists('CurrSelAddedToThisSel:')
    if not success:
        ryw.give_bad_news('CurrSelAddedToThisSel: ' +
                          'open_searchfile_reverselists failed.',
                          logging.critical)
        DisplaySelection.exit_now(1)
    

    rfpath = DisplaySelection.get_file_path(
        objID, version, searchFile = searchFile)
    if not rfpath:
        ryw.give_bad_news(
            'CurrSelAddedToThisSel: no selection file name found.',
            logging.error)
        DisplaySelection.exit_now(1)

    queueName = DeleteRepReq.get_queue_name()
    if not queueName:
        ryw.give_bad_news(
            'CurrSelAddedToThisSel: failed to get current selection file name.',
            logging.error)
        DisplaySelection.exit_now(1)

    queueSel = ProcessDownloadReq.get_reqs(queueName)
    savedSel = ProcessDownloadReq.get_reqs(rfpath)
    newContainees = list(queueSel)

    AddSearchAll.union_and_write(savedSel, queueSel, rfpath)
    ryw.give_news('You may want to reload the page containing '+
                  'this selection.', logging.info)


    success = reverseLists.add(objID+'#'+str(version), newContainees)
    if not success:
        ryw.give_bad_news('CurrSelAddedToThisSel: reverseLists.add failed.',
                          logging.critical)

    reverseLists.done()
    searchFile.done()
    DisplaySelection.exit_now(0)
def wait_till_job_starts(filename, robotJobsDir):
	try:
		ryw.give_news("Waiting for robot to pick up the job...",None)
		time.sleep(20)
		while not os.path.exists(filename + ".INP"):
			ryw.give_news2(" * ",None)
			time.sleep(5)
	except:
		ryw.give_bad_news("Bad things happened while waiting for job to be processed",logging.critical)
		return False
	return True
def addRobotWriteRequest(name, items, itempath, currentSize, tmpStoreName,
                         tmpDirOption = '', onlyMeta = False):
    logging.debug('addRobotWriteRequest: entered...')

    success,resources,robotJobsDir,tmpOutDir,searchFile,viewRoot,firstRoot, \
        robotPresent = get_resources(tmpDirOption = tmpDirOption)
    if not success:
        return (False, None, None)

    if not onlyMeta and not check_free_disk_space(currentSize, tmpOutDir):
        return (False, None, None)
            
    tmpImgDir,objPrefix = ryw_upload.attempt_just_make_tmpdir(
        tmpOutDir, 'Im_'+name[:3]+'_', '')
    if not tmpImgDir:
        return (False, None, None)

    ryw.give_news2('<BR>outgoing data image name: ' + tmpImgDir, logging.info)

    if not write_recipient_file(tmpImgDir, name):
        cleanup_image(tmpImgDir)
        return (False, None, None)

    repDir = os.path.join(tmpImgDir, 'repository')
    if not ryw.try_mkdir(repDir, 'addRobotWriteRequest'):
        cleanup_image(tmpImgDir)
        return (False, None, None)

    # noObjStore:
    #   not copy_objectstore(firstRoot, repDir, tmpStoreName) or \
    #   not copy_view(repDir, viewRoot) or \
    if not write_user_credentials(repDir, name) or \
       not copy_scripts(repDir) or \
       not copy_search_file(searchFile, repDir) or \
       not copy_reverse_lists(repDir) or \
       not copy_objects(items, itempath, repDir,
                        tmpImgDir, metaOnly = onlyMeta) or \
       not generate_html(items, itempath, repDir, tmpImgDir) or \
       not copy_autorunfiles(tmpImgDir):
        cleanup_image(tmpImgDir)
        return (False, None, None)

    ryw.give_news(' ', logging.info)
    ryw.give_news('done copying all data, now invoking the robot.',
                  logging.info)
    
#    success,jrq = write_robot_job_file(robotJobsDir, tmpImgDir, objPrefix, robotPresent = robotPresent)
#    if not success:
#        cleanup_image(tmpImgDir)
#        ryw.cleanup_path(jrq, 'addRobotWriteRequest:')
#        return (False, None, None)

    return (True, tmpImgDir, "blah")
    def initialize_with_meta_list(self, reqList, searchFile, reverseLists):
        """called by ChapterListForm() to initialize chapter list from
        the meta data.
        modeled after ShowQueue.go_through_list().
        also called by ChapterListFormHandle()."""

        success = True
        
        if not searchFile or not reverseLists:
            raise NameError('ChapterList.initialize_with_meta_list:' +
                            'bad searchFile or bad reverseLists.')

        metaList = ryw_meta.get_meta_list(reqList, searchFile)
        searchFile.done()

        metaList = ryw.sortmeta_chapter_number(metaList)

        ryw.db_print2('initialize_with_meta_list: done sorting.', 38)

        numMatches = len(metaList)

        if numMatches <= 0:
            ryw.give_news('ChapterList: the selection is empty.<br>',
                          logging.error)
            success = False
        else:
            self.chapterDict = {}
            self.itemList = []
            self.metaList = metaList

            for meta in metaList:
                objID = meta['id']
                version = meta['version']
                objstr = objID + '#' + str(version)
                self.itemList.append(objstr)

                title = ''
                if meta.has_key('title'):
                    title = meta['title']

                chapter = None
                if meta.has_key('chapter_number'):
                    chapter = meta['chapter_number']

                alias = None
                if meta.has_key('content_alias'):
                    alias = meta['content_alias']

                self.chapterDict[objstr] = [alias, title, chapter]

            ryw.db_print2('chapterDict is: ' + repr(self.chapterDict), 41)
        
        return success
def rejectDisc(discID, filename, robotJobsDir):
	try:
		ryw.give_news("Writing req to reject disc...",None)
		f = open(filename,"w")
		f.write("""Message=REJECT_DISC
DiscID=%s
""" % (discID,))
		f.close()
		os.rename(filename, filename + ".PTM")
	except:
		ryw.give_bad_news("Error writing command to reject disc",logging.critical)
		return False
	return True
def check_robot_finished(jobFile):
    """returns doneFlag."""
    errExists  = os.path.exists(jobFile + '.ERR')
    doneExists = os.path.exists(jobFile + '.DON')
    if not errExists and not doneExists:
        return False
    if errExists:
        ryw.give_news(
            'check_robot_finished: .ERR file found: ' +
            jobFile, logging.warning)
    if doneExists:
        logging.debug('check_robot_finished: .DON file found: ' +
                      jobFile)
    return True
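# A hedged polling sketch (hypothetical helper) modeled on the loop in
# NOTUSED_main_overlap() below: wait until the robot drops a .DON or .ERR
# file next to the job file, with an added timeout as an assumption.
# Assumes time is imported, as elsewhere in these snippets.
def example_wait_for_robot(jobFile, timeoutSeconds = 600):
    waited = 0
    while not check_robot_finished(jobFile):
        ryw.give_news2('*', logging.info)
        time.sleep(5)
        waited += 5
        if waited >= timeoutSeconds:
            ryw.give_bad_news('example_wait_for_robot: timed out: ' + jobFile,
                              logging.error)
            return False
    return True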
def NOTUSED_main_overlap():
    ryw.check_logging(os.path.join(RepositoryRoot, "WWW", "logs"), "upload.log")
    logging.debug("ReadIncomingCDStack: entered...")

    success, numDiscs, resources, robotJobsDir, tmpIn = get_init_vals()
    if not success:
        sys.exit(1)

    freeGB = ryw.free_MB(tmpIn) / 1000.0
    ryw.give_news("ReadIncomingCDStack: current available disk space: " + repr(freeGB) + " GB.", logging.info)

    tmpdir, objPrefix = ryw_upload.attempt_just_make_tmpdir(tmpIn, "I_", "")
    if not tmpdir:
        ryw.give_bad_news("ReadIncomingCDStack: failed to make tmpdir.", logging.critical)
        sys.exit(1)

    jobFile = robot_read_all(robotJobsDir, objPrefix, numDiscs, tmpdir)
    if not jobFile:
        ryw_upload.cleanup_incoming(tmpdir, jobFile)
        sys.exit(1)

    # monitor the robot's job folder for completion of job
    # also periodically monitor the tmp folder for completed disk reads

    while True:
        ryw.give_news2("*", logging.info)
        time.sleep(5)
        logging.debug("ReadIncomingCDStack: done sleeping...")

        success, done, doneList = check_finished(jobFile, tmpdir)
        if not success:
            ryw_upload.cleanup_incoming(tmpdir, jobFile)
            sys.exit(1)

        if not done:
            success, doneList = check_partial_completion(tmpdir)
            if not success:
                ryw_upload.cleanup_incoming(tmpdir, jobFile)
                sys.exit(1)

        process_finished_copies(tmpdir, doneList)

        if done:
            logging.debug("ReadIncomingCDStack: done.")
            break

    logging.debug("ReadIncomingCDStack: removing robot job data: " + tmpdir + " " + jobFile)
    ryw_upload.cleanup_incoming(tmpdir, jobFile)
def clean_up(filename, f = None):
	try:
		ryw.give_news("cleaning up files...",None)
		if f and not f.closed:
			f.close()
		if os.path.exists(filename):
			os.remove(filename)
		if os.path.exists(filename + ".JRQ"):
			os.remove(filename + ".JRQ")
		if os.path.exists(filename + ".ERR"):
			os.remove(filename + ".ERR")
		if os.path.exists(filename + ".DON"):
			os.remove(filename + ".DON")
			
	except:
		ryw.give_bad_news("Error cleaning up files used",logging.critical)
def eraseDiscInDrive(driveLetter):
	
	count = 0
	while count < 12:
		try:
			os.listdir(driveLetter + ":\\")
			# drive is still readable: give it a moment and retry, so a
			# successful listing cannot spin the loop forever.
			time.sleep(5)
			count += 1
		except Exception, err:
			if err.__class__ == exceptions.WindowsError:
				if err.errno == 1 or err.errno == 1005:
					# unreadable/blank media: treat the erase as done.
					return True
				if err.errno == 21:
					ryw.give_news("waiting for drive to become ready", None)
					time.sleep(5)
					count += 1
				else:
					# some other Windows error: count it as an attempt.
					count += 1
			else:
				ryw.give_bad_news("eraseDiscInDrive failed", logging.critical)
				return False
	ryw.give_bad_news("eraseDiscInDrive: drive never became ready",
	                  logging.warning)
	return False
def union_and_write(currSel, searchSel, queueName):

    unionResults = currSel | searchSel

    #
    # copied from ProcessDownloadReq.py
    #
    success,tmppath,bakpath = ProcessDownloadReq.write_reqs(
        queueName, unionResults)
    ProcessDownloadReq.cleanup(tmppath, bakpath)

    if success:
        numResults = len(searchSel)
        ryw.give_news(str(numResults) +
                      ' objects added to the chosen selection.',
                      logging.info)
    else:
        ryw.give_bad_news('union_and_write: failed.', logging.error)
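# A minimal usage sketch (hypothetical helper) mirroring the call sites
# elsewhere in these snippets: load the current selection and a saved
# selection, then merge the saved one into the current queue.
def example_merge_saved_into_queue(savedPath):
    queueName = DeleteRepReq.get_queue_name()
    if not queueName:
        return
    currSel = ProcessDownloadReq.get_reqs(queueName)
    if not currSel:
        # assumption: get_reqs() may return None for an empty queue.
        currSel = set()
    savedSel = ProcessDownloadReq.get_reqs(savedPath)
    if not savedSel:
        ryw.give_news('example_merge_saved_into_queue: nothing to merge.',
                      logging.info)
        return
    union_and_write(currSel, savedSel, queueName)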
def main():
    name = ShowQueue.init_log()
    ryw_view.print_header_logo()
    
    searchSel = AddSearchAll.get_search_result()
    if not searchSel:
        ryw.give_bad_news(
            'DelSearchAll: failed to load current search result.',
            logging.error)
        DisplaySelection.exit_now(1)

    success = delete_all(searchSel)
    if success:
        ryw.give_news('All these objects have been removed.', logging.info)
    else:
        ryw.give_bad_news('DelSearchAll.py: an error occurred.',
                          logging.error)
        
    DisplaySelection.exit_now(0)
def main():
    ryw.check_logging(os.path.join(RepositoryRoot, 'WWW', 'logs'),
                      'upload.log')
    logging.debug('ProcessDiscs: entered...')

    success,tmpdir,jobfile,autorunMerge,overWrite = init_vals()
    if not success:
        ryw_upload.quick_exit(1)
    
    logging.debug('ProcessDiscs: tmpdir,jobfile: ' + tmpdir + ' ' + jobfile)
    ryw.give_news('processing incoming disc images located in: ' + tmpdir,
                  logging.info)

    if autorunMerge:
        process_autorun_merge_request(tmpdir, overwrite = overWrite)
        sys.exit(0)

    process_finished_copies(tmpdir)
    ryw_upload.cleanup_incoming(tmpdir, jobfile)
    
    ryw_view.print_footer()
    sys.exit(0)