def wait_to_end_and_cleanup(filename, count, robotJobsDir):
	"""Poll the robot job status until the job completes, then report and clean up.

	Builds a regex from the module-level ``regexp`` template and the job
	file's basename, re-reads the job's status section every 10 seconds
	until it carries a non-empty 'TimeCompleted' field, reports the outcome
	(good/bad disc counts, job and per-disc errors), logs it, and removes
	the job file.  ``count`` is accepted but unused in this body.
	On any exception the job file is still removed (best effort).
	"""
	rexp = re.compile(regexp % (os.path.basename(filename),))
	try:	
		ryw.give_news("Waiting for job to end...",None)
		while True:
			# re-read the status section on every pass of the poll loop.
			section = load_section(filename, robotJobsDir, rexp)
			if section.has_key("TimeCompleted") and \
				section["TimeCompleted"].strip() != "":
					break
			ryw.give_news2(" * ",None)
			time.sleep(10)

		section['TimeCompleted'] = section['TimeCompleted'].strip()
		# mesg is %-formatted against the whole section dict below.
		mesg = "Job finished at %(TimeCompleted)s. %(GoodDiscs)s good discs and %(BadDiscs)s bad discs were produced.\n"
		# NOTE(review): JobErrorNumber "16" appears to be the robot's
		# 'no error' code -- confirm against the robot's documentation.
		if "JobErrorNumber" in section and section['JobErrorNumber'] != "16":
			mesg += "Job ended with error. Error code = %(JobErrorNumber)s. Error String = %(JobErrorString)s.\n"
		# report up to 10 per-disc errors recorded in the section.
		for i in range(0,10):
			if not "DiscErrorIndex%d" % (i,) in section:
				break
			index = section["DiscErrorIndex%d" % (i,)]
			number = section["DiscErrorNumber%d" % (i,)]
			errstr = section["DiscErrorString%d" % (i,)]
			mesg += "Disc %s had error. Error code = %s. Error Message = %s\n" % (index, number, errstr)

		ryw.give_news("<PRE>" + mesg % section + "</PRE>", None)
		if ("JobErrorNumber" in section and section['JobErrorNumber'] != "16") or \
			section['BadDiscs'] != "0" or "DiscErrorIndex0" in section:
			logging.warning("Erase job ended with errors. Job's status dict: " + str(section))
		else:
			logging.debug("Erase job ended with no errors. Job's status dict: " + str(section)) 
		clean_up(filename)
	except:
		# best effort: report the failure but still remove the job file.
		ryw.give_bad_news("Error while waiting for job to finish",logging.warning)
		clean_up(filename)
def try_exec(RepositoryRoot, meta, tmpdir, uploadFileName):
    """Run ffmpeg over the uploaded file and record extracted attributes in meta.

    Locates ffmpeg under RepositoryRoot; if absent, returns silently.
    Runs 'ffmpeg -i "<file>"' (ffmpeg writes its stream info to stderr)
    and feeds that stderr text to extract_all().  If the result carries an
    'image' attribute, the meaningless frame_rate is dropped; otherwise a
    duration is extracted.  Returns None in all cases; failures are
    reported via ryw.give_bad_news.
    """
    commandPath = verify_ffmpeg_existence(RepositoryRoot)
    if commandPath is None:
        # ffmpeg is not installed: nothing to extract.
        return

    filePath = os.path.join(tmpdir, uploadFileName)
    # quote the path: it may contain spaces and the command runs via a shell.
    executeThis = commandPath + ' -i ' + '"' + filePath + '"'

    ryw.give_news2('<BR>Invoking ffmpeg... ', logging.info)

    try:
        # ffmpeg prints the media information we want on stderr.
        pipe = subprocess.Popen(executeThis, shell=True,
                                stderr=subprocess.PIPE)
        execResult = pipe.communicate()[1]
    except:
        ryw.give_bad_news('try_extract_duration: ffmpeg execution failed: '+
                          executeThis, logging.error)
        return

    extract_all(meta, execResult)

    if meta.has_key('ffmpeg') and meta['ffmpeg'].has_key('image'):
        # still images get a bogus frame rate from ffmpeg; drop it.
        if meta['ffmpeg'].has_key('frame_rate'):
            del meta['ffmpeg']['frame_rate']
    else:
        searchResult,group1 = extract_one_pattern(
            execResult, FFMPEG_INFO_TABLE['duration'][1])
        extract_duration(meta, searchResult)
def get_resources(tmpDirOption = ''):
    """Read Resources.txt and return the paths the outgoing-disc code needs.

    Returns an 8-tuple: (success, resources, robotJobsDir, tmpOutDir,
    searchFile, viewRoot, firstRoot, robotPresent).  On any failure the
    first element is False and the rest are None.  tmpDirOption, when
    given, overrides the 'tmpout' entry from Resources.txt.
    """
    logging.debug('get_resources: entered...')
    try:
        resources = su.parseKeyValueFile(os.path.join(RepositoryRoot,
                                                      'Resources.txt'))
        robotJobsDir = resources['robotsjobdir']

        # an explicit tmp-dir option wins over the configured one.
        tmpOutDir = tmpDirOption if tmpDirOption else resources['tmpout']
        ryw.give_news2('<BR>outgoing data placed in: ' + tmpOutDir,
                       logging.info)

        searchFile = resources['searchfile']
        viewRoot = resources['viewroot']
        # only the first of the semicolon-separated objectstore roots is used.
        firstRoot = resources['objectstore'].split(';')[0]
        robotPresent = ryw.has_robot(resources)
    except:
        ryw.give_bad_news('get_resources failed.', logging.critical)
        return (False, None, None, None, None, None, None, None)

    logging.debug('get_resources succeeded.')
    logging.debug('get_resources: robotJobsDir, tmpOutDir, searchFile, viewRoot' + robotJobsDir + tmpOutDir + searchFile + viewRoot + firstRoot)
    return (True, resources, robotJobsDir, tmpOutDir, searchFile, viewRoot,
            firstRoot, robotPresent)
def wait_till_job_starts(filename, robotJobsDir):
    """Block until the robot picks up the job (its .INP file appears).

    Sleeps 20 seconds up front, then polls every 5 seconds.  Returns True
    once <filename>.INP exists, False if anything raises while waiting.
    """
    inpFile = filename + ".INP"
    try:
        ryw.give_news("Waiting for robot to pick up the job...",None)
        time.sleep(20)
        while True:
            if os.path.exists(inpFile):
                return True
            ryw.give_news2(" * ",None)
            time.sleep(5)
    except:
        ryw.give_bad_news("Bad things happened while waiting for job to be processed",logging.critical)
        return False
def addRobotWriteRequest(name, items, itempath, currentSize, tmpStoreName,
                         tmpDirOption = '', onlyMeta = False):
    """Assemble an outgoing disc image directory for the requested objects.

    Builds a temporary image dir under the configured tmpout directory,
    writes the recipient file, user credentials, scripts, search file,
    reverse lists, the requested objects (metadata only when onlyMeta),
    generated HTML and autorun files.  Returns (success, tmpImgDir, jrq);
    the third element is the placeholder "blah" because the robot job-file
    step below is commented out.  On any failure the partial image dir is
    removed and (False, None, None) is returned.
    """
    logging.debug('addRobotWriteRequest: entered...')

    success,resources,robotJobsDir,tmpOutDir,searchFile,viewRoot,firstRoot, \
        robotPresent = get_resources(tmpDirOption = tmpDirOption)
    if not success:
        return (False, None, None)

    # skip the space check when we only burn metadata stubs.
    if not onlyMeta and not check_free_disk_space(currentSize, tmpOutDir):
        return (False, None, None)
            
    tmpImgDir,objPrefix = ryw_upload.attempt_just_make_tmpdir(
        tmpOutDir, 'Im_'+name[:3]+'_', '')
    if not tmpImgDir:
        return (False, None, None)

    ryw.give_news2('<BR>outgoing data image name: ' + tmpImgDir, logging.info)

    if not write_recipient_file(tmpImgDir, name):
        cleanup_image(tmpImgDir)
        return (False, None, None)

    repDir = os.path.join(tmpImgDir, 'repository')
    if not ryw.try_mkdir(repDir, 'addRobotWriteRequest'):
        cleanup_image(tmpImgDir)
        return (False, None, None)

    # noObjStore:
    #   not copy_objectstore(firstRoot, repDir, tmpStoreName) or \
    #   not copy_view(repDir, viewRoot) or \
    # each step reports its own errors; any failure aborts and cleans up.
    if not write_user_credentials(repDir, name) or \
       not copy_scripts(repDir) or \
       not copy_search_file(searchFile, repDir) or \
       not copy_reverse_lists(repDir) or \
       not copy_objects(items, itempath, repDir,
                        tmpImgDir, metaOnly = onlyMeta) or \
       not generate_html(items, itempath, repDir, tmpImgDir) or \
       not copy_autorunfiles(tmpImgDir):
        cleanup_image(tmpImgDir)
        return (False, None, None)

    ryw.give_news(' ', logging.info)
    ryw.give_news('done copying all data, now invoking the robot.',
                  logging.info)
    
#    success,jrq = write_robot_job_file(robotJobsDir, tmpImgDir, objPrefix, robotPresent = robotPresent)
#    if not success:
#        cleanup_image(tmpImgDir)
#        ryw.cleanup_path(jrq, 'addRobotWriteRequest:')
#        return (False, None, None)

    return (True, tmpImgDir, "blah")
def main_nonoverlap():
    """Drive the robot to read a stack of incoming CDs, copying sequentially.

    Sets up logging, reads configuration, creates a temp incoming dir,
    submits one robot job for all discs, then polls every 5 seconds until
    the job file reports completion, printing newly finished discs as they
    appear.  Exits the process (sys.exit(1)) on any setup or polling
    failure, cleaning up the temp data first.
    """
    ryw.check_logging(os.path.join(RepositoryRoot, "WWW", "logs"), "upload.log")
    logging.debug("ReadIncomingCDStack: entered...")

    success, numDiscs, resources, robotJobsDir, tmpIn = get_init_vals()
    if not success:
        sys.exit(1)

    freeGB = ryw.free_MB(tmpIn) / 1000.0
    ryw.give_news("current available disk space: " + repr(freeGB) + " GB.", logging.info)

    tmpdir, objPrefix = ryw_upload.attempt_just_make_tmpdir(tmpIn, "I_", "")
    if not tmpdir:
        ryw.give_bad_news("ReadIncomingCDStack: failed to make tmpdir.", logging.critical)
        sys.exit(1)

    ryw.give_news("begin copying incoming discs...", logging.info)

    jobFile = robot_read_all(robotJobsDir, objPrefix, numDiscs, tmpdir)
    if not jobFile:
        ryw_upload.cleanup_incoming(tmpdir, jobFile)
        sys.exit(1)

    # monitor the robot's job folder for completion of job.

    ryw.give_news("", logging.info)
    oldDone = []
    while True:
        ryw.give_news2("*", logging.info)
        time.sleep(5)
        logging.debug("ReadIncomingCDStack: done sleeping...")

        success, done, doneList = check_finished(jobFile, tmpdir)
        if not success:
            ryw_upload.cleanup_incoming(tmpdir, jobFile)
            sys.exit(1)

        if not done:
            # job not finished: check which individual discs are done so far.
            success, doneList = check_partial_completion(tmpdir)
            if not success:
                ryw_upload.cleanup_incoming(tmpdir, jobFile)
                sys.exit(1)

        # process_finished_copies(tmpdir, doneList)
        # only report discs that were not already reported last iteration.
        print_done_discs(doneList, oldDone)
        oldDone = doneList

        if done:
            logging.debug("ReadIncomingCDStack: done.")
            break

    print_conclusion(doneList, tmpdir, jobFile)
def write_recipient_file(tmpImgDir, name):
    """Write Recipient.txt (username and optional address) into the image dir.

    The file is flushed and fsync'ed so the recipient info survives an
    abrupt shutdown before the image is burned.  Returns True on success,
    False on any failure (reported via ryw.give_bad_news).
    """
    ryw.give_news2('<BR>', logging.info)
    ryw.give_news2('writing recipient file... &nbsp;&nbsp;', logging.info)
    
    rcptFileName = os.path.join(tmpImgDir, 'Recipient.txt')
    try:
        f = open(rcptFileName, 'w')
        f.write('Recipient Info---\n\n')
        # NOTE(review): this line was redacted ('******') in the original
        # source; writing the username is the apparent intent -- confirm.
        f.write('Username: ' + name + '\n')

        ep = getAddress(name)
        if ep:
            f.write('Address: ' + ep + '\n')

        f.flush()
        os.fsync(f.fileno())   # force the bytes to disk before continuing
        f.close()
    except:
        ryw.give_bad_news('write_recipient_file failed, tmpImgDir, name: '+
                          tmpImgDir + ' ' + name, logging.critical)
        return False

    logging.debug('write_recipient_file done, tmpImgDir, name: '+
                  tmpImgDir + ' ' + name)
    ryw.give_news2('done, ' + name, logging.info)
    ryw.give_news2('<BR>', logging.info)
    return True
def extract_all(meta, ffmpegOut):
    """Extract every known ffmpeg attribute (except duration) into meta.

    For each entry of FFMPEG_INFO_TABLE, runs its regex against the ffmpeg
    stderr output and stores the first capture group as an ffmpeg
    attribute.  Afterwards classifies the media as 'video' or
    'audio_without_video' based on which attribute sets were found.
    """
    for key, val in FFMPEG_INFO_TABLE.iteritems():
        if key == 'duration':
            # duration is handled separately by the caller.
            continue
        description,regex = val
        searchResult,group1 = extract_one_pattern(ffmpegOut, regex)
        if group1 == None:
            continue
        ryw.give_news2(key + ' - ' + group1 + ', ', logging.info)
        set_ffmpeg_attribute(meta, key, group1)

    # classify the media type from the attributes we just collected.
    if has_these_attrs(meta, FFMPEG_VIDEO_KEYS):
        set_media_attrs(meta, 'video')
    elif has_these_attrs(meta, FFMPEG_AUDIO_KEYS):
        set_media_attrs(meta, 'audio_without_video')
def NOTUSED_main_overlap():
    """Unused variant of main_nonoverlap that processes discs as they finish.

    Same setup and polling structure as main_nonoverlap, but calls
    process_finished_copies on every poll (overlapping copy and
    processing) and removes the temp data itself at the end.  Kept for
    reference; the NOTUSED_ prefix marks it as dead code.
    """
    ryw.check_logging(os.path.join(RepositoryRoot, "WWW", "logs"), "upload.log")
    logging.debug("ReadIncomingCDStack: entered...")

    success, numDiscs, resources, robotJobsDir, tmpIn = get_init_vals()
    if not success:
        sys.exit(1)

    freeGB = ryw.free_MB(tmpIn) / 1000.0
    ryw.give_news("ReadIncomingCDStack: current available disk space: " + repr(freeGB) + " GB.", logging.info)

    tmpdir, objPrefix = ryw_upload.attempt_just_make_tmpdir(tmpIn, "I_", "")
    if not tmpdir:
        ryw.give_bad_news("ReadIncomingCDStack: failed to make tmpdir.", logging.critical)
        sys.exit(1)

    jobFile = robot_read_all(robotJobsDir, objPrefix, numDiscs, tmpdir)
    if not jobFile:
        ryw_upload.cleanup_incoming(tmpdir, jobFile)
        sys.exit(1)

    # monitor the robot's job folder for completion of job
    # also periodically monitor the tmp folder for completed disk reads

    while True:
        ryw.give_news2("*", logging.info)
        time.sleep(5)
        logging.debug("ReadIncomingCDStack: done sleeping...")

        success, done, doneList = check_finished(jobFile, tmpdir)
        if not success:
            ryw_upload.cleanup_incoming(tmpdir, jobFile)
            sys.exit(1)

        if not done:
            # job not finished: see which individual discs are done so far.
            success, doneList = check_partial_completion(tmpdir)
            if not success:
                ryw_upload.cleanup_incoming(tmpdir, jobFile)
                sys.exit(1)

        process_finished_copies(tmpdir, doneList)

        if done:
            logging.debug("ReadIncomingCDStack: done.")
            break

    logging.debug("ReadIncomingCDStack: removing robot job data: " + tmpdir + " " + jobFile)
    ryw_upload.cleanup_incoming(tmpdir, jobFile)
def copy_search_file(searchFile, repDir):
    """Copy the repository search file into <repDir>/searchfile under lock.

    Opens the search file via ryw.open_search_file (which acquires the
    lock; skipRead avoids parsing the contents), copies the raw file, and
    releases the lock via binSearchFile.done() whether or not the copy
    succeeds (the previous version leaked the lock on copy failure).
    Returns True on success, False on any failure.
    """
    ryw.give_news2('copying search file... &nbsp;&nbsp;', logging.info)
    # place catalog
    logging.debug('copy_search_file: ' + searchFile + ' ' + repDir)
    dst = os.path.join(repDir, 'searchfile')

    success,binSearchFile = ryw.open_search_file(
        'copy_search_file:',
        None,
        None,
        searchFile,
        False,
        skipRead = True)
    if not success:
        return False
    
    try:
        try:
            shutil.copyfile(searchFile, dst)
        finally:
            # release the search-file lock no matter what happened.
            binSearchFile.done()
    except:
        ryw.give_bad_news('copy_search_file failed: ' +
                          searchFile + ' ' + dst, logging.critical)
        return False
    logging.debug('copy_search_file: ' + searchFile + ' ' + dst)
    ryw.give_news2('done. ', logging.info)
    ryw.give_news2('<BR>', logging.info)
    return True
def copy_reverse_lists(repDir):
    """Copy the ReverseLists file into repDir, holding its lock meanwhile.

    A missing ReverseLists file is not an error (returns True).  Returns
    False when the lock cannot be taken or the copy fails.
    """
    ryw.give_news2('copying reverse lists... &nbsp;&nbsp;', logging.info)
    logging.debug('copy_reverse_lists: ' + repDir)

    reverseListsFile = os.path.join(RepositoryRoot, 'ReverseLists')
    if not ryw.is_valid_file(reverseListsFile, 'copy_reverse_lists:'):
        # nothing to copy; treat as success.
        ryw.give_news2('not found: ' + reverseListsFile + '<BR>',
                       logging.info)
        return True

    success,reverseLists = ReverseLists.open_reverse_lists(
        'AddRobotWriteRequests.copy_reverse_lists:',
        '', '', reverseListsFile, False,
        allowNullSearchFile = True)
    if not success:
        return False

    dst = os.path.join(repDir, 'ReverseLists')

    copied = True
    try:
        shutil.copyfile(reverseListsFile, dst)
    except:
        ryw.give_bad_news('copy_reverse_lists failed: ' +
                          reverseListsFile + ' ' + dst, logging.critical)
        copied = False

    # release the reverse-lists handle on both paths.
    if reverseLists:
        reverseLists.done()

    if not copied:
        return False

    logging.debug('copy_reverse_lists: ' + reverseListsFile + ' ' + dst)
    ryw.give_news2('done.<BR>', logging.info)
    return True
def wait_for_robot(finishedStuff):
    """Wait until the robot finishes every submitted job, then remove temp data.

    finishedStuff is a list of (tmpImgDir, jrq) pairs.  Polls every 5
    seconds, announcing each job the first time it is seen finished; after
    all are done, counts down 60 seconds and deletes each job's temporary
    image dir and job file.
    """
    #
    # monitor robot job status for all the discs to finish.
    #
    totalToWaitFor = len(finishedStuff)
    reallyFinished = 0
    reallyFinishedList = []
    ryw.give_news2('waiting for robot...', logging.info)
    ryw.give_news2('<BR>', logging.info)
    while True:
        for job in finishedStuff:
            tmpImgDir,jrq = job
            done = ryw_upload.check_robot_finished(jrq)
            # only count and announce a job the first time it shows finished.
            if done and (not job in reallyFinishedList):
                reallyFinished += 1
                reallyFinishedList.append(job)
                ryw.give_news('robot finished: ' + str(reallyFinished) +
                              ' out of ' + str(totalToWaitFor) + ' disc(s).',
                              logging.info)
                ryw.give_news('  tmp files: ' + tmpImgDir + ' ' + jrq,
                              logging.info)
                ryw.give_news(' ', logging.info)
        if reallyFinished >= totalToWaitFor:
            break
        ryw.give_news2('*', logging.info)
        time.sleep(5)

    #
    # wait 60 seconds before removing temp data.
    # NOTE(review): the message promises 'press esc to stop it' but no
    # keypress handling is visible here -- confirm where that is handled.
    #
    delay = 60
    ryw.give_news(' ', logging.info)
    ryw.give_news('will delete all temporary data in ' + str(delay) +
                  ' seconds.  press esc to stop it...', logging.info)
    ryw.give_news(' ', logging.info)
    while delay > 0:
        ryw.give_news2(str(delay) + '...', logging.info)
        time.sleep(5)
        delay -= 5
    ryw.give_news(' ', logging.info)
    ryw.give_news('deleting all temporary data...', logging.info)
        
    #
    # remove all temporary data...
    #
    for job in reallyFinishedList:
        tmpImgDir,jrq = job
        ryw_upload.cleanup_incoming(tmpImgDir, jrq)
    ryw.give_news('all temporary data removed.', logging.info)
def out_copy(itemName, srcDataPath, srcMetaPath, srcAuxiPath, currCounter,
             dataRoot, auxiRoot, mapDict, onlyMeta = False):
    """Copy one object's data (or a meta-only stub) into the outgoing image.

    itemName is 'objID#version'.  Allocates the object's directory name via
    out_obj_dir_name, copies the data tree (or creates a stub dir when
    onlyMeta), copies the auxi tree when present, and records the
    itemName -> dirName mapping.  srcMetaPath is accepted for interface
    compatibility but not used in this body.
    Returns (currCounter, mapDict); raises Exception on failure.
    """
    objID,version = itemName.split('#')
    version = int(version)
    success,dirName,currCounter = out_obj_dir_name(
        os.path.join(RepositoryRoot, 'WWW', 'ObjectStore'),
        objID, version, currCounter)

    if not success:
        # raising a bare string is a TypeError on Python 2.6+; raise a
        # real exception object instead.
        raise Exception('out_copy: out_obj_dir_name failed.')

    dataDir = os.path.join(dataRoot, dirName)
    auxiDir = os.path.join(auxiRoot, dirName)

    ryw.give_news2(dirName + ', ', logging.info)

    if onlyMeta:
        try:
            # metadata-only request: create an empty stub directory instead
            # of copying the (possibly huge) data tree.
            os.makedirs(os.path.join(dataDir, stubDirName))
        except:
            msg = 'out_copy: failed to make meta only stub: '+ \
                  dataDir
            ryw.give_bad_news(msg, logging.critical)
            raise Exception(msg)
    else:
        su.copytree(srcDataPath, dataDir)

    logging.debug('out_copy: successfully copied: ' +
                  srcDataPath + ' -> ' + dataDir)

    if os.path.exists(srcAuxiPath):
        su.copytree(srcAuxiPath, auxiDir)
        logging.debug('out_copy: successfully copied: ' +
                      srcAuxiPath + ' -> ' + auxiDir)

    mapDict[itemName] = dirName
    logging.debug('out_copy: entered mapping: ' + itemName + ' : ' + dirName)
    return (currCounter, mapDict)
def main():
    """Rebuild the ReverseLists file from the object store and display progress.

    Decides the new ReverseLists path, opens the search file plus a fresh
    ReverseLists via ReverseLists.open_searchfile_reverselists, walks the
    object store to repopulate it, then closes the handle and prints the
    HTML footer.  Exits the process on setup failure.
    """
    init_log()
    ryw_view.print_header_logo()
    print '<TITLE>Rebuild ReverseLists</TITLE>'

    success,newReverseListsFile,objectStoreRoot = decide_path()
    if not success:
        sys.exit(1)

    success,searchFile,reverseLists = \
        ReverseLists.open_searchfile_reverselists(
            'RebuildReverseLists:',
            newReverseListsFileName = newReverseListsFile)
    if not success:
        sys.exit(1)
    else:
        ryw.give_news2('<BR>new ReverseLists generated at: ' +
                       newReverseListsFile + '<BR>', logging.info)

    go_through_object_store(objectStoreRoot, reverseLists)

    if reverseLists:
        reverseLists.done()
    ryw_view.print_footer()
def write_user_credentials(repDir, name):
    """Write the recipient's name into <repDir>/usercredentials.

    Returns True on success, False on any failure (reported via
    ryw.give_bad_news).  The file handle is now closed explicitly instead
    of relying on refcounting.
    """
    ryw.give_news2('writing user credential... &nbsp;&nbsp;', logging.info)
    try:
        f = open(os.path.join(repDir, 'usercredentials'), 'w')
        try:
            f.write(name)
        finally:
            f.close()
    except:
        ryw.give_bad_news('write_user_credentials failed: ' + repDir + ' ' +
                          name, logging.critical)
        return False
    logging.debug('write_user_credentials done: ' + repDir + ' ' + name)
    ryw.give_news2('done, ' + name, logging.info)
    ryw.give_news2('<BR>', logging.info)
    return True
def copy_autorunfiles(rootDir):
    """Copy autorun.inf and autorun.py from the repository bin dir into rootDir.

    Returns True on success, False if either copy fails.
    """
    ryw.give_news2('copying auto run files... &nbsp;&nbsp;', logging.info)
    binDir = os.path.join(RepositoryRoot, "bin")
    try:
        for autorunFile in ("autorun.inf", "autorun.py"):
            shutil.copy(os.path.join(binDir, autorunFile), rootDir)
    except:
        ryw.give_bad_news("addRobotWriteRequest: copy_autorunfiles failed: ", logging.critical)
        return False
    ryw.give_news2('done. ', logging.info)
    ryw.give_news2('<BR>', logging.info)
    return True
def copy_view(repDir, viewRoot):
    """Copy the view tree into <repDir>/View.

    Returns True on success, False if the tree copy fails.
    """
    ryw.give_news2('copying view... &nbsp;&nbsp;', logging.info)
    try:
        dst = os.path.join(repDir, 'View')
        su.copytree(viewRoot, dst)
    except:
        ryw.give_bad_news('copy_view: failed: ' + viewRoot + ' ' + dst,
                          logging.critical)
        return False

    logging.debug('copy_view: done: ' + dst)
    ryw.give_news2('done. ', logging.info)
    ryw.give_news2('<BR>', logging.info)
    return True
def go_through_object_store(objectStoreRoot, reverseLists):
    """Walk every object version in the store and register list objects.

    For each object version, loads its metadata; objects whose 'sys_attrs'
    contain 'isList' have their containees read and added to reverseLists.
    Failures on individual objects are reported and skipped; the walk
    always continues to the end.
    """
    ryw.give_news2('go_through_object_store: entered...<BR>', logging.info)

    searchFile = reverseLists.searchFile
    for objID,version in objectstore.objectversioniterator(objectStoreRoot):

        objstr = objID + '#' + str(version)
        ryw.db_print2('examining ' + objstr + '...<BR>', 7)

        success,meta = searchFile.get_meta(objID, version)
        if not success or not meta:
            ryw.give_bad_news('go_through_object_store: get_meta2 failed: ' +
                               objstr, logging.warning)
            continue

        # only list objects carry reverse-list entries.
        isList = meta.has_key('sys_attrs') and 'isList' in meta['sys_attrs']
        if not isList:
            ryw.db_print2('go_through_object_store: not a list object: ' +
                         objstr + '<BR>', 6)
            ryw.give_news2(' . ', logging.info)
            continue
        ryw.db_print2('go_through_object_sotre: found a list.<BR>', 6)

        containees = ReverseLists.read_container_file(
            objID, version, searchFile, RepositoryRoot)
        ryw.db_print2('go_through_object_store: containees are: ' +
                      repr(containees) + '<BR>', 7)
        if not reverseLists.add(objstr, containees):
            ryw.give_bad_news('go_through_object_store: ReverseLists.add ' +
                              'failed: ' + objstr, logging.error)
        else:
            ryw.db_print2('go_through_object_store: successfully added.<BR>',
                          7)
            # show a human-readable alias when the metadata has one.
            alias = 'unnamed'
            if meta.has_key('content_alias'):
                alias = meta['content_alias']
            ryw.give_news2(objstr + ' ' + alias + ', ', logging.info)

    ryw.give_news2('<BR>done.', logging.info)
def main():
    """Rebuild a fresh search file from every *_META file in the object store.

    Writes a new search file (named with a date/time/random suffix) under
    the module-level tmpOutDir by appending the metadata of every object
    found under ObjectStoreRoot, then compares it against the module-level
    OriginalSearchFile and prints the HTML footer.
    """
    dateTimeRand = ryw.date_time_rand()
    searchfile = os.path.join(tmpOutDir, 'NewSearchFile' + dateTimeRand)
    osr = ObjectStoreRoot
    
    #ryw.give_news2('ObjectStore is at: ' + osr + '<BR>', logging.info)
    ryw.give_news2('new SearchFile at: ' + searchfile + '<BR>', logging.info)
    ryw.give_news2('generating... &nbsp;&nbsp;', logging.info)
    
    # the store lays objects out four directory levels deep.
    l = glob.glob(osr + "/?/?/?/?/*_META")
    for filename in l:
        meta = loadMeta(filename)
        append_to_new_search_file(searchfile,meta)

    ryw.give_news2('done.<BR>', logging.info)

    osf = OriginalSearchFile
    ryw.give_news2('comparing against ' + osf + ' ... <BR>', logging.info)
    compareSearchFile(osf, searchfile)

    ryw_view.print_footer()
def copy_scripts(repDir):
    """Copy the whole Postmanet-2 code tree next to the outgoing repository dir.

    Returns True on success, False if the tree copy fails.
    """
    ryw.give_news2('copying scripts... &nbsp;&nbsp;', logging.info)

    # place a whole copy of code anyhow
    srcTree = os.path.join(RepositoryRoot, '..', 'Postmanet-2')
    dstTree = os.path.join(repDir, '..', 'Postmanet-2')
    try:
        su.copytree(srcTree, dstTree, isInstall = True)
        ryw.give_news2('Postmanet-2: done. ', logging.info)
    except:
        ryw.give_bad_news('copy_scripts: failed to place Postmanet-2.',
                          logging.error)
        return False

    logging.debug('copy_scripts: done.')
    ryw.give_news2('<BR>', logging.info)
    return True
def obj_store_size_inKB_not_used(tmpdir=""):
    if tmpdir:
        ryw.give_news2('temp objectstore copied to: ' + tmpdir + '<BR>',
                       logging.info)
    ryw.give_news2('computing outgoing objectstore size...',
                   logging.info)
    
    try:
        resources = su.parseKeyValueFile(os.path.join(RepositoryRoot,
                                                      'Resources.txt'))
        objectstoreroots = resources['objectstore'].split(';')
        firstRoot = objectstoreroots[0]
        if tmpdir:
                tmpOutDir = tmpdir
        else:
                tmpOutDir = resources['tmpout']
    except:
        ryw.give_bad_news('obj_store_size_inKB: get_resources failed.',
                          logging.critical)
        return (False, None, None, None)

    tmpStoreDir,objPrefix = ryw_upload.attempt_just_make_tmpdir(
        tmpOutDir, 'outgoing_obj_store_', '')
    if not tmpStoreDir:
        ryw.give_bad_news('obj_store_size_inKB: failed to make tmpdir: ' +
                          tmpOutDir, logging.critical)
        return (False, None, None, None)

    tmpStoreName = os.path.join(tmpStoreDir, 'outgoing_store')

    try:
        success = ryw_copytree.copy_tree_diff_repo(firstRoot, tmpStoreName)
        if not success:
            raise 'copy_tree_diff_repo failed.'
    except:
        ryw.give_bad_news('obj_store_size_inKB: copy_tree_diff_repo failed: '+
                          firstRoot + ' -> ' + tmpStoreName, logging.critical)
        return (False, None, None, None)

    kB = ryw_disc.getRecursiveSizeInKB(tmpStoreName)
    logging.debug('obj_store_size_inKB: ' + tmpStoreName + ' = ' + str(kB))

    ryw.give_news2 (str(kB) + ' KB<BR>', logging.info)
    return (True, kB, tmpStoreDir, tmpStoreName)
def copy_objects(items, itempath, repDir, tmpImgDir, metaOnly = False):
    """Copy the requested objects into the outgoing image.

    items is a list of 'objID#version' strings; itempath maps each item to
    its (data, meta, auxi, doneflag) source paths.  Each object's data goes
    through ryw_philips.out_copy (stub only when metaOnly); metadata and
    done-flag files are copied into the repDir objects tree.  Failures on
    individual items are reported and skipped.  Always returns True.
    """
    ryw.give_news2('copying requested objects... &nbsp;&nbsp;', logging.info)
    #
    # quite a bit of the copying details should be pulled out to a piece
    # of common code.
    #

    if len(items) == 0:
        ryw.give_news2('no requested objects. ', logging.info)
        ryw.give_news2('<BR>', logging.info)
        return True    

    success,dataRoot,auxiRoot,mapDict,counter = ryw_philips.out_init(tmpImgDir)
    if not success:
        # NOTE(review): returning True on out_init failure looks deliberate
        # (best effort: the image continues without objects) -- confirm.
        ryw.give_bad_news('copy_objects: out_init failed.', logging.error)
        return True

    for item in items:
        logging.debug('copy_objects: item: ' + item)
        try:
            objname,version = item.split('#')
            version = int(version)
            logging.debug('copy_objects: got name, version: ' +
                          objname + ' ' + repr(version))
            destpath = objectstore.name_version_to_paths_aux(
                os.path.join(repDir, 'objects'), objname, version)
            logging.debug('copy_objects: got destpath: ' + repr(destpath))

            if not ryw.good_repo_paths(itempath[item]):
                ryw.give_bad_news('copy_objects: good_repo_paths failed.',
                                  logging.error)
                # raising a bare string is a TypeError on Python 2.6+; raise
                # a real exception to reach the skip-and-continue handler.
                raise Exception('something missing in the source paths.')

            su.createparentdirpath(destpath[0])
            logging.debug('copy_objects: created parent dir: ' + destpath[0])

            # data (or a meta-only stub) goes into the Philips image layout.
            counter,mapDict = \
                ryw_philips.out_copy(item,
                                     itempath[item][0], itempath[item][1],
                                     itempath[item][2], counter,
                                     dataRoot, auxiRoot, mapDict,
                                     onlyMeta = metaOnly)

            shutil.copyfile(itempath[item][1], destpath[1])
            logging.debug('copy_objects: done copying metadata: ' +
                          destpath[1])

            shutil.copyfile(itempath[item][3], destpath[3])
            logging.debug('copy_objects: done copying done flag: ' +
                          destpath[3])
            
        except:
            # per-item failures are logged and skipped; the rest continue.
            ryw.give_bad_news(
                'copy_objects: failed to copy data: ' + item, logging.critical)
            ryw.give_bad_news(
                'copy_objects: skip copying an object and continue.',
                logging.error)
            continue

    ryw_philips.out_done(tmpImgDir, mapDict)

    ryw.give_news2('done. ', logging.info)
    ryw.give_news2('<BR>', logging.info)
    return True    
def copy_objectstore(firstRoot, repDir, tmpStoreName):
    """copies the objectstore minus the big data items.
    only copying the first root.  I assume this is ok.  the multiple
    roots will just get merged over time at one root on the village side.

    When a pre-copied store exists at tmpStoreName it is moved into place
    (and its temp parent cleaned up); otherwise the store is copied from
    firstRoot via copy_tree_diff_repo.  Returns True on success.
    """
    dst = os.path.join(repDir, 'ObjectStore')

    if os.path.exists(tmpStoreName):
        # fast path: a store was already copied earlier; just move it.
        ryw.give_news2('moving a pre-copied object store... &nbsp;&nbsp;',
                       logging.info)
        logging.debug('copy_objectstore: moving a pre-copied store: ' +
                      tmpStoreName)
        try:
            shutil.move(tmpStoreName, dst)
            ryw.cleanup_partial_dir(
                ryw.parent_dir(tmpStoreName), 'copy_objectstore:', True)
        except:
            ryw.give_bad_news('copy_objectstore: failed to move store: ' +
                              tmpStoreName, logging.critical)
            return False
        ryw.give_news2('done. ', logging.info)
        ryw.give_news2('<BR>', logging.info)
        return True

    ryw.give_news2('copying object store... &nbsp;&nbsp;', logging.info)
    logging.debug('copy_objectstore: ' + firstRoot + ' -> ' + repDir)
    try:
        success = ryw_copytree.copy_tree_diff_repo(firstRoot, dst)
        if not success:
            # raising a bare string is a TypeError on Python 2.6+; raise a
            # real exception object instead.
            raise Exception('copy_tree_diff_repo failed.')
    except:
        ryw.give_bad_news('copy_objectstore: copy_tree_diff_repo failed: '+
                          firstRoot + ' -> ' + dst, logging.critical)
        return False

    logging.debug('copy_objectstore: done: ' + dst)
    ryw.give_news2('done. ', logging.info)
    ryw.give_news2('<BR>', logging.info)
    return True
def main():
    """main function processing upload.

    CGI entry point for uploading a list object: validates the form,
    reads auxiliary files (thumbnails, excerpts), copies the queue file
    into a temp dir, builds the metadata (tagged 'isList'), stores the
    object, displays it, and queues its reverse-list update.  Every
    failure path exits via ryw_upload.cleanup_and_exit / quick_exit.
    """
    # initialization.
    name = WebUpload_ryw.print_header()
    form = cgi.FieldStorage()
    WebUpload_ryw.setup_logging()

    if not ryw_upload.check_required_fields(form, checkFile = False):
        ryw_upload.quick_exit(1)


    localExcerptResult = ryw_upload.check_local_file(
        form, fieldName = "local_excerpt_filename")

    # check aux file uploads: thumbnails, exerpts...
    success,auxExists,aux = ryw_upload.check_aux_file_uploads(
        form, localExcerptStuff = localExcerptResult)
    if not success:
        ryw_upload.quick_exit(1)

    tmpdir = WebUpload_ryw.attempt_make_tmpdir()
    if not tmpdir:
        ryw_upload.quick_exit(1)

    success,auxDir,auxInfo = ryw_upload.read_aux_files(
        form, aux, tmpdir, auxExists,
        localExcerptStuff = localExcerptResult)
    if not success:
        ryw_upload.cleanup_and_exit(tmpdir, None, None, 1)

    bytes,name = ryw_upload.copy_queue_file(tmpdir, name)
    if bytes == 0:
        # an empty queue file means there is nothing to upload.
        ryw_upload.cleanup_and_exit(tmpdir, None, None, 1)
    kB = math.ceil(bytes / 1024.0)
    filename = name
        

    #
    # the rest of this stuff copied straight from WebUpload_ryw.py
    # not nice, but hey.
    #
    meta = ryw_upload.try_process_attributes(name, form, filename, kB, bytes)
    if not meta:
        ryw_upload.cleanup_and_exit(tmpdir, None, None, 1)
    # mark this object as a list so the viewer treats it specially.
    ryw_upload.add_set_attrs(meta, 'sys_attrs', 'isList')

    meta = ryw_upload.add_aux_attributes(meta, auxInfo)

    success,metafile = ryw_upload.write_tmp_metafile(meta, tmpdir)
    if not success:
        ryw_upload.cleanup_and_exit(tmpdir, metafile, None, 1)
    
    nameToUpload,extractDir = ryw_upload.try_unzip_file(
        form, tmpdir, filename, kB)
    if not nameToUpload:
        ryw_upload.cleanup_and_exit(tmpdir, metafile, extractDir, 1)

    ryw.give_news2('<BR>Storing the list in the repository...',
                   logging.info)
    if not WebUpload_ryw.try_upload_object(meta, nameToUpload, auxDir):
        ryw_upload.cleanup_and_exit(tmpdir, metafile, extractDir, 1)

    #ryw_view.show_server_object(meta)
    searchFile = WebUpload_ryw.show_one_server_object(meta)
    #
    # Ok to do this stuff after the display, because there's no
    # way the newly added selection could be a containee of someone else.
    #
    ReverseLists.add_queue(meta, searchFile, RepositoryRoot)
    searchFile.done()

    ryw_upload.cleanup_and_exit(tmpdir, metafile, extractDir, 0)
def obj_store_size_inKB(tmpdir=""):
    ryw.give_news2('skipping computing objectstore size...', logging.info)
    return (True, 0, None, None)
def main():
    """Process one CGI upload request end to end.

    Reads the CGI form, copies the uploaded (or server-local) data into a
    temp directory, builds the metadata record, optionally probes the data
    with ffmpeg, unzips if needed, stores the object in the repository,
    and displays the stored object.  On any failure it exits the process
    via ryw_upload.quick_exit / ryw_upload.cleanup_and_exit.
    """

    # initialization.
    name = print_header()
    form = cgi.FieldStorage()
    setup_logging()

    # A "local file" is data already present on the server and named in
    # the form, as opposed to data carried in the HTTP upload body.
    localSuccess, localFound, localFilePath, localIsDir = ryw_upload.check_local_file(form)
    if not localSuccess:
        ryw.give_bad_news("check_local_file failed.", logging.error)
        ryw_upload.quick_exit(1)

    # The uploaded-file field is only required when no local file was given.
    if not ryw_upload.check_required_fields(form, checkFile=not localFound):
        ryw_upload.quick_exit(1)

    if localFound:
        buf = None
    else:
        # just read a tiny bit to see if we have an empty upload file.
        buf = ryw_upload.attempt_read_uploaded_file(form, "local_filename")
        if not buf:
            ryw_upload.quick_exit(1)

    # The excerpt may also be a server-local file; result is passed through
    # to the aux-file checks below.
    localExcerptResult = ryw_upload.check_local_file(form, fieldName="local_excerpt_filename")

    # check aux file uploads: thumbnails, exerpts...
    success, auxExists, aux = ryw_upload.check_aux_file_uploads(form, localExcerptStuff=localExcerptResult)
    if not success:
        ryw_upload.quick_exit(1)

    # NOTE(review): called unqualified here while other call sites use
    # WebUpload_ryw.attempt_make_tmpdir() -- presumably the same helper;
    # confirm against this file's imports.
    tmpdir = attempt_make_tmpdir()
    if not tmpdir:
        ryw_upload.quick_exit(1)

    # Stage the aux files (thumbnails, excerpts) into tmpdir.
    success, auxDir, auxInfo = ryw_upload.read_aux_files(
        form, aux, tmpdir, auxExists, localExcerptStuff=localExcerptResult
    )
    if not success:
        ryw_upload.cleanup_and_exit(tmpdir, None, None, 1)

    filename = ryw_upload.decide_tmp_data_file_name(form, localPath=localFilePath, isLocalDir=localIsDir)
    if not filename:
        ryw_upload.cleanup_and_exit(tmpdir, None, None, 1)

    # Copy server-local data into tmpdir; 'found' is False when the data
    # instead needs to be read from the HTTP upload stream below.
    success, found, bytes = ryw_upload.copy_local_file_for_upload(
        form, tmpdir, filename, localFound, localFilePath, localIsDir
    )
    if not success:
        ryw_upload.cleanup_and_exit(tmpdir, None, None, 1)
    kB = math.ceil(bytes / 1024.0)

    if not found:
        ryw.give_news2("<BR>Copying remote file...", logging.info)
        kB, bytes = ryw_upload.read_uploaded_file(form, buf, tmpdir, filename, "local_filename")
    if kB == 0:
        ryw_upload.cleanup_and_exit(tmpdir, None, None, 1)

    # Build the metadata record from the form attributes plus aux info.
    meta = ryw_upload.try_process_attributes(name, form, filename, kB, bytes)
    if not meta:
        ryw_upload.cleanup_and_exit(tmpdir, None, None, 1)

    meta = ryw_upload.add_aux_attributes(meta, auxInfo)

    # ffmpeg probing only makes sense for a single file, not a directory.
    if not localIsDir:
        ryw_ffmpeg.try_exec(RepositoryRoot, meta, tmpdir, filename)

    success, metafile = ryw_upload.write_tmp_metafile(meta, tmpdir)
    if not success:
        ryw_upload.cleanup_and_exit(tmpdir, metafile, None, 1)

    # Directories are uploaded as-is; single files may need unzipping first.
    if localIsDir:
        nameToUpload, extractDir = (os.path.join(tmpdir, filename), None)
    else:
        nameToUpload, extractDir = ryw_upload.try_unzip_file(form, tmpdir, filename, kB)
        if not nameToUpload:
            ryw_upload.cleanup_and_exit(tmpdir, metafile, extractDir, 1)

    ryw.give_news2("<BR>Storing the data in the repository...", logging.info)
    ryw.db_print2("<BR>" + "meta: " + repr(meta) + "<BR>", 57)
    ryw.db_print2("nameToUpload: " + nameToUpload + "<BR>", 22)
    ryw.db_print2("auxDir: " + repr(auxDir) + "<BR>", 22)
    if not try_upload_object(meta, nameToUpload, auxDir):
        ryw_upload.cleanup_and_exit(tmpdir, metafile, extractDir, 1)

    # ryw_view.show_server_object(meta)
    searchFile = show_one_server_object(meta)
    searchFile.done()

    # cgi.print_form(form)

    # Success: remove tmp files and exit 0.
    ryw_upload.cleanup_and_exit(tmpdir, metafile, extractDir, 0)
def copy_local_file_for_upload(form, tmpdir, uploadFileName,
                               localFound, localPath, localDir,
                               isCopyingExcerpt = False):
    """Copy a server-local file or directory into the upload tmp area.

    Arguments:
      uploadFileName   -- name the data should have inside tmpdir.
      localFound       -- whether a local source was named at all.
      localPath        -- path of the local file/directory to copy.
      localDir         -- True when localPath is a directory.
      isCopyingExcerpt -- copy directory CONTENTS straight into tmpdir
                          (the "excerpts" directory) instead of nesting.

    Returns (success, found, bytes):
      success -- False on any failure (sizing, disk space, copy error).
      found   -- True only when a local source existed and was copied.
      bytes   -- size of the copied data in bytes (0 when nothing copied).
    """
    # Nothing to do when the caller didn't supply a local file.
    if not localFound:
        return (True, False, 0)

    if localDir:
        # Directory upload: size it recursively; an empty tree is an error.
        dirKB = ryw_disc.getRecursiveSizeInKB(localPath)
        bytes = dirKB * 1024
        if bytes == 0:
            ryw.give_bad_news(
                'copy_local_file_for_upload: 0-sized local directory: '+
                localPath, logging.error)
            return (False, False, 0)
    else:
        success,bytes = ryw.get_file_size(localPath)
        if not success:
            return (False, False, 0)

    # Refuse the copy when the tmp area is nearly out of space.
    freeKB = ryw.free_MB(tmpdir) * 1024
    kB = math.ceil(bytes / 1024.0)

    if pretty_much_out_of_space(kB, freeKB):
        ryw.give_bad_news(
            'copy_local_file_for_upload: nearly out of space ' +
            'while uploading queue, KB: ' + repr(kB), logging.error)
        return (False, False, 0)

    tmpFileName = os.path.join(tmpdir, uploadFileName)
    try:
        if localDir:
            ryw.give_news2('<BR>Copying local directory: ' + localPath,
                           logging.info)
            if isCopyingExcerpt:
                #
                # don't want the "excerpts" directory to contain
                # just a lone directory inside in the common case.
                # copy the content into the "excerpts" directory directly.
                # "tmpdir" in this case is the "excerpts" directory.
                # su.copytree tolerates an existing destination directory
                # and doesn't wipe out what's already in it.
                #
                tmpFileName = os.path.normpath(tmpdir)
                su.copytree(localPath, tmpFileName)
            else:
                shutil.copytree(localPath, tmpFileName)
        else:
            ryw.give_news2('<BR>Copying local file: ' + localPath,
                           logging.info)
            shutil.copyfile(localPath, tmpFileName)
    except Exception:
        ryw.give_bad_news('copy_local_file_for_upload: ' +
                          'failed to copy data: ' +
                          localPath + ' -> ' + tmpFileName,
                          logging.critical)
        return (False, False, 0)

    # BUG FIX: the original passed logging.info as a second argument to
    # logging.debug(), where it is treated as a spurious %-format argument
    # and breaks message formatting at emit time.
    logging.debug('copy_local_file_for_upload: ' +
                  'succeeded copying local data: ' +
                  localPath + ' -> ' + tmpFileName)
    return (True, True, bytes)
def generate_html(items,itempath, repDir, tmpImgDir):
    """Write static HTML pages for a burned disc: <repDir>/html/index.html
    listing the objects, plus a redirecting index.html in repDir's parent.

    Returns True on success, False on any failure while writing the pages.
    The pages let a user browse the disc directly, or diagnose it after a
    failed merge.

    FIXED: the original body mixed tab and 8-space indentation (a fatal
    syntax error under Python 3); it is now uniformly space-indented.
    No runtime strings were changed.
    """
    ryw.give_news2('generating static html pages... &nbsp;&nbsp;',
                   logging.info)
    htmlDir = os.path.join(repDir, "html")
    if not ryw.try_mkdir(htmlDir, 'addRobotWriteRequest:generate_html'):
        return False
    try:
        f = open(os.path.join(htmlDir,"index.html"),"w")
        f.write("""
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">

<html>

<head>
	<title>Objects on this disk</title>
""")
        f.write(ryw_view.css_str())
        f.write("""
</head>

<body>
""")
        f.write(ryw_view.logo_str_for_disc())
        f.write("""
<h3>Objects on Disk</h3><p>(you are on this page either because you are browsing 
the disc directly or were directed to it due to an error while merging it.)
<BR>
""")
        write_page(items,itempath,f, tmpImgDir)
        f.write(ryw_view.end_print_str())
        f.write(ryw_view.footer2_str())
        f.write("""
</body>
</html>
""")
        f.close()
        # Ship the icon images alongside the pages so they render offline.
        srcIconsDir = os.path.join(RepositoryRoot, "WWW", "icons")
        dstIconsDir = os.path.join(htmlDir, "icons")
        su.copytree(srcIconsDir, dstIconsDir)
        # A tiny page one level up immediately redirects into the real index.
        # NOTE(review): the refresh URL uses backslash separators; browsers
        # normally expect forward slashes -- confirm this resolves when the
        # disc is browsed.
        parentDir, repdirname = os.path.split(repDir)
        f = open(os.path.join(parentDir,"index.html"), "w")
        f.write("""
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">

<html>
<head>
<title> Repository folders on this disk </title>
<meta http-equiv="refresh" content="1;URL=.\%s\html\index.html">
</head>
<body>
loading page containing list of things on this disk....
</body>
</html>
""" % (repdirname,))
        f.close()
    except Exception:
        ryw.give_bad_news('addRobotWriteRequest: generate_html files: failed to write file: ',logging.critical)
        return False

    ryw.give_news2('done. ', logging.info)
    ryw.give_news2('<BR>', logging.info)
    return True
#print 'Dear <B><I>' + name + ':</I></B>'


# --- CGI entry for FlushQueue: parse and validate request parameters. ---
form = cgi.FieldStorage()
tmpdir = form.getfirst("tmpdir","")

# An explicitly supplied tmpdir must be an existing, usable directory.
if tmpdir and not ryw.is_valid_dir(tmpdir, 'FlushQueue'):
    ryw_view.print_footer()
    sys.exit(1)


# 'meta' checkbox: when set ('true' or 'on'), only metadata is flushed.
meta = form.getfirst('meta', '')
metaOnly = meta == 'true' or meta == 'on'
if metaOnly:
    ryw.give_news2('sending metadata only.<BR>', logging.info)


# Optional per-disc size limit; must parse as an integer when present.
discLimit = form.getfirst('disc_limit', '')
discLimitInt = None
if discLimit:
    try:
        discLimitInt = int(discLimit)
    except:
        ryw.give_bad_news('FlushQueue.py: bad disc limit: ' + discLimit,
                          logging.error)
        ryw_view.print_footer()
        sys.exit(1)
if discLimitInt and (discLimitInt < 10 or discLimitInt > 1000000):
    ryw.give_bad_news("The disc limit size should be somewhere between "+
def make_discs(reqsize, reqpath, username, objKB, tmpStoreName,
               reqList, tmpDirOption="", metaOnly = False):
    """Pack the queued requests onto one or more discs, submitting each
    disc to the CD-writing robot via AddRobotWriteRequest.

    reqsize -- dict mapping item -> size in KB; entries are consumed
               (deleted) as items are packed onto discs.
    reqList -- ordered list of items to write.
    objKB   -- KB already reserved on each disc, subtracted from capacity.
    metaOnly -- when True, sizes are ignored and everything fits one disc.

    Returns (success, countCDs) where countCDs is the number of discs
    submitted before success/failure.
    """
    # make at least one CD
    # make more than one if the requested data does not fit in one CD
    countCDs = 0
    remainingItems = list(reqList)
    while True:
        # make a CD
        currentSize = 0
        yes = []

        # logging.debug('make_discs: before, reqList is: ' + repr(reqList))

        # Greedily take items in queue order until one no longer fits;
        # packing stops at the FIRST item that doesn't fit (no skipping
        # ahead), preserving the queue order across discs.
        for item in reqList:
            # logging.debug('make_discs: item is: ' + item)
            # logging.debug('    reqsize for item is: ' + repr(reqsize[item]))
            # logging.debug('    currentSize is: ' + repr(currentSize))
            if metaOnly or currentSize + reqsize[item] <= ryw.maxSizeInKB - objKB:
                currentSize += reqsize[item]
                yes.append(item)
                # Consumed: remove from the size map and the remaining list
                # (safe because we iterate reqList, a separate snapshot).
                del reqsize[item]
                remainingItems.remove(item)
            else:
                break
                # logging.debug('make_discs: size too big for this disc.')
        # Next pass iterates only what is still unpacked.
        # NOTE(review): if the first remaining item alone exceeds the disc
        # capacity, 'yes' stays empty and this loop submits empty discs
        # forever -- confirm callers never queue an oversized single item.
        reqList = list(remainingItems)

        # logging.debug('make_discs: after, reqList is: ' + repr(reqList))
        # logging.debug('make_discs: yes set is: ' + repr(yes))

        # make CD with the requests in 'yes'
        logging.debug('make_discs: about to add to robot: ' + repr(yes))
        # deliberately not catching exceptions for now.
        success,tmpImgDir, jrq = \
            AddRobotWriteRequest.addRobotWriteRequest( \
                username, yes, reqpath, currentSize, tmpStoreName,
                tmpDirOption = tmpDirOption,
                onlyMeta = metaOnly)
        if not success:
            ryw.give_bad_news('make_discs: addRobotWriteRequest failed: ' +
                              username + ' ' + repr(yes), logging.error)
            return (False, countCDs)

        countCDs += 1
        logging.debug('make_discs: done adding to robot: disc #' +
                      repr(countCDs))
        ryw.give_news2('<BR>', logging.info)
        ryw.give_news2('sent disc # ' + str(countCDs) +
                      ' to the robot...', logging.info)
        ryw.give_news2('<BR>', logging.info)

        # Both collections shrink in lockstep; a mismatch means a bug.
        if len(reqsize) != len(reqList):
            ryw.give_bad_news('make_discs: unexpected list size mismatch.',
                              logging.critical)
        if len(reqsize) == 0:
            break

    ryw.give_news2('finished sending ' + str(countCDs) +
                   ' disc(s) to the robot...', logging.info)
    ryw.give_news2('<BR>', logging.info)
    logging.debug('make_discs: produced ' + repr(countCDs) + ' discs.')
    return (True, countCDs)