Example #1
def process_repo_dir(srcd, dstd, copyFileFunc, copyDirFunc):
    #check_village_log('test process_repo_dir')
    success,entries,regularEntries,prefixes = get_repo_dir_entries(srcd)
    goodPrefixes = ryw.cleanup_partial_repo_dir(srcd, prefixes)
    # logging.debug('goodPrefixes: ' + repr(goodPrefixes))
    # logging.debug('regular: ' + repr(regularEntries))

    success = True
    for regular in regularEntries:
        srcName = os.path.join(srcd, regular)
        dstName = os.path.join(dstd, regular)
        logging.debug(' regular is: ' + regular)
        thisSuccess = copy_tree_diff_common(srcName, dstName,
                                            copyFileFunc, copyDirFunc)
        success = success and thisSuccess

    for prefix in goodPrefixes:
        sizeSuccess,isBig = is_big_data(srcd, prefix)
        if not sizeSuccess:
            # use a separate flag here so a successful is_big_data()
            # doesn't clobber failures accumulated in the loop above.
            ryw.give_bad_news(
                'process_repo_dir: failed to determine data size: '+srcd,
                logging.warning)
            success = False
            continue
        thisSuccess = copy_an_outgoing_object(srcd, dstd, prefix,
                                              bigdata = isBig)
        success = success and thisSuccess
        
    return success
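The copyFileFunc/copyDirFunc pair makes the traversal policy pluggable; process_repo_dir only decides what gets visited. The dispatcher copy_tree_diff_common is not shown in this excerpt; a minimal sketch of the dispatch it plausibly performs, consistent with the callback signatures used in Examples #13 and #14 (an assumed reconstruction, not the actual implementation):

import os

def copy_tree_diff_common(srcName, dstName, copyFileFunc, copyDirFunc):
    # hypothetical dispatcher: directories go to the directory policy
    # (which recurses back through here), files to the file policy.
    if os.path.isdir(srcName):
        return copyDirFunc(srcName, dstName, copyFileFunc, copyDirFunc)
    return copyFileFunc(srcName, dstName)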
Example #2
def get_paths(objroot, objID, version, meta, repositoryRoot):

    paths = get_paths1(objroot, objID, version)
    
    if not meta:
        paths.append(None)
        return paths
    if not meta.has_key('path'):
        ryw.give_bad_news('DeleteObject.get_paths: missing path attribute: '+
                          repr(meta), logging.error)
        paths.append(None)
        return paths

    path = meta['path']
    try:
        resources = su.parseKeyValueFile(
            os.path.join(repositoryRoot, 'Resources.txt'))
        viewroot = resources['viewroot']
    except:
        ryw.give_bad_news('DeleteObject.get_paths: failed to get view root.',
                          logging.critical)
        paths.append(None)
        return paths
    
    viewpath = os.path.join(viewroot, path)
    paths.append(viewpath)
    logging.debug('DeleteObject.get_paths: ' + repr(paths))
    return paths    
Example #3
def get_meta(searchFile, objID, version, repositoryRoot):
    success,meta = searchFile.get_meta(objID, version)
    if success:
        #
        # I'm doing this to hardwire all
        # places of gettting objectstoreroot.
        #
        #return (meta, meta['objectstore'])
        return (meta, ryw.hard_wired_objectstore_root())
    
    logging.warning(
        'ryw_meta.get_meta: not finding it in the SearchFile: ' +
        objID + ' # ' + str(version) + ', but attempting to continue')
    
    #
    # look for the hardwired objectstore root.  not nice but...
    #
    objroot = os.path.join(repositoryRoot, 'WWW', 'ObjectStore')
    if not os.path.exists(objroot):
        ryw.give_bad_news(
            'DeleteObject.get_meta: even the hardwired root does not exist: '+
            objroot, logging.critical)
        return (None, None)
    
    success,meta = ryw.get_meta(objroot, objID, version)
    if not success:
        logging.warning(
            'ryw.get_meta: failed to read metadata from objstore: '+
            objID + ' # ' + str(version))
        return (None, objroot)

    return (meta, objroot)
Example #4
def launchExplorer(path):
    ryw.db_print2('launchExplorer: path is: ' + path, 59)
    try:
        return ryw_bizarro.launch_explorer(path)
    except:
        ryw.give_bad_news("Failed to launch Explorer",logging.warning)
        return False
Example #5
    def add_this_version_to_search_file(self, meta):
        """same as above but does not increment version number."""
        logging.debug('add_this_version_to_search_file: ' + repr(meta))
        if not meta.has_key('id') or not meta.has_key('version'):
            ryw.give_bad_news(
                'add_this_version_to_search_file: missing field(s)...',
                logging.critical)
            return False

        objID = meta['id']
        version = meta['version']

        success,existingMeta = self.get_meta(objID, version)
        if success:
            ryw.give_news(
                'warning: add_this_version_to_search_file: already exists: '+
                objID + ' ' + str(version), logging.warning)
            return True

        if not self._SearchFile__append_to_search_file(meta):
            return False

        self._SearchFile__add_to_memory_index(objID, version, meta)
        logging.debug('add_this_version_to_search_file: success.')
        return True
Example #6
def out_obj_dir_name(objStoreRoot, objname, version, currCounter):

    if currCounter >= 9999:
        ryw.give_bad_news('out_obj_dir_name: counter exceeded 9999.',
                          logging.warning)
        #return (False, None, currCounter)
    
    success,meta = ryw.get_meta(objStoreRoot, objname, version)
    if not success:
        ryw.give_bad_news('out_obj_dir_name: failed to get meta data: ' +
                          objname + '#' + str(version), logging.error)
        return (False, None, currCounter)

    if meta.has_key('content_alias'):
        author = meta['content_alias']
        author = stripStr(author)
    elif meta.has_key('author_name'):
        author = meta['author_name']
        author = stripStr(author)
        author = re.sub('(^[mM]s)|(^[mM]r)|(^[mM]rs)|(^[mM]iss)', '', author)
    else:
        author = 'unknown'

    prefix = str(currCounter).zfill(2)
    dirName = prefix + author
    dirName = dirName[:32]
    logging.debug('out_obj_dir_name: dirName is: ' + dirName)
        
    return (True, dirName, currCounter + 1)
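Only the naming arithmetic below is self-contained; stripStr is assumed to clean the author string (it is not shown in this excerpt). A worked example, noting that the alternation order in the honorific regex strips 'Mr' from 'Mrs...' and leaves a stray 's':

import re

author = re.sub('(^[mM]s)|(^[mM]r)|(^[mM]rs)|(^[mM]iss)', '', 'MrGupta')
dirName = (str(7).zfill(2) + author)[:32]
print dirName    # '07Gupta'; an input of 'MrsDevi' would yield '07sDevi'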
Example #7
def add_all(queueName, searchFile):
    try:
        reqs = set()

        count = 0
        for meta in searchFile.iterator():
            objstr = meta['id'] + '#' + str(meta['version'])
            reqs.add(objstr)
            count += 1
            logging.debug('add_all: ' + objstr)

        ryw.give_news(
            'add_all: number of objects added to the request queue: ' +
            str(count), logging.info)

        success,tmppath,bakpath = write_reqs(queueName, reqs)
        if not success:
            ryw.give_bad_news('add_all: write_reqs failed: ' + queueName,
                              logging.critical)
            return False
        cleanup(tmppath, bakpath)
        return True
    except:
        ryw.give_bad_news('add_all: failed.', logging.critical)
        return False
Example #8
def NOTUSED_talk_to_search_server(values):
    """NOT USED ANY MORE.
    need searchserver to send meta-data to.
    in turn, it gives us the version number to use for the object."""

    try:
        searchserver = xmlrpclib.ServerProxy("http://localhost:53972")
    except:
        ryw.give_bad_news(
            'fatal_error: uploadobject: failed to connect to search server.',
            logging.critical)
        return (False, None, None)

    version = None
    try:
        version = searchserver.addtosearchfile(values)
        values['version'] = version
    except:
        ryw.give_bad_news(
            'fatal_error: uploadobject: failed to addtosearchfile().', 
            logging.critical)
        return (False, searchserver, version)

    logging.debug('talk_to_search_server passed: got version: ' +
                  repr(version))
    return (True, searchserver, version)
Example #9
def add_to_search_file(values, hasVersion, cloneVersion=False):
    """need search file to send meta-data to.
    in turn, it gives us the version number to use for the object."""

    success,searchFile = ryw.open_search_file(
        'add_to_search_file:',
        os.path.join(RepositoryRoot, 'WWW', 'logs'),
        'upload.log',
        os.path.join(RepositoryRoot, 'SearchFile'),
        True)
    if not success:
        return (False, None)

    version = None
    try:
        if hasVersion:
            success = searchFile.add_this_version_to_search_file(values)
            version = values['version']
        else:
            success,version = searchFile.add_to_search_file(
                values, cloneVersion=cloneVersion)

        searchFile.done()
        values['version'] = version
    except:
        ryw.give_bad_news(
            'fatal_error: failed to add_to_search_file().', 
            logging.critical)
        searchFile.done()
        return (False, version)

    logging.debug('add_to_search_file passed: got version: ' +
                  repr(version))
    return (True, version)
Example #10
def show_one_server_object(meta, searchFile):
    """like WebUpload_ryw.show_one_server_object() except that
    the searchFile is passed in."""
    
    print "<BR>"
    print Browse.script_str()
    #displayObject = ryw_view.DisplayObject(RepositoryRoot,
    #                                       calledByVillageSide = False,
    #                                       missingFileFunc = None)

    success,reverseLists = ReverseLists.open_reverse_lists(
        'EditObject:', '', '',
        os.path.join(RepositoryRoot, 'ReverseLists'), True,
        searchFile = searchFile,
        repositoryRoot = RepositoryRoot)
    if not (success and reverseLists):
        ryw.give_bad_news('EditObject: failed to open ReverseLists.',
                          logging.critical)
        if reverseLists:
            reverseLists.done()
        return False

    displayObject = ryw_view.DisplayObject(
        RepositoryRoot, calledByVillageSide = False,
        missingFileFunc = Browse.reqDownloadFunc,
        searchFile = searchFile,
        reverseLists = reverseLists)
    
    displayObject.begin_print()
    displayObject.show_an_object_compact(meta)
    displayObject.end_print()
    reverseLists.done()
Example #11
def delete_all(searchSel):
    completeSuccess = True
    searchFile = None
    
    for objstr in searchSel:
        success,objID,version = ryw.split_objstr(objstr)
        if not success:
            ryw.give_bad_news('DelSearchAll: invalid objstr: ' + objstr,
                              logging.error)
            completeSuccess = False
            continue
        success,searchFile = DeleteObject.do_delete(
            objID, version, searchFile=searchFile)
        if not success:
            ryw.give_bad_news(
                'DelSearchAll: DeleteObject.do_delete failed.' + objstr,
                logging.error)
            completeSuccess = False
        else:
            ryw.db_print('DelSearchAll.delete_all: do_delete succeeded.',
                         18)

    if searchFile:
        searchFile.done()
    return completeSuccess
Example #12
def do_update_metadata(objroot, objID, version, meta, searchFile=None):
    """this is also called by merging incoming data in
    ProcessDiscs.deal_with_stub().  only there, we're going to worry
    about the optional incoming SearchFile argument. there,
    we're trying to re-use the searchFile argument without
    re-opening it over and over again."""

    if not searchFile:
        ryw.db_print('do_update_metadata: null searchFile', 11)
    else:
        ryw.db_print('do_update_metadata: reusing searchFile', 11)
    
    if not ryw_meta.rewrite_meta(objroot, objID, version, meta):
        ryw.give_bad_news('EditObject: rewrite_meta failed.', logging.error)
        return (False, None)

    if not searchFile:
        searchFile = ryw_meta.open_search_file(RepositoryRoot,
                                               grabWriteLock = True)
    if not searchFile:
        ryw.give_bad_news('EditObject: failed to open search file.',
                          logging.critical)
        return (False, None)
        
    searchFile.modify(meta)
    return (True, searchFile)
Example #13
def copy_tree_diff_dir(src, dst, copyFileFunc, copyDirFunc):
    """normal, except for moving _DONE items to the end of copying..."""
    assert(os.path.exists(src))
    assert(os.path.isdir(src))

    logging.debug('copy_tree_diff_dir: ' + src + ' -> ' + dst)

    try:
        make_dst_dir(src, dst)
        #
        # make sure we copy any _DONE items last.
        #
        dirItems = move_done_last(os.listdir(src))
        success = True
        for n in dirItems:
            srcName = os.path.join(src, n)
            dstName = os.path.join(dst, n)
            #logging.debug(' n is: ' + n)
            thisSuccess = copy_tree_diff_common(srcName, dstName,
                                                copyFileFunc,
                                                copyDirFunc)
            success = success and thisSuccess
        return success
    except:
        ryw.give_bad_news('copy_tree_diff_dir: failed to copy dir: ' +
                          src + ' -> ' + dst, logging.critical)
        return False
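move_done_last is defined elsewhere; a minimal sketch of the reordering the comment describes (an assumed reconstruction: entries marked _DONE keep their relative order but are copied after everything else):

def move_done_last(items):
    # hypothetical helper: partition the listing so that _DONE
    # markers are processed only after the payload they flag.
    notDone = [n for n in items if '_DONE' not in n]
    done = [n for n in items if '_DONE' in n]
    return notDone + done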
Example #14
def copy_tree_diff_file_repo(src, dst):
    """used during recursive copying of the object store:
    files that are too big are not copied."""
    
    assert(os.path.exists(src))
    assert(os.path.isfile(src))

    #logging.debug('copy_tree_diff_file_repo: ' + src + ' -> ' + dst)

    try:
        src = os.path.normpath(src)
        dst = os.path.normpath(dst)
        srcBase = os.path.basename(src)

        kB = os.path.getsize(src) / 1024
        if kB > ryw.smallFileSizeCeilingKB:
            #logging.debug(
            #    'copy_tree_diff_file_repo: ' +
            #    'exceeds small file size ceiling: ' +
            #    src + ' ' + repr(kB) + ' KB')
            #ryw.give_news('copy_tree_diff_file_repo: ' +
            #              'exceeds small file size ceiling: ' +
            #              src + ' ' + repr(kB) + ' KB', logging.info)
            return True
        #logging.debug('copy_tree_diff_file_repo: ' +
        #      'does not exceed small file size ceiling: ' +
        #      src + ' ' + repr(kB) + ' KB')

        return copy_tree_diff_file(src, dst)
    except:
        ryw.give_bad_news('copy_tree_diff_file_repo: failed to copy file: ' +
                          src + ' -> ' + dst, logging.critical)
        return False
Example #15
def deleteRequested(form):
	image = form.getfirst("Img","")
	if not image:
		print "No Image specified to delete"
		sys.exit(1)
	success, resources = get_resources()
	if not success:
		ryw.give_bad_news("Error parsing resource file",logging.error)
		sys.exit(1)

	robotsJobDir = resources['robotsjobdir']
	jobfile = os.path.join(robotsJobDir, image)
	ryw.cleanup_path(jobfile+".JRQ",'deleteOutgoingImage.deleteRequested:')
	ryw.cleanup_path(jobfile+".ERR",'deleteOutgoingImage.deleteRequested:')
	ryw.cleanup_path(jobfile+".DON",'deleteOutgoingImage.deleteRequested:')
	ryw.cleanup_path(jobfile,'deleteOutgoingImage.deleteRequested:')

	tmpout = resources['tmpout']
	image = os.path.join(tmpout,image)
	if not os.path.exists(image):
		ryw.give_bad_news("specified image doesnt exist",logging.info)
		sys.exit(1)
	ryw.cleanup_path(image,"deleteOutgoingImage.deleteRequested:")
	sys.stdout.write("True")
	sys.exit(0)
Example #16
def get_file_paths2(objID, version, skipLk=False, searchFile=None,
                    allowNullSearchFile=False):
    """10/21/08: rewritten to return chapterListFile as well."""
    
    success,dataPath = get_path(objID, version, skipLock = skipLk,
                                searchFile = searchFile,
                                allowNullSearchFile = allowNullSearchFile)
    if not success:
        return None

    name = get_sel_name(dataPath)
    if not name:
        ryw.give_bad_news(
            'DisplaySelection: failed to get selection file name:<br>'+
            dataPath, logging.error)
        return None

    rfpath = os.path.join(dataPath, name)

    chapterListName = get_chapterlist_name(dataPath)
    chapterListFullName = os.path.join(dataPath,
                                       ChapterList.CHAPTER_LIST_NAME)
    ryw.db_print2('get_file_paths2: full name is: ' +
                  chapterListFullName, 39)
    
    return (rfpath, chapterListName, chapterListFullName)
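Note the asymmetric return: None on any failure, but a 3-tuple on success, so callers must test before unpacking. A sketch of a typical call site:

paths = get_file_paths2(objID, version, searchFile=searchFile)
if not paths:
    # failure already reported inside get_file_paths2
    return None
rfpath, chapterListName, chapterListFullName = paths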
Example #17
def in_copy(objname, version, dstDataPath, dstAuxiPath, driveroot, mapDict):
    itemName = objname + '#' + version
    
    success,dirName,dataDir,auxiDir = get_map_entry(driveroot, mapDict,
                                                    itemName)
    if not success:
        return False

    ryw.give_news3('  copying ' + dirName + ' ... ', logging.info)

    try:
        su.copytree(dataDir, dstDataPath)
    except:
        ryw.give_bad_news('in_copy: failed to copy data directory: ' +
                          itemName + ': ' + dataDir, logging.error)
        return False

    logging.debug('in_copy: successfully copied data directory: ' +
                  itemName + ': ' + dataDir)
    
    if os.path.exists(auxiDir):
        try:
            su.copytree(auxiDir, dstAuxiPath)
        except:
            ryw.give_bad_news('in_copy: failed to copy auxi directory: ' +
                              itemName + ': ' + auxiDir, logging.error)
            return False
        logging.debug('in_copy: successfully copied auxi directory: ' +
                      itemName + ': ' + auxiDir)

    return True
Example #18
def process_hindi_bracket(form, hindiKey1, hindiKey2,
                          englishCategoryName, meta,
                          oldPair = ['unknown', 'unknown']):
    hindiVal1 = form.getfirst(hindiKey1, '')
    hindiVal2 = form.getfirst(hindiKey2, '')
    if not hindiVal1 or not hindiVal2:
        return
    if hindiVal1 == ryw_hindi.UNTRANSLATED_STRING or \
       hindiVal2 == ryw_hindi.UNTRANSLATED_STRING:
        return
    englishVal1 = ryw_hindi.hindi_to_english(hindiVal1)
    englishVal2 = ryw_hindi.hindi_to_english(hindiVal2)
    if not englishVal1 or not englishVal2:
        ryw.give_bad_news(
            'process_hindi_bracket: warning: no hindi to english mapping: ' +
            ryw_hindi.html(hindiVal1) + ', ' + ryw_hindi.html(hindiVal2),
            logging.warning)
        return
    if englishVal1 == 'unknown' or englishVal2 == 'unknown':
        logging.debug('process_hindi_bracket: value is unknown: ' +
                      englishCategoryName)
        return

    if englishVal1 == oldPair[0] and englishVal2 == oldPair[1]:
        return
    
    meta[englishCategoryName] = [englishVal1, englishVal2]
    logging.debug('process_hindi_bracket: ' + englishCategoryName + ' = ' +
                  repr([englishVal1, englishVal2]))
    logging.debug('process_hindi_bracket: ' + repr(meta))
Example #19
def wait_to_end_and_cleanup(filename, count, robotJobsDir):
	rexp = re.compile(regexp % (os.path.basename(filename),))
	try:	
		ryw.give_news("Waiting for job to end...",None)
		while True:
			section = load_section(filename, robotJobsDir, rexp)
			if section.has_key("TimeCompleted") and \
				section["TimeCompleted"].strip() != "":
					break
			ryw.give_news2(" * ",None)
			time.sleep(10)

		section['TimeCompleted'] = section['TimeCompleted'].strip()
		mesg = "Job finished at %(TimeCompleted)s. %(GoodDiscs)s good discs and %(BadDiscs)s bad discs were produced.\n"
		if "JobErrorNumber" in section and section['JobErrorNumber'] != "16":
			mesg += "Job ended with error. Error code = %(JobErrorNumber)s. Error String = %(JobErrorString)s.\n"
		for i in range(0,10):
			if not "DiscErrorIndex%d" % (i,) in section:
				break
			index = section["DiscErrorIndex%d" % (i,)]
			number = section["DiscErrorNumber%d" % (i,)]
			errstr = section["DiscErrorString%d" % (i,)]
			mesg += "Disc %s had error. Error code = %s. Error Message = %s\n" % (index, number, errstr)

		ryw.give_news("<PRE>" + mesg % section + "</PRE>", None)
		if ("JobErrorNumber" in section and section['JobErrorNumber'] != "16") or \
			section['BadDiscs'] != "0" or "DiscErrorIndex0" in section:
			logging.warning("Erase job ended with errors. Job's status dict: " + str(section))
		else:
			logging.debug("Erase job ended with no errors. Job's status dict: " + str(section)) 
		clean_up(filename)
	except:
		ryw.give_bad_news("Error while waiting for job to finish",logging.warning)
		clean_up(filename)
Example #20
def check_upload_file_validity(form, fieldname):
    result = {}
    if not check_upload_file_request(form, fieldname):
        logging.debug(
            'check_upload_file_validity: no request for: ' + fieldname)
        result['success'] = True
        result['exists']  = False
        result['buf'] = None
        return result
    
    buf = attempt_read_uploaded_file(form, fieldname)
    if not buf:
        ryw.give_bad_news(
            'check_upload_file_validity: check failed for: ' + fieldname,
            logging.info)
        result['success'] = False
        result['exists'] = False
        result['buf'] = None
        return result

    logging.debug('check_upload_file_validity: found valid upload request: '+
                  fieldname)
    result['success'] = True
    result['exists'] = True
    result['buf'] = buf
    return result
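The result dict encodes three outcomes: a failed check, no upload requested, and a valid upload. A sketch of a caller, assuming form is a cgi.FieldStorage as elsewhere in these examples (the field name here is hypothetical):

result = check_upload_file_validity(form, 'thumbnail_file')
if not result['success']:
    pass                  # an upload was requested but could not be read
elif not result['exists']:
    pass                  # no upload requested for this field; carry on
else:
    buf = result['buf']   # raw contents of the successfully read upload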
Example #21
def check_partial_completion(tmpdir):
    """returns successFlag, doneList."""
    try:
        ll = os.listdir(tmpdir)
        ctime = {}
        for n in ll:
            ctime[n] = os.path.getctime(os.path.join(tmpdir, n))
    except:
        ryw.give_bad_news(
            "ReadIncomingCDStack.check_partial_completion: " +
            "failed listdir/getctime: " + tmpdir, logging.critical)
        return (False, None)

    latestctime = 0
    latestname = ""
    for n in ll:
        if ctime[n] > latestctime:
            latestctime = ctime[n]
            latestname = n

    donelist = []
    if latestctime != 0:
        logging.debug("check_partial_completion: latestname: " + latestname)
        for n in ll:
            if n != latestname:
                donelist.append(n)

    logging.debug("check_partial_completion: donelist: " + repr(donelist))
    return (True, donelist)
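The heuristic: the entry with the newest ctime may still be in the middle of being written, so everything older is reported as done. A worked trace:

# ll         = ['disc1', 'disc2', 'disc3']
# ctime      = {'disc1': 100, 'disc2': 200, 'disc3': 150}
# latestname = 'disc2'             (newest; presumed still in progress)
# donelist   = ['disc1', 'disc3']  (everything else counts as finished)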
Example #22
def unzip_excerpt_files(auxDir):
    if not os.path.exists(auxDir):
        return True

    exDir = os.path.join(auxDir, 'excerpts')
    if not os.path.exists(exDir):
        return True

    exNames = os.listdir(exDir)
    if len(exNames) == 0:
        return True

    for exName in exNames:
        try:
            exPath = os.path.join(exDir, exName)
            pair = os.path.splitext(exName)
            ext = string.capwords(pair[1])
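            # string.capwords lowercases everything after the first
            # character, so '.ZIP' and '.Zip' both compare equal to '.zip'.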
            if ext != '.zip':
                continue
            su.zipfile_extractall(exPath, exDir)
            logging.debug('unzip_excerpt_files: successfully unzipped ' +
                          exPath)
        except:
            ryw.give_bad_news('unzip_excerpt_files: failed to unzip ' + exPath,
                              logging.error)

    return True
Example #23
    def add_to_search_file(self, meta, cloneVersion=False):
        """adds one to the existing version number."""
        logging.debug('add_to_search_file: ' + repr(meta))
        if not meta.has_key('id'):
            ryw.give_bad_news('add_to_search_file: missing ID...',
                              logging.critical)
            return (False, None)
        
        success,latestVersion = \
            self._SearchFile__get_latest_version(meta['id'])
        if not success:
            return (False, None)

        if not cloneVersion and \
            self._SearchFile__is_same_as_latest(meta, latestVersion):
            ryw.give_news('add_to_search_file: same version.',
                          logging.warning)
            return (True, latestVersion)

        latestVersion += 1
        meta['version'] = latestVersion

        if not self._SearchFile__append_to_search_file(meta):
            return (False, latestVersion)

        self._SearchFile__add_to_memory_index(meta['id'], latestVersion, meta)
        logging.debug('add_to_search_file: success.')
        return (True, latestVersion)
Example #24
def check_local_file(form, fieldName = 'repeat_local_filename'):
    if not form.has_key(fieldName):
        return (True, False, None, False)

    localName = form.getfirst(fieldName, '')
    if localName == '':
        return (True, False, None, False)

    success,isFile,isDir = ryw.is_valid_file_or_dir(
        localName, msg='check_local_file')
    if not success:
        return (False, False, None, False)
    localName = os.path.normpath(localName)

    if isFile:
        success,bytes = ryw.get_file_size(localName)
        if not success:
            return (False, False, None, False)
        if bytes == 0:
            ryw.give_bad_news('check_local_file: zero-sized file: '+
                              localName, logging.error)
            return (False, False, None, False)
        return (True, True, localName, False)

    return (True, True, localName, True)
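The 4-tuple reads as (success, somethingGiven, normalizedName, isDirectory). A sketch of how a caller might unpack it (the branch comments are assumptions about intended use):

ok, given, localName, isDir = check_local_file(form)
if not ok:
    pass          # invalid path or zero-sized file; already reported
elif not given:
    pass          # field absent or empty; nothing local was specified
elif isDir:
    pass          # localName is a directory
else:
    pass          # localName is a non-empty regular file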
Example #25
def check_required_fields(form):
    if not form.has_key('title') and not form.has_key('hindi_title'):
        ryw.give_bad_news(
            'check_required_fields: needs at least one title field filled.',
            logging.error)
        return False
    return True
Example #26
def process_date_time_str(dateTimeStr):
    ryw.db_print_info_browser('process_date_time_str: input: ' +
                              dateTimeStr, 99)
    dateTimeStr = dateTimeStr.replace(' ', ',')
    dateTimeStr = dateTimeStr.replace('-', ',')
    dateTimeStr = dateTimeStr.replace(':', ',')
    dateTimeStr = dateTimeStr + ',0'
    #
    # ugly hack to fix bug for chopping leading zeros.
    #
    dateTimeStr = dateTimeStr.replace(',,', ',0,')
    dateTimeStr = 'datetime.datetime(' + dateTimeStr + ')'
    ryw.db_print_info_browser('process_date_time_str: after replacement: ' +
                              dateTimeStr, 99)

    try:
        dateTime = eval(dateTimeStr)
        #logging.debug('process_date_time: ' + repr(dateTime))
        ryw.db_print_info_browser('process_date_time_str: eval success: ' +
                                  repr(dateTime), 99)
        return repr(dateTime)
    except:
        ryw.db_print_info_browser('process_date_time_str: '+
                                  'eval failed!!!!!', 99)
        ryw.give_bad_news('ryw_upload.process_date_time_str: eval failed: ' +
                          repr(dateTimeStr), logging.error)
        return None
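A worked trace, assuming an upstream step has already chopped leading zeros (which would otherwise be invalid octal literals under Python 2 eval; the ',,' hack then restores the emptied field as 0):

# '2008-10-21 13::59'                  minutes emptied by zero-chopping
# -> '2008,10,21,13,,59'               after the three replace() calls
# -> '2008,10,21,13,,59,0'             ',0' appended for microseconds
# -> '2008,10,21,13,0,59,0'            ',,' -> ',0,' repairs the gap
# -> eval(...) == datetime.datetime(2008, 10, 21, 13, 0, 59)
# -> returns 'datetime.datetime(2008, 10, 21, 13, 0, 59)'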
Example #27
def get_meta_list(reqList, searchFile):
    """broken out of ShowQueue.py, used by ShowQueue, DisplaySelection,
    and ChapterListForm."""

    metaList = []

    for item in reqList:
        try:
            objname, version = item.split('#')
            version = int(version)
        except:
            ryw.give_bad_news('ryw_meta.get_meta_list: ' +
                              'ill-formed line: ' + item,
                              logging.critical)
            continue

        success,d = searchFile.get_meta(objname, version)
        if not success:
            ryw.give_bad_news2(
                'ryw_meta.get_meta_list: get_meta failed ' +
                '(possibly because object has been ' +
                'deleted from objectstore): ' +
                objname + ' ' + str(version), logging.error)
            continue

        metaList.append(d)

    return metaList
Example #28
def get_data_name_mirror(objDataDir, diskRoot, mapDict, itemName):
    if os.path.exists(objDataDir):
        updatadir = objDataDir
        logging.debug('get_data_name_mirror: found under objects: ' +
                      objDataDir)
    else:
        success,dirName,dataDir,auxiDir = ryw_philips.get_map_entry(
            diskRoot, mapDict, itemName)
        if not success:
            return None
        #ryw.give_news('get_data_name_mirror: found data: ' + dirName,
        #              logging.info)
        ryw.give_news3('found data: ' + dirName, logging.info)
        updatadir = dataDir

    try:    
        ll = os.listdir(updatadir)
    except:
        ryw.give_bad_news('get_data_name_mirror: failed to listdir: ' +
                          updatadir, logging.error)
        return None

    if len(ll) == 1 and os.path.isfile(os.path.join(updatadir, ll[0])):
        obdata = os.path.join(updatadir, ll[0])
    else:
        obdata = updatadir

    logging.debug('get_data_name_mirror: got data name: ' + obdata)
    return obdata
Example #29
def make_data_size(meta, dict):
    dict['size_text'] = ''
    dict['data_size_options'] = """
<OPTION SELECTED>MB
<OPTION>KB
<OPTION>B
"""    
    
    if not meta.has_key('bytes'):
        ryw.give_bad_news('make_data_size: no bytes field found.',
                          logging.critical)
        return

    bytes = meta['bytes']
    if bytes < 1024:
        displayNum = bytes
        unit = 'B'
    elif bytes < 1024 * 1024:
        displayNum = bytes / 1024
        unit = 'KB'
    else:
        displayNum = bytes/ 1024 / 1024
        unit = 'MB'

    optStr,hindiStr = make_options_strings(ryw_upload.POSSIBLE_SIZE_UNIT,
                                           unit, noHindi=True)

    dict['size_text'] = str(displayNum)
    dict['data_size_options'] = optStr
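Unit selection relies on Python 2 integer division, so the displayed number is truncated toward zero. A worked example:

# bytes = 5550080                       (5 MB + 300 KB)
# 1024 * 1024 <= bytes, so the MB branch runs:
# displayNum = 5550080 / 1024 / 1024  ->  5    (floor division in Py2)
# unit = 'MB'                         ->  size_text shows '5', unit MB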
Example #30
def copy_an_outgoing_object(srcd, dstd, prefix, bigdata = False):
    try:
        sData,sMeta,sAuxi,sDone,sMdon = ryw.get_store_paths(srcd, prefix)
        dData,dMeta,dAuxi,dDone,dMdon = ryw.get_store_paths(dstd, prefix)

        #
        # first deal with data.
        #
        if not bigdata:
            shutil.copytree(sData, dData)
            #logging.debug('copy_an_outgoing_object: copied data: ' +
            #              sData + ' -> ' + dData)
        else:
            #logging.debug('copy_an_outgoing_object: skipping data: ' +
            #              sData)
            #ryw.give_news('copy_an_outgoing_object: skipping data: ' +
            #              sData, logging.info)
            pass
                
        #
        # now copy metadata.
        #
        shutil.copyfile(sMeta, dMeta)
        #logging.debug('copy_an_outgoing_object: copied metadata: ' +
        #              sMeta + ' -> ' + dMeta)

        #
        # copy the _AUXI directory, skipping big files in them.
        #
        if sAuxi and os.path.exists(sAuxi):
            success = copy_tree_diff_common(sAuxi, dAuxi,
                                            copy_tree_diff_file_repo,
                                            copy_tree_diff_dir_simple)
            if not success:
                raise Exception('copy_tree_diff_common failed.')
            #logging.debug('copy_an_outgoing_object: copied AUXI files: ' +
            #              sAuxi + ' -> ' + dAuxi)

        #
        # place a done flag.
        #
        if not bigdata:
            shutil.copyfile(sDone, dDone)
            #logging.debug('copy_an_outgoing_object: placed DONE flag: ' +
            #              sDone + ' -> ' + dDone)
        else:
            shutil.copyfile(sDone, dMdon)
            #logging.debug('copy_an_outgoing_object: placed MDON flag: ' +
            #              sDone + ' -> ' + dMdon)
            #ryw.give_news('copy_an_outgoing_object: placed MDON flag: ' +
            #              sDone + ' -> ' + dMdon, logging.info)
        success = True
    except:
        ryw.give_bad_news('copy_an_outgoing_object: failed: ' +
                          srcd + ' ' + dstd + ' ' + prefix + ' ' +
                          repr(bigdata), logging.critical)
        success = False

    ryw.cleanup_partial_repo_dir(dstd, [prefix])
    return success