Esempio n. 1
0
def get_paths(objroot, objID, version, meta, repositoryRoot):
    """Return the object's storage paths plus its view path (or None).

    Starts from get_paths1(objroot, objID, version) and appends the
    path of the object under the view root taken from Resources.txt,
    or appends None when the metadata or the view root is unavailable.
    """
    paths = get_paths1(objroot, objID, version)

    if not meta:
        paths.append(None)
        return paths

    # 'in' replaces the deprecated dict.has_key().
    if 'path' not in meta:
        ryw.give_bad_news('DeleteObject.get_paths: missing path attribute: '+
                          repr(meta), logging.error)
        paths.append(None)
        return paths

    path = meta['path']
    try:
        resources = su.parseKeyValueFile(
            os.path.join(repositoryRoot, 'Resources.txt'))
        viewroot = resources['viewroot']
    except Exception:
        # narrowed from a bare except so SystemExit/KeyboardInterrupt
        # are not silently swallowed.
        ryw.give_bad_news('DeleteObject.get_paths: failed to get view root.',
                          logging.critical)
        paths.append(None)
        return paths

    viewpath = os.path.join(viewroot, path)
    paths.append(viewpath)
    logging.debug('DeleteObject.get_paths: ' + repr(paths))
    return paths
Esempio n. 2
0
def main():
    """CGI entry point: queue every object in the search file for download.

    Exits via ryw_upload.quick_exit() on any failure; exit code 0 on
    success.
    """
    ryw.check_logging(os.path.join(RepositoryRoot, "WWW", "logs"), "upload.log")
    logging.debug("SelectAll: entered...")

    ryw_view.print_header_logo()

    name = os.getenv("REMOTE_USER")

    # REMOTE_USER may be unset (None) or empty; both are rejected.
    # 'not name' replaces the 'name == "" or name == None' comparison.
    if not name:
        ryw.give_bad_news("SelectAll: no user name given", logging.error)
        ryw_upload.quick_exit(1)

    queue = os.path.join(RepositoryRoot, "QUEUES", name)

    try:
        resources = su.parseKeyValueFile(os.path.join(RepositoryRoot, "Resources.txt"))
        searchFileName = resources["searchfile"]
    except Exception:
        # narrowed from a bare except.
        ryw.give_bad_news("SelectAll: failed to get search file name from resources.", logging.critical)
        ryw_upload.quick_exit(1)

    success, searchFile = ryw.open_search_file(
        "SelectAll:", os.path.join(RepositoryRoot, "WWW", "logs"), "upload.log", searchFileName, False
    )
    if not success:
        ryw.give_bad_news("SelectAll: failed to open search file.", logging.critical)
        ryw_upload.quick_exit(1)

    if not ProcessDownloadReq.add_all(queue, searchFile):
        ryw.give_bad_news("selectall: addAll failed.", logging.critical)
        ryw_upload.quick_exit(1)

    searchFile.done()
    ryw_upload.quick_exit(0)
def get_resources(tmpDirOption = ''):
    """Read Resources.txt and return the handles needed for outgoing data.

    tmpDirOption, when non-empty, overrides the configured 'tmpout'
    directory.  Returns (True, resources, robotJobsDir, tmpOutDir,
    searchFile, viewRoot, firstRoot, robotPresent) on success, or
    (False, None, None, None, None, None, None, None) on any failure.
    """
    logging.debug('get_resources: entered...')
    try:
        resources = su.parseKeyValueFile(os.path.join(RepositoryRoot,
                                                      'Resources.txt'))
        robotJobsDir = resources['robotsjobdir']

        # the caller-supplied temp dir wins over the configured one.
        if tmpDirOption:
            tmpOutDir = tmpDirOption
        else:
            tmpOutDir = resources['tmpout']
        ryw.give_news2('<BR>outgoing data placed in: ' + tmpOutDir,
                       logging.info)

        searchFile = resources['searchfile']
        viewRoot = resources['viewroot']
        objectstoreroots = resources['objectstore'].split(';')
        firstRoot = objectstoreroots[0]
        robotPresent = ryw.has_robot(resources)
    except Exception:
        # narrowed from a bare except.
        ryw.give_bad_news('get_resources failed.', logging.critical)
        return (False, None, None, None, None, None, None, None)

    logging.debug('get_resources succeeded.')
    # lazy %-style args; the original concatenated the five values with
    # no separators, producing an unreadable log line.
    logging.debug(
        'get_resources: robotJobsDir=%s tmpOutDir=%s searchFile=%s '
        'viewRoot=%s firstRoot=%s',
        robotJobsDir, tmpOutDir, searchFile, viewRoot, firstRoot)
    return (True, resources, robotJobsDir, tmpOutDir, searchFile, viewRoot,
            firstRoot, robotPresent)
Esempio n. 4
0
def get_resources(path):
    """Parse the key/value resources file at *path*; None on failure."""
    try:
        return su.parseKeyValueFile(path)
    except Exception:
        # narrowed from a bare except so SystemExit/KeyboardInterrupt
        # are not swallowed.
        give_bad_news("fatal_error: failed to read resources file", logging.critical)
        return None
Esempio n. 5
0
def get_resources():
    """Return the robot jobs directory from Resources.txt, or '' on failure."""
    # indentation normalized to spaces: the original mixed tabs (body)
    # with spaces (continuation lines), which Python 3 rejects outright.
    logging.debug('get_resources: entered...')
    try:
        resources = su.parseKeyValueFile(os.path.join(RepositoryRoot,
                                                      'Resources.txt'))
        robotJobsDir = resources['robotsjobdir']
    except Exception:
        # narrowed from a bare except.
        ryw.give_bad_news("get_resources failed.", logging.critical)
        return ""
    return robotJobsDir
def get_resources():
    """Parse Resources.txt; return (True, resources) or (False, None)."""
    logging.debug('get_resources: entered...')
    try:
        resources = su.parseKeyValueFile(os.path.join(RepositoryRoot,
                                                      'Resources.txt'))
    except Exception:
        # narrowed from a bare except so SystemExit/KeyboardInterrupt
        # are not swallowed.
        ryw.give_bad_news('get_resources failed.', logging.critical)
        return (False, None)

    logging.debug('get_resources succeeded.')
    return (True, resources)
def get_init_vals(numDiscs=1000):
    """Read robot job dir and tmpin from Resources.txt.

    numDiscs was a hard-coded constant; it is now a defaulted parameter
    (backward-compatible).  Returns (True, numDiscs, resources,
    robotJobsDir, tmpIn) on success, (False, None, None, None, None)
    on failure.
    """
    try:
        resources = su.parseKeyValueFile(os.path.join(RepositoryRoot, "Resources.txt"))
        robotJobsDir = resources["robotsjobdir"]
        tmpIn = resources["tmpin"]
    except Exception:
        # narrowed from a bare except.
        ryw.give_bad_news(
            "get_init_vals: failed to get resources: " + os.path.join(RepositoryRoot, "Resources.txt"), logging.critical
        )
        return (False, None, None, None, None)
    return (True, numDiscs, resources, robotJobsDir, tmpIn)
def get_local_object_store_root():
    """Return the first object-store root listed in Resources.txt, or None.

    The 'objectstore' value is a ';'-separated list; only the first
    entry (the local root) is returned.
    """
    try:
        resources = su.parseKeyValueFile(
            os.path.join(RepositoryRoot, 'Resources.txt'))
        objectstoreroots = resources['objectstore'].split(';')
        return objectstoreroots[0]
    except Exception:
        # narrowed from a bare except.
        ryw.give_bad_news(
            'fatal_error: get_local_object_store_root: failed to access Resources.txt.',
            logging.critical)
        return None
def attempt_make_tmpdir():
    """Attempt to make a temporary directory under the configured 'tmpin'.

    Returns the directory name made, or None when Resources.txt cannot
    be read.  May also return whatever ryw_upload.attempt_make_tmpdir
    returns on its own failure path.
    """
    try:
        resources = su.parseKeyValueFile(os.path.join(RepositoryRoot, "Resources.txt"))
        resTmpin = resources["tmpin"]
    except Exception:
        # narrowed from a bare except.
        give_bad_news("fatal_error: failed to read Resources.txt", logging.critical)
        return None

    tmpdir = ryw_upload.attempt_make_tmpdir(resTmpin)
    ryw.db_print_info_browser("attempt_make_tmpdir: " + repr(tmpdir), 27)
    return tmpdir
Esempio n. 10
0
def check_attributes(values):
    """Check and fix up upload attributes in the *values* dict (mutated).

    Adds a unique 'repositoryuploadtime' stamp when absent, sanitizes
    'path', and stores the first object-store root in 'objectstore'.
    Returns (True, resources) on success, (False, None) on failure.
    """
    if not ryw_uploadObj.check_some_attributes(values):
        return (False, None)

    # generate a unique upload timestamp,
    # like Sep-07-1975---11-05-04-pm---467
    now = datetime.datetime.now()
    date = now.date()
    time = now.time()
    stamp = date.strftime('%b-%d-%Y') + '---' + time.strftime('%I-%M-%S-%p')
    random.seed()
    randomNo = random.randint(0, 999)
    # %d replaces the deprecated (and identical) %u conversion.
    stamp += '---%03d' % randomNo

    # 'in' / 'not in' replace the deprecated dict.has_key() throughout.
    if 'repositoryuploadtime' not in values:
        values['repositoryuploadtime'] = stamp

    # sanitize path: make it relative, rooted at the creator's name,
    # and never ending in a bare separator.
    if 'path' in values:
        separator = os.sep
        path = os.path.normpath(values['path'])
        if path.startswith(separator):
            path = path[1:]
        if not path.startswith(values['creator']):
            path = os.path.join(values['creator'], path)
        if path.endswith(separator):
            path += 'anobject'
        values['path'] = path
        logging.debug('uploadobject: sanitized path: '+path)

    # need resources for objectstoreroot and viewroot
    try:
        resources = su.parseKeyValueFile(
            os.path.join(RepositoryRoot, 'Resources.txt'))
        objectstoreroots = resources['objectstore'].split(';')
        objectstoreroot = objectstoreroots[0]
    except Exception:
        # narrowed from a bare except.
        ryw.give_bad_news(
            'fatal_error: uploadobject: failed to access Resources.txt.',
            logging.critical)
        return (False, None)
    values['objectstore'] = objectstoreroot

    return (True, resources)
Esempio n. 11
0
def obj_store_size_inKB_not_used(tmpdir=""):
    """Copy the outgoing object store to a temp dir and report its size.

    Returns (True, kB, tmpStoreDir, tmpStoreName) on success, or
    (False, None, None, None) on any failure.
    """
    if tmpdir:
        ryw.give_news2('temp objectstore copied to: ' + tmpdir + '<BR>',
                       logging.info)
    ryw.give_news2('computing outgoing objectstore size...',
                   logging.info)

    try:
        resources = su.parseKeyValueFile(os.path.join(RepositoryRoot,
                                                      'Resources.txt'))
        objectstoreroots = resources['objectstore'].split(';')
        firstRoot = objectstoreroots[0]
        # caller-supplied temp dir wins over the configured one.
        if tmpdir:
            tmpOutDir = tmpdir
        else:
            tmpOutDir = resources['tmpout']
    except Exception:
        # narrowed from a bare except.
        ryw.give_bad_news('obj_store_size_inKB: get_resources failed.',
                          logging.critical)
        return (False, None, None, None)

    tmpStoreDir,objPrefix = ryw_upload.attempt_just_make_tmpdir(
        tmpOutDir, 'outgoing_obj_store_', '')
    if not tmpStoreDir:
        ryw.give_bad_news('obj_store_size_inKB: failed to make tmpdir: ' +
                          tmpOutDir, logging.critical)
        return (False, None, None, None)

    tmpStoreName = os.path.join(tmpStoreDir, 'outgoing_store')

    try:
        success = ryw_copytree.copy_tree_diff_repo(firstRoot, tmpStoreName)
        if not success:
            # a proper Exception replaces the original string exception
            # ("raise 'copy_tree_diff_repo failed.'"), which is a
            # TypeError in modern Python.
            raise Exception('copy_tree_diff_repo failed.')
    except Exception:
        ryw.give_bad_news('obj_store_size_inKB: copy_tree_diff_repo failed: '+
                          firstRoot + ' -> ' + tmpStoreName, logging.critical)
        return (False, None, None, None)

    kB = ryw_disc.getRecursiveSizeInKB(tmpStoreName)
    logging.debug('obj_store_size_inKB: ' + tmpStoreName + ' = ' + str(kB))

    ryw.give_news2 (str(kB) + ' KB<BR>', logging.info)
    return (True, kB, tmpStoreDir, tmpStoreName)
Esempio n. 12
0
def main():
    """CGI entry point: empty the temporary upload/download directories.

    Clears tmpin (chmod'ing first), tmpout, and the robot jobs dir
    (keeping its Status and Log entries), then exits 0.  Exits 1 when
    Resources.txt cannot be parsed.
    """
    ryw.check_logging(os.path.join(RepositoryRoot, "WWW", "logs"), "upload.log")
    logging.info("ClearTmp: entered...")

    ryw_view.print_header_logo()

    try:
        resources = su.parseKeyValueFile(os.path.join(RepositoryRoot, "Resources.txt"))
        resTmpin = resources["tmpin"]
        resTmpOut = resources["tmpout"]
        robotDir = resources["robotsjobdir"]
    except Exception:
        # narrowed from a bare except.
        ryw.give_bad_news("failed to parse resource file.", logging.critical)
        sys.exit(1)

    ryw.empty_tmp_dir(resTmpin, chmodFirst=True)
    ryw.empty_tmp_dir(resTmpOut)
    ryw.empty_tmp_dir(robotDir, skipList=["Status", "Log"])

    ryw_view.print_footer()
    sys.exit(0)
def process_args(optlist, args):
    """Parse command-line metadata arguments.

    optlist: (option, value) pairs; '-p' selects pickled metadata
    instead of the key/value text format.  args: [meta, data, auxDir].
    Returns (True, data, values, auxDir) or (False, None, None, None)
    when the metadata file cannot be loaded.
    """
    pk = False
    for option, value in optlist:
        if option == '-p':
            pk = True

    meta = args[0]
    data = args[1]
    auxDir = args[2]

    try:
        if pk:
            values = su.pickload(meta)
        else:
            values = su.parseKeyValueFile(meta)
    except Exception:
        # narrowed from a bare except.
        ryw.give_bad_news('fatal_error: failed to get metadata: meta, data: '+
                          meta + ' ' + data, logging.critical)
        return (False, None, None, None)

    return (True, data, values, auxDir)
    try:
        pwdfile = os.path.join(RepositoryRoot, 'Passwords')
##        lines = []
##        for line in open(pwdfile).readlines():
##            name, rest = line.split(':', 1)
##            if name != username:
##                lines.append(line)
##
##        os.rename(pwdfile, pwdfile + '.OLD')
##        f = open(pwdfile, 'w')
##        for line in lines:
##            f.write(line)
##        f.close()

        try:
            resources = su.parseKeyValueFile(os.path.join(RepositoryRoot, 'Resources.txt'))
        except:
            print '<P>Could not read resources file correctly'
            sys.exit(1)

        apachepath = resources['apachepath']
        htpasswdpath = os.path.join(apachepath, 'bin', 'htpasswd.exe')

##        command = '"%s" -b "%s" "%s" "%s"' % (apachepath, pwdfile, username, passwd)
##        print 'Executing command: %s' % command
##    # replace \ by \\
##        command.replace('\\', '\\\\')
##        os.system(command)

##        print '<P>spawning'
        os.spawnl(os.P_WAIT, htpasswdpath, 'htpasswd.exe', '-b', pwdfile, username, passwd)