def getAddress(name):
    try:
        eps = su.pickload(os.path.join(RepositoryRoot, 'Endpoints.pk'))
        ep = eps[name]
    except:
        ryw.give_bad_news('getAddress: failed, name: ' + name,
                          logging.warning)
        return None
    return ep
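Every example on this page calls su.pickload (and Example #6 also calls su.pickdump) from the repository's support module, which is not reproduced here. As a point of reference only, here is a minimal sketch of what such helpers presumably look like, assuming they are thin wrappers around the standard pickle module; the real su module may differ:

import pickle

def pickload(path):
    # assumed behavior: unpickle and return the object stored at 'path'
    f = open(path, 'rb')
    try:
        return pickle.load(f)
    finally:
        f.close()

def pickdump(obj, path):
    # assumed behavior: pickle 'obj' into 'path', overwriting any existing file
    f = open(path, 'wb')
    try:
        pickle.dump(obj, f)
    finally:
        f.close()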
Example #2
def get_map(tmpImgDir):
    path = os.path.join(tmpImgDir, 'data_map')
    try:
        mapDict = su.pickload(path)
    except:
        return None

    logging.debug('get_map: done reading map file: ' + path)
    return mapDict
Example #3
def get_metadata_mirror(metaName):
    try:
        meta = su.pickload(metaName)
    except:
        ryw.give_bad_news('get_metadata_mirror: failed to load metadata: ' + metaName,
                          logging.error)
        return None
        
    logging.debug('get_metadata_mirror: got metadata ' + metaName + ' ' + repr(meta))
    return meta
Example #4
def make_DVDobject_list(item, itempath, listSoFar):

    try:
        meta = su.pickload(itempath[item][1])
    except:
        ryw.give_bad_news('make_DVDobject_list: failed to load metadata: '+
                          itempath[item][1], logging.critical)
        return

    listSoFar.append((meta, item, itempath[item]))
Example #5
def get_metadata(uploaddir, obpref):
    metaName = os.path.join(uploaddir, obpref + '_META')
    try:
        meta = su.pickload(metaName)
    except:
        ryw.give_bad_news('get_metadata: failed to load metadata: ' + metaName,
                          logging.error)
        return None
        
    logging.debug('get_metadata: got metadata ' + metaName + ' ' + repr(meta))
    return meta
Example #6
def process_download_requests(mydir, username):
    dlrq = os.path.join(mydir, 'downloadrequestqueue')
    if not os.path.exists(dlrq):
        logging.debug('process_download_requests: no incoming queue found.')
        return True

    try:
        newrq = su.pickload(dlrq)
    except:
        ryw.give_bad_news(
            'process_download_requests: failed to load new queue: ' + dlrq,
            logging.error)
        return False

    logging.debug('process_download_requests: found new queue: ' +
                  repr(newrq))

    oldrqfile = os.path.join(RepositoryRoot, 'QUEUES', username)

    oldrq = set([])
    if os.path.exists(oldrqfile):
        try:
            oldrq = su.pickload(oldrqfile)
        except:
            ryw.give_bad_news(
                'process_download_requests: failed to load old queue: ' +
                oldrqfile, logging.error)
            oldrq = set([])
    
    newrq = newrq.union(oldrq)
    logging.debug('process_download_requests: new queue: ' + repr(newrq))

    try:
        su.pickdump(newrq, oldrqfile)
    except:
        ryw.give_bad_news(
            'process_download_requests: failed to write new queue back: ' +
            oldrqfile, logging.error)
        return False
    return True
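The core of process_download_requests is the set union that merges the incoming queue into the user's stored queue, so duplicate requests collapse into a single entry. A small self-contained illustration of that merge step, using plain pickle and made-up request names and file path in place of the su helpers:

import pickle

old_queue = set(['obj1#1', 'obj2#1'])    # hypothetical requests already on disk
new_queue = set(['obj2#1', 'obj3#2'])    # hypothetical incoming requests

merged = new_queue.union(old_queue)      # 'obj2#1' appears only once in the result
with open('/tmp/example_queue.pk', 'wb') as f:
    pickle.dump(merged, f)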
Example #7
def read_list(rfpath):
    if os.path.exists(rfpath):
        try:
            reqs = su.pickload(rfpath)
        except:
            ryw.give_bad_news('ShowQueue: failed to load queue: ' + rfpath,
                              logging.critical)
            return None
    else:
        reqs = set('')

    l = list(reqs)
    l.sort()
    return l
Example #8
    def read_file(self, fullFileName):
        """called by read_list_and_merge() below."""

        if not os.path.exists(fullFileName):
            self.chapterList = None
            return True

        try:
            self.chapterList = su.pickload(fullFileName)
        except:
            ryw.give_bad_news('ChapterList.read_file: pickload failed: ' +
                              fullFileName, logging.error)
            return False

        ryw.db_print2('ChapterList.read_file success: ' + fullFileName, 41)
        ryw.db_print2('ChapterList.read_file: ' + repr(self.chapterList), 41)
        return True
Example #9
def get_pathsFunc(name):
    username = name
    logging.debug('WriteCDs: username: ' + username)
    rfpath = os.path.join(RepositoryRoot, 'QUEUES', username)

    if not os.path.exists(rfpath):
        return (True, username, rfpath, set([]))

    try:
        reqs = su.pickload(rfpath)
    except:
        ryw.give_bad_news('get_paths: failed to load reqs: ' +
                          rfpath, logging.critical)
        return (False, None, None, None)

    logging.debug('get_paths: found queue, reqs: '+repr(reqs))
    return (True, username, rfpath, reqs)
Example #10
def get_reqs(rfpath):
    if not os.path.exists(rfpath):
        #logging.debug('get_reqs: no existing request found.')
        return set('')

    if not ryw.is_valid_file(rfpath, msg='get_reqs'):
        ryw.give_bad_news('get_reqs: not a valid file: ' + rfpath,
                          logging.error)
        return set('')

    try:
        reqs = su.pickload(rfpath)
        logging.debug('get_reqs: get_reqs succeeded.')
        return reqs
    except:
        ryw.give_bad_news(
            'fatal_error: get_reqs: failed to load requests: ' + rfpath,
            logging.critical)
        return None
Example #11
def process_args(optlist, args):
    pk = False
    for i in optlist:
        option, value = i
        if option == '-p':
            pk = True

    meta = args[0]
    data = args[1]
    auxDir = args[2]

    try:
        if pk:
            values = su.pickload(meta)
        else:
            values = su.parseKeyValueFile(meta)
    except:
        ryw.give_bad_news('fatal_error: failed to get metadata: meta, data: '+
                          meta + ' ' + data, logging.critical)
        return (False, None, None, None)

    return (True, data, values, auxDir)
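process_args expects optlist as a list of (option, value) pairs, which is the format produced by the standard getopt module. A hedged sketch of how a caller might assemble its arguments (the option string and command line below are assumptions, not taken from the original driver script):

import sys, getopt

# e.g. invoked as: script.py -p metadata.pk datafile auxdir
optlist, args = getopt.getopt(sys.argv[1:], 'p')
ok, data, values, auxDir = process_args(optlist, args)
if not ok:
    sys.exit(1)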
Example #12
    def __read_reverse_lists_file(self):
        """lock held if instantiate for write.
        lock released if instantiate for read.
        skipLk is True when dealing with merging
        incoming ReverseLists file"""
        
        assert(self.reverseListsFile != None)

        if not ryw.is_valid_file(self.reverseListsFile,
                                 msg='__read_reverse_lists_file'):
            ryw.give_bad_news('__read_reverse_lists_file: '+
                              'not a valid file: ' + self.reverseListsFile,
                              logging.error)
            return False

        if not self.skipLock:
            if not self._ReverseLists__lock():
                self._ReverseLists__unlock()
                return False

        try:
            self.reverseDict = su.pickload(self.reverseListsFile)
        except:
            ryw.give_bad_news(
                '__read_reverse_lists_file: ' +
                'failed to open reverse lists file: ' +
                self.reverseListsFile, logging.critical)
            self._ReverseLists__unlock()
            return False

        if not self.skipLock:
            if not self.exclusive:
                self._ReverseLists__unlock()

        #logging.debug('ReverseLists.__read_reverse_lists_file: ' +
        #              repr(self.reverseDict))
        return True
Example #13
def get_meta(objroot, objname, version):
    """uses the file system to get meta data instead of getting it from
    either the SearchFile or SearchServer.  should be a bit faster if
    there is no need to read all the metadata.  just a performance issue:
    should not be a robustness issue now that we have gotten rid of the
    SearchServers."""

    logging.debug("ryw.get_meta: " + objroot + " " + objname + " " + str(version))

    try:
        # paths = objectstore.nameversiontopaths(objroot, objname, version)
        paths = objectstore.name_version_to_paths_aux(objroot, objname, version)

        if not good_repo_paths(paths):
            logging.warning("ryw.get_meta: good_repo_paths failed.")
            return (False, None)

        metapath = paths[1]
        meta = su.pickload(metapath)
        logging.debug("ryw.get_meta: success.")
        return (True, meta)
    except:
        logging.warning("ryw.get_meta: failed.")
        return (False, None)
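Since get_meta returns a (success, metadata) pair instead of raising, callers check the flag before using the result. A minimal usage sketch (the object root, object name, and version are placeholders, and the metadata is assumed to unpickle to a dict):

success, meta = get_meta('/repository/objects', 'some_object_id', 1)
if success:
    logging.debug('get_meta example: ' + repr(meta.get('title')))
else:
    logging.warning('get_meta example: lookup failed.')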
Example #14
    def read(self, file_name):
        requests = su.pickload(file_name)
        for request in requests:
            self.add(request)
        self.file_name = file_name
Example #15
##        command.replace('\\', '\\\\')
##        os.system(command)

##        print '<P>spawning'
        os.spawnl(os.P_WAIT, htpasswdpath, 'htpasswd.exe', '-b', pwdfile, username, passwd)
        print '<P><FONT COLOR=green>Password updated successfully.</FONT>'
    except:
        print '<P>Password could not be updated correctly.'
        print '<BR>Look at files "%s" and "%s".' % (pwdfile, pwdfile + '.OLD')
        sys.exit(1)

# read/write Endpoints file
endpointfile = os.path.join(RepositoryRoot, 'Endpoints.pk')
epupdated = False
try:
    ep = su.pickload(endpointfile)
except:
    ep = {}
    epupdated = True

if address:
    ep[username] = address.replace('\r', '')
    epupdated = True
    print '<P><FONT COLOR=red>Updating address.</FONT>'
else:
    if ep.has_key(username):
        address = ep[username]

if epupdated:
    try:
##        print '<P>Before dumping'