Example 1
def exe_extract_windows(argv):
    topic_path, judge_path, text_db_path, windows_db_path = argv;
    text_db = bsddb.hashopen(text_db_path);
    window_db = bsddb.hashopen(windows_db_path, 'w');
    judge_file = QRelFile(judge_path);
    topics = StandardFormat().read(topic_path);
    topic_chain = TextChain([TextTokenizer(word_tokenize), TextStopRemover('data/stoplist.dft'), TextStemmer(EnglishStemmer()), TextTokenNormalizer()]); 
    sentence_chain = TextChain([TextTokenizer(word_tokenize), TextStemmer(EnglishStemmer()), TextTokenNormalizer()]);
    for topic_id, topic_str in topics.items():
        print topic_id;
        sys.stdout.flush();
        topic = TextPiece(topic_str);
        topic_chain.work(topic);
        if not judge_file.has_key(topic_id):
            continue;
        docnos = judge_file[topic_id].keys();
        for docno in docnos:
            if not is_cluewebB(docno):
                continue;
            doc_text = text_db[docno];
            window_candidates = match_window(topic, doc_text, sentence_chain);
            sentences = map(lambda text_piece: text_piece.text, window_candidates);
            text = '\n'.join(sentences);
            window_db[docno] = text.encode('utf8');
    window_db.close();
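
All of the examples in this listing revolve around bsddb.hashopen, whose second argument selects the open mode: 'r' read-only, 'w' read-write, 'c' create the file if missing, 'n' always start a new empty database. A minimal sketch of the full round trip (file name hypothetical), assuming Python 2, where the bsddb module still ships with the standard library:

import bsddb

db = bsddb.hashopen('/tmp/example.db', 'c')  # create on first use
db['key'] = 'value'                          # keys and values must be byte strings
db.sync()                                    # flush to disk without closing
print db['key']
db.close()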
Example 2
def exe_build_train(argv):
#1. create the workers;
    judge_path, topic_path, word_stat_path, doc_path, window_path, out_path = argv;
    global judge_file, topics, doc_db, window_db, word_stat, ranker;
    judge_file = QRelFile(judge_path);
    topics = StandardFormat().read(topic_path);
    doc_db = bsddb.hashopen(doc_path);
    window_db = bsddb.hashopen(window_path);
    word_stat = load_word_stat(word_stat_path);
#    aggregators = map(lambda k: Aggregator(k), K_options);
#    ranker = DistanceWindowRanker(CosTextScorer(), DocumentModelFactory(word_stat),aggregators);
    ranker = RetrievalWindowRanker(CosTextScorer(), DocumentModelFactory(word_stat));

#2. build the training data;
#    p = Pool(4);
    topic_ids = judge_file.keys();
#    docs_groups = p.map(build_train, topic_ids);
    docs_groups = map(build_train, topic_ids);
    assert len(docs_groups) == len(topic_ids);

#3. write out the training data
    writer = open(out_path, 'w');
    for i in xrange(len(topic_ids)):
        topic_id = topic_ids[i];
        docs = docs_groups[i];
        for doc in docs:
            docno = doc.docno;
            judge = judge_file[topic_id][docno];
            for scores, sentence_id in doc.score_windows:
                score_str = ','.join(map(str, scores));
                writer.write('%s %s %s %d %s\n' % (topic_id, docno, judge, sentence_id, score_str));    
    writer.close();
Example 3
def index(request):
    address=[]

    
    database1=bsddb.hashopen(r'D:\Python2.7\hw\database\main.db','c')
    database2=bsddb.hashopen(r'D:\Python2.7\hw\database\website.db','c')
    words=[]
    
    if request.POST.has_key('q'):
        key=unicode(request.POST['q'])
        words=key.split()
    else:
        words=[]
    
    for i in range(0,305):
        tmpaddr={}
        f=open(r'D:\Python2.7\hw\html'+'\\'+str(i)+'.html','r')
        html_content=f.read()
        f.close()
        num=len(words)
        for j in words:
            flag=re.search(j,html_content,re.I)
            if flag:
                num-=1
        if num==0:
            tmpaddr['index']=database1[str(i)]
            tmpaddr['url']=database2[str(i)]
            address.append(tmpaddr)
    database1.close()
    database2.close()
    
    return render_to_response('searchResult.html', {'address': address})
Example 4
def exe_view_train(argv):
    train_path, window_path, doc_path = argv;
    from Learner import TrainFile;
    train_file = TrainFile();
    train_file.load(train_path);
    window_db = bsddb.hashopen(window_path);
    doc_db = bsddb.hashopen(doc_path);

    num = 1000;
    key = train_file.keys()[num];
    qid, docno, rel, sid = key.split();
    doc_text = doc_db[docno];
    print qid, docno;
    print doc_text;
    print '=' * 50;
    windows = window_db[docno].split('\n'); 
    window_scores = [];
    for key in train_file.keys()[num:]:
        qid, curr_docno, rel, sid = key.split();
        if curr_docno != docno:
            break;
        window_text = windows[int(sid)]; 
        value = train_file[key];
        window_scores.append((value, window_text));
    window_scores.sort();
    for score, window_text in window_scores:
        print score, window_text;
Example 5
def main():
  mobwrite_core.CFG.initConfig(ROOT_DIR + "lib/mobwrite_config.txt")
  if STORAGE_MODE == BDB:
    import bsddb
    global texts_db, lasttime_db
    texts_db = bsddb.hashopen(DATA_DIR + "/texts.db")
    lasttime_db = bsddb.hashopen(DATA_DIR + "/lasttime.db")
  
  if STORAGE_MODE == REDIS:
    import redis
    global redis_db

    if os.environ.get("DOTCLOUD_PROJECT") == "rockonline":
      redis_db = redis.StrictRedis(host='rockonline-N5USRDTV.dotcloud.com', port=22492, password='******')
      mobwrite_core.LOG.info("Connected to PRODUCTION Redis")
    else:
      redis_db = redis.StrictRedis(host='localhost', port=6379)
      mobwrite_core.LOG.info("Connected to DEVELOPMENT Redis")

  # Start up a thread that does timeouts and cleanup
  thread.start_new_thread(cleanup_thread, ())

  port = int(os.environ.get("PORT_MOBWRITE", 3017))
  mobwrite_core.LOG.info("Listening on port %d..." % port)
  s = SocketServer.ThreadingTCPServer(("", port), DaemonMobWrite)
  try:
    s.serve_forever()
  except KeyboardInterrupt:
    mobwrite_core.LOG.info("Shutting down.")
    s.socket.close()
    if STORAGE_MODE == BDB:
      texts_db.close()
      lasttime_db.close()
    if STORAGE_MODE == REDIS:
      redis_db.connection_pool.disconnect()
Example 6
def main():
    print "Pickle is available."
    db = dumbdbm.open("dumbdb", "c")
    db["1"] = "1"
    db.close()
    dbstr = whichdb.whichdb("dumbdb")
    if dbstr:
        print "Dumbdbm is available."
    else:
        print "Dumbdbm is not available."

    db = dbhash.open("dbhash", "c")
    db["1"] = "1"
    db.close()
    dbstr = whichdb.whichdb("dbhash")
    if dbstr == "dbhash":
        print "Dbhash is available."
    else:
        print "Dbhash is not available."

    if bsddb is None:
        dbstr = ""
    else:
        db = bsddb.hashopen("bsddb3", "c")
        db["1"] = "1"
        db.close()
        dbstr = whichdb.whichdb("bsddb3")
    if dbstr == "dbhash":
        print "Bsddb[3] is available."
    else:
        print "Bsddb[3] is not available."

    print

    hammie = get_pathname_option("Storage", "persistent_storage_file")
    use_dbm = options["Storage", "persistent_use_database"]
    if not use_dbm:
        print "Your storage %s is a: pickle" % (hammie,)
        return

    if not os.path.exists(hammie):
        print "Your storage file does not exist yet."
        return
    db_type = whichdb.whichdb(hammie)
    if db_type == "dbhash":
        # could be dbhash or bsddb3
        # only bsddb3 has a __version__ attribute - old bsddb module does not
        if hasattr(bsddb, '__version__'):
            try:
                db = bsddb.hashopen(hammie, "r")
            except bsddb.error:
                pass
            else:
                db.close()
                print "Your storage", hammie, "is a: bsddb[3]"
                return
    elif db_type is None:
        print "Your storage %s is unreadable." % (hammie,)
    print "Your storage %s is a: %s" % (hammie, db_type)
Example 7
 def mkDB (self):
   path= self.path
   self.debug (1, "opening %s-data.bsddb" % path)
   self.data= Monitor (hashopen ("%s-data.bsddb" % path, 'c'), 'data')
   self.debug (1, "opening %s-metadata.bsddb" % path)
   self.meta= Monitor (hashopen ("%s-metadata.bsddb" % path, 'c'), 'meta')
   # per inode locks. any action over an inode must acquire
   self.locks= Monitor ({}, 'locks')
Example 8
 def mkDB(self):
     path = self.path
     self.debug(1, "opening %s-data.bsddb" % path)
     self.data = Monitor(hashopen("%s-data.bsddb" % path, 'c'), 'data')
     self.debug(1, "opening %s-metadata.bsddb" % path)
     self.meta = Monitor(hashopen("%s-metadata.bsddb" % path, 'c'), 'meta')
     # per inode locks. any action over an inode must acquire
     self.locks = Monitor({}, 'locks')
Example 9
def main():
    print("Pickle is available.")
    db = dbm.dumb.open("dumbdb", "c")
    db["1"] = "1"
    db.close()
    dbstr = dbm.whichdb("dumbdb")
    if dbstr:
        print("Dumbdbm is available.")
    else:
        print("Dumbdbm is not available.")
    db = dbm.bsd.open("dbhash", "c")
    db["1"] = "1"
    db.close()
    dbstr = dbm.whichdb("dbhash")
    if dbstr == "dbhash":
        print("Dbhash is available.")
    else:
        print("Dbhash is not available.")
    if bsddb is None:
        dbstr = ""
    else:
        db = bsddb.hashopen("bsddb3", "c")
        db["1"] = "1"
        db.close()
        dbstr = dbm.whichdb("bsddb3")
    if dbstr == "dbhash":
        print("Bsddb[3] is available.")
    else:
        print("Bsddb[3] is not available.")
    print()
    hammie = get_pathname_option("Storage", "persistent_storage_file")
    use_dbm = options["Storage", "persistent_use_database"]
    if not use_dbm:
        print("Your storage %s is a: pickle" % (hammie,))
        return
    if not os.path.exists(hammie):
        print("Your storage file does not exist yet.")
        return
    db_type = dbm.whichdb(hammie)
    if db_type == "dbhash":
        if hasattr(bsddb, '__version__'):
            try:
                db = bsddb.hashopen(hammie, "r")
            except bsddb.error:
                pass
            else:
                db.close()
                print("Your storage", hammie, "is a: bsddb[3]")
                return
    elif db_type is None:
        print("Your storage %s is unreadable." % (hammie,))
    print("Your storage %s is a: %s" % (hammie, db_type))
Example 10
def Intstid2Intstname(l_intstid):
    import bsddb
    l_intstid = str(l_intstid)
    nidb = bsddb.hashopen('name2id.db')
    indb = bsddb.hashopen('id2name.db')
    if l_intstid in indb:
        return indb[l_intstid]
    p = nidb.first()
    while 1:
        if p[1] == l_intstid:
            indb[l_intstid] = p[0]
            return p[0]
        try: p = nidb.next()
        except Exception, e: return 'unknown'
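
The first()/next() pair used above is the legacy cursor interface: first() positions on one record, next() advances, and stepping past the last record raises an exception, which is how the scan terminates. A minimal sketch of that full-scan idiom (hypothetical file name):

import bsddb

db = bsddb.hashopen('scan-demo.db', 'c')
try:
    key, value = db.first()
    while 1:
        # ... process (key, value) here ...
        key, value = db.next()
except (KeyError, bsddb.error):
    pass  # reached the end of the database, or it was empty
db.close()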
Example 11
def _is_bsd_hashdb(dbpath):
    """
    FIXME: Is this enough to check whether the given file ``dbpath`` is an RPM DB file?
    """
    return True

    try:
        if bsddb is None:
            return True  # bsddb is not available in python3.

        bsddb.hashopen(dbpath, 'r')
    except:
        return False

    return True
Example 12
    def poedit_tm_import(self):
        """Attempt to import the Translation Memory used in KBabel."""
        if bsddb is None or not hasattr(self, "poedit_database_path"):
            return

        # import each language separately
        for lang in self.poedit_languages:
            strings_db_file = path.join(self.poedit_database_path, lang,
                                        'strings.db')
            translations_db_file = path.join(self.poedit_database_path, lang,
                                             'translations.db')
            if not path.exists(strings_db_file) or not path.exists(
                    translations_db_file):
                continue
            sources = bsddb.hashopen(strings_db_file, 'r')
            targets = bsddb.rnopen(translations_db_file, 'r')
            for source, str_index in sources.iteritems():
                unit = {"context": ""}
                # the index is a four byte integer encoded as a string
                # was little endian on my machine, not sure if it is universal
                index = struct.unpack('i', str_index)
                target = targets[index[0]][:-1]  # null-terminated
                unit["source"] = _prepare_db_string(source)
                unit["target"] = _prepare_db_string(target)
                self.tmdb.add_dict(unit, "en", lang, commit=False)
            self.tmdb.connection.commit()

            logging.debug('%d units migrated from Poedit TM: %s.' %
                          (len(sources), lang))
            sources.close()
            targets.close()
            self.migrated.append(_("Poedit's Translation Memory: %(database_language_code)s") % \
                    {"database_language_code": lang})
Example 13
 def __init__(self, filename):
     self.filename = filename
     self.bytesPut = 0
     self.bytesGotten = 0
     try:
         self.kvstore = bsddb.hashopen(self.filename)
     except:
         # The file failed to load, so delete it and try again.
         try:
             os.unlink(filename)
         except:
             pass
         self.kvstore = bsddb.hashopen(self.filename)
Example 14
 def nuke(self):
     self.kvstore.close()
     try:
         os.unlink(self.filename)
     except:
         pass
     self.kvstore = bsddb.hashopen(self.filename)
Example 15
 def __init__(self):
     dirname = "./data/"
     dbnames = glob.glob(dirname+"*.db")
     for name in dbnames:
         if os.path.basename(name) == "card_images.db":
             dbnames.remove(name)
             break
     self._dbs = []
     for filename in dbnames:
         self._dbs.append(bsddb.hashopen(filename))
     #self._txtcards = set(os.path.basename(c) for c in glob.glob("./data/cards/*"))
     self._txts = {}
     for dirpath, dirnames, filenames in os.walk("./data/cards/", topdown=False):
         for filename in filenames:
             file = open(os.path.join(dirpath, filename))
             current = []
             for line in file:
                 if not line.startswith(CARD_DELIM): current.append(line.rstrip())
                 else:
                     name = line[line.index(" ")+1:].strip()
                     if not name in self._txts:
                         self._txts[name] = '\n'.join(current)
                     current = []
             file.close()
     self._invalid = set()
Example 16
 def __del__(self):
     shelve.BsdDbShelf.sync(self)
     self.dbhandle.close()
     if self._indexes and isinstance(self._indexes, dict):
         indexfile = bsddb.hashopen(self.dbname+".idx", 'c')
         for indexname, indexes in self._indexes.items():
             indexfile[indexname] = indexes
Example 17
    def load(self):
        """
        Load all data from backend
        """
        pisiprogress.getCallback().verbose("Evolution: Loading")
        pisiprogress.getCallback().progress.push(0, 100)
        file = bsddb.hashopen(self._path)
        pisiprogress.getCallback().update("Loading")
        amount = len(file.keys())
        i = 0
        for key in file.keys():
            data = file[key]
            #            print data
            if not data.startswith("BEGIN:VCARD"):
                continue
            comps = vobject.readComponents(
                data[: len(data) - 1]
            )  # there is some problem with a trailing white space in each entry
            for x in comps:
                #                print x.n.value.given

                atts = extractVcfEntry(x)
                id = contacts.assembleID(atts)
                c = contacts.Contact(id, atts)
                self._allContacts[id] = c
                self._rawData[id] = x
                self._edsIDs[id] = key
                i += 1
                pisiprogress.getCallback().progress.setProgress((i * 100) / amount)
                pisiprogress.getCallback().update("Loading")
        pisiprogress.getCallback().progress.drop()
Example 18
def createHash(hashfile,datalist,verbose):
    hashdb = bsddb.hashopen(hashfile,"c")
    flist = open(datalist,"r")
    for f in flist:
        f=f.strip()
        if f != "":
            if(verbose):
                print "Start to processs the file : %s" % (f)
        
            datafile = codecs.open(f,"r","UTF-8")
            count = 0
            for line in datafile:
                record=line.strip()
                if record != "":
                    values=record.split("\t")
                    if len(values) > 3:
                        value= values[1]+"\t"+values[3]
                        hashdb[values[0].encode("UTF-8")]= value.encode("UTF-8")
                    else:
                        errmsg="Error line value: %s " % line
                        print errmsg.encode("UTF-8")
                count += 1
                if (verbose and count % 100000==0):
                    print "Have processed %d lines" % (count)
            datafile.close()
    flist.close()
    return hashdb
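
As the encode("UTF-8") calls above suggest, a bsddb database stores only byte strings; unicode objects must be encoded on the way in and decoded on the way out. A short sketch of that round trip (hypothetical file name):

# -*- coding: utf-8 -*-
import bsddb

db = bsddb.hashopen('utf8-demo.db', 'n')
key, value = u'clé', u'valeur'
db[key.encode('utf-8')] = value.encode('utf-8')
print db[key.encode('utf-8')].decode('utf-8')
db.close()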
Example 19
    def __init__(self, name, path="", unique=True):
        #
        #  Constructs new DiskQueueManager
        #
        #  @param name : name of the queue
        #  @param path : path where databases will be stored
        #  @param unique : if set to False, disables the unique capability;
        #                  add() then behaves exactly like set()
        #

        #  @param ext_tree : file extension of tree db
        #  @param ext_hash : file extension of hash db

        ext_tree = ".tree.db"
        ext_hash = ".hash.db"

        self.name = name

        file_md5 = self._hash(name)

        self.file_tree = path + file_md5 + ext_tree
        self.file_hash = path + file_md5 + ext_hash

        self.db_tree = bsddb.btopen(self.file_tree, "c")
        self.db_hash = bsddb.hashopen(self.file_hash, "c")

        self.new_key_id = 0L

        self.unique = unique
Example 20
def dataliststore(temp_data_list, TwitterWorkDB, Data_DB_Path):
    datadblock = FileLock(Data_DB_Path)
    rowlist = []
    worklist = []
    finishlist = []
    for temp_data in temp_data_list:
        buff = ''
        buff += str(temp_data[1])+'\t'
        if temp_data[2]!=None: buff += str(temp_data[2])
        buff += '\t'
        if temp_data[3]!=None: buff += str(temp_data[3])
        buff += '\t'
        if temp_data[4]!=None: buff += str(temp_data[4])
        buff += '\t'
        if temp_data[5]!=None: buff += temp_data[5].encode('ascii','replace')
        buff += '\t'
        buff += json.dumps(temp_data[6])+'\t'
        buff += json.dumps(temp_data[7])
        rowlist.append([temp_data[0],buff])
        worklist.extend(temp_data[6].keys())
        finishlist.append(temp_data[0])
    if len(rowlist)!=0:
        with datadblock:
            DataDB = bsddb.hashopen(Data_DB_Path, 'c')
            for row in rowlist:
                DataDB[row[0]] = row[1]
            DataDB.close()
    for workitem in set(worklist):
        TwitterWorkDB.put(workitem) #debug pass
    for finishitem in finishlist:
        TwitterWorkDB.finish(finishitem)
    return
Example 21
def get_srfcst_data(stn, start_date, end_date):
    hourly_fcst = {}
    latlon = None  # ensure latlon is defined even if the lookup below fails
    try:
        start_date_dt = DateTime.DateTime(*start_date)
        end_date_dt = DateTime.DateTime(*end_date)
        stn = stn.upper()
        forecast_db = hashopen('/ndfd/hourly_forecasts.db', 'r')
        stn_dict = loads(forecast_db[stn])
        latlon = stn_dict['ll']
        for requested_var in ['tsky', 'dwpt']:
            if stn_dict.has_key(requested_var):
                if not hourly_fcst.has_key(requested_var):
                    hourly_fcst[requested_var] = {}
                for dkey in stn_dict[requested_var].keys():
                    dkey_dt = DateTime.DateTime(*dkey)
                    if dkey_dt >= start_date_dt and dkey_dt <= end_date_dt:
                        for h in range(0, 24):
                            if stn_dict[requested_var][dkey][h] != miss:
                                tkey = (dkey[0], dkey[1], dkey[2], h)
                                hourly_fcst[requested_var][tkey] = stn_dict[
                                    requested_var][dkey][h]
        forecast_db.close()
    except:
        print_exception()
    return latlon, hourly_fcst
Example 22
 def config_EVOLUTION_addressbook(self):
     """Convierte e integra los contactos en la libreta de direcciones
     de Evolution
     
     """
     vcard = os.path.join(self.dest.path, _("Contactos"))
     if not os.path.exists(vcard):
         vcard = commands.getoutput("rgrep -l VCARD %s" % self.dest.path.replace(' ', '\ '))
     if not vcard or not os.path.exists(vcard):
         return 0
     import bsddb
     adb=os.path.join(os.path.expanduser('~'),'.evolution','addressbook','local','system','addressbook.db')
     folder(os.path.dirname(adb))
     db = bsddb.hashopen(adb,'w')
     if not 'PAS-DB-VERSION\x00' in db.keys():
         db['PAS-DB-VERSION\x00'] = '0.2\x00'
     contacts = open(vcard, 'r')
     while 1:
         l = contacts.readline()
         if not l:
             break
         if l.find('BEGIN:VCARD') != -1:
             randomid = 'pas-id-' + str(random.random())[2:]
             db[randomid+'\x00'] = 'BEGIN:VCARD\r\nUID:' + randomid + '\r\n'
             while 1:
                 v = contacts.readline()
                 if v.find('END:VCARD') != -1:
                     db[randomid+'\x00'] += 'END:VCARD\x00'
                     break
                 else:
                     db[randomid+'\x00'] += v.replace('PERSONAL','HOME').replace('\n', '\r\n')
     db.sync()
     db.close()
     os.remove(vcard)
Example 23
def getcontacts(folder):
    """Returns the contacts as a list of string vcards

    Note that the Windows EOL convention is used"""

    if folder == evolutionexporter['folderid']:
        return getcontacts_evoexporter()
    
    dir=os.path.expanduser(folder)
    p=os.path.join(dir, "addressbook.db")
    if not os.path.isfile(p):
        # ok, this is not an address book folder
        if not os.path.isfile(os.path.join(dir, "folder-metadata.xml")):
            raise ValueError("Supplied folder is not a folder! "+folder)
        raise ValueError("Folder does not contain contacts! "+folder)
    res=[]
    db=bsddb.hashopen(p, 'r')
    for key in db.keys():
        if key.startswith("PAS-DB-VERSION"): # no need for this field
            continue
        data=db[key]
        while data[-1]=="\x00": # often has actual null on the end
            data=data[:-1]  
        res.append(data)
    db.close()
    return res
Example 24
 def __init__(self, base, full):
     db = base + "-volatile/thread.db"
     if full and os.path.exists(db):
         os.unlink(db)
         mailpie.log.log("removed old thread db")
     self.db = bsddb.hashopen(db)
     self._cache = {}
Example 25
 def __init__(self, file_path=None, session_class=Session):
     import bsddb
     SessionStore.__init__(self, session_class)
     if file_path is None:
         from tempfile import gettempdir
         file_path = os.path.join(gettempdir(), 'session.bdb')
     self.db = bsddb.hashopen(file_path)
Example 26
def get_fcst_data (stn, requested_var, start_date_dt, end_date_dt):
	hourly_fcst = {}
	try:
		if requested_var == 'prcp': requested_var = 'qpf'
		if requested_var == 'srad':
			hourly_fcst = solar_main_fcst2(stn,(start_date_dt.year,start_date_dt.month,start_date_dt.day,start_date_dt.hour),\
									(end_date_dt.year,end_date_dt.month,end_date_dt.day,end_date_dt.hour))
		else:
			stn = stn.upper()
			forecast_db = hashopen('/Users/keith/NDFD/hourly_forecasts.db','r')		
			stn_dict = loads(forecast_db[stn])
			forecast_db.close()
			if stn_dict.has_key(requested_var):
				for dkey in stn_dict[requested_var].keys():
					dkey_dt = DateTime.DateTime(*dkey)
					if dkey_dt >= start_date_dt and dkey_dt <= end_date_dt:
						for h in range(0,24):
							if stn_dict[requested_var][dkey][h] != miss:
								if requested_var != 'qpf':
									tkey = (dkey[0],dkey[1],dkey[2],h)
									hourly_fcst[tkey] = stn_dict[requested_var][dkey][h]
								else:
									#split qpf over last 6 hours
									for phr in range(0,6):
										pdt = dkey_dt + DateTime.RelativeDate(hour=h) + DateTime.RelativeDate(hours=-phr)
										tkey = (pdt.year,pdt.month,pdt.day,pdt.hour)
										hourly_fcst[tkey] = stn_dict[requested_var][dkey][h]/6.
	except:
		print_exception()
	return hourly_fcst
Example 27
def Username2Userid(l_username):
    import requests
    import re
    import bsddb
    db = bsddb.hashopen('name2id.db')
    if l_username in db:
        if db[l_username][0] == '-':
            return abs(int(db[l_username]))
        else:
            return unicode(db[l_username])
    l_url = 'https://twitter.com/' + l_username
    try:
        l_r = requests.get(l_url)
    except:
        return -1
    m = re.search(
        '<div class="profile-card-inner" data-screen-name=".+?" data-user-id=".+?">',
        l_r.text)
    if m is None:
        db[l_username] = str(-l_r.status_code)
        return l_r.status_code
    m = re.search('data-user-id=".+?"', m.group(0))
    m = m.group(0)[14:-1]
    if len(m) > 0:
        db[l_username] = m
        return m
    db[l_username] = str(-l_r.status_code)
    return l_r.status_code
Example 28
def extractKey(inFile, password):
    db = bsddb.hashopen(inFile, 'r')
    keyDict = {}
    try:
        key, value = db.first()
        while 1:
            keyDict[key] = value
            key, value = db.next()
    except KeyError:
        db.close()

    nsKey = asn1.decode(keyDict['Server-Key\0'])
    rc4 = nsKey[1][1][0][1][0][1]
    if rc4 != RC4Signature:
        raise ValueError, 'RC4 signature not found'
    entrySalt = nsKey[0][1]
    octetString = nsKey[1][1][1][1]

    globalSalt = keyDict['global-salt']
    passwd = keyDict['global-salt'] + password
    saltedPasswd = x509.SHA1(passwd, 0, len(passwd))
    key = entrySalt + saltedPasswd
    rc4Key = x509.MD5(key, 0, len(key))
    data = x509.RC4(rc4Key, octetString, 0, len(octetString))
    pkcs1 = asn1.decode(data)
    keyData = pkcs1[0][1][2][1]
    return x509.PrivateKey('rsa', keyData, 0, len(keyData))
Example 29
def dataliststore(temp_data_list, TwitterWorkDB, Data_DB_Path):
    datadblock = FileLock(Data_DB_Path)
    rowlist = []
    worklist = []
    finishlist = []
    for temp_data in temp_data_list:
        buff = ''
        buff += str(temp_data[1]) + '\t'
        if temp_data[2] != None: buff += str(temp_data[2])
        buff += '\t'
        if temp_data[3] != None: buff += str(temp_data[3])
        buff += '\t'
        if temp_data[4] != None: buff += str(temp_data[4])
        buff += '\t'
        if temp_data[5] != None:
            buff += temp_data[5].encode('ascii', 'replace')
        buff += '\t'
        buff += json.dumps(temp_data[6]) + '\t'
        buff += json.dumps(temp_data[7])
        rowlist.append([temp_data[0], buff])
        worklist.extend(temp_data[6].keys())
        finishlist.append(temp_data[0])
    if len(rowlist) != 0:
        with datadblock:
            DataDB = bsddb.hashopen(Data_DB_Path, 'c')
            for row in rowlist:
                DataDB[row[0]] = row[1]
            DataDB.close()
    for workitem in set(worklist):
        TwitterWorkDB.put(workitem)  #debug pass
    for finishitem in finishlist:
        TwitterWorkDB.finish(finishitem)
    return
Example 30
 def run(self):
     import networking
     import sys
     if sys.platform == 'win32':
         import bsddb
         self.DB = bsddb.hashopen(self.database_name)
         self._get = self.DB.__getitem__
         self._put = self.DB.__setitem__
         self._del = self.DB.__delitem__
         self._close = self.DB.close
     else:
         import leveldb
         self.DB = leveldb.LevelDB(self.database_name)
         self._get = self.DB.Get
         self._put = self.DB.Put
         self._del = self.DB.Delete
         self._close = _noop # leveldb doesn't have a close func
     try:
         self.salt = self._get('salt')
     except KeyError:
         self.salt = os.urandom(5)
         self._put('salt', self.salt)
     def command_handler(command):
         try:
             name = command['type']
             assert (name not in ['__init__', 'run'])
             return getattr(self, name)(command['args'])
         except Exception as exc:
             self.logf(exc)
             self.logf('command: ' + str(command))
             self.logf('command type: ' + str(type(command)))
             return {'error':'bad data'}
     networking.serve_forever(command_handler, self.port, self.heart_queue)
     self._close()
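
The pattern above, binding _get/_put/_del to whichever backend is available, is a small storage-abstraction trick: callers use one interface and never learn whether bsddb or leveldb sits underneath. A stripped-down sketch of the same idea, with the class and file names invented for illustration:

import bsddb

class KVStore(object):
    """Dict-like facade that hides the concrete backend."""
    def __init__(self, path):
        self._db = bsddb.hashopen(path, 'c')
        self.get = self._db.__getitem__
        self.put = self._db.__setitem__
        self.delete = self._db.__delitem__
        self.close = self._db.close

store = KVStore('kv-demo.db')
store.put('salt', 'abc')
print store.get('salt')
store.close()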
Example 31
def get_records (thr_id,rn,infile):
	records_dict = {}
	name = ''
	start_yr = 9999
	end_yr = 9999
	por = (9999,9999)
	try:
		thrdx_dict = hashopen(infile,'r')
		if thrdx_dict.has_key(thr_id):
			thr_recs = loads(thrdx_dict[thr_id])
			name = '%s, %s' % (thr_recs['name'],thr_recs['state'])
			start_yr = min(thr_recs['maxt']['start_yr'],thr_recs['mint']['start_yr'])
			end_yr = max(thr_recs['maxt']['end_yr'],thr_recs['mint']['end_yr'])
			por = (start_yr,end_yr)
			for (element,hilo) in [('maxt','-hi'),('mint','-lo'),('maxt','-lo'),('mint','-hi'),('pcpn','-hi')]:
				k = element+hilo
				records = thr_recs[element][hilo]
				reclist = []
				for tt in range(1,60):
					reclist.append((records[tt][rn-1][0],records[tt][rn-1][1]))
				reclist.append((records[366][rn-1][0],records[366][rn-1][1]))
				for tt in range(60,366):
					reclist.append((records[tt][rn-1][0],records[tt][rn-1][1]))
				records_dict[k] = reclist
		thrdx_dict.close()
	except:
		print_exception()
	return records_dict, name, por
Example 32
def Username2Userid(l_username):
    """Return a int indicating the id of the l_username.
    """
    import requests
    import re
    import bsddb
    import string
    db = bsddb.hashopen('name2id.db')
    l_username = l_username.decode("cp1252").encode('ascii','ignore').translate(string.maketrans("",""), ''.join(string.punctuation.split('_'))+' ')
    if l_username in db:
        if db[l_username][0] == '-':
            return abs(int(db[l_username]))
        else: 
            return unicode(db[l_username])
    l_url = 'https://twitter.com/'+l_username
    l_r = requests.get(l_url)
    m = re.search('<div class="profile-card-inner" data-screen-name=".+?" data-user-id=".+?">',l_r.text)
    if m is None:
        db[l_username] = str(-l_r.status_code)
        return l_r.status_code
    m = re.search('data-user-id=".+?"',m.group(0))
    m = m.group(0)[14:-1]
    if len(m)>0:
        db[l_username] = m
        return m
    db[l_username] = str(-l_r.status_code)
    return l_r.status_code
Example 33
    def poedit_tm_import(self):
        """Attempt to import the Translation Memory used in KBabel."""
        if not hasattr(self, "poedit_database_path"):
            return

        # import each language separately
        for lang in self.poedit_languages:
            strings_db_file = path.join(self.poedit_database_path, lang, 'strings.db')
            translations_db_file = path.join(self.poedit_database_path, lang, 'translations.db')
            if not path.exists(strings_db_file) or not path.exists(translations_db_file):
                continue
            sources = bsddb.hashopen(strings_db_file, 'r')
            targets = bsddb.rnopen(translations_db_file, 'r')
            for source, str_index in sources.iteritems():
                unit = {"context" : ""}
                # the index is a four byte integer encoded as a string
                # was little endian on my machine, not sure if it is universal
                index = struct.unpack('i', str_index)
                target = targets[index[0]][:-1] # null-terminated
                unit["source"] = _prepare_db_string(source)
                unit["target"] = _prepare_db_string(target)
                self.tmdb.add_dict(unit, "en", lang, commit=False)
            self.tmdb.connection.commit()

            logging.debug('%d units migrated from Poedit TM: %s.' % (len(sources), lang))
            sources.close()
            targets.close()
            self.migrated.append(_("Poedit's Translation Memory: %(database_language_code)s") % \
                    {"database_language_code": lang})
Example 34
 def __init__(self, db_filename):
     # hashopen, cause we operate on single keys and do full scan at start.
     self.db = bsddb.hashopen(db_filename)
     # For better performance we are allowing some tasks to be executed
     # multiple times in case of server failure. Hence, we are skipping
     # specific database sync and doing it later with LoopingCall.
     self.db_sync_task = LoopingCall(self.db.sync)
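
The comment documents a deliberate durability trade-off: rather than calling db.sync() after every write, syncing is batched through Twisted's LoopingCall, accepting that a crash may replay a few tasks. A sketch of the wiring, assuming Twisted is installed (file name hypothetical):

import bsddb
from twisted.internet import reactor
from twisted.internet.task import LoopingCall

db = bsddb.hashopen('tasks.db', 'c')
sync_task = LoopingCall(db.sync)  # flush dirty pages in one place
sync_task.start(30.0, now=False)  # every 30 seconds, skipping the immediate first call
reactor.run()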
Example 35
    def cache_results(self, clusters_set, mergable_clusters):
        if not self.use_cache:
            return

        try:
            import bsddb as bsddb3
            self.cache = bsddb3.hashopen(self.CACHENAME)
            myhash = str(hash(self.learner))
            c = str(self.learner.c)
            key = '%s:%s' % (myhash, c)

            clusters_set = [cluster.to_dict() for cluster in clusters_set]
            mergable_clusters = [
                cluster.to_dict() for cluster in mergable_clusters
            ]
            self.cache[key] = json.dumps((clusters_set, mergable_clusters))

            cs_to_keys = json.loads(
                self.cache[myhash]) if myhash in self.cache else {}
            cs_to_keys[c] = key
            self.cache[myhash] = json.dumps(cs_to_keys)
            self.cache.close()
            _logger.debug("saved cache %f", self.learner.c)
        except:
            import traceback
            traceback.print_exc()
Example 36
 def _openHash_OnlyOnExist(self, theFileName):
     expectedFullFilePath = self._get_DB_FullPath_ForFilename(theFileName)
     ret_DB = None
     isExist = self._does_DB_Exist(theFileName)
     if isExist:
         ret_DB = bsddb.hashopen(expectedFullFilePath, 'c')
     return ret_DB
Example 37
	def __init__(self, name, path = "", unique = True):
		#
		#  Constructs new DiskQueueManager
		#
		#  @param name : name of the queue
		#  @param path : path where databases will be stored
		#  @param unique : if set to False, disables the unique capability;
		#                  add() then behaves exactly like set()
		#
		

		#  @param ext_tree : file extension of tree db
		#  @param ext_hash : file extension of hash db
		
		ext_tree = ".tree.db"
		ext_hash = ".hash.db"
		
		self.name = name
		
		file_md5 = self._hash(name)

		self.file_tree = path + file_md5 + ext_tree
		self.file_hash = path + file_md5 + ext_hash
			
		self.db_tree = bsddb.btopen(  self.file_tree, "c")
		self.db_hash = bsddb.hashopen(self.file_hash, "c")

		self.new_key_id = 0L
		
		self.unique = unique
Example 38
    def config_smtp(self,mailName):

        """
        Config SMTP server for mail.
        If SMTP server exists, return the name,
        else get a new server string from user input.

        :param mailName:
            Mail that will send mail that attached files.
        :type mailName:
            ``string``
        """

        mailPat = '[^@]+@([^@]+\.[^@]+)'
        m = re.match(mailPat,mailName)
        # should check after the user inputs their mail
        # if not m:
        serverName = m.groups()[0]
        smtpName = None
        smtpDb = bsddb.hashopen('smtp.db')
        if smtpDb.has_key(serverName):
            smtpName = smtpDb[serverName]
        else:
            smtpName = inputSMTP(mailName)
            smtpDb[serverName] = smtpName
            smtpDb.sync()
        smtpDb.close()
        return smtpName
Example 39
def getcontacts(folder):
    """Returns the contacts as a list of string vcards

    Note that the Windows EOL convention is used"""

    if folder == evolutionexporter['folderid']:
        return getcontacts_evoexporter()

    dir = os.path.expanduser(folder)
    p = os.path.join(dir, "addressbook.db")
    if not os.path.isfile(p):
        # ok, this is not an address book folder
        if not os.path.isfile(os.path.join(dir, "folder-metadata.xml")):
            raise ValueError("Supplied folder is not a folder! " + folder)
        raise ValueError("Folder does not contain contacts! " + folder)
    res = []
    db = bsddb.hashopen(p, 'r')
    for key in db.keys():
        if key.startswith("PAS-DB-VERSION"):  # no need for this field
            continue
        data = db[key]
        while data[-1] == "\x00":  # often has actual null on the end
            data = data[:-1]
        res.append(data)
    db.close()
    return res
Example 40
def rebalance():
    ploud_config.initializeConfig()
    APACHE = ptah.get_settings('apache')
    file = APACHE['lbfile']
    processes = APACHE['processes']

    conn = ploud_config.PLOUD_POOL.getconn()
    c1 = conn.cursor()

    c1.execute("SELECT vhost.host,sites.bwin,sites.bwout,sites.site_name FROM vhost, sites "
               "WHERE vhost.id = sites.id and sites.disabled = %s ORDER by sites.id",(False,))

    db = bsddb.hashopen(file, 'w')

    data = [(bwin+bwout, host, name) for host, bwin, bwout, name in c1.fetchall()]
    data.sort()

    i = 1
    for size, host, name in data:
        db[host] = str(i)
        db[name] = str(i)
        i = i + 1
        if i > processes:
            i = 1

    print 'Rebalancing is done.'

    db.close()

    c1.close()
    conn.close()
Example 41
def extractKey(inFile, password):
	db = bsddb.hashopen(inFile, 'r')
	keyDict = {}
	try:
		key, value = db.first()
		while 1:
			keyDict[key] = value
			key, value = db.next()
	except KeyError:
		db.close()

	nsKey = asn1.decode(keyDict['Server-Key\0'])
	rc4 = nsKey[1][1][0][1][0][1]
	if rc4 != RC4Signature:
		raise ValueError, 'RC4 signature not found'
	entrySalt = nsKey[0][1]
	octetString = nsKey[1][1][1][1]

	globalSalt = keyDict['global-salt']
	passwd = keyDict['global-salt'] + password
	saltedPasswd = x509.SHA1(passwd, 0, len(passwd))
	key = entrySalt + saltedPasswd
	rc4Key = x509.MD5(key, 0, len(key))
	data = x509.RC4(rc4Key, octetString, 0, len(octetString))
	pkcs1 = asn1.decode(data)
	keyData = pkcs1[0][1][2][1]
	return x509.PrivateKey('rsa', keyData, 0, len(keyData))
Example 42
def _openDbSnapshot(ngamsCfgObj,
                    mtPt):
    """
    Open a bsddb file DB. If the file exists and this is not
    a read-only NGAS system the file is opened for reading and writing.
    If this is a read-only NGAS system it is only opened for reading.

    If the file DB does not exist, a new DB is created.

    If the file DB does not exist and this is a read-only NGAS system,
    None is returned.

    The name of the DB file is:

      <Disk Mount Point>/NGAMS_DB_DIR/NGAMS_DB_NGAS_FILES

    ngamsCfgObj:    NG/AMS Configuration Object (ngamsConfig).

    mtPt:           Mount point (string).

    Returns:        File DB object (bsddb|None).
    """
    snapShotFile = os.path.normpath(mtPt + "/" + NGAMS_DB_DIR + "/" +\
                                    NGAMS_DB_NGAS_FILES)
    checkCreatePath(os.path.normpath(mtPt + "/" + NGAMS_DB_CH_CACHE))
    if (os.path.exists(snapShotFile)):
        if (_updateSnapshot(ngamsCfgObj)):
            # Open the existing DB Snapshot for reading and writing.
            snapshotDbm = bsddb.hashopen(snapShotFile, "w")
        else:
            # Open only for reading.
            snapshotDbm = bsddb.hashopen(snapShotFile, "r")
    else:
        if (_updateSnapshot(ngamsCfgObj)):
            # Create a new DB Snapshot.
            snapshotDbm = bsddb.hashopen(snapShotFile, "c")
        else:
            # There is no DB Snapshot and it is not possible to
            # create one - the check cannot be carried out.
            snapshotDbm = None

    # Remove possible, old /<mt pt>/.db/NgasFiles.xml snapshots.
    # TODO: Remove when it can be assumed that all old XML snapshots have
    #       been removed.
    rmFile(os.path.normpath(mtPt + "/" + NGAMS_DB_DIR + "/NgasFiles.xml"))

    return snapshotDbm
Example 43
    def __connect(self):
        if self.__db is not None:
            self.close()

        if self.__option == 'bt':
            self.__db = bsddb.btopen(self.__filename, self.__flag, self.__mode)
        elif self.__option == 'hash':
            self.__db = bsddb.hashopen(self.__filename, self.__flag, self.__mode)
Example 44
 def __init__(self, path=None):
     self.logging = logging.getLogger(self.__class__.__name__)
     if not path:
         fd, path = tempfile.mkstemp(".shelve", "octo_", text=False)
         os.close(fd)
     self.logging.debug("Shelve DB: %s", path)
     self._db = bsddb.hashopen(path, 'n')
     self._shelve = shelve.BsdDbShelf(self._db, protocol=2, writeback=True)
Example 45
 def __init__(self, chmpath):
     assert isinstance(chmpath, unicode)
     self.md5 = md5sum(chmpath)
     self.cfghome = os.path.join(home, '.pychmviewer')
     try:
         os.mkdir(self.cfghome, 0700)
     except exceptions.OSError:
         pass
     self.cfghome = os.path.join(self.cfghome, self.md5)
     try:
         os.mkdir(self.cfghome, 0700)
     except exceptions.OSError:
         pass
     bookmarkpath = os.path.join(self.cfghome, 'bookmark.db')
     lastconfpath = os.path.join(self.cfghome, 'last.db')
     self.bookmarkdb = bsddb.hashopen(bookmarkpath)
     self.lastconfdb = bsddb.hashopen(lastconfpath)
Example 46
def btree_test():
    #db = bsddb.btopen('test.btree', 'n')
    db = bsddb.hashopen('test.btree', 'n')

    for count in xrange(ELEM_NUM):
        key = str(count)
        db[key] = ROW
    db.close()
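
The commented-out btopen line is the interesting part of this little benchmark: btopen builds a B-tree, which keeps keys sorted and supports ordered traversal, while hashopen's key order is arbitrary. A quick sketch of the observable difference (file names hypothetical):

import bsddb

bt = bsddb.btopen('demo.btree', 'n')
for k in ['b', 'a', 'c']:
    bt[k] = k
print bt.keys()   # ['a', 'b', 'c']: B-tree keys come back sorted

h = bsddb.hashopen('demo.hash', 'n')
for k in ['b', 'a', 'c']:
    h[k] = k
print h.keys()    # hash order is arbitrary
bt.close()
h.close()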
Example 47
 def __open_bdb(self):
     '''
     open bdb file with hashopen(); exit when an exception occurs (e.g. permission denied)
     '''
     try:
         self.dbh = bsddb.hashopen(options.dbfile)
     except Exception, e:
         print e
         sys.exit(-1)
Example 48
    def __init__(self):
        self.cardfile = bsddb.hashopen("./data/card_images.db", "c")
        self.back = pyglet.resource.texture("back.jpg")
        self.notfound = pyglet.resource.texture("notfound.jpg")
        self.combat = pyglet.resource.texture("combat.png")
        self.triggered = pyglet.resource.texture("triggered.png")
        self.activated = pyglet.resource.texture("activated.png")

        # XXX This is a hack
        Card.build_fbo()
Example 49
def show_database(filename):
    """ Show database contents. (Usernames and passwords) """
    try:
        db = bsddb.hashopen(filename, 'r')
    except:
        print("Cannot open database")
        return

    for k, v in db.items():
        print(k + " : " + v)
Example 50
def get_precip_forecast(stn, start_date_dt, end_date_dt):
    hourly_fcst = []
    miss = -999
    try:
        stn = stn.upper()
        pdict = hashopen('/ndfd/hourly_forecasts.db', 'r')
        if pdict.has_key(stn):
            stndict = loads(pdict[stn])
            pdict.close()
            firstday_hour = start_date_dt.hour
            lastday_hour = end_date_dt.hour
            start_date_dt = start_date_dt + DateTime.RelativeDate(hour=0)
            end_date_dt = end_date_dt + DateTime.RelativeDate(hour=0)
            theDate_dt = start_date_dt
            while theDate_dt <= end_date_dt:
                theDate = (theDate_dt.year, theDate_dt.month, theDate_dt.day)
                if stndict['qpf'].has_key(theDate):
                    qpf = stndict['qpf'][theDate]
                else:
                    qpf = [miss] * 24
                if stndict['pop12'].has_key(theDate):
                    pop12 = stndict['pop12'][theDate]
                else:
                    pop12 = [miss] * 24
                if theDate_dt == start_date_dt:
                    shour = firstday_hour
                else:
                    shour = 0
                if theDate_dt == end_date_dt:
                    ehour = lastday_hour
                else:
                    ehour = 23
                for hr in range(shour, ehour + 1):
                    theTime = (theDate_dt.year, theDate_dt.month,
                               theDate_dt.day, hr)
                    hourly_fcst.append((theTime, qpf[hr], pop12[hr]))
                    # distribute precipitation over last 6 hours
                    if qpf[hr] != miss:
                        x = len(hourly_fcst) - 1
                        for i in range(x, x - 6, -1):
                            if i >= 0:
                                hourly_fcst[i] = hourly_fcst[i][0:1] + (
                                    qpf[hr] / 6., ) + hourly_fcst[i][2:]
                theDate_dt = theDate_dt + DateTime.RelativeDate(days=+1)
    except:
        print_exception()
    return hourly_fcst


#stn = 'cli'
#start_date_dt = DateTime.DateTime(2009,4,16,8)
#end_date_dt = DateTime.DateTime(2009,4,22,23)
#forecast_dict = get_precip_forecast(stn,start_date_dt,end_date_dt)
#for item in forecast_dict:
#	print item
Example 51
def _is_bsd_hashdb(dbpath):
    """
    TODO: Is this enough to check if given file ``dbpath`` is RPM DB file ?
    And also, maybe some db files should be opened w/ bsddb.btopen instead of
    bsddb.hashopen.

    >>> if os.path.exists("/etc/redhat-release"):
    ...     _is_bsd_hashdb("/var/lib/rpm/Packages")
    True
    """
    try:
        if bsddb is None:
            return True  # bsddb is not available in python3.

        bsddb.hashopen(dbpath, 'r')
    except:
        logging.warn("Not a Berkley DB?: %s" % dbpath)
        return False

    return True
Example 52
def main():
    if STORAGE_MODE == BDB:
        import bsddb
        global texts_db, lasttime_db
        texts_db = bsddb.hashopen(DATA_DIR + "/texts.db")
        lasttime_db = bsddb.hashopen(DATA_DIR + "/lasttime.db")

    # Start up a thread that does timeouts and cleanup
    thread.start_new_thread(cleanup_thread, ())

    mobwrite_core.LOG.info("Listening on port %d..." % LOCAL_PORT)
    s = SocketServer.ThreadingTCPServer(("", LOCAL_PORT), DaemonMobWrite)
    try:
        s.serve_forever()
    except KeyboardInterrupt:
        mobwrite_core.LOG.info("Shutting down.")
        s.socket.close()
        if STORAGE_MODE == BDB:
            texts_db.close()
            lasttime_db.close()
Example 53
def list_users(filename):
    db = bsddb.hashopen(filename, "r")
    users = db.keys()
    users.sort()
    max_len = 0
    for user in users:
        max_len = max([max_len, len(user)])
    print "%s %s" % ("NAME".ljust(max_len), "GROUPS")
    for user in users:
        print "%s %s" % (user.ljust(max_len), db[user][16:])
    db.close()
Example 54
def importdb(filename):
    global buf, buffered_coords, buffered_nodes, pushed_coords, pushed_nodes
    buf=[]
    buffered_coords=set()
    buffered_nodes=set()
    pushed_coords=set()
    pushed_nodes=set()
    global waycount, nodecount, coordcount
    nodecount = waycount = coordcount = 0

    global nodedb, node_way_db
    if isfile(nodedb_name):
        os.remove(nodedb_name)
    if isfile(node_way_db_name):
        os.remove(node_way_db_name)

    nodedb = bsddb.hashopen(nodedb_name, 'c')
    node_way_db = bsddb.hashopen(node_way_db_name, 'c')
    
    p = OSMParser(concurrency=4, 
                  ways_callback=way,
                  coords_callback=coord,
                  nodes_callback=node,
                  relations_callback=relation)
    p.parse(filename)
    
    for n, ws in node_way_db.iteritems():
        try:
            val = eval(nodedb[n])
            for w in eval(ws):
                val['ways'].append(w)
            nodedb[n] = str(val) 
        except KeyError:
            pass
#                print "Warning: Key %s not found in nodedb"%n

    for n in nodedb.itervalues():
        put_buffer(eval(n), False, True)

    flush_buffer()
Example 55
def getconfig():

    Config = bsddb.hashopen(None, 'w')
    Tests = bsddb.hashopen(None, 'w')

    config = ConfigParser.ConfigParser()
    config.readfp(open(sys.argv[1]))

    if not config.has_section('Config'):
        print "Config file *must* have section [Config]"
        sys.exit(1)
    for options in config.options('Config'):
        Config[options] = config.get('Config', options)
    if not Config.has_key('pvfs2tabfile'):
        print "Config section must have a \"PVFS2TABFILE\" option"
        sys.exit(1)
    if not Config.has_key('pvfs2bindir'):
        print "Config section must have a \"PVFS2BINDIR\" option"
        sys.exit(1)
    if not Config.has_key('mpichbindir'):
        print "Config section must have a \"MPICHBINDIR\" option"
        sys.exit(1)
    if not Config.has_key('pavdir'):
        print "Config section must have a \"PAVDIR\" option"
        sys.exit(1)
    if not Config.has_key('mountpoint'):
        print "Config section must have a \"MOUNTPOINT\" option"
        sys.exit(1)
    if not Config.has_key('email'):
        print "Config section must have a \"EMAIL\" option"
        sys.exit(1)

    if not config.has_section('Tests'):
        print "Config file *must* have section [Tests]"
        sys.exit(1)

    for options in config.options('Tests'):
        Tests[options] = (config.get('Tests', options))

    return Config, Tests
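
One detail worth flagging in this example: passing None as the file name, as hashopen(None, 'w') does above, creates an anonymous in-memory database, convenient for scratch tables that still want the dict-like interface. A minimal sketch:

import bsddb

scratch = bsddb.hashopen(None, 'w')  # no file on disk, purely in memory
scratch['pvfs2tabfile'] = '/etc/pvfs2tab'
print scratch.has_key('pvfs2tabfile')  # -> True
scratch.close()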
Example 56
def main():
    remote = '10.2.0.1'
    db = bsddb.hashopen('latency.db')
    while True:
        now = int(time.time())
        count, timeout = 10, 30
        # do ping
        lost, mrtt, artt = quiet_ping(remote, timeout=timeout, count=count)
        end = int(time.time())
        # save data point
        db['{}|{}|{}'.format(remote, now, end)] = '{}|{}|{}|{}|{}'.format(
            count, timeout, lost, mrtt, artt)
        db.sync()
Example 57
 def __init__(self):
     dirname = "./data/"
     dbnames = glob.glob(dirname + "*.db")
     for name in dbnames:
         if os.path.basename(name) == "card_images.db":
             dbnames.remove(name)
             break
     self._dbs = []
     for filename in dbnames:
         self._dbs.append(bsddb.hashopen(filename))
     self._txtcards = set(
         os.path.basename(c) for c in glob.glob("./data/cards/*"))
     self._invalid = set()
Example 58
def add(filename, user, group):
    db = bsddb.hashopen(filename, "w")
    if user in db.keys():
        print "WARNING: User %s is already present in file %s!" % (user,
                                                                   filename)
    else:
        password = getpass.getpass("Input password:")
        verify = getpass.getpass("Verify password:")
        if password == verify:
            db[user] = "%s%s" % (hashlib.md5(password).digest(), group)
        else:
            print "WARNING: Password verification failed, user %s not added." % user
    db.close()