def _initBTrees(self):
    """Set up the persistent data structures backing this container.

    Creates the length counter, the auxiliary tree-list mapping, and the
    main OOBTree.
    """
    self._count = Length()
    self._tree_list = PersistentMapping()
    self._htree = OOBTree()
def __init__(self, id, title=None):
    """Record the id (exposed both as ``id`` and ``_id``) and the title,
    then create the group lookup trees."""
    self._id = self.id = id
    self.title = title
    # Group membership indexes (principal->groups and groups->principals).
    self._principal_groups = OOBTree()
    self._groups = OOBTree()
def __init__(self, id=None):
    """Initialise the base course, then attach the attendance mapping."""
    super(Course, self).__init__(id)
    # userid -> DateTime of attendance
    self.attended_users = OOBTree()
def __init__(self, parent):
    """Remember the parent object and create empty field/value trees."""
    self.__parent__ = parent
    self._values = OOBTree()
    self._fields = OOBTree()
def __setitem__(self, key, value):
    """Store ``value`` under ``key``, lazily creating the records tree."""
    records = self.records
    if records is None:
        # First write: create the persistent tree on demand.
        records = self.records = OOBTree()
    records[key] = value
def init_annotations(self):
    """Attach a fresh, empty annotations tree to this object."""
    self.annotations = OOBTree()
def spill_to_disk(self):
    """Flush the in-memory memtable to a new SSTable file on disk.

    Each (table, column-family, column, row) cell is appended as one JSON
    line to a new ``dataN.txt`` file; the byte offset of that line is pushed
    onto the front of the matching ``ssindex`` entry in the manifest so the
    newest version of a row is found first.  Afterwards the write-ahead log
    is deleted, the memtable is reset, and the manifest is pickled to disk.
    """
    # Lazily initialise the spill-file counter on the first spill.
    if "filenum" not in const.manifest:
        const.manifest["filenum"] = 0
    file_name = 'data' + str(const.manifest["filenum"] + 1) + '.txt'
    # pickle.dump(self.mem_table, open("save.p", "wb"))
    # entry = 0
    with open(file_name, 'w') as outfile:
        for table_name in self.mem_table:
            # Ensure every level of the ssindex nesting exists before use.
            if table_name not in const.manifest["ssindex"]:
                const.manifest["ssindex"].update({table_name: {}})
            for column_family in self.mem_table[table_name]:
                if column_family not in const.manifest["ssindex"][table_name]:
                    const.manifest["ssindex"][table_name].update({column_family: {}})
                for column in self.mem_table[table_name][column_family]:
                    if column not in const.manifest["ssindex"][table_name][column_family]:
                        # Rows within a column are indexed by an OOBTree.
                        const.manifest["ssindex"][table_name][column_family].update({column: OOBTree()})
                    for row in self.mem_table[table_name][column_family][column]:
                        if row not in const.manifest["ssindex"][table_name][column_family][column]:
                            const.manifest["ssindex"][table_name][column_family][column].update({row: []})
                        # Record where this cell's JSON line starts so reads
                        # can seek straight to it later.
                        last_pos = outfile.tell()
                        json.dump(self.mem_table[table_name][column_family][column][row], outfile)
                        outfile.write("\n")
                        # insert(0, ...) keeps the newest file/offset first.
                        const.manifest["ssindex"][table_name][column_family][column][row].insert(0, {"file_name": file_name, "offset": last_pos})
    # The data is durable on disk now, so the write-ahead log can go.
    os.remove(const.WAL_filename)
    self.mem_table = {}
    self.entries = 0
    if "files" not in const.manifest:
        const.manifest["files"] = []
    const.manifest["files"].append(file_name)
    const.manifest["filenum"] += 1
    # Persist the updated manifest atomically-enough for this toy store.
    with open(const.manifest_filename, 'wb') as outfile:
        pickle.dump(const.manifest, outfile)
def _init_trees(self):
    """Create the two identity-lookup trees used by this plugin."""
    # userid -> userdata
    self._useridentities_by_userid = OOBTree()
    # (provider_name, provider_userid) -> userid
    self._userid_by_identityinfo = OOBTree()
def __init__(self, *args, **kw):
    """Delegate to ZODBGroupManager, then add the reverse group index."""
    ZODBGroupManager.__init__(self, *args, **kw)
    # reverse index of groups->principal
    self._group_principal_map = OOBTree()
def __init__(self):
    """Create the category container and register the default category."""
    self._container = OOBTree()
    self.add_category('0')  # the home category always exists
def __init__(self, *args, **kwargs):
    """Initialise the Item base class and the task/tag bookkeeping trees."""
    Item.__init__(self, *args, **kwargs)
    self._entry_tags = OOBTree()
    self._entries_by_tag = OOBTree()
    self._pending_tasks = IOBTree()
def __init__(self, id):
    """Store the id and create the per-object history tree."""
    self.id = id
    self._histories = OOBTree()
f'Merging complete.\n' f'Index build complete.\n' f'{Fore.BLUE}' f'Words indexed: {words_count}' f'{Style.RESET_ALL}') start_time = time.time() build_index(listdir(r"D:\PyCharmWorkspace\InfoRetrival\data\books\bigtxt"), r"D:\PyCharmWorkspace\InfoRetrival\data\books\bigtxt", 500000) print(f'{Fore.BLUE}Indexing took {time.time() - start_time} seconds{Style.RESET_ALL}') with open('term_tree.pkl', 'rb') as file_handler: data = pickle.load(file_handler) term_tree = OOBTree(data) storage = StorageManager(term_tree, 0) print(storage.find_term('half')) # Checking how the VB encoding works encode_nums = vbencoder.vb_encode([1, 2, 3, 4]) print(encode_nums) print(vbencoder.vb_decode(encode_nums)) with open('../task_2/my_inverted_index.json', 'r') as handler: index = json.load(handler) with open(fr'uncompressed_index.txt', 'w') as handler: handler.write(str(index))
def invite_tickets(self):
    """Return the persistent invite-ticket store, creating it on first use."""
    tickets = getattr(self, '__invite_tickets__', None)
    if tickets is None:
        tickets = self.__invite_tickets__ = OOBTree()
    return tickets
def __init__(self, id=None):
    """Initialise the base tool and the pending password-request mapping."""
    super(PasswordTool, self).__init__(id)
    self._password_request_dict = OOBTree()
def __init__(self, num_columns):
    """Create one OOBTree per column.

    Tree ``i`` in ``btree_list`` indexes values of column ``i``.
    """
    self.btree_list = [OOBTree() for _ in range(num_columns)]
# Micro-benchmark: time N insertions into an OOBTree stored in ZODB
# (Python 2 script: print statements and xrange).
import transaction

# Start from an empty scratch database directory.
shutil.rmtree(DBPATH, ignore_errors=True)
os.mkdir(DBPATH)
# USEFILE selects a disk-backed FileStorage; otherwise an in-memory MappingStorage.
if USEFILE:
    storage = FileStorage(os.path.join(DBPATH, 'Data.fs'))
else:
    storage = MappingStorage()
print "created scratch new DB"
db = DB(storage)
conn = db.open()
dbroot = conn.root()
dbroot['tab1'] = OOBTree()
tab1 = dbroot['tab1']
from randstr import newid
start_time = time.time()
for i in xrange(N):
    tab1[newid(i)] = str(i)
    # Commit in batches of COMMITSIZE (0 disables intermediate commits).
    if COMMITSIZE > 0 and i % COMMITSIZE == 0:
        transaction.commit()
transaction.commit()
elapsed_time = time.time() - start_time
db.close()
for k in range(0, 60, 4): del self.t[k] tm1.commit() copy[1] = 1 try: tm2.commit() except ConflictError, detail: self.assert_(str(detail).startswith('database conflict error')) else: self.fail("expected ConflictError") # Same thing, except commit the transactions in the opposite order. b = OOBTree() for i in range(0, 200, 4): b[i] = i tm1 = transaction.TransactionManager() r1 = self.db.open(transaction_manager=tm1).root() r1["t"] = b tm1.commit() tm2 = transaction.TransactionManager() r2 = self.db.open(transaction_manager=tm2).root() copy = r2["t"] # Make sure all of copy is loaded. list(copy.values()) self.assertEqual(b._p_serial, copy._p_serial)
def __init__(self, id, title=''):
    """Initialize a new instance with an id, a title, and an empty domain map."""
    self.id = id
    self.title = title
    self._domain_map = OOBTree()
def __init__(self):
    """Create the member tree and register the default properties."""
    self._members = OOBTree()
    # Default property sheet entries.
    self._setProperty('description', '', 'text')
    self._setProperty('email', '', 'string')
import numpy as np
from tabulate import tabulate
import numpy.lib.recfunctions as rfn
from operator import itemgetter
from itertools import groupby
from collections import defaultdict
from BTrees.OOBTree import OOBTree
import re
import copy
import time

# Module-level registries shared by the indexing helpers below.
hashmap = {}
btree = OOBTree()
collection_hash = []
collection_btree = []
# Built indexes, keyed by "<table>.<column>".
hash_keys = {}
btree_keys = {}

"""This function is used to create a hashtable when the user requests an index to be created for a particular column name"""


def HashTable(tname, table_name, col_name):
    # column value -> list of row positions holding that value
    hashmap = defaultdict(lambda: [])
    # NOTE(review): despite the name, table_name appears to be the table's
    # row data (it is indexed by row/column) — confirm at call sites.
    collection_hash = table_name
    for row in range(len(collection_hash)):
        hashmap[collection_hash[row][col_name]].append(row)
    hash_keys[tname + '.' + col_name] = hashmap


"""This is used by select to check if there is a hashmap for the given column and return the value(s) requested by select"""


def Hash(tname, col_name, val):
    hashmap = hash_keys[tname + '.' + col_name]
    # NOTE(review): the lookup key is coerced to float — this assumes the
    # indexed column holds numeric values; confirm against HashTable input.
    matched_rows = hashmap[float(val)]
    print(matched_rows)
    return matched_rows
def __init__(self):
    """Create the paired name<->uid lookup trees."""
    self._reverse = OOBTree()  # uid -> name
    self._forward = OOBTree()  # name -> uid
# Demo script (Python 2): populate a 'subscriptions' OOBTree in ZODB and
# exercise minKey() prefix lookups.
if False:
    storage = FileStorage('Data.fs')
else:
    storage = MappingStorage()
db = DB(storage)
conn = db.open()
dbroot = conn.root()
print dbroot.keys()
if not dbroot.has_key('subscriptions'):
    print 'initializing subscriptions'
    dbroot['subscriptions'] = OOBTree()
    t = dbroot['subscriptions']
    # Seed five events per base URI.
    for b in ['foo', 'bar', 'foobar']:
        BASEURI = "http://example.com/%s" % b
        for i in xrange(5):
            t[BASEURI + '#event%d' % i] = [i, 1, 2, 3]
    transaction.commit()
else:
    print 'subscriptions already initialized'
subs = dbroot['subscriptions']
print subs
# minKey(prefix) returns the smallest key >= the given prefix.
print subs.minKey()
print subs.minKey("http://example.com/")
print subs.minKey("http://example.com/fo")
def getUpgradeStepsForProfile(self, profile_id):
    """Return the upgrade-steps mapping for ``profile_id``.

    An unknown profile gets a fresh OOBTree registered in the registry
    before it is returned.
    """
    steps = self._registry.get(profile_id)
    if steps is not None:
        return steps
    steps = self._registry[profile_id] = OOBTree()
    return steps
def __init__(self, id, title=None, buildRelations=True):
    """Initialise the ZenModelRM base, then add the per-device collector map."""
    ZenModelRM.__init__(self, id, title, buildRelations)
    self.prevCollectorPerDevice = OOBTree()
def update(self, language, items, clear=False):
    """Insert or refresh ``(key, value)`` pairs for ``language``.

    Maintains three per-language structures: ``data`` (key -> value tree),
    ``order`` (insertion index -> key) and ``count`` (next insertion index).
    Version-1 trees are migrated in place to version 2 by rewriting the
    legacy path separator.  With ``clear=True`` the existing entries and
    ordering are wiped first.
    """
    self._fixup()
    tree = self.data.setdefault(language, OOBTree())
    if clear:
        tree.clear()
    # A new tree always uses the newest version.
    if not tree:
        version = self.version[language] = 2
    else:
        version = self.version.get(language, 1)
    order = self.order.setdefault(language, IOBTree())
    count = self.count.get(language, 0)
    if clear:
        order.clear()
        count = 0
    # Always migrate to newest version.
    if version == 1:
        def fix(path):
            # v1 -> v2: only the path separator character changes.
            return path.replace(LEGACY_PATH_SEPARATOR, PATH_SEPARATOR)
        # list(...) snapshots the keys so we can mutate while iterating.
        for i in list(order):
            path = order[i]
            order[i] = fix(path)
        for path in list(tree):
            value = tree.pop(path)
            tree[fix(path)] = value
        version = self.version[language] = 2
        logger.info(
            "Taxonomy '%s' upgraded to version %d for language '%s'." % (
                self.name, version, language))
    # Make sure we update the modification time.
    self.data[language] = tree
    # The following structure is used to expunge updated entries.
    inv = {}
    if not clear:
        for i, key in order.items():
            inv[key] = i
    seen = set()
    for key, value in items:
        if key in seen:
            logger.warning("Duplicate key entry: %r" % (key, ))
        seen.add(key)
        update = key in tree
        tree[key] = value
        order[count] = key
        count += 1
        # If we're updating, then we have to pop out the old ordering
        # information in order to maintain relative ordering of new items.
        if update:
            i = inv.get(key)
            if i is not None:
                del order[i]
    self.count[language] = count
# Bootstrap for the search engine: open the input files given on the
# command line, build the in-memory btrees, and load the stop-word set.
indexFile = open(sys.argv[1])
zoneFile = open(sys.argv[2])
stopWords = open(sys.argv[3])
pageRankFile = open(sys.argv[4])
titleFile = open(sys.argv[5])
classifFile = open(sys.argv[6])
# global dict which avoids doing unnecessary
# disk reads. The cache should be flushed
# before every query.
cache = dict()
zoneCache = dict()
# global reference to the btree kept in memory
btree = OOBTree()
zoneBtree = OOBTree()
# global map from docID to Euclidian normalization factor
normDict = dict()
zoneNorm = dict()
# create a set of stop words from the stopWords infile
# assuming that there is one word per line
# NOTE(review): stopWordSet must be defined earlier in this module.
for line in stopWords:
    stopWordSet.add(line.strip())
# construct the dictionaries as btrees
buildDictionary(indexFile, btree, normDict)
buildDictionary(zoneFile, zoneBtree, zoneNorm)
def load_sec_zones(self):
    """Set up the secure-zone state signal for this router.

    Replays the recorded inbound service messages and derives two history
    signals: ``sec_zone`` (state code 0..2) and ``sec_zone_ID`` (the
    payload-derived zone id, or ``(-1, -1)`` when not in a zone).
    """
    self.nome_sinal.append('sec_zone')
    self.historico_sinal.update({'sec_zone': OOBTree()})
    self.historico_sinal['sec_zone'].update({0: 0})
    self.nome_sinal.append('sec_zone_ID')
    self.historico_sinal.update({'sec_zone_ID': OOBTree()})
    self.historico_sinal['sec_zone_ID'].update({0: (-1, -1)})
    # Creating secure zones:
    # SET_SECURE_ZONE_SERVICE "00111" -- the upper part of the payload gives
    #   row/column of the lower-left and upper-right corners
    # SET_SZ_RECEIVED_SERVICE "11101" --
    # SET_EXCESS_SZ_SERVICE "11110" -- removes from the secure zone the region
    #   from the lower-left row/column up to the upper-right
    # SECURE_ZONE_CLOSED_SERVICE "01011" -- confirms the secure zone is closed
    #   (wrappers closed)
    # Removing secure zones:
    # OPEN_SECURE_ZONE_SERVICE "01010" -- same logic, lower-left corner up to
    #   the upper-right
    # SECURE_ZONE_OPENED_SERVICE "01100" -- confirms the secure zone is no
    #   longer active
    # The sec_zone state is numbered following the order above: 0 through 4.
    print(f"router {self.router_address}")
    # for inout in ['out','in']:
    inout = 'in'
    for at_tick in self.historico_sinal[inout + '_service']:
        val = self.get_value_at(at_tick, inout + '_service')
        # print(f"{at_tick} {inout} times:")
        if val[1] in [
                '00111'  # SET_SECURE_ZONE_SERVICE = 1
                # ,'11101' # SET_SZ_RECEIVED_SERVICE
                , '11110'  # SET_EXCESS_SZ_SERVICE = 0
                , '01011'  # SECURE_ZONE_CLOSED_SERVICE = 2
                , '01010'  # OPEN_SECURE_ZONE_SERVICE = 1
                , '01100'  # SECURE_ZONE_OPENED_SERVICE = 0
        ]:
            source = self.get_value_at(at_tick, inout + '_source')
            target = self.get_value_at(at_tick, inout + '_target')
            payload = self.get_value_at(at_tick, inout + '_payload')
            # Only react when this router lies inside the zone rectangle
            # described by the message's target/payload corners.
            if self.limits(self.router_address,
                           self.target_port_to_int(target[1]),
                           self.payload_port_to_int(payload[1])):
                print(
                    f"{at_tick} - {inout} service {services[val[1]]}, source: {source} target {self.target_port_to_int(target[1])} payload {self.payload_port_to_int(payload[1])}"
                )
                print(
                    f"is {self.router_address} inside "
                    f"{services[val[1]]} limits {self.limits(self.router_address,self.target_port_to_int(target[1]),self.payload_port_to_int(payload[1]))}\n"
                )
                if val[1] == '00111':  # SET_SECURE_ZONE_SERVICE = 1
                    self.historico_sinal['sec_zone'].update({at_tick: 1})
                    self.historico_sinal['sec_zone_ID'].update(
                        {at_tick: self.payload_port_to_int(payload[1])})
                elif val[1] == '11110':  # SET_EXCESS_SZ_SERVICE = 0
                    if self.get_value_at(
                            at_tick, 'sec_zone_ID')[1] == self.payload_port_to_int(
                                payload[1]):  # if it belongs to the RH's zone
                        if (self.router_address != self.payload_port_to_int(
                                payload[1])):  # if this router is not the RH itself
                            self.historico_sinal['sec_zone'].update(
                                {at_tick: 0})  # deactivate
                            self.historico_sinal['sec_zone_ID'].update(
                                {at_tick: (-1, -1)})  # drop out of the secure zone
                elif val[1] == '01011':  # SECURE_ZONE_CLOSED_SERVICE = 2
                    print(
                        f"habilitacao da zona segura em {self.router_address} sz {self.get_value_at(at_tick,'sec_zone_ID')[1]} payload {self.payload_port_to_int(payload[1])}"
                    )
                    if self.get_value_at(
                            at_tick, 'sec_zone_ID')[1] == self.payload_port_to_int(
                                payload[1]):
                        print("ativou")
                        self.historico_sinal['sec_zone'].update(
                            {at_tick: 2})
                elif val[1] == '01010':  # OPEN_SECURE_ZONE_SERVICE = 1
                    if self.get_value_at(
                            at_tick, 'sec_zone_ID')[1] == self.payload_port_to_int(
                                payload[1]):
                        self.historico_sinal['sec_zone'].update(
                            {at_tick: 1})
                elif val[1] == '01100':  # SECURE_ZONE_OPENED_SERVICE = 0
                    if self.get_value_at(
                            at_tick, 'sec_zone_ID')[1] == self.payload_port_to_int(
                                payload[1]):
                        self.historico_sinal['sec_zone'].update(
                            {at_tick: 0})
def initStorageUsers(self):
    """Create the ``attended_users`` tree if this instance lacks one."""
    if not hasattr(self, 'attended_users'):
        self.attended_users = OOBTree()
def __call__(self):
    """Render the recommendations admin view and handle its form actions.

    Supported form triggers: migrate-recommendations (renumber and rebuild
    the stored tree), add-recommendation, remove-recommendation,
    edit-topics, and download-excel.  Falls through to rendering the
    recommendations table.
    """
    alsoProvides(self.request, IDisableCSRFProtection)
    site = portal.get()
    storage = IRecommendationStorage(site)
    storage_recom = storage.get(STORAGE_KEY, None)
    if 'migrate-recommendations' in self.request.form:
        # Rebuild every stored recommendation with a fresh sequential id
        # and a normalised code, then replace the whole tree.
        new_recommendations = []
        for i, (_, recommendation) in enumerate(storage_recom.items()):
            id_recom = str(i + 1)
            rec_code = recommendation.code.split('/')
            # Four-part codes drop their third segment.
            if len(rec_code) == 4:
                rec_code = "/".join(
                    (rec_code[0], rec_code[1], rec_code[3].strip()))
            else:
                rec_code = "/".join(rec_code)
            recommendation = Recommendation(id_recom, rec_code,
                                            recommendation.topic,
                                            recommendation.text,
                                            recommendation.ms_region,
                                            recommendation.descriptors)
            # storage_recom.pop(recommendation.code, None)
            # storage_recom.pop(id_recom, None)
            # storage_recom.pop(int(id_recom), None)
            new_recommendations.append(recommendation)
        # asd = [x for x in storage_recom.keys()]
        # asdf = [x._id_recommendation for x in new_recommendations]
        # import pdb; pdb.set_trace()
        storage_recom = OOBTree()
        storage[STORAGE_KEY] = storage_recom
        for new_rec in new_recommendations:
            id_recom = new_rec._id_recommendation
            storage_recom[id_recom] = new_rec
    # First run (or empty storage): register an empty tree.
    if not storage_recom:
        storage_recom = OOBTree()
        storage[STORAGE_KEY] = storage_recom
    if 'add-recommendation' in self.request.form:
        form_data = self.request.form
        id_recom = form_data.get('rec_id', '')
        code = form_data.get('rec_code', '')
        topic = form_data.get('topic', '')
        text = form_data.get('rec_text', '')
        ms_region = form_data.get('ms_or_region', [])
        descriptors = form_data.get('descriptors', [])
        # Without an explicit id, continue after the current maximum.
        if not id_recom:
            max_id = max([
                int(_rec._id_recommendation)
                for _code, _rec in storage_recom.items()
            ])
            id_recom = str(int(max_id) + 1)
        recom = Recommendation(id_recom, code, topic, text, ms_region,
                               descriptors)
        storage_recom[id_recom] = recom
    if 'remove-recommendation' in self.request.form:
        form_data = self.request.form
        id_recom = form_data.get('rec_id', '')
        storage_recom.pop(id_recom)
    if 'edit-topics' in self.request.form:
        # Topics arrive as one textarea, one topic per line.
        topics = self.request.form.get('topics', '')
        topics = topics.split('\r\n')
        storage[TOPICS_STORAGE_KEY] = topics
    recommendations = []
    if len(storage_recom.items()):
        for code, recommendation in storage_recom.items():
            recommendations.append(recommendation.data_to_list())
    # Sort rows by their first column (the recommendation id).
    sorted_rec = sorted(recommendations, key=lambda i: i[0])
    if 'download-excel' in self.request.form:
        return self.download(sorted_rec)
    show_edit_buttons = self.can_view_assessment_data()
    self.recommendations_table = RecommendationsTable(
        recommendations=sorted_rec, show_edit_buttons=show_edit_buttons)
    return self.index()