def lambda_handler(event, session):
    """AWS Lambda entry point: route an Alexa request to its handler.

    Note: Lambda passes the invocation context as the second argument;
    it is named ``session`` here but never used directly.
    Returns the handler's response, or None for unknown request types.
    """
    import boto3
    from pydblite import Base

    # Inventory store kept in Lambda's writable /tmp directory.
    db = Base('/tmp/TechHub.pld')
    if db.exists():
        db.open()
    else:
        db.create('name', 'quantity')
        db.open()

    dynamodb = boto3.client('dynamodb')
    print("[INTENT_HANDLER]")
    print("event.session.application.applicationId=" +
          event['session']['application']['applicationId'])
    # NOTE(review): ``logger`` is not defined in this function; presumably a
    # module-level logger exists — confirm, otherwise this raises NameError.
    logger.info('got event{}'.format(event))

    if event['session']['new']:
        on_session_started({'requestId': event['request']['requestId']},
                           event['session'])

    # Fix: initialize output so an unrecognized request type no longer
    # raises UnboundLocalError at the return statement.
    output = None
    if event['request']['type'] == "LaunchRequest":
        output = on_launch(event['request'], event['session'])
    elif event['request']['type'] == "IntentRequest":
        output = on_intent(event['request'], event['session'], db, dynamodb)
    elif event['request']['type'] == "SessionEndedRequest":
        output = on_session_ended(event['request'], event['session'])

    db.commit()
    return output
def generateWeights(graph, weightFile, param):
    """Store decayed time-scores (TS02/TS05/TS08) plus the Jaccard domain
    similarity (JC) for every linked node pair in *graph*."""
    pdb = Base(weightFile)
    pdb.create('pair', 'node1', 'node2', 'TS02', 'TS05', 'TS08', 'JC')
    pdb.create_index('pair')
    ordered = sorted(graph.nodes())
    for idx, node in enumerate(ordered):
        # Only pairs with node < other, each pair visited once.
        for other in ordered[idx + 1:]:
            if not graph.has_edge(node, other):
                continue
            link_data = [attrs for u, v, attrs in
                         graph.edges([node, other], data=True)
                         if (u == node and v == other)
                         or (u == other and v == node)]
            link_times = [int(attrs['time']) for attrs in link_data]
            # Keyword bags of each endpoint, excluding the shared edge.
            bag_a = [eval(attrs['keywords']) for u, v, attrs in
                     graph.edges([node], data=True)
                     if u != other and v != other]
            bag_b = [eval(attrs['keywords']) for u, v, attrs in
                     graph.edges([other], data=True)
                     if u != node and v != node]
            publications = len(link_data)
            # k: time elapsed between the newest link and t0.
            k = int(param.t0_) - max(link_times)
            pdb.insert(str(node) + ';' + str(other), node, other,
                       publications * (1 - 0.2) ** k,
                       publications * (1 - 0.5) ** k,
                       publications * (1 - 0.8) ** k,
                       get_jacard_domain(bag_a, bag_b))
    pdb.commit()
    return pdb
def generateWeights(graph, weightFile, param):
    """Store decayed time-scores (TS02/TS05/TS08) for every linked pair."""
    pdb = Base(weightFile)
    pdb.create('pair', 'node1', 'node2', 'TS02', 'TS05', 'TS08')
    pdb.create_index('pair')
    ordered = sorted(graph.nodes())
    for idx, node in enumerate(ordered):
        for other in ordered[idx + 1:]:
            if not graph.has_edge(node, other):
                continue
            link_data = [attrs for u, v, attrs in
                         graph.edges([node, other], data=True)
                         if (u == node and v == other)
                         or (u == other and v == node)]
            link_times = [int(attrs['time']) for attrs in link_data]
            publications = len(link_data)
            # k: time elapsed between the newest link and t0.
            k = int(param.t0_) - max(link_times)
            pdb.insert(str(node) + ';' + str(other), node, other,
                       publications * (1 - 0.2) ** k,
                       publications * (1 - 0.5) ** k,
                       publications * (1 - 0.8) ** k)
    pdb.commit()
    return pdb
def do_Delete (self, result, request, args):
    """Delete the record matching args['testId'].

    Responds 200 with the deleted record, or 404 with an empty list.
    Fix: delete/commit only when a record was found — deleting
    unconditionally would raise IndexError on an empty result.
    """
    def transfomType(x):
        # Python 2: normalize unicode to str; other values pass through.
        if isinstance(x, unicode):
            return str(x)
        return x

    ####### Replace this section by your logic #######
    db = Base('database_service6.pdl')
    db.create('testId', 'testMessage', mode="open")
    result = db(testId = int(args['testId']))
    if len(result) == 0:
        responseCode = 404  #ResponseCode.Ok
        responseBody = json.dumps(result, sort_keys=True, indent=4,
                                  separators=(',', ': '))
    else:
        responseCode = 200  #ResponseCode.Ok
        responseBody = json.dumps(result[0], sort_keys=True, indent=4,
                                  separators=(',', ': '))
        db.delete(result[0])
        db.commit()
    ####### Replace this section by your logic #######
    request.setResponseCode(responseCode)
    resp = utils.serviceResponse(responseCode, responseBody)
    return resp
def do_Post (self, result, request, args):
    """Insert a (testId, testMessage) record from the JSON request body.

    Always responds 200 with an empty list body.
    Fix: the request body was parsed with json.loads twice; parse once.
    """
    def transfomType(x):
        # Python 2: normalize unicode to str; other values pass through.
        if isinstance(x, unicode):
            return str(x)
        return x

    requestBody = args['requestContent']
    ####### Replace this section by your logic #######
    payload = json.loads(requestBody)  # parse once instead of twice
    vTestId = transfomType(payload['testId'])
    vTestMessage = transfomType(payload['testMessage'])
    responseCode = 200  #ResponseCode.Ok
    db = Base('database_service6.pdl')
    db.create('testId', 'testMessage', mode="open")
    db.insert(testId = vTestId, testMessage = vTestMessage)
    db.commit()
    result = []
    responseBody = json.dumps(result, sort_keys=True, indent=4,
                              separators=(',', ': '))
    ####### Replace this section by your logic #######
    request.setResponseCode(responseCode)
    resp = utils.serviceResponse(responseCode, responseBody)
    return resp
def calculatingWeights(graph, nodesnotLinked, database, calculatingFile):
    """Accumulate WCNFI/WAAFI features over the common neighbours of each
    unlinked node pair and persist one row per pair.

    Fixes: removed unused locals (the neighbour-set lengths were computed
    and never read) and stray semicolons; manual counter replaced by
    enumerate.
    """
    pdb = Base(calculatingFile)
    pdb.create('node1', 'node2', 'WCNFI', 'WAAFI')
    pdb.create_index('node1', 'node2')
    qtyofNodesToProcess = len(nodesnotLinked)
    for element, pair in enumerate(nodesnotLinked, start=1):
        FormatingDataSets.printProgressofEvents(
            element, qtyofNodesToProcess,
            "Calculating features for nodes not liked: ")
        neighbors_node1 = all_neighbors(graph, pair[0])
        neighbors_node2 = all_neighbors(graph, pair[1])
        WCNFI = 0
        WAAFI = 0
        # Sum the per-common-neighbour contributions.
        for cn in neighbors_node1.intersection(neighbors_node2):
            item = get_partOfWeightCalculating(graph, database, pair, cn)
            WCNFI += item['WCN']
            WAAFI += item['WAA']
        pdb.insert(str(pair[0]), str(pair[1]), WCNFI, WAAFI)
    pdb.commit()
    return pdb
def generateWeights(graph, weightFile, param):
    """Store nine decayed scores FTI01..FTI09 (publications * 0.d**k,
    d = 1..9) for every linked node pair."""
    pdb = Base(weightFile)
    pdb.create('pair', 'node1', 'node2', 'FTI01', 'FTI02', 'FTI03', 'FTI04',
               'FTI05', 'FTI06', 'FTI07', 'FTI08', 'FTI09')
    pdb.create_index('pair')
    ordered = sorted(graph.nodes())
    for idx, node in enumerate(ordered):
        for other in ordered[idx + 1:]:
            if not graph.has_edge(node, other):
                continue
            link_data = [attrs for u, v, attrs in
                         graph.edges([node, other], data=True)
                         if (u == node and v == other)
                         or (u == other and v == node)]
            link_times = [int(attrs['time']) for attrs in link_data]
            publications = len(link_data)
            # k: time elapsed between the newest link and t0.
            k = int(param.t0_) - max(link_times)
            # FTI0d = publications * (0.d ** k); d/10.0 yields the same
            # double as the literal 0.d.
            scores = [publications * ((d / 10.0) ** k) for d in range(1, 10)]
            pdb.insert(str(node) + ';' + str(other), node, other, *scores)
    pdb.commit()
    return pdb
class Dao_in_memory():
    """In-memory pydblite store of resource blocks keyed by address."""

    def __init__(self, region_name):
        self.db = Base(region_name, save_to_file=False)
        self.db.create('address', 'band', 'latency', 'resources',
                       mode="override")

    def add_blocks(self, blocks):
        # One fresh record per block dict.
        for blk in blocks:
            self.db.insert(address=blk.get('address'),
                           band=blk.get('band'),
                           latency=blk.get('latency'),
                           resources=blk.get('resources'))

    def update_blocks(self, blocks):
        # Update matching records in place; a missing address is reported
        # on stdout and skipped, never fatal.
        for blk in blocks:
            try:
                existing = self.db(address=blk.get('address'))[0]
                self.db.update(existing,
                               band=blk.get('band'),
                               latency=blk.get('latency'),
                               resources=blk.get('resources'))
            except IndexError:
                print ('Not present')

    def get_block(self, address):
        # First matching record, or None when absent.
        matches = self.db(address=address)
        return matches[0] if matches else None

    def get_all_blocks(self):
        return self.db
class Posts:
    """Persistence layer for Post objects backed by a pydblite file."""

    def __init__(self, filename, erase_db):
        self.db = Base(filename)
        mode = "override" if erase_db else "open"
        self.db.create('author', 'content', 'date', mode=mode)

    def addPost(self, post):
        """ Persist a Post object in db and returns auto-generated id. """
        post.id = self.db.insert(author=post.author,
                                 content=post.content,
                                 date=post.date)
        self.db.commit()
        return post.id

    def getPost(self, id):
        """ Get a post by its id. Returns a Post object or None if id is not found. """
        if id not in self.db:
            return None
        return self.__createPost(self.db[id])

    def getPosts(self, from_date=None, to_date=None, author=None):
        """ Get all posts matching optionally provided conditions. Returns a list (can be empty). """
        selection = self.db.filter()
        if from_date is not None:
            selection = selection & (self.db("date") > from_date)
        if to_date is not None:
            selection = selection & (self.db("date") < to_date)
        if author is not None:
            selection = selection & (self.db("author") == author)
        return [self.__createPost(rec) for rec in selection]

    def getPostsCount(self):
        """ Get total number of posts in db. """
        return len(self.db)

    def __createPost(self, db_entry):
        # '__id__' is pydblite's auto-generated record id.
        return Post(db_entry['author'], db_entry['content'],
                    db_entry['date'], db_entry['__id__'])
class MemDB(object):
    """Owns the two pydblite bases used by centric: partitions and scanners."""

    def __init__(self):
        self._pdb = Base('centric.partition')
        self._sdb = Base('centric.scanner')
        # Partition table: one row per partition assignment.
        self._pdb.create('pid', 'maxSeq', 'minSeq', 'tableName', 'assigned',
                         'splitNum', 'phase', 'creator', 'createTime',
                         'lastUpdateTime', mode="override")
        # Scanner table: one row per registered scanner endpoint.
        self._sdb.create('startSeq', 'endSeq', 'intervalSecs', 'scannerId',
                         'pid', 'creator', 'createTime', 'lastUpdateTime',
                         'url', 'port', mode="override")

    def db(self):
        # Returns the (partition_base, scanner_base) pair.
        return self._pdb, self._sdb
def run_class_configuration(request): if request.method == 'POST': db = Base('backendDB.pdl') if db.exists(): db.open() else: db.create('Type','Log', 'Run', 'Prefix','Rule','Threshold', 'TimeStamp', 'Status') configuration_json = json.loads(request.body) print configuration_json log = configuration_json["log"] prefix = configuration_json['prefix'] rule = configuration_json['rule'] threshold = configuration_json['threshold'] # Encode the file. encoding.encode(log, prefix) for encodingMethod in configuration_json['encoding']: for clustering in configuration_json['clustering']: for classification in configuration_json['classification']: django_rq.enqueue(tasks.classifierTask, log, prefix, encodingMethod, clustering, classification, rule, threshold) run = classification + '_' + encodingMethod + '_' + clustering records = [r for r in db if r['Run'] == run and r['Prefix'] == str(prefix) and r['Log'] == log and r['Rule'] == rule and r['Threshold'] == str(threshold)] print records if not records: db.insert("Classification", log, run, str(prefix), rule, str(threshold), time.strftime("%b %d %Y %H:%M:%S", time.localtime()), 'queued') else: db.update(records[0], TimeStamp=time.strftime("%b %d %Y %H:%M:%S", time.localtime()), Status= 'queued') db.commit() return HttpResponse("YOLO")
def generateWeights(graph, weightFile, param):
    """Store FI = publications * (jaccard_domain + 1) for each linked pair."""
    pdb = Base(weightFile)
    pdb.create('pair', 'node1', 'node2', 'FI')
    pdb.create_index('pair')
    ordered = sorted(graph.nodes())
    for idx, node in enumerate(ordered):
        for other in ordered[idx + 1:]:
            if not graph.has_edge(node, other):
                continue
            link_data = [attrs for u, v, attrs in
                         graph.edges([node, other], data=True)
                         if (u == node and v == other)
                         or (u == other and v == node)]
            publications = len(link_data)
            # Keyword bags of each endpoint, excluding the shared edge.
            bag_a = [eval(attrs['keywords']) for u, v, attrs in
                     graph.edges([node], data=True)
                     if u != other and v != other]
            bag_b = [eval(attrs['keywords']) for u, v, attrs in
                     graph.edges([other], data=True)
                     if u != node and v != node]
            jc = get_jacard_domain(bag_a, bag_b)
            pdb.insert(str(node) + ';' + str(other), node, other,
                       publications * (jc + 1))
    pdb.commit()
    return pdb
class Data:
    """Collects (container_manager, network_plugin, operating_system)
    triples in an in-memory pydblite base and renders support matrices."""

    def __init__(self):
        self.db = Base(':memory:')
        self.db.create('container_manager', 'network_plugin',
                       'operating_system')

    def set(self, container_manager, network_plugin, operating_system):
        # Record one supported combination.
        self.db.insert(container_manager=container_manager,
                       network_plugin=network_plugin,
                       operating_system=operating_system)
        self.db.commit()

    def exists(self, container_manager, network_plugin, operating_system):
        # True when this exact triple has been recorded.
        matches = ((self.db("container_manager") == container_manager)
                   & (self.db("network_plugin") == network_plugin)
                   & (self.db("operating_system") == operating_system))
        return len(matches) > 0

    def jinja(self):
        template = env.get_template('table.md.j2')
        # reverse sort container_engines to get Docker first in the list
        container_engines = sorted(
            self.db.get_unique_ids('container_manager'), reverse=True)
        network_plugins = sorted(self.db.get_unique_ids("network_plugin"))
        operating_systems = sorted(self.db.get_unique_ids("operating_system"))
        return template.render(container_engines=container_engines,
                               network_plugins=network_plugins,
                               operating_systems=operating_systems,
                               exists=self.exists)

    def markdown(self):
        # Hand-rolled markdown tables: one per container manager.
        parts = []
        for container_manager in self.db.get_unique_ids('container_manager'):
            parts.append("# " + container_manager + "\n")
            headers = '|OS / CNI| '
            underline = '|----|'
            for network_plugin in self.db.get_unique_ids("network_plugin"):
                headers += network_plugin + ' | '
                underline += '----|'
            parts.append(headers + "\n" + underline + "\n")
            for operating_system in self.db.get_unique_ids("operating_system"):
                parts.append('| ' + operating_system + ' | ')
                for network_plugin in self.db.get_unique_ids("network_plugin"):
                    if self.exists(container_manager, network_plugin,
                                   operating_system):
                        emoji = ':white_check_mark:'
                    else:
                        emoji = ':x:'
                    parts.append(emoji + ' | ')
                parts.append("\n")
        pprint(self.db.get_unique_ids('operating_system'))
        pprint(self.db.get_unique_ids('network_plugin'))
        return ''.join(parts)
def save_in_db(comic_url):
    """Record *comic_url* with the current timestamp.

    Returns True when the url now appears exactly once (i.e. it was new).
    """
    store = Base(os.path.join(SCRIPT_DIR, 'comics.db'))
    store.create('comic', 'time', mode="open")
    store.insert(comic=comic_url, time=datetime.datetime.now())
    store.commit()
    return len(store(comic=comic_url)) == 1
def open_db():
    """Open (or create) the quotes database and index it by 'abbr'."""
    db = Base(DB_FILE)
    db.create('abbr', 'name', 'available', 'volume', 'buy', 'sell', 'date',
              mode="open")
    # Sanity check: the backing file must exist once create() has run.
    if not db.exists():
        raise Exception('Database error')
    db.create_index('abbr')
    return db
class YahoourlsearcherPipeline(object):
    # Scrapy pipeline: logs every scraped URL, stores it in a pydblite
    # database, and de-duplicates the database when the spider closes.

    def open_spider(self, spider):
        filename = "urls_log.txt"
        self.log_target = codecs.open(filename, 'a+', encoding='utf-8')
        # NOTE(review): truncate() on the 'a+' handle empties the previous
        # log despite the append mode — confirm this is intentional.
        self.log_target.truncate()
        self.db = Base('URL_database.pdl')
        self.db.create('url', 'date', mode="open")
        self.log_target.write("***New url scraping session started at: "+ str(datetime.datetime.strftime(datetime.datetime.now(), ' %Y-%m-%d %H:%M:%S ')) + " ***" +"\n")
        print("***New url scraping session started at: "+ str(datetime.datetime.strftime(datetime.datetime.now(), ' %Y-%m-%d %H:%M:%S ')) + " ***" +"\n")
        self.log_target.write("*** Total url in the Database BEFORE new search: "+ str(len(self.db)) + " ***" + "\n")
        # Run spider_closed when the spider finishes.
        dispatcher.connect(self.spider_closed, signals.spider_closed)

    def process_item(self, item, spider):
        # Persist and log each scraped url/date pair.
        self.db.insert(url=item['url'], date=item['date'] )
        self.log_target.write(item['url'] + " " + item['date'] + "\n")
        self.db.commit()
        return item

    def spider_closed(self, spider):
        # Snapshot all records, then rebuild the database with one entry
        # per unique url.
        url_structure = []
        print ("End of database")
        i = 1
        for r in self.db:
            #print (str(r["url"]) + " " + str(r["date"]) + " \n")
            url_structure.append(url_date(r["url"],r["date"]))
            i += 1
        print (str(i) + "Url in the DB \n")
        self.log_target.write("Session ends at: "+ str(datetime.datetime.strftime(datetime.datetime.now(), ' %Y-%m-%d %H:%M:%S ')) + "\n")
        print ("Session ends at: "+ str(datetime.datetime.strftime(datetime.datetime.now(), ' %Y-%m-%d %H:%M:%S ')) + "\n")
        self.log_target.write("*** Total url in the Database AFTER the search: "+ str(len(self.db)) + " ***" + "\n")
        print ("Elementi presenti nel database: "+ str(len(self.db)) + " in struttura: " + str(len(url_structure)))
        # Delete every record so the unique set can be re-inserted below.
        all_record = []
        for r in self.db:
            all_record.append(r)
        self.db.delete(all_record)
        print ("Elementi presenti nel database: "+ str(len(self.db)))
        # set here: keep only one entry per url (last one wins)
        url_structure = {x.url: x for x in url_structure}.values()
        for any_url in url_structure:
            self.db.insert(any_url.url, any_url.date)
        print ("Elementi presenti nel database: "+ str(len(self.db)))
        self.db.commit()
        self.log_target.write("--- After SET operation: "+ str(len(self.db)) + " --- " + "\n" + "\n" + "\n" + "\n")
        self.log_target.close()
def save_in_db(channel, count):
    """Append *count* for *channel* unless it equals the most recent
    stored value. Always returns True."""
    db = Base(os.path.join(SCRIPT_DIR, f'{channel}_members.db'))
    db.create('members', 'time', mode="open")
    total = len(db)
    # Latest stored member count; 0 when the database is empty.
    latest = db[total - 1]['members'] if total else 0
    if count != latest:
        db.insert(members=count, time=datetime.datetime.now())
        db.commit()
    return True
class DBPipeline(object):
    # Scrapy pipeline: writes each scraped question/answer item into a
    # pydblite database and logs per-session statistics.

    def __init__(self):
        #Creating log file
        filename = "session_log.txt"
        self.log_target = codecs.open(filename, 'a+', encoding='utf-8')
        # NOTE(review): truncate() empties the previous log despite the
        # 'a+' append mode — confirm this is intentional.
        self.log_target.truncate()
        self.log_target.write("***New session started at: "+ str(datetime.datetime.strftime(datetime.datetime.now(), ' %Y-%m-%d %H:%M:%S ')) + " ***" +"\n")
        #Creating database for items
        self.db = Base('QuestionThreadExtracted.pdl')
        self.db.create('uid', 'type', 'author', 'title', 'text', 'date_time', 'tags', 'views', 'answers', 'resolve', 'upvotes', 'url', mode="open")
        #Some data for the log file
        self.number_of_questions = 0
        self.number_of_answers = 0
        self.last_id=0
        # Run spider_closed when the spider finishes.
        dispatcher.connect(self.spider_closed, signals.spider_closed)

    def process_item(self, item, spider):
        # Persist the item, then update the session counters.
        self.db.insert(uid=item['uid'], type=item['type'], author=item['author'], title=item['title'], text=item['text'], date_time=item['date_time'], tags=item['tags'], views=item['views'], answers=item['answers'], resolve=item['resolve'], upvotes=item['upvotes'], url=item['url'] )
        #Count questions and answers
        if "question" in item['type']:
            self.number_of_questions+=1
            # Track the highest question uid seen this session.
            if self.last_id<item['uid']:
                self.last_id=item['uid']
        else:
            self.number_of_answers+=1
        self.db.commit()
        return item

    def spider_closed(self, spider):
        # Write the session summary and close the log.
        self.log_target.write("Questions founded: "+ str(self.number_of_questions) + "\n")
        self.log_target.write("Answers founded: "+ str(self.number_of_answers) + "\n")
        self.log_target.write("Last UID: "+str(self.last_id) + "\n" + "\n")
        self.log_target.write("***Session End at: "+ str(datetime.datetime.strftime(datetime.datetime.now(), ' %Y-%m-%d %H:%M:%S ')) + " ***" +"\n")
        self.log_target.close()
def save_in_db(channel, count, date):
    """Store ``"<count> <unit>"`` for *channel* at *date*, skipping
    values already present.

    The unit word is inherited from the first stored record (e.g.
    "subscribers"). Fix: the original indexed ``db[0]`` unconditionally and
    crashed with IndexError on an empty database; now the unit falls back
    to an empty string (the sibling count-only variant guards similarly).
    Always returns True.
    """
    db = Base(os.path.join(SCRIPT_DIR, f'{channel}_members.db'))
    db.create('members', 'time', mode="open")
    # NOTE(review): assumes stored 'members' values look like "<n> <unit>" —
    # confirm against the writer of the first record.
    variant = db[0]['members'].split()[1] if len(db) else ''
    value = f"{count} {variant}"
    already_present = [r for r in db if r['members'] == value]
    if not already_present:
        db.insert(members=value, time=date)
        db.commit()
    return True
class DBPipeline(object):
    # Pipeline to write an Item in the database

    #: item keys persisted for every record, in schema order
    FIELDS = ('uid', 'type', 'author', 'title', 'text', 'date_time',
              'tags', 'views', 'answers', 'resolve', 'upvotes', 'url')

    def open_spider(self, spider):
        # Creation of DB
        self.db = Base(spider.database)
        self.db.create(*self.FIELDS, mode="override")
        dispatcher.connect(self.spider_closed, signals.spider_closed)

    def process_item(self, item, spider):
        # Writing of the item
        self.db.insert(**{name: item[name] for name in self.FIELDS})
        self.db.commit()
        return item

    def spider_closed(self, spider):
        # Number of items saved, shown at the end
        questions = sum(1 for r in self.db if r["type"] == "question")
        answers = len(self.db) - questions
        print('Number of questions and answers found:')
        print(str(questions) + ' questions \n')
        print(str(answers) + ' answers \n')
def generateDataForCalculate(self):
    """Prepare the auxiliary pydblite base used for feature calculation
    over the training graph.

    Fix: ``== None`` replaced with the identity check ``is None`` (PEP 8).
    NOTE(review): ``adb`` is created but never populated or committed here,
    and the loop results (``common``, ``arestas``) are discarded — this
    looks truncated; confirm against the full source.
    """
    if self.trainnigGraph is None:
        self.generating_Training_Graph()
    _nodes = sorted(self.trainnigGraph.nodes())
    adb = Base(self.filePathTrainingGraph + ".calc.pdl")
    adb.create('pairNodes', 'common', 'time', 'domain')
    for node in sorted(_nodes):
        othernodes = set(n for n in _nodes if n > node)
        for other in othernodes:
            common = set(networkx.common_neighbors(self.trainnigGraph,
                                                   node, other))
            arestas = self.trainnigGraph.edges([node, other], True)
def generateWeights(graph, weightFile, param):
    """Store FTI01..FTI09 = publications * 0.8**k / (0.d**jc), d = 1..9,
    combining time decay with the Jaccard-domain similarity."""
    pdb = Base(weightFile)
    pdb.create('pair', 'node1', 'node2', 'FTI01', 'FTI02', 'FTI03', 'FTI04',
               'FTI05', 'FTI06', 'FTI07', 'FTI08', 'FTI09')
    pdb.create_index('pair')
    ordered = sorted(graph.nodes())
    for idx, node in enumerate(ordered):
        for other in ordered[idx + 1:]:
            if not graph.has_edge(node, other):
                continue
            link_data = [attrs for u, v, attrs in
                         graph.edges([node, other], data=True)
                         if (u == node and v == other)
                         or (u == other and v == node)]
            link_times = [int(attrs['time']) for attrs in link_data]
            publications = len(link_data)
            # k: time elapsed between the newest link and t0.
            k = int(param.t0_) - max(link_times)
            decay = (0.8) ** k
            # Keyword bags of each endpoint, excluding the shared edge.
            bag_a = [eval(attrs['keywords']) for u, v, attrs in
                     graph.edges([node], data=True)
                     if u != other and v != other]
            bag_b = [eval(attrs['keywords']) for u, v, attrs in
                     graph.edges([other], data=True)
                     if u != node and v != node]
            jc = get_jacard_domain(bag_a, bag_b)
            # d/10.0 yields the same double as the literal 0.d.
            scores = [publications * (decay * (1 / ((d / 10.0) ** jc)))
                      for d in range(1, 10)]
            pdb.insert(str(node) + ';' + str(other), node, other, *scores)
    pdb.commit()
    return pdb
def generateWeights(graph, weightFile, param):
    """Store CTS scores: time decay (0.6**k) amplified by three
    Jaccard-domain terms."""
    pdb = Base(weightFile)
    pdb.create('pair', 'node1', 'node2', 'CTS02', 'CTS05', 'CTS08')
    pdb.create_index('pair')
    ordered = sorted(graph.nodes())
    for idx, node in enumerate(ordered):
        for other in ordered[idx + 1:]:
            if not graph.has_edge(node, other):
                continue
            link_data = [attrs for u, v, attrs in
                         graph.edges([node, other], data=True)
                         if (u == node and v == other)
                         or (u == other and v == node)]
            link_times = [int(attrs['time']) for attrs in link_data]
            # Keyword bags of each endpoint, excluding the shared edge.
            bag_a = [eval(attrs['keywords']) for u, v, attrs in
                     graph.edges([node], data=True)
                     if u != other and v != other]
            bag_b = [eval(attrs['keywords']) for u, v, attrs in
                     graph.edges([other], data=True)
                     if u != node and v != node]
            publications = len(link_data)
            # k: time elapsed between the newest link and t0.
            k = int(param.t0_) - max(link_times)
            jc = get_jacard_domain(bag_a, bag_b)
            decay = (0.6) ** k
            # Each denominator (1-0.x)**jc shrinks as jc grows, boosting
            # the score for similar keyword domains.
            CTS02 = publications * (decay * (1 / ((1 - 0.7) ** jc)))
            CTS05 = publications * (decay * (1 / ((1 - 0.8) ** jc)))
            CTS08 = publications * (decay * (1 / ((1 - 0.9) ** jc)))
            pdb.insert(str(node) + ';' + str(other), node, other,
                       CTS02, CTS05, CTS08)
    pdb.commit()
    return pdb
def generateDataForCalculate(self):
    """Prepare the auxiliary pydblite base used for feature calculation
    over the training graph (formatted twin of the block above).

    Fix: ``== None`` replaced with the identity check ``is None`` (PEP 8).
    NOTE(review): ``adb`` is created but never populated or committed here,
    and the loop results are discarded — looks truncated; confirm against
    the full source.
    """
    if self.trainnigGraph is None:
        self.generating_Training_Graph()
    _nodes = sorted(self.trainnigGraph.nodes())
    adb = Base(self.filePathTrainingGraph + ".calc.pdl")
    adb.create('pairNodes', 'common', 'time', 'domain')
    for node in sorted(_nodes):
        othernodes = set(n for n in _nodes if n > node)
        for other in othernodes:
            common = set(
                networkx.common_neighbors(self.trainnigGraph, node, other))
            arestas = self.trainnigGraph.edges([node, other], True)
def new_repo(db_path):
    """Build a PBS job database from the current queue listing.

    For each queued job id: create its working directory, derive a unique
    id from that path, and persist the triple. Returns the pydblite base.
    """
    job_db = Base(os.path.join(db_path, 'PBS_job_database.pdl'))
    job_db.create('PBS_id', 'uniq_id', 'work_dir')
    for line in list_queue():
        # First number in the queue line is the PBS job id.
        pbs_id = str(re.search(r'\d+', line).group())
        work_dir = init_work_dir(pbs_id)
        # Last number embedded in the work dir path is the unique id.
        uniq_id = re.findall(r'\d+', work_dir)[-1]
        job_db.insert(PBS_id=pbs_id, work_dir=work_dir, uniq_id=uniq_id)
    job_db.commit()
    return job_db
def convertcsv2db(csvpath, dbpath): #Converts a CSV file to a PyDBLite database db = Base(dbpath) try: csvfile = open(csvpath, 'rb') except csv.Error: print "Could not open CSV file at " + csvpath + "\n" reader = csv.reader(csvfile) header = reader.next() try: db.create(*header) except IOError: print "Existing DB at " + dbpath + "\n" for row in reader: db.insert(*row) db.commit()
def generate_finalResult(order, topRank, TestGraph, FileNameResult):
    """Persist each ranked pair with a hit flag ('sucesso': the predicted
    edge exists in TestGraph) and a top-rank flag (position <= topRank)."""
    pdb = Base(FileNameResult)
    pdb.create('node1', 'node2', 'value', 'sucesso', 'topRank')
    pdb.create_index('node1', 'node2')
    for indice, nodeToCheck in enumerate(order, start=1):
        isTopRank = indice <= topRank
        # has_edge already yields the True/False the two branches inserted.
        hit = TestGraph.has_edge(nodeToCheck['node1'], nodeToCheck['node2'])
        pdb.insert(str(nodeToCheck['node1']), nodeToCheck['node2'],
                   nodeToCheck['value'], hit, isTopRank)
    pdb.commit()
    return pdb
def __init__(self, database_name=None):
    """Load scraped question-thread URLs from *database_name* into
    self.url_to_scrape, de-duplicated by url. Exits when the DB is empty."""
    print ("Opening " + database_name)
    # Choose the DB of the Question Thread URL
    db_r = Base(database_name)
    db_r.create('url', 'date', mode="open")
    # Guard clause: an empty or missing database is fatal.
    if len(db_r) == 0:
        print ("ERROR: Database not found or empty")
        sys.exit()
    print ("Database elements: " + str(len(db_r)))
    for r in db_r:
        self.url_to_scrape.append(UrlDate(r["url"], r["date"]))
    # Making a SET of the Database in order to delete duplicate URLS
    self.url_to_scrape = {x.url: x for x in self.url_to_scrape}.values()
    print ("Database elements after set operation: " + str(len(db_r)))
def get_cursor(db_name):
    """Return an opened pydblite base for the logical name 'weather_data'
    or 'locations'; raises for any other name."""
    if db_name == 'weather_data':
        db = Base('storage/weather_data.pdl')
        if not db.exists():
            db.create('low', 'tmw', 'high', 'temp', 'date', 'text', 'code',
                      'history', 'uniq_id', 'location', 'astronomy',
                      'atmosphere', 'country_name', 'created_date',
                      'location_name')
    elif db_name == 'locations':
        db = Base('storage/locations.pdl')
        if not db.exists():
            db.create('uniq_id', 'location', 'created_date')
    else:
        raise Exception('Please Enter Valid Name!')
    return db.open()
def checkCondition(cls, result): if result == "error": return "erro" else: db = Base('database_service1.pdl') db.create('cod', 'message', mode="open") db.insert(cod='1', message='valid') db.insert(cod='2', message='not valid') db.commit() #for rec in (db("age") > 30): for rec in db: print rec["cod"] +' '+ rec["message"] return "ok"
def generateWeights(graph, weightFile, param):
    """Store FI = number of co-publications for each linked node pair."""
    pdb = Base(weightFile)
    pdb.create('pair', 'node1', 'node2', 'FI')
    pdb.create_index('pair')
    ordered = sorted(graph.nodes())
    for idx, node in enumerate(ordered):
        for other in ordered[idx + 1:]:
            if not graph.has_edge(node, other):
                continue
            link_data = [attrs for u, v, attrs in
                         graph.edges([node, other], data=True)
                         if (u == node and v == other)
                         or (u == other and v == node)]
            pdb.insert(str(node) + ';' + str(other), node, other,
                       len(link_data))
    pdb.commit()
    return pdb
def get_all_record_data():
    """Serialize all NFC auth records as a JSON-style array string.

    Keeps the exact historical formatting: 'time' is emitted unquoted
    (assumed numeric), 'name' and 'device' are quoted verbatim.
    """
    db = Base('nfc_auth_ok_rec.pdl')
    if not db.exists():
        db.create('time', 'device', 'name')
    db.open()
    objs = ['{\"time\":' + str(r['time']) +
            ',\"name\":\"' + r['name'] +
            '\",\"device\":\"' + r['device'] + '\"}'
            for r in db]
    # join reproduces the original comma-between-records layout.
    return '[' + ','.join(objs) + ']'
class DBPipeline(object):
    # Pipeline to write an Item in the database

    def open_spider(self, spider):
        # Creation of DB
        self.db = Base(spider.database)
        self.db.create('uid', 'type', 'author', 'title', 'text',
                       'date_time', 'tags', 'views', 'answers', 'resolve',
                       'upvotes', 'url', mode="override")
        dispatcher.connect(self.spider_closed, signals.spider_closed)

    def process_item(self, item, spider):
        # Writing of the item
        record = {field: item[field]
                  for field in ('uid', 'type', 'author', 'title', 'text',
                                'date_time', 'tags', 'views', 'answers',
                                'resolve', 'upvotes', 'url')}
        self.db.insert(**record)
        self.db.commit()
        return item

    def spider_closed(self, spider):
        # Number of items saved, shown at the end
        i = 0
        j = 0
        for r in self.db:
            if r["type"] == "question":
                i += 1
            else:
                j += 1
        print ('Number of questions and answers found:')
        print (str(i) + ' questions \n')
        print (str(j) + ' answers \n')
def calculatingWeights(graph, nodesnotLinked, database, calculatingFile):
    """Accumulate the six weighted CN/AA features over the common
    neighbours of each unlinked pair and persist one row per pair.

    Fixes: removed unused locals (the neighbour-set lengths and the
    commented-out CNWJC/AAWJC accumulators were never used) and replaced
    the manual counter with enumerate.
    """
    pdb = Base(calculatingFile)
    pdb.create('node1', 'node2', 'cnWTS02', 'cnWTS05', 'cnWTS08',
               'aaWTS02', 'aaWTS05', 'aaWTS08')
    pdb.create_index('node1', 'node2')
    qtyofNodesToProcess = len(nodesnotLinked)
    for element, pair in enumerate(nodesnotLinked, start=1):
        FormatingDataSets.printProgressofEvents(
            element, qtyofNodesToProcess,
            "Calculating features for nodes not liked: ")
        neighbors_node1 = all_neighbors(graph, pair[0])
        neighbors_node2 = all_neighbors(graph, pair[1])
        CNWts02Feature = 0
        CNWts05Feature = 0
        CNWts08Feature = 0
        AAWts02Feature = 0
        AAWts05Feature = 0
        AAWts08Feature = 0
        # Sum the per-common-neighbour contributions of each feature.
        for cn in neighbors_node1.intersection(neighbors_node2):
            item = get_partOfWeightCalculating(graph, database, pair, cn)
            CNWts02Feature += item['cnWts02']
            CNWts05Feature += item['cnWts05']
            CNWts08Feature += item['cnWts08']
            AAWts02Feature += item['aaWts02']
            AAWts05Feature += item['aaWts05']
            AAWts08Feature += item['aaWts08']
        pdb.insert(str(pair[0]), str(pair[1]), CNWts02Feature,
                   CNWts05Feature, CNWts08Feature, AAWts02Feature,
                   AAWts05Feature, AAWts08Feature)
    pdb.commit()
    return pdb
def run_configuration(request):
    # Django view (Python 2): enqueue one regression job per
    # (encoding, clustering, regression) combination and track each run's
    # queued status in the pydblite backend database.
    if request.method == 'POST':
        db = Base('backendDB.pdl')
        # db.create('Type','Log', 'Run', 'Prefix','Rule','Threshold', 'TimeStamp', 'Status', mode="override")
        if db.exists():
            db.open()
        else:
            db.create('Type','Log', 'Run', 'Prefix','Rule','Threshold', 'TimeStamp', 'Status')
        configuration_json = json.loads(request.body)
        print configuration_json
        log = configuration_json["log"]
        prefix = configuration_json['prefix']
        # Encode the file.
        encoding.encode(log, prefix)
        for encodingMethod in configuration_json['encoding']:
            for clustering in configuration_json['clustering']:
                for regression in configuration_json['regression']:
                    django_rq.enqueue(tasks.regressionTask, log, prefix, encodingMethod, clustering, regression)
                    run = regression + '_' + encodingMethod + '_' + clustering
                    # One status row per (run, prefix, log) combination.
                    records = [r for r in db if r['Run'] == run and r['Prefix'] == str(prefix) and r['Log'] == log]
                    # for r in db:
                    #     if (r['Run'] == run) and (r['Prefix'] == str(prefix)) and (r['Log'] == log):
                    #         records.append(r)
                    print records
                    if not records:
                        # Regression jobs carry no rule/threshold -> "NaN".
                        db.insert("Regression", log, run, str(prefix),"NaN","NaN", time.strftime("%b %d %Y %H:%M:%S", time.localtime()), 'queued')
                    else:
                        db.update(records[0], TimeStamp=time.strftime("%b %d %Y %H:%M:%S", time.localtime()), Status= 'queued')
                    # if run in df['Run'].unique():
                    #     df.loc[df.Run == run, 'TimeStamp'] = time.strftime("%b %d %Y %H:%M:%S", time.localtime())
                    #     df.loc[df.Run == run, 'Status'] = "queued"
                    # else:
                    #     df.loc[df.shape[0]] = [run, time.strftime("%b %d %Y %H:%M:%S", time.localtime()), 'queued']
                    # print df
                    # print df['Run']
                    # df.to_csv('core_results_queue/' + log + '/' + str(prefix) + '/reg_queueStatus.csv', sep=',',header=writeHeader, mode='w+', index=False)
                    db.commit()
    return HttpResponse("YOLO")
def initialize_database():
    """(Re)create the block-chain database and insert the genesis block.

    Fixes: the original had a bare ``db.delete`` (an attribute access
    without a call — a silent no-op); ``mode="override"`` below already
    discards any prior content, so the statement is dropped. The local
    ``hash`` was also renamed to stop shadowing the builtin.
    """
    print("Initializing database!")
    db = Base('block_chain.pdl')
    if db.exists():
        print("Delete block chain database")
    # override mode recreates the base, wiping any existing records.
    db.create('index', 'timestamp', 'hash', 'previousHash', 'data',
              mode="override")
    genesisData = "It all begins here!"
    timestamp = datetime.now()
    index = 0
    block_hash = calculateHash(index, "", timestamp, genesisData)
    genesisBlock = Block(index, str(timestamp), block_hash, None, genesisData)
    db.insert(genesisBlock.index, genesisBlock.timestamp, genesisBlock.hash,
              genesisBlock.previousHash, genesisBlock.data)
    db.commit()
def do_Get (self, result, request, args):
    """Look up the record matching args['testId'].

    Responds 200 with the record when found, 404 with an empty list
    otherwise.
    """
    ####### Replace this section by your logic #######
    db = Base('database_service6.pdl')
    db.create('testId', 'testMessage', mode="open")
    result = db(testId = int(args['testId']))
    if result:
        responseCode = 200  #ResponseCode.Ok
        payload = result[0]
    else:
        responseCode = 404  #ResponseCode.Ok
        payload = result
    responseBody = json.dumps(payload, sort_keys=True, indent=4,
                              separators=(',', ': '))
    ####### Replace this section by your logic #######
    request.setResponseCode(responseCode)
    resp = utils.serviceResponse(responseCode, responseBody)
    return resp
def likes(currentMessage): try: db = Base('chatStorage/likes/likes.pdl') #The path to the database db.create('userID', 'likes', 'dislikes', 'history', 'liked', mode="open") #Create a new DB if one doesn't exist. If it does, open it sentMyKarma = False try: likes = 0 dislikes = 0 if int(currentMessage.reply_to_message.from_user.id) == int(117924410): #is replying to bot message with /likes for user in db: if int(user['userID']) == 117924410: likes = int(user['likes']) dislikes = int(user[dislikes]) karma = int(likes) - int(dislikes) response = "Since you asked, I have " + str(likes) + " likes and " + str(dislikes) + " dislikes, for a total of " + str(karma) + " karma. " if karma > 0: response += "\xF0\x9F\x98\x83".decode("utf-8") #smiley else: response += "\xF0\x9F\x98\xAD".decode("utf-8") #crying return response sentMyKarma = True except Exception: print traceback.format_exc() if not sentMyKarma: userWasFound = False for user in db: if(int(user['userID']) == int(currentMessage.from_user.id)): userWasFound = True likes = int(user['likes']) dislikes = int(user['dislikes']) if userWasFound: return currentMessage.from_user.first_name + ", you have " + str(likes) + " likes and " + str(dislikes) + " dislikes, for a total of " + str(int(likes) - int(dislikes)) + " karma." else: return "No like data found!" except Exception: print traceback.format_exc() return ""
def setup(args):
    """Create (or overwrite, with confirmation) the expenses database at the
    module-level `path`, first backing up any existing file to `path + '.bak'`.
    """
    if os.path.isfile(path):
        # Keep a backup copy before touching anything.
        shutil.copy(path, path + '.bak')
    db = Base(path)
    if not args.overwrite:
        if db.exists():
            print('{} already exists and is a database. If you want to recreate it, use the -o flag.'.format(path))
        else:
            db.create('name', 'cost', 'date', 'tags', mode='open')
            print('Created database at {}!'.format(path))
        return
    # Overwrite requested: insist on an explicit y/n answer.
    confirm = None
    while confirm not in ('y', 'n'):
        confirm = input('Do you really want to overwrite the database ? (y/n) ')
    if confirm == 'y':
        db.create('name', 'cost', 'date', 'tags', mode='override')
        print('Data base in {} has been overwritten!'.format(path))
class Rates:
    """ Persistence layer for exchange rates. """

    def __init__(self, filename, erase_db):
        mode = "override" if erase_db else "open"
        self.__db = Base(filename)
        self.__db.create('currency', 'rate', mode=mode)
        self.__db.create_index('currency')

    def setRate(self, currency, rate):
        """ Persist a currency's exchange rate. """
        assert rate > 0.0
        matches = self.__db._currency[currency]
        if matches:
            # We never expect several exchange rates for the same currency.
            assert len(matches) == 1
            self.__db.update(matches[0], rate=rate)
        else:
            self.__db.insert(currency=currency, rate=rate)
        self.__db.commit()

    def getRate(self, currency):
        """ Get the exchange rate with EUR for the provided currency, or None
        if it is not found.  An exchange rate for currency CUR is
        Value(EUR) / Value(CUR): 1 EUR = rate(CUR) CUR <=> 1/rate(CUR) EUR = 1 CUR.
        """
        matches = self.__db._currency[currency]
        if not matches:
            return None
        return matches[0]['rate']

    def getAllRates(self):
        """ Get all known exchange rates as (currency, rate) pairs. """
        pairs = []
        for record in self.__db:
            pairs.append((record['currency'], record['rate']))
        return pairs

    def getRatesCount(self):
        """ Get total number of exchange rates in db. """
        return len(self.__db)
def calculatingWeights(graph, nodesnotLinked, database, calculatingFile):
    """For every unlinked node pair, sum the time-weighted common-neighbour
    (cnWTS*) and Adamic/Adar (aaWTS*) contributions of each shared neighbour
    and persist one feature row per pair into a pydblite table.

    Returns the (committed) pydblite Base.

    Fixes over the original: removed dead locals (two unused neighbour
    counts and the never-used CNWJCFeature/AAWJCFeature accumulators),
    replaced the manual element counter with enumerate, and dropped the
    C-style trailing semicolons.
    """
    pdb = Base(calculatingFile)
    pdb.create('node1', 'node2', 'cnWTS02', 'cnWTS05', 'cnWTS08',
               'aaWTS02', 'aaWTS05', 'aaWTS08')
    pdb.create_index('node1', 'node2')
    qtyofNodesToProcess = len(nodesnotLinked)
    for element, pair in enumerate(nodesnotLinked, start=1):
        FormatingDataSets.printProgressofEvents(element, qtyofNodesToProcess, "Calculating features for nodes not liked: ")
        # Feature sums run over the common neighbours of the pair.
        commonNeighbors = all_neighbors(graph, pair[0]).intersection(all_neighbors(graph, pair[1]))
        totals = {'cnWts02': 0, 'cnWts05': 0, 'cnWts08': 0,
                  'aaWts02': 0, 'aaWts05': 0, 'aaWts08': 0}
        for cn in commonNeighbors:
            item = get_partOfWeightCalculating(graph, database, pair, cn)
            for key in totals:
                totals[key] += item[key]
        pdb.insert(str(pair[0]), str(pair[1]),
                   totals['cnWts02'], totals['cnWts05'], totals['cnWts08'],
                   totals['aaWts02'], totals['aaWts05'], totals['aaWts08'])
    pdb.commit()
    return pdb
def get_results_db(clear_cache=False, skip=[]):
    """Return the pydblite results cache, rebuilding it from the *.config
    files in `results_dir` when it is missing, stale, or explicitly cleared.

    clear_cache -- force a rebuild even if the cache looks fresh.
    skip -- settings hashes to exclude from the rebuilt cache.
            NOTE(review): mutable default argument; harmless here because it
            is never mutated, but worth changing to None/() eventually.
    Raises analyze.AnalysisException when per-test cardinalities disagree.
    """
    cache_file = 'cache/results.pdl'
    db = Base(cache_file)
    # Rebuild when forced, when no cache exists, or when the results
    # directory is newer than the cache file.
    if clear_cache or not db.exists() or os.path.getmtime(cache_file) < os.path.getmtime(results_dir):
        warnings.warn('Rebuilding results cache...')
        columns = set()
        rows = []
        p = pathlib.Path(results_dir)
        for config_file in p.glob('*.config'):
            with config_file.open() as config_fh:
                # The file stem is the settings hash identifying the test.
                settings_hash = config_file.stem
                row = json.loads(config_fh.read())
                if settings_hash in skip:
                    continue
                row['hash'] = settings_hash
                tests_count = analyze.count(config_file.parent, settings_hash)
                row['iostat_cpu'], len_cpu_values = analyze.iostat_cpu(config_file.parent, settings_hash)
                # Dispatch on the iperf flavour named in the config itself.
                row['iperf_result'], len_iperf_values = getattr(analyze, row['iperf_name'])(config_file.parent, settings_hash, row)
                # Every data source must report the same number of tests.
                if tests_count != len_cpu_values or tests_count != len_iperf_values:
                    raise analyze.AnalysisException('For test {}, mismatch in cardinality of tests between count ({}), iostat ({}) and iperf ({})'.format(settings_hash, tests_count, len_cpu_values, len_iperf_values), settings_hash)
                if len_iperf_values > 0:
                    # fairness is (mean, spread); warn when the lower bound
                    # drops below the theoretical threshold for this
                    # parallelism level.
                    min_fairness = row['iperf_result']['fairness'][0] - row['iperf_result']['fairness'][1]
                    if min_fairness < (1 - 1 / (2 * row['parallelism'])):
                        warnings.warn('For test {}, fairness has a critical value: {}.'.format(settings_hash, row['iperf_result']['fairness']), RuntimeWarning)
                # Columns are the union of all keys seen across rows.
                columns = columns | set(row.keys())
                rows.append(row)
        db.create(*columns, mode='override')
        for r in rows:
            db.insert(**r)
        db.commit()
        warnings.warn('Results cache built.')
    else:
        warnings.warn('Reusing results cache.')
        db.open()
    return db
class Rates:
    """ Persistence layer for exchange rates (pydblite-backed). """

    def __init__(self, filename, erase_db):
        self.__db = Base(filename)
        self.__db.create('currency', 'rate',
                         mode=("override" if erase_db else "open"))
        self.__db.create_index('currency')

    def __lookup(self, currency):
        # Indexed access on the 'currency' column.
        return self.__db._currency[currency]

    def setRate(self, currency, rate):
        """ Persist a currency's exchange rate. """
        assert rate > 0.0
        found = self.__lookup(currency)
        if len(found) == 0:
            self.__db.insert(currency = currency, rate = rate)
        else:
            assert len(found) == 1  # We never expect several exchange rates for the same currency
            self.__db.update(found[0], rate = rate)
        self.__db.commit()

    def getRate(self, currency):
        """ Get the exchange rate with EUR for the provided currency or None
        if it is not found.  An exchange rate for currency CUR is
        Value(EUR) / Value(CUR): 1 EUR = rate(CUR) CUR <=> 1/rate(CUR) EUR = 1 CUR.
        """
        found = self.__lookup(currency)
        return found[0]['rate'] if found else None

    def getAllRates(self):
        """ Get all known exchange rates as (currency, rate) pairs. """
        return [(rec['currency'], rec['rate']) for rec in self.__db]

    def getRatesCount(self):
        """ Get total number of exchange rates in db. """
        return len(self.__db)
from pathlib import Path

from pydblite import Base

# Make sure the directory holding every database file exists.
if not Path("db").exists():
    Path("db").mkdir()

""" Base for client's application on service via client_id """
client_base = Base("db/client_base.pdl")
if client_base.exists():
    client_base.open()
else:
    client_base.create("secret", "redirect_uri", "name")

""" Base for keeping authorization codes while oauth """
authorization_code = Base("db/authorization_code.pdl")
if authorization_code.exists():
    authorization_code.open()
else:
    authorization_code.create("user_id", "code", "expire_time")

""" Base for access_tokens for authorized users """
access_token = Base("db/access_token.pdl")
if access_token.exists():
    access_token.open()
else:
    access_token.create("user_id", "access", "expire_time", "refresh")

"""
from pathlib import Path

from pydblite import Base

# Make sure the directory holding every database file exists.
if not Path('db').exists():
    Path('db').mkdir()

# Registered OAuth client applications.
client = Base('db/client.pdl')
if client.exists():
    client.open()
else:
    client.create('secret', 'redirect_uri', 'name')

# Short-lived authorization codes issued during the OAuth flow.
authorization_code = Base('db/authorization_code.pdl')
if authorization_code.exists():
    authorization_code.open()
else:
    authorization_code.create('user_id', 'code', 'expire_time')

# Access/refresh token pairs for authorized users.
token = Base('db/token.pdl')
if token.exists():
    token.open()
else:
    token.create('user_id', 'access', 'expire_time', 'refresh')

# Registered user accounts.
user = Base('db/user.pdl')
if user.exists():
    user.open()
else:
    user.create('login', 'password_hash', 'name', 'email', 'phone')

# NOTE(review): unlike the tables above, this Base is neither opened nor
# created here -- presumably handled elsewhere; confirm.
clothes = Base('db/clothes.pdl')
def _create_agent_table(self):
    """Create the in-memory agents table and reset the join counters."""
    self._db = Base('agents', save_to_file=False)
    self._db.create('sock', 'team_id', 'agent_id', 'side_name', 'agent_name')
    self._count_joined_teams = 0
    # Total number of agents declared across all sides; none has joined yet.
    total_agents = 0
    for agent_names in self._sides.values():
        total_agents += len(agent_names)
    self._count_not_joined_agents = total_agents
def process(bot, chat_id, parsedCommand, messageText, currentMessage, update, instanceAge):
    """Dispatch a parsed Telegram chat command for this chat.

    Returns True when the command was recognized/handled, False otherwise
    (including on any unexpected exception, which is printed).
    NOTE(review): nesting reconstructed from flattened source -- confirm
    branch boundaries against the original file.
    """

    # --- thin wrappers around the send helpers, bound to this chat ---
    def sendText(givenText, replyingMessageID=0, keyboardLayout=[]):
        # Suppressed entirely while an admin has disabled the bot here.
        if not chatInstanceArray[chat_id]['adminDisable']:
            atbSendFunctions.sendText(bot, chat_id, givenText, replyingMessageID, keyboardLayout)

    def sendPhoto(imageName):
        atbSendFunctions.sendPhoto(bot, chat_id, "images/" + imageName)

    def sendSticker(stickerName):
        atbSendFunctions.sendSticker(bot, chat_id, "stickers/" + stickerName)

    def sendAudio(audioName):
        atbSendFunctions.sendAudio(bot, chat_id, "audio/" + audioName)

    def sendVideo(videoName):
        atbSendFunctions.sendVideo(bot, chat_id, "videos/" + videoName)

    def passSpamCheck(timeDelay=15):
        # Rate-limit a command per chat; default window is 15 (time units
        # per atbMiscFunctions.spamCheck).
        return atbMiscFunctions.spamCheck(chat_id, currentMessage.date, timeDelay)

    # Lazily create this chat's state record on first sight (KeyError path).
    try:
        chatInstanceArray[chat_id]['checking'] = True
    except Exception:
        chatInstanceArray[chat_id] = {'checking': True, 'adminDisable': False, 'spamTimestamp': 0, 'shottyTimestamp': 0, 'shottyWinner': "", 'checkingVehicles': False, 'whoArray': []}
    try:
        #commands go here, in this if-elif block. Python doesn't have switch statements.
        if parsedCommand == "/mom": #sends "MOM GET THE CAMERA"
            sendText("MOM GET THE CAMERA")
        elif atbMiscFunctions.isMoom(parsedCommand): #sends M {random number of Os} M
            if passSpamCheck(): #use this to prevent spamming of a command
                response = "M"
                for i in range(0, random.randint(3, 75)):
                    response += "O"
                sendText(response + "M")
        elif parsedCommand == "/swag":
            sendText("swiggity swag, what\'s in the bag?")
        elif parsedCommand == "/worms":
            if passSpamCheck():
                response = "hey man can I borrow your "
                # Anything after "/worms " is what gets borrowed.
                if len(messageText) > len("/worms "):
                    response += messageText[len("/worms "):]
                else:
                    response += "worms"
                sendText(response)
        elif parsedCommand == "/shh" or parsedCommand == "/shhh":
            if passSpamCheck():
                sendPhoto("shhh.jpg")
        elif parsedCommand == "/father":
            # Coin flip.
            if (random.randint(0, 1)):
                sendText("You ARE the father!")
            else:
                sendText("You are NOT the father!")
        elif parsedCommand == "/rip": #sends "I can't believe that [name (defaults to sender's name)] is f*****g dead."
            if passSpamCheck():
                response = "I can't believe that "
                # Replace each "my " with the sender's possessive name.
                while "my " in messageText:
                    messageText = messageText.replace("my ", currentMessage.from_user.first_name + "\'s ", 1)
                if len(messageText) > len("/rip "):
                    if messageText[len("/rip "):] == "me":
                        response += currentMessage.from_user.first_name
                    else:
                        response += messageText[len("/rip "):]
                else:
                    response += currentMessage.from_user.first_name
                response += " is f*****g dead."
                sendText(response)
        elif parsedCommand == "/rips": #sends "I can't believe that [name (defaults to sender's name)] is f*****g dead."
            # Plural variant of /rip ("are" instead of "is"); note it still
            # strips the "/rip " prefix length, matching the original.
            if passSpamCheck():
                response = "I can't believe that "
                while "my " in messageText:
                    messageText = messageText.replace("my ", currentMessage.from_user.first_name + "\'s ", 1)
                if len(messageText) > len("/rip "):
                    if messageText[len("/rip "):] == "me":
                        response += currentMessage.from_user.first_name
                    else:
                        response += messageText[len("/rip "):]
                else:
                    response += currentMessage.from_user.first_name
                response += " are f*****g dead."
                sendText(response)
        elif parsedCommand == "/scrub":
            checkingStats = False
            # "/scrub stats": print the leaderboard, highest counter first.
            try:
                if currentMessage.text.lower().split()[1] == "stats":
                    db = Base('chatStorage/scrub.pdl') #The path to the DB
                    db.create('username', 'name', 'counter', mode="open")
                    K = list()
                    for user in db:
                        K.append(user)
                    sortedK = sorted(K, key=lambda x: int(x['counter']), reverse=True)
                    outputString = "SCRUBBIEST LEADERBOARD:\n"
                    for user in sortedK:
                        pluralString = " SCRUB POINT"
                        if not(int(user['counter']) == 1):
                            pluralString += "S"
                        pluralString += "\n"
                        outputString += user['name'].upper() + ": " + str(user['counter']) + pluralString
                    sendText(outputString)
                    checkingStats = True
            except IndexError:
                # No second word after /scrub: fall through to award/ad path.
                pass
            # Only these two user ids may award points (by replying to the
            # target user's message).
            if not checkingStats and (currentMessage.from_user.id == 169883788 or currentMessage.from_user.id == 44961843):
                db = Base('chatStorage/scrub.pdl')
                db.create('username', 'name', 'counter', mode="open")
                userWasFound = False
                valueSuccessfullyChanged = False
                # Optional numeric argument = number of points (default 1).
                try:
                    pointsAdded = float(currentMessage.text.lower().split()[1])
                except (IndexError, ValueError):
                    pointsAdded = 1
                for user in db:
                    if int(user['username']) == currentMessage.reply_to_message.from_user.id:
                        db.update(user, counter=int(user['counter']) + pointsAdded)
                        valueSuccessfullyChanged = True
                        userWasFound = True
                        db.commit()
                if not userWasFound:
                    db.insert(currentMessage.reply_to_message.from_user.id, currentMessage.reply_to_message.from_user.first_name, pointsAdded)
                    db.commit()
                if valueSuccessfullyChanged or not userWasFound:
                    sendText("Matt Gomez awarded " + str(pointsAdded) + " scrub point(s) to " + currentMessage.reply_to_message.from_user.first_name + ".")
            elif not checkingStats:
                sendText("AdamTestBot, powered by ScrubSoft (C)")
        elif parsedCommand == "/hiss":
            # Same structure as /scrub but for hisses, authorized user
            # 122526873, always +1.
            checkingStats = False
            try:
                if currentMessage.text.lower().split()[1] == "stats":
                    db = Base('chatStorage/hiss.pdl')
                    db.create('username', 'name', 'counter', mode="open")
                    K = list()
                    for user in db:
                        K.append(user)
                    sortedK = sorted(K, key=lambda x: int(x['counter']), reverse=True)
                    outputString = "Hiss Leaderboard:\n"
                    for user in sortedK:
                        pluralString = " hiss"
                        if not(int(user['counter']) == 1):
                            pluralString += "es"
                        pluralString += "\n"
                        outputString += user['name'] + ": " + str(user['counter']) + pluralString
                    sendText(outputString)
                    checkingStats = True
            except IndexError:
                pass
            if not checkingStats and (currentMessage.from_user.id == 122526873 or currentMessage.from_user.id == 44961843):
                db = Base('chatStorage/hiss.pdl')
                db.create('username', 'name', 'counter', mode="open")
                userWasFound = False
                valueSuccessfullyChanged = False
                for user in db:
                    if int(user['username']) == currentMessage.reply_to_message.from_user.id:
                        db.update(user, counter=int(user['counter']) + 1)
                        valueSuccessfullyChanged = True
                        userWasFound = True
                        db.commit()
                if not userWasFound:
                    db.insert(currentMessage.reply_to_message.from_user.id, currentMessage.reply_to_message.from_user.first_name, 1)
                    db.commit()
                if valueSuccessfullyChanged or not userWasFound:
                    sendText("Robyn hissed at " + currentMessage.reply_to_message.from_user.first_name + ".")
        elif parsedCommand == "/water":
            if passSpamCheck():
                if (random.randint(0, 1) == 0):
                    sendSticker("water.webp")
                else:
                    sendSticker("hoboken_water.webp")
        elif parsedCommand == "/sysinfo":
            if passSpamCheck():
                # Average CPU over three 1-second samples.
                cpu = []
                for x in range(3):
                    cpu.append(psutil.cpu_percent(interval=1))
                cpuavg = round(sum(cpu) / float(len(cpu)), 1)
                memuse = psutil.virtual_memory()[2]
                diskuse = psutil.disk_usage('/')[3]
                sendText("The CPU uasge is " + str(cpuavg) + "%, the memory usage is " + str(memuse) + "%, and " + str(diskuse) + "% of the disk has been used.")
        elif parsedCommand == "/grill":
            if passSpamCheck():
                sendPhoto("grill.jpg")
        elif parsedCommand == "/pants":
            if passSpamCheck():
                sendText("Shit! I can't find my pants.")
        elif parsedCommand == "/broken":
            if passSpamCheck():
                if len(messageText) > len("/broken "):
                    message = str(currentMessage.from_user.username) + " says: @magomez96 my " + messageText[len("/broken "):] + " is broken!"
                else:
                    message = str(currentMessage.from_user.username) + " says: @magomez96 my shit is broken!"
                sendText(message)
        elif parsedCommand == "/quote":
            if passSpamCheck(5):
                # Numeric argument selects a quote; otherwise random.
                try:
                    sendText(atbQuote.getQuoteAt(chat_id, int(messageText.split()[1])))
                except:
                    sendText(atbQuote.getQuote(chat_id))
        elif parsedCommand == "/quotefrom":
            print("\n" + messageText[len("/quotefrom "):])
            if passSpamCheck(5):
                sendText(atbQuote.getQuoteFrom(chat_id, messageText[len("/quotefrom "):]))
        elif parsedCommand == "/quoteremove":
            # Restricted to the listed usernames.
            if currentMessage.from_user.username == "AdamZG" or currentMessage.from_user.username == "magomez96" or currentMessage.from_user.username == "Peribot":
                if atbQuote.quoteRemove(chat_id, int(messageText.split()[1])):
                    sendText("Quote " + messageText.split()[1] + " removed")
                else:
                    sendText("That quote doesn't exist or you never added any quotes")
        elif parsedCommand == "/quoteadd":
            if currentMessage.reply_to_message == None and messageText == "/quoteadd":
                sendText("Try replying to a message with this command to add it to the quote list")
            else:
                userLastName = ""
                try:
                    userLastName = " " + currentMessage.from_user.last_name
                except:
                    pass
                try:
                    # Reply path: quote the replied-to message verbatim.
                    replyUserLastName = ""
                    try:
                        replyUserLastName = " " + currentMessage.reply_to_message.from_user.last_name
                    except:
                        replyUserLastName = ""
                    quote_resp = atbQuote.quoteAdd(chat_id, '"' + currentMessage.reply_to_message.text + '"', (currentMessage.reply_to_message.from_user.first_name + replyUserLastName).strip())
                    sendText(quote_resp)
                except(Exception):
                    # Inline path: "/quoteadd <text> - <attribution>".
                    quoteParse = currentMessage.text.split("-")
                    quote = "-".join(quoteParse[:-1])
                    quote = quote[len("/quoteadd "):].strip()
                    quote_resp = atbQuote.quoteAdd(chat_id, quote, quoteParse[-1].strip(), (currentMessage.from_user.first_name + userLastName).strip())
                    sendText(quote_resp)
        elif parsedCommand == "/quotelegacy":
            if passSpamCheck(5):
                sendText(atbQuote.getQuoteLegacy(chat_id))
        elif parsedCommand == "/pogo":
            # Ping the Pokemon GO servers in a background thread so the bot
            # is not blocked by the HTTP round trip.
            def getPokeInfo():
                start = time.time()
                try:
                    nf = urlopen("https://pgorelease.nianticlabs.com/plfe/", timeout = 3)
                    page = nf.read()
                    end = time.time()
                    nf.close()
                except TimeoutError:
                    end=time.time()
                    # NOTE(review): as flattened, the response-time report is
                    # only reachable from the timeout handler; the success
                    # path sends nothing.  Looks like an indentation bug in
                    # the original -- confirm against the source file.
                    rTime = round((end - start) * 1000)
                    if (rTime < 800):
                        sendText("Pokémon GO is UP\n{}ms Response Time".format(rTime))
                    elif (rTime >= 800 and rTime < 3000):
                        sendText("Pokémon GO's servers are struggling\n{}ms Response Time".format(rTime))
                    elif (rTime >= 3000):
                        sendText("Pokémon GO is DOWN\n{}ms Response Time".format(rTime))
                except Exception as e:
                    sendText("Pokémon GO's servers are really not doing well\nHere's what I got back\n" + e.__str__())
            myThread = Thread(target=getPokeInfo)
            myThread.start()
        elif parsedCommand == "/discourse":
            if passSpamCheck():
                if (random.randint(0, 1) == 0):
                    sendPhoto("discourse.jpg")
                else:
                    sendText("http://thediscour.se")
        #this command should go last:
        elif parsedCommand == "/community": #add your command to this list
            response = "/mom - get the camera\n"
            response += "/mooom (any number of \'o\'s) - call for help\n"
            response += "/swag - more memes\n"
            response += "/worms - can I borrow them?\n"
            response += "/shh(h) - here, be relaxed\n"
            response += "/father - are you the father?\n"
            response += "/rip(s) (something) - I can't believe they're dead! (The s is for plural dead things)\n"
            response += "/hiss stats - see how many time Robyn has hissed at people\n"
            response += "/scrub or /scrub stats - see who sponsors me or how many times Matt Gomez has called you a scrub\n"
            response += "/water - does this water look brown to you?\n"
            response += "/sysinfo - Gets server performance info.\n"
            response += "/grill - I'm a George Foreman grill!\n"
            response += "/pants - Pants?\n"
            response += "/broken - Tell Matt Gomez your stuff is broken.\n"
            response += "/quote - Pulls a random quote from a list. Reply to a message with /quoteadd to add one!\n"
            response += "/pogo - Want to know if Pokémon GO's servers are up?\n"
            response += "/discourse - Break in case of spicy discourse"
            sendText(response)
        else:
            # Unrecognized command.
            return False
        return True
    except Exception:
        print(traceback.format_exc())
        return False
from datetime import datetime  # NOTE(review): unused in this snippet -- presumably used further down or removable
from pathlib import Path

from pydblite import Base

# Make sure the directory holding every database file exists.
if not Path('db').exists():
    Path('db').mkdir()

# Registered OAuth client applications.
client = Base('db/client.pdl')
if client.exists():
    client.open()
else:
    client.create('secret', 'redirect_uri', 'name')

# Short-lived authorization codes issued during the OAuth flow.
authorization_code = Base('db/authorization_code.pdl')
if authorization_code.exists():
    authorization_code.open()
else:
    authorization_code.create('user_id', 'code', 'expire_time')

# Access/refresh token pairs for authorized users.
token = Base('db/token.pdl')
if token.exists():
    token.open()
else:
    token.create('user_id', 'access', 'expire_time', 'refresh')

# Registered user accounts.
user = Base('db/user.pdl')
if user.exists():
    user.open()
else:
    user.create('login', 'password_hash', 'name', 'email', 'phone')
from pydblite import Base
from pathlib import Path

# Make sure the directory holding every database file exists.
if not Path('database').exists():
    Path('database').mkdir()

# Registered OAuth client applications.
client = Base('database/client.pdl')
if client.exists():
    client.open()
else:
    client.create('client_id', 'client_secret', 'redirect_uri')

# Short-lived authorization codes issued during the OAuth flow.
auth_code = Base('database/auth_code.pdl')
if auth_code.exists():
    auth_code.open()
else:
    auth_code.create('user_id', 'code', 'expired')

# Access/refresh token pairs for authorized users.
token = Base('database/token.pdl')
if token.exists():
    token.open()
else:
    token.create('user_id', 'access', 'expired', 'refresh')

# Registered user accounts.
user = Base('database/user.pdl')
if user.exists():
    user.open()
else:
    user.create('login', 'password', 'name', 'email', 'phone')

# NOTE(review): unlike the tables above, this Base is neither opened nor
# created here -- presumably handled elsewhere; confirm.
item = Base('database/item1.pdl')
from pathlib import Path

from pydblite import Base

# Make sure the directory holding every database file exists.
if not Path('database').exists():
    Path('database').mkdir()

# Registered OAuth client applications.
client = Base('database/client.pdl')
if client.exists():
    client.open()
else:
    client.create('secret', 'redirect_uri', 'name')

# Short-lived authorization codes issued during the OAuth flow.
authorization_code = Base('database/authorization_code.pdl')
if authorization_code.exists():
    authorization_code.open()
else:
    authorization_code.create('user_id', 'code', 'expire_time')

# Access/refresh token pairs for authorized users.
token = Base('database/token.pdl')
if token.exists():
    token.open()
else:
    token.create('user_id', 'access', 'expire_time', 'refresh')

# Seed a default test client on first run only.
if len(client) == 0:
    client.insert(secret='test_secret', redirect_uri='http://example.com', name='app1')
    client.commit()
parser.add_argument('-c','--cat', help='Category', required=True) parser.add_argument('-n','--top_n', help='Number of top categories', required=True) parser.add_argument('-r','--r_path', help='Path to R script', required=True) parser.add_argument('-y','--year', help='Year to R script', required=True) parser.add_argument('-w','--week', help='Week to R script', required=True) parser.add_argument('-tw','--t_weeks', help='Training weeks to R script', required=True) parser.add_argument('-wb','--w_back', help='Weeks back to R script', required=True) parser.add_argument('-fd','--f_depth', help='Forecast depth to R script', required=True) args = parser.parse_args() pylitedb_path = args.db_path db = Base(pylitedb_path) if db.exists(): db.open() else: db.create('key') df = update_database(db, connection_string) predict_logic( db=db, path_input1=args.path1, is_zip=args.iszip, path_input2=args.path2, is_sql=args.issql, row_sql=args.r_sql, connection_string=args.constr, name_output1=args.output1, name_output2=args.output2, top_n=int(args.top_n), cat=args.cat, r_script_path = args.r_path,
def yolo(request):
    """Debug/maintenance Django view: reset the backend status database and
    answer with a plain confirmation.

    db.create(..., mode="override") wipes and recreates the table, so the
    subsequent delete-all pass normally finds nothing; it is kept for parity
    with the original behaviour.  The large blocks of commented-out
    prediction/clustering experiments have been removed as dead code.
    """
    db = Base('backendDB.pdl')
    db.create('Type', 'Log', 'Run', 'Prefix', 'Rule', 'Threshold', 'TimeStamp', 'Status', mode="override")
    # Collect and delete whatever records remain (usually none).
    records = [r for r in db]
    db.delete(records)
    db.commit()
    return HttpResponse("YOLO")
# -*- coding: utf-8 -*- from pydblite import Base ''' 序列化采用cPickle ''' db = Base("test.db", save_to_file=False) if db.exists(): db.open() else: db.create("name", "age") db.insert("bob", 10) index = db.insert(name="alice", age=20) print db[index] # 按照主键访问record record = db[1] db.update(record, name="dellian") #db.delete(record) # db.records (所有记录) # query for r in db("age") > 10: print r
class Inventory:
    """Lazy cache of EC2 resource names, backed by a local pydblite table.

    Each resource id (instance, network interface, security group) is
    resolved against AWS on first request and cached as a
    (resource, name, env) row; lookup failures are cached too, with the id
    as its own name, so AWS is not re-queried for them.
    """

    def __init__(self, session):
        # session: a boto3 session-like object exposing .resource("ec2").
        self.session = session
        self.client = session.resource("ec2")
        self.create_database()

    def create_database(self):
        """Open (or create) the local cache table."""
        self.db = Base("test.pdl")
        if not self.db.exists():
            self.db.create("resource", "name", "env")
        self.db.open()

    def fetch_instance(self, id):
        """Resolve an EC2 instance id via AWS and cache its Name/Env tags.

        On any failure the id itself is cached as the name (best-effort)."""
        try:
            instances = self.client.instances.filter(InstanceIds=[id])
            for instance in instances:
                tags = {t["Key"]: t["Value"] for t in instance.tags}
                # Either tag spelling may be present; KeyError falls through
                # to the except branch below.
                env = tags["Env"] if "Env" in tags else tags["Environment"]
                self.db.insert(resource=id, name=tags["Name"], env=env)
        except Exception as e:
            print(e)
            self.db.insert(resource=id, name=id, env="")

    def get_instance(self, id):
        """Return the cached instance row, fetching it on a cache miss."""
        instance = self.db(resource=id)
        if not instance:
            self.fetch_instance(id)
            instance = self.db(resource=id)
        return instance[0] if instance else None

    def fetch_network_interface(self, id):
        """Resolve an ENI id: cache the name/env of its attached instance.

        Unattached interfaces insert nothing on success; failures cache the
        id as its own name."""
        c = self.client.meta.client
        try:
            data = c.describe_network_interfaces(NetworkInterfaceIds=[id])
            nif = data["NetworkInterfaces"][0]
            if "Attachment" in nif:
                instance = self.get_instance(nif["Attachment"]["InstanceId"])
                if instance:
                    self.db.insert(resource=id, name=instance["name"], env=instance["env"])
        except Exception as e:
            print(e)
            self.db.insert(resource=id, name=id, env="")

    def add_group(self, id, name):
        """Manually seed a cache entry (no AWS lookup)."""
        self.db.insert(resource=id, name=name, env="")

    def get_network_interface(self, id):
        """Return the cached ENI row, fetching it on a cache miss."""
        nif = self.db(resource=id)
        if not nif:
            self.fetch_network_interface(id)
            nif = self.db(resource=id)
        return nif[0] if nif else None

    def fetch_security_group(self, id):
        """Resolve a security-group id via AWS and cache its group name.

        NOTE(review): unlike the other fetchers, no env is inserted and
        failures are not cached -- confirm whether that asymmetry is
        intentional."""
        c = self.client.meta.client
        try:
            groups = c.describe_security_groups(GroupIds=[id])
            for group in groups["SecurityGroups"]:
                self.db.insert(resource=id, name=group["GroupName"])
        except Exception as e:
            print(e)

    def get_security_group(self, id):
        """Return the cached security-group row, fetching on a cache miss."""
        group = self.db(resource=id)
        if not group:
            self.fetch_security_group(id)
            group = self.db(resource=id)
        return group[0] if group else None

    def get_resource_name(self, id):
        """Return the cached name for any resource id, or None (cache only)."""
        resource = self.db(resource=id)
        return resource[0]["name"] if resource else None
#from weppy.dal import Field, Model from pydblite import Base except: pip.main(['install','weppy']) pip.main(['install','pydblite']) from weppy import App,Field, Form,request, session, url, redirect, abort from weppy.dal import DAL, Field, Model, belongs_to, has_many from weppy.tools import requires from weppy.tools.auth import Auth, AuthUser from weppy.sessions import SessionCookieManager #from weppy.dal import Field, Model from pydblite import Base import os if (not os.path.isfile("db.pydb")): db = Base('db.pydb') # สร้างไฟล์ฐานข้อมูล test.pydb db.create('topic','text','name','email') # สร้าง field เก็บข้อมูล print("install ok") else: db = Base('db.pydb') app = App(__name__) nameblog = "My Blog" @app.route('/form') class User(Model): name = Field('string') has_many('posts') validation = { 'email': {'is': 'email'} } class Post(Model):
def calculatingWeights(graph, nodesnotLinked, database, calculatingFile):
    """For every unlinked node pair, sum the nine time-indexed weighted
    common-neighbour (WCNFTI01..09) and weighted Adamic/Adar (WAAFTI01..09)
    contributions of each shared neighbour, and persist one row per pair.

    Returns the (committed) pydblite Base.

    FIX: the original pdb.insert passed WCNFTI02 and WAAFTI02 twice each
    (22 positional values for a 20-field table), shifting every later value
    one column to the right and dropping WCNFTI09/WAAFTI09.  Each feature is
    now inserted exactly once, in field order.  Also removed the dead locals
    (unused neighbour counts) and the manual element counter.
    """
    pdb = Base(calculatingFile)
    pdb.create('node1', 'node2',
               'WCNFTI01', 'WCNFTI02', 'WCNFTI03', 'WCNFTI04', 'WCNFTI05',
               'WCNFTI06', 'WCNFTI07', 'WCNFTI08', 'WCNFTI09',
               'WAAFTI01', 'WAAFTI02', 'WAAFTI03', 'WAAFTI04', 'WAAFTI05',
               'WAAFTI06', 'WAAFTI07', 'WAAFTI08', 'WAAFTI09')
    pdb.create_index('node1', 'node2')
    qtyofNodesToProcess = len(nodesnotLinked)
    for element, pair in enumerate(nodesnotLinked, start=1):
        FormatingDataSets.printProgressofEvents(element, qtyofNodesToProcess, "Calculating features for nodes not liked: ")
        commonNeighbors = all_neighbors(graph, pair[0]).intersection(all_neighbors(graph, pair[1]))
        # Nine accumulators per feature family, one per time index.
        wcn = [0] * 9
        waa = [0] * 9
        for cn in commonNeighbors:
            item = get_partOfWeightCalculating(graph, database, pair, cn)
            for i in range(9):
                wcn[i] += item['WCN'][i]
                waa[i] += item['WAA'][i]
        pdb.insert(str(pair[0]), str(pair[1]), *(wcn + waa))
    pdb.commit()
    return pdb
def process(bot, chat_id, parsedCommand, messageText, currentMessage, update, instanceAge):
    """Dispatch one parsed Telegram command for a chat and run its handler.

    Returns True when a known command was handled, False when the command is
    unknown or an unexpected exception occurred (the traceback is printed).
    Relies on module globals: chatInstanceArray (per-chat state), the
    atbSendFunctions / atbMiscFunctions helper modules, random, traceback,
    and pydblite's Base.
    """
    # Per-chat send helpers; sending is suppressed while an admin has
    # disabled the bot for this chat.
    # NOTE(review): keyboardLayout=[] is a mutable default argument — shared
    # across calls if ever mutated; it appears to be passed through unmodified.
    def sendText(givenText, replyingMessageID=0, keyboardLayout=[]):
        if not chatInstanceArray[chat_id]['adminDisable']:
            atbSendFunctions.sendText(bot, chat_id, givenText, replyingMessageID, keyboardLayout)
    def sendPhoto(imageName):
        atbSendFunctions.sendPhoto(bot, chat_id, "images/" + imageName)
    def sendSticker(stickerName):
        atbSendFunctions.sendSticker(bot, chat_id, "stickers/" + stickerName)
    def passSpamCheck():
        # Rate limiter: returns truthy when this chat may trigger a spammy command.
        return atbMiscFunctions.spamCheck(chat_id, currentMessage.date)
    # EAFP initialisation of this chat's state dict: the assignment raises
    # (KeyError) on first contact, and the except branch creates the record.
    try:
        chatInstanceArray[chat_id]['checking'] = True
    except Exception:
        chatInstanceArray[chat_id] = {'checking': True, 'adminDisable': False, 'spamTimestamp': 0, 'shottyTimestamp': 0, 'shottyWinner': "", 'checkingVehicles': False, 'whoArray': []}
    try:
        # Commands go here, in this if-elif block. Python doesn't have switch statements.
        if parsedCommand == "/mom":
            # Sends "MOM GET THE CAMERA".
            sendText("MOM GET THE CAMERA")
        elif atbMiscFunctions.isMoom(parsedCommand):
            # Sends "M" + a random number (3..75) of "O"s + "M".
            if passSpamCheck():  # use this to prevent spamming of a command
                response = "M"
                for i in range(0, random.randint(3, 75)):
                    response += "O"
                sendText(response + "M")
        elif parsedCommand == "/swag":
            sendText("swiggity swag, what\'s in the bag?")
        elif parsedCommand == "/worms":
            # "/worms <thing>" borrows <thing>; bare "/worms" borrows "worms".
            if passSpamCheck():
                response = "hey man can I borrow your "
                if len(messageText) > len("/worms "):
                    response += messageText[len("/worms "):]
                else:
                    response += "worms"
                sendText(response)
        elif parsedCommand == "/shh" or parsedCommand == "/shhh":
            if passSpamCheck():
                sendPhoto("shhh.jpg")
        elif parsedCommand == "/father":
            # 50/50 coin flip.
            if (random.randint(0, 1)):
                sendText("You ARE the father!")
            else:
                sendText("You are NOT the father!")
        elif parsedCommand == "/rip":
            # Sends "I can't believe that [name (defaults to sender's name)] is f*****g dead."
            if passSpamCheck():
                response = "I can't believe that "
                if len(messageText) > len("/rip "):
                    if (messageText[len("/rip "):] == "me"):
                        response += currentMessage.from_user.first_name
                    else:
                        response += messageText[len("/rip "):]
                else:
                    response += currentMessage.from_user.first_name
                response += " is f*****g dead."
                sendText(response)
        elif parsedCommand == "/scrub":
            checkingStats = False
            # "/scrub stats": print the leaderboard sorted by counter, descending.
            # The split()[1] IndexError (bare "/scrub") falls through to the
            # award/ad branch below.
            try:
                if currentMessage.text.lower().split()[1] == "stats":
                    db = Base('chatStorage/scrub.pdl')  # The path to the DB
                    db.create('username', 'name', 'counter', mode="open")
                    K = list()
                    for user in db:
                        K.append(user)
                    sortedK = sorted(K, key=lambda x: int(x['counter']), reverse=True)
                    outputString = "SCRUBBIEST LEADERBOARD:\n"
                    for user in sortedK:
                        pluralString = " SCRUB POINT"
                        if not(int(user['counter']) == 1):
                            pluralString += "S"
                        pluralString += "\n"
                        outputString += user['name'].upper() + ": " + str(user['counter']) + pluralString
                    sendText(outputString)
                    checkingStats = True
            except IndexError:
                pass
            # Only two hard-coded Telegram user IDs may award a point
            # (presumably Matt Gomez and the bot owner — verify).
            # The point goes to the author of the replied-to message.
            if not checkingStats and (currentMessage.from_user.id == 169883788 or currentMessage.from_user.id == 44961843):
                db = Base('chatStorage/scrub.pdl')
                db.create('username', 'name', 'counter', mode="open")
                userWasFound = False
                valueSuccessfullyChanged = False
                for user in db:
                    if int(user['username']) == currentMessage.reply_to_message.from_user.id:
                        db.update(user, counter=int(user['counter']) + 1)
                        valueSuccessfullyChanged = True
                        userWasFound = True
                        db.commit()
                if not userWasFound:
                    # First point for this user: create their row.
                    db.insert(currentMessage.reply_to_message.from_user.id, currentMessage.reply_to_message.from_user.first_name, 1)
                    db.commit()
                if valueSuccessfullyChanged or not userWasFound:
                    sendText("Matt Gomez awarded a scrub point to " + currentMessage.reply_to_message.from_user.first_name + ".")
            elif not checkingStats:
                # Anyone else gets the sponsor line.
                sendText("AdamTestBot, powered by ScrubSoft (C)")
        elif parsedCommand == "/hiss":
            # Same structure as /scrub, backed by hiss.pdl, different
            # authorised IDs, and no fallback message for other users.
            checkingStats = False
            try:
                if currentMessage.text.lower().split()[1] == "stats":
                    db = Base('chatStorage/hiss.pdl')
                    db.create('username', 'name', 'counter', mode="open")
                    K = list()
                    for user in db:
                        K.append(user)
                    sortedK = sorted(K, key=lambda x: int(x['counter']), reverse=True)
                    outputString = "Hiss Leaderboard:\n"
                    for user in sortedK:
                        pluralString = " hiss"
                        if not(int(user['counter']) == 1):
                            pluralString += "es"
                        pluralString += "\n"
                        outputString += user['name'] + ": " + str(user['counter']) + pluralString
                    sendText(outputString)
                    checkingStats = True
            except IndexError:
                pass
            if not checkingStats and (currentMessage.from_user.id == 122526873 or currentMessage.from_user.id == 44961843):
                db = Base('chatStorage/hiss.pdl')
                db.create('username', 'name', 'counter', mode="open")
                userWasFound = False
                valueSuccessfullyChanged = False
                for user in db:
                    if int(user['username']) == currentMessage.reply_to_message.from_user.id:
                        db.update(user, counter=int(user['counter']) + 1)
                        valueSuccessfullyChanged = True
                        userWasFound = True
                        db.commit()
                if not userWasFound:
                    db.insert(currentMessage.reply_to_message.from_user.id, currentMessage.reply_to_message.from_user.first_name, 1)
                    db.commit()
                if valueSuccessfullyChanged or not userWasFound:
                    sendText("Robyn hissed at " + currentMessage.reply_to_message.from_user.first_name + ".")
        elif parsedCommand == "/water":
            if (random.randint(0, 1) == 0):
                sendSticker("water.webp")
            else:
                sendSticker("hoboken_water.webp")
        # this command should go last:
        elif parsedCommand == "/community":
            # Help text — add your command to this list.
            response = "/mom - get the camera\n"
            response += "/mooom (any number of \'o\'s) - call for help\n"
            response += "/swag - more memes\n"
            response += "/worms - can I borrow them?\n"
            response += "/shh(h) - here, be relaxed\n"
            response += "/father - are you the father?\n"
            response += "/rip (something) - I can't believe they're dead!\n"
            response += "/hiss stats - see how many time Robyn has hissed at people\n"
            response += "/scrub or /scrub stats - see who sponsors me or how many times Matt Gomez has called you a scrub\n"
            response += "/water - does this water look brown to you?"
            sendText(response)
        else:
            # Unrecognised command: tell the caller nothing was handled.
            return False
        return True
    except Exception:
        # Catch-all boundary for any handler failure; dump the traceback and
        # report the command as unhandled. (Python 2 print statement.)
        print traceback.format_exc()
        return False