def update(self, repo_list):
    """Store freshly scraped repos and refresh both inverted indexes.

    Args:
        repo_list: list of repo dicts; keys read here are 'name',
            'language' and 'description'. repo_list[0] is kept as the
            single 'latest_repo' document.
    """
    if repo_list:
        # keep only the most recent repo in the 'latest_repo' table
        self._db.table('latest_repo').purge()
        self._db.table('latest_repo').insert(repo_list[0])
    # in-memory snapshots of both indexes; written back once at the end
    language_docs = self._get_index_docs('language')
    keyword_docs = self._get_index_docs('keyword')
    for repo in repo_list:
        # save repo data
        doc_id = self._db.insert(repo)
        # update index
        name = repo.get('name')
        language = repo.get('language')
        description = repo.get('description')
        if language:
            # a repo's language string may contain several tokens
            for lang in language.split():
                update_inverted_index(language_docs, lang.lower(), doc_id)
        keywords = split_repo_name(name)
        if description:
            keywords += split_repo_desc(description)
        for keyword in split_keywords(keywords):
            update_inverted_index(keyword_docs, keyword.lower(), doc_id)
    # persist the rebuilt indexes
    self._idx.update(operations.set('docs', language_docs), Query().name == 'language')
    self._idx.update(operations.set('docs', keyword_docs), Query().name == 'keyword')
def book():
    """Book a seat (row/place) for the authenticated user.

    Returns:
        400 on malformed JSON input, 401 when unauthenticated,
        403 when the seat is taken or does not exist,
        empty body (200) on success.
    """
    content = flask.request.json
    # direct check instead of the old raise/except AttributeError dance
    if not isinstance(content, dict):
        return flask.jsonify({'error': 400}), 400
    row = content.get('row')
    place = content.get('place')
    if not isinstance(row, int) or not isinstance(place, int):
        return flask.jsonify({'error': 400}), 400
    id_ = get_authernticated_id(content)
    if id_ is None:
        return flask.jsonify({'error': 401}), 401
    q = tinydb.Query()
    seat = (q.row == row) & (q.place == place)
    # seat already booked? (debug print removed)
    if len(tickets_table.search((q.free == False) & seat)) == 1:
        return flask.jsonify({'error': 403}), 403
    # seat does not exist at all?
    if len(tickets_table.search(seat)) == 0:
        return flask.jsonify({'error': 403}), 403
    tickets_table.update(set('id', id_), seat)
    tickets_table.update(set('free', False), seat)
    return ''
def update(self, repo_list):
    """Persist scraped repos and rebuild the language/keyword indexes."""
    if repo_list:
        latest = self._db.table('latest_repo')
        latest.purge()
        latest.insert(repo_list[0])
    lang_index = self._get_index_docs('language')
    kw_index = self._get_index_docs('keyword')
    for repo in repo_list:
        # persist the repo itself, then index it under its new doc id
        doc_id = self._db.insert(repo)
        language = repo.get('language')
        if language:
            for token in language.split():
                update_inverted_index(lang_index, token.lower(), doc_id)
        terms = split_repo_name(repo.get('name'))
        description = repo.get('description')
        if description:
            terms += split_repo_desc(description)
        for term in split_keywords(terms):
            update_inverted_index(kw_index, term.lower(), doc_id)
    # write both indexes back in one shot each
    self._idx.update(operations.set('docs', lang_index), Query().name == 'language')
    self._idx.update(operations.set('docs', kw_index), Query().name == 'keyword')
async def on_message(self, message):
    """Discord message hook: moderates one channel, tracks active users,
    and runs the daily 'cool guy' raffle once per day after self.noon.
    """
    # Temp remove messages with content in drawing arena
    if (message.channel.id == 750753280694550539 and message.content != ""):
        await message.delete()
    member = message.author
    # Add non-staff to list of active users (bots and users with
    # manage_messages are excluded)
    if (not member.bot and (str(member.id) not in self.activeUsers and not member.guild_permissions.manage_messages)):
        self.activeUsers.append(str(member.id))
        self.events.update(set('activeUsers', self.activeUsers), where('name') == 'coolguy')
    # Cool guy raffle once a day
    now = datetime.datetime.now()
    if (now > self.noon and (date.today() > self.lastCoolGuy)):
        # Set date first so concurrent messages don't re-trigger the raffle
        self.lastCoolGuy = date.today()
        self.events.update(set('last', str(self.lastCoolGuy)), where('name') == 'coolguy')
        coolGuyRole = message.guild.get_role(self.coolGuyRole)
        # Remove last cool guy(s)
        coolGuys = [] if coolGuyRole.members is None else coolGuyRole.members
        for coolGuy in coolGuys:
            await coolGuy.remove_roles(coolGuyRole)
        # New cool guys: first winner drawn from active users.
        # NOTE(review): loops forever if no activeUsers entry maps to a
        # current guild member (or activeUsers is empty) — confirm upstream
        # guarantees at least one valid entry.
        found = False
        while (not found):
            selection = choice(self.activeUsers)
            if message.guild.get_member(int(selection)) != None:
                found = True
                winner = message.guild.get_member(int(selection))
                await winner.add_roles(coolGuyRole)
        # Second winner drawn from all guild members, excluding the first
        found = False
        while (not found):
            selection = choice(message.guild.members)
            if (selection != winner):
                await selection.add_roles(coolGuyRole)
                found = True
        general = message.guild.get_channel(self.generalChannel)
        await general.send(winner.mention + " and " + selection.mention + " won the cool guy raffle! ")
        # Reset active users for the next raffle window
        self.activeUsers = []
        self.events.update(set('activeUsers', self.activeUsers), where('name') == 'coolguy')
def write_down(self, redditor, history):
    """Mark *redditor* as scanned in the db and store its *history*."""
    match_name = Query().name == redditor.name
    self.redditor_db.update(set("scanned", True), match_name)
    self.redditor_db.update(set("history", history), match_name)
def upload_file():
    """Record the uploaded image's URL/path for chat 1 and return the URL.

    Only GET is supported; any other method gets a real 404 response
    (previously the literal string '404' was returned with status 200).
    """
    if request.method == 'GET':
        # log message fixed: this is the GET branch, not POST
        print("Hi upload GET call", request.args)
        file_name = os.path.basename(request.args['file'])
        file_url = IMAGE_URL + file_name
        file_path = IMAGE_PATH + file_name
        db.update(set("img_url", file_url), db_query.chat_id == 1)
        db.update(set("img_path", file_path), db_query.chat_id == 1)
        return file_url
    return 'Not Found', 404
def export_manager(instanceid):
    """Export the manager contracts from an instance and persist them.

    Args:
        instanceid: the InstanceId to look up in the instance db.
    """
    record = t_db.search(Query().InstanceId == instanceid)[0]
    inst_ip = record["IpAddress"]
    inst_id = record["InstanceId"]
    # (removed unused read of record["Type"])
    export_manager_command(inst_ip)
    out = export_manager_contract(inst_ip)
    match = Query().InstanceId == inst_id
    t_db.update(set('Managers', out[0]), match)
    t_db.update(set('IsManagerExported', 'true'), match)
    flash(out)
    return redirect(url_for('rootchain'))
def moduser(self, name, user_id, utype):
    """Register a new user or update an existing user's type.

    Returns:
        (False,) on success, or (True, error_key) on failure.
    """
    user_id = str(user_id)
    User = Query()
    t = self.db.table("users")
    r = t.search(User.id == user_id)
    # TODO: also check if name has been updated
    if len(r) == 0:
        # add new user
        t.insert({
            "name": name,
            "id": user_id,
            "type": utype,
            "added": str(datetime.datetime.now()),
            "N": 0
        })
        # remove from strangers
        self.db.table("strangers").remove(where("id") == user_id)
        return False,
    elif len(r) == 1:
        # user already registered
        if r[0]["type"] != utype:
            # check if we're not blocking the main admin
            if user_id != self.cf["ADMIN"]["admin_id"]:
                # update user type
                t.update(tdop.set("type", utype), User.id == user_id)
                return False,
            else:
                return True, "register_block_error_admin"
        # BUG FIX: type unchanged used to fall through and return None;
        # treat it as a successful no-op instead
        return False,
    else:
        # TODO: duplicate entry, should not occur, consider sending a warning
        return True, "error_user_duplicate"
def generate(self, params=None, verbose=False, debug=False):
    """Compile every not-yet-generated mutant product with GCC."""
    mutants = self.db.all()
    total = len(mutants)
    print('Starting generation for %s products...' % total)
    for i, mutant in enumerate(mutants):
        if not mutant['generated']:
            cfg = GCCConfig()
            # caller-supplied flags first, then include dirs and feature defines
            cfg.params = list(params) if params else []
            cfg.params += _get_i_params(self.state.include_dirs)
            cfg.params += _get_d_params(mutant['features'])
            cfg.output_file = mutant['output_file']
            cfg.input_file = mutant['file']
            cfg.source_file = self.state.source_file
            Executor(config=cfg, strategy=self.gcc_strategy).run(log=debug)
            # flag the exact (name, product_code) pair as done
            self.db.update(
                set('generated', True),
                (Query().name == mutant['name'])
                & (Query().product_code == mutant['product_code']))
        print_progress(i + 1, total)
    print(' [DONE]')
def update_role_data(role_emote_dict):
    """Upsert the single role→emote mapping document.

    Args:
        role_emote_dict: mapping stored under the 'role_emote' key.
    """
    has_mapping = where('role_emote').exists()
    # truthiness instead of len(...) > 0
    if db_role_data.search(has_mapping):
        db_role_data.update(operations.set('role_emote', role_emote_dict), has_mapping)
    else:
        db_role_data.insert({'role_emote': role_emote_dict})
def set_impact_analysis(self, mutant, result):
    """Record the impact-analysis outcome for *mutant* and bump the
    product counters on the config document."""
    # when analysis is disabled, every macro counts as impacted
    impacted = result.all_macros if self.disabled else result.impacted_macros
    untouched = [m for m in result.all_macros if m not in impacted]
    analysis = {
        'impacted_features': impacted,
        'not_impacted_features': untouched,
        'all_features': result.all_macros,
        'all_features_len': len(result.all_macros),
        'elapsed_time': str(result.elapsed_time),
    }
    self.db.update(set('impact_analysis', analysis),
                   Query().name == mutant.get('name'))
    self.state.db.update(add('products', 2 ** len(result.all_macros)),
                         Query().type == 'config')
    self.state.db.update(add('products_impacted', 2 ** len(impacted)),
                         Query().type == 'config')
def converstion():
    """Answer a VQA question (GET ?msg=...) about the stored image.

    Runs demo.py from the project root against the image saved for
    chat 1, parses its output and returns it as JSON.
    (Function name typo kept — callers/routes reference it.)
    """
    if request.method == 'GET':
        question = request.args['msg']
        print("MESSAGE : " + question)
        # the VQA script must run from the project root, one level up
        web_dir = os.getcwd()
        os.chdir("..")
        try:
            img_path = db.search(db_query.chat_id == 1)[0]['img_path']
            print(img_path)
            # BUG FIX: the output file was opened and never closed
            with open("web//output.txt", "r+") as std_out:
                subprocess.run([
                    'python', 'demo.py', '-image_file_name', img_path,
                    '-question', question
                ], stdout=std_out)
        finally:
            # BUG FIX: always restore the cwd, even if the model crashes
            os.chdir(web_dir)
        # parse the output to dict
        result_dict = parse_output()
        db.update(set("q_a", [{
            "question": question,
            "result": result_dict
        }]), db_query.chat_id == 1)
        return jsonify(result_dict)
def get_article_to_crawl(self) -> Optional[Tuple[str, str, str, str]]:
    """
    Return (id, language, url, storage_dir) for an article to crawl, or
    (None, language, None, None) when no matching article exists.
    As a side effect the chosen article's crawl_status is set to 1 so it
    does not get crawled again.
    """
    with self.lock:
        query = Query()
        articles = self.get_article_db()
        # round-robin over configured languages
        language = self.languages.pop(0)
        self.languages.append(language)
        matches = articles.search(
            (query.type == self.article_type)
            & (query.crawl_status == 0)
            & (query.language == language))
        logging.info(f"[{language}]Search article to download in db")
        if not matches:
            return None, language, None, None
        article = matches[0]
        article_id = article["id"]  # renamed: 'id' shadowed the builtin
        language = article["language"]
        articles.update(set("crawl_status", 1),
                        self.create_article_query(article_id, language))
        return (article_id, language, article["full_url"],
                article["article_dir"])
def check_status():
    """Refresh an instance's monitoring status and redirect to its page.

    A 'mining' instance keeps its status; otherwise the monitored state
    is written back to the db.
    """
    # (removed unused 'error' local)
    if request.method == "POST":
        inst_id = request.form['instance_id']
        inst = t_db.search(Query().InstanceId == inst_id)[0]
        # TODO : CHECK STATUS - Pending | enable | mining | dead
        # reuse the record already fetched instead of a second search
        prior_status = inst['Status']
        inst_resource = get_instance_resource(inst_id)
        inst_monitor = inst_resource.monitor()
        status = inst_monitor['InstanceMonitorings'][0]['Monitoring']['State']
        # Check if prior status is mining, let it be
        if prior_status == "mining":
            status = prior_status
        else:
            t_db.update(set('Status', status), Query().InstanceId == inst_id)
        # return by Type of data : rootchain | operator | usernode
        flash([
            time.ctime()[11:19] + " Status Checked(" + prior_status +
            " --> " + status + ')!'
        ])
        if inst['Type'] == "rootchain":
            return redirect(url_for('rootchain'))
        elif inst['Type'] == "operator":
            return redirect(url_for('operator'))
        elif inst['Type'] == "usernode":
            return redirect(url_for('usernode'))
        else:
            return redirect(url_for('rootchain'))
def _setup(self):
    """Create per-product output directories and seed the mutants db
    (one ORIGINAL entry plus every mutant of every product)."""
    products = self.products_db.all()
    output_dir = os.path.join(self.state.output_dir, 'products')
    self.state.db.update(set('products_dir', output_dir),
                         Query().type == 'config')
    if not os.path.exists(output_dir):
        os.mkdir(output_dir)
    n_products = len(products)
    print('Preparing generation for %s configurations...' % n_products)
    for p_idx, product in enumerate(products, start=1):
        product_dir = os.path.join(output_dir, product['product_code'])
        if not os.path.exists(product_dir):
            os.mkdir(product_dir)
        # every product also gets an unmutated baseline entry
        baseline = {
            'operator': 'ORIGINAL',
            'file': self.state.source_file,
            'name': 'ORIGINAL_0'
        }
        self.db.insert(_initialize_mutant(baseline, product, product_dir))
        n_mutants = len(product['mutants'])
        for m_idx, mutant in enumerate(product['mutants'], start=1):
            self.db.insert(_initialize_mutant(mutant, product, product_dir))
            pprint_progress(p_idx, n_products, m_idx, n_mutants)
        print_progress(p_idx, n_products)
    print(' [DONE]')
def operator_export_genesis():
    """Export genesis.json from the operator node, persist it, redirect.

    Non-POST requests simply redirect back to the operator page.
    """
    # (removed unused 'error' and 'res' locals)
    if request.method == 'POST':
        inst_id = request.form["instance_id"]
        inst = t_db.search(Query().InstanceId == inst_id)[0]
        genesis = export_genesis(inst["IpAddress"])
        match = Query().InstanceId == inst_id
        t_db.update(set('Genesis', genesis), match)
        t_db.update(set('IsExported', "true"), match)
        flash([
            time.ctime()[11:19] + " genesis.json exported! --> " +
            str(genesis)
        ])
    # both branches redirected to the same page; merged
    return redirect(url_for('operator'))
def usernode_initialize():
    """Configure and initialize a usernode instance, then redirect.

    Pulls rootchain/operator connection details from the stored record,
    pushes them plus the operator's genesis to the node, and marks the
    instance initialized.
    """
    # (removed unused 'error', 'res', 'out1', 'out2' locals and dead prints)
    if request.method == 'POST':
        inst_id = request.form["instance_id"]
        user_inst = t_db.search(Query().InstanceId == inst_id)[0]
        # set variable
        user_ip = user_inst["IpAddress"]
        rootchain_ip = user_inst["RootChain"]["IpAddress"]
        chain_id = user_inst["Operator"]["ChainID"]
        operator_ip = user_inst["Operator"]["IpAddress"]
        enode_value = user_inst["Enode"]
        set_usernode_variable(user_ip, rootchain_ip, operator_ip,
                              enode_value, chain_id)
        # import genesis to usernode
        genesis = user_inst["Operator"]["Genesis"]
        import_genesis_usernode(user_ip, genesis)
        check_genesis(user_ip)
        # initialize usernode
        initialize_usernode(user_ip)
        t_db.update(set('IsInitialized', "true"),
                    Query().InstanceId == inst_id)
        flash([time.ctime()[11:19] + " Usernode Initialized!"])
    # both branches redirected to the same page; merged
    return redirect(url_for('usernode'))
async def update_by_day(day: int, month: int, year: int, new_prediction: int):
    """Set Prediction to *new_prediction* on every record for the date."""
    q = Query()
    date_match = (q.Day == day) & (q.Month == month) & (q.Year == year)
    changed = db.update(set("Prediction", new_prediction), date_match)
    return {"success": True, "updated_elements": changed}
def operator_set_variable():
    """Push the stored operator configuration to the node and mark it set.

    Non-POST requests simply redirect back to the operator page.
    """
    # (removed unused 'error'/'res' locals; the call's return value was unused)
    if request.method == 'POST':
        inst_id = request.form["instance_id"]
        inst = t_db.search(Query().InstanceId == inst_id)[0]
        # positional argument list expected by change_account_operator
        parameter = [
            inst['OperatorAccountKey'],
            inst['OperatorAccount'],
            inst['OperatorPassword'],
            inst['DeployGasprice'],
            inst['Gasprice'],
            inst['Stamina']['OperatorAmount'],
            inst['Stamina']['MinDeposit'],
            inst['Stamina']['RecoverEpochLength'],
            inst['Stamina']['WithdrawalDelay'],
            inst['ChainID'],
            inst['PreAsset'],
            inst['Epoch'],
            inst['NodeKey'],
            inst['RootChain']["IpAddress"],
            inst['Dashboard']['OperatorName'],
            inst['Dashboard']['Website'],
            inst['Dashboard']['Description'],
            inst['Dashboard']['ApiServer'],
            inst["IpAddress"],
        ]
        change_account_operator(parameter)
        t_db.update(set('IsSet', "true"), Query().InstanceId == inst_id)
        flash([time.ctime()[11:19] + " Operator Variable Set!"])
    # both branches redirected to the same page; merged
    return redirect(url_for('operator'))
def rootchain_start():
    """Start mining on a rootchain instance and mark its status 'mining'.

    Non-POST requests simply redirect back to the rootchain page.
    """
    # (removed unused 'error' local)
    if request.method == "POST":
        inst_id = request.form['instance_id']
        inst = t_db.search(Query().InstanceId == inst_id)[0]
        inst_faucet = inst['Faucet']
        inst_ip = inst['IpAddress']
        # change account
        change_rootchain_account(
            inst_ip, inst_faucet[0], inst_faucet[1], inst_faucet[2],
            inst_faucet[3], inst_faucet[4], inst_faucet[5],
            inst['Operator'], inst['Staking']['WithdrawalDelay'],
            inst['Staking']['SeigPerBlock'],
            inst['Staking']['PwertTONRoundTime'], inst['OperatorPassword'])
        # run rootchain node
        run_rootchain(inst_ip)
        # update database
        t_db.update(set('Status', 'mining'), Query().InstanceId == inst_id)
        flash([time.ctime()[11:19] + " Rootchain Mining Started!"])
    # both branches redirected to the same page; merged
    return redirect(url_for('rootchain'))
def callback(self, update, context):
    """Mark every unprinted queued message as printed and report the count.

    Args:
        update: telegram update whose message receives the reply.
        context: handler context (unused here, required by the API).
    """
    num = 0
    # snapshot first — mutating the table while iterating it is unsafe
    for item in list(self.handler.printq):
        if item['printed'] is False:
            self.handler.printq.update(tdbop.set("printed", True),
                                       Query().date == item['date'])
            num += 1
    update.message.reply_text(f"Purged {num} message(s) from the queue")
def complete():
    """Finish the Pocket OAuth flow and store the access token in the db."""
    with app.app_context():
        db = get_db()
        try:
            pocket = db.search(Query().type == "pocket_key")[0]
        except IndexError:
            # narrowed from a bare except: only "no stored key" is expected
            click.echo("Key not found")
            return
        auth_data = {
            "consumer_key": pocket["consumer_key"],
            "code": pocket["code"]
        }
        resp = requests.post(
            "https://getpocket.com/v3/oauth/authorize",
            json=auth_data,
            headers={
                "X-Accept": "application/json",
                "Content-Type": "application/json",
            },
        )
        db.update(
            operations.set("access_token", resp.json()["access_token"]),
            Query().type == "pocket_key",
        )
        click.echo(
            "Successfully completed auth process, you can now run archivy pocket sync to load the data"
        )
def attr_auth_device_keygen(device_id, attr_list, token):
    """Request attribute-based device keys from the AA server and store
    the private key + attribute list for *device_id* locally."""
    if device_id not in " ".join(attr_list):
        click.echo(
            f"attr_list argument should contain device_id ({device_id})")
        return
    doc = search_tinydb_doc(path, 'aa_keys', where('public_key').exists())
    if not doc:
        # no public key yet: point the user at the keygen command
        with click.Context(get_attr_auth_keys) as ctx:
            click.echo(
                f"Public key not present, please use: {ctx.command.name}")
            click.echo(get_attr_auth_keys.get_help(ctx))
        return
    payload = {"attr_list": " ".join(attr_list)}
    response = requests.post(AA_URL_DEVICE_KEYGEN,
                             headers={"Authorization": token},
                             data=payload,
                             verify=VERIFY_CERTS)
    decoded = response.content.decode('unicode-escape')
    json_content = json_string_with_bytes_to_dict(decoded)
    if not json_content["success"]:
        click.echo(json_content)
        return
    device_table = get_tinydb_table(path, "device_keys")
    device_table.update(
        set("device_data:data", {
            "private_key": json_content["private_key"],
            "attr_list": attr_list,
        }),
        Query().device_id == device_id)
def setEnemyAttribute(id, attr, value):
    """Set *attr* to *value* on the enemy record with the given id.

    Returns:
        True on success; False (after printing a message) on failure.
    Note: parameter name 'id' shadows the builtin but is kept so existing
    keyword callers don't break.
    """
    try:
        GCenemies.update(set(attr, value), where('id') == id)
        return True
    except Exception:
        # narrowed from a bare except: still best-effort, but no longer
        # swallows SystemExit/KeyboardInterrupt
        print("Failed to set {} to {} for player {}".format(
            attr, str(value), str(id)))
        return False
def setNextTime(self):
    """Schedule the next 'ratrace' event 12–23 hours from now, at a
    random minute, and refresh the cached time."""
    later = datetime.datetime.now() + datetime.timedelta(
        hours=random.randint(12, 23))
    rand_time = later.replace(minute=random.randint(0, 59),
                              second=0,
                              microsecond=0)
    # Set next cheese time
    self.events.update(set('next', str(rand_time)),
                       where('name') == 'ratrace')
    self.getNextTime()
def setPlayerAttribute(userid, attr, value):
    """Set *attr* to *value* on the player record with the given id.

    Returns:
        True on success; False (after printing a message) on failure.
    """
    try:
        GCplayers.update(set(attr, value), where('id') == userid)
        return True
    except Exception:
        # narrowed from a bare except: still best-effort, but no longer
        # swallows SystemExit/KeyboardInterrupt
        print("Failed to set {} to {} for player {}".format(
            attr, str(value), str(userid)))
        return False
def insertMoney(self, funds, context=None):
    """Deposit *funds* (denomination -> count) into the user's account.

    Args:
        funds: mapping of denomination to number of units to add; keys are
            stringified before being used to index the stored funds.
        context: carries credentials in context.ctx ('id' and 'key').
            NOTE(review): despite the None default, context.ctx is always
            dereferenced — calling without context raises AttributeError;
            confirm callers never omit it.

    Raises:
        Bank.IncorrectCredentials: when the supplied id/key pair is invalid.
    """
    if not self.ok_credentials(context.ctx['id'], context.ctx['key']):
        raise Bank.IncorrectCredentials()
    # NOTE(review): mutates the cached document in place before writing back
    user_funds = self.user[0]['funds']
    for (k, v) in funds.items():
        user_funds[str(k)] += v
    # NOTE(review): tinydb's update() expects a query condition here, but
    # self.user looks like a list of documents — verify this second argument
    # actually selects the intended record.
    self.db.update(set('funds', user_funds), self.user)
def mark_all_uploaded(self, cutoff_time=None):
    """Stamp every scite created before *cutoff_time* as uploaded.

    Args:
        cutoff_time: aware datetime; defaults to now in the local timezone.
    """
    if cutoff_time is None:
        cutoff_time = datetime.now(gettz())

    def older(ts):
        # PEP 8 E731: a named def instead of a lambda assigned to a name
        return parse(ts) < cutoff_time

    Scite = Query()
    self.db.update(set('rm_uploaded', str(cutoff_time)),
                   Scite.created_at.test(older))
async def update_by_day(day: int, month: int, year: int, new_prediction: int):
    """Set Prediction to *new_prediction* on every record for the date.

    Returns a JSON response with 'success' and the list of updated ids.
    """
    q = Query()
    updated = db.update(
        # BUG FIX: the value was hard-coded to 7, ignoring new_prediction
        set("Prediction", new_prediction),
        (q.Day == day) & (q.Month == month) & (q.Year == year))
    success = len(updated) > 0
    response = {"success": success, "updated_elements": updated}
    return Response(content=json.dumps(response),
                    media_type="application/json")
async def update_by_day(day: int, month: int, year: int, new_prediction: int):
    """Set Prediction on every measurement recorded on the given date.

    Returns {'success': False} when nothing matched, otherwise the list
    of updated element ids alongside 'success': True.
    """
    q = Query()
    date_match = (q.Day == day) & (q.Month == month) & (q.Year == year)
    changed = db.update(set("Prediction", new_prediction), date_match)
    if not changed:
        return {'success': False}
    return {'success': True, "updated_elements": changed}
def prom(self, TgID, token):
    """Validate *token* for the given Telegram user.

    On a valid token: consume it and activate the account (returns True).
    On an invalid token: burn one attempt (locking the account when the
    last attempt is spent) and return the attempts remaining.
    Returns -1 when no attempts are left to begin with.
    """
    record = Query()
    # BUG FIX: 'user' was only bound on the success path, so the initial
    # search (and the failure branch) raised NameError
    user = Query()
    current = self.dbU.search(user.TgID == TgID)
    retval = -1
    if current[0]["attempts"] != 0:
        if self.dbC.contains(record.token == token):
            # consume the token and activate the account
            # BUG FIX: was 'token.chiave == token' — token is a str
            self.dbC.update(delete('token'), record.token == token)
            self.dbU.update(set('account_status', 1), user.TgID == TgID)
            retval = True
        else:
            remaining = current[0]["attempts"] - 1
            # BUG FIX: previously re-wrote the same value without decrementing
            self.dbU.update(set('attempts', remaining), user.TgID == TgID)
            if current[0]["attempts"] == 1:
                # last attempt spent: lock the account
                self.dbU.update(set('account_status', -1), user.TgID == TgID)
            retval = remaining
    return retval
def test_set(db):
    """set() should overwrite the field while leaving the rest intact."""
    db.update(set('char', 'xyz'), where('char') == 'a')
    updated = db.get(where('char') == 'xyz')
    assert updated['int'] == 1