def recv(self, num_bytes, sock=None): print "Waiting to receive", num_bytes, "bytes" if sock is None: data = utils.get_all(self.sock, num_bytes) print "Received data: {}".format(data) return data else: data = utils.get_all(sock, num_bytes) print "Received data: {}".format(data) return data
def cmp_time_status(delta=10):
    """Expire stale payment records.

    Walks every record from utils.get_all(); a record whose timestamp
    (column 4, "%Y-%m-%d %H:%M:%S") is more than *delta* minutes in the
    past gets its product status reset and its payment deleted.
    """
    grace = datetime.timedelta(minutes=delta)
    for record in utils.get_all():
        stamped = datetime.datetime.strptime(
            str(record[4]), "%Y-%m-%d %H:%M:%S")
        if stamped + grace < datetime.datetime.now():
            utils.update_status_product(record[3], sign=0)
            utils.delete_payment(record[4], record[1])
def read_category():
    """Dump every row of the category table to json_file/category.json."""
    records = [
        {"id": row[0], "subject": row[1], "url": row[2]}
        for row in utils.get_all("SELECT * FROM category")
    ]
    with open("json_file/category.json", "w") as cat:
        json.dump(records, cat)
def main():
    """Fetch Star Wars characters, write the notable ones to CSV, upload it.

    Selects the 9 people with the most film appearances, orders them by
    height (tallest first), writes name/species/height/appearance rows to
    DEFAULT_FILENAME, uploads the file to PASTEBIN, and always removes the
    local file afterwards.
    """
    logging.basicConfig(
        format=
        '%(asctime)s,%(msecs)d %(levelname)-8s [%(filename)s:%(lineno)d] %(message)s',
        datefmt='%Y-%m-%d:%H:%M:%S',
        level=logging.DEBUG)
    people = utils.get_all(BASE_URL + "people")
    # Keep the 9 most-featured characters, then order by height.
    people.sort(key=lambda x: len(x['films']), reverse=True)
    people = people[:9]
    # NOTE(review): assumes 'height' is always numeric — the API can also
    # return 'unknown'; confirm upstream filtering.
    people.sort(key=lambda x: int(x['height']), reverse=True)
    # Defined up-front so the final report can never hit a NameError.
    upload_status = False
    try:
        with open(DEFAULT_FILENAME, "w", newline="") as csvfile:
            writer = csv.writer(csvfile, quoting=csv.QUOTE_MINIMAL)
            writer.writerow(["name", "species", "height", "appearances"])
            for char in people:
                writer.writerow([
                    char['name'],
                    utils.get_one(char['species'][0])['name'],
                    char['height'],
                    len(char['films'])
                ])
        upload_status = utils.send_file_to_url(PASTEBIN, DEFAULT_FILENAME)
    finally:
        # Guard the cleanup: if open() itself failed, removing an absent
        # file would raise here and mask the original exception.
        if os.path.exists(DEFAULT_FILENAME):
            os.remove(DEFAULT_FILENAME)
    if upload_status:
        print("File uploaded successfully!")
    else:
        print("File upload unsuccessful...")
def get_categories():
    """Render category.html with every row of the category table."""
    data = [
        {"id": row[0], "subject": row[1], "url": row[2]}
        for row in utils.get_all("SELECT * FROM category")
    ]
    return render_template("category.html", data=data)
def get_categories():
    """Return every Category row as JSON under the 'categories' key."""
    categories = [
        {
            "id": row[0],
            "name": row[1],
            "source": row[2],
            "url_name": row[3],
        }
        for row in utils.get_all('SELECT * FROM Category')
    ]
    return jsonify({'categories': categories})
def get_category():
    """Return all category rows as JSON under the 'category' key."""
    payload = []
    for row in utils.get_all("SELECT * FROM category"):
        payload.append({"id": row[0], "subject": row[1], "url": row[2]})
    return jsonify({"category": payload})
def get_news():
    """Return every news row as JSON under the 'news' key."""
    articles = [
        {
            "id": row[0],
            "subject": row[1],
            "description": row[2],
            "image": row[3],
            "original_url": row[4],
        }
        for row in utils.get_all('SELECT * FROM news')
    ]
    return jsonify({"news": articles})
def listen_for_connection(sock):
    """Accept client connections on *sock* forever (Python 2 code).

    For each accepted client: read a fixed-size header expected to contain
    "<user> <password>", check it with authenticate(), then either hand the
    client off to listen_for_command or close the connection — see the
    review note on the auth check below.
    """
    # Accept connections forever
    while True:
        print "Waiting for connections..."
        # Accept connections
        client_sock, addr = sock.accept()
        print "Accepted connection from client: {0} {1}"\
            .format(addr[0], addr[1])
        print "\n"
        # The buffer to all data received from the client.
        # (unused in the current revision, kept from earlier code)
        conn_data = ""
        # The temporary buffer to store the received data. (also unused)
        recv_buff = ""
        # The buffer containing the header read below.
        conn_header = ""
        # Receive the fixed-size header (HEADER_MSG_SIZE bytes) holding
        # the whitespace-separated credentials.
        conn_header = utils.get_all(client_sock, HEADER_MSG_SIZE)
        print "connection header: ", conn_header
        usr, pwd = conn_header.split()
        # NOTE(review): this condition looks inverted — a client that FAILS
        # authentication is handed to listen_for_command, while one that
        # succeeds falls through and is closed below. Confirm the intent.
        if not authenticate(usr, pwd):
            return listen_for_command((addr, client_sock))
        # Close our side
        client_sock.close()
def get_news():
    """Dump every news row to json_file/news.json (UTF-8)."""
    articles = [
        {
            "id": row[0],
            "subject": row[1],
            "description": row[2],
            "image": row[3],
            "original_url": row[4],
            "category_id": row[5],
        }
        for row in utils.get_all("SELECT * FROM news")
    ]
    with open("json_file/news.json", "w", encoding="utf8") as f:
        json.dump(articles, f)
def listen_for_command(client):
    """Serve commands from a connected client forever (Python 2 code).

    *client* is an (addr, socket) tuple. Each loop iteration reads a
    fixed-size header expected to contain "<cmd> <size>" and dispatches
    on the command name (currently only 'put' is handled).
    """
    # Accept connections forever
    while True:
        print "wating for command from ", client
        # The buffer to all data received from the client.
        # (unused in the current revision, kept from earlier code)
        file_data = ""
        # The temporary buffer to store the received data. (also unused)
        recv_buff = ""
        # The buffer containing the command header read below.
        cmd_header = ""
        # Receive the fixed-size header: "<command> <payload size>"
        cmd_header = utils.get_all(client[1], HEADER_MSG_SIZE)
        print "command header", cmd_header
        cmd, size = cmd_header.split()
        # NOTE(review): `create_data_port` is used as a context manager
        # without being called — if it is a function this raises at
        # runtime; likely meant `with create_data_port() as ep_sock:`.
        with create_data_port as ep_sock:
            if (cmd == 'put'):
                data = commands.do_get(ep_sock, size)
                print 'received', data
        # Close our side
        # NOTE(review): closing inside the while loop means the next
        # iteration reads from a closed socket — confirm loop intent.
        client[1].close()
def get_instances_summary(request):
    """Return a JSON summary of the latest metrics for every instance.

    For each instance, the sample with the highest 'index' is taken as the
    newest reading for cpu, memory and storage; 'network' is a hard-coded
    placeholder value.
    """
    def _latest(samples):
        # Last element after a stable sort by 'index' == newest sample
        # (stable sort keeps the original tie-breaking behaviour).
        return sorted(samples, key=lambda s: s['index'])[-1]

    summaries = []
    for ec2 in get_all():
        data = get_json(ec2)
        summaries.append({
            'id': data['id'],
            'cpu': _latest(data['cpu'])['load_avg_1'],
            'memory': _latest(data['mem'])['%memused'],
            'network': 100000,
            'storage': _latest(data['storage'])['%util'],
        })
    return JsonResponse({"total": summaries})
def preenche_feed():
    """Scrape the feed page and synchronize the `feed` table.

    Existing rows are never updated: new items are inserted, items no
    longer on the page are deleted. (Original docstring, translated:
    "Fetches feed data. For old data, an element already on the page is
    not updated.")
    """
    resposta = requests.get(url_feed)
    lst_saida = []
    ir = False
    if resposta.ok:
        # Extract the page content.
        ir = True
        parse = bs4.BeautifulSoup(resposta.text, 'html.parser')
        # /html/body/div/div/div[2]/div[2]/div/ul/li[] xpath
        div_principal = parse.find(id="main-area-1")
        filhos = div_principal.children
        alvo = None
        # The first <ul> child of the main area holds the feed entries.
        for i in filhos:
            if i.name == "ul":
                alvo = i
                break
        lst_img = alvo.find_all('img')
        agora = datetime.datetime.now().strftime(utils.foramto_full_db)
        for k in lst_img:
            texto = k.get('alt')
            link_img = url_feed + k.get('src')
            link = k.parent.get('href')
            # The link may not exist; store the literal "NULL" then.
            link = link if link else "NULL"
            lst_saida.append(["NULL", agora, texto, link_img, link])
    else:
        print("pagina fora do ar")
        return
    conn = None
    # Row template for the INSERT built by preenche_vazio.
    template = "({}, '{}', '{}', '{}', '{}');"
    if ir:
        print("-- feed")
        conn = utils.get_db_conn()
        lst_pagina = lst_saida
        nomes_pagina = [i[2] for i in lst_pagina]
        try:
            with conn.cursor() as cursor:
                sql_vazio = "SELECT count(*) FROM `feed`"
                if utils.is_empty(cursor, sql_vazio):
                    # Empty table: bulk-insert everything from the page.
                    sql_inserir = "INSERT INTO `feed` VALUES "
                    preenche_vazio(cursor, lst_saida, template, sql_inserir)
                else:
                    # Update path: data already exists.
                    sql_todos = "SELECT * FROM `feed` ORDER BY `texto`"
                    nome_todos = "SELECT `texto` FROM `feed` ORDER BY `texto`"
                    lst_nome_todos = utils.get_all(cursor, nome_todos)
                    lst_nome_todos = [i[0] for i in lst_nome_todos]
                    lst_todos = utils.get_all(cursor, sql_todos)
                    # Only inserts or deletes — never updates in place.
                    lst_inserir = []
                    for k in lst_pagina:
                        # print(repr(k[2]))
                        if k[2] not in lst_nome_todos:
                            lst_inserir.append(k)
                    # Rows present in the DB but gone from the page.
                    lst_remover = []
                    for k1 in lst_todos:
                        if k1[2] not in nomes_pagina:
                            lst_remover.append(k1)
                    if len(lst_remover):
                        print("exlui", len(lst_remover))
                        excluir(cursor, lst_remover, 'feed')
                        conn.commit()
                        # Re-read after the deletes so state is current.
                        lst_todos = utils.get_all(cursor, sql_todos)
                    if len(lst_inserir):
                        print("novos", len(lst_inserir))
                        sql_inserir = "INSERT INTO `feed` VALUES "
                        preenche_vazio(cursor, lst_inserir, template,
                                       sql_inserir)
                    conn.commit()
        finally:
            conn.close()
async def on_message(message):
    """Top-level Discord message handler.

    Ignores bots, auto-registers unknown players, honours private mode,
    then parses a '$'-prefixed command (shlex-split) and dispatches it to
    the matching branch. All replies are pushed onto respondqueue.
    """
    if message.author.bot:
        return
    # Check for player
    player = game.get_player(message.author.id)
    if player is None:
        player = game.new_player(message.author.id, message.author.name)
    # Dropout because we're running in private mode
    if get_setting(settings, 'private-mode', False) and not player.has_role('operator'):
        return
    # Only respond if this is an explicit command [first character is a $] or if bot gets pinged and it is a command
    if message.clean_content.startswith('$') or (
            client.user in message.mentions and '$' in message.clean_content):
        s = message.clean_content.find('$')
        cmds = shlex.split(message.clean_content[s + 1:])
        command = utils.get_alias(cmds[0])
        if command is None:
            print('Failed to interpret command {0}.'.format(cmds[0]))
        elif not utils.can_do(command, player.roles):
            print('Player tried to execute command {0} without permission.'.
                  format(cmds[0]))
        else:
            print('Running command {0}.'.format(command))
            if command == 'help':
                # '$help <cmd>' describes one command; bare '$help' lists help.
                if len(cmds) > 1:
                    cmd = utils.get_alias(cmds[1])
                    desc = utils.get_help(cmd, player.roles)
                else:
                    desc = utils.get_help(command, player.roles)
                respondqueue.put({'to': message.channel, 'message': desc})
            elif command == 'all':
                desc = utils.get_all(player.roles)
                respondqueue.put({'to': message.author, 'message': desc})
            elif command == 'save':
                game.save(gamefile)
                banking.queue.put({'type': 'save'})
                respondqueue.put({
                    'to': message.author,
                    'message': 'Executing full save.'
                })
            elif command == 'stop':
                # Graceful shutdown: save game and bank state before exit.
                await client.logout()
                game.save(gamefile)
                banking.queue.put({'type': 'save'})
                banking.stop()
                manager.stop()
                client.loop.call_soon(sys.exit, 0)
            elif command == 'kill':
                # Hard shutdown: no game save.
                await client.logout()
                banking.stop()
                manager.stop()
                client.loop.call_soon(sys.exit, 0)
            elif command == 'roles':
                # Optional argument selects another player; default is caller.
                if len(cmds) > 1:
                    tgt = game.get_player(cmds[1])
                else:
                    tgt = player
                if tgt is None:
                    respondqueue.put({
                        'to': message.channel,
                        'message': 'Cannot find player *{0}*'.format(cmds[1])
                    })
                elif len(tgt.roles) == 0:
                    respondqueue.put({
                        'to': message.channel,
                        'message': '*{0}* has no roles.'.format(tgt.username)
                    })
                else:
                    reply = '*{0}*\'s roles:'.format(tgt.username)
                    for i, r in enumerate(tgt.roles):
                        # A positive term length means the role expires.
                        if r.term_length.total_seconds() > 0:
                            reply += '\n{0}. **{1}** [runs out {2}]'.format(
                                i + 1, r.name.capitalize(),
                                r.term_end.strftime('%d-%m'))
                        else:
                            reply += '\n{0}. **{1}**'.format(
                                i + 1, r.name.capitalize())
                    respondqueue.put({'to': message.channel, 'message': reply})
            elif command == 'addrole':
                if len(cmds) < 3:
                    respondqueue.put({
                        'to': message.channel,
                        'message':
                        'Usage: `{0}`'.format(utils.cmds[command]['example'])
                    })
                    return
                tgt = game.get_player(cmds[1])
                role = Role(cmds[2])
                if tgt.has_role(role):
                    respondqueue.put({
                        'to': message.channel,
                        'message': 'Player {0} already has role {1}'.format(
                            tgt.username, role.name)
                    })
                    return
                role.term_start = datetime.datetime.now()
                # Optional 4th argument is an end date; -1 days == permanent.
                if len(cmds) < 4:
                    role.term_length = datetime.timedelta(days=-1)
                else:
                    try:
                        role.term_length = datetime.datetime.strptime(
                            cmds[3], '%d-%m-%Y') - role.term_start
                    except Exception as e:
                        respondqueue.put({
                            'to': message.author,
                            'message':
                            'Failed to interpret date format: {0}'.format(
                                str(e))
                        })
                        return
                tgt.roles.append(role)
                respondqueue.put({
                    'to': message.channel,
                    'message': 'Gave role {0} to player {1}'.format(
                        role.name, tgt.username)
                })
            elif command == 'delrole':
                if len(cmds) < 3:
                    respondqueue.put({
                        'to': message.channel,
                        'message':
                        'Usage: `{0}`'.format(utils.cmds[command]['example'])
                    })
                    return
                tgt = game.get_player(cmds[1])
                role = Role(cmds[2])
                if not tgt.has_role(role):
                    respondqueue.put({
                        'to': message.channel,
                        'message': 'Player {0} does not have role {1}'.format(
                            tgt.username, role.name)
                    })
                    return
                tgt.remove_role(role)
                respondqueue.put({
                    'to': message.channel,
                    'message': 'Removed role {0} from player {1}'.format(
                        role.name, tgt.username)
                })
            elif command == 'newaccount':
                if len(cmds) < 2:
                    respondqueue.put({
                        'to': message.channel,
                        'message':
                        'Usage: `{0}`'.format(utils.cmds[command]['example'])
                    })
                    return
                banking.queue.put({
                    'type': 'new',
                    'pid': player.uid,
                    'name': cmds[1],
                    'channel': message.channel
                })
            elif command == 'balance':
                banking.queue.put({
                    'type': 'balance',
                    'pid': player.uid,
                    'channel': message.channel
                })
            elif command == 'transfer':
                if len(cmds) < 4:
                    respondqueue.put({
                        'to': message.channel,
                        'message':
                        'Usage: `{0}`'.format(utils.cmds[command]['example'])
                    })
                    return
                fromid = cmds[1]
                toid = cmds[2]
                amount = cmds[3]
                details = ''
                if len(cmds) > 4:
                    details = cmds[4]
                # Notify every owner of both accounts, deduplicated.
                targets = []
                for u in banking.get_owners(fromid):
                    user = await client.get_user_info(u)
                    if not user in targets:
                        targets.append(user)
                for u in banking.get_owners(toid):
                    user = await client.get_user_info(u)
                    if not user in targets:
                        targets.append(user)
                banking.queue.put({
                    'type': 'transfer',
                    'pid': player.uid,
                    'from': fromid,
                    'to': toid,
                    'amount': amount,
                    'details': details,
                    'channel': targets
                })
            elif command == 'veterans':
                if message.channel.is_private:
                    respondqueue.put({
                        'to': message.channel,
                        'message':
                        'You need to use this command in a server.'
                    })
                    return
                # Only operators may request the full member list.
                print_all = False
                if len(cmds) > 1:
                    if cmds[1] == 'all' and player.has_role('operator'):
                        print_all = True
                # Collect member ids/names sorted by join date, oldest first.
                people = []
                names = []
                joined = []
                for m in message.server.members:
                    people.append(m.id)
                    names.append(m.name)
                    joined.append(m.joined_at)
                people = [p for _, p in sorted(zip(joined, people))]
                names = [p for _, p in sorted(zip(joined, names))]
                joined = sorted(joined)
                res = None
                if print_all:
                    # Chunk the full list into <1500-char ``` code blocks
                    # so each message stays under Discord's length limit.
                    res = []
                    for i in range(len(people)):
                        addition = '{0:2d}. {1} ({2})\n'.format(
                            i + 1, names[i], joined[i].strftime('%d-%m-%Y'))
                        if len(res) == 0:
                            res = ['```' + addition]
                        elif len(res[-1]) + len(addition) < 1500:
                            res[-1] += addition
                        else:
                            res[-1] += '```'
                            res.append('```' + addition)
                    res[-1] += '```'
                else:
                    # Top 10 by seniority, plus the caller's own position.
                    res = '```'
                    for i in range(10):
                        res += '{0:2d}. {1} ({2})\n'.format(
                            i + 1, names[i], joined[i].strftime('%d-%m-%Y'))
                    i = people.index(message.author.id)
                    if i > 10:
                        res += '...\n{0:2d}. {1} ({2})\n'.format(
                            i, names[i - 1],
                            joined[i - 1].strftime('%d-%m-%Y'))
                        res += '{0:2d}. {1} ({2})\n'.format(
                            i + 1, names[i], joined[i].strftime('%d-%m-%Y'))
                        if i + 1 < len(people):
                            res += '{0:2d}. {1} ({2})\n'.format(
                                i + 2, names[i + 1],
                                joined[i + 1].strftime('%d-%m-%Y'))
                        if i + 2 < len(people):
                            res += '...'
                    res += '```'
                respondqueue.put({'to': message.channel, 'message': res})
def do_get(sock, num_bytes):
    """Read exactly *num_bytes* from *sock* via utils.get_all."""
    payload = utils.get_all(sock, num_bytes)
    return payload
def get_all_cpu(request):
    """Return CPU metrics for every known instance as JSON.

    Args:
        request: the incoming HTTP request (unused directly; required by
            the view signature).

    Returns:
        JsonResponse with key "all_cpu" mapping to one get_cpu() result
        per instance, in get_all() order.
    """
    # Comprehension replaces the manual append loop (same order, same data).
    cpu_stats = [get_cpu(instance) for instance in get_all()]
    return JsonResponse({"all_cpu": cpu_stats})
def get_instances(request):
    """Return the name of every instance as a JSON list."""
    names = [instance.name for instance in get_all()]
    return JsonResponse({"instances": names})
'embed_size': space[2], # Length of the vector that we willl get from the embedding layer 'latent_dim': space[3], # Hidden layers dimension 'dropout_rate': space[4]}#, # Rate of the dropout layers #'epochs': space[0], # Number of epochs #'max_features': space[0], # Max num of vocabulary #'category': space[0], # Is categoty labels #'embedding': space[0], # Using pre-made embedidng matrix as weight #'model_type': space[0] #} f1 = no_pad_time_tuning(param, notes_train, labels_train, up_notes_train, up_labels_train, gold_labels_train, notes_test, labels_test, gold_labels_test) return (-f1) if __name__ == "__main__": # loading data notes_train_1, labels_train_1, up_notes_train_1, up_labels_train_1, gold_labels_train_1 = get_all('/host_home/data/i2b2/2014/training/training-RiskFactors-Complete-Set1') notes_train_2, labels_train_2, up_notes_train_2, up_labels_train_2, gold_labels_train_2 = get_all('/host_home/data/i2b2/2014/training/training-RiskFactors-Complete-Set2') notes_train = notes_train_1 + notes_train_2 labels_train = labels_train_1 + labels_train_2 up_notes_train = up_notes_train_1 + up_notes_train_2 up_labels_train = up_labels_train_1 + up_labels_train_2 gold_labels_train = gold_labels_train_1 + gold_labels_train_2 notes_test, labels_test, _1, _2, gold_labels_test = get_all('/host_home/data/i2b2/2014/testing/testing-RiskFactors-Complete') space = [Integer(5, 10, name='up'), Integer(3, 7, name='window_size'), Integer(2, 4, name='embed_size'), Integer(1, 3, name='latent_dim'), Real(0, 0.3, name='dropout_rate')]
def get_categories():
    """Return every category row as JSON under the 'categories' key."""
    payload = [
        {"id": row[0], "name": row[1], "url": row[2]}
        for row in utils.get_all("SELECT * from category")
    ]
    return jsonify({"categories": payload})
# Script section: parse CLI flags and answer the requested "question".
# NOTE(review): `parser` is created before this chunk — not visible here.
parser.add_argument('-m', '--test_method', required=False)
parser.add_argument('-p', '--test_par', required=False)
io_args = parser.parse_args()
question = io_args.question


def read_dataset(filename):
    """Load ../data/<filename> as a DataFrame, keeping literal 'NA' strings."""
    with open(os.path.join("..", "data", filename), "rb") as f:
        # keep_default_na=False so country codes like "NA" survive as text.
        dataset = pd.read_csv(f, keep_default_na=False)
    return dataset


if question == 'find_nan_ct':
    # Count countries whose mapped name is a real string vs. missing (nan).
    dataset = read_dataset("phase1_training_data.csv")
    n_ct, ct_key, ct_mapper, ct_inv_mapper, ct_ind = utils.make_mapper(
        dataset)
    dataset = utils.get_all(dataset)
    dataset_CA = dataset[dataset[ct_key] == "CA"]
    print("Input Country Number:", n_ct)
    n_ct1 = 0
    i = 0
    # Indices of countries whose inverse-mapped name is not a string.
    ct_nan_ind = n_ct * [None]
    for nct in range(n_ct):
        if type(ct_inv_mapper[nct]) == str:
            n_ct1 += 1
        else:
            ct_nan_ind[i] = nct
            i += 1
    print("Output Country Number:", n_ct1)
    if n_ct1 == n_ct:
        print("No 'nan' named country found!")
def preenche_evento():
    """Scrape the events site and synchronize the `evento` table.

    (Original docstring, translated: "Searches the site and updates the
    events table.") New events are inserted, missing ones deleted, and
    existing ones have their dates/link/slots updated.
    """
    resposta = requests.get(url_eventos)
    lst_saida = []
    ir = False
    if resposta.ok:
        ir = True
        parse = bs4.BeautifulSoup(resposta.text, 'html.parser')
        lst = parse.find_all("table")
        filtro = []
        # Column indices inside each event row.
        inome = 0
        iinscricao = 2
        ivagas = 3
        agora = datetime.datetime.now().strftime(utils.foramto_full_db)
        # Keep only tables with the "bordas" CSS class.
        for i in lst:
            tmp = i.get("class")
            if tmp and "bordas" in tmp:
                filtro.append(i)
        if len(filtro) > 0:
            alvo = filtro[0]
            # Skip the two header rows.
            lst_linha = alvo.find_all('tr')[2:]
            for j in lst_linha:
                lst_coluna = j.find_all('td')
                nome = inicio_inscricao = fim_inscricao = link = ""
                k = lst_coluna[inome]
                nome = k.font.a.get_text().strip()
                link = url_eventos + k.font.a.get('href').strip()
                k = lst_coluna[iinscricao]
                inscricao = k.font.get_text().strip()
                # Parse the registration window into start/end dates.
                dinscricao = extrari_inscricao(inscricao)
                inicio_inscricao = dinscricao["inicio_inscricao"]
                fim_inscricao = dinscricao["fim_inscricao"]
                k = lst_coluna[ivagas]
                vagas = k.span.font.get_text().strip()
                lst_saida.append([
                    "NULL", agora, nome, inicio_inscricao, fim_inscricao,
                    link, vagas
                ])
    else:
        print("pagina forma do ar")
        return
    # Row template for the INSERT built by preenche_vazio.
    template = "({}, '{}', '{}', '{}', '{}', '{}', {});"
    if ir:
        print("-- evento")
        conn = utils.get_db_conn()
        lst_pagina = sorted(lst_saida, key=lambda elemento: elemento[2])
        nomes_pagina = [i[2] for i in lst_pagina]
        try:
            with conn.cursor() as cursor:
                sql_vazio = "SELECT count(*) FROM `evento`"
                if utils.is_empty(cursor, sql_vazio):
                    # Empty table: bulk-insert everything from the page.
                    sql_inserir = "INSERT INTO `evento` VALUES "
                    preenche_vazio(cursor, lst_saida, template, sql_inserir)
                else:
                    # Update path: data already exists.
                    sql_todos = "SELECT * FROM `evento` ORDER BY `nome`"
                    nome_todos = "SELECT `nome` FROM `evento` ORDER BY `nome`"
                    lst_nome_todos = utils.get_all(cursor, nome_todos)
                    lst_nome_todos = [i[0] for i in lst_nome_todos]
                    lst_todos = utils.get_all(cursor, sql_todos)
                    # Split page rows into updates (name exists) / inserts.
                    lst_atualizar = []
                    lst_inserir = []
                    for k in lst_pagina:
                        # print(repr(k[2]))
                        if k[2] in lst_nome_todos:
                            lst_atualizar.append(k)
                        else:
                            lst_inserir.append(k)
                    # Rows present in the DB but gone from the page.
                    lst_remover = []
                    for k1 in lst_todos:
                        if k1[2] not in nomes_pagina:
                            lst_remover.append(k1)
                    if len(lst_remover):
                        print("exlui", len(lst_remover))
                        excluir(cursor, lst_remover, 'evento')
                        conn.commit()
                        # Re-read after the deletes so state is current.
                        lst_todos = utils.get_all(cursor, sql_todos)
                    # Dates and slots: map event name -> primary key.
                    lst_nomes = [g[2] for g in lst_todos]
                    chaves = {g[2]: g[0] for g in lst_todos}
                    if len(lst_atualizar):
                        # Elements that come from the page.
                        sql = "UPDATE `evento` SET `inicio_inscricao`='{}'" + \
                            ", `fim_inscricao`='{}', `link`='{}',`vagas`={}" +\
                            " WHERE `id`={}"
                        print("atualiza", len(lst_atualizar))
                        for i in lst_atualizar:
                            tmp_lst = i[3:] + [chaves[i[2]]]
                            sql_completo = sql.format(*tmp_lst)
                            # print(sql_completo)
                            cursor.execute(sql_completo)
                    if len(lst_inserir):
                        print("novos", len(lst_inserir))
                        sql_inserir = "INSERT INTO `evento` VALUES "
                        preenche_vazio(cursor, lst_inserir, template,
                                       sql_inserir)
                    """ meu proprio controle: remover eventos que nao tem vaga, ou terminou a data de inscricao casos especiais: A evento novo na pagina B evento nao existe mais na pagina *. trazer os eventos ordenas por nome. (ordernar os eventos aqui tmb) (essa parte vai ser igual ao do feed) A. not (nome in lst_nomes) => elemento novo B. banco = banco[:len(pagina)] # oque sobra do tamanho da pagina eh pq foi removido 1. controle da pagina 2. meu controle """
                    conn.commit()
        finally:
            conn.close()
def receive_header(sock):
    """Read a 10-byte header from *sock* and return its first two fields.

    The header is '|'-separated; only the first two fields are returned,
    as a 2-tuple.
    """
    raw = utils.get_all(sock, 10)
    fields = raw.split('|')
    return (fields[0], fields[1])