def one_set_insert(data, cnt):  # insert multiple values
    conn = connector.Connector()
    switch = 1
    if len(data) > int(cnt):
        conn.insert("result", data)
        switch = 0
    return switch
def get_connector(port, num_threads, directory):
    # Listen on the specified port.
    # Note: the local variable must not be named "connector"; that would
    # shadow the module and raise UnboundLocalError on the right-hand side.
    conn = connector.Connector(converter=converters.StringConverter, port=port)
    add_file_writers(conn, num_threads, directory)
    conn.run()
    return conn
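# Why the rename above matters: assigning to a name anywhere in a function
# body makes that name local for the whole function, so the original
# "connector = connector.Connector(...)" raises UnboundLocalError before the
# module can ever be read. A minimal, self-contained reproduction:
import connector

def broken():
    # Python treats "connector" as local because of this assignment, so the
    # right-hand side raises UnboundLocalError at call time.
    connector = connector.Connector()
    return connector

def fixed():
    conn = connector.Connector()  # distinct local name; the module resolves
    return conn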
def setUp(self):
    self.connector = connector.Connector()  # initialize the connector
    self.dr = self.connector.open_connection('Android')  # open the Android connection
    self.common = case_common.CommonCase(self.dr)
    self.ssq = case_ssq.Ssq(self.dr)
    self.kit = case_klt.Klt(self.dr)
def __init__(self):
    config = Config()
    sender_email = config.get_sender()
    smtp_server, smtp_port = config.get_smtp_info()
    imap_server, imap_port = config.get_imap_info()
    password = getpass.getpass()
    self.conn = connector.Connector(sender_email, password, smtp_server,
                                    smtp_port, imap_server, imap_port)
def one_set_get(xywh=A):
    """
    :param xywh: A, B1 ~ B7
    :return: list of per-cell result rows
    """
    conn = connector.Connector()
    g_id = int(conn.select_limit("result", {}, column=["g_id"],
                                 order_by="g_id desc")[0][0]) + 1
    seq, ex_p, ex_b, p, b, t = 1, 0, 0, 0, 0, 0
    latest = ""
    img = set_image(xywh)
    data = []
    if xywh == A:
        w2, h2 = 22, 22
    else:
        w2, h2 = 17, 17
    for i in range(0, 10):
        x2 = w2 * i
        for j in range(0, 6):
            y2 = h2 * j
            result = ""
            # Crop reduced to a single cell: sample one pixel (BGR order).
            c, g, r = img[y2 + 9:y2 + 10, x2 + 3:x2 + 4][0][0]
            if (r > 100) & (g > 100):
                break
            elif (g > r) & (g > c):
                result = "T"
                t = t + 1
            elif c > r:
                result = "P"
                p = p + 1
            elif r > c:
                result = "B"
                b = b + 1
            latest = latest + result
            row = {
                "g_id": g_id,
                "sequence": seq,
                "result": result,
                "latest": latest,
                "ex_p": ex_p,
                "ex_b": ex_b,
                "P": p,
                "B": b,
                "T": t
            }
            # print(row)
            data.append(row)
            ex_p, ex_b = p, b
            seq = seq + 1
    return data
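# If it helps to see the channel logic of one_set_get in isolation, here is a
# small self-contained sketch of the same BGR thresholding rule. The helper
# name and the synthetic NumPy pixels are illustrative, not part of the
# original module.
import numpy as np

def classify_pixel(bgr):
    """Classify one BGR pixel the way one_set_get does: green-dominant -> "T",
    blue-dominant -> "P", red-dominant -> "B", bright red+green -> empty."""
    c, g, r = int(bgr[0]), int(bgr[1]), int(bgr[2])
    if r > 100 and g > 100:
        return None  # treated as an empty cell (the scan loop breaks)
    if g > r and g > c:
        return "T"
    if c > r:
        return "P"
    if r > c:
        return "B"
    return ""

assert classify_pixel(np.array([255, 0, 0], dtype=np.uint8)) == "P"  # blue
assert classify_pixel(np.array([0, 0, 255], dtype=np.uint8)) == "B"  # red
assert classify_pixel(np.array([0, 255, 0], dtype=np.uint8)) == "T"  # green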
def get_tasks():
    conn = connector.Connector(config['DataBase']['dbName'],
                               config['DataBase']['dbTable'],
                               config['Elasticsearch']['index'],
                               config['Elasticsearch']['doc_type'])
    try:
        if request.data.decode('ascii') == 'index':
            conn.index()
        elif request.data.decode('ascii') == 'delete':
            conn.delete_index()
        else:
            return abort(400)
        return f'{request.data.decode("ascii")} is executed'
    except Exception as e:
        # "self" is unavailable in a plain view function; log via the
        # module-level logger instead.
        ROOT_LOGGER.exception(f'error {type(e).__name__}: {e.args[0]}')
def __init__(self, *args):
    self.setupUi()
    self.dialog = init.initform(self)
    QtCore.QObject.connect(self.dialog.ui.PB_login,
                           QtCore.SIGNAL("clicked()"), self.login)
    QtCore.QObject.connect(self.dialog.ui.PB_exit,
                           QtCore.SIGNAL("clicked()"), self.queryExit)
    QtCore.QObject.connect(
        self.extension,
        SIGNAL(
            'openUrlRequest (const KUrl&, const KParts::OpenUrlArguments&, const KParts::BrowserArguments&)'
        ), self.changePage)
    self.conn = connector.Connector()
    self.conn.register(self.writeMsgBox, "writeMsgBox")
    self.conn.register(self.setNumItems, "setNumItems")
    self.dialog.exec_()
def coreParser(url):
    """Parse HTML code for links.

    Args:
        url: The URL address to parse, e.g. http://example.com

    Returns:
        A Links object whose array holds the links that were found.
    """
    connect = connector.Connector(url)
    dataDirty = connect.getWebData()
    link = links.Links()
    link.findLinks(dataDirty)
    return link
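# A hedged usage sketch for coreParser; the URL is illustrative. The .links
# attribute is the array the docstring mentions (the tests further down read
# link.links and link.linksCount).
result = coreParser("http://example.com")
for href in result.links:
    print(href)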
def setup(insock, servsock, qrysock, backup):
    conn = None
    try:
        conn = connector.Connector()
        recvr = native.receiver.Receiver(insock, conn.loop)
        fproc = FlowProc(recvr, backup)
        if backup:
            fproc._on_restore(backup)
        conn.timer(1, fproc.on_time)
        conn.listen(qrysock, fproc)
    except KeyboardInterrupt:
        logger.dump("closing")
    finally:
        # Guard against NameError if Connector() itself raised.
        if conn is not None:
            conn.close()
def setup(insock, outsock, qrysock):
    conn = None
    try:
        conn = connector.Connector()
        pub = conn.publish(outsock)
        fproc = FlowRaw(pub)
        fproc.header()
        conn.subscribe(insock, 'flow', fproc.on_flow)
        conn.timer(1, fproc.on_time)
        conn.listen(qrysock, fproc)
    except KeyboardInterrupt:
        logger.dump("closing")
    finally:
        # Guard against NameError if Connector() itself raised.
        if conn is not None:
            conn.close()
def setup(self):
    self.background = arcade.load_texture('../../images/bg-texture.jpg')
    self.sprite_list = arcade.SpriteList()
    self.button_list = []
    # Button labels (Russian): start game, log in, register, exit.
    self.buttons = {
        'Начать игру': self.start_game,
        'Вход': self.auth,
        'Регистрация': self.registration,
        'Выход': self.exit
    }
    self.cover_sprite = arcade.Sprite('../../images/menu-cover.png',
                                      SPRITE_SCALING * 0.9)
    self.cover_sprite.center_x = SCREEN_WIDTH // 2
    self.cover_sprite.center_y = SCREEN_HEIGHT // 1.3
    self.sprite_list.append(self.cover_sprite)
    self.button_bg_sprite = arcade.Sprite('../../images/button-bg.png',
                                          SPRITE_SCALING)
    self.button_bg_sprite.center_x = SCREEN_WIDTH // 2
    self.button_bg_sprite.center_y = SCREEN_HEIGHT // 3.5
    self.sprite_list.append(self.button_bg_sprite)
    y = self.button_bg_sprite.top - 80
    for key in self.buttons.keys():
        y -= 50
        self.button_list.append(
            custom_button.TextButton(self.button_bg_sprite.center_x, y, 180,
                                     50, key, self.buttons.get(key),
                                     font_size=20,
                                     font_face='../../fonts/17719.ttf'))
    self.connector = connector.Connector('http://localhost/')
    self.login = self.connector.get_login()
def __init__(self, platform, name):
    self.mPlatform = platform
    self.appNane = name
    strtime = time.strftime("_%Y_%m_%d_%H_%M_%S")
    self.dir = self.appNane + strtime
    os.mkdir(self.dir)
    self.mDevice = devices.Devices(self.mPlatform).get_device()
    self.mCon = connector.Connector('localhost', 8084)
    # self.eventsXML = xmls.EventsXML(self.mCon)
    # self.countersXML = xmls.CountersXML(self.mCon)
    self.capturedXML = xmls.CapturedXML(self.mCon)
    self.sessionXML = xmls.SessionXML(self.mCon)
    self.mAPC = apc.Apc(self.mCon, self.dir, '0000000000')
    self.mBuf = buffer.Buffer(self.mDevice, self.mCon, self.mAPC,
                              self.capturedXML)
    self.mXls = xls.Xls(self.dir, 'Calc.xlsx', self.mDevice)
    self.status = -1
    # chkstatus may be a class attribute in the original; initialized here so
    # the excerpt stands alone.
    self.chkstatus = []
    for i in range(14):
        self.chkstatus.append(1)
def data():
    conn = c.Connector()
    # Fetch the form data.
    stood_out = request.form['stood_out']
    first_time = request.form['first_time']
    attend_reason = ' '.join(request.form.getlist('attend_reason'))
    disap = request.form['disappointing']
    rating = request.form['rating']
    water = request.form['water_conserving']
    knew_about = request.form['knew_about']
    heard_about = ' '.join(request.form.getlist('heard_about'))
    post_social = request.form['post_social']
    platform_social = request.form['social_platform']
    topic_interests = request.form['topic_interests']
    get_involved = ' '.join(request.form.getlist('get_involved'))
    gender = request.form['gender']
    adult_ages = request.form['adult_ages']
    child_ages = request.form['child_ages']
    zip_code = request.form['zip']
    income = request.form['income']
    ethnicity = ' '.join(request.form.getlist('ethnicity'))
    qr_id = session['qr_id']
    all_data = (first_time, attend_reason, stood_out, disap, rating, water,
                knew_about, heard_about, post_social, platform_social,
                topic_interests, get_involved, gender, adult_ages, child_ages,
                zip_code, income, ethnicity, qr_id)
    cols = ('FirstTime', 'Reason', 'StoodOut', 'Disappointing', 'Rating',
            'AppreciationIncrease', 'KnewAbout', 'HowHeardAbout',
            'SocialMediaPosted', 'SocialMediaPlatform', 'TopicInterests',
            'GetInvolved', 'Gender', 'AdultAges', 'ChildAges', 'Zip',
            'Income', 'Ethnicity', 'QRCode')
    conn.insert_row(table_name, cols, all_data)
    return redirect(url_for('root'))
def __init__(self):
    self.message_processor = defaultmsgprocessor.DefaultMsgProcessor()
    self.connector = connector.Connector()
# Test for two links
testTextTwoLink = "Totoje proste test <a href=\"http://example.com\"> " \
                  "example.com </a> <a href=\"https://seznam.cz\"> " \
                  "seznam.cz </a> that's all."
link1.findLinks(testTextTwoLink)
assert (link1.linksCount == 2)
assert (link1.links[0] == "http://example.com")
assert (link1.links[1] == "https://seznam.cz")
assert (len(link1.links) == 2)

# *********** Module connector ***********
url = "http://example.com"
conn = connector.Connector(url)


def test_init():
    """Test that the URL address is stored in the class variable url."""
    assert (conn.url == "http://example.com")


def test_getWebData():
    """Test for proper output from the example."""
    # open() raises on a missing file rather than returning None, so guard
    # with try/except instead of an "is None" check.
    try:
        fd = open("index.txt", "r")
    except FileNotFoundError:  # pragma: no cover
        print("File index.txt not found")
        exit(-1)
    htmlData = fd.read()
def dashboard(suffix=None):
    if 'username' not in session:
        return redirect(url_for('root'))
    conn = c.Connector()
    conn.add_suffix(suffix)

    # Select all free-text responses.
    social_media_plat = conn.select_column('SocialMediaPlatform', table_name)
    print("\n\n")
    print(social_media_plat)
    stoodout = conn.select_column('StoodOut', table_name)
    disappointing = conn.select_column('Disappointing', table_name)
    topic_interests = conn.select_column('TopicInterests', table_name)
    adult_ages = conn.select_column('AdultAges', table_name)
    child_ages = conn.select_column('ChildAges', table_name)
    zip_code = conn.select_column('Zip', table_name)

    # Select radio button, checkbox, or dropdown responses.
    first_time = ['yes', 'no']
    attend_reason = [
        'learning about plants', 'saw an ad', 'rest/relax',
        'children away from screens', 'entertainment', 'picnic',
        'connect with nature', 'bird watching', 'from out of town',
        'free community resource',
        'famliy activity',  # sic: must match the stored form value
        'photography', 'bring a child', 'friend brought me', 'other'
    ]
    rating = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
    water_conserving = ['none', 'some', 'high']
    knew_about = ['yes', 'no']
    how_heard_about = [
        'website', 'online', 'newspaper', 'magazine', 'drove_by',
        'word_of_mouth', 'social_media', 'always_knew_about', 'other'
    ]
    post_social = ['yes', 'no']
    get_involved = [
        'newsletter', 'donation', 'photography_permit', 'volunteer',
        'marmalade', 'facebook', 'other'
    ]
    gender = ['male', 'female', 'no answer']
    income = [
        'below 20', '20-40', '40-60', '60-75', '75-100', 'above 100',
        'no answer'
    ]
    ethnicities = [
        'caucasian', 'african-american', 'asian/pacific islander',
        'hispanic/latino/chicano', 'native-american/alaskan native', 'other'
    ]
    week_day = [
        'Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday', 'Saturday',
        'Sunday'
    ]

    # Count the number of occurrences of values in the table. The third
    # argument to collect() must match the column name of the database being
    # queried.
    first_time_count = collect(conn, first_time, 'FirstTime')
    attend_reason_count = collect(conn, attend_reason, 'Reason')
    rating_count = collect(conn, rating, 'Rating')
    water_conserving_count = collect(conn, water_conserving,
                                     'AppreciationIncrease')
    knew_about_count = collect(conn, knew_about, 'KnewAbout')
    how_heard_about_count = collect(conn, how_heard_about, 'HowHeardAbout')
    post_social_count = collect(conn, post_social, 'SocialMediaPosted')
    get_involved_count = collect(conn, get_involved, 'GetInvolved')
    gender_count = collect(conn, gender, 'Gender')
    income_count = collect(conn, income, 'Income')
    ethn_count = collect(conn, ethnicities, 'Ethnicity')
    week_day_count = collect(conn, week_day, 'DAYNAME(Timestamp)')

    questions = json.dumps({
        'FirstTime': first_time_count,
        'Reason': attend_reason_count,
        'Rating': rating_count,
        'AppreciationIncrease': water_conserving_count,
        'KnewAbout': knew_about_count,
        'HowHeardAbout': how_heard_about_count,
        'SocialMediaPosted': post_social_count,
        'GetInvolved': get_involved_count,
        'Gender': gender_count,
        'Income': income_count,
        'Ethnicity': ethn_count,
        'SubmissionDay': week_day_count,
        'StoodOut': stoodout,
        'Disappointing': disappointing,
        'SocialMediaPlatform': social_media_plat,
        'AdultAges': adult_ages,
        'ChildAges': child_ages,
        'Zip': zip_code,
        'TopicInterests': topic_interests
    })
    response_count = conn.select_num_rows(table_name)
    others = json.dumps(
        conn.select_other_responses(
            table_name,
            ['GetInvolved', 'Reason', 'HowHeardAbout', 'Ethnicity']))
    return render_template('dashboard.html',
                           questions=questions,
                           others=others,
                           response_count=response_count)
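# collect() is referenced in dashboard() but not defined in this excerpt. A
# hypothetical sketch with the same signature, counting occurrences in Python
# on top of the select_column call the excerpt already uses; the real helper
# may instead issue COUNT queries, so treat this as an assumption.
def collect(conn, values, column):
    rows = conn.select_column(column, table_name)
    # Checkbox answers are stored space-joined, so substring-match each value.
    return [sum(1 for row in rows if str(value) in str(row))
            for value in values]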
def training_model(main_path, type, config_file, from_date, to_date,
                   customer):
    #logging.basicConfig(filename=logCustomer, level=logging.INFO)
    #lg.configureLogger(QIUserLogger, customer, "training")
    QIUserLogger.info(
        "-----------------------------------------------------------------")
    QIUserLogger.info(
        "------------------------Training Start---------------------------")
    QIUserLogger.info("** Initialization start... **")
    QIUserLogger.info(" MainPath - " + str(main_path))
    QIUserLogger.info(" Type - " + str(type))
    QIUserLogger.info(" ConfigFile - " + str(config_file))
    QIUserLogger.info(" FromDate - " + str(from_date))
    QIUserLogger.info(" ToDate - " + str(to_date))
    QIUserLogger.info("** Initialization End **")
    try:
        QIUserLogger.info("1 - Load Configurations")
        QIUserLogger.info(" ** Config for Classification")
        # Load the config files.
        configModel = cg.Config()
        configModel.configFromFile(config_file)
        configModel.main_path = main_path
        configModel.updateDataOfMainPath(config_file, main_path)
        dataL = dt.Data(configModel)

        QIUserLogger.info("2 - Login In API")
        # Log in to the API.
        configConnection = con.ConfigConnection()
        dir_path = os.path.dirname(os.path.realpath(__file__))
        configConnection.configFromFile(dir_path + "/config/" + customer +
                                        "/connector_config.json")
        connector = con.Connector(configConnection)
        # Create a persistent session.
        Reqsess = requests.session()
        connector.login(Reqsess)

        QIUserLogger.info("3 - GET TICKETS FROM API")
        params = "closedfrom=" + str(from_date) + "&closedto=" + str(
            to_date) + "&maxnum=" + str(configConnection.max_tickets_to_get)
        #params = {"closedfrom": from_date, "closedto": to_date, "maxnum": configConnection.max_tickets_to_get}
        responseTicket = connector.getTickets(Reqsess, params)
        if len(responseTicket) > 0:
            rTicket = []
            for t in responseTicket:
                rTicket.append(t['description'])
            id2lab = dict(
                zip(configModel.labels_map.values(),
                    configModel.labels_map.keys()))
            gather_tickets, gather_targets = gatherData(
                type, responseTicket, configModel, id2lab)

            QIUserLogger.info("4 - REMOVE STOP WORDS FROM NEW TICKETS")
            tok = tk.Tokenizer(gather_tickets)
            tok.tokenizeTickets()
            tickets_to_lower = tok.toLower()
            gather_tickets, gather_targets = tok.removeStopWordsToString(
                tickets_to_lower, gather_targets)

            QIUserLogger.info("5 - GET STORED DATA TICKETS")
            tickets_train = dataL.loadDataInArray(
                configModel.data_path + "/tickets.txt",
                configModel.csv_encoding)
            targets_train = dataL.loadDataInArray(configModel.data_path +
                                                  "/targets.txt")

            # Count whether we reached the threshold.
            QIUserLogger.info("6 - MERGE THE DATA - STORED AND GATHERED")
            max_length = configModel.max_num_tickets
            len_gather_tickets = len(gather_tickets)
            len_tickets = len(tickets_train)
            # Retrain on the whole dataset instead of doing transfer learning,
            # so the vocabulary is always up to date.
            tickets = tickets_train + gather_tickets
            targets = targets_train + gather_targets
            reached_dim = len_gather_tickets + len_tickets
            if reached_dim > max_length:
                elem_to_cut = reached_dim - max_length
                # Cut out the first elem_to_cut elements.
                # (Fixed: tickets and targets were swapped here.)
                merged_tickets = tickets[elem_to_cut:]
                merged_targets = targets[elem_to_cut:]
                tickets = merged_tickets
                targets = merged_targets
                reached_dim = max_length

            QIUserLogger.info("7 - REMOVE IDENTICAL TICKETS")
            #tickets, targets = ut.removeIdenticalTickets(tickets, targets)
            tickets, targets = ut.removeIdenticalTicketsFromNew(
                tickets, targets, len_tickets, reached_dim)

            QIUserLogger.info("8 - SAVING MERGED DATA")
            dataL.writeArrayInFileCompleteDataPath(
                tickets, configModel.data_path + '/tickets.txt', "utf-8")
            dataL.writeArrayInFileCompleteDataPath(
                targets, configModel.data_path + '/targets.txt', "utf-8")

            QIUserLogger.info("9 - EXTRACT WORDS FROM TICKETS")
            words = tok.extractWordsTicketString(tickets)

            QIUserLogger.info("10 - BUILD NEW VOCABULARY")
            # Create the vocabulary.
            voc = vc.Vocabulary(configModel)
            dictionary, reverse_dict = voc.build_dictionary(
                words, configModel.labels)
            voc.saveDictionary(dictionary, "vocabulary")
            QIUserLogger.info("*** Vocabulary saved")

            QIUserLogger.info("11 - SPLIT DATA IN TRAINING AND TEST DATASET")
            tickets_training, tickets_test, Target_training, Target_test = ut.get_train_and_test(
                tickets, targets)
            dataL.writeArrayInFileCompleteDataPath(
                tickets_training,
                configModel.data_path + '/tickets_training.txt', "utf-8")
            dataL.writeArrayInFileCompleteDataPath(
                Target_training,
                configModel.data_path + '/targets_training.txt', "utf-8")
            dataL.writeArrayInFileCompleteDataPath(
                tickets_test, configModel.data_path + '/tickets_test.txt',
                "utf-8")
            dataL.writeArrayInFileCompleteDataPath(
                Target_test, configModel.data_path + '/targets_test.txt',
                "utf-8")

            QIUserLogger.info("12 - CREATE TICKETS AND TARGETS SEQUENCES")
            # Create sequences and one-hot vectors for the targets.
            tickets_training_sequences = dataL.createDataSequenceTicketsString(
                tickets_training, dictionary)
            oneHotVectorTarget_training = dataL.transformInOneHotVector(
                configModel.labels, Target_training)

            QIUserLogger.info("13 - FILTER OUT DATA - Removing Token OOV")
            filtdata = fd.FilterData(configModel, configModel.labels)
            tickets_training_sequences, oneHotVectorTarget_training, trash = filtdata.removeTokenOOV(
                tickets_training_sequences, oneHotVectorTarget_training,
                dictionary)
            QIUserLogger.info(" *** Trash class in training: " +
                              str(len(trash)))

            #QIUserLogger.info(" -- Split Training | Test Dataset")
            #tickets_training_sequences, tickets_test_sequences, oneHotVectorTarget_training, oneHotVectorTarget_test = ut.get_train_and_test(tickets_training_sequences, oneHotVectorTarget_training)

            QIUserLogger.info("14 - SAVING TRAINING SEQUENCES")
            dataL.writeArrayInFileCompleteDataPath(
                tickets_training_sequences,
                configModel.data_sequences_path + '/tickets_training.txt',
                "utf-8")
            dataL.writeArrayInFileCompleteDataPath(
                oneHotVectorTarget_training,
                configModel.data_sequences_path + '/target_training.txt',
                "utf-8")
            QIUserLogger.info(" *** Training Size : " +
                              str(len(tickets_training_sequences)) + "\n")
            if configModel.use_pretrained_embs:
                QIUserLogger.info(" *** Use pretrained word embeddings")
                skip = sk.SkipgramModel(configModel)
                skipgramModel = skip.get_skipgram()
                skipgramEmbedding = skip.getCustomEmbeddingMatrix(
                    skipgramModel, reverse_dict)
                configModel.skipgramEmbedding = skipgramEmbedding

            # Start training.
            QIUserLogger.info("15 - START TRAINING")
            ml.runTraining(configModel, tickets_training_sequences,
                           oneHotVectorTarget_training, configModel.labels)
            QIUserLogger.info("============ End =============")
        else:
            QIUserLogger.info(
                "No new tickets found. There is no need for a new training.")
        # Log out.
        connector.logout(Reqsess)
    except Exception as e:
        print(str(e))
        QIUserLogger.error("Error in training_model " + str(e))
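# The merge-and-trim step in section 6 above keeps only the most recent
# max_num_tickets examples. A minimal illustration of the same windowing
# (the values are made up):
tickets_train = ['t1', 't2', 't3']        # previously stored
gather_tickets = ['t4', 't5']             # newly fetched
max_length = 4

tickets = tickets_train + gather_tickets
overflow = len(tickets) - max_length
if overflow > 0:
    tickets = tickets[overflow:]          # drop the oldest entries first
assert tickets == ['t2', 't3', 't4', 't5']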
import connector
from itertools import combinations

conn = connector.Connector()
aa = range(1, 46)
for num in combinations(aa, 6):
    conn.insert("origin", str(list(num)))
print("finish")
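# For scale: the loop above enumerates every 6-number combination of 1-45,
# which is C(45,6) = 8,145,060 rows, one insert each, so batching the inserts
# may be worth considering. A quick sanity check of that count:
from math import comb

assert comb(45, 6) == 8_145_060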
def test_postRequest_raisesRequestErrorOnMalformedURL():
    con = connector.Connector('localhost/')
    # The test name implies this call should raise on the malformed URL;
    # see the pytest.raises sketch after the next test.
    con._post_request(url="notExisting", post_params={}, files=None)
def test_postRequest_raisesRequestErrorOnNoConnection():
    con = connector.Connector('http://localhost/')
    con._post_request(url="notExisting", post_params={}, files=None)
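# As written, the two tests above merely trigger the failing call; if they
# are meant to assert that it raises, a pytest.raises context makes that
# explicit. pytest.raises(Exception) is used here only because the connector
# module's actual exception class is not shown in this excerpt; substitute
# the real one.
import pytest

def test_postRequest_raises_sketch():
    con = connector.Connector('http://localhost/')
    with pytest.raises(Exception):
        con._post_request(url="notExisting", post_params={}, files=None)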
(GPL v3) by Tobias Sauer http://tobi.leichtdio.de
part of
- Dreifach Glauben: http://dreifachglauben.de
- Leichtdio.de: http://leichtdio.de
tobias [at] leichtdio . de
"""
from bottle import route, run, template, error, static_file, debug
import json
import os.path
import connector

pointer = connector.Connector()


# Each handler gets a distinct name; the original defined server_static three
# times, which works with bottle's decoration-time registration but shadows
# the earlier functions.
@route('/css/<css>')
def serve_css(css):
    return static_file(css, root="css")


@route('/fonts/<fontfile>')
def serve_font(fontfile):
    return static_file(fontfile, root="fonts")


@route('/js/<js_lib>')
def serve_js(js_lib):
    return static_file(js_lib, root="js")
def get_connector():
    # Do not listen on any port.
    # Use a local name other than "connector" to avoid shadowing the module
    # (see the UnboundLocalError note on the earlier get_connector).
    conn = connector.Connector(converter=converters.FileConverter)
    conn.run()
    return conn
def newCustomer(userName, userEmail, password, customerEmail, modelsDir):
    try:
        QIlogger.info("------------------------------------------------------")
        QIlogger.info("1 - New Customer Script")
        # Connector API
        configConnector = con.ConfigConnection()
        connector = con.Connector(configConnector)
        body_params = {
            "email": userEmail,
            "cellphone": "0000",
            "password": password,
            "emailsupport": customerEmail
        }
        QIlogger.info("------------------------------------------------------")
        QIlogger.info("2 - SignIN new User")
        #connector.signin(body_params)

        QIlogger.info("------------------------------------------------------")
        QIlogger.info("3 - Add User to script/config")
        with open("config/config.json", 'r') as json_file:
            data = json.load(json_file)
        data["users"].append({'name': userName, 'config_dir': ''})
        with open("config/config.json", 'w') as json_file:
            json.dump(data, json_file)

        QIlogger.info("------------------------------------------------------")
        QIlogger.info("4 - Create DIR main Config in ticket_classification")
        userConfigDir = dir_path + '/../ticket_classification/config/' + userName
        createNewDir(userConfigDir)

        QIlogger.info("------------------------------------------------------")
        QIlogger.info(
            "5 - Copy base models config and connector to new user DIR config")
        copyDatasFromToDirs(dir_path + '/../base_configs', userConfigDir)

        QIlogger.info("------------------------------------------------------")
        QIlogger.info("6 - Read models config and change paths")
        with open(userConfigDir + "/category_config.json", 'r') as json_file:
            dataC = json.load(json_file)
        dataC["paths"]["main_path"] = dataC["paths"]["main_path"].replace(
            "base_config", userName)
        with open(userConfigDir + "/category_config.json", 'w') as json_file:
            json.dump(dataC, json_file)

        with open(userConfigDir + "/priority_config.json", 'r') as json_file:
            dataP = json.load(json_file)
        dataP["paths"]["main_path"] = dataP["paths"]["main_path"].replace(
            "base_config", userName)
        with open(userConfigDir + "/priority_config.json", 'w') as json_file:
            json.dump(dataP, json_file)

        with open(userConfigDir + "/connector_config.json", 'r') as json_file:
            dataCC = json.load(json_file)
        dataCC["web_service"]["user"] = userEmail
        dataCC["web_service"]["password"] = password
        with open(userConfigDir + "/connector_config.json", 'w') as json_file:
            json.dump(dataCC, json_file)

        QIlogger.info("------------------------------------------------------")
        QIlogger.info("7 - Create user main DIR models")
        userModelDir = modelsDir + '/' + userName
        createNewDir(userModelDir)

        QIlogger.info("------------------------------------------------------")
        QIlogger.info(
            "8 - Copy base models checkpoints and datas to new user DIR")
        #copyDatasFromToDirs(modelsDir + '/base_model', userModelDir)
        copy_tree(modelsDir + '/base_model', userModelDir)

        QIlogger.info("------------------------------------------------------")
        QIlogger.info("9 - Change name in the checkpoint files")
        replaceDataInCheckpoint(
            modelsDir + '/' + userName +
            '/models/category/model/best_models/checkpoint', userName)
        replaceDataInCheckpoint(
            modelsDir + '/' + userName +
            '/models/priority/model/best_models/checkpoint', userName)

        QIlogger.info("------------------------------------------------------")
        QIlogger.info("10 - Add a crontab")
        my_cron = CronTab(user='******')
        # Interpreter path was "/user/bin/python3" in the original; this looks
        # like a typo for /usr/bin/python3.
        cronJob_command = (
            '/usr/bin/python3 '
            '/home/questit/assist_classifier/scripts/classification_script.py '
            + userName + ' > /home/questit/tmp/classification_log_' +
            userName + '.txt')
        job = my_cron.new(command=cronJob_command)
        job.minute.every(3)
        my_cron.write()
    except Exception as e:
        QIlogger.error("Error in New Customer Script method " + str(e))
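# For reference, the python-crontab calls above schedule the command every
# three minutes (a "*/3 * * * *" entry). A minimal standalone sketch that
# writes to the current user's crontab instead of the masked account name:
from crontab import CronTab

cron = CronTab(user=True)  # current user's crontab
job = cron.new(command='echo hello > /tmp/hello.txt')  # illustrative command
job.minute.every(3)        # renders as */3 * * * *
cron.write()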
def connect(host, port, shared_value, is_server):
    """
    Connect to host:port with the Diffie-Hellman exchange, making sure to
    authenticate the connection.
    """
    global MAC_KEY
    ctr = None
    if port:
        ctr = connector.Connector(is_server, host, port)
    else:
        ctr = connector.Connector(is_server, host)

    # Generate a 16-byte key from a hash of the shared secret value, then use
    # that value to encrypt a Diffie-Hellman exchange and so ensure Perfect
    # Forward Secrecy.
    md5_key = hashlib.md5()
    shared_val = shared_value.encode('utf-8')
    md5_key.update(shared_val)
    long_term_key = md5_key.digest()
    md5_key = hashlib.md5()
    md5_key.update(long_term_key)
    MAC_KEY = md5_key.digest()

    ctr.connect()
    session_key = []
    if not is_server:
        # Client-side authenticated DH exchange

        # Send initial DH trigger message
        logging.getLogger().info('Sending initial authentication message')
        client_dh_init_msg = dh_auth.gen_auth_msg()
        ctr.send(bytes(client_dh_init_msg))

        # Receive server authentication response
        logging.getLogger().info('Waiting for server authentication response')
        rcv_server_public_transport = ctr.receive_wait()
        rcv_server_nonce = rcv_server_public_transport[:16]
        rcv_server_dh_data_encrypted = rcv_server_public_transport[16:]

        # Send back client authentication response
        logging.getLogger().info('Sending client authentication response')
        client_auth_msg = dh_auth.gen_auth_msg(rcv_server_nonce)
        client_dh_data_tup = dh_auth.gen_public_transport(
            long_term_key, client_auth_msg)
        client_public_transport = client_dh_data_tup[dh_auth.PUB_TRANSPORT_IDX]
        ctr.send(bytes(client_public_transport))

        # Authenticate received data from server
        logging.getLogger().info('Authenticating data received from server')
        expect_rcv_server_id = [int(byte) for byte in host.split('.')]
        expect_rcv_server_auth_msg = expect_rcv_server_id + client_dh_init_msg[4:]
        logging.getLogger().info('Generating session key')
        session_key = dh_auth.gen_session_key(
            rcv_server_dh_data_encrypted,
            client_dh_data_tup[dh_auth.LOC_EXPONENT_IDX], long_term_key,
            expect_rcv_server_auth_msg)
    else:
        # Server-side authenticated DH exchange

        # Receive initial DH trigger message
        logging.getLogger().info('Waiting for initial authentication message')
        rcv_client_dh_data = ctr.receive_wait()
        rcv_client_id = rcv_client_dh_data[:4]
        rcv_client_nonce = rcv_client_dh_data[4:]

        # Send response
        logging.getLogger().info('Sending server authentication response')
        server_nonce = dh_auth.gen_nonce()
        server_auth_msg = dh_auth.gen_auth_msg(rcv_client_nonce)
        server_dh_data_tup = dh_auth.gen_public_transport(
            long_term_key, server_auth_msg)
        server_public_transport = server_nonce + server_dh_data_tup[
            dh_auth.PUB_TRANSPORT_IDX]
        ctr.send(bytes(server_public_transport))

        # Receive client authentication response; client_public_transport is
        # the same as rcv_client_dh_data_encrypted
        logging.getLogger().info('Waiting for client authentication response')
        rcv_client_public_transport = ctr.receive_wait()

        # Authenticate received data from client
        logging.getLogger().info('Authenticating client response')
        expect_rcv_client_auth_msg = list(rcv_client_id) + list(server_nonce)
        session_key = dh_auth.gen_session_key(
            rcv_client_public_transport,
            server_dh_data_tup[dh_auth.LOC_EXPONENT_IDX], long_term_key,
            expect_rcv_client_auth_msg)

    if session_key == 0:
        logging.getLogger().info('Failed to authenticate: session key invalid')
    else:
        logging.getLogger().info(
            'Authenticated with session key: ' +
            connector.bytestring_as_hex_string(session_key))

    # Enforce Perfect Forward Secrecy by forgetting the local exponent
    client_dh_data_tup = (0, 0)
    server_dh_data_tup = (0, 0)
    return (session_key, ctr)
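# The key-derivation prelude of connect() is easy to verify in isolation:
# the long-term key is MD5 of the shared secret, and the MAC key is MD5 of
# that digest. A standalone sketch (the secret string is made up):
import hashlib

shared_value = 'correct horse battery staple'
long_term_key = hashlib.md5(shared_value.encode('utf-8')).digest()
mac_key = hashlib.md5(long_term_key).digest()
assert len(long_term_key) == len(mac_key) == 16  # MD5 digests are 16 bytes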
from datetime import date, datetime, timedelta
import operator
import HTML
import bizdays as bd
import zipfile
from StringIO import StringIO  # Python 2 style import
import pro_ratas as prt
import connector
# Note: the "pl" (proxy login) and "vd" (parser) modules used below are
# imported elsewhere in the original source; those imports are not shown in
# this excerpt.

# Define the dates used throughout the program
last_biz_day = bd.Calendar('ANBIMA').offset(date.today().isoformat(), -1)
vna_date = datetime.strftime(date.today(), '%d/%m/%Y')
vna_mth_yr = datetime.strftime(date.today(), '%m/%Y')

# Connect through the proxy to reach the external pages
user = pl.proxy_login()
conn = connector.Connector(user)

# Data published in the same format
url_dict = dict(
    selic='https://www.selic.rtm/extranet/consulta/taxaSelic.do?method=listarTaxaDiaria',
    ipca='http://www.anbima.com.br/indicadores/indicadores.asp',
    igpm='http://www.anbima.com.br/indicadores/indicadores.asp',
    ipca_index='http://www.anbima.com.br/indicadores/indicadores.asp',
    igpm_index='http://www.anbima.com.br/indicadores/indicadores.asp',
    euro_bce='http://www.ecb.europa.eu/stats/eurofxref/eurofxref-daily.xml',
    cdi_cetip='http://www.cetip.com.br/',
    irfm_anbima='http://www.anbima.com.br/ima/arqs/ima_completo.xml')

contents = dict([url, conn.read(url_dict[url])] for url in url_dict)
dictionary = dict([url, vd.fparser(url, contents[url])] for url in contents)
def one_circle_insert(data):  # insert a single value
    conn = connector.Connector()
    conn.insert("result", data)
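# A hedged sketch of how the insert helpers above might be driven together;
# the threshold 10 is made up, and one_set_get is the board reader defined
# earlier in this section.
rows = one_set_get(xywh=A)
if one_set_insert(rows, 10) == 1:  # returns 1 when nothing was inserted
    for row in rows:
        one_circle_insert(row)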