def tokenPublish(transactionData):
    """Validate a token-publish transaction.

    Checks that the payload carries the '90f4' opcode, that the publish fee
    (paid in the configured fee token) is sufficient, and that the fee is sent
    to the configured receiving address.

    :param transactionData: transaction dict with 'input', 'out' and 'to' keys
    :return: True when the transaction passes validation, False otherwise
    """
    # FIX: the original body referenced an undefined name `transaction`;
    # it now consistently uses the `transactionData` parameter.
    if len(transactionData['input']) > 7:
        msg = transactionData['input']
        # Token-publish payloads start with the '90f4' opcode.
        if msg[:4] == '90f4':
            name = msg[4:7]        # 3-character token name (currently unused)
            amount = int(msg[7:])  # issued amount (currently unused)
            requiredFee = int(config.config()["fee"])  # currently unused
            # The publish fee must be paid in the configured fee token.
            if int(transactionData['out'][config.config()["feeToken"]]) < int(
                    config.config()["tokenPublishfee"]):
                return False
            # The fee must go to the configured receiving address.
            if transactionData['to'] != config.config()["receiveAddress"]:
                return False
    return True
def connectMongo(self):
    """Open a MongoDB connection using credentials from the config.

    :return: the database handle on success, False on any failure
    """
    try:
        template = config.config()["DB_STRING"]
        user = config.config()["DB_PARAMS"]["USER"]
        pwd = config.config()["DB_PARAMS"]["PASSWORD"]
        db_name = config.config()["DB_PARAMS"]["DB_NAME"]
        # Inject the credentials into the connection-string template.
        uri = template.format(username=user, password=pwd)
        client = pymongo.MongoClient(uri)
        database = client[db_name]
        print("Connected")
        return database
    except Exception as ex:
        print(ex)
        return False
def connect():
    """ Connect to the PostgreSQL database server """
    connection = None
    try:
        # Read connection parameters and open the server connection.
        parameters = config()
        print('Connecting to the PostgreSQL database...')
        connection = psycopg2.connect(**parameters)
        # Query and show the server version as a connectivity check.
        cursor = connection.cursor()
        print('PostgreSQL database version:')
        cursor.execute('SELECT version()')
        version_row = cursor.fetchone()
        print(version_row)
        cursor.close()
    except (Exception, psycopg2.DatabaseError) as error:
        print(error)
    finally:
        # Always release the connection, even on failure.
        if connection is not None:
            connection.close()
            print('Database connection closed.')
def truncateData():
    """Remove all rows from the pagedata table."""
    connection = None
    cursor = None
    try:
        # Read connection parameters
        params = config()
        # Connect to PostgreSQL server
        connection = psycopg2.connect(**params)
        cursor = connection.cursor()
        # TRUNCATE clears the whole table in a single fast statement.
        cursor.execute('TRUNCATE TABLE pagedata')
        connection.commit()
    except (Exception, psycopg2.DatabaseError) as error:
        print(error)
    finally:
        # FIX: guard cursor and connection independently -- in the original a
        # failure before the cursor existed raised NameError here.
        if cursor is not None:
            cursor.close()
        if connection is not None:
            connection.close()
def insertJob(title, company, location, link):
    """Insert one job posting row into pagedata.

    :param title: job title
    :param company: company name
    :param location: job location
    :param link: URL of the posting
    """
    sql = """INSERT INTO pagedata(jobtitle, company, location, link) VALUES(%s, %s, %s, %s);"""
    connection = None
    cursor = None
    try:
        # Read connection parameters
        params = config()
        # Connect to PostgreSQL server
        connection = psycopg2.connect(**params)
        cursor = connection.cursor()
        # Parameterised execute keeps the values safely escaped.
        cursor.execute(sql, (title, company, location, link))
        connection.commit()
    except (Exception, psycopg2.DatabaseError) as error:
        print(error)
    finally:
        # FIX: close cursor and connection independently so a failure before
        # the cursor exists cannot raise NameError in this clause.
        if cursor is not None:
            cursor.close()
        if connection is not None:
            connection.close()
def getSender():
    """Fetch the sender credentials from the email table.

    :return: list of (username, password) rows, or None on database error
    """
    sql = """SELECT username, password FROM email"""
    connection = None
    cursor = None
    try:
        # Read connection parameters
        params = config()
        # Connect to PostgreSQL server
        connection = psycopg2.connect(**params)
        cursor = connection.cursor()
        cursor.execute(sql)
        # All matching credential rows.
        email = cursor.fetchall()
        return email
    except (Exception, psycopg2.DatabaseError) as error:
        print(error)
    finally:
        # FIX: close cursor and connection independently so a failure before
        # the cursor exists cannot raise NameError in this clause.
        if cursor is not None:
            cursor.close()
        if connection is not None:
            connection.close()
def one_hot(arr, num_speakers=None):
    """Convert integer class labels to one-hot vectors.

    :param arr: iterable of integer labels in [0, num_speakers)
    :param num_speakers: length of each one-hot vector; defaults to the
        configured speaker_reco.num_speakers (backward compatible)
    :return: list of one-hot lists, one per input label
    """
    if num_speakers is None:
        # Backward-compatible default: read the project configuration.
        num_speakers = config.config().speaker_reco.num_speakers
    final_list = []
    for element in arr:
        row = [0] * num_speakers
        row[element] = 1
        final_list.append(row)
    return final_list
def run(testname: str):
    """Run the API test suite, write an HTML report, and email it.

    :param testname: name of the tester shown in the report
    :return: None
    """
    c = config()
    # Fetch the configured test account and convert it to str for the report.
    test_username = str(c.username)
    with open(c.API_REPORT_PATH, 'wb') as f:
        # Discover all test_*.py cases under the API case directory.
        suite = unittest.defaultTestLoader.discover(start_dir=c.APICASE_PATH,
                                                    pattern='test_*.py')
        runner = HTMLTestRunner(stream=f,
                                verbosity=2,
                                title='API测试报告',
                                description='接口html测试报告',
                                tester=testname,
                                test_user=test_username)
        runner.run(suite)
    # Mail the report and the log file.
    # NOTE(review): sender/password/receiver are masked placeholders --
    # presumably filled in elsewhere; confirm before running.
    e = Email(server='smtp.qq.com',
              sender='*****@*****.**',
              password='******',
              receiver='*****@*****.**',
              title='老马发送的今天的API自动化报告又来了,请注意查看!',
              message='来了来了,你的测试API自动化报告!!,注意如果收不到邮件注意查看垃圾箱还是退信了!',
              path=[c.API_REPORT_PATH, c.log_file_name])
    e.send()
def dataToCSV():
    """Export de-duplicated job rows from pagedata to IndeedSearchResults.csv."""
    # FIX: PostgreSQL requires the leftmost ORDER BY expressions to match the
    # DISTINCT ON expressions; the original `ORDER BY company` raised a
    # DatabaseError. (Sorting the final output purely by company would need a
    # wrapping subquery.)
    query = """SELECT DISTINCT ON (jobtitle, company) jobtitle, company, location, link FROM pagedata ORDER BY jobtitle, company"""
    sql = 'COPY ({0}) TO STDOUT WITH CSV HEADER'.format(query)
    connection = None
    cursor = None
    try:
        # Read connection parameters
        params = config()
        # Connect to PostgreSQL server
        connection = psycopg2.connect(**params)
        cursor = connection.cursor()
        # Stream the query result straight into the CSV file.
        with open('IndeedSearchResults.csv', 'w') as fileOutput:
            cursor.copy_expert(sql, fileOutput)
    except (Exception, psycopg2.DatabaseError) as error:
        print(error)
    finally:
        # FIX: close cursor and connection independently so a failure before
        # the cursor exists cannot raise NameError in this clause.
        if cursor is not None:
            cursor.close()
        if connection is not None:
            connection.close()
def create_table():
    """Create the invoices table in the configured PostgreSQL database."""
    create_command = ("""
        CREATE TABLE invoices (
            id uuid PRIMARY KEY,
            document VARCHAR(255) NOT NULL,
            description VARCHAR(255),
            amount DECIMAL NOT NULL,
            referenceMonth DATE NOT NULL,
            referenceYear INTEGER NOT NULL,
            createdAt TIMESTAMP NOT NULL,
            isActive BOOLEAN NOT NULL,
            deactiveAt TIMESTAMP
        )
        """)
    connection = None
    try:
        connection = psycopg2.connect(**config.config())
        db_cursor = connection.cursor()
        print("Generating Invoices table")
        db_cursor.execute(create_command)
        db_cursor.close()
        connection.commit()
    except (Exception, psycopg2.DatabaseError) as error:
        print(error)
    finally:
        # Release the connection whether or not the DDL succeeded.
        if connection is not None:
            connection.close()
def run_main(): """ 这是主函数 """ # 初始化参数配置类 pre_model_path = os.path.abspath("xxxx") cfg = config(pre_model_path=pre_model_path) # 构造训练集和测试集数据生成器 #dataset_dir = os.path.join(cfg.dataset_dir, "train") dataset_dir = os.path.join(cfg.dataset_dir, "test") image_data = K.preprocessing.image.ImageDataGenerator() datagen = image_data.flow_from_directory(dataset_dir, class_mode='categorical', batch_size=1, target_size=(cfg.image_size, cfg.image_size), shuffle=False) # 初始化相关参数 iter_num = datagen.samples # 训练集1个epoch的迭代次数 # 初始化VGG16,并进行训练 vgg16 = VGG16(cfg) vgg16.eval_on_dataset(datagen, iter_num, weight_path=cfg.pre_model_path)
def get_images_from_word(file_path, dest_path=""):
    """Ask the Windows conversion server to extract images from a Word file.

    :param file_path: path of the Word document to send
    :param dest_path: kept for interface compatibility; not used here
    :return: whatever _get_image_from_file_ returns for the upload
    """
    environment = bottle.default_app().config.get('env')
    settings = config(environment)
    # Compose the full endpoint URL from the configured server and API parts.
    endpoint = settings.get('WIN_SERVER') + API.ROOT + API.VERSION + '/get_images_from_word'
    return _get_image_from_file_(endpoint, file_path)
def __init__(self):
    """Wire up the registry: config, framework, loader, then run."""
    reg = registry.registry()
    cfg = config.config()
    # The framework only needs the config at construction time.
    framework.framework(cfg)
    reg.set("config", cfg)
    reg.set("loader", loader.loader(reg))
    reg.run()
def __init__(self):
    """Load database connection settings from the config object."""
    self.c = config()
    settings = self.c
    # Copy each connection field onto the instance for direct access.
    self.host = settings.host
    self.port = settings.port
    self.user = settings.user
    self.passwd = settings.passwd
    self.db = settings.db
def connect(self) -> None:
    """Open a PostgreSQL connection and store it on self.conn.

    Errors are printed rather than raised.
    """
    try:
        # Read connection parameters and connect to the server.
        parameters = config()
        self.conn = psycopg2.connect(**parameters)
    except (Exception, psycopg2.DatabaseError) as error:
        print(error)
def run_main():
    """Main entry point: classify one MNIST-M image with a trained DANN."""
    # Initialise the configuration and the DANN model.
    cfg = config()
    dann = MNIST2MNIST_M_DANN(cfg)
    # Image and checkpoint paths.
    # NOTE(review): absolute Windows paths are machine-specific -- parameterise
    # before sharing this script.
    image_path = "F:\\DANN-MNIST\\dataset\\Mnist2MnistM\\mnistM\\test\\7\\00000000_7.png"
    model_path = "C:\\Users\\77183\\Desktop\\DANN\\checkpoints\\20200220090516\\trained_model.ckpt"
    # Run inference on the single image.
    dann.test_image(os.path.abspath(image_path), os.path.abspath(model_path))
def gen(mode):
    """Yield batches of (spectrogram, one-hot label) pairs forever.

    :param mode: 'train' reads train_index, anything else reads val_index
    :yield: (batch_x, batch_y) -- stacked spectrograms with a trailing channel
        axis, and stacked one-hot label vectors
    """
    import numpy as np
    import csv
    import json
    BATCH_SIZE = config.config().speaker_reco.batch_size
    SAMPLE_RATE = 16000
    SNIPPET_SIZE = 3 * SAMPLE_RATE  # 3 seconds of audio at 16 kHz (unused here)
    # indexfile = sys.argv[1]
    if mode == 'train':
        indexfile = 'train_index'
    else:
        indexfile = 'val_index'
    print(indexfile)
    print("Reading csv file")
    # FIX: context manager replaces the manual open/close pair.
    with open(indexfile, 'r') as f:
        rows = [row for row in csv.reader(f)]
    with open('speaker_names.json') as f:
        data = json.load(f)
    print("Reading done!")
    while True:
        # Random sample of BATCH_SIZE index entries for this batch.
        curr_list = np.random.permutation(rows)[0:BATCH_SIZE]
        batch_x = []
        batch_y = []
        for file in curr_list:
            file_to_open = file[0]
            # Retry with a random replacement file until one loads cleanly.
            while True:
                try:
                    stft = np.load(file_to_open)
                    break
                except Exception:
                    file_to_open = np.random.permutation(rows)[0][0]
            # FIX: removed two broken leftover lines from the original --
            # `np.expand_dims(stft['arr_0'])` lacked the axis argument
            # (TypeError) and `mean = mean + ...` used an uninitialised name.
            # NOTE(review): assumes the speaker label is the 9th path
            # component -- confirm against the index layout.
            label = file_to_open.split('/')[8]
            batch_x.append(np.expand_dims(stft['arr_0'], 2))
            batch_y.append(data[label])
        batch_x = np.stack(batch_x, axis=0)
        batch_y = np.stack(one_hot(batch_y), axis=0)
        yield ((batch_x, batch_y))
def run_main():
    """
    Main entry point: train DANN on MNIST (source) -> MNIST-M (target).
    """
    # Initialise the configuration object.
    cfg = config()
    # Load MNIST
    (mnist_x_train, mnist_y_train), (mnist_x_test, mnist_y_test) = keras.datasets.mnist.load_data()
    mnist_x_train = np.expand_dims(mnist_x_train,-1)
    mnist_x_test = np.expand_dims(mnist_x_test,-1)
    # Replicate the grey channel to get 3-channel images matching MNIST-M.
    mnist_x_train = np.concatenate([mnist_x_train, mnist_x_train, mnist_x_train], 3).astype(np.float32)
    mnist_x_test = np.concatenate([mnist_x_test, mnist_x_test, mnist_x_test], 3).astype(np.float32)
    mnist_y_train = keras.utils.to_categorical(mnist_y_train).astype(np.float32)
    mnist_y_test = keras.utils.to_categorical(mnist_y_test).astype(np.float32)
    # Load MNIST-M
    mnistm = pkl.load(open(os.path.abspath('./dataset/mnistm/mnistm_data.pkl'), 'rb'))
    mnistm_train = mnistm['train'].astype(np.float32)
    mnistm_valid = mnistm['valid'].astype(np.float32)
    # Compute pixel mean for normalizing data
    pixel_mean = np.vstack([mnist_x_train, mnistm_train]).mean((0, 1, 2))
    cfg.set(pixel_mean=pixel_mean)
    mnist_x_train = (mnist_x_train - pixel_mean) / 255.0
    mnistm_train = (mnistm_train - pixel_mean)/ 255.0
    mnist_x_test = (mnist_x_test - pixel_mean)/ 255.0
    mnistm_valid = (mnistm_valid - pixel_mean)/ 255.0
    # Build batch generators; source and target each supply half a batch so a
    # combined training batch is cfg.batch_size.
    # NOTE(review): the target generator is paired with mnist_y_train --
    # MNIST-M shares MNIST's labels, but confirm the pairing is intended.
    train_source_datagen = batch_generator([mnist_x_train,mnist_y_train],cfg.batch_size // 2)
    train_target_datagen = batch_generator([mnistm_train,mnist_y_train],cfg.batch_size // 2)
    val_target_datagen = batch_generator([mnistm_valid,mnist_y_test],cfg.batch_size)
    """
    train_source_datagen = DataGenerator(os.path.join(cfg.dataset_dir, 'mnist'),int(cfg.batch_size/2), cfg.image_size,source_flag=True,mode="train")
    train_target_datagen = DataGenerator(os.path.join(cfg.dataset_dir, 'mnistM'),int(cfg.batch_size/2), cfg.image_size,source_flag=False,mode="train")
    val_datagen = DataGenerator(os.path.join(cfg.dataset_dir, 'mnistM'),cfg.batch_size, cfg.image_size,source_flag=False,mode="val")
    """
    # Iterations per training epoch (max of the two streams) and per
    # validation pass.
    train_source_batch_num = int(len(mnist_x_train) // (cfg.batch_size // 2))
    train_target_batch_num = int(len(mnistm_train) // (cfg.batch_size // 2))
    train_iter_num = int(np.max([train_source_batch_num,train_target_batch_num]))
    val_iter_num = int(len(mnistm_valid) // cfg.batch_size)
    # Build the DANN and train it.
    dann = MNIST2MNIST_M_DANN(cfg)
    #pre_model_path = os.path.abspath("./pre_model/trained_model.ckpt")
    dann.train(train_source_datagen,train_target_datagen,val_target_datagen,train_iter_num,val_iter_num)
def connect():
    """Reload prc_requisition_name from a CSV file via PostgreSQL COPY.

    Connects to the 'cmis' database, truncates the target table, and streams
    E:/Shared/prc_req_name/req_name.csv into it.
    """
    params = config.config('cmis')
    conn = None
    try:
        # FIX: connect inside the try block so a connection failure is caught
        # and the finally clause sees a defined `conn`.
        print('Connecting to the PostgreSQL database...')
        conn = psycopg2.connect(**params)
        cur = conn.cursor()
        # Empty the target table before reloading it.
        cur.execute("""truncate prc_requisition_name;""")
        # Source CSV file.
        data_folder = Path("E:/Shared/prc_req_name/")
        file_to_open = data_folder / "req_name.csv"
        SQL = """ COPY %s FROM STDIN WITH CSV HEADER DELIMITER AS ',' """
        # FIX: context manager closes the CSV file even when COPY fails; the
        # unused csv.reader and the nested helper function were removed.
        with open(file_to_open, mode="r") as in_file:
            cur.copy_expert(sql=SQL % 'prc_requisition_name', file=in_file)
            conn.commit()
        cur.close()
    except (Exception, psycopg2.DatabaseError) as error:
        print(error)
    finally:
        if conn is not None:
            conn.close()
            print('Database connection closed.')
def __init__(self, client):
    """Cache the client and the currency settings read from the database."""
    self.client = client
    self.config = config('koshka.db')

    def setting(column):
        # First cell of the first matching Settings row.
        return self.config.get_table('Settings',
                                     get_row=True,
                                     column_name=column)[0][0]

    self.cur_s = setting('Currency_name')         # singular currency name
    self.cur_p = setting('Currency_plural_name')  # plural currency name
    self.cur_i = setting('Currency_icon')         # currency icon
def __init__(self, databasePath, logging):
    """Open the SQLite database at databasePath with named-row access.

    :param databasePath: path to the SQLite database file
    :param logging: logger object used for error reporting
    """
    self.__cfg = config()
    self.logging = logging
    try:
        self.connection = lite.connect(databasePath)
        # Return plain str instead of unicode for TEXT columns.
        self.connection.text_factory = str
        with self.connection:
            # Rows behave like dicts (columns addressable by name).
            self.connection.row_factory = lite.Row
            self.cursor = self.connection.cursor()
    except lite.Error as e:
        # FIX: `except ... as e` replaces the Python-2-only comma syntax,
        # which is a SyntaxError on Python 3 (and valid on 2.6+).
        self.logging.err("Database connecting %s:" % e.args[0])
def read_config():
    """Parse config.json into a config object.

    :return: config(connection_string, source tables, destination items)
    """
    # FIX: context manager closes the file handle (the original leaked it),
    # and json.load reads the stream directly.
    with open('config.json') as fh:
        json_content = json.load(fh)
    tables = [
        code_item.code_item(**item) for item in json_content['source_tables']
    ]
    connection_str = json_content['connection_string']
    destination_items = [
        destination_item.destination_item(**item)
        for item in json_content['destination']
    ]
    return config(connection_str, tables, destination_items)
def __init__(self,logger_name='Auto_frame'):
    """Build a logger configured from the project's config object.

    :param logger_name: name registered with the logging module
    """
    self.logger = logging.getLogger(logger_name)
    logging.root.setLevel(logging.NOTSET)
    # Load the log section of the configuration file.
    c = config()
    # Log file name.
    self.log_file_name = c.log_file_name
    # Number of rotated log files to keep.
    self.backup_count = c.backup
    # Output levels for the console and file handlers.
    self.console_output_level = c.console_level
    self.file_output_level = c.file_level
    # Log line format, with a built-in default when none is configured.
    # NOTE(review): c.pattern is used directly as the formatter -- if it is a
    # plain format string rather than a logging.Formatter instance, handlers
    # calling setFormatter() with it will misbehave; confirm its type.
    self.formatter = c.pattern if c and c.pattern else logging.Formatter('%(asctime)s -- %(name)s -- %(filename)s -- %(lineno)d -- %(levelname)s -- %(message)s')
def login():
    """Authenticate a user.

    GET: render the login form.
    POST: check email/password against the database; on success log the user
    in and redirect to the index, otherwise flash an error and redirect back.
    Already-authenticated users get a 404 (route is for anonymous visitors).
    """
    if not current_user.is_authenticated:
        if request.method == 'POST':
            conn = None
            cur = None
            try:
                params = config()
                conn = psycopg2.connect(**params)
                cur = conn.cursor(cursor_factory=psycopg2.extras.DictCursor)
                email = request.form['email']
                auth_pswd = request.form['password']
                cur.execute(
                    'SELECT username, pswd, referral_key, parent_status, inheritor_status '
                    'FROM chk_email_return_username_pswd_referral_parent_inheritor(%s)',
                    (email, ))
                result = cur.fetchone()
                if result['username']:
                    db_pswd = result['pswd']
                    # Compare the submitted password against the stored hash.
                    if check_password_hash(db_pswd, auth_pswd):
                        username = result['username']
                        referral = result['referral_key']
                        parent = result['parent_status']
                        inheritor = result['inheritor_status']
                        login_user(User(email, username, parent, inheritor,
                                        referral),
                                   remember=True)
                        return redirect(url_for('index'))
                    flash('Неправильный логин или пароль')
                    return redirect(url_for('login'))
                flash('Неправильный логин или пароль')
                return redirect(url_for('login'))
            except (Exception, psycopg2.DatabaseError) as error:
                # FIX: the original `flash('Error: ', error)` passed the
                # exception as flash()'s *category* argument; format it into
                # the message instead.
                flash('Error: {}'.format(error))
                return redirect(url_for('message'))
            finally:
                if cur is not None:
                    cur.close()
                if conn is not None:
                    conn.close()
        return render_template('login.html')
    abort(404)
def __init__(self):
    """Load scanner and database settings from config and open the MySQL link."""
    my_cof = config()
    # my_cof.cofig_update()
    my_cof.get_config()
    self.msg_level = my_cof.level["msg_level"]
    # Scanner endpoints (e.g. "172.24.24.222" / 51236).
    self.ip1 = my_cof.scanner["s1_ip"]
    self.port1 = my_cof.scanner["s1_port"]
    self.ip2 = my_cof.scanner["s2_ip"]
    self.port2 = my_cof.scanner["s2_port"]
    # FIX: PyMySQL >= 1.0 no longer accepts positional connection arguments;
    # pass them by keyword (same values as the original positional call).
    self.db = pymysql.Connect(host=my_cof.mysql["host"],
                              user=my_cof.mysql["user"],
                              password=my_cof.mysql["passwd"],
                              database=my_cof.mysql["dbName"])
    self.cursor = self.db.cursor()
def __init__(self):
    """Initialise the bot, its HTTP session, and config-derived attributes."""
    super().__init__(command_prefix=commands.when_mentioned_or("a]"))
    self.session = ClientSession(loop=self.loop)
    settings = config()
    self.config = settings
    # Copy the frequently used settings onto the bot instance.
    self.color = settings.color
    self.cb_key = settings.cb_key
    self.token = settings.token
    self.initial_extensions = settings.initial_extensions
    self.status_emojis = settings.status_emojis
    # Template embed reused by commands, pre-tinted with the bot colour.
    self.base_embed = discord.Embed(color=self.color)
def gen(mode):
    """Compute per-frequency mean and variance over the whole dataset and
    save them to normalise.npy.

    :param mode: 'train' reads train_index, anything else reads val_index
    """
    import numpy as np
    import csv
    import json
    # BATCH_SIZE / SAMPLE_RATE / SNIPPET_SIZE are not used in this variant.
    BATCH_SIZE = config.config().speaker_reco.batch_size
    SAMPLE_RATE = 16000
    SNIPPET_SIZE = 3 * SAMPLE_RATE
    # indexfile = sys.argv[1]
    if mode == 'train':
        indexfile = 'train_index'
    else:
        indexfile = 'val_index'
    print(indexfile)
    print("Reading csv file")
    f = open(indexfile, 'r')
    reader = csv.reader(f)
    l = []
    for row in reader:
        l.append(row)
    f.close()
    with open('speaker_names.json') as f:
        data = json.load(f)
    # Accumulators shaped like one spectrogram: (freq=512, time=300).
    mean,variance = np.zeros((512,300)),np.zeros((512,300))
    count = 0;
    for file in l:
        file_to_open = file[0]
        c = True
        try:
            stft = np.load(file_to_open)
            c = False
            mean += stft['arr_0']
            # Accumulate E[x^2]; 341021 is presumably the total file count --
            # TODO confirm it matches len(l) and is not stale.
            variance += stft['arr_0']**2/341021
        except:
            # Unreadable file: skip it (NOTE(review): the 341021 divisor is
            # not adjusted for skipped files).
            continue
        if count%100 == 0:
            # Progress as a percentage of files processed.
            print(count*100/len(l))
        count+=1
    mean = mean/341021
    # Var[x] = E[x^2] - E[x]^2
    variance = variance - (mean)**2
    # Collapse the time axis to a single value per frequency bin.
    mean = np.mean(mean,axis = 1)
    variance = np.mean(variance,axis = 1)
    np.save('normalise.npy',np.matrix([mean,variance]))
def run_main(): """ 这是主函数 """ # 初始化参数配置类 pre_model_path = os.path.abspath("xxxx") cfg = config(pre_model_path=pre_model_path) # 初始化图像与结果路径 image_path = os.path.abspath("./test/10405299_1.png") # 初始化VGG16,并进行测试一张图像 print(cfg.pre_model_path) vgg16 = VGG16(cfg) vgg16.test_single_image(image_path, cfg.pre_model_path)
def pendingBTCRelay(transactions):
    """Append transactions to the persisted pending BTC-relay list.

    :param transactions: iterable of transaction dicts to queue
    :return: True when the updated list was stored, False otherwise
    """
    transactionDB = db.DB("trie/transactionDB")
    con = config.config()
    key = con["pendingTransaction"]
    try:
        # NOTE(review): unpickling data from our own DB; ensure nothing
        # untrusted can write this key.
        pending = pickle.loads(transactionDB.get(key.encode()))
    except Exception:
        # FIX: narrowed from a bare `except:`; first run or unreadable entry
        # starts with an empty pending list.
        pending = []
    pending.extend(transactions)
    try:
        transactionDB.put(key.encode(), pickle.dumps(pending))
        print("pending:", pickle.loads(transactionDB.get(key.encode())))
        return True
    except Exception:
        # FIX: narrowed from a bare `except:`; storage failure reported as False.
        return False
def notify(self, diff):
    """Render the diff as HTML e-mail and send it to the recipients."""
    settings = config.config()['mail']
    message = self.build_msg_html(diff, settings, template_filename=self.template)
    smtp = smtplib.SMTP_SSL(settings['server']['name'])
    user = settings['server']['user']
    password = settings['server']['password']
    # Authenticate only when at least one credential is configured.
    if user or password:
        logger.debug('logging in to smtp')
        smtp.login(user, password)
    else:
        logger.debug('no log in')
    smtp.sendmail(message['From'], self.recipients, message.as_string())
    smtp.quit()
def blockTransaction():
    """Poll Bitcoin blocks and relay matching transactions into the chain.

    Runs forever: waits until the current relay block has CCRConfirmation
    confirmations, converts each matching BTC transaction into a signed chain
    transaction, queues them, and advances the persisted block/nonce counters.
    """
    con = config.config()
    configDB = db.DB("trie/configDB")
    # Keys of the persisted counters in configDB.
    currBlockkey = con["currBTCRelayBlock"]
    confirmation = con["CCRConfirmation"]
    currNonceKey = con["currNonceCCR"]
    while True:
        # Last BTC block processed (pickled counter in configDB).
        currBlockRead = pickle.loads(configDB.get(currBlockkey.encode()))
        blockNum = json.loads(bitcoinRPC().blocknumber())["result"]
        print("blockNum:", blockNum)
        print("currBlockRead:", currBlockRead)
        if blockNum < currBlockRead + confirmation:
            # Not enough confirmations yet; poll again in a minute.
            time.sleep(60)
            continue
        else:
            currNonce = pickle.loads(configDB.get(currNonceKey.encode()))
            r = bitcoinRPC().blockInfo(currBlockRead)
            z = json.loads(r)
            transactions = []
            # SECURITY NOTE(review): hard-coded signing key committed to
            # source -- move to secure configuration.
            key = "4f269e92bde3b00f9b963d665630445b297e2e8d29987b1d50d1e8785372e393"
            for y in z["result"]["tx"]:
                #print(y)
                flag, value, address = bitcoinInfo.parseTransaction(y)
                if flag:
                    print(flag, value, address)
                    currNonce += 1
                    # BTC value converted to integer btr units.
                    # NOTE(review): 10e7 equals 1e8 -- presumably a satoshi
                    # conversion; confirm the intended factor.
                    transaction = {
                        "to": address,
                        "out": {
                            "btr": str(int(float(value) * 10e7))
                        },
                        "nonce": str(currNonce),
                        "fee": "10",
                        "type": "btc",
                        "input": ""
                    }
                    transaction = Transaction.newTransaction(
                        transaction, key)
                    print("transaction:", transaction)
                    transactions.append(transaction)
            # Queue the relayed transactions, then persist the new counters.
            a = bitcoinInfo.pendingBTCRelay(transactions)
            print(a)
            currBlockRead += 1
            configDB.put(currBlockkey.encode(), pickle.dumps(currBlockRead))
            configDB.put(currNonceKey.encode(), pickle.dumps(currNonce))
def run_main(): """ 这是主函数 """ # 初始化参数配置类 pre_model_path = os.path.abspath( "./pre_weights/20200320070419/Epoch004_ val_loss_0.693,val_accuracy_100.000%.ckpt" ) cfg = config(pre_model_path=pre_model_path) # 初始化图像与结果路径 image_dir = os.path.abspath("./test") # 初始化UNet,并进行训练 print(cfg.pre_model_path) vgg16 = VGG16(cfg) vgg16.test_single_image(image_dir, cfg.pre_model_path)
def load_user(email):
    """Flask-Login user loader: build a User from the e-mail address.

    Double-checks the account against the database; returns None when the
    e-mail is unknown and aborts with 404 on database errors.
    """
    connection = None
    cursor = None
    try:
        connection = psycopg2.connect(**config())
        cursor = connection.cursor(cursor_factory=psycopg2.extras.DictCursor)
        cursor.execute(
            'SELECT username, referral_key, parent_status, inheritor_status '
            'FROM chk_email_return_username_pswd_referral_parent_inheritor(%s)',
            (email, ))
        row = cursor.fetchone()
        if not row['username']:
            return None
        # Map the numeric inheritor flag onto its display text; other values
        # pass through unchanged.
        inheritor_status = {
            0: 'Я регистрирован без реферального кода',
            1: 'Я регистрирован по реферальному коду.',
        }.get(row['inheritor_status'], row['inheritor_status'])
        return User(email, row['username'], row['parent_status'],
                    inheritor_status, row['referral_key'])
    except (Exception, psycopg2.DatabaseError):
        abort(404)
    finally:
        if cursor is not None:
            cursor.close()
        if connection is not None:
            connection.close()
def __init__(self):
    """Wire every subsystem into the registry in dependency order, then run.

    Order matters: config, log and timer must be registered before the
    components constructed afterwards, which read them from the registry.
    """
    registry_ = registry.registry()
    config_ = config.config()
    # This build drives the video module.
    config_.set("active_module", "video")
    registry_.set("config", config_)
    log_ = log.handler(registry_)
    registry_.set("log", log_)
    timer_ = timer.timer(registry_)
    registry_.set("timer",timer_)
    session_ = session.sessions()
    registry_.set("session", session_)
    # Network transports.
    socket_ = socket_connection.connection(registry_)
    registry_.set("socket", socket_)
    http_ = http_connection.connection(registry_)
    registry_.set("http", http_)
    # Domain components.
    calculate_ = calculate.calculate(registry_)
    registry_.set("calculate", calculate_)
    video_ = video.video(registry_)
    registry_.set("video", video_)
    robot_ = robot.robot(registry_)
    registry_.set("robot", robot_)
    line_ = line.line(registry_)
    registry_.set("line", line_)
    loader_ = loader.loader(registry_)
    registry_.set("loader", loader_)
    # Start the application loop.
    registry_.run()
    pass
return diff else: print('..no change') if __name__ == '__main__' : if len(sys.argv) == 2 : config_file = sys.argv[1] print ('running with config from', config_file) params = json.load(open(config_file, 'r')) else : from config import config recipients = config.config()['mail']['default_recipients'] params = { 'input' : { 'type' : 'url.content', "url" : "http://www.timeapi.org/utc/now?format=%25a%20%25b%20%25d%20%25I:%25M:%25S" }, 'stateManager' : { 'type' : 'file', 'filename' : '../output/aa.json' }, 'comparator' : { 'type' : 'text' }, 'notifier' : { 'type' : 'debug', 'recipients' : recipients