def _generate_serial(self):
    """Build a serial number: a 3-atom random key plus a checksum suffix.

    The checksum is the first LENGTH hex digits of the SHA-256 hash of the
    key (separator removed), upper-cased, appended after one more separator.
    """
    timestamp = time.time()
    # Characters excluded from the generator.
    excluded_chars = ['O']
    # Uppercase letters actually usable by the generator.
    extras = [c for c in string.ascii_uppercase if c not in excluded_chars]
    # Seed the key generator with the current timestamp.
    key_custom = generate(3, SEPARATOR, LENGTH, LENGTH,
                          type_of_value='int', capital='none',
                          extras=extras, seed=timestamp).get_key().upper()
    # Sign only the key with the separator characters stripped out.
    key_without_separator = key_custom.replace(SEPARATOR, '')
    key_signature = SHA256.SHA256Hash(
        key_without_separator.encode()).hexdigest()
    # Keep just the leading LENGTH hex digits as the checksum.
    key_checksum = key_signature[:LENGTH].upper()
    # Append the checksum to the key, separated by one more SEPARATOR.
    return f'{key_custom}{SEPARATOR}{key_checksum}'
def _make_key(self) -> None:
    """Create self._KEY: a 16-char mixed-case alphanumeric key, or None.

    The key is only generated when bytecode encryption is enabled;
    otherwise _KEY stays None.
    """
    self._KEY = None
    if not self.ENCRYPT_BYTECODE:
        return
    # Digits 0-9 as extra characters alongside the mixed-case letters.
    digit_chars = [str(d) for d in range(10)]
    self._KEY = generate(1, '', 16, 16, type_of_value='char',
                         capital='mix', extras=digit_chars,
                         seed=round(time())).get_key()
def get_account(email):
    """Reset the user's password to a short random key and email it.

    Looks the account up by email, overwrites the stored password with the
    first 4 characters of a freshly generated key, and sends the new key to
    the user via emailuser.get_email.

    NOTE(review): the password is stored in plaintext and random.randint is
    not a cryptographically secure seed — the `secrets` module and password
    hashing should be used here; flagged, not changed.

    Returns:
        An empty dict (the caller only needs the side effects).
    """
    account = users.find_one({'email': email})
    username = account['username']
    # Generate the replacement key; only its first 4 chars become the password.
    key = generate(seed=random.randint(0, 101)).get_key()
    shortkey = key[0:4]
    users.update_one({'email': email}, {'$set': {'password': shortkey}})
    # Removed: a dead `message` f-string (never used) that embedded the old
    # plaintext password — dead code and a needless credential exposure.
    emailuser.get_email(email, username, shortkey)
    return {}
def main():
    """Ensure a device token exists on disk, then launch the login GUI."""
    global token
    token_path = os.path.join(os.getcwd(), config.path_to_token)
    # First run: generate a key, persist it, and register the device
    # with the API ("a+" creates the file if it does not exist).
    with open(token_path, "a+") as f:
        if os.stat(token_path).st_size == 0:
            key = generate()
            f.write(key.get_key())
            _ = requests.post(f"{config.api_post_device}{key.get_key()}")
            token = key.get_key()
    # Load the stored token: the file's first line.
    with open(token_path, "r") as f:
        token = f.readline()
    # New QApplication instance.
    app = QtWidgets.QApplication(sys.argv)
    # Create the login window object.
    window = LoginApp()
    # Show the window.
    window.show()
    # Run the application event loop.
    app.exec_()
def AddAnnouncement(self, db_conn, ID=None, Title="", Location="", Description="", PictureLink="", HyperLink="", PostingTime=None):
    """Insert one row into pesuapp.announcement.

    Args:
        db_conn: DB-API connection (cursor/commit are used).
        ID: announcement id; a random 10-char key is generated when None.
        Title, Location, Description, PictureLink, HyperLink: row fields.
        PostingTime: timestamp for the row; defaults to datetime.now().
    """
    # PEP 8: compare to None with `is`, not `==` (fixes the original).
    if ID is None:
        ID = generate(num_of_atom=1, min_atom_len=10, max_atom_len=10).get_key()
    if PostingTime is None:
        PostingTime = datetime.datetime.now()
    cur = db_conn.cursor()
    # Parameterized query — values are bound by the driver, not interpolated.
    query = ("INSERT INTO `pesuapp`.`announcement` (`id`, `title`, `location`, "
             "`description`, `picture_link`, `hyperlink`, `posting_time`) "
             "VALUES (%s, %s, %s, %s, %s, %s, %s);")
    cur.execute(query, (ID, Title, Location, Description, PictureLink,
                        HyperLink, PostingTime))
    db_conn.commit()
    cur.close()
from key_generator.key_generator import generate

# (line count, filename label) for every output file size required.
all_sizes_required = [(100, '100'), (500, '500'), (1000, '1K'), (5000, '5K'),
                      (10000, '10K'), (50000, '50K'), (100000, '100K'),
                      (500000, '500K')]

for file_size in all_sizes_required:
    OUTPUT_PATH = "./int_test_" + file_size[1] + ".txt"
    STRING_COUNT = file_size[0]
    # Context manager guarantees the file is closed even on error
    # (original used a bare open/close pair).
    with open(OUTPUT_PATH, "w") as output_file:
        for i in range(STRING_COUNT):
            integer = generate(
                num_of_atom=1,
                min_atom_len=3,
                max_atom_len=9,
                type_of_value="int",
                seed=i,
            ).get_key()
            # Strip leading zeros; an all-zero key collapses to "0".
            integer = integer.lstrip("0") or "0"
            output_file.write(integer + "\n")
    print("Done with " + OUTPUT_PATH)
test_passed = 0 test_failed = 0 h_total = 100 extras_test = ['^', '%', '&', '#', '!', '@', '$'] pbar = tqdm(total=h_total) for h in range(h_total): for i in range(1, 5): for j in range(1, 5): for k in range(j, 10): for l in ['hex', 'char', 'int']: for m in ['none', 'all', 'mix']: key = generate(i, '-', j, k, seed=h, type_of_value=l, capital=m).get_key() flag_check = 0 if (len(key.split('-')) == i): flag_check += 1 len_atoms = [len(x) for x in key.split('-')] if (min(len_atoms) >= j and max(len_atoms) <= k): flag_check += 1 is_in = 0 if (l == 'hex' and m == 'none'): for _ in ''.join(key.split('-')): if (_ not in [str(x) for x in list(range(0, 10))] + [
from key_generator.key_generator import generate

# (line count, filename label) for every output file size required.
all_sizes_required = [(100, '100'), (500, '500'), (1000, '1K'), (5000, '5K'), (10000, '10K'), (50000, '50K'), (100000, '100K'), (500000, '500K')]
for file_size in all_sizes_required:
    OUTPUT_PATH = "./double_test_" + file_size[1] + ".txt"
    STRING_COUNT = file_size[0]
    output_file = open(OUTPUT_PATH, "w")
    for i in range(STRING_COUNT):
        # Integer and fractional parts of a synthetic double, both
        # generated from the same seed i.
        integer_before_dot = generate(num_of_atom=1, min_atom_len=1, max_atom_len=6, type_of_value="int", seed=i).get_key()
        integer_after_dot = generate(num_of_atom=1, min_atom_len=1, max_atom_len=6, type_of_value="int", seed=i).get_key()
        # NOTE(review): these two strips are immediately repeated by the
        # conditional versions below — this first pair is redundant.
        integer_before_dot = integer_before_dot.lstrip("0")
        integer_after_dot = integer_after_dot.rstrip("0")
        # Collapse an all-zero part to "0" instead of the empty string.
        integer_before_dot = integer_before_dot.lstrip(
            "0") if integer_before_dot != "" else "0"
        integer_after_dot = integer_after_dot.rstrip(
            "0") if integer_after_dot != "" else "0"
        # NOTE(review): nothing is written to output_file in this visible
        # chunk and the handle is never closed — the write/close likely
        # follows in code cut from this view; confirm against the full file.
def train(args=None):
    """Train a small dense regression model; persist config and weights.

    Args:
        args: parsed CLI namespace providing bs, data, lr, epochs, steps,
            loss, two_layers, early_stop_patience, tensorboard_dir.
            TODO(review): despite the None default, args is dereferenced
            unconditionally — confirm callers always pass it.
    """
    # Random run key names the config file, checkpoint and TensorBoard dir.
    key = generate().get_key()
    model_config = {
        'key': key,
        'bs': args.bs,
        'data': args.data,
        'lr': args.lr,
        'epochs': args.epochs,
        'steps': args.steps,
        'loss': args.loss,
        'two': args.two_layers
    }
    config_path = "model_configs/" + key + ".json"
    with open(config_path, 'w') as outfile:
        json.dump(model_config, outfile)
    print("model_config ", model_config)
    # Normalisation of features.
    x_train, y_train = load_data(args.data)
    # FIX: np.float was removed in NumPy 1.24 (deprecated since 1.20);
    # the builtin float (alias of float64 for astype) is the replacement.
    x_train = x_train.astype(float)
    y_train = y_train.astype(float)
    print("x_train.shape[0] ", x_train.shape[0])
    callbacks = []
    # Stop early once val_loss plateaus, restoring the best weights.
    callbacks.append(
        tf.keras.callbacks.EarlyStopping(monitor='val_loss',
                                         mode='min',
                                         patience=args.early_stop_patience,
                                         min_delta=0.001,
                                         restore_best_weights=True))
    # Checkpoint only the best weights by val_loss.
    callbacks.append(
        tf.keras.callbacks.ModelCheckpoint(os.path.join(
            'models', '{name}.h5'.format(name=key)),
                                           verbose=1,
                                           save_best_only=True,
                                           save_weights_only=True,
                                           monitor='val_loss',
                                           mode='min'))
    callbacks.append(
        tf.keras.callbacks.TensorBoard(log_dir=args.tensorboard_dir + "/" +
                                       key))
    # Model building: optional hidden layer, then a single linear output.
    model_sequence = []
    if args.two_layers:
        model_sequence.append(layers.Dense(units=int(x_train.shape[0] / 2)))
    model_sequence.append(layers.Dense(units=1))
    model = tf.keras.Sequential(model_sequence)
    input_shape = [None, x_train[0].shape[0]]
    model.build(input_shape)
    model.summary()
    model.compile(optimizer=tf.optimizers.Adam(learning_rate=args.lr),
                  loss=args.loss)
    history = model.fit(
        x_train,
        y_train,
        epochs=args.epochs,
        verbose=1,
        # steps_per_epoch=args.steps,
        batch_size=args.bs,
        validation_split=0.1,
        callbacks=callbacks)
    hist = pd.DataFrame(history.history)
    hist['epoch'] = history.epoch
    hist.tail()
# (line count, filename label) for every output file size required.
all_sizes_required = [(100, '100'), (500, '500'), (1000, '1K'), (5000, '5K'),
                      (10000, '10K'), (50000, '50K'), (100000, '100K'),
                      (500000, '500K')]

for file_size in all_sizes_required:
    OUTPUT_PATH = "./string_test_" + file_size[1] + ".txt"
    STRING_COUNT = file_size[0]
    # Context manager so every file is closed (the original never closed
    # the handle in this chunk).
    with open(OUTPUT_PATH, "w") as output_file:
        for i in range(STRING_COUNT):
            # Email-like string: single-atom local part @ two-atom domain.
            recipient = generate(num_of_atom=1,
                                 type_of_value="hex",
                                 capital="mix",
                                 extras=["-", "_"],
                                 seed=i).get_key()
            domain = generate(num_of_atom=2,
                              separator=".",
                              min_atom_len=3,
                              max_atom_len=5,
                              type_of_value="hex",
                              capital="mix",
                              extras=["-"],
                              seed=i).get_key()
            # Renamed from `string`, which shadowed the stdlib module.
            email = recipient + "@" + domain
            output_file.write(email + "\n")