def _get_new_id(cls, collection_id):
    """Generates a unique ID for the question of the form
    {{collection_id}}.{{random_hash_of_16_chars}}

    Args:
        collection_id: str. The ID of the collection containing the question.

    Returns:
        new_id: str. ID of the new QuestionModel instance.

    Raises:
        Exception: The ID generator for QuestionModel is producing too many
            collisions.
    """
    for _ in range(base_models.MAX_RETRIES):
        new_id = '%s.%s' % (
            collection_id,
            utils.convert_to_hash(
                str(utils.get_random_int(base_models.RAND_RANGE)),
                base_models.ID_LENGTH))
        if not cls.get_by_id(new_id):
            return new_id
    raise Exception(
        'The id generator for QuestionModel is producing too many '
        'collisions.')
def _generate_id(cls, intent):
    """Generates an ID for a new SentEmailModel instance.

    Args:
        intent: str. The intent string, i.e. the purpose of the email.
            Valid intent strings are defined in feconf.py.

    Returns:
        str. The newly-generated ID for the SentEmailModel instance.

    Raises:
        Exception: The id generator for SentEmailModel is producing too
            many collisions.
    """
    id_prefix = '%s.' % intent
    for _ in range(base_models.MAX_RETRIES):
        new_id = '%s.%s' % (
            id_prefix,
            utils.convert_to_hash(
                str(utils.get_random_int(base_models.RAND_RANGE)),
                base_models.ID_LENGTH))
        if not cls.get_by_id(new_id):
            return new_id
    raise Exception(
        'The id generator for SentEmailModel is producing too many '
        'collisions.')
def generate_new_thread_id(cls, entity_type: str, entity_id: str) -> str:
    """Generates a new thread ID which is unique.

    Args:
        entity_type: str. The type of the entity.
        entity_id: str. The ID of the entity.

    Returns:
        str. A thread ID that is different from the IDs of all the
        existing threads within the given entity.

    Raises:
        Exception. There were too many collisions with existing thread IDs
            when attempting to generate a new thread ID.
    """
    for _ in python_utils.RANGE(_MAX_RETRIES):
        thread_id = (
            '%s.%s.%s%s' % (
                entity_type, entity_id,
                utils.base64_from_int(
                    int(utils.get_current_time_in_millisecs())),
                utils.base64_from_int(utils.get_random_int(_RAND_RANGE))))
        if not cls.get_by_id(thread_id):
            return thread_id
    raise Exception(
        'New thread id generator is producing too many collisions.')
def get_new_id(cls, entity_name):
    """Gets a new id for an entity, based on its name.

    The returned id is guaranteed to be unique among all instances of this
    entity.

    Args:
        entity_name: The name of the entity. Coerced to a utf-8 encoded
            string.

    Returns:
        str. New unique id for this entity class.

    Raises:
        Exception. An ID cannot be generated within a reasonable number of
            attempts.
    """
    for _ in python_utils.RANGE(MAX_RETRIES):
        new_id = utils.convert_to_hash(
            '%s%s' % (entity_name, utils.get_random_int(RAND_RANGE)),
            ID_LENGTH)
        if not cls.get_by_id(new_id):
            return new_id
    raise Exception('New id generator is producing too many collisions.')
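Most of the ID generators in this collection share one collision-retry loop: hash a random integer (optionally salted with an entity name or prefixed with a parent ID), check the datastore for an existing instance with that ID, and retry up to MAX_RETRIES times before giving up. Below is a minimal, self-contained sketch of that pattern; the hash helper mirrors the SHA-1/urlsafe-base64 construction visible in the `get_new_id` snippet further down, while the in-memory set is a stand-in for `cls.get_by_id()` against a real datastore.

# Minimal sketch of the shared collision-retry ID-generation pattern.
# The set below is a stand-in for cls.get_by_id() against a datastore,
# and the constants mirror the values visible in these snippets.
import base64
import hashlib
import random

MAX_RETRIES = 10
RAND_RANGE = 127 * 127
ID_LENGTH = 12

_existing_ids = set()

def convert_to_hash(input_string, max_length):
    # URL-safe base64 of a SHA-1 digest, truncated to max_length chars.
    digest = hashlib.sha1(input_string.encode('utf-8')).digest()
    return base64.urlsafe_b64encode(digest).decode('utf-8')[:max_length]

def get_new_id(entity_name):
    for _ in range(MAX_RETRIES):
        new_id = convert_to_hash(
            '%s%s' % (entity_name, random.randint(0, RAND_RANGE)), ID_LENGTH)
        if new_id not in _existing_ids:
            _existing_ids.add(new_id)
            return new_id
    raise Exception('New id generator is producing too many collisions.')

print(get_new_id('exploration'))  # e.g. 'x3JZUOCCTfq1' (12 chars)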
def _generate_id(cls, exp_id):
    """Generates a unique id for the classifier model of the form
    {{exp_id}}.{{random_hash_of_16_chars}}

    Args:
        exp_id: str. ID of the exploration.

    Returns:
        str. ID of the new classifier model.

    Raises:
        Exception: The id generator for ClassifierModel is producing too
            many collisions.
    """
    for _ in range(base_models.MAX_RETRIES):
        new_id = '%s.%s' % (
            exp_id,
            utils.convert_to_hash(
                str(utils.get_random_int(base_models.RAND_RANGE)),
                base_models.ID_LENGTH))
        if not cls.get_by_id(new_id):
            return new_id
    raise Exception(
        'The id generator for ClassifierModel is producing too many '
        'collisions.')
def _generate_id(cls, exp_id):
    """Generates a unique id for the training job of the form
    {{exp_id}}.{{random_hash_of_16_chars}}

    Args:
        exp_id: str. ID of the exploration.

    Returns:
        str. ID of the new ClassifierTrainingJobModel instance.

    Raises:
        Exception: The id generator for ClassifierTrainingJobModel is
            producing too many collisions.
    """
    for _ in range(base_models.MAX_RETRIES):
        new_id = '%s.%s' % (
            exp_id,
            utils.convert_to_hash(
                str(utils.get_random_int(base_models.RAND_RANGE)),
                base_models.ID_LENGTH))
        if not cls.get_by_id(new_id):
            return new_id
    raise Exception(
        'The id generator for ClassifierTrainingJobModel is producing '
        'too many collisions.')
def get_offset(query: datastore_services.Query) -> int:
    """Returns a random offset into the query results, leaving room to
    fetch 2 * question_count_per_skill questions after it; returns 0 if
    the query has too few results for that.
    """
    question_count = query.count()
    if question_count > 2 * question_count_per_skill:
        return utils.get_random_int(
            question_count - (question_count_per_skill * 2))
    return 0
def _generate_id(cls, exp_id):
    """Generates a unique id for the training job of the form
    '[exp_id].[random hash of 16 chars]'.

    Args:
        exp_id: str. ID of the exploration.

    Returns:
        str. ID of the new ClassifierTrainingJobModel instance.

    Raises:
        Exception. The id generator for ClassifierTrainingJobModel is
            producing too many collisions.
    """
    for _ in python_utils.RANGE(base_models.MAX_RETRIES):
        new_id = '%s.%s' % (
            exp_id,
            utils.convert_to_hash(
                python_utils.UNICODE(
                    utils.get_random_int(base_models.RAND_RANGE)),
                base_models.ID_LENGTH))
        if not cls.get_by_id(new_id):
            return new_id
    raise Exception(
        'The id generator for ClassifierTrainingJobModel is producing '
        'too many collisions.')
def _generate_id(cls, ticket_name):
    """Generates key for the instance of AppFeedbackReportTicketModel
    class in the required format with the arguments provided.

    Args:
        ticket_name: str. The name assigned to the ticket on creation.

    Returns:
        str. The generated ID for this entity using the current datetime
        in milliseconds (as the entity's creation timestamp), a SHA1 hash
        of the ticket_name, and a random string, of the form
        '[creation_datetime_msec].[hash(ticket_name)].[random hash]'.
    """
    current_datetime_in_msec = utils.get_time_in_millisecs(
        datetime.datetime.utcnow())
    for _ in python_utils.RANGE(base_models.MAX_RETRIES):
        name_hash = utils.convert_to_hash(
            ticket_name, base_models.ID_LENGTH)
        random_hash = utils.convert_to_hash(
            python_utils.UNICODE(
                utils.get_random_int(base_models.RAND_RANGE)),
            base_models.ID_LENGTH)
        new_id = '%s.%s.%s' % (
            int(current_datetime_in_msec), name_hash, random_hash)
        if not cls.get_by_id(new_id):
            return new_id
    raise Exception(
        'The id generator for AppFeedbackReportTicketModel is producing '
        'too many collisions.')
def get_offset(query):
    """Returns a random offset into the query results, leaving room to
    fetch 2 * question_count_per_skill questions after it; returns 0 if
    the query has too few results for that.
    """
    question_count = query.count()
    if question_count > 2 * question_count_per_skill:
        return utils.get_random_int(
            question_count - (question_count_per_skill * 2))
    return 0
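For context, the offset returned above is presumably consumed by a fetch over the same query. A hypothetical usage sketch follows; the `question_query` name and the `fetch(limit, offset=...)` call are modeled on the NDB-style datastore API these snippets use, not taken from the source:

# Hypothetical usage of get_offset: fetch a window of
# 2 * question_count_per_skill results starting at the random offset.
offset = get_offset(question_query)
questions = question_query.fetch(
    question_count_per_skill * 2, offset=offset)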
def _generate_id(cls, platform, submitted_on_datetime):
    """Generates key for the instance of AppFeedbackReportModel class in
    the required format with the arguments provided.

    Args:
        platform: str. The platform from which the user submitted the
            report.
        submitted_on_datetime: datetime.datetime. The datetime that the
            report was submitted on in UTC.

    Returns:
        str. The generated ID for this entity using platform,
        submitted_on_msec, and a random string, of the form
        '[platform].[submitted_on_msec].[random hash]'.
    """
    submitted_datetime_in_msec = utils.get_time_in_millisecs(
        submitted_on_datetime)
    for _ in python_utils.RANGE(base_models.MAX_RETRIES):
        random_hash = utils.convert_to_hash(
            python_utils.UNICODE(
                utils.get_random_int(base_models.RAND_RANGE)),
            base_models.ID_LENGTH)
        new_id = '%s.%s.%s' % (
            platform, int(submitted_datetime_in_msec), random_hash)
        if not cls.get_by_id(new_id):
            return new_id
    raise Exception(
        'The id generator for AppFeedbackReportModel is producing too '
        'many collisions.')
def _generate_id(cls, intent):
    """Generates an ID for a new SentEmailModel instance.

    Args:
        intent: str. The intent string, i.e. the purpose of the email.
            Valid intent strings are defined in feconf.py.

    Returns:
        str. The newly-generated ID for the SentEmailModel instance.

    Raises:
        Exception: The id generator for SentEmailModel is producing too
            many collisions.
    """
    id_prefix = '%s.' % intent
    for _ in python_utils.RANGE(base_models.MAX_RETRIES):
        new_id = '%s.%s' % (
            id_prefix,
            utils.convert_to_hash(
                python_utils.UNICODE(
                    utils.get_random_int(base_models.RAND_RANGE)),
                base_models.ID_LENGTH))
        if not cls.get_by_id(new_id):
            return new_id
    raise Exception(
        'The id generator for SentEmailModel is producing too many '
        'collisions.')
def get_new_id(cls, entity_name):
    """Gets a new id for an entity, based on its name.

    The returned id is guaranteed to be unique among all instances of this
    entity.

    Args:
        entity_name: the name of the entity. Coerced to a utf-8 encoded
            string. Defaults to ''.

    Returns:
        str. A new unique id for this entity class.

    Raises:
        Exception: If an id cannot be generated within a reasonable number
            of attempts.
    """
    try:
        # Python 2: `unicode` is the builtin text type.
        entity_name = unicode(entity_name).encode('utf-8')
    except Exception:
        entity_name = ''

    MAX_RETRIES = 10
    RAND_RANGE = 127 * 127
    ID_LENGTH = 12
    for _ in range(MAX_RETRIES):
        new_id = utils.convert_to_hash(
            '%s%s' % (entity_name, utils.get_random_int(RAND_RANGE)),
            ID_LENGTH)
        if not cls.get_by_id(new_id):
            return new_id
    raise Exception('New id generator is producing too many collisions.')
def generate_xml_file():
    global xml_template
    params = {
        "unique_string": get_unique_sting(),
        "random_int": get_random_int(100),
        "random_lines": generate_lines(random.randint(1, 10))
    }
    xml_text = xml_template.format(**params)
    return xml_text
def _generate_unique_reply_to_id(cls):
    """Generates a unique reply-to id.

    Returns:
        str. The unique reply-to id if there are no collisions.

    Raises:
        Exception. When the unique id generator produces too many
            collisions.
    """
    for _ in range(base_models.MAX_RETRIES):
        new_id = utils.convert_to_hash(
            '%s' % (utils.get_random_int(base_models.RAND_RANGE)),
            REPLY_TO_ID_LENGTH)
        if not cls.get_by_reply_to_id(new_id):
            return new_id
    raise Exception('Unique id generator is producing too many collisions.')
def generate_new_thread_id(cls, exploration_id):
    """Generates a new thread id, unique within the exploration.

    Exploration ID + the generated thread ID is globally unique.
    """
    for _ in range(_MAX_RETRIES):
        thread_id = (
            utils.base64_from_int(utils.get_current_time_in_millisecs()) +
            utils.base64_from_int(utils.get_random_int(_RAND_RANGE)))
        if not cls.get_by_exp_and_thread_id(exploration_id, thread_id):
            return thread_id
    raise Exception(
        'New thread id generator is producing too many collisions.')
def _generate_id(cls, intent):
    """Generates an ID for a new SentEmailModel instance, prefixed by the
    intent string.
    """
    id_prefix = '%s.' % intent
    for _ in range(base_models.MAX_RETRIES):
        new_id = '%s.%s' % (
            id_prefix,
            utils.convert_to_hash(
                str(utils.get_random_int(base_models.RAND_RANGE)),
                base_models.ID_LENGTH))
        if not cls.get_by_id(new_id):
            return new_id
    raise Exception(
        'The id generator for SentEmailModel is producing too many '
        'collisions.')
def create_purchase_detail(session, customer, product, purchase,
                           purchase_detail_status):
    global purchase_detail_pool
    new = _get_value_from_pool(purchase_detail_pool)
    if new is not None:
        return new

    purchase_detail = PurchaseDetail()
    purchase_detail.customer = customer
    purchase_detail.product = product
    purchase_detail.purchase_detail_status = purchase_detail_status
    purchase_detail.purchase = purchase
    purchase_detail.purchase_detail_product_quantity = utils.get_random_int(10)

    purchase_detail_pool.append(purchase_detail)
    session.add(purchase_detail)
    session.commit()
    return purchase_detail
def _generate_unique_reply_to_id(cls):
    """Generates the unique reply-to id.

    Returns:
        str. The unique reply-to id if there are no collisions.

    Raises:
        Exception. When unique id generator produces too many collisions.
    """
    for _ in python_utils.RANGE(base_models.MAX_RETRIES):
        new_id = utils.convert_to_hash(
            '%s' % (utils.get_random_int(base_models.RAND_RANGE)),
            REPLY_TO_ID_LENGTH)
        if not cls.get_by_reply_to_id(new_id):
            return new_id
    raise Exception('Unique id generator is producing too many collisions.')
def buy_bitcoin_messages(message):
    global bitcoin_value
    # Collect any ASCII letters found in the message; a valid amount
    # contains none.
    errors = [letter for letter in string.ascii_letters
              if letter in message.text]
    if not errors and len(message.text) != 19 and "-" not in message.text:
        try:
            bitcoin_value = float(message.text)
        except ValueError:
            txt = "Sorry, but you entered an invalid value. Please try again."
            bitcoin_value = 0
            bot.send_message(message.chat.id, txt)
        if 0 < bitcoin_value < 100000:
            txt = """Great! I have successfully received your Bitcoin amount.
Now please tell me the card number to which I should send the Bitcoins
(format xxxx-xxxx-xxxx-xxxx - replace the 'xxxx' characters with digits).
Enter the card number: """
            bot.send_message(message.chat.id, txt)
        else:
            txt = """You entered an invalid Bitcoin amount. The value must be
less than 100 000 and greater than 0. Please try again."""
            bot.send_message(message.chat.id, txt)
    if len(message.text) == 19 and "-" in message.text:
        address = message.text
        txt = "Got it! Thank you!"
        bot.send_message(message.chat.id, txt)
        rub = convert_bitcoin_to_rub(bitcoin_value)
        comment = get_random_int()
        text = """To buy Bitcoins, transfer {} rubles to the QIWI wallet '{}'
with the comment {}""".format(rub, config.QIWI_PURSE, comment)
        bot.send_message(message.chat.id, text)
def get(self, request, *args, **kwargs):
    verify_type = self.request.GET.get("verify_type", "forget_pwd")
    user = None
    if verify_type == "forget_pwd":
        user, check_code = UserAccountUtils.account_check(self.account)
        if check_code != ErrorCode.SUCCESS:
            return general_json_response(status.HTTP_200_OK, check_code)
    elif verify_type == "info_modify":
        user, check_code = UserAccountUtils.account_check(self.account)
        if check_code == ErrorCode.SUCCESS:
            return general_json_response(
                status.HTTP_200_OK, ErrorCode.USER_ACCOUNT_EXISTS)
    else:
        # Registration: a user that does not exist yet may also request a
        # verification code.
        pass

    pic_code_verify = VerifyCodeExpireCache(
        'picture-%s' % self.pic_code).check_verify_code(self.pic_code)
    if not pic_code_verify:
        return general_json_response(
            status.HTTP_200_OK, ErrorCode.USER_PIC_CODE_INVALID)

    code = get_random_int()
    VerifyCodeExpireCache(self.account).set_verify_code(code)

    # Send the code via SMS and/or email.
    if user is not None:
        send_general_code.delay(code, user.phone, user.email)
    else:
        if RegularUtils.phone_check(self.account):
            send_general_code.delay(code, phone=self.account)
        elif RegularUtils.email_check(self.account):
            send_general_code.delay(code, email=self.account)
    return general_json_response(status.HTTP_200_OK, ErrorCode.SUCCESS)
def generate_new_blog_post_id(cls):
    """Generates a new blog post ID which is unique and is in the form of
    a random hash of 12 chars.

    Returns:
        str. A blog post ID that is different from the IDs of all the
        existing blog posts.

    Raises:
        Exception. There were too many collisions with existing blog post
            IDs when attempting to generate a new blog post ID.
    """
    for _ in python_utils.RANGE(base_models.MAX_RETRIES):
        blog_post_id = utils.convert_to_hash(
            python_utils.UNICODE(
                utils.get_random_int(base_models.RAND_RANGE)),
            base_models.ID_LENGTH)
        if not cls.get_by_id(blog_post_id):
            return blog_post_id
    raise Exception(
        'New blog post id generator is producing too many collisions.')
def get_new_id(cls, entity_name):
    """Gets a new 12-character id for an entity, based on its name.

    This id is unique among all instances of this entity.

    Raises:
        Exception: if an id cannot be generated within a reasonable number
            of attempts.
    """
    MAX_RETRIES = 10
    RAND_RANGE = 127 * 127
    for _ in range(MAX_RETRIES):
        new_id = base64.urlsafe_b64encode(
            hashlib.sha1(
                '%s%s' % (
                    entity_name.encode('utf-8'),
                    utils.get_random_int(RAND_RANGE))
            ).digest())[:12]
        if not cls.get(new_id, strict=False):
            return new_id
    raise Exception('New id generator is producing too many collisions.')
def generate_new_thread_id(cls, exploration_id):
    """Generates a new thread ID which is unique within the exploration.

    Args:
        exploration_id: str. The ID of the exploration.

    Returns:
        str. A thread ID that is different from the IDs of all the
        existing threads within the given exploration.

    Raises:
        Exception: There were too many collisions with existing thread IDs
            when attempting to generate a new thread ID.
    """
    for _ in range(_MAX_RETRIES):
        thread_id = (
            utils.base64_from_int(utils.get_current_time_in_millisecs()) +
            utils.base64_from_int(utils.get_random_int(_RAND_RANGE)))
        if not cls.get_by_exp_and_thread_id(exploration_id, thread_id):
            return thread_id
    raise Exception(
        'New thread id generator is producing too many collisions.')
def _get_new_id(cls):
    """Generates a unique ID for the question of the form
    {{random_hash_of_12_chars}}

    Returns:
        new_id: str. ID of the new QuestionModel instance.

    Raises:
        Exception: The ID generator for QuestionModel is producing too many
            collisions.
    """
    for _ in python_utils.RANGE(base_models.MAX_RETRIES):
        new_id = utils.convert_to_hash(
            python_utils.STR(utils.get_random_int(base_models.RAND_RANGE)),
            base_models.ID_LENGTH)
        if not cls.get_by_id(new_id):
            return new_id
    raise Exception(
        'The id generator for QuestionModel is producing too many '
        'collisions.')
def post(self, request, *args, **kwargs):
    # TODO: send the activation code and record it.
    self.model = RoleUser
    self.serializer_class = RoleUserBasicSerializer
    active_code = get_random_int(8)
    remark = self.request.data.get("remark", "")
    user_qs = AuthUser.objects.filter(is_active=True).filter(
        Q(phone=self.phone) | Q(email=self.email))
    if user_qs.exists():
        user = user_qs[0]
        user.nickname = self.username
        if self.phone:
            user.phone = self.phone
        if self.email:
            user.email = self.email
        user.role_type = AuthUser.ROLE_ADMIN
        user.remark = remark
        user.save()
    else:
        random_char = get_random_char(6)
        # The username format string is redacted ("******") in the source.
        user = AuthUser.objects.create(
            username="******" % (self.phone, random_char),
            nickname=self.username,
            phone=self.phone,
            email=self.email,
            role_type=AuthUser.ROLE_ADMIN,
            active_code=active_code,
            active_code_valid=True,
            remark=remark)
    send_general_code(str(active_code), str(user.phone), str(user.email))
    role_user = RoleUser.objects.get_or_create(
        role_id=self.role_id,
        user_id=user.id,
        remark=self.request.data.get("remark", ""))
    return general_json_response(status.HTTP_200_OK, ErrorCode.SUCCESS)
def _get_new_id(cls) -> str:
    """Generates a unique ID for the question in the form of a random hash
    of 12 chars.

    Returns:
        new_id: str. ID of the new QuestionModel instance.

    Raises:
        Exception. The ID generator for QuestionModel is producing too many
            collisions.
    """
    for _ in python_utils.RANGE(base_models.MAX_RETRIES):
        new_id = utils.convert_to_hash(
            python_utils.UNICODE(
                utils.get_random_int(base_models.RAND_RANGE)),
            base_models.ID_LENGTH)
        if not cls.get_by_id(new_id):
            return new_id
    raise Exception(
        'The id generator for QuestionModel is producing too many '
        'collisions.')
def _get_value_from_pool(pool):
    # Once the pool is non-empty, reuse a random existing entry when the
    # random draw lands on 0; otherwise return None so the caller creates
    # a new one.
    reuse = len(pool) > 0 and utils.get_random_int(1) == 0
    if reuse:
        index = utils.get_random_index(pool)
        return pool[index]
    return None
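The two pool snippets here (create_purchase_detail and _get_value_from_pool) implement a reuse-or-create pattern for generated rows. A self-contained sketch follows, with `random.randint` standing in for the project's `utils` helpers and assuming `get_random_int(1)` draws inclusively from {0, 1}, i.e. a coin flip; the `create_or_reuse` wrapper is hypothetical:

import random

_pool = []  # Stand-in for the module-level purchase_detail_pool.

def _get_value_from_pool(pool):
    # Coin flip once the pool is non-empty: reuse an existing entry
    # roughly half the time (under the inclusive-bound assumption).
    if pool and random.randint(0, 1) == 0:
        return pool[random.randrange(len(pool))]
    return None

def create_or_reuse(make_new):
    cached = _get_value_from_pool(_pool)
    if cached is not None:
        return cached
    value = make_new()
    _pool.append(value)
    return value

print(create_or_reuse(dict))  # First call always creates: {}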
import utils
# import docx

print(utils.get_random_int(10))
print(utils.get_file_extension("test.txt"))
# docx.api.Document()
class OrganizationUtils(object):

    @classmethod
    def get_child_orgs(cls, assess_id, parent_id, max_depth=None,
                       depth_time=1):
        from wduser.serializers import OrganizationBasicSerializer
        if parent_id == 0:
            level = 1
        else:
            level = depth_time
        orgs = Organization.objects.filter_active(
            assess_id=assess_id, parent_id=parent_id)
        org_data = []
        child_org_ids = []
        for org in orgs:
            org_info = OrganizationBasicSerializer(instance=org).data
            org_info["level"] = level
            child_org_ids.append(org.id)
            # Recurse into children until max_depth is reached.
            if max_depth is None or depth_time < max_depth:
                child_org_data, ids = OrganizationUtils.get_child_orgs(
                    assess_id, org.id, max_depth, depth_time + 1)
                org_info["child_orgs"] = child_org_data
                child_org_ids += ids
            else:
                org_info["child_orgs"] = []
            org_data.append(org_info)
        return org_data, child_org_ids

    @classmethod
    def get_parent_org_names(cls, org_codes):
        org_names = []

        def get_org_name(org_code=None, org_id=None):
            # Walk up the tree, collecting each ancestor's name.
            try:
                if org_code:
                    org = Organization.objects.get(
                        identification_code=org_code, is_active=True)
                elif org_id:
                    org = Organization.objects.get(id=org_id)
                else:
                    return
                org_names.append(org.name)
                if org.parent_id:
                    get_org_name(org_id=org.parent_id)
            except Exception:
                pass

        for org_code in org_codes:
            get_org_name(org_code=org_code)
        return org_names

    @classmethod
    def get_tree_organization(cls, assess_id, parent_id=0, max_depth=None):
        from wduser.serializers import OrganizationBasicSerializer
        level = 1
        if parent_id != 0:
            porg = Organization.objects.get(id=parent_id)
            porg_info = OrganizationBasicSerializer(instance=porg).data
            porg_info["level"] = level
            porg_info["child_orgs"] = OrganizationUtils.get_child_orgs(
                assess_id, parent_id, max_depth)[0]
            return porg_info
        return OrganizationUtils.get_child_orgs(
            assess_id, parent_id, max_depth)[0]

    @classmethod
    def generate_org_code(cls, assess_id, org_name):
        if type(org_name) == str:
            try:
                org_name = org_name.encode("utf-8")
            except Exception, e:  # Python 2 exception syntax.
                logger.error("org name encode utf8 error, msg: %s" % e)
        code = '%s%s%s' % (
            get_first_char(org_name, 4), assess_id, get_random_int(4))
        return code
def NUMBER():
    return get_random_int()
# -*- coding: utf-8 -*-
from utils import get_random_str, get_random_int, get_random_date

STRING = get_random_str()
NUMBER = get_random_int()
DATE = get_random_date()
def train():
    images_path = all_image_paths
    reduced_text_embedding = utils.lrelu(utils.linear(tensor_captions, 256))

    G = generator_txt2img_resnet(
        [None, z_dim + reduced_text_embedding.shape[1]])
    D = discriminator_txt2img_resnet(
        [None, output_size, output_size, c_dim],
        t_txt=[None, reduced_text_embedding.shape[1]])
    G.train()
    D.train()

    d_optimizer = tf.optimizers.Adam(lr, beta_1=beta1)
    g_optimizer = tf.optimizers.Adam(lr, beta_1=beta1)
    n_step_epoch = int(len(images_path) // batch_size)

    dataset_img = tf.data.Dataset.from_tensor_slices(tensor_images)
    batched_ds_img = dataset_img.batch(batch_size, drop_remainder=True)
    dataset_txt = tf.data.Dataset.from_tensor_slices(reduced_text_embedding)
    batched_ds_txt = dataset_txt.batch(batch_size, drop_remainder=True)
    list_batch_txt = [batch for batch in batched_ds_txt]

    for epoch in range(n_epoch):
        for step, batch_images in enumerate(batched_ds_img):
            batch_txt = list_batch_txt[step]
            step_time = time.time()
            with tf.GradientTape(persistent=True) as tape:
                z = np.random.normal(
                    loc=0.0, scale=1.0,
                    size=[batch_size, z_dim]).astype(np.float32)
                z = tf.convert_to_tensor(z)
                z_text_concat = tf.concat([z, batch_txt], 1)
                net_fake_image = G(z_text_concat)

                # BinaryCrossentropy expects (y_true, y_pred).
                cross_entropy = tf.keras.losses.BinaryCrossentropy(
                    from_logits=True)

                # Generator loss: fake image + matching caption should be
                # classified as real.
                disc_fake_image_logits = D([net_fake_image, batch_txt])
                g_loss = cross_entropy(
                    tf.ones_like(disc_fake_image_logits),
                    disc_fake_image_logits)

                # Discriminator: real image + matching caption -> real.
                disc_real_image_logits = D([batch_images, batch_txt])
                d_loss1 = cross_entropy(
                    tf.ones_like(disc_real_image_logits),
                    disc_real_image_logits)

                # Real image + randomly mismatched caption -> fake.
                idexs = utils.get_random_int(
                    min=0, max=len(batch_txt) - 1, number=batch_size)
                wrong_caption = tf.stack([batch_txt[i] for i in idexs])
                disc_mismatch_logits = D([batch_images, wrong_caption])
                d_loss2 = cross_entropy(
                    tf.zeros_like(disc_mismatch_logits),
                    disc_mismatch_logits)

                # Fake image + matching caption -> fake.
                d_loss3 = cross_entropy(
                    tf.zeros_like(disc_fake_image_logits),
                    disc_fake_image_logits)
                d_loss = d_loss1 + (d_loss2 + d_loss3) * 0.5

            grad = tape.gradient(g_loss, G.trainable_weights)
            g_optimizer.apply_gradients(zip(grad, G.trainable_weights))
            grad = tape.gradient(d_loss, D.trainable_weights)
            d_optimizer.apply_gradients(zip(grad, D.trainable_weights))
            del tape

            print(
                "Epoch: [{}/{}] [{}/{}] took: {:.3f}, d_loss: {:.5f}, "
                "g_loss: {:.5f}".format(
                    epoch, n_epoch, step, n_step_epoch,
                    time.time() - step_time, d_loss, g_loss))

        if np.mod(epoch, save_every_epoch) == 0:
            G.save('{}/G.h5'.format(checkpoint_dir), save_weights=True)
            G.eval()
            D.save('{}/D.h5'.format(checkpoint_dir), save_weights=True)
            D.eval()

            # Generate and save a 2x2 grid of sample images.
            z = np.random.normal(
                loc=0.0, scale=1.0, size=[4, z_dim]).astype(np.float32)
            z = tf.convert_to_tensor(z)
            test_descr = np.reshape(batch_txt[0:4], (4, 256))
            z_text_concat = tf.concat([z, test_descr], 1)
            result = G(z_text_concat)
            G.train()
            D.train()
            img = result.numpy().squeeze().astype(np.uint8)
            tl.visualize.save_images(
                img, [2, 2], '{}/train_{:02d}.png'.format(sample_dir, epoch))
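Note that the `get_random_int` call in the mismatched-caption step differs from the single-argument variants elsewhere in this collection: it matches TensorLayer's `utils.get_random_int(min, max, number)`, which returns a list of `number` integers drawn inclusively from [min, max]. A pure-Python equivalent for reference (an approximation, not TensorLayer's actual source):

import random

def get_random_int(min=0, max=10, number=5):
    # One independent inclusive draw per requested integer.
    return [random.randint(min, max) for _ in range(number)]

print(get_random_int(min=0, max=63, number=4))  # e.g. [12, 40, 7, 55]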