def _clone_function(layer):
    """Clone callback that swaps one embedding class for another.

    Intended for use with ``tf.keras.models.clone_model``: a layer whose
    exact type is ``src_embedding_class`` is rebuilt as an equivalent
    ``dst_embedding_class`` layer with the same configuration; any other
    layer is returned unchanged.

    Args:
        layer: a Keras layer from the model being cloned.

    Returns:
        A new ``dst_embedding_class`` layer mirroring ``layer``'s config,
        or ``layer`` itself when its type does not match.
    """
    # Exact-type check (not isinstance) is deliberate: subclasses such as
    # sparse variants must not be swallowed by this branch.
    if type(layer) == src_embedding_class:
        logger.debug(
            "Replace {} with {}".format(
                src_embedding_class, dst_embedding_class
            )
        )
        # Fallback so `init` is always bound; previously, if neither
        # src nor dst was `Embedding`, the constructor call below raised
        # UnboundLocalError.
        init = layer.embeddings_initializer
        if src_embedding_class == Embedding:
            # Converting away from the ElasticDL embedding: its stored
            # initializer is a string, so resolve it to a Keras
            # initializer object.
            init = tf.keras.initializers.get(
                layer.embeddings_initializer
            )
        if dst_embedding_class == Embedding:
            # ElasticDL embedding only accepts a string type initializer,
            # so serialize the Keras initializer down to its class name.
            init = tf.keras.initializers.serialize(
                layer.embeddings_initializer
            )["class_name"]
        embedding_layer = dst_embedding_class(
            output_dim=layer.output_dim,
            input_dim=layer.input_dim,
            embeddings_initializer=init,
            mask_zero=layer.mask_zero,
            input_length=layer.input_length,
        )
        return embedding_layer
    return layer
def _clone_function(layer):
    """Rebuild native embedding layers as ElasticDL ``Embedding`` layers.

    Clone callback for ``tf.keras.models.clone_model``: layers of exact
    type ``tf.keras.layers.Embedding`` or ``SparseEmbedding`` are replaced
    by an ElasticDL ``Embedding`` carrying the same configuration; every
    other layer passes through untouched.
    """
    if type(layer) not in [tf.keras.layers.Embedding, SparseEmbedding]:
        return layer

    logger.debug("Replace {} with {}".format(
        layer.name, Embedding))
    # ElasticDL embedding only accept a string type initializer
    init = tf.keras.initializers.serialize(
        layer.embeddings_initializer)["class_name"]
    # Configuration shared by both replacement paths.
    shared_kwargs = dict(
        output_dim=layer.output_dim,
        input_dim=layer.input_dim,
        embeddings_initializer=init,
        name=layer.name,
    )
    if type(layer) == tf.keras.layers.Embedding:
        # Dense embedding: preserve masking and sequence-length config.
        return Embedding(
            mask_zero=layer.mask_zero,
            input_length=layer.input_length,
            **shared_kwargs,
        )
    # SparseEmbedding: preserve its combiner instead.
    return Embedding(combiner=layer.combiner, **shared_kwargs)
def _clone_function(layer):
    """Swap embedding-bearing layers for their ElasticDL equivalents.

    Clone callback for ``tf.keras.models.clone_model``. Embedding layers
    large enough to need partitioning (per ``_need_partition_embedding``)
    become ElasticDL ``Embedding`` layers; ``DenseFeatures`` layers have
    their embedding columns rewritten; all other layers are returned
    unchanged.
    """
    layer_type = type(layer)
    # DenseFeatures never matches the embedding-type test below, so
    # handling it first is equivalent to the original elif ordering.
    if layer_type == tf.keras.layers.DenseFeatures:
        return _replace_tf_embedding_column_with_edl(layer)
    is_embedding = layer_type in (
        tf.keras.layers.Embedding,
        SparseEmbedding,
    )
    if not is_embedding or not _need_partition_embedding(layer):
        return layer

    logger.debug("Replace {} with {}".format(
        layer.name, Embedding))
    # ElasticDL embedding only accept a string type initializer
    init = tf.keras.initializers.serialize(
        layer.embeddings_initializer)["class_name"]
    # Configuration common to both replacement paths.
    shared_kwargs = dict(
        output_dim=layer.output_dim,
        input_dim=layer.input_dim,
        embeddings_initializer=init,
        name=layer.name,
    )
    if layer_type == tf.keras.layers.Embedding:
        # Dense embedding keeps masking and sequence-length settings.
        embedding_layer = Embedding(
            mask_zero=layer.mask_zero,
            input_length=layer.input_length,
            **shared_kwargs,
        )
    else:
        # SparseEmbedding keeps its combiner.
        embedding_layer = Embedding(
            combiner=layer.combiner, **shared_kwargs
        )
    # Remember the original weight name so the parameter server can map
    # gradients back to this embedding table.
    embedding_layer.set_embedding_weight_name(
        layer.trainable_weights[0].name)
    return embedding_layer
def _watch(self):
    """Watch Kubernetes pod events for this job, forever.

    Opens a watch stream over pods carrying this job's
    ``ELASTICDL_JOB_KEY`` label and forwards each event to
    ``self._event_cb``. Never returns; on any exception the stream is
    re-opened after a short pause.
    """
    while True:
        try:
            stream = watch.Watch().stream(
                self.client.list_namespaced_pod,
                self.namespace,
                label_selector=ELASTICDL_JOB_KEY + "=" + self.job_name,
            )
            for event in stream:
                self._event_cb(event)
        # Broad catch is intentional: any API/network hiccup should
        # trigger a retry rather than kill the watcher thread.
        except Exception as e:
            logger.debug(e)
            # In case of any flaky issue causing exceptions, we wait for
            # a little time and retry.
            # NOTE(review): sleep placement inside the handler is
            # reconstructed from collapsed source — confirm against
            # upstream.
            time.sleep(5)