Skip to content

Commit

Permalink
Internal change
Browse files · Browse the repository at this point in the history
PiperOrigin-RevId: 464934071
Branch information:
saberkun authored and tensorflower-gardener committed Aug 3, 2022
1 parent a81f859 commit 4e9f951
Show file tree
Hide file tree
Showing 8 changed files with 14 additions and 14 deletions.
2 changes: 1 addition & 1 deletion official/nlp/modeling/layers/transformer_scaffold.py
Original file line number Diff line number Diff line change
Expand Up @@ -237,7 +237,7 @@ def get_layer_instance(instance_or_cls, config, default_config):
self._output_layer_norm = tf.keras.layers.LayerNormalization(
name="output_layer_norm", axis=-1, epsilon=1e-12, dtype=tf.float32)

super(TransformerScaffold, self).build(input_shape)
super().build(input_shape)
logging.info("%s configs: %s", self.__class__.__name__, self.get_config())

def get_config(self):
Expand Down
2 changes: 1 addition & 1 deletion official/nlp/modeling/networks/albert_encoder.py
Original file line number Diff line number Diff line change
Expand Up @@ -173,7 +173,7 @@ def __init__(self,
# created using the Functional API. Once super().__init__ is called, we
# can assign attributes to `self` - note that all `self` assignments are
# below this line.
super(AlbertEncoder, self).__init__(
super().__init__(
inputs=[word_ids, mask, type_ids], outputs=outputs, **kwargs)
config_dict = {
'vocab_size': vocab_size,
Expand Down
2 changes: 1 addition & 1 deletion official/nlp/modeling/networks/classification.py
Original file line number Diff line number Diff line change
Expand Up @@ -74,7 +74,7 @@ def __init__(self,
('Unknown `output` value "%s". `output` can be either "logits" or '
'"predictions"') % output)

super(Classification, self).__init__(
super().__init__(
inputs=[cls_output], outputs=output_tensors, **kwargs)

# b/164516224
Expand Down
2 changes: 1 addition & 1 deletion official/nlp/modeling/networks/encoder_scaffold.py
Original file line number Diff line number Diff line change
Expand Up @@ -271,7 +271,7 @@ def __init__(self,
# created using the Functional API. Once super().__init__ is called, we
# can assign attributes to `self` - note that all `self` assignments are
# below this line.
super(EncoderScaffold, self).__init__(
super().__init__(
inputs=inputs, outputs=outputs, **kwargs)

self._hidden_cls = hidden_cls
Expand Down
2 changes: 1 addition & 1 deletion official/nlp/modeling/networks/mobile_bert_encoder.py
Original file line number Diff line number Diff line change
Expand Up @@ -163,7 +163,7 @@ def __init__(self,
encoder_outputs=all_layer_outputs,
attention_scores=all_attention_scores)

super(MobileBERTEncoder, self).__init__(
super().__init__(
inputs=self.inputs, outputs=outputs, **kwargs)

def get_embedding_table(self):
Expand Down
8 changes: 4 additions & 4 deletions official/nlp/modeling/networks/packed_sequence_embedding.py
Original file line number Diff line number Diff line change
Expand Up @@ -143,7 +143,7 @@ def __init__(self,
[attention_mask, sub_seq_mask])

outputs = [embeddings, attention_mask]
super(PackedSequenceEmbedding, self).__init__(
super().__init__(
inputs=inputs, outputs=outputs, **kwargs)
# TF does not track immutable attrs which do not contain Trackables,
# so by creating a config namedtuple instead of a dict we avoid tracking it.
Expand Down Expand Up @@ -221,7 +221,7 @@ def __init__(self,
if 'dtype' not in kwargs:
kwargs['dtype'] = 'float32'

super(PositionEmbeddingWithSubSeqMask, self).__init__(**kwargs)
super().__init__(**kwargs)
if use_dynamic_slicing and max_sequence_length is None:
raise ValueError(
'If `use_dynamic_slicing` is True, `max_sequence_length` must be set.'
Expand All @@ -236,7 +236,7 @@ def get_config(self):
'initializer': tf.keras.initializers.serialize(self._initializer),
'use_dynamic_slicing': self._use_dynamic_slicing,
}
base_config = super(PositionEmbeddingWithSubSeqMask, self).get_config()
base_config = super().get_config()
return dict(list(base_config.items()) + list(config.items()))

def build(self, input_shape):
Expand Down Expand Up @@ -273,7 +273,7 @@ def build(self, input_shape):
shape=[weight_sequence_length, width],
initializer=self._initializer)

super(PositionEmbeddingWithSubSeqMask, self).build(input_shape)
super().build(input_shape)

def call(self, inputs, position_ids=None, sub_sequence_mask=None):
"""Implements call() for the layer.
Expand Down
2 changes: 1 addition & 1 deletion official/nlp/modeling/networks/span_labeling.py
Original file line number Diff line number Diff line change
Expand Up @@ -81,7 +81,7 @@ def __init__(self,
# created using the Functional API. Once super().__init__ is called, we
# can assign attributes to `self` - note that all `self` assignments are
# below this line.
super(SpanLabeling, self).__init__(
super().__init__(
inputs=[sequence_data], outputs=output_tensors, **kwargs)
config_dict = {
'input_width': input_width,
Expand Down
8 changes: 4 additions & 4 deletions official/nlp/modeling/networks/xlnet_base.py
Original file line number Diff line number Diff line change
Expand Up @@ -384,7 +384,7 @@ class RelativePositionEncoding(tf.keras.layers.Layer):
"""

def __init__(self, hidden_size, **kwargs):
super(RelativePositionEncoding, self).__init__(**kwargs)
super().__init__(**kwargs)
self._hidden_size = hidden_size
self._inv_freq = 1.0 / (10000.0**(
tf.range(0, self._hidden_size, 2.0) / self._hidden_size))
Expand Down Expand Up @@ -476,7 +476,7 @@ def __init__(self,
use_cls_mask=False,
embedding_width=None,
**kwargs):
super(XLNetBase, self).__init__(**kwargs)
super().__init__(**kwargs)

self._vocab_size = vocab_size
self._initializer = initializer
Expand Down Expand Up @@ -574,7 +574,7 @@ def get_config(self):
"embedding_width":
self._embedding_width,
}
base_config = super(XLNetBase, self).get_config()
base_config = super().get_config()
return dict(list(base_config.items()) + list(config.items()))

def get_embedding_lookup_table(self):
Expand All @@ -601,7 +601,7 @@ def __call__(self,
"target_mapping": target_mapping,
"masked_tokens": masked_tokens
}
return super(XLNetBase, self).__call__(inputs, **kwargs)
return super().__call__(inputs, **kwargs)

def call(self, inputs):
"""Implements call() for the layer."""
Expand Down

0 comments on commit 4e9f951

Please sign in to comment.