# Copyright 2024 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for PerDimScaleAttention."""
import tensorflow as tf, tf_keras
from official.nlp.modeling.layers import per_dim_scale_attention as attention
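
# The layer under test is expected to replace the fixed 1/sqrt(key_dim) query
# scaling of standard attention with a learned per-dimension scale; this
# description is inferred from the layer's name and is not asserted here.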


class PerDimScaleAttentionTest(tf.test.TestCase):

  def test_attention(self):
    num_heads = 12
    key_dim = 64
    seq_length = 1024
    batch_size = 2
    test_layer = attention.PerDimScaleAttention(
        num_heads=num_heads, key_dim=key_dim)
    # Self-attention: the query doubles as the value. The input width is the
    # model dimension, num_heads * key_dim.
    query = tf.random.normal(
        shape=(batch_size, seq_length, key_dim * num_heads))
    value = query
    output = test_layer(query=query, value=value)
    # The output should preserve the query's [batch, seq, model_dim] shape.
    self.assertEqual(output.shape,
                     [batch_size, seq_length, key_dim * num_heads])

  def test_config(self):
    num_heads = 12
    key_dim = 64
    test_layer = attention.PerDimScaleAttention(
        num_heads=num_heads, key_dim=key_dim)
    print(test_layer.get_config())
    new_layer = attention.PerDimScaleAttention.from_config(
        test_layer.get_config())
    # If the serialization was successful, the new config should match the old.
    self.assertAllEqual(test_layer.get_config(), new_layer.get_config())
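
  # Sketch of an additional check, not part of the original suite: assuming
  # the layer keeps tf_keras MultiHeadAttention's call signature (the tests
  # above suggest this but do not confirm it), a `value` with a different
  # sequence length should still produce an output shaped like the query.
  def test_cross_attention_shape(self):
    num_heads = 12
    key_dim = 64
    test_layer = attention.PerDimScaleAttention(
        num_heads=num_heads, key_dim=key_dim)
    query = tf.random.normal(shape=(2, 8, key_dim * num_heads))
    value = tf.random.normal(shape=(2, 16, key_dim * num_heads))
    output = test_layer(query=query, value=value)
    # The output follows the query's sequence length, not the value's.
    self.assertEqual(output.shape, [2, 8, key_dim * num_heads])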


if __name__ == '__main__':
  tf.test.main()