From 933f453ff139532ae330d88230568d40b0ee1147 Mon Sep 17 00:00:00 2001
From: Abhijith Gandrakota
Date: Thu, 20 Jul 2023 11:39:25 -0400
Subject: [PATCH] Bug fix

---
 part4/4.JetTaggingPointCloud.ipynb | 12 ++++++------
 1 file changed, 6 insertions(+), 6 deletions(-)

diff --git a/part4/4.JetTaggingPointCloud.ipynb b/part4/4.JetTaggingPointCloud.ipynb
index e0227f2..b1fea0d 100644
--- a/part4/4.JetTaggingPointCloud.ipynb
+++ b/part4/4.JetTaggingPointCloud.ipynb
@@ -508,14 +508,14 @@
     "        self.attention = tf.keras.layers.MultiHeadAttention(num_heads=self.num_heads, \n",
     "                                                            key_dim=self.hidden_units//self.num_heads)\n",
     "        self.feedforward = tf.keras.Sequential([\n",
-    "            Dense(units=self.mlp_hidden_units, activation=\"relu\"),\n",
+    "            layers.Dense(units=self.mlp_hidden_units, activation=\"relu\"),\n",
     "            # Dropout(rate=self.dropout_rate),\n",
-    "            Dense(units=input_shape[-1])\n",
+    "            layers.Dense(units=input_shape[-1])\n",
     "        ])\n",
-    "        self.layer_norm1 = LayerNormalization(epsilon=1e-6)\n",
-    "        self.layer_norm2 = LayerNormalization(epsilon=1e-6)\n",
-    "        self.dropout1 = Dropout(rate=self.dropout_rate)\n",
-    "        self.dropout2 = Dropout(rate=self.dropout_rate)\n",
+    "        self.layer_norm1 = layers.LayerNormalization(epsilon=1e-6)\n",
+    "        self.layer_norm2 = layers.LayerNormalization(epsilon=1e-6)\n",
+    "        self.dropout1 = layers.Dropout(rate=self.dropout_rate)\n",
+    "        self.dropout2 = layers.Dropout(rate=self.dropout_rate)\n",
     "        super(SABTransformerBlock, self).build(input_shape)\n",
     "    \n",
     "    def call(self, inputs, mask=None):\n",
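
Note: for context, a minimal self-contained sketch of the patched cell. It assumes the notebook brings in the Keras layer classes only via `from tensorflow.keras import layers` (which is why the bare `Dense`, `LayerNormalization`, and `Dropout` names would raise a NameError before this fix). The body of `call` is also an assumption, a standard residual attention + feed-forward pattern, since the hunk shows only its signature.

import tensorflow as tf
from tensorflow.keras import layers

class SABTransformerBlock(tf.keras.layers.Layer):
    # Hyperparameter names mirror those used in the patched cell.
    def __init__(self, num_heads=8, hidden_units=64, mlp_hidden_units=128,
                 dropout_rate=0.1, **kwargs):
        super().__init__(**kwargs)
        self.num_heads = num_heads
        self.hidden_units = hidden_units
        self.mlp_hidden_units = mlp_hidden_units
        self.dropout_rate = dropout_rate

    def build(self, input_shape):
        self.attention = tf.keras.layers.MultiHeadAttention(
            num_heads=self.num_heads,
            key_dim=self.hidden_units // self.num_heads)
        # Every layer is referenced through the `layers` namespace, as in the fix.
        self.feedforward = tf.keras.Sequential([
            layers.Dense(units=self.mlp_hidden_units, activation="relu"),
            layers.Dense(units=input_shape[-1])
        ])
        self.layer_norm1 = layers.LayerNormalization(epsilon=1e-6)
        self.layer_norm2 = layers.LayerNormalization(epsilon=1e-6)
        self.dropout1 = layers.Dropout(rate=self.dropout_rate)
        self.dropout2 = layers.Dropout(rate=self.dropout_rate)
        super(SABTransformerBlock, self).build(input_shape)

    def call(self, inputs, mask=None):
        # Assumed body (not shown in the hunk): residual self-attention
        # followed by a residual feed-forward block, each normalized.
        attn = self.attention(inputs, inputs, attention_mask=mask)
        x = self.layer_norm1(inputs + self.dropout1(attn))
        ff = self.feedforward(x)
        return self.layer_norm2(x + self.dropout2(ff))

# Quick smoke test: 2 jets, 16 particles, 8 features per particle.
block = SABTransformerBlock(num_heads=4, hidden_units=32, mlp_hidden_units=64)
out = block(tf.random.normal((2, 16, 8)))
print(out.shape)  # (2, 16, 8)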