问题
I am building a Bi-LSTM network and I have included an attention layer in it. But it is giving an error that added layer must be an instance of class layer.
Some of the libraries which I have imported are
from keras.models import Model, Sequential
from keras.layers import LSTM, Activation, Dense, Dropout, Input, Embedding, Bidirectional, Conv1D, Flatten, GlobalMaxPooling1D, SpatialDropout1D
from tensorflow import keras
from tensorflow.keras import layers
from tensorflow.keras import backend as K
from tensorflow.keras.layers import *
The attention layer class is
class attention(Layer):
    """Additive self-attention over the timestep axis of a rank-3 input.

    Assumes input of shape (batch, timesteps, features) — TODO confirm
    against the upstream Bi-LSTM, which returns sequences.

    Args:
        return_sequences: if True, return the attention-weighted sequence
            (same shape as the input); if False, sum over the timestep
            axis and return a single context vector per sample.
        **kwargs: forwarded to the base ``Layer`` (e.g. ``name``, ``dtype``),
            which also makes the layer round-trip through ``get_config``.
    """

    def __init__(self, return_sequences=True, **kwargs):
        # Call super() FIRST: tf.keras forbids setting attributes on a
        # Layer before the base __init__ has run.
        super(attention, self).__init__(**kwargs)
        self.return_sequences = return_sequences

    def build(self, input_shape):
        # W: (features, 1) projection; b: (timesteps, 1) per-step bias.
        # Note the bias shape ties the layer to a fixed sequence length.
        self.W = self.add_weight(name="att_weight",
                                 shape=(input_shape[-1], 1),
                                 initializer="normal")
        self.b = self.add_weight(name="att_bias",
                                 shape=(input_shape[1], 1),
                                 initializer="zeros")
        super(attention, self).build(input_shape)

    def call(self, x):
        # e: (batch, timesteps, 1) attention energies.
        e = K.tanh(K.dot(x, self.W) + self.b)
        # Softmax over the timestep axis -> attention weights summing to 1.
        a = K.softmax(e, axis=1)
        output = x * a
        if self.return_sequences:
            return output
        # Collapse the timestep axis to one context vector per sample.
        return K.sum(output, axis=1)

    def get_config(self):
        # Required so model.save() / load_model() can reconstruct the layer.
        config = super(attention, self).get_config()
        config.update({"return_sequences": self.return_sequences})
        return config
The model looks like this
# Use tf.keras's Sequential. The bare name `Sequential` resolves to the
# standalone keras package (`from keras.models import Sequential`), while
# the layers below come from the later `tensorflow.keras.layers` star
# import — mixing the two frameworks is what raises
# "TypeError: The added layer must be an instance of class Layer".
model = keras.Sequential()
# Frozen/pre-trained embedding matrix is injected via `weights`.
model.add(Embedding(max_words, 1152, input_length=max_len, weights=[embeddings]))
model.add(BatchNormalization())
model.add(Activation('tanh'))
model.add(Dropout(0.5))
# return_sequences=True so the attention layer sees every timestep.
model.add(Bidirectional(LSTM(32, return_sequences=True)))
model.add(attention(return_sequences=True))
model.add(BatchNormalization())
model.add(Activation('tanh'))
model.add(Dropout(0.5))
# Single sigmoid unit -> binary classification.
model.add(Dense(1, activation='sigmoid'))
model.summary()
But it is giving an error
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
<ipython-input-41-ba5b52fe2c87> in <module>()
1 model = Sequential()
----> 2 model.add(Embedding(max_words, 1152, input_length=max_len, weights=[embeddings]))
3 model.add(BatchNormalization())
4 model.add(Activation('tanh'))
5 #model.add(SpatialDropout1D(0.5))
/usr/local/lib/python3.6/dist-packages/keras/engine/sequential.py in add(self, layer)
131 raise TypeError('The added layer must be '
132 'an instance of class Layer. '
--> 133 'Found: ' + str(layer))
134 self.built = False
135 if not self._layers:
TypeError: The added layer must be an instance of class Layer. Found: <tensorflow.python.keras.layers.embeddings.Embedding object at 0x7f0da41aec50>
回答1:
This documentation page states that when defining a custom `Layer`, you should use the following syntax:
class Linear(tf.keras.layers.Layer):
    """A dense (fully connected) layer computing ``inputs @ w + b``."""

    def __init__(self, units=32, input_dim=32):
        super(Linear, self).__init__()
        # Kernel: (input_dim, units), drawn from a random normal.
        kernel_init = tf.random_normal_initializer()
        self.w = tf.Variable(
            initial_value=kernel_init(shape=(input_dim, units), dtype="float32"),
            trainable=True,
        )
        # Bias: (units,), initialised to zeros.
        bias_init = tf.zeros_initializer()
        self.b = tf.Variable(
            initial_value=bias_init(shape=(units,), dtype="float32"),
            trainable=True,
        )

    def call(self, inputs):
        # Affine transform of the inputs.
        return tf.matmul(inputs, self.w) + self.b
So, your `Layer` import is technically correct. However, you instantiate your model with pure `keras` layers, and that results in the error. Use the `tf.keras` functionality everywhere and the error will go away, as in:
https://www.tensorflow.org/guide/keras/sequential_model
import tensorflow as tf
from tensorflow import keras
from tensorflow.keras import layers
# Your own layer here
class AttentionCustom(layers.Layer):
    """Placeholder for the user's custom attention layer.

    Subclassing ``layers.Layer`` (the ``tensorflow.keras.layers`` alias)
    is what makes instances addable to a ``keras.Sequential`` model.
    """
    pass
# Build the model entirely from tf.keras objects so every layer passes
# Sequential's isinstance check.
model = keras.Sequential(
    [
        layers.Dense(2, activation="relu", name="layer1"),
        layers.Dense(3, activation="relu", name="layer2"),
        # Use the custom class defined above; the original snippet
        # referenced an undefined name `Attention()`.
        AttentionCustom(),
        layers.Dense(4, name="layer3"),
    ]
)
来源:https://stackoverflow.com/questions/62949662/added-layer-must-be-an-instance-of-class-layer