AttributeError: can't set attribute - Hierarchical Attention Network

Problem description

When I define my hierarchical attention network, I get the error "AttributeError: can't set attribute". Please help.

Here is the Attention.py file:


import keras
from keras.engine.topology import Layer,Input
from keras import backend as K
from keras import initializers

#Hierarchical Attention Layer Implementation
'''
Implemented by Arkadipta De (MIT Licensed)
'''

class Hierarchical_Attention(Layer):
    def __init__(self,attention_dim):
        self.init = initializers.get('normal')
        self.supports_masking = True
        self.attention_dim = attention_dim
        super(Hierarchical_Attention,self).__init__()

    def build(self,input_shape):
        assert len(input_shape) == 3
        self.W = K.variable(self.init((input_shape[-1],self.attention_dim)))
        self.b = K.variable(self.init((self.attention_dim,)))
        self.u = K.variable(self.init((self.attention_dim,1)))
        self.trainable_weights = [self.W,self.b,self.u]
        super(Hierarchical_Attention,self).build(input_shape)

    def compute_mask(self,inputs,mask=None):
        return mask

    def call(self,x,mask=None):
        # size of x :[batch_size,seq_len,attention_dim]
        # size of u :[batch_size,attention_dim]
        # uit = tanh(xW+b)
        uit = K.tanh(K.bias_add(K.dot(x,self.W),self.b))
        ait = K.dot(uit,self.u)
        ait = K.squeeze(ait,-1)

        ait = K.exp(ait)

        if mask is not None:
            # Cast the mask to floatX to avoid float64 upcasting in theano
            ait *= K.cast(mask,K.floatx())
        ait /= K.cast(K.sum(ait,axis=1,keepdims=True) + K.epsilon(),K.floatx())
        ait = K.expand_dims(ait)
        weighted_input = x * ait
        output = K.sum(weighted_input,axis=1)

        return output

    def compute_output_shape(self,input_shape):
        return (input_shape[0],input_shape[-1])

And here is the main file where I build the model:

import re
import os
import numpy as np
import pandas as pd
import keras
from keras.engine.topology import Layer,Input
import Attention
from sklearn.model_selection import train_test_split
from keras.models import Model,Input
from keras.layers import Dropout,Dense,LSTM,GRU,Bidirectional,concatenate,Multiply,Subtract
from keras.utils import to_categorical
from keras import backend as K
from keras import initializers

Max_Title_Length = 0
Max_Content_Length = 0

for i in range(0,len(X)):
  Max_Title_Length = max(Max_Title_Length,len(X['title'][i]))
  Max_Content_Length = max(Max_Content_Length,len(X['text'][i]))

vector_size = 100

input_title = Input(shape = (Max_Title_Length,vector_size,),name = 'input_title')
input_content = Input(shape = (Max_Content_Length,vector_size,),name = 'input_content')

def Classifier(input_title,input_content):
    #x = Bidirectional(GRU(units = 100,return_sequences = True,kernel_initializer = keras.initializers.lecun_normal(seed = None),unit_forget_bias = True))(input_title)
    x = Bidirectional(GRU(100,return_sequences=True))(input_title)
    x_attention = Attention.Hierarchical_Attention(100)(x)
    #y = Bidirectional(LSTM(units = 100,unit_forget_bias = True))(input_content)
    y = Bidirectional(GRU(100,return_sequences=True))(input_content)
    y_attention = Attention.Hierarchical_Attention(100)(y)
    z = concatenate([x_attention,y_attention])
    z = Dense(units = 512,activation = 'relu')(z)
    z = Dropout(0.2)(z)
    z = Dense(units = 256,activation = 'relu')(z)
    z = Dropout(0.2)(z)
    z = Dense(units = 128,activation = 'relu')(z)
    z = Dropout(0.2)(z)
    z = Dense(units = 50,activation = 'relu')(z)
    z = Dropout(0.2)(z)
    z = Dense(units = 10,activation = 'relu')(z)
    z = Dropout(0.2)(z)
    output = Dense(units = 2,activation = 'softmax')(z)
    model = Model(inputs = [input_title,input_content],outputs = output)
    model.summary()
    return model

def compile_and_train(model,num_epochs): 
    model.compile(optimizer= 'adam',loss= 'categorical_crossentropy',metrics=['acc']) 
    history = model.fit([train_x_title,train_x_content],train_label,batch_size=32,epochs=num_epochs)
    return history

Classifier_Model = Classifier(input_title,input_content)

This code gives me the following error:

---------------------------------------------------------------------------
AttributeError                            Traceback (most recent call last)
/usr/local/lib/python3.6/dist-packages/tensorflow/python/keras/engine/base_layer.py in __setattr__(self,name,value)
   2761       try:
-> 2762         super(tracking.AutoTrackable,self).__setattr__(name,value)
   2763       except AttributeError:

AttributeError: can't set attribute

During handling of the above exception,another exception occurred:

AttributeError                            Traceback (most recent call last)
6 frames
<ipython-input-43-32804502e0b0> in <module>()
     32     return history
     33 
---> 34 Classifier_Model = Classifier(input_title,input_content)

<ipython-input-43-32804502e0b0> in Classifier(input_title,input_content)
      7     #x = Bidirectional(GRU(units = 100,unit_forget_bias = True))(input_title)
      8     x = Bidirectional(GRU(200,return_sequences=True))(input_title)
----> 9     x_attention = Attention.Hierarchical_Attention(100)(x)
     10     #y = Bidirectional(LSTM(units = 100,unit_forget_bias = True))(input_content)
     11     y = Bidirectional(GRU(100,return_sequences=True))(input_content)

/usr/local/lib/python3.6/dist-packages/tensorflow/python/keras/engine/base_layer.py in __call__(self,*args,**kwargs)
    924     if _in_functional_construction_mode(self,args,kwargs,input_list):
    925       return self._functional_construction_call(inputs,
--> 926                                                 input_list)
    927 
    928     # Maintains info about the `Layer.call` stack.

/usr/local/lib/python3.6/dist-packages/tensorflow/python/keras/engine/base_layer.py in _functional_construction_call(self,input_list)
   1096         # Build layer if applicable (if the `build` method has been
   1097         # overridden).
-> 1098         self._maybe_build(inputs)
   1099         cast_inputs = self._maybe_cast_inputs(inputs,input_list)
   1100 

/usr/local/lib/python3.6/dist-packages/tensorflow/python/keras/engine/base_layer.py in _maybe_build(self,inputs)
   2641         # operations.
   2642         with tf_utils.maybe_init_scope(self):
-> 2643           self.build(input_shapes)  # pylint:disable=not-callable
   2644       # We must set also ensure that the layer is marked as built,and the build
   2645       # shape is stored since user defined build functions may not be calling

/content/Attention.py in build(self,input_shape)
     23         self.b = K.variable(self.init((self.attention_dim,)))
     24         self.u = K.variable(self.init((self.attention_dim,1)))
---> 25         self.trainable_weights = [self.W,self.b,self.u]
     26         super(Hierarchical_Attention,self).build(input_shape)
     27 

/usr/local/lib/python3.6/dist-packages/tensorflow/python/keras/engine/base_layer.py in __setattr__(self,name,value)
   2765             ('Can\'t set the attribute "{}",likely because it conflicts with '
   2766              'an existing read-only @property of the object. Please choose a '
-> 2767              'different name.').format(name))
   2768       return
   2769 

AttributeError: Can't set the attribute "trainable_weights",likely because it conflicts with an existing read-only @property of the object. Please choose a different name.

I'm a newbie to neural networks. Please help.

Solution

I ran into the same problem when I tried to run the code on Google Colab.

I found some answers on StackOverflow saying this is an ongoing issue with tf on Colab. link here

It still isn't fully resolved for me, but I believe you can try setting self._trainable_weights instead of self.trainable_weights.
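
For reference, here is a minimal sketch of how the build method could look with that change, using the names and initializer from the question's layer. This is an assumption-based sketch: whether the private _trainable_weights list is still assignable depends on your TensorFlow/Keras version.

    def build(self, input_shape):
        assert len(input_shape) == 3
        self.W = K.variable(self.init((input_shape[-1], self.attention_dim)))
        self.b = K.variable(self.init((self.attention_dim,)))
        self.u = K.variable(self.init((self.attention_dim, 1)))
        # Append to the private list instead of assigning to the
        # read-only trainable_weights property.
        self._trainable_weights = [self.W, self.b, self.u]
        super(Hierarchical_Attention, self).build(input_shape)

Alternatively (also just a sketch, not the original author's code), you can create the variables with self.add_weight, which registers them as trainable for you, so the layer never has to touch trainable_weights at all:

    def build(self, input_shape):
        assert len(input_shape) == 3
        # 'random_normal' plays the role of the 'normal' initializer above.
        self.W = self.add_weight(name='W',
                                 shape=(input_shape[-1], self.attention_dim),
                                 initializer='random_normal', trainable=True)
        self.b = self.add_weight(name='b',
                                 shape=(self.attention_dim,),
                                 initializer='random_normal', trainable=True)
        self.u = self.add_weight(name='u',
                                 shape=(self.attention_dim, 1),
                                 initializer='random_normal', trainable=True)
        super(Hierarchical_Attention, self).build(input_shape)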
