Using PReLU with Keras in TensorFlow

Asked by mdfafbf1 on 2022-11-13

I am building a reinforcement learning model and am trying to use PReLU in my 2D conv model, using TensorFlow. Below is the code for the actor model.
Code:

from tensorflow.keras.layers import Conv2D, Input, MaxPool1D, concatenate, Lambda, Dense, Flatten
import tensorflow as tf

# activation = tf.keras.layers.LeakyReLU(alpha=0.5)
activation = tf.keras.layers.PReLU(alpha_initializer=tf.initializers.constant(0.25))

def ActorNetwork(input_shape_A,input_shape_B, n_actions):
    input_layer_A = Input(shape=input_shape_A[1:], name="input_layer_A")
    input_layer_B = Input(shape=input_shape_B[1:], name="input_layer_B")

    Rescale = Lambda(lambda x: tf.divide(tf.subtract(x, tf.reduce_max(x)), tf.subtract(tf.reduce_max(x), tf.reduce_min(x))))(input_layer_A)

    Conv1 = Conv2D(32, 3, activation=activation, padding='same', name="Conv1")(Rescale)
    Conv2 = Conv2D(32, 3, activation=activation, padding='same', name="Conv2")(Conv1)
    Conv_pool_1 = Conv2D(32, 2, strides=2, activation='relu', padding='same', name="Conv_pool_1")(Conv2)

    Batchnorm_1 = tf.keras.layers.BatchNormalization(name='Batchnorm_1')(Conv_pool_1)
  
    Conv3 = Conv2D(32, 3, activation=activation, padding='same', name="Conv3")(Batchnorm_1)
    Conv4 = Conv2D(32, 3, activation=activation, padding='same', name="Conv4")(Conv3)
    Conv_pool_2 = Conv2D(32, 2, strides=2, activation='relu', padding='same', name="Conv_pool_2")(Conv4)

    Batchnorm_2 = tf.keras.layers.BatchNormalization(name='Batchnorm_2')(Conv_pool_2)
  

    Conv5 = Conv2D(64, 3, activation=activation, padding='same', name="Conv5")(Batchnorm_2)
    Conv6 = Conv2D(64, 3, activation=activation, padding='same', name="Conv6")(Conv5)
    Conv_pool_3 = Conv2D(64, 2, strides=2, activation='relu', padding='same', name="Conv_pool_3")(Conv6)

    Batchnorm_3 = tf.keras.layers.BatchNormalization(name='Batchnorm_3')(Conv_pool_3)
  

    Conv7 = Conv2D(64, 3, activation=activation, padding='same', name="Conv7")(Batchnorm_3)
    Conv8 = Conv2D(64, 3, activation=activation, padding='same', name="Conv8")(Conv7)
    Conv_pool_4 = Conv2D(64, 2, strides=2, activation='relu', padding='same', name="Conv_pool_4")(Conv8)

    Batchnorm_4 = tf.keras.layers.BatchNormalization(name='Batchnorm_4')(Conv_pool_4)
  
    Conv9 = Conv2D(128, 3, activation=activation, padding='same', name="Conv9")(Batchnorm_4)
    Conv10 = Conv2D(128, 3, activation=activation, padding='same', name="Conv10")(Conv9)
    Conv_pool_5 = Conv2D(128, 2, strides=2, activation='relu', padding='same', name="Conv_pool_5")(Conv10)

    Batchnorm_5 = tf.keras.layers.BatchNormalization(name='Batchnorm_5')(Conv_pool_5)
  
    Conv11 = Conv2D(128, 3, activation=activation, padding='same', name="Conv11")(Batchnorm_5)
    Conv12 = Conv2D(128, 3, activation=activation, padding='same', name="Conv12")(Conv11)
    Conv_pool_6 = Conv2D(128, 2, strides=2, activation='relu', padding='same', name="Conv_pool_6")(Conv12)

    Batchnorm_6 = tf.keras.layers.BatchNormalization(name='Batchnorm_6')(Conv_pool_6)
  

    Conv_pool_7 = Conv2D(128, 1, strides=1, activation='relu', padding='same', name="Conv_pool_7")(Batchnorm_6)
    Conv_pool_8 = Conv2D(64, 1, strides=1, activation='relu', padding='same', name="Conv_pool_8")(Conv_pool_7)
    Conv_pool_9 = Conv2D(32, 1, strides=1, activation='relu', padding='same', name="Conv_pool_9")(Conv_pool_8)

    flatten = Flatten()(Conv_pool_9)
    

    Concat_2 = tf.keras.layers.concatenate([flatten, input_layer_B], axis=-1,name='Concat_2')

    fc1 = Dense(8194, activation='relu', name="fc1")(Concat_2)
    fc2 = Dense(4096, activation='relu', name="fc2")(fc1)
    fc3 = Dense(n_actions, activation='softmax', name="fc3")(fc2)

    return tf.keras.models.Model(inputs=[input_layer_A, input_layer_B], outputs=fc3, name="actor_model")

model=ActorNetwork((1,1000,4000,1),(1,2),3)
model.compile()
model.summary()
print(model([tf.random.uniform((1,1000,4000,1)),tf.random.uniform((1,2))]))
tf.keras.utils.plot_model(model, show_shapes=True)

It worked fine with LeakyReLU, but when I use PReLU I get an error related to dimensions, and I don't understand it.
Error:

---------------------------------------------------------------------------
ValueError                                Traceback (most recent call last)
<ipython-input-17-0a596da4bc68> in <module>
    131 
    132 
--> 133 model=ActorNetwork((1,1000,4000,1),(1,2),3)
    134 model.compile()
    135 model.summary()

2 frames
/usr/local/lib/python3.7/dist-packages/tensorflow/python/framework/ops.py in _create_c_op(graph, node_def, inputs, control_inputs, op_def)
   2011   except errors.InvalidArgumentError as e:
   2012     # Convert to ValueError for backwards compatibility.
-> 2013     raise ValueError(e.message)
   2014 
   2015   return c_op

ValueError: Exception encountered when calling layer "p_re_lu_10" (type PReLU).

Dimensions must be equal, but are 1000 and 500 for '{{node Conv3/p_re_lu_10/mul}} = Mul[T=DT_FLOAT](Conv3/p_re_lu_10/Neg, Conv3/p_re_lu_10/Relu_1)' with input shapes: [1000,4000,32], [?,500,2000,32].

Call arguments received:
  • inputs=tf.Tensor(shape=(None, 500, 2000, 32), dtype=float32)

What am I doing wrong?

eoxn13cs 1#

The PReLU activation maintains a learnable parameter alpha with the same shape as its input (excluding the batch dimension). Alpha is built from the shape of the first input the layer sees: here the single shared instance built its alpha as (1000, 4000, 32) from Conv1's output, so it fails when Conv3 feeds it a (500, 2000, 32) feature map, which are exactly the two shapes in the traceback. A single PReLU instance therefore cannot be reused across layers with different output shapes; you need to define a new layer each time you want to use the activation.
That is:

# each convolution gets its own PReLU layer, i.e. its own alpha
Conv1 = Conv2D(32, 3, activation=None, padding='same', name="Conv1")(Rescale)
Conv1_p_relu = tf.keras.layers.PReLU(alpha_initializer=tf.initializers.constant(0.25))(Conv1)
Conv2 = Conv2D(32, 3, activation=None, padding='same', name="Conv2")(Conv1_p_relu)
Conv2_p_relu = tf.keras.layers.PReLU(alpha_initializer=tf.initializers.constant(0.25))(Conv2)
Conv_pool_1 = Conv2D(32, 2, strides=2, activation='relu', padding='same', name="Conv_pool_1")(Conv2_p_relu)
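
To avoid repeating those two lines for every convolution, the pattern can be wrapped in a small helper. The sketch below is only an illustration (the conv_prelu helper is not from the original code); it also passes shared_axes=[1, 2], a PReLU option that ties alpha across the spatial axes, so the layer keeps one learnable alpha per channel and no longer depends on the feature-map height and width:

import tensorflow as tf
from tensorflow.keras.layers import Conv2D, PReLU

def conv_prelu(x, filters, kernel_size, name):
    # Each call creates a brand-new PReLU layer, so every convolution
    # gets its own alpha instead of sharing one instance across shapes.
    x = Conv2D(filters, kernel_size, padding='same', name=name)(x)
    return PReLU(alpha_initializer=tf.initializers.constant(0.25),
                 shared_axes=[1, 2],  # one alpha per channel
                 name=name + '_prelu')(x)

# usage inside ActorNetwork, e.g.:
# Conv1 = conv_prelu(Rescale, 32, 3, 'Conv1')
# Conv2 = conv_prelu(Conv1, 32, 3, 'Conv2')

With per-channel alpha, a 32-filter layer adds only 32 extra parameters instead of one per activation as in the default; channel-wise alpha is also the variant used in the original PReLU paper.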
