Keras LSTM - 在可变序列长度上手动循环

jaxagkaj  于 2022-11-13  发布在  其他
关注(0)|答案(1)|浏览(196)

我想手动循环遍历输入序列的各个时间步,但各条序列的长度不同;TensorFlow 在注意到序列长度不一致后,会自动将时间轴的静态形状设置为 None,导致无法据此构造 Python 循环。是否有解决方法?
示例

import tensorflow as tf
import numpy as np

class MyExample(tf.keras.Model):
    """Minimal repro from the question: steps an LSTMCell manually over time.

    NOTE(review): this version fails for variable-length sequences in graph
    mode — `inputs.shape[1]` is the *static* time dimension, which Keras sets
    to None when sequence lengths differ, so `range(None)` raises. The answer
    below shows an eager-mode and a graph-mode fix.
    """

    def __init__(self, int_dim, **kwargs):
        super(MyExample, self).__init__(**kwargs)
        self.int_dim = int_dim
        # Single-step cell (not tf.keras.layers.LSTM) so call() can drive
        # the time loop by hand.
        self.lstm = tf.keras.layers.LSTMCell(self.int_dim)
        self.d2 = tf.keras.layers.Dense(self.int_dim)

    def call(self, inputs):
        # Hard-coded batch size of 1 in the initial (h, c) state — only
        # works because the generator below yields batches of one.
        states = (tf.zeros((1, self.int_dim)), 
                  tf.zeros((1, self.int_dim)))
        outputs = []
        # inputs.shape[1] is static: None under tracing with variable
        # sequence lengths — this line is the bug being asked about.
        for t in range(inputs.shape[1]):
            lstm_out, states = self.lstm(inputs[:, t, :], states)
            d2_out = self.d2(lstm_out)
            outputs.append(d2_out)
        output_stack = tf.stack(outputs, 1)
        return output_stack

def generator():
    """Endlessly yield (X, Y) pairs of shape (1, T, 5), T drawn from [2, 9]."""
    while True:
        length = np.random.randint(2, 10)
        shape = (1, length, 5)
        yield tf.random.uniform(shape), tf.random.uniform(shape)

# Failing repro: Keras traces call() once with the time axis as None,
# so range(inputs.shape[1]) becomes range(None) and raises.
model = MyExample(5)
model.compile('adam', 'BinaryCrossentropy')
model.fit(generator(), batch_size=1)
6tqwzwtp

6tqwzwtp1#

以下是Eager Execution模式的修复程序:

import tensorflow as tf
import numpy as np

class MyExample(tf.keras.Model):
    """Eager-mode fix: read batch and time sizes dynamically via tf.shape.

    Only valid with run_eagerly=True — a Python for-loop over
    tf.shape(inputs)[1] cannot be traced into a graph.
    """

    def __init__(self, int_dim, **kwargs):
        super(MyExample, self).__init__(**kwargs)
        self.int_dim = int_dim
        self.lstm = tf.keras.layers.LSTMCell(self.int_dim)  # single-step cell
        self.d2 = tf.keras.layers.Dense(self.int_dim)       # per-step projection

    def call(self, inputs):
        batch = tf.shape(inputs)[0]
        # Dynamic batch size instead of a hard-coded 1.
        state = (tf.zeros((batch, self.int_dim)),
                 tf.zeros((batch, self.int_dim)))
        step_outputs = []
        # In eager mode tf.shape(...)[1] is a concrete scalar, so range() works.
        for step in range(tf.shape(inputs)[1]):
            cell_out, state = self.lstm(inputs[:, step, :], state)
            step_outputs.append(self.d2(cell_out))
        # (batch, time, int_dim)
        return tf.stack(step_outputs, 1)

def generator():
    """Yield an endless stream of random (inputs, targets) batches.

    Each pair has shape (1, seq_len, 5), with seq_len drawn uniformly
    from {2, ..., 9} per batch.
    """
    while True:
        seq_len = int(np.random.randint(2, 10))
        yield (tf.random.uniform((1, seq_len, 5)),
               tf.random.uniform((1, seq_len, 5)))

model = MyExample(5)
# run_eagerly=True keeps call() in eager mode, so the Python loop over the
# dynamic time dimension executes per batch instead of being traced once.
model.compile('adam', 'BinaryCrossentropy', run_eagerly=True)
model.fit(generator(), batch_size=1)

Graph模式解决方案可能如下所示:

import tensorflow as tf
import numpy as np

class MyExample(tf.keras.Model):
    """Graph-mode fix: unroll the time dimension with tf.while_loop.

    tf.while_loop plus a dynamically-sized TensorArray lets the model be
    traced once and still handle a different sequence length per batch.
    """

    def __init__(self, int_dim, **kwargs):
        super(MyExample, self).__init__(**kwargs)
        self.int_dim = int_dim
        self.lstm = tf.keras.layers.LSTMCell(self.int_dim)
        self.d2 = tf.keras.layers.Dense(self.int_dim)

    def some_logic(self, i, inputs, s1, s2, o):
        """One timestep: run the cell on slice i and append the projection.

        The loop variables stay flat (i, inputs, s1, s2, o) so tf.while_loop
        can match their structure between iterations.
        """
        lstm_out, s = self.lstm(inputs[:, i, :], (s1, s2))
        d2_out = self.d2(lstm_out)
        o = o.write(o.size(), d2_out)  # each element: (batch, int_dim)
        s1, s2 = s
        return tf.add(i, 1), inputs, s1, s2, o

    def call(self, inputs):
        # Dynamic batch size for the initial (h, c) LSTM state.
        states = (tf.zeros((tf.shape(inputs)[0], self.int_dim)), 
                  tf.zeros((tf.shape(inputs)[0], self.int_dim)))

        s1, s2 = states
        # dynamic_size=True: sequence length is unknown at trace time.
        outputs = tf.TensorArray(dtype=tf.float32, size=0, dynamic_size=True)

        i = tf.constant(0)
        while_condition = lambda i, inputs, s1, s2, outputs: tf.less(i, tf.shape(inputs)[1])
        _, _, _, _, result = tf.while_loop(while_condition, self.some_logic, loop_vars=(i, inputs, s1, s2, outputs))
        # BUG FIX: TensorArray.stack() is time-major, (time, batch, int_dim);
        # transpose to (batch, time, int_dim) so the output lines up with the
        # (1, seq_len, 5) targets instead of silently broadcasting against them.
        return tf.transpose(result.stack(), [1, 0, 2])

def generator():
    """Infinite source of matching (features, labels) uniform-random batches."""
    low, high = 2, 10
    while True:
        t = np.random.randint(low, high)
        x = tf.random.uniform((1, t, 5))
        y = tf.random.uniform((1, t, 5))
        yield x, y

# No run_eagerly needed here: tf.while_loop traces into the graph, so the
# default compiled execution path handles variable sequence lengths.
model = MyExample(5)
model.compile('adam', 'BinaryCrossentropy')
model.fit(generator(), batch_size=1)

相关问题