我从https://github.com/FengleiFan/QAE中获取代码,并更改了数据集。我用tensorflow 1.x版本在google colab上执行代码,但我得到了我无法解决的错误。
下面提到的代码是二次自动编码器(QAE)的架构。他们使用二次卷积函数来代替Conv2D。
def CTcnn(shape=(SIZE, SIZE, 1)):
    """Build the quadratic autoencoder (QAE) as a Keras functional model.

    Architecture: 5 quadratic conv layers (encoder) followed by 5 quadratic
    deconv layers (decoder) with skip connections from matching encoder
    levels (encode_conv4/2 and the raw input are added back in).

    Parameters
    ----------
    shape : tuple
        Input image shape, default (SIZE, SIZE, 1). Assumed single-channel —
        the first conv filter shape [3, 3, 1, 15] expects 1 input channel.

    Returns
    -------
    tf.keras.Model named 'CTcnn' mapping the input image to the
    reconstructed image.
    """
    # BUG FIX: the original ignored the `shape` argument and hard-coded
    # Input(shape=(SIZE, SIZE, 1)); use the parameter so callers can vary it.
    x_input = Input(shape=shape)

    # Encoder: 'same' padding keeps spatial size; the last layer uses
    # 'valid' padding, so the matching decoder layer must also be 'valid'.
    encode_conv1 = Quad_conv_layer_same(x_input, shape=[3, 3, shape[-1], 15])
    encode_conv2 = Quad_conv_layer_same(encode_conv1, shape=[3, 3, 15, 15])
    encode_conv3 = Quad_conv_layer_same(encode_conv2, shape=[3, 3, 15, 15])
    encode_conv4 = Quad_conv_layer_same(encode_conv3, shape=[3, 3, 15, 15])
    encode_conv5 = Quad_conv_layer_valid(encode_conv4, shape=[3, 3, 15, 15])

    # Decoder with residual (skip) connections. The `outputshape` argument is
    # taken from the dynamic shape of the matching encoder activation so the
    # transposed convolutions reproduce the encoder's spatial dimensions.
    decode_conv4 = tf.nn.relu(
        Quad_deconv_layer_valid_linear(
            encode_conv5, shape=[3, 3, 15, 15],
            outputshape=tf.shape(encode_conv4)) + encode_conv4)
    decode_conv3 = Quad_deconv_layer_same(
        decode_conv4, shape=[3, 3, 15, 15],
        outputshape=tf.shape(encode_conv3))
    decode_conv2 = tf.nn.relu(
        Quad_deconv_layer_same_linear(
            decode_conv3, shape=[3, 3, 15, 15],
            outputshape=tf.shape(encode_conv2)) + encode_conv2)
    decode_conv1 = Quad_deconv_layer_same(
        decode_conv2, shape=[3, 3, 15, 15],
        outputshape=tf.shape(encode_conv1))
    x_output = tf.nn.relu(
        Quad_deconv_layer_same_linear(
            decode_conv1, shape=[3, 3, shape[-1], 15],
            outputshape=tf.shape(x_input)) + x_input)

    # NOTE(review): the traceback shows Keras tracing this graph
    # (create_keras_history) and evaluating raw tf.Variables created by the
    # Quad_* helpers BEFORE any initializer has run, producing
    # "Attempting to use uninitialized value Variable_NNN". In TF 1.x the
    # helpers should build variables via Keras layers (e.g. wrap the ops in a
    # tf.keras.layers.Layer / Lambda with add_weight) instead of bare
    # tf.Variable, or the session must run a variable initializer before the
    # model is constructed — TODO confirm against the Quad_* implementations.
    model = tf.keras.Model(inputs=x_input, outputs=x_output, name='CTcnn')
    return model
# FIX: removed the markdown bold markers (**...**) around the first line —
# they are not valid Python and would raise a SyntaxError before the
# reported FailedPreconditionError could even occur.
model = CTcnn(shape=(SIZE, SIZE, 1))  # this is the line that raised FailedPreconditionError
model.compile(optimizer='adam', loss='mean_squared_error')
model.summary()
以下是完整的错误:
FailedPreconditionError Traceback (most recent call last)
<ipython-input-49-07fdc66cd455> in <module>()
----> 1 model = CTcnn(shape=(SIZE,SIZE,1))
2 model.compile(optimizer='adam', loss='mean_squared_error')
3 model.summary()
<ipython-input-48-503bfc463612> in CTcnn(shape)
16 x_output = tf.nn.relu(Quad_deconv_layer_same_linear(decode_conv1,shape=[3, 3,1,15],outputshape=tf.shape(x_input))+x_input)
17
---> 18 model=tf.keras.Model(inputs=x_input, outputs=x_output, name='CTcnn')
19 return model
20
/tensorflow-1.15.2/python3.6/tensorflow_core/python/keras/engine/training.py in __init__(self, *args, **kwargs)
145
146 def __init__(self, *args, **kwargs):
--> 147 super(Model, self).__init__(*args, **kwargs)
148 _keras_api_gauge.get_cell('model').set(True)
149 # initializing _distribution_strategy here since it is possible to call
/tensorflow-1.15.2/python3.6/tensorflow_core/python/keras/engine/network.py in __init__(self, *args, **kwargs)
162 'inputs' in kwargs and 'outputs' in kwargs):
163 # Graph network
--> 164 self._init_graph_network(*args, **kwargs)
165 else:
166 # Subclassed network
/tensorflow-1.15.2/python3.6/tensorflow_core/python/training/tracking/base.py in _method_wrapper(self, *args, **kwargs)
455 self._self_setattr_tracking = False # pylint: disable=protected-access
456 try:
--> 457 result = method(self, *args, **kwargs)
458 finally:
459 self._self_setattr_tracking = previous_value # pylint: disable=protected-access
/tensorflow-1.15.2/python3.6/tensorflow_core/python/keras/engine/network.py in _init_graph_network(self, inputs, outputs, name, **kwargs)
265
266 if any(not hasattr(tensor, '_keras_history') for tensor in self.outputs):
--> 267 base_layer_utils.create_keras_history(self._nested_outputs)
268
269 self._base_init(name=name, **kwargs)
/tensorflow-1.15.2/python3.6/tensorflow_core/python/keras/engine/base_layer_utils.py in create_keras_history(tensors)
182 keras_tensors: The Tensors found that came from a Keras Layer.
183 """
--> 184 _, created_layers = _create_keras_history_helper(tensors, set(), [])
185 return created_layers
186
/tensorflow-1.15.2/python3.6/tensorflow_core/python/keras/engine/base_layer_utils.py in _create_keras_history_helper(tensors, processed_ops, created_layers)
229 constants[i] = backend.function([], op_input)([])
230 processed_ops, created_layers = _create_keras_history_helper(
--> 231 layer_inputs, processed_ops, created_layers)
232 name = op.name
233 node_def = op.node_def.SerializeToString()
/tensorflow-1.15.2/python3.6/tensorflow_core/python/keras/engine/base_layer_utils.py in _create_keras_history_helper(tensors, processed_ops, created_layers)
229 constants[i] = backend.function([], op_input)([])
230 processed_ops, created_layers = _create_keras_history_helper(
--> 231 layer_inputs, processed_ops, created_layers)
232 name = op.name
233 node_def = op.node_def.SerializeToString()
/tensorflow-1.15.2/python3.6/tensorflow_core/python/keras/engine/base_layer_utils.py in _create_keras_history_helper(tensors, processed_ops, created_layers)
227 else:
228 with ops.init_scope():
--> 229 constants[i] = backend.function([], op_input)([])
230 processed_ops, created_layers = _create_keras_history_helper(
231 layer_inputs, processed_ops, created_layers)
/tensorflow-1.15.2/python3.6/tensorflow_core/python/keras/backend.py in __call__(self, inputs)
3474
3475 fetched = self._callable_fn(*array_vals,
-> 3476 run_metadata=self.run_metadata)
3477 self._call_fetch_callbacks(fetched[-len(self._fetches):])
3478 output_structure = nest.pack_sequence_as(
/tensorflow-1.15.2/python3.6/tensorflow_core/python/client/session.py in __call__(self, *args, **kwargs)
1470 ret = tf_session.TF_SessionRunCallable(self._session._session,
1471 self._handle, args,
-> 1472 run_metadata_ptr)
1473 if run_metadata:
1474 proto_data = tf_session.TF_GetBuffer(run_metadata_ptr)
FailedPreconditionError: 2 root error(s) found.
(0) Failed precondition: Attempting to use uninitialized value Variable_179
[[{{node Variable_179/read}}]]
[[Variable_179/read/_1]]
(1) Failed precondition: Attempting to use uninitialized value Variable_179
[[{{node Variable_179/read}}]]
0 successful operations.
0 derived errors ignored.
有什么办法可以补救吗?
1条答案
按热度按时间92vpleto1#
我遇到了类似的问题。我所做的就是重新启动内核并清除Jupyter笔记本的所有输出,然后它就正常工作了。这可能只是一个小提示,希望能帮助到遇到同样问题的人。