- Title: Error from load_persistables when loading a pretrained model
- Version and environment information:
1) PaddlePaddle version: 1.8
2) CPU/GPU: GPU, Tesla V100
3) System environment: AI Studio
4) Python version: 3.7
5) GPU memory: 16 GB
- Problem description:
I am training my own model on AI Studio. Sometimes the number of epochs I set is too small, so I want to save the model and parameters and continue training later. My plan is to create two code cells in the AI Studio notebook: the first runs the initial training and only saves the model, and the second loads the previously saved model, continues training, and saves the updated model. The two cells are essentially identical except for the save/load part. After reading the official documentation I chose save_persistables and load_persistables for saving and loading, but load_persistables raises an error.
I am new to PaddlePaddle, so any guidance would be appreciated.
The save code in the first cell:
```python
# Save all persistable variables into a single combined file named 'params'
fluid.io.save_persistables(executor=exe, dirname='/home/aistudio/sEMG_model', main_program=None, filename='params')
```
The load code in the second cell:
```python
model = sEMGNet(sEMG)                                              # build the regressor
cost = fluid.layers.square_error_cost(input=model, label=label)   # loss function
avg_cost = fluid.layers.mean(cost)
test_program = fluid.default_main_program().clone(for_test=True)  # test program
optimizer = fluid.optimizer.AdamOptimizer(learning_rate=lr)       # optimizer
opts = optimizer.minimize(avg_cost)
place = fluid.CUDAPlace(0)                                         # run on GPU
exe = fluid.Executor(place)
exe.run(fluid.default_startup_program())                           # initialize parameters
fluid.io.load_persistables(executor=exe, dirname='/home/aistudio/sEMG_model', main_program=None, filename='params')
```
Error message:
/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/paddle/fluid/executor.py:1070: UserWarning: The following exception is not an EOF exception.
"The following exception is not an EOF exception.")
---------------------------------------------------------------------------EnforceNotMet Traceback (most recent call last) in
79 exe.run(fluid.default_startup_program())
80
---> 81 fluid.io.load_persistables(executor=exe, dirname='/home/aistudio/sEMG_model', main_program=None, filename='params')
82 # 定义输入数据维度
83 feeder = fluid.DataFeeder(place=place, feed_list=[sEMG, label])
/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/paddle/fluid/io.py in load_persistables(executor, dirname, main_program, filename)
925 main_program=main_program,
926 predicate=is_persistable,
--> 927 filename=filename)
928
929
/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/paddle/fluid/io.py in load_vars(executor, dirname, main_program, vars, predicate, filename)
748 main_program=main_program,
749 vars=list(filter(predicate, main_program.list_vars())),
--> 750 filename=filename)
751 else:
752 load_prog = Program()
/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/paddle/fluid/io.py in load_vars(executor, dirname, main_program, vars, predicate, filename)
802 'model_from_memory': vars_from_memory
803 })
--> 804 executor.run(load_prog)
805
806 # check var shape
/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/paddle/fluid/executor.py in run(self, program, feed, fetch_list, feed_var_name, fetch_var_name, scope, return_numpy, use_program_cache, return_merged, use_prune)
1069 warnings.warn(
1070 "The following exception is not an EOF exception.")
-> 1071 six.reraise(*sys.exc_info())
1072
1073 def _run_impl(self, program, feed, fetch_list, feed_var_name,
/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/six.py in reraise(tp, value, tb)
701 if value.__traceback__ is not tb:
702 raise value.with_traceback(tb)
--> 703 raise value
704 finally:
705 value = None
/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/paddle/fluid/executor.py in run(self, program, feed, fetch_list, feed_var_name, fetch_var_name, scope, return_numpy, use_program_cache, return_merged, use_prune)
1064 use_program_cache=use_program_cache,
1065 use_prune=use_prune,
-> 1066 return_merged=return_merged)
1067 except Exception as e:
1068 if not isinstance(e, core.EOFException):
/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/paddle/fluid/executor.py in _run_impl(self, program, feed, fetch_list, feed_var_name, fetch_var_name, scope, return_numpy, use_program_cache, return_merged, use_prune)
1152 scope=scope,
1153 return_numpy=return_numpy,
-> 1154 use_program_cache=use_program_cache)
1155
1156 program._compile(scope, self.place)
/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/paddle/fluid/executor.py in _run_program(self, program, feed, fetch_list, feed_var_name, fetch_var_name, scope, return_numpy, use_program_cache)
1227 if not use_program_cache:
1228 self._default_executor.run(program.desc, scope, 0, True, True,
-> 1229 fetch_var_name)
1230 else:
1231 self._default_executor.run_prepared_ctx(ctx, scope, False, False,
EnforceNotMet:
C++ Call Stacks (More useful to developers):
0 std::string paddle::platform::GetTraceBackString<std::string const&>(std::string const&, char const*, int)
1 paddle::platform::EnforceNotMet::EnforceNotMet(std::string const&, char const*, int)
2 paddle::framework::TensorFromStream(std::istream&, paddle::framework::Tensor*, paddle::platform::DeviceContext const&)
3 paddle::framework::DeserializeFromStream(std::istream&, paddle::framework::LoDTensor*, paddle::platform::DeviceContext const&)
4 paddle::operators::LoadCombineOpKernel<paddle::platform::CUDADeviceContext, float>::LoadParamsFromBuffer(paddle::framework::ExecutionContext const&, paddle::platform::Place const&, std::istream*, bool, std::vector<std::string, std::allocator<std::string> > const&) const
5 paddle::operators::LoadCombineOpKernel<paddle::platform::CUDADeviceContext, float>::Compute(paddle::framework::ExecutionContext const&) const
6 std::_Function_handler<void (paddle::framework::ExecutionContext const&), paddle::framework::OpKernelRegistrarFunctor<paddle::platform::CUDAPlace, false, 0ul, paddle::operators::LoadCombineOpKernel<paddle::platform::CUDADeviceContext, float>, paddle::operators::LoadCombineOpKernel<paddle::platform::CUDADeviceContext, double>, paddle::operators::LoadCombineOpKernel<paddle::platform::CUDADeviceContext, int>, paddle::operators::LoadCombineOpKernel<paddle::platform::CUDADeviceContext, signed char>, paddle::operators::LoadCombineOpKernel<paddle::platform::CUDADeviceContext, long> >::operator()(char const*, char const*, int) const::{lambda(paddle::framework::ExecutionContext const&)#1}>::_M_invoke(std::_Any_data const&, paddle::framework::ExecutionContext const&)
7 paddle::framework::OperatorWithKernel::RunImpl(paddle::framework::Scope const&, paddle::platform::Place const&, paddle::framework::RuntimeContext*) const
8 paddle::framework::OperatorWithKernel::RunImpl(paddle::framework::Scope const&, paddle::platform::Place const&) const
9 paddle::framework::OperatorBase::Run(paddle::framework::Scope const&, paddle::platform::Place const&)
10 paddle::framework::Executor::RunPartialPreparedContext(paddle::framework::ExecutorPrepareContext*, paddle::framework::Scope*, long, long, bool, bool, bool)
11 paddle::framework::Executor::RunPreparedContext(paddle::framework::ExecutorPrepareContext*, paddle::framework::Scope*, bool, bool, bool)
12 paddle::framework::Executor::Run(paddle::framework::ProgramDesc const&, paddle::framework::Scope*, int, bool, bool, std::vector<std::string, std::allocator<std::string> > const&, bool, bool)
Python Call Stacks (More useful to users):
File "/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/paddle/fluid/framework.py", line 2610, in append_op
attrs=kwargs.get("attrs", None))
File "/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/paddle/fluid/io.py", line 802, in load_vars
'model_from_memory': vars_from_memory
File "/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/paddle/fluid/io.py", line 750, in load_vars
filename=filename)
File "/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/paddle/fluid/io.py", line 927, in load_persistables
filename=filename)
File "", line 81, in
fluid.io.load_persistables(executor=exe, dirname='/home/aistudio/sEMG_model', main_program=None, filename='params')
File "/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/IPython/core/interactiveshell.py", line 3265, in run_code
exec(code_obj, self.user_global_ns, self.user_ns)
File "/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/IPython/core/interactiveshell.py", line 3183, in run_ast_nodes
if (yield from self.run_code(code, result)):
File "/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/IPython/core/interactiveshell.py", line 3018, in run_cell_async
interactivity=interactivity, compiler=compiler, result=result)
File "/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/IPython/core/async_helpers.py", line 67, in _pseudo_sync_runner
coro.send(None)
File "/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/IPython/core/interactiveshell.py", line 2843, in _run_cell
return runner(coro)
File "/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/IPython/core/interactiveshell.py", line 2817, in run_cell
raw_cell, store_history, silent, shell_futures)
File "/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/ipykernel/zmqshell.py", line 536, in run_cell
return super(ZMQInteractiveShell, self).run_cell(*args,**kwargs)
File "/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/ipykernel/ipkernel.py", line 294, in do_execute
res = shell.run_cell(code, store_history=store_history, silent=silent)
File "/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/tornado/gen.py", line 326, in wrapper
yielded = next(result)
File "/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/ipykernel/kernelbase.py", line 534, in execute_request
user_expressions, allow_stdin,
File "/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/tornado/gen.py", line 326, in wrapper
yielded = next(result)
File "/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/ipykernel/kernelbase.py", line 267, in dispatch_shell
yield gen.maybe_future(handler(stream, idents, msg))
File "/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/tornado/gen.py", line 326, in wrapper
yielded = next(result)
File "/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/ipykernel/kernelbase.py", line 357, in process_one
yield gen.maybe_future(dispatch(*args))
File "/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/tornado/gen.py", line 1147, in run
yielded = self.gen.send(value)
File "/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/tornado/gen.py", line 1233, in inner
self.run()
File "/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/tornado/stack_context.py", line 300, in null_wrapper
return fn(*args,**kwargs)
File "/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/tornado/ioloop.py", line 758, in _run_callback
ret = callback()
File "/opt/conda/envs/python35-paddle120-env/lib/python3.7/asyncio/events.py", line 88, in _run
self._context.run(self._callback, *self._args)
File "/opt/conda/envs/python35-paddle120-env/lib/python3.7/asyncio/base_events.py", line 1771, in _run_once
handle._run()
File "/opt/conda/envs/python35-paddle120-env/lib/python3.7/asyncio/base_events.py", line 534, in run_forever
self._run_once()
File "/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/tornado/platform/asyncio.py", line 132, in start
self.asyncio_loop.run_forever()
File "/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/ipykernel/kernelapp.py", line 505, in start
self.io_loop.start()
File "/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/traitlets/config/application.py", line 664, in launch_instance
app.start()
File "/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/ipykernel_launcher.py", line 16, in
app.launch_new_instance()
File "/opt/conda/envs/python35-paddle120-env/lib/python3.7/runpy.py", line 85, in _run_code
exec(code, run_globals)
File "/opt/conda/envs/python35-paddle120-env/lib/python3.7/runpy.py", line 193, in _run_module_as_main
"main", mod_spec)
Error Message Summary:
InvalidArgumentError: Cannot parse tensor desc
[Hint: Expected desc.ParseFromArray(buf.get(), size) == true, but received desc.ParseFromArray(buf.get(), size):0 != true:1.] at (/paddle/paddle/fluid/framework/tensor_util.cu:527)
[operator < load_combine > error]
3 answers
0h4hbjxa1#
You could try load_vars:
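For reference, a minimal sketch of what that could look like, assuming the parameters were saved into the single combined file 'params' as in the question and that the network has already been rebuilt in the current program (exe, the directory, and the filename come from the question; the explicit variable list is my own illustration):
```python
import paddle.fluid as fluid

# Hypothetical sketch of the load_vars suggestion: explicitly list the
# persistable variables of the current main program and restore them from
# the combined 'params' file. If some persistable variables (e.g. newly
# added optimizer states) are not present in the file, filter the list
# further before loading.
main_prog = fluid.default_main_program()
vars_to_load = [v for v in main_prog.list_vars() if fluid.io.is_persistable(v)]

fluid.io.load_vars(
    executor=exe,                          # the Executor from the question
    dirname='/home/aistudio/sEMG_model',   # directory used when saving
    vars=vars_to_load,
    filename='params')                     # combined file name used when saving
```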
93ze6v8z2#
It is recommended to use these two APIs to save and load parameters instead (a sketch follows below):
https://www.paddlepaddle.org.cn/documentation/docs/zh/api_cn/io_cn/load_cn.html#save
https://www.paddlepaddle.org.cn/documentation/docs/zh/api_cn/io_cn/load_cn.html#load
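As a rough sketch of how that pair might be used here, assuming the same network, executor, and save directory as in the question (the 'persistables' file prefix is illustrative, not from the original post):
```python
import paddle.fluid as fluid

# Hypothetical sketch of the fluid.save / fluid.load pair recommended above.
# 'model_path' is a file prefix, not a directory: fluid.save writes
# <prefix>.pdparams, <prefix>.pdopt and <prefix>.pdmodel.
model_path = '/home/aistudio/sEMG_model/persistables'   # illustrative prefix

# In the first cell, after training:
fluid.save(fluid.default_main_program(), model_path)

# In the second cell, after rebuilding the network and running the
# startup program:
fluid.load(fluid.default_main_program(), model_path, executor=exe)
```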
ykejflvf3#
When I use fluid.save instead, I get the error below; what is going wrong?
InvalidArgumentError: The Variable type must be N6paddle9framework9LoDTensorE, but the type it holds is N6paddle9operators13CudnnRNNCacheE.
[Hint: Expected holder_->Type() == VarTypeTrait<T>::kId, but received holder_->Type():52 != VarTypeTrait<T>::kId:7.] at (/paddle/paddle/fluid/framework/variable.h:54)