Could someone please tell me how to resolve this "HDF5 error back trace"?
In [1]:
# This code was auto-generated by the visual strategy environment on 2020-09-19 23:44
# This code cell can only be edited in visual mode. You can also copy the code into a new code cell or strategy and then modify it there.
# Python entry function; input_1/2/3 correspond to the three input ports, data_1/2/3 to the three output ports
def m1_run_bigquant_run(input_1, input_2, input_3):
    # Example code follows: split the input into two parts.
    input_df = input_1.read_df().reset_index(drop=True)
    out1 = input_df[:2500]
    out2 = input_df[2500:]
    data_1 = DataSource.write_df(out1)
    data_2 = DataSource.write_pickle(out2)
    return Outputs(data_1=data_1, data_2=data_2, data_3=None)
# Post-processing function, optional. Its input is the main function's output; you can process the data further here or return a friendlier outputs format. The output of this function is not cached.
def m1_post_run_bigquant_run(outputs):
    return outputs
m2 = M.input_csv.v5(
    file='model_data.csv',
    coding='utf-8',
    dtypes={},
    date_type='%Y-%m-%d',
    date_cols=['date']
)
m4 = M.dropnan.v2(
    input_data=m2.data
)
m1 = M.cached.v3(
    input_1=m4.data,
    run=m1_run_bigquant_run,
    post_run=m1_post_run_bigquant_run,
    input_ports='',
    params='{}',
    output_ports=''
)
m5 = M.input_features.v1(
    features="""return_1
return_3
return_5
return_10
return_20
MACD
MACDsignal
MACDhist
KAMA
slowk
slowd
fastk
fastd
boll_upper
boll_middle
boll_lower
rsi_6
rsi_9
rsi_14
WILLR
SMA_5
SMA_10
SMA_20
EMA_5
EMA_10
EMA_20
"""
)
m3 = M.extra_trees_classifier.v1(
    training_ds=m1.data_1,
    features=m5.data,
    predict_ds=m1.data_2,
    criterion='gini',
    iterations=10,
    feature_fraction=1,
    max_depth=30,
    min_samples_per_leaf=200,
    key_cols='date,instrument',
    workers=1,
    random_state=0,
    other_train_parameters={}
)
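One detail in this graph may explain the HDF5 error reported in the logs below: m1 writes out1 with DataSource.write_df (stored as HDF5) but out2 with DataSource.write_pickle (stored as a pickle), and m3 (extra_trees_classifier.v1) receives m1.data_2 as predict_ds. If the downstream module reads its input DataSources as DataFrames (the same way input_1.read_df() is used above), opening the pickle-backed file as HDF5 would fail with exactly the "file signature not found" error in the logs. Below is a minimal sketch of the run function, not confirmed platform documentation, under the assumption that both downstream inputs must be readable via read_df(); the _fixed name is only for illustration, and only APIs already used in this strategy (DataSource.write_df, Outputs) appear in it.

# Sketch (assumption: downstream modules read both outputs as DataFrames):
# write both splits with DataSource.write_df so they are stored as HDF5
# and can be opened by any module that calls read_df() on them.
def m1_run_bigquant_run_fixed(input_1, input_2, input_3):
    input_df = input_1.read_df().reset_index(drop=True)
    out1 = input_df[:2500]                # training rows
    out2 = input_df[2500:]                # prediction rows
    data_1 = DataSource.write_df(out1)
    data_2 = DataSource.write_df(out2)    # was DataSource.write_pickle in the code above
    return Outputs(data_1=data_1, data_2=data_2, data_3=None)

If this is the cause, passing run=m1_run_bigquant_run_fixed (or editing the original function) changes the cached.v3 code, so on the next execution the module should re-run rather than reuse the cache hits shown in the logs below.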
Logs: 15 entries, 2 of them errors
[2020-09-19 23:43:36.352254] INFO: moduleinvoker: input_csv.v5 started running..
[2020-09-19 23:43:36.367811] INFO: moduleinvoker: cache hit
[2020-09-19 23:43:36.368706] INFO: moduleinvoker: input_csv.v5 finished running [0.016469s].
[2020-09-19 23:43:36.378177] INFO: moduleinvoker: dropnan.v2 started running..
[2020-09-19 23:43:36.384968] INFO: moduleinvoker: cache hit
[2020-09-19 23:43:36.386133] INFO: moduleinvoker: dropnan.v2 finished running [0.007955s].
[2020-09-19 23:43:36.393389] INFO: moduleinvoker: cached.v3 started running..
[2020-09-19 23:43:36.421133] INFO: moduleinvoker: cache hit
[2020-09-19 23:43:36.422360] INFO: moduleinvoker: cached.v3 finished running [0.028965s].
[2020-09-19 23:43:36.429782] INFO: moduleinvoker: input_features.v1 started running..
[2020-09-19 23:43:36.435707] INFO: moduleinvoker: cache hit
[2020-09-19 23:43:36.436828] INFO: moduleinvoker: input_features.v1 finished running [0.007045s].
[2020-09-19 23:43:36.641685] INFO: moduleinvoker: extra_trees_classifier.v1 started running..
[2020-09-19 23:43:36.699429] ERROR: moduleinvoker: module name: cached, module version: v2, trackeback: Traceback (most recent call last):
  tables.exceptions.HDF5ExtError: HDF5 error back trace
    File "H5F.c", line 511, in H5Fopen
      unable to open file
    File "H5Fint.c", line 1604, in H5F_open
      unable to read superblock
    File "H5Fsuper.c", line 413, in H5F__super_read
      file signature not found
  End of HDF5 error back trace
  Unable to open/create file '/var/app/data/bigquant/datasource/user/v3/f/ce/fce936d232b844e2848a55d47912f0bfT'

  During handling of the above exception, another exception occurred:

  Traceback (most recent call last):
  OSError: HDF5 error back trace
    File "H5F.c", line 511, in H5Fopen
      unable to open file
    File "H5Fint.c", line 1604, in H5F_open
      unable to read superblock
    File "H5Fsuper.c", line 413, in H5F__super_read
      file signature not found
  End of HDF5 error back trace
  Unable to open/create file '/var/app/data/bigquant/datasource/user/v3/f/ce/fce936d232b844e2848a55d47912f0bfT'

[2020-09-19 23:43:36.707087] ERROR: moduleinvoker: module name: extra_trees_classifier, module version: v1, trackeback: (identical HDF5ExtError / OSError back trace for the same file '/var/app/data/bigquant/datasource/user/v3/f/ce/fce936d232b844e2848a55d47912f0bfT')
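The key line is "file signature not found": PyTables tried to open the user DataSource file fce936d232b844e2848a55d47912f0bfT as HDF5, but the file does not start with the HDF5 signature, so it is either not an HDF5 file at all or it is corrupted. Given that out2 was saved with DataSource.write_pickle, a pickle file being opened as HDF5 is the most likely explanation. A hypothetical way to confirm this from a notebook cell, assuming m1.data_1 / m1.data_2 expose the same read_df() used inside the run function, is sketched here:

# Hypothetical check after the graph has run once:
# data_1 was written with write_df (HDF5) and should load,
# data_2 was written with write_pickle and should reproduce the error.
df_train = m1.data_1.read_df()       # expected to open normally
try:
    df_pred = m1.data_2.read_df()    # expected to raise the HDF5 "file signature not found" error
except Exception as e:
    print(type(e).__name__, e)

If both outputs still fail after switching to write_df, the cached file itself may be corrupted; in that case editing the run code (or any other module parameter) should force cached.v3 to re-run instead of reusing the stale cache entry.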