The stock-selection idea of this strategy comes from: https://bigquant.com/wiki/doc/hangqing-celve-6sw8FeSPsR with the holding algorithm modified to a fixed 1-day holding period.
import tensorflow as tf
#gpus = tf.config.list_physical_devices("GPU")
#tf.config.experimental.set_memory_growth(gpus[0], True)
from tensorflow.keras import optimizers
# This code was generated automatically by the visual strategy environment on 2022-03-15 20:52
# This code cell can only be edited in visual mode. You can also copy the code, paste it into a new code cell or strategy, and then modify it.
# Python entry function: input_1/2/3 correspond to the three input ports, data_1/2/3 to the three output ports
def m4_run_bigquant_run(input_1, input_2, input_3):
    # Sample code; write your own code here
    from sklearn.model_selection import train_test_split
    data = input_1.read()
    x_train, x_val, y_train, y_val = train_test_split(data["x"], data['y'], shuffle=False, test_size=0.2)
    data_1 = DataSource.write_pickle({'x': x_train, 'y': y_train})
    data_2 = DataSource.write_pickle({'x': x_val, 'y': y_val})
    return Outputs(data_1=data_1, data_2=data_2, data_3=None)
# Post-processing function (optional). Its input is the main function's output; you can process
# the data further here or return a friendlier outputs format. The output of this function is not cached.
def m4_post_run_bigquant_run(outputs):
    return outputs
from tensorflow.keras.callbacks import EarlyStopping
m35_earlystop_bigquant_run=EarlyStopping(monitor='val_mse', min_delta=0.0001, patience=10)
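# Note: dl_model_train (m35) below trains for only 2 epochs, so with patience=10 this early-stop
# callback effectively never triggers; it only matters if the epoch count is raised.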
# Custom user layers must be registered in a dict, for example:
# {
#     "MyLayer": MyLayer
# }
m35_custom_objects_bigquant_run = {
}
# Python entry function: input_1/2/3 correspond to the three input ports, data_1/2/3 to the three output ports
def m24_run_bigquant_run(input_1, input_2, input_3):
    # Sample code; write your own code here
    import pandas as pd  # pandas is pre-imported on the platform; imported here for self-containment
    pred_label = input_1.read_pickle()
    df = input_2.read_df()
    df = pd.DataFrame({'pred_label': pred_label[:, 0], 'instrument': df.instrument, 'date': df.date})
    df.sort_values(['date', 'pred_label'], inplace=True, ascending=[True, False])
    return Outputs(data_1=DataSource.write_df(df), data_2=None, data_3=None)
# Post-processing function (optional). Its input is the main function's output; you can process
# the data further here or return a friendlier outputs format. The output of this function is not cached.
def m24_post_run_bigquant_run(outputs):
    return outputs
# Backtest engine: initialization function, runs only once
def m19_initialize_bigquant_run(context):
    # Load the prediction data (passed in through options) into memory as a DataFrame
    context.ranker_prediction = context.options['data'].read_df()
    # The system sets default commissions and slippage; commissions can be overridden like this
    context.set_commission(PerOrder(buy_cost=0.0003, sell_cost=0.0003, min_cost=5))
    # Number of stocks to buy: here only the top-ranked stock from the prediction list
    stock_count = 1
    # Weight of each stock; with a single stock it receives the full allocation
    context.stock_weights = [1]
    # Maximum fraction of capital that a single stock may occupy
    context.max_cash_per_instrument = 1 / stock_count
    # Holding period in days (fixed 1-day holding)
    context.hold_days = 1
# Backtest engine: daily data-handling function, runs once per trading day
def m19_handle_data_bigquant_run(context, data):
    import datetime  # pre-imported on the platform; imported here for self-containment
    # Filter the prediction data down to today's rows
    ranker_prediction = context.ranker_prediction[
        context.ranker_prediction.date == data.current_dt.strftime('%Y-%m-%d')]
    # 1. Capital allocation
    # The average holding period is hold_days; stocks are bought every day, so roughly
    # 1/hold_days of the capital is expected to be deployed per day.
    # In practice there is some buying error, so during the first hold_days days (the staging
    # period) an equal share is used; afterwards, remaining cash is used as far as possible
    # (capped here at 1.5x the equal share).
    is_staging = context.trading_day_index < context.hold_days  # still in the staging period (the first hold_days days)?
    cash_avg = context.portfolio.portfolio_value / context.hold_days
    cash_for_buy = min(context.portfolio.cash, (1 if is_staging else 1.5) * cash_avg)
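    # Worked example (hypothetical numbers): with hold_days=1 and a portfolio value of
    # 100,000, cash_avg = 100,000; on the first trading day (staging) up to the full
    # 100,000 in cash may be deployed, and on later days up to min(cash, 150,000).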
positions = {e.symbol: p.amount * p.last_sale_price
for e, p in context.portfolio.positions.items()}
    # ---------------------------- START: sell after holding for a fixed number of calendar days ----------------------------
    today = data.current_dt
    # Only sell once past the staging period (the first hold_days days)
    if not is_staging:
        equities = {e.symbol: p for e, p in context.portfolio.positions.items() if p.amount > 0}
        for instrument in equities:
            # print('last_sale_date: ', equities[instrument].last_sale_date)
            sid = equities[instrument].sid  # the traded asset
            # Sell the entire position once at least hold_days calendar days have passed since the last trade
            if today - equities[instrument].last_sale_date >= datetime.timedelta(context.hold_days) and data.can_trade(context.symbol(instrument)):
                context.order_target_percent(sid, 0)
    # ---------------------------- END: sell after a fixed holding period ----------------------------
    # 2. Generate buy orders: buy the top stock_count stocks by predicted ranking
    buy_cash_weights = context.stock_weights
    buy_instruments = list(ranker_prediction.instrument[:len(buy_cash_weights)])
    max_cash_per_instrument = context.portfolio.portfolio_value * context.max_cash_per_instrument
    for i, instrument in enumerate(buy_instruments):
        cash = cash_for_buy * buy_cash_weights[i]
        if cash > max_cash_per_instrument - positions.get(instrument, 0):
            # Make sure the position in this stock does not exceed the per-stock capital cap
            cash = max_cash_per_instrument - positions.get(instrument, 0)
        if cash > 0:
            context.order_value(context.symbol(instrument), cash)
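    # Worked example (hypothetical): with stock_count=1, the per-stock cap equals the whole
    # portfolio value, so a fresh buy is limited mainly by cash_for_buy; any existing position
    # in the same instrument reduces the additional amount ordered.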
# Backtest engine: data preparation, runs only once
def m19_prepare_bigquant_run(context):
    pass
m1 = M.instruments.v2(
start_date='2013-02-01',
end_date='2019-10-30',
market='CN_STOCK_A',
instrument_list='',
max_count=0
)
m20 = M.use_datasource.v1(
instruments=m1.data,
datasource_id='net_amount_CN_STOCK_A',
start_date='',
end_date=''
)
m21 = M.filter.v3(
input_data=m20.data,
expr='mf_net_amount_l>8000000',
output_left_data=False
)
m23 = M.select_columns.v3(
input_ds=m21.data,
columns='date,instrument',
reverse_select=False
)
m30 = M.use_datasource.v1(
instruments=m1.data,
datasource_id='bar1d_CN_STOCK_A',
start_date='',
end_date=''
)
m31 = M.join.v3(
data1=m30.data,
data2=m23.data,
on='date,instrument',
how='inner',
sort=False
)
m2 = M.auto_labeler_on_datasource.v1(
input_data=m31.data,
    label_expr="""# Lines starting with # are comments
# 0. One expression per line, executed in order; from the second expression on, the label field can be used
# 1. Available data fields: https://bigquant.com/docs/develop/datasource/deprecated/history_data.html
# 2. Available operators and functions: `expression engine <https://bigquant.com/docs/develop/bigexpr/usage.html>`_
# Compute the return: the close 5 days ahead (sell price) divided by the next day's open (buy price)
shift(close, -5) / shift(open, -1)
# Handle extremes: clip at the 1% and 99% quantiles
clip(label, all_quantile(label, 0.01), all_quantile(label, 0.99))
# Map the score into classes, using 20 bins here
all_wbins(label, 20)
# Filter out one-price limit-up days (set the label to NaN; NaN labels are ignored in later processing and training)
where(shift(high, -1) == shift(low, -1), NaN, label)
""",
drop_na_label=True,
cast_label_int=True,
date_col='date',
instrument_col='instrument',
user_functions={}
)
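# For reference, a minimal pandas sketch (hypothetical, single-instrument data; not the
# platform's bigexpr engine, whose all_wbins binning may differ in detail) of what the
# label expression above computes:
import numpy as np
import pandas as pd

def make_reference_labels(bars: pd.DataFrame, n_bins: int = 20) -> pd.Series:
    # Holding-period return: close 5 days ahead (sell price) over the next day's open (buy price)
    label = bars["close"].shift(-5) / bars["open"].shift(-1)
    # Winsorize at the 1% / 99% quantiles
    label = label.clip(label.quantile(0.01), label.quantile(0.99))
    # Discretize into n_bins integer classes (a rough analogue of all_wbins)
    label = pd.cut(label, bins=n_bins, labels=False)
    # Drop one-price limit-up days: next day's high == low means the stock could not be bought
    return label.where(bars["high"].shift(-1) != bars["low"].shift(-1), np.nan)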
m13 = M.standardlize.v8(
input_1=m2.data,
columns_input='label'
)
m3 = M.input_features.v1(
features="""isXiaDie0=where((return_0<1),1,0)
xd_num=group_sum(date, isXiaDie0)# number of A-share stocks declining that day
isZhangtToday=price_limit_status_0==3
isDietToday=price_limit_status_0==1
return0=return_0
priceLowBl10=close_0/ts_min(close_0,10)
close_0
open_0
high_0
low_0
# amount_0
"""
)
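# Note: group_sum(date, isXiaDie0) aggregates across all instruments sharing the same date, so
# xd_num is a market-breadth feature (the count of declining stocks that day), while the other
# features above are computed per instrument.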
m5 = M.features_short.v1(
input_1=m3.data
)
m15 = M.general_feature_extractor.v7(
instruments=m1.data,
features=m3.data,
start_date='',
end_date='',
before_start_days=90
)
m28 = M.join.v3(
data1=m23.data,
data2=m15.data,
on='date,instrument',
how='inner',
sort=False
)
m16 = M.derived_feature_extractor.v3(
input_data=m28.data,
features=m3.data,
date_col='date',
instrument_col='instrument',
drop_na=True,
remove_extra_columns=False
)
m14 = M.standardlize.v8(
input_1=m16.data,
input_2=m5.data_1,
columns_input=''
)
m10 = M.fillnan.v1(
input_data=m14.data,
features=m5.data_1,
fill_value='0.0'
)
m7 = M.join.v3(
data1=m13.data,
data2=m10.data,
on='date,instrument',
how='inner',
sort=False
)
m26 = M.dl_convert_to_bin.v2(
input_data=m7.data,
features=m5.data_1,
window_size=5,
feature_clip=5,
flatten=False,
window_along_col='instrument'
)
m4 = M.cached.v3(
input_1=m26.data,
input_2=m3.data,
run=m4_run_bigquant_run,
post_run=m4_post_run_bigquant_run,
input_ports='',
params='{}',
output_ports=''
)
m9 = M.instruments.v2(
start_date=T.live_run_param('trading_date', '2022-01-01'),
end_date=T.live_run_param('trading_date', '2022-03-14'),
market='CN_STOCK_A',
instrument_list='',
max_count=0
)
m8 = M.use_datasource.v1(
instruments=m9.data,
datasource_id='net_amount_CN_STOCK_A',
start_date='',
end_date=''
)
m22 = M.filter.v3(
input_data=m8.data,
expr='mf_net_amount_l>8000000',
output_left_data=False
)
m17 = M.general_feature_extractor.v7(
instruments=m9.data,
features=m3.data,
start_date='',
end_date='',
before_start_days=90
)
m29 = M.join.v3(
data1=m17.data,
data2=m22.data,
on='date,instrument',
how='inner',
sort=False
)
m18 = M.derived_feature_extractor.v3(
input_data=m29.data,
features=m3.data,
date_col='date',
instrument_col='instrument',
drop_na=True,
remove_extra_columns=False
)
m25 = M.standardlize.v8(
input_1=m18.data,
input_2=m5.data_1,
columns_input=''
)
m12 = M.fillnan.v1(
input_data=m25.data,
features=m5.data_1,
fill_value='0.0'
)
m27 = M.dl_convert_to_bin.v2(
input_data=m12.data,
features=m5.data_1,
window_size=5,
feature_clip=5,
flatten=False,
window_along_col='instrument'
)
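# Note: both dl_convert_to_bin modules above window the 10 derived features over window_size=5
# trading days, so each sample fed to the network is a (5, 10) array per instrument, which is
# why the input layer below is declared with shape='5,10'.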
m6 = M.dl_layer_input.v1(
shape='5,10',
batch_shape='',
dtype='float32',
sparse=False,
name=''
)
m37 = M.dl_layer_batchnormalization.v1(
inputs=m6.data,
axis=-1,
momentum=0.99,
epsilon=0.001,
center=True,
scale=True,
beta_initializer='Zeros',
gamma_initializer='Ones',
moving_mean_initializer='Zeros',
moving_variance_initializer='Ones',
beta_regularizer='None',
beta_regularizer_l1=0,
beta_regularizer_l2=0,
gamma_regularizer='None',
gamma_regularizer_l1=0,
gamma_regularizer_l2=0,
beta_constraint='None',
gamma_constraint='None',
name=''
)
m43 = M.dl_layer_conv1d.v1(
inputs=m37.data,
filters=64,
kernel_size='3',
strides='1',
padding='same',
dilation_rate=1,
activation='relu',
use_bias=True,
kernel_initializer='glorot_uniform',
bias_initializer='Zeros',
kernel_regularizer='None',
kernel_regularizer_l1=0,
kernel_regularizer_l2=0,
bias_regularizer='None',
bias_regularizer_l1=0,
bias_regularizer_l2=0,
activity_regularizer='None',
activity_regularizer_l1=0,
activity_regularizer_l2=0,
kernel_constraint='None',
bias_constraint='None',
name=''
)
m44 = M.dl_layer_conv1d.v1(
inputs=m43.data,
filters=64,
kernel_size='3',
strides='1',
padding='same',
dilation_rate=1,
activation='relu',
use_bias=True,
kernel_initializer='glorot_uniform',
bias_initializer='Zeros',
kernel_regularizer='None',
kernel_regularizer_l1=0,
kernel_regularizer_l2=0,
bias_regularizer='None',
bias_regularizer_l1=0,
bias_regularizer_l2=0,
activity_regularizer='None',
activity_regularizer_l1=0,
activity_regularizer_l2=0,
kernel_constraint='None',
bias_constraint='None',
name=''
)
m41 = M.dl_layer_maxpooling1d.v1(
inputs=m44.data,
pool_size=2,
padding='valid',
name=''
)
m45 = M.dl_layer_conv1d.v1(
inputs=m41.data,
filters=128,
kernel_size='5',
strides='1',
padding='same',
dilation_rate=1,
activation='relu',
use_bias=True,
kernel_initializer='glorot_uniform',
bias_initializer='Zeros',
kernel_regularizer='None',
kernel_regularizer_l1=0,
kernel_regularizer_l2=0,
bias_regularizer='None',
bias_regularizer_l1=0,
bias_regularizer_l2=0,
activity_regularizer='None',
activity_regularizer_l1=0,
activity_regularizer_l2=0,
kernel_constraint='None',
bias_constraint='None',
name=''
)
m47 = M.dl_layer_conv1d.v1(
inputs=m45.data,
filters=128,
kernel_size='5',
strides='1',
padding='same',
dilation_rate=1,
activation='relu',
use_bias=True,
kernel_initializer='glorot_uniform',
bias_initializer='Zeros',
kernel_regularizer='None',
kernel_regularizer_l1=0,
kernel_regularizer_l2=0,
bias_regularizer='None',
bias_regularizer_l1=0,
bias_regularizer_l2=0,
activity_regularizer='None',
activity_regularizer_l1=0,
activity_regularizer_l2=0,
kernel_constraint='None',
bias_constraint='None',
name=''
)
m48 = M.dl_layer_batchnormalization.v1(
inputs=m47.data,
axis=-1,
momentum=0.99,
epsilon=0.001,
center=True,
scale=True,
beta_initializer='Zeros',
gamma_initializer='Ones',
moving_mean_initializer='Zeros',
moving_variance_initializer='Ones',
beta_regularizer='None',
beta_regularizer_l1=0,
beta_regularizer_l2=0,
gamma_regularizer='None',
gamma_regularizer_l1=0,
gamma_regularizer_l2=0,
beta_constraint='None',
gamma_constraint='None',
name=''
)
m50 = M.dl_layer_conv1d.v1(
inputs=m48.data,
filters=128,
kernel_size='3',
strides='1',
padding='same',
dilation_rate=1,
activation='relu',
use_bias=True,
kernel_initializer='glorot_uniform',
bias_initializer='Zeros',
kernel_regularizer='None',
kernel_regularizer_l1=0,
kernel_regularizer_l2=0,
bias_regularizer='None',
bias_regularizer_l1=0,
bias_regularizer_l2=0,
activity_regularizer='None',
activity_regularizer_l1=0,
activity_regularizer_l2=0,
kernel_constraint='None',
bias_constraint='None',
name=''
)
m51 = M.dl_layer_batchnormalization.v1(
inputs=m50.data,
axis=-1,
momentum=0.99,
epsilon=0.001,
center=True,
scale=True,
beta_initializer='Zeros',
gamma_initializer='Ones',
moving_mean_initializer='Zeros',
moving_variance_initializer='Ones',
beta_regularizer='None',
beta_regularizer_l1=0,
beta_regularizer_l2=0,
gamma_regularizer='None',
gamma_regularizer_l1=0,
gamma_regularizer_l2=0,
beta_constraint='None',
gamma_constraint='None',
name=''
)
m53 = M.dl_layer_conv1d.v1(
inputs=m51.data,
filters=128,
kernel_size='5',
strides='1',
padding='same',
dilation_rate=1,
activation='relu',
use_bias=True,
kernel_initializer='glorot_uniform',
bias_initializer='Zeros',
kernel_regularizer='None',
kernel_regularizer_l1=0,
kernel_regularizer_l2=0,
bias_regularizer='None',
bias_regularizer_l1=0,
bias_regularizer_l2=0,
activity_regularizer='None',
activity_regularizer_l1=0,
activity_regularizer_l2=0,
kernel_constraint='None',
bias_constraint='None',
name=''
)
m46 = M.dl_layer_batchnormalization.v1(
inputs=m53.data,
axis=-1,
momentum=0.99,
epsilon=0.001,
center=True,
scale=True,
beta_initializer='Zeros',
gamma_initializer='Ones',
moving_mean_initializer='Zeros',
moving_variance_initializer='Ones',
beta_regularizer='None',
beta_regularizer_l1=0,
beta_regularizer_l2=0,
gamma_regularizer='None',
gamma_regularizer_l1=0,
gamma_regularizer_l2=0,
beta_constraint='None',
gamma_constraint='None',
name=''
)
m49 = M.dl_layer_conv1d.v1(
inputs=m46.data,
filters=128,
kernel_size='5',
strides='1',
padding='same',
dilation_rate=1,
activation='relu',
use_bias=True,
kernel_initializer='glorot_uniform',
bias_initializer='Zeros',
kernel_regularizer='None',
kernel_regularizer_l1=0,
kernel_regularizer_l2=0,
bias_regularizer='None',
bias_regularizer_l1=0,
bias_regularizer_l2=0,
activity_regularizer='None',
activity_regularizer_l1=0,
activity_regularizer_l2=0,
kernel_constraint='None',
bias_constraint='None',
name=''
)
m62 = M.dl_layer_add.v1(
input1=m49.data,
input2=m47.data,
name=''
)
m38 = M.dl_layer_globalmaxpooling1d.v1(
inputs=m62.data,
name=''
)
m61 = M.dl_layer_dropout.v1(
inputs=m38.data,
rate=0.1,
noise_shape='',
name=''
)
m57 = M.dl_layer_dense.v1(
inputs=m61.data,
units=1,
activation='linear',
use_bias=True,
kernel_initializer='glorot_uniform',
bias_initializer='Zeros',
kernel_regularizer='None',
kernel_regularizer_l1=0,
kernel_regularizer_l2=0,
bias_regularizer='None',
bias_regularizer_l1=0,
bias_regularizer_l2=0,
activity_regularizer='None',
activity_regularizer_l1=0,
activity_regularizer_l2=0,
kernel_constraint='None',
bias_constraint='None',
name=''
)
m34 = M.dl_model_init.v1(
inputs=m6.data,
outputs=m57.data
)
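# For reference, a standalone tf.keras sketch (functional API) of the network assembled by the
# m6..m57 layer modules above; this is illustrative only and is not called anywhere: the platform
# builds and trains the actual model through dl_model_init / dl_model_train.
from tensorflow.keras import layers, models

def build_reference_model():
    inp = layers.Input(shape=(5, 10))                                    # m6
    x = layers.BatchNormalization()(inp)                                 # m37
    x = layers.Conv1D(64, 3, padding='same', activation='relu')(x)       # m43
    x = layers.Conv1D(64, 3, padding='same', activation='relu')(x)       # m44
    x = layers.MaxPooling1D(pool_size=2)(x)                              # m41
    x = layers.Conv1D(128, 5, padding='same', activation='relu')(x)      # m45
    skip = layers.Conv1D(128, 5, padding='same', activation='relu')(x)   # m47
    x = layers.BatchNormalization()(skip)                                # m48
    x = layers.Conv1D(128, 3, padding='same', activation='relu')(x)      # m50
    x = layers.BatchNormalization()(x)                                   # m51
    x = layers.Conv1D(128, 5, padding='same', activation='relu')(x)      # m53
    x = layers.BatchNormalization()(x)                                   # m46
    x = layers.Conv1D(128, 5, padding='same', activation='relu')(x)      # m49
    x = layers.Add()([x, skip])                                          # m62: residual connection
    x = layers.GlobalMaxPooling1D()(x)                                   # m38
    x = layers.Dropout(0.1)(x)                                           # m61
    out = layers.Dense(1, activation='linear')(x)                        # m57
    model = models.Model(inp, out)
    # Same optimizer/loss/metric settings as dl_model_train (m35) below
    model.compile(optimizer=optimizers.Adam(learning_rate=0.01),
                  loss='mean_squared_error', metrics=['mse'])
    return model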
m35 = M.dl_model_train.v1(
input_model=m34.data,
training_data=m4.data_1,
validation_data=m4.data_2,
optimizer='Adam',
user_optimizer=optimizers.Adam(lr=0.01),
loss='mean_squared_error',
metrics='mse',
batch_size=1024,
epochs=2,
earlystop=m35_earlystop_bigquant_run,
custom_objects=m35_custom_objects_bigquant_run,
n_gpus=1,
verbose='2:每个epoch输出一行记录',
m_cached=False
)
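# Roughly equivalent plain-Keras training call (hypothetical names; dl_model_train also handles
# data loading, GPU placement and caching on the platform):
#   model.fit(x_train, y_train, validation_data=(x_val, y_val),
#             batch_size=1024, epochs=2, verbose=2,
#             callbacks=[m35_earlystop_bigquant_run])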
m11 = M.dl_model_predict.v1(
trained_model=m35.data,
input_data=m27.data,
batch_size=1024,
n_gpus=1,
verbose='2:每个epoch输出一行记录'
)
m24 = M.cached.v3(
input_1=m11.data,
input_2=m18.data,
run=m24_run_bigquant_run,
post_run=m24_post_run_bigquant_run,
input_ports='',
params='{}',
output_ports=''
)
m19 = M.trade.v4(
instruments=m9.data,
options_data=m24.data_1,
start_date='',
end_date='',
initialize=m19_initialize_bigquant_run,
handle_data=m19_handle_data_bigquant_run,
prepare=m19_prepare_bigquant_run,
order_price_field_buy='open',
order_price_field_sell='close',
capital_base=100000,
auto_cancel_non_tradable_orders=True,
data_frequency='daily',
price_type='后复权',
product_type='股票',
plot_charts=True,
backtest_only=False,
benchmark='000300.SHA'
)
[2022-03-15 20:40:31.838744] INFO: moduleinvoker: instruments.v2 开始运行..
[2022-03-15 20:40:31.863418] INFO: moduleinvoker: 命中缓存
[2022-03-15 20:40:31.865850] INFO: moduleinvoker: instruments.v2 运行完成[0.027126s].
[2022-03-15 20:40:31.874546] INFO: moduleinvoker: use_datasource.v1 开始运行..
[2022-03-15 20:40:31.895284] INFO: moduleinvoker: 命中缓存
[2022-03-15 20:40:31.897221] INFO: moduleinvoker: use_datasource.v1 运行完成[0.022676s].
[2022-03-15 20:40:31.909662] INFO: moduleinvoker: filter.v3 开始运行..
[2022-03-15 20:40:31.919752] INFO: moduleinvoker: 命中缓存
[2022-03-15 20:40:31.921617] INFO: moduleinvoker: filter.v3 运行完成[0.011952s].
[2022-03-15 20:40:31.985574] INFO: moduleinvoker: select_columns.v3 开始运行..
[2022-03-15 20:40:31.997515] INFO: moduleinvoker: 命中缓存
[2022-03-15 20:40:31.999799] INFO: moduleinvoker: select_columns.v3 运行完成[0.014253s].
[2022-03-15 20:40:32.007765] INFO: moduleinvoker: use_datasource.v1 开始运行..
[2022-03-15 20:40:32.022786] INFO: moduleinvoker: 命中缓存
[2022-03-15 20:40:32.025431] INFO: moduleinvoker: use_datasource.v1 运行完成[0.017665s].
[2022-03-15 20:40:32.041813] INFO: moduleinvoker: join.v3 开始运行..
[2022-03-15 20:40:32.051717] INFO: moduleinvoker: 命中缓存
[2022-03-15 20:40:32.053358] INFO: moduleinvoker: join.v3 运行完成[0.011546s].
[2022-03-15 20:40:32.066034] INFO: moduleinvoker: auto_labeler_on_datasource.v1 开始运行..
[2022-03-15 20:40:32.081043] INFO: moduleinvoker: 命中缓存
[2022-03-15 20:40:32.082893] INFO: moduleinvoker: auto_labeler_on_datasource.v1 运行完成[0.016863s].
[2022-03-15 20:40:32.093493] INFO: moduleinvoker: standardlize.v8 开始运行..
[2022-03-15 20:40:32.104013] INFO: moduleinvoker: 命中缓存
[2022-03-15 20:40:32.105732] INFO: moduleinvoker: standardlize.v8 运行完成[0.012239s].
[2022-03-15 20:40:32.114509] INFO: moduleinvoker: input_features.v1 开始运行..
[2022-03-15 20:40:32.127454] INFO: moduleinvoker: 命中缓存
[2022-03-15 20:40:32.129472] INFO: moduleinvoker: input_features.v1 运行完成[0.014964s].
[2022-03-15 20:40:32.151132] INFO: moduleinvoker: features_short.v1 开始运行..
[2022-03-15 20:40:32.160925] INFO: moduleinvoker: 命中缓存
[2022-03-15 20:40:32.162645] INFO: moduleinvoker: features_short.v1 运行完成[0.011542s].
[2022-03-15 20:40:32.185878] INFO: moduleinvoker: general_feature_extractor.v7 开始运行..
[2022-03-15 20:40:33.754693] INFO: 基础特征抽取: 年份 2012, 特征行数=98651
[2022-03-15 20:40:35.803263] INFO: 基础特征抽取: 年份 2013, 特征行数=564168
[2022-03-15 20:40:38.241891] INFO: 基础特征抽取: 年份 2014, 特征行数=569948
[2022-03-15 20:40:40.494160] INFO: 基础特征抽取: 年份 2015, 特征行数=569698
[2022-03-15 20:40:43.138024] INFO: 基础特征抽取: 年份 2016, 特征行数=641546
[2022-03-15 20:40:46.169260] INFO: 基础特征抽取: 年份 2017, 特征行数=743233
[2022-03-15 20:40:49.680926] INFO: 基础特征抽取: 年份 2018, 特征行数=816987
[2022-03-15 20:41:07.891189] INFO: 基础特征抽取: 年份 2019, 特征行数=721410
[2022-03-15 20:41:07.968494] INFO: 基础特征抽取: 总行数: 4725641
[2022-03-15 20:41:07.987347] INFO: moduleinvoker: general_feature_extractor.v7 运行完成[35.801466s].
[2022-03-15 20:41:07.999148] INFO: moduleinvoker: join.v3 开始运行..
[2022-03-15 20:41:10.037930] INFO: join: /y_2012, 行数=0/98651, 耗时=0.388839s
[2022-03-15 20:41:11.436240] INFO: join: /y_2013, 行数=21804/564168, 耗时=1.396121s
[2022-03-15 20:41:12.844335] INFO: join: /y_2014, 行数=34249/569948, 耗时=1.402663s
[2022-03-15 20:41:14.366737] INFO: join: /y_2015, 行数=73947/569698, 耗时=1.517154s
[2022-03-15 20:41:15.815072] INFO: join: /y_2016, 行数=56903/641546, 耗时=1.444491s
[2022-03-15 20:41:17.699990] INFO: join: /y_2017, 行数=43674/743233, 耗时=1.878172s
[2022-03-15 20:41:20.841363] INFO: join: /y_2018, 行数=31233/816987, 耗时=3.133989s
[2022-03-15 20:41:35.512833] INFO: join: /y_2019, 行数=32927/721410, 耗时=14.664844s
[2022-03-15 20:41:35.574613] INFO: join: 最终行数: 294737
[2022-03-15 20:41:35.602187] INFO: moduleinvoker: join.v3 运行完成[27.603051s].
[2022-03-15 20:41:35.617847] INFO: moduleinvoker: derived_feature_extractor.v3 开始运行..
[2022-03-15 20:41:36.569948] INFO: derived_feature_extractor: 提取完成 isXiaDie0=where((return_0<1),1,0), 0.003s
[2022-03-15 20:41:36.664639] INFO: derived_feature_extractor: 提取完成 xd_num=group_sum(date, isXiaDie0)#当天A股下跌数, 0.093s
[2022-03-15 20:41:36.668388] INFO: derived_feature_extractor: 提取完成 isZhangtToday=price_limit_status_0==3, 0.002s
[2022-03-15 20:41:36.671449] INFO: derived_feature_extractor: 提取完成 isDietToday=price_limit_status_0==1, 0.002s
[2022-03-15 20:41:36.673785] INFO: derived_feature_extractor: 提取完成 return0=return_0, 0.001s
[2022-03-15 20:41:37.142260] INFO: derived_feature_extractor: 提取完成 priceLowBl10=close_0/ts_min(close_0,10), 0.467s
[2022-03-15 20:41:37.367766] INFO: derived_feature_extractor: /y_2012, 0
[2022-03-15 20:41:37.482424] INFO: derived_feature_extractor: /y_2013, 21804
[2022-03-15 20:41:37.671586] INFO: derived_feature_extractor: /y_2014, 34249
[2022-03-15 20:41:38.019847] INFO: derived_feature_extractor: /y_2015, 73947
[2022-03-15 20:41:38.467408] INFO: derived_feature_extractor: /y_2016, 56903
[2022-03-15 20:41:38.694295] INFO: derived_feature_extractor: /y_2017, 43674
[2022-03-15 20:41:38.911998] INFO: derived_feature_extractor: /y_2018, 31233
[2022-03-15 20:41:39.060450] INFO: derived_feature_extractor: /y_2019, 32927
[2022-03-15 20:41:39.181835] INFO: moduleinvoker: derived_feature_extractor.v3 运行完成[3.563988s].
[2022-03-15 20:41:39.188280] INFO: moduleinvoker: standardlize.v8 开始运行..
[2022-03-15 20:42:05.597161] INFO: moduleinvoker: standardlize.v8 运行完成[26.408831s].
[2022-03-15 20:42:05.617415] INFO: moduleinvoker: fillnan.v1 开始运行..
[2022-03-15 20:42:06.067524] INFO: moduleinvoker: fillnan.v1 运行完成[0.450111s].
[2022-03-15 20:42:06.079074] INFO: moduleinvoker: join.v3 开始运行..
[2022-03-15 20:42:08.833582] INFO: join: /data, 行数=259403/261949, 耗时=1.244573s
[2022-03-15 20:42:08.881494] INFO: join: 最终行数: 259403
[2022-03-15 20:42:08.970240] INFO: moduleinvoker: join.v3 运行完成[2.891165s].
[2022-03-15 20:42:09.003634] INFO: moduleinvoker: dl_convert_to_bin.v2 开始运行..
[2022-03-15 20:42:31.927749] INFO: moduleinvoker: dl_convert_to_bin.v2 运行完成[22.924121s].
[2022-03-15 20:42:31.946919] INFO: moduleinvoker: cached.v3 开始运行..
[2022-03-15 20:42:32.319663] INFO: moduleinvoker: cached.v3 运行完成[0.372734s].
[2022-03-15 20:42:32.327189] INFO: moduleinvoker: instruments.v2 开始运行..
[2022-03-15 20:42:32.344438] INFO: moduleinvoker: 命中缓存
[2022-03-15 20:42:32.347497] INFO: moduleinvoker: instruments.v2 运行完成[0.020308s].
[2022-03-15 20:42:32.356095] INFO: moduleinvoker: use_datasource.v1 开始运行..
[2022-03-15 20:42:32.701621] INFO: moduleinvoker: use_datasource.v1 运行完成[0.345525s].
[2022-03-15 20:42:32.713983] INFO: moduleinvoker: filter.v3 开始运行..
[2022-03-15 20:42:32.736551] INFO: filter: 使用表达式 mf_net_amount_l>8000000 过滤
[2022-03-15 20:42:32.934015] INFO: filter: 过滤 /data, 17676/0/208035
[2022-03-15 20:42:32.972576] INFO: moduleinvoker: filter.v3 运行完成[0.258581s].
[2022-03-15 20:42:32.989029] INFO: moduleinvoker: general_feature_extractor.v7 开始运行..
[2022-03-15 20:42:36.391521] INFO: 基础特征抽取: 年份 2021, 特征行数=279539
[2022-03-15 20:42:37.306634] INFO: 基础特征抽取: 年份 2022, 特征行数=211689
[2022-03-15 20:42:37.375524] INFO: 基础特征抽取: 总行数: 491228
[2022-03-15 20:42:37.389886] INFO: moduleinvoker: general_feature_extractor.v7 运行完成[4.400862s].
[2022-03-15 20:42:37.402697] INFO: moduleinvoker: join.v3 开始运行..
[2022-03-15 20:42:38.223307] INFO: join: /y_2021, 行数=0/279539, 耗时=0.660569s
[2022-03-15 20:42:38.846400] INFO: join: /y_2022, 行数=17676/211689, 耗时=0.619853s
[2022-03-15 20:42:38.902684] INFO: join: 最终行数: 17676
[2022-03-15 20:42:38.915786] INFO: moduleinvoker: join.v3 运行完成[1.513075s].
[2022-03-15 20:42:38.929067] INFO: moduleinvoker: derived_feature_extractor.v3 开始运行..
[2022-03-15 20:42:39.109553] INFO: derived_feature_extractor: 提取完成 isXiaDie0=where((return_0<1),1,0), 0.001s
[2022-03-15 20:42:39.125990] INFO: derived_feature_extractor: 提取完成 xd_num=group_sum(date, isXiaDie0)#当天A股下跌数, 0.015s
[2022-03-15 20:42:39.128945] INFO: derived_feature_extractor: 提取完成 isZhangtToday=price_limit_status_0==3, 0.001s
[2022-03-15 20:42:39.132945] INFO: derived_feature_extractor: 提取完成 isDietToday=price_limit_status_0==1, 0.002s
[2022-03-15 20:42:39.136012] INFO: derived_feature_extractor: 提取完成 return0=return_0, 0.001s
[2022-03-15 20:42:39.478558] INFO: derived_feature_extractor: 提取完成 priceLowBl10=close_0/ts_min(close_0,10), 0.341s
[2022-03-15 20:42:39.547460] INFO: derived_feature_extractor: /y_2021, 0
[2022-03-15 20:42:39.684842] INFO: derived_feature_extractor: /y_2022, 17676
[2022-03-15 20:42:39.777111] INFO: moduleinvoker: derived_feature_extractor.v3 运行完成[0.848041s].
[2022-03-15 20:42:39.784465] INFO: moduleinvoker: standardlize.v8 开始运行..
[2022-03-15 20:42:40.223532] INFO: moduleinvoker: standardlize.v8 运行完成[0.439058s].
[2022-03-15 20:42:40.234737] INFO: moduleinvoker: fillnan.v1 开始运行..
[2022-03-15 20:42:40.401424] INFO: moduleinvoker: fillnan.v1 运行完成[0.166684s].
[2022-03-15 20:42:40.428011] INFO: moduleinvoker: dl_convert_to_bin.v2 开始运行..
[2022-03-15 20:42:40.645816] INFO: moduleinvoker: dl_convert_to_bin.v2 运行完成[0.217816s].
[2022-03-15 20:42:40.686964] INFO: moduleinvoker: dl_layer_input.v1 运行完成[0.023806s].
[2022-03-15 20:42:40.798177] INFO: moduleinvoker: dl_layer_batchnormalization.v1 运行完成[0.097765s].
[2022-03-15 20:42:40.835989] INFO: moduleinvoker: dl_layer_conv1d.v1 运行完成[0.023239s].
[2022-03-15 20:42:40.875922] INFO: moduleinvoker: dl_layer_conv1d.v1 运行完成[0.031844s].
[2022-03-15 20:42:40.904715] INFO: moduleinvoker: dl_layer_maxpooling1d.v1 运行完成[0.015346s].
[2022-03-15 20:42:40.969156] INFO: moduleinvoker: dl_layer_conv1d.v1 运行完成[0.054083s].
[2022-03-15 20:42:41.007359] INFO: moduleinvoker: dl_layer_conv1d.v1 运行完成[0.028875s].
[2022-03-15 20:42:41.036043] INFO: moduleinvoker: dl_layer_batchnormalization.v1 运行完成[0.018804s].
[2022-03-15 20:42:41.083483] INFO: moduleinvoker: dl_layer_conv1d.v1 运行完成[0.018211s].
[2022-03-15 20:42:41.108811] INFO: moduleinvoker: dl_layer_batchnormalization.v1 运行完成[0.01879s].
[2022-03-15 20:42:41.132706] INFO: moduleinvoker: dl_layer_conv1d.v1 运行完成[0.017121s].
[2022-03-15 20:42:41.175475] INFO: moduleinvoker: dl_layer_batchnormalization.v1 运行完成[0.033064s].
[2022-03-15 20:42:41.215946] INFO: moduleinvoker: dl_layer_conv1d.v1 运行完成[0.031558s].
[2022-03-15 20:42:41.240365] INFO: moduleinvoker: dl_layer_add.v1 运行完成[0.009984s].
[2022-03-15 20:42:41.285037] INFO: moduleinvoker: dl_layer_globalmaxpooling1d.v1 运行完成[0.009194s].
[2022-03-15 20:42:41.307780] INFO: moduleinvoker: dl_layer_dropout.v1 运行完成[0.008177s].
[2022-03-15 20:42:41.336843] INFO: moduleinvoker: dl_layer_dense.v1 运行完成[0.015113s].
[2022-03-15 20:42:41.418444] INFO: moduleinvoker: cached.v3 开始运行..
[2022-03-15 20:42:41.464773] INFO: moduleinvoker: cached.v3 运行完成[0.046337s].
[2022-03-15 20:42:41.469093] INFO: moduleinvoker: dl_model_init.v1 运行完成[0.120615s].
[2022-03-15 20:42:41.494567] INFO: moduleinvoker: dl_model_train.v1 开始运行..
[2022-03-15 20:42:42.087879] INFO: dl_model_train: 准备训练,训练样本个数:207522,迭代次数:2
[2022-03-15 20:50:19.971607] INFO: dl_model_train: 训练结束,耗时:457.88s
[2022-03-15 20:50:20.043235] INFO: moduleinvoker: dl_model_train.v1 运行完成[458.548627s].
[2022-03-15 20:50:20.083794] INFO: moduleinvoker: dl_model_predict.v1 开始运行..
[2022-03-15 20:50:21.475224] INFO: moduleinvoker: dl_model_predict.v1 运行完成[1.39144s].
[2022-03-15 20:50:21.498641] INFO: moduleinvoker: cached.v3 开始运行..
[2022-03-15 20:50:21.680974] INFO: moduleinvoker: cached.v3 运行完成[0.182347s].
[2022-03-15 20:50:23.984881] INFO: moduleinvoker: backtest.v8 开始运行..
[2022-03-15 20:50:23.995218] INFO: backtest: biglearning backtest:V8.6.2
[2022-03-15 20:50:23.997127] INFO: backtest: product_type:stock by specified
[2022-03-15 20:50:24.088493] INFO: moduleinvoker: cached.v2 开始运行..
[2022-03-15 20:50:28.464949] INFO: backtest: 读取股票行情完成:1363246
[2022-03-15 20:50:29.890360] INFO: moduleinvoker: cached.v2 运行完成[5.801913s].
[2022-03-15 20:50:31.369695] INFO: algo: TradingAlgorithm V1.8.7
[2022-03-15 20:50:31.815974] INFO: algo: trading transform...
[2022-03-15 20:50:32.411487] INFO: Performance: Simulated 45 trading days out of 45.
[2022-03-15 20:50:32.413138] INFO: Performance: first open: 2022-01-04 09:30:00+00:00
[2022-03-15 20:50:32.414363] INFO: Performance: last close: 2022-03-14 15:00:00+00:00
[2022-03-15 20:50:36.372572] INFO: moduleinvoker: backtest.v8 运行完成[12.387672s].
[2022-03-15 20:50:36.375657] INFO: moduleinvoker: trade.v4 运行完成[14.668056s].