Factors: sample factors (18)
Factors standardized: yes
Label: 5-day forward return (no discretization)
Algorithm: LSTM
Problem type: regression
Training set: 2010–2016
Test set: 2016–2019
Stock selection: rank by predicted value in descending order and buy the top names
Number of holdings: 30
Holding period: 5 days
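For illustration only (this snippet is not part of the generated strategy code): a minimal pandas sketch, assuming a per-instrument DataFrame with open and close columns, of the 5-day forward-return label that the advanced_auto_labeler module computes below (sell at the close 5 days out, buy at the next day's open, scaled by 10).

import pandas as pd

# Toy price series for a single instrument (hypothetical numbers).
df = pd.DataFrame({
    'open':  [10.0, 10.2, 10.1, 10.4, 10.3, 10.6, 10.8],
    'close': [10.1, 10.0, 10.3, 10.5, 10.4, 10.7, 10.9],
})
# Same formula as the label expression used in module m2 below.
label = (df['close'].shift(-5) / df['open'].shift(-1) - 1) * 10
print(label)  # the last rows are NaN because the 5-day-ahead close does not exist yet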
# This code was generated automatically by the visual strategy environment on 2023-05-30 10:03
# This code cell can only be edited in visual mode. You can also copy the code into a new code cell or strategy and modify it there.
# Python entry function: input_1/2/3 correspond to the three input ports, data_1/2/3 to the three output ports
def m8_run_bigquant_run(input_1, input_2, input_3):
# Sample code follows; write your own code here
df = input_1.read_pickle()
feature_len = len(input_2.read_pickle())
# Reshape the flattened rolling window back to (samples, feature_len, window_size)
df['x'] = df['x'].reshape(df['x'].shape[0], int(feature_len), int(df['x'].shape[1]/feature_len))
data_1 = DataSource.write_pickle(df)
return Outputs(data_1=data_1)
# Optional post-processing function. Its input is the main function's output; you can process the data further here or return a friendlier outputs format. The output of this function is not cached.
def m8_post_run_bigquant_run(outputs):
return outputs
def m20_user_activity_regularizer_bigquant_run(weight_matrix):
# L1-style activity penalty: 0.01 * sum(|activations|)
from tensorflow.keras import backend as K
return 0.01 * K.sum(K.abs(weight_matrix))
# Python entry function: input_1/2/3 correspond to the three input ports, data_1/2/3 to the three output ports
def m4_run_bigquant_run(input_1, input_2, input_3):
# Sample code follows; write your own code here
df = input_1.read_pickle()
feature_len = len(input_2.read_pickle())
df['x'] = df['x'].reshape(df['x'].shape[0], int(feature_len), int(df['x'].shape[1]/feature_len))
data_1 = DataSource.write_pickle(df)
return Outputs(data_1=data_1)
# Optional post-processing function. Its input is the main function's output; you can process the data further here or return a friendlier outputs format. The output of this function is not cached.
def m4_post_run_bigquant_run(outputs):
return outputs
from tensorflow.keras import optimizers
m5_user_optimizer_bigquant_run=optimizers.Adam(lr=0.00085)
# User-defined custom layers must be registered in a dict, for example
# {
#     "MyLayer": MyLayer
# }
m5_custom_objects_bigquant_run = {
}
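# A minimal sketch (illustrative assumption, not part of this strategy): if the network below
# used a custom Keras layer, it would be defined and registered like this so that
# dl_model_train / dl_model_predict can deserialize the saved model:
#
#     import tensorflow as tf
#
#     class MyLayer(tf.keras.layers.Layer):
#         def call(self, inputs):
#             return inputs  # identity layer, purely for illustration
#
#     m5_custom_objects_bigquant_run = {"MyLayer": MyLayer}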
# Python entry function: input_1/2/3 correspond to the three input ports, data_1/2/3 to the three output ports
def m24_run_bigquant_run(input_1, input_2, input_3):
# Sample code follows; write your own code here
pred_label = input_1.read_pickle()
df = input_2.read_df()
df = pd.DataFrame({'pred_label':pred_label[:,0], 'instrument':df.instrument, 'date':df.date})
df.sort_values(['date','pred_label'],inplace=True, ascending=[True,False])
return Outputs(data_1=DataSource.write_df(df), data_2=None, data_3=None)
# Optional post-processing function. Its input is the main function's output; you can process the data further here or return a friendlier outputs format. The output of this function is not cached.
def m24_post_run_bigquant_run(outputs):
return outputs
# Backtest engine: initialization function, executed only once
def m40_initialize_bigquant_run(context):
# Load the prediction data
context.ranker_prediction = context.options['data'].read_df()
# Default commission and slippage are already set by the system; use the function below to change the commission
context.set_commission(PerOrder(buy_cost=0.0003, sell_cost=0.0013, min_cost=5))
from zipline.finance.slippage import SlippageModel
class FixedPriceSlippage(SlippageModel):
# Constructor
def __init__(self, spreads, price_field_buy, price_field_sell):
self.spreads = spreads
self._price_field_buy = price_field_buy
self._price_field_sell = price_field_sell
def process_order(self, data, order, bar_volume=0, trigger_check_price=0):
if order.limit is None:
price_field = self._price_field_buy if order.amount > 0 else self._price_field_sell
price_base = data.current(order.asset, price_field)
# Shift buy prices up by spreads (as a fraction) and sell prices down by spreads
price = price_base * (1.0 + self.spreads) if order.amount > 0 else price_base * (1.0 - self.spreads)
else:
price = order.limit
# Return the desired fill price and order quantity
return (price, order.amount)
# Set the slippage model
fix_slippage = FixedPriceSlippage(price_field_buy='open', price_field_sell='close',spreads=0)
context.set_slippage(us_equities=fix_slippage)
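# With spreads=0 the model above fills buy orders exactly at the bar's open and sell orders at the bar's close, i.e. no extra slippage beyond the chosen price fields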
# The prediction data is passed in via options and loaded into memory as a DataFrame with read_df
# Number of stocks to buy: here the top 10 names of the predicted ranking
stock_count = 10
# Per-stock weights: the log-based allocation below gives higher-ranked stocks a larger share of the cash
context.stock_weights = T.norm([1 / math.log(i + 2) for i in range(0, stock_count)])
# Alternatively, switch to equal weights:
#context.stock_weights = [1 / stock_count for i in range(0, stock_count)]
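# Worked example (sketch) of the weighting above; T.norm normalizes the list so it sums to 1:
#     raw = [1 / math.log(i + 2) for i in range(10)]    # 1/ln2, 1/ln3, ... ≈ 1.443, 0.910, 0.721, ...
#     weights = [w / sum(raw) for w in raw]             # ≈ [0.220, 0.139, 0.110, ...]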
# Maximum fraction of the portfolio that any single stock may occupy
context.max_cash_per_instrument = 0.1
context.options['hold_days'] = 4
# Backtest engine: daily bar-handling function, executed once per trading day
def m40_handle_data_bigquant_run(context, data):
#------------------- Market-level risk control ---------------------------#
# Get today's date
today = data.current_dt.strftime('%Y-%m-%d')
stock_hold_now = [equity.symbol for equity in context.portfolio.positions]
# Market risk-control module: read the pre-computed risk series
#benckmark_risk = context.benckmark_risk.loc[today]
# When risk is 1 the market is considered risky: liquidate all positions and skip the rest of the day
#if benckmark_risk > 0:
#for instrument in stock_hold_now:
#context.order_target(symbol(instrument), 0)
#print(today, 'Market risk-control stop triggered, selling all positions')
#return
#------------------- Market-level risk control ---------------------------#
# Keep only today's rows of the prediction data
ranker_prediction = context.ranker_prediction[
context.ranker_prediction.date == data.current_dt.strftime('%Y-%m-%d')]
# 1. Cash allocation
# The average holding period is hold_days and we buy every day, so each day we expect to deploy 1/hold_days of the capital
# In practice buys do not fill exactly, so during the first hold_days days we deploy an equal slice each day; after that we spend the remaining cash, capped here at 1.5x the equal slice
is_staging = context.trading_day_index < context.options['hold_days'] # still in the build-up phase (the first hold_days days)?
cash_avg = context.portfolio.portfolio_value / context.options['hold_days']
cash_for_buy = min(context.portfolio.cash, (1 if is_staging else 1.5) * cash_avg)
cash_for_sell = cash_avg - (context.portfolio.cash - cash_for_buy)
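# Worked example (sketch): at the initial capital_base of 5,000,000 with hold_days = 4, cash_avg = 1,250,000;
# during build-up buys are capped at min(cash, 1,250,000), afterwards at min(cash, 1,875,000)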
positions = {e.symbol: p.amount * p.last_sale_price
for e, p in context.perf_tracker.position_tracker.positions.items()}
#---------------------------START: take-profit / stop-loss module (also active during build-up)--------------------
# Today's take-profit and stop-loss lists are kept so the strategy logic below no longer considers those stocks
current_stopwin_stock=[]
current_stoploss_stock = []
today_date = data.current_dt.strftime('%Y-%m-%d')
positions_stop={e.symbol:p.cost_basis
for e,p in context.portfolio.positions.items()}
if len(positions_stop)>0:
for i in positions_stop.keys():
stock_cost=positions_stop[i]
stock_market_price=data.current(context.symbol(i),'price')
# Take profit when the position is up more than 25% and the stock is tradable
if stock_market_price/stock_cost-1>0.25 and data.can_trade(context.symbol(i)) and not context.has_unfinished_sell_order(i):
context.order_target_percent(context.symbol(i),0)
current_stopwin_stock.append(i)
# Stop loss when the position is down 10% or more and the stock is tradable
if stock_market_price/stock_cost-1 <= -0.1 and data.can_trade(context.symbol(i)) and not context.has_unfinished_sell_order(i):
context.order_target_percent(context.symbol(i),0)
current_stoploss_stock.append(i)
if len(current_stopwin_stock)>0:
print(today_date, 'Take-profit list', current_stopwin_stock)
if len(current_stoploss_stock)>0:
print(today_date, 'Stop-loss list', current_stoploss_stock)
#-------------------------- END: take-profit / stop-loss module -----------------------------
# 2. Generate sell orders: selling starts only after the first hold_days days; held stocks at the bottom of the model's predicted ranking are dropped first
if not is_staging and cash_for_sell > 0:
equities = {e.symbol: e for e, p in context.perf_tracker.position_tracker.positions.items()}
instruments = list(reversed(list(ranker_prediction.instrument[ranker_prediction.instrument.apply(
lambda x: x in equities and not context.has_unfinished_sell_order(equities[x]))])))
# print('rank order for sell %s' % instruments)
for instrument in instruments:
context.order_target(context.symbol(instrument), 0)
cash_for_sell -= positions[instrument]
if cash_for_sell <= 0:
break
# 3. Generate buy orders: buy the top stock_count names of the predicted ranking
buy_cash_weights = context.stock_weights
buy_instruments = list(ranker_prediction.instrument[:len(buy_cash_weights)])
max_cash_per_instrument = context.portfolio.portfolio_value * context.max_cash_per_instrument
for i, instrument in enumerate(buy_instruments):
cash = cash_for_buy * buy_cash_weights[i]
if cash > max_cash_per_instrument - positions.get(instrument, 0):
# Make sure a single position never exceeds the per-stock cash cap
cash = max_cash_per_instrument - positions.get(instrument, 0)
if cash > 0:
context.order_value(context.symbol(instrument), cash)
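# Worked example (sketch): at the initial portfolio value of 5,000,000 with max_cash_per_instrument = 0.1,
# no single stock may tie up more than about 500,000 of portfolio value at order time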
# Backtest engine: data-preparation function, executed only once
def m40_prepare_bigquant_run(context):
# Computing the daily market risk-control flag once here, instead of every day in handle_data, speeds up the backtest
# Fetch an extra 50 days of data so the averages are not NaN on the first backtest day;
# context.start_date and context.end_date are the backtest start and end dates
start_date= (pd.to_datetime(context.start_date) - datetime.timedelta(days=50)).strftime('%Y-%m-%d')
df=DataSource('bar1d_index_CN_STOCK_A').read(start_date=start_date,end_date=context.end_date,fields=['close'])
# The SSE Composite index (000001.HIX) is used as an example
benckmark_data=df[df.instrument=='000001.HIX']
# 5-day gain of the index: today's close relative to the average of the previous four closes
benckmark_data['ret5']=4*benckmark_data['close']/(benckmark_data['close'].shift(4)+benckmark_data['close'].shift(3)+benckmark_data['close'].shift(2)+benckmark_data['close'].shift(1))-1
# Risk flag: 1 when the 5-day gain is below -2%, otherwise 0
benckmark_data['risk'] = np.where(benckmark_data['ret5']<-0.02,1,0)
# Use the date as the index
benckmark_data.set_index('date',inplace=True)
# Expose the risk series to the strategy through context.benckmark_risk
context.benckmark_risk=benckmark_data['risk']
g = T.Graph({
'm3': 'M.input_features.v1',
'm3.features': """mf_net_pct_s_0
mean(where(close_0>open_0, ((close_0-low_0)/(high_0-low_0))*(turn_0/avg_turn_5), -1*((high_0-close_0)/(high_0-low_0))*(turn_0/avg_turn_5)),5)
(high_0-close_0)/(high_0-low_0)
sum(where(close_0>open_0, ((close_0-low_0)/(high_0-low_0))*(turn_0/avg_turn_5), -1*((high_0-close_0)/(high_0-low_0))*(turn_0/avg_turn_5)),5)
ta_bbands_u(close_0, 5)
(high_0-open_0)/close_0
(close_0-ts_min(close_0, 5))/close_0
(close_0-open_4)/close_0
(ts_max(high_0, 5)-close_0)/close_0
(ts_max(high_0, 5)-open_4)/close_0
(open_4-ts_min(low_0, 5))/close_0
""",
'm43': 'M.input_features.v1',
'm43.features_ds': T.Graph.OutputPort('m3.data'),
'm43.features': 'market_cap_0',
'm9': 'M.instruments.v2',
'm9.start_date': '2017-01-01',
'm9.end_date': T.live_run_param('trading_date', '2019-12-31'),
'm9.market': 'CN_STOCK_A',
'm9.instrument_list': '',
'm9.max_count': 0,
'm17': 'M.general_feature_extractor.v7',
'm17.instruments': T.Graph.OutputPort('m9.data'),
'm17.features': T.Graph.OutputPort('m43.data'),
'm17.start_date': '',
'm17.end_date': '',
'm17.before_start_days': 90,
'm18': 'M.derived_feature_extractor.v3',
'm18.input_data': T.Graph.OutputPort('m17.data'),
'm18.features': T.Graph.OutputPort('m43.data'),
'm18.date_col': 'date',
'm18.instrument_col': 'instrument',
'm18.drop_na': True,
'm18.remove_extra_columns': False,
'm45': 'M.filter.v3',
'm45.input_data': T.Graph.OutputPort('m18.data'),
'm45.expr': 'market_cap_0>2000000000',
'm45.output_left_data': False,
'm25': 'M.standardlize.v8',
'm25.input_1': T.Graph.OutputPort('m45.data'),
'm25.input_2': T.Graph.OutputPort('m3.data'),
'm25.columns_input': '[]',
'm37': 'M.aa.v5',
'm37.input_data': T.Graph.OutputPort('m25.data'),
'm37.day_number': 150,
'm35': 'M.chinaa_stock_filter.v1',
'm35.input_data': T.Graph.OutputPort('m37.data'),
'm35.index_constituent_cond': ['中证500'],
'm35.board_cond': ['全部'],
'm35.industry_cond': ['全部'],
'm35.st_cond': ['正常'],
'm35.delist_cond': ['全部'],
'm35.output_left_data': False,
'm55': 'M.dropnan.v2',
'm55.input_data': T.Graph.OutputPort('m35.data'),
'm27': 'M.dl_convert_to_bin.v2',
'm27.input_data': T.Graph.OutputPort('m55.data'),
'm27.features': T.Graph.OutputPort('m3.data'),
'm27.window_size': 5,
'm27.feature_clip': 3,
'm27.flatten': True,
'm27.window_along_col': 'instrument',
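# dl_convert_to_bin flattens each (11 factors x 5 days) window into a 55-wide row; the custom m8/m4 modules below reshape it back to (samples, 11, 5)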
'm8': 'M.cached.v3',
'm8.input_1': T.Graph.OutputPort('m27.data'),
'm8.input_2': T.Graph.OutputPort('m3.data'),
'm8.run': m8_run_bigquant_run,
'm8.post_run': m8_post_run_bigquant_run,
'm8.input_ports': '',
'm8.params': '{}',
'm8.output_ports': '',
'm6': 'M.dl_layer_input.v1',
'm6.shape': '11,5',
'm6.batch_shape': '',
'm6.dtype': 'float32',
'm6.sparse': False,
'm6.name': '',
'm23': 'M.dl_layer_reshape.v1',
'm23.inputs': T.Graph.OutputPort('m6.data'),
'm23.target_shape': '11,5,1',
'm23.name': '',
'm28': 'M.dl_layer_conv2d.v1',
'm28.inputs': T.Graph.OutputPort('m23.data'),
'm28.filters': 40,
'm28.kernel_size': '1,5',
'm28.strides': '1,1',
'm28.padding': 'valid',
'm28.data_format': 'channels_last',
'm28.dilation_rate': '1,1',
'm28.activation': 'relu',
'm28.use_bias': True,
'm28.kernel_initializer': 'glorot_uniform',
'm28.bias_initializer': 'Zeros',
'm28.kernel_regularizer': 'None',
'm28.kernel_regularizer_l1': 0,
'm28.kernel_regularizer_l2': 0,
'm28.bias_regularizer': 'None',
'm28.bias_regularizer_l1': 0,
'm28.bias_regularizer_l2': 0,
'm28.activity_regularizer': 'None',
'm28.activity_regularizer_l1': 0,
'm28.activity_regularizer_l2': 0,
'm28.kernel_constraint': 'None',
'm28.bias_constraint': 'None',
'm28.name': '',
'm29': 'M.dl_layer_reshape.v1',
'm29.inputs': T.Graph.OutputPort('m28.data'),
'm29.target_shape': '1,440',
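# 440 = 11 rows (after the (1,5) 'valid' convolution) x 40 filters: the conv output of shape (11, 1, 40) is flattened into a length-1 sequence of 440 features for the LSTM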
'm29.name': '',
'm10': 'M.dl_layer_lstm.v1',
'm10.inputs': T.Graph.OutputPort('m29.data'),
'm10.units': 60,
'm10.activation': 'tanh',
'm10.recurrent_activation': 'hard_sigmoid',
'm10.use_bias': True,
'm10.kernel_initializer': 'glorot_uniform',
'm10.recurrent_initializer': 'Orthogonal',
'm10.bias_initializer': 'Zeros',
'm10.unit_forget_bias': True,
'm10.kernel_regularizer': 'None',
'm10.kernel_regularizer_l1': 0,
'm10.kernel_regularizer_l2': 0,
'm10.recurrent_regularizer': 'None',
'm10.recurrent_regularizer_l1': 0,
'm10.recurrent_regularizer_l2': 0.01,
'm10.bias_regularizer': 'None',
'm10.bias_regularizer_l1': 0,
'm10.bias_regularizer_l2': 0,
'm10.activity_regularizer': 'None',
'm10.activity_regularizer_l2': 0.01,
'm10.kernel_constraint': 'None',
'm10.recurrent_constraint': 'None',
'm10.bias_constraint': 'None',
'm10.dropout': 0,
'm10.recurrent_dropout': 0.5,
'm10.return_sequences': False,
'm10.implementation': '2',
'm10.name': '',
'm41': 'M.dl_layer_reshape.v1',
'm41.inputs': T.Graph.OutputPort('m10.data'),
'm41.target_shape': '1,60',
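# The LSTM above returns a single 60-dim vector (return_sequences=False); reshaping it to (1, 60) turns it into a length-1 sequence so it can feed the next LSTM layer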
'm41.name': '',
'm39': 'M.dl_layer_lstm.v1',
'm39.inputs': T.Graph.OutputPort('m41.data'),
'm39.units': 60,
'm39.activation': 'tanh',
'm39.recurrent_activation': 'hard_sigmoid',
'm39.use_bias': True,
'm39.kernel_initializer': 'glorot_uniform',
'm39.recurrent_initializer': 'Orthogonal',
'm39.bias_initializer': 'Zeros',
'm39.unit_forget_bias': True,
'm39.kernel_regularizer': 'None',
'm39.kernel_regularizer_l1': 0.005,
'm39.kernel_regularizer_l2': 0.005,
'm39.recurrent_regularizer': 'None',
'm39.recurrent_regularizer_l1': 0,
'm39.recurrent_regularizer_l2': 0.01,
'm39.bias_regularizer': 'None',
'm39.bias_regularizer_l1': 0,
'm39.bias_regularizer_l2': 0,
'm39.activity_regularizer': 'None',
'm39.activity_regularizer_l2': 0.01,
'm39.kernel_constraint': 'None',
'm39.recurrent_constraint': 'None',
'm39.bias_constraint': 'None',
'm39.dropout': 0,
'm39.recurrent_dropout': 0.5,
'm39.return_sequences': False,
'm39.implementation': '2',
'm39.name': '',
'm38': 'M.dl_layer_reshape.v1',
'm38.inputs': T.Graph.OutputPort('m39.data'),
'm38.target_shape': '1,60',
'm38.name': '',
'm57': 'M.dl_layer_lstm.v1',
'm57.inputs': T.Graph.OutputPort('m38.data'),
'm57.units': 60,
'm57.activation': 'tanh',
'm57.recurrent_activation': 'hard_sigmoid',
'm57.use_bias': True,
'm57.kernel_initializer': 'glorot_uniform',
'm57.recurrent_initializer': 'Orthogonal',
'm57.bias_initializer': 'Zeros',
'm57.unit_forget_bias': True,
'm57.kernel_regularizer': 'None',
'm57.kernel_regularizer_l1': 0.005,
'm57.kernel_regularizer_l2': 0.005,
'm57.recurrent_regularizer': 'None',
'm57.recurrent_regularizer_l1': 0,
'm57.recurrent_regularizer_l2': 0.01,
'm57.bias_regularizer': 'None',
'm57.bias_regularizer_l1': 0,
'm57.bias_regularizer_l2': 0,
'm57.activity_regularizer': 'None',
'm57.activity_regularizer_l2': 0.01,
'm57.kernel_constraint': 'None',
'm57.recurrent_constraint': 'None',
'm57.bias_constraint': 'None',
'm57.dropout': 0,
'm57.recurrent_dropout': 0.5,
'm57.return_sequences': False,
'm57.implementation': '2',
'm57.name': '',
'm12': 'M.dl_layer_dropout.v1',
'm12.inputs': T.Graph.OutputPort('m57.data'),
'm12.rate': 0.9,
'm12.noise_shape': '',
'm12.name': 'dropout1',
'm20': 'M.dl_layer_dense.v1',
'm20.inputs': T.Graph.OutputPort('m12.data'),
'm20.units': 80,
'm20.activation': 'tanh',
'm20.use_bias': True,
'm20.kernel_initializer': 'glorot_uniform',
'm20.bias_initializer': 'Zeros',
'm20.kernel_regularizer': 'None',
'm20.kernel_regularizer_l1': 0,
'm20.kernel_regularizer_l2': 0,
'm20.bias_regularizer': 'None',
'm20.bias_regularizer_l1': 0,
'm20.bias_regularizer_l2': 0,
'm20.activity_regularizer': 'L1L2',
'm20.activity_regularizer_l1': 0.005,
'm20.activity_regularizer_l2': 0.005,
'm20.user_activity_regularizer': m20_user_activity_regularizer_bigquant_run,
'm20.kernel_constraint': 'None',
'm20.bias_constraint': 'None',
'm20.name': '',
'm21': 'M.dl_layer_dropout.v1',
'm21.inputs': T.Graph.OutputPort('m20.data'),
'm21.rate': 0.9,
'm21.noise_shape': '',
'm21.name': 'dropout2',
'm30': 'M.dl_layer_dense.v1',
'm30.inputs': T.Graph.OutputPort('m21.data'),
'm30.units': 60,
'm30.activation': 'tanh',
'm30.use_bias': True,
'm30.kernel_initializer': 'glorot_uniform',
'm30.bias_initializer': 'Zeros',
'm30.kernel_regularizer': 'None',
'm30.kernel_regularizer_l1': 0.001,
'm30.kernel_regularizer_l2': 0.001,
'm30.bias_regularizer': 'None',
'm30.bias_regularizer_l1': 0,
'm30.bias_regularizer_l2': 0,
'm30.activity_regularizer': 'L1L2',
'm30.activity_regularizer_l1': 0.005,
'm30.activity_regularizer_l2': 0.005,
'm30.kernel_constraint': 'None',
'm30.bias_constraint': 'None',
'm30.name': '',
'm56': 'M.dl_layer_dropout.v1',
'm56.inputs': T.Graph.OutputPort('m30.data'),
'm56.rate': 0.8,
'm56.noise_shape': '',
'm56.name': 'dropout2',
'm59': 'M.dl_layer_dense.v1',
'm59.inputs': T.Graph.OutputPort('m56.data'),
'm59.units': 40,
'm59.activation': 'tanh',
'm59.use_bias': True,
'm59.kernel_initializer': 'glorot_uniform',
'm59.bias_initializer': 'Zeros',
'm59.kernel_regularizer': 'None',
'm59.kernel_regularizer_l1': 0.001,
'm59.kernel_regularizer_l2': 0.001,
'm59.bias_regularizer': 'None',
'm59.bias_regularizer_l1': 0,
'm59.bias_regularizer_l2': 0,
'm59.activity_regularizer': 'L1L2',
'm59.activity_regularizer_l1': 0.005,
'm59.activity_regularizer_l2': 0.005,
'm59.kernel_constraint': 'None',
'm59.bias_constraint': 'None',
'm59.name': '',
'm22': 'M.dl_layer_dense.v1',
'm22.inputs': T.Graph.OutputPort('m59.data'),
'm22.units': 1,
'm22.activation': 'tanh',
'm22.use_bias': True,
'm22.kernel_initializer': 'glorot_uniform',
'm22.bias_initializer': 'Zeros',
'm22.kernel_regularizer': 'None',
'm22.kernel_regularizer_l1': 0.003,
'm22.kernel_regularizer_l2': 0.003,
'm22.bias_regularizer': 'None',
'm22.bias_regularizer_l1': 0,
'm22.bias_regularizer_l2': 0,
'm22.activity_regularizer': 'L1L2',
'm22.activity_regularizer_l1': 0.005,
'm22.activity_regularizer_l2': 0.005,
'm22.kernel_constraint': 'None',
'm22.bias_constraint': 'None',
'm22.name': '',
'm34': 'M.dl_model_init.v1',
'm34.inputs': T.Graph.OutputPort('m6.data'),
'm34.outputs': T.Graph.OutputPort('m22.data'),
'm31': 'M.instruments.v2',
'm31.start_date': '2014-01-01',
'm31.end_date': '2016-12-31',
'm31.market': 'CN_STOCK_A',
'm31.instrument_list': '',
'm31.max_count': 0,
'm2': 'M.advanced_auto_labeler.v2',
'm2.instruments': T.Graph.OutputPort('m31.data'),
'm2.label_expr': """# Lines starting with # are comments
# 0. One expression per line, executed in order; from the second line on the label field can be used
# 1. Available data fields: https://bigquant.com/docs/data_history_data.html
#    Prefix a field with benchmark_ to use the corresponding benchmark data
# 2. Available operators and functions: `expression engine <https://bigquant.com/docs/big_expr.html>`_
# Return: the close 5 days out (sell price) divided by the next day's open (buy price), minus 1, scaled by 10
(shift(close, -5) / shift(open, -1) - 1)*10
# Clip extremes at the 1% and 99% quantiles
clip(label, all_quantile(label, 0.01), all_quantile(label, 0.99))
# Filter out one-way limit-up days (set the label to NaN; NaN labels are ignored in later processing and training)
where(shift(high, -1) == shift(low, -1), NaN, label)
""",
'm2.start_date': '',
'm2.end_date': '',
'm2.benchmark': '000300.SHA',
'm2.drop_na_label': True,
'm2.cast_label_int': False,
'm13': 'M.standardlize.v8',
'm13.input_1': T.Graph.OutputPort('m2.data'),
'm13.columns_input': 'label',
'm15': 'M.general_feature_extractor.v7',
'm15.instruments': T.Graph.OutputPort('m31.data'),
'm15.features': T.Graph.OutputPort('m43.data'),
'm15.start_date': '',
'm15.end_date': '',
'm15.before_start_days': 90,
'm16': 'M.derived_feature_extractor.v3',
'm16.input_data': T.Graph.OutputPort('m15.data'),
'm16.features': T.Graph.OutputPort('m43.data'),
'm16.date_col': 'date',
'm16.instrument_col': 'instrument',
'm16.drop_na': True,
'm16.remove_extra_columns': False,
'm44': 'M.filter.v3',
'm44.input_data': T.Graph.OutputPort('m16.data'),
'm44.expr': 'market_cap_0>2000000000',
'm44.output_left_data': False,
'm14': 'M.standardlize.v8',
'm14.input_1': T.Graph.OutputPort('m44.data'),
'm14.input_2': T.Graph.OutputPort('m3.data'),
'm14.columns_input': '[]',
'm7': 'M.join.v3',
'm7.data1': T.Graph.OutputPort('m13.data'),
'm7.data2': T.Graph.OutputPort('m14.data'),
'm7.on': 'date,instrument',
'm7.how': 'inner',
'm7.sort': False,
'm36': 'M.aa.v5',
'm36.input_data': T.Graph.OutputPort('m7.data'),
'm36.day_number': 150,
'm33': 'M.chinaa_stock_filter.v1',
'm33.input_data': T.Graph.OutputPort('m36.data'),
'm33.index_constituent_cond': ['中证500'],
'm33.board_cond': ['全部'],
'm33.industry_cond': ['全部'],
'm33.st_cond': ['正常'],
'm33.delist_cond': ['全部'],
'm33.output_left_data': False,
'm54': 'M.dropnan.v2',
'm54.input_data': T.Graph.OutputPort('m33.data'),
'm26': 'M.dl_convert_to_bin.v2',
'm26.input_data': T.Graph.OutputPort('m54.data'),
'm26.features': T.Graph.OutputPort('m3.data'),
'm26.window_size': 5,
'm26.feature_clip': 3,
'm26.flatten': True,
'm26.window_along_col': 'instrument',
'm4': 'M.cached.v3',
'm4.input_1': T.Graph.OutputPort('m26.data'),
'm4.input_2': T.Graph.OutputPort('m3.data'),
'm4.run': m4_run_bigquant_run,
'm4.post_run': m4_post_run_bigquant_run,
'm4.input_ports': '',
'm4.params': '{}',
'm4.output_ports': '',
'm5': 'M.dl_model_train.v1',
'm5.input_model': T.Graph.OutputPort('m34.data'),
'm5.training_data': T.Graph.OutputPort('m4.data_1'),
'm5.optimizer': '自定义',
'm5.user_optimizer': m5_user_optimizer_bigquant_run,
'm5.loss': 'mean_squared_error',
'm5.metrics': 'mae',
'm5.batch_size': 2048,
'm5.epochs': 1,
'm5.custom_objects': m5_custom_objects_bigquant_run,
'm5.n_gpus': 0,
'm5.verbose': '2:每个epoch输出一行记录',
'm11': 'M.dl_model_predict.v1',
'm11.trained_model': T.Graph.OutputPort('m5.data'),
'm11.input_data': T.Graph.OutputPort('m8.data_1'),
'm11.batch_size': 1024,
'm11.n_gpus': 0,
'm11.verbose': '2:每个epoch输出一行记录',
'm24': 'M.cached.v3',
'm24.input_1': T.Graph.OutputPort('m11.data'),
'm24.input_2': T.Graph.OutputPort('m55.data'),
'm24.run': m24_run_bigquant_run,
'm24.post_run': m24_post_run_bigquant_run,
'm24.input_ports': '',
'm24.params': '{}',
'm24.output_ports': '',
'm40': 'M.trade.v4',
'm40.instruments': T.Graph.OutputPort('m9.data'),
'm40.options_data': T.Graph.OutputPort('m24.data_1'),
'm40.start_date': '',
'm40.end_date': '',
'm40.initialize': m40_initialize_bigquant_run,
'm40.handle_data': m40_handle_data_bigquant_run,
'm40.prepare': m40_prepare_bigquant_run,
'm40.volume_limit': 0.025,
'm40.order_price_field_buy': 'open',
'm40.order_price_field_sell': 'close',
'm40.capital_base': 5000000,
'm40.auto_cancel_non_tradable_orders': True,
'm40.data_frequency': 'daily',
'm40.price_type': '真实价格',
'm40.product_type': '股票',
'm40.plot_charts': True,
'm40.backtest_only': False,
'm40.benchmark': '000905.SHA',
})
# g.run({})
def m42_run_bigquant_run(
bq_graph,
inputs,
trading_days_market='CN', # which market's trading calendar to use
train_instruments_mid='m31', # module id of the training-data instrument list
test_instruments_mid='m9', # module id of the test-data instrument list
predict_mid='m24', # module id of the prediction module
trade_mid='m40', # module id of the backtest module
start_date='2022-07-30', # data start date
end_date=T.live_run_param('trading_date', '2023-05-29'), # data end date
train_update_days=30, # update period in trading days: retrain every this many days
train_update_days_for_live=30, # update period in trading days for paper/live trading; set this if a different retraining period is needed in live mode
train_data_min_days=125, # minimum amount of data in trading days, so the first rolling window ends at the start date plus this many trading days
train_data_max_days=125, # maximum amount of data in trading days; 0 means no limit, otherwise each window starts at max(window end - max days, overall start)
rolling_count_for_live=1, # number of rolling windows in live mode (the last N are kept); usually only the last trained model is needed, so 1 is enough; increase it if a single window might contain no training data; 0 means no limit
):
def merge_datasources(input_1):
df_list = [ds.read_df() for ds in input_1]
df = pd.concat(df_list)
instrument_data = {
'start_date': df['date'].min().strftime('%Y-%m-%d'),
'end_date': df['date'].max().strftime('%Y-%m-%d'),
'instruments': list(set(df['instrument'])),
}
return Outputs(data=DataSource.write_df(df), instrument_data=DataSource.write_pickle(instrument_data))
def gen_rolling_dates(trading_days_market, start_date, end_date, train_update_days, train_update_days_for_live, train_data_min_days, train_data_max_days, rolling_count_for_live):
# Detect whether this is a paper/live-trading run
tdays = list(D.trading_days(market=trading_days_market, start_date=start_date, end_date=end_date)['date'])
is_live_run = T.live_run_param('trading_date', None) is not None
if is_live_run and train_update_days_for_live:
train_update_days = train_update_days_for_live
rollings = []
train_end_date = train_data_min_days
while train_end_date < len(tdays):
if train_data_max_days is not None and train_data_max_days > 0:
train_start_date = max(train_end_date - train_data_max_days, 0)
else:
# 0 means no limit on the training window: start from the first trading day
train_start_date = 0
rollings.append({
'train_start_date': tdays[train_start_date].strftime('%Y-%m-%d'),
'train_end_date': tdays[train_end_date - 1].strftime('%Y-%m-%d'),
'test_start_date': tdays[train_end_date].strftime('%Y-%m-%d'),
'test_end_date': tdays[min(train_end_date + train_update_days, len(tdays)) - 1].strftime('%Y-%m-%d'),
})
train_end_date += train_update_days
if not rollings:
raise Exception('No rolling windows to run; please check the configuration')
if is_live_run and rolling_count_for_live:
rollings = rollings[-rolling_count_for_live:]
return rollings
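# Worked example (sketch) of the schedule produced above with the defaults
# train_data_min_days=125, train_data_max_days=125, train_update_days=30:
#   window 1: train on trading days [0, 124], test on [125, 154]
#   window 2: train on trading days [30, 154], test on [155, 184]
#   ... and so on until the trading-day list is exhausted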
g = bq_graph
rolling_dates = gen_rolling_dates(
trading_days_market, start_date, end_date, train_update_days, train_update_days_for_live, train_data_min_days, train_data_max_days, rolling_count_for_live)
# Train and predict for each rolling window
results = []
for rolling in rolling_dates:
parameters = {}
# Disable the backtest module for the rolling runs
parameters[trade_mid + '.__enabled__'] = False
parameters[train_instruments_mid + '.start_date'] = rolling['train_start_date']
parameters[train_instruments_mid + '.end_date'] = rolling['train_end_date']
parameters[test_instruments_mid + '.start_date'] = rolling['test_start_date']
parameters[test_instruments_mid + '.end_date'] = rolling['test_end_date']
# print('------ rolling_train:', parameters)
results.append(g.run(parameters))
# Merge the per-window predictions and run the backtest
mx = M.cached.v3(run=merge_datasources, input_1=[result[predict_mid].data_1 for result in results])
parameters = {}
parameters['*.__enabled__'] = False
parameters[trade_mid + '.__enabled__'] = True
parameters[trade_mid + '.instruments'] = mx.instrument_data
parameters[trade_mid + '.options_data'] = mx.data
trade = g.run(parameters)
return {'rollings': results, 'trade': trade}
m42 = M.hyper_rolling_train.v1(
run=m42_run_bigquant_run,
run_now=True,
bq_graph=g
)
[2023-05-30 10:02:26.415814] INFO: moduleinvoker: input_features.v1 开始运行..
[2023-05-30 10:02:26.423876] INFO: moduleinvoker: 命中缓存
[2023-05-30 10:02:26.426268] INFO: moduleinvoker: input_features.v1 运行完成[0.010483s].
[2023-05-30 10:02:26.433222] INFO: moduleinvoker: instruments.v2 开始运行..
[2023-05-30 10:02:26.440889] INFO: moduleinvoker: 命中缓存
[2023-05-30 10:02:26.442704] INFO: moduleinvoker: instruments.v2 运行完成[0.009495s].
[2023-05-30 10:02:26.451798] INFO: moduleinvoker: dl_layer_input.v1 运行完成[0.001555s].
[2023-05-30 10:02:26.459012] INFO: moduleinvoker: instruments.v2 开始运行..
[2023-05-30 10:02:26.467944] INFO: moduleinvoker: 命中缓存
[2023-05-30 10:02:26.469965] INFO: moduleinvoker: instruments.v2 运行完成[0.010964s].
[2023-05-30 10:02:26.474898] INFO: moduleinvoker: input_features.v1 开始运行..
[2023-05-30 10:02:26.484671] INFO: moduleinvoker: 命中缓存
[2023-05-30 10:02:26.486423] INFO: moduleinvoker: input_features.v1 运行完成[0.011542s].
[2023-05-30 10:02:26.514505] INFO: moduleinvoker: dl_layer_reshape.v1 运行完成[0.011503s].
[2023-05-30 10:02:26.524372] INFO: moduleinvoker: advanced_auto_labeler.v2 开始运行..
[2023-05-30 10:02:26.532554] INFO: moduleinvoker: 命中缓存
[2023-05-30 10:02:26.534871] INFO: moduleinvoker: advanced_auto_labeler.v2 运行完成[0.01049s].
[2023-05-30 10:02:26.552773] INFO: moduleinvoker: general_feature_extractor.v7 开始运行..
[2023-05-30 10:02:26.560190] INFO: moduleinvoker: 命中缓存
[2023-05-30 10:02:26.562173] INFO: moduleinvoker: general_feature_extractor.v7 运行完成[0.009427s].
[2023-05-30 10:02:26.577255] INFO: moduleinvoker: general_feature_extractor.v7 开始运行..
[2023-05-30 10:02:26.584975] INFO: moduleinvoker: 命中缓存
[2023-05-30 10:02:26.587553] INFO: moduleinvoker: general_feature_extractor.v7 运行完成[0.010313s].
[2023-05-30 10:02:26.608852] INFO: moduleinvoker: dl_layer_conv2d.v1 运行完成[0.014848s].
[2023-05-30 10:02:26.614812] INFO: moduleinvoker: standardlize.v8 开始运行..
[2023-05-30 10:02:26.622723] INFO: moduleinvoker: 命中缓存
[2023-05-30 10:02:26.624497] INFO: moduleinvoker: standardlize.v8 运行完成[0.009691s].
[2023-05-30 10:02:26.632690] INFO: moduleinvoker: derived_feature_extractor.v3 开始运行..
[2023-05-30 10:02:26.639483] INFO: moduleinvoker: 命中缓存
[2023-05-30 10:02:26.641681] INFO: moduleinvoker: derived_feature_extractor.v3 运行完成[0.00898s].
[2023-05-30 10:02:26.649578] INFO: moduleinvoker: derived_feature_extractor.v3 开始运行..
[2023-05-30 10:02:26.656416] INFO: moduleinvoker: 命中缓存
[2023-05-30 10:02:26.658634] INFO: moduleinvoker: derived_feature_extractor.v3 运行完成[0.009051s].
[2023-05-30 10:02:26.676068] INFO: moduleinvoker: dl_layer_reshape.v1 运行完成[0.011157s].
[2023-05-30 10:02:26.685793] INFO: moduleinvoker: filter.v3 开始运行..
[2023-05-30 10:02:26.693182] INFO: moduleinvoker: 命中缓存
[2023-05-30 10:02:26.695355] INFO: moduleinvoker: filter.v3 运行完成[0.009561s].
[2023-05-30 10:02:26.705697] INFO: moduleinvoker: filter.v3 开始运行..
[2023-05-30 10:02:26.712710] INFO: moduleinvoker: 命中缓存
[2023-05-30 10:02:26.714927] INFO: moduleinvoker: filter.v3 运行完成[0.009227s].
[2023-05-30 10:02:26.953926] INFO: moduleinvoker: dl_layer_lstm.v1 运行完成[0.231935s].
[2023-05-30 10:02:26.960211] INFO: moduleinvoker: standardlize.v8 开始运行..
[2023-05-30 10:02:26.970607] INFO: moduleinvoker: 命中缓存
[2023-05-30 10:02:26.972909] INFO: moduleinvoker: standardlize.v8 运行完成[0.012698s].
[2023-05-30 10:02:26.981545] INFO: moduleinvoker: standardlize.v8 开始运行..
[2023-05-30 10:02:26.989634] INFO: moduleinvoker: 命中缓存
[2023-05-30 10:02:26.991589] INFO: moduleinvoker: standardlize.v8 运行完成[0.010064s].
[2023-05-30 10:02:27.010687] INFO: moduleinvoker: dl_layer_reshape.v1 运行完成[0.011563s].
[2023-05-30 10:02:27.021570] INFO: moduleinvoker: aa.v5 开始运行..
[2023-05-30 10:02:27.029332] INFO: moduleinvoker: 命中缓存
[2023-05-30 10:02:27.031232] INFO: moduleinvoker: aa.v5 运行完成[0.009668s].
[2023-05-30 10:02:27.040754] INFO: moduleinvoker: join.v3 开始运行..
[2023-05-30 10:02:27.048877] INFO: moduleinvoker: 命中缓存
[2023-05-30 10:02:27.050804] INFO: moduleinvoker: join.v3 运行完成[0.010056s].
[2023-05-30 10:02:27.296589] INFO: moduleinvoker: dl_layer_lstm.v1 运行完成[0.239353s].
[2023-05-30 10:02:27.307491] INFO: moduleinvoker: chinaa_stock_filter.v1 开始运行..
[2023-05-30 10:02:27.315058] INFO: moduleinvoker: 命中缓存
[2023-05-30 10:02:27.317711] INFO: moduleinvoker: chinaa_stock_filter.v1 运行完成[0.010219s].
[2023-05-30 10:02:27.327219] INFO: moduleinvoker: aa.v5 开始运行..
[2023-05-30 10:02:27.333641] INFO: moduleinvoker: 命中缓存
[2023-05-30 10:02:27.335239] INFO: moduleinvoker: aa.v5 运行完成[0.008036s].
[2023-05-30 10:02:27.352287] INFO: moduleinvoker: dl_layer_reshape.v1 运行完成[0.010801s].
[2023-05-30 10:02:27.361972] INFO: moduleinvoker: dropnan.v2 开始运行..
[2023-05-30 10:02:27.369266] INFO: moduleinvoker: 命中缓存
[2023-05-30 10:02:27.371349] INFO: moduleinvoker: dropnan.v2 运行完成[0.00938s].
[2023-05-30 10:02:27.382798] INFO: moduleinvoker: chinaa_stock_filter.v1 开始运行..
[2023-05-30 10:02:27.391498] INFO: moduleinvoker: 命中缓存
[2023-05-30 10:02:27.394404] INFO: moduleinvoker: chinaa_stock_filter.v1 运行完成[0.011611s].
[2023-05-30 10:02:27.621899] INFO: moduleinvoker: dl_layer_lstm.v1 运行完成[0.219184s].
[2023-05-30 10:02:27.638424] INFO: moduleinvoker: dl_convert_to_bin.v2 开始运行..
[2023-05-30 10:02:27.646432] INFO: moduleinvoker: 命中缓存
[2023-05-30 10:02:27.648852] INFO: moduleinvoker: dl_convert_to_bin.v2 运行完成[0.010438s].
[2023-05-30 10:02:27.663386] INFO: moduleinvoker: dropnan.v2 开始运行..
[2023-05-30 10:02:27.672493] INFO: moduleinvoker: 命中缓存
[2023-05-30 10:02:27.674366] INFO: moduleinvoker: dropnan.v2 运行完成[0.010985s].
[2023-05-30 10:02:27.686675] INFO: moduleinvoker: dl_layer_dropout.v1 运行完成[0.004541s].
[2023-05-30 10:02:27.705505] INFO: moduleinvoker: cached.v3 开始运行..
[2023-05-30 10:02:27.715589] INFO: moduleinvoker: 命中缓存
[2023-05-30 10:02:27.717983] INFO: moduleinvoker: cached.v3 运行完成[0.012506s].
[2023-05-30 10:02:27.740680] INFO: moduleinvoker: dl_convert_to_bin.v2 开始运行..
[2023-05-30 10:02:27.750596] INFO: moduleinvoker: 命中缓存
[2023-05-30 10:02:27.753096] INFO: moduleinvoker: dl_convert_to_bin.v2 运行完成[0.012417s].
[2023-05-30 10:02:27.788841] INFO: moduleinvoker: dl_layer_dense.v1 运行完成[0.027291s].
[2023-05-30 10:02:27.801664] INFO: moduleinvoker: cached.v3 开始运行..
[2023-05-30 10:02:27.812537] INFO: moduleinvoker: 命中缓存
[2023-05-30 10:02:27.814715] INFO: moduleinvoker: cached.v3 运行完成[0.013069s].
[2023-05-30 10:02:27.830963] INFO: moduleinvoker: dl_layer_dropout.v1 运行完成[0.006966s].
[2023-05-30 10:02:27.868272] INFO: moduleinvoker: dl_layer_dense.v1 运行完成[0.028338s].
[2023-05-30 10:02:27.879874] INFO: moduleinvoker: dl_layer_dropout.v1 运行完成[0.004202s].
[2023-05-30 10:02:27.919202] INFO: moduleinvoker: dl_layer_dense.v1 运行完成[0.030549s].
[2023-05-30 10:02:27.960836] INFO: moduleinvoker: dl_layer_dense.v1 运行完成[0.033316s].
[2023-05-30 10:02:28.048197] INFO: moduleinvoker: cached.v3 开始运行..
[2023-05-30 10:02:28.086780] INFO: moduleinvoker: cached.v3 运行完成[0.03859s].
[2023-05-30 10:02:28.089925] INFO: moduleinvoker: dl_model_init.v1 运行完成[0.121362s].
[2023-05-30 10:02:28.104559] INFO: moduleinvoker: dl_model_train.v1 开始运行..
[2023-05-30 10:02:29.096615] INFO: dl_model_train: 准备训练,训练样本个数:57316,迭代次数:1
[2023-05-30 10:02:50.164191] INFO: dl_model_train: 训练结束,耗时:21.07s
[2023-05-30 10:02:50.195851] INFO: moduleinvoker: dl_model_train.v1 运行完成[22.091299s].
[2023-05-30 10:02:50.200903] INFO: moduleinvoker: dl_model_predict.v1 开始运行..
[2023-05-30 10:02:54.505653] INFO: moduleinvoker: dl_model_predict.v1 运行完成[4.304841s].
[2023-05-30 10:02:54.527470] INFO: moduleinvoker: cached.v3 开始运行..
[2023-05-30 10:02:54.739702] INFO: moduleinvoker: cached.v3 运行完成[0.212247s].
[2023-05-30 10:02:54.748659] INFO: moduleinvoker: input_features.v1 开始运行..
[2023-05-30 10:02:54.755318] INFO: moduleinvoker: 命中缓存
[2023-05-30 10:02:54.757648] INFO: moduleinvoker: input_features.v1 运行完成[0.008999s].
[2023-05-30 10:02:54.764093] INFO: moduleinvoker: instruments.v2 开始运行..
[2023-05-30 10:02:54.771773] INFO: moduleinvoker: 命中缓存
[2023-05-30 10:02:54.774025] INFO: moduleinvoker: instruments.v2 运行完成[0.00995s].
[2023-05-30 10:02:54.788746] INFO: moduleinvoker: dl_layer_input.v1 运行完成[0.001509s].
[2023-05-30 10:02:54.793787] INFO: moduleinvoker: instruments.v2 开始运行..
[2023-05-30 10:02:54.801153] INFO: moduleinvoker: 命中缓存
[2023-05-30 10:02:54.803440] INFO: moduleinvoker: instruments.v2 运行完成[0.009653s].
[2023-05-30 10:02:54.807752] INFO: moduleinvoker: input_features.v1 开始运行..
[2023-05-30 10:02:54.814580] INFO: moduleinvoker: 命中缓存
[2023-05-30 10:02:54.817236] INFO: moduleinvoker: input_features.v1 运行完成[0.009464s].
[2023-05-30 10:02:54.836434] INFO: moduleinvoker: dl_layer_reshape.v1 运行完成[0.011676s].
[2023-05-30 10:02:54.845058] INFO: moduleinvoker: advanced_auto_labeler.v2 开始运行..
[2023-05-30 10:02:54.852760] INFO: moduleinvoker: 命中缓存
[2023-05-30 10:02:54.854792] INFO: moduleinvoker: advanced_auto_labeler.v2 运行完成[0.009739s].
[2023-05-30 10:02:54.870310] INFO: moduleinvoker: general_feature_extractor.v7 开始运行..
[2023-05-30 10:02:54.876981] INFO: moduleinvoker: 命中缓存
[2023-05-30 10:02:54.879033] INFO: moduleinvoker: general_feature_extractor.v7 运行完成[0.008745s].
[2023-05-30 10:02:54.900324] INFO: moduleinvoker: general_feature_extractor.v7 开始运行..
[2023-05-30 10:02:54.907040] INFO: moduleinvoker: 命中缓存
[2023-05-30 10:02:54.909283] INFO: moduleinvoker: general_feature_extractor.v7 运行完成[0.008974s].
[2023-05-30 10:02:54.930019] INFO: moduleinvoker: dl_layer_conv2d.v1 运行完成[0.014117s].
[2023-05-30 10:02:54.942068] INFO: moduleinvoker: standardlize.v8 开始运行..
[2023-05-30 10:02:54.950531] INFO: moduleinvoker: 命中缓存
[2023-05-30 10:02:54.952628] INFO: moduleinvoker: standardlize.v8 运行完成[0.010568s].
[2023-05-30 10:02:54.960532] INFO: moduleinvoker: derived_feature_extractor.v3 开始运行..
[2023-05-30 10:02:54.967991] INFO: moduleinvoker: 命中缓存
[2023-05-30 10:02:54.970368] INFO: moduleinvoker: derived_feature_extractor.v3 运行完成[0.009818s].
[2023-05-30 10:02:54.982279] INFO: moduleinvoker: derived_feature_extractor.v3 开始运行..
[2023-05-30 10:02:54.989409] INFO: moduleinvoker: 命中缓存
[2023-05-30 10:02:54.991594] INFO: moduleinvoker: derived_feature_extractor.v3 运行完成[0.0093s].
[2023-05-30 10:02:55.013476] INFO: moduleinvoker: dl_layer_reshape.v1 运行完成[0.012511s].
[2023-05-30 10:02:55.023371] INFO: moduleinvoker: filter.v3 开始运行..
[2023-05-30 10:02:55.030760] INFO: moduleinvoker: 命中缓存
[2023-05-30 10:02:55.032827] INFO: moduleinvoker: filter.v3 运行完成[0.009459s].
[2023-05-30 10:02:55.042130] INFO: moduleinvoker: filter.v3 开始运行..
[2023-05-30 10:02:55.052113] INFO: moduleinvoker: 命中缓存
[2023-05-30 10:02:55.054120] INFO: moduleinvoker: filter.v3 运行完成[0.011994s].
[2023-05-30 10:02:55.305406] INFO: moduleinvoker: dl_layer_lstm.v1 运行完成[0.24432s].
[2023-05-30 10:02:55.311979] INFO: moduleinvoker: standardlize.v8 开始运行..
[2023-05-30 10:02:55.320884] INFO: moduleinvoker: 命中缓存
[2023-05-30 10:02:55.322960] INFO: moduleinvoker: standardlize.v8 运行完成[0.010981s].
[2023-05-30 10:02:55.332343] INFO: moduleinvoker: standardlize.v8 开始运行..
[2023-05-30 10:02:55.345527] INFO: moduleinvoker: 命中缓存
[2023-05-30 10:02:55.347603] INFO: moduleinvoker: standardlize.v8 运行完成[0.01526s].
[2023-05-30 10:02:55.366858] INFO: moduleinvoker: dl_layer_reshape.v1 运行完成[0.01287s].
[2023-05-30 10:02:55.378328] INFO: moduleinvoker: aa.v5 开始运行..
[2023-05-30 10:02:55.385882] INFO: moduleinvoker: 命中缓存
[2023-05-30 10:02:55.387611] INFO: moduleinvoker: aa.v5 运行完成[0.009306s].
[2023-05-30 10:02:55.397906] INFO: moduleinvoker: join.v3 开始运行..
[2023-05-30 10:02:55.405771] INFO: moduleinvoker: 命中缓存
[2023-05-30 10:02:55.407552] INFO: moduleinvoker: join.v3 运行完成[0.009659s].
[2023-05-30 10:02:55.710717] INFO: moduleinvoker: dl_layer_lstm.v1 运行完成[0.295625s].
[2023-05-30 10:02:55.721030] INFO: moduleinvoker: chinaa_stock_filter.v1 开始运行..
[2023-05-30 10:02:55.730356] INFO: moduleinvoker: 命中缓存
[2023-05-30 10:02:55.732103] INFO: moduleinvoker: chinaa_stock_filter.v1 运行完成[0.011056s].
[2023-05-30 10:02:55.741514] INFO: moduleinvoker: aa.v5 开始运行..
[2023-05-30 10:02:55.750234] INFO: moduleinvoker: 命中缓存
[2023-05-30 10:02:55.752030] INFO: moduleinvoker: aa.v5 运行完成[0.010536s].
[2023-05-30 10:02:55.772031] INFO: moduleinvoker: dl_layer_reshape.v1 运行完成[0.011975s].
[2023-05-30 10:02:55.781522] INFO: moduleinvoker: dropnan.v2 开始运行..
[2023-05-30 10:02:55.789652] INFO: moduleinvoker: 命中缓存
[2023-05-30 10:02:55.791913] INFO: moduleinvoker: dropnan.v2 运行完成[0.010391s].
[2023-05-30 10:02:55.800968] INFO: moduleinvoker: chinaa_stock_filter.v1 开始运行..
[2023-05-30 10:02:55.808857] INFO: moduleinvoker: 命中缓存
[2023-05-30 10:02:55.810740] INFO: moduleinvoker: chinaa_stock_filter.v1 运行完成[0.009778s].
[2023-05-30 10:02:56.032467] INFO: moduleinvoker: dl_layer_lstm.v1 运行完成[0.214462s].
[2023-05-30 10:02:56.049351] INFO: moduleinvoker: dl_convert_to_bin.v2 开始运行..
[2023-05-30 10:02:56.062522] INFO: moduleinvoker: 命中缓存
[2023-05-30 10:02:56.064327] INFO: moduleinvoker: dl_convert_to_bin.v2 运行完成[0.015013s].
[2023-05-30 10:02:56.073266] INFO: moduleinvoker: dropnan.v2 开始运行..
[2023-05-30 10:02:56.087794] INFO: moduleinvoker: 命中缓存
[2023-05-30 10:02:56.089589] INFO: moduleinvoker: dropnan.v2 运行完成[0.016325s].
[2023-05-30 10:02:56.100771] INFO: moduleinvoker: dl_layer_dropout.v1 运行完成[0.004471s].
[2023-05-30 10:02:56.113405] INFO: moduleinvoker: cached.v3 开始运行..
[2023-05-30 10:02:56.120791] INFO: moduleinvoker: 命中缓存
[2023-05-30 10:02:56.122517] INFO: moduleinvoker: cached.v3 运行完成[0.009136s].
[2023-05-30 10:02:56.141776] INFO: moduleinvoker: dl_convert_to_bin.v2 开始运行..
[2023-05-30 10:02:56.149480] INFO: moduleinvoker: 命中缓存
[2023-05-30 10:02:56.151181] INFO: moduleinvoker: dl_convert_to_bin.v2 运行完成[0.009426s].
[2023-05-30 10:02:56.184668] INFO: moduleinvoker: dl_layer_dense.v1 运行完成[0.024941s].
[2023-05-30 10:02:56.198824] INFO: moduleinvoker: cached.v3 开始运行..
[2023-05-30 10:02:56.207356] INFO: moduleinvoker: 命中缓存
[2023-05-30 10:02:56.209097] INFO: moduleinvoker: cached.v3 运行完成[0.010285s].
[2023-05-30 10:02:56.220090] INFO: moduleinvoker: dl_layer_dropout.v1 运行完成[0.004303s].
[2023-05-30 10:02:56.254391] INFO: moduleinvoker: dl_layer_dense.v1 运行完成[0.027845s].
[2023-05-30 10:02:56.269194] INFO: moduleinvoker: dl_layer_dropout.v1 运行完成[0.006667s].
[2023-05-30 10:02:56.308884] INFO: moduleinvoker: dl_layer_dense.v1 运行完成[0.031979s].
[2023-05-30 10:02:56.342319] INFO: moduleinvoker: dl_layer_dense.v1 运行完成[0.025043s].
[2023-05-30 10:02:56.408045] INFO: moduleinvoker: cached.v3 开始运行..
[2023-05-30 10:02:56.415747] INFO: moduleinvoker: 命中缓存
[2023-05-30 10:02:56.417707] INFO: moduleinvoker: cached.v3 运行完成[0.009684s].
[2023-05-30 10:02:56.420038] INFO: moduleinvoker: dl_model_init.v1 运行完成[0.064904s].
[2023-05-30 10:02:56.432709] INFO: moduleinvoker: dl_model_train.v1 开始运行..
[2023-05-30 10:02:56.441009] INFO: moduleinvoker: 命中缓存
[2023-05-30 10:02:56.443965] INFO: moduleinvoker: dl_model_train.v1 运行完成[0.011267s].
[2023-05-30 10:02:56.448805] INFO: moduleinvoker: dl_model_predict.v1 开始运行..
[2023-05-30 10:02:56.456473] INFO: moduleinvoker: 命中缓存
[2023-05-30 10:02:56.460670] INFO: moduleinvoker: dl_model_predict.v1 运行完成[0.011866s].
[2023-05-30 10:02:56.473833] INFO: moduleinvoker: cached.v3 开始运行..
[2023-05-30 10:02:56.482378] INFO: moduleinvoker: 命中缓存
[2023-05-30 10:02:56.484125] INFO: moduleinvoker: cached.v3 运行完成[0.010292s].
[2023-05-30 10:02:56.490302] INFO: moduleinvoker: input_features.v1 开始运行..
[2023-05-30 10:02:56.496727] INFO: moduleinvoker: 命中缓存
[2023-05-30 10:02:56.498420] INFO: moduleinvoker: input_features.v1 运行完成[0.008129s].
[2023-05-30 10:02:56.503768] INFO: moduleinvoker: instruments.v2 开始运行..
[2023-05-30 10:02:56.510371] INFO: moduleinvoker: 命中缓存
[2023-05-30 10:02:56.512491] INFO: moduleinvoker: instruments.v2 运行完成[0.008724s].
[2023-05-30 10:02:56.520615] INFO: moduleinvoker: dl_layer_input.v1 运行完成[0.001396s].
[2023-05-30 10:02:56.526662] INFO: moduleinvoker: instruments.v2 开始运行..
[2023-05-30 10:02:56.534471] INFO: moduleinvoker: 命中缓存
[2023-05-30 10:02:56.536351] INFO: moduleinvoker: instruments.v2 运行完成[0.009693s].
[2023-05-30 10:02:56.540672] INFO: moduleinvoker: input_features.v1 开始运行..
[2023-05-30 10:02:56.547359] INFO: moduleinvoker: 命中缓存
[2023-05-30 10:02:56.549088] INFO: moduleinvoker: input_features.v1 运行完成[0.008415s].
[2023-05-30 10:02:56.573253] INFO: moduleinvoker: dl_layer_reshape.v1 运行完成[0.018236s].
[2023-05-30 10:02:56.581203] INFO: moduleinvoker: advanced_auto_labeler.v2 开始运行..
[2023-05-30 10:02:56.588987] INFO: moduleinvoker: 命中缓存
[2023-05-30 10:02:56.590770] INFO: moduleinvoker: advanced_auto_labeler.v2 运行完成[0.009568s].
[2023-05-30 10:02:56.604069] INFO: moduleinvoker: general_feature_extractor.v7 开始运行..
[2023-05-30 10:02:56.610796] INFO: moduleinvoker: 命中缓存
[2023-05-30 10:02:56.612601] INFO: moduleinvoker: general_feature_extractor.v7 运行完成[0.008558s].
[2023-05-30 10:02:56.625803] INFO: moduleinvoker: general_feature_extractor.v7 开始运行..
[2023-05-30 10:02:56.633905] INFO: moduleinvoker: 命中缓存
[2023-05-30 10:02:56.635709] INFO: moduleinvoker: general_feature_extractor.v7 运行完成[0.009931s].
[2023-05-30 10:02:56.655639] INFO: moduleinvoker: dl_layer_conv2d.v1 运行完成[0.013629s].
[2023-05-30 10:02:56.661889] INFO: moduleinvoker: standardlize.v8 开始运行..
[2023-05-30 10:02:56.669944] INFO: moduleinvoker: 命中缓存
[2023-05-30 10:02:56.673098] INFO: moduleinvoker: standardlize.v8 运行完成[0.011162s].
[2023-05-30 10:02:56.682225] INFO: moduleinvoker: derived_feature_extractor.v3 开始运行..
[2023-05-30 10:02:56.688915] INFO: moduleinvoker: 命中缓存
[2023-05-30 10:02:56.690926] INFO: moduleinvoker: derived_feature_extractor.v3 运行完成[0.008698s].
[2023-05-30 10:02:56.700069] INFO: moduleinvoker: derived_feature_extractor.v3 开始运行..
[2023-05-30 10:02:56.706716] INFO: moduleinvoker: 命中缓存
[2023-05-30 10:02:56.708478] INFO: moduleinvoker: derived_feature_extractor.v3 运行完成[0.008413s].
[2023-05-30 10:02:56.735877] INFO: moduleinvoker: dl_layer_reshape.v1 运行完成[0.012403s].
[2023-05-30 10:02:56.744741] INFO: moduleinvoker: filter.v3 开始运行..
[2023-05-30 10:02:56.752058] INFO: moduleinvoker: 命中缓存
[2023-05-30 10:02:56.754108] INFO: moduleinvoker: filter.v3 运行完成[0.009352s].
[2023-05-30 10:02:56.763985] INFO: moduleinvoker: filter.v3 开始运行..
[2023-05-30 10:02:56.771545] INFO: moduleinvoker: 命中缓存
[2023-05-30 10:02:56.773595] INFO: moduleinvoker: filter.v3 运行完成[0.009618s].
[2023-05-30 10:02:57.012301] INFO: moduleinvoker: dl_layer_lstm.v1 运行完成[0.222633s].
[2023-05-30 10:02:57.019608] INFO: moduleinvoker: standardlize.v8 开始运行..
[2023-05-30 10:02:57.027104] INFO: moduleinvoker: 命中缓存
[2023-05-30 10:02:57.029166] INFO: moduleinvoker: standardlize.v8 运行完成[0.009571s].
[2023-05-30 10:02:57.035626] INFO: moduleinvoker: standardlize.v8 开始运行..
[2023-05-30 10:02:57.043635] INFO: moduleinvoker: 命中缓存
[2023-05-30 10:02:57.046148] INFO: moduleinvoker: standardlize.v8 运行完成[0.010522s].
[2023-05-30 10:02:57.070168] INFO: moduleinvoker: dl_layer_reshape.v1 运行完成[0.016832s].
[2023-05-30 10:02:57.081036] INFO: moduleinvoker: aa.v5 开始运行..
[2023-05-30 10:02:57.088844] INFO: moduleinvoker: 命中缓存
[2023-05-30 10:02:57.090630] INFO: moduleinvoker: aa.v5 运行完成[0.009641s].
[2023-05-30 10:02:57.106306] INFO: moduleinvoker: join.v3 开始运行..
[2023-05-30 10:02:57.115113] INFO: moduleinvoker: 命中缓存
[2023-05-30 10:02:57.117261] INFO: moduleinvoker: join.v3 运行完成[0.010965s].
[2023-05-30 10:02:57.350766] INFO: moduleinvoker: dl_layer_lstm.v1 运行完成[0.226324s].
[2023-05-30 10:02:57.363568] INFO: moduleinvoker: chinaa_stock_filter.v1 开始运行..
[2023-05-30 10:02:57.370928] INFO: moduleinvoker: 命中缓存
[2023-05-30 10:02:57.372763] INFO: moduleinvoker: chinaa_stock_filter.v1 运行完成[0.009201s].
[2023-05-30 10:02:57.381892] INFO: moduleinvoker: aa.v5 开始运行..
[2023-05-30 10:02:57.388917] INFO: moduleinvoker: 命中缓存
[2023-05-30 10:02:57.390975] INFO: moduleinvoker: aa.v5 运行完成[0.009102s].
[2023-05-30 10:02:57.408813] INFO: moduleinvoker: dl_layer_reshape.v1 运行完成[0.010928s].
[2023-05-30 10:02:57.417779] INFO: moduleinvoker: dropnan.v2 开始运行..
[2023-05-30 10:02:57.424590] INFO: moduleinvoker: 命中缓存
[2023-05-30 10:02:57.426375] INFO: moduleinvoker: dropnan.v2 运行完成[0.008601s].
[2023-05-30 10:02:57.434799] INFO: moduleinvoker: chinaa_stock_filter.v1 开始运行..
[2023-05-30 10:02:57.441297] INFO: moduleinvoker: 命中缓存
[2023-05-30 10:02:57.443240] INFO: moduleinvoker: chinaa_stock_filter.v1 运行完成[0.008447s].
[2023-05-30 10:02:57.682600] INFO: moduleinvoker: dl_layer_lstm.v1 运行完成[0.232809s].
[2023-05-30 10:02:57.700160] INFO: moduleinvoker: dl_convert_to_bin.v2 开始运行..
[2023-05-30 10:02:57.708345] INFO: moduleinvoker: 命中缓存
[2023-05-30 10:02:57.710699] INFO: moduleinvoker: dl_convert_to_bin.v2 运行完成[0.010561s].
[2023-05-30 10:02:57.722367] INFO: moduleinvoker: dropnan.v2 开始运行..
[2023-05-30 10:02:57.729492] INFO: moduleinvoker: 命中缓存
[2023-05-30 10:02:57.731474] INFO: moduleinvoker: dropnan.v2 运行完成[0.009115s].
[2023-05-30 10:02:57.743267] INFO: moduleinvoker: dl_layer_dropout.v1 运行完成[0.004187s].
[2023-05-30 10:02:57.768602] INFO: moduleinvoker: cached.v3 开始运行..
[2023-05-30 10:02:57.776646] INFO: moduleinvoker: 命中缓存
[2023-05-30 10:02:57.778672] INFO: moduleinvoker: cached.v3 运行完成[0.010096s].
[2023-05-30 10:02:57.793231] INFO: moduleinvoker: dl_convert_to_bin.v2 开始运行..
[2023-05-30 10:02:57.800393] INFO: moduleinvoker: 命中缓存
[2023-05-30 10:02:57.802067] INFO: moduleinvoker: dl_convert_to_bin.v2 运行完成[0.008864s].
[2023-05-30 10:02:57.836099] INFO: moduleinvoker: dl_layer_dense.v1 运行完成[0.026355s].
[2023-05-30 10:02:57.847491] INFO: moduleinvoker: cached.v3 开始运行..
[2023-05-30 10:02:57.854932] INFO: moduleinvoker: 命中缓存
[2023-05-30 10:02:57.856992] INFO: moduleinvoker: cached.v3 运行完成[0.009519s].
[2023-05-30 10:02:57.870204] INFO: moduleinvoker: dl_layer_dropout.v1 运行完成[0.004185s].
[2023-05-30 10:02:57.903454] INFO: moduleinvoker: dl_layer_dense.v1 运行完成[0.025555s].
[2023-05-30 10:02:57.917510] INFO: moduleinvoker: dl_layer_dropout.v1 运行完成[0.004482s].
[2023-05-30 10:02:57.956184] INFO: moduleinvoker: dl_layer_dense.v1 运行完成[0.031008s].
[2023-05-30 10:02:58.003462] INFO: moduleinvoker: dl_layer_dense.v1 运行完成[0.032361s].
[2023-05-30 10:02:58.083904] INFO: moduleinvoker: cached.v3 开始运行..
[2023-05-30 10:02:58.092684] INFO: moduleinvoker: 命中缓存
[2023-05-30 10:02:58.094698] INFO: moduleinvoker: cached.v3 运行完成[0.010814s].
[2023-05-30 10:02:58.096815] INFO: moduleinvoker: dl_model_init.v1 运行完成[0.086392s].
[2023-05-30 10:02:58.109267] INFO: moduleinvoker: dl_model_train.v1 开始运行..
[2023-05-30 10:02:58.966747] INFO: dl_model_train: 准备训练,训练样本个数:56739,迭代次数:1
[2023-05-30 10:03:19.693491] INFO: dl_model_train: 训练结束,耗时:20.72s
[2023-05-30 10:03:19.764528] INFO: moduleinvoker: dl_model_train.v1 运行完成[21.655278s].
[2023-05-30 10:03:19.769982] INFO: moduleinvoker: dl_model_predict.v1 开始运行..
[2023-05-30 10:03:22.708537] INFO: moduleinvoker: dl_model_predict.v1 运行完成[2.938541s].
[2023-05-30 10:03:22.726096] INFO: moduleinvoker: cached.v3 开始运行..
[2023-05-30 10:03:22.913209] INFO: moduleinvoker: cached.v3 运行完成[0.187154s].
[2023-05-30 10:03:22.928115] INFO: moduleinvoker: cached.v3 开始运行..
[2023-05-30 10:03:23.307359] INFO: moduleinvoker: cached.v3 运行完成[0.379302s].
[2023-05-30 10:03:23.359372] INFO: moduleinvoker: backtest.v8 开始运行..
[2023-05-30 10:03:23.365059] INFO: backtest: biglearning backtest:V8.6.3
[2023-05-30 10:03:23.465960] INFO: backtest: product_type:stock by specified
[2023-05-30 10:03:23.546417] INFO: moduleinvoker: cached.v2 开始运行..
[2023-05-30 10:03:23.553528] INFO: moduleinvoker: 命中缓存
[2023-05-30 10:03:23.555787] INFO: moduleinvoker: cached.v2 运行完成[0.009385s].
[2023-05-30 10:03:25.588826] INFO: backtest: algo history_data=DataSource(fa5eccb5bbf04045af2d278cf9680240T)
[2023-05-30 10:03:25.590579] INFO: algo: TradingAlgorithm V1.8.9
[2023-05-30 10:03:25.974172] INFO: algo: trading transform...
[2023-05-30 10:03:32.445056] INFO: Performance: Simulated 129 trading days out of 129.
[2023-05-30 10:03:32.446660] INFO: Performance: first open: 2022-11-16 09:30:00+00:00
[2023-05-30 10:03:32.447995] INFO: Performance: last close: 2023-05-29 15:00:00+00:00
[2023-05-30 10:03:39.018334] INFO: moduleinvoker: backtest.v8 运行完成[15.658958s].
[2023-05-30 10:03:39.020314] INFO: moduleinvoker: trade.v4 运行完成[15.702823s].
[2023-05-30 10:03:39.021969] INFO: moduleinvoker: hyper_rolling_train.v1 运行完成[72.634187s].