Factors: sample factors (18)
Factors standardized: yes
Label: 5-day forward return (no discretization)
Algorithm: LSTM
Problem type: regression
Training set: 2010-2016
Test set: 2016-2019
Selection rule: sort by predicted value in descending order and buy from the top
Number of holdings: 30
Holding period (days): 5
# This code was generated automatically by the visual strategy builder on 2022-08-09 18:44
# This code cell can only be edited in visual mode. You can also copy the code into a new code cell or strategy and then modify it.
# Python entry function: input_1/2/3 are the three input ports, data_1/2/3 the three output ports
def m8_run_bigquant_run(input_1, input_2, input_3):
    # Sample code; write your own code here
df = input_1.read_pickle()
feature_len = len(input_2.read_pickle())
df['x'] = df['x'].reshape(df['x'].shape[0], int(feature_len), int(df['x'].shape[1]/feature_len))
data_1 = DataSource.write_pickle(df)
return Outputs(data_1=data_1)
# Optional post-processing function. Its input is the output of the main function; you can transform the data here or return a friendlier Outputs format. The output of this function is not cached.
def m8_post_run_bigquant_run(outputs):
return outputs
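# Illustrative sketch (not part of the generated strategy and never called by it): the run
# function above assumes the pickled object behaves like a dict whose 'x' entry is a flattened
# numpy array of shape (samples, feature_len * window_size); the reshape turns it into
# (samples, feature_len, window_size) for the downstream layers. The toy shapes below
# (feature_len=1, window_size=5) mirror the window_size used by the dl_convert_to_bin modules.
def _demo_reshape_windowed_features():
    import numpy as np
    feature_len = 1
    window_size = 5
    samples = 3
    x_flat = np.arange(samples * feature_len * window_size, dtype='float32').reshape(samples, feature_len * window_size)
    x_3d = x_flat.reshape(x_flat.shape[0], feature_len, x_flat.shape[1] // feature_len)
    assert x_3d.shape == (samples, feature_len, window_size)
    return x_3d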
def m20_user_activity_regularizer_bigquant_run(weight_matrix):
    # Keras backend functions are needed for the L1 penalty below
    from tensorflow.keras import backend as K
    return 0.01 * K.sum(K.abs(weight_matrix))
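# Illustrative sketch (not called by the strategy): the custom regularizer above is an L1
# penalty, 0.01 * sum(|x|); assuming the platform wires it up as a Keras activity regularizer,
# Keras evaluates it on the layer's output tensor and adds the returned scalar to the loss.
# The toy check below evaluates the same formula on a small constant tensor.
def _demo_activity_regularizer_value():
    import tensorflow as tf
    from tensorflow.keras import backend as K
    w = tf.constant([[1.0, -2.0], [3.0, -4.0]])
    penalty = 0.01 * K.sum(K.abs(w))  # = 0.01 * (1 + 2 + 3 + 4) = 0.1
    return float(penalty)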
# Python entry function: input_1/2/3 are the three input ports, data_1/2/3 the three output ports
def m4_run_bigquant_run(input_1, input_2, input_3):
    # Sample code; write your own code here
df = input_1.read_pickle()
feature_len = len(input_2.read_pickle())
df['x'] = df['x'].reshape(df['x'].shape[0], int(feature_len), int(df['x'].shape[1]/feature_len))
data_1 = DataSource.write_pickle(df)
return Outputs(data_1=data_1)
# Optional post-processing function. Its input is the output of the main function; you can transform the data here or return a friendlier Outputs format. The output of this function is not cached.
def m4_post_run_bigquant_run(outputs):
return outputs
from tensorflow.keras import optimizers
# Adam with a custom learning rate ("learning_rate" is the supported argument name in current tf.keras; "lr" is deprecated)
m5_user_optimizer_bigquant_run = optimizers.Adam(learning_rate=0.00085)
# User-defined custom layers must be registered in a dict, e.g.
# {
#     "MyLayer": MyLayer
# }
m5_custom_objects_bigquant_run = {
}
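# Illustrative sketch (hypothetical, not used by this strategy, whose custom_objects dict above
# is empty): if the model contained a user-defined layer, it would have to be registered under
# its class name so the training and prediction modules can deserialize it, as when calling
# tf.keras.models.load_model directly. `MyLayer` is a made-up example class.
def _demo_custom_objects_usage(model_path):
    import tensorflow as tf

    class MyLayer(tf.keras.layers.Layer):
        def call(self, inputs):
            return inputs  # identity layer, purely for illustration

    custom_objects = {"MyLayer": MyLayer}
    return tf.keras.models.load_model(model_path, custom_objects=custom_objects)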
# Python entry function: input_1/2/3 are the three input ports, data_1/2/3 the three output ports
def m24_run_bigquant_run(input_1, input_2, input_3):
    # Sample code; write your own code here
pred_label = input_1.read_pickle()
df = input_2.read_df()
df = pd.DataFrame({'pred_label':pred_label[:,0], 'instrument':df.instrument, 'date':df.date})
df.sort_values(['date','pred_label'],inplace=True, ascending=[True,False])
return Outputs(data_1=DataSource.write_df(df), data_2=None, data_3=None)
# Optional post-processing function. Its input is the output of the main function; you can transform the data here or return a friendlier Outputs format. The output of this function is not cached.
def m24_post_run_bigquant_run(outputs):
return outputs
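# Illustrative sketch (not called by the strategy): m24's run function above joins the raw model
# output (one score per row) with the instrument/date columns of the test set and sorts each
# day's stocks by predicted score, descending. Toy data below; pandas/numpy are assumed to be
# available in the research environment.
def _demo_build_ranked_predictions():
    import numpy as np
    import pandas as pd
    pred_label = np.array([[0.2], [0.9], [0.5], [0.1]])
    meta = pd.DataFrame({
        'instrument': ['000001.SZA', '000002.SZA', '000001.SZA', '000002.SZA'],
        'date': pd.to_datetime(['2019-01-02', '2019-01-02', '2019-01-03', '2019-01-03']),
    })
    df = pd.DataFrame({'pred_label': pred_label[:, 0], 'instrument': meta.instrument, 'date': meta.date})
    df.sort_values(['date', 'pred_label'], inplace=True, ascending=[True, False])
    return df  # within each date, the highest-scored stock comes first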
# Backtest engine: initialization function, executed only once
def m40_initialize_bigquant_run(context):
    # Load the prediction data
    context.ranker_prediction = context.options['data'].read_df()
    # The system already sets default commission and slippage; commissions can be changed with the call below
    context.set_commission(PerOrder(buy_cost=0.0003, sell_cost=0.0013, min_cost=5))
from zipline.finance.slippage import SlippageModel
class FixedPriceSlippage(SlippageModel):
        # Constructor
def __init__(self, spreads, price_field_buy, price_field_sell):
self.spreads = spreads
self._price_field_buy = price_field_buy
self._price_field_sell = price_field_sell
def process_order(self, data, order, bar_volume=0, trigger_check_price=0):
if order.limit is None:
price_field = self._price_field_buy if order.amount > 0 else self._price_field_sell
price_base = data.current(order.asset, price_field)
                # Buy prices are shifted up by the spreads fraction; sell prices are shifted down by it
price = price_base * (1.0 + self.spreads) if order.amount > 0 else price_base * (1.0 - self.spreads)
else:
price = order.limit
            # Return the desired fill price and fill quantity
return (price, order.amount)
    # Set the slippage model
fix_slippage = FixedPriceSlippage(price_field_buy='open', price_field_sell='close',spreads=0)
context.set_slippage(us_equities=fix_slippage)
    # The prediction data is passed in through options and loaded into memory as a DataFrame with read_df above
    # Number of stocks to buy: the top-ranked stock_count names from the prediction list
    stock_count = 10
    # Per-stock weights; this scheme gives the top-ranked stocks a slightly larger share of the cash
    context.stock_weights = T.norm([1 / math.log(i + 2) for i in range(0, stock_count)])
    # Switch to equal weighting instead:
    # context.stock_weights = [1 / stock_count for i in range(0, stock_count)]
    # Maximum fraction of the portfolio that a single stock may occupy
    context.max_cash_per_instrument = 0.1
context.options['hold_days'] = 4
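# Illustrative sketch (not called by the strategy): the buy weights above are 1/log(i+2) for
# rank i, then normalized; T.norm is assumed to rescale the list so that it sums to 1. The
# plain-Python version below reproduces that assumption for stock_count=10.
def _demo_log_rank_weights(stock_count=10):
    import math
    raw = [1 / math.log(i + 2) for i in range(stock_count)]
    total = sum(raw)
    weights = [w / total for w in raw]  # front ranks get a larger share of the cash
    assert abs(sum(weights) - 1.0) < 1e-9
    return weights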
# Backtest engine: daily data handler, executed once per day
def m40_handle_data_bigquant_run(context, data):
    #-------------------- Index-level risk control --------------------#
    # Current date
    today = data.current_dt.strftime('%Y-%m-%d')
    stock_hold_now = [equity.symbol for equity in context.portfolio.positions]
    # Market risk-control module: read the risk flag prepared in m40_prepare
    # benckmark_risk = context.benckmark_risk.loc[today]
    # When risk == 1 the market is considered risky: liquidate everything and skip the rest of the day
    # if benckmark_risk > 0:
    #     for instrument in stock_hold_now:
    #         context.order_target(symbol(instrument), 0)
    #     print(today, 'index risk-control stop triggered, selling all positions')
    #     return
    #-------------------- Index-level risk control --------------------#
    # Filter the prediction data down to today's rows
    ranker_prediction = context.ranker_prediction[
        context.ranker_prediction.date == data.current_dt.strftime('%Y-%m-%d')]
    # 1. Cash allocation
    # The average holding period is hold_days and stocks are bought every day, so roughly 1/hold_days of the capital is deployed per day
    # In practice fills are imperfect, so during the first hold_days days the daily budget is used as-is; afterwards up to 1.5x the daily budget may be spent
    is_staging = context.trading_day_index < context.options['hold_days']  # still in the build-up period (first hold_days days)?
    cash_avg = context.portfolio.portfolio_value / context.options['hold_days']
    cash_for_buy = min(context.portfolio.cash, (1 if is_staging else 1.5) * cash_avg)
    cash_for_sell = cash_avg - (context.portfolio.cash - cash_for_buy)
positions = {e.symbol: p.amount * p.last_sale_price
for e, p in context.perf_tracker.position_tracker.positions.items()}
    #-------------------- START: take-profit / stop-loss module (including the build-up period) --------------------
    # Stocks that hit take-profit or stop-loss today are recorded so the rest of handle_data does not trade them again
current_stopwin_stock=[]
current_stoploss_stock = []
today_date = data.current_dt.strftime('%Y-%m-%d')
positions_stop={e.symbol:p.cost_basis
for e,p in context.portfolio.positions.items()}
if len(positions_stop)>0:
for i in positions_stop.keys():
stock_cost=positions_stop[i]
stock_market_price=data.current(context.symbol(i),'price')
            # Take profit at +25% if the stock is tradable and has no pending sell order
if stock_market_price/stock_cost-1>0.25 and data.can_trade(context.symbol(i)) and not context.has_unfinished_sell_order(i):
context.order_target_percent(context.symbol(i),0)
current_stopwin_stock.append(i)
            # Stop loss at -10% if the stock is tradable and has no pending sell order
if stock_market_price/stock_cost-1 <= -0.1 and data.can_trade(context.symbol(i)) and not context.has_unfinished_sell_order(i):
context.order_target_percent(context.symbol(i),0)
current_stoploss_stock.append(i)
if len(current_stopwin_stock)>0:
print(today_date,'止盈股票列表',current_stopwin_stock)
if len(current_stoploss_stock)>0:
print(today_date,'止损股票列表',current_stoploss_stock)
    #-------------------- END: take-profit / stop-loss module --------------------
    # 2. Generate sell orders: selling only starts after hold_days days; held stocks at the bottom of the model's ranking are dropped first
if not is_staging and cash_for_sell > 0:
equities = {e.symbol: e for e, p in context.perf_tracker.position_tracker.positions.items()}
instruments = list(reversed(list(ranker_prediction.instrument[ranker_prediction.instrument.apply(
lambda x: x in equities and not context.has_unfinished_sell_order(equities[x]))])))
# print('rank order for sell %s' % instruments)
for instrument in instruments:
context.order_target(context.symbol(instrument), 0)
cash_for_sell -= positions[instrument]
if cash_for_sell <= 0:
break
    # 3. Generate buy orders: buy the top stock_count names in the model's ranking
buy_cash_weights = context.stock_weights
buy_instruments = list(ranker_prediction.instrument[:len(buy_cash_weights)])
max_cash_per_instrument = context.portfolio.portfolio_value * context.max_cash_per_instrument
for i, instrument in enumerate(buy_instruments):
cash = cash_for_buy * buy_cash_weights[i]
if cash > max_cash_per_instrument - positions.get(instrument, 0):
            # Make sure a single position never exceeds its maximum cash allocation
cash = max_cash_per_instrument - positions.get(instrument, 0)
if cash > 0:
context.order_value(context.symbol(instrument), cash)
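# Illustrative sketch (not called by the strategy): the cash split in handle_data budgets
# roughly 1/hold_days of the portfolio per day. During the first hold_days days (the staging
# period) buys are capped at exactly one day's budget; afterwards up to 1.5x a day's budget may
# be spent, and cash_for_sell is whatever must be freed to keep that budget available. Toy numbers:
def _demo_daily_cash_budget(portfolio_value=5000000.0, cash=800000.0, hold_days=4, is_staging=False):
    cash_avg = portfolio_value / hold_days                              # 1250000.0
    cash_for_buy = min(cash, (1 if is_staging else 1.5) * cash_avg)     # limited by available cash: 800000.0
    cash_for_sell = cash_avg - (cash - cash_for_buy)                    # 1250000.0, since all cash goes to buys
    return cash_avg, cash_for_buy, cash_for_sell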
# Backtest engine: data-preparation function, executed only once
def m40_prepare_bigquant_run(context):
    # Computing the daily index risk flag once here, instead of every day inside handle_data, speeds up the backtest
    # Fetch an extra 50 calendar days of data so the rolling calculation is not NaN on the first backtest day;
    # context.start_date and context.end_date are the backtest's configured start and end dates
start_date= (pd.to_datetime(context.start_date) - datetime.timedelta(days=50)).strftime('%Y-%m-%d')
df=DataSource('bar1d_index_CN_STOCK_A').read(start_date=start_date,end_date=context.end_date,fields=['close'])
    # Use the SSE Composite Index (000001.HIX) as the benchmark index here
benckmark_data=df[df.instrument=='000001.HIX']
    # Compare today's close with the mean close of the previous four days (a 5-day momentum measure for the index)
benckmark_data['ret5']=4*benckmark_data['close']/(benckmark_data['close'].shift(4)+benckmark_data['close'].shift(3)+benckmark_data['close'].shift(2)+benckmark_data['close'].shift(1))-1
    # Risk-control condition: if this 5-day measure falls below -2% (-0.02), set risk to 1, otherwise 0
    benckmark_data['risk'] = np.where(benckmark_data['ret5']<-0.02,1,0)
    # Use the date as the index
    benckmark_data.set_index('date',inplace=True)
    # Expose the risk series to handle_data through the context
    context.benckmark_risk=benckmark_data['risk']
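# Illustrative sketch (not called by the strategy): the prepare function above compares each
# day's index close with the mean of the previous four closes (ret5) and flags risk=1 when that
# ratio drops below -0.02 (-2%). Toy series below; pandas/numpy are assumed to be available.
def _demo_index_risk_flag():
    import numpy as np
    import pandas as pd
    close = pd.Series([100.0, 101.0, 99.0, 98.0, 94.0, 93.0])
    ret5 = 4 * close / (close.shift(4) + close.shift(3) + close.shift(2) + close.shift(1)) - 1
    risk = np.where(ret5 < -0.02, 1, 0)  # 1 = risk-off day, liquidate if the risk module is enabled
    return pd.DataFrame({'close': close, 'ret5': ret5, 'risk': risk})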
g = T.Graph({
'm3': 'M.input_features.v1',
'm3.features': """
mean(where(close_0>open_0, ((close_0-low_0)/(high_0-low_0))*(turn_0/avg_turn_5), -1*((high_0-close_0)/(high_0-low_0))*(turn_0/avg_turn_5)),5) #-4/25///47
""",
'm43': 'M.input_features.v1',
'm43.features_ds': T.Graph.OutputPort('m3.data'),
'm43.features': 'market_cap_0',
'm9': 'M.instruments.v2',
'm9.start_date': '2017-01-01',
'm9.end_date': T.live_run_param('trading_date', '2019-12-31'),
'm9.market': 'CN_STOCK_A',
'm9.instrument_list': """000008.SZA
000009.SZA
000012.SZA
000021.SZA
000027.SZA
000028.SZA
000031.SZA
000039.SZA
000046.SZA
000050.SZA
000060.SZA
000061.SZA
000062.SZA
000078.SZA
000089.SZA
000090.SZA
000156.SZA
000158.SZA
000301.SZA
000400.SZA
000401.SZA
000402.SZA
000415.SZA
000488.SZA
000501.SZA
000513.SZA
000519.SZA
000528.SZA
000537.SZA
000540.SZA
000543.SZA
000547.SZA
000553.SZA
000559.SZA
000563.SZA
000564.SZA
000581.SZA
000598.SZA
000600.SZA
000623.SZA
000629.SZA
000630.SZA
000636.SZA
000681.SZA
000685.SZA
000686.SZA
000690.SZA
000709.SZA
000717.SZA
000718.SZA
000729.SZA
000732.SZA
000738.SZA
000739.SZA
000750.SZA
000758.SZA
000778.SZA
000807.SZA
000813.SZA
000825.SZA
000826.SZA
000830.SZA
000869.SZA
000877.SZA
000878.SZA
000883.SZA
000887.SZA
000898.SZA
000930.SZA
000932.SZA
000937.SZA
000959.SZA
000960.SZA
000967.SZA
000970.SZA
000975.SZA
000983.SZA
000988.SZA
000990.SZA
000997.SZA
000998.SZA
000999.SZA
001872.SZA
001914.SZA
002002.SZA
002004.SZA
002010.SZA
002013.SZA
002019.SZA
002028.SZA
002030.SZA
002038.SZA
002048.SZA
002051.SZA
002056.SZA
002064.SZA
002074.SZA
002075.SZA
002078.SZA
002080.SZA
002081.SZA
002085.SZA
002092.SZA
002093.SZA
002110.SZA
002124.SZA
002128.SZA
002131.SZA
002138.SZA
002152.SZA
002155.SZA
002156.SZA
002174.SZA
002183.SZA
002185.SZA
002191.SZA
002195.SZA
002203.SZA
002212.SZA
002217.SZA
002221.SZA
002223.SZA
002233.SZA
002242.SZA
002244.SZA
002249.SZA
002250.SZA
002266.SZA
002268.SZA
002273.SZA
002281.SZA
002294.SZA
002299.SZA
002302.SZA
002317.SZA
002340.SZA
002353.SZA
002368.SZA
002372.SZA
002373.SZA
002375.SZA
002382.SZA
002385.SZA
002387.SZA
002390.SZA
002396.SZA
002399.SZA
002408.SZA
002416.SZA
002419.SZA
002423.SZA
002424.SZA
002429.SZA
002434.SZA
002440.SZA
002444.SZA
002458.SZA
002465.SZA
002468.SZA
002500.SZA
002503.SZA
002505.SZA
002506.SZA
002507.SZA
002511.SZA
002544.SZA
002557.SZA
002563.SZA
002568.SZA
002572.SZA
002583.SZA
002589.SZA
002595.SZA
002603.SZA
002625.SZA
002635.SZA
002640.SZA
002648.SZA
002653.SZA
002670.SZA
002683.SZA
002690.SZA
002701.SZA
002705.SZA
002709.SZA
002745.SZA
002791.SZA
002797.SZA
002807.SZA
002815.SZA
002818.SZA
002831.SZA
002839.SZA
002867.SZA
002901.SZA
002920.SZA
002925.SZA
002926.SZA
002936.SZA
002941.SZA
002946.SZA
002948.SZA
002957.SZA
002966.SZA
002985.SZA
300001.SZA
300002.SZA
300009.SZA
300010.SZA
300012.SZA
300017.SZA
300024.SZA
300026.SZA
300058.SZA
300070.SZA
300072.SZA
300088.SZA
300113.SZA
300115.SZA
300133.SZA
300134.SZA
300146.SZA
300166.SZA
300168.SZA
300180.SZA
300182.SZA
300207.SZA
300212.SZA
300223.SZA
300244.SZA
300257.SZA
300271.SZA
300274.SZA
300285.SZA
300296.SZA
300315.SZA
300316.SZA
300324.SZA
300357.SZA
300376.SZA
300418.SZA
300459.SZA
300463.SZA
300474.SZA
300482.SZA
300496.SZA
300595.SZA
300618.SZA
300630.SZA
300699.SZA
600006.SHA
600008.SHA
600021.SHA
600022.SHA
600026.SHA
600037.SHA
600038.SHA
600039.SHA
600053.SHA
600056.SHA
600060.SHA
600062.SHA
600064.SHA
600073.SHA
600079.SHA
600089.SHA
600094.SHA
600120.SHA
600126.SHA
600131.SHA
600132.SHA
600141.SHA
600143.SHA
600153.SHA
600155.SHA
600158.SHA
600160.SHA
600166.SHA
600167.SHA
600170.SHA
600171.SHA
600188.SHA
600195.SHA
600201.SHA
600216.SHA
600219.SHA
600256.SHA
600258.SHA
600259.SHA
600260.SHA
600266.SHA
600273.SHA
600277.SHA
600282.SHA
600291.SHA
600307.SHA
600312.SHA
600315.SHA
600316.SHA
600325.SHA
600329.SHA
600338.SHA
600339.SHA
600348.SHA
600350.SHA
600372.SHA
600373.SHA
600376.SHA
600380.SHA
600388.SHA
600392.SHA
600398.SHA
600409.SHA
600410.SHA
600415.SHA
600418.SHA
600426.SHA
600435.SHA
600446.SHA
600460.SHA
600466.SHA
600486.SHA
600497.SHA
600500.SHA
600507.SHA
600511.SHA
600515.SHA
600516.SHA
600517.SHA
600528.SHA
600529.SHA
600535.SHA
600545.SHA
600546.SHA
600549.SHA
600556.SHA
600563.SHA
600566.SHA
600567.SHA
600572.SHA
600575.SHA
600580.SHA
600582.SHA
600597.SHA
600598.SHA
600623.SHA
600633.SHA
600639.SHA
600640.SHA
600642.SHA
600643.SHA
600645.SHA
600648.SHA
600649.SHA
600657.SHA
600664.SHA
600667.SHA
600673.SHA
600675.SHA
600699.SHA
600704.SHA
600707.SHA
600717.SHA
600718.SHA
600728.SHA
600729.SHA
600733.SHA
600737.SHA
600739.SHA
600748.SHA
600754.SHA
600755.SHA
600765.SHA
600776.SHA
600779.SHA
600782.SHA
600787.SHA
600801.SHA
600804.SHA
600808.SHA
600811.SHA
600820.SHA
600823.SHA
600827.SHA
600835.SHA
600839.SHA
600859.SHA
600862.SHA
600863.SHA
600867.SHA
600869.SHA
600871.SHA
600875.SHA
600879.SHA
600881.SHA
600884.SHA
600885.SHA
600895.SHA
600901.SHA
600903.SHA
600908.SHA
600909.SHA
600917.SHA
600928.SHA
600959.SHA
600967.SHA
600968.SHA
600970.SHA
600985.SHA
601000.SHA
601003.SHA
601005.SHA
601016.SHA
601068.SHA
601098.SHA
601099.SHA
601106.SHA
601118.SHA
601127.SHA
601128.SHA
601139.SHA
601168.SHA
601179.SHA
601200.SHA
601228.SHA
601233.SHA
601298.SHA
601333.SHA
601456.SHA
601512.SHA
601598.SHA
601608.SHA
601611.SHA
601615.SHA
601689.SHA
601699.SHA
601717.SHA
601718.SHA
601778.SHA
601799.SHA
601801.SHA
601828.SHA
601860.SHA
601865.SHA
601866.SHA
601869.SHA
601880.SHA
601928.SHA
601958.SHA
601966.SHA
601969.SHA
601975.SHA
601992.SHA
601997.SHA
603000.SHA
603056.SHA
603077.SHA
603198.SHA
603225.SHA
603228.SHA
603256.SHA
603260.SHA
603290.SHA
603317.SHA
603328.SHA
603338.SHA
603345.SHA
603355.SHA
603377.SHA
603379.SHA
603444.SHA
603486.SHA
603515.SHA
603568.SHA
603589.SHA
603605.SHA
603638.SHA
603650.SHA
603708.SHA
603712.SHA
603719.SHA
603737.SHA
603786.SHA
603806.SHA
603816.SHA
603858.SHA
603866.SHA
603868.SHA
603882.SHA
603883.SHA
603885.SHA
603888.SHA
603893.SHA
603927.SHA
603983.SHA
688002.SHA
688029.SHA
688088.SHA
688099.SHA
688321.SHA""",
'm9.max_count': 0,
'm17': 'M.general_feature_extractor.v7',
'm17.instruments': T.Graph.OutputPort('m9.data'),
'm17.features': T.Graph.OutputPort('m43.data'),
'm17.start_date': '',
'm17.end_date': '',
'm17.before_start_days': 90,
'm18': 'M.derived_feature_extractor.v3',
'm18.input_data': T.Graph.OutputPort('m17.data'),
'm18.features': T.Graph.OutputPort('m43.data'),
'm18.date_col': 'date',
'm18.instrument_col': 'instrument',
'm18.drop_na': True,
'm18.remove_extra_columns': False,
'm45': 'M.filter.v3',
'm45.input_data': T.Graph.OutputPort('m18.data'),
'm45.expr': 'market_cap_0>2000000000',
'm45.output_left_data': False,
'm25': 'M.standardlize.v8',
'm25.input_1': T.Graph.OutputPort('m45.data'),
'm25.input_2': T.Graph.OutputPort('m3.data'),
'm25.columns_input': '[]',
'm37': 'M.aa.v5',
'm37.input_data': T.Graph.OutputPort('m25.data'),
'm37.day_number': 150,
'm35': 'M.chinaa_stock_filter.v1',
'm35.input_data': T.Graph.OutputPort('m37.data'),
'm35.index_constituent_cond': ['全部'],
'm35.board_cond': ['全部'],
'm35.industry_cond': ['全部'],
'm35.st_cond': ['正常'],
'm35.delist_cond': ['全部'],
'm35.output_left_data': False,
'm55': 'M.dropnan.v2',
'm55.input_data': T.Graph.OutputPort('m35.data'),
'm27': 'M.dl_convert_to_bin.v2',
'm27.input_data': T.Graph.OutputPort('m55.data'),
'm27.features': T.Graph.OutputPort('m3.data'),
'm27.window_size': 5,
'm27.feature_clip': 3,
'm27.flatten': True,
'm27.window_along_col': 'instrument',
'm8': 'M.cached.v3',
'm8.input_1': T.Graph.OutputPort('m27.data'),
'm8.input_2': T.Graph.OutputPort('m3.data'),
'm8.run': m8_run_bigquant_run,
'm8.post_run': m8_post_run_bigquant_run,
'm8.input_ports': '',
'm8.params': '{}',
'm8.output_ports': '',
'm6': 'M.dl_layer_input.v1',
'm6.shape': '1,5',
'm6.batch_shape': '',
'm6.dtype': 'float32',
'm6.sparse': False,
'm6.name': '',
'm23': 'M.dl_layer_reshape.v1',
'm23.inputs': T.Graph.OutputPort('m6.data'),
'm23.target_shape': '1,5,1',
'm23.name': '',
'm28': 'M.dl_layer_conv2d.v1',
'm28.inputs': T.Graph.OutputPort('m23.data'),
'm28.filters': 40,
'm28.kernel_size': '1,5',
'm28.strides': '1,1',
'm28.padding': 'valid',
'm28.data_format': 'channels_last',
'm28.dilation_rate': '1,1',
'm28.activation': 'relu',
'm28.use_bias': True,
'm28.kernel_initializer': 'glorot_uniform',
'm28.bias_initializer': 'Zeros',
'm28.kernel_regularizer': 'None',
'm28.kernel_regularizer_l1': 0,
'm28.kernel_regularizer_l2': 0,
'm28.bias_regularizer': 'None',
'm28.bias_regularizer_l1': 0,
'm28.bias_regularizer_l2': 0,
'm28.activity_regularizer': 'None',
'm28.activity_regularizer_l1': 0,
'm28.activity_regularizer_l2': 0,
'm28.kernel_constraint': 'None',
'm28.bias_constraint': 'None',
'm28.name': '',
'm29': 'M.dl_layer_reshape.v1',
'm29.inputs': T.Graph.OutputPort('m28.data'),
'm29.target_shape': '1,40',
'm29.name': '',
'm10': 'M.dl_layer_lstm.v1',
'm10.inputs': T.Graph.OutputPort('m29.data'),
'm10.units': 1,
'm10.activation': 'tanh',
'm10.recurrent_activation': 'hard_sigmoid',
'm10.use_bias': True,
'm10.kernel_initializer': 'glorot_uniform',
'm10.recurrent_initializer': 'Orthogonal',
'm10.bias_initializer': 'Zeros',
'm10.unit_forget_bias': True,
'm10.kernel_regularizer': 'None',
'm10.kernel_regularizer_l1': 0,
'm10.kernel_regularizer_l2': 0,
'm10.recurrent_regularizer': 'None',
'm10.recurrent_regularizer_l1': 0,
'm10.recurrent_regularizer_l2': 0.01,
'm10.bias_regularizer': 'None',
'm10.bias_regularizer_l1': 0,
'm10.bias_regularizer_l2': 0,
'm10.activity_regularizer': 'None',
'm10.activity_regularizer_l2': 0.01,
'm10.kernel_constraint': 'None',
'm10.recurrent_constraint': 'None',
'm10.bias_constraint': 'None',
'm10.dropout': 0,
'm10.recurrent_dropout': 0.5,
'm10.return_sequences': False,
'm10.implementation': '2',
'm10.name': '',
'm41': 'M.dl_layer_reshape.v1',
'm41.inputs': T.Graph.OutputPort('m10.data'),
'm41.target_shape': '1,1',
'm41.name': '',
'm39': 'M.dl_layer_lstm.v1',
'm39.inputs': T.Graph.OutputPort('m41.data'),
'm39.units': 1,
'm39.activation': 'tanh',
'm39.recurrent_activation': 'hard_sigmoid',
'm39.use_bias': True,
'm39.kernel_initializer': 'glorot_uniform',
'm39.recurrent_initializer': 'Orthogonal',
'm39.bias_initializer': 'Zeros',
'm39.unit_forget_bias': True,
'm39.kernel_regularizer': 'None',
'm39.kernel_regularizer_l1': 0.005,
'm39.kernel_regularizer_l2': 0.005,
'm39.recurrent_regularizer': 'None',
'm39.recurrent_regularizer_l1': 0,
'm39.recurrent_regularizer_l2': 0.01,
'm39.bias_regularizer': 'None',
'm39.bias_regularizer_l1': 0,
'm39.bias_regularizer_l2': 0,
'm39.activity_regularizer': 'None',
'm39.activity_regularizer_l2': 0.01,
'm39.kernel_constraint': 'None',
'm39.recurrent_constraint': 'None',
'm39.bias_constraint': 'None',
'm39.dropout': 0,
'm39.recurrent_dropout': 0.5,
'm39.return_sequences': False,
'm39.implementation': '2',
'm39.name': '',
'm38': 'M.dl_layer_reshape.v1',
'm38.inputs': T.Graph.OutputPort('m39.data'),
'm38.target_shape': '1,1',
'm38.name': '',
'm57': 'M.dl_layer_lstm.v1',
'm57.inputs': T.Graph.OutputPort('m38.data'),
'm57.units': 1,
'm57.activation': 'tanh',
'm57.recurrent_activation': 'hard_sigmoid',
'm57.use_bias': True,
'm57.kernel_initializer': 'glorot_uniform',
'm57.recurrent_initializer': 'Orthogonal',
'm57.bias_initializer': 'Zeros',
'm57.unit_forget_bias': True,
'm57.kernel_regularizer': 'None',
'm57.kernel_regularizer_l1': 0.005,
'm57.kernel_regularizer_l2': 0.005,
'm57.recurrent_regularizer': 'None',
'm57.recurrent_regularizer_l1': 0,
'm57.recurrent_regularizer_l2': 0.01,
'm57.bias_regularizer': 'None',
'm57.bias_regularizer_l1': 0,
'm57.bias_regularizer_l2': 0,
'm57.activity_regularizer': 'None',
'm57.activity_regularizer_l2': 0.01,
'm57.kernel_constraint': 'None',
'm57.recurrent_constraint': 'None',
'm57.bias_constraint': 'None',
'm57.dropout': 0,
'm57.recurrent_dropout': 0.5,
'm57.return_sequences': False,
'm57.implementation': '2',
'm57.name': '',
'm12': 'M.dl_layer_dropout.v1',
'm12.inputs': T.Graph.OutputPort('m57.data'),
'm12.rate': 0.4,
'm12.noise_shape': '',
'm12.name': 'dropout1',
'm20': 'M.dl_layer_dense.v1',
'm20.inputs': T.Graph.OutputPort('m12.data'),
'm20.units': 80,
'm20.activation': 'tanh',
'm20.use_bias': True,
'm20.kernel_initializer': 'glorot_uniform',
'm20.bias_initializer': 'Zeros',
'm20.kernel_regularizer': 'None',
'm20.kernel_regularizer_l1': 0,
'm20.kernel_regularizer_l2': 0,
'm20.bias_regularizer': 'None',
'm20.bias_regularizer_l1': 0,
'm20.bias_regularizer_l2': 0,
'm20.activity_regularizer': 'L1L2',
'm20.activity_regularizer_l1': 0.005,
'm20.activity_regularizer_l2': 0.005,
'm20.user_activity_regularizer': m20_user_activity_regularizer_bigquant_run,
'm20.kernel_constraint': 'None',
'm20.bias_constraint': 'None',
'm20.name': '',
'm21': 'M.dl_layer_dropout.v1',
'm21.inputs': T.Graph.OutputPort('m20.data'),
'm21.rate': 0.4,
'm21.noise_shape': '',
'm21.name': 'dropout2',
'm30': 'M.dl_layer_dense.v1',
'm30.inputs': T.Graph.OutputPort('m21.data'),
'm30.units': 60,
'm30.activation': 'tanh',
'm30.use_bias': True,
'm30.kernel_initializer': 'glorot_uniform',
'm30.bias_initializer': 'Zeros',
'm30.kernel_regularizer': 'None',
'm30.kernel_regularizer_l1': 0.001,
'm30.kernel_regularizer_l2': 0.001,
'm30.bias_regularizer': 'None',
'm30.bias_regularizer_l1': 0,
'm30.bias_regularizer_l2': 0,
'm30.activity_regularizer': 'L1L2',
'm30.activity_regularizer_l1': 0.005,
'm30.activity_regularizer_l2': 0.005,
'm30.kernel_constraint': 'None',
'm30.bias_constraint': 'None',
'm30.name': '',
'm56': 'M.dl_layer_dropout.v1',
'm56.inputs': T.Graph.OutputPort('m30.data'),
'm56.rate': 0.3,
'm56.noise_shape': '',
'm56.name': 'dropout2',
'm59': 'M.dl_layer_dense.v1',
'm59.inputs': T.Graph.OutputPort('m56.data'),
'm59.units': 40,
'm59.activation': 'tanh',
'm59.use_bias': True,
'm59.kernel_initializer': 'glorot_uniform',
'm59.bias_initializer': 'Zeros',
'm59.kernel_regularizer': 'None',
'm59.kernel_regularizer_l1': 0.001,
'm59.kernel_regularizer_l2': 0.001,
'm59.bias_regularizer': 'None',
'm59.bias_regularizer_l1': 0,
'm59.bias_regularizer_l2': 0,
'm59.activity_regularizer': 'L1L2',
'm59.activity_regularizer_l1': 0.005,
'm59.activity_regularizer_l2': 0.005,
'm59.kernel_constraint': 'None',
'm59.bias_constraint': 'None',
'm59.name': '',
'm22': 'M.dl_layer_dense.v1',
'm22.inputs': T.Graph.OutputPort('m59.data'),
'm22.units': 1,
'm22.activation': 'tanh',
'm22.use_bias': True,
'm22.kernel_initializer': 'glorot_uniform',
'm22.bias_initializer': 'Zeros',
'm22.kernel_regularizer': 'None',
'm22.kernel_regularizer_l1': 0.003,
'm22.kernel_regularizer_l2': 0.003,
'm22.bias_regularizer': 'None',
'm22.bias_regularizer_l1': 0,
'm22.bias_regularizer_l2': 0,
'm22.activity_regularizer': 'L1L2',
'm22.activity_regularizer_l1': 0.005,
'm22.activity_regularizer_l2': 0.005,
'm22.kernel_constraint': 'None',
'm22.bias_constraint': 'None',
'm22.name': '',
'm34': 'M.dl_model_init.v1',
'm34.inputs': T.Graph.OutputPort('m6.data'),
'm34.outputs': T.Graph.OutputPort('m22.data'),
'm31': 'M.instruments.v2',
'm31.start_date': '2014-01-01',
'm31.end_date': '2016-12-31',
'm31.market': 'CN_STOCK_A',
'm31.instrument_list': """000008.SZA
000009.SZA
000012.SZA
000021.SZA
000027.SZA
000028.SZA
000031.SZA
000039.SZA
000046.SZA
000050.SZA
000060.SZA
000061.SZA
000062.SZA
000078.SZA
000089.SZA
000090.SZA
000156.SZA
000158.SZA
000301.SZA
000400.SZA
000401.SZA
000402.SZA
000415.SZA
000488.SZA
000501.SZA
000513.SZA
000519.SZA
000528.SZA
000537.SZA
000540.SZA
000543.SZA
000547.SZA
000553.SZA
000559.SZA
000563.SZA
000564.SZA
000581.SZA
000598.SZA
000600.SZA
000623.SZA
000629.SZA
000630.SZA
000636.SZA
000681.SZA
000685.SZA
000686.SZA
000690.SZA
000709.SZA
000717.SZA
000718.SZA
000729.SZA
000732.SZA
000738.SZA
000739.SZA
000750.SZA
000758.SZA
000778.SZA
000807.SZA
000813.SZA
000825.SZA
000826.SZA
000830.SZA
000869.SZA
000877.SZA
000878.SZA
000883.SZA
000887.SZA
000898.SZA
000930.SZA
000932.SZA
000937.SZA
000959.SZA
000960.SZA
000967.SZA
000970.SZA
000975.SZA
000983.SZA
000988.SZA
000990.SZA
000997.SZA
000998.SZA
000999.SZA
001872.SZA
001914.SZA
002002.SZA
002004.SZA
002010.SZA
002013.SZA
002019.SZA
002028.SZA
002030.SZA
002038.SZA
002048.SZA
002051.SZA
002056.SZA
002064.SZA
002074.SZA
002075.SZA
002078.SZA
002080.SZA
002081.SZA
002085.SZA
002092.SZA
002093.SZA
002110.SZA
002124.SZA
002128.SZA
002131.SZA
002138.SZA
002152.SZA
002155.SZA
002156.SZA
002174.SZA
002183.SZA
002185.SZA
002191.SZA
002195.SZA
002203.SZA
002212.SZA
002217.SZA
002221.SZA
002223.SZA
002233.SZA
002242.SZA
002244.SZA
002249.SZA
002250.SZA
002266.SZA
002268.SZA
002273.SZA
002281.SZA
002294.SZA
002299.SZA
002302.SZA
002317.SZA
002340.SZA
002353.SZA
002368.SZA
002372.SZA
002373.SZA
002375.SZA
002382.SZA
002385.SZA
002387.SZA
002390.SZA
002396.SZA
002399.SZA
002408.SZA
002416.SZA
002419.SZA
002423.SZA
002424.SZA
002429.SZA
002434.SZA
002440.SZA
002444.SZA
002458.SZA
002465.SZA
002468.SZA
002500.SZA
002503.SZA
002505.SZA
002506.SZA
002507.SZA
002511.SZA
002544.SZA
002557.SZA
002563.SZA
002568.SZA
002572.SZA
002583.SZA
002589.SZA
002595.SZA
002603.SZA
002625.SZA
002635.SZA
002640.SZA
002648.SZA
002653.SZA
002670.SZA
002683.SZA
002690.SZA
002701.SZA
002705.SZA
002709.SZA
002745.SZA
002791.SZA
002797.SZA
002807.SZA
002815.SZA
002818.SZA
002831.SZA
002839.SZA
002867.SZA
002901.SZA
002920.SZA
002925.SZA
002926.SZA
002936.SZA
002941.SZA
002946.SZA
002948.SZA
002957.SZA
002966.SZA
002985.SZA
300001.SZA
300002.SZA
300009.SZA
300010.SZA
300012.SZA
300017.SZA
300024.SZA
300026.SZA
300058.SZA
300070.SZA
300072.SZA
300088.SZA
300113.SZA
300115.SZA
300133.SZA
300134.SZA
300146.SZA
300166.SZA
300168.SZA
300180.SZA
300182.SZA
300207.SZA
300212.SZA
300223.SZA
300244.SZA
300257.SZA
300271.SZA
300274.SZA
300285.SZA
300296.SZA
300315.SZA
300316.SZA
300324.SZA
300357.SZA
300376.SZA
300418.SZA
300459.SZA
300463.SZA
300474.SZA
300482.SZA
300496.SZA
300595.SZA
300618.SZA
300630.SZA
300699.SZA
600006.SHA
600008.SHA
600021.SHA
600022.SHA
600026.SHA
600037.SHA
600038.SHA
600039.SHA
600053.SHA
600056.SHA
600060.SHA
600062.SHA
600064.SHA
600073.SHA
600079.SHA
600089.SHA
600094.SHA
600120.SHA
600126.SHA
600131.SHA
600132.SHA
600141.SHA
600143.SHA
600153.SHA
600155.SHA
600158.SHA
600160.SHA
600166.SHA
600167.SHA
600170.SHA
600171.SHA
600188.SHA
600195.SHA
600201.SHA
600216.SHA
600219.SHA
600256.SHA
600258.SHA
600259.SHA
600260.SHA
600266.SHA
600273.SHA
600277.SHA
600282.SHA
600291.SHA
600307.SHA
600312.SHA
600315.SHA
600316.SHA
600325.SHA
600329.SHA
600338.SHA
600339.SHA
600348.SHA
600350.SHA
600372.SHA
600373.SHA
600376.SHA
600380.SHA
600388.SHA
600392.SHA
600398.SHA
600409.SHA
600410.SHA
600415.SHA
600418.SHA
600426.SHA
600435.SHA
600446.SHA
600460.SHA
600466.SHA
600486.SHA
600497.SHA
600500.SHA
600507.SHA
600511.SHA
600515.SHA
600516.SHA
600517.SHA
600528.SHA
600529.SHA
600535.SHA
600545.SHA
600546.SHA
600549.SHA
600556.SHA
600563.SHA
600566.SHA
600567.SHA
600572.SHA
600575.SHA
600580.SHA
600582.SHA
600597.SHA
600598.SHA
600623.SHA
600633.SHA
600639.SHA
600640.SHA
600642.SHA
600643.SHA
600645.SHA
600648.SHA
600649.SHA
600657.SHA
600664.SHA
600667.SHA
600673.SHA
600675.SHA
600699.SHA
600704.SHA
600707.SHA
600717.SHA
600718.SHA
600728.SHA
600729.SHA
600733.SHA
600737.SHA
600739.SHA
600748.SHA
600754.SHA
600755.SHA
600765.SHA
600776.SHA
600779.SHA
600782.SHA
600787.SHA
600801.SHA
600804.SHA
600808.SHA
600811.SHA
600820.SHA
600823.SHA
600827.SHA
600835.SHA
600839.SHA
600859.SHA
600862.SHA
600863.SHA
600867.SHA
600869.SHA
600871.SHA
600875.SHA
600879.SHA
600881.SHA
600884.SHA
600885.SHA
600895.SHA
600901.SHA
600903.SHA
600908.SHA
600909.SHA
600917.SHA
600928.SHA
600959.SHA
600967.SHA
600968.SHA
600970.SHA
600985.SHA
601000.SHA
601003.SHA
601005.SHA
601016.SHA
601068.SHA
601098.SHA
601099.SHA
601106.SHA
601118.SHA
601127.SHA
601128.SHA
601139.SHA
601168.SHA
601179.SHA
601200.SHA
601228.SHA
601233.SHA
601298.SHA
601333.SHA
601456.SHA
601512.SHA
601598.SHA
601608.SHA
601611.SHA
601615.SHA
601689.SHA
601699.SHA
601717.SHA
601718.SHA
601778.SHA
601799.SHA
601801.SHA
601828.SHA
601860.SHA
601865.SHA
601866.SHA
601869.SHA
601880.SHA
601928.SHA
601958.SHA
601966.SHA
601969.SHA
601975.SHA
601992.SHA
601997.SHA
603000.SHA
603056.SHA
603077.SHA
603198.SHA
603225.SHA
603228.SHA
603256.SHA
603260.SHA
603290.SHA
603317.SHA
603328.SHA
603338.SHA
603345.SHA
603355.SHA
603377.SHA
603379.SHA
603444.SHA
603486.SHA
603515.SHA
603568.SHA
603589.SHA
603605.SHA
603638.SHA
603650.SHA
603708.SHA
603712.SHA
603719.SHA
603737.SHA
603786.SHA
603806.SHA
603816.SHA
603858.SHA
603866.SHA
603868.SHA
603882.SHA
603883.SHA
603885.SHA
603888.SHA
603893.SHA
603927.SHA
603983.SHA
688002.SHA
688029.SHA
688088.SHA
688099.SHA
688321.SHA""",
'm31.max_count': 0,
'm2': 'M.advanced_auto_labeler.v2',
'm2.instruments': T.Graph.OutputPort('m31.data'),
'm2.label_expr': """# #号开始的表示注释
# 0. 每行一个,顺序执行,从第二个开始,可以使用label字段
# 1. 可用数据字段见 https://bigquant.com/docs/data_history_data.html
# 添加benchmark_前缀,可使用对应的benchmark数据
# 2. 可用操作符和函数见 `表达式引擎 <https://bigquant.com/docs/big_expr.html>`_
# 计算收益:5日收盘价(作为卖出价格)除以明日开盘价(作为买入价格)
(shift(close, -5) / shift(open, -1) - 1)*10
# 极值处理:用1%和99%分位的值做clip
clip(label, all_quantile(label, 0.01), all_quantile(label, 0.99))
# 过滤掉一字涨停的情况 (设置label为NaN,在后续处理和训练中会忽略NaN的label)
where(shift(high, -1) == shift(low, -1), NaN, label)
""",
'm2.start_date': '',
'm2.end_date': '',
'm2.benchmark': '000300.SHA',
'm2.drop_na_label': True,
'm2.cast_label_int': False,
'm13': 'M.standardlize.v8',
'm13.input_1': T.Graph.OutputPort('m2.data'),
'm13.columns_input': 'label',
'm15': 'M.general_feature_extractor.v7',
'm15.instruments': T.Graph.OutputPort('m31.data'),
'm15.features': T.Graph.OutputPort('m43.data'),
'm15.start_date': '',
'm15.end_date': '',
'm15.before_start_days': 90,
'm16': 'M.derived_feature_extractor.v3',
'm16.input_data': T.Graph.OutputPort('m15.data'),
'm16.features': T.Graph.OutputPort('m43.data'),
'm16.date_col': 'date',
'm16.instrument_col': 'instrument',
'm16.drop_na': True,
'm16.remove_extra_columns': False,
'm44': 'M.filter.v3',
'm44.input_data': T.Graph.OutputPort('m16.data'),
'm44.expr': 'market_cap_0>2000000000',
'm44.output_left_data': False,
'm14': 'M.standardlize.v8',
'm14.input_1': T.Graph.OutputPort('m44.data'),
'm14.input_2': T.Graph.OutputPort('m3.data'),
'm14.columns_input': '[]',
'm7': 'M.join.v3',
'm7.data1': T.Graph.OutputPort('m13.data'),
'm7.data2': T.Graph.OutputPort('m14.data'),
'm7.on': 'date,instrument',
'm7.how': 'inner',
'm7.sort': False,
'm36': 'M.aa.v5',
'm36.input_data': T.Graph.OutputPort('m7.data'),
'm36.day_number': 150,
'm33': 'M.chinaa_stock_filter.v1',
'm33.input_data': T.Graph.OutputPort('m36.data'),
'm33.index_constituent_cond': ['全部'],
'm33.board_cond': ['全部'],
'm33.industry_cond': ['全部'],
'm33.st_cond': ['正常'],
'm33.delist_cond': ['全部'],
'm33.output_left_data': False,
'm54': 'M.dropnan.v2',
'm54.input_data': T.Graph.OutputPort('m33.data'),
'm26': 'M.dl_convert_to_bin.v2',
'm26.input_data': T.Graph.OutputPort('m54.data'),
'm26.features': T.Graph.OutputPort('m3.data'),
'm26.window_size': 5,
'm26.feature_clip': 3,
'm26.flatten': True,
'm26.window_along_col': 'instrument',
'm4': 'M.cached.v3',
'm4.input_1': T.Graph.OutputPort('m26.data'),
'm4.input_2': T.Graph.OutputPort('m3.data'),
'm4.run': m4_run_bigquant_run,
'm4.post_run': m4_post_run_bigquant_run,
'm4.input_ports': '',
'm4.params': '{}',
'm4.output_ports': '',
'm5': 'M.dl_model_train.v1',
'm5.input_model': T.Graph.OutputPort('m34.data'),
'm5.training_data': T.Graph.OutputPort('m4.data_1'),
'm5.optimizer': '自定义',
'm5.user_optimizer': m5_user_optimizer_bigquant_run,
'm5.loss': 'mean_squared_error',
'm5.metrics': 'mae',
'm5.batch_size': 2048,
'm5.epochs': 1,
'm5.custom_objects': m5_custom_objects_bigquant_run,
'm5.n_gpus': 0,
'm5.verbose': '2:每个epoch输出一行记录',
'm11': 'M.dl_model_predict.v1',
'm11.trained_model': T.Graph.OutputPort('m5.data'),
'm11.input_data': T.Graph.OutputPort('m8.data_1'),
'm11.batch_size': 1024,
'm11.n_gpus': 0,
'm11.verbose': '2:每个epoch输出一行记录',
'm24': 'M.cached.v3',
'm24.input_1': T.Graph.OutputPort('m11.data'),
'm24.input_2': T.Graph.OutputPort('m55.data'),
'm24.run': m24_run_bigquant_run,
'm24.post_run': m24_post_run_bigquant_run,
'm24.input_ports': '',
'm24.params': '{}',
'm24.output_ports': '',
'm40': 'M.trade.v4',
'm40.instruments': T.Graph.OutputPort('m9.data'),
'm40.options_data': T.Graph.OutputPort('m24.data_1'),
'm40.start_date': '',
'm40.end_date': '',
'm40.initialize': m40_initialize_bigquant_run,
'm40.handle_data': m40_handle_data_bigquant_run,
'm40.prepare': m40_prepare_bigquant_run,
'm40.volume_limit': 0.025,
'm40.order_price_field_buy': 'open',
'm40.order_price_field_sell': 'close',
'm40.capital_base': 5000000,
'm40.auto_cancel_non_tradable_orders': True,
'm40.data_frequency': 'daily',
'm40.price_type': '真实价格',
'm40.product_type': '股票',
'm40.plot_charts': True,
'm40.backtest_only': False,
'm40.benchmark': '000905.SHA',
'm1': 'M.input_features.v1',
'm1.features': """mf_net_pct_s_0 ##8/33
#mean(where(close_0>open_0, ((close_0-low_0)/(high_0-low_0))*(turn_0/avg_turn_5), -1*((high_0-close_0)/(high_0-low_0))*(turn_0/avg_turn_5)),5) #-4/25///47
(close_0-low_0)/close_0 ##4/24//42
(high_0-close_0)/close_0 ##21/
(open_0-close_0)/close_0 #3/14//36\\\\72
#(high_0-low_0)/close_0 #0/25\\\\\69
sqrt(high_0*low_0)-amount_0/volume_0*adjust_factor_0 ##2/22///47////54
rank_volatility_5_0 ##-8/19///47\\\77
#(high_0-close_0)/(high_0-low_0)#-4/19//37///46\55
#sum(where(close_0>open_0, ((close_0-low_0)/(high_0-low_0))*(turn_0/avg_turn_5), -1*((high_0-close_0)/(high_0-low_0))*(turn_0/avg_turn_5)),5) #-4/16
#ta_bbands_u(close_0, 5) #-10/17
std(turn_0,5) ##1/18//36///53
#(high_0-open_0)/close_0 #-1/18/36///46
(open_0-low_0)/close_0 ##5/16\\67\\\\\69
#(close_0-ts_min(close_0, 5))/close_0 #-6/27/
#(close_0-ts_min(low_0, 5))/close_0 #-5/20///50\61
#(close_0-open_4)/close_0 #-1/12
#(ts_max(high_0, 5)-close_0)/close_0 #8/23///50
(ts_max(high_0, 5)-ts_min(low_0, 5))/close_0 ##-1/18//37///50\72
#(ts_max(high_0, 5)-open_4)/close_0 #-4/20
#(open_4-ts_min(low_0, 5))/close_0 #4/21
""",
'm19': 'M.input_features.v1',
'm19.features': """mf_net_pct_s_0 ##8/33
#mean(where(close_0>open_0, ((close_0-low_0)/(high_0-low_0))*(turn_0/avg_turn_5), -1*((high_0-close_0)/(high_0-low_0))*(turn_0/avg_turn_5)),5) #-4/25///47
(close_0-low_0)/close_0 ##4/24//42
(high_0-close_0)/close_0 ##21/
(open_0-close_0)/close_0 #3/14//36\\\\72
(high_0-low_0)/close_0 #0/25\\\\\73
sqrt(high_0*low_0)-amount_0/volume_0*adjust_factor_0 ##2/22///47////54
rank_volatility_5_0 ##-8/19///47\\\77
#(high_0-close_0)/(high_0-low_0)#-4/19//37
#sum(where(close_0>open_0, ((close_0-low_0)/(high_0-low_0))*(turn_0/avg_turn_5), -1*((high_0-close_0)/(high_0-low_0))*(turn_0/avg_turn_5)),5) #-4/16
#ta_bbands_u(close_0, 5) #-10/17
std(turn_0,5) ##1/18//36///53
#(high_0-open_0)/close_0 #-1/18/36///46
(open_0-low_0)/close_0 ##5/16\\67
#(close_0-ts_min(close_0, 5))/close_0 #-6/27/
(close_0-ts_min(low_0, 5))/close_0 #-5/20///50\\\\\73
#(close_0-open_4)/close_0 #-1/12
#(ts_max(high_0, 5)-close_0)/close_0 #8/23///50
(ts_max(high_0, 5)-ts_min(low_0, 5))/close_0 ##-1/18//37///50\72
#(ts_max(high_0, 5)-open_4)/close_0 #-4/20
#(open_4-ts_min(low_0, 5))/close_0 #4/21
""",
})
# g.run({})
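# Illustrative sketch (not called by the strategy): a rough pandas rendering of the label
# expression used in m2 for a single instrument. It buys at the next day's open, sells at the
# close 5 days later, scales the return by 10, clips at the 1%/99% quantiles and drops rows where
# the next bar trades at a single price (a one-price limit bar). Column names ('close', 'open',
# 'high', 'low') are assumptions for this sketch, not a guarantee of the platform's schema.
def _demo_label_expression(df):
    import numpy as np
    label = (df['close'].shift(-5) / df['open'].shift(-1) - 1) * 10
    label = label.clip(label.quantile(0.01), label.quantile(0.99))
    one_price_bar = df['high'].shift(-1) == df['low'].shift(-1)
    return label.where(~one_price_bar, np.nan)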
def m42_run_bigquant_run(
bq_graph,
inputs,
    trading_days_market='CN',             # which market's trading calendar to use
    train_instruments_mid='m31',          # module id of the training-set instrument list
    test_instruments_mid='m9',            # module id of the test-set instrument list
    predict_mid='m24',                    # module id of the prediction module
    trade_mid='m40',                      # module id of the backtest module
    start_date='2017-08-01',              # data start date
    end_date=T.live_run_param('trading_date', '2018-12-31'),  # data end date
    train_update_days=30,                 # retraining interval, in trading days
    train_update_days_for_live=30,        # retraining interval in live/paper trading, in trading days; set this to use a different model-update cycle in live mode
    train_data_min_days=125,              # minimum training window in trading days, so the first roll ends at start date + train_data_min_days
    train_data_max_days=125,              # maximum training window in trading days; 0 means unlimited, otherwise each roll starts at max(roll end date - train_data_max_days, start date)
    rolling_count_for_live=1,             # number of rolls kept in live mode; usually only the last trained model is needed, so 1; increase it if a roll might contain no training data; 0 means no limit
):
def merge_datasources(input_1):
df_list = [ds.read_df() for ds in input_1]
df = pd.concat(df_list)
instrument_data = {
'start_date': df['date'].min().strftime('%Y-%m-%d'),
'end_date': df['date'].max().strftime('%Y-%m-%d'),
'instruments': list(set(df['instrument'])),
}
return Outputs(data=DataSource.write_df(df), instrument_data=DataSource.write_pickle(instrument_data))
def gen_rolling_dates(trading_days_market, start_date, end_date, train_update_days, train_update_days_for_live, train_data_min_days, train_data_max_days, rolling_count_for_live):
        tdays = list(D.trading_days(market=trading_days_market, start_date=start_date, end_date=end_date)['date'])
        # Are we in a live/paper-trading run?
        is_live_run = T.live_run_param('trading_date', None) is not None
if is_live_run and train_update_days_for_live:
train_update_days = train_update_days_for_live
rollings = []
train_end_date = train_data_min_days
while train_end_date < len(tdays):
if train_data_max_days is not None:
train_start_date = max(train_end_date - train_data_max_days, 0)
else:
train_start_date = start_date
rollings.append({
'train_start_date': tdays[train_start_date].strftime('%Y-%m-%d'),
'train_end_date': tdays[train_end_date - 1].strftime('%Y-%m-%d'),
'test_start_date': tdays[train_end_date].strftime('%Y-%m-%d'),
'test_end_date': tdays[min(train_end_date + train_update_days, len(tdays)) - 1].strftime('%Y-%m-%d'),
})
train_end_date += train_update_days
if not rollings:
raise Exception('没有滚动需要执行,请检查配置')
if is_live_run and rolling_count_for_live:
rollings = rollings[-rolling_count_for_live:]
return rollings
g = bq_graph
rolling_dates = gen_rolling_dates(
trading_days_market, start_date, end_date, train_update_days, train_update_days_for_live, train_data_min_days, train_data_max_days, rolling_count_for_live)
    # Train and predict for each roll
results = []
for rolling in rolling_dates:
parameters = {}
        # Disable the backtest module during the rolling train/predict runs
parameters[trade_mid + '.__enabled__'] = False
parameters[train_instruments_mid + '.start_date'] = rolling['train_start_date']
parameters[train_instruments_mid + '.end_date'] = rolling['train_end_date']
parameters[test_instruments_mid + '.start_date'] = rolling['test_start_date']
parameters[test_instruments_mid + '.end_date'] = rolling['test_end_date']
# print('------ rolling_train:', parameters)
results.append(g.run(parameters))
    # Merge the per-roll predictions and run the backtest once on the merged result
mx = M.cached.v3(run=merge_datasources, input_1=[result[predict_mid].data_1 for result in results])
parameters = {}
parameters['*.__enabled__'] = False
parameters[trade_mid + '.__enabled__'] = True
parameters[trade_mid + '.instruments'] = mx.instrument_data
parameters[trade_mid + '.options_data'] = mx.data
trade = g.run(parameters)
return {'rollings': results, 'trade': trade}
m42 = M.hyper_rolling_train.v1(
run=m42_run_bigquant_run,
run_now=True,
bq_graph=g
)
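# Illustrative sketch (not called by the strategy): how the rolling windows above are cut.
# With train_data_min_days = train_data_max_days = 125 and train_update_days = 30, each roll
# trains on the latest 125 trading days and then predicts the next 30. Index-based toy version:
def _demo_rolling_windows(n_trading_days=200, update_days=30, min_days=125, max_days=125):
    rollings = []
    train_end = min_days
    while train_end < n_trading_days:
        train_start = max(train_end - max_days, 0) if max_days else 0
        test_end = min(train_end + update_days, n_trading_days)
        rollings.append({'train': (train_start, train_end - 1), 'test': (train_end, test_end - 1)})
        train_end += update_days
    return rollings  # e.g. first roll: train on days 0-124, test on days 125-154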
[2022-08-09 18:42:38.075184] INFO: moduleinvoker: input_features.v1 开始运行..
[2022-08-09 18:42:38.164643] INFO: moduleinvoker: input_features.v1 运行完成[0.089465s].
[2022-08-09 18:42:38.188666] INFO: moduleinvoker: instruments.v2 开始运行..
[2022-08-09 18:42:38.199451] INFO: moduleinvoker: 命中缓存
[2022-08-09 18:42:38.201396] INFO: moduleinvoker: instruments.v2 运行完成[0.012747s].
[2022-08-09 18:42:38.219363] INFO: moduleinvoker: dl_layer_input.v1 运行完成[0.007516s].
[2022-08-09 18:42:38.229138] INFO: moduleinvoker: instruments.v2 开始运行..
[2022-08-09 18:42:38.238967] INFO: moduleinvoker: 命中缓存
[2022-08-09 18:42:38.240917] INFO: moduleinvoker: instruments.v2 运行完成[0.011791s].
[2022-08-09 18:42:38.245964] INFO: moduleinvoker: input_features.v1 开始运行..
[2022-08-09 18:42:38.255011] INFO: moduleinvoker: 命中缓存
[2022-08-09 18:42:38.256790] INFO: moduleinvoker: input_features.v1 运行完成[0.010836s].
[2022-08-09 18:42:38.261418] INFO: moduleinvoker: input_features.v1 开始运行..
[2022-08-09 18:42:38.281110] INFO: moduleinvoker: 命中缓存
[2022-08-09 18:42:38.283087] INFO: moduleinvoker: input_features.v1 运行完成[0.02166s].
[2022-08-09 18:42:38.291205] INFO: moduleinvoker: input_features.v1 开始运行..
[2022-08-09 18:42:38.373592] INFO: moduleinvoker: input_features.v1 运行完成[0.082401s].
[2022-08-09 18:42:38.404653] INFO: moduleinvoker: dl_layer_reshape.v1 运行完成[0.020642s].
[2022-08-09 18:42:38.417159] INFO: moduleinvoker: advanced_auto_labeler.v2 开始运行..
[2022-08-09 18:42:38.427718] INFO: moduleinvoker: 命中缓存
[2022-08-09 18:42:38.430412] INFO: moduleinvoker: advanced_auto_labeler.v2 运行完成[0.013254s].
[2022-08-09 18:42:38.446775] INFO: moduleinvoker: general_feature_extractor.v7 开始运行..
[2022-08-09 18:42:43.490644] INFO: 基础特征抽取: 年份 2017, 特征行数=70822
[2022-08-09 18:42:48.182038] INFO: 基础特征抽取: 年份 2018, 特征行数=9115
[2022-08-09 18:42:48.226223] INFO: 基础特征抽取: 总行数: 79937
[2022-08-09 18:42:48.232398] INFO: moduleinvoker: general_feature_extractor.v7 运行完成[9.785628s].
[2022-08-09 18:42:48.249489] INFO: moduleinvoker: general_feature_extractor.v7 开始运行..
[2022-08-09 18:42:51.091843] INFO: 基础特征抽取: 年份 2017, 特征行数=18137
[2022-08-09 18:42:58.245423] INFO: 基础特征抽取: 年份 2018, 特征行数=22243
[2022-08-09 18:42:58.299320] INFO: 基础特征抽取: 总行数: 40380
[2022-08-09 18:42:58.308694] INFO: moduleinvoker: general_feature_extractor.v7 运行完成[10.059227s].
[2022-08-09 18:42:58.366989] INFO: moduleinvoker: dl_layer_conv2d.v1 运行完成[0.048287s].
[2022-08-09 18:42:58.376739] INFO: moduleinvoker: standardlize.v8 开始运行..
[2022-08-09 18:42:58.399416] INFO: moduleinvoker: 命中缓存
[2022-08-09 18:42:58.401580] INFO: moduleinvoker: standardlize.v8 运行完成[0.024858s].
[2022-08-09 18:42:58.418087] INFO: moduleinvoker: derived_feature_extractor.v3 开始运行..
[2022-08-09 18:42:58.740713] INFO: derived_feature_extractor: 提取完成 mean(where(close_0>open_0, ((close_0-low_0)/(high_0-low_0))*(turn_0/avg_turn_5), -1*((high_0-close_0)/(high_0-low_0))*(turn_0/avg_turn_5)),5) #-4/25///47, 0.080s
[2022-08-09 18:42:58.942331] INFO: derived_feature_extractor: /y_2017, 70822
[2022-08-09 18:42:59.083581] INFO: derived_feature_extractor: /y_2018, 9115
[2022-08-09 18:42:59.214027] INFO: moduleinvoker: derived_feature_extractor.v3 运行完成[0.795943s].
[2022-08-09 18:42:59.223609] INFO: moduleinvoker: derived_feature_extractor.v3 开始运行..
[2022-08-09 18:42:59.459720] INFO: derived_feature_extractor: 提取完成 mean(where(close_0>open_0, ((close_0-low_0)/(high_0-low_0))*(turn_0/avg_turn_5), -1*((high_0-close_0)/(high_0-low_0))*(turn_0/avg_turn_5)),5) #-4/25///47, 0.047s
[2022-08-09 18:42:59.696397] INFO: derived_feature_extractor: /y_2017, 18137
[2022-08-09 18:42:59.797668] INFO: derived_feature_extractor: /y_2018, 22243
[2022-08-09 18:42:59.908999] INFO: moduleinvoker: derived_feature_extractor.v3 运行完成[0.685429s].
[2022-08-09 18:42:59.928068] INFO: moduleinvoker: dl_layer_reshape.v1 运行完成[0.011361s].
[2022-08-09 18:42:59.941446] INFO: moduleinvoker: filter.v3 开始运行..
[2022-08-09 18:43:00.030650] INFO: filter: 使用表达式 market_cap_0>2000000000 过滤
[2022-08-09 18:43:00.395220] INFO: filter: 过滤 /y_2017, 68836/0/68836
[2022-08-09 18:43:00.562671] INFO: filter: 过滤 /y_2018, 9050/0/9050
[2022-08-09 18:43:00.611300] INFO: moduleinvoker: filter.v3 运行完成[0.669843s].
[2022-08-09 18:43:00.621275] INFO: moduleinvoker: filter.v3 开始运行..
[2022-08-09 18:43:00.638702] INFO: filter: 使用表达式 market_cap_0>2000000000 过滤
[2022-08-09 18:43:00.750682] INFO: filter: 过滤 /y_2017, 16325/0/16325
[2022-08-09 18:43:00.943634] INFO: filter: 过滤 /y_2018, 22057/0/22066
[2022-08-09 18:43:01.083148] INFO: moduleinvoker: filter.v3 运行完成[0.461853s].
[2022-08-09 18:43:01.127952] ERROR: moduleinvoker: module name: dl_layer_lstm, module version: v1, trackeback: NotImplementedError: Cannot convert a symbolic Tensor (lstm/strided_slice:0) to a numpy array. This error may indicate that you're trying to pass a Tensor to a NumPy call, which is not supported
[2022-08-09 18:43:01.138237] ERROR: moduleinvoker: module name: hyper_rolling_train, module version: v1, trackeback: NotImplementedError: Cannot convert a symbolic Tensor (lstm/strided_slice:0) to a numpy array. This error may indicate that you're trying to pass a Tensor to a NumPy call, which is not supported