# 本代码由可视化策略环境自动生成 2021年11月21日 14:08
# 本代码单元只能在可视化模式下编辑。您也可以拷贝代码,粘贴到新建的代码单元或者策略,然后修改。
# 回测引擎:初始化函数,只执行一次
def m63_initialize_bigquant_run(context):
    # 加载预测数据:通过 options 传入,使用 read_df 函数加载到内存 (DataFrame)
    context.ranker_prediction = context.options['data'].read_df()
    # 系统已经设置了默认的交易手续费和滑点,要修改手续费可使用如下函数
    context.set_commission(PerOrder(buy_cost=0.0003, sell_cost=0.0013, min_cost=5))
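    # 手续费示例(仅为示意,按上述参数粗略估算,实际以回测撮合结果为准):
    # 买入 10 万元:费用约 max(100000*0.0003, 5) = 30 元;
    # 卖出 10 万元:费用约 max(100000*0.0013, 5) = 130 元(卖出费率通常已包含印花税)。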
# 设置买入的股票数量,这里买入预测股票列表排名靠前的5只
stock_count = 5
# 每只的股票的权重,如下的权重分配会使得靠前的股票分配多一点的资金,[0.339160, 0.213986, 0.169580, ..]
context.stock_weights = T.norm([1 / math.log(i + 2) for i in range(0, stock_count)])
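    # 权重计算示例(仅为示意):stock_count=5 时,原始权重为 [1/ln(2), 1/ln(3), ..., 1/ln(6)]
    # ≈ [1.4427, 0.9102, 0.7213, 0.6213, 0.5581],T.norm 按总和(≈4.2536)归一化后
    # 约为 [0.3392, 0.2140, 0.1696, 0.1461, 0.1312],与上面注释中的数值一致。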
# 设置每只股票占用的最大资金比例
    context.max_cash_per_instrument = 0.1
context.options['hold_days'] = 5
from zipline.finance.slippage import SlippageModel
class FixedPriceSlippage(SlippageModel):
def process_order(self, data, order, bar_volume=0, trigger_check_price=0):
if order.limit is None:
price_field = self._price_field_buy if order.amount > 0 else self._price_field_sell
price = data.current(order.asset, price_field)
else:
price = data.current(order.asset, self._price_field_buy)
# 返回希望成交的价格和数量
return (price, order.amount)
# 设置price_field,默认是开盘买入,收盘卖出
context.fix_slippage = FixedPriceSlippage(price_field_buy='open', price_field_sell='close')
context.set_slippage(us_equities=context.fix_slippage)
# 回测引擎:每日数据处理函数,每天执行一次
def m63_handle_data_bigquant_run(context, data):
# 获取当前持仓
positions = {e.symbol: p.amount * p.last_sale_price
for e, p in context.portfolio.positions.items()}
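    # positions 形如 {'600519.SHA': 98600.0, ...}(数值仅为示意),键为股票代码,值为持仓市值(数量*最新成交价)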
today = data.current_dt.strftime('%Y-%m-%d')
# 按日期过滤得到今日的预测数据
ranker_prediction = context.ranker_prediction[
context.ranker_prediction.date == today]
    try:
        # 大盘风控模块:读取风控数据 bm_0;当其为 1(即 benchmark_risk > 0)时认为市场有风险,
        # 全部平仓,不再执行当日其它操作
        benchmark_risk = ranker_prediction['bm_0'].values[0]
        if benchmark_risk > 0:
            for instrument in positions.keys():
                context.order_target(context.symbol(instrument), 0)
            print(today, '大盘风控止损触发,全仓卖出')
            return
    except Exception:
        print('缺失风控数据!')
# 1. 资金分配
# 平均持仓时间是hold_days,每日都将买入股票,每日预期使用 1/hold_days 的资金
# 实际操作中,会存在一定的买入误差,所以在前hold_days天,等量使用资金;之后,尽量使用剩余资金(这里设置最多用等量的1.5倍)
is_staging = context.trading_day_index < context.options['hold_days'] # 是否在建仓期间(前 hold_days 天)
cash_avg = context.portfolio.portfolio_value / context.options['hold_days']
cash_for_buy = min(context.portfolio.cash, (1 if is_staging else 1.5) * cash_avg)
cash_for_sell = cash_avg - (context.portfolio.cash - cash_for_buy)
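    # 资金分配示例(仅为示意):假设 hold_days=5、组合总市值 100 万、可用现金 15 万,且已过建仓期:
    # cash_avg = 1000000/5 = 200000;cash_for_buy = min(150000, 1.5*200000) = 150000;
    # cash_for_sell = 200000 - (150000 - 150000) = 200000。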
# 2. 根据需要加入移动止赢止损模块、固定天数卖出模块、ST或退市股卖出模块
stock_sold = [] # 记录卖出的股票,防止多次卖出出现空单
#------------------------START:止赢止损模块(含建仓期)---------------
current_stopwin_stock=[]
current_stoploss_stock = []
positions_cost={e.symbol:p.cost_basis for e,p in context.portfolio.positions.items()}
if len(positions)>0:
for instrument in positions.keys():
stock_cost=positions_cost[instrument]
stock_market_price=data.current(context.symbol(instrument),'price')
            volume_since_buy = data.history(context.symbol(instrument), 'volume', 6, '1d')  # 仅供下方注释掉的放天量止损模块使用
            # 涨幅达到60%(现价/成本价-1 >= 0.6)且为可交易状态就止盈
if stock_market_price/stock_cost-1>=0.6 and data.can_trade(context.symbol(instrument)):
context.order_target_percent(context.symbol(instrument),0)
cash_for_sell -= positions[instrument]
current_stopwin_stock.append(instrument)
# 亏5%并且为可交易状态就止损
if stock_market_price/stock_cost-1 <= -0.05 and data.can_trade(context.symbol(instrument)):
context.order_target_percent(context.symbol(instrument),0)
cash_for_sell -= positions[instrument]
current_stoploss_stock.append(instrument)
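            # 止盈止损示例(仅为示意):若成本价 10 元、现价 16 元,16/10-1=0.6 >= 0.6,触发止盈;
            # 若现价 9.4 元,9.4/10-1=-0.06 <= -0.05,触发止损。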
# 放天量 止损:
# if (volume_since_buy[0]>1.5*volume_since_buy[1]) |(volume_since_buy[0]>1.5*(volume_since_buy[1]+volume_since_buy[2]+volume_since_buy[3]+volume_since_buy[4]+volume_since_buy[5])/5):
# context.order_target_percent(context.symbol(instrument),0)
# cash_for_sell -= positions[instrument]
# current_stoploss_stock.append(instrument)
if len(current_stopwin_stock)>0:
print(today,'止盈股票列表',current_stopwin_stock)
stock_sold += current_stopwin_stock
if len(current_stoploss_stock)>0:
print(today,'止损股票列表',current_stoploss_stock)
stock_sold += current_stoploss_stock
#--------------------------END: 止赢止损模块--------------------------
#--------------------------START:持有固定天数卖出(不含建仓期)-----------
current_stopdays_stock = []
positions_lastdate = {e.symbol:p.last_sale_date for e,p in context.portfolio.positions.items()}
# 不是建仓期(在前hold_days属于建仓期)
if not is_staging:
for instrument in positions.keys():
#如果上面的止盈止损已经卖出过了,就不要重复卖出以防止产生空单
if instrument in stock_sold:
continue
            # 今天和上次交易的时间相隔22天就全部卖出;datetime.timedelta(22) 也可以换成自己需要的天数,比如 datetime.timedelta(context.options['hold_days'])
if data.current_dt - positions_lastdate[instrument]>=datetime.timedelta(22) and data.can_trade(context.symbol(instrument)):
context.order_target_percent(context.symbol(instrument), 0)
current_stopdays_stock.append(instrument)
cash_for_sell -= positions[instrument]
if len(current_stopdays_stock)>0:
print(today,'固定天数卖出列表',current_stopdays_stock)
stock_sold += current_stopdays_stock
#------------------------- END:持有固定天数卖出-----------------------
#-------------------------- START: ST和退市股卖出 ---------------------
st_stock_list = []
for instrument in positions.keys():
try:
instrument_name = ranker_prediction[ranker_prediction.instrument==instrument].name.values[0]
# 如果股票状态变为了st或者退市 则卖出
if 'ST' in instrument_name or '退' in instrument_name:
if instrument in stock_sold:
continue
if data.can_trade(context.symbol(instrument)):
context.order_target(context.symbol(instrument), 0)
st_stock_list.append(instrument)
cash_for_sell -= positions[instrument]
except:
continue
    if len(st_stock_list) > 0:
print(today,'持仓出现st股/退市股',st_stock_list,'进行卖出处理')
stock_sold += st_stock_list
#-------------------------- END: ST和退市股卖出 ---------------------
# 3. 生成轮仓卖出订单:hold_days天之后才开始卖出;对持仓的股票,按机器学习算法预测的排序末位淘汰
if not is_staging and cash_for_sell > 0:
instruments = list(reversed(list(ranker_prediction.instrument[ranker_prediction.instrument.apply(
lambda x: x in positions)])))
for instrument in instruments:
# 如果资金够了就不卖出了
if cash_for_sell <= 0:
break
#防止多个止损条件同时满足,出现多次卖出产生空单
if instrument in stock_sold:
continue
context.order_target(context.symbol(instrument), 0)
cash_for_sell -= positions[instrument]
stock_sold.append(instrument)
# 4. 生成轮仓买入订单:按机器学习算法预测的排序,买入前面的stock_count只股票
# 计算今日跌停的股票
dt_list = list(ranker_prediction[ranker_prediction.price_limit_status_0==1].instrument)
# 计算今日ST/退市的股票
st_list = list(ranker_prediction[ranker_prediction.name.str.contains('ST')|ranker_prediction.name.str.contains('退')].instrument)
# 计算所有禁止买入的股票池
banned_list = stock_sold+dt_list+st_list
buy_cash_weights = context.stock_weights
buy_instruments=[k for k in list(ranker_prediction.instrument) if k not in banned_list][:len(buy_cash_weights)]
max_cash_per_instrument = context.portfolio.portfolio_value * context.max_cash_per_instrument
for i, instrument in enumerate(buy_instruments):
cash = cash_for_buy * buy_cash_weights[i]
if cash > max_cash_per_instrument - positions.get(instrument, 0):
# 确保股票持仓量不会超过每次股票最大的占用资金量
cash = max_cash_per_instrument - positions.get(instrument, 0)
if cash > 0:
context.order_value(context.symbol(instrument), cash)
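    # 买入金额示例(仅为示意):假设组合总市值 100 万、max_cash_per_instrument=0.1,则单票资金上限 10 万;
    # 若该股已有持仓市值 6 万,则本次最多再买入 10 万 - 6 万 = 4 万;计算结果不为正时跳过该股票。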
# 回测引擎:准备数据,只执行一次
def m63_prepare_bigquant_run(context):
    context.status_df = D.features(instruments=context.instruments, start_date=context.start_date, end_date=context.end_date,
                                   fields=['st_status_0', 'price_limit_status_0', 'price_limit_status_1'])
def m63_before_trading_start_bigquant_run(context, data):
# 获取涨跌停状态数据
df_price_limit_status = context.ranker_prediction.set_index('date')
today=data.current_dt.strftime('%Y-%m-%d')
# 得到当前未完成订单
for orders in get_open_orders().values():
# 循环,撤销订单
for _order in orders:
ins=str(_order.sid.symbol)
try:
                #判断一下如果当日涨停则取消卖单(按平台约定,price_limit_status_0 取 1/2/3 分别对应跌停/未触及涨跌停/涨停,>2 即涨停)
                if df_price_limit_status[df_price_limit_status.instrument==ins].price_limit_status_0.loc[today]>2 and _order.amount<0:
cancel_order(_order)
print(today,'尾盘涨停取消卖单',ins)
except:
continue
g = T.Graph({
'm1': 'M.instruments.v2',
'm1.start_date': '2010-01-01',
'm1.end_date': '2018-01-01',
'm1.market': 'CN_STOCK_A',
'm1.instrument_list': """002714.SZA
000620.SZA
600066.SHA
600887.SHA
002415.SZA
300450.SZA
000622.SZA
002690.SZA
300601.SZA
000776.SZA
600519.SHA
000333.SZA
002035.SZA
002236.SZA
300015.SZA
600436.SHA
600566.SHA
600201.SHA
600705.SHA
000156.SZA
002677.SZA
002032.SZA
600681.SHA
603601.SHA
002372.SZA
603019.SHA
300571.SZA
002475.SZA
300033.SZA
600346.SHA
601888.SHA
600763.SHA
603799.SHA
603690.SHA
002044.SZA
300253.SZA
000651.SZA
002085.SZA
600903.SHA
600340.SHA
300296.SZA
002572.SZA
300176.SZA
000963.SZA
000403.SZA
002747.SZA
000789.SZA
002508.SZA
300347.SZA
300401.SZA
300666.SZA
000732.SZA
000049.SZA
300451.SZA
603986.SHA
300684.SZA
000681.SZA
000671.SZA
002230.SZA
600867.SHA
300059.SZA
002020.SZA
300457.SZA
300136.SZA
300308.SZA
000661.SZA
002304.SZA
600276.SHA
600563.SHA
601012.SHA
000703.SZA
002410.SZA
002358.SZA
300725.SZA
300226.SZA
300675.SZA
300324.SZA
600801.SHA
000048.SZA
300383.SZA
300285.SZA
300459.SZA
002008.SZA
603288.SHA
002049.SZA
002311.SZA
600305.SHA
300377.SZA
002746.SZA
603638.SHA
002507.SZA
002252.SZA
002271.SZA
300482.SZA
000002.SZA
600612.SHA
002081.SZA
002013.SZA
300357.SZA""",
'm1.max_count': 0,
'm2': 'M.use_datasource.v1',
'm2.instruments': T.Graph.OutputPort('m1.data'),
'm2.datasource_id': 'bar1d_CN_STOCK_A',
'm2.start_date': '',
'm2.end_date': '',
'm3': 'M.input_features.v1',
'm3.features': """# #号开始的表示注释
# 多个特征,每行一个,可以包含基础特征和衍生特征
avg_turn_15/turn_0
mf_net_amount_xl_0
alpha4=close_0*avg_turn_0+close_1*avg_turn_1+close_2*avg_turn_2
#自己添加的
alpha20=(((-1 * rank((open_0 - delay(high_0, 1)))) * rank((open_0 - delay(close_0, 1)))) * rank((open_0 - delay(low_0, 1))))
alpha_001=(rank(ts_argmax(signedpower(where(((close_0/shift(close_0,1)-1) < 0), std((close_0/shift(close_0,1)-1), 20), close_0), 2), 5)) -0.5)
alpha_002=(-1 * correlation(rank(delta(log(volume_0), 2)), rank(div((close_0 - open_0), open_0)), 6))
alpha_003 = (-1 * correlation(rank(open_0), rank(volume_0), 10))""",
'm52': 'M.input_features.v1',
'm52.features_ds': T.Graph.OutputPort('m3.data'),
'm52.features': """# #号开始的表示注释
# 多个特征,每行一个,可以包含基础特征和衍生特征
#周线金叉
cond1=sum(ta_macd_dif(close_0,2,4,4),5)>sum(ta_macd_dea(close_0,2,4,4),5)
cond2=close_0>mean(close_0, 25)
cond3=sum(ta_macd_dea(close_0,2,4,4),5)>0.2
price_limit_status_0
cond4=st_status_0<1""",
'm15': 'M.general_feature_extractor.v7',
'm15.instruments': T.Graph.OutputPort('m1.data'),
'm15.features': T.Graph.OutputPort('m52.data'),
'm15.start_date': '',
'm15.end_date': '',
'm15.before_start_days': 90,
'm16': 'M.derived_feature_extractor.v3',
'm16.input_data': T.Graph.OutputPort('m15.data'),
'm16.features': T.Graph.OutputPort('m52.data'),
'm16.date_col': 'date',
'm16.instrument_col': 'instrument',
'm16.drop_na': False,
'm16.remove_extra_columns': False,
'm67': 'M.features_short.v1',
'm67.input_1': T.Graph.OutputPort('m3.data'),
'm9': 'M.instruments.v2',
'm9.start_date': T.live_run_param('trading_date', '2018-01-01'),
'm9.end_date': T.live_run_param('trading_date', '2021-10-30'),
'm9.market': 'CN_STOCK_A',
'm9.instrument_list': """300782.SZA
605358.SHA
603290.SHA
603392.SHA
601865.SHA
300759.SZA
300750.SZA
300677.SZA
002607.SZA
603259.SHA
300751.SZA
603613.SHA
601100.SHA
300763.SZA
002568.SZA
300724.SZA
603345.SHA
600763.SHA
603713.SHA
300595.SZA
300014.SZA
603712.SHA
300760.SZA
603317.SHA
002791.SZA
601066.SHA
002985.SZA
300661.SZA
300347.SZA
300777.SZA
603129.SHA
300454.SZA
601888.SHA
605111.SHA
603638.SHA
300850.SZA
600809.SHA
002414.SZA
603893.SHA
002967.SZA
600132.SHA
603605.SHA
300015.SZA
603267.SHA
300012.SZA
600882.SHA
300684.SZA
300390.SZA
300769.SZA
300748.SZA
000799.SZA
300767.SZA
300775.SZA
603737.SHA
300601.SZA
601698.SHA
300841.SZA
002975.SZA
603501.SHA
300122.SZA
300677.SZA
603392.SHA
002791.SZA
601865.SHA
300759.SZA
002568.SZA
603613.SHA
300014.SZA
601100.SHA
300763.SZA
300274.SZA
601633.SHA
603501.SHA
002709.SZA
603317.SHA
300661.SZA
002985.SZA
600882.SHA
300598.SZA
300777.SZA
300552.SZA
300346.SZA
002475.SZA
605111.SHA
300850.SZA
300526.SZA
601012.SHA
603893.SHA
002967.SZA
000858.SZA
603267.SHA
000568.SZA
603638.SHA
000708.SZA
603456.SHA
000995.SZA
600399.SHA
300767.SZA
300595.SZA
300347.SZA
600763.SHA
300751.SZA
600316.SHA
300775.SZA
603208.SHA
600862.SHA
002241.SZA
002706.SZA
300390.SZA
601698.SHA
002541.SZA
002607.SZA
000733.SZA
000596.SZA
603345.SHA
300151.SZA
300496.SZA
002705.SZA
002756.SZA
603185.SHA
002850.SZA
000661.SZA
002600.SZA
300724.SZA
600584.SHA
002414.SZA
300223.SZA
002920.SZA
603906.SHA
002714.SZA
600966.SHA
300083.SZA
300601.SZA
600438.SHA
002812.SZA
002459.SZA
603027.SHA
300015.SZA
300763.SZA
002709.SZA
688202.SHA
000422.SZA
300769.SZA
300751.SZA
300343.SZA
605117.SHA
300827.SZA
601633.SHA
603026.SHA
002240.SZA
002326.SZA
002487.SZA
000762.SZA
300432.SZA
603396.SHA
300363.SZA
603985.SHA
000155.SZA
002594.SZA
600399.SHA
600702.SHA
300171.SZA
002176.SZA
000733.SZA
300750.SZA
601127.SHA
002812.SZA
603260.SHA
600610.SHA
601012.SHA
003022.SZA
603127.SHA
000301.SZA
002585.SZA
688198.SHA
002245.SZA
300693.SZA
600096.SHA
300568.SZA
300382.SZA
300443.SZA
003031.SZA
605376.SHA
603806.SHA
603223.SHA
688116.SHA
002529.SZA
600141.SHA
600956.SHA
300035.SZA
300316.SZA
002460.SZA
600110.SHA
300671.SZA
002407.SZA
600532.SHA
688599.SHA
002472.SZA
600499.SHA
600111.SHA
600884.SHA
300696.SZA
603267.SHA""",
'm9.max_count': 0,
'm17': 'M.general_feature_extractor.v7',
'm17.instruments': T.Graph.OutputPort('m9.data'),
'm17.features': T.Graph.OutputPort('m52.data'),
'm17.start_date': '',
'm17.end_date': '',
'm17.before_start_days': 90,
'm18': 'M.derived_feature_extractor.v3',
'm18.input_data': T.Graph.OutputPort('m17.data'),
'm18.features': T.Graph.OutputPort('m52.data'),
'm18.date_col': 'date',
'm18.instrument_col': 'instrument',
'm18.drop_na': False,
'm18.remove_extra_columns': False,
'm5': 'M.instruments.v2',
'm5.start_date': '2010-01-01',
'm5.end_date': '2019-01-01',
'm5.market': 'CN_STOCK_A',
'm5.instrument_list': '000300.HIX',
'm5.max_count': 0,
'm10': 'M.input_features.v1',
'm10.features': """
# #号开始的表示注释,注释需单独一行
# 多个特征,每行一个,可以包含基础特征和衍生特征,特征须为本平台特征
close
instrument
""",
'm20': 'M.use_datasource.v1',
'm20.instruments': T.Graph.OutputPort('m5.data'),
'm20.features': T.Graph.OutputPort('m10.data'),
'm20.datasource_id': 'bar1d_index_CN_STOCK_A',
'm20.start_date': '',
'm20.end_date': '',
'm20.m_cached': False,
'm11': 'M.instruments.v2',
'm11.start_date': '2010-01-01',
'm11.end_date': '2019-01-01',
'm11.market': 'CN_STOCK_A',
'm11.instrument_list': '',
'm11.max_count': 0,
'm12': 'M.input_features.v1',
'm12.features': """
# #号开始的表示注释,注释需单独一行
# 多个特征,每行一个,可以包含基础特征和衍生特征,特征须为本平台特征
close
open
low
high
adjust_factor""",
'm29': 'M.use_datasource.v1',
'm29.instruments': T.Graph.OutputPort('m11.data'),
'm29.features': T.Graph.OutputPort('m12.data'),
'm29.datasource_id': 'bar1d_CN_STOCK_A',
'm29.start_date': '',
'm29.end_date': '',
'm21': 'M.input_features.v1',
'm21.features': """
# #号开始的表示注释,注释需单独一行
# 多个特征,每行一个,可以包含基础特征和衍生特征,特征须为本平台特征
bmret=close/shift(close,1)-1
""",
'm22': 'M.derived_feature_extractor.v3',
'm22.input_data': T.Graph.OutputPort('m20.data'),
'm22.features': T.Graph.OutputPort('m21.data'),
'm22.date_col': 'date',
'm22.instrument_col': 'instrument',
'm22.drop_na': False,
'm22.remove_extra_columns': False,
'm22.user_functions': {},
'm23': 'M.select_columns.v3',
'm23.input_ds': T.Graph.OutputPort('m22.data'),
'm23.columns_ds': T.Graph.OutputPort('m10.data'),
'm23.columns': '',
'm23.reverse_select': True,
'm26': 'M.input_features.v1',
'm26.features': """
# #号开始的表示注释,注释需单独一行
# 多个特征,每行一个,可以包含基础特征和衍生特征,特征须为本平台特征
relative_ret=stockret-bmret
relative_ret_5=sum(relative_ret,5)
relative_ret_30=sum(relative_ret,30)""",
'm30': 'M.input_features.v1',
'm30.features': """
# #号开始的表示注释,注释需单独一行
# 多个特征,每行一个,可以包含基础特征和衍生特征,特征须为本平台特征
stockret=close/shift(close,1)-1""",
'm28': 'M.derived_feature_extractor.v3',
'm28.input_data': T.Graph.OutputPort('m29.data'),
'm28.features': T.Graph.OutputPort('m30.data'),
'm28.date_col': 'date',
'm28.instrument_col': 'instrument',
'm28.drop_na': False,
'm28.remove_extra_columns': False,
'm28.user_functions': {},
'm27': 'M.select_columns.v3',
'm27.input_ds': T.Graph.OutputPort('m28.data'),
'm27.columns_ds': T.Graph.OutputPort('m12.data'),
'm27.columns': '',
'm27.reverse_select': True,
'm24': 'M.join.v3',
'm24.data1': T.Graph.OutputPort('m23.data'),
'm24.data2': T.Graph.OutputPort('m27.data'),
'm24.on': 'date',
'm24.how': 'inner',
'm24.sort': False,
'm25': 'M.derived_feature_extractor.v3',
'm25.input_data': T.Graph.OutputPort('m24.data'),
'm25.features': T.Graph.OutputPort('m26.data'),
'm25.date_col': 'date',
'm25.instrument_col': 'instrument',
'm25.drop_na': False,
'm25.remove_extra_columns': False,
'm25.user_functions': {},
'm31': 'M.filter.v3',
'm31.input_data': T.Graph.OutputPort('m25.data'),
'm31.expr': '(relative_ret_5>0)&(relative_ret_30>0)&(rank(relative_ret_30)>0.8)',
'm31.output_left_data': False,
'm32': 'M.select_columns.v3',
'm32.input_ds': T.Graph.OutputPort('m31.data'),
'm32.columns': 'date,instrument',
'm32.reverse_select': False,
'm33': 'M.join.v3',
'm33.data1': T.Graph.OutputPort('m2.data'),
'm33.data2': T.Graph.OutputPort('m32.data'),
'm33.on': 'date,instrument',
'm33.how': 'inner',
'm33.sort': False,
'm34': 'M.auto_labeler_on_datasource.v1',
'm34.input_data': T.Graph.OutputPort('m33.data'),
'm34.label_expr': """# #号开始的表示注释
# 0. 每行一个,顺序执行,从第二个开始,可以使用label字段
# 1. 可用数据字段见 https://bigquant.com/docs/develop/datasource/deprecated/history_data.html
# 2. 可用操作符和函数见 `表达式引擎 <https://bigquant.com/docs/develop/bigexpr/usage.html>`_
# 计算收益:5日收盘价(作为卖出价格)除以明日开盘价(作为买入价格)
#shift(close, -5) / shift(open, -1)
# 极值处理:用1%和99%分位的值做clip
#clip(label, all_quantile(label, 0.01), all_quantile(label, 0.99))
# 将分数映射到分类,这里使用20个分类
#all_wbins(label, 20)
# 过滤掉一字涨停的情况 (设置label为NaN,在后续处理和训练中会忽略NaN的label)
#where(shift(high, -1) == shift(low, -1), NaN, label)
# 计算收益:2日开盘价(作为卖出价格)除以明日开盘价(作为买入价格)
(shift(close, -5) / shift(open, -1)-1)
# 极值处理:用1%和99%分位的值做clip
clip(label, all_quantile(label, 0.01), all_quantile(label, 0.99))
all_wbins(label, 20)
# 过滤掉一字涨停的情况 (设置label为NaN,在后续处理和训练中会忽略NaN的label)
where(shift(high, -1) == shift(low, -1), NaN, label)
#where(label>0.5, NaN, label)
#where(label<-0.5, NaN, label)
""",
'm34.drop_na_label': True,
'm34.cast_label_int': False,
'm34.date_col': 'date',
'm34.instrument_col': 'instrument',
'm7': 'M.join.v3',
'm7.data1': T.Graph.OutputPort('m34.data'),
'm7.data2': T.Graph.OutputPort('m16.data'),
'm7.on': 'date,instrument',
'm7.how': 'inner',
'm7.sort': False,
'm53': 'M.filter.v3',
'm53.input_data': T.Graph.OutputPort('m7.data'),
'm53.expr': 'cond1&cond2&cond3',
'm53.output_left_data': False,
'm13': 'M.dropnan.v1',
'm13.input_data': T.Graph.OutputPort('m53.data'),
'm6': 'M.stock_ranker_train.v5',
'm6.training_ds': T.Graph.OutputPort('m13.data'),
'm6.features': T.Graph.OutputPort('m67.data_1'),
'm6.test_ds': T.Graph.OutputPort('m13.data'),
'm6.learning_algorithm': '排序',
'm6.number_of_leaves': 20,
'm6.minimum_docs_per_leaf': 1000,
'm6.number_of_trees': 70,
'm6.learning_rate': 0.1,
'm6.max_bins': 1023,
'm6.feature_fraction': 1,
'm6.m_lazy_run': False,
'm4': 'M.instruments.v2',
'm4.start_date': '2018-01-01',
'm4.end_date': T.live_run_param('trading_date', '2021-10-30'),
'm4.market': 'CN_STOCK_A',
'm4.instrument_list': '000300.HIX',
'm4.max_count': 0,
'm35': 'M.input_features.v1',
'm35.features': """
# #号开始的表示注释,注释需单独一行
# 多个特征,每行一个,可以包含基础特征和衍生特征,特征须为本平台特征
close
instrument
""",
'm36': 'M.use_datasource.v1',
'm36.instruments': T.Graph.OutputPort('m4.data'),
'm36.features': T.Graph.OutputPort('m35.data'),
'm36.datasource_id': 'bar1d_index_CN_STOCK_A',
'm36.start_date': '',
'm36.end_date': '',
'm36.m_cached': False,
'm37': 'M.instruments.v2',
'm37.start_date': '2018-01-01',
'm37.end_date': T.live_run_param('trading_date', '2021-10-30'),
'm37.market': 'CN_STOCK_A',
'm37.instrument_list': '',
'm37.max_count': 0,
'm38': 'M.input_features.v1',
'm38.features': """
# #号开始的表示注释,注释需单独一行
# 多个特征,每行一个,可以包含基础特征和衍生特征,特征须为本平台特征
bmret=close/shift(close,1)-1
""",
'm39': 'M.derived_feature_extractor.v3',
'm39.input_data': T.Graph.OutputPort('m36.data'),
'm39.features': T.Graph.OutputPort('m38.data'),
'm39.date_col': 'date',
'm39.instrument_col': 'instrument',
'm39.drop_na': False,
'm39.remove_extra_columns': False,
'm39.user_functions': {},
'm40': 'M.select_columns.v3',
'm40.input_ds': T.Graph.OutputPort('m39.data'),
'm40.columns_ds': T.Graph.OutputPort('m35.data'),
'm40.columns': '',
'm40.reverse_select': True,
'm43': 'M.input_features.v1',
'm43.features': """
# #号开始的表示注释,注释需单独一行
# 多个特征,每行一个,可以包含基础特征和衍生特征,特征须为本平台特征
close
open
low
high
adjust_factor""",
'm42': 'M.use_datasource.v1',
'm42.instruments': T.Graph.OutputPort('m37.data'),
'm42.features': T.Graph.OutputPort('m43.data'),
'm42.datasource_id': 'bar1d_CN_STOCK_A',
'm42.start_date': '',
'm42.end_date': '',
'm44': 'M.input_features.v1',
'm44.features': """
# #号开始的表示注释,注释需单独一行
# 多个特征,每行一个,可以包含基础特征和衍生特征,特征须为本平台特征
stockret=close/shift(close,1)-1""",
'm45': 'M.derived_feature_extractor.v3',
'm45.input_data': T.Graph.OutputPort('m42.data'),
'm45.features': T.Graph.OutputPort('m44.data'),
'm45.date_col': 'date',
'm45.instrument_col': 'instrument',
'm45.drop_na': False,
'm45.remove_extra_columns': False,
'm45.user_functions': {},
'm46': 'M.select_columns.v3',
'm46.input_ds': T.Graph.OutputPort('m45.data'),
'm46.columns_ds': T.Graph.OutputPort('m43.data'),
'm46.columns': '',
'm46.reverse_select': True,
'm41': 'M.join.v3',
'm41.data1': T.Graph.OutputPort('m40.data'),
'm41.data2': T.Graph.OutputPort('m46.data'),
'm41.on': 'date',
'm41.how': 'inner',
'm41.sort': False,
'm47': 'M.input_features.v1',
'm47.features': """
# #号开始的表示注释,注释需单独一行
# 多个特征,每行一个,可以包含基础特征和衍生特征,特征须为本平台特征
relative_ret=stockret-bmret
relative_ret_5=sum(relative_ret,5)
relative_ret_30=sum(relative_ret,30)""",
'm48': 'M.derived_feature_extractor.v3',
'm48.input_data': T.Graph.OutputPort('m41.data'),
'm48.features': T.Graph.OutputPort('m47.data'),
'm48.date_col': 'date',
'm48.instrument_col': 'instrument',
'm48.drop_na': False,
'm48.remove_extra_columns': False,
'm48.user_functions': {},
'm49': 'M.filter.v3',
'm49.input_data': T.Graph.OutputPort('m48.data'),
'm49.expr': '(relative_ret_5>0)&(relative_ret_30>0)&(rank(relative_ret_30)>0.8)',
'm49.output_left_data': False,
'm50': 'M.select_columns.v3',
'm50.input_ds': T.Graph.OutputPort('m49.data'),
'm50.columns': 'date,instrument',
'm50.reverse_select': False,
'm51': 'M.join.v3',
'm51.data1': T.Graph.OutputPort('m18.data'),
'm51.data2': T.Graph.OutputPort('m50.data'),
'm51.on': 'date,instrument',
'm51.how': 'inner',
'm51.sort': False,
'm54': 'M.filter.v3',
'm54.input_data': T.Graph.OutputPort('m51.data'),
'm54.expr': 'cond1&cond2&cond3&cond4',
'm54.output_left_data': False,
'm14': 'M.dropnan.v1',
'm14.input_data': T.Graph.OutputPort('m54.data'),
'm8': 'M.stock_ranker_predict.v5',
'm8.model': T.Graph.OutputPort('m6.model'),
'm8.data': T.Graph.OutputPort('m14.data'),
'm8.m_lazy_run': False,
'm64': 'M.select_columns.v3',
'm64.input_ds': T.Graph.OutputPort('m14.data'),
'm64.columns': 'date,instrument,price_limit_status_0',
'm64.reverse_select': False,
'm65': 'M.join.v3',
'm65.data1': T.Graph.OutputPort('m8.predictions'),
'm65.data2': T.Graph.OutputPort('m64.data'),
'm65.on': 'date,instrument',
'm65.how': 'inner',
'm65.sort': False,
'm55': 'M.input_features.v1',
'm55.features': """
# #号开始的表示注释,注释需单独一行
# 多个特征,每行一个,可以包含基础特征和衍生特征,特征须为本平台特征
#bm_0 = where(close/shift(close,5)-1<-0.05,1,0)
bm_0=where(ta_macd_dif(close,2,4,4)-ta_macd_dea(close,2,4,4)<0,1,0)""",
'm56': 'M.index_feature_extract.v3',
'm56.input_1': T.Graph.OutputPort('m9.data'),
'm56.input_2': T.Graph.OutputPort('m55.data'),
'm56.before_days': 100,
'm56.index': '000300.HIX',
'm57': 'M.select_columns.v3',
'm57.input_ds': T.Graph.OutputPort('m56.data_1'),
'm57.columns': 'date,bm_0',
'm57.reverse_select': False,
'm60': 'M.input_features.v1',
'm60.features': """
# #号开始的表示注释,注释需单独一行
# 多个特征,每行一个,可以包含基础特征和衍生特征,特征须为本平台特征
name""",
'm59': 'M.use_datasource.v1',
'm59.instruments': T.Graph.OutputPort('m9.data'),
'm59.features': T.Graph.OutputPort('m60.data'),
'm59.datasource_id': 'instruments_CN_STOCK_A',
'm59.start_date': '',
'm59.end_date': '',
'm58': 'M.join.v3',
'm58.data1': T.Graph.OutputPort('m59.data'),
'm58.data2': T.Graph.OutputPort('m57.data'),
'm58.on': 'date',
'm58.how': 'left',
'm58.sort': True,
'm61': 'M.join.v3',
'm61.data1': T.Graph.OutputPort('m65.data'),
'm61.data2': T.Graph.OutputPort('m58.data'),
'm61.on': 'date,instrument',
'm61.how': 'left',
'm61.sort': False,
'm62': 'M.sort.v4',
'm62.input_ds': T.Graph.OutputPort('m61.data'),
'm62.sort_by': 'score',
'm62.group_by': 'date',
'm62.keep_columns': '--',
'm62.ascending': False,
'm63': 'M.trade.v4',
'm63.instruments': T.Graph.OutputPort('m9.data'),
'm63.options_data': T.Graph.OutputPort('m62.sorted_data'),
'm63.start_date': '',
'm63.end_date': '',
'm63.initialize': m63_initialize_bigquant_run,
'm63.handle_data': m63_handle_data_bigquant_run,
'm63.prepare': m63_prepare_bigquant_run,
'm63.before_trading_start': m63_before_trading_start_bigquant_run,
'm63.volume_limit': 0.025,
'm63.order_price_field_buy': 'open',
'm63.order_price_field_sell': 'close',
'm63.capital_base': 1000001,
'm63.auto_cancel_non_tradable_orders': True,
'm63.data_frequency': 'daily',
'm63.price_type': '后复权',
'm63.product_type': '股票',
'm63.plot_charts': True,
'm63.backtest_only': False,
'm63.benchmark': '',
})
# g.run({})
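# 注:图 g 在此不直接运行(上面的 g.run({}) 已注释),而是作为 bq_graph 传入文末的超参搜索模块 m19,
# 由搜索过程按 param_grid 中的候选参数多次运行并用 m63_scoring_bigquant_run 评分;
# 如需单独调试,可尝试取消注释 g.run({})(必要时在字典中传入与 param_grid 同形式的参数覆盖,具体以平台文档为准)。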
def m19_param_grid_builder_bigquant_run():
param_grid = {}
    searchtime = 100      # 随机生成的特征组合个数
    maxfactorlen = 30     # 每组特征最多包含的因子个数
alist=['alpha_001 = (rank(ts_argmax(signedpower(where(((close_0/shift(close_0,1)-1) < 0), std((close_0/shift(close_0,1)-1), 20), close_0), 2), 5)) -0.5)',
'alpha_002 = (-1 * correlation(rank(delta(log(volume_0), 2)), rank(div((close_0 - open_0), open_0)), 6))',
'alpha_003 = (-1 * correlation(rank(open_0), rank(volume_0), 10))',
'alpha_004 = (-1 * ts_rank(rank(low_0), 9))',
'alpha_005 = (rank((open_0 - (sum(((high_0+low_0+open_0+close_0)*0.25), 10) / 10))) * (-1 * abs(rank((close_0 - ((high_0+low_0+open_0+close_0)*0.25))))))',
'alpha_006 = (-1 * correlation(open_0, volume_0, 10))',
'alpha_007 = where((mean(volume_0,20) < volume_0), ((-1 * ts_rank(abs(delta(close_0, 7)), 60)) * sign(delta(close_0, 7))), (-1* 1))',
'alpha_008 = (-1 * rank(((sum(open_0, 5) * sum((close_0/shift(close_0,1)-1), 5)) - delay((sum(open_0, 5) * sum((close_0/shift(close_0,1)-1), 5)),10))))',
'alpha_009 = where((0 < ts_min(delta(close_0, 1), 5)), delta(close_0, 1), where((ts_max(delta(close_0, 1), 5) < 0), delta(close_0, 1), (-1 * delta(close_0, 1))))',
'alpha_010 = rank(where((0 < ts_min(delta(close_0, 1), 4)), delta(close_0, 1), where((ts_max(delta(close_0, 1), 4) < 0), delta(close_0, 1), (-1 * delta(close_0, 1)))))',
'alpha_011 = ((rank(ts_max((((high_0+low_0+open_0+close_0)*0.25) - close_0), 3)) + rank(ts_min((((high_0+low_0+open_0+close_0)*0.25) - close_0), 3))) *rank(delta(volume_0, 3)))',
'alpha_012 = (sign(delta(volume_0, 1)) * (-1 * delta(close_0, 1)))',
'alpha_013 = (-1 * rank(covariance(rank(close_0), rank(volume_0), 5)))',
'alpha_014 = ((-1 * rank(delta((close_0/shift(close_0,1)-1), 3))) * correlation(open_0, volume_0, 10))',
'alpha_015 = (-1 * sum(rank(correlation(rank(high_0), rank(volume_0), 3)), 3))',
'alpha_016 = (-1 * rank(covariance(rank(high_0), rank(volume_0), 5)))',
'alpha_017 = (((-1 * rank(ts_rank(close_0, 10))) * rank(delta(delta(close_0, 1), 1))) *rank(ts_rank((div(volume_0, mean(volume_0,20))), 5)))',
'alpha_018 = (-1 * rank(((std(abs((close_0 - open_0)), 5) + (close_0 - open_0)) + correlation(close_0, open_0,10))))',
'alpha_019 = ((-1 * sign(((close_0 - delay(close_0, 7)) + delta(close_0, 7)))) * (1 + rank((1 + sum((close_0/shift(close_0,1)-1),250)))))',
'alpha_020 = (((-1 * rank((open_0 - delay(high_0, 1)))) * rank((open_0 - delay(close_0, 1)))) * rank((open_0 -delay(low_0, 1))))',
'alpha_021 = where((((sum(close_0, 8) / 8) + std(close_0, 8)) < (sum(close_0, 2) / 2)), (-1 * 1), where(((sum(close_0,2) / 2) < ((sum(close_0, 8) / 8) - std(close_0, 8))), 1, where(((1 < div(volume_0, mean(volume_0,20))) | (div(volume_0, mean(volume_0,20)) == 1)), 1, (-1 * 1))))',
'alpha_022 = (-1 * (delta(correlation(high_0, volume_0, 5), 5) * rank(std(close_0, 20))))',
'alpha_023 = where(((sum(high_0, 20) / 20) < high_0), (-1 * delta(high_0, 2)), 0)',
'alpha_024 = where((((delta((sum(close_0, 100) / 100), 100) / delay(close_0, 100)) < 0.05) | ((delta((sum(close_0, 100) / 100), 100) / delay(close_0, 100)) == 0.05)), (-1 * (close_0 - ts_min(close_0,100))), (-1 * delta(close_0, 3)))',
'alpha_025 = rank(((((-1 * (close_0/shift(close_0,1)-1)) * mean(volume_0,20)) * ((high_0+low_0+open_0+close_0)*0.25)) * (high_0 - close_0)))',
'alpha_026 = (-1 * ts_max(correlation(ts_rank(volume_0, 5), ts_rank(high_0, 5), 5), 3))',
'alpha_027 = where((0.5 < rank((sum(correlation(rank(volume_0), rank(((high_0+low_0+open_0+close_0)*0.25)), 6), 2) / 2.0))), (-1 * 1), 1)',
'alpha_028 = scale(((correlation(mean(volume_0,20), low_0, 5) + ((high_0 + low_0) / 2)) - close_0))',
'alpha_029 = (min(product(rank(rank(scale(log(sum(ts_min(rank(rank((-1 * rank(delta((close_0 - 1),5))))), 2), 1))))), 1), 5) + ts_rank(delay((-1 * (close_0/shift(close_0,1)-1)), 6), 5))',
'alpha_030 = div(((1.0 - rank(((sign((close_0 - delay(close_0, 1))) + sign((delay(close_0, 1) - delay(close_0, 2)))) +sign((delay(close_0, 2) - delay(close_0, 3)))))) * sum(volume_0, 5)), sum(volume_0, 20))',
'alpha_031 = ((rank(rank(rank(decay_linear((-1 * rank(rank(delta(close_0, 10)))), 10)))) + rank((-1 *delta(close_0, 3)))) + sign(scale(correlation(mean(volume_0,20), low_0, 12))))',
'alpha_032 = (scale(((sum(close_0, 7) / 7) - close_0)) + (20 * scale(correlation(((high_0+low_0+open_0+close_0)*0.25), delay(close_0, 5),230))))',
'alpha_033 = rank((-1 * ((1 - (open_0 / close_0))**1)))',
'alpha_034 = rank(((1 - rank(div(std((close_0/shift(close_0,1)-1), 2), std((close_0/shift(close_0,1)-1), 5)))) + (1 - rank(delta(close_0, 1)))))',
'alpha_035 = ((ts_rank(volume_0, 32) * (1 - ts_rank(((close_0 + high_0) - low_0), 16))) * (1 -ts_rank((close_0/shift(close_0,1)-1), 32)))',
'alpha_036 = (((((2.21 * rank(correlation((close_0 - open_0), delay(volume_0, 1), 15))) + (0.7 * rank((open_0- close_0)))) + (0.73 * rank(ts_rank(delay((-1 * (close_0/shift(close_0,1)-1)), 6), 5)))) + rank(abs(correlation(((high_0+low_0+open_0+close_0)*0.25),mean(volume_0,20), 6)))) + (0.6 * rank((((sum(close_0, 200) / 200) - open_0) * (close_0 - open_0)))))',
'alpha_037 = (rank(correlation(delay((open_0 - close_0), 1), close_0, 200)) + rank((open_0 - close_0)))',
'alpha_038 = ((-1 * rank(ts_rank(close_0, 10))) * rank((close_0 / open_0)))',
'alpha_039 = ((-1 * rank((delta(close_0, 7) * (1 - rank(decay_linear(div(volume_0, mean(volume_0,20)), 9)))))) * (1 +rank(sum((close_0/shift(close_0,1)-1), 250))))',
'alpha_040 = ((-1 * rank(std(high_0, 10))) * correlation(high_0, volume_0, 10))',
'alpha_041 = (((high_0 * low_0)**0.5) - ((high_0+low_0+open_0+close_0)*0.25))',
'alpha_042 = div(rank((((high_0+low_0+open_0+close_0)*0.25) - close_0)), rank((((high_0+low_0+open_0+close_0)*0.25) + close_0)))',
'alpha_043 = (ts_rank(div(volume_0, mean(volume_0,20)), 20) * ts_rank((-1 * delta(close_0, 7)), 8))',
'alpha_044 = (-1 * correlation(high_0, rank(volume_0), 5))',
'alpha_045 = (-1 * ((rank((sum(delay(close_0, 5), 20) / 20)) * correlation(close_0, volume_0, 2)) *rank(correlation(sum(close_0, 5), sum(close_0, 20), 2))))',
'alpha_046 = where((0.25 < (((delay(close_0, 20) - delay(close_0, 10)) / 10) - ((delay(close_0, 10) - close_0) / 10))), (-1 * 1), where(((((delay(close_0, 20) - delay(close_0, 10)) / 10) - ((delay(close_0, 10) - close_0) / 10)) < 0), 1, ((-1 * 1) * (close_0 - delay(close_0, 1)))))',
'alpha_047 = ((div((rank((1 / close_0)) * volume_0), mean(volume_0,20)) * ((high_0 * rank((high_0 - close_0))) / (sum(high_0, 5) /5))) - rank((((high_0+low_0+open_0+close_0)*0.25) - delay(((high_0+low_0+open_0+close_0)*0.25), 5))))',
'alpha_049 = where(((((delay(close_0, 20) - delay(close_0, 10)) / 10) - ((delay(close_0, 10) - close_0) / 10)) < (-1 *0.1)), 1, ((-1 * 1) * (close_0 - delay(close_0, 1))))',
'alpha_050 = (-1 * ts_max(rank(correlation(rank(volume_0), rank(((high_0+low_0+open_0+close_0)*0.25)), 5)), 5))',
'alpha_051 = where(((((delay(close_0, 20) - delay(close_0, 10)) / 10) - ((delay(close_0, 10) - close_0) / 10)) < (-1 *0.05)), 1, ((-1 * 1) * (close_0 - delay(close_0, 1))))',
'alpha_052 = ((((-1 * ts_min(low_0, 5)) + delay(ts_min(low_0, 5), 5)) * rank(((sum((close_0/shift(close_0,1)-1), 240) -sum((close_0/shift(close_0,1)-1), 20)) / 220))) * ts_rank(volume_0, 5))',
'alpha_053 = (-1 * delta(div(((close_0 - low_0) - (high_0 - close_0)), (close_0 - low_0)), 9))',
'alpha_054 = div((-1 * ((low_0 - close_0) * (open_0**5))), ((low_0 - high_0) * (close_0**5)))',
'alpha_055 = (-1 * correlation(rank(div((close_0 - ts_min(low_0, 12)), (ts_max(high_0, 12) - ts_min(low_0,12)))), rank(volume_0), 6))',
'alpha_056 = (0 - (1 * (rank(div(sum((close_0/shift(close_0,1)-1), 10), sum(sum((close_0/shift(close_0,1)-1), 2), 3))) * rank(((close_0/shift(close_0,1)-1) * market_cap_0)))))',
'alpha_057 = (0 - (1 * div((close_0 - ((high_0+low_0+open_0+close_0)*0.25)), decay_linear(rank(ts_argmax(close_0, 30)), 2))))',
'alpha_060 = (0 - (1 * ((2 * scale(rank((div(((close_0 - low_0) - (high_0 - close_0)), (high_0 - low_0)) * volume_0)))) -scale(rank(ts_argmax(close_0, 10))))))',
# 'alpha_061 = where(rank((((high_0+low_0+open_0+close_0)*0.25) - ts_min(((high_0+low_0+open_0+close_0)*0.25), 16.1219))) < rank(correlation(((high_0+low_0+open_0+close_0)*0.25), mean(volume_0,180), 17.9282)), 1, -1)',
# 'alpha_062 = ((rank(correlation(((high_0+low_0+open_0+close_0)*0.25), sum(mean(volume_0,20), 22.4101), 9.91009)) < rank(((rank(open_0) +rank(open_0)) < (rank(((high_0 + low_0) / 2)) + rank(high_0))))) * -1)',
# 'alpha_064 = (where(rank(correlation(sum(((open_0 * 0.178404) + (low_0 * (1 - 0.178404))), 12.7054),sum(mean(volume_0,120), 12.7054), 16.6208)) < rank(delta(((((high_0 + low_0) / 2) * 0.178404) + (((high_0+low_0+open_0+close_0)*0.25) * (1 -0.178404))), 3.69741)), 1, -1) * -1)',
# 'alpha_065 = (where(rank(correlation(((open_0 * 0.00817205) + (((high_0+low_0+open_0+close_0)*0.25) * (1 - 0.00817205))), sum(mean(volume_0,60),8.6911), 6.40374)) < rank((open_0 - ts_min(open_0, 13.635))), 1, -1) * -1)',
# 'alpha_066 = ((rank(decay_linear(delta(((high_0+low_0+open_0+close_0)*0.25), 3.51013), 7.23052)) + ts_rank(decay_linear(div((((low_0* 0.96633) + (low_0 * (1 - 0.96633))) - ((high_0+low_0+open_0+close_0)*0.25)), (open_0 - ((high_0 + low_0) / 2))), 11.4157), 6.72611)) * -1)',
# 'alpha_068 = (where(ts_rank(correlation(rank(high_0), rank(mean(volume_0,15)), 8.91644), 13.9333) <rank(delta(((close_0 * 0.518371) + (low_0 * (1 - 0.518371))), 1.06157)), 1, -1) * -1)',
# 'alpha_071 = max(ts_rank(decay_linear(correlation(ts_rank(close_0, 3.43976), ts_rank(mean(volume_0,180),12.0647), 18.0175), 4.20501), 15.6948), ts_rank(decay_linear((rank(((low_0 + open_0) - (((high_0+low_0+open_0+close_0)*0.25) +((high_0+low_0+open_0+close_0)*0.25))))**2), 16.4662), 4.4388))',
# 'alpha_072 = div(rank(decay_linear(correlation(((high_0 + low_0) / 2), mean(volume_0,40), 8.93345), 10.1519)), rank(decay_linear(correlation(ts_rank(((high_0+low_0+open_0+close_0)*0.25), 3.72469), ts_rank(volume_0, 18.5188), 6.86671),2.95011)))',
# 'alpha_073 = (max(rank(decay_linear(delta(((high_0+low_0+open_0+close_0)*0.25), 4.72775), 2.91864)),ts_rank(decay_linear((div(delta(((open_0 * 0.147155) + (low_0 * (1 - 0.147155))), 2.03608), ((open_0 *0.147155) + (low_0 * (1 - 0.147155)))) * -1), 3.33829), 16.7411)) * -1)',
# 'alpha_074 = (where(rank(correlation(close_0, sum(mean(volume_0,30), 37.4843), 15.1365)) <rank(correlation(rank(((high_0 * 0.0261661) + (((high_0+low_0+open_0+close_0)*0.25) * (1 - 0.0261661)))), rank(volume_0), 11.4791)), 1, -1)* -1)',
# 'alpha_075 = where(rank(correlation(((high_0+low_0+open_0+close_0)*0.25), volume_0, 4.24304)) < rank(correlation(rank(low_0), rank(mean(volume_0,50)),12.4413)), 1, -1)',
# 'alpha_077 = min(rank(decay_linear(((((high_0 + low_0) / 2) + high_0) - (((high_0+low_0+open_0+close_0)*0.25) + high_0)), 20.0451)),rank(decay_linear(correlation(((high_0 + low_0) / 2), mean(volume_0,40), 3.1614), 5.64125)))',
# 'alpha_078 = (rank(correlation(sum(((low_0 * 0.352233) + (((high_0+low_0+open_0+close_0)*0.25) * (1 - 0.352233))), 19.7428),sum(mean(volume_0,40), 19.7428), 6.83313))**rank(correlation(rank(((high_0+low_0+open_0+close_0)*0.25)), rank(volume_0), 5.77492)))',
# 'alpha_081 = (where(rank(log(product(rank((rank(correlation(((high_0+low_0+open_0+close_0)*0.25), sum(mean(volume_0,10), 49.6054),8.47743))**4)), 14.9655))) < rank(correlation(rank(((high_0+low_0+open_0+close_0)*0.25)), rank(volume_0), 5.07914)), 1, -1) * -1)',
'alpha_083 = div((rank(delay(div((high_0 - low_0), (sum(close_0, 5) / 5)), 2)) * rank(rank(volume_0))), div(((high_0 -low_0) / (sum(close_0, 5) / 5)), (((high_0+low_0+open_0+close_0)*0.25) - close_0)))',
# 'alpha_084 = signedpower(ts_rank((((high_0+low_0+open_0+close_0)*0.25) - ts_max(((high_0+low_0+open_0+close_0)*0.25), 15.3217)), 20.7127), delta(close_0,4.96796))',
# 'alpha_085 = (rank(correlation(((high_0 * 0.876703) + (close_0 * (1 - 0.876703))), mean(volume_0,30),9.61331))**rank(correlation(ts_rank(((high_0 + low_0) / 2), 3.70596), ts_rank(volume_0, 10.1595),7.11408)))',
# 'alpha_086 = (where(ts_rank(correlation(close_0, sum(mean(volume_0,20), 14.7444), 6.00049), 20.4195) < rank(((open_0+ close_0) - (((high_0+low_0+open_0+close_0)*0.25) + open_0))), 1, -1) * -1)',
# 'alpha_088 = min(rank(decay_linear(((rank(open_0) + rank(low_0)) - (rank(high_0) + rank(close_0))),8.06882)), ts_rank(decay_linear(correlation(ts_rank(close_0, 8.44728), ts_rank(mean(volume_0,60),20.6966), 8.01266), 6.65053), 2.61957))',
# 'alpha_092 = min(ts_rank(decay_linear(where((((high_0 + low_0) / 2) + close_0) < (low_0 + open_0), 1, -1), 14.7221),18.8683), ts_rank(decay_linear(correlation(rank(low_0), rank(mean(volume_0,30)), 7.58555), 6.94024),6.80584))',
# 'alpha_094 = ((rank((((high_0+low_0+open_0+close_0)*0.25) - ts_min(((high_0+low_0+open_0+close_0)*0.25), 11.5783)))**ts_rank(correlation(ts_rank(((high_0+low_0+open_0+close_0)*0.25),19.6462), ts_rank(mean(volume_0,60), 4.02992), 18.0926), 2.70756)) * -1)',
# 'alpha_095 = where(rank((open_0 - ts_min(open_0, 12.4105))) < ts_rank((rank(correlation(sum(((high_0 + low_0)/ 2), 19.1351), sum(mean(volume_0,40), 19.1351), 12.8742))**5), 11.7584), 1, -1)',
# 'alpha_096 = (max(ts_rank(decay_linear(correlation(rank(((high_0+low_0+open_0+close_0)*0.25)), rank(volume_0), 3.83878),4.16783), 8.38151), ts_rank(decay_linear(ts_argmax(correlation(ts_rank(close_0, 7.45404),ts_rank(mean(volume_0,60), 4.13242), 3.65459), 12.6556), 14.0365), 13.4143)) * -1)',
# 'alpha_098 = (rank(decay_linear(correlation(((high_0+low_0+open_0+close_0)*0.25), sum(mean(volume_0,5), 26.4719), 4.58418), 7.18088)) -rank(decay_linear(ts_rank(ts_argmin(correlation(rank(open_0), rank(mean(volume_0,15)), 20.8187), 8.62571),6.95668), 8.07206)))',
# 'alpha_099 = (where(rank(correlation(sum(((high_0 + low_0) / 2), 19.8975), sum(mean(volume_0,60), 19.8975), 8.8136)) <rank(correlation(low_0, volume_0, 6.28259)), 1, -1) * -1)',
'alpha_101 = div((close_0 - open_0), ((high_0 - low_0) + 0.001))']
    alphalist = []
    for j in range(searchtime):
        # 每次随机抽取至多 maxfactorlen 个因子表达式,拼成一组特征(每行一个)
        rlist = [random.randint(0, len(alist) - 1) for i in range(random.randint(0, maxfactorlen))]
        # 可选:以固定的基础特征开头,例如
        # features_str = 'avg_turn_15/turn_0\nmf_net_amount_xl_0\nalpha4=close_0*avg_turn_0+close_1*avg_turn_1+close_2*avg_turn_2\n'
        features_str = ''
        for i in rlist:
            features_str = features_str + alist[i].replace(' ', '') + '\n'
        alphalist.append(features_str)
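    # alphalist 的每个元素是一段多行特征文本(示例仅为示意),例如:
    # 'alpha_003=(-1*correlation(rank(open_0),rank(volume_0),10))\nalpha_012=(sign(delta(volume_0,1))*(-1*delta(close_0,1)))\n'
    # 这些文本作为 m3.features 的候选取值参与网格搜索。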
# 在这里设置需要调优的参数备选
#param_grid['m3.features'] = ['avg_turn_15/turn_0\nmf_net_amount_xl_0\nalpha4=close_0*avg_turn_0+close_1*avg_turn_1+close_2*avg_turn_2\nalpha20=(((-1 * rank((open_0 - delay(high_0, 1)))) * rank((open_0 - delay(close_0, 1)))) * rank((open_0 - delay(low_0, 1))))\nalpha_001=(rank(ts_argmax(signedpower(where(((close_0/shift(close_0,1)-1) < 0), std((close_0/shift(close_0,1)-1), 20), close_0), 2), 5)) -0.5)\nalpha_002=(-1 * correlation(rank(delta(log(volume_0), 2)), rank(div((close_0 - open_0), open_0)), 6))\nalpha_003=(-1 * correlation(rank(open_0), rank(volume_0), 10))', 'close_2/close_0\nclose_3/close_0']
param_grid['m3.features'] =alphalist
# param_grid['m6.number_of_trees'] = [5, 10, 20]
return param_grid
def m63_scoring_bigquant_run(result):
#score = result.get('m63').read_raw_perf()['sharpe'].tail(1)[0]#算法夏普指数
    score = result.get('m63').read_raw_perf()['algorithm_period_return'].iloc[-1]  # 算法收益率
#print(score)
return {'score': score}
m19 = M.hyper_parameter_search.v1(
param_grid_builder=m19_param_grid_builder_bigquant_run,
scoring=m63_scoring_bigquant_run,
search_algorithm='网格搜索',
search_iterations=10,
workers=1,
worker_distributed_run=True,
worker_silent=True,
run_now=True,
bq_graph=g
)