TabNet: Attentive Interpretable Tabular Learning
A quantitative stock-selection strategy based on the TabNet model. 98 price/volume factors are extracted, a TabNet model is trained on A-share data from 2010 through 2017, and the model's predictions are backtested on data from January 2018 to September 2021.
TabNet core parameters
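The key TabNet hyperparameters used in this strategy (set in module m18 below) are: n_d = 32 and n_a = 32, the widths of the decision and attention representations at each step; n_steps = 3, the number of sequential decision steps; gamma = 1.3, the relaxation coefficient controlling how freely a feature may be reused across steps; momentum = 0.02, the batch-normalization momentum used with ghost batch normalization (batch_size = 5120 split into virtual batches of 512); input_dim = 98, matching the 98 factors defined in m3; and 100 training epochs.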
# This code was generated automatically by the visual strategy environment on 2023-02-06 17:00
# This code cell can only be edited in visual mode. You can also copy the code into a new code cell or strategy and modify it there.

# Python entry function: input_1/2/3 map to the three input ports, data_1/2/3 to the three output ports
def m12_run_bigquant_run(input_1, input_2, input_3):
    # Split the binned feature/label arrays into training and validation sets
    from sklearn.model_selection import train_test_split
    data = input_1.read()
    x_train, x_val, y_train, y_val = train_test_split(data['x'], data['y'], random_state=2021)
    data_1 = DataSource.write_pickle({'x': x_train, 'y': y_train.reshape(-1, 1)})
    data_2 = DataSource.write_pickle({'x': x_val, 'y': y_val.reshape(-1, 1)})
    return Outputs(data_1=data_1, data_2=data_2, data_3=None)

# Post-processing function (optional). It receives the main function's output; you can transform the data here or return a friendlier Outputs format. Its output is not cached.
def m12_post_run_bigquant_run(outputs):
    return outputs
# Python entry function: input_1/2/3 map to the three input ports, data_1/2/3 to the three output ports
def m20_run_bigquant_run(input_1, input_2, input_3):
    # Turn the raw model predictions into a per-date ranking of instruments
    import pandas as pd
    pred_label = input_1.read_pickle()
    df = input_2.read_df()
    df = pd.DataFrame({'pred_label': pred_label[:, 0], 'instrument': df.instrument, 'date': df.date})
    # Within each date, sort instruments by predicted label, best first
    df.sort_values(['date', 'pred_label'], inplace=True, ascending=[True, False])
    return Outputs(data_1=DataSource.write_df(df), data_2=None, data_3=None)

# Post-processing function (optional). It receives the main function's output; you can transform the data here or return a friendlier Outputs format. Its output is not cached.
def m20_post_run_bigquant_run(outputs):
    return outputs
# Backtest engine: initialization function, executed once
def m21_initialize_bigquant_run(context):
    # Load the prediction data passed in via options into memory as a DataFrame
    context.ranker_prediction = context.options['data'].read_df()
    # The system sets default commission and slippage; override the commission like this
    context.set_commission(PerOrder(buy_cost=0.001, sell_cost=0.001, min_cost=5))
    # Number of stocks to buy: the top stock_count instruments of the predicted ranking (here 20)
    stock_count = 20
    # Per-stock weights: capital is front-loaded toward higher-ranked stocks
    # (with 5 stocks the same formula gives [0.339160, 0.213986, 0.169580, ...])
    context.stock_weights = T.norm([1 / math.log(i + 2) for i in range(0, stock_count)])
    # Maximum fraction of capital a single stock may occupy
    context.max_cash_per_instrument = 0.2
    context.options['hold_days'] = 5
# Backtest engine: daily handler, executed once per trading day
def m21_handle_data_bigquant_run(context, data):
    # Keep only today's predictions
    ranker_prediction = context.ranker_prediction[
        context.ranker_prediction.date == data.current_dt.strftime('%Y-%m-%d')]

    # 1. Capital allocation
    # The average holding period is hold_days and stocks are bought every day, so roughly
    # 1/hold_days of the portfolio is expected to be deployed daily. Because fills are imperfect,
    # equal amounts are used during the first hold_days days (the ramp-up period); afterwards,
    # remaining cash is used, capped at 1.5x the daily average.
    is_staging = context.trading_day_index < context.options['hold_days']  # still in the ramp-up period?
    cash_avg = context.portfolio.portfolio_value / context.options['hold_days']
    cash_for_buy = min(context.portfolio.cash, (1 if is_staging else 1.5) * cash_avg)
    cash_for_sell = cash_avg - (context.portfolio.cash - cash_for_buy)
    positions = {e.symbol: p.amount * p.last_sale_price
                 for e, p in context.perf_tracker.position_tracker.positions.items()}

    # 2. Generate sell orders: selling starts only after the ramp-up period; held stocks
    # ranked lowest by the model's predictions are sold first (bottom-of-the-list elimination).
    if not is_staging and cash_for_sell > 0:
        equities = {e.symbol: e for e, p in context.perf_tracker.position_tracker.positions.items()}
        instruments = list(reversed(list(ranker_prediction.instrument[ranker_prediction.instrument.apply(
            lambda x: x in equities and not context.has_unfinished_sell_order(equities[x]))])))
        # print('rank order for sell %s' % instruments)
        for instrument in instruments:
            context.order_target(context.symbol(instrument), 0)
            cash_for_sell -= positions[instrument]
            if cash_for_sell <= 0:
                break

    # 3. Generate buy orders: buy the top stock_count instruments of the predicted ranking
    buy_cash_weights = context.stock_weights
    buy_instruments = list(ranker_prediction.instrument[:len(buy_cash_weights)])
    max_cash_per_instrument = context.portfolio.portfolio_value * context.max_cash_per_instrument
    for i, instrument in enumerate(buy_instruments):
        cash = cash_for_buy * buy_cash_weights[i]
        if cash > max_cash_per_instrument - positions.get(instrument, 0):
            # Make sure a single position never exceeds the per-stock capital cap
            cash = max_cash_per_instrument - positions.get(instrument, 0)
        if cash > 0:
            context.order_value(context.symbol(instrument), cash)

# Backtest engine: data preparation, executed once
def m21_prepare_bigquant_run(context):
    pass
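# --- Illustrative sketch (not part of the generated strategy graph) ---
# Quick standalone check of the stock_weights normalization above, assuming T.norm simply
# rescales the raw weights to sum to 1 (an assumption about the platform helper).
def stock_weights_sketch(stock_count=20):
    import math
    raw = [1 / math.log(i + 2) for i in range(stock_count)]
    total = sum(raw)
    return [w / total for w in raw]  # higher-ranked stocks receive the largest share
# With stock_count=5 the same formula gives [0.339160, 0.213986, 0.169580, ...], matching
# the example values quoted in the initialize() comment above.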
m1 = M.instruments.v2(
start_date='2010-01-01',
end_date='2017-12-31',
market='CN_STOCK_A',
instrument_list='',
max_count=0
)
m2 = M.advanced_auto_labeler.v2(
instruments=m1.data,
    label_expr="""# Lines starting with # are comments
# 0. One expression per line, executed in order; from the second line on, the label field is available
# 1. Available data fields: https://bigquant.com/docs/data_history_data.html
#    Prefix a field with benchmark_ to use the corresponding benchmark data
# 2. Available operators and functions: see the `expression engine <https://bigquant.com/docs/big_expr.html>`_
# Return: the close 5 days ahead (the sell price) divided by the next day's open (the buy price), minus 1
shift(close, -5) / shift(open, -1) - 1
# Winsorize: clip the label at its 1% and 99% quantiles
clip(label, all_quantile(label, 0.01), all_quantile(label, 0.99))
# Filter out one-bar limit-up days: set the label to NaN (NaN labels are ignored downstream)
where(shift(high, -1) == shift(low, -1), NaN, label)
""",
start_date='',
end_date='',
benchmark='000300.SHA',
drop_na_label=True,
cast_label_int=False
)
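# --- Illustrative sketch (not part of the generated strategy graph) ---
# The label_expr above is roughly equivalent to the following pandas computation on a price
# DataFrame with 'instrument', 'open', 'high', 'low', 'close' columns, sorted by date within
# each instrument; the column names and grouping key are assumptions about the data layout.
def make_labels_sketch(df):
    import numpy as np
    g = df.groupby('instrument')
    # Return from buying at tomorrow's open and selling at the close 5 days ahead
    label = g['close'].shift(-5) / g['open'].shift(-1) - 1
    # Winsorize at the 1% / 99% quantiles
    label = label.clip(label.quantile(0.01), label.quantile(0.99))
    # One-bar limit-up the next day (high == low): the stock cannot be bought, so drop the label
    limit_up = g['high'].shift(-1) == g['low'].shift(-1)
    return label.mask(limit_up, np.nan)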
m17 = M.standardlize.v8(
input_1=m2.data,
columns_input='label'
)
m3 = M.input_features.v1(
features="""close_0
open_0
high_0
low_0
amount_0
turn_0
return_0
close_1
open_1
high_1
low_1
return_1
amount_1
turn_1
close_2
open_2
high_2
low_2
amount_2
turn_2
return_2
close_3
open_3
high_3
low_3
amount_3
turn_3
return_3
close_4
open_4
high_4
low_4
amount_4
turn_4
return_4
mean(close_0, 5)
mean(low_0, 5)
mean(open_0, 5)
mean(high_0, 5)
mean(turn_0, 5)
mean(amount_0, 5)
mean(return_0, 5)
ts_max(close_0, 5)
ts_max(low_0, 5)
ts_max(open_0, 5)
ts_max(high_0, 5)
ts_max(turn_0, 5)
ts_max(amount_0, 5)
ts_max(return_0, 5)
ts_min(close_0, 5)
ts_min(low_0, 5)
ts_min(open_0, 5)
ts_min(high_0, 5)
ts_min(turn_0, 5)
ts_min(amount_0, 5)
ts_min(return_0, 5)
std(close_0, 5)
std(low_0, 5)
std(open_0, 5)
std(high_0, 5)
std(turn_0, 5)
std(amount_0, 5)
std(return_0, 5)
ts_rank(close_0, 5)
ts_rank(low_0, 5)
ts_rank(open_0, 5)
ts_rank(high_0, 5)
ts_rank(turn_0, 5)
ts_rank(amount_0, 5)
ts_rank(return_0, 5)
decay_linear(close_0, 5)
decay_linear(low_0, 5)
decay_linear(open_0, 5)
decay_linear(high_0, 5)
decay_linear(turn_0, 5)
decay_linear(amount_0, 5)
decay_linear(return_0, 5)
correlation(volume_0, return_0, 5)
correlation(volume_0, high_0, 5)
correlation(volume_0, low_0, 5)
correlation(volume_0, close_0, 5)
correlation(volume_0, open_0, 5)
correlation(volume_0, turn_0, 5)
correlation(return_0, high_0, 5)
correlation(return_0, low_0, 5)
correlation(return_0, close_0, 5)
correlation(return_0, open_0, 5)
correlation(return_0, turn_0, 5)
correlation(high_0, low_0, 5)
correlation(high_0, close_0, 5)
correlation(high_0, open_0, 5)
correlation(high_0, turn_0, 5)
correlation(low_0, close_0, 5)
correlation(low_0, open_0, 5)
correlation(low_0, turn_0, 5)
correlation(close_0, open_0, 5)
correlation(close_0, turn_0, 5)
correlation(open_0, turn_0, 5)"""
)
m6 = M.general_feature_extractor.v7(
instruments=m1.data,
features=m3.data,
start_date='',
end_date='',
before_start_days=10
)
m7 = M.derived_feature_extractor.v3(
input_data=m6.data,
features=m3.data,
date_col='date',
instrument_col='instrument',
drop_na=True,
remove_extra_columns=False
)
m13 = M.standardlize.v8(
input_1=m7.data,
input_2=m3.data,
columns_input='[]'
)
m14 = M.fillnan.v1(
input_data=m13.data,
features=m3.data,
fill_value='0.0'
)
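# --- Illustrative sketch (not part of the generated strategy graph) ---
# standardlize.v8 followed by fillnan.v1 z-scores the factor columns and fills any remaining
# NaN with 0.0. A rough pandas equivalent, assuming the platform standardizes each factor
# cross-sectionally within each trading day (the per-date grouping is an assumption):
def standardize_and_fill_sketch(df, feature_cols):
    out = df.copy()
    grouped = out.groupby('date')[feature_cols]
    out[feature_cols] = (out[feature_cols] - grouped.transform('mean')) / grouped.transform('std')
    return out.fillna({col: 0.0 for col in feature_cols})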
m4 = M.join.v3(
data1=m17.data,
data2=m14.data,
on='date,instrument',
how='inner',
sort=False
)
m10 = M.dl_convert_to_bin.v2(
input_data=m4.data,
features=m3.data,
window_size=1,
feature_clip=3,
flatten=True,
window_along_col='instrument'
)
m12 = M.cached.v3(
input_1=m10.data,
run=m12_run_bigquant_run,
post_run=m12_post_run_bigquant_run,
input_ports='',
params='{}',
output_ports=''
)
m18 = M.dl_models_tabnet_train.v1(
training_data=m12.data_1,
validation_data=m12.data_2,
input_dim=98,
n_steps=3,
n_d=32,
n_a=32,
gamma=1.3,
momentum=0.02,
batch_size=5120,
virtual_batch_size=512,
epochs=100,
num_workers=4,
device_name='auto:自动调用GPU',
verbose='1:输出进度条记录'
)
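# --- Illustrative sketch (not part of the generated strategy graph) ---
# dl_models_tabnet_train wraps a TabNet regressor. The same core hyperparameters would look
# roughly like this with the open-source pytorch-tabnet package; this is an illustrative
# stand-in, not the platform's internal implementation, and the optimizer is an assumption.
def tabnet_train_sketch(x_train, y_train, x_val, y_val):
    import torch
    from pytorch_tabnet.tab_model import TabNetRegressor
    model = TabNetRegressor(
        n_d=32, n_a=32,                  # widths of the decision and attention representations
        n_steps=3,                       # number of sequential decision steps
        gamma=1.3,                       # feature-reuse relaxation coefficient
        momentum=0.02,                   # batch-norm momentum (ghost batch normalization)
        optimizer_fn=torch.optim.Adam,   # assumption: the platform's default optimizer is not documented here
        device_name='auto',
    )
    # x_*: (n_samples, 98) float arrays, y_*: (n_samples, 1) targets, as produced by m12
    model.fit(
        x_train, y_train,
        eval_set=[(x_val, y_val)],
        max_epochs=100,
        batch_size=5120,
        virtual_batch_size=512,
        num_workers=4,
    )
    return model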
m5 = M.instruments.v2(
start_date='2018-01-01',
end_date='2021-09-01',
market='CN_STOCK_A',
instrument_list='',
max_count=0
)
m8 = M.general_feature_extractor.v7(
instruments=m5.data,
features=m3.data,
start_date='',
end_date='',
before_start_days=10
)
m9 = M.derived_feature_extractor.v3(
input_data=m8.data,
features=m3.data,
date_col='date',
instrument_col='instrument',
drop_na=True,
remove_extra_columns=False
)
m16 = M.standardlize.v8(
input_1=m9.data,
input_2=m3.data,
columns_input='[]'
)
m15 = M.fillnan.v1(
input_data=m16.data,
features=m3.data,
fill_value='0.0'
)
m11 = M.dl_convert_to_bin.v2(
input_data=m15.data,
features=m3.data,
window_size=1,
feature_clip=3,
flatten=True,
window_along_col='instrument'
)
m19 = M.dl_models_tabnet_predict.v1(
trained_model=m18.data,
input_data=m11.data,
m_cached=False
)
m20 = M.cached.v3(
input_1=m19.data,
input_2=m9.data,
run=m20_run_bigquant_run,
post_run=m20_post_run_bigquant_run,
input_ports='',
params='{}',
output_ports=''
)
m21 = M.trade.v4(
instruments=m5.data,
options_data=m20.data_1,
start_date='',
end_date='',
initialize=m21_initialize_bigquant_run,
handle_data=m21_handle_data_bigquant_run,
prepare=m21_prepare_bigquant_run,
volume_limit=0.025,
order_price_field_buy='open',
order_price_field_sell='close',
capital_base=1000000,
auto_cancel_non_tradable_orders=True,
data_frequency='daily',
price_type='后复权',
product_type='股票',
plot_charts=True,
backtest_only=False,
benchmark='000300.SHA'
)
m22 = M.strategy_turn_analysis.v1(
raw_perf=m21.raw_perf
)
[2023-02-06 16:37:57.336251] INFO: moduleinvoker: instruments.v2 started running..
[2023-02-06 16:37:57.352452] INFO: moduleinvoker: cache hit
[2023-02-06 16:37:57.354889] INFO: moduleinvoker: instruments.v2 finished [0.018633s].
[2023-02-06 16:37:57.369811] INFO: moduleinvoker: advanced_auto_labeler.v2 started running..
[2023-02-06 16:38:20.492282] INFO: Auto labeler (stocks): loaded historical data: 4597290 rows
[2023-02-06 16:38:20.494755] INFO: Auto labeler (stocks): labeling started ..
[2023-02-06 16:38:39.696554] INFO: moduleinvoker: advanced_auto_labeler.v2 finished [42.3267s].
[2023-02-06 16:38:39.722611] INFO: moduleinvoker: standardlize.v8 started running..
[2023-02-06 16:39:33.963236] INFO: moduleinvoker: standardlize.v8 finished [54.2406s].
[2023-02-06 16:39:33.978601] INFO: moduleinvoker: input_features.v1 started running..
[2023-02-06 16:39:33.989988] INFO: moduleinvoker: cache hit
[2023-02-06 16:39:33.991618] INFO: moduleinvoker: input_features.v1 finished [0.013036s].
[2023-02-06 16:39:34.021812] INFO: moduleinvoker: general_feature_extractor.v7 started running..
[2023-02-06 16:39:34.554094] INFO: Base feature extraction: year 2009, feature rows=12795
[2023-02-06 16:39:36.172050] INFO: Base feature extraction: year 2010, feature rows=431567
[2023-02-06 16:39:38.278423] INFO: Base feature extraction: year 2011, feature rows=511455
[2023-02-06 16:39:40.375428] INFO: Base feature extraction: year 2012, feature rows=565675
[2023-02-06 16:39:42.438953] INFO: Base feature extraction: year 2013, feature rows=564168
[2023-02-06 16:39:44.683353] INFO: Base feature extraction: year 2014, feature rows=569948
[2023-02-06 16:39:46.749629] INFO: Base feature extraction: year 2015, feature rows=569698
[2023-02-06 16:39:49.161167] INFO: Base feature extraction: year 2016, feature rows=641546
[2023-02-06 16:39:51.920496] INFO: Base feature extraction: year 2017, feature rows=743233
[2023-02-06 16:39:52.211382] INFO: Base feature extraction: total rows: 4610085
[2023-02-06 16:39:52.224517] INFO: moduleinvoker: general_feature_extractor.v7 finished [18.202683s].
[2023-02-06 16:39:52.238909] INFO: moduleinvoker: derived_feature_extractor.v3 started running..
[2023-02-06 16:40:07.981329] INFO: derived_feature_extractor: extracted mean(close_0, 5), 3.434s
[2023-02-06 16:40:11.503450] INFO: derived_feature_extractor: extracted mean(low_0, 5), 3.520s
[2023-02-06 16:40:15.259763] INFO: derived_feature_extractor: extracted mean(open_0, 5), 3.755s
[2023-02-06 16:40:18.878552] INFO: derived_feature_extractor: extracted mean(high_0, 5), 3.617s
[2023-02-06 16:40:22.596895] INFO: derived_feature_extractor: extracted mean(turn_0, 5), 3.717s
[2023-02-06 16:40:26.115930] INFO: derived_feature_extractor: extracted mean(amount_0, 5), 3.517s
[2023-02-06 16:40:29.836563] INFO: derived_feature_extractor: extracted mean(return_0, 5), 3.719s
[2023-02-06 16:40:33.714054] INFO: derived_feature_extractor: extracted ts_max(close_0, 5), 3.876s
[2023-02-06 16:40:37.540503] INFO: derived_feature_extractor: extracted ts_max(low_0, 5), 3.823s
[2023-02-06 16:40:41.295402] INFO: derived_feature_extractor: extracted ts_max(open_0, 5), 3.753s
[2023-02-06 16:40:44.961536] INFO: derived_feature_extractor: extracted ts_max(high_0, 5), 3.664s
[2023-02-06 16:40:49.239124] INFO: derived_feature_extractor: extracted ts_max(turn_0, 5), 4.276s
[2023-02-06 16:40:53.082962] INFO: derived_feature_extractor: extracted ts_max(amount_0, 5), 3.842s
[2023-02-06 16:40:56.928948] INFO: derived_feature_extractor: extracted ts_max(return_0, 5), 3.844s
[2023-02-06 16:41:00.841566] INFO: derived_feature_extractor: extracted ts_min(close_0, 5), 3.911s
[2023-02-06 16:41:04.605869] INFO: derived_feature_extractor: extracted ts_min(low_0, 5), 3.763s
[2023-02-06 16:41:08.459554] INFO: derived_feature_extractor: extracted ts_min(open_0, 5), 3.852s
[2023-02-06 16:41:12.164385] INFO: derived_feature_extractor: extracted ts_min(high_0, 5), 3.703s
[2023-02-06 16:41:15.853189] INFO: derived_feature_extractor: extracted ts_min(turn_0, 5), 3.687s
[2023-02-06 16:41:19.803632] INFO: derived_feature_extractor: extracted ts_min(amount_0, 5), 3.944s
[2023-02-06 16:41:23.459307] INFO: derived_feature_extractor: extracted ts_min(return_0, 5), 3.654s
[2023-02-06 16:41:27.253483] INFO: derived_feature_extractor: extracted std(close_0, 5), 3.792s
[2023-02-06 16:41:30.996103] INFO: derived_feature_extractor: extracted std(low_0, 5), 3.741s
[2023-02-06 16:41:34.720316] INFO: derived_feature_extractor: extracted std(open_0, 5), 3.723s
[2023-02-06 16:41:38.366487] INFO: derived_feature_extractor: extracted std(high_0, 5), 3.644s
[2023-02-06 16:41:42.113912] INFO: derived_feature_extractor: extracted std(turn_0, 5), 3.745s
[2023-02-06 16:41:45.903676] INFO: derived_feature_extractor: extracted std(amount_0, 5), 3.788s
[2023-02-06 16:41:49.756957] INFO: derived_feature_extractor: extracted std(return_0, 5), 3.851s
[2023-02-06 16:42:04.406071] INFO: derived_feature_extractor: extracted ts_rank(close_0, 5), 14.646s
[2023-02-06 16:42:18.779127] INFO: derived_feature_extractor: extracted ts_rank(low_0, 5), 14.371s
[2023-02-06 16:42:33.167217] INFO: derived_feature_extractor: extracted ts_rank(open_0, 5), 14.386s
[2023-02-06 16:42:47.890220] INFO: derived_feature_extractor: extracted ts_rank(high_0, 5), 14.721s
[2023-02-06 16:43:02.702455] INFO: derived_feature_extractor: extracted ts_rank(turn_0, 5), 14.810s
[2023-02-06 16:43:17.167617] INFO: derived_feature_extractor: extracted ts_rank(amount_0, 5), 14.463s
[2023-02-06 16:43:31.676192] INFO: derived_feature_extractor: extracted ts_rank(return_0, 5), 14.506s
[2023-02-06 16:43:41.159677] INFO: derived_feature_extractor: extracted decay_linear(close_0, 5), 9.482s
[2023-02-06 16:43:51.650427] INFO: derived_feature_extractor: extracted decay_linear(low_0, 5), 10.489s
[2023-02-06 16:44:02.584390] INFO: derived_feature_extractor: extracted decay_linear(open_0, 5), 10.932s
[2023-02-06 16:44:12.802596] INFO: derived_feature_extractor: extracted decay_linear(high_0, 5), 10.216s
[2023-02-06 16:44:22.761476] INFO: derived_feature_extractor: extracted decay_linear(turn_0, 5), 9.957s
[2023-02-06 16:44:32.519604] INFO: derived_feature_extractor: extracted decay_linear(amount_0, 5), 9.756s
[2023-02-06 16:44:42.181734] INFO: derived_feature_extractor: extracted decay_linear(return_0, 5), 9.660s
[2023-02-06 16:45:08.300387] INFO: derived_feature_extractor: extracted correlation(volume_0, return_0, 5), 26.117s
[2023-02-06 16:45:33.838024] INFO: derived_feature_extractor: extracted correlation(volume_0, high_0, 5), 25.535s
[2023-02-06 16:46:00.181543] INFO: derived_feature_extractor: extracted correlation(volume_0, low_0, 5), 26.342s
[2023-02-06 16:46:26.259549] INFO: derived_feature_extractor: extracted correlation(volume_0, close_0, 5), 26.076s
[2023-02-06 16:46:52.100702] INFO: derived_feature_extractor: extracted correlation(volume_0, open_0, 5), 25.839s
[2023-02-06 16:47:20.074399] INFO: derived_feature_extractor: extracted correlation(volume_0, turn_0, 5), 27.971s
[2023-02-06 16:47:48.500996] INFO: derived_feature_extractor: extracted correlation(return_0, high_0, 5), 28.425s
[2023-02-06 16:48:14.161572] INFO: derived_feature_extractor: extracted correlation(return_0, low_0, 5), 25.659s
[2023-02-06 16:48:39.544091] INFO: derived_feature_extractor: extracted correlation(return_0, close_0, 5), 25.380s
[2023-02-06 16:49:04.768786] INFO: derived_feature_extractor: extracted correlation(return_0, open_0, 5), 25.223s
[2023-02-06 16:49:31.236453] INFO: derived_feature_extractor: extracted correlation(return_0, turn_0, 5), 26.466s
[2023-02-06 16:49:58.010757] INFO: derived_feature_extractor: extracted correlation(high_0, low_0, 5), 26.772s
[2023-02-06 16:50:22.989153] INFO: derived_feature_extractor: extracted correlation(high_0, close_0, 5), 24.977s
[2023-02-06 16:50:48.433728] INFO: derived_feature_extractor: extracted correlation(high_0, open_0, 5), 25.443s
[2023-02-06 16:51:15.464583] INFO: derived_feature_extractor: extracted correlation(high_0, turn_0, 5), 27.029s
[2023-02-06 16:51:43.802066] INFO: derived_feature_extractor: extracted correlation(low_0, close_0, 5), 28.335s
[2023-02-06 16:52:11.512977] INFO: derived_feature_extractor: extracted correlation(low_0, open_0, 5), 27.709s
[2023-02-06 16:52:40.152028] INFO: derived_feature_extractor: extracted correlation(low_0, turn_0, 5), 28.637s
[2023-02-06 16:53:07.370211] INFO: derived_feature_extractor: extracted correlation(close_0, open_0, 5), 27.216s
[2023-02-06 16:53:34.106387] INFO: derived_feature_extractor: extracted correlation(close_0, turn_0, 5), 26.734s
[2023-02-06 16:54:01.725587] INFO: derived_feature_extractor: extracted correlation(open_0, turn_0, 5), 27.617s
# Output the predictions: read the ranked prediction DataFrame and write it to CSV
predict_df = m20.data_1.read()
predict_df.head()
predict_df.to_csv("tabnet_predict.csv")