Clone Strategy
In [5]:
# Basic parameter configuration
class conf:
    start_date = '2010-11-01'
    end_date = '2017-05-15'
    # Data before split_date is used for training; data on or after it is used for evaluation
    split_date = '2015-01-01'
    # D.instruments: https://bigquant.com/docs/data_instruments.html
    instruments = D.instruments(start_date, end_date)

    # Labeling function for the machine learning target
    # The label expression below is equivalent to min(max(holding-period return * 100, -20), 20) + 20 (M.fast_auto_labeler below rounds the result to an integer)
    # Note: max/min clip the label score to the range [-20, 20]; adding 20 makes the score non-negative (StockRanker requires non-negative integer label scores)
    label_expr = ['return * 100', 'where(label > {0}, {0}, where(label < -{0}, -{0}, label)) + {0}'.format(20)]
    # Holding period in days, used to compute the return value in label_expr
    hold_days = 5

    # Features https://bigquant.com/docs/data_features.html; you can construct any feature with an expression
    features = [
        'avg_turn_5',  # 5-day average turnover rate
        '(high_0-low_0+high_1-low_1+high_2-low_2+high_3-low_3+high_4-low_4)/5',  # 5-day average amplitude
        #'pe_lyr_0',  # P/E ratio (LYR)
        'mf_net_amount_5',  # 5-day net active buying amount
        'mf_net_amount_10',  # 10-day net active buying amount
        'mf_net_amount_20',  # 20-day net active buying amount
    ]

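# A minimal plain-Python sketch (illustration only, not used by the pipeline) of the score
# that label_expr above produces for one sample; the integer rounding that
# M.fast_auto_labeler performs is approximated here with round()
def example_label_score(holding_period_return):
    score = holding_period_return * 100   # return * 100
    score = min(max(score, -20), 20)      # clip to [-20, 20]
    return round(score + 20)              # shift to non-negative and round
# e.g. example_label_score(0.03) -> 23, example_label_score(-0.50) -> 0
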
# Label the data: score each row (sample); generally, higher scores indicate better samples
m1 = M.fast_auto_labeler.v5(
    instruments=conf.instruments, start_date=conf.start_date, end_date=conf.end_date,
    label_expr=conf.label_expr, hold_days=conf.hold_days,
    benchmark='000300.SHA', sell_at='open', buy_at='open')
# Compute the feature data
m2 = M.general_feature_extractor.v5(
    instruments=conf.instruments, start_date=conf.start_date, end_date=conf.end_date,
    features=conf.features)
# Data preprocessing: handle missing values and normalize the data; T.get_stock_ranker_default_transforms provides the preprocessing the StockRanker model needs
m3 = M.transform.v2(
    data=m2.data, transforms=T.get_stock_ranker_default_transforms(),
    drop_null=True, astype='int32', except_columns=['date', 'instrument'],
    clip_lower=0, clip_upper=200000000)
# Join the label data and the feature data
m4 = M.join.v2(data1=m1.data, data2=m3.data, on=['date', 'instrument'], sort=True)

# Training dataset
m5_training = M.filter.v2(data=m4.data, expr='date < "%s"' % conf.split_date)
# Evaluation dataset
m5_evaluation = M.filter.v2(data=m4.data, expr='"%s" <= date' % conf.split_date)
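
# A minimal sketch (illustration only; it assumes the joined data can be handled as a
# pandas DataFrame with a 'date' column of ISO-formatted strings) of the
# train/evaluation split that the two M.filter calls above express
def example_split_by_date(df, split_date=conf.split_date):
    training = df[df['date'] < split_date]      # rows strictly before split_date
    evaluation = df[df['date'] >= split_date]   # rows on or after split_date
    return training, evaluation
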
# Train the StockRanker machine learning model
m6 = M.stock_ranker_train.v2(training_ds=m5_training.data, features=conf.features)
# Predict on the evaluation set
m7 = M.stock_ranker_predict.v2(model_id=m6.model_id, data=m5_evaluation.data)


## Quantitative backtest https://bigquant.com/docs/strategy_backtest.html
# Backtest engine: initialization function, executed only once
def initialize(context):
    # The engine already uses default commission and slippage settings; to change the commission, use the function below
    context.set_commission(PerOrder(buy_cost=0.0003, sell_cost=0.0013, min_cost=5))
    # The prediction data is passed in via options; read_df loads it into memory as a DataFrame
    context.ranker_prediction = context.options['ranker_prediction'].read_df()
    # Number of stocks to buy: here we buy the top 5 stocks in the predicted ranking
    stock_count = 5
    # Per-stock weights; this allocation gives higher-ranked stocks a slightly larger share of capital, e.g. [0.339160, 0.213986, 0.169580, ..]
    context.stock_weights = T.norm([1 / math.log(i + 2) for i in range(0, stock_count)])
    # Maximum fraction of capital that a single stock may occupy
    context.max_cash_per_instrument = 0.2
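
# A minimal sketch (illustration only; it assumes T.norm simply rescales a list so that it
# sums to 1) of how the stock_weights above are derived for stock_count = 5
import math

def example_stock_weights(stock_count=5):
    raw = [1 / math.log(i + 2) for i in range(stock_count)]  # 1/ln(2), 1/ln(3), ...
    total = sum(raw)
    return [w / total for w in raw]                          # rescale so the weights sum to 1
# example_stock_weights() -> [0.3392, 0.2140, 0.1696, 0.1461, 0.1312] (approximately),
# matching the [0.339160, 0.213986, 0.169580, ..] values noted in initialize above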

# Backtest engine: daily data-handling function, executed once per trading day
def handle_data(context, data):
    # Filter by date to get today's prediction data
    ranker_prediction = context.ranker_prediction[context.ranker_prediction.date == data.current_dt.strftime('%Y-%m-%d')]

    # 1. Capital allocation
    # The average holding period is hold_days and we buy every day, so we expect to deploy 1/hold_days of the capital each day
    # In practice there is some buying error, so during the first hold_days days we deploy equal daily amounts; after that we try to use the remaining cash (capped here at 1.5x the daily amount)
    is_staging = context.trading_day_index < context.options['hold_days']  # whether we are still in the staging period (the first hold_days days)
    cash_avg = context.portfolio.portfolio_value / context.options['hold_days']
    cash_for_buy = min(context.portfolio.cash, (1 if is_staging else 1.5) * cash_avg)
    cash_for_sell = cash_avg - (context.portfolio.cash - cash_for_buy)
    positions = {e.symbol: p.amount * p.last_sale_price
                 for e, p in context.perf_tracker.position_tracker.positions.items()}

    # 2. Generate sell orders: selling starts only after the first hold_days days; held stocks at the bottom of StockRanker's predicted ranking are sold first
    if not is_staging and cash_for_sell > 0:
        equities = {e.symbol: e for e, p in context.perf_tracker.position_tracker.positions.items()}
        instruments = list(reversed(list(ranker_prediction.instrument[ranker_prediction.instrument.apply(
                lambda x: x in equities and not context.has_unfinished_sell_order(equities[x]))])))
        # print('rank order for sell %s' % instruments)
        for instrument in instruments:
            context.order_target(context.symbol(instrument), 0)
            cash_for_sell -= positions[instrument]
            if cash_for_sell <= 0:
                break

    # 3. Generate buy orders: buy the top stock_count stocks in StockRanker's predicted ranking
    buy_cash_weights = context.stock_weights
    buy_instruments = list(ranker_prediction.instrument[:len(buy_cash_weights)])
    max_cash_per_instrument = context.portfolio.portfolio_value * context.max_cash_per_instrument
    for i, instrument in enumerate(buy_instruments):
        cash = cash_for_buy * buy_cash_weights[i]
        if cash > max_cash_per_instrument - positions.get(instrument, 0):
            # Make sure the position in any single stock does not exceed the maximum capital allowed per stock
            cash = max_cash_per_instrument - positions.get(instrument, 0)
        if cash > 0:
            context.order_value(context.symbol(instrument), cash)
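
# A minimal plain-Python sketch (illustration only, with made-up numbers) of the
# cash-budgeting arithmetic at the top of handle_data: assume a portfolio value of
# 1,000,000, hold_days = 5 and 300,000 of free cash on a day after the staging period
def example_cash_budget(portfolio_value=1000000.0, cash=300000.0, hold_days=5, is_staging=False):
    cash_avg = portfolio_value / hold_days                            # daily budget: 200,000
    cash_for_buy = min(cash, (1 if is_staging else 1.5) * cash_avg)   # 300,000 (capped at 1.5x the daily budget)
    cash_for_sell = cash_avg - (cash - cash_for_buy)                  # 200,000 to be freed up by selling
    return cash_for_buy, cash_for_sell
# example_cash_budget() -> (300000.0, 200000.0)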

# Run the backtest engine
m8 = M.backtest.v5(
    instruments=m7.instruments,
    start_date=m7.start_date,
    end_date=m7.end_date,
    initialize=initialize,
    handle_data=handle_data,
    order_price_field_buy='open',       # buy at the open price
    order_price_field_sell='close',     # sell at the close price
    capital_base=1000000,               # initial capital
    benchmark='000300.SHA',             # benchmark for comparison; does not affect the backtest results
    # Pass the prediction data and parameters to the backtest engine via options
    options={'ranker_prediction': m7.predictions, 'hold_days': conf.hold_days}
)
[2017-05-15 15:36:12.842591] INFO: bigquant: fast_auto_labeler.v5 start ..
[2017-05-15 15:36:20.943248] INFO: fast_auto_labeler: load history data: 3759000 rows
[2017-05-15 15:36:21.902374] INFO: fast_auto_labeler: start labeling
[2017-05-15 15:37:55.772914] INFO: bigquant: fast_auto_labeler.v5 end [102.930287s].
[2017-05-15 15:37:55.780103] INFO: bigquant: general_feature_extractor.v5 start ..
[2017-05-15 15:38:03.926007] INFO: general_feature_extractor: year 2010, featurerows=86096
[2017-05-15 15:38:30.007894] INFO: general_feature_extractor: year 2011, featurerows=511455
[2017-05-15 15:38:47.583050] INFO: general_feature_extractor: year 2012, featurerows=565675
[2017-05-15 15:39:07.030722] INFO: general_feature_extractor: year 2013, featurerows=564168
[2017-05-15 15:39:40.864233] INFO: general_feature_extractor: year 2014, featurerows=569948
[2017-05-15 15:40:19.611505] INFO: general_feature_extractor: year 2015, featurerows=569698
[2017-05-15 15:40:59.883801] INFO: general_feature_extractor: year 2016, featurerows=641546
[2017-05-15 15:41:18.530195] INFO: general_feature_extractor: year 2017, featurerows=250414
[2017-05-15 15:41:18.807800] INFO: general_feature_extractor: total feature rows: 3759000
[2017-05-15 15:41:18.816050] INFO: bigquant: general_feature_extractor.v5 end [203.035914s].
[2017-05-15 15:41:18.824148] INFO: bigquant: transform.v2 start ..
[2017-05-15 15:41:21.251055] INFO: transform: transformed /y_2010, 85786/86096
[2017-05-15 15:41:24.678336] INFO: transform: transformed /y_2011, 510034/511455
[2017-05-15 15:41:28.402984] INFO: transform: transformed /y_2012, 564884/565675
[2017-05-15 15:41:32.128190] INFO: transform: transformed /y_2013, 564158/564168
[2017-05-15 15:41:35.585361] INFO: transform: transformed /y_2014, 569356/569948
[2017-05-15 15:41:39.530547] INFO: transform: transformed /y_2015, 568581/569698
[2017-05-15 15:41:43.427508] INFO: transform: transformed /y_2016, 640408/641546
[2017-05-15 15:41:46.297988] INFO: transform: transformed /y_2017, 249451/250414
[2017-05-15 15:41:46.586474] INFO: transform: transformed rows: 3752658/3759000
[2017-05-15 15:41:46.602813] INFO: bigquant: transform.v2 end [27.778633s].
[2017-05-15 15:41:46.607911] INFO: bigquant: join.v2 start ..
[2017-05-15 15:41:53.208942] INFO: filter: /y_2010, rows=85671/85786, timetaken=5.138235s
[2017-05-15 15:41:59.700043] INFO: filter: /y_2011, rows=509502/510034, timetaken=6.483813s
[2017-05-15 15:42:06.841036] INFO: filter: /y_2012, rows=563788/564884, timetaken=7.055572s
[2017-05-15 15:42:14.745785] INFO: filter: /y_2013, rows=563123/564158, timetaken=7.798998s
[2017-05-15 15:42:23.145499] INFO: filter: /y_2014, rows=567664/569356, timetaken=8.293027s
[2017-05-15 15:42:33.334913] INFO: filter: /y_2015, rows=560341/568581, timetaken=10.076097s
[2017-05-15 15:42:42.597852] INFO: filter: /y_2016, rows=637192/640408, timetaken=9.234101s
[2017-05-15 15:42:48.991854] INFO: filter: /y_2017, rows=229222/249451, timetaken=6.369214s
[2017-05-15 15:42:49.376168] INFO: filter: total result rows: 3716503
[2017-05-15 15:42:49.382027] INFO: bigquant: join.v2 end [62.773782s].
[2017-05-15 15:42:49.387334] INFO: bigquant: filter.v2 start ..
[2017-05-15 15:42:49.394403] INFO: filter: filter with expr date < "2015-01-01"
[2017-05-15 15:42:50.008132] INFO: filter: filter /y_2010, 85671/85671
[2017-05-15 15:42:51.870977] INFO: filter: filter /y_2011, 509502/509502
[2017-05-15 15:42:53.905593] INFO: filter: filter /y_2012, 563788/563788
[2017-05-15 15:42:56.197440] INFO: filter: filter /y_2013, 563123/563123
[2017-05-15 15:42:58.512644] INFO: filter: filter /y_2014, 567664/567664
[2017-05-15 15:42:58.896270] INFO: filter: filter /y_2015, 0/560341
[2017-05-15 15:42:59.276762] INFO: filter: filter /y_2016, 0/637192
[2017-05-15 15:42:59.411882] INFO: filter: filter /y_2017, 0/229222
[2017-05-15 15:43:00.076867] INFO: bigquant: filter.v2 end [10.689444s].
[2017-05-15 15:43:00.082275] INFO: bigquant: filter.v2 start ..
[2017-05-15 15:43:00.152060] INFO: filter: filter with expr "2015-01-01" <= date
[2017-05-15 15:43:00.488005] INFO: filter: filter /y_2010, 0/85671
[2017-05-15 15:43:00.820323] INFO: filter: filter /y_2011, 0/509502
[2017-05-15 15:43:01.096979] INFO: filter: filter /y_2012, 0/563788
[2017-05-15 15:43:01.349924] INFO: filter: filter /y_2013, 0/563123
[2017-05-15 15:43:01.620795] INFO: filter: filter /y_2014, 0/567664
[2017-05-15 15:43:03.363572] INFO: filter: filter /y_2015, 560341/560341
[2017-05-15 15:43:05.717689] INFO: filter: filter /y_2016, 637192/637192
[2017-05-15 15:43:06.772304] INFO: filter: filter /y_2017, 229222/229222
[2017-05-15 15:43:06.916859] INFO: bigquant: filter.v2 end [6.834527s].
[2017-05-15 15:43:06.922535] INFO: bigquant: stock_ranker_train.v2 start ..
[2017-05-15 15:43:10.153043] INFO: df2bin: prepare data: training ..
[2017-05-15 15:44:18.449627] INFO: stock_ranker_train: training: 2289748 rows
[2017-05-15 15:47:42.675311] INFO: bigquant: stock_ranker_train.v2 end [275.752664s].
[2017-05-15 15:47:42.681611] INFO: bigquant: stock_ranker_predict.v2 start ..
[2017-05-15 15:47:43.875332] INFO: df2bin: prepare data: prediction ..
[2017-05-15 15:48:28.447060] INFO: stock_ranker_predict: prediction: 1426755 rows
[2017-05-15 15:48:45.303684] INFO: bigquant: stock_ranker_predict.v2 end [62.621979s].
[2017-05-15 15:48:45.322284] INFO: bigquant: backtest.v5 start ..
[2017-05-15 15:51:14.946024] INFO: Performance: Simulated 568 trading days out of 568.
[2017-05-15 15:51:14.948977] INFO: Performance: first open: 2015-01-05 14:30:00+00:00
[2017-05-15 15:51:14.950506] INFO: Performance: last close: 2017-05-04 19:00:00+00:00
  • Return: 84.39%
  • Annualized return: 31.19%
  • Benchmark return: -3.66%
  • Alpha: 0.32
  • Beta: 0.92
  • Sharpe ratio: 0.82
  • Return volatility: 32.94%
  • Information ratio: 1.78
  • Max drawdown: 52.89%
[2017-05-15 15:51:19.727240] INFO: bigquant: backtest.v5 end [154.404912s].