Clone Strategy

    {"description":"实验创建于2017/8/26","graph":{"edges":[{"to_node_id":"287d2cb0-f53c-4101-bdf8-104b137c8601-15:instruments","from_node_id":"287d2cb0-f53c-4101-bdf8-104b137c8601-8:data"},{"to_node_id":"-106:instruments","from_node_id":"287d2cb0-f53c-4101-bdf8-104b137c8601-8:data"},{"to_node_id":"-773:input_1","from_node_id":"287d2cb0-f53c-4101-bdf8-104b137c8601-15:data"},{"to_node_id":"-106:features","from_node_id":"287d2cb0-f53c-4101-bdf8-104b137c8601-24:data"},{"to_node_id":"-113:features","from_node_id":"287d2cb0-f53c-4101-bdf8-104b137c8601-24:data"},{"to_node_id":"-768:input_2","from_node_id":"287d2cb0-f53c-4101-bdf8-104b137c8601-24:data"},{"to_node_id":"-243:features","from_node_id":"287d2cb0-f53c-4101-bdf8-104b137c8601-24:data"},{"to_node_id":"-122:features","from_node_id":"287d2cb0-f53c-4101-bdf8-104b137c8601-24:data"},{"to_node_id":"-129:features","from_node_id":"287d2cb0-f53c-4101-bdf8-104b137c8601-24:data"},{"to_node_id":"-778:input_2","from_node_id":"287d2cb0-f53c-4101-bdf8-104b137c8601-24:data"},{"to_node_id":"-251:features","from_node_id":"287d2cb0-f53c-4101-bdf8-104b137c8601-24:data"},{"to_node_id":"-243:input_data","from_node_id":"287d2cb0-f53c-4101-bdf8-104b137c8601-53:data"},{"to_node_id":"-113:input_data","from_node_id":"-106:data"},{"to_node_id":"-768:input_1","from_node_id":"-113:data"},{"to_node_id":"287d2cb0-f53c-4101-bdf8-104b137c8601-53:data2","from_node_id":"-768:data"},{"to_node_id":"287d2cb0-f53c-4101-bdf8-104b137c8601-53:data1","from_node_id":"-773:data"},{"to_node_id":"-235:training_data","from_node_id":"-243:data"},{"to_node_id":"-122:instruments","from_node_id":"287d2cb0-f53c-4101-bdf8-104b137c8601-62:data"},{"to_node_id":"-227:instruments","from_node_id":"287d2cb0-f53c-4101-bdf8-104b137c8601-62:data"},{"to_node_id":"-129:input_data","from_node_id":"-122:data"},{"to_node_id":"-778:input_1","from_node_id":"-129:data"},{"to_node_id":"-251:input_data","from_node_id":"-778:data"},{"to_node_id":"-263:input_2","from_node_id":"-778:data"},{"to_node_id":"-218:input_data","from_node_id":"-251:data"},{"to_node_id":"-230:inputs","from_node_id":"-268:data"},{"to_node_id":"-114:input1","from_node_id":"-268:data"},{"to_node_id":"-121:input2","from_node_id":"-268:data"},{"to_node_id":"-121:input1","from_node_id":"-114:data"},{"to_node_id":"-158:input1","from_node_id":"-121:data"},{"to_node_id":"-158:input2","from_node_id":"-121:data"},{"to_node_id":"-158:input3","from_node_id":"-121:data"},{"to_node_id":"-138:input1","from_node_id":"-132:data"},{"to_node_id":"-138:input2","from_node_id":"-132:data"},{"to_node_id":"-138:input3","from_node_id":"-132:data"},{"to_node_id":"-162:inputs","from_node_id":"-138:data"},{"to_node_id":"-132:input1","from_node_id":"-158:data"},{"to_node_id":"-132:input2","from_node_id":"-158:data"},{"to_node_id":"-132:input3","from_node_id":"-158:data"},{"to_node_id":"-166:inputs","from_node_id":"-162:data"},{"to_node_id":"-173:inputs","from_node_id":"-166:data"},{"to_node_id":"-201:inputs","from_node_id":"-173:data"},{"to_node_id":"-230:outputs","from_node_id":"-201:data"},{"to_node_id":"-235:input_model","from_node_id":"-230:data"},{"to_node_id":"-218:trained_model","from_node_id":"-235:data"},{"to_node_id":"-263:input_1","from_node_id":"-218:data"},{"to_node_id":"-227:options_data","from_node_id":"-263:data_1"}],"nodes":[{"node_id":"287d2cb0-f53c-4101-bdf8-104b137c8601-8","module_id":"BigQuantSpace.instruments.instruments-v2","parameters":[{"name":"start_date","value":"2015-01-01","type":"Literal","bound_global_parameter":null},{"name":"end_date","value"
:"2016-01-01","type":"Literal","bound_global_parameter":null},{"name":"market","value":"CN_STOCK_A","type":"Literal","bound_global_parameter":null},{"name":"instrument_list","value":" ","type":"Literal","bound_global_parameter":null},{"name":"max_count","value":"0","type":"Literal","bound_global_parameter":null}],"input_ports":[{"name":"rolling_conf","node_id":"287d2cb0-f53c-4101-bdf8-104b137c8601-8"}],"output_ports":[{"name":"data","node_id":"287d2cb0-f53c-4101-bdf8-104b137c8601-8"}],"cacheable":true,"seq_num":1,"comment":"","comment_collapsed":true},{"node_id":"287d2cb0-f53c-4101-bdf8-104b137c8601-15","module_id":"BigQuantSpace.advanced_auto_labeler.advanced_auto_labeler-v2","parameters":[{"name":"label_expr","value":"# #号开始的表示注释\n# 0. 每行一个,顺序执行,从第二个开始,可以使用label字段\n# 1. 可用数据字段见 https://bigquant.com/docs/data_history_data.html\n# 添加benchmark_前缀,可使用对应的benchmark数据\n# 2. 可用操作符和函数见 `表达式引擎 <https://bigquant.com/docs/big_expr.html>`_\n\n# 计算收益:5日收盘价(作为卖出价格)除以明日开盘价(作为买入价格)\nshift(close, -5) / shift(open, -1)-1\n\n# 极值处理:用1%和99%分位的值做clip\nclip(label, all_quantile(label, 0.01), all_quantile(label, 0.99))\n\n# 过滤掉一字涨停的情况 (设置label为NaN,在后续处理和训练中会忽略NaN的label)\nwhere(shift(high, -1) == shift(low, -1), NaN, label)\n","type":"Literal","bound_global_parameter":null},{"name":"start_date","value":"","type":"Literal","bound_global_parameter":null},{"name":"end_date","value":"","type":"Literal","bound_global_parameter":null},{"name":"benchmark","value":"000300.SHA","type":"Literal","bound_global_parameter":null},{"name":"drop_na_label","value":"True","type":"Literal","bound_global_parameter":null},{"name":"cast_label_int","value":"False","type":"Literal","bound_global_parameter":null},{"name":"user_functions","value":"","type":"Literal","bound_global_parameter":null}],"input_ports":[{"name":"instruments","node_id":"287d2cb0-f53c-4101-bdf8-104b137c8601-15"}],"output_ports":[{"name":"data","node_id":"287d2cb0-f53c-4101-bdf8-104b137c8601-15"}],"cacheable":true,"seq_num":2,"comment":"","comment_collapsed":true},{"node_id":"287d2cb0-f53c-4101-bdf8-104b137c8601-24","module_id":"BigQuantSpace.input_features.input_features-v1","parameters":[{"name":"features","value":"close_0/mean(close_0,5)\nclose_0/mean(close_0,10)\n# close_0/mean(close_0,20)\n# close_0/open_0\n# open_0/mean(close_0,5)\n# open_0/mean(close_0,10)\n# 
open_0/mean(close_0,20)","type":"Literal","bound_global_parameter":null}],"input_ports":[{"name":"features_ds","node_id":"287d2cb0-f53c-4101-bdf8-104b137c8601-24"}],"output_ports":[{"name":"data","node_id":"287d2cb0-f53c-4101-bdf8-104b137c8601-24"}],"cacheable":true,"seq_num":3,"comment":"","comment_collapsed":true},{"node_id":"287d2cb0-f53c-4101-bdf8-104b137c8601-53","module_id":"BigQuantSpace.join.join-v3","parameters":[{"name":"on","value":"date,instrument","type":"Literal","bound_global_parameter":null},{"name":"how","value":"inner","type":"Literal","bound_global_parameter":null},{"name":"sort","value":"False","type":"Literal","bound_global_parameter":null}],"input_ports":[{"name":"data1","node_id":"287d2cb0-f53c-4101-bdf8-104b137c8601-53"},{"name":"data2","node_id":"287d2cb0-f53c-4101-bdf8-104b137c8601-53"}],"output_ports":[{"name":"data","node_id":"287d2cb0-f53c-4101-bdf8-104b137c8601-53"}],"cacheable":true,"seq_num":7,"comment":"","comment_collapsed":true},{"node_id":"-106","module_id":"BigQuantSpace.general_feature_extractor.general_feature_extractor-v7","parameters":[{"name":"start_date","value":"","type":"Literal","bound_global_parameter":null},{"name":"end_date","value":"","type":"Literal","bound_global_parameter":null},{"name":"before_start_days","value":"30","type":"Literal","bound_global_parameter":null}],"input_ports":[{"name":"instruments","node_id":"-106"},{"name":"features","node_id":"-106"}],"output_ports":[{"name":"data","node_id":"-106"}],"cacheable":true,"seq_num":15,"comment":"","comment_collapsed":true},{"node_id":"-113","module_id":"BigQuantSpace.derived_feature_extractor.derived_feature_extractor-v3","parameters":[{"name":"date_col","value":"date","type":"Literal","bound_global_parameter":null},{"name":"instrument_col","value":"instrument","type":"Literal","bound_global_parameter":null},{"name":"drop_na","value":"True","type":"Literal","bound_global_parameter":null},{"name":"remove_extra_columns","value":"False","type":"Literal","bound_global_parameter":null},{"name":"user_functions","value":"","type":"Literal","bound_global_parameter":null}],"input_ports":[{"name":"input_data","node_id":"-113"},{"name":"features","node_id":"-113"}],"output_ports":[{"name":"data","node_id":"-113"}],"cacheable":true,"seq_num":16,"comment":"","comment_collapsed":true},{"node_id":"-768","module_id":"BigQuantSpace.standardlize.standardlize-v8","parameters":[{"name":"columns_input","value":"[]","type":"Literal","bound_global_parameter":null}],"input_ports":[{"name":"input_1","node_id":"-768"},{"name":"input_2","node_id":"-768"}],"output_ports":[{"name":"data","node_id":"-768"}],"cacheable":true,"seq_num":14,"comment":"","comment_collapsed":true},{"node_id":"-773","module_id":"BigQuantSpace.standardlize.standardlize-v8","parameters":[{"name":"columns_input","value":"label","type":"Literal","bound_global_parameter":null}],"input_ports":[{"name":"input_1","node_id":"-773"},{"name":"input_2","node_id":"-773"}],"output_ports":[{"name":"data","node_id":"-773"}],"cacheable":true,"seq_num":13,"comment":"","comment_collapsed":true},{"node_id":"-243","module_id":"BigQuantSpace.dl_convert_to_bin.dl_convert_to_bin-v2","parameters":[{"name":"window_size","value":"5","type":"Literal","bound_global_parameter":null},{"name":"feature_clip","value":5,"type":"Literal","bound_global_parameter":null},{"name":"flatten","value":"False","type":"Literal","bound_global_parameter":null},{"name":"window_along_col","value":"instrument","type":"Literal","bound_global_parameter":null}],"input_ports":[{"name":"input_d
ata","node_id":"-243"},{"name":"features","node_id":"-243"}],"output_ports":[{"name":"data","node_id":"-243"}],"cacheable":true,"seq_num":26,"comment":"","comment_collapsed":true},{"node_id":"287d2cb0-f53c-4101-bdf8-104b137c8601-62","module_id":"BigQuantSpace.instruments.instruments-v2","parameters":[{"name":"start_date","value":"2016-01-01","type":"Literal","bound_global_parameter":null},{"name":"end_date","value":"2017-01-01","type":"Literal","bound_global_parameter":null},{"name":"market","value":"CN_STOCK_A","type":"Literal","bound_global_parameter":null},{"name":"instrument_list","value":"","type":"Literal","bound_global_parameter":null},{"name":"max_count","value":"0","type":"Literal","bound_global_parameter":null}],"input_ports":[{"name":"rolling_conf","node_id":"287d2cb0-f53c-4101-bdf8-104b137c8601-62"}],"output_ports":[{"name":"data","node_id":"287d2cb0-f53c-4101-bdf8-104b137c8601-62"}],"cacheable":true,"seq_num":5,"comment":"预测数据,用于回测和模拟","comment_collapsed":true},{"node_id":"-122","module_id":"BigQuantSpace.general_feature_extractor.general_feature_extractor-v7","parameters":[{"name":"start_date","value":"","type":"Literal","bound_global_parameter":null},{"name":"end_date","value":"","type":"Literal","bound_global_parameter":null},{"name":"before_start_days","value":"30","type":"Literal","bound_global_parameter":null}],"input_ports":[{"name":"instruments","node_id":"-122"},{"name":"features","node_id":"-122"}],"output_ports":[{"name":"data","node_id":"-122"}],"cacheable":true,"seq_num":6,"comment":"","comment_collapsed":true},{"node_id":"-129","module_id":"BigQuantSpace.derived_feature_extractor.derived_feature_extractor-v3","parameters":[{"name":"date_col","value":"date","type":"Literal","bound_global_parameter":null},{"name":"instrument_col","value":"instrument","type":"Literal","bound_global_parameter":null},{"name":"drop_na","value":"True","type":"Literal","bound_global_parameter":null},{"name":"remove_extra_columns","value":"False","type":"Literal","bound_global_parameter":null},{"name":"user_functions","value":"","type":"Literal","bound_global_parameter":null}],"input_ports":[{"name":"input_data","node_id":"-129"},{"name":"features","node_id":"-129"}],"output_ports":[{"name":"data","node_id":"-129"}],"cacheable":true,"seq_num":8,"comment":"","comment_collapsed":true},{"node_id":"-778","module_id":"BigQuantSpace.standardlize.standardlize-v8","parameters":[{"name":"columns_input","value":"[]","type":"Literal","bound_global_parameter":null}],"input_ports":[{"name":"input_1","node_id":"-778"},{"name":"input_2","node_id":"-778"}],"output_ports":[{"name":"data","node_id":"-778"}],"cacheable":true,"seq_num":9,"comment":"","comment_collapsed":true},{"node_id":"-251","module_id":"BigQuantSpace.dl_convert_to_bin.dl_convert_to_bin-v2","parameters":[{"name":"window_size","value":"5","type":"Literal","bound_global_parameter":null},{"name":"feature_clip","value":5,"type":"Literal","bound_global_parameter":null},{"name":"flatten","value":"False","type":"Literal","bound_global_parameter":null},{"name":"window_along_col","value":"instrument","type":"Literal","bound_global_parameter":null}],"input_ports":[{"name":"input_data","node_id":"-251"},{"name":"features","node_id":"-251"}],"output_ports":[{"name":"data","node_id":"-251"}],"cacheable":true,"seq_num":10,"comment":"","comment_collapsed":true},{"node_id":"-268","module_id":"BigQuantSpace.dl_layer_input.dl_layer_input-v1","parameters":[{"name":"shape","value":"5,2","type":"Literal","bound_global_parameter":null},{"name":"batch_shape","val
ue":"","type":"Literal","bound_global_parameter":null},{"name":"dtype","value":"float32","type":"Literal","bound_global_parameter":null},{"name":"sparse","value":"False","type":"Literal","bound_global_parameter":null},{"name":"name","value":"test","type":"Literal","bound_global_parameter":null}],"input_ports":[{"name":"inputs","node_id":"-268"}],"output_ports":[{"name":"data","node_id":"-268"}],"cacheable":false,"seq_num":12,"comment":"","comment_collapsed":true},{"node_id":"-114","module_id":"BigQuantSpace.dl_layer_userlayer.dl_layer_userlayer-v1","parameters":[{"name":"layer_class","value":"from tensorflow.keras.layers import Layer\nimport tensorflow as tf\n\n\nclass Time2Vector(Layer):\n def __init__(self, seq_len, **kwargs):\n super(Time2Vector, self).__init__(**kwargs)\n self.seq_len = seq_len\n\n def build(self, input_shape):\n self.weights_linear = self.add_weight(name='weight_linear',\n shape=(int(self.seq_len),),\n initializer='uniform',\n trainable=True)\n\n self.bias_linear = self.add_weight(name='bias_linear',\n shape=(int(self.seq_len),),\n initializer='uniform',\n trainable=True)\n\n self.weights_periodic = self.add_weight(name='weight_periodic',\n shape=(int(self.seq_len),),\n initializer='uniform',\n trainable=True)\n\n self.bias_periodic = self.add_weight(name='bias_periodic',\n shape=(int(self.seq_len),),\n initializer='uniform',\n trainable=True)\n\n def call(self, x):\n # Convert (batch, seq_len, 7) to (batch, seq_len)\n x = tf.math.reduce_mean(x[:, :, :], axis=-1)\n time_linear = self.weights_linear * x + self.bias_linear\n time_linear = tf.expand_dims(time_linear, axis=-1)\n\n time_periodic = tf.math.sin(tf.multiply(x, self.weights_periodic) + self.bias_periodic)\n time_periodic = tf.expand_dims(time_periodic, axis=-1)\n return tf.concat([time_linear, time_periodic], axis=-1)\n \n def get_config(self):\n config = {\"seq_len\": self.seq_len}\n base_config = super(Time2Vector, self).get_config()\n return dict(list(base_config.items()) + list(config.items()))\n\nbigquant_run = Time2Vector\n","type":"Literal","bound_global_parameter":null},{"name":"params","value":"{\n \"seq_len\": 5\n}","type":"Literal","bound_global_parameter":null},{"name":"name","value":"","type":"Literal","bound_global_parameter":null}],"input_ports":[{"name":"input1","node_id":"-114"},{"name":"input2","node_id":"-114"},{"name":"input3","node_id":"-114"}],"output_ports":[{"name":"data","node_id":"-114"}],"cacheable":false,"seq_num":17,"comment":"Time2VecLayer","comment_collapsed":false},{"node_id":"-121","module_id":"BigQuantSpace.dl_layer_concatenate.dl_layer_concatenate-v1","parameters":[{"name":"axis","value":-1,"type":"Literal","bound_global_parameter":null},{"name":"name","value":"","type":"Literal","bound_global_parameter":null}],"input_ports":[{"name":"input1","node_id":"-121"},{"name":"input2","node_id":"-121"},{"name":"input3","node_id":"-121"}],"output_ports":[{"name":"data","node_id":"-121"}],"cacheable":false,"seq_num":18,"comment":"","comment_collapsed":true},{"node_id":"-132","module_id":"BigQuantSpace.dl_layer_userlayer.dl_layer_userlayer-v1","parameters":[{"name":"layer_class","value":"from tensorflow.keras.layers import Layer, Dropout, Dense, LayerNormalization, Conv1D\nimport tensorflow as tf\n\n# class SingleAttention(Layer):\n# def __init__(self, d_k, d_v):\n# super(SingleAttention, self).__init__()\n# self.d_k = d_k\n# self.d_v = d_v\n\n# def build(self, input_shape):\n# self.query = Dense(self.d_k, input_shape=input_shape, kernel_initializer='glorot_uniform', 
bias_initializer='glorot_uniform')\n# self.key = Dense(self.d_k, input_shape=input_shape, kernel_initializer='glorot_uniform', bias_initializer='glorot_uniform')\n# self.value = Dense(self.d_v, input_shape=input_shape, kernel_initializer='glorot_uniform', bias_initializer='glorot_uniform')\n\n# def call(self, inputs): # inputs = (in_seq, in_seq, in_seq)\n# q = self.query(inputs[0])\n# k = self.key(inputs[1])\n\n# attn_weights = tf.matmul(q, k, transpose_b=True)\n# attn_weights = tf.map_fn(lambda x: x/np.sqrt(self.d_k), attn_weights)\n# attn_weights = tf.nn.softmax(attn_weights, axis=-1)\n\n# v = self.value(inputs[2])\n# attn_out = tf.matmul(attn_weights, v)\n# return attn_out\n \n# class MultiAttention(Layer):\n# def __init__(self, d_k, d_v, n_heads): # , out_dim\n# super(MultiAttention, self).__init__()\n# self.d_k = d_k\n# self.d_v = d_v\n# self.n_heads = n_heads\n# self.attn_heads = list()\n\n# def build(self, input_shape):\n# for n in range(self.n_heads):\n# self.attn_heads.append(SingleAttention(self.d_k, self.d_v)) \n# self.linear = Dense(input_shape[-1][-1], input_shape=input_shape, kernel_initializer='glorot_uniform', bias_initializer='glorot_uniform')\n\n# def call(self, inputs):\n# attn = [self.attn_heads[i](inputs) for i in range(self.n_heads)]\n# concat_attn = tf.concat(attn, axis=-1)\n# multi_linear = self.linear(concat_attn)\n# return multi_linear \n \nclass TransformerEncoder2(Layer):\n def __init__(self, d_k, d_v, n_heads, ff_dim, dropout=0.1, **kwargs):\n super(TransformerEncoder2, self).__init__(**kwargs)\n self.d_k = d_k\n self.d_v = d_v\n self.n_heads = n_heads\n self.ff_dim = ff_dim\n self.attn_heads = list()\n self.dropout_rate = dropout\n\n def build(self, input_shape):\n self.attn_multi = MultiAttention(self.d_k, self.d_v, self.n_heads)\n self.attn_dropout = Dropout(self.dropout_rate)\n self.attn_normalize = LayerNormalization(input_shape=input_shape, epsilon=1e-6)\n\n self.ff_conv1D_1 = Conv1D(filters=self.ff_dim, kernel_size=1, activation='relu')\n self.ff_conv1D_2 = Conv1D(filters=input_shape[-1][-1], kernel_size=1)\n self.ff_dropout = Dropout(self.dropout_rate)\n self.ff_normalize = LayerNormalization(input_shape=input_shape, epsilon=1e-6) \n\n def call(self, inputs):\n attn_layer = self.attn_multi(inputs)\n attn_layer = self.attn_dropout(attn_layer)\n attn_layer = self.attn_normalize(inputs[0] + attn_layer)\n ff_layer = self.ff_conv1D_1(attn_layer)\n ff_layer = self.ff_conv1D_2(ff_layer)\n ff_layer = self.ff_dropout(ff_layer)\n ff_layer = self.ff_normalize(inputs[0] + ff_layer)\n return ff_layer \n \n def get_config(self):\n config = {\"d_k\": self.d_k, \"d_v\": self.d_v, \"n_heads\": self.n_heads, \"ff_dim\": self.ff_dim, \"dropout\": self.dropout_rate}\n base_config = super(TransformerEncoder2, self).get_config()\n return dict(list(base_config.items()) + list(config.items()))\n\nbigquant_run = TransformerEncoder2\n","type":"Literal","bound_global_parameter":null},{"name":"params","value":"{\n \"d_k\": 100,\n \"d_v\": 100,\n \"n_heads\": 1,\n \"ff_dim\": 
256\n}","type":"Literal","bound_global_parameter":null},{"name":"name","value":"","type":"Literal","bound_global_parameter":null}],"input_ports":[{"name":"input1","node_id":"-132"},{"name":"input2","node_id":"-132"},{"name":"input3","node_id":"-132"}],"output_ports":[{"name":"data","node_id":"-132"}],"cacheable":false,"seq_num":20,"comment":"TransformerEncoderLayer","comment_collapsed":false},{"node_id":"-138","module_id":"BigQuantSpace.dl_layer_userlayer.dl_layer_userlayer-v1","parameters":[{"name":"layer_class","value":"from tensorflow.keras.layers import Layer, Dropout, Dense, LayerNormalization, Conv1D\nimport tensorflow as tf\n\n# class SingleAttention(Layer):\n# def __init__(self, d_k, d_v):\n# super(SingleAttention, self).__init__()\n# self.d_k = d_k\n# self.d_v = d_v\n\n# def build(self, input_shape):\n# self.query = Dense(self.d_k, input_shape=input_shape, kernel_initializer='glorot_uniform', bias_initializer='glorot_uniform')\n# self.key = Dense(self.d_k, input_shape=input_shape, kernel_initializer='glorot_uniform', bias_initializer='glorot_uniform')\n# self.value = Dense(self.d_v, input_shape=input_shape, kernel_initializer='glorot_uniform', bias_initializer='glorot_uniform')\n\n# def call(self, inputs): # inputs = (in_seq, in_seq, in_seq)\n# q = self.query(inputs[0])\n# k = self.key(inputs[1])\n\n# attn_weights = tf.matmul(q, k, transpose_b=True)\n# attn_weights = tf.map_fn(lambda x: x/np.sqrt(self.d_k), attn_weights)\n# attn_weights = tf.nn.softmax(attn_weights, axis=-1)\n\n# v = self.value(inputs[2])\n# attn_out = tf.matmul(attn_weights, v)\n# return attn_out\n \n# class MultiAttention(Layer):\n# def __init__(self, d_k, d_v, n_heads): # , out_dim\n# super(MultiAttention, self).__init__()\n# self.d_k = d_k\n# self.d_v = d_v\n# self.n_heads = n_heads\n# self.attn_heads = list()\n\n# def build(self, input_shape):\n# for n in range(self.n_heads):\n# self.attn_heads.append(SingleAttention(self.d_k, self.d_v)) \n# self.linear = Dense(input_shape[-1][-1], input_shape=input_shape, kernel_initializer='glorot_uniform', bias_initializer='glorot_uniform')\n\n# def call(self, inputs):\n# attn = [self.attn_heads[i](inputs) for i in range(self.n_heads)]\n# concat_attn = tf.concat(attn, axis=-1)\n# multi_linear = self.linear(concat_attn)\n# return multi_linear \n \nclass TransformerEncoder3(Layer):\n def __init__(self, d_k, d_v, n_heads, ff_dim, dropout=0.1, **kwargs):\n super(TransformerEncoder3, self).__init__(**kwargs)\n self.d_k = d_k\n self.d_v = d_v\n self.n_heads = n_heads\n self.ff_dim = ff_dim\n self.attn_heads = list()\n self.dropout_rate = dropout\n\n def build(self, input_shape):\n self.attn_multi = MultiAttention(self.d_k, self.d_v, self.n_heads)\n self.attn_dropout = Dropout(self.dropout_rate)\n self.attn_normalize = LayerNormalization(input_shape=input_shape, epsilon=1e-6)\n\n self.ff_conv1D_1 = Conv1D(filters=self.ff_dim, kernel_size=1, activation='relu')\n self.ff_conv1D_2 = Conv1D(filters=input_shape[-1][-1], kernel_size=1)\n self.ff_dropout = Dropout(self.dropout_rate)\n self.ff_normalize = LayerNormalization(input_shape=input_shape, epsilon=1e-6) \n\n def call(self, inputs):\n attn_layer = self.attn_multi(inputs)\n attn_layer = self.attn_dropout(attn_layer)\n attn_layer = self.attn_normalize(inputs[0] + attn_layer)\n ff_layer = self.ff_conv1D_1(attn_layer)\n ff_layer = self.ff_conv1D_2(ff_layer)\n ff_layer = self.ff_dropout(ff_layer)\n ff_layer = self.ff_normalize(inputs[0] + ff_layer)\n return ff_layer \n \n def get_config(self):\n config = {\"d_k\": self.d_k, \"d_v\": 
self.d_v, \"n_heads\": self.n_heads, \"ff_dim\": self.ff_dim, \"dropout\": self.dropout_rate}\n base_config = super(TransformerEncoder3, self).get_config()\n return dict(list(base_config.items()) + list(config.items()))\n\nbigquant_run = TransformerEncoder3\n","type":"Literal","bound_global_parameter":null},{"name":"params","value":"{\n \"d_k\": 100,\n \"d_v\": 100,\n \"n_heads\": 1,\n \"ff_dim\": 256\n}","type":"Literal","bound_global_parameter":null},{"name":"name","value":"","type":"Literal","bound_global_parameter":null}],"input_ports":[{"name":"input1","node_id":"-138"},{"name":"input2","node_id":"-138"},{"name":"input3","node_id":"-138"}],"output_ports":[{"name":"data","node_id":"-138"}],"cacheable":false,"seq_num":21,"comment":"TransformerEncoderLayer","comment_collapsed":false},{"node_id":"-158","module_id":"BigQuantSpace.dl_layer_userlayer.dl_layer_userlayer-v1","parameters":[{"name":"layer_class","value":"from tensorflow.keras.layers import Layer, Dropout, Dense, LayerNormalization, Conv1D\nimport tensorflow as tf\n\nclass SingleAttention(Layer):\n def __init__(self, d_k, d_v):\n super(SingleAttention, self).__init__()\n self.d_k = d_k\n self.d_v = d_v\n\n def build(self, input_shape):\n self.query = Dense(self.d_k, input_shape=input_shape, kernel_initializer='glorot_uniform', bias_initializer='glorot_uniform')\n self.key = Dense(self.d_k, input_shape=input_shape, kernel_initializer='glorot_uniform', bias_initializer='glorot_uniform')\n self.value = Dense(self.d_v, input_shape=input_shape, kernel_initializer='glorot_uniform', bias_initializer='glorot_uniform')\n\n def call(self, inputs): # inputs = (in_seq, in_seq, in_seq)\n q = self.query(inputs[0])\n k = self.key(inputs[1])\n\n attn_weights = tf.matmul(q, k, transpose_b=True)\n attn_weights = tf.map_fn(lambda x: x/np.sqrt(self.d_k), attn_weights)\n attn_weights = tf.nn.softmax(attn_weights, axis=-1)\n\n v = self.value(inputs[2])\n attn_out = tf.matmul(attn_weights, v)\n return attn_out\n \nclass MultiAttention(Layer):\n def __init__(self, d_k, d_v, n_heads):\n super(MultiAttention, self).__init__()\n self.d_k = d_k\n self.d_v = d_v\n self.n_heads = n_heads\n self.attn_heads = list()\n\n def build(self, input_shape):\n for n in range(self.n_heads):\n self.attn_heads.append(SingleAttention(self.d_k, self.d_v)) \n self.linear = Dense(input_shape[-1][-1], input_shape=input_shape, kernel_initializer='glorot_uniform', bias_initializer='glorot_uniform')\n\n def call(self, inputs):\n attn = [self.attn_heads[i](inputs) for i in range(self.n_heads)]\n concat_attn = tf.concat(attn, axis=-1)\n multi_linear = self.linear(concat_attn)\n return multi_linear \n \nclass TransformerEncoder1(Layer):\n def __init__(self, d_k, d_v, n_heads, ff_dim, dropout=0.1, **kwargs):\n super(TransformerEncoder1, self).__init__(**kwargs)\n self.d_k = d_k\n self.d_v = d_v\n self.n_heads = n_heads\n self.ff_dim = ff_dim\n self.attn_heads = list()\n self.dropout_rate = dropout\n\n def build(self, input_shape):\n self.attn_multi = MultiAttention(self.d_k, self.d_v, self.n_heads)\n self.attn_dropout = Dropout(self.dropout_rate)\n self.attn_normalize = LayerNormalization(input_shape=input_shape, epsilon=1e-6)\n\n self.ff_conv1D_1 = Conv1D(filters=self.ff_dim, kernel_size=1, activation='relu')\n self.ff_conv1D_2 = Conv1D(filters=input_shape[-1][-1], kernel_size=1)\n self.ff_dropout = Dropout(self.dropout_rate)\n self.ff_normalize = LayerNormalization(input_shape=input_shape, epsilon=1e-6) \n\n def call(self, inputs):\n attn_layer = self.attn_multi(inputs)\n attn_layer 
= self.attn_dropout(attn_layer)\n attn_layer = self.attn_normalize(inputs[0] + attn_layer)\n ff_layer = self.ff_conv1D_1(attn_layer)\n ff_layer = self.ff_conv1D_2(ff_layer)\n ff_layer = self.ff_dropout(ff_layer)\n ff_layer = self.ff_normalize(inputs[0] + ff_layer)\n return ff_layer \n \n def get_config(self):\n config = {\"d_k\": self.d_k, \"d_v\": self.d_v, \"n_heads\": self.n_heads, \"ff_dim\": self.ff_dim, \"dropout\": self.dropout_rate}\n base_config = super(TransformerEncoder1, self).get_config()\n return dict(list(base_config.items()) + list(config.items()))\n\nbigquant_run = TransformerEncoder1\n","type":"Literal","bound_global_parameter":null},{"name":"params","value":"{\n \"d_k\": 100,\n \"d_v\": 100,\n \"n_heads\": 1,\n \"ff_dim\": 256\n}","type":"Literal","bound_global_parameter":null},{"name":"name","value":"","type":"Literal","bound_global_parameter":null}],"input_ports":[{"name":"input1","node_id":"-158"},{"name":"input2","node_id":"-158"},{"name":"input3","node_id":"-158"}],"output_ports":[{"name":"data","node_id":"-158"}],"cacheable":false,"seq_num":23,"comment":"TransformerEncoderLayer","comment_collapsed":false},{"node_id":"-162","module_id":"BigQuantSpace.dl_layer_globalaveragepooling1d.dl_layer_globalaveragepooling1d-v1","parameters":[{"name":"name","value":"","type":"Literal","bound_global_parameter":null}],"input_ports":[{"name":"inputs","node_id":"-162"}],"output_ports":[{"name":"data","node_id":"-162"}],"cacheable":false,"seq_num":19,"comment":"","comment_collapsed":true},{"node_id":"-166","module_id":"BigQuantSpace.dl_layer_dropout.dl_layer_dropout-v1","parameters":[{"name":"rate","value":"0.1","type":"Literal","bound_global_parameter":null},{"name":"noise_shape","value":"","type":"Literal","bound_global_parameter":null},{"name":"seed","value":"111","type":"Literal","bound_global_parameter":null},{"name":"name","value":"","type":"Literal","bound_global_parameter":null}],"input_ports":[{"name":"inputs","node_id":"-166"}],"output_ports":[{"name":"data","node_id":"-166"}],"cacheable":false,"seq_num":22,"comment":"","comment_collapsed":true},{"node_id":"-173","module_id":"BigQuantSpace.dl_layer_dense.dl_layer_dense-v1","parameters":[{"name":"units","value":"256","type":"Literal","bound_global_parameter":null},{"name":"activation","value":"relu","type":"Literal","bound_global_parameter":null},{"name":"user_activation","value":"","type":"Literal","bound_global_parameter":null},{"name":"use_bias","value":"True","type":"Literal","bound_global_parameter":null},{"name":"kernel_initializer","value":"glorot_uniform","type":"Literal","bound_global_parameter":null},{"name":"user_kernel_initializer","value":"","type":"Literal","bound_global_parameter":null},{"name":"bias_initializer","value":"Zeros","type":"Literal","bound_global_parameter":null},{"name":"user_bias_initializer","value":"","type":"Literal","bound_global_parameter":null},{"name":"kernel_regularizer","value":"None","type":"Literal","bound_global_parameter":null},{"name":"kernel_regularizer_l1","value":0,"type":"Literal","bound_global_parameter":null},{"name":"kernel_regularizer_l2","value":0,"type":"Literal","bound_global_parameter":null},{"name":"user_kernel_regularizer","value":"","type":"Literal","bound_global_parameter":null},{"name":"bias_regularizer","value":"None","type":"Literal","bound_global_parameter":null},{"name":"bias_regularizer_l1","value":0,"type":"Literal","bound_global_parameter":null},{"name":"bias_regularizer_l2","value":0,"type":"Literal","bound_global_parameter":null},{"name":"user_bias_regular
izer","value":"","type":"Literal","bound_global_parameter":null},{"name":"activity_regularizer","value":"None","type":"Literal","bound_global_parameter":null},{"name":"activity_regularizer_l1","value":0,"type":"Literal","bound_global_parameter":null},{"name":"activity_regularizer_l2","value":0,"type":"Literal","bound_global_parameter":null},{"name":"user_activity_regularizer","value":"","type":"Literal","bound_global_parameter":null},{"name":"kernel_constraint","value":"None","type":"Literal","bound_global_parameter":null},{"name":"user_kernel_constraint","value":"","type":"Literal","bound_global_parameter":null},{"name":"bias_constraint","value":"None","type":"Literal","bound_global_parameter":null},{"name":"user_bias_constraint","value":"","type":"Literal","bound_global_parameter":null},{"name":"name","value":"","type":"Literal","bound_global_parameter":null}],"input_ports":[{"name":"inputs","node_id":"-173"}],"output_ports":[{"name":"data","node_id":"-173"}],"cacheable":false,"seq_num":24,"comment":"","comment_collapsed":true},{"node_id":"-201","module_id":"BigQuantSpace.dl_layer_dense.dl_layer_dense-v1","parameters":[{"name":"units","value":"1","type":"Literal","bound_global_parameter":null},{"name":"activation","value":"linear","type":"Literal","bound_global_parameter":null},{"name":"user_activation","value":"","type":"Literal","bound_global_parameter":null},{"name":"use_bias","value":"True","type":"Literal","bound_global_parameter":null},{"name":"kernel_initializer","value":"glorot_uniform","type":"Literal","bound_global_parameter":null},{"name":"user_kernel_initializer","value":"","type":"Literal","bound_global_parameter":null},{"name":"bias_initializer","value":"Zeros","type":"Literal","bound_global_parameter":null},{"name":"user_bias_initializer","value":"","type":"Literal","bound_global_parameter":null},{"name":"kernel_regularizer","value":"None","type":"Literal","bound_global_parameter":null},{"name":"kernel_regularizer_l1","value":0,"type":"Literal","bound_global_parameter":null},{"name":"kernel_regularizer_l2","value":0,"type":"Literal","bound_global_parameter":null},{"name":"user_kernel_regularizer","value":"","type":"Literal","bound_global_parameter":null},{"name":"bias_regularizer","value":"None","type":"Literal","bound_global_parameter":null},{"name":"bias_regularizer_l1","value":0,"type":"Literal","bound_global_parameter":null},{"name":"bias_regularizer_l2","value":0,"type":"Literal","bound_global_parameter":null},{"name":"user_bias_regularizer","value":"","type":"Literal","bound_global_parameter":null},{"name":"activity_regularizer","value":"None","type":"Literal","bound_global_parameter":null},{"name":"activity_regularizer_l1","value":0,"type":"Literal","bound_global_parameter":null},{"name":"activity_regularizer_l2","value":0,"type":"Literal","bound_global_parameter":null},{"name":"user_activity_regularizer","value":"","type":"Literal","bound_global_parameter":null},{"name":"kernel_constraint","value":"None","type":"Literal","bound_global_parameter":null},{"name":"user_kernel_constraint","value":"","type":"Literal","bound_global_parameter":null},{"name":"bias_constraint","value":"None","type":"Literal","bound_global_parameter":null},{"name":"user_bias_constraint","value":"","type":"Literal","bound_global_parameter":null},{"name":"name","value":"","type":"Literal","bound_global_parameter":null}],"input_ports":[{"name":"inputs","node_id":"-201"}],"output_ports":[{"name":"data","node_id":"-201"}],"cacheable":false,"seq_num":25,"comment":"","comment_collapsed":true},{"node_
id":"-230","module_id":"BigQuantSpace.dl_model_init.dl_model_init-v1","parameters":[],"input_ports":[{"name":"inputs","node_id":"-230"},{"name":"outputs","node_id":"-230"}],"output_ports":[{"name":"data","node_id":"-230"}],"cacheable":false,"seq_num":27,"comment":"","comment_collapsed":true},{"node_id":"-235","module_id":"BigQuantSpace.dl_model_train.dl_model_train-v1","parameters":[{"name":"optimizer","value":"Adam","type":"Literal","bound_global_parameter":null},{"name":"user_optimizer","value":"","type":"Literal","bound_global_parameter":null},{"name":"loss","value":"mean_squared_error","type":"Literal","bound_global_parameter":null},{"name":"user_loss","value":"","type":"Literal","bound_global_parameter":null},{"name":"metrics","value":"mae","type":"Literal","bound_global_parameter":null},{"name":"batch_size","value":"1024","type":"Literal","bound_global_parameter":null},{"name":"epochs","value":"1","type":"Literal","bound_global_parameter":null},{"name":"custom_objects","value":"bigquant_run = {\n \"Time2Vector\": Time2Vector,\n \"TransformerEncoder1\": TransformerEncoder1,\n \"TransformerEncoder2\": TransformerEncoder2,\n \"TransformerEncoder3\": TransformerEncoder3\n}\n","type":"Literal","bound_global_parameter":null},{"name":"n_gpus","value":"1","type":"Literal","bound_global_parameter":null},{"name":"verbose","value":"1:输出进度条记录","type":"Literal","bound_global_parameter":null}],"input_ports":[{"name":"input_model","node_id":"-235"},{"name":"training_data","node_id":"-235"},{"name":"validation_data","node_id":"-235"}],"output_ports":[{"name":"data","node_id":"-235"}],"cacheable":true,"seq_num":28,"comment":"","comment_collapsed":true},{"node_id":"-218","module_id":"BigQuantSpace.dl_model_predict.dl_model_predict-v1","parameters":[{"name":"batch_size","value":"1024","type":"Literal","bound_global_parameter":null},{"name":"n_gpus","value":0,"type":"Literal","bound_global_parameter":null},{"name":"verbose","value":"2:每个epoch输出一行记录","type":"Literal","bound_global_parameter":null}],"input_ports":[{"name":"trained_model","node_id":"-218"},{"name":"input_data","node_id":"-218"}],"output_ports":[{"name":"data","node_id":"-218"}],"cacheable":true,"seq_num":4,"comment":"","comment_collapsed":true},{"node_id":"-227","module_id":"BigQuantSpace.trade.trade-v4","parameters":[{"name":"start_date","value":"","type":"Literal","bound_global_parameter":null},{"name":"end_date","value":"","type":"Literal","bound_global_parameter":null},{"name":"initialize","value":"# 回测引擎:初始化函数,只执行一次\ndef bigquant_run(context):\n # 加载预测数据\n context.ranker_prediction = context.options['data'].read_df()\n\n # 系统已经设置了默认的交易手续费和滑点,要修改手续费可使用如下函数\n context.set_commission(PerOrder(buy_cost=0.0003, sell_cost=0.0013, min_cost=5))\n # 预测数据,通过options传入进来,使用 read_df 函数,加载到内存 (DataFrame)\n # 设置买入的股票数量,这里买入预测股票列表排名靠前的5只\n stock_count = 5\n # 每只的股票的权重,如下的权重分配会使得靠前的股票分配多一点的资金,[0.339160, 0.213986, 0.169580, ..]\n context.stock_weights = T.norm([1 / math.log(i + 2) for i in range(0, stock_count)])\n # 设置每只股票占用的最大资金比例\n context.max_cash_per_instrument = 0.2\n context.hold_days = 5\n","type":"Literal","bound_global_parameter":null},{"name":"handle_data","value":"# 回测引擎:每日数据处理函数,每天执行一次\ndef bigquant_run(context, data):\n # 按日期过滤得到今日的预测数据\n ranker_prediction = context.ranker_prediction[\n context.ranker_prediction.date == data.current_dt.strftime('%Y-%m-%d')]\n\n # 1. 
资金分配\n # 平均持仓时间是hold_days,每日都将买入股票,每日预期使用 1/hold_days 的资金\n # 实际操作中,会存在一定的买入误差,所以在前hold_days天,等量使用资金;之后,尽量使用剩余资金(这里设置最多用等量的1.5倍)\n is_staging = context.trading_day_index < context.hold_days # 是否在建仓期间(前 hold_days 天)\n cash_avg = context.portfolio.portfolio_value / context.hold_days\n cash_for_buy = min(context.portfolio.cash, (1 if is_staging else 1.5) * cash_avg)\n cash_for_sell = cash_avg - (context.portfolio.cash - cash_for_buy)\n positions = {e.symbol: p.amount * p.last_sale_price\n for e, p in context.perf_tracker.position_tracker.positions.items()}\n\n # 2. 生成卖出订单:hold_days天之后才开始卖出;对持仓的股票,按StockRanker预测的排序末位淘汰\n if not is_staging and cash_for_sell > 0:\n equities = {e.symbol: e for e, p in context.perf_tracker.position_tracker.positions.items()}\n instruments = list(reversed(list(ranker_prediction.instrument[ranker_prediction.instrument.apply(\n lambda x: x in equities and not context.has_unfinished_sell_order(equities[x]))])))\n # print('rank order for sell %s' % instruments)\n for instrument in instruments:\n context.order_target(context.symbol(instrument), 0)\n cash_for_sell -= positions[instrument]\n if cash_for_sell <= 0:\n break\n\n # 3. 生成买入订单:按StockRanker预测的排序,买入前面的stock_count只股票\n buy_cash_weights = context.stock_weights\n buy_instruments = list(ranker_prediction.instrument[:len(buy_cash_weights)])\n max_cash_per_instrument = context.portfolio.portfolio_value * context.max_cash_per_instrument\n for i, instrument in enumerate(buy_instruments):\n cash = cash_for_buy * buy_cash_weights[i]\n if cash > max_cash_per_instrument - positions.get(instrument, 0):\n # 确保股票持仓量不会超过每次股票最大的占用资金量\n cash = max_cash_per_instrument - positions.get(instrument, 0)\n if cash > 0:\n context.order_value(context.symbol(instrument), cash)\n","type":"Literal","bound_global_parameter":null},{"name":"prepare","value":"# 回测引擎:准备数据,只执行一次\ndef bigquant_run(context):\n pass\n","type":"Literal","bound_global_parameter":null},{"name":"before_trading_start","value":"# 回测引擎:每个单位时间开始前调用一次,即每日开盘前调用一次。\ndef bigquant_run(context, data):\n pass\n","type":"Literal","bound_global_parameter":null},{"name":"volume_limit","value":0.025,"type":"Literal","bound_global_parameter":null},{"name":"order_price_field_buy","value":"open","type":"Literal","bound_global_parameter":null},{"name":"order_price_field_sell","value":"close","type":"Literal","bound_global_parameter":null},{"name":"capital_base","value":1000000,"type":"Literal","bound_global_parameter":null},{"name":"auto_cancel_non_tradable_orders","value":"True","type":"Literal","bound_global_parameter":null},{"name":"data_frequency","value":"daily","type":"Literal","bound_global_parameter":null},{"name":"price_type","value":"真实价格","type":"Literal","bound_global_parameter":null},{"name":"product_type","value":"股票","type":"Literal","bound_global_parameter":null},{"name":"plot_charts","value":"True","type":"Literal","bound_global_parameter":null},{"name":"backtest_only","value":"False","type":"Literal","bound_global_parameter":null},{"name":"benchmark","value":"","type":"Literal","bound_global_parameter":null}],"input_ports":[{"name":"instruments","node_id":"-227"},{"name":"options_data","node_id":"-227"},{"name":"history_ds","node_id":"-227"},{"name":"benchmark_ds","node_id":"-227"},{"name":"trading_calendar","node_id":"-227"}],"output_ports":[{"name":"raw_perf","node_id":"-227"}],"cacheable":false,"seq_num":29,"comment":"","comment_collapsed":true},{"node_id":"-263","module_id":"BigQuantSpace.cached.cached-v3","parameters":[{"name":"run","value":"# Python 
代码入口函数,input_1/2/3 对应三个输入端,data_1/2/3 对应三个输出端\ndef bigquant_run(input_1, input_2, input_3):\n # 示例代码如下。在这里编写您的代码\n pred = input_1.read()\n# pred = DataSource(\"d73724f7124c4f48ade645d9973f34e4T\").read()\n test = input_2.read()\n df_trade = pd.DataFrame({'pred_label': pred[:, 0], 'instrument': test[\"instrument\"], 'date': test[\"date\"]})\n df_trade.sort_values(['date', 'pred_label'], inplace=True, ascending=[True, False])\n return Outputs(data_1=DataSource.write_df(df_trade))\n","type":"Literal","bound_global_parameter":null},{"name":"post_run","value":"# 后处理函数,可选。输入是主函数的输出,可以在这里对数据做处理,或者返回更友好的outputs数据格式。此函数输出不会被缓存。\ndef bigquant_run(outputs):\n return outputs\n","type":"Literal","bound_global_parameter":null},{"name":"input_ports","value":"","type":"Literal","bound_global_parameter":null},{"name":"params","value":"{}","type":"Literal","bound_global_parameter":null},{"name":"output_ports","value":"","type":"Literal","bound_global_parameter":null}],"input_ports":[{"name":"input_1","node_id":"-263"},{"name":"input_2","node_id":"-263"},{"name":"input_3","node_id":"-263"}],"output_ports":[{"name":"data_1","node_id":"-263"},{"name":"data_2","node_id":"-263"},{"name":"data_3","node_id":"-263"}],"cacheable":true,"seq_num":30,"comment":"","comment_collapsed":true}],"node_layout":"<node_postions><node_position Node='287d2cb0-f53c-4101-bdf8-104b137c8601-8' Position='389,20,200,200'/><node_position Node='287d2cb0-f53c-4101-bdf8-104b137c8601-15' Position='214,170,200,200'/><node_position Node='287d2cb0-f53c-4101-bdf8-104b137c8601-24' Position='806,-119,200,200'/><node_position Node='287d2cb0-f53c-4101-bdf8-104b137c8601-53' Position='381,467,200,200'/><node_position Node='-106' Position='547,173,200,200'/><node_position Node='-113' Position='548,275,200,200'/><node_position Node='-768' Position='662,359,200,200'/><node_position Node='-773' Position='216,277,200,200'/><node_position Node='-243' Position='388,544,200,200'/><node_position Node='287d2cb0-f53c-4101-bdf8-104b137c8601-62' Position='1078,94,200,200'/><node_position Node='-122' Position='1071,201,200,200'/><node_position Node='-129' Position='1077,335,200,200'/><node_position Node='-778' Position='1059,427,200,200'/><node_position Node='-251' Position='1016,554,200,200'/><node_position Node='-268' Position='-86,-169,200,200'/><node_position Node='-114' Position='-118,-59,200,200'/><node_position Node='-121' Position='-125,56,200,200'/><node_position Node='-132' Position='-143,278,200,200'/><node_position Node='-138' Position='-148,392,200,200'/><node_position Node='-158' Position='-135,165,200,200'/><node_position Node='-162' Position='-154,498,200,200'/><node_position Node='-166' Position='-155,592,200,200'/><node_position Node='-173' Position='-154,686,200,200'/><node_position Node='-201' Position='-145,845,200,200'/><node_position Node='-230' Position='-15,963,200,200'/><node_position Node='-235' Position='249.85031127929688,793,200,200'/><node_position Node='-218' Position='411,909,200,200'/><node_position Node='-227' Position='606,1160,200,200'/><node_position Node='-263' Position='583,1036,200,200'/></node_postions>"},"nodes_readonly":false,"studio_version":"v2"}
    In [1]:
    # This code was auto-generated by the visual strategy environment on 2021-06-29 10:40
    # This cell can only be edited in visual mode. You can also copy the code into a new code cell or strategy and modify it there.
    
    
    from tensorflow.keras.layers import Layer
    import tensorflow as tf
    
    
    class Time2Vector(Layer):
        def __init__(self, seq_len, **kwargs):
            super(Time2Vector, self).__init__(**kwargs)
            self.seq_len = seq_len
    
        def build(self, input_shape):
            self.weights_linear = self.add_weight(name='weight_linear',
                                        shape=(int(self.seq_len),),
                                        initializer='uniform',
                                        trainable=True)
    
            self.bias_linear = self.add_weight(name='bias_linear',
                                        shape=(int(self.seq_len),),
                                        initializer='uniform',
                                        trainable=True)
    
            self.weights_periodic = self.add_weight(name='weight_periodic',
                                        shape=(int(self.seq_len),),
                                        initializer='uniform',
                                        trainable=True)
    
            self.bias_periodic = self.add_weight(name='bias_periodic',
                                        shape=(int(self.seq_len),),
                                        initializer='uniform',
                                        trainable=True)
    
        def call(self, x):
            # Average across the feature axis: (batch, seq_len, n_features) -> (batch, seq_len)
            x = tf.math.reduce_mean(x, axis=-1)
            time_linear = self.weights_linear * x + self.bias_linear
            time_linear = tf.expand_dims(time_linear, axis=-1)
    
            time_periodic = tf.math.sin(tf.multiply(x, self.weights_periodic) + self.bias_periodic)
            time_periodic = tf.expand_dims(time_periodic, axis=-1)
            return tf.concat([time_linear, time_periodic], axis=-1)
        
        def get_config(self):
            config = {"seq_len": self.seq_len}
            base_config = super(Time2Vector, self).get_config()
            return dict(list(base_config.items()) + list(config.items()))
    
    m17_layer_class_bigquant_run = Time2Vector
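    
    # Editorial smoke test (illustrative sketch, not generated by the visual editor):
    # a random (batch, seq_len, n_features) tensor passed through Time2Vector comes out
    # as (batch, seq_len, 2) -- one linear and one periodic time channel.
    _t2v_demo = Time2Vector(seq_len=5)
    print(_t2v_demo(tf.random.normal((4, 5, 2))).shape)  # expected: (4, 5, 2)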
    
    from tensorflow.keras.layers import Layer, Dropout, Dense, LayerNormalization, Conv1D
    import tensorflow as tf
    import numpy as np  # needed below for the sqrt(d_k) attention scaling
    
    class SingleAttention(Layer):
        def __init__(self, d_k, d_v):
            super(SingleAttention, self).__init__()
            self.d_k = d_k
            self.d_v = d_v
    
        def build(self, input_shape):
            self.query = Dense(self.d_k, input_shape=input_shape, kernel_initializer='glorot_uniform', bias_initializer='glorot_uniform')
            self.key = Dense(self.d_k, input_shape=input_shape, kernel_initializer='glorot_uniform', bias_initializer='glorot_uniform')
            self.value = Dense(self.d_v, input_shape=input_shape, kernel_initializer='glorot_uniform', bias_initializer='glorot_uniform')
    
        def call(self, inputs): # inputs = (in_seq, in_seq, in_seq)
            q = self.query(inputs[0])
            k = self.key(inputs[1])
    
            # Scaled dot-product attention: softmax(Q.K^T / sqrt(d_k)) . V
            attn_weights = tf.matmul(q, k, transpose_b=True)
            attn_weights = attn_weights / np.sqrt(self.d_k)
            attn_weights = tf.nn.softmax(attn_weights, axis=-1)
    
            v = self.value(inputs[2])
            attn_out = tf.matmul(attn_weights, v)
            return attn_out
        
    class MultiAttention(Layer):
        def __init__(self, d_k, d_v, n_heads):
            super(MultiAttention, self).__init__()
            self.d_k = d_k
            self.d_v = d_v
            self.n_heads = n_heads
            self.attn_heads = list()
    
        def build(self, input_shape):
            for n in range(self.n_heads):
                self.attn_heads.append(SingleAttention(self.d_k, self.d_v))  
            self.linear = Dense(input_shape[-1][-1], input_shape=input_shape, kernel_initializer='glorot_uniform', bias_initializer='glorot_uniform')
    
        def call(self, inputs):
            attn = [self.attn_heads[i](inputs) for i in range(self.n_heads)]
            concat_attn = tf.concat(attn, axis=-1)
            multi_linear = self.linear(concat_attn)
            return multi_linear 
        
    class TransformerEncoder1(Layer):
        def __init__(self, d_k, d_v, n_heads, ff_dim, dropout=0.1, **kwargs):
            super(TransformerEncoder1, self).__init__(**kwargs)
            self.d_k = d_k
            self.d_v = d_v
            self.n_heads = n_heads
            self.ff_dim = ff_dim
            self.attn_heads = list()
            self.dropout_rate = dropout
    
        def build(self, input_shape):
            self.attn_multi = MultiAttention(self.d_k, self.d_v, self.n_heads)
            self.attn_dropout = Dropout(self.dropout_rate)
            self.attn_normalize = LayerNormalization(input_shape=input_shape, epsilon=1e-6)
    
            self.ff_conv1D_1 = Conv1D(filters=self.ff_dim, kernel_size=1, activation='relu')
            self.ff_conv1D_2 = Conv1D(filters=input_shape[-1][-1], kernel_size=1)
            self.ff_dropout = Dropout(self.dropout_rate)
            self.ff_normalize = LayerNormalization(input_shape=input_shape, epsilon=1e-6)    
    
        def call(self, inputs):
            attn_layer = self.attn_multi(inputs)
            attn_layer = self.attn_dropout(attn_layer)
            attn_layer = self.attn_normalize(inputs[0] + attn_layer)
            ff_layer = self.ff_conv1D_1(attn_layer)
            ff_layer = self.ff_conv1D_2(ff_layer)
            ff_layer = self.ff_dropout(ff_layer)
            ff_layer = self.ff_normalize(inputs[0] + ff_layer)
            return ff_layer 
        
        def get_config(self):
            config = {"d_k": self.d_k, "d_v": self.d_v, "n_heads": self.n_heads, "ff_dim": self.ff_dim, "dropout": self.dropout_rate}
            base_config = super(TransformerEncoder1, self).get_config()
            return dict(list(base_config.items()) + list(config.items()))
    
    m23_layer_class_bigquant_run = TransformerEncoder1
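    
    # Editorial shape check (illustrative sketch, not generated code): the encoder is a
    # residual block, so its output shape matches its input shape.
    _enc_demo = TransformerEncoder1(d_k=100, d_v=100, n_heads=1, ff_dim=256)
    _seq = tf.random.normal((4, 5, 4))          # (batch, seq_len, features)
    print(_enc_demo((_seq, _seq, _seq)).shape)  # expected: (4, 5, 4)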
    
        
    class TransformerEncoder2(Layer):
        def __init__(self, d_k, d_v, n_heads, ff_dim, dropout=0.1, **kwargs):
            super(TransformerEncoder2, self).__init__(**kwargs)
            self.d_k = d_k
            self.d_v = d_v
            self.n_heads = n_heads
            self.ff_dim = ff_dim
            self.attn_heads = list()
            self.dropout_rate = dropout
    
        def build(self, input_shape):
            self.attn_multi = MultiAttention(self.d_k, self.d_v, self.n_heads)
            self.attn_dropout = Dropout(self.dropout_rate)
            self.attn_normalize = LayerNormalization(input_shape=input_shape, epsilon=1e-6)
    
            self.ff_conv1D_1 = Conv1D(filters=self.ff_dim, kernel_size=1, activation='relu')
            self.ff_conv1D_2 = Conv1D(filters=input_shape[-1][-1], kernel_size=1)
            self.ff_dropout = Dropout(self.dropout_rate)
            self.ff_normalize = LayerNormalization(input_shape=input_shape, epsilon=1e-6)    
    
        def call(self, inputs):
            attn_layer = self.attn_multi(inputs)
            attn_layer = self.attn_dropout(attn_layer)
            attn_layer = self.attn_normalize(inputs[0] + attn_layer)
            ff_layer = self.ff_conv1D_1(attn_layer)
            ff_layer = self.ff_conv1D_2(ff_layer)
            ff_layer = self.ff_dropout(ff_layer)
            ff_layer = self.ff_normalize(inputs[0] + ff_layer)
            return ff_layer 
        
        def get_config(self):
            config = {"d_k": self.d_k, "d_v": self.d_v, "n_heads": self.n_heads, "ff_dim": self.ff_dim, "dropout": self.dropout_rate}
            base_config = super(TransformerEncoder2, self).get_config()
            return dict(list(base_config.items()) + list(config.items()))
    
    m20_layer_class_bigquant_run = TransformerEncoder2
    
        
    class TransformerEncoder3(Layer):
        def __init__(self, d_k, d_v, n_heads, ff_dim, dropout=0.1, **kwargs):
            super(TransformerEncoder3, self).__init__(**kwargs)
            self.d_k = d_k
            self.d_v = d_v
            self.n_heads = n_heads
            self.ff_dim = ff_dim
            self.attn_heads = list()
            self.dropout_rate = dropout
    
        def build(self, input_shape):
            self.attn_multi = MultiAttention(self.d_k, self.d_v, self.n_heads)
            self.attn_dropout = Dropout(self.dropout_rate)
            self.attn_normalize = LayerNormalization(input_shape=input_shape, epsilon=1e-6)
    
            self.ff_conv1D_1 = Conv1D(filters=self.ff_dim, kernel_size=1, activation='relu')
            self.ff_conv1D_2 = Conv1D(filters=input_shape[-1][-1], kernel_size=1)
            self.ff_dropout = Dropout(self.dropout_rate)
            self.ff_normalize = LayerNormalization(input_shape=input_shape, epsilon=1e-6)    
    
        def call(self, inputs):
            attn_layer = self.attn_multi(inputs)
            attn_layer = self.attn_dropout(attn_layer)
            attn_layer = self.attn_normalize(inputs[0] + attn_layer)
            ff_layer = self.ff_conv1D_1(attn_layer)
            ff_layer = self.ff_conv1D_2(ff_layer)
            ff_layer = self.ff_dropout(ff_layer)
            ff_layer = self.ff_normalize(inputs[0] + ff_layer)
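            # (Note) Both residual connections here add inputs[0]; a standard Transformer
            # encoder adds the attention sub-layer's output (attn_layer) in the
            # feed-forward residual instead. Kept as-is to match the cloned strategy.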
            return ff_layer 
        
        def get_config(self):
            config = {"d_k": self.d_k, "d_v": self.d_v, "n_heads": self.n_heads, "ff_dim": self.ff_dim, "dropout": self.dropout_rate}
            base_config = super(TransformerEncoder3, self).get_config()
            return dict(list(base_config.items()) + list(config.items()))
    
    m21_layer_class_bigquant_run = TransformerEncoder3
    
    m28_custom_objects_bigquant_run = {
        "Time2Vector": Time2Vector,
        "TransformerEncoder1": TransformerEncoder1,
        "TransformerEncoder2": TransformerEncoder2,
        "TransformerEncoder3": TransformerEncoder3
    }
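    
    # (Illustrative sketch) custom_objects is what lets Keras resolve the class names
    # stored in a serialized model config back to these Python classes. A minimal example
    # of how such a mapping is consumed when loading a saved model; "model.h5" is a
    # hypothetical placeholder path, since on this platform the actual save/load happens
    # inside M.dl_model_train / M.dl_model_predict:
    #
    #     model = tf.keras.models.load_model("model.h5", custom_objects=m28_custom_objects_bigquant_run)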
    
    # Python entry function: input_1/2/3 map to the three input ports, data_1/2/3 to the three output ports
    def m30_run_bigquant_run(input_1, input_2, input_3):
        # Build one row per (instrument, date) prediction; sort by date ascending and
        # prediction descending, so the head of each date group is the strongest buy
        pred = input_1.read()
    #     pred = DataSource("d73724f7124c4f48ade645d9973f34e4T").read()
        test = input_2.read()
        df_trade = pd.DataFrame({'pred_label': pred[:, 0], 'instrument': test["instrument"], 'date': test["date"]})
        df_trade.sort_values(['date', 'pred_label'], inplace=True, ascending=[True, False])
        return Outputs(data_1=DataSource.write_df(df_trade))
    
    # Post-processing function, optional. It receives the main function's outputs; transform
    # the data here or return a friendlier outputs format. Its output is not cached.
    def m30_post_run_bigquant_run(outputs):
        return outputs
    
    # Backtest engine: initialize function, executed only once
    def m29_initialize_bigquant_run(context):
        # Load the prediction data (passed in via options) into memory as a DataFrame
        context.ranker_prediction = context.options['data'].read_df()
    
        # The system sets default commissions and slippage; override the commission with the function below
        context.set_commission(PerOrder(buy_cost=0.0003, sell_cost=0.0013, min_cost=5))
        # Number of stocks to buy: the top 5 of the ranked prediction list
        stock_count = 5
        # Per-stock weights; this scheme gives higher-ranked stocks a larger share of capital,
        # e.g. [0.339160, 0.213986, 0.169580, ...] (see the sketch after this function)
        context.stock_weights = T.norm([1 / math.log(i + 2) for i in range(0, stock_count)])
        # Maximum fraction of portfolio value any single stock may occupy
        context.max_cash_per_instrument = 0.2
        context.hold_days = 5
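    
    # (Illustrative sketch) The weight scheme above without the platform helper T.norm,
    # showing where the numbers in the comment come from:
    #
    #     import math
    #     raw = [1 / math.log(i + 2) for i in range(5)]   # [1.4427, 0.9102, 0.7213, 0.6213, 0.5581]
    #     weights = [w / sum(raw) for w in raw]           # [0.3392, 0.2140, 0.1696, 0.1461, 0.1312]
    #
    # Higher-ranked stocks get a larger, slowly decaying share of the daily buy budget.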
    
    # Backtest engine: daily data-processing function, executed once per day
    def m29_handle_data_bigquant_run(context, data):
        # Filter the predictions down to today's rows
        ranker_prediction = context.ranker_prediction[
            context.ranker_prediction.date == data.current_dt.strftime('%Y-%m-%d')]
    
        # 1. Capital allocation (see the worked numbers after this function)
        # The average holding period is hold_days and we buy every day, so we expect to deploy 1/hold_days of the capital per day.
        # In practice buys carry some error, so during the first hold_days days capital is deployed in equal slices;
        # after that, spend as much of the remaining cash as possible (capped here at 1.5x the equal slice).
        is_staging = context.trading_day_index < context.hold_days  # still ramping up (first hold_days days)?
        cash_avg = context.portfolio.portfolio_value / context.hold_days
        cash_for_buy = min(context.portfolio.cash, (1 if is_staging else 1.5) * cash_avg)
        cash_for_sell = cash_avg - (context.portfolio.cash - cash_for_buy)  # cash that must be freed by selling to fund today's slice
        positions = {e.symbol: p.amount * p.last_sale_price
                     for e, p in context.perf_tracker.position_tracker.positions.items()}
    
        # 2. Generate sell orders: selling starts only after the first hold_days days;
        # held stocks at the bottom of the model's predicted ranking are eliminated first
        if not is_staging and cash_for_sell > 0:
            equities = {e.symbol: e for e, p in context.perf_tracker.position_tracker.positions.items()}
            instruments = list(reversed(list(ranker_prediction.instrument[ranker_prediction.instrument.apply(
                    lambda x: x in equities and not context.has_unfinished_sell_order(equities[x]))])))
            # print('rank order for sell %s' % instruments)
            for instrument in instruments:
                context.order_target(context.symbol(instrument), 0)
                cash_for_sell -= positions[instrument]
                if cash_for_sell <= 0:
                    break
    
        # 3. Generate buy orders: buy the top stock_count stocks in the model's predicted ranking
        buy_cash_weights = context.stock_weights
        buy_instruments = list(ranker_prediction.instrument[:len(buy_cash_weights)])
        max_cash_per_instrument = context.portfolio.portfolio_value * context.max_cash_per_instrument
        for i, instrument in enumerate(buy_instruments):
            cash = cash_for_buy * buy_cash_weights[i]
            if cash > max_cash_per_instrument - positions.get(instrument, 0):
                # Cap so a single position never exceeds the per-stock capital limit
                cash = max_cash_per_instrument - positions.get(instrument, 0)
            if cash > 0:
                context.order_value(context.symbol(instrument), cash)
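    
    # (Illustrative sketch) The capital-allocation arithmetic above, using this strategy's
    # assumed numbers (capital_base=1,000,000, hold_days=5):
    #
    #     staging day:    cash_avg = 1,000,000 / 5 = 200,000; cash_for_buy = min(cash, 1.0 * 200,000)
    #     after staging:  cash_for_buy = min(cash, 1.5 * 200,000), i.e. up to 300,000
    #
    # so roughly one fifth of the portfolio turns over each day once the ramp-up ends.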
    
    # Backtest engine: prepare data, executed only once
    def m29_prepare_bigquant_run(context):
        pass
    
    # Backtest engine: called once before each bar, i.e. once before each day's open.
    def m29_before_trading_start_bigquant_run(context, data):
        pass
    
    
    m1 = M.instruments.v2(
        start_date='2015-01-01',
        end_date='2016-01-01',
        market='CN_STOCK_A',
        instrument_list=' ',
        max_count=0
    )
    
    m2 = M.advanced_auto_labeler.v2(
        instruments=m1.data,
        label_expr="""# #号开始的表示注释
    # 0. 每行一个,顺序执行,从第二个开始,可以使用label字段
    # 1. 可用数据字段见 https://bigquant.com/docs/data_history_data.html
    #   添加benchmark_前缀,可使用对应的benchmark数据
    # 2. 可用操作符和函数见 `表达式引擎 <https://bigquant.com/docs/big_expr.html>`_
    
    # 计算收益:5日收盘价(作为卖出价格)除以明日开盘价(作为买入价格)
    shift(close, -5) / shift(open, -1)-1
    
    # 极值处理:用1%和99%分位的值做clip
    clip(label, all_quantile(label, 0.01), all_quantile(label, 0.99))
    
    # 过滤掉一字涨停的情况 (设置label为NaN,在后续处理和训练中会忽略NaN的label)
    where(shift(high, -1) == shift(low, -1), NaN, label)
    """,
        start_date='',
        end_date='',
        benchmark='000300.SHA',
        drop_na_label=True,
        cast_label_int=False
    )
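    
    # (Illustrative sketch) The label expression above, written as plain pandas on a
    # hypothetical single-instrument DataFrame df with open/high/low/close columns
    # (all_quantile in the expression engine pools all instruments; this sketch
    # computes quantiles per instrument for brevity):
    #
    #     label = df["close"].shift(-5) / df["open"].shift(-1) - 1           # 5-day return, buying at tomorrow's open
    #     label = label.clip(label.quantile(0.01), label.quantile(0.99))     # clip at the 1%/99% quantiles
    #     label[df["high"].shift(-1) == df["low"].shift(-1)] = float("nan")  # drop one-price (limit) days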
    
    m13 = M.standardlize.v8(
        input_1=m2.data,
        columns_input='label'
    )
    
    m3 = M.input_features.v1(
        features="""close_0/mean(close_0,5)
    close_0/mean(close_0,10)
    # close_0/mean(close_0,20)
    # close_0/open_0
    # open_0/mean(close_0,5)
    # open_0/mean(close_0,10)
    # open_0/mean(close_0,20)"""
    )
    
    m15 = M.general_feature_extractor.v7(
        instruments=m1.data,
        features=m3.data,
        start_date='',
        end_date='',
        before_start_days=30
    )
    
    m16 = M.derived_feature_extractor.v3(
        input_data=m15.data,
        features=m3.data,
        date_col='date',
        instrument_col='instrument',
        drop_na=True,
        remove_extra_columns=False
    )
    
    m14 = M.standardlize.v8(
        input_1=m16.data,
        input_2=m3.data,
        columns_input='[]'
    )
    
    m7 = M.join.v3(
        data1=m13.data,
        data2=m14.data,
        on='date,instrument',
        how='inner',
        sort=False
    )
    
    m26 = M.dl_convert_to_bin.v2(
        input_data=m7.data,
        features=m3.data,
        window_size=5,
        feature_clip=5,
        flatten=False,
        window_along_col='instrument'
    )
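    
    # (Note) dl_convert_to_bin slides a window_size=5 window per instrument over the two
    # features above, so each training sample is a 5x2 sequence, matching the shape='5,2'
    # input layer below; feature_clip=5 caps extreme standardized feature values.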
    
    m5 = M.instruments.v2(
        start_date='2016-01-01',
        end_date='2017-01-01',
        market='CN_STOCK_A',
        instrument_list='',
        max_count=0
    )
    
    m6 = M.general_feature_extractor.v7(
        instruments=m5.data,
        features=m3.data,
        start_date='',
        end_date='',
        before_start_days=30
    )
    
    m8 = M.derived_feature_extractor.v3(
        input_data=m6.data,
        features=m3.data,
        date_col='date',
        instrument_col='instrument',
        drop_na=True,
        remove_extra_columns=False
    )
    
    m9 = M.standardlize.v8(
        input_1=m8.data,
        input_2=m3.data,
        columns_input='[]'
    )
    
    m10 = M.dl_convert_to_bin.v2(
        input_data=m9.data,
        features=m3.data,
        window_size=5,
        feature_clip=5,
        flatten=False,
        window_along_col='instrument'
    )
    
    m12 = M.dl_layer_input.v1(
        shape='5,2',
        batch_shape='',
        dtype='float32',
        sparse=False,
        name='test'
    )
    
    m17 = M.dl_layer_userlayer.v1(
        input1=m12.data,
        layer_class=m17_layer_class_bigquant_run,
        params="""{
        "seq_len": 5
    }""",
        name=''
    )
    
    m18 = M.dl_layer_concatenate.v1(
        input1=m17.data,
        input2=m12.data,
        axis=-1,
        name=''
    )
    
    m23 = M.dl_layer_userlayer.v1(
        input1=m18.data,
        input2=m18.data,
        input3=m18.data,
        layer_class=m23_layer_class_bigquant_run,
        params="""{
        "d_k": 100,
        "d_v": 100,
        "n_heads": 1,
        "ff_dim": 256
    }""",
        name=''
    )
    
    m20 = M.dl_layer_userlayer.v1(
        input1=m23.data,
        input2=m23.data,
        input3=m23.data,
        layer_class=m20_layer_class_bigquant_run,
        params="""{
        "d_k": 100,
        "d_v": 100,
        "n_heads": 1,
        "ff_dim": 256
    }""",
        name=''
    )
    
    m21 = M.dl_layer_userlayer.v1(
        input1=m20.data,
        input2=m20.data,
        input3=m20.data,
        layer_class=m21_layer_class_bigquant_run,
        params="""{
        "d_k": 100,
        "d_v": 100,
        "n_heads": 1,
        "ff_dim": 256
    }""",
        name=''
    )
    
    m19 = M.dl_layer_globalaveragepooling1d.v1(
        inputs=m21.data,
        name=''
    )
    
    m22 = M.dl_layer_dropout.v1(
        inputs=m19.data,
        rate=0.1,
        noise_shape='',
        seed=111,
        name=''
    )
    
    m24 = M.dl_layer_dense.v1(
        inputs=m22.data,
        units=256,
        activation='relu',
        use_bias=True,
        kernel_initializer='glorot_uniform',
        bias_initializer='Zeros',
        kernel_regularizer='None',
        kernel_regularizer_l1=0,
        kernel_regularizer_l2=0,
        bias_regularizer='None',
        bias_regularizer_l1=0,
        bias_regularizer_l2=0,
        activity_regularizer='None',
        activity_regularizer_l1=0,
        activity_regularizer_l2=0,
        kernel_constraint='None',
        bias_constraint='None',
        name=''
    )
    
    m25 = M.dl_layer_dense.v1(
        inputs=m24.data,
        units=1,
        activation='linear',
        use_bias=True,
        kernel_initializer='glorot_uniform',
        bias_initializer='Zeros',
        kernel_regularizer='None',
        kernel_regularizer_l1=0,
        kernel_regularizer_l2=0,
        bias_regularizer='None',
        bias_regularizer_l1=0,
        bias_regularizer_l2=0,
        activity_regularizer='None',
        activity_regularizer_l1=0,
        activity_regularizer_l2=0,
        kernel_constraint='None',
        bias_constraint='None',
        name=''
    )
    
    m27 = M.dl_model_init.v1(
        inputs=m12.data,
        outputs=m25.data
    )
    
    m28 = M.dl_model_train.v1(
        input_model=m27.data,
        training_data=m26.data,
        optimizer='Adam',
        loss='mean_squared_error',
        metrics='mae',
        batch_size=1024,
        epochs=1,
        custom_objects=m28_custom_objects_bigquant_run,
        n_gpus=1,
        verbose='1:输出进度条记录'  # platform enum: "1: show a progress bar"
    )
    
    m4 = M.dl_model_predict.v1(
        trained_model=m28.data,
        input_data=m10.data,
        batch_size=1024,
        n_gpus=0,
        verbose='2:每个epoch输出一行记录'  # platform enum: "2: one log line per epoch"
    )
    
    m30 = M.cached.v3(
        input_1=m4.data,
        input_2=m9.data,
        run=m30_run_bigquant_run,
        post_run=m30_post_run_bigquant_run,
        input_ports='',
        params='{}',
        output_ports=''
    )
    
    m29 = M.trade.v4(
        instruments=m5.data,
        options_data=m30.data_1,
        start_date='',
        end_date='',
        initialize=m29_initialize_bigquant_run,
        handle_data=m29_handle_data_bigquant_run,
        prepare=m29_prepare_bigquant_run,
        before_trading_start=m29_before_trading_start_bigquant_run,
        volume_limit=0.025,
        order_price_field_buy='open',
        order_price_field_sell='close',
        capital_base=1000000,
        auto_cancel_non_tradable_orders=True,
        data_frequency='daily',
        price_type='真实价格',  # platform enum: real (unadjusted) prices
        product_type='股票',  # platform enum: stocks
        plot_charts=True,
        backtest_only=False,
        benchmark=''
    )
    
    533/533 [==============================] - 731s 1s/step - loss: 0.9944 - mae: 0.7312
    
    654/654 - 407s
    DataSource(891c8e2fb49b4c33b8691613eb320a5aT)
    
    • Return: 12.2%
    • Annualized return: 12.63%
    • Benchmark return: -11.28%
    • Alpha: 0.34
    • Beta: 1.18
    • Sharpe ratio: 0.44
    • Win rate: 0.54
    • Profit/loss ratio: 0.96
    • Return volatility: 32.31%
    • Information ratio: 0.09
    • Max drawdown: 22.81%
    In [6]:
    m10.data.read()["x"].shape
    
    Out[6]:
    (669547, 10)
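
The stored "x" array has 669,547 samples of 10 values each, consistent with the 5-step window times 2 features (5 x 2 = 10) of the dl_convert_to_bin step being stored flattened. A minimal sketch of the reshape that the shape='5,2' input corresponds to (stand-in data; whether the platform reshapes internally like this is an assumption):

    import numpy as np
    x = np.zeros((669547, 10), dtype="float32")  # stand-in for m10.data.read()["x"]
    seq = x.reshape(-1, 5, 2)                    # (samples, window, features), matching shape='5,2'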