@@ -5,7 +5,7 @@
 # @Author :David
 # @Company: shenyang JY
-from tensorflow.keras.layers import Input, Dense, LSTM, concatenate, Conv1D, Conv2D, MaxPooling1D, Reshape, Flatten
+from tensorflow.keras.layers import Input, Dense, LSTM, concatenate, Conv1D, Conv2D, MaxPooling1D, Reshape, Flatten, Add, Multiply, Concatenate
 from tensorflow.keras.models import Model, load_model
 from tensorflow.keras.callbacks import ModelCheckpoint, EarlyStopping, TensorBoard, ReduceLROnPlateau
 from tensorflow.keras import optimizers, regularizers
@@ -37,7 +37,7 @@ class TSHandler(object):
             self.logger.info("加载模型权重失败:{}".format(e.args))
 
     @staticmethod
-    def get_keras_model(opt, time_series=1, lstm_type=1):
+    def get_keras_model_20250515(opt, time_series=1, lstm_type=1):
         loss = region_loss(opt)
         l1_reg = regularizers.l1(opt.Model['lambda_value_1'])
         l2_reg = regularizers.l2(opt.Model['lambda_value_2'])
@@ -59,6 +59,53 @@ class TSHandler(object):
 
         return model
 
+    @staticmethod
+    def get_keras_model(opt, time_series=1, lstm_type=1):
+        loss = region_loss(opt)
+        l1_reg = regularizers.l1(opt.Model['lambda_value_1'])
+        l2_reg = regularizers.l2(opt.Model['lambda_value_2'])
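+        # NWP input: a sequence of time_step * time_series steps, each with input_size features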
+        nwp_input = Input(shape=(opt.Model['time_step'] * time_series, opt.Model['input_size']), name='nwp')
+
+        con1 = Conv1D(filters=64, kernel_size=1, strides=1, padding='valid', activation='relu',
+                      kernel_regularizer=l2_reg)(nwp_input)
+        con1_p = MaxPooling1D(pool_size=1, strides=1, padding='valid', data_format='channels_last')(con1)
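+        # pool_size=1 with stride 1 leaves the sequence unchanged, so this pooling layer is effectively a pass-through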
+        nwp_lstm = LSTM(units=opt.Model['hidden_size'], return_sequences=True, kernel_regularizer=l2_reg)(con1_p)
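+        # shared trunk output: (batch, time_step * time_series, hidden_size); consumed by both zone branches and the gate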
+        # Independent feature extraction for zone A and zone B
+        zone_a = LSTM(units=32, return_sequences=True, kernel_regularizer=l2_reg)(nwp_lstm)  # separate LSTM branch per zone
+        zone_b = LSTM(units=32, return_sequences=True, kernel_regularizer=l2_reg)(nwp_lstm)
+
+        # Dynamic gating weights
+        gate = Dense(2, activation='softmax')(nwp_lstm)  # learns the relative importance of each zone automatically
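+        # gate shape: (batch, time_steps, 2), a softmax weight per timestep for the two zone branches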
+        weighted_zone = Add()([
+            Multiply()([gate[:, :, 0:1], zone_a]),
+            Multiply()([gate[:, :, 1:2], zone_b])
+        ])
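+        # each (batch, time_steps, 1) gate slice broadcasts across the 32 units of its branch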
+
+        zone_pred = Dense(len(opt.zone), name='zone')(weighted_zone)
+        zone_flat = Flatten()(zone_pred)
+        # Cross layer added after zone_flat
+        cross_input = Concatenate()([zone_flat, nwp_lstm[:, -1, :]])  # fuse zone features with the LSTM state at the last timestep
+        cross_layer = Dense(64, activation='relu')(cross_input)
+
+        # Final output layer
+        if lstm_type == 2:
+            output = Dense(opt.Model['time_step'], name='cdq_output')(cross_layer)
+        else:
+            output = Dense(opt.Model['time_step'] * time_series, name='cdq_output')(cross_layer)
+
+        model = Model(nwp_input, [zone_pred, output])
+        adam = optimizers.Adam(learning_rate=0.001, beta_1=0.9, beta_2=0.999, epsilon=1e-7, amsgrad=True)
+        model.compile(loss={"zone": loss, "cdq_output": loss}, loss_weights={"zone": 0.7, "cdq_output": 0.3},
+                      optimizer=adam)
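+        # both heads use the region loss; zone predictions are weighted higher (0.7) than the cdq output (0.3)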
+
+        return model
+
     def train_init(self):
         try:
             # Additional training on top of the existing model; supports model refitting