#!/usr/bin/env python
# -*- coding:utf-8 -*-
# @FileName :tf_lstm_train.py
# @Time :2025/2/13 10:52
# @Author :David
# @Company: shenyang JY
import json, copy
import numpy as np
from flask import Flask, request, jsonify, g
import traceback, uuid
import logging, argparse
from data_processing.data_operation.data_handler import DataHandler
import os, time, yaml, threading
from copy import deepcopy
from models_processing.model_tf.tf_lstm import TSHandler
from common.database_dml_koi import *
from common.logs import Log
logger = Log('tf_ts2').logger
np.random.seed(42)  # NumPy random seed
app = Flask('tf_lstm2_train——service')
current_dir = os.path.dirname(os.path.abspath(__file__))
with open(os.path.join(current_dir, 'lstm.yaml'), 'r', encoding='utf-8') as f:
    global_config = yaml.safe_load(f)  # read-only global configuration
@app.before_request
def update_config():
    # ------------ Assemble parameters: merge request args into the config ------------
    # Deep-copy the global config, then overlay the request parameters
    current_config = deepcopy(global_config)
    request_args = request.values.to_dict()
    # 'features' rule: 1. if passed in, parse the comma-separated list and override;
    # 2. if not passed in, keep the original value from the config
    request_args['features'] = request_args['features'].split(',') if 'features' in request_args else current_config['features']
    current_config.update(request_args)
    # Store per-request objects on the request context
    g.opt = argparse.Namespace(**current_config)
    g.dh = DataHandler(logger, current_config)  # independent instance per request
    g.ts = TSHandler(logger, current_config)
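
# Illustrative only: any key in the POST body overrides the matching key from
# lstm.yaml for that single request; keys not sent keep their YAML defaults.
# The field values below are examples, not required parameters.
# e.g. form data {"mongodb_read_table": "j00600", "features": "speed10,direction10"}
# yields g.opt.features == ['speed10', 'direction10'] for this request only.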
@app.route('/tf_lstm2_training', methods=['POST'])
def model_training_bp():
    # Record the program start time
    start_time = time.time()
    result = {}
    success = 0
    dh = g.dh
    ts = g.ts
    args = deepcopy(g.opt.__dict__)
    logger.info("Program starts execution!")
    try:
        # ------------ Fetch data and preprocess the training set ------------
        train_data = get_data_from_mongo(args)
        train_x, train_y, valid_x, valid_y, scaled_train_bytes, scaled_target_bytes, scaled_cap = dh.train_data_handler(train_data, time_series=2)
        ts.opt.cap = round(scaled_cap, 2)
        ts.opt.Model['input_size'] = len(dh.opt.features)
        # ------------ Train the model and save it ------------
        # 1. In incremental (add_train) mode, load the pre-trained model's feature list first,
        #    then re-preprocess the training data against it.
        # 2. In normal mode, preprocess the training data first, then build the model from its features.
        model = ts.train_init() if ts.opt.Model['add_train'] else ts.get_keras_model(ts.opt, time_series=2)
        if ts.opt.Model['add_train']:
            if model:
                feas = json.loads(ts.model_params)['features']
                if set(feas).issubset(set(dh.opt.features)):
                    dh.opt.features = list(feas)
                    train_x, train_y, valid_x, valid_y, scaled_train_bytes, scaled_target_bytes, scaled_cap = dh.train_data_handler(train_data, time_series=2)
                else:
                    model = ts.get_keras_model(ts.opt, time_series=2)
                    logger.info("Training-data features do not cover the pre-trained model's features; falling back to a fresh model")
            else:
                model = ts.get_keras_model(ts.opt, time_series=2)
        ts_model = ts.training(model, [train_x, train_y, valid_x, valid_y])
        args['Model']['features'] = ','.join(dh.opt.features)
        args['params'] = json.dumps(args)
        args['descr'] = 'lstm2'
        args['gen_time'] = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(time.time()))
        insert_trained_model_into_mongo(ts_model, args)
        insert_scaler_model_into_mongo(scaled_train_bytes, scaled_target_bytes, args)
        success = 1
    except Exception as e:
        my_exception = traceback.format_exc()
        my_exception = my_exception.replace("\n", "\t")
        result['msg'] = my_exception
    end_time = time.time()
    result['success'] = success
    result['args'] = args
    result['start_time'] = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(start_time))
    result['end_time'] = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(end_time))
    print("Program execution ends!")
    return result
if __name__ == "__main__":
    print("Program starts execution!")
    from waitress import serve
    serve(app, host="0.0.0.0", port=10119,
          threads=8,            # number of worker threads (default 4; tune to the hardware)
          channel_timeout=600   # connection timeout in seconds
          )
    print("server start!")
    # args_dict = {"mongodb_database": 'realtimeDq', 'scaler_table': 'j00600_scaler', 'model_name': 'lstm1',
    #              'model_table': 'j00600_model', 'mongodb_read_table': 'j00600', 'col_time': 'dateTime',
    #              'features': 'speed10,direction10,speed30,direction30,speed50,direction50,speed70,direction70,speed90,direction90,speed110,direction110,speed150,direction150,speed170,direction170'}
    # args_dict['features'] = args_dict['features'].split(',')
    # args.update(args_dict)
    # dh = DataHandler(logger, args)
    # ts = TSHandler(logger, args)
    # opt = argparse.Namespace(**args)
    # opt.Model['input_size'] = len(opt.features)
    # train_data = get_data_from_mongo(args_dict)
    # train_x, train_y, valid_x, valid_y, scaled_train_bytes, scaled_target_bytes = dh.train_data_handler(train_data)
    # ts_model = ts.training([train_x, train_y, valid_x, valid_y])
    #
    # args_dict['gen_time'] = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(time.time()))
    # args_dict['params'] = args
    # args_dict['descr'] = 'test'
    # insert_trained_model_into_mongo(ts_model, args_dict)
    # insert_scaler_model_into_mongo(scaled_train_bytes, scaled_target_bytes, args_dict)