#!/usr/bin/env python
# -*- coding:utf-8 -*-
# @FileName  :tf_lstm_pre.py
# @Time      :2025/2/13 10:52
# @Author    :David
# @Company: shenyang JY
import json, copy
import os
import numpy as np
from flask import Flask, request, g
import logging, argparse, traceback
from common.database_dml_koi import *
from common.processing_data_common import missing_features, str_to_list
from data_processing.data_operation.data_handler import DataHandler
from threading import Lock
import time, yaml
from copy import deepcopy
model_lock = Lock()
from itertools import chain
from common.logs import Log
from tf_test import TSHandler
# logger = Log('tf_bp').logger()
logger = Log('tf_test').logger
np.random.seed(42)  # NumPy random seed for reproducibility
# tf.set_random_seed(42)  # TensorFlow random seed

app = Flask('tf_test_pre——service')

# Load the read-only global configuration that ships next to this file.
current_dir = os.path.dirname(os.path.abspath(__file__))
with open(os.path.join(current_dir, 'test.yaml'), 'r', encoding='utf-8') as f:
    global_config = yaml.safe_load(f)  # read-only global configuration


@app.before_request
def update_config():
    """Assemble the per-request configuration before each request.

    Deep-copies the global config, overlays the request parameters, and
    stores a fresh Namespace plus per-request handler instances on
    ``flask.g`` so concurrent requests never share mutable state.
    """
    # ------------ merge request parameters into the config ------------
    # Deep-copy of global config + merge of request parameters.
    current_config = deepcopy(global_config)
    request_args = request.values.to_dict()
    # 'features' rule: 1. if supplied, parse the comma-separated list and
    # override; 2. if absent, keep the configured default untouched.
    request_args['features'] = request_args['features'].split(',') if 'features' in request_args else current_config['features']
    current_config.update(request_args)
    # Stash everything on the request context.
    g.opt = argparse.Namespace(**current_config)
    g.dh = DataHandler(logger, current_config)  # independent instance per request
    g.ts = TSHandler(logger, current_config)


@app.route('/tf_test_predict', methods=['POST'])
def model_prediction_test():
    """POST endpoint: load data and model from Mongo, predict, store result.

    Returns a dict with ``success`` (0/1), the effective ``args``, the
    start/end timestamps, and — on failure — a flattened traceback in
    ``msg``.
    """
    start_time = time.time()
    result = {}
    success = 0
    dh = g.dh
    ts = g.ts
    args = deepcopy(g.opt.__dict__)
    logger.info("Program starts execution!")
    try:
        pre_data = get_data_from_mongo(args)
        if args.get('algorithm_test', 0):
            # Algorithm-test data arrives with snake_case weather columns;
            # map them to the camelCase names the pipeline expects.
            field_mapping = {'clearsky_ghi': 'clearskyGhi', 'dni_calcd': 'dniCalcd', 'surface_pressure': 'surfacePressure'}
            pre_data = pre_data.rename(columns=field_mapping)
        feature_scaler, target_scaler = get_scaler_model_from_mongo(args)
        # Scale the station capacity into model space before prediction.
        ts.opt.cap = round(target_scaler.transform(np.array([[float(args['cap'])]]))[0, 0], 2)
        ts.get_model(args)
        # The feature list persisted with the model wins over the request's.
        dh.opt.features = json.loads(ts.model_params)['Model']['features'].split(',')
        scaled_pre_x, pre_data = dh.pre_data_handler(pre_data, feature_scaler)
        res = list(chain.from_iterable(target_scaler.inverse_transform(ts.predict(scaled_pre_x))))
        pre_data['farm_id'] = args.get('farm_id', 'null')
        if int(args.get('algorithm_test', 0)):
            # Wide-to-long output for algorithm comparison runs.
            pre_data[args['model_name']] = res[:len(pre_data)]
            pre_data.rename(columns={args['col_time']: 'dateTime'}, inplace=True)
            pre_data = pre_data[['dateTime', 'farm_id', args['target'], args['model_name'], 'dq']]
            pre_data = pre_data.melt(id_vars=['dateTime', 'farm_id', args['target']], var_name='model', value_name='power_forecast')
            res_cols = ['dateTime', 'power_forecast', 'farm_id', args['target'], 'model']
            if 'howLongAgo' in args:
                pre_data['howLongAgo'] = int(args['howLongAgo'])
                res_cols += ['howLongAgo']
        else:
            pre_data['power_forecast'] = res[:len(pre_data)]
            pre_data.rename(columns={args['col_time']: 'date_time'}, inplace=True)
            res_cols = ['date_time', 'power_forecast', 'farm_id']
        pre_data = pre_data[res_cols]
        # Round to 2 decimals and clamp forecasts into [0, cap].
        pre_data.loc[:, 'power_forecast'] = pre_data['power_forecast'].round(2)
        pre_data.loc[pre_data['power_forecast'] > float(args['cap']), 'power_forecast'] = float(args['cap'])
        pre_data.loc[pre_data['power_forecast'] < 0, 'power_forecast'] = 0
        insert_data_into_mongo(pre_data, args)
        success = 1
    except Exception:
        my_exception = traceback.format_exc()
        # BUG FIX: str.replace returns a new string — the original call
        # discarded the result, so newlines were never flattened to tabs.
        my_exception = my_exception.replace("\n", "\t")
        result['msg'] = my_exception
        logger.error(my_exception)
    end_time = time.time()
    result['success'] = success
    result['args'] = args
    result['start_time'] = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(start_time))
    result['end_time'] = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(end_time))
    logger.info("Program execution ends!")
    return result


if __name__ == "__main__":
    print("Program starts execution!")
    from waitress import serve
    serve(app, host="0.0.0.0", port=10116,
          threads=8,           # worker thread count (default 4; tune to hardware)
          channel_timeout=600  # connection timeout in seconds
          )
    print("server start!")