# tf_cnn_pre.py
#!/usr/bin/env python
# -*- coding:utf-8 -*-
# @FileName :nn_bp_pre.py
# @Time :2025/2/12 10:39
# @Author :David
# @Company: shenyang JY
#
# Flask service wiring for CNN-based power-forecast prediction: loads the
# shared YAML config once at import time and builds module-level data/model
# handlers that the request handler below reuses.
import json, copy
import numpy as np
from flask import Flask, request
import logging, argparse, traceback
from common.database_dml import *
from common.processing_data_common import missing_features, str_to_list
from data_processing.data_operation.data_handler import DataHandler
from threading import Lock
import time, yaml
model_lock = Lock()
from itertools import chain
from common.logs import Log
from tf_cnn import CNNHandler
# logger = Log('tf_bp').logger()
logger = Log('tf_bp').logger
np.random.seed(42)  # NumPy random seed (reproducible preprocessing)
# tf.set_random_seed(42)  # TensorFlow random seed
app = Flask('tf_cnn_pre——service')
with app.app_context():
    # Path is relative to the working directory — assumes the service is
    # started from the package directory next to model_koi/; TODO confirm.
    with open('../model_koi/bp.yaml', 'r', encoding='utf-8') as f:
        arguments = yaml.safe_load(f)

# Shared, long-lived handlers used by the request handler below.
dh = DataHandler(logger, arguments)
cnn = CNNHandler(logger)
  30. @app.route('/nn_bp_predict', methods=['POST'])
  31. def model_prediction_bp():
  32. # 获取程序开始时间
  33. start_time = time.time()
  34. result = {}
  35. success = 0
  36. print("Program starts execution!")
  37. params_dict = request.values.to_dict()
  38. args = arguments.deepcopy()
  39. args.update(params_dict)
  40. try:
  41. print('args', args)
  42. logger.info(args)
  43. pre_data = get_data_from_mongo(args)
  44. feature_scaler, target_scaler = get_scaler_model_from_mongo(args)
  45. scaled_pre_x = dh.pre_data_handler(pre_data, feature_scaler, args)
  46. cnn.get_model(args)
  47. # result = bp.predict(scaled_pre_x, args)
  48. result = list(chain.from_iterable(target_scaler.inverse_transform([cnn.predict(scaled_pre_x).flatten()])))
  49. pre_data['power_forecast'] = result[:len(pre_data)]
  50. pre_data['farm_id'] = 'J00083'
  51. pre_data['cdq'] = 1
  52. pre_data['dq'] = 1
  53. pre_data['zq'] = 1
  54. pre_data.rename(columns={arguments['col_time']: 'date_time'}, inplace=True)
  55. pre_data = pre_data[['date_time', 'power_forecast', 'farm_id', 'cdq', 'dq', 'zq']]
  56. pre_data['power_forecast'] = pre_data['power_forecast'].round(2)
  57. pre_data.loc[pre_data['power_forecast'] > opt.cap, 'power_forecast'] = opt.cap
  58. pre_data.loc[pre_data['power_forecast'] < 0, 'power_forecast'] = 0
  59. insert_data_into_mongo(pre_data, arguments)
  60. success = 1
  61. except Exception as e:
  62. my_exception = traceback.format_exc()
  63. my_exception.replace("\n", "\t")
  64. result['msg'] = my_exception
  65. end_time = time.time()
  66. result['success'] = success
  67. result['args'] = args
  68. result['start_time'] = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(start_time))
  69. result['end_time'] = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(end_time))
  70. print("Program execution ends!")
  71. return result
  72. if __name__ == "__main__":
  73. print("Program starts execution!")
  74. logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(name)s - %(levelname)s - %(message)s')
  75. logger = logging.getLogger("model_training_bp log")
  76. from waitress import serve
  77. # serve(app, host="0.0.0.0", port=1010x, threads=4)
  78. print("server start!")
  79. # ------------------------测试代码------------------------
  80. args_dict = {"mongodb_database": 'david_test', 'scaler_table': 'j00083_scaler', 'model_name': 'bp1.0.test',
  81. 'model_table': 'j00083_model', 'mongodb_read_table': 'j00083_test', 'col_time': 'date_time', 'mongodb_write_table': 'j00083_rs',
  82. 'features': 'speed10,direction10,speed30,direction30,speed50,direction50,speed70,direction70,speed90,direction90,speed110,direction110,speed150,direction150,speed170,direction170'}
  83. args_dict['features'] = args_dict['features'].split(',')
  84. arguments.update(args_dict)
  85. dh = DataHandler(logger, arguments)
  86. cnn = CNNHandler(logger)
  87. opt = argparse.Namespace(**arguments)
  88. opt.Model['input_size'] = len(opt.features)
  89. pre_data = get_data_from_mongo(args_dict)
  90. feature_scaler, target_scaler = get_scaler_model_from_mongo(arguments)
  91. pre_x = dh.pre_data_handler(pre_data, feature_scaler, opt)
  92. cnn.get_model(arguments)
  93. result = cnn.predict(pre_x)
  94. result1 = list(chain.from_iterable(target_scaler.inverse_transform([result.flatten()])))
  95. pre_data['power_forecast'] = result1[:len(pre_data)]
  96. pre_data['farm_id'] = 'J00083'
  97. pre_data['cdq'] = 1
  98. pre_data['dq'] = 1
  99. pre_data['zq'] = 1
  100. pre_data.rename(columns={arguments['col_time']: 'date_time'}, inplace=True)
  101. pre_data = pre_data[['date_time', 'power_forecast', 'farm_id', 'cdq', 'dq', 'zq']]
  102. pre_data['power_forecast'] = pre_data['power_forecast'].round(2)
  103. pre_data.loc[pre_data['power_forecast'] > opt.cap, 'power_forecast'] = opt.cap
  104. pre_data.loc[pre_data['power_forecast'] < 0, 'power_forecast'] = 0
  105. insert_data_into_mongo(pre_data, arguments)