# tf_lstm_pre.py
  1. #!/usr/bin/env python
  2. # -*- coding:utf-8 -*-
  3. # @FileName :tf_lstm_pre.py
  4. # @Time :2025/2/13 10:52
  5. # @Author :David
  6. # @Company: shenyang JY
  7. import os.path
  8. import numpy as np
  9. import logging, argparse, traceback
  10. from app.common.data_handler import DataHandler, write_number_to_file
  11. from threading import Lock
  12. import time, json
  13. model_lock = Lock()
  14. from itertools import chain
  15. from app.common.logs import logger, params
  16. from app.common.tf_lstm import TSHandler
  17. from app.common.dbmg import MongoUtils
  18. from copy import deepcopy
  19. np.random.seed(42) # NumPy随机种子
  20. mgUtils = MongoUtils(logger)
  21. def model_prediction(pre_data, input_file, cap):
  22. # 获取程序开始时间
  23. start_time = time.time()
  24. success = 0
  25. print("Program starts execution!")
  26. farm_id = input_file.split('/')[-2]
  27. output_file = input_file.replace('IN', 'OUT')
  28. file = 'DQYC_OUT_PREDICT_POWER.txt'
  29. status_file = 'STATUS.TXT'
  30. local_params = deepcopy(params)
  31. ts = TSHandler(logger, local_params)
  32. dh = DataHandler(logger, local_params)
  33. try:
  34. local_params['model_table'] += farm_id
  35. local_params['scaler_table'] += farm_id
  36. feature_scaler, target_scaler = mgUtils.get_scaler_model_from_mongo(local_params)
  37. ts.opt.cap = round(target_scaler.transform(np.array([[cap]]))[0, 0], 2)
  38. ts.get_model(local_params)
  39. dh.opt.features = json.loads(ts.model_params).get('Model').get('features', ','.join(ts.opt.features)).split(',')
  40. scaled_pre_x, pre_data = dh.pre_data_handler(pre_data, feature_scaler)
  41. success = 1
  42. # 更新算法状态:1. 启动成功
  43. write_number_to_file(os.path.join(output_file, status_file), 1, 1, 'rewrite')
  44. logger.info("算法启动成功")
  45. res = list(chain.from_iterable(target_scaler.inverse_transform([ts.predict(scaled_pre_x).flatten()])))
  46. pre_data['Power'] = res[:len(pre_data)]
  47. pre_data['PlantID'] = farm_id
  48. pre_data = pre_data[['PlantID', local_params['col_time'], 'Power']]
  49. pre_data.loc[:, 'Power'] = pre_data['Power'].round(2)
  50. pre_data.loc[pre_data['Power'] > cap, 'Power'] = cap
  51. pre_data.loc[pre_data['Power'] < 0, 'Power'] = 0
  52. pre_data.to_csv(os.path.join(output_file, file), sep=' ', index=False)
  53. # 更新算法状态:正常结束
  54. write_number_to_file(os.path.join(output_file, status_file), 2, 2)
  55. logger.info("算法正常结束")
  56. except Exception as e:
  57. # 如果算法状态没启动,不更新
  58. if success:
  59. write_number_to_file(os.path.join(output_file, status_file), 2, 3)
  60. my_exception = traceback.format_exc()
  61. my_exception.replace("\n", "\t")
  62. logger.info("算法状态异常:{}".format(my_exception))
  63. end_time = time.time()
  64. logger.info("lstm预测任务:用了 %s 秒 " % (end_time - start_time))
  65. if __name__ == "__main__":
  66. print("Program starts execution!")
  67. logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(name)s - %(levelname)s - %(message)s')
  68. logger = logging.getLogger("model_training_bp log")
  69. # serve(app, host="0.0.0.0", port=1010x, threads=4)
  70. print("server start!")
  71. # ------------------------测试代码------------------------
  72. args_dict = {"mongodb_database": 'david_test', 'scaler_table': 'j00083_scaler', 'model_name': 'bp1.0.test',
  73. 'model_table': 'j00083_model', 'mongodb_read_table': 'j00083_test', 'col_time': 'date_time', 'mongodb_write_table': 'j00083_rs',
  74. 'features': 'speed10,direction10,speed30,direction30,speed50,direction50,speed70,direction70,speed90,direction90,speed110,direction110,speed150,direction150,speed170,direction170'}
  75. args_dict['features'] = args_dict['features'].split(',')
  76. arguments.update(args_dict)
  77. dh = DataHandler(logger, arguments)
  78. ts = TSHandler(logger)
  79. opt = argparse.Namespace(**arguments)
  80. opt.Model['input_size'] = len(opt.features)
  81. pre_data = get_data_from_mongo(args_dict)
  82. feature_scaler, target_scaler = get_scaler_model_from_mongo(arguments)
  83. pre_x = dh.pre_data_handler(pre_data, feature_scaler, opt)
  84. ts.get_model(arguments)
  85. result = ts.predict(pre_x)
  86. result1 = list(chain.from_iterable(target_scaler.inverse_transform([result.flatten()])))
  87. pre_data['power_forecast'] = result1[:len(pre_data)]
  88. pre_data['farm_id'] = 'J00083'
  89. pre_data['cdq'] = 1
  90. pre_data['dq'] = 1
  91. pre_data['zq'] = 1
  92. pre_data.rename(columns={arguments['col_time']: 'date_time'}, inplace=True)
  93. pre_data = pre_data[['date_time', 'power_forecast', 'farm_id', 'cdq', 'dq', 'zq']]
  94. pre_data['power_forecast'] = pre_data['power_forecast'].round(2)
  95. pre_data.loc[pre_data['power_forecast'] > opt.cap, 'power_forecast'] = opt.cap
  96. pre_data.loc[pre_data['power_forecast'] < 0, 'power_forecast'] = 0
  97. insert_data_into_mongo(pre_data, arguments)