# tf_lstm_pre.py (5.0 KB) — scraped-page header; line-number residue removed
#!/usr/bin/env python
# -*- coding:utf-8 -*-
# @FileName :tf_lstm_pre.py
# @Time :2025/2/13 10:52
# @Author :David
# @Company: shenyang JY
import os.path
import numpy as np
import logging, argparse, traceback
from app.common.data_handler import DataHandler, write_number_to_file
from threading import Lock
import time, json
# NOTE(review): presumably serializes model load/predict across threads;
# the lock is never referenced in this chunk — confirm callers elsewhere use it.
model_lock = Lock()
from itertools import chain
from app.common.logs import logger, args
from app.common.tf_lstm import TSHandler
from app.common.dbmg import MongoUtils
np.random.seed(42)  # fix the NumPy RNG seed for reproducible runs
# Module-level singletons shared by every prediction request:
dh = DataHandler(logger, args)   # feature pre-processing / scaling
ts = TSHandler(logger, args)     # LSTM model wrapper
mgUtils = MongoUtils(logger)     # MongoDB access for stored models and scalers
  22. def model_prediction(pre_data, input_file, cap):
  23. # 获取程序开始时间
  24. start_time = time.time()
  25. result = {}
  26. success = 0
  27. print("Program starts execution!")
  28. farm_id = input_file.split('/')[-2]
  29. output_file = input_file.replace('IN', 'OUT')
  30. file = 'DQYC_OUT_PREDICT_POWER.txt'
  31. status_file = 'STATUS.TXT'
  32. try:
  33. args['model_table'] += farm_id
  34. args['scaler_table'] += farm_id
  35. feature_scaler, target_scaler = mgUtils.get_scaler_model_from_mongo(args)
  36. ts.opt.cap = round(target_scaler.transform(np.array([[float(cap)]]))[0, 0], 2)
  37. ts.get_model(args)
  38. dh.opt.features = json.loads(ts.model_params).get('Model').get('features', ','.join(ts.opt.features)).split(',')
  39. scaled_pre_x, pre_data = dh.pre_data_handler(pre_data, feature_scaler)
  40. success = 1
  41. # 更新算法状态:1. 启动成功
  42. write_number_to_file(os.path.join(output_file, status_file), 1, 1, 'rewrite')
  43. logger.info("算法启动成功")
  44. res = list(chain.from_iterable(target_scaler.inverse_transform([ts.predict(scaled_pre_x).flatten()])))
  45. pre_data['Power'] = res[:len(pre_data)]
  46. pre_data['PlantID'] = farm_id
  47. pre_data = pre_data[['PlantID', args['col_time'], 'Power']]
  48. pre_data.loc[:, 'Power'] = pre_data['Power'].round(2)
  49. pre_data.loc[pre_data['Power'] > args['cap'], 'Power'] = args['cap']
  50. pre_data.loc[pre_data['Power'] < 0, 'Power'] = 0
  51. pre_data.to_csv(os.path.join(output_file, file), sep=' ', index=False)
  52. # 更新算法状态:正常结束
  53. write_number_to_file(os.path.join(output_file, status_file), 2, 2)
  54. logger.info("算法正常结束")
  55. except Exception as e:
  56. # 如果算法状态没启动,不更新
  57. if success:
  58. write_number_to_file(os.path.join(output_file, status_file), 2, 3)
  59. my_exception = traceback.format_exc()
  60. my_exception.replace("\n", "\t")
  61. result['msg'] = my_exception
  62. logger.info("算法状态异常:{}".format(my_exception))
  63. end_time = time.time()
  64. result['success'] = success
  65. result['args'] = args
  66. result['start_time'] = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(start_time))
  67. result['end_time'] = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(end_time))
  68. print("Program execution ends!")
  69. return result
if __name__ == "__main__":
    print("Program starts execution!")
    logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(name)s - %(levelname)s - %(message)s')
    logger = logging.getLogger("model_training_bp log")
    # serve(app, host="0.0.0.0", port=1010x, threads=4)
    print("server start!")
    # ------------------------ test code ------------------------
    # NOTE(review): this script path references several names that are never
    # defined or imported in this file (`arguments`, `get_data_from_mongo`,
    # `get_scaler_model_from_mongo`, `insert_data_into_mongo`), so it raises
    # NameError as written — presumably copied from a sibling module; confirm
    # the intended imports before running.
    args_dict = {"mongodb_database": 'david_test', 'scaler_table': 'j00083_scaler', 'model_name': 'bp1.0.test',
                 'model_table': 'j00083_model', 'mongodb_read_table': 'j00083_test', 'col_time': 'date_time',
                 'mongodb_write_table': 'j00083_rs',
                 'features': 'speed10,direction10,speed30,direction30,speed50,direction50,speed70,direction70,speed90,direction90,speed110,direction110,speed150,direction150,speed170,direction170'}
    args_dict['features'] = args_dict['features'].split(',')
    arguments.update(args_dict)
    dh = DataHandler(logger, arguments)
    # NOTE(review): TSHandler is constructed as TSHandler(logger, args) at
    # module level — the single-argument call here may be missing its options.
    ts = TSHandler(logger)
    opt = argparse.Namespace(**arguments)
    opt.Model['input_size'] = len(opt.features)
    pre_data = get_data_from_mongo(args_dict)
    feature_scaler, target_scaler = get_scaler_model_from_mongo(arguments)
    pre_x = dh.pre_data_handler(pre_data, feature_scaler, opt)
    ts.get_model(arguments)
    result = ts.predict(pre_x)
    # Inverse-scale the prediction back to power units and flatten to a list.
    result1 = list(chain.from_iterable(target_scaler.inverse_transform([result.flatten()])))
    pre_data['power_forecast'] = result1[:len(pre_data)]
    pre_data['farm_id'] = 'J00083'
    pre_data['cdq'] = 1
    pre_data['dq'] = 1
    pre_data['zq'] = 1
    pre_data.rename(columns={arguments['col_time']: 'date_time'}, inplace=True)
    pre_data = pre_data[['date_time', 'power_forecast', 'farm_id', 'cdq', 'dq', 'zq']]
    pre_data['power_forecast'] = pre_data['power_forecast'].round(2)
    # Clip the forecast to [0, capacity].
    pre_data.loc[pre_data['power_forecast'] > opt.cap, 'power_forecast'] = opt.cap
    pre_data.loc[pre_data['power_forecast'] < 0, 'power_forecast'] = 0
    insert_data_into_mongo(pre_data, arguments)