tf_cnn_pre.py

#!/usr/bin/env python
# -*- coding:utf-8 -*-
# @FileName :tf_cnn_pre.py
# @Time :2025/2/13 10:52
# @Author :David
# @Company: shenyang JY
import os.path
import time
import json
import logging
import argparse
import traceback
from threading import Lock
from itertools import chain

import numpy as np

from app.common.data_handler import DataHandler, write_number_to_file
from app.common.logs import logger, params
from app.common.tf_cnn import CNNHandler
from app.common.dbmg import MongoUtils

model_lock = Lock()
np.random.seed(42)  # fix the NumPy random seed for reproducibility

dh = DataHandler(logger, params)
cnn = CNNHandler(logger, params)
mgUtils = MongoUtils(logger)
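
# NOTE (illustrative, not from the original source): judging from the keys accessed in this
# file, the shared `params` dict is expected to provide at least the entries sketched below.
# The concrete values are hypothetical placeholders.
# params = {
#     'model_table': 'models_',    # Mongo collection prefix; the farm id is appended at runtime
#     'scaler_table': 'scalers_',  # Mongo collection prefix for the fitted feature/target scalers
#     'col_time': 'date_time',     # name of the timestamp column in the prediction data
#     'features': ['speed10', 'direction10'],  # NWP feature columns fed to the CNN
# }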


def model_prediction(pre_data, input_file, cap):
    """Run the CNN power forecast for one farm and write the result and status files to the OUT directory."""
    # record the start time of the run
    start_time = time.time()
    success = 0
    print("Program starts execution!")
    farm_id = input_file.split('/')[-2]
    output_file = input_file.replace('IN', 'OUT')
    file = 'DQYC_OUT_PREDICT_POWER.txt'
    status_file = 'STATUS.TXT'
    try:
        params['model_table'] += farm_id
        params['scaler_table'] += farm_id
        feature_scaler, target_scaler = mgUtils.get_scaler_model_from_mongo(params)
        cnn.opt.cap = round(target_scaler.transform(np.array([[cap]]))[0, 0], 2)
        cnn.get_model(params)
        dh.opt.features = json.loads(cnn.model_params).get('Model').get('features', ','.join(cnn.opt.features)).split(',')
        scaled_pre_x, pre_data = dh.pre_data_handler(pre_data, feature_scaler)
        success = 1
        # update algorithm status: 1 = started successfully
        write_number_to_file(os.path.join(output_file, status_file), 1, 1, 'rewrite')
        logger.info("Algorithm started successfully")
        res = list(chain.from_iterable(target_scaler.inverse_transform([cnn.predict(scaled_pre_x).flatten()])))
        pre_data['Power'] = res[:len(pre_data)]
        pre_data['PlantID'] = farm_id
        pre_data = pre_data[['PlantID', params['col_time'], 'Power']]
        pre_data.loc[:, 'Power'] = pre_data['Power'].round(2)
        # clip forecasts to the physical range [0, cap]
        pre_data.loc[pre_data['Power'] > cap, 'Power'] = cap
        pre_data.loc[pre_data['Power'] < 0, 'Power'] = 0
        pre_data.to_csv(os.path.join(output_file, file), sep=' ', index=False)
        # update algorithm status: finished normally
        write_number_to_file(os.path.join(output_file, status_file), 2, 2)
        logger.info("Algorithm finished normally")
    except Exception as e:
        # if the algorithm never reported a successful start, do not update the status file
        if success:
            write_number_to_file(os.path.join(output_file, status_file), 2, 3)
        my_exception = traceback.format_exc()
        my_exception = my_exception.replace("\n", "\t")
        logger.info("Algorithm status exception: {}".format(my_exception))
    end_time = time.time()
    logger.info("CNN prediction task took %s seconds" % (end_time - start_time))
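

# A minimal, hypothetical usage sketch (not part of the original file). It assumes the
# prediction input lives under a directory laid out as .../IN/<farm_id>/, that the input
# file name and the space-separated format below match the project's conventions, and that
# pandas is available. The path, file name, and capacity value are placeholders; this
# function is illustrative only and is never called by this module.
def _example_model_prediction_call():
    import pandas as pd  # local import so the sketch stays self-contained
    input_dir = '/data/IN/J00083/'  # hypothetical farm input directory (farm id is the second-to-last path part)
    weather_file = os.path.join(input_dir, 'weather.txt')  # hypothetical input file name
    pre_data = pd.read_csv(weather_file, sep=' ')  # assumed space-separated, mirroring the output format
    model_prediction(pre_data, input_dir, cap=20.0)  # cap: rated plant capacity (hypothetical value)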


if __name__ == "__main__":
    print("Program starts execution!")
    logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(name)s - %(levelname)s - %(message)s')
    logger = logging.getLogger("model_training_bp log")
    # serve(app, host="0.0.0.0", port=1010x, threads=4)
    print("server start!")
    # ------------------------ test code ------------------------
    # NOTE: this block relies on project helpers that are not imported in this file
    # (TSHandler, get_data_from_mongo, get_scaler_model_from_mongo, insert_data_into_mongo)
    # and on an `arguments` dict; starting from a copy of the shared `params` is an assumption.
    arguments = dict(params)
    args_dict = {"mongodb_database": 'david_test', 'scaler_table': 'j00083_scaler', 'model_name': 'bp1.0.test',
                 'model_table': 'j00083_model', 'mongodb_read_table': 'j00083_test', 'col_time': 'date_time',
                 'mongodb_write_table': 'j00083_rs',
                 'features': 'speed10,direction10,speed30,direction30,speed50,direction50,speed70,direction70,speed90,direction90,speed110,direction110,speed150,direction150,speed170,direction170'}
    args_dict['features'] = args_dict['features'].split(',')
    arguments.update(args_dict)
    dh = DataHandler(logger, arguments)
    ts = TSHandler(logger)
    opt = argparse.Namespace(**arguments)
    opt.Model['input_size'] = len(opt.features)
    pre_data = get_data_from_mongo(args_dict)
    feature_scaler, target_scaler = get_scaler_model_from_mongo(arguments)
    pre_x = dh.pre_data_handler(pre_data, feature_scaler, opt)
    ts.get_model(arguments)
    result = ts.predict(pre_x)
    result1 = list(chain.from_iterable(target_scaler.inverse_transform([result.flatten()])))
    pre_data['power_forecast'] = result1[:len(pre_data)]
    pre_data['farm_id'] = 'J00083'
    pre_data['cdq'] = 1
    pre_data['dq'] = 1
    pre_data['zq'] = 1
    pre_data.rename(columns={arguments['col_time']: 'date_time'}, inplace=True)
    pre_data = pre_data[['date_time', 'power_forecast', 'farm_id', 'cdq', 'dq', 'zq']]
    pre_data['power_forecast'] = pre_data['power_forecast'].round(2)
    # clip forecasts to [0, opt.cap]
    pre_data.loc[pre_data['power_forecast'] > opt.cap, 'power_forecast'] = opt.cap
    pre_data.loc[pre_data['power_forecast'] < 0, 'power_forecast'] = 0
    insert_data_into_mongo(pre_data, arguments)