@@ -25,44 +25,50 @@ app = Flask('tf_cnn_pre——service')

 with app.app_context():
     with open('../model_koi/bp.yaml', 'r', encoding='utf-8') as f:
-        arguments = yaml.safe_load(f)
+        args = yaml.safe_load(f)

-    dh = DataHandler(logger, arguments)
-    cnn = CNNHandler(logger)
+    dh = DataHandler(logger, args)
+    cnn = CNNHandler(logger, args)

+@app.before_request
+def update_config():
+    global opt
+    # ------------ Tidy up and merge the request parameters ------------
+    args_dict = request.values.to_dict()
+    args_dict['features'] = args_dict['features'].split(',')
+    args.update(args_dict)
+    opt = argparse.Namespace(**args)
+    dh.opt = opt
+    cnn.opt = opt
+    logger.info(args)

-@app.route('/nn_bp_predict', methods=['POST'])
+@app.route('/nn_cnn_predict', methods=['POST'])
 def model_prediction_bp():
     # Get the program start time
     start_time = time.time()
     result = {}
     success = 0
     print("Program starts execution!")
-    params_dict = request.values.to_dict()
-    args = arguments.deepcopy()
-    args.update(params_dict)
     try:
-        print('args', args)
-        logger.info(args)
         pre_data = get_data_from_mongo(args)
         feature_scaler, target_scaler = get_scaler_model_from_mongo(args)
-        scaled_pre_x = dh.pre_data_handler(pre_data, feature_scaler, args)
+        scaled_pre_x = dh.pre_data_handler(pre_data, feature_scaler)
         cnn.get_model(args)
         # result = bp.predict(scaled_pre_x, args)
-        result = list(chain.from_iterable(target_scaler.inverse_transform([cnn.predict(scaled_pre_x).flatten()])))
-        pre_data['power_forecast'] = result[:len(pre_data)]
+        res = list(chain.from_iterable(target_scaler.inverse_transform([cnn.predict(scaled_pre_x).flatten()])))
+        pre_data['power_forecast'] = res[:len(pre_data)]
         pre_data['farm_id'] = 'J00083'
         pre_data['cdq'] = 1
         pre_data['dq'] = 1
         pre_data['zq'] = 1
-        pre_data.rename(columns={arguments['col_time']: 'date_time'}, inplace=True)
+        pre_data.rename(columns={args['col_time']: 'date_time'}, inplace=True)
         pre_data = pre_data[['date_time', 'power_forecast', 'farm_id', 'cdq', 'dq', 'zq']]

         pre_data['power_forecast'] = pre_data['power_forecast'].round(2)
         pre_data.loc[pre_data['power_forecast'] > opt.cap, 'power_forecast'] = opt.cap
         pre_data.loc[pre_data['power_forecast'] < 0, 'power_forecast'] = 0

-        insert_data_into_mongo(pre_data, arguments)
+        insert_data_into_mongo(pre_data, args)
         success = 1
     except Exception as e:
         my_exception = traceback.format_exc()
@@ -84,36 +90,36 @@ if __name__ == "__main__":
     logger = logging.getLogger("model_training_bp log")
     from waitress import serve

-    # serve(app, host="0.0.0.0", port=1010x, threads=4)
+    serve(app, host="0.0.0.0", port=1010, threads=4)
     print("server start!")

     # ------------------------ Test code ------------------------
-    args_dict = {"mongodb_database": 'david_test', 'scaler_table': 'j00083_scaler', 'model_name': 'bp1.0.test',
-                 'model_table': 'j00083_model', 'mongodb_read_table': 'j00083_test', 'col_time': 'date_time', 'mongodb_write_table': 'j00083_rs',
-                 'features': 'speed10,direction10,speed30,direction30,speed50,direction50,speed70,direction70,speed90,direction90,speed110,direction110,speed150,direction150,speed170,direction170'}
-    args_dict['features'] = args_dict['features'].split(',')
-    arguments.update(args_dict)
-    dh = DataHandler(logger, arguments)
-    cnn = CNNHandler(logger)
-    opt = argparse.Namespace(**arguments)
-
-    opt.Model['input_size'] = len(opt.features)
-    pre_data = get_data_from_mongo(args_dict)
-    feature_scaler, target_scaler = get_scaler_model_from_mongo(arguments)
-    pre_x = dh.pre_data_handler(pre_data, feature_scaler, opt)
-    cnn.get_model(arguments)
-    result = cnn.predict(pre_x)
-    result1 = list(chain.from_iterable(target_scaler.inverse_transform([result.flatten()])))
-    pre_data['power_forecast'] = result1[:len(pre_data)]
-    pre_data['farm_id'] = 'J00083'
-    pre_data['cdq'] = 1
-    pre_data['dq'] = 1
-    pre_data['zq'] = 1
-    pre_data.rename(columns={arguments['col_time']: 'date_time'}, inplace=True)
-    pre_data = pre_data[['date_time', 'power_forecast', 'farm_id', 'cdq', 'dq', 'zq']]
-
-    pre_data['power_forecast'] = pre_data['power_forecast'].round(2)
-    pre_data.loc[pre_data['power_forecast'] > opt.cap, 'power_forecast'] = opt.cap
-    pre_data.loc[pre_data['power_forecast'] < 0, 'power_forecast'] = 0
-
-    insert_data_into_mongo(pre_data, arguments)
+    # args_dict = {"mongodb_database": 'david_test', 'scaler_table': 'j00083_scaler', 'model_name': 'bp1.0.test',
+    #              'model_table': 'j00083_model', 'mongodb_read_table': 'j00083_test', 'col_time': 'date_time', 'mongodb_write_table': 'j00083_rs',
+    #              'features': 'speed10,direction10,speed30,direction30,speed50,direction50,speed70,direction70,speed90,direction90,speed110,direction110,speed150,direction150,speed170,direction170'}
+    # args_dict['features'] = args_dict['features'].split(',')
+    # arguments.update(args_dict)
+    # dh = DataHandler(logger, arguments)
+    # cnn = CNNHandler(logger)
+    # opt = argparse.Namespace(**arguments)
+    #
+    # opt.Model['input_size'] = len(opt.features)
+    # pre_data = get_data_from_mongo(args_dict)
+    # feature_scaler, target_scaler = get_scaler_model_from_mongo(arguments)
+    # pre_x = dh.pre_data_handler(pre_data, feature_scaler, opt)
+    # cnn.get_model(arguments)
+    # result = cnn.predict(pre_x)
+    # result1 = list(chain.from_iterable(target_scaler.inverse_transform([result.flatten()])))
+    # pre_data['power_forecast'] = result1[:len(pre_data)]
+    # pre_data['farm_id'] = 'J00083'
+    # pre_data['cdq'] = 1
+    # pre_data['dq'] = 1
+    # pre_data['zq'] = 1
+    # pre_data.rename(columns={arguments['col_time']: 'date_time'}, inplace=True)
+    # pre_data = pre_data[['date_time', 'power_forecast', 'farm_id', 'cdq', 'dq', 'zq']]
+    #
+    # pre_data['power_forecast'] = pre_data['power_forecast'].round(2)
+    # pre_data.loc[pre_data['power_forecast'] > opt.cap, 'power_forecast'] = opt.cap
+    # pre_data.loc[pre_data['power_forecast'] < 0, 'power_forecast'] = 0
+    #
+    # insert_data_into_mongo(pre_data, arguments)