@@ -90,36 +90,36 @@ if __name__ == "__main__":
logger = logging.getLogger("model_training_bp log")
from waitress import serve
- # serve(app, host="0.0.0.0", port=1010x, threads=4)
+ serve(app, host="0.0.0.0", port=10114, threads=4)
print("server start!")
# ------------------------ test code ------------------------
- args_dict = {"mongodb_database": 'david_test', 'scaler_table': 'j00083_scaler', 'model_name': 'bp1.0.test',
|
|
|
- 'model_table': 'j00083_model', 'mongodb_read_table': 'j00083_test', 'col_time': 'date_time', 'mongodb_write_table': 'j00083_rs',
|
|
|
- 'features': 'speed10,direction10,speed30,direction30,speed50,direction50,speed70,direction70,speed90,direction90,speed110,direction110,speed150,direction150,speed170,direction170'}
|
|
|
- args_dict['features'] = args_dict['features'].split(',')
|
|
|
- arguments.update(args_dict)
|
|
|
- dh = DataHandler(logger, arguments)
|
|
|
- ts = TSHandler(logger)
|
|
|
- opt = argparse.Namespace(**arguments)
|
|
|
-
|
|
|
- opt.Model['input_size'] = len(opt.features)
|
|
|
- pre_data = get_data_from_mongo(args_dict)
|
|
|
- feature_scaler, target_scaler = get_scaler_model_from_mongo(arguments)
|
|
|
- pre_x = dh.pre_data_handler(pre_data, feature_scaler, opt)
|
|
|
- ts.get_model(arguments)
|
|
|
- result = ts.predict(pre_x)
|
|
|
- result1 = list(chain.from_iterable(target_scaler.inverse_transform([result.flatten()])))
|
|
|
- pre_data['power_forecast'] = result1[:len(pre_data)]
|
|
|
- pre_data['farm_id'] = 'J00083'
|
|
|
- pre_data['cdq'] = 1
|
|
|
- pre_data['dq'] = 1
|
|
|
- pre_data['zq'] = 1
|
|
|
- pre_data.rename(columns={arguments['col_time']: 'date_time'}, inplace=True)
|
|
|
- pre_data = pre_data[['date_time', 'power_forecast', 'farm_id', 'cdq', 'dq', 'zq']]
|
|
|
-
|
|
|
- pre_data['power_forecast'] = pre_data['power_forecast'].round(2)
|
|
|
- pre_data.loc[pre_data['power_forecast'] > opt.cap, 'power_forecast'] = opt.cap
|
|
|
- pre_data.loc[pre_data['power_forecast'] < 0, 'power_forecast'] = 0
|
|
|
-
|
|
|
- insert_data_into_mongo(pre_data, arguments)
|
|
|
+ # args_dict = {"mongodb_database": 'david_test', 'scaler_table': 'j00083_scaler', 'model_name': 'bp1.0.test',
|
|
|
+ # 'model_table': 'j00083_model', 'mongodb_read_table': 'j00083_test', 'col_time': 'date_time', 'mongodb_write_table': 'j00083_rs',
|
|
|
+ # 'features': 'speed10,direction10,speed30,direction30,speed50,direction50,speed70,direction70,speed90,direction90,speed110,direction110,speed150,direction150,speed170,direction170'}
|
|
|
+ # args_dict['features'] = args_dict['features'].split(',')
|
|
|
+ # arguments.update(args_dict)
|
|
|
+ # dh = DataHandler(logger, arguments)
|
|
|
+ # ts = TSHandler(logger)
|
|
|
+ # opt = argparse.Namespace(**arguments)
|
|
|
+ #
|
|
|
+ # opt.Model['input_size'] = len(opt.features)
|
|
|
+ # pre_data = get_data_from_mongo(args_dict)
|
|
|
+ # feature_scaler, target_scaler = get_scaler_model_from_mongo(arguments)
|
|
|
+ # pre_x = dh.pre_data_handler(pre_data, feature_scaler, opt)
|
|
|
+ # ts.get_model(arguments)
|
|
|
+ # result = ts.predict(pre_x)
|
|
|
+ # result1 = list(chain.from_iterable(target_scaler.inverse_transform([result.flatten()])))
|
|
|
+ # pre_data['power_forecast'] = result1[:len(pre_data)]
|
|
|
+ # pre_data['farm_id'] = 'J00083'
|
|
|
+ # pre_data['cdq'] = 1
|
|
|
+ # pre_data['dq'] = 1
|
|
|
+ # pre_data['zq'] = 1
|
|
|
+ # pre_data.rename(columns={arguments['col_time']: 'date_time'}, inplace=True)
|
|
|
+ # pre_data = pre_data[['date_time', 'power_forecast', 'farm_id', 'cdq', 'dq', 'zq']]
|
|
|
+ #
|
|
|
+ # pre_data['power_forecast'] = pre_data['power_forecast'].round(2)
|
|
|
+ # pre_data.loc[pre_data['power_forecast'] > opt.cap, 'power_forecast'] = opt.cap
|
|
|
+ # pre_data.loc[pre_data['power_forecast'] < 0, 'power_forecast'] = 0
|
|
|
+ #
|
|
|
+ # insert_data_into_mongo(pre_data, arguments)
|
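
For reference, below is a minimal sketch of how this entrypoint could read with the change applied; it is illustrative, not part of the diff. It assumes `app` is the WSGI/Flask application created earlier in this module, and it wraps the commented-out block above in a hypothetical `run_offline_test()` helper gated behind an illustrative `--offline-test` flag. Note that `waitress.serve()` blocks until shutdown, so the startup message is printed before serving (in the hunk it sits after `serve()`, where it is only reached once the server stops).

import logging
import sys

from waitress import serve

if __name__ == "__main__":
    logging.basicConfig(level=logging.INFO)
    logger = logging.getLogger("model_training_bp log")

    if "--offline-test" in sys.argv:
        # Hypothetical wrapper around the commented-out block above:
        # read features from Mongo, scale, predict with the BP model,
        # clip the forecast to [0, cap], round to 2 decimals, write back to Mongo.
        run_offline_test()
    else:
        # Announce before serving; serve() does not return until the process stops.
        print("server start!")
        serve(app, host="0.0.0.0", port=10114, threads=4)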