from flask import Flask, request
import time
import logging
import traceback
from itertools import chain
from common.database_dml import get_data_from_mongo, insert_data_into_mongo, get_h5_model_from_mongo, get_scaler_model_from_mongo
from common.processing_data_common import str_to_list

app = Flask('model_prediction_bp——service')
# Run model inference on the time-series data and return the prediction frame.
def model_prediction(df, args):
    # Drop rows flagged as power-limited before predicting.
    if 'is_limit' in df.columns:
        df = df[df['is_limit'] == False]
    features, time_steps, col_time, model_name, col_reserve, howlongago = (
        str_to_list(args['features']), int(args['time_steps']), args['col_time'],
        args['model_name'], str_to_list(args['col_reserve']), int(args['howlongago']))
    feature_scaler, target_scaler = get_scaler_model_from_mongo(args)
    # Sort by time and fill gaps forward, then backward.
    df = df.sort_values(by=col_time).ffill().bfill()
    scaled_features = feature_scaler.transform(df[features])
    # X_predict, _ = create_sequences(scaled_features, [], time_steps)
    # Pass the custom loss function when loading the model:
    # model = load_model(f'{farmId}_model.h5', custom_objects={'rmse': rmse})
    model = get_h5_model_from_mongo(args)
    # Predict, invert the target scaling, and flatten to a plain list.
    y_predict = list(chain.from_iterable(
        target_scaler.inverse_transform([model.predict(scaled_features).flatten()])))
    result = df[-len(y_predict):].copy()
    result['predict'] = y_predict
    result.loc[result['predict'] < 0, 'predict'] = 0
    result['model'] = model_name
    result['howlongago'] = howlongago
    # Keep only the reserved columns plus the prediction metadata, de-duplicated while preserving order.
    features_reserve = col_reserve + ['model', 'predict', 'howlongago']
    return result[list(dict.fromkeys(features_reserve))]
@app.route('/model_prediction_bp', methods=['POST'])
def model_prediction_bp():
    # Record the request start time.
    start_time = time.time()
    result = {}
    success = 0
    args = {}
    print("Program starts execution!")
    try:
        args = request.values.to_dict()
        print('args', args)
        logger.info(args)
        power_df = get_data_from_mongo(args)
        predict_df = model_prediction(power_df, args)
        insert_data_into_mongo(predict_df, args)
        success = 1
    except Exception:
        my_exception = traceback.format_exc()
        result['msg'] = my_exception.replace("\n", "\t")
    end_time = time.time()

    result['success'] = success
    result['args'] = args
    result['start_time'] = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(start_time))
    result['end_time'] = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(end_time))
    print("Program execution ends!")
    return result
if __name__ == "__main__":
    print("Program starts execution!")
    logging.basicConfig(level=logging.INFO,
                        format='%(asctime)s - %(name)s - %(levelname)s - %(message)s')
    logger = logging.getLogger("model_prediction_bp log")
    from waitress import serve
    print("server start!")
    # serve() blocks until the process is stopped, so announce startup before calling it.
    serve(app, host="0.0.0.0", port=10104)
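
# --- Illustrative client call (a sketch, not part of the service) ---
# The route reads its parameters from the POSTed form values. The keys below are
# the ones consumed by model_prediction(); the values are placeholders, and the
# exact string format expected for 'features' / 'col_reserve' depends on
# str_to_list(). The get_data_from_mongo / get_h5_model_from_mongo /
# get_scaler_model_from_mongo helpers also expect their own Mongo-related keys
# (defined in common.database_dml, not shown here).
#
#   import requests
#   resp = requests.post(
#       "http://127.0.0.1:10104/model_prediction_bp",
#       data={
#           'features': 'temperature,wind_speed',   # placeholder feature names
#           'time_steps': 24,
#           'col_time': 'date_time',
#           'model_name': 'example_model',
#           'col_reserve': 'date_time,power',
#           'howlongago': 1,
#           # ...plus the Mongo connection/collection keys required by the
#           # common.database_dml helpers.
#       },
#   )
#   print(resp.json())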