David 2 months ago
parent
commit
da9927e319

BIN
data_processing/data_operation/__pycache__/data_handler.cpython-37.pyc


+ 7 - 3
data_processing/data_operation/pre_prod_ftp.py

@@ -4,7 +4,11 @@
 # @Time      :2025/3/4 13:02
 # @Author    :David
 # @Company: shenyang JY
- 
- 
+
+"""
+Features to implement:
+1.
+"""
+
 if __name__ == "__main__":
-    run_code = 0
+    run_code = 0

+ 1 - 1
models_processing/model_koi/tf_bp_pre.py

@@ -89,7 +89,7 @@ if __name__ == "__main__":
     logger = logging.getLogger("model_training_bp log")
     from waitress import serve
 
-    serve(app, host="0.0.0.0", port=1010, threads=4)
+    serve(app, host="0.0.0.0", port=10110, threads=4)
     print("server start!")
 
     # ------------------------Test code------------------------

+ 1 - 1
models_processing/model_koi/tf_bp_train.py

@@ -80,7 +80,7 @@ if __name__ == "__main__":
     logger = logging.getLogger("model_training_bp log")
     from waitress import serve
 
-    serve(app, host="0.0.0.0", port=10103, threads=4)
+    serve(app, host="0.0.0.0", port=10111, threads=4)
     # print("server start!")
     # args_dict = {"mongodb_database": 'david_test', 'scaler_table': 'j00083_scaler', 'model_name': 'bp1.0.test',
     # 'model_table': 'j00083_model', 'mongodb_read_table': 'j00083', 'col_time': 'dateTime',

+ 1 - 1
models_processing/model_koi/tf_cnn_pre.py

@@ -90,7 +90,7 @@ if __name__ == "__main__":
     logger = logging.getLogger("model_training_bp log")
     from waitress import serve
 
-    serve(app, host="0.0.0.0", port=1010, threads=4)
+    serve(app, host="0.0.0.0", port=10112, threads=4)
     print("server start!")
 
     # ------------------------Test code------------------------

+ 1 - 1
models_processing/model_koi/tf_cnn_train.py

@@ -83,7 +83,7 @@ if __name__ == "__main__":
     logger = logging.getLogger("model_training_bp log")
     from waitress import serve
 
-    serve(app, host="0.0.0.0", port=10103, threads=4)
+    serve(app, host="0.0.0.0", port=10113, threads=4)
     # print("server start!")
     # args_dict = {"mongodb_database": 'david_test', 'scaler_table': 'j00083_scaler', 'model_name': 'bp1.0.test',
     # 'model_table': 'j00083_model', 'mongodb_read_table': 'j00083', 'col_time': 'dateTime',

+ 30 - 30
models_processing/model_koi/tf_lstm_pre.py

@@ -90,36 +90,36 @@ if __name__ == "__main__":
     logger = logging.getLogger("model_training_bp log")
     from waitress import serve
 
-    # serve(app, host="0.0.0.0", port=1010x, threads=4)
+    serve(app, host="0.0.0.0", port=10114, threads=4)
     print("server start!")
 
     # ------------------------Test code------------------------
-    args_dict = {"mongodb_database": 'david_test', 'scaler_table': 'j00083_scaler', 'model_name': 'bp1.0.test',
-                 'model_table': 'j00083_model', 'mongodb_read_table': 'j00083_test', 'col_time': 'date_time', 'mongodb_write_table': 'j00083_rs',
-                 'features': 'speed10,direction10,speed30,direction30,speed50,direction50,speed70,direction70,speed90,direction90,speed110,direction110,speed150,direction150,speed170,direction170'}
-    args_dict['features'] = args_dict['features'].split(',')
-    arguments.update(args_dict)
-    dh = DataHandler(logger, arguments)
-    ts = TSHandler(logger)
-    opt = argparse.Namespace(**arguments)
-
-    opt.Model['input_size'] = len(opt.features)
-    pre_data = get_data_from_mongo(args_dict)
-    feature_scaler, target_scaler = get_scaler_model_from_mongo(arguments)
-    pre_x = dh.pre_data_handler(pre_data, feature_scaler, opt)
-    ts.get_model(arguments)
-    result = ts.predict(pre_x)
-    result1 = list(chain.from_iterable(target_scaler.inverse_transform([result.flatten()])))
-    pre_data['power_forecast'] = result1[:len(pre_data)]
-    pre_data['farm_id'] = 'J00083'
-    pre_data['cdq'] = 1
-    pre_data['dq'] = 1
-    pre_data['zq'] = 1
-    pre_data.rename(columns={arguments['col_time']: 'date_time'}, inplace=True)
-    pre_data = pre_data[['date_time', 'power_forecast', 'farm_id', 'cdq', 'dq', 'zq']]
-
-    pre_data['power_forecast'] = pre_data['power_forecast'].round(2)
-    pre_data.loc[pre_data['power_forecast'] > opt.cap, 'power_forecast'] = opt.cap
-    pre_data.loc[pre_data['power_forecast'] < 0, 'power_forecast'] = 0
-
-    insert_data_into_mongo(pre_data, arguments)
+    # args_dict = {"mongodb_database": 'david_test', 'scaler_table': 'j00083_scaler', 'model_name': 'bp1.0.test',
+    #              'model_table': 'j00083_model', 'mongodb_read_table': 'j00083_test', 'col_time': 'date_time', 'mongodb_write_table': 'j00083_rs',
+    #              'features': 'speed10,direction10,speed30,direction30,speed50,direction50,speed70,direction70,speed90,direction90,speed110,direction110,speed150,direction150,speed170,direction170'}
+    # args_dict['features'] = args_dict['features'].split(',')
+    # arguments.update(args_dict)
+    # dh = DataHandler(logger, arguments)
+    # ts = TSHandler(logger)
+    # opt = argparse.Namespace(**arguments)
+    #
+    # opt.Model['input_size'] = len(opt.features)
+    # pre_data = get_data_from_mongo(args_dict)
+    # feature_scaler, target_scaler = get_scaler_model_from_mongo(arguments)
+    # pre_x = dh.pre_data_handler(pre_data, feature_scaler, opt)
+    # ts.get_model(arguments)
+    # result = ts.predict(pre_x)
+    # result1 = list(chain.from_iterable(target_scaler.inverse_transform([result.flatten()])))
+    # pre_data['power_forecast'] = result1[:len(pre_data)]
+    # pre_data['farm_id'] = 'J00083'
+    # pre_data['cdq'] = 1
+    # pre_data['dq'] = 1
+    # pre_data['zq'] = 1
+    # pre_data.rename(columns={arguments['col_time']: 'date_time'}, inplace=True)
+    # pre_data = pre_data[['date_time', 'power_forecast', 'farm_id', 'cdq', 'dq', 'zq']]
+    #
+    # pre_data['power_forecast'] = pre_data['power_forecast'].round(2)
+    # pre_data.loc[pre_data['power_forecast'] > opt.cap, 'power_forecast'] = opt.cap
+    # pre_data.loc[pre_data['power_forecast'] < 0, 'power_forecast'] = 0
+    #
+    # insert_data_into_mongo(pre_data, arguments)
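The hunk above switches tf_lstm_pre.py to serving mode on its own port and comments out the inline MongoDB test flow. A minimal sketch of the waitress serving pattern these scripts share, assuming a stand-in Flask app (each real module defines its own app and prediction routes):

import logging

from flask import Flask
from waitress import serve

app = Flask(__name__)  # stand-in; the real modules register prediction routes on their own app

if __name__ == "__main__":
    logging.basicConfig(level=logging.INFO)
    logger = logging.getLogger("model_training_bp log")
    logger.info("server start!")
    # Dedicated port per model_koi service; tf_lstm_pre.py uses 10114 in this commit.
    serve(app, host="0.0.0.0", port=10114, threads=4)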

+ 1 - 1
models_processing/model_koi/tf_lstm_train.py

@@ -85,7 +85,7 @@ if __name__ == "__main__":
     logger = logging.getLogger("model_training_bp log")
     from waitress import serve
 
-    serve(app, host="0.0.0.0", port=10103, threads=4)
+    serve(app, host="0.0.0.0", port=10115, threads=4)
     print("server start!")
     # args_dict = {"mongodb_database": 'realtimeDq', 'scaler_table': 'j00600_scaler', 'model_name': 'lstm1',
     # 'model_table': 'j00600_model', 'mongodb_read_table': 'j00600', 'col_time': 'dateTime',
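Taken together, this commit replaces the shared placeholder ports (1010, 10103, 1010x) with a distinct port per model_koi service. A hypothetical summary of the resulting assignments, e.g. for a local launch script; the mapping below is reconstructed from the hunks above and is not a file in the repository:

# Hypothetical port map for the model_koi services after this commit.
KOI_SERVICE_PORTS = {
    "tf_bp_pre": 10110,
    "tf_bp_train": 10111,
    "tf_cnn_pre": 10112,
    "tf_cnn_train": 10113,
    "tf_lstm_pre": 10114,
    "tf_lstm_train": 10115,
}

def port_for(service: str) -> int:
    """Return the port a model_koi service binds after this commit."""
    return KOI_SERVICE_PORTS[service]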