
Add short-term and ultra-short-term data

liudawei committed 1 year ago
Commit 4aaecbe868
2 files changed, 33 insertions and 7 deletions
  1. db-wind/getdata/inputData.py (+28 -2)
  2. db-wind/main.py (+5 -5)

db-wind/getdata/inputData.py (+28 -2)

@@ -92,7 +92,7 @@ def get_process_turbine(database):
     engine = create_database(database)
     for i in arg.turbineloc:
         print("导出风机{}的数据".format(i))
-        sql_turbine = "select C_TIME,C_WS,C_WD,C_ACTIVE_POWER from t_wind_turbine_status_data WHERE C_EQUIPMENT_NO=" + str(i) #+ " and C_WS>0 and C_ACTIVE_POWER>0"
+        sql_turbine = "select C_TIME,C_DATA1 as C_WS, C_DATA2 as C_WD, C_DATA3 as C_ACTIVE_POWER from t_wind_turbine_status_data WHERE C_EQUIPMENT_NO=" + str(i) + " and C_DATA1 != -99 AND C_DATA1 != 0" #+ " and C_WS>0 and C_ACTIVE_POWER>0"
         turbine = exec_sql(sql_turbine, engine)
 
 # Export all data directly
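
The rewritten query now reads wind speed, wind direction, and active power from the generic C_DATA1/C_DATA2/C_DATA3 columns and drops the -99 and 0 placeholder readings, but it still builds SQL by string concatenation. As a minimal sketch only, assuming the engine returned by create_database is a SQLAlchemy engine and that pandas is available (fetch_turbine is a hypothetical helper, not part of this commit), the same query can be issued with a bound parameter:

    # Sketch only: the turbine query from the hunk above, with the equipment
    # number bound as a parameter instead of concatenated into the string.
    import pandas as pd
    from sqlalchemy import text

    def fetch_turbine(engine, equipment_no):
        sql = text(
            "SELECT C_TIME, C_DATA1 AS C_WS, C_DATA2 AS C_WD, C_DATA3 AS C_ACTIVE_POWER "
            "FROM t_wind_turbine_status_data "
            "WHERE C_EQUIPMENT_NO = :no AND C_DATA1 != -99 AND C_DATA1 != 0"
        )
        # pandas forwards the params mapping to the driver, so no manual quoting
        return pd.read_sql(sql, engine, params={"no": equipment_no})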
@@ -139,6 +139,30 @@ def get_process_power(database):
     power = exec_sql(sql_power, engine)
     utils.savedata.saveData("power.csv", power)
 
+
+def get_process_dq(database):
+    """
+    Fetch the short-term forecast results
+    :param database:
+    :return:
+    """
+    engine = create_database(database)
+    sql_dq = "select C_ABLE_VALUE, C_FORECAST_TIME from t_forecast_power_short_term_his"
+    dq = exec_sql(sql_dq, engine)
+    utils.savedata.saveData("dq.csv", dq)
+
+def get_process_cdq(database):
+    """
+    Fetch the ultra-short-term forecast results
+    :param database:
+    :return:
+    """
+    engine = create_database(database)
+    sql_cdq = "select C_ABLE_VALUE, C_FORECAST_TIME from t_forecast_power_ultra_short_term_his"
+    cdq = exec_sql(sql_cdq, engine)
+    utils.savedata.saveData("cdq.csv", cdq)
+
+
 def get_turbine_info(database):
     """
     Fetch turbine information
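
The two functions added above, get_process_dq and get_process_cdq, differ only in the table they query and the CSV they write. Purely as a sketch of a possible follow-up (the helper name and its parameters are assumptions, not part of this commit), they could share one function that reuses the module's existing create_database, exec_sql, and utils.savedata.saveData helpers:

    # Hypothetical consolidation of the two new forecast exports.
    def get_process_forecast(database, table, filename):
        engine = create_database(database)
        forecast = exec_sql("select C_ABLE_VALUE, C_FORECAST_TIME from " + table, engine)
        utils.savedata.saveData(filename, forecast)

    # get_process_forecast(database, "t_forecast_power_short_term_his", "dq.csv")
    # get_process_forecast(database, "t_forecast_power_ultra_short_term_his", "cdq.csv")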
@@ -182,7 +206,7 @@ def indep_process():
     # utils.savedata.saveData("/tower/tower-{}-process.csv".format(2), tower2)
 
     # Align the timestamps of all tables
-    filenames = ["/NWP.csv","/power.csv", '/tower/tower-1-process.csv']
+    filenames = ["/NWP.csv","/power.csv", "/dq.csv", "/cdq.csv", '/tower/tower-1-process.csv']
     dataframes = []
     for i in arg.turbineloc:
         filenames.append("/turbine-15/turbine-{}.csv".format(i))
@@ -313,6 +337,8 @@ def data_process(database):
     get_turbine_info(database)
     get_process_tower(database)
     get_process_power(database)
+    get_process_dq(database)
+    get_process_cdq(database)
     indep_process()
     NWP_indep_process()
     # Data_split()
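
With dq.csv and cdq.csv added to the filenames list, the forecast series go through the same time-unification step as the NWP, power, and tower data. The merge logic of indep_process itself is not shown in this diff; the snippet below is only a rough illustration of that kind of alignment with pandas, with the column names taken from the SQL above and C_TIME assumed for power.csv:

    # Illustration only: align the short-term forecast export with the power
    # export on a shared timestamp column.
    import pandas as pd

    dq = pd.read_csv("dq.csv", parse_dates=["C_FORECAST_TIME"])
    power = pd.read_csv("power.csv", parse_dates=["C_TIME"])

    aligned = power.merge(
        dq.rename(columns={"C_FORECAST_TIME": "C_TIME"}),
        on="C_TIME",
        how="inner",  # keep only timestamps present in both tables
    )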

db-wind/main.py (+5 -5)

@@ -207,17 +207,17 @@ def Continuous_Data(input_dir,output_dir,M,TopN):
 if __name__ == "__main__":
     arg = Arg.Arg()
     inputData.data_process(arg.database)
-    input_dir = "../data/turbine-15"  # input folder path
-    output_dir = "../data/output_clean_csv_files"  # output folder path
+    input_dir = "../data_mts/turbine-15"  # input folder path
+    output_dir = "../data_mts/output_clean_csv_files"  # output folder path
     # Clean the -99 readings and runs of consecutive anomalous values in the nacelle wind speed; the final parameter means 5 consecutive unchanged values are treated as anomalous
     # This step creates an "output_clean_csv_files" folder containing every individual turbine's data; the stored nacelle wind speed has only had -99 cleaned. The parameter 50 is the number of turbines + 1, and the parameter 5 means 5 consecutive identical points are treated as anomalies and removed in full.
     process_csv_files(input_dir, output_dir, 50, 5)
-    output_dir_time_Merge = "../data/output_filtered_csv_files"
+    output_dir_time_Merge = "../data_mts/output_filtered_csv_files"
     # This step creates an "output_filtered_csv_files" folder; building on the previous step, it aligns the timestamps of all turbines and keeps only their common intersection.
     TimeMerge(output_dir,output_dir_time_Merge,50)
-    output_complete_data = "../data/complete_data"
+    output_complete_data = "../data_mts/complete_data"
     # This step creates a "complete_data" folder; building on the previous step, it fills gaps of up to 10 time points.
     MissingPointProcessing(output_dir_time_Merge,output_complete_data,50,10)
-    continuous_time = "../data/continuous_data"
+    continuous_time = "../data_mts/continuous_data"
     # This step creates a "continuous_data" folder; building on the previous step, it keeps the Top 10 longest continuous time segments of per-turbine data.
     Continuous_Data(output_complete_data, continuous_time, 50, 10)
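
The main.py hunk only repoints the pipeline from ../data to ../data_mts; the cleaning rules themselves are the ones described in the comments: drop -99 readings and wind speeds that stay unchanged for 5 consecutive points, align timestamps across turbines, fill gaps of up to 10 points, and keep the longest continuous segments. As an illustration of the stuck-value rule only (not the repository's process_csv_files implementation), such a filter could look like:

    # Sketch of the rule the comments describe: drop -99 wind speeds and any
    # value that repeats unchanged for run_len consecutive samples.
    import pandas as pd

    def drop_stuck_wind_speed(df, col="C_WS", run_len=5):
        run_id = (df[col] != df[col].shift()).cumsum()   # label runs of equal values
        run_size = df.groupby(run_id)[col].transform("size")
        bad = (df[col] == -99) | (run_size >= run_len)
        return df[~bad]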