#!/usr/bin/env python
# -*- coding:utf-8 -*-
# @FileName :data_nwp_ftp.py
# @Time :2024/12/26 08:38
# @Author :David
# @Company: shenyang JY
from datetime import timedelta
from ftplib import FTP
import threading, glob
import pandas as pd
from pytz import timezone
from flask import Flask, request, jsonify
import time, datetime, os, traceback, re
import logging, zipfile, tempfile, shutil, fnmatch
from common.database_dml import insert_data_into_mongo
from apscheduler.schedulers.background import BackgroundScheduler

app = Flask('data_nwp_ftp——service')


def update_thread():
    # Start the scheduler in a background thread so it does not block the Flask app.
    thread = threading.Thread(target=start_jobs)
    thread.start()


def start_jobs():
    # Schedule the FTP download four times a day (00/06/12/18, Asia/Shanghai time).
    scheduler = BackgroundScheduler()
    scheduler.configure({'timezone': timezone("Asia/Shanghai")})
    scheduler.add_job(func=download_zip_files_from_ftp, args=['00'], trigger="cron", hour=0, minute=0)
    scheduler.add_job(func=download_zip_files_from_ftp, args=['06'], trigger="cron", hour=6, minute=0)
    scheduler.add_job(func=download_zip_files_from_ftp, args=['12'], trigger="cron", hour=12, minute=0)
    scheduler.add_job(func=download_zip_files_from_ftp, args=['18'], trigger="cron", hour=18, minute=0)
    scheduler.start()


def match_date(date, filename):
    # Return True when the 8-digit date embedded in the filename is on or before the given date.
    given_date = datetime.datetime.strptime(date, '%Y%m%d')
    date_pattern = re.compile(r'(\d{8})')
    match = date_pattern.search(filename)
    if match:
        filename_str = match.group(0)
        filename_date = datetime.datetime.strptime(filename_str, '%Y%m%d')
        if filename_date <= given_date:
            return True
    return False
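
# Illustrative only: given the meteoforce_<YYYYMMDDHH>_*.zip naming used in
# download_zip_files_from_ftp() below (the '_forecast' suffix here is made up), one would expect:
#   match_date('20241226', 'meteoforce_2024122506_forecast.zip')  # True  (2024-12-25 <= 2024-12-26)
#   match_date('20241224', 'meteoforce_2024122506_forecast.zip')  # False (2024-12-25 >  2024-12-24)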


def delete_zip_files(date):
    xxl_path = ftp_params['xxl']['local_dir']
    # Walk every file in the local cache directory
    for root, dirs, files in os.walk(xxl_path):
        for filename in files:
            # Remove ZIP archives whose embedded date is on or before the given date
            if fnmatch.fnmatch(filename, '*.zip') and match_date(date, filename):
                file_path = os.path.join(root, filename)
                try:
                    os.remove(file_path)
                    print(f"Deleted file: {file_path}")
                except OSError as e:
                    print(f"Error deleting file {file_path}: {e.strerror}")
        for farmId in dirs:
            target_dir_path = os.path.join(root, farmId)
            for file_name in os.listdir(target_dir_path):
                csv_file_path = os.path.join(target_dir_path, file_name)
                # Remove per-farm CSV files whose embedded date is on or before the given date
                if fnmatch.fnmatch(file_name, '*.csv') and match_date(date, file_name):
                    try:
                        os.remove(csv_file_path)
                        print(f"Deleted file: {csv_file_path}")
                    except OSError as e:
                        print(f"Error deleting file {csv_file_path}: {e.strerror}")


def download_zip_files_from_ftp(hour):
    date = datetime.datetime.now().strftime("%Y%m%d")
    date_2 = (datetime.datetime.now() - timedelta(days=2)).strftime("%Y%m%d")
    host, moment = 'xxl', hour
    ftp_host = ftp_params[host]['host']
    ftp_user = ftp_params[host]['user']
    ftp_password = ftp_params[host]['password']
    remote_dir = ftp_params[host]['remote_dir']
    local_dir = ftp_params[host]['local_dir']
    zip_extension = f'meteoforce_{date}{str(moment)}_*.zip'
    zip_file_path = []
    # Connect to the FTP server
    with FTP(ftp_host) as ftp:
        ftp.login(user=ftp_user, passwd=ftp_password)
        # Change to the remote directory
        ftp.cwd(remote_dir)
        # List the files and directories in the remote directory
        files = ftp.nlst()
        # Download every ZIP file that matches today's date and forecast moment
        for file_name in files:
            if fnmatch.fnmatch(file_name, zip_extension):
                remote_file_path = os.path.join(remote_dir, file_name)
                local_file_path = os.path.join(local_dir, file_name)
                with open(local_file_path, 'wb') as local_file:
                    logging.info(f"Downloading {remote_file_path} to {local_file_path}")
                    ftp.retrbinary(f'RETR {remote_file_path}', local_file.write)
                logging.info(f"Downloaded {file_name}")
                zip_file_path.append(local_file_path)
    # Extract the downloaded ZIP files into the local cache directory
    for zip_file_p in zip_file_path:
        with zipfile.ZipFile(zip_file_p, 'r') as zip_ref:
            zip_ref.extractall(local_dir)
    # Delete all ZIP/CSV files dated the day before yesterday or earlier
    delete_zip_files(date_2)
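
# Note (inferred from select_file_to_mongo() below, not verified against the FTP server):
# each meteoforce_<YYYYMMDDHH>_*.zip is expected to extract into per-farm subdirectories of
# local_dir, each containing meteoforce_<farmId>_<YYYYMMDDHH>_weather.csv and
# meteoforce_<farmId>_<YYYYMMDDHH>_power.csv files that the /data_nwp_ftp endpoint later reads.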


def select_file_to_mongo(args):
    date, moment, farmId, isDq = args['date'], args['moment'], args['farmId'], args['isDq']
    date = datetime.datetime.strptime(args['date'], '%Y-%m-%d 00:00:00').strftime("%Y%m%d")
    csv_file_format = 'meteoforce_{}_{}_*.csv'.format(farmId, date + str(moment))
    csv_file_weather = csv_file_format.replace('*', 'weather')
    csv_file_power = csv_file_format.replace('*', 'power')
    csv_weather_path, csv_power_path = False, False
    # Locate the farm's directory and the matching weather/power CSV files
    for root, dirs, files in os.walk(ftp_params['xxl']['local_dir']):
        if farmId in dirs:
            target_dir_path = os.path.join(root, farmId)
            for file_name in os.listdir(target_dir_path):
                csv_file_path = os.path.join(target_dir_path, file_name)
                if fnmatch.fnmatch(file_name, csv_file_weather):
                    csv_weather_path = csv_file_path
                if fnmatch.fnmatch(file_name, csv_file_power):
                    csv_power_path = csv_file_path
            if csv_weather_path or csv_power_path:
                break
    # Read the CSV files with pandas
    weather = pd.read_csv(csv_weather_path) if csv_weather_path else None
    power = pd.read_csv(csv_power_path) if csv_power_path else None
    if isDq:
        if csv_weather_path and csv_power_path:
            power = power.drop(columns=['farm_id'])
            weather_power = pd.merge(weather, power, on='date_time')
            # Keep only the D0-D13 window
            df = select_dx_from_nwp(weather_power, args)
            insert_data_into_mongo(df, args)
        else:
            logging.info(f"CSV file {csv_file_power} or {csv_file_weather} not found in target directory {farmId}")
    else:
        if csv_weather_path:
            # Keep only the D0-D13 window
            df = select_dx_from_nwp(weather, args)
            insert_data_into_mongo(df, args)
        else:
            logging.info(f"CSV file {csv_file_weather} not found in target directory {farmId}")


def select_dx_from_nwp(df, args):
    # Slice the forecast to the requested Dx window (default D0-D13, relative to args['date']).
    date = datetime.datetime.strptime(args['date'], '%Y-%m-%d 00:00:00')
    date_begin = date + pd.Timedelta(days=int(args.get('day_begin', 'D0')[1:]))
    date_end = date + pd.Timedelta(days=int(args.get('day_end', 'D13')[1:]))
    df['date_time'] = df['date_time'].str.replace("_", " ")
    df['date_time'] = pd.to_datetime(df['date_time'])
    df.set_index('date_time', inplace=True)
    df = df.loc[date_begin.strftime('%Y-%m-%d'): date_end.strftime('%Y-%m-%d')].reset_index(drop=False)
    df.reset_index(drop=True, inplace=True)
    df['date_time'] = df['date_time'].dt.strftime('%Y-%m-%d %H:%M:%S')
    return df
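
# A minimal, commented-out sketch of what select_dx_from_nwp() does. The 'date_time' column name
# and its 'YYYY-mm-dd_HH:MM:SS' format come from the code above; 'wind_speed' and the values are
# made up for illustration:
#
#   demo = pd.DataFrame({
#       'date_time': ['2024-12-27_00:00:00', '2024-12-28_12:00:00', '2025-01-10_00:00:00'],
#       'wind_speed': [5.1, 6.3, 4.8],
#   })
#   demo_args = {'date': '2024-12-27 00:00:00', 'day_begin': 'D0', 'day_end': 'D13'}
#   select_dx_from_nwp(demo, demo_args)
#   # -> keeps only the rows falling between 2024-12-27 and 2025-01-09 (D0 through D13)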


# Example configuration
ftp_params = {
    'xxl': {
        'host': '39.107.246.215',
        'user': 'jiayue',
        'password': 'JYoguf2018',
        'remote_dir': './',
        'local_dir': '../cache/data/xxl'
    }
}


@app.route('/data_nwp_ftp', methods=['POST'])
def get_nwp_from_ftp():
    # Record the request start time
    start_time = time.time()
    result = {}
    success = 0
    args = {}
    print("Program starts execution!")
    try:
        args = request.values.to_dict()
        # 1. Parse parameters: date, data source, moment, D0-D13 window, farm ID, target MongoDB database/collection
        print('args', args)
        logger.info(args)
        # 2. The ZIP archives for these parameters are fetched from the FTP server by the scheduled jobs
        # 3. Pick the extracted CSV files and store the selected window in MongoDB
        select_file_to_mongo(args)
        success = 1
    except Exception as e:
        my_exception = traceback.format_exc()
        my_exception = my_exception.replace("\n", "\t")
        result['msg'] = my_exception
    end_time = time.time()
    result['success'] = success
    result['args'] = args
    result['start_time'] = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(start_time))
    result['end_time'] = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(end_time))
    print("Program execution ends!")
    return result
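
# A hedged usage sketch for this endpoint. The field names follow the commented example at the
# bottom of this file and the host/port match the serve() call in __main__; adjust both as needed:
#
#   import requests
#   requests.post('http://127.0.0.1:10102/data_nwp_ftp',
#                 data={'date': '2024-12-27 00:00:00', 'moment': '06', 'farmId': 'J00645',
#                       'mongodb_database': 'db2', 'mongodb_write_table': 'j00645-d1',
#                       'day_begin': 'D0', 'day_end': 'D13', 'isDq': True})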


if __name__ == "__main__":
    print("Program starts execution!")
    logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(name)s - %(levelname)s - %(message)s')
    logger = logging.getLogger("data_nwp_ftp")
    from waitress import serve
    update_thread()  # start the scheduled download jobs
    # Trigger an immediate download for the most recent forecast moment
    current_time = datetime.datetime.now()
    current_hour = current_time.hour
    if current_hour >= 18:
        current_hour = '18'
    elif current_hour >= 12:
        current_hour = '12'
    elif current_hour >= 6:
        current_hour = '06'
    else:
        current_hour = '00'
    threading.Thread(target=download_zip_files_from_ftp, args=(current_hour,)).start()
    serve(app, host="0.0.0.0", port=10102)
    print("server start!")
    # Example usage:
    # args = {"source": 'xxl', "date": '2024-12-27 00:00:00', 'moment': '06', 'farmId': 'J00645',
    #         'mongodb_database': 'db2', 'mongodb_write_table': 'j00645-d1', 'day_begin': 'D0',
    #         'day_end': 'D13', 'isDq': True}
    # download_zip_files_from_ftp(hour='06')
    # select_file_to_mongo(args)
    # delete_zip_files('20241225')