data_clean.py 42 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108
import os
import json
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
from typing import Tuple, List
import warnings
import sys
import frequency_filter as ff  # project-local helpers: tower_cal, process_fft, median_filter_correction
from datetime import datetime
from scipy.optimize import least_squares, differential_evolution
from scipy.signal import savgol_filter
# Silence FutureWarnings (legacy pandas idioms such as fillna(method=...) are used below).
warnings.filterwarnings("ignore", category=FutureWarning)
# SimHei font so the Chinese plot labels/column names render; keep ASCII minus sign.
plt.rcParams['font.sans-serif'] = ['SimHei']
plt.rcParams['axes.unicode_minus'] = False
  16. def result_main():
  17. python_interpreter_path = sys.executable
  18. project_directory = os.path.dirname(python_interpreter_path)
  19. data_folder = os.path.join(project_directory, 'data')
  20. if not os.path.exists(data_folder):
  21. os.makedirs(data_folder)
  22. csv_file_path = os.path.join(data_folder, 'history_data.csv')
  23. if not os.path.exists(csv_file_path):
  24. pd.DataFrame(columns=['时间', '场站', '风机编号', '采样频率',
  25. '叶片1角度偏差', '叶片2角度偏差', '叶片3角度偏差', '相对角度偏差',
  26. '叶片1净空值', '叶片2净空值', '叶片3净空值',
  27. '叶片1扭转', '叶片2扭转', '叶片3扭转', '平均扭转',
  28. '振动幅值', '振动主频']).to_csv(csv_file_path, index=False)
  29. return csv_file_path
  30. def delete_data(name):
  31. python_interpreter_path = sys.executable
  32. project_directory = os.path.dirname(python_interpreter_path)
  33. data_folder = os.path.join(project_directory, 'data')
  34. csv_file_path = os.path.join(data_folder, 'history_data.csv')
  35. df = pd.read_csv(csv_file_path)
  36. condition = ((df['时间'].astype(str).str.contains(name[0])) &
  37. (df['场站'].astype(str).str.contains(name[1])) &
  38. (df['风机编号'].astype(str).str.contains(name[2])))
  39. df = df[~condition]
  40. df.to_csv(csv_file_path, index=False)
  41. return csv_file_path
  42. def history_data(name):
  43. time_code = name[0]
  44. wind_name = name[1]
  45. turbine_code = name[2]
  46. python_interpreter_path = sys.executable
  47. project_directory = os.path.dirname(python_interpreter_path)
  48. data_folder = os.path.join(project_directory, 'data')
  49. time_code_cleaned = time_code.replace("-", "").replace(":", "").replace(" ", "")
  50. json_filename = f"{wind_name}_{turbine_code}_{time_code_cleaned}.json"
  51. json_file_path = os.path.join(data_folder, json_filename)
  52. if not os.path.exists(json_file_path):
  53. raise ValueError("文件不存在")
  54. with open(json_file_path, 'r') as f:
  55. data = json.load(f)
  56. return data
def data_analyse(path: List[str]):
    """Run the full blade/tower analysis pipeline for one measurement pair.

    Args:
        path: [locate_file, measure_file, angle_cone, axial_inclination,
            lift_up_limit] — two raw CSV paths (consumed by ``process_data``)
            followed by three values parsed with ``float()``.

    Side effects: appends one summary row to ``<interp_dir>/data/history_data.csv``
    and writes the full plot payload to a JSON file next to it.

    Returns:
        dict: the JSON payload (plot series plus the analysis table).

    NOTE(review): depends on ``find_param``, ``radius_cal``, ``blade_angle``,
    ``blade_angle_aero_dist`` and ``blade_dist_distribute_cal``, which are not
    visible in this chunk — their contracts are inferred from usage only.
    """
    locate_file = path[0]
    measure_file = path[1]
    noise_reduction = 0.000001   # relative-frequency threshold for noise samples
    min_difference = 1.5         # minimum distance jump that marks a cycle edge
    angle_cone = float(path[2])
    axial_inclination = float(path[3])
    lift_up_limit = float(path[4])
    group_length = [10000, 10000, 5000, 10000]  # binning lengths per stage (time units)
    return_list = []
    wind_name, turbine_code, time_code, sampling_fq, angle_nan, angle_cen = find_param(locate_file)
    wind_name_1, turbine_code_1, time_code_1, sampling_fq_1, angle_flange, angle_root = find_param(measure_file)
    # Sampling frequencies appear to be stored in kHz — converted to Hz here.
    sampling_fq_1 = sampling_fq_1 * 1000
    sampling_fq = sampling_fq * 1000
    data_nan, data_cen = process_data(locate_file)
    data_flange, data_root = process_data(measure_file)
    if lift_up_limit >= 0.1:
        # Replace over-limit flange distances with limit+3 plus a small random
        # spread so they no longer dominate the trace.
        discrete_values = np.arange(0, 0.101, 0.001)
        condition = data_flange['distance'] > lift_up_limit
        n = condition.sum()
        random_discrete = np.random.choice(discrete_values, size=n)
        data_flange.loc[condition, 'distance'] = lift_up_limit + 3 + random_discrete
    elif np.abs(lift_up_limit) < 0.1:
        pass  # effectively zero: no lift-up correction applied
    else:
        raise ValueError("lift_up_limit error.")
    # Denoise each trace and locate per-blade cycle boundaries.
    start_flange, end_flange, filtered_data_flange = cycle_calculate(data_flange, noise_reduction, min_difference)
    start_root, end_root, filtered_data_root = cycle_calculate(data_root, noise_reduction, min_difference)
    start_nan, end_nan, filtered_data_nan = cycle_calculate(data_nan, noise_reduction, min_difference)
    filtered_data_cen = tower_filter(data_cen, noise_reduction)
    dist_cen = np.mean(filtered_data_cen.iloc[:, 1].tolist())
    # Project the centre distance onto the rotor axis.
    filtered_data_cen.iloc[:, 1] = filtered_data_cen.iloc[:, 1] * np.cos(np.deg2rad(angle_cen + axial_inclination))
    tower_dist_flange = ff.tower_cal(filtered_data_flange, start_flange, end_flange, sampling_fq_1)
    tower_dist_root = ff.tower_cal(filtered_data_root, start_root, end_root, sampling_fq_1)
    tower_dist_nan = ff.tower_cal(filtered_data_nan, start_nan, end_nan, sampling_fq)
    lowpass_data, fft_x, fft_y, tower_freq, tower_max = ff.process_fft(filtered_data_cen, sampling_fq)
    # Normalize each cycle onto a common time base and bin to mean profiles.
    result_line_flange, result_scatter_flange, border_rows_flange, cycle_len_flange, min_flange \
        = data_normalize(filtered_data_flange, start_flange, end_flange, group_length[0])
    result_line_root, result_scatter_root, border_rows_root, cycle_len_root, min_root \
        = data_normalize(filtered_data_root, start_root, end_root, group_length[1])
    result_line_nan, result_scatter_nan, border_rows_nan, cycle_len_nan, min_nan \
        = data_normalize(filtered_data_nan, start_nan, end_nan, group_length[2])
    result_avg_flange, result_diff_flange = blade_shape(result_line_flange)
    result_avg_root, result_diff_root = blade_shape(result_line_root)
    border_rows_flange_new, angle_flange_new = coordinate_normalize(border_rows_flange, angle_flange)
    border_rows_nan_new, angle_nan_new = coordinate_normalize(border_rows_nan, angle_nan)
    flange_ava = pd.concat([df['distance'] for df in border_rows_flange_new]).mean(numeric_only=True).mean()
    root_ava = pd.concat([df['distance'] for df in border_rows_root]).mean(numeric_only=True).mean()
    # Radial offset between flange and root measurement circles, projected
    # through the axial inclination.
    d_radius = np.abs((flange_ava * np.cos(np.deg2rad(angle_flange_new))
                       - root_ava * np.cos(np.deg2rad(angle_root))) * np.sin(np.deg2rad(axial_inclination))
                      + (flange_ava * np.sin(np.deg2rad(angle_flange_new))
                         - root_ava * np.sin(np.deg2rad(angle_root))) * np.cos(np.deg2rad(axial_inclination)))
    # Law-of-cosines chord between the two measurement points.
    flange_root_dist = np.sqrt(flange_ava ** 2 + root_ava ** 2 - 2 * flange_ava * root_ava * np.cos(np.deg2rad(angle_flange_new - angle_root)))
    flange_r = radius_cal(border_rows_flange_new, angle_flange_new, dist_cen, angle_cen, axial_inclination, angle_cone)
    root_r = radius_cal(border_rows_root, angle_root, dist_cen, angle_cen, axial_inclination, angle_cone)
    nan_r = radius_cal(border_rows_nan_new, angle_nan_new, dist_cen, angle_cen, axial_inclination, angle_cone)
    blade_axis, blade_axis_tuple, result_line_flange = blade_axis_cal(filtered_data_flange, start_flange, end_flange,
                                                                      angle_flange + angle_cone + axial_inclination, group_length[3], flange_r)
    blade_axis_new, angle_flange_new = flange_coordinate_normalize(blade_axis, angle_flange)
    blade_axis_tuple_new, angle_flange_new = flange_coordinate_normalize(blade_axis_tuple, angle_flange)
    # Sanity-correct the root radius against two independent geometric estimates;
    # the second check may overwrite the first one's correction.
    if np.abs((root_r - flange_r) - d_radius) > 0.5:
        root_r = flange_r + d_radius
    if np.abs(flange_root_dist - d_radius) > 0.5:
        root_r = flange_r + flange_root_dist
    blade_axis_new["中心y"] = blade_axis_new["中心y"] - (flange_ava - root_ava)
    blade_axis_tuple_new["中心y"] = blade_axis_tuple_new["中心y"] - (flange_ava - root_ava)
    aero_dist_flange, v_speed_flange, cen_blade_flange = (
        blade_angle_aero_dist(border_rows_flange, flange_r, cycle_len_flange, tower_dist_flange, angle_flange_new))
    aero_dist_nan, v_speed_nan, cen_blade_nan = (
        blade_angle_aero_dist(border_rows_nan_new, nan_r, cycle_len_nan, tower_dist_nan, angle_nan_new))
    pitch_angle_root, v_speed_root, blade_axis_tuple_new = (
        blade_angle(border_rows_root, blade_axis_tuple_new, root_r, cycle_len_root, angle_root + angle_cone + axial_inclination))
    blade_axis_tuple_new["中心y"] = blade_axis_tuple_new["中心y"]*np.cos(np.deg2rad(angle_root + angle_cone + axial_inclination))
    cen_blade_nan_array = np.array(cen_blade_nan)
    min_nan_array = np.array(min_nan)
    abs_diff_nan = np.abs(cen_blade_nan_array - min_nan_array)
    blade_dist_nan = abs_diff_nan * np.cos(np.deg2rad(angle_nan_new))
    blade_dist_nan.tolist()  # NOTE(review): result discarded — this line is a no-op
    dist_distribute_nan = blade_dist_distribute_cal(filtered_data_nan, start_nan, end_nan,
                                                    tower_dist_nan, angle_nan_new + angle_cone + axial_inclination, blade_dist_nan)
    dist_distribute = [df.round(5) for df in dist_distribute_nan]
    min_values = []
    min_keys = []
    max_values = []
    max_keys = []
    mean_values = []
    # Per-blade min/max of the clearance distribution (column 1) and the
    # x-positions (column 0) where they occur.
    for df in dist_distribute:
        second_col_min = df[df.columns[1]].min()
        second_col_max = df[df.columns[1]].max()
        min_row = df[df[df.columns[1]] == second_col_min]
        max_row = df[df[df.columns[1]] == second_col_max]
        min_values.append(round(second_col_min, 2))
        min_keys.append(round(min_row.iloc[0][df.columns[0]], 2))
        max_values.append(round(second_col_max, 2))
        max_keys.append(round(max_row.iloc[0][df.columns[0]], 2))
    for i in range(3):
        mean_values.append(round((max_values[i] + min_values[i]) / 2, 2))
    # Convert each root profile: normalized time -> arc length (via v_speed_root),
    # shifted to start at 0, and distance projected onto the rotor plane.
    for i in range(3):
        df = result_line_root[i]
        first_column = df.iloc[:, 0]
        sec_column = df.iloc[:, 1]
        df.iloc[:, 0] = first_column * v_speed_root
        min_time = df.iloc[:, 0].min()
        df.iloc[:, 0] -= min_time
        blade_axis_tuple_new.iloc[i, 1] -= min_time  # keep blade centre in the shifted frame
        df.iloc[:, 1] = sec_column * np.cos(np.deg2rad(angle_root + angle_cone + axial_inclination))
    avg_flange = result_avg_flange.iloc[:, 0]
    result_avg_flange.iloc[:, 0] = avg_flange * v_speed_flange
    avg_root = result_avg_root.iloc[:, 0]
    result_avg_root.iloc[:, 0] = avg_root * v_speed_root
    # Flange pitch is fixed at zero, so the twist is just |root pitch| per blade.
    pitch_angle_flange = [0, 0, 0]
    twist_1 = round(np.abs(pitch_angle_root[0] - pitch_angle_flange[0]), 2)
    twist_2 = round(np.abs(pitch_angle_root[1] - pitch_angle_flange[1]), 2)
    twist_3 = round(np.abs(pitch_angle_root[2] - pitch_angle_flange[2]), 2)
    twist_avg = round((twist_1 + twist_2 + twist_3) / 3, 2)
    sampling_num = int(0.015 * sampling_fq_1)  # decimation step for plot payloads
    # Scale the raw time counter; 1/5e6 suggests counter ticks -> seconds — TODO confirm units.
    data_flange.iloc[:, 0] = data_flange.iloc[:, 0] / 5000000
    data_root.iloc[:, 0] = data_root.iloc[:, 0] / 5000000
    lowpass_data.iloc[:, 0] = lowpass_data.iloc[:, 0] / 5000000
    rotated_root = [pd.DataFrame() for _ in range(3)]
    # De-pitch each root profile by rotating it about its blade centre.
    for i in range(3):
        angle_rad = np.deg2rad(-pitch_angle_root[i])
        rotation_matrix = np.array([
            [np.cos(angle_rad), -np.sin(angle_rad)],
            [np.sin(angle_rad), np.cos(angle_rad)]
        ])
        center_x = blade_axis_tuple_new.iloc[i, 1]
        center_y = blade_axis_tuple_new.iloc[i, 2]
        rotated_points = result_line_root[i].copy()
        for idx, row in result_line_root[i].iterrows():
            x = row.iloc[0]
            y = row.iloc[1]
            translated_x = x - center_x
            translated_y = y - center_y
            rotated = rotation_matrix @ np.array([translated_x, translated_y])
            final_x = rotated[0] + center_x
            final_y = rotated[1] + center_y
            rotated_points.iloc[idx, 0] = final_x
            rotated_points.iloc[idx, 1] = final_y
        rotated_root[i % 3] = pd.concat([rotated_root[i % 3], rotated_points])
    # Summary row: order must match the history CSV column list below.
    return_list.append(str(time_code))
    return_list.append(str(wind_name))
    return_list.append(str(turbine_code))
    return_list.append(sampling_fq_1)
    return_list.append(pitch_angle_root[0])
    return_list.append(pitch_angle_root[1])
    return_list.append(pitch_angle_root[2])
    return_list.append(pitch_angle_root[3])  # assumes 4 entries; [3] is the relative deviation
    return_list.append(mean_values[0])
    return_list.append(mean_values[1])
    return_list.append(mean_values[2])
    return_list.append(twist_1)
    return_list.append(twist_2)
    return_list.append(twist_3)
    return_list.append(twist_avg)
    return_list.append(tower_max)
    return_list.append(tower_freq)
    df_new_row = pd.DataFrame([return_list],
                              columns=['时间', '场站', '风机编号', '采样频率',
                                       '叶片1角度偏差', '叶片2角度偏差', '叶片3角度偏差', '相对角度偏差',
                                       '叶片1净空值', '叶片2净空值', '叶片3净空值',
                                       '叶片1扭转', '叶片2扭转', '叶片3扭转', '平均扭转',
                                       '振动幅值', '振动主频'])
    # Plot payload; every series is decimated by sampling_num where noted.
    json_output = {
        'original_plot': {
            'blade_tip': {
                'xdata': data_flange.iloc[:, 0].tolist()[::sampling_num],
                'ydata': data_flange.iloc[:, 1].tolist()[::sampling_num]
            },
            'blade_root': {
                'xdata': data_root.iloc[:, 0].tolist()[::sampling_num],
                'ydata': data_root.iloc[:, 1].tolist()[::sampling_num]
            }
        },
        'fft_plot': {
            'lowpass': {
                'xdata': lowpass_data['time'].tolist()[::sampling_num],
                'ydata': lowpass_data['distance_filtered'].tolist()[::sampling_num],
                'xmax': max(lowpass_data['time'].tolist()),
                'xmin': min(lowpass_data['time'].tolist()),
                'ymax': max(lowpass_data['distance_filtered'].tolist()) + 0.02,
                'ymin': min(lowpass_data['distance_filtered'].tolist()) - 0.02
            },
            'fft': {
                'xdata': fft_x,
                'ydata': fft_y,
                'xmax': max(fft_x),
                'xmin': min(fft_x),
                'ymax': max(fft_y) + 0.02,
                'ymin': 0
            }
        },
        'blade_tip': {
            'first_blade': {
                'xdata': result_line_flange[0].iloc[:, 0].tolist(),
                'ydata': result_line_flange[0].iloc[:, 1].tolist()
            },
            'second_blade': {
                'xdata': result_line_flange[1].iloc[:, 0].tolist(),
                'ydata': result_line_flange[1].iloc[:, 1].tolist()
            },
            'third_blade': {
                'xdata': result_line_flange[2].iloc[:, 0].tolist(),
                'ydata': result_line_flange[2].iloc[:, 1].tolist()
            },
            'avg_blade': {
                'xdata': result_avg_flange.iloc[:, 0].tolist(),
                'ydata': result_avg_flange.iloc[:, 1].tolist()
            },
            'blade_center': {
                'xdata': blade_axis_tuple.iloc[:, 1].tolist(),
                'ydata': blade_axis_tuple.iloc[:, 2].tolist()
            }
        },
        'blade_root': {
            'first_blade': {
                'xdata': result_line_root[0].iloc[:, 0].tolist(),
                'ydata': result_line_root[0].iloc[:, 1].tolist()
            },
            'second_blade': {
                'xdata': result_line_root[1].iloc[:, 0].tolist(),
                'ydata': result_line_root[1].iloc[:, 1].tolist()
            },
            'third_blade': {
                'xdata': result_line_root[2].iloc[:, 0].tolist(),
                'ydata': result_line_root[2].iloc[:, 1].tolist()
            },
            'avg_blade': {
                'xdata': result_avg_root.iloc[:, 0].tolist(),
                'ydata': result_avg_root.iloc[:, 1].tolist()
            },
            'first_rotate_blade': {
                'xdata': rotated_root[0].iloc[:, 0].tolist(),
                'ydata': rotated_root[0].iloc[:, 1].tolist()
            },
            'second_rotate_blade': {
                'xdata': rotated_root[1].iloc[:, 0].tolist(),
                'ydata': rotated_root[1].iloc[:, 1].tolist()
            },
            'third_rotate_blade': {
                'xdata': rotated_root[2].iloc[:, 0].tolist(),
                'ydata': rotated_root[2].iloc[:, 1].tolist()
            },
            'blade_center': {
                'xdata': blade_axis_tuple_new.iloc[:, 1].tolist(),
                'ydata': blade_axis_tuple_new.iloc[:, 2].tolist()
            }
        },
        'dist_distribution': {
            'first_blade': {
                'xdata': dist_distribute[0].iloc[:, 0].tolist(),
                'ydata': dist_distribute[0].iloc[:, 1].tolist()
            },
            'second_blade': {
                'xdata': dist_distribute[1].iloc[:, 0].tolist(),
                'ydata': dist_distribute[1].iloc[:, 1].tolist()
            },
            'third_blade': {
                'xdata': dist_distribute[2].iloc[:, 0].tolist(),
                'ydata': dist_distribute[2].iloc[:, 1].tolist()
            }
        },
        'analyse_table': {
            'pitch_angle_diff': {
                'blade_1': pitch_angle_root[0],
                'blade_2': pitch_angle_root[1],
                'blade_3': pitch_angle_root[2],
                'blade_relate': pitch_angle_root[3]
            },
            'aero_dist': {
                'first_blade': {
                    'x_min': min_keys[0],
                    'y_min': min_values[0],
                    'x_max': max_keys[0],
                    'y_max': max_values[0],
                    'y_diff': np.abs(max_values[0] - min_values[0]),
                    'y_ava': mean_values[0]
                },
                'second_blade': {
                    'x_min': min_keys[1],
                    'y_min': min_values[1],
                    'x_max': max_keys[1],
                    'y_max': max_values[1],
                    'y_diff': np.abs(max_values[1] - min_values[1]),
                    'y_ava': mean_values[1]
                },
                'third_blade': {
                    'x_min': min_keys[2],
                    'y_min': min_values[2],
                    'x_max': max_keys[2],
                    'y_max': max_values[2],
                    'y_diff': np.abs(max_values[2] - min_values[2]),
                    'y_ava': mean_values[2]
                }
            },
            'blade_twist': {
                'blade_1': twist_1,
                'blade_2': twist_2,
                'blade_3': twist_3,
                'blade_avg': twist_avg
            },
            'tower_vibration': {
                'max_vibration': tower_max,
                'main_vibration_freq': tower_freq
            }
        }
    }
    # Persist: append the summary row (creating the CSV if needed) and dump the
    # full payload as JSON next to it.
    python_interpreter_path = sys.executable
    project_directory = os.path.dirname(python_interpreter_path)
    data_folder = os.path.join(project_directory, 'data')
    if not os.path.exists(data_folder):
        os.makedirs(data_folder)
    csv_file_path = os.path.join(data_folder, 'history_data.csv')
    if not os.path.exists(csv_file_path):
        pd.DataFrame(columns=['时间', '场站', '风机编号', '采样频率',
                              '叶片1角度偏差', '叶片2角度偏差', '叶片3角度偏差', '相对角度偏差',
                              '叶片1净空值', '叶片2净空值', '叶片3净空值',
                              '叶片1扭转', '叶片2扭转', '叶片3扭转', '平均扭转',
                              '振动幅值', '振动主频']).to_csv(csv_file_path, index=False)
    df_new_row.to_csv(csv_file_path, mode='a', header=False, index=False)
    time_code_cleaned = time_code.replace("-", "").replace(":", "").replace(" ", "")
    json_filename = f"{wind_name}_{turbine_code}_{time_code_cleaned}.json"
    json_file_path = os.path.join(data_folder, json_filename)
    with open(json_file_path, 'w') as json_file:
        json.dump(json_output, json_file, indent=4)
    return json_output
  383. def process_data(file_path):
  384. data = pd.read_csv(file_path, usecols=[1, 3, 4, 8, 9], header=None, engine='c')
  385. data = data.head(int(len(data) * 0.95))
  386. max_value = data.iloc[:, 0].max()
  387. max_index = data.iloc[:, 0].idxmax()
  388. min_index = data.iloc[:, 0].idxmin()
  389. if min_index == max_index + 1:
  390. data.iloc[min_index:, 0] += max_value
  391. last_time = data.iloc[-1, 0]
  392. first_time = data.iloc[0, 0]
  393. data = data[data.iloc[:, 0] >= first_time]
  394. data = data[data.iloc[:, 0] <= last_time]
  395. data.reset_index(drop=True, inplace=True)
  396. min_time = data.iloc[:, 0].min()
  397. data.iloc[:, 0] -= min_time
  398. data_1 = data.iloc[:, [0, 1, 2]]
  399. data_2 = data.iloc[:, [0, 3, 4]]
  400. data_1.columns = ['time', 'distance', 'grey']
  401. data_2.columns = ['time', 'distance', 'grey']
  402. return data_1, data_2
  403. def tower_filter(data_group: pd.DataFrame, noise_threshold: float):
  404. distance_counts = data_group['distance'].value_counts(normalize=True)
  405. noise_distance_threshold = distance_counts[distance_counts < noise_threshold].index
  406. noise_indices = data_group[data_group['distance'].isin(noise_distance_threshold)].index
  407. data_group.loc[noise_indices, 'distance'] = np.nan
  408. top_5_distances = distance_counts.head(5).index
  409. mean_values = data_group[data_group['distance'].isin(top_5_distances)]['distance'].mean()
  410. data_group.loc[(data_group['distance'] < mean_values * 0.9) | (
  411. data_group['distance'] > mean_values * 1.1), 'distance'] = np.nan
  412. data_group['distance'] = data_group['distance'].fillna(method='ffill')
  413. filtered_data = data_group
  414. return filtered_data
def cycle_calculate(data_group: pd.DataFrame, noise_threshold: float, min_distance: float):
    """Denoise a distance trace in place and locate blade-passage cycle edges.

    Args:
        data_group: frame with 'time', 'distance', 'grey' columns (mutated in place).
        noise_threshold: relative frequency below which a distance value is noise.
        min_distance: minimum jump in 'distance' that counts as a cycle edge.

    Returns:
        (start_points, end_points, filtered_data): one-row-per-edge frames and
        the denoised trace.

    NOTE(review): index arithmetic below mixes .loc (labels) and .iloc
    (positions) — it assumes labels equal positions, which holds after
    process_data()'s reset_index.
    """
    distance_counts = data_group['distance'].value_counts(normalize=True)
    # Rare distance values are treated as sensor noise.
    noise_distance_threshold = distance_counts[distance_counts < noise_threshold].index
    noise_indices = data_group[data_group['distance'].isin(noise_distance_threshold)].index
    data_group.loc[noise_indices, 'distance'] = np.nan
    if distance_counts.get(0, 0) >= 0.1:
        # Many exact zeros (beam misses the target part of the cycle): keep
        # zeros, blank implausible positive values around the plateau mean.
        top_5_distances = distance_counts[distance_counts.index != 0].head(5).index
        mean_values = data_group[data_group['distance'].isin(top_5_distances)]['distance'].mean()
        data_group.loc[((data_group['distance'] > 0) & (data_group['distance'] < mean_values - 30)) |
                       (data_group['distance'] > mean_values * 1.1), 'distance'] = np.nan
    else:
        top_5_distances = distance_counts[distance_counts.index != 0].head(5).index
        mean_values = data_group[data_group['distance'].isin(top_5_distances)]['distance'].mean()
        data_group.loc[(data_group['distance'] < mean_values-30) | (
                data_group['distance'] > mean_values*1.1), 'distance'] = np.nan
    # NOTE(review): fillna(method='ffill') is deprecated in pandas 2.x (removed
    # in 3.0) — equivalent modern spelling is Series.ffill().
    data_group['distance'] = data_group['distance'].fillna(method='ffill')
    filtered_data = data_group
    filtered_data['distance_diff'] = filtered_data['distance'].diff()
    large_diff_indices = filtered_data[filtered_data['distance_diff'] > min_distance].index
    small_diff_indices = filtered_data[filtered_data['distance_diff'] < -min_distance].index
    filtered_data = filtered_data.drop(columns=['distance_diff'])
    start_points = pd.DataFrame()
    end_points = pd.DataFrame()
    # An upward jump ends a cycle if the preceding 500 samples all sat at least
    # min_distance below the new level.
    for idx in large_diff_indices:
        current_distance = filtered_data.loc[idx, 'distance']
        next_rows_large = filtered_data.loc[idx - 500: idx - 1]
        if next_rows_large['distance'].le(current_distance - min_distance).all():
            end_points = pd.concat([end_points, filtered_data.loc[[idx - 1]]])
    # A downward jump starts a cycle if the following 500 samples stay below
    # the pre-jump level by at least min_distance.
    for idx in small_diff_indices:
        current_distance = filtered_data.loc[idx - 1, 'distance']
        next_rows_small = filtered_data.iloc[idx: idx + 500]
        if next_rows_small['distance'].le(current_distance - min_distance).all():
            start_points = pd.concat([start_points, filtered_data.loc[[idx]]])
    # Zero-dominated traces have inverted edge polarity: swap roles.
    if 0 in distance_counts.nlargest(3).index:
        end_points, start_points = start_points, end_points
    # Trim so the sequence begins with a start and ends with an end (column 0
    # is 'time'). NOTE(review): the original paste's indentation was ambiguous
    # here; this nesting — trims applied unconditionally — is the reconstruction.
    if end_points.iloc[0, 0] < start_points.iloc[0, 0]:
        end_points = end_points.drop(end_points.index[0])
    if end_points.iloc[-1, 0] < start_points.iloc[-1, 0]:
        start_points = start_points.drop(start_points.index[-1])
    else:
        pass
    return start_points, end_points, filtered_data
def data_normalize(data_group: pd.DataFrame, start_points: pd.DataFrame, end_points: pd.DataFrame, group_len: int) \
        -> Tuple[List[pd.DataFrame], List[pd.DataFrame], List[pd.DataFrame], int, list]:
    """Split the trace into per-blade cycles, rescale every cycle to a common
    time base, and reduce each blade's cycles to a binned mean profile.

    Args:
        data_group: denoised trace with 'time', 'distance', 'grey' columns.
        start_points / end_points: cycle-edge rows from cycle_calculate().
        group_len: bin width (time units) for the profile averaging.

    Returns:
        (turbines_processed, turbines_scattered, border_rows, full_cycle, min_list):
        per-blade mean profiles, the retained raw points, the two boundary rows
        per blade, the full-rotation length, and each blade's minimum mean distance.
    """
    combined_df_sorted = pd.concat([start_points, end_points]).sort_values(by='time')
    # Drop a leading end / trailing start so edges pair up as (start, end).
    if combined_df_sorted.iloc[0].equals(end_points.iloc[0]):
        combined_df_sorted = combined_df_sorted.iloc[1:]
    if combined_df_sorted.iloc[-1].equals(start_points.iloc[-1]):
        combined_df_sorted = combined_df_sorted.iloc[:-1]
    combined_df_sorted.reset_index(drop=True, inplace=True)
    start_times = combined_df_sorted['time'].tolist()
    # Reference cycle length = first (start, end) span; full rotation = three blades.
    normalize_cycle = int(start_times[1] - start_times[0])
    full_cycle = int((start_times[2] - start_times[0]) * 3)
    turbines = [pd.DataFrame() for _ in range(3)]
    # Pairs (start, end) alternate; consecutive cycles cycle through the 3 blades.
    # NOTE(review): assumes an even number of edge times — odd would IndexError.
    for i in range(0, len(start_times), 2):
        start_time = start_times[i]
        end_time = start_times[i + 1]
        segment = data_group[(data_group['time'] > start_time) & (data_group['time'] <= end_time)]
        if not segment.empty:
            # Stretch/compress this cycle onto the reference cycle length.
            ratio = (end_time - start_time) / normalize_cycle
            segment.loc[:, 'time'] = (segment['time'] - start_time) / ratio
            turbines[int(i / 2) % 3] = pd.concat([turbines[int(i / 2) % 3], segment])
    turbines_processed = []
    turbines_scattered = []
    min_list = []
    sd_time = [-1, -1]  # common [min_time, max_time] window across blades
    time_list = list(range(0, normalize_cycle, group_len))
    for turbine in turbines:
        turbine_sorted = turbine.sort_values(by='time').reset_index(drop=True)
        # Grey-level gate computed from the middle 80% of the cycle.
        grey_start_index = int(len(turbine_sorted) * 0.1)
        grey_end_index = int(len(turbine_sorted) * 0.9)
        subset_grey = turbine_sorted[grey_start_index:grey_end_index]
        turbine_sorted = ff.median_filter_correction(turbine_sorted, 2, 10)
        mean_grey = subset_grey['grey'].mean() * 0.8
        n = len(turbine_sorted)
        n_10 = int(0.1 * n)
        # Keep edge samples (first/last 10%) only when their grey level is strong.
        is_extreme = (turbine_sorted.index < n_10) | (turbine_sorted.index >= len(turbine_sorted) - n_10)
        meets_condition = turbine_sorted['grey'] > mean_grey
        turbine_sorted = turbine_sorted[~is_extreme | (is_extreme & meets_condition)]
        first_time = turbine_sorted['time'].iloc[0]
        bins = list(range(int(first_time), int(turbine_sorted['time'].max()), group_len))
        grouped = turbine_sorted.groupby(pd.cut(turbine_sorted['time'], bins=bins, right=False))
        processed_df = pd.DataFrame()
        scattered_df = pd.DataFrame()
        mean_points = []
        diff_points = []
        # First pass: per-bin trimmed (5%-95%) mean distance.
        for _, group in grouped:
            quantile_5 = group['distance'].quantile(0.05)
            quantile_95 = group['distance'].quantile(0.95)
            filtered_group = group[(group['distance'] > quantile_5) & (group['distance'] < quantile_95)]
            mean_point = filtered_group['distance'].mean()
            mean_points.append(mean_point)
        for i in range(len(mean_points) - 1):
            diff = abs(mean_points[i + 1] - mean_points[i])
            diff_points.append(diff)
        # Bins whose step exceeds the trimmed max step are unstable edges;
        # keep only the [first_index, last_index) stable span.
        start_index = int(len(diff_points) * 0.05)
        end_index = int(len(diff_points) * 0.95)
        subset1 = diff_points[start_index:end_index]
        sdr_diff = np.max(subset1) * 1.1
        min_list.append(min(mean_points))
        first_index = np.where(diff_points < sdr_diff)[0][0]
        last_index = np.where(diff_points < sdr_diff)[0][-1]
        # Second pass: emit binned mean rows plus the retained raw points.
        # NOTE(review): loop variable 'bin' shadows the builtin (kept as-is).
        for index, (bin, group) in enumerate(grouped):
            quantile_5 = group['distance'].quantile(0.05)
            quantile_95 = group['distance'].quantile(0.95)
            filtered_group = group[(group['distance'] > quantile_5) & (group['distance'] < quantile_95)]
            if first_index <= index < last_index:
                mid_point = filtered_group.mean()
                mid_point_df = pd.DataFrame([mid_point])
                mid_point_df.iloc[0, 0] = time_list[index]  # snap time to the bin start
                processed_df = pd.concat([processed_df, mid_point_df], ignore_index=True)
                scattered_df = pd.concat([scattered_df, filtered_group], ignore_index=True)
            else:
                pass
        min_time = processed_df['time'].min()
        max_time = processed_df['time'].max()
        # Shrink the shared window to the intersection across blades.
        # NOTE(review): elif chain means at most one bound updates per blade.
        if sd_time == [-1, -1]:
            sd_time = [min_time, max_time]
        elif sd_time[0] < min_time:
            sd_time[0] = min_time
        elif sd_time[1] > max_time:
            sd_time[1] = max_time
        turbines_processed.append(processed_df)
        turbines_scattered.append(scattered_df)
    border_rows = []
    # Clamp each blade's profile to the shared window and record the two
    # boundary rows (used later for radius/angle geometry).
    for i, turbine in enumerate(turbines_processed):
        closest_index_0 = (turbine['time'] - sd_time[0]).abs().idxmin()
        turbine.at[closest_index_0, 'time'] = sd_time[0]
        sd_time_row_0 = turbine.loc[closest_index_0]
        closest_index_1 = (turbine['time'] - sd_time[1]).abs().idxmin()
        turbine.at[closest_index_1, 'time'] = sd_time[1]
        sd_time_row_1 = turbine.loc[closest_index_1]
        turbines_processed[i] = turbine.iloc[closest_index_0:closest_index_1 + 1].reset_index(drop=True)
        sd_time_rows_turbine = pd.concat([pd.DataFrame([sd_time_row_0]), pd.DataFrame([sd_time_row_1])]
                                         , ignore_index=True)
        border_rows.append(sd_time_rows_turbine)
    return turbines_processed, turbines_scattered, border_rows, full_cycle, min_list
  552. def blade_shape(turbines_processed: List[pd.DataFrame]):
  553. row_counts = [df.shape[0] for df in turbines_processed]
  554. num_rows = min(row_counts)
  555. turbine_avg = pd.DataFrame(index=range(num_rows), columns=['time', 'distance'])
  556. turbine_diff = [pd.DataFrame(index=range(num_rows), columns=['time', 'distance']) for _ in turbines_processed]
  557. for i in range(num_rows):
  558. distances = [df.loc[i, 'distance'] for df in turbines_processed]
  559. avg_distance = sum(distances) / len(distances)
  560. time_value = turbines_processed[0].loc[i, 'time']
  561. turbine_avg.loc[i, 'time'] = time_value
  562. turbine_avg.loc[i, 'distance'] = avg_distance
  563. for j in range(len(distances)):
  564. distances[j] = distances[j] - avg_distance
  565. turbine_diff[j].loc[i, 'time'] = time_value
  566. turbine_diff[j].loc[i, 'distance'] = distances[j]
  567. return turbine_avg, turbine_diff
  568. def coordinate_normalize(tip_border_rows: List[pd.DataFrame], tip_angle):
  569. tip_angle1 = np.deg2rad(tip_angle)
  570. tip_angle_list = []
  571. for turbine in tip_border_rows:
  572. tip_angle_cal0 = ((np.sin(tip_angle1) * turbine['distance'] - 0.07608) /
  573. (np.cos(tip_angle1) * turbine['distance']))
  574. tip_angle_cal = np.arctan(tip_angle_cal0)
  575. turbine['distance'] = (turbine['distance'] ** 2 + 0.0057881664 -
  576. 0.15216 * turbine['distance'] * np.sin(tip_angle1)) ** 0.5
  577. tip_angle_list.append(tip_angle_cal)
  578. tip_angle_new = float(np.mean(tip_angle_list))
  579. tip_angle_new1 = np.rad2deg(tip_angle_new)
  580. return tip_border_rows, tip_angle_new1
  581. def flange_coordinate_normalize(flange_cen_row: pd.DataFrame, flange_angle):
  582. flange_angle1 = np.deg2rad(flange_angle)
  583. center_y_mean = flange_cen_row['中心y'].mean()
  584. flange_angle_cal0 = ((np.sin(flange_angle1) * center_y_mean - 0.07608) /
  585. (np.cos(flange_angle1) * center_y_mean))
  586. flange_angle_cal = np.arctan(flange_angle_cal0)
  587. flange_cen_row['中心y'] = (flange_cen_row['中心y'] ** 2 + 0.0057881664 -
  588. 0.15216 * flange_cen_row['中心y'] * np.sin(flange_angle1)) ** 0.5
  589. flange_angle_new = float(flange_angle_cal)
  590. flange_angle_new1 = np.rad2deg(flange_angle_new)
  591. return flange_cen_row, flange_angle_new1
def blade_axis_cal(data_group: pd.DataFrame, start_points: pd.DataFrame, end_points: pd.DataFrame, horizon_angle: float,
                   group_len: int, radius_blade: float):
    """Fit a circle to each of the three blades' binned tip trajectories.

    Splits ``data_group`` into per-blade passes using alternating start/end
    timestamps, stretches every pass to a common cycle length, bins and
    averages the samples, then fits a circle (centre + radius) per blade via
    a differential-evolution global search refined by least squares.

    Args:
        data_group: samples with 'time' and 'distance' columns; the
            'distance' column is rescaled IN PLACE by cos(horizon_angle).
        start_points: pass-start marker rows (must include 'time').
        end_points: pass-end marker rows (must include 'time').
        horizon_angle: beam tilt in degrees, used to project distances.
        group_len: bin width, in raw time units, for the averaging step.
        radius_blade: nominal blade radius used to derive the tip speed.

    Returns:
        (result_df_mean, result_df, turbines_processed): averaged fit, the
        per-blade fits (columns '旋转半径', '中心x', '中心y', '圆半径',
        '几何RMSE') and the binned per-blade DataFrames.

    Raises:
        ValueError: if a pass segment is empty, or a blade's geometric
            RMSE is >= 0.1.
    """
    def fit_circle(df, v_fixed, top_k=5, prefilter=True):
        # Circle fit in (x = v_fixed * t, distance) space: DE global search,
        # then local least-squares refinement of the top_k DE candidates.
        def smooth_savgol(y, window_length=101, polyorder=3):
            # Savitzky-Golay smoothing; window clipped to an odd length no
            # larger than the data, skipped entirely below 3 samples.
            wl = min(window_length, len(y) if len(y) % 2 == 1 else len(y) - 1)
            if wl < 3:
                return y
            if wl % 2 == 0:
                wl -= 1
            return savgol_filter(y, wl, polyorder)
        t = np.asarray(df['time'])
        d_raw = np.asarray(df['distance'])
        d_smooth = smooth_savgol(d_raw, window_length=101, polyorder=3) if prefilter else d_raw
        x = v_fixed * t
        # Search box for (xc, yc, R); the margins are heuristic.
        bounds = [(min(x) - 5, max(x) + 5),
                  (min(d_smooth), max(d_smooth) + 10),
                  (0.5, 10)]
        def residuals_sq(params):
            # Algebraic circle residual (squared-distance form); a large
            # penalty vector rejects non-positive radii.
            xc, yc, R = params
            if R <= 0:
                return 1e6 * np.ones_like(t)
            return (x - xc) ** 2 + (d_smooth - yc) ** 2 - R ** 2
        def objective_mean_sq(params):
            res = residuals_sq(params)
            return np.mean(res ** 2)
        # polish=False: refinement is done manually below on several
        # candidates instead of only the single best DE member.
        result = differential_evolution(
            objective_mean_sq,
            bounds,
            strategy='rand2bin',
            mutation=(0.8, 1.2),
            recombination=0.8,
            popsize=30,
            maxiter=1000,
            polish=False,
            seed=42,
            workers=1
        )
        pop = result.population
        energies = result.population_energies
        # Refine the top_k lowest-energy members; keep the best by geometric
        # (radial) RMSE rather than by the algebraic objective.
        idx = np.argsort(energies)[:top_k]
        candidates = pop[idx]
        best_rmse = np.inf
        best_result = None
        for cand in candidates:
            res = least_squares(
                residuals_sq,
                x0=cand,
                bounds=([-np.inf, -np.inf, 1e-6],
                        [np.inf, np.inf, np.inf]),
                method='trf',
                loss='linear',
                max_nfev=50000,
                xtol=1e-12,
                ftol=1e-12,
                gtol=1e-12
            )
            xc_opt, yc_opt, R_opt = res.x
            Ri_all = np.sqrt((x - xc_opt) ** 2 + (d_smooth - yc_opt) ** 2)
            geo_rmse = np.sqrt(np.mean((Ri_all - R_opt) ** 2))
            if geo_rmse < best_rmse:
                best_rmse = geo_rmse
                best_result = [v_fixed, xc_opt, yc_opt, R_opt, geo_rmse]
        result_df = pd.DataFrame([best_result],
                                 columns=['旋转半径', '中心x', '中心y', '圆半径', '几何RMSE'])
        return result_df
    # Interleave start/end markers chronologically and trim a leading end
    # marker / trailing start marker so the list alternates start, end, ...
    combined_df_sorted = pd.concat([start_points, end_points]).sort_values(by='time')
    if combined_df_sorted.iloc[0].equals(end_points.iloc[0]):
        combined_df_sorted = combined_df_sorted.iloc[1:]
    if combined_df_sorted.iloc[-1].equals(start_points.iloc[-1]):
        combined_df_sorted = combined_df_sorted.iloc[:-1]
    combined_df_sorted.reset_index(drop=True, inplace=True)
    start_times = combined_df_sorted['time'].tolist()
    # Scale distances by cos(horizon_angle) — note this mutates the
    # caller's frame in place.
    data_group['distance'] = data_group['distance'] * np.cos(np.deg2rad(horizon_angle))
    normalize_cycle = start_times[1] - start_times[0]
    # Full rotation period; timestamps are presumably 5 MHz ticks (the
    # 5000000 / 10000000 constants below assume this) — TODO confirm.
    full_cycle = int((start_times[2] - start_times[0]) * 3)
    v_blade = 10000000 * np.pi * radius_blade / full_cycle
    angle_speed = (np.pi / full_cycle) * 5000000  # NOTE(review): unused in this function
    # Distribute every second segment round-robin over the three blades.
    turbines = [pd.DataFrame() for _ in range(3)]
    for i in range(0, len(start_times), 2):
        start_time = start_times[i]
        end_time = start_times[i + 1]
        segment = data_group[(data_group['time'] > start_time) & (data_group['time'] <= end_time)]
        if segment is None or segment.empty:
            raise ValueError("Segment is empty")
        segment = segment.copy()
        # Stretch each pass to the reference cycle length before stacking.
        ratio = (end_time - start_time) / normalize_cycle
        segment.loc[:, 'time'] = (segment['time'] - start_time) / ratio
        turbines[int(i / 2) % 3] = pd.concat([turbines[int(i / 2) % 3], segment])
    turbines_processed = []
    turbines_scattered = []
    result_df = pd.DataFrame()
    time_list = list(range(0, normalize_cycle, group_len))
    for turbine in turbines:
        turbine_sorted = turbine.sort_values(by='time').reset_index(drop=True)
        first_time = turbine_sorted['time'].iloc[0]
        # Fixed-width time bins; each bin collapses to its mean sample.
        bins = list(range(int(first_time), int(turbine_sorted['time'].max()), group_len))
        grouped = turbine_sorted.groupby(pd.cut(turbine_sorted['time'], bins=bins, right=False))
        process_df = pd.DataFrame()
        for index, (bin, group) in enumerate(grouped):  # NOTE(review): 'bin' shadows the builtin
            mid_point = group.mean()
            mid_point_df = pd.DataFrame([mid_point])
            # Re-label the bin with its nominal start time (assumes bins and
            # time_list stay aligned — TODO confirm for nonzero first_time).
            mid_point_df.iloc[0, 0] = time_list[index]
            process_df = pd.concat([process_df, mid_point_df], ignore_index=True)
        process_df['time'] = process_df['time'] / 5000000
        # Keep only the central 20-80 % time quantiles for the circle fit.
        lower_bound = process_df['time'].quantile(0.2)
        upper_bound = process_df['time'].quantile(0.8)
        processed_df = process_df[(process_df['time'] >= lower_bound) & (process_df['time'] <= upper_bound)]
        blade_cen_est = fit_circle(processed_df, v_blade)
        # Convert time to linear distance travelled by the tip.
        processed_df['time'] = processed_df['time'] * v_blade
        turbines_processed.append(processed_df)
        turbines_scattered.append(turbine)
        result_df = pd.concat([result_df, blade_cen_est], ignore_index=True)
        if blade_cen_est['几何RMSE'].iloc[0] >= 0.1:
            raise ValueError("叶片几何误差过大")
    result_df_mean = result_df.mean(numeric_only=True).to_frame().T
    # Undo the cos(horizon_angle) projection on the fitted centre heights.
    result_df_mean["中心y"] = result_df_mean["中心y"] / np.cos(np.deg2rad(horizon_angle))
    result_df["中心y"] = result_df["中心y"] / np.cos(np.deg2rad(horizon_angle))
    return result_df_mean, result_df, turbines_processed
  710. def radius_cal(border_rows, meas_angle, cen_dist, cen_angle, angle_main, angle_rotate):
  711. aero_dist = (pd.concat([df['distance'] for df in border_rows]).mean())
  712. radius = np.abs(aero_dist * np.sin(np.deg2rad(meas_angle - angle_main))
  713. - cen_dist * np.sin(np.deg2rad(cen_angle - angle_main)))
  714. return radius
  715. def blade_angle_aero_dist(border_rows: List[pd.DataFrame], radius: float, full_cycle: int,
  716. tower_dist: float, v_angle: float):
  717. v_speed = 2 * np.pi * radius / full_cycle
  718. aero_dist_list = []
  719. cen_blade = []
  720. for turbine in border_rows:
  721. mean_col2 = (turbine.iloc[1, 1] + turbine.iloc[0, 1]) / 2
  722. aero_dist = abs(mean_col2 - tower_dist) * np.cos(np.deg2rad(v_angle))
  723. aero_dist_list.append(aero_dist)
  724. cen_blade.append(mean_col2)
  725. aero_dist_list.append(np.mean(aero_dist_list))
  726. aero_dist_list = [round(num, 2) for num in aero_dist_list]
  727. return aero_dist_list, v_speed, cen_blade
  728. def blade_angle(border_rows: List[pd.DataFrame], cen_data: pd.DataFrame, radius: float, full_cycle: int,
  729. v_angle: float):
  730. v_speed = 2 * np.pi * radius / full_cycle
  731. values = []
  732. for df in border_rows:
  733. if df.shape[0] >= 2 and df.shape[1] >= 2:
  734. values.append(df.iloc[0, 1])
  735. values.append(df.iloc[1, 1])
  736. mean_value = sum(values) / len(values) if values else float('nan')
  737. for i in [0, 1, 2]:
  738. if np.abs(cen_data['中心y'].iloc[i] - mean_value) > 0.5:
  739. print('原本:' + str(cen_data['中心y'].iloc[i]) + '标准:' + str(mean_value))
  740. cen_data['中心y'].iloc[i] = mean_value
  741. print('y_change')
  742. if cen_data['中心x'].iloc[i] > 1.5:
  743. cen_data['中心x'].iloc[i] = 1.5
  744. print('x_change')
  745. if cen_data['中心x'].iloc[i] < 0.75:
  746. cen_data['中心x'].iloc[i] = 0.75
  747. print('x_change')
  748. print(cen_data['中心x'].iloc[i])
  749. pitch_angle_list = []
  750. for idx, turbine in enumerate(border_rows, start=1):
  751. diff_time = np.abs(cen_data['中心x'].iloc[idx - 1] - turbine.iloc[1, 0] * v_speed)
  752. diff_len = np.abs((cen_data['中心y'].iloc[idx - 1] - turbine.iloc[1, 1]) * np.cos(np.deg2rad(v_angle)))
  753. pitch_angle = np.degrees(np.arctan(diff_len / diff_time))
  754. pitch_angle_list.append(pitch_angle)
  755. pitch_mean = np.mean(pitch_angle_list)
  756. pitch_angle_list = [angle - pitch_mean for angle in pitch_angle_list]
  757. pitch_angle_list.append(max(pitch_angle_list) - min(pitch_angle_list))
  758. pitch_angle_list = [round(num, 2) for num in pitch_angle_list]
  759. return pitch_angle_list, v_speed, cen_data
  760. def find_param(path: str):
  761. path = path.replace('\\', '/')
  762. last_slash_index = path.rfind('/')
  763. result = path[last_slash_index + 1:]
  764. underscore_indices = []
  765. start = 0
  766. while True:
  767. index = result.find('_', start)
  768. if index == -1:
  769. break
  770. underscore_indices.append(index)
  771. start = index + 1
  772. wind_name = result[: underscore_indices[0]]
  773. turbine_code = result[underscore_indices[0] + 1: underscore_indices[1]]
  774. time_code = result[underscore_indices[1] + 1: underscore_indices[2]]
  775. sampling_fq = int(result[underscore_indices[2] + 1: underscore_indices[3]])
  776. tunnel_1 = float(result[underscore_indices[3] + 1: underscore_indices[4]])
  777. tunnel_2 = float(result[underscore_indices[4] + 1: -4])
  778. dt = datetime.strptime(time_code, "%Y%m%d%H%M%S")
  779. standard_time_str = dt.strftime("%Y-%m-%d %H:%M:%S")
  780. return wind_name, turbine_code, standard_time_str, sampling_fq, tunnel_1, tunnel_2
  781. def blade_dist_distribute_cal(data_group: pd.DataFrame, start_points: pd.DataFrame, end_points: pd.DataFrame,
  782. tower_dist: float, v_angle: float, blade_cen_dist: list):
  783. combined_df_sorted = pd.concat([start_points, end_points]).sort_values(by='time')
  784. if combined_df_sorted.iloc[0].equals(end_points.iloc[0]):
  785. combined_df_sorted = combined_df_sorted.iloc[1:]
  786. if combined_df_sorted.iloc[-1].equals(start_points.iloc[-1]):
  787. combined_df_sorted = combined_df_sorted.iloc[:-1]
  788. combined_df_sorted.reset_index(drop=True, inplace=True)
  789. start_times = combined_df_sorted['time'].tolist()
  790. normalize_cycle = start_times[1] - start_times[0]
  791. tower_clearance = [pd.DataFrame() for _ in range(3)]
  792. for i in range(0, len(start_times) - 2, 2):
  793. start_time = start_times[i]
  794. end_time = start_times[i + 1]
  795. segment = data_group[(data_group['time'] > start_time) & (data_group['time'] <= end_time)]
  796. min_distance = segment['distance'].min()
  797. clearance = np.abs(tower_dist - min_distance - blade_cen_dist[i % 3]) * np.cos(np.deg2rad(v_angle))
  798. r_speed = round(60 / ((start_times[i + 2] - start_times[i]) * 3 / 5000000), 2)
  799. ratio = (end_time - start_time) / normalize_cycle
  800. segment.loc[:, 'time'] = (segment['time'] - start_time) / ratio
  801. new_df = pd.DataFrame({
  802. 'r_speed': [r_speed],
  803. 'clearance': [clearance]
  804. })
  805. tower_clearance[i % 3] = pd.concat([tower_clearance[i % 3], new_df])
  806. tower_clearance = [df.sort_values(by='r_speed') for df in tower_clearance]
  807. return tower_clearance