import os

import pandas as pd
import numpy as np
from plotly.subplots import make_subplots
import plotly.graph_objects as go
import matplotlib.pyplot as plt

from .analyst import Analyst
from .utils.directoryUtil import DirectoryUtil as dir
from confBusiness import ConfBusiness


class CpTrendAnalyst(Analyst):
    """
    Time-series analysis of the wind turbine power coefficient (Cp).
    """

    def typeAnalyst(self):
        return "cp_trend"

    def turbineAnalysis(self,
                        dataFrame,
                        outputAnalysisDir,
                        outputFilePath,
                        confData: ConfBusiness,
                        turbineName):
        self.cp_trend(dataFrame, outputFilePath,
                      confData.field_turbine_time, confData.field_wind_speed, confData.field_rotor_speed,
                      confData.field_power, confData.field_pitch_angle1,
                      confData.rotor_diameter, confData.density_air)

    def cp_trend(self, dataFrame, outputFilePath, time_col, wind_speed_col, rotor_speed_col, power_col, pitch_col, rotor_diameter, density_air):
        """
        Compute the power coefficient (Cp) per record and write daily statistics
        (mean wind speed, mean rotor speed, mean/max/min Cp) to outputFilePath.
        """
        dataFrame['time_day'] = dataFrame[time_col].dt.date

        # Assign the columns used in the calculation
        dataFrame['wind_speed'] = dataFrame[wind_speed_col].astype(float)
        dataFrame['rotor_speed'] = dataFrame[rotor_speed_col].astype(float)
        dataFrame['power'] = dataFrame[power_col].astype(float)

        # Coerce the configuration values to numeric
        rotor_diameter = pd.to_numeric(rotor_diameter, errors='coerce')
        air_density = pd.to_numeric(density_air, errors='coerce')

        # Calculate cp
        dataFrame['cp'] = dataFrame['power'] * 1000 / (
            0.5 * air_density * (np.pi * rotor_diameter ** 2 / 4) * dataFrame['wind_speed'] ** 3)
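        # Formula note: Cp = P / (0.5 * rho * A * v^3), where A = pi * D^2 / 4 is the
        # rotor swept area. The factor of 1000 assumes the power column is in kW and
        # converts it to W so the units match rho [kg/m^3], D [m] and v [m/s].
        # Worked example: P = 2000 kW, rho = 1.225 kg/m^3, D = 120 m, v = 10 m/s gives
        # A ≈ 11310 m^2, so Cp ≈ 2,000,000 / (0.5 * 1.225 * 11310 * 1000) ≈ 0.29.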

        # Group by day and aggregate
        grouped = dataFrame.groupby('time_day').agg({
            time_col: 'min',  # earliest timestamp of each day
            'wind_speed': 'mean',
            'rotor_speed': 'mean',
            'cp': ['mean', 'max', 'min']
        }).reset_index()

        # Flatten the MultiIndex columns produced by the aggregation
        grouped.columns = ['time_day', 'time_', 'wind_speed',
                           'rotor_speed', 'cp', 'cp_max', 'cp_min']

        # Sort by day and write the daily statistics to CSV
        grouped = grouped.sort_values('time_day')
        grouped.to_csv(outputFilePath, index=False)

    def turbinesAnalysis(self, dataFrameMerge, outputAnalysisDir, confData: ConfBusiness):
        self.create_cp_trend_plots(outputAnalysisDir, confData.farm_name)

    def create_cp_trend_plots(self, csvFileDirOfCp, farm_name, encoding='utf-8'):
        """
        Generates and saves error-bar plots for the Cp trend data stored in CSV files.

        Parameters:
        - csvFileDirOfCp: Path to the directory containing the input CSV files;
          the plots are saved to the same directory.
        - farm_name: Name of the wind farm (currently unused in this method).
        - encoding: str, encoding of the input CSV files. Defaults to 'utf-8'.
        """
        time_day = 'time_day'
        y_name = 'cp'
        y_min = 'cp_min'
        y_max = 'cp_max'
        split_way = '_cp_trend.csv'

        # Ensure the directory exists (it holds both the input CSVs and the output plots)
        if not os.path.exists(csvFileDirOfCp):
            os.makedirs(csvFileDirOfCp)

        # Walk the directory and process each CSV file
        for root, dir_names, file_names in dir.list_directory(csvFileDirOfCp):
            for file_name in file_names:
                if not file_name.endswith(".csv"):
                    continue

                # Read each CSV file
                data = pd.read_csv(os.path.join(root, file_name), encoding=encoding)
                data.loc[:, time_day] = pd.to_datetime(data.loc[:, time_day])

                # errorbar() expects asymmetric errors as offsets from y, so convert
                # the absolute cp_min/cp_max values into distances from the mean cp
                data[y_min] = data[y_name] - data[y_min]
                data[y_max] = data[y_max] - data[y_name]

                # The turbine name is the file name without the '_cp_trend.csv' suffix
                turbine_name = file_name.split(split_way)[0]

                # Generate the plot
                fig, ax = plt.subplots()
                ax.errorbar(x=data[time_day], y=data[y_name], yerr=[data[y_min], data[y_max]],
                            fmt='o', capsize=4, elinewidth=2, ecolor='lightgrey', mfc='dodgerblue')
                ax.set_xlabel('time')
                ax.set_ylabel('Cp')
                # ax.set_ylim(-0.2, 8)
                ax.set_title('turbine_name={}'.format(turbine_name))
                # Rotate the x-axis tick labels
                plt.xticks(rotation=45)

                # Save the plot next to the input CSVs
                plt.savefig(os.path.join(csvFileDirOfCp, "{}.png".format(turbine_name)),
                            bbox_inches='tight', dpi=120)
                plt.close()
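

# Illustrative usage sketch: exercises cp_trend() on a small synthetic DataFrame.
# The column names, rotor diameter (120 m) and air density (1.225 kg/m^3) below
# are assumptions chosen for demonstration; in production they come from
# ConfBusiness. Because cp_trend() never touches ``self``, the instance is created
# with object.__new__ purely to sidestep the Analyst constructor, whose signature
# is not shown here.
def _example_cp_trend(output_csv="demo_cp_trend.csv"):
    times = pd.date_range("2024-01-01", periods=192, freq="15min")
    rng = np.random.default_rng(0)
    demo = pd.DataFrame({
        "time": times,
        "wind_speed": rng.uniform(5, 12, size=len(times)),
        "rotor_speed": rng.uniform(8, 15, size=len(times)),
        "power": rng.uniform(500, 2000, size=len(times)),  # assumed to be in kW
        "pitch1": rng.uniform(0, 2, size=len(times)),
    })
    analyst = object.__new__(CpTrendAnalyst)
    analyst.cp_trend(demo, output_csv,
                     "time", "wind_speed", "rotor_speed", "power", "pitch1",
                     rotor_diameter=120, density_air=1.225)
    # The resulting CSV has one row per day with mean wind/rotor speed and
    # mean/max/min Cp; create_cp_trend_plots() can then be pointed at its directory.
    return pd.read_csv(output_csv)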