
Add fault analysis algorithm (to be completed)

wenjia Li 5 months ago
parent
commit
2bcbdc8266
1 file changed with 83 additions and 0 deletions

+ 83 - 0
dataAnalysisBusiness/algorithm/faultAnalyst.py

@@ -0,0 +1,83 @@
+import pandas as pd
+import os
+import plotly.graph_objects as go
+from algorithmContract.confBusiness import *
+from algorithmContract.contract import Contract
+from behavior.analystNotFilter import AnalystNotFilter
+from plotly.subplots import make_subplots
+
+class FaultAnalyst(AnalystNotFilter):
+    """
+    Wind turbine fault analysis.
+    """
+
+    def typeAnalyst(self):
+        return "fault"
+    def selectColumns(self):
+        return [Field_DeviceName,Field_FaultTime,Field_FaultDetail]
+
+
+    def turbinesAnalysis(self, outputAnalysisDir, conf: Contract, turbineCodes):
+        dictionary = self.processTurbineData(turbineCodes, conf, self.selectColumns())
+        dataFrameMerge = self.userDataFrame(dictionary, conf.dataContract.configAnalysis, self)
+
+        return self.get_result(dataFrameMerge, outputAnalysisDir, conf)
+
+    def get_result(self, dataFrame: pd.DataFrame, outputAnalysisDir: str, conf: Contract):
+        # --------------- Farm-level statistics: fault duration and count ---------------
+        # Count the occurrences of each fault type
+        fault_detail_count = dataFrame[Field_FaultDetail].value_counts().reset_index()
+        fault_detail_count.columns = [Field_FaultDetail, 'count']
+
+        # Sum the fault duration for each fault detail
+        fault_time_sum = dataFrame.groupby(Field_FaultDetail)[Field_FaultTime].sum().reset_index()
+        fault_time_sum.columns = [Field_FaultDetail, 'fault_time_sum']
+
+        # Merge the two summaries and sort by total fault duration
+        fault_summary = pd.merge(fault_detail_count, fault_time_sum, on=Field_FaultDetail, how='inner')
+        fault_summary_sorted = fault_summary.sort_values(by='fault_time_sum', ascending=False)
+
+        # ------------- Per-turbine fault statistics -------------
+        grouped = dataFrame.groupby(Field_DeviceName)
+        results = []
+
+        for name, group in grouped:
+            turbine_fault_summary = pd.DataFrame({
+                Field_DeviceName: [name],
+                'count': [len(group)],
+                'fault_time': [group[Field_FaultTime].sum()]
+            })
+            results.append(turbine_fault_summary)
+
+        # Concatenate the per-turbine fault summaries and sort by total fault duration
+        turbine_fault_summary = pd.concat(results, ignore_index=True)
+        turbine_fault_sorted = turbine_fault_summary.sort_values(by='fault_time', ascending=False)
+
+        # Save the results and build the return rows for the caller
+        results_row = []
+
+        filePathOfturbinefault = os.path.join(outputAnalysisDir, f"turbine_fault_result{CSVSuffix}")
+        turbine_fault_sorted.to_csv(filePathOfturbinefault, index=False)
+
+        results_row.append({
+            Field_Return_TypeAnalyst: self.typeAnalyst(),
+            Field_PowerFarmCode: conf.dataContract.dataFilter.powerFarmID,
+            Field_Return_BatchCode: conf.dataContract.dataFilter.dataBatchNum,
+            Field_CodeOfTurbine: "total",
+            Field_Return_FilePath: filePathOfturbinefault,
+            Field_Return_IsSaveDatabase: True
+        })
+
+        filePathOftotalfault = os.path.join(outputAnalysisDir, f"total_fault_result{CSVSuffix}")
+        fault_summary_sorted.to_csv(filePathOftotalfault, index=False)
+
+        results_row.append({
+            Field_Return_TypeAnalyst: self.typeAnalyst(),
+            Field_PowerFarmCode: conf.dataContract.dataFilter.powerFarmID,
+            Field_Return_BatchCode: conf.dataContract.dataFilter.dataBatchNum,
+            Field_CodeOfTurbine: "total",
+            Field_Return_FilePath: filePathOftotalfault,
+            Field_Return_IsSaveDatabase: True
+        })
+
+        return results_row
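
A minimal usage sketch of the new class follows. It assumes the caller already holds an algorithmContract.contract.Contract instance and an output directory; the names conf, outputAnalysisDir and turbineCodes are illustrative placeholders, and only FaultAnalyst and the turbinesAnalysis signature come from this commit.

# Hypothetical driver code, not part of the commit; constructor arguments for
# FaultAnalyst (if any) are defined by AnalystNotFilter and are not shown in this diff.
from dataAnalysisBusiness.algorithm.faultAnalyst import FaultAnalyst

analyst = FaultAnalyst()
rows = analyst.turbinesAnalysis(
    outputAnalysisDir="./output",          # directory where the CSV results are written
    conf=conf,                             # an algorithmContract.contract.Contract instance
    turbineCodes=["WT001", "WT002"],       # turbine codes to analyse (placeholder values)
)
# rows describes the two CSV files written to outputAnalysisDir
# (turbine_fault_result and total_fault_result) together with the farm code,
# batch code and the save-to-database flag for each file.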