Temp_Diag.py

import numpy as np
import pandas as pd
from sklearn.neighbors import BallTree
from sqlalchemy import create_engine, text
import math, joblib, os


class MSET_Temp:
    """
    MSET + SPRT temperature analysis class:
    - Offline training: genDLMatrix → save_model
    - Online inference: load_model → predict_SPRT
    """

    def __init__(self,
                 windCode: str,
                 windTurbineNumberList: list[str],
                 startTime: str,
                 endTime: str):
        self.windCode = windCode.strip()
        self.windTurbineNumberList = windTurbineNumberList or []
        self.startTime = startTime
        self.endTime = endTime
        # Assigned after offline training or after loading a saved model
        self.matrixD = None
        self.healthyResidual = None
        self.normalDataBallTree = None
        # SPRT parameters (set during offline training)
        self.feature_weight: np.ndarray | None = None
        self.alpha: float = 0.1
        self.beta: float = 0.1
    def _get_data_by_filter(self) -> pd.DataFrame:
        """
        Online inference only: pull data for self.windTurbineNumberList within
        [startTime, endTime]; if the list is empty, pull data for the whole wind farm.
        """
        # Table-name mapping for special wind farms
        special_wind_farms = {
            "WOF093400005": f"`{self.windCode}-WOB000001_minute`"  # backticks required
        }
        # Resolve the table name by wind-farm code: special farms use backticks, the others do not
        table = special_wind_farms.get(self.windCode, f"{self.windCode}_minute")
        engine = create_engine(
            # "mysql+pymysql://root:admin123456@106.120.102.238:10336/energy_data_prod"
            "mysql+pymysql://root:admin123456@192.168.50.235:30306/energy_data_prod"
        )
        if self.windTurbineNumberList:
            turbines = ",".join(f"'{t}'" for t in self.windTurbineNumberList)
            cond = f"wind_turbine_number IN ({turbines}) AND time_stamp BETWEEN :start AND :end"
        else:
            cond = "time_stamp BETWEEN :start AND :end"
        sql = text(f"""
            SELECT *
            FROM {table}
            WHERE {cond}
            ORDER BY time_stamp ASC
        """)
        return pd.read_sql(sql, engine, params={"start": self.startTime, "end": self.endTime})
    def calcSimilarity(self, x: np.ndarray, y: np.ndarray, m: str = 'euc') -> float:
        """
        Similarity between two state vectors, in (0, 1]:
        - 'cbd': mean of 1 / (1 + |x_i - y_i|) over all features
        - otherwise (Euclidean): 1 / (1 + ||x - y||), e.g. x=(0,0), y=(3,4) gives 1/6
        """
        if len(x) != len(y):
            return 0.0
        if m == 'cbd':
            return float(np.mean([1.0 / (1.0 + abs(p - q)) for p, q in zip(x, y)]))
        diffsq = np.sum((x - y) ** 2)
        return float(1.0 / (1.0 + math.sqrt(diffsq)))
    def genDLMatrix(self, trainDataset: np.ndarray,
                    dataSize4D=100, dataSize4L=50) -> int:
        """
        Offline training: build the memory matrix matrixD, the healthy residuals
        and the BallTree. Returns 0 on success, -1 if there are too few samples.
        """
        m, n = trainDataset.shape
        if m < dataSize4D + dataSize4L:
            return -1
        # Step 1: for each feature, put the rows holding its min/max into D
        D_idx, D = [], []
        for i in range(n):
            col = trainDataset[:, i]
            for idx in (np.argmin(col), np.argmax(col)):
                D.append(trainDataset[idx].tolist())
                D_idx.append(idx)
        # Step 2: greedily add the samples least similar to D until it has dataSize4D rows
        while len(D_idx) < dataSize4D:
            free = list(set(range(m)) - set(D_idx))
            scores = [(np.mean([1 - self.calcSimilarity(trainDataset[i], d) for d in D]), i)
                      for i in free]
            _, pick = max(scores)
            D.append(trainDataset[pick].tolist())
            D_idx.append(pick)
        self.matrixD = np.array(D)
        # BallTree over D, using 1 - similarity as the distance metric
        self.normalDataBallTree = BallTree(
            self.matrixD,
            leaf_size=4,
            metric=lambda a, b: 1.0 - self.calcSimilarity(a, b)
        )
        # Healthy residuals: MSET estimate (distance-weighted k-NN in D) minus the observation
        ests = []
        for x in trainDataset:
            dist, idxs = self.normalDataBallTree.query([x], k=20, return_distance=True)
            w = 1.0 / (dist[0] + 1e-1)
            w /= w.sum()
            ests.append(np.sum([wi * self.matrixD[j] for wi, j in zip(w, idxs[0])], axis=0))
        self.healthyResidual = np.array(ests) - trainDataset
        return 0
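    # Note on the statistic computed by calcSPRT below (the standard Wald SPRT reading of
    # the code): for each sliding window `seg` of `decisionGroup` weighted residuals with
    # sample mean mu1, it forms the Gaussian log-likelihood ratio against the healthy
    # model N(mu0, sigma0^2):
    #     SI = (sum(seg) * (mu1 - mu0) - decisionGroup * (mu1**2 - mu0**2) / 2) / sigma0**2
    # SI is clipped to the Wald bounds [log(beta / (1 - alpha)), log((1 - beta) / alpha)]
    # and divided by the bound on its side of zero, so every returned score lies in [0, 1],
    # reaching 1 when either decision bound is hit.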
    def calcSPRT(self,
                 newsStates: np.ndarray,
                 feature_weight: np.ndarray,
                 alpha: float = 0.1,
                 beta: float = 0.1,
                 decisionGroup: int = 5) -> list[float]:
        """
        Wald SPRT scores for a sequence of new states.
        """
        # Residuals of the new states (MSET estimate minus observation)
        ests = []
        for x in newsStates:
            dist, idxs = self.normalDataBallTree.query([x], k=20, return_distance=True)
            w = 1.0 / (dist[0] + 1e-1)
            w /= w.sum()
            ests.append(np.sum([wi * self.matrixD[j] for wi, j in zip(w, idxs[0])], axis=0))
        resN = np.array(ests) - newsStates
        # Project residuals onto feature_weight; healthy statistics give mu0, sigma0
        wN = [np.dot(r, feature_weight) for r in resN]
        wH = [np.dot(r, feature_weight) for r in self.healthyResidual]
        mu0, sigma0 = np.mean(wH), np.std(wH)
        low = math.log(beta / (1 - alpha))
        high = math.log((1 - beta) / alpha)
        flags = []
        for i in range(len(wN) - decisionGroup + 1):
            seg = wN[i:i + decisionGroup]
            mu1 = np.mean(seg)
            si = (sum(seg) * (mu1 - mu0) / sigma0 ** 2
                  - decisionGroup * ((mu1 ** 2 - mu0 ** 2) / (2 * sigma0 ** 2)))
            # Clip to the Wald bounds and scale by the bound on the same side of zero
            si = max(min(si, high), low)
            flags.append(si / high if si > 0 else si / low)
        return flags
    def predict_SPRT(self,
                     newsStates: np.ndarray,
                     decisionGroup: int = 5) -> list[float]:
        """
        Online inference: score newsStates with the offline-saved
        matrixD / healthyResidual / feature_weight / alpha / beta.
        """
        return self.calcSPRT(
            newsStates,
            self.feature_weight,
            alpha=self.alpha,
            beta=self.beta,
            decisionGroup=decisionGroup
        )
    def save_model(self, path: str):
        """
        Save matrixD, healthyResidual, feature_weight, alpha, beta
        """
        # `or "."` keeps makedirs from failing when path has no directory component
        os.makedirs(os.path.dirname(path) or ".", exist_ok=True)
        joblib.dump({
            'matrixD': self.matrixD,
            'healthyResidual': self.healthyResidual,
            'feature_weight': self.feature_weight,
            'alpha': self.alpha,
            'beta': self.beta,
        }, path)
    @classmethod
    def load_model(cls, path: str) -> 'MSET_Temp':
        """
        Load + rebuild BallTree
        """
        data = joblib.load(path)
        inst = cls('', [], '', '')
        inst.matrixD = data['matrixD']
        inst.healthyResidual = data['healthyResidual']
        inst.feature_weight = data['feature_weight']
        inst.alpha = data['alpha']
        inst.beta = data['beta']
        inst.normalDataBallTree = BallTree(
            inst.matrixD,
            leaf_size=4,
            metric=lambda a, b: 1.0 - inst.calcSimilarity(a, b)
        )
        return inst
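

# Example usage (a minimal sketch, not part of the production pipeline): trains on
# synthetic "healthy" data, saves the model, reloads it and scores new samples.
# The farm/turbine identifiers and the uniform feature_weight are placeholders;
# real deployments would build the matrices from the temperature columns returned
# by _get_data_by_filter().
if __name__ == "__main__":
    rng = np.random.default_rng(0)
    healthy = rng.normal(loc=60.0, scale=2.0, size=(300, 4))  # 300 samples, 4 temperature features

    model = MSET_Temp("WOF000000001", ["WT001"], "2024-01-01 00:00:00", "2024-01-02 00:00:00")
    if model.genDLMatrix(healthy) != 0:
        raise RuntimeError("not enough training samples")
    model.feature_weight = np.ones(healthy.shape[1]) / healthy.shape[1]  # uniform weights, an arbitrary choice
    model.save_model("models/mset_temp.joblib")

    loaded = MSET_Temp.load_model("models/mset_temp.joblib")
    new_states = rng.normal(loc=65.0, scale=2.0, size=(50, 4))  # slightly hotter than the training data
    print(loaded.predict_SPRT(new_states, decisionGroup=5))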