#!/usr/bin/env python
# -*- coding: utf-8 -*-
# time: 2023/5/8 13:15
# file: loss.py
# author: David
# company: shenyang JY
from keras import backend as K
import tensorflow as tf

tf.compat.v1.set_random_seed(1234)


class SouthLoss(tf.keras.losses.Loss):
    def __init__(self, opt, name='south_loss'):
        """
        Loss function for the new China Southern Power Grid rules.
        :param opt: configuration object; opt.cap is the installed capacity
        """
        super(SouthLoss, self).__init__(name=name)
        self.cap = opt.cap * 0.2    # cap has not been normalized; it must be normalized first
        self.opt = opt
        self.cap01 = opt.cap * 0.1  # 10% of installed capacity, used as the threshold in call2()

    def call(self, y_true, y_predict):
        """
        Called automatically by Keras.
        :param y_true: labels
        :param y_predict: predictions
        :return: loss value
        """
        # De-normalize, then compute the difference between the actual and predicted values
        y_true = y_true * self.opt.std['C_REAL_VALUE'] + self.opt.mean['C_REAL_VALUE']
        y_predict = y_predict * self.opt.std['C_REAL_VALUE'] + self.opt.mean['C_REAL_VALUE']
        y_true = y_true[:, 15]
        y_predict = y_predict[:, 15]
        diff = y_true - y_predict
        # The steep sigmoid acts as a smooth, differentiable approximation of max(y_true, cap)
        logistic_values = tf.sigmoid(10000 * (y_true - self.cap))
        base = logistic_values * y_true + (1 - logistic_values) * self.cap
        loss = K.square(diff / base)
        # loss = K.mean(loss, axis=-1)
        return loss

    def call2(self, y_true, y_predict):
        y_true = y_true * self.opt.std['C_REAL_VALUE'] + self.opt.mean['C_REAL_VALUE']
        y_predict = y_predict * self.opt.std['C_REAL_VALUE'] + self.opt.mean['C_REAL_VALUE']
        y_true = y_true[:, 15]
        y_predict = y_predict[:, 15]
        diff = y_true - y_predict
        logistic_values = tf.sigmoid(10000 * (y_true - self.cap))
        base = logistic_values * y_true + (1 - logistic_values) * self.cap
        loss = K.square(diff / base)
        # Only samples where both the label and the prediction exceed 10% of capacity are counted
        mask_logical = tf.logical_and(tf.greater(y_true, self.cap01), tf.greater(y_predict, self.cap01))
        count = tf.reduce_sum(tf.cast(mask_logical, tf.float32), axis=-1)
        safe_count = tf.maximum(count, 1)
        # reduce_sum_loss = tf.reduce_sum(loss, axis=-1)
        mean_loss = loss / safe_count
        return mean_loss

    def call1(self, y_true, y_predict):
        y_true = y_true * self.opt.std['C_REAL_VALUE'] + self.opt.mean['C_REAL_VALUE']
        y_predict = y_predict * self.opt.std['C_REAL_VALUE'] + self.opt.mean['C_REAL_VALUE']
        # Hard maximum: use y_true as the base when it exceeds cap, otherwise use cap
        base = tf.where(y_true > self.cap, y_true, tf.ones_like(y_true) * self.cap)
        loss = (y_true - y_predict) / base
        squared_loss = tf.square(loss)
        mean_squared_loss = tf.reduce_mean(squared_loss, axis=[1])
        return mean_squared_loss
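

# Minimal usage sketch (illustrative only, not part of the training pipeline): the
# SimpleNamespace below is a hypothetical stand-in for the real `opt` object, which is
# assumed to expose `cap`, `std['C_REAL_VALUE']` and `mean['C_REAL_VALUE']`.
def _demo_south_loss():
    from types import SimpleNamespace
    opt = SimpleNamespace(cap=100.0,
                          std={'C_REAL_VALUE': 1.0},
                          mean={'C_REAL_VALUE': 0.0})
    # call() slices column 15, so at least 16 output steps are required
    y_true = tf.random.uniform((4, 16))
    y_predict = tf.random.uniform((4, 16))
    loss = SouthLoss(opt)(y_true, y_predict)  # Loss.__call__ reduces the per-sample losses to a scalar
    print('south loss:', float(loss))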


class NorthEastLoss(tf.keras.losses.Loss):
    def __init__(self, opt, name='northeast_loss'):
        """
        Ultra-short-term loss function for the new Northeast Grid rules.
        """
        super(NorthEastLoss, self).__init__(name=name)
        self.opt = opt
        self.cap = round(opt.cap * 0.1, 2)

    def call(self, y_true, y_predict):
        # A small epsilon is added below to avoid division by zero
        y_true = y_true * self.opt.std['C_REAL_VALUE'] + self.opt.mean['C_REAL_VALUE']
        y_predict = y_predict * self.opt.std['C_REAL_VALUE'] + self.opt.mean['C_REAL_VALUE']
        mask_logical = tf.logical_and(tf.greater(y_true, self.cap), tf.greater(y_predict, self.cap))
        # mask = tf.cast(~mask_logical, tf.float32)
        # y_true = y_true * (1 - mask) + 0 * mask
        # y_predict = y_predict * (1 - mask) + 0 * mask
        epsilon = tf.keras.backend.epsilon()
        y_predict_safe = y_predict + epsilon
        # Compute |y_predict - y_true| / |y_predict_safe|
        difference_over_predict = tf.abs(y_predict - y_true) / tf.abs(y_predict_safe)
        # Clip values >= 1 to 1 and keep the rest; tf.where operates element-wise and does not
        # affect the differentiability of the elements that are kept.
        masked_difference = tf.where(difference_over_predict >= 1,
                                     tf.ones_like(difference_over_predict),
                                     difference_over_predict)
        # Sum along the feature dimension (tf.reduce_mean could be used instead of tf.reduce_sum)
        count = tf.reduce_sum(tf.cast(mask_logical, tf.float32), axis=-1)
        sum_diff = tf.reduce_sum(masked_difference, axis=-1)
        # mean_loss = tf.reduce_mean(masked_difference, axis=[1])
        safe_count = tf.maximum(count, 1)
        mean = sum_diff / safe_count
        # mean1 = tf.reduce_sum(masked_difference, axis=-1)  # unused duplicate of sum_diff
        return mean
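

# Minimal usage sketch (illustrative only): the SimpleNamespace and toy tensors below are
# hypothetical stand-ins, showing how the masked relative error behaves when some values
# fall below 10% of capacity.
def _demo_northeast_loss():
    from types import SimpleNamespace
    opt = SimpleNamespace(cap=100.0,
                          std={'C_REAL_VALUE': 1.0},
                          mean={'C_REAL_VALUE': 0.0})
    # With cap = 10.0, only the first two columns pass the mask (label and prediction both > 10)
    y_true = tf.constant([[20.0, 30.0, 5.0]])
    y_predict = tf.constant([[25.0, 15.0, 50.0]])
    loss = NorthEastLoss(opt)(y_true, y_predict)
    print('northeast loss:', float(loss))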


class NorthWestLoss(tf.keras.losses.Loss):
    def __init__(self, name='northwest_loss'):
        """
        Ultra-short-term loss function for the new Northwest Grid rules.
        """
        super(NorthWestLoss, self).__init__(name=name)

    def call(self, y_true, y_pred):
        # Make sure the predictions and labels are floats
        y_pred = tf.cast(y_pred, tf.float32)
        y_true = tf.cast(y_true, tf.float32)
        # Avoid division-by-zero errors
        epsilon = 1e-8
        y_pred_adjusted = y_pred + epsilon
        y_true_adjusted = y_true + epsilon
        # Compute |Pr - Pn|
        abs_diff = tf.abs(y_pred - y_true)
        # Sum of |Pr - Pn|
        sum_abs_diff = tf.reduce_sum(abs_diff)
        # Weight of each difference: |Pr - Pn| / sum(|Pr - Pn|)
        weights = abs_diff / (sum_abs_diff + epsilon)  # epsilon avoids division by zero
        # Compute |Pr / (Pr + Pn) - 0.5|
        ratios = tf.abs((y_pred_adjusted / (y_pred_adjusted + y_true_adjusted)) - 0.5)
        # Final loss value
        loss = 1.0 - 2.0 * tf.reduce_sum(ratios * weights)
        return loss
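

# A minimal end-to-end sketch of plugging these losses into a Keras model. The model
# architecture, the `opt` object and the tensor shapes below are hypothetical and only
# illustrate the expected wiring; the real project is assumed to supply its own model
# and configuration.
if __name__ == '__main__':
    from types import SimpleNamespace
    opt = SimpleNamespace(cap=100.0,
                          std={'C_REAL_VALUE': 1.0},
                          mean={'C_REAL_VALUE': 0.0})
    model = tf.keras.Sequential([
        tf.keras.layers.Dense(32, activation='relu', input_shape=(24,)),
        tf.keras.layers.Dense(16)  # 16 output steps so SouthLoss.call can slice column 15
    ])
    model.compile(optimizer='adam', loss=SouthLoss(opt))
    x = tf.random.uniform((8, 24))
    y = tf.random.uniform((8, 16))
    model.fit(x, y, epochs=1, verbose=0)
    print('northwest loss on a batch:', float(NorthWestLoss()(y, model(x))))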