# deeptaylor.py
  1. # Get Python six functionality:
  2. from __future__ import\
  3. absolute_import, print_function, division, unicode_literals
  4. ###############################################################################
  5. ###############################################################################
  6. ###############################################################################
  7. import keras.layers
  8. import keras.models
  9. from . import base
  10. from .relevance_based import relevance_rule as lrp_rules
  11. from ..utils.keras import checks as kchecks
  12. from ..utils.keras import graph as kgraph
  13. __all__ = [
  14. "DeepTaylor",
  15. "BoundedDeepTaylor",
  16. ]
  17. ###############################################################################
  18. ###############################################################################
  19. ###############################################################################
  20. class DeepTaylor(base.ReverseAnalyzerBase):
  21. """DeepTaylor for ReLU-networks with unbounded input
  22. This class implements the DeepTaylor algorithm for neural networks with
  23. ReLU activation and unbounded input ranges.
  24. :param model: A Keras model.
  25. """
  26. def __init__(self, model, *args, **kwargs):
  27. self._add_model_softmax_check()
  28. self._add_model_check(
  29. lambda layer: not kchecks.only_relu_activation(layer),
  30. "This DeepTaylor implementation only supports ReLU activations.",
  31. check_type="exception",
  32. )
  33. super(DeepTaylor, self).__init__(model, *args, **kwargs)
  34. def _create_analysis(self, *args, **kwargs):
  35. def do_nothing(Xs, Ys, As, reverse_state):
  36. return As
  37. # Kernel layers.
  38. self._add_conditional_reverse_mapping(
  39. lambda l: (kchecks.contains_kernel(l) and
  40. kchecks.contains_activation(l)),
  41. lrp_rules.Alpha1Beta0IgnoreBiasRule,
  42. name="deep_taylor_kernel_w_relu",
  43. )
  44. self._add_conditional_reverse_mapping(
  45. lambda l: (kchecks.contains_kernel(l) and
  46. not kchecks.contains_activation(l)),
  47. lrp_rules.WSquareRule,
  48. name="deep_taylor_kernel_wo_relu",
  49. )
  50. # ReLU Activation layer
  51. self._add_conditional_reverse_mapping(
  52. lambda l: (not kchecks.contains_kernel(l) and
  53. kchecks.contains_activation(l)),
  54. self._gradient_reverse_mapping,
  55. name="deep_taylor_relu",
  56. )
  57. # Assume conv layer beforehand -> unbounded
  58. bn_mapping = kgraph.apply_mapping_to_fused_bn_layer(
  59. lrp_rules.WSquareRule,
  60. fuse_mode="one_linear",
  61. )
  62. self._add_conditional_reverse_mapping(
  63. kchecks.is_batch_normalization_layer,
  64. bn_mapping,
  65. name="deep_taylor_batch_norm",
  66. )
  67. # Special layers.
  68. self._add_conditional_reverse_mapping(
  69. kchecks.is_max_pooling,
  70. self._gradient_reverse_mapping,
  71. name="deep_taylor_max_pooling",
  72. )
  73. self._add_conditional_reverse_mapping(
  74. kchecks.is_average_pooling,
  75. self._gradient_reverse_mapping,
  76. name="deep_taylor_average_pooling",
  77. )
  78. self._add_conditional_reverse_mapping(
  79. lambda l: isinstance(l, keras.layers.Add),
  80. # Ignore scaling with 0.5
  81. self._gradient_reverse_mapping,
  82. name="deep_taylor_add",
  83. )
  84. self._add_conditional_reverse_mapping(
  85. lambda l: isinstance(l, (
  86. keras.layers.convolutional.UpSampling1D,
  87. keras.layers.convolutional.UpSampling2D,
  88. keras.layers.convolutional.UpSampling3D,
  89. keras.layers.core.Dropout,
  90. keras.layers.core.SpatialDropout1D,
  91. keras.layers.core.SpatialDropout2D,
  92. keras.layers.core.SpatialDropout3D,
  93. )),
  94. self._gradient_reverse_mapping,
  95. name="deep_taylor_special_layers",
  96. )
  97. # Layers w/o transformation
  98. self._add_conditional_reverse_mapping(
  99. lambda l: isinstance(l, (
  100. keras.engine.topology.InputLayer,
  101. keras.layers.convolutional.Cropping1D,
  102. keras.layers.convolutional.Cropping2D,
  103. keras.layers.convolutional.Cropping3D,
  104. keras.layers.convolutional.ZeroPadding1D,
  105. keras.layers.convolutional.ZeroPadding2D,
  106. keras.layers.convolutional.ZeroPadding3D,
  107. keras.layers.Concatenate,
  108. keras.layers.core.Flatten,
  109. keras.layers.core.Masking,
  110. keras.layers.core.Permute,
  111. keras.layers.core.RepeatVector,
  112. keras.layers.core.Reshape,
  113. )),
  114. self._gradient_reverse_mapping,
  115. name="deep_taylor_no_transform",
  116. )
  117. return super(DeepTaylor, self)._create_analysis(
  118. *args, **kwargs)
  119. def _default_reverse_mapping(self, Xs, Ys, reversed_Ys, reverse_state):
  120. """
  121. Block all default mappings.
  122. """
  123. raise NotImplementedError(
  124. "Layer %s not supported." % reverse_state["layer"])
  125. def _prepare_model(self, model):
  126. """
  127. To be theoretically sound Deep-Taylor expects only positive outputs.
  128. """
  129. positive_outputs = [keras.layers.ReLU()(x) for x in model.outputs]
  130. model_with_positive_output = keras.models.Model(
  131. inputs=model.inputs, outputs=positive_outputs)
  132. return super(DeepTaylor, self)._prepare_model(
  133. model_with_positive_output)
  134. class BoundedDeepTaylor(DeepTaylor):
  135. """DeepTaylor for ReLU-networks with bounded input
  136. This class implements the DeepTaylor algorithm for neural networks with
  137. ReLU activation and bounded input ranges.
  138. :param model: A Keras model.
  139. :param low: Lowest value of the input range. See Z_B rule.
  140. :param high: Highest value of the input range. See Z_B rule.
  141. """
  142. def __init__(self, model, low=None, high=None, **kwargs):
  143. if low is None or high is None:
  144. raise ValueError("The low or high parameter is missing."
  145. " Z-B (bounded rule) require both values.")
  146. self._bounds_low = low
  147. self._bounds_high = high
  148. super(BoundedDeepTaylor, self).__init__(
  149. model, **kwargs)
  150. def _create_analysis(self, *args, **kwargs):
  151. low, high = self._bounds_low, self._bounds_high
  152. class BoundedProxyRule(lrp_rules.BoundedRule):
  153. def __init__(self, *args, **kwargs):
  154. super(BoundedProxyRule, self).__init__(
  155. *args, low=low, high=high,
  156. **kwargs)
  157. self._add_conditional_reverse_mapping(
  158. lambda l: kchecks.is_input_layer(l) and kchecks.contains_kernel(l),
  159. BoundedProxyRule,
  160. name="deep_taylor_first_layer_bounded",
  161. priority=10, # do first
  162. )
  163. return super(BoundedDeepTaylor, self)._create_analysis(
  164. *args, **kwargs)