Coverage for /pythoncovmergedfiles/medio/medio/usr/local/lib/python3.8/site-packages/tensorflow/python/ops/weights_broadcast_ops.py: 24% (66 statements)

# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Weight broadcasting operations.

In `tf.losses` and `tf.metrics`, we support limited weight broadcasting. This
file includes operations for those broadcasting rules.
"""

from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_util
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import cond
from tensorflow.python.ops import control_flow_assert
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import sets
from tensorflow.python.util.tf_export import tf_export


def _has_valid_dims(weights_shape, values_shape):
  """Returns a bool scalar: True iff every weights dim is 1 or the values dim."""
  with ops.name_scope(
      None, "has_invalid_dims", (weights_shape, values_shape)) as scope:
    values_shape_2d = array_ops.expand_dims(values_shape, -1)
    valid_dims = array_ops.concat(
        (values_shape_2d, array_ops.ones_like(values_shape_2d)), axis=1)
    weights_shape_2d = array_ops.expand_dims(weights_shape, -1)
    invalid_dims = sets.set_difference(weights_shape_2d, valid_dims)
    num_invalid_dims = array_ops.size(
        invalid_dims.values, name="num_invalid_dims")
    return math_ops.equal(0, num_invalid_dims, name=scope)
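
# Editor's note on the set trick above: row i of `valid_dims` is the pair
# (values_shape[i], 1), the two sizes a weights dimension may legally take,
# and `sets.set_difference` removes those from {weights_shape[i]} row by row.
# `invalid_dims.values` is therefore empty exactly when every weights
# dimension is 1 or equal to the matching values dimension. For example, with
# values_shape [2, 3], weights_shape [2, 1] leaves nothing, while [2, 2]
# leaves a leftover 2 at dim 1, so the returned tensor is False.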


def _has_valid_nonscalar_shape(
    weights_rank, weights_shape, values_rank, values_shape):
  """Returns a bool scalar: True iff ranks match and every dim is valid."""
  with ops.name_scope(
      None, "has_valid_nonscalar_shape",
      (weights_rank, weights_shape, values_rank, values_shape)) as scope:
    is_same_rank = math_ops.equal(
        values_rank, weights_rank, name="is_same_rank")
    return cond.cond(
        is_same_rank,
        lambda: _has_valid_dims(weights_shape, values_shape),
        lambda: is_same_rank,
        name=scope)
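
# Editor's note: the `cond` above deliberately short-circuits on rank. When
# the ranks differ, the false branch returns `is_same_rank` (already False)
# without ever building `_has_valid_dims`, whose set operation would otherwise
# be asked to compare shape vectors of different lengths.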


_ASSERT_BROADCASTABLE_ERROR_PREFIX = "weights can not be broadcast to values."


def assert_broadcastable(weights, values):
  """Asserts `weights` can be broadcast to `values`.

  In `tf.losses` and `tf.metrics`, we support limited weight broadcasting. We
  let weights be either scalar, or the same rank as the target values, with
  each dimension either 1, or the same as the corresponding values dimension.

  Args:
    weights: `Tensor` of weights.
    values: `Tensor` of values to which weights are applied.

  Returns:
    `Operation` raising `InvalidArgumentError` if `weights` has incorrect
    shape. `no_op` if static checks determine `weights` has correct shape.

  Raises:
    ValueError: If static checks determine `weights` has incorrect shape.
  """
  with ops.name_scope(None, "assert_broadcastable", (weights, values)) as scope:
    with ops.name_scope(None, "weights", (weights,)) as weights_scope:
      weights = ops.convert_to_tensor(weights, name=weights_scope)
      weights_shape = array_ops.shape(weights, name="shape")
      weights_rank = array_ops.rank(weights, name="rank")
      weights_rank_static = tensor_util.constant_value(weights_rank)

    with ops.name_scope(None, "values", (values,)) as values_scope:
      values = ops.convert_to_tensor(values, name=values_scope)
      values_shape = array_ops.shape(values, name="shape")
      values_rank = array_ops.rank(values, name="rank")
      values_rank_static = tensor_util.constant_value(values_rank)

    # Try static checks.
    if weights_rank_static is not None and values_rank_static is not None:
      if weights_rank_static == 0:
        return control_flow_ops.no_op(name="static_scalar_check_success")
      if weights_rank_static != values_rank_static:
        raise ValueError(
            f"{_ASSERT_BROADCASTABLE_ERROR_PREFIX} values.rank="
            f"{values_rank_static}. weights.rank={weights_rank_static}. "
            f"values.shape={values.shape}. weights.shape={weights.shape}. "
            f"Received weights={weights}, values={values}")
      weights_shape_static = tensor_util.constant_value(weights_shape)
      values_shape_static = tensor_util.constant_value(values_shape)
      if weights_shape_static is not None and values_shape_static is not None:
        # Sanity check, this should always be true since we checked rank above.
        ndims = len(values_shape_static)
        assert ndims == len(weights_shape_static)

        for i in range(ndims):
          if weights_shape_static[i] not in (1, values_shape_static[i]):
            raise ValueError(
                f"{_ASSERT_BROADCASTABLE_ERROR_PREFIX} Mismatch at dim {i}. "
                f"values.shape={values_shape_static}, weights.shape="
                f"{weights_shape_static}. Received weights={weights}, "
                f"values={values}")
        return control_flow_ops.no_op(name="static_dims_check_success")

    # Dynamic checks.
    is_scalar = math_ops.equal(0, weights_rank, name="is_scalar")
    data = (
        _ASSERT_BROADCASTABLE_ERROR_PREFIX,
        "weights.shape=", weights.name, weights_shape,
        "values.shape=", values.name, values_shape,
        "is_scalar=", is_scalar,
    )
    is_valid_shape = cond.cond(
        is_scalar,
        lambda: is_scalar,
        lambda: _has_valid_nonscalar_shape(  # pylint: disable=g-long-lambda
            weights_rank, weights_shape, values_rank, values_shape),
        name="is_valid_shape")
    return control_flow_assert.Assert(is_valid_shape, data, name=scope)
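
# Sketch of typical graph-mode use (editor's addition, mirroring how
# `broadcast_weights` below consumes this op): run the weighted op under a
# control dependency on the assertion, so incompatible dynamic shapes fail
# with `InvalidArgumentError` at run time, while fully static shapes are
# resolved at graph-construction time as the static checks above show.
#
#   with ops.control_dependencies((assert_broadcastable(weights, values),)):
#     weighted = math_ops.multiply(weights, values)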


@tf_export("__internal__.ops.broadcast_weights", v1=[])
def broadcast_weights(weights, values):
  """Broadcast `weights` to the same shape as `values`.

  This returns a version of `weights` following the same broadcast rules as
  `mul(weights, values)`, but limited to the weights shapes allowed by
  `assert_broadcastable`. When computing a weighted average, use this function
  to broadcast `weights` before summing them; e.g.,
  `reduce_sum(w * v) / reduce_sum(broadcast_weights(w, v))`.

  Args:
    weights: `Tensor` whose shape is broadcastable to `values` according to the
      rules of `assert_broadcastable`.
    values: `Tensor` of any shape.

  Returns:
    `weights` broadcast to `values` shape according to the rules of
    `assert_broadcastable`.
  """
155 with ops.name_scope(None, "broadcast_weights", (weights, values)) as scope: 

156 values = ops.convert_to_tensor(values, name="values") 

157 weights = ops.convert_to_tensor( 

158 weights, dtype=values.dtype.base_dtype, name="weights") 

159 

160 # Try static check for exact match. 

161 weights_shape = weights.get_shape() 

162 values_shape = values.get_shape() 

163 if (weights_shape.is_fully_defined() and 

164 values_shape.is_fully_defined() and 

165 weights_shape.is_compatible_with(values_shape)): 

166 return weights 

167 

168 # Skip the assert_broadcastable on TPU/GPU because asserts are not 

169 # supported so it only causes unnecessary ops. Also skip it because it uses 

170 # a DenseToDenseSetOperation op that is incompatible with the TPU/GPU when 

171 # the shape(s) are dynamic. 

172 if control_flow_ops.get_enclosing_xla_context() is not None: 

173 return math_ops.multiply( 

174 weights, array_ops.ones_like(values), name=scope) 

175 with ops.control_dependencies((assert_broadcastable(weights, values),)): 

176 return math_ops.multiply( 

177 weights, array_ops.ones_like(values), name=scope)
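
# Example usage (editor's addition): a weighted mean via the pattern the
# docstring suggests. The import path below is this file's internal module
# location; the public alias is `tf.__internal__.ops.broadcast_weights`.
#
#   import tensorflow as tf
#   from tensorflow.python.ops import weights_broadcast_ops
#
#   values = tf.constant([[1., 2.], [3., 4.]])
#   weights = tf.constant([[2.], [1.]])  # rank matches values; dim 1 is 1
#   w = weights_broadcast_ops.broadcast_weights(weights, values)  # shape (2, 2)
#   weighted_mean = tf.reduce_sum(w * values) / tf.reduce_sum(w)  # 13/6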