from typing import TYPE_CHECKING, Any, Dict, List, Optional, Union

from ..config import Config
from .irbasemapping import IRBaseMapping
from .irresource import IRResource

if TYPE_CHECKING:
    from .ir import IR  # pragma: no cover


class IRBaseMappingGroup(IRResource):
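    """
    Base class for a group of IRBaseMappings that share a single group_id.

    The group_id determines the group's cache key, and the group's mappings
    carry weights that normalize_weights_in_mappings() converts into
    cumulative percentages for weighted traffic splitting.
    """
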
    mappings: List[IRBaseMapping]
    group_id: str
    group_weight: List[Union[str, int]]
    labels: Dict[str, Any]
    _cache_key: Optional[str]

    def __init__(
        self,
        ir: "IR",
        aconf: Config,
        location: str,
        rkey: str = "ir.mappinggroup",
        kind: str = "IRBaseMappingGroup",
        name: str = "ir.mappinggroup",
        **kwargs,
    ) -> None:
        # Default to no cache key...
        self._cache_key = None

        # Default to no mappings...
        self.mappings = []

        # ...before we init the superclass, which will call self.setup().
        super().__init__(
            ir=ir, aconf=aconf, rkey=rkey, location=location, kind=kind, name=name, **kwargs
        )

    @classmethod
    def key_for_id(cls, group_id: str) -> str:
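        """Return the cache key used for a group with the given group_id."""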
        return f"{cls.__name__}-{group_id}"

    # XXX WTFO, I hear you cry. Why is this "type: ignore" here? So here's the deal:
    # mypy doesn't like it if you override just the getter of a property that has a
    # setter, too, and I cannot figure out how else to shut it up.
    @property  # type: ignore
    def cache_key(self) -> str:
        # XXX WTFO, I hear you cry again! Can this possibly be thread-safe??!
        # Well, no, not really. But as long as you're not trying to use the
        # cache_key before actually initializing this group, key_for_id()
        # will be idempotent, so it doesn't matter.

        if not self._cache_key:
            self._cache_key = self.__class__.key_for_id(self.group_id)

        return self._cache_key

    def normalize_weights_in_mappings(self) -> bool:
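        """
        Normalize the weights of this group's mappings into cumulative
        percentages, stored in each mapping's _weight.

        Mappings with an explicit weight contribute that weight to a running
        total; any remaining weight up to 100 is divided evenly among the
        mappings with no explicit weight, and the last of those is pinned to
        a cumulative 100 so the group covers the whole range.

        Returns True on success, or False (after posting an error) if any
        single weight, or the total of the explicit weights, exceeds 100.
        """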
        # If there's only one mapping in the group, it's automatically weighted
        # at 100%.
        if len(self.mappings) == 1:
            self.logger.debug(
                "Assigning weight 100 to single mapping %s in group", self.mappings[0].name
            )
            self.mappings[0]._weight = 100
            return True

        # For multiple mappings, we need to normalize the weights.
        weightless_mappings = []
        num_weightless_mappings = 0

        normalized_mappings = []

        current_weight = 0
        for mapping in self.mappings:
            if "weight" in mapping:
                if mapping.weight > 100:
                    self.post_error(f"Mapping {mapping.name} has invalid weight {mapping.weight}")
                    return False

                # Add this mapping's weight to the running total...
                current_weight += round(mapping.weight)

                # ...and store the cumulative total as this mapping's calculated weight.
                self.logger.debug(
                    f"Assigning calculated weight {current_weight} to mapping {mapping.name}"
                )
                mapping._weight = current_weight

                # This mapping is now normalized.
                normalized_mappings.append(mapping)
            else:
                num_weightless_mappings += 1
                weightless_mappings.append(mapping)

        # Did we go over 100%?
        if current_weight > 100:
            self.post_error(
                f"Total weight of mappings exceeds 100 (got {current_weight}), please reconfigure for correct behavior..."
            )
            return False

        if num_weightless_mappings > 0:
            # You might expect that we'd want to generate errors for the case where we hit 100%
            # but still have weightless mappings -- however, that would mess up a workflow where
            # you add a canary Mapping, scale it to 100%, and then delete the original Mapping
            # (much like we do for Argo rollouts).
            #
            # Likewise, you might expect that we'd generate errors if we're at less than 100% and
            # have no weightless mappings. We don't do that because it's not entirely clear what
            # to do -- a straightforward answer is to simply scale the weights we do have to hit
            # 100%, and we may well do that for the next major version.
            #
            # At any rate: since we didn't go over, let's divide the remaining weight equally
            # among the weightless mappings. Note that if current_weight == 100, then
            # remaining_weight will be 0, and none of the weightless mappings will actually get
            # traffic -- but that's what we want in the "scale the canary to 100% and then delete
            # the original" case described above. (Not coincidentally, our CanaryDiffMapping
            # tests exercise this.)
            remaining_weight = 100 - current_weight
            weight_per_weightless_mapping = round(remaining_weight / num_weightless_mappings)
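            # For example (illustrative numbers): one mapping with an explicit
            # weight of 30 plus two weightless mappings leaves remaining_weight
            # at 70, so weight_per_weightless_mapping is round(70 / 2) == 35;
            # the weightless mappings then end up with cumulative weights of
            # 65 and 100 (the last one is forced to exactly 100 below).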

            self.logger.debug(
                f"Assigning calculated weight {weight_per_weightless_mapping} of remaining weight {remaining_weight} to each of {num_weightless_mappings} weightless mappings"
            )

            # Now, let's add weight to every weightless mapping and push to normalized_mappings
            for i, weightless_mapping in enumerate(weightless_mappings):

                # The last weightless mapping must land on a cumulative weight of
                # exactly 100, so force it there rather than relying on rounding.
                if i == num_weightless_mappings - 1:
                    current_weight = 100
                else:
                    current_weight += weight_per_weightless_mapping

                self.logger.debug(
                    f"Assigning weight {current_weight} to weightless mapping {weightless_mapping.name}"
                )
                weightless_mapping._weight = current_weight
                normalized_mappings.append(weightless_mapping)

        self.mappings = normalized_mappings
        return True