fix: bug where the doublestar operation had inconsistent formatting. … · psf/black@32230e6
@@ -29,7 +29,7 @@
 
 from black.comments import contains_pragma_comment
 from black.lines import Line, append_leaves
-from black.mode import Feature, Mode
+from black.mode import Feature, Mode, Preview
 from black.nodes import (
     CLOSING_BRACKETS,
     OPENING_BRACKETS,
@@ -94,43 +94,36 @@ def hug_power_op(
     else:
         raise CannotTransform("No doublestar token was found in the line.")
 
-    def is_simple_lookup(index: int, step: Literal[1, -1]) -> bool:
+    def is_simple_lookup(index: int, kind: Literal[1, -1]) -> bool:
         # Brackets and parentheses indicate calls, subscripts, etc. ...
         # basically stuff that doesn't count as "simple". Only a NAME lookup
         # or dotted lookup (eg. NAME.NAME) is OK.
-        if step == -1:
-            disallowed = {token.RPAR, token.RSQB}
-        else:
-            disallowed = {token.LPAR, token.LSQB}
-
-        while 0 <= index < len(line.leaves):
-            current = line.leaves[index]
-            if current.type in disallowed:
-                return False
-            if current.type not in {token.NAME, token.DOT} or current.value == "for":
-                # If the current token isn't disallowed, we'll assume this is simple as
-                # only the disallowed tokens are semantically attached to this lookup
-                # expression we're checking. Also, stop early if we hit the 'for' bit
-                # of a comprehension.
-                return True
+        if Preview.is_simple_lookup_for_doublestar_expression not in mode:
+            return original_is_simple_lookup_func(line, index, kind)
 
-            index += step
-
-        return True
+        else:
+            if kind == -1:
+                return handle_is_simple_look_up_prev(
+                    line, index, {token.RPAR, token.RSQB}
+                )
+            else:
+                return handle_is_simple_lookup_forward(
+                    line, index, {token.LPAR, token.LSQB}
+                )
 
-    def is_simple_operand(index: int, kind: Literal["base", "exponent"]) -> bool:
+    def is_simple_operand(index: int, kind: Literal[1, -1]) -> bool:
         # An operand is considered "simple" if it's a NAME, a numeric CONSTANT, a simple
         # lookup (see above), with or without a preceding unary operator.
         start = line.leaves[index]
         if start.type in {token.NAME, token.NUMBER}:
-            return is_simple_lookup(index, step=(1 if kind == "exponent" else -1))
+            return is_simple_lookup(index, kind)
 
         if start.type in {token.PLUS, token.MINUS, token.TILDE}:
             if line.leaves[index + 1].type in {token.NAME, token.NUMBER}:
-                # step is always one as bases with a preceding unary op will be checked
+                # kind is always one as bases with a preceding unary op will be checked
                 # for simplicity starting from the next token (so it'll hit the check
                 # above).
-                return is_simple_lookup(index + 1, step=1)
+                return is_simple_lookup(index + 1, kind=1)
 
         return False
 
@@ -145,9 +138,9 @@ def is_simple_operand(index: int, kind: Literal["base", "exponent"]) -> bool:
         should_hug = (
             (0 < idx < len(line.leaves) - 1)
             and leaf.type == token.DOUBLESTAR
-            and is_simple_operand(idx - 1, kind="base")
+            and is_simple_operand(idx - 1, kind=-1)
             and line.leaves[idx - 1].value != "lambda"
-            and is_simple_operand(idx + 1, kind="exponent")
+            and is_simple_operand(idx + 1, kind=1)
         )
         if should_hug:
             new_leaf.prefix = ""
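The hunks above route `is_simple_lookup` through a preview gate: unless `Preview.is_simple_lookup_for_doublestar_expression` is enabled, the original scan (kept below as `original_is_simple_lookup_func`) still runs. A minimal usage sketch follows; the preview member name comes from this diff, `format_str` and `Mode` are Black's regular public API, and the input line is purely illustrative.

```python
# Hedged sketch: preview features in Black are enabled collectively via
# Mode(preview=True), so the new doublestar lookup handling is exercised
# the same way as any other preview-gated change.
import black

src = "foo.bar ** 2\n"
print(black.format_str(src, mode=black.Mode()))               # stable style
print(black.format_str(src, mode=black.Mode(preview=True)))   # preview style
```

Simple operands around `**` are hugged in either mode; the preview path only changes how operands whose leftward scan runs into brackets are classified.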
@@ -162,6 +155,99 @@ def is_simple_operand(index: int, kind: Literal["base", "exponent"]) -> bool:
     yield new_line
 
 
+def original_is_simple_lookup_func(
+    line: Line, index: int, step: Literal[1, -1]
+) -> bool:
+    if step == -1:
+        disallowed = {token.RPAR, token.RSQB}
+    else:
+        disallowed = {token.LPAR, token.LSQB}
+
+    while 0 <= index < len(line.leaves):
+        current = line.leaves[index]
+        if current.type in disallowed:
+            return False
+        if current.type not in {token.NAME, token.DOT} or current.value == "for":
+            # If the current token isn't disallowed, we'll assume this is
+            # simple as only the disallowed tokens are semantically
+            # attached to this lookup expression we're checking. Also,
+            # stop early if we hit the 'for' bit of a comprehension.
+            return True
+
+        index += step
+
+    return True
+
+
+def handle_is_simple_look_up_prev(line: Line, index: int, disallowed: Set[int]) -> bool:
+    """
+    Handling the determination of is_simple_lookup for the lines prior to the doublestar
+    token. This is required because of the need to isolate the chained expression
+    to determine the bracket or parenthesis belong to the single expression.
+    """
+    contains_disallowed = False
+    chain = []
+
+    while 0 <= index < len(line.leaves):
+        current = line.leaves[index]
+        chain.append(current)
+        if not contains_disallowed and current.type in disallowed:
+            contains_disallowed = True
+        if not is_expression_chained(chain):
+            return not contains_disallowed
+
+        index -= 1
+
+    return True
+
+
+def handle_is_simple_lookup_forward(
+    line: Line, index: int, disallowed: Set[int]
+) -> bool:
+    """
+    Handling decision is_simple_lookup for the lines behind the doublestar token.
+    This function is simplified to keep consistent with the prior logic and the forward
+    case are more straightforward and do not need to care about chained expressions.
+    """
+    while 0 <= index < len(line.leaves):
+        current = line.leaves[index]
+        if current.type in disallowed:
+            return False
+        if current.type not in {token.NAME, token.DOT} or (
+            current.type == token.NAME and current.value == "for"
+        ):
+            # If the current token isn't disallowed, we'll assume this is simple as
+            # only the disallowed tokens are semantically attached to this lookup
+            # expression we're checking. Also, stop early if we hit the 'for' bit
+            # of a comprehension.
+            return True
+
+        index += 1
+
+    return True
+
+
+def is_expression_chained(chained_leaves: List[Leaf]) -> bool:
+    """
+    Function to determine if the variable is a chained call.
+    (e.g., foo.lookup, foo().lookup, (foo.lookup())) will be recognized as chained call)
+    """
+    if len(chained_leaves) < 2:
+        return True
+
+    current_leaf = chained_leaves[-1]
+    past_leaf = chained_leaves[-2]
+
+    if past_leaf.type == token.NAME:
+        return current_leaf.type in {token.DOT}
+    elif past_leaf.type in {token.RPAR, token.RSQB}:
+        return current_leaf.type in {token.RSQB, token.RPAR}
+    elif past_leaf.type in {token.LPAR, token.LSQB}:
+        return current_leaf.type in {token.NAME, token.LPAR, token.LSQB}
+    else:
+        return False
+
+
 class StringTransformer(ABC):
     """
     An implementation of the Transformer protocol that relies on its
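Taken together, the new helpers decide whether the base of a `**` is a simple lookup by walking leftwards from the operator, extending a chain leaf by leaf, and rejecting the operand when a closing bracket shows up inside that chain. The following is a minimal standalone sketch of that rule, not Black's internal API: stdlib `token` constants and `(type, text)` tuples stand in for blib2to3 tokens and `Leaf` objects, and the leaf sequences are illustrative.

```python
# Standalone sketch (not Black's internal API) of the backward "simple
# lookup" check added for the base of a doublestar expression.
# Stdlib token constants replace blib2to3's; (type, text) tuples replace Leaf.
import token
from typing import List, Tuple

Leaf = Tuple[int, str]  # (token type, text) stand-in for Black's Leaf


def is_expression_chained(chain: List[Leaf]) -> bool:
    # Pairwise rule: does the newest leaf (further left in the source line)
    # continue the chain started by the leaf appended just before it?
    if len(chain) < 2:
        return True
    cur, prev = chain[-1][0], chain[-2][0]
    if prev == token.NAME:
        return cur == token.DOT
    if prev in {token.RPAR, token.RSQB}:
        return cur in {token.RPAR, token.RSQB}
    if prev in {token.LPAR, token.LSQB}:
        return cur in {token.NAME, token.LPAR, token.LSQB}
    return False


def simple_lookup_before_doublestar(leaves_right_to_left: List[Leaf]) -> bool:
    # Walk leftwards from the leaf just before "**", collecting the chain.
    # Once the chain stops extending, the operand is simple only if no
    # closing bracket was seen inside it.
    disallowed = {token.RPAR, token.RSQB}
    contains_disallowed = False
    chain: List[Leaf] = []
    for leaf in leaves_right_to_left:
        chain.append(leaf)
        if not contains_disallowed and leaf[0] in disallowed:
            contains_disallowed = True
        if not is_expression_chained(chain):
            return not contains_disallowed
    return True


# "foo.bar ** 2": the scan sees bar, ".", foo and no brackets -> simple.
print(simple_lookup_before_doublestar(
    [(token.NAME, "bar"), (token.DOT, "."), (token.NAME, "foo")]
))  # True
# "foo[0].bar ** 2": the "]" turns up inside the chained lookup -> not simple.
print(simple_lookup_before_doublestar(
    [(token.NAME, "bar"), (token.DOT, "."), (token.RSQB, "]"),
     (token.NUMBER, "0"), (token.LSQB, "["), (token.NAME, "foo")]
))  # False
```

Under this pairwise rule, the scan over `foo.bar` ends without meeting a bracket, so the base still counts as simple and the operator is hugged, while the `]` inside `foo[0].bar` appears within the chain, the operand is rejected, and the spaces around `**` stay in place.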