Coverage for /pythoncovmergedfiles/medio/medio/usr/local/lib/python3.11/site-packages/black/linegen.py: 11%

"""
Generating lines of code.
"""

import re
import sys
from collections.abc import Collection, Iterator
from dataclasses import replace
from enum import Enum, auto
from functools import partial, wraps
from typing import Optional, Union, cast

from black.brackets import (
    COMMA_PRIORITY,
    DOT_PRIORITY,
    STRING_PRIORITY,
    get_leaves_inside_matching_brackets,
    max_delimiter_priority_in_atom,
)
from black.comments import FMT_OFF, generate_comments, list_comments
from black.lines import (
    Line,
    RHSResult,
    append_leaves,
    can_be_split,
    can_omit_invisible_parens,
    is_line_short_enough,
    line_to_string,
)
from black.mode import Feature, Mode, Preview
from black.nodes import (
    ASSIGNMENTS,
    BRACKETS,
    CLOSING_BRACKETS,
    OPENING_BRACKETS,
    STANDALONE_COMMENT,
    STATEMENT,
    WHITESPACE,
    Visitor,
    ensure_visible,
    fstring_to_string,
    get_annotation_type,
    has_sibling_with_type,
    is_arith_like,
    is_async_stmt_or_funcdef,
    is_atom_with_invisible_parens,
    is_docstring,
    is_empty_tuple,
    is_generator,
    is_lpar_token,
    is_multiline_string,
    is_name_token,
    is_one_sequence_between,
    is_one_tuple,
    is_parent_function_or_class,
    is_part_of_annotation,
    is_rpar_token,
    is_stub_body,
    is_stub_suite,
    is_tuple,
    is_tuple_containing_star,
    is_tuple_containing_walrus,
    is_type_ignore_comment_string,
    is_vararg,
    is_walrus_assignment,
    is_yield,
    syms,
    wrap_in_parentheses,
)
from black.numerics import normalize_numeric_literal
from black.strings import (
    fix_multiline_docstring,
    get_string_prefix,
    normalize_string_prefix,
    normalize_string_quotes,
    normalize_unicode_escape_sequences,
)
from black.trans import (
    CannotTransform,
    StringMerger,
    StringParenStripper,
    StringParenWrapper,
    StringSplitter,
    Transformer,
    hug_power_op,
)
from blib2to3.pgen2 import token
from blib2to3.pytree import Leaf, Node

# types
LeafID = int
LN = Union[Leaf, Node]


class CannotSplit(CannotTransform):
    """A readable split that fits the allotted line length is impossible."""


# This isn't a dataclass because @dataclass + Generic breaks mypyc.
# See also https://github.com/mypyc/mypyc/issues/827.
class LineGenerator(Visitor[Line]):
    """Generates reformatted Line objects. Empty lines are not emitted.

    Note: destroys the tree it's visiting by mutating prefixes of its leaves
    in ways that will no longer stringify to valid Python code on the tree.
    """

    def __init__(self, mode: Mode, features: Collection[Feature]) -> None:
        self.mode = mode
        self.features = features
        self.current_line: Line
        self.__post_init__()

    def line(self, indent: int = 0) -> Iterator[Line]:
        """Generate a line.

        If the line is empty, only emit if it makes sense.
        If the line is too long, split it first and then generate.

        If any lines were generated, set up a new current_line.
        """
        if not self.current_line:
            self.current_line.depth += indent
            return  # Line is empty, don't emit. Creating a new one unnecessary.

        if len(self.current_line.leaves) == 1 and is_async_stmt_or_funcdef(
            self.current_line.leaves[0]
        ):
            # Special case for async def/for/with statements. `visit_async_stmt`
            # adds an `ASYNC` leaf then visits the child def/for/with statement
            # nodes. Line yields from those nodes shouldn't treat the former
            # `ASYNC` leaf as a complete line.
            return

        complete_line = self.current_line
        self.current_line = Line(mode=self.mode, depth=complete_line.depth + indent)
        yield complete_line

    def visit_default(self, node: LN) -> Iterator[Line]:
        """Default `visit_*()` implementation. Recurses to children of `node`."""
        if isinstance(node, Leaf):
            any_open_brackets = self.current_line.bracket_tracker.any_open_brackets()
            for comment in generate_comments(node):
                if any_open_brackets:
                    # any comment within brackets is subject to splitting
                    self.current_line.append(comment)
                elif comment.type == token.COMMENT:
                    # regular trailing comment
                    self.current_line.append(comment)
                    yield from self.line()

                else:
                    # regular standalone comment
                    yield from self.line()

                    self.current_line.append(comment)
                    yield from self.line()

            if any_open_brackets:
                node.prefix = ""
            if node.type not in WHITESPACE:
                self.current_line.append(node)
        yield from super().visit_default(node)

    def visit_test(self, node: Node) -> Iterator[Line]:
        """Visit an `x if y else z` test"""

        already_parenthesized = (
            node.prev_sibling and node.prev_sibling.type == token.LPAR
        )

        if not already_parenthesized:
            # Similar to logic in wrap_in_parentheses
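            # The new LPAR/RPAR leaves have empty values, i.e. "invisible" parens.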
            lpar = Leaf(token.LPAR, "")
            rpar = Leaf(token.RPAR, "")
            prefix = node.prefix
            node.prefix = ""
            lpar.prefix = prefix
            node.insert_child(0, lpar)
            node.append_child(rpar)

        yield from self.visit_default(node)

    def visit_INDENT(self, node: Leaf) -> Iterator[Line]:
        """Increase indentation level, maybe yield a line."""
        # In blib2to3 INDENT never holds comments.
        yield from self.line(+1)
        yield from self.visit_default(node)

    def visit_DEDENT(self, node: Leaf) -> Iterator[Line]:
        """Decrease indentation level, maybe yield a line."""
        # The current line might still wait for trailing comments. At DEDENT time
        # there won't be any (they would be prefixes on the preceding NEWLINE).
        # Emit the line then.
        yield from self.line()

        # While DEDENT has no value, its prefix may contain standalone comments
        # that belong to the current indentation level. Get 'em.
        yield from self.visit_default(node)

        # Finally, emit the dedent.
        yield from self.line(-1)

    def visit_stmt(
        self, node: Node, keywords: set[str], parens: set[str]
    ) -> Iterator[Line]:
        """Visit a statement.

        This implementation is shared for `if`, `while`, `for`, `try`, `except`,
        `def`, `with`, `class`, `assert`, and assignments.

        The relevant Python language `keywords` for a given statement will be
        NAME leaves within it. This method puts those on a separate line.

        `parens` holds a set of string leaf values immediately after which
        invisible parens should be put.
        """
        normalize_invisible_parens(
            node, parens_after=parens, mode=self.mode, features=self.features
        )
        for child in node.children:
            if is_name_token(child) and child.value in keywords:
                yield from self.line()

            yield from self.visit(child)

    def visit_typeparams(self, node: Node) -> Iterator[Line]:
        yield from self.visit_default(node)
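        # Strip whitespace so the type parameter list hugs the preceding name.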
        node.children[0].prefix = ""

    def visit_typevartuple(self, node: Node) -> Iterator[Line]:
        yield from self.visit_default(node)
        node.children[1].prefix = ""

    def visit_paramspec(self, node: Node) -> Iterator[Line]:
        yield from self.visit_default(node)
        node.children[1].prefix = ""

    def visit_dictsetmaker(self, node: Node) -> Iterator[Line]:
        if Preview.wrap_long_dict_values_in_parens in self.mode:
            for i, child in enumerate(node.children):
                if i == 0:
                    continue
                if node.children[i - 1].type == token.COLON:
                    if (
                        child.type == syms.atom
                        and child.children[0].type in OPENING_BRACKETS
                        and not is_walrus_assignment(child)
                    ):
                        maybe_make_parens_invisible_in_atom(
                            child,
                            parent=node,
                            remove_brackets_around_comma=False,
                        )
                    else:
                        wrap_in_parentheses(node, child, visible=False)
        yield from self.visit_default(node)

    def visit_funcdef(self, node: Node) -> Iterator[Line]:
        """Visit function definition."""
        yield from self.line()

        # Remove redundant brackets around return type annotation.
        is_return_annotation = False
        for child in node.children:
            if child.type == token.RARROW:
                is_return_annotation = True
            elif is_return_annotation:
                if child.type == syms.atom and child.children[0].type == token.LPAR:
                    if maybe_make_parens_invisible_in_atom(
                        child,
                        parent=node,
                        remove_brackets_around_comma=False,
                    ):
                        wrap_in_parentheses(node, child, visible=False)
                else:
                    wrap_in_parentheses(node, child, visible=False)
                is_return_annotation = False

        for child in node.children:
            yield from self.visit(child)

    def visit_match_case(self, node: Node) -> Iterator[Line]:
        """Visit either a match or case statement."""
        normalize_invisible_parens(
            node, parens_after=set(), mode=self.mode, features=self.features
        )

        yield from self.line()
        for child in node.children:
            yield from self.visit(child)

    def visit_suite(self, node: Node) -> Iterator[Line]:
        """Visit a suite."""
        if is_stub_suite(node):
            yield from self.visit(node.children[2])
        else:
            yield from self.visit_default(node)

    def visit_simple_stmt(self, node: Node) -> Iterator[Line]:
        """Visit a statement without nested statements."""
        prev_type: Optional[int] = None
        for child in node.children:
            if (prev_type is None or prev_type == token.SEMI) and is_arith_like(child):
                wrap_in_parentheses(node, child, visible=False)
            prev_type = child.type

        if node.parent and node.parent.type in STATEMENT:
            if is_parent_function_or_class(node) and is_stub_body(node):
                yield from self.visit_default(node)
            else:
                yield from self.line(+1)
                yield from self.visit_default(node)
                yield from self.line(-1)

        else:
            if node.parent and is_stub_suite(node.parent):
                node.prefix = ""
                yield from self.visit_default(node)
                return
            yield from self.line()
            yield from self.visit_default(node)

    def visit_async_stmt(self, node: Node) -> Iterator[Line]:
        """Visit `async def`, `async for`, `async with`."""
        yield from self.line()

        children = iter(node.children)
        for child in children:
            yield from self.visit(child)

            if child.type == token.ASYNC or child.type == STANDALONE_COMMENT:
                # STANDALONE_COMMENT happens when `# fmt: skip` is applied on the async
                # line.
                break

        internal_stmt = next(children)
        yield from self.visit(internal_stmt)

    def visit_decorators(self, node: Node) -> Iterator[Line]:
        """Visit decorators."""
        for child in node.children:
            yield from self.line()
            yield from self.visit(child)

    def visit_power(self, node: Node) -> Iterator[Line]:
        for idx, leaf in enumerate(node.children[:-1]):
            next_leaf = node.children[idx + 1]

            if not isinstance(leaf, Leaf):
                continue

            value = leaf.value.lower()
            if (
                leaf.type == token.NUMBER
                and next_leaf.type == syms.trailer
                # Ensure that we are in an attribute trailer
                and next_leaf.children[0].type == token.DOT
                # It shouldn't wrap hexadecimal, binary and octal literals
                and not value.startswith(("0x", "0b", "0o"))
                # It shouldn't wrap complex literals
                and "j" not in value
            ):
                wrap_in_parentheses(node, leaf)

        remove_await_parens(node)

        yield from self.visit_default(node)

    def visit_SEMI(self, leaf: Leaf) -> Iterator[Line]:
        """Remove a semicolon and put the other statement on a separate line."""
        yield from self.line()

    def visit_ENDMARKER(self, leaf: Leaf) -> Iterator[Line]:
        """End of file. Process outstanding comments and end with a newline."""
        yield from self.visit_default(leaf)
        yield from self.line()

    def visit_STANDALONE_COMMENT(self, leaf: Leaf) -> Iterator[Line]:
        if not self.current_line.bracket_tracker.any_open_brackets():
            yield from self.line()
        yield from self.visit_default(leaf)

    def visit_factor(self, node: Node) -> Iterator[Line]:
        """Force parentheses between a unary op and a binary power:

        -2 ** 8 -> -(2 ** 8)
        """
        _operator, operand = node.children
        if (
            operand.type == syms.power
            and len(operand.children) == 3
            and operand.children[1].type == token.DOUBLESTAR
        ):
            lpar = Leaf(token.LPAR, "(")
            rpar = Leaf(token.RPAR, ")")
            index = operand.remove() or 0
            node.insert_child(index, Node(syms.atom, [lpar, operand, rpar]))
        yield from self.visit_default(node)

    def visit_tname(self, node: Node) -> Iterator[Line]:
        """
        Add potential parentheses around types in function parameter lists to be made
        into real parentheses in case the type hint is too long to fit on a line
        Examples:
        def foo(a: int, b: float = 7): ...

        ->

        def foo(a: (int), b: (float) = 7): ...
        """
        assert len(node.children) == 3
        if maybe_make_parens_invisible_in_atom(node.children[2], parent=node):
            wrap_in_parentheses(node, node.children[2], visible=False)

        yield from self.visit_default(node)

    def visit_STRING(self, leaf: Leaf) -> Iterator[Line]:
        normalize_unicode_escape_sequences(leaf)

        if is_docstring(leaf) and not re.search(r"\\\s*\n", leaf.value):
            # We're ignoring docstrings with backslash newline escapes because changing
            # indentation of those changes the AST representation of the code.
            if self.mode.string_normalization:
                docstring = normalize_string_prefix(leaf.value)
                # We handle string normalization at the end of this method, but since
                # what we do right now acts differently depending on quote style (ex.
                # see padding logic below), there's a possibility for unstable
                # formatting. To avoid a situation where this function formats a
                # docstring differently on the second pass, normalize it early.
                docstring = normalize_string_quotes(docstring)
            else:
                docstring = leaf.value
            prefix = get_string_prefix(docstring)
            docstring = docstring[len(prefix) :]  # Remove the prefix
            quote_char = docstring[0]
            # A natural way to remove the outer quotes is to do:
            #   docstring = docstring.strip(quote_char)
            # but that breaks on """""x""" (which is '""x').
            # So we actually need to remove the first character and the next two
            # characters but only if they are the same as the first.
            quote_len = 1 if docstring[1] != quote_char else 3
            docstring = docstring[quote_len:-quote_len]
            docstring_started_empty = not docstring
            indent = " " * 4 * self.current_line.depth

            if is_multiline_string(leaf):
                docstring = fix_multiline_docstring(docstring, indent)
            else:
                docstring = docstring.strip()

            has_trailing_backslash = False
            if docstring:
                # Add some padding if the docstring starts / ends with a quote mark.
                if docstring[0] == quote_char:
                    docstring = " " + docstring
                if docstring[-1] == quote_char:
                    docstring += " "
                if docstring[-1] == "\\":
                    backslash_count = len(docstring) - len(docstring.rstrip("\\"))
                    if backslash_count % 2:
                        # Odd number of trailing backslashes, add some padding to
                        # avoid escaping the closing string quote.
                        docstring += " "
                        has_trailing_backslash = True
            elif not docstring_started_empty:
                docstring = " "

            # We could enforce triple quotes at this point.
            quote = quote_char * quote_len

            # It's invalid to put closing single-character quotes on a new line.
            if quote_len == 3:
                # We need to find the length of the last line of the docstring
                # to find if we can add the closing quotes to the line without
                # exceeding the maximum line length.
                # If docstring is one line, we don't put the closing quotes on a
                # separate line because it looks ugly (#3320).
                lines = docstring.splitlines()
                last_line_length = len(lines[-1]) if docstring else 0

                # If adding closing quotes would cause the last line to exceed
                # the maximum line length, and the closing quote is not
                # prefixed by a newline then put a line break before
                # the closing quotes
                if (
                    len(lines) > 1
                    and last_line_length + quote_len > self.mode.line_length
                    and len(indent) + quote_len <= self.mode.line_length
                    and not has_trailing_backslash
                ):
                    if leaf.value[-1 - quote_len] == "\n":
                        leaf.value = prefix + quote + docstring + quote
                    else:
                        leaf.value = prefix + quote + docstring + "\n" + indent + quote
                else:
                    leaf.value = prefix + quote + docstring + quote
            else:
                leaf.value = prefix + quote + docstring + quote

        if self.mode.string_normalization and leaf.type == token.STRING:
            leaf.value = normalize_string_prefix(leaf.value)
            leaf.value = normalize_string_quotes(leaf.value)
        yield from self.visit_default(leaf)

    def visit_NUMBER(self, leaf: Leaf) -> Iterator[Line]:
        normalize_numeric_literal(leaf)
        yield from self.visit_default(leaf)

    def visit_atom(self, node: Node) -> Iterator[Line]:
        """Visit any atom"""
        if len(node.children) == 3:
            first = node.children[0]
            last = node.children[-1]
            if (first.type == token.LSQB and last.type == token.RSQB) or (
                first.type == token.LBRACE and last.type == token.RBRACE
            ):
                # Lists or sets of one item
                maybe_make_parens_invisible_in_atom(node.children[1], parent=node)

        yield from self.visit_default(node)

    def visit_fstring(self, node: Node) -> Iterator[Line]:
        # currently we don't want to format and split f-strings at all.
        string_leaf = fstring_to_string(node)
        node.replace(string_leaf)
        if "\\" in string_leaf.value and any(
            "\\" in str(child)
            for child in node.children
            if child.type == syms.fstring_replacement_field
        ):
            # string normalization doesn't account for nested quotes,
            # causing breakages. skip normalization when nested quotes exist
            yield from self.visit_default(string_leaf)
            return
        yield from self.visit_STRING(string_leaf)

        # TODO: Uncomment Implementation to format f-string children
        # fstring_start = node.children[0]
        # fstring_end = node.children[-1]
        # assert isinstance(fstring_start, Leaf)
        # assert isinstance(fstring_end, Leaf)

        # quote_char = fstring_end.value[0]
        # quote_idx = fstring_start.value.index(quote_char)
        # prefix, quote = (
        #     fstring_start.value[:quote_idx],
        #     fstring_start.value[quote_idx:]
        # )

        # if not is_docstring(node, self.mode):
        #     prefix = normalize_string_prefix(prefix)

        # assert quote == fstring_end.value

        # is_raw_fstring = "r" in prefix or "R" in prefix
        # middles = [
        #     leaf
        #     for leaf in node.leaves()
        #     if leaf.type == token.FSTRING_MIDDLE
        # ]

        # if self.mode.string_normalization:
        #     middles, quote = normalize_fstring_quotes(quote, middles, is_raw_fstring)

        # fstring_start.value = prefix + quote
        # fstring_end.value = quote

        # yield from self.visit_default(node)

    def __post_init__(self) -> None:
        """You are in a twisty little maze of passages."""
        self.current_line = Line(mode=self.mode)

        v = self.visit_stmt
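        # Ø is just an empty set: no keywords to break on, no parens to add.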
        Ø: set[str] = set()
        self.visit_assert_stmt = partial(v, keywords={"assert"}, parens={"assert", ","})
        self.visit_if_stmt = partial(
            v, keywords={"if", "else", "elif"}, parens={"if", "elif"}
        )
        self.visit_while_stmt = partial(v, keywords={"while", "else"}, parens={"while"})
        self.visit_for_stmt = partial(v, keywords={"for", "else"}, parens={"for", "in"})
        self.visit_try_stmt = partial(
            v, keywords={"try", "except", "else", "finally"}, parens=Ø
        )
        self.visit_except_clause = partial(v, keywords={"except"}, parens={"except"})
        self.visit_with_stmt = partial(v, keywords={"with"}, parens={"with"})
        self.visit_classdef = partial(v, keywords={"class"}, parens=Ø)

        self.visit_expr_stmt = partial(v, keywords=Ø, parens=ASSIGNMENTS)
        self.visit_return_stmt = partial(v, keywords={"return"}, parens={"return"})
        self.visit_import_from = partial(v, keywords=Ø, parens={"import"})
        self.visit_del_stmt = partial(v, keywords=Ø, parens={"del"})
        self.visit_async_funcdef = self.visit_async_stmt
        self.visit_decorated = self.visit_decorators

        # PEP 634
        self.visit_match_stmt = self.visit_match_case
        self.visit_case_block = self.visit_match_case
        self.visit_guard = partial(v, keywords=Ø, parens={"if"})


def _hugging_power_ops_line_to_string(
    line: Line,
    features: Collection[Feature],
    mode: Mode,
) -> Optional[str]:
    try:
        return line_to_string(next(hug_power_op(line, features, mode)))
    except CannotTransform:
        return None


def transform_line(
    line: Line, mode: Mode, features: Collection[Feature] = ()
) -> Iterator[Line]:
    """Transform a `line`, potentially splitting it into many lines.

    They should fit in the allotted `line_length` but might not be able to.

    `features` are syntactical features that may be used in the output.
    """
    if line.is_comment:
        yield line
        return

    line_str = line_to_string(line)

    # We need the line string when power operators are hugging to determine if we should
    # split the line. Default to line_str if no power operators are present on the line.
    line_str_hugging_power_ops = (
        _hugging_power_ops_line_to_string(line, features, mode) or line_str
    )

    ll = mode.line_length
    sn = mode.string_normalization
    string_merge = StringMerger(ll, sn)
    string_paren_strip = StringParenStripper(ll, sn)
    string_split = StringSplitter(ll, sn)
    string_paren_wrap = StringParenWrapper(ll, sn)

    transformers: list[Transformer]
    if (
        not line.contains_uncollapsable_type_comments()
        and not line.should_split_rhs
        and not line.magic_trailing_comma
        and (
            is_line_short_enough(line, mode=mode, line_str=line_str_hugging_power_ops)
            or line.contains_unsplittable_type_ignore()
        )
        and not (line.inside_brackets and line.contains_standalone_comments())
        and not line.contains_implicit_multiline_string_with_comments()
    ):
        # Only apply basic string preprocessing, since lines shouldn't be split here.
        if Preview.string_processing in mode:
            transformers = [string_merge, string_paren_strip]
        else:
            transformers = []
    elif line.is_def and not should_split_funcdef_with_rhs(line, mode):
        transformers = [left_hand_split]
    else:

        def _rhs(
            self: object, line: Line, features: Collection[Feature], mode: Mode
        ) -> Iterator[Line]:
            """Wraps calls to `right_hand_split`.

            The calls increasingly `omit` right-hand trailers (bracket pairs with
            content), meaning the trailers get glued together to split on another
            bracket pair instead.
            """
            for omit in generate_trailers_to_omit(line, mode.line_length):
                lines = list(right_hand_split(line, mode, features, omit=omit))
                # Note: this check is only able to figure out if the first line of the
                # *current* transformation fits in the line length. This is true only
                # for simple cases. All others require running more transforms via
                # `transform_line()`. This check doesn't know if those would succeed.
                if is_line_short_enough(lines[0], mode=mode):
                    yield from lines
                    return

            # All splits failed, best effort split with no omits.
            # This mostly happens to multiline strings that are by definition
            # reported as not fitting a single line, as well as lines that contain
            # trailing commas (those have to be exploded).
            yield from right_hand_split(line, mode, features=features)

        # HACK: nested functions (like _rhs) compiled by mypyc don't retain their
        # __name__ attribute which is needed in `run_transformer` further down.
        # Unfortunately a nested class breaks mypyc too. So a class must be created
        # via type ... https://github.com/mypyc/mypyc/issues/884
        rhs = type("rhs", (), {"__call__": _rhs})()

        if Preview.string_processing in mode:
            if line.inside_brackets:
                transformers = [
                    string_merge,
                    string_paren_strip,
                    string_split,
                    delimiter_split,
                    standalone_comment_split,
                    string_paren_wrap,
                    rhs,
                ]
            else:
                transformers = [
                    string_merge,
                    string_paren_strip,
                    string_split,
                    string_paren_wrap,
                    rhs,
                ]
        else:
            if line.inside_brackets:
                transformers = [delimiter_split, standalone_comment_split, rhs]
            else:
                transformers = [rhs]
    # It's always safe to attempt hugging of power operations and pretty much every line
    # could match.
    transformers.append(hug_power_op)

    for transform in transformers:
        # We are accumulating lines in `result` because we might want to abort
        # mission and return the original line in the end, or attempt a different
        # split altogether.
        try:
            result = run_transformer(line, transform, mode, features, line_str=line_str)
        except CannotTransform:
            continue
        else:
            yield from result
            break

    else:
        yield line


def should_split_funcdef_with_rhs(line: Line, mode: Mode) -> bool:
    """If a funcdef has a magic trailing comma in the return type, then we should first
    split the line with rhs to respect the comma.
    """
    return_type_leaves: list[Leaf] = []
    in_return_type = False

    for leaf in line.leaves:
        if leaf.type == token.COLON:
            in_return_type = False
        if in_return_type:
            return_type_leaves.append(leaf)
        if leaf.type == token.RARROW:
            in_return_type = True

    # using `bracket_split_build_line` will mess with whitespace, so we duplicate a
    # couple lines from it.
    result = Line(mode=line.mode, depth=line.depth)
    leaves_to_track = get_leaves_inside_matching_brackets(return_type_leaves)
    for leaf in return_type_leaves:
        result.append(
            leaf,
            preformatted=True,
            track_bracket=id(leaf) in leaves_to_track,
        )

    # we could also return true if the line is too long, and the return type is longer
    # than the param list. Or if `should_split_rhs` returns True.
    return result.magic_trailing_comma is not None


class _BracketSplitComponent(Enum):
    head = auto()
    body = auto()
    tail = auto()


def left_hand_split(
    line: Line, _features: Collection[Feature], mode: Mode
) -> Iterator[Line]:
    """Split line into many lines, starting with the first matching bracket pair.

    Note: this usually looks weird, only use this for function definitions.
    Prefer RHS otherwise. This is why this function is not symmetrical with
    :func:`right_hand_split` which also handles optional parentheses.
    """
    for leaf_type in [token.LPAR, token.LSQB]:
        tail_leaves: list[Leaf] = []
        body_leaves: list[Leaf] = []
        head_leaves: list[Leaf] = []
        current_leaves = head_leaves
        matching_bracket: Optional[Leaf] = None
        depth = 0
        for index, leaf in enumerate(line.leaves):
            if index == 2 and leaf.type == token.LSQB:
                # A [ at index 2 means this is a type param, so start
                # tracking the depth
                depth += 1
            elif depth > 0:
                if leaf.type == token.LSQB:
                    depth += 1
                elif leaf.type == token.RSQB:
                    depth -= 1
            if (
                current_leaves is body_leaves
                and leaf.type in CLOSING_BRACKETS
                and leaf.opening_bracket is matching_bracket
                and isinstance(matching_bracket, Leaf)
                # If the code is still on LPAR and we are inside a type
                # param, ignore the match since this is searching
                # for the function arguments
                and not (leaf_type == token.LPAR and depth > 0)
            ):
                ensure_visible(leaf)
                ensure_visible(matching_bracket)
                current_leaves = tail_leaves if body_leaves else head_leaves
            current_leaves.append(leaf)
            if current_leaves is head_leaves:
                if leaf.type == leaf_type:
                    matching_bracket = leaf
                    current_leaves = body_leaves
        if matching_bracket and tail_leaves:
            break
    if not matching_bracket or not tail_leaves:
        raise CannotSplit("No brackets found")

    head = bracket_split_build_line(
        head_leaves, line, matching_bracket, component=_BracketSplitComponent.head
    )
    body = bracket_split_build_line(
        body_leaves, line, matching_bracket, component=_BracketSplitComponent.body
    )
    tail = bracket_split_build_line(
        tail_leaves, line, matching_bracket, component=_BracketSplitComponent.tail
    )
    bracket_split_succeeded_or_raise(head, body, tail)
    for result in (head, body, tail):
        if result:
            yield result


def right_hand_split(
    line: Line,
    mode: Mode,
    features: Collection[Feature] = (),
    omit: Collection[LeafID] = (),
) -> Iterator[Line]:
    """Split line into many lines, starting with the last matching bracket pair.

    If the split was by optional parentheses, attempt splitting without them, too.
    `omit` is a collection of closing bracket IDs that shouldn't be considered for
    this split.

    Note: running this function modifies `bracket_depth` on the leaves of `line`.
    """
    rhs_result = _first_right_hand_split(line, omit=omit)
    yield from _maybe_split_omitting_optional_parens(
        rhs_result, line, mode, features=features, omit=omit
    )


def _first_right_hand_split(
    line: Line,
    omit: Collection[LeafID] = (),
) -> RHSResult:
    """Split the line into head, body, tail starting with the last bracket pair.

    Note: this function should not have side effects. It's relied upon by
    _maybe_split_omitting_optional_parens to get an opinion whether to prefer
    splitting on the right side of an assignment statement.
    """
    tail_leaves: list[Leaf] = []
    body_leaves: list[Leaf] = []
    head_leaves: list[Leaf] = []
    current_leaves = tail_leaves
    opening_bracket: Optional[Leaf] = None
    closing_bracket: Optional[Leaf] = None
    for leaf in reversed(line.leaves):
        if current_leaves is body_leaves:
            if leaf is opening_bracket:
                current_leaves = head_leaves if body_leaves else tail_leaves
        current_leaves.append(leaf)
        if current_leaves is tail_leaves:
            if leaf.type in CLOSING_BRACKETS and id(leaf) not in omit:
                opening_bracket = leaf.opening_bracket
                closing_bracket = leaf
                current_leaves = body_leaves
    if not (opening_bracket and closing_bracket and head_leaves):
        # If there is no opening or closing_bracket that means the split failed and
        # all content is in the tail. Otherwise, if `head_leaves` are empty, it means
        # the matching `opening_bracket` wasn't available on `line` anymore.
        raise CannotSplit("No brackets found")

    tail_leaves.reverse()
    body_leaves.reverse()
    head_leaves.reverse()

    body: Optional[Line] = None
    if (
        Preview.hug_parens_with_braces_and_square_brackets in line.mode
        and tail_leaves[0].value
        and tail_leaves[0].opening_bracket is head_leaves[-1]
    ):
        inner_body_leaves = list(body_leaves)
        hugged_opening_leaves: list[Leaf] = []
        hugged_closing_leaves: list[Leaf] = []
        is_unpacking = body_leaves[0].type in [token.STAR, token.DOUBLESTAR]
        unpacking_offset: int = 1 if is_unpacking else 0
        while (
            len(inner_body_leaves) >= 2 + unpacking_offset
            and inner_body_leaves[-1].type in CLOSING_BRACKETS
            and inner_body_leaves[-1].opening_bracket
            is inner_body_leaves[unpacking_offset]
        ):
            if unpacking_offset:
                hugged_opening_leaves.append(inner_body_leaves.pop(0))
                unpacking_offset = 0
            hugged_opening_leaves.append(inner_body_leaves.pop(0))
            hugged_closing_leaves.insert(0, inner_body_leaves.pop())

        if hugged_opening_leaves and inner_body_leaves:
            inner_body = bracket_split_build_line(
                inner_body_leaves,
                line,
                hugged_opening_leaves[-1],
                component=_BracketSplitComponent.body,
            )
            if (
                line.mode.magic_trailing_comma
                and inner_body_leaves[-1].type == token.COMMA
            ):
                should_hug = True
            else:
                line_length = line.mode.line_length - sum(
                    len(str(leaf))
                    for leaf in hugged_opening_leaves + hugged_closing_leaves
                )
                if is_line_short_enough(
                    inner_body, mode=replace(line.mode, line_length=line_length)
                ):
                    # Do not hug if it fits on a single line.
                    should_hug = False
                else:
                    should_hug = True
            if should_hug:
                body_leaves = inner_body_leaves
                head_leaves.extend(hugged_opening_leaves)
                tail_leaves = hugged_closing_leaves + tail_leaves
                body = inner_body  # No need to re-calculate the body again later.

    head = bracket_split_build_line(
        head_leaves, line, opening_bracket, component=_BracketSplitComponent.head
    )
    if body is None:
        body = bracket_split_build_line(
            body_leaves, line, opening_bracket, component=_BracketSplitComponent.body
        )
    tail = bracket_split_build_line(
        tail_leaves, line, opening_bracket, component=_BracketSplitComponent.tail
    )
    bracket_split_succeeded_or_raise(head, body, tail)
    return RHSResult(head, body, tail, opening_bracket, closing_bracket)


def _maybe_split_omitting_optional_parens(
    rhs: RHSResult,
    line: Line,
    mode: Mode,
    features: Collection[Feature] = (),
    omit: Collection[LeafID] = (),
) -> Iterator[Line]:
    if (
        Feature.FORCE_OPTIONAL_PARENTHESES not in features
        # the opening bracket is an optional paren
        and rhs.opening_bracket.type == token.LPAR
        and not rhs.opening_bracket.value
        # the closing bracket is an optional paren
        and rhs.closing_bracket.type == token.RPAR
        and not rhs.closing_bracket.value
        # it's not an import (optional parens are the only thing we can split on
        # in this case; attempting a split without them is a waste of time)
        and not line.is_import
        # and we can actually remove the parens
        and can_omit_invisible_parens(rhs, mode.line_length)
    ):
        omit = {id(rhs.closing_bracket), *omit}
        try:
            # The RHSResult Omitting Optional Parens.
            rhs_oop = _first_right_hand_split(line, omit=omit)
            if _prefer_split_rhs_oop_over_rhs(rhs_oop, rhs, mode):
                yield from _maybe_split_omitting_optional_parens(
                    rhs_oop, line, mode, features=features, omit=omit
                )
                return

        except CannotSplit as e:
            # For chained assignments we want to use the previous successful split
            if line.is_chained_assignment:
                pass

            elif (
                not can_be_split(rhs.body)
                and not is_line_short_enough(rhs.body, mode=mode)
                and not (
                    Preview.wrap_long_dict_values_in_parens
                    and rhs.opening_bracket.parent
                    and rhs.opening_bracket.parent.parent
                    and rhs.opening_bracket.parent.parent.type == syms.dictsetmaker
                )
            ):
                raise CannotSplit(
                    "Splitting failed, body is still too long and can't be split."
                ) from e

            elif (
                rhs.head.contains_multiline_strings()
                or rhs.tail.contains_multiline_strings()
            ):
                raise CannotSplit(
                    "The current optional pair of parentheses is bound to fail to"
                    " satisfy the splitting algorithm because the head or the tail"
                    " contains multiline strings which by definition never fit one"
                    " line."
                ) from e

    ensure_visible(rhs.opening_bracket)
    ensure_visible(rhs.closing_bracket)
    for result in (rhs.head, rhs.body, rhs.tail):
        if result:
            yield result


def _prefer_split_rhs_oop_over_rhs(
    rhs_oop: RHSResult, rhs: RHSResult, mode: Mode
) -> bool:
    """
    Returns whether we should prefer the result from a split omitting optional parens
    (rhs_oop) over the original (rhs).
    """
    # contains unsplittable type ignore
    if (
        rhs_oop.head.contains_unsplittable_type_ignore()
        or rhs_oop.body.contains_unsplittable_type_ignore()
        or rhs_oop.tail.contains_unsplittable_type_ignore()
    ):
        return True

    # Retain optional parens around dictionary values
    if (
        Preview.wrap_long_dict_values_in_parens
        and rhs.opening_bracket.parent
        and rhs.opening_bracket.parent.parent
        and rhs.opening_bracket.parent.parent.type == syms.dictsetmaker
        and rhs.body.bracket_tracker.delimiters
    ):
        # Unless the split is inside the key
        return any(leaf.type == token.COLON for leaf in rhs_oop.tail.leaves)

    # the split is right after `=`
    if not (len(rhs.head.leaves) >= 2 and rhs.head.leaves[-2].type == token.EQUAL):
        return True

    # the left side of assignment contains brackets
    if not any(leaf.type in BRACKETS for leaf in rhs.head.leaves[:-1]):
        return True

    # the left side of assignment is short enough (the -1 is for the ending optional
    # paren)
    if not is_line_short_enough(
        rhs.head, mode=replace(mode, line_length=mode.line_length - 1)
    ):
        return True

    # the left side of assignment won't explode further because of magic trailing comma
    if rhs.head.magic_trailing_comma is not None:
        return True

    # If we have multiple targets, we prefer more `=`s on the head vs pushing them to
    # the body
    rhs_head_equal_count = [leaf.type for leaf in rhs.head.leaves].count(token.EQUAL)
    rhs_oop_head_equal_count = [leaf.type for leaf in rhs_oop.head.leaves].count(
        token.EQUAL
    )
    if rhs_head_equal_count > 1 and rhs_head_equal_count > rhs_oop_head_equal_count:
        return False

    has_closing_bracket_after_assign = False
    for leaf in reversed(rhs_oop.head.leaves):
        if leaf.type == token.EQUAL:
            break
        if leaf.type in CLOSING_BRACKETS:
            has_closing_bracket_after_assign = True
            break
    return (
        # contains matching brackets after the `=` (done by checking there is a
        # closing bracket)
        has_closing_bracket_after_assign
        or (
            # the split is actually from inside the optional parens (done by checking
            # the first line still contains the `=`)
            any(leaf.type == token.EQUAL for leaf in rhs_oop.head.leaves)
            # the first line is short enough
            and is_line_short_enough(rhs_oop.head, mode=mode)
        )
    )


def bracket_split_succeeded_or_raise(head: Line, body: Line, tail: Line) -> None:
    """Raise :exc:`CannotSplit` if the last left- or right-hand split failed.

    Do nothing otherwise.

    A left- or right-hand split is based on a pair of brackets. Content before
    (and including) the opening bracket is left on one line, content inside the
    brackets is put on a separate line, and finally content starting with and
    following the closing bracket is put on a separate line.

    Those are called `head`, `body`, and `tail`, respectively. If the split
    produced the same line (all content in `head`) or ended up with an empty `body`
    and the `tail` is just the closing bracket, then it's considered failed.
    """
    tail_len = len(str(tail).strip())
    if not body:
        if tail_len == 0:
            raise CannotSplit("Splitting brackets produced the same line")

        elif tail_len < 3:
            raise CannotSplit(
                f"Splitting brackets on an empty body to save {tail_len} characters is"
                " not worth it"
            )


def _ensure_trailing_comma(
    leaves: list[Leaf], original: Line, opening_bracket: Leaf
) -> bool:
    if not leaves:
        return False
    # Ensure a trailing comma for imports
    if original.is_import:
        return True
    # ...and standalone function arguments
    if not original.is_def:
        return False
    if opening_bracket.value != "(":
        return False
    # Don't add commas if we already have any commas
    if any(
        leaf.type == token.COMMA and not is_part_of_annotation(leaf) for leaf in leaves
    ):
        return False

    # Find a leaf with a parent (comments don't have parents)
    leaf_with_parent = next((leaf for leaf in leaves if leaf.parent), None)
    if leaf_with_parent is None:
        return True
    # Don't add commas inside parenthesized return annotations
    if get_annotation_type(leaf_with_parent) == "return":
        return False
    # Don't add commas inside PEP 604 unions
    if (
        leaf_with_parent.parent
        and leaf_with_parent.parent.next_sibling
        and leaf_with_parent.parent.next_sibling.type == token.VBAR
    ):
        return False
    return True


def bracket_split_build_line(
    leaves: list[Leaf],
    original: Line,
    opening_bracket: Leaf,
    *,
    component: _BracketSplitComponent,
) -> Line:
    """Return a new line with given `leaves` and respective comments from `original`.

    If it's the head component, brackets will be tracked so trailing commas are
    respected.

    If it's the body component, the result line is one-indented inside brackets and as
    such has its first leaf's prefix normalized and a trailing comma added when
    expected.
    """
    result = Line(mode=original.mode, depth=original.depth)
    if component is _BracketSplitComponent.body:
        result.inside_brackets = True
        result.depth += 1
        if _ensure_trailing_comma(leaves, original, opening_bracket):
            for i in range(len(leaves) - 1, -1, -1):
                if leaves[i].type == STANDALONE_COMMENT:
                    continue

                if leaves[i].type != token.COMMA:
                    new_comma = Leaf(token.COMMA, ",")
                    leaves.insert(i + 1, new_comma)
                break

    leaves_to_track: set[LeafID] = set()
    if component is _BracketSplitComponent.head:
        leaves_to_track = get_leaves_inside_matching_brackets(leaves)
    # Populate the line
    for leaf in leaves:
        result.append(
            leaf,
            preformatted=True,
            track_bracket=id(leaf) in leaves_to_track,
        )
        for comment_after in original.comments_after(leaf):
            result.append(comment_after, preformatted=True)
    if component is _BracketSplitComponent.body and should_split_line(
        result, opening_bracket
    ):
        result.should_split_rhs = True
    return result


def dont_increase_indentation(split_func: Transformer) -> Transformer:
    """Normalize prefix of the first leaf in every line returned by `split_func`.

    This is a decorator over relevant split functions.
    """

    @wraps(split_func)
    def split_wrapper(
        line: Line, features: Collection[Feature], mode: Mode
    ) -> Iterator[Line]:
        for split_line in split_func(line, features, mode):
            split_line.leaves[0].prefix = ""
            yield split_line

    return split_wrapper


def _get_last_non_comment_leaf(line: Line) -> Optional[int]:
    for leaf_idx in range(len(line.leaves) - 1, 0, -1):
        if line.leaves[leaf_idx].type != STANDALONE_COMMENT:
            return leaf_idx
    return None


def _can_add_trailing_comma(leaf: Leaf, features: Collection[Feature]) -> bool:
    if is_vararg(leaf, within={syms.typedargslist}):
        return Feature.TRAILING_COMMA_IN_DEF in features
    if is_vararg(leaf, within={syms.arglist, syms.argument}):
        return Feature.TRAILING_COMMA_IN_CALL in features
    return True


def _safe_add_trailing_comma(safe: bool, delimiter_priority: int, line: Line) -> Line:
    if (
        safe
        and delimiter_priority == COMMA_PRIORITY
        and line.leaves[-1].type != token.COMMA
        and line.leaves[-1].type != STANDALONE_COMMENT
    ):
        new_comma = Leaf(token.COMMA, ",")
        line.append(new_comma)
    return line


MIGRATE_COMMENT_DELIMITERS = {STRING_PRIORITY, COMMA_PRIORITY}


@dont_increase_indentation
def delimiter_split(
    line: Line, features: Collection[Feature], mode: Mode
) -> Iterator[Line]:
    """Split according to delimiters of the highest priority.

    If the appropriate Features are given, the split will add trailing commas
    also in function signatures and calls that contain `*` and `**`.
    """
    if len(line.leaves) == 0:
        raise CannotSplit("Line empty") from None
    last_leaf = line.leaves[-1]

    bt = line.bracket_tracker
    try:
        delimiter_priority = bt.max_delimiter_priority(exclude={id(last_leaf)})
    except ValueError:
        raise CannotSplit("No delimiters found") from None

    if (
        delimiter_priority == DOT_PRIORITY
        and bt.delimiter_count_with_priority(delimiter_priority) == 1
    ):
        raise CannotSplit("Splitting a single attribute from its owner looks wrong")

    current_line = Line(
        mode=line.mode, depth=line.depth, inside_brackets=line.inside_brackets
    )
    lowest_depth = sys.maxsize
    trailing_comma_safe = True

    def append_to_line(leaf: Leaf) -> Iterator[Line]:
        """Append `leaf` to current line or to new line if appending impossible."""
        nonlocal current_line
        try:
            current_line.append_safe(leaf, preformatted=True)
        except ValueError:
            yield current_line

            current_line = Line(
                mode=line.mode, depth=line.depth, inside_brackets=line.inside_brackets
            )
            current_line.append(leaf)

    def append_comments(leaf: Leaf) -> Iterator[Line]:
        for comment_after in line.comments_after(leaf):
            yield from append_to_line(comment_after)

    last_non_comment_leaf = _get_last_non_comment_leaf(line)
    for leaf_idx, leaf in enumerate(line.leaves):
        yield from append_to_line(leaf)

        previous_priority = leaf_idx > 0 and bt.delimiters.get(
            id(line.leaves[leaf_idx - 1])
        )
        if (
            previous_priority != delimiter_priority
            or delimiter_priority in MIGRATE_COMMENT_DELIMITERS
        ):
            yield from append_comments(leaf)

        lowest_depth = min(lowest_depth, leaf.bracket_depth)
        if trailing_comma_safe and leaf.bracket_depth == lowest_depth:
            trailing_comma_safe = _can_add_trailing_comma(leaf, features)

        if last_leaf.type == STANDALONE_COMMENT and leaf_idx == last_non_comment_leaf:
            current_line = _safe_add_trailing_comma(
                trailing_comma_safe, delimiter_priority, current_line
            )

        leaf_priority = bt.delimiters.get(id(leaf))
        if leaf_priority == delimiter_priority:
            if (
                leaf_idx + 1 < len(line.leaves)
                and delimiter_priority not in MIGRATE_COMMENT_DELIMITERS
            ):
                yield from append_comments(line.leaves[leaf_idx + 1])

            yield current_line
            current_line = Line(
                mode=line.mode, depth=line.depth, inside_brackets=line.inside_brackets
            )

    if current_line:
        current_line = _safe_add_trailing_comma(
            trailing_comma_safe, delimiter_priority, current_line
        )
        yield current_line


@dont_increase_indentation
def standalone_comment_split(
    line: Line, features: Collection[Feature], mode: Mode
) -> Iterator[Line]:
    """Split standalone comments from the rest of the line."""
    if not line.contains_standalone_comments():
        raise CannotSplit("Line does not have any standalone comments")

    current_line = Line(
        mode=line.mode, depth=line.depth, inside_brackets=line.inside_brackets
    )

    def append_to_line(leaf: Leaf) -> Iterator[Line]:
        """Append `leaf` to current line or to new line if appending impossible."""
        nonlocal current_line
        try:
            current_line.append_safe(leaf, preformatted=True)
        except ValueError:
            yield current_line

            current_line = Line(
                line.mode, depth=line.depth, inside_brackets=line.inside_brackets
            )
            current_line.append(leaf)

    for leaf in line.leaves:
        yield from append_to_line(leaf)

        for comment_after in line.comments_after(leaf):
            yield from append_to_line(comment_after)

    if current_line:
        yield current_line


def normalize_invisible_parens(  # noqa: C901
    node: Node, parens_after: set[str], *, mode: Mode, features: Collection[Feature]
) -> None:
    """Make existing optional parentheses invisible or create new ones.

    `parens_after` is a set of string leaf values immediately after which parens
    should be put.

    Standardizes on visible parentheses for single-element tuples, and keeps
    existing visible parentheses for other tuples and generator expressions.
    """
    for pc in list_comments(node.prefix, is_endmarker=False):
        if pc.value in FMT_OFF:
            # This `node` has a prefix with `# fmt: off`, don't mess with parens.
            return

    # The multiple context managers grammar has a different pattern, thus this is
    # separate from the for-loop below. This possibly wraps them in invisible parens,
    # and later will be removed in remove_with_parens when needed.
    if node.type == syms.with_stmt:
        _maybe_wrap_cms_in_parens(node, mode, features)

    check_lpar = False
    for index, child in enumerate(list(node.children)):
        # Fixes a bug where invisible parens are not properly stripped from
        # assignment statements that contain type annotations.
        if isinstance(child, Node) and child.type == syms.annassign:
            normalize_invisible_parens(
                child, parens_after=parens_after, mode=mode, features=features
            )

        # Fixes a bug where invisible parens are not properly wrapped around
        # case blocks.
        if isinstance(child, Node) and child.type == syms.case_block:
            normalize_invisible_parens(
                child, parens_after={"case"}, mode=mode, features=features
            )

        # Add parentheses around if guards in case blocks
        if isinstance(child, Node) and child.type == syms.guard:
            normalize_invisible_parens(
                child, parens_after={"if"}, mode=mode, features=features
            )

        # Add parentheses around long tuple unpacking in assignments.
        if (
            index == 0
            and isinstance(child, Node)
            and child.type == syms.testlist_star_expr
        ):
            check_lpar = True

        if check_lpar:
            if (
                child.type == syms.atom
                and node.type == syms.for_stmt
                and isinstance(child.prev_sibling, Leaf)
                and child.prev_sibling.type == token.NAME
                and child.prev_sibling.value == "for"
            ):
                if maybe_make_parens_invisible_in_atom(
                    child,
                    parent=node,
                    remove_brackets_around_comma=True,
                ):
                    wrap_in_parentheses(node, child, visible=False)
            elif isinstance(child, Node) and node.type == syms.with_stmt:
                remove_with_parens(child, node)
            elif child.type == syms.atom:
                if maybe_make_parens_invisible_in_atom(
                    child,
                    parent=node,
                ):
                    wrap_in_parentheses(node, child, visible=False)
            elif is_one_tuple(child):
                wrap_in_parentheses(node, child, visible=True)
            elif node.type == syms.import_from:
                _normalize_import_from(node, child, index)
                break
            elif (
                index == 1
                and child.type == token.STAR
                and node.type == syms.except_clause
            ):
                # In except* (PEP 654), the star is actually part of
                # the keyword. So we need to skip the insertion of
                # invisible parentheses to work more precisely.
                continue

            elif (
                isinstance(child, Leaf)
                and child.next_sibling is not None
                and child.next_sibling.type == token.COLON
                and child.value == "case"
            ):
                # A special patch for the "case case:" scenario: the second occurrence
                # of case will not be parsed as a Python keyword.
1485 break 

1486 

1487 elif not is_multiline_string(child): 

1488 wrap_in_parentheses(node, child, visible=False) 

1489 

1490 comma_check = child.type == token.COMMA 

1491 

1492 check_lpar = isinstance(child, Leaf) and ( 

1493 child.value in parens_after or comma_check 

1494 ) 

1495 

1496 

1497def _normalize_import_from(parent: Node, child: LN, index: int) -> None: 

1498 # "import from" nodes store parentheses directly as part of 

1499 # the statement 

1500 if is_lpar_token(child): 

1501 assert is_rpar_token(parent.children[-1]) 

1502 # make parentheses invisible 

1503 child.value = "" 

1504 parent.children[-1].value = "" 

1505 elif child.type != token.STAR: 

1506 # insert invisible parentheses 

1507 parent.insert_child(index, Leaf(token.LPAR, "")) 

1508 parent.append_child(Leaf(token.RPAR, "")) 

1509 

1510 

1511def remove_await_parens(node: Node) -> None: 

1512 if node.children[0].type == token.AWAIT and len(node.children) > 1: 

1513 if ( 

1514 node.children[1].type == syms.atom 

1515 and node.children[1].children[0].type == token.LPAR 

1516 ): 

1517 if maybe_make_parens_invisible_in_atom( 

1518 node.children[1], 

1519 parent=node, 

1520 remove_brackets_around_comma=True, 

1521 ): 

1522 wrap_in_parentheses(node, node.children[1], visible=False) 

1523 

1524 # Since await is an expression we shouldn't remove 

1525 # brackets in cases where this would change 

1526 # the AST due to operator precedence. 

1527 # Therefore we only aim to remove brackets around 

1528 # power nodes that aren't also await expressions themselves. 

1529 # https://peps.python.org/pep-0492/#updated-operator-precedence-table 

1530 # N.B. We've still removed any redundant nested brackets though :) 

1531 opening_bracket = cast(Leaf, node.children[1].children[0]) 

1532 closing_bracket = cast(Leaf, node.children[1].children[-1]) 

1533 bracket_contents = node.children[1].children[1] 

1534 if isinstance(bracket_contents, Node) and ( 

1535 bracket_contents.type != syms.power 

1536 or bracket_contents.children[0].type == token.AWAIT 

1537 or any( 

1538 isinstance(child, Leaf) and child.type == token.DOUBLESTAR 

1539 for child in bracket_contents.children 

1540 ) 

1541 ): 

1542 ensure_visible(opening_bracket) 

1543 ensure_visible(closing_bracket) 

1544 

1545 

1546def _maybe_wrap_cms_in_parens( 

1547 node: Node, mode: Mode, features: Collection[Feature] 

1548) -> None: 

1549 """When enabled and safe, wrap the multiple context managers in invisible parens. 

1550 

1551 It is only safe when `features` contain Feature.PARENTHESIZED_CONTEXT_MANAGERS. 

1552 """ 

1553 if ( 

1554 Feature.PARENTHESIZED_CONTEXT_MANAGERS not in features 

1555 or len(node.children) <= 2 

1556 # If it's an atom, it's already wrapped in parens. 

1557 or node.children[1].type == syms.atom 

1558 ): 

1559 return 

1560 colon_index: Optional[int] = None 

1561 for i in range(2, len(node.children)): 

1562 if node.children[i].type == token.COLON: 

1563 colon_index = i 

1564 break 

1565 if colon_index is not None: 

1566 lpar = Leaf(token.LPAR, "") 

1567 rpar = Leaf(token.RPAR, "") 

1568 context_managers = node.children[1:colon_index] 

1569 for child in context_managers: 

1570 child.remove() 

1571 # After wrapping, the with_stmt will look like this: 

1572 # with_stmt 

1573 # NAME 'with' 

1574 # atom 

1575 # LPAR '' 

1576 # testlist_gexp 

1577 # ... <-- context_managers 

1578 # /testlist_gexp 

1579 # RPAR '' 

1580 # /atom 

1581 # COLON ':' 

1582 new_child = Node( 

1583 syms.atom, [lpar, Node(syms.testlist_gexp, context_managers), rpar] 

1584 ) 

1585 node.insert_child(1, new_child) 

1586 

1587 
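A sketch of the wrapping this enables, assuming the target versions support parenthesized context managers (PEP 617 grammar); the names are made up for illustration:

    # Too long for one line:
    with make_cm(1) as cm1, make_cm(2) as cm2, make_cm(3) as cm3:
        ...
    # With Feature.PARENTHESIZED_CONTEXT_MANAGERS, Black may emit:
    with (
        make_cm(1) as cm1,
        make_cm(2) as cm2,
        make_cm(3) as cm3,
    ):
        ...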

1588def remove_with_parens(node: Node, parent: Node) -> None: 

1589 """Recursively hide optional parens in `with` statements.""" 

1590 # Removing all unnecessary parentheses in with statements in one pass is a tad 

1591 # complex as different variations of bracketed statements result in pretty 

1592 # different parse trees: 

1593 # 

1594 # with (open("file")) as f: # this is an asexpr_test 

1595 # ... 

1596 # 

1597 # with (open("file") as f): # this is an atom containing an 

1598 # ... # asexpr_test 

1599 # 

1600 # with (open("file")) as f, (open("file")) as f: # this is asexpr_test, COMMA, 

1601 # ... # asexpr_test 

1602 # 

1603 # with (open("file") as f, open("file") as f): # an atom containing a 

1604 # ... # testlist_gexp which then 

1605 # # contains multiple asexpr_test(s) 

1606 if node.type == syms.atom: 

1607 if maybe_make_parens_invisible_in_atom( 

1608 node, 

1609 parent=parent, 

1610 remove_brackets_around_comma=True, 

1611 ): 

1612 wrap_in_parentheses(parent, node, visible=False) 

1613 if isinstance(node.children[1], Node): 

1614 remove_with_parens(node.children[1], node) 

1615 elif node.type == syms.testlist_gexp: 

1616 for child in node.children: 

1617 if isinstance(child, Node): 

1618 remove_with_parens(child, node) 

1619 elif node.type == syms.asexpr_test and not any( 

1620 leaf.type == token.COLONEQUAL for leaf in node.leaves() 

1621 ): 

1622 if maybe_make_parens_invisible_in_atom( 

1623 node.children[0], 

1624 parent=node, 

1625 remove_brackets_around_comma=True, 

1626 ): 

1627 wrap_in_parentheses(node, node.children[0], visible=False) 

1628 

1629 
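A hedged before/after illustration of `remove_with_parens` on the parse-tree shapes listed above, assuming the line fits afterwards:

    with (open("a")) as f, (open("b")) as g:  # -> with open("a") as f, open("b") as g:
        ...
    with (open("a") as f, open("b") as g):    # -> with open("a") as f, open("b") as g:
        ...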

1630def maybe_make_parens_invisible_in_atom( 

1631 node: LN, 

1632 parent: LN, 

1633 remove_brackets_around_comma: bool = False, 

1634) -> bool: 

1635 """If it's safe, make the parens in the atom `node` invisible, recursively. 

1636 Additionally, remove repeated, adjacent invisible parens from the atom `node` 

1637 as they are redundant. 

1638 

1639 Returns whether the node should itself be wrapped in invisible parentheses. 

1640 """ 

1641 if ( 

1642 node.type not in (syms.atom, syms.expr) 

1643 or is_empty_tuple(node) 

1644 or is_one_tuple(node) 

1645 or (is_tuple(node) and parent.type == syms.asexpr_test) 

1646 or ( 

1647 is_tuple(node) 

1648 and parent.type == syms.with_stmt 

1649 and has_sibling_with_type(node, token.COMMA) 

1650 ) 

1651 or (is_yield(node) and parent.type != syms.expr_stmt) 

1652 or ( 

1653 # This condition tries to prevent removing non-optional brackets 

1654 # around a tuple; however, it can be a bit overzealous, so we provide 

1655 # an option to skip this check for `for` and `with` statements. 

1656 not remove_brackets_around_comma 

1657 and max_delimiter_priority_in_atom(node) >= COMMA_PRIORITY 

1658 ) 

1659 or is_tuple_containing_walrus(node) 

1660 or is_tuple_containing_star(node) 

1661 or is_generator(node) 

1662 ): 

1663 return False 

1664 

1665 if is_walrus_assignment(node): 

1666 if parent.type in [ 

1667 syms.annassign, 

1668 syms.expr_stmt, 

1669 syms.assert_stmt, 

1670 syms.return_stmt, 

1671 syms.except_clause, 

1672 syms.funcdef, 

1673 syms.with_stmt, 

1674 syms.testlist_gexp, 

1675 syms.tname, 

1676 # these ones aren't useful to end users, but they do please fuzzers 

1677 syms.for_stmt, 

1678 syms.del_stmt, 

1679 syms.for_stmt, 

1680 ]: 

1681 return False 

1682 

1683 first = node.children[0] 

1684 last = node.children[-1] 

1685 if is_lpar_token(first) and is_rpar_token(last): 

1686 middle = node.children[1] 

1687 # make parentheses invisible 

1688 if ( 

1689 # If the prefix of `middle` includes a type comment with 

1690 # ignore annotation, then we do not remove the parentheses 

1691 not is_type_ignore_comment_string(middle.prefix.strip()) 

1692 ): 

1693 first.value = "" 

1694 last.value = "" 

1695 maybe_make_parens_invisible_in_atom( 

1696 middle, 

1697 parent=parent, 

1698 remove_brackets_around_comma=remove_brackets_around_comma, 

1699 ) 

1700 

1701 if is_atom_with_invisible_parens(middle): 

1702 # Strip the invisible parens from `middle` by replacing 

1703 # it with the child in-between the invisible parens 

1704 middle.replace(middle.children[1]) 

1705 

1706 if middle.children[0].prefix.strip(): 

1707 # Preserve comments before first paren 

1708 middle.children[1].prefix = ( 

1709 middle.children[0].prefix + middle.children[1].prefix 

1710 ) 

1711 

1712 if middle.children[-1].prefix.strip(): 

1713 # Preserve comments before last paren 

1714 last.prefix = middle.children[-1].prefix + last.prefix 

1715 

1716 return False 

1717 

1718 return True 

1719 

1720 
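A short illustration of the safety rules above, assuming standard Black behaviour; the inputs are examples only:

    if (some_condition):              # -> if some_condition:
        ...
    return (compute_value())          # -> return compute_value()
    point = (1,)                      # one-tuple: parens are kept
    squares = (n * n for n in data)   # generator expression: parens are kept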

1721def should_split_line(line: Line, opening_bracket: Leaf) -> bool: 

1722 """Should `line` be immediately split with `delimiter_split()` after RHS?""" 

1723 

1724 if not (opening_bracket.parent and opening_bracket.value in "[{("): 

1725 return False 

1726 

1727 # We're essentially checking if the body is delimited by commas and there's more 

1728 # than one of them (we exclude the trailing comma; if the delimiter priority 

1729 # is still commas, that means there's more than one). 

1730 exclude = set() 

1731 trailing_comma = False 

1732 try: 

1733 last_leaf = line.leaves[-1] 

1734 if last_leaf.type == token.COMMA: 

1735 trailing_comma = True 

1736 exclude.add(id(last_leaf)) 

1737 max_priority = line.bracket_tracker.max_delimiter_priority(exclude=exclude) 

1738 except (IndexError, ValueError): 

1739 return False 

1740 

1741 return max_priority == COMMA_PRIORITY and ( 

1742 (line.mode.magic_trailing_comma and trailing_comma) 

1743 # always explode imports 

1744 or opening_bracket.parent.type in {syms.atom, syms.import_from} 

1745 ) 

1746 

1747 
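A hedged example of the magic trailing comma case this function detects, with default settings:

    names = ["alice", "bob", "carol",]   # magic trailing comma
    # is exploded to
    names = [
        "alice",
        "bob",
        "carol",
    ]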

1748def generate_trailers_to_omit(line: Line, line_length: int) -> Iterator[set[LeafID]]: 

1749 """Generate sets of closing bracket IDs that should be omitted in a RHS. 

1750 

1751 Brackets can be omitted if the entire trailer up to and including 

1752 a preceding closing bracket fits in one line. 

1753 

1754 Yielded sets are cumulative (contain results of previous yields, too). First 

1755 set is empty, unless the line should explode, in which case bracket pairs until 

1756 the one that needs to explode are omitted. 

1757 """ 

1758 

1759 omit: set[LeafID] = set() 

1760 if not line.magic_trailing_comma: 

1761 yield omit 

1762 

1763 length = 4 * line.depth 

1764 opening_bracket: Optional[Leaf] = None 

1765 closing_bracket: Optional[Leaf] = None 

1766 inner_brackets: set[LeafID] = set() 

1767 for index, leaf, leaf_length in line.enumerate_with_length(is_reversed=True): 

1768 length += leaf_length 

1769 if length > line_length: 

1770 break 

1771 

1772 has_inline_comment = leaf_length > len(leaf.value) + len(leaf.prefix) 

1773 if leaf.type == STANDALONE_COMMENT or has_inline_comment: 

1774 break 

1775 

1776 if opening_bracket: 

1777 if leaf is opening_bracket: 

1778 opening_bracket = None 

1779 elif leaf.type in CLOSING_BRACKETS: 

1780 prev = line.leaves[index - 1] if index > 0 else None 

1781 if ( 

1782 prev 

1783 and prev.type == token.COMMA 

1784 and leaf.opening_bracket is not None 

1785 and not is_one_sequence_between( 

1786 leaf.opening_bracket, leaf, line.leaves 

1787 ) 

1788 ): 

1789 # Never omit bracket pairs with trailing commas. 

1790 # We need to explode on those. 

1791 break 

1792 

1793 inner_brackets.add(id(leaf)) 

1794 elif leaf.type in CLOSING_BRACKETS: 

1795 prev = line.leaves[index - 1] if index > 0 else None 

1796 if prev and prev.type in OPENING_BRACKETS: 

1797 # Empty brackets would fail a split, so treat them as "inner" 

1798 # brackets (i.e. only add them to the `omit` set if another 

1799 # pair of brackets was good enough). 

1800 inner_brackets.add(id(leaf)) 

1801 continue 

1802 

1803 if closing_bracket: 

1804 omit.add(id(closing_bracket)) 

1805 omit.update(inner_brackets) 

1806 inner_brackets.clear() 

1807 yield omit 

1808 

1809 if ( 

1810 prev 

1811 and prev.type == token.COMMA 

1812 and leaf.opening_bracket is not None 

1813 and not is_one_sequence_between(leaf.opening_bracket, leaf, line.leaves) 

1814 ): 

1815 # Never omit bracket pairs with trailing commas. 

1816 # We need to explode on those. 

1817 break 

1818 

1819 if leaf.value: 

1820 opening_bracket = leaf.opening_bracket 

1821 closing_bracket = leaf 

1822 

1823 
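A rough sketch of the right-hand-side splits this makes possible; the identifiers are invented for illustration and the exact output depends on the line length:

    value = config.get("section").get("option", compute_default(base, fallback))
    # may become, by omitting the earlier closing brackets and splitting only the last pair:
    value = config.get("section").get(
        "option", compute_default(base, fallback)
    )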

1824def run_transformer( 

1825 line: Line, 

1826 transform: Transformer, 

1827 mode: Mode, 

1828 features: Collection[Feature], 

1829 *, 

1830 line_str: str = "", 

1831) -> list[Line]: 

1832 if not line_str: 

1833 line_str = line_to_string(line) 

1834 result: list[Line] = [] 

1835 for transformed_line in transform(line, features, mode): 

1836 if str(transformed_line).strip("\n") == line_str: 

1837 raise CannotTransform("Line transformer returned an unchanged result") 

1838 

1839 result.extend(transform_line(transformed_line, mode=mode, features=features)) 

1840 

1841 features_set = set(features) 

1842 if ( 

1843 Feature.FORCE_OPTIONAL_PARENTHESES in features_set 

1844 or transform.__class__.__name__ != "rhs" 

1845 or not line.bracket_tracker.invisible 

1846 or any(bracket.value for bracket in line.bracket_tracker.invisible) 

1847 or line.contains_multiline_strings() 

1848 or result[0].contains_uncollapsable_type_comments() 

1849 or result[0].contains_unsplittable_type_ignore() 

1850 or is_line_short_enough(result[0], mode=mode) 

1851 # If any leaves have no parents (which _can_ occur since 

1852 # `transform(line)` potentially destroys the line's underlying node 

1853 # structure), then we can't proceed. Doing so would cause the below 

1854 # call to `append_leaves()` to fail. 

1855 or any(leaf.parent is None for leaf in line.leaves) 

1856 ): 

1857 return result 

1858 

1859 line_copy = line.clone() 

1860 append_leaves(line_copy, line, line.leaves) 

1861 features_fop = features_set | {Feature.FORCE_OPTIONAL_PARENTHESES} 

1862 second_opinion = run_transformer( 

1863 line_copy, transform, mode, features_fop, line_str=line_str 

1864 ) 

1865 if all(is_line_short_enough(ln, mode=mode) for ln in second_opinion): 

1866 result = second_opinion 

1867 return result
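A hedged illustration of the "second opinion" taken above: if the plain right-hand-side split is not clearly good enough, the transformer is re-run with FORCE_OPTIONAL_PARENTHESES, and that result is used only when every resulting line fits. The identifiers below are illustrative:

    total = compute_subtotal(order_lines) + compute_shipping(destination, service)
    # first attempt: split at the last bracket pair
    total = compute_subtotal(order_lines) + compute_shipping(
        destination, service
    )
    # second opinion: wrap the whole right-hand side in optional parentheses
    total = (
        compute_subtotal(order_lines) + compute_shipping(destination, service)
    )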