Coverage for /pythoncovmergedfiles/medio/medio/usr/local/lib/python3.11/site-packages/black/linegen.py: 11%


782 statements  

1""" 

2Generating lines of code. 

3""" 

4 

5import re 

6import sys 

7from collections.abc import Collection, Iterator 

8from dataclasses import replace 

9from enum import Enum, auto 

10from functools import partial, wraps 

11from typing import Optional, Union, cast 

12 

13from black.brackets import ( 

14 COMMA_PRIORITY, 

15 DOT_PRIORITY, 

16 STRING_PRIORITY, 

17 get_leaves_inside_matching_brackets, 

18 max_delimiter_priority_in_atom, 

19) 

20from black.comments import FMT_OFF, generate_comments, list_comments 

21from black.lines import ( 

22 Line, 

23 RHSResult, 

24 append_leaves, 

25 can_be_split, 

26 can_omit_invisible_parens, 

27 is_line_short_enough, 

28 line_to_string, 

29) 

30from black.mode import Feature, Mode, Preview 

31from black.nodes import ( 

32 ASSIGNMENTS, 

33 BRACKETS, 

34 CLOSING_BRACKETS, 

35 OPENING_BRACKETS, 

36 STANDALONE_COMMENT, 

37 STATEMENT, 

38 WHITESPACE, 

39 Visitor, 

40 ensure_visible, 

41 fstring_to_string, 

42 get_annotation_type, 

43 has_sibling_with_type, 

44 is_arith_like, 

45 is_async_stmt_or_funcdef, 

46 is_atom_with_invisible_parens, 

47 is_docstring, 

48 is_empty_tuple, 

49 is_generator, 

50 is_lpar_token, 

51 is_multiline_string, 

52 is_name_token, 

53 is_one_sequence_between, 

54 is_one_tuple, 

55 is_parent_function_or_class, 

56 is_part_of_annotation, 

57 is_rpar_token, 

58 is_stub_body, 

59 is_stub_suite, 

60 is_tuple, 

61 is_tuple_containing_star, 

62 is_tuple_containing_walrus, 

63 is_type_ignore_comment_string, 

64 is_vararg, 

65 is_walrus_assignment, 

66 is_yield, 

67 syms, 

68 wrap_in_parentheses, 

69) 

70from black.numerics import normalize_numeric_literal 

71from black.strings import ( 

72 fix_multiline_docstring, 

73 get_string_prefix, 

74 normalize_string_prefix, 

75 normalize_string_quotes, 

76 normalize_unicode_escape_sequences, 

77) 

78from black.trans import ( 

79 CannotTransform, 

80 StringMerger, 

81 StringParenStripper, 

82 StringParenWrapper, 

83 StringSplitter, 

84 Transformer, 

85 hug_power_op, 

86) 

87from blib2to3.pgen2 import token 

88from blib2to3.pytree import Leaf, Node 

89 

90# types 

91LeafID = int 

92LN = Union[Leaf, Node] 

93 

94 

95class CannotSplit(CannotTransform): 

96 """A readable split that fits the allotted line length is impossible.""" 

97 

98 

# This isn't a dataclass because @dataclass + Generic breaks mypyc.
# See also https://github.com/mypyc/mypyc/issues/827.
class LineGenerator(Visitor[Line]):
    """Generates reformatted Line objects. Empty lines are not emitted.

    Note: destroys the tree it's visiting by mutating prefixes of its leaves
    in ways that will no longer stringify to valid Python code on the tree.
    """

    def __init__(self, mode: Mode, features: Collection[Feature]) -> None:
        self.mode = mode
        self.features = features
        self.current_line: Line
        self.__post_init__()

    def line(self, indent: int = 0) -> Iterator[Line]:
        """Generate a line.

        If the line is empty, only emit if it makes sense.
        If the line is too long, split it first and then generate.

        If any lines were generated, set up a new current_line.
        """
        if not self.current_line:
            self.current_line.depth += indent
            return  # Line is empty, don't emit. Creating a new one unnecessary.

        if len(self.current_line.leaves) == 1 and is_async_stmt_or_funcdef(
            self.current_line.leaves[0]
        ):
            # Special case for async def/for/with statements. `visit_async_stmt`
            # adds an `ASYNC` leaf then visits the child def/for/with statement
            # nodes. Line yields from those nodes shouldn't treat the former
            # `ASYNC` leaf as a complete line.
            return

        complete_line = self.current_line
        self.current_line = Line(mode=self.mode, depth=complete_line.depth + indent)
        yield complete_line

    def visit_default(self, node: LN) -> Iterator[Line]:
        """Default `visit_*()` implementation. Recurses to children of `node`."""
        if isinstance(node, Leaf):
            any_open_brackets = self.current_line.bracket_tracker.any_open_brackets()
            for comment in generate_comments(node):
                if any_open_brackets:
                    # any comment within brackets is subject to splitting
                    self.current_line.append(comment)
                elif comment.type == token.COMMENT:
                    # regular trailing comment
                    self.current_line.append(comment)
                    yield from self.line()

                else:
                    # regular standalone comment
                    yield from self.line()

                    self.current_line.append(comment)
                    yield from self.line()

            if any_open_brackets:
                node.prefix = ""
            if node.type not in WHITESPACE:
                self.current_line.append(node)
        yield from super().visit_default(node)

    def visit_test(self, node: Node) -> Iterator[Line]:
        """Visit an `x if y else z` test"""

        already_parenthesized = (
            node.prev_sibling and node.prev_sibling.type == token.LPAR
        )

        if not already_parenthesized:
            # Similar to logic in wrap_in_parentheses
            lpar = Leaf(token.LPAR, "")
            rpar = Leaf(token.RPAR, "")
            prefix = node.prefix
            node.prefix = ""
            lpar.prefix = prefix
            node.insert_child(0, lpar)
            node.append_child(rpar)

        yield from self.visit_default(node)

    def visit_INDENT(self, node: Leaf) -> Iterator[Line]:
        """Increase indentation level, maybe yield a line."""
        # In blib2to3 INDENT never holds comments.
        yield from self.line(+1)
        yield from self.visit_default(node)

    def visit_DEDENT(self, node: Leaf) -> Iterator[Line]:
        """Decrease indentation level, maybe yield a line."""
        # The current line might still wait for trailing comments. At DEDENT time
        # there won't be any (they would be prefixes on the preceding NEWLINE).
        # Emit the line then.
        yield from self.line()

        # While DEDENT has no value, its prefix may contain standalone comments
        # that belong to the current indentation level. Get 'em.
        yield from self.visit_default(node)

        # Finally, emit the dedent.
        yield from self.line(-1)

    def visit_stmt(
        self, node: Node, keywords: set[str], parens: set[str]
    ) -> Iterator[Line]:
        """Visit a statement.

        This implementation is shared for `if`, `while`, `for`, `try`, `except`,
        `def`, `with`, `class`, `assert`, and assignments.

        The relevant Python language `keywords` for a given statement will be
        NAME leaves within it. This method puts those on a separate line.

        `parens` holds a set of string leaf values immediately after which
        invisible parens should be put.
        """
        normalize_invisible_parens(
            node, parens_after=parens, mode=self.mode, features=self.features
        )
        for child in node.children:
            if is_name_token(child) and child.value in keywords:
                yield from self.line()

            yield from self.visit(child)

    def visit_typeparams(self, node: Node) -> Iterator[Line]:
        yield from self.visit_default(node)
        node.children[0].prefix = ""

    def visit_typevartuple(self, node: Node) -> Iterator[Line]:
        yield from self.visit_default(node)
        node.children[1].prefix = ""

    def visit_paramspec(self, node: Node) -> Iterator[Line]:
        yield from self.visit_default(node)
        node.children[1].prefix = ""

    def visit_dictsetmaker(self, node: Node) -> Iterator[Line]:
        if Preview.wrap_long_dict_values_in_parens in self.mode:
            for i, child in enumerate(node.children):
                if i == 0:
                    continue
                if node.children[i - 1].type == token.COLON:
                    if (
                        child.type == syms.atom
                        and child.children[0].type in OPENING_BRACKETS
                        and not is_walrus_assignment(child)
                    ):
                        maybe_make_parens_invisible_in_atom(
                            child,
                            parent=node,
                            mode=self.mode,
                            features=self.features,
                            remove_brackets_around_comma=False,
                        )
                    else:
                        wrap_in_parentheses(node, child, visible=False)
        yield from self.visit_default(node)

    def visit_funcdef(self, node: Node) -> Iterator[Line]:
        """Visit function definition."""
        yield from self.line()

        # Remove redundant brackets around return type annotation.
        is_return_annotation = False
        for child in node.children:
            if child.type == token.RARROW:
                is_return_annotation = True
            elif is_return_annotation:
                if child.type == syms.atom and child.children[0].type == token.LPAR:
                    if maybe_make_parens_invisible_in_atom(
                        child,
                        parent=node,
                        mode=self.mode,
                        features=self.features,
                        remove_brackets_around_comma=False,
                    ):
                        wrap_in_parentheses(node, child, visible=False)
                else:
                    wrap_in_parentheses(node, child, visible=False)
                is_return_annotation = False

        for child in node.children:
            yield from self.visit(child)

    def visit_match_case(self, node: Node) -> Iterator[Line]:
        """Visit either a match or case statement."""
        normalize_invisible_parens(
            node, parens_after=set(), mode=self.mode, features=self.features
        )

        yield from self.line()
        for child in node.children:
            yield from self.visit(child)

    def visit_suite(self, node: Node) -> Iterator[Line]:
        """Visit a suite."""
        if is_stub_suite(node):
            yield from self.visit(node.children[2])
        else:
            yield from self.visit_default(node)

    def visit_simple_stmt(self, node: Node) -> Iterator[Line]:
        """Visit a statement without nested statements."""
        prev_type: Optional[int] = None
        for child in node.children:
            if (prev_type is None or prev_type == token.SEMI) and is_arith_like(child):
                wrap_in_parentheses(node, child, visible=False)
            prev_type = child.type

        if node.parent and node.parent.type in STATEMENT:
            if is_parent_function_or_class(node) and is_stub_body(node):
                yield from self.visit_default(node)
            else:
                yield from self.line(+1)
                yield from self.visit_default(node)
                yield from self.line(-1)

        else:
            if node.parent and is_stub_suite(node.parent):
                node.prefix = ""
                yield from self.visit_default(node)
                return
            yield from self.line()
            yield from self.visit_default(node)

    def visit_async_stmt(self, node: Node) -> Iterator[Line]:
        """Visit `async def`, `async for`, `async with`."""
        yield from self.line()

        children = iter(node.children)
        for child in children:
            yield from self.visit(child)

            if child.type == token.ASYNC or child.type == STANDALONE_COMMENT:
                # STANDALONE_COMMENT happens when `# fmt: skip` is applied on the async
                # line.
                break

        internal_stmt = next(children)
        yield from self.visit(internal_stmt)

    def visit_decorators(self, node: Node) -> Iterator[Line]:
        """Visit decorators."""
        for child in node.children:
            yield from self.line()
            yield from self.visit(child)

    def visit_power(self, node: Node) -> Iterator[Line]:
        for idx, leaf in enumerate(node.children[:-1]):
            next_leaf = node.children[idx + 1]

            if not isinstance(leaf, Leaf):
                continue

            value = leaf.value.lower()
            if (
                leaf.type == token.NUMBER
                and next_leaf.type == syms.trailer
                # Ensure that we are in an attribute trailer
                and next_leaf.children[0].type == token.DOT
                # It shouldn't wrap hexadecimal, binary and octal literals
                and not value.startswith(("0x", "0b", "0o"))
                # It shouldn't wrap complex literals
                and "j" not in value
            ):
                wrap_in_parentheses(node, leaf)

        remove_await_parens(node, mode=self.mode, features=self.features)

        yield from self.visit_default(node)

    def visit_SEMI(self, leaf: Leaf) -> Iterator[Line]:
        """Remove a semicolon and put the other statement on a separate line."""
        yield from self.line()

    def visit_ENDMARKER(self, leaf: Leaf) -> Iterator[Line]:
        """End of file. Process outstanding comments and end with a newline."""
        yield from self.visit_default(leaf)
        yield from self.line()

    def visit_STANDALONE_COMMENT(self, leaf: Leaf) -> Iterator[Line]:
        if not self.current_line.bracket_tracker.any_open_brackets():
            yield from self.line()
        yield from self.visit_default(leaf)

    def visit_factor(self, node: Node) -> Iterator[Line]:
        """Force parentheses between a unary op and a binary power:

        -2 ** 8 -> -(2 ** 8)
        """
        _operator, operand = node.children
        if (
            operand.type == syms.power
            and len(operand.children) == 3
            and operand.children[1].type == token.DOUBLESTAR
        ):
            lpar = Leaf(token.LPAR, "(")
            rpar = Leaf(token.RPAR, ")")
            index = operand.remove() or 0
            node.insert_child(index, Node(syms.atom, [lpar, operand, rpar]))
        yield from self.visit_default(node)

    def visit_tname(self, node: Node) -> Iterator[Line]:
        """
        Add potential parentheses around types in function parameter lists to be made
        into real parentheses in case the type hint is too long to fit on a line
        Examples:
        def foo(a: int, b: float = 7): ...

        ->

        def foo(a: (int), b: (float) = 7): ...
        """
        assert len(node.children) == 3
        if maybe_make_parens_invisible_in_atom(
            node.children[2], parent=node, mode=self.mode, features=self.features
        ):
            wrap_in_parentheses(node, node.children[2], visible=False)

        yield from self.visit_default(node)

    def visit_STRING(self, leaf: Leaf) -> Iterator[Line]:
        normalize_unicode_escape_sequences(leaf)

        if is_docstring(leaf) and not re.search(r"\\\s*\n", leaf.value):
            # We're ignoring docstrings with backslash newline escapes because changing
            # indentation of those changes the AST representation of the code.
            if self.mode.string_normalization:
                docstring = normalize_string_prefix(leaf.value)
                # We handle string normalization at the end of this method, but since
                # what we do right now acts differently depending on quote style (ex.
                # see padding logic below), there's a possibility for unstable
                # formatting. To avoid a situation where this function formats a
                # docstring differently on the second pass, normalize it early.
                docstring = normalize_string_quotes(docstring)
            else:
                docstring = leaf.value
            prefix = get_string_prefix(docstring)
            docstring = docstring[len(prefix) :]  # Remove the prefix
            quote_char = docstring[0]
            # A natural way to remove the outer quotes is to do:
            #   docstring = docstring.strip(quote_char)
            # but that breaks on """""x""" (which is '""x').
            # So we actually need to remove the first character and the next two
            # characters but only if they are the same as the first.
            quote_len = 1 if docstring[1] != quote_char else 3
            docstring = docstring[quote_len:-quote_len]
            docstring_started_empty = not docstring
            indent = " " * 4 * self.current_line.depth

            if is_multiline_string(leaf):
                docstring = fix_multiline_docstring(docstring, indent)
            else:
                docstring = docstring.strip()

            has_trailing_backslash = False
            if docstring:
                # Add some padding if the docstring starts / ends with a quote mark.
                if docstring[0] == quote_char:
                    docstring = " " + docstring
                if docstring[-1] == quote_char:
                    docstring += " "
                if docstring[-1] == "\\":
                    backslash_count = len(docstring) - len(docstring.rstrip("\\"))
                    if backslash_count % 2:
                        # Odd number of trailing backslashes, add some padding to
                        # avoid escaping the closing string quote.
                        docstring += " "
                        has_trailing_backslash = True
            elif not docstring_started_empty:
                docstring = " "

            # We could enforce triple quotes at this point.
            quote = quote_char * quote_len

            # It's invalid to put closing single-character quotes on a new line.
            if quote_len == 3:
                # We need to find the length of the last line of the docstring
                # to find if we can add the closing quotes to the line without
                # exceeding the maximum line length.
                # If docstring is one line, we don't put the closing quotes on a
                # separate line because it looks ugly (#3320).
                lines = docstring.splitlines()
                last_line_length = len(lines[-1]) if docstring else 0

                # If adding closing quotes would cause the last line to exceed
                # the maximum line length, and the closing quote is not
                # prefixed by a newline then put a line break before
                # the closing quotes
                if (
                    len(lines) > 1
                    and last_line_length + quote_len > self.mode.line_length
                    and len(indent) + quote_len <= self.mode.line_length
                    and not has_trailing_backslash
                ):
                    if leaf.value[-1 - quote_len] == "\n":
                        leaf.value = prefix + quote + docstring + quote
                    else:
                        leaf.value = prefix + quote + docstring + "\n" + indent + quote
                else:
                    leaf.value = prefix + quote + docstring + quote
            else:
                leaf.value = prefix + quote + docstring + quote

        if self.mode.string_normalization and leaf.type == token.STRING:
            leaf.value = normalize_string_prefix(leaf.value)
            leaf.value = normalize_string_quotes(leaf.value)
        yield from self.visit_default(leaf)

    def visit_NUMBER(self, leaf: Leaf) -> Iterator[Line]:
        normalize_numeric_literal(leaf)
        yield from self.visit_default(leaf)

    def visit_atom(self, node: Node) -> Iterator[Line]:
        """Visit any atom"""
        if len(node.children) == 3:
            first = node.children[0]
            last = node.children[-1]
            if (first.type == token.LSQB and last.type == token.RSQB) or (
                first.type == token.LBRACE and last.type == token.RBRACE
            ):
                # Lists or sets of one item
                maybe_make_parens_invisible_in_atom(
                    node.children[1],
                    parent=node,
                    mode=self.mode,
                    features=self.features,
                )

        yield from self.visit_default(node)

    def visit_fstring(self, node: Node) -> Iterator[Line]:
        # currently we don't want to format and split f-strings at all.
        string_leaf = fstring_to_string(node)
        node.replace(string_leaf)
        if "\\" in string_leaf.value and any(
            "\\" in str(child)
            for child in node.children
            if child.type == syms.fstring_replacement_field
        ):
            # string normalization doesn't account for nested quotes,
            # causing breakages. skip normalization when nested quotes exist
            yield from self.visit_default(string_leaf)
            return
        yield from self.visit_STRING(string_leaf)

        # TODO: Uncomment Implementation to format f-string children
        # fstring_start = node.children[0]
        # fstring_end = node.children[-1]
        # assert isinstance(fstring_start, Leaf)
        # assert isinstance(fstring_end, Leaf)

        # quote_char = fstring_end.value[0]
        # quote_idx = fstring_start.value.index(quote_char)
        # prefix, quote = (
        #     fstring_start.value[:quote_idx],
        #     fstring_start.value[quote_idx:]
        # )

        # if not is_docstring(node, self.mode):
        #     prefix = normalize_string_prefix(prefix)

        # assert quote == fstring_end.value

        # is_raw_fstring = "r" in prefix or "R" in prefix
        # middles = [
        #     leaf
        #     for leaf in node.leaves()
        #     if leaf.type == token.FSTRING_MIDDLE
        # ]

        # if self.mode.string_normalization:
        #     middles, quote = normalize_fstring_quotes(quote, middles, is_raw_fstring)

        # fstring_start.value = prefix + quote
        # fstring_end.value = quote

        # yield from self.visit_default(node)

    def __post_init__(self) -> None:
        """You are in a twisty little maze of passages."""
        self.current_line = Line(mode=self.mode)

        v = self.visit_stmt
        Ø: set[str] = set()
        self.visit_assert_stmt = partial(v, keywords={"assert"}, parens={"assert", ","})
        self.visit_if_stmt = partial(
            v, keywords={"if", "else", "elif"}, parens={"if", "elif"}
        )
        self.visit_while_stmt = partial(v, keywords={"while", "else"}, parens={"while"})
        self.visit_for_stmt = partial(v, keywords={"for", "else"}, parens={"for", "in"})
        self.visit_try_stmt = partial(
            v, keywords={"try", "except", "else", "finally"}, parens=Ø
        )
        self.visit_except_clause = partial(v, keywords={"except"}, parens={"except"})
        self.visit_with_stmt = partial(v, keywords={"with"}, parens={"with"})
        self.visit_classdef = partial(v, keywords={"class"}, parens=Ø)

        self.visit_expr_stmt = partial(v, keywords=Ø, parens=ASSIGNMENTS)
        self.visit_return_stmt = partial(v, keywords={"return"}, parens={"return"})
        self.visit_import_from = partial(v, keywords=Ø, parens={"import"})
        self.visit_del_stmt = partial(v, keywords=Ø, parens={"del"})
        self.visit_async_funcdef = self.visit_async_stmt
        self.visit_decorated = self.visit_decorators

        # PEP 634
        self.visit_match_stmt = self.visit_match_case
        self.visit_case_block = self.visit_match_case
        self.visit_guard = partial(v, keywords=Ø, parens={"if"})

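# Illustrative usage sketch (an assumption, not part of this module): the
# visitor above is typically driven by black's formatting loop, roughly as
# below. Names such as `parsed_node` and `output_lines` are placeholders; the
# exact wiring (empty-line tracking, feature detection) lives in black's
# format_str and may differ between versions.
#
#     line_generator = LineGenerator(mode=mode, features=features)
#     for current_line in line_generator.visit(parsed_node):
#         for formatted in transform_line(current_line, mode=mode, features=features):
#             output_lines.append(str(formatted))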

def _hugging_power_ops_line_to_string(
    line: Line,
    features: Collection[Feature],
    mode: Mode,
) -> Optional[str]:
    try:
        return line_to_string(next(hug_power_op(line, features, mode)))
    except CannotTransform:
        return None

def transform_line(
    line: Line, mode: Mode, features: Collection[Feature] = ()
) -> Iterator[Line]:
    """Transform a `line`, potentially splitting it into many lines.

    They should fit in the allotted `line_length` but might not be able to.

    `features` are syntactical features that may be used in the output.
    """
    if line.is_comment:
        yield line
        return

    line_str = line_to_string(line)

    # We need the line string when power operators are hugging to determine if we
    # should split the line. Default to line_str if no power operator is present
    # on the line.
    line_str_hugging_power_ops = (
        _hugging_power_ops_line_to_string(line, features, mode) or line_str
    )

    ll = mode.line_length
    sn = mode.string_normalization
    string_merge = StringMerger(ll, sn)
    string_paren_strip = StringParenStripper(ll, sn)
    string_split = StringSplitter(ll, sn)
    string_paren_wrap = StringParenWrapper(ll, sn)

    transformers: list[Transformer]
    if (
        not line.contains_uncollapsable_type_comments()
        and not line.should_split_rhs
        and not line.magic_trailing_comma
        and (
            is_line_short_enough(line, mode=mode, line_str=line_str_hugging_power_ops)
            or line.contains_unsplittable_type_ignore()
        )
        and not (line.inside_brackets and line.contains_standalone_comments())
        and not line.contains_implicit_multiline_string_with_comments()
    ):
        # Only apply basic string preprocessing, since lines shouldn't be split here.
        if Preview.string_processing in mode:
            transformers = [string_merge, string_paren_strip]
        else:
            transformers = []
    elif line.is_def and not should_split_funcdef_with_rhs(line, mode):
        transformers = [left_hand_split]
    else:

        def _rhs(
            self: object, line: Line, features: Collection[Feature], mode: Mode
        ) -> Iterator[Line]:
            """Wraps calls to `right_hand_split`.

            The calls increasingly `omit` right-hand trailers (bracket pairs with
            content), meaning the trailers get glued together to split on another
            bracket pair instead.
            """
            for omit in generate_trailers_to_omit(line, mode.line_length):
                lines = list(right_hand_split(line, mode, features, omit=omit))
                # Note: this check is only able to figure out if the first line of the
                # *current* transformation fits in the line length. This is true only
                # for simple cases. All others require running more transforms via
                # `transform_line()`. This check doesn't know if those would succeed.
                if is_line_short_enough(lines[0], mode=mode):
                    yield from lines
                    return

            # All splits failed, best effort split with no omits.
            # This mostly happens to multiline strings that are by definition
            # reported as not fitting a single line, as well as lines that contain
            # trailing commas (those have to be exploded).
            yield from right_hand_split(line, mode, features=features)

        # HACK: nested functions (like _rhs) compiled by mypyc don't retain their
        # __name__ attribute which is needed in `run_transformer` further down.
        # Unfortunately a nested class breaks mypyc too. So a class must be created
        # via type ... https://github.com/mypyc/mypyc/issues/884
        rhs = type("rhs", (), {"__call__": _rhs})()

        if Preview.string_processing in mode:
            if line.inside_brackets:
                transformers = [
                    string_merge,
                    string_paren_strip,
                    string_split,
                    delimiter_split,
                    standalone_comment_split,
                    string_paren_wrap,
                    rhs,
                ]
            else:
                transformers = [
                    string_merge,
                    string_paren_strip,
                    string_split,
                    string_paren_wrap,
                    rhs,
                ]
        else:
            if line.inside_brackets:
                transformers = [delimiter_split, standalone_comment_split, rhs]
            else:
                transformers = [rhs]
    # It's always safe to attempt hugging of power operations and pretty much every line
    # could match.
    transformers.append(hug_power_op)

    for transform in transformers:
        # We are accumulating lines in `result` because we might want to abort
        # mission and return the original line in the end, or attempt a different
        # split altogether.
        try:
            result = run_transformer(line, transform, mode, features, line_str=line_str)
        except CannotTransform:
            continue
        else:
            yield from result
            break

    else:
        yield line

def should_split_funcdef_with_rhs(line: Line, mode: Mode) -> bool:
    """If a funcdef has a magic trailing comma in the return type, then we should first
    split the line with rhs to respect the comma.
    """
    return_type_leaves: list[Leaf] = []
    in_return_type = False

    for leaf in line.leaves:
        if leaf.type == token.COLON:
            in_return_type = False
        if in_return_type:
            return_type_leaves.append(leaf)
        if leaf.type == token.RARROW:
            in_return_type = True

    # using `bracket_split_build_line` will mess with whitespace, so we duplicate a
    # couple lines from it.
    result = Line(mode=line.mode, depth=line.depth)
    leaves_to_track = get_leaves_inside_matching_brackets(return_type_leaves)
    for leaf in return_type_leaves:
        result.append(
            leaf,
            preformatted=True,
            track_bracket=id(leaf) in leaves_to_track,
        )

    # we could also return true if the line is too long, and the return type is longer
    # than the param list. Or if `should_split_rhs` returns True.
    return result.magic_trailing_comma is not None

class _BracketSplitComponent(Enum):
    head = auto()
    body = auto()
    tail = auto()

def left_hand_split(
    line: Line, _features: Collection[Feature], mode: Mode
) -> Iterator[Line]:
    """Split line into many lines, starting with the first matching bracket pair.

    Note: this usually looks weird, only use this for function definitions.
    Prefer RHS otherwise. This is why this function is not symmetrical with
    :func:`right_hand_split` which also handles optional parentheses.
    """
    for leaf_type in [token.LPAR, token.LSQB]:
        tail_leaves: list[Leaf] = []
        body_leaves: list[Leaf] = []
        head_leaves: list[Leaf] = []
        current_leaves = head_leaves
        matching_bracket: Optional[Leaf] = None
        depth = 0
        for index, leaf in enumerate(line.leaves):
            if index == 2 and leaf.type == token.LSQB:
                # A [ at index 2 means this is a type param, so start
                # tracking the depth
                depth += 1
            elif depth > 0:
                if leaf.type == token.LSQB:
                    depth += 1
                elif leaf.type == token.RSQB:
                    depth -= 1
            if (
                current_leaves is body_leaves
                and leaf.type in CLOSING_BRACKETS
                and leaf.opening_bracket is matching_bracket
                and isinstance(matching_bracket, Leaf)
                # If the code is still on LPAR and we are inside a type
                # param, ignore the match since this is searching
                # for the function arguments
                and not (leaf_type == token.LPAR and depth > 0)
            ):
                ensure_visible(leaf)
                ensure_visible(matching_bracket)
                current_leaves = tail_leaves if body_leaves else head_leaves
            current_leaves.append(leaf)
            if current_leaves is head_leaves:
                if leaf.type == leaf_type:
                    matching_bracket = leaf
                    current_leaves = body_leaves
        if matching_bracket and tail_leaves:
            break
    if not matching_bracket or not tail_leaves:
        raise CannotSplit("No brackets found")

    head = bracket_split_build_line(
        head_leaves, line, matching_bracket, component=_BracketSplitComponent.head
    )
    body = bracket_split_build_line(
        body_leaves, line, matching_bracket, component=_BracketSplitComponent.body
    )
    tail = bracket_split_build_line(
        tail_leaves, line, matching_bracket, component=_BracketSplitComponent.tail
    )
    bracket_split_succeeded_or_raise(head, body, tail)
    for result in (head, body, tail):
        if result:
            yield result

def right_hand_split(
    line: Line,
    mode: Mode,
    features: Collection[Feature] = (),
    omit: Collection[LeafID] = (),
) -> Iterator[Line]:
    """Split line into many lines, starting with the last matching bracket pair.

    If the split was by optional parentheses, attempt splitting without them, too.
    `omit` is a collection of closing bracket IDs that shouldn't be considered for
    this split.

    Note: running this function modifies `bracket_depth` on the leaves of `line`.
    """
    rhs_result = _first_right_hand_split(line, omit=omit)
    yield from _maybe_split_omitting_optional_parens(
        rhs_result, line, mode, features=features, omit=omit
    )

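# Rough illustration (an assumption, not from the source): for a line such as
#     result = some_function(argument_one, argument_two)
# the first right-hand split on the last bracket pair yields
#     head: "result = some_function("
#     body: "argument_one, argument_two"
#     tail: ")"
# and _maybe_split_omitting_optional_parens then decides whether a split that
# omits the invisible/optional parentheses would be preferable.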

def _first_right_hand_split(
    line: Line,
    omit: Collection[LeafID] = (),
) -> RHSResult:
    """Split the line into head, body, tail starting with the last bracket pair.

    Note: this function should not have side effects. It's relied upon by
    _maybe_split_omitting_optional_parens to get an opinion whether to prefer
    splitting on the right side of an assignment statement.
    """
    tail_leaves: list[Leaf] = []
    body_leaves: list[Leaf] = []
    head_leaves: list[Leaf] = []
    current_leaves = tail_leaves
    opening_bracket: Optional[Leaf] = None
    closing_bracket: Optional[Leaf] = None
    for leaf in reversed(line.leaves):
        if current_leaves is body_leaves:
            if leaf is opening_bracket:
                current_leaves = head_leaves if body_leaves else tail_leaves
        current_leaves.append(leaf)
        if current_leaves is tail_leaves:
            if leaf.type in CLOSING_BRACKETS and id(leaf) not in omit:
                opening_bracket = leaf.opening_bracket
                closing_bracket = leaf
                current_leaves = body_leaves
    if not (opening_bracket and closing_bracket and head_leaves):
        # If there is no opening or closing_bracket that means the split failed and
        # all content is in the tail. Otherwise, if `head_leaves` are empty, it means
        # the matching `opening_bracket` wasn't available on `line` anymore.
        raise CannotSplit("No brackets found")

    tail_leaves.reverse()
    body_leaves.reverse()
    head_leaves.reverse()

    body: Optional[Line] = None
    if (
        Preview.hug_parens_with_braces_and_square_brackets in line.mode
        and tail_leaves[0].value
        and tail_leaves[0].opening_bracket is head_leaves[-1]
    ):
        inner_body_leaves = list(body_leaves)
        hugged_opening_leaves: list[Leaf] = []
        hugged_closing_leaves: list[Leaf] = []
        is_unpacking = body_leaves[0].type in [token.STAR, token.DOUBLESTAR]
        unpacking_offset: int = 1 if is_unpacking else 0
        while (
            len(inner_body_leaves) >= 2 + unpacking_offset
            and inner_body_leaves[-1].type in CLOSING_BRACKETS
            and inner_body_leaves[-1].opening_bracket
            is inner_body_leaves[unpacking_offset]
        ):
            if unpacking_offset:
                hugged_opening_leaves.append(inner_body_leaves.pop(0))
                unpacking_offset = 0
            hugged_opening_leaves.append(inner_body_leaves.pop(0))
            hugged_closing_leaves.insert(0, inner_body_leaves.pop())

        if hugged_opening_leaves and inner_body_leaves:
            inner_body = bracket_split_build_line(
                inner_body_leaves,
                line,
                hugged_opening_leaves[-1],
                component=_BracketSplitComponent.body,
            )
            if (
                line.mode.magic_trailing_comma
                and inner_body_leaves[-1].type == token.COMMA
            ):
                should_hug = True
            else:
                line_length = line.mode.line_length - sum(
                    len(str(leaf))
                    for leaf in hugged_opening_leaves + hugged_closing_leaves
                )
                if is_line_short_enough(
                    inner_body, mode=replace(line.mode, line_length=line_length)
                ):
                    # Do not hug if it fits on a single line.
                    should_hug = False
                else:
                    should_hug = True
            if should_hug:
                body_leaves = inner_body_leaves
                head_leaves.extend(hugged_opening_leaves)
                tail_leaves = hugged_closing_leaves + tail_leaves
                body = inner_body  # No need to re-calculate the body again later.

    head = bracket_split_build_line(
        head_leaves, line, opening_bracket, component=_BracketSplitComponent.head
    )
    if body is None:
        body = bracket_split_build_line(
            body_leaves, line, opening_bracket, component=_BracketSplitComponent.body
        )
    tail = bracket_split_build_line(
        tail_leaves, line, opening_bracket, component=_BracketSplitComponent.tail
    )
    bracket_split_succeeded_or_raise(head, body, tail)
    return RHSResult(head, body, tail, opening_bracket, closing_bracket)

def _maybe_split_omitting_optional_parens(
    rhs: RHSResult,
    line: Line,
    mode: Mode,
    features: Collection[Feature] = (),
    omit: Collection[LeafID] = (),
) -> Iterator[Line]:
    if (
        Feature.FORCE_OPTIONAL_PARENTHESES not in features
        # the opening bracket is an optional paren
        and rhs.opening_bracket.type == token.LPAR
        and not rhs.opening_bracket.value
        # the closing bracket is an optional paren
        and rhs.closing_bracket.type == token.RPAR
        and not rhs.closing_bracket.value
        # it's not an import (optional parens are the only thing we can split on
        # in this case; attempting a split without them is a waste of time)
        and not line.is_import
        # and we can actually remove the parens
        and can_omit_invisible_parens(rhs, mode.line_length)
    ):
        omit = {id(rhs.closing_bracket), *omit}
        try:
            # The RHSResult Omitting Optional Parens.
            rhs_oop = _first_right_hand_split(line, omit=omit)
            if _prefer_split_rhs_oop_over_rhs(rhs_oop, rhs, mode):
                yield from _maybe_split_omitting_optional_parens(
                    rhs_oop, line, mode, features=features, omit=omit
                )
                return

        except CannotSplit as e:
            # For chained assignments we want to use the previous successful split
            if line.is_chained_assignment:
                pass

            elif (
                not can_be_split(rhs.body)
                and not is_line_short_enough(rhs.body, mode=mode)
                and not (
                    Preview.wrap_long_dict_values_in_parens
                    and rhs.opening_bracket.parent
                    and rhs.opening_bracket.parent.parent
                    and rhs.opening_bracket.parent.parent.type == syms.dictsetmaker
                )
            ):
                raise CannotSplit(
                    "Splitting failed, body is still too long and can't be split."
                ) from e

            elif (
                rhs.head.contains_multiline_strings()
                or rhs.tail.contains_multiline_strings()
            ):
                raise CannotSplit(
                    "The current optional pair of parentheses is bound to fail to"
                    " satisfy the splitting algorithm because the head or the tail"
                    " contains multiline strings which by definition never fit one"
                    " line."
                ) from e

    ensure_visible(rhs.opening_bracket)
    ensure_visible(rhs.closing_bracket)
    for result in (rhs.head, rhs.body, rhs.tail):
        if result:
            yield result

def _prefer_split_rhs_oop_over_rhs(
    rhs_oop: RHSResult, rhs: RHSResult, mode: Mode
) -> bool:
    """
    Returns whether we should prefer the result from a split omitting optional parens
    (rhs_oop) over the original (rhs).
    """
    # contains unsplittable type ignore
    if (
        rhs_oop.head.contains_unsplittable_type_ignore()
        or rhs_oop.body.contains_unsplittable_type_ignore()
        or rhs_oop.tail.contains_unsplittable_type_ignore()
    ):
        return True

    # Retain optional parens around dictionary values
    if (
        Preview.wrap_long_dict_values_in_parens
        and rhs.opening_bracket.parent
        and rhs.opening_bracket.parent.parent
        and rhs.opening_bracket.parent.parent.type == syms.dictsetmaker
        and rhs.body.bracket_tracker.delimiters
    ):
        # Unless the split is inside the key
        return any(leaf.type == token.COLON for leaf in rhs_oop.tail.leaves)

    # the split is right after `=`
    if not (len(rhs.head.leaves) >= 2 and rhs.head.leaves[-2].type == token.EQUAL):
        return True

    # the left side of assignment contains brackets
    if not any(leaf.type in BRACKETS for leaf in rhs.head.leaves[:-1]):
        return True

    # the left side of assignment is short enough (the -1 is for the ending optional
    # paren)
    if not is_line_short_enough(
        rhs.head, mode=replace(mode, line_length=mode.line_length - 1)
    ):
        return True

    # the left side of assignment won't explode further because of magic trailing comma
    if rhs.head.magic_trailing_comma is not None:
        return True

    # If we have multiple targets, we prefer more `=`s on the head vs pushing them to
    # the body
    rhs_head_equal_count = [leaf.type for leaf in rhs.head.leaves].count(token.EQUAL)
    rhs_oop_head_equal_count = [leaf.type for leaf in rhs_oop.head.leaves].count(
        token.EQUAL
    )
    if rhs_head_equal_count > 1 and rhs_head_equal_count > rhs_oop_head_equal_count:
        return False

    has_closing_bracket_after_assign = False
    for leaf in reversed(rhs_oop.head.leaves):
        if leaf.type == token.EQUAL:
            break
        if leaf.type in CLOSING_BRACKETS:
            has_closing_bracket_after_assign = True
            break
    return (
        # contains matching brackets after the `=` (done by checking there is a
        # closing bracket)
        has_closing_bracket_after_assign
        or (
            # the split is actually from inside the optional parens (done by checking
            # the first line still contains the `=`)
            any(leaf.type == token.EQUAL for leaf in rhs_oop.head.leaves)
            # the first line is short enough
            and is_line_short_enough(rhs_oop.head, mode=mode)
        )
    )

def bracket_split_succeeded_or_raise(head: Line, body: Line, tail: Line) -> None:
    """Raise :exc:`CannotSplit` if the last left- or right-hand split failed.

    Do nothing otherwise.

    A left- or right-hand split is based on a pair of brackets. Content before
    (and including) the opening bracket is left on one line, content inside the
    brackets is put on a separate line, and finally content starting with and
    following the closing bracket is put on a separate line.

    Those are called `head`, `body`, and `tail`, respectively. If the split
    produced the same line (all content in `head`) or ended up with an empty `body`
    and the `tail` is just the closing bracket, then it's considered failed.
    """
    tail_len = len(str(tail).strip())
    if not body:
        if tail_len == 0:
            raise CannotSplit("Splitting brackets produced the same line")

        elif tail_len < 3:
            raise CannotSplit(
                f"Splitting brackets on an empty body to save {tail_len} characters is"
                " not worth it"
            )

def _ensure_trailing_comma(
    leaves: list[Leaf], original: Line, opening_bracket: Leaf
) -> bool:
    if not leaves:
        return False
    # Ensure a trailing comma for imports
    if original.is_import:
        return True
    # ...and standalone function arguments
    if not original.is_def:
        return False
    if opening_bracket.value != "(":
        return False
    # Don't add commas if we already have any commas
    if any(
        leaf.type == token.COMMA and not is_part_of_annotation(leaf) for leaf in leaves
    ):
        return False

    # Find a leaf with a parent (comments don't have parents)
    leaf_with_parent = next((leaf for leaf in leaves if leaf.parent), None)
    if leaf_with_parent is None:
        return True
    # Don't add commas inside parenthesized return annotations
    if get_annotation_type(leaf_with_parent) == "return":
        return False
    # Don't add commas inside PEP 604 unions
    if (
        leaf_with_parent.parent
        and leaf_with_parent.parent.next_sibling
        and leaf_with_parent.parent.next_sibling.type == token.VBAR
    ):
        return False
    return True

def bracket_split_build_line(
    leaves: list[Leaf],
    original: Line,
    opening_bracket: Leaf,
    *,
    component: _BracketSplitComponent,
) -> Line:
    """Return a new line with given `leaves` and respective comments from `original`.

    If it's the head component, brackets will be tracked so trailing commas are
    respected.

    If it's the body component, the result line is one-indented inside brackets and as
    such has its first leaf's prefix normalized and a trailing comma added when
    expected.
    """
    result = Line(mode=original.mode, depth=original.depth)
    if component is _BracketSplitComponent.body:
        result.inside_brackets = True
        result.depth += 1
        if _ensure_trailing_comma(leaves, original, opening_bracket):
            for i in range(len(leaves) - 1, -1, -1):
                if leaves[i].type == STANDALONE_COMMENT:
                    continue

                if leaves[i].type != token.COMMA:
                    new_comma = Leaf(token.COMMA, ",")
                    leaves.insert(i + 1, new_comma)
                break

    leaves_to_track: set[LeafID] = set()
    if component is _BracketSplitComponent.head:
        leaves_to_track = get_leaves_inside_matching_brackets(leaves)
    # Populate the line
    for leaf in leaves:
        result.append(
            leaf,
            preformatted=True,
            track_bracket=id(leaf) in leaves_to_track,
        )
        for comment_after in original.comments_after(leaf):
            result.append(comment_after, preformatted=True)
    if component is _BracketSplitComponent.body and should_split_line(
        result, opening_bracket
    ):
        result.should_split_rhs = True
    return result

def dont_increase_indentation(split_func: Transformer) -> Transformer:
    """Normalize prefix of the first leaf in every line returned by `split_func`.

    This is a decorator over relevant split functions.
    """

    @wraps(split_func)
    def split_wrapper(
        line: Line, features: Collection[Feature], mode: Mode
    ) -> Iterator[Line]:
        for split_line in split_func(line, features, mode):
            split_line.leaves[0].prefix = ""
            yield split_line

    return split_wrapper

def _get_last_non_comment_leaf(line: Line) -> Optional[int]:
    for leaf_idx in range(len(line.leaves) - 1, 0, -1):
        if line.leaves[leaf_idx].type != STANDALONE_COMMENT:
            return leaf_idx
    return None

def _can_add_trailing_comma(leaf: Leaf, features: Collection[Feature]) -> bool:
    if is_vararg(leaf, within={syms.typedargslist}):
        return Feature.TRAILING_COMMA_IN_DEF in features
    if is_vararg(leaf, within={syms.arglist, syms.argument}):
        return Feature.TRAILING_COMMA_IN_CALL in features
    return True

def _safe_add_trailing_comma(safe: bool, delimiter_priority: int, line: Line) -> Line:
    if (
        safe
        and delimiter_priority == COMMA_PRIORITY
        and line.leaves[-1].type != token.COMMA
        and line.leaves[-1].type != STANDALONE_COMMENT
    ):
        new_comma = Leaf(token.COMMA, ",")
        line.append(new_comma)
    return line

MIGRATE_COMMENT_DELIMITERS = {STRING_PRIORITY, COMMA_PRIORITY}

@dont_increase_indentation
def delimiter_split(
    line: Line, features: Collection[Feature], mode: Mode
) -> Iterator[Line]:
    """Split according to delimiters of the highest priority.

    If the appropriate Features are given, the split will add trailing commas
    also in function signatures and calls that contain `*` and `**`.
    """
    if len(line.leaves) == 0:
        raise CannotSplit("Line empty") from None
    last_leaf = line.leaves[-1]

    bt = line.bracket_tracker
    try:
        delimiter_priority = bt.max_delimiter_priority(exclude={id(last_leaf)})
    except ValueError:
        raise CannotSplit("No delimiters found") from None

    if (
        delimiter_priority == DOT_PRIORITY
        and bt.delimiter_count_with_priority(delimiter_priority) == 1
    ):
        raise CannotSplit("Splitting a single attribute from its owner looks wrong")

    current_line = Line(
        mode=line.mode, depth=line.depth, inside_brackets=line.inside_brackets
    )
    lowest_depth = sys.maxsize
    trailing_comma_safe = True

    def append_to_line(leaf: Leaf) -> Iterator[Line]:
        """Append `leaf` to current line or to new line if appending impossible."""
        nonlocal current_line
        try:
            current_line.append_safe(leaf, preformatted=True)
        except ValueError:
            yield current_line

            current_line = Line(
                mode=line.mode, depth=line.depth, inside_brackets=line.inside_brackets
            )
            current_line.append(leaf)

    def append_comments(leaf: Leaf) -> Iterator[Line]:
        for comment_after in line.comments_after(leaf):
            yield from append_to_line(comment_after)

    last_non_comment_leaf = _get_last_non_comment_leaf(line)
    for leaf_idx, leaf in enumerate(line.leaves):
        yield from append_to_line(leaf)

        previous_priority = leaf_idx > 0 and bt.delimiters.get(
            id(line.leaves[leaf_idx - 1])
        )
        if (
            previous_priority != delimiter_priority
            or delimiter_priority in MIGRATE_COMMENT_DELIMITERS
        ):
            yield from append_comments(leaf)

        lowest_depth = min(lowest_depth, leaf.bracket_depth)
        if trailing_comma_safe and leaf.bracket_depth == lowest_depth:
            trailing_comma_safe = _can_add_trailing_comma(leaf, features)

        if last_leaf.type == STANDALONE_COMMENT and leaf_idx == last_non_comment_leaf:
            current_line = _safe_add_trailing_comma(
                trailing_comma_safe, delimiter_priority, current_line
            )

        leaf_priority = bt.delimiters.get(id(leaf))
        if leaf_priority == delimiter_priority:
            if (
                leaf_idx + 1 < len(line.leaves)
                and delimiter_priority not in MIGRATE_COMMENT_DELIMITERS
            ):
                yield from append_comments(line.leaves[leaf_idx + 1])

            yield current_line
            current_line = Line(
                mode=line.mode, depth=line.depth, inside_brackets=line.inside_brackets
            )

    if current_line:
        current_line = _safe_add_trailing_comma(
            trailing_comma_safe, delimiter_priority, current_line
        )
        yield current_line

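# Small worked example (an assumption, not from the source): given a bracketed
# line whose body is "a, b, c" with COMMA_PRIORITY as the highest delimiter
# priority, delimiter_split yields one Line per element and may add a trailing
# comma, producing roughly
#     f(
#         a,
#         b,
#         c,
#     )
# once the surrounding right-hand split has placed the brackets on their own lines.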

@dont_increase_indentation
def standalone_comment_split(
    line: Line, features: Collection[Feature], mode: Mode
) -> Iterator[Line]:
    """Split standalone comments from the rest of the line."""
    if not line.contains_standalone_comments():
        raise CannotSplit("Line does not have any standalone comments")

    current_line = Line(
        mode=line.mode, depth=line.depth, inside_brackets=line.inside_brackets
    )

    def append_to_line(leaf: Leaf) -> Iterator[Line]:
        """Append `leaf` to current line or to new line if appending impossible."""
        nonlocal current_line
        try:
            current_line.append_safe(leaf, preformatted=True)
        except ValueError:
            yield current_line

            current_line = Line(
                line.mode, depth=line.depth, inside_brackets=line.inside_brackets
            )
            current_line.append(leaf)

    for leaf in line.leaves:
        yield from append_to_line(leaf)

        for comment_after in line.comments_after(leaf):
            yield from append_to_line(comment_after)

    if current_line:
        yield current_line

def normalize_invisible_parens(  # noqa: C901
    node: Node, parens_after: set[str], *, mode: Mode, features: Collection[Feature]
) -> None:
    """Make existing optional parentheses invisible or create new ones.

    `parens_after` is a set of string leaf values immediately after which parens
    should be put.

    Standardizes on visible parentheses for single-element tuples, and keeps
    existing visible parentheses for other tuples and generator expressions.
    """
    for pc in list_comments(node.prefix, is_endmarker=False):
        if pc.value in FMT_OFF:
            # This `node` has a prefix with `# fmt: off`, don't mess with parens.
            return

    # The multiple context managers grammar has a different pattern, thus this is
    # separate from the for-loop below. This possibly wraps them in invisible parens,
    # and later will be removed in remove_with_parens when needed.
    if node.type == syms.with_stmt:
        _maybe_wrap_cms_in_parens(node, mode, features)

    check_lpar = False
    for index, child in enumerate(list(node.children)):
        # Fixes a bug where invisible parens are not properly stripped from
        # assignment statements that contain type annotations.
        if isinstance(child, Node) and child.type == syms.annassign:
            normalize_invisible_parens(
                child, parens_after=parens_after, mode=mode, features=features
            )

        # Fixes a bug where invisible parens are not properly wrapped around
        # case blocks.
        if isinstance(child, Node) and child.type == syms.case_block:
            normalize_invisible_parens(
                child, parens_after={"case"}, mode=mode, features=features
            )

        # Add parentheses around if guards in case blocks
        if isinstance(child, Node) and child.type == syms.guard:
            normalize_invisible_parens(
                child, parens_after={"if"}, mode=mode, features=features
            )

        # Add parentheses around long tuple unpacking in assignments.
        if (
            index == 0
            and isinstance(child, Node)
            and child.type == syms.testlist_star_expr
        ):
            check_lpar = True

        if check_lpar:
            if (
                child.type == syms.atom
                and node.type == syms.for_stmt
                and isinstance(child.prev_sibling, Leaf)
                and child.prev_sibling.type == token.NAME
                and child.prev_sibling.value == "for"
            ):
                if maybe_make_parens_invisible_in_atom(
                    child,
                    parent=node,
                    mode=mode,
                    features=features,
                    remove_brackets_around_comma=True,
                ):
                    wrap_in_parentheses(node, child, visible=False)
            elif isinstance(child, Node) and node.type == syms.with_stmt:
                remove_with_parens(child, node, mode=mode, features=features)
            elif child.type == syms.atom:
                if maybe_make_parens_invisible_in_atom(
                    child, parent=node, mode=mode, features=features
                ):
                    wrap_in_parentheses(node, child, visible=False)
            elif is_one_tuple(child):
                wrap_in_parentheses(node, child, visible=True)
            elif node.type == syms.import_from:
                _normalize_import_from(node, child, index)
                break
            elif (
                index == 1
                and child.type == token.STAR
                and node.type == syms.except_clause
            ):
                # In except* (PEP 654), the star is actually part of
                # the keyword. So we need to skip the insertion of
                # invisible parentheses to work more precisely.
                continue

            elif (
                isinstance(child, Leaf)
                and child.next_sibling is not None
                and child.next_sibling.type == token.COLON
                and child.value == "case"
            ):
                # A special patch for the "case case:" scenario, where the second
                # occurrence of case will not be parsed as a Python keyword.
                break

            elif not is_multiline_string(child):
                wrap_in_parentheses(node, child, visible=False)

        comma_check = child.type == token.COMMA

        check_lpar = isinstance(child, Leaf) and (
            child.value in parens_after or comma_check
        )

def _normalize_import_from(parent: Node, child: LN, index: int) -> None:
    # "import from" nodes store parentheses directly as part of
    # the statement
    if is_lpar_token(child):
        assert is_rpar_token(parent.children[-1])
        # make parentheses invisible
        child.value = ""
        parent.children[-1].value = ""
    elif child.type != token.STAR:
        # insert invisible parentheses
        parent.insert_child(index, Leaf(token.LPAR, ""))
        parent.append_child(Leaf(token.RPAR, ""))

1523def remove_await_parens(node: Node, mode: Mode, features: Collection[Feature]) -> None: 

1524 if node.children[0].type == token.AWAIT and len(node.children) > 1: 

1525 if ( 

1526 node.children[1].type == syms.atom 

1527 and node.children[1].children[0].type == token.LPAR 

1528 ): 

1529 if maybe_make_parens_invisible_in_atom( 

1530 node.children[1], 

1531 parent=node, 

1532 mode=mode, 

1533 features=features, 

1534 remove_brackets_around_comma=True, 

1535 ): 

1536 wrap_in_parentheses(node, node.children[1], visible=False) 

1537 

1538 # Since await is an expression we shouldn't remove 

1539 # brackets in cases where this would change 

1540 # the AST due to operator precedence. 

1541 # Therefore we only aim to remove brackets around 

1542 # power nodes that aren't also await expressions themselves. 

1543 # https://peps.python.org/pep-0492/#updated-operator-precedence-table 

1544 # N.B. We've still removed any redundant nested brackets though :) 

1545 opening_bracket = cast(Leaf, node.children[1].children[0]) 

1546 closing_bracket = cast(Leaf, node.children[1].children[-1]) 

1547 bracket_contents = node.children[1].children[1] 

1548 if isinstance(bracket_contents, Node) and ( 

1549 bracket_contents.type != syms.power 

1550 or bracket_contents.children[0].type == token.AWAIT 

1551 or any( 

1552 isinstance(child, Leaf) and child.type == token.DOUBLESTAR 

1553 for child in bracket_contents.children 

1554 ) 

1555 ): 

1556 ensure_visible(opening_bracket) 

1557 ensure_visible(closing_bracket) 

1558 

1559 
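# Illustrative sketch, assuming only that the `black` package is installed:
# `remove_await_parens` drops redundant parentheses after `await` but keeps
# them around power expressions, where removing them would change precedence
# (`await x**2` parses as `(await x) ** 2`). The names in `src` are made up.
import black

src = (
    "async def handler(x):\n"
    "    await (fetch())\n"
    "    await (x ** 2)\n"
)
print(black.format_str(src, mode=black.Mode()))
# Expected (hedged): `await fetch()` loses its parentheses, while the parens
# around the power expression remain visible.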

1560def _maybe_wrap_cms_in_parens( 

1561 node: Node, mode: Mode, features: Collection[Feature] 

1562) -> None: 

1563 """When enabled and safe, wrap the multiple context managers in invisible parens. 

1564 

1565 It is only safe when `features` contain Feature.PARENTHESIZED_CONTEXT_MANAGERS. 

1566 """ 

1567 if ( 

1568 Feature.PARENTHESIZED_CONTEXT_MANAGERS not in features 

1569 or len(node.children) <= 2 

1570 # If it's an atom, it's already wrapped in parens. 

1571 or node.children[1].type == syms.atom 

1572 ): 

1573 return 

1574 colon_index: Optional[int] = None 

1575 for i in range(2, len(node.children)): 

1576 if node.children[i].type == token.COLON: 

1577 colon_index = i 

1578 break 

1579 if colon_index is not None: 

1580 lpar = Leaf(token.LPAR, "") 

1581 rpar = Leaf(token.RPAR, "") 

1582 context_managers = node.children[1:colon_index] 

1583 for child in context_managers: 

1584 child.remove() 

1585 # After wrapping, the with_stmt will look like this: 

1586 # with_stmt 

1587 # NAME 'with' 

1588 # atom 

1589 # LPAR '' 

1590 # testlist_gexp 

1591 # ... <-- context_managers 

1592 # /testlist_gexp 

1593 # RPAR '' 

1594 # /atom 

1595 # COLON ':' 

1596 new_child = Node( 

1597 syms.atom, [lpar, Node(syms.testlist_gexp, context_managers), rpar] 

1598 ) 

1599 node.insert_child(1, new_child) 

1600 

1601 
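# Illustrative sketch, assuming only that the `black` package is installed:
# `_maybe_wrap_cms_in_parens` is what allows a long `with` statement to be
# wrapped in parentheses when every targeted Python version supports
# parenthesized context managers (3.9+), so a target version is passed
# explicitly here. All identifiers in `src` are made up.
import black

src = (
    "with make_connection(host, port, timeout) as conn,"
    " open_logfile(path, flags) as log:\n"
    "    pass\n"
)
mode = black.Mode(target_versions={black.TargetVersion.PY310}, line_length=60)
print(black.format_str(src, mode=mode))
# Expected (hedged): the two context managers end up inside visible parens,
# one per line, before the colon.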

1602def remove_with_parens( 

1603 node: Node, parent: Node, mode: Mode, features: Collection[Feature] 

1604) -> None: 

1605 """Recursively hide optional parens in `with` statements.""" 

1606 # Removing all unnecessary parentheses in with statements in one pass is a tad 

1607 # complex as different variations of bracketed statements result in pretty 

1608 # different parse trees: 

1609 # 

1610 # with (open("file")) as f: # this is an asexpr_test 

1611 # ... 

1612 # 

1613 # with (open("file") as f): # this is an atom containing an 

1614 # ... # asexpr_test 

1615 # 

1616 # with (open("file")) as f, (open("file")) as f: # this is asexpr_test, COMMA, 

1617 # ... # asexpr_test 

1618 # 

1619 # with (open("file") as f, open("file") as f): # an atom containing a 

1620 # ... # testlist_gexp which then 

1621 # # contains multiple asexpr_test(s) 

1622 if node.type == syms.atom: 

1623 if maybe_make_parens_invisible_in_atom( 

1624 node, 

1625 parent=parent, 

1626 mode=mode, 

1627 features=features, 

1628 remove_brackets_around_comma=True, 

1629 ): 

1630 wrap_in_parentheses(parent, node, visible=False) 

1631 if isinstance(node.children[1], Node): 

1632 remove_with_parens(node.children[1], node, mode=mode, features=features) 

1633 elif node.type == syms.testlist_gexp: 

1634 for child in node.children: 

1635 if isinstance(child, Node): 

1636 remove_with_parens(child, node, mode=mode, features=features) 

1637 elif node.type == syms.asexpr_test and not any( 

1638 leaf.type == token.COLONEQUAL for leaf in node.leaves() 

1639 ): 

1640 if maybe_make_parens_invisible_in_atom( 

1641 node.children[0], 

1642 parent=node, 

1643 mode=mode, 

1644 features=features, 

1645 remove_brackets_around_comma=True, 

1646 ): 

1647 wrap_in_parentheses(node, node.children[0], visible=False) 

1648 

1649 
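# Illustrative sketch, assuming only that the `black` package is installed:
# `remove_with_parens` recursively hides the optional parentheses in `with`
# statements, so a parenthesized statement that fits on one line loses them.
import black

src = 'with (open("a") as f, open("b") as g):\n    pass\n'
mode = black.Mode(target_versions={black.TargetVersion.PY310})
print(black.format_str(src, mode=mode))
# Expected (hedged): `with open("a") as f, open("b") as g:` on a single line.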

1650def maybe_make_parens_invisible_in_atom( 

1651 node: LN, 

1652 parent: LN, 

1653 mode: Mode, 

1654 features: Collection[Feature], 

1655 remove_brackets_around_comma: bool = False, 

1656) -> bool: 

1657 """If it's safe, make the parens in the atom `node` invisible, recursively. 

1658 Additionally, remove repeated, adjacent invisible parens from the atom `node` 

1659 as they are redundant. 

1660 

1661 Returns whether the node should itself be wrapped in invisible parentheses. 

1662 """ 

1663 if ( 

1664 node.type not in (syms.atom, syms.expr) 

1665 or is_empty_tuple(node) 

1666 or is_one_tuple(node) 

1667 or (is_tuple(node) and parent.type == syms.asexpr_test) 

1668 or ( 

1669 is_tuple(node) 

1670 and parent.type == syms.with_stmt 

1671 and has_sibling_with_type(node, token.COMMA) 

1672 ) 

1673 or (is_yield(node) and parent.type != syms.expr_stmt) 

1674 or ( 

1675 # This condition tries to prevent removing non-optional brackets 

1676 # around a tuple; however, it can be a bit overzealous, so we provide 

1677 # an option to skip this check for `for` and `with` statements. 

1678 not remove_brackets_around_comma 

1679 and max_delimiter_priority_in_atom(node) >= COMMA_PRIORITY 

1680 # Skip this check in preview mode so that parentheses around 

1681 # multiple exception types in `except` and `except*` without `as` 

1682 # can be removed. See PEP 758 for details. 

1683 and not ( 

1684 Preview.remove_parens_around_except_types in mode 

1685 and Feature.UNPARENTHESIZED_EXCEPT_TYPES in features 

1686 # is a tuple 

1687 and is_tuple(node) 

1688 # has a parent node 

1689 and node.parent is not None 

1690 # parent is an except clause 

1691 and node.parent.type == syms.except_clause 

1692 # is not immediately followed by as clause 

1693 and not ( 

1694 node.next_sibling is not None 

1695 and is_name_token(node.next_sibling) 

1696 and node.next_sibling.value == "as" 

1697 ) 

1698 ) 

1699 ) 

1700 or is_tuple_containing_walrus(node) 

1701 or is_tuple_containing_star(node) 

1702 or is_generator(node) 

1703 ): 

1704 return False 

1705 

1706 if is_walrus_assignment(node): 

1707 if parent.type in [ 

1708 syms.annassign, 

1709 syms.expr_stmt, 

1710 syms.assert_stmt, 

1711 syms.return_stmt, 

1712 syms.except_clause, 

1713 syms.funcdef, 

1714 syms.with_stmt, 

1715 syms.testlist_gexp, 

1716 syms.tname, 

1717 # these ones aren't useful to end users, but they do please fuzzers 

1718 syms.for_stmt, 

1719 syms.del_stmt, 

1721 ]: 

1722 return False 

1723 

1724 first = node.children[0] 

1725 last = node.children[-1] 

1726 if is_lpar_token(first) and is_rpar_token(last): 

1727 middle = node.children[1] 

1728 # make parentheses invisible 

1729 if ( 

1730 # If the prefix of `middle` includes a type comment with 

1731 # ignore annotation, then we do not remove the parentheses 

1732 not is_type_ignore_comment_string(middle.prefix.strip()) 

1733 ): 

1734 first.value = "" 

1735 last.value = "" 

1736 maybe_make_parens_invisible_in_atom( 

1737 middle, 

1738 parent=parent, 

1739 mode=mode, 

1740 features=features, 

1741 remove_brackets_around_comma=remove_brackets_around_comma, 

1742 ) 

1743 

1744 if is_atom_with_invisible_parens(middle): 

1745 # Strip the invisible parens from `middle` by replacing 

1746 # it with the child in-between the invisible parens 

1747 middle.replace(middle.children[1]) 

1748 

1749 if middle.children[0].prefix.strip(): 

1750 # Preserve comments before first paren 

1751 middle.children[1].prefix = ( 

1752 middle.children[0].prefix + middle.children[1].prefix 

1753 ) 

1754 

1755 if middle.children[-1].prefix.strip(): 

1756 # Preserve comments before last paren 

1757 last.prefix = middle.children[-1].prefix + last.prefix 

1758 

1759 return False 

1760 

1761 return True 

1762 

1763 
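# Illustrative sketch, assuming only that the `black` package is installed:
# the guard clauses in `maybe_make_parens_invisible_in_atom` are why some
# parentheses survive formatting (one-tuples, walrus assignments, generators)
# while plainly redundant ones are removed. The names in `src` are made up.
import black

src = (
    "def f():\n"
    "    (x := compute())\n"  # walrus at statement level: parens must stay
    "    return (1)\n"  # redundant parens: expected to be removed
    "    return (1,)\n"  # one-tuple: parens expected to stay
)
print(black.format_str(src, mode=black.Mode()))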

1764def should_split_line(line: Line, opening_bracket: Leaf) -> bool: 

1765 """Should `line` be immediately split with `delimiter_split()` after RHS?""" 

1766 

1767 if not (opening_bracket.parent and opening_bracket.value in "[{("): 

1768 return False 

1769 

1770 # We're essentially checking whether the body is delimited by commas and there's 

1771 # more than one of them (we exclude the trailing comma; if the delimiter priority 

1772 # is still commas, that means there's more). 

1773 exclude = set() 

1774 trailing_comma = False 

1775 try: 

1776 last_leaf = line.leaves[-1] 

1777 if last_leaf.type == token.COMMA: 

1778 trailing_comma = True 

1779 exclude.add(id(last_leaf)) 

1780 max_priority = line.bracket_tracker.max_delimiter_priority(exclude=exclude) 

1781 except (IndexError, ValueError): 

1782 return False 

1783 

1784 return max_priority == COMMA_PRIORITY and ( 

1785 (line.mode.magic_trailing_comma and trailing_comma) 

1786 # always explode imports 

1787 or opening_bracket.parent.type in {syms.atom, syms.import_from} 

1788 ) 

1789 

1790 
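# Illustrative sketch, assuming only that the `black` package is installed:
# `should_split_line` is the check behind the "magic trailing comma": a
# pre-existing trailing comma forces the bracket pair to explode even when
# the call would fit on one line.
import black

compact = black.format_str("foo(a, b)\n", mode=black.Mode())
exploded = black.format_str("foo(a, b,)\n", mode=black.Mode())
print(compact)   # expected (hedged): stays on one line
print(exploded)  # expected (hedged): one argument per line, trailing comma kept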

1791def generate_trailers_to_omit(line: Line, line_length: int) -> Iterator[set[LeafID]]: 

1792 """Generate sets of closing bracket IDs that should be omitted in a RHS. 

1793 

1794 Brackets can be omitted if the entire trailer up to and including 

1795 a preceding closing bracket fits in one line. 

1796 

1797 Yielded sets are cumulative (they contain results of previous yields, too). The 

1798 first set is empty, unless the line should explode, in which case bracket pairs 

1799 up to the one that needs to explode are omitted. 

1800 """ 

1801 

1802 omit: set[LeafID] = set() 

1803 if not line.magic_trailing_comma: 

1804 yield omit 

1805 

1806 length = 4 * line.depth 

1807 opening_bracket: Optional[Leaf] = None 

1808 closing_bracket: Optional[Leaf] = None 

1809 inner_brackets: set[LeafID] = set() 

1810 for index, leaf, leaf_length in line.enumerate_with_length(is_reversed=True): 

1811 length += leaf_length 

1812 if length > line_length: 

1813 break 

1814 

1815 has_inline_comment = leaf_length > len(leaf.value) + len(leaf.prefix) 

1816 if leaf.type == STANDALONE_COMMENT or has_inline_comment: 

1817 break 

1818 

1819 if opening_bracket: 

1820 if leaf is opening_bracket: 

1821 opening_bracket = None 

1822 elif leaf.type in CLOSING_BRACKETS: 

1823 prev = line.leaves[index - 1] if index > 0 else None 

1824 if ( 

1825 prev 

1826 and prev.type == token.COMMA 

1827 and leaf.opening_bracket is not None 

1828 and not is_one_sequence_between( 

1829 leaf.opening_bracket, leaf, line.leaves 

1830 ) 

1831 ): 

1832 # Never omit bracket pairs with trailing commas. 

1833 # We need to explode on those. 

1834 break 

1835 

1836 inner_brackets.add(id(leaf)) 

1837 elif leaf.type in CLOSING_BRACKETS: 

1838 prev = line.leaves[index - 1] if index > 0 else None 

1839 if prev and prev.type in OPENING_BRACKETS: 

1840 # Empty brackets would fail a split, so treat them as "inner" 

1841 # brackets (i.e. only add them to the `omit` set if another 

1842 # pair of brackets was good enough). 

1843 inner_brackets.add(id(leaf)) 

1844 continue 

1845 

1846 if closing_bracket: 

1847 omit.add(id(closing_bracket)) 

1848 omit.update(inner_brackets) 

1849 inner_brackets.clear() 

1850 yield omit 

1851 

1852 if ( 

1853 prev 

1854 and prev.type == token.COMMA 

1855 and leaf.opening_bracket is not None 

1856 and not is_one_sequence_between(leaf.opening_bracket, leaf, line.leaves) 

1857 ): 

1858 # Never omit bracket pairs with trailing commas. 

1859 # We need to explode on those. 

1860 break 

1861 

1862 if leaf.value: 

1863 opening_bracket = leaf.opening_bracket 

1864 closing_bracket = leaf 

1865 

1866 
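# Illustrative sketch, assuming only that the `black` package is installed:
# `generate_trailers_to_omit` is the reason a long attribute/call chain is
# normally split only inside its last bracket pair rather than at every
# trailer. The identifiers in `src` are made up.
import black

src = (
    "result = client.sessions.get(session_id).messages.filter("
    "kind, author, created_after)\n"
)
print(black.format_str(src, mode=black.Mode(line_length=60)))
# Expected (hedged): the chain stays on the first line and only the final
# `filter(...)` arguments move onto their own line.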

1867def run_transformer( 

1868 line: Line, 

1869 transform: Transformer, 

1870 mode: Mode, 

1871 features: Collection[Feature], 

1872 *, 

1873 line_str: str = "", 

1874) -> list[Line]: 

1875 if not line_str: 

1876 line_str = line_to_string(line) 

1877 result: list[Line] = [] 

1878 for transformed_line in transform(line, features, mode): 

1879 if str(transformed_line).strip("\n") == line_str: 

1880 raise CannotTransform("Line transformer returned an unchanged result") 

1881 

1882 result.extend(transform_line(transformed_line, mode=mode, features=features)) 

1883 

1884 features_set = set(features) 

1885 if ( 

1886 Feature.FORCE_OPTIONAL_PARENTHESES in features_set 

1887 or transform.__class__.__name__ != "rhs" 

1888 or not line.bracket_tracker.invisible 

1889 or any(bracket.value for bracket in line.bracket_tracker.invisible) 

1890 or line.contains_multiline_strings() 

1891 or result[0].contains_uncollapsable_type_comments() 

1892 or result[0].contains_unsplittable_type_ignore() 

1893 or is_line_short_enough(result[0], mode=mode) 

1894 # If any leaves have no parents (which _can_ occur since 

1895 # `transform(line)` potentially destroys the line's underlying node 

1896 # structure), then we can't proceed. Doing so would cause the below 

1897 # call to `append_leaves()` to fail. 

1898 or any(leaf.parent is None for leaf in line.leaves) 

1899 ): 

1900 return result 

1901 

1902 line_copy = line.clone() 

1903 append_leaves(line_copy, line, line.leaves) 

1904 features_fop = features_set | {Feature.FORCE_OPTIONAL_PARENTHESES} 

1905 second_opinion = run_transformer( 

1906 line_copy, transform, mode, features_fop, line_str=line_str 

1907 ) 

1908 if all(is_line_short_enough(ln, mode=mode) for ln in second_opinion): 

1909 result = second_opinion 

1910 return result
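# Illustrative sketch, assuming only that the `black` package is installed:
# `run_transformer` re-runs the right-hand-side split with
# FORCE_OPTIONAL_PARENTHESES as a "second opinion" and keeps that result when
# every produced line fits. One visible effect is a long right-hand side
# being wrapped in its own parentheses instead of breaking at the last
# bracket. The identifiers in `src` are made up.
import black

src = "result = first_long_value + second_long_value + build_item(x)\n"
print(black.format_str(src, mode=black.Mode(line_length=50)))
# Expected (hedged): the right-hand side ends up inside parentheses, split
# across indented lines, rather than splitting inside `build_item(...)`.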