Coverage for /pythoncovmergedfiles/medio/medio/usr/local/lib/python3.11/site-packages/black/linegen.py: 11%


788 statements  

1""" 

2Generating lines of code. 

3""" 

4 

5import re 

6import sys 

7from collections.abc import Collection, Iterator 

8from dataclasses import replace 

9from enum import Enum, auto 

10from functools import partial, wraps 

11from typing import Optional, Union, cast 

12 

13from black.brackets import ( 

14 COMMA_PRIORITY, 

15 DOT_PRIORITY, 

16 STRING_PRIORITY, 

17 get_leaves_inside_matching_brackets, 

18 max_delimiter_priority_in_atom, 

19) 

20from black.comments import FMT_OFF, generate_comments, list_comments 

21from black.lines import ( 

22 Line, 

23 RHSResult, 

24 append_leaves, 

25 can_be_split, 

26 can_omit_invisible_parens, 

27 is_line_short_enough, 

28 line_to_string, 

29) 

30from black.mode import Feature, Mode, Preview 

31from black.nodes import ( 

32 ASSIGNMENTS, 

33 BRACKETS, 

34 CLOSING_BRACKETS, 

35 OPENING_BRACKETS, 

36 STANDALONE_COMMENT, 

37 STATEMENT, 

38 WHITESPACE, 

39 Visitor, 

40 ensure_visible, 

41 fstring_to_string, 

42 get_annotation_type, 

43 has_sibling_with_type, 

44 is_arith_like, 

45 is_async_stmt_or_funcdef, 

46 is_atom_with_invisible_parens, 

47 is_docstring, 

48 is_empty_tuple, 

49 is_generator, 

50 is_lpar_token, 

51 is_multiline_string, 

52 is_name_token, 

53 is_one_sequence_between, 

54 is_one_tuple, 

55 is_parent_function_or_class, 

56 is_part_of_annotation, 

57 is_rpar_token, 

58 is_stub_body, 

59 is_stub_suite, 

60 is_tuple, 

61 is_tuple_containing_star, 

62 is_tuple_containing_walrus, 

63 is_type_ignore_comment_string, 

64 is_vararg, 

65 is_walrus_assignment, 

66 is_yield, 

67 syms, 

68 wrap_in_parentheses, 

69) 

70from black.numerics import normalize_numeric_literal 

71from black.strings import ( 

72 fix_multiline_docstring, 

73 get_string_prefix, 

74 normalize_string_prefix, 

75 normalize_string_quotes, 

76 normalize_unicode_escape_sequences, 

77) 

78from black.trans import ( 

79 CannotTransform, 

80 StringMerger, 

81 StringParenStripper, 

82 StringParenWrapper, 

83 StringSplitter, 

84 Transformer, 

85 hug_power_op, 

86) 

87from blib2to3.pgen2 import token 

88from blib2to3.pytree import Leaf, Node 

89 

90# types 

91LeafID = int 

92LN = Union[Leaf, Node] 

93 

94 

95class CannotSplit(CannotTransform): 

96 """A readable split that fits the allotted line length is impossible.""" 

97 

98 

99# This isn't a dataclass because @dataclass + Generic breaks mypyc. 

100# See also https://github.com/mypyc/mypyc/issues/827. 

101class LineGenerator(Visitor[Line]): 

102 """Generates reformatted Line objects. Empty lines are not emitted. 

103 

104 Note: destroys the tree it's visiting by mutating prefixes of its leaves 

105 in ways that will no longer stringify to valid Python code on the tree. 

106 """ 

107 

108 def __init__(self, mode: Mode, features: Collection[Feature]) -> None: 

109 self.mode = mode 

110 self.features = features 

111 self.current_line: Line 

112 self.__post_init__() 

113 

114 def line(self, indent: int = 0) -> Iterator[Line]: 

115 """Generate a line. 

116 

117 If the line is empty, only emit if it makes sense. 

118 If the line is too long, split it first and then generate. 

119 

120 If any lines were generated, set up a new current_line. 

121 """ 

122 if not self.current_line: 

123 self.current_line.depth += indent 

124 return # Line is empty, don't emit. Creating a new one is unnecessary.

125 

126 if len(self.current_line.leaves) == 1 and is_async_stmt_or_funcdef( 

127 self.current_line.leaves[0] 

128 ): 

129 # Special case for async def/for/with statements. `visit_async_stmt` 

130 # adds an `ASYNC` leaf then visits the child def/for/with statement 

131 # nodes. Line yields from those nodes shouldn't treat the former 

132 # `ASYNC` leaf as a complete line. 

133 return 

134 

135 complete_line = self.current_line 

136 self.current_line = Line(mode=self.mode, depth=complete_line.depth + indent) 

137 yield complete_line 

138 

139 def visit_default(self, node: LN) -> Iterator[Line]: 

140 """Default `visit_*()` implementation. Recurses to children of `node`.""" 

141 if isinstance(node, Leaf): 

142 any_open_brackets = self.current_line.bracket_tracker.any_open_brackets() 

143 for comment in generate_comments(node): 

144 if any_open_brackets: 

145 # any comment within brackets is subject to splitting 

146 self.current_line.append(comment) 

147 elif comment.type == token.COMMENT: 

148 # regular trailing comment 

149 self.current_line.append(comment) 

150 yield from self.line() 

151 

152 else: 

153 # regular standalone comment 

154 yield from self.line() 

155 

156 self.current_line.append(comment) 

157 yield from self.line() 

158 

159 if any_open_brackets: 

160 node.prefix = "" 

161 if node.type not in WHITESPACE: 

162 self.current_line.append(node) 

163 yield from super().visit_default(node) 

164 

165 def visit_test(self, node: Node) -> Iterator[Line]: 

166 """Visit an `x if y else z` test""" 

167 

168 already_parenthesized = ( 

169 node.prev_sibling and node.prev_sibling.type == token.LPAR 

170 ) 

171 

172 if not already_parenthesized: 

173 # Similar to logic in wrap_in_parentheses 

174 lpar = Leaf(token.LPAR, "") 

175 rpar = Leaf(token.RPAR, "") 

176 prefix = node.prefix 

177 node.prefix = "" 

178 lpar.prefix = prefix 

179 node.insert_child(0, lpar) 

180 node.append_child(rpar) 

181 

182 yield from self.visit_default(node) 

183 

184 def visit_INDENT(self, node: Leaf) -> Iterator[Line]: 

185 """Increase indentation level, maybe yield a line.""" 

186 # In blib2to3 INDENT never holds comments. 

187 yield from self.line(+1) 

188 yield from self.visit_default(node) 

189 

190 def visit_DEDENT(self, node: Leaf) -> Iterator[Line]: 

191 """Decrease indentation level, maybe yield a line.""" 

192 # The current line might still wait for trailing comments. At DEDENT time 

193 # there won't be any (they would be prefixes on the preceding NEWLINE). 

194 # Emit the line then. 

195 yield from self.line() 

196 

197 # While DEDENT has no value, its prefix may contain standalone comments 

198 # that belong to the current indentation level. Get 'em. 

199 yield from self.visit_default(node) 

200 

201 # Finally, emit the dedent. 

202 yield from self.line(-1) 

203 

204 def visit_stmt( 

205 self, node: Node, keywords: set[str], parens: set[str] 

206 ) -> Iterator[Line]: 

207 """Visit a statement. 

208 

209 This implementation is shared for `if`, `while`, `for`, `try`, `except`, 

210 `def`, `with`, `class`, `assert`, and assignments. 

211 

212 The relevant Python language `keywords` for a given statement will be 

213 NAME leaves within it. This method puts those on a separate line.

214 

215 `parens` holds a set of string leaf values immediately after which 

216 invisible parens should be put. 

217 """ 

218 normalize_invisible_parens( 

219 node, parens_after=parens, mode=self.mode, features=self.features 

220 ) 

221 for child in node.children: 

222 if is_name_token(child) and child.value in keywords: 

223 yield from self.line() 

224 

225 yield from self.visit(child) 

226 

227 def visit_typeparams(self, node: Node) -> Iterator[Line]: 

228 yield from self.visit_default(node) 

229 node.children[0].prefix = "" 

230 

231 def visit_typevartuple(self, node: Node) -> Iterator[Line]: 

232 yield from self.visit_default(node) 

233 node.children[1].prefix = "" 

234 

235 def visit_paramspec(self, node: Node) -> Iterator[Line]: 

236 yield from self.visit_default(node) 

237 node.children[1].prefix = "" 

238 

239 def visit_dictsetmaker(self, node: Node) -> Iterator[Line]: 

240 if Preview.wrap_long_dict_values_in_parens in self.mode: 

241 for i, child in enumerate(node.children): 

242 if i == 0: 

243 continue 

244 if node.children[i - 1].type == token.COLON: 

245 if ( 

246 child.type == syms.atom 

247 and child.children[0].type in OPENING_BRACKETS 

248 and not is_walrus_assignment(child) 

249 ): 

250 maybe_make_parens_invisible_in_atom( 

251 child, 

252 parent=node, 

253 mode=self.mode, 

254 features=self.features, 

255 remove_brackets_around_comma=False, 

256 ) 

257 else: 

258 wrap_in_parentheses(node, child, visible=False) 

259 yield from self.visit_default(node) 

260 

261 def visit_funcdef(self, node: Node) -> Iterator[Line]: 

262 """Visit function definition.""" 

263 yield from self.line() 

264 

265 # Remove redundant brackets around return type annotation. 

266 is_return_annotation = False 

267 for child in node.children: 

268 if child.type == token.RARROW: 

269 is_return_annotation = True 

270 elif is_return_annotation: 

271 if child.type == syms.atom and child.children[0].type == token.LPAR: 

272 if maybe_make_parens_invisible_in_atom( 

273 child, 

274 parent=node, 

275 mode=self.mode, 

276 features=self.features, 

277 remove_brackets_around_comma=False, 

278 ): 

279 wrap_in_parentheses(node, child, visible=False) 

280 else: 

281 wrap_in_parentheses(node, child, visible=False) 

282 is_return_annotation = False 

283 

284 for child in node.children: 

285 yield from self.visit(child) 

286 

287 def visit_match_case(self, node: Node) -> Iterator[Line]: 

288 """Visit either a match or case statement.""" 

289 normalize_invisible_parens( 

290 node, parens_after=set(), mode=self.mode, features=self.features 

291 ) 

292 

293 yield from self.line() 

294 for child in node.children: 

295 yield from self.visit(child) 

296 

297 def visit_suite(self, node: Node) -> Iterator[Line]: 

298 """Visit a suite.""" 

299 if is_stub_suite(node): 

300 yield from self.visit(node.children[2]) 

301 else: 

302 yield from self.visit_default(node) 

303 

304 def visit_simple_stmt(self, node: Node) -> Iterator[Line]: 

305 """Visit a statement without nested statements.""" 

306 prev_type: Optional[int] = None 

307 for child in node.children: 

308 if (prev_type is None or prev_type == token.SEMI) and is_arith_like(child): 

309 wrap_in_parentheses(node, child, visible=False) 

310 prev_type = child.type 

311 

312 if node.parent and node.parent.type in STATEMENT: 

313 if is_parent_function_or_class(node) and is_stub_body(node): 

314 yield from self.visit_default(node) 

315 else: 

316 yield from self.line(+1) 

317 yield from self.visit_default(node) 

318 yield from self.line(-1) 

319 

320 else: 

321 if node.parent and is_stub_suite(node.parent): 

322 node.prefix = "" 

323 yield from self.visit_default(node) 

324 return 

325 yield from self.line() 

326 yield from self.visit_default(node) 

327 

328 def visit_async_stmt(self, node: Node) -> Iterator[Line]: 

329 """Visit `async def`, `async for`, `async with`.""" 

330 yield from self.line() 

331 

332 children = iter(node.children) 

333 for child in children: 

334 yield from self.visit(child) 

335 

336 if child.type == token.ASYNC or child.type == STANDALONE_COMMENT: 

337 # STANDALONE_COMMENT happens when `# fmt: skip` is applied on the async 

338 # line. 

339 break 

340 

341 internal_stmt = next(children) 

342 yield from self.visit(internal_stmt) 

343 

344 def visit_decorators(self, node: Node) -> Iterator[Line]: 

345 """Visit decorators.""" 

346 for child in node.children: 

347 yield from self.line() 

348 yield from self.visit(child) 

349 

350 def visit_power(self, node: Node) -> Iterator[Line]: 

351 for idx, leaf in enumerate(node.children[:-1]): 

352 next_leaf = node.children[idx + 1] 

353 

354 if not isinstance(leaf, Leaf): 

355 continue 

356 

357 value = leaf.value.lower() 

358 if ( 

359 leaf.type == token.NUMBER 

360 and next_leaf.type == syms.trailer 

361 # Ensure that we are in an attribute trailer 

362 and next_leaf.children[0].type == token.DOT 

363 # It shouldn't wrap hexadecimal, binary and octal literals 

364 and not value.startswith(("0x", "0b", "0o")) 

365 # It shouldn't wrap complex literals 

366 and "j" not in value 

367 ): 

368 wrap_in_parentheses(node, leaf) 

369 

370 remove_await_parens(node, mode=self.mode, features=self.features) 

371 

372 yield from self.visit_default(node) 

373 

374 def visit_SEMI(self, leaf: Leaf) -> Iterator[Line]: 

375 """Remove a semicolon and put the other statement on a separate line.""" 

376 yield from self.line() 

377 

378 def visit_ENDMARKER(self, leaf: Leaf) -> Iterator[Line]: 

379 """End of file. Process outstanding comments and end with a newline.""" 

380 yield from self.visit_default(leaf) 

381 yield from self.line() 

382 

383 def visit_STANDALONE_COMMENT(self, leaf: Leaf) -> Iterator[Line]: 

384 if not self.current_line.bracket_tracker.any_open_brackets(): 

385 yield from self.line() 

386 yield from self.visit_default(leaf) 

387 

388 def visit_factor(self, node: Node) -> Iterator[Line]: 

389 """Force parentheses between a unary op and a binary power: 

390 

391 -2 ** 8 -> -(2 ** 8) 

392 """ 

393 _operator, operand = node.children 

394 if ( 

395 operand.type == syms.power 

396 and len(operand.children) == 3 

397 and operand.children[1].type == token.DOUBLESTAR 

398 ): 

399 lpar = Leaf(token.LPAR, "(") 

400 rpar = Leaf(token.RPAR, ")") 

401 index = operand.remove() or 0 

402 node.insert_child(index, Node(syms.atom, [lpar, operand, rpar])) 

403 yield from self.visit_default(node) 

404 

405 def visit_tname(self, node: Node) -> Iterator[Line]: 

406 """ 

407 Add potential parentheses around types in function parameter lists to be made 

408 into real parentheses in case the type hint is too long to fit on a line.

409 Examples: 

410 def foo(a: int, b: float = 7): ... 

411 

412 -> 

413 

414 def foo(a: (int), b: (float) = 7): ... 

415 """ 

416 assert len(node.children) == 3 

417 if maybe_make_parens_invisible_in_atom( 

418 node.children[2], parent=node, mode=self.mode, features=self.features 

419 ): 

420 wrap_in_parentheses(node, node.children[2], visible=False) 

421 

422 yield from self.visit_default(node) 

423 

424 def visit_STRING(self, leaf: Leaf) -> Iterator[Line]: 

425 normalize_unicode_escape_sequences(leaf) 

426 

427 if is_docstring(leaf) and not re.search(r"\\\s*\n", leaf.value): 

428 # We're ignoring docstrings with backslash newline escapes because changing 

429 # indentation of those changes the AST representation of the code. 

430 if self.mode.string_normalization: 

431 docstring = normalize_string_prefix(leaf.value) 

432 # We handle string normalization at the end of this method, but since 

433 # what we do right now acts differently depending on quote style (ex. 

434 # see padding logic below), there's a possibility for unstable 

435 # formatting. To avoid a situation where this function formats a 

436 # docstring differently on the second pass, normalize it early. 

437 docstring = normalize_string_quotes(docstring) 

438 else: 

439 docstring = leaf.value 

440 prefix = get_string_prefix(docstring) 

441 docstring = docstring[len(prefix) :] # Remove the prefix 

442 quote_char = docstring[0] 

443 # A natural way to remove the outer quotes is to do: 

444 # docstring = docstring.strip(quote_char) 

445 # but that breaks on """""x""" (which is '""x'). 

446 # So we actually need to remove the first character and the next two 

447 # characters but only if they are the same as the first. 

448 quote_len = 1 if docstring[1] != quote_char else 3 

449 docstring = docstring[quote_len:-quote_len] 

450 docstring_started_empty = not docstring 

451 indent = " " * 4 * self.current_line.depth 

452 

453 if is_multiline_string(leaf): 

454 docstring = fix_multiline_docstring(docstring, indent) 

455 else: 

456 docstring = docstring.strip() 

457 

458 has_trailing_backslash = False 

459 if docstring: 

460 # Add some padding if the docstring starts / ends with a quote mark. 

461 if docstring[0] == quote_char: 

462 docstring = " " + docstring 

463 if docstring[-1] == quote_char: 

464 docstring += " " 

465 if docstring[-1] == "\\": 

466 backslash_count = len(docstring) - len(docstring.rstrip("\\")) 

467 if backslash_count % 2: 

468 # Odd number of trailing backslashes, add some padding to

469 # avoid escaping the closing string quote. 

470 docstring += " " 

471 has_trailing_backslash = True 

472 elif not docstring_started_empty: 

473 docstring = " " 

474 

475 # We could enforce triple quotes at this point. 

476 quote = quote_char * quote_len 

477 

478 # It's invalid to put closing single-character quotes on a new line. 

479 if quote_len == 3: 

480 # We need to find the length of the last line of the docstring 

481 # to find if we can add the closing quotes to the line without 

482 # exceeding the maximum line length. 

483 # If docstring is one line, we don't put the closing quotes on a 

484 # separate line because it looks ugly (#3320). 

485 lines = docstring.splitlines() 

486 last_line_length = len(lines[-1]) if docstring else 0 

487 

488 # If adding closing quotes would cause the last line to exceed 

489 # the maximum line length, and the closing quote is not 

490 # prefixed by a newline, then put a line break before

491 # the closing quotes 

492 if ( 

493 len(lines) > 1 

494 and last_line_length + quote_len > self.mode.line_length 

495 and len(indent) + quote_len <= self.mode.line_length 

496 and not has_trailing_backslash 

497 ): 

498 if leaf.value[-1 - quote_len] == "\n": 

499 leaf.value = prefix + quote + docstring + quote 

500 else: 

501 leaf.value = prefix + quote + docstring + "\n" + indent + quote 

502 else: 

503 leaf.value = prefix + quote + docstring + quote 

504 else: 

505 leaf.value = prefix + quote + docstring + quote 

506 

507 if self.mode.string_normalization and leaf.type == token.STRING: 

508 leaf.value = normalize_string_prefix(leaf.value) 

509 leaf.value = normalize_string_quotes(leaf.value) 

510 yield from self.visit_default(leaf) 

511 

512 def visit_NUMBER(self, leaf: Leaf) -> Iterator[Line]: 

513 normalize_numeric_literal(leaf) 

514 yield from self.visit_default(leaf) 

515 

516 def visit_atom(self, node: Node) -> Iterator[Line]: 

517 """Visit any atom""" 

518 if len(node.children) == 3: 

519 first = node.children[0] 

520 last = node.children[-1] 

521 if (first.type == token.LSQB and last.type == token.RSQB) or ( 

522 first.type == token.LBRACE and last.type == token.RBRACE 

523 ): 

524 # Lists or sets of one item 

525 maybe_make_parens_invisible_in_atom( 

526 node.children[1], 

527 parent=node, 

528 mode=self.mode, 

529 features=self.features, 

530 ) 

531 

532 yield from self.visit_default(node) 

533 

534 def visit_fstring(self, node: Node) -> Iterator[Line]: 

535 # currently we don't want to format and split f-strings at all. 

536 string_leaf = fstring_to_string(node) 

537 node.replace(string_leaf) 

538 if "\\" in string_leaf.value and any( 

539 "\\" in str(child) 

540 for child in node.children 

541 if child.type == syms.fstring_replacement_field 

542 ): 

543 # string normalization doesn't account for nested quotes, 

544 # causing breakages. skip normalization when nested quotes exist 

545 yield from self.visit_default(string_leaf) 

546 return 

547 yield from self.visit_STRING(string_leaf) 

548 

549 # TODO: Uncomment Implementation to format f-string children 

550 # fstring_start = node.children[0] 

551 # fstring_end = node.children[-1] 

552 # assert isinstance(fstring_start, Leaf) 

553 # assert isinstance(fstring_end, Leaf) 

554 

555 # quote_char = fstring_end.value[0] 

556 # quote_idx = fstring_start.value.index(quote_char) 

557 # prefix, quote = ( 

558 # fstring_start.value[:quote_idx], 

559 # fstring_start.value[quote_idx:] 

560 # ) 

561 

562 # if not is_docstring(node, self.mode): 

563 # prefix = normalize_string_prefix(prefix) 

564 

565 # assert quote == fstring_end.value 

566 

567 # is_raw_fstring = "r" in prefix or "R" in prefix 

568 # middles = [ 

569 # leaf 

570 # for leaf in node.leaves() 

571 # if leaf.type == token.FSTRING_MIDDLE 

572 # ] 

573 

574 # if self.mode.string_normalization: 

575 # middles, quote = normalize_fstring_quotes(quote, middles, is_raw_fstring) 

576 

577 # fstring_start.value = prefix + quote 

578 # fstring_end.value = quote 

579 

580 # yield from self.visit_default(node) 

581 

582 def visit_comp_for(self, node: Node) -> Iterator[Line]: 

583 if Preview.wrap_comprehension_in in self.mode: 

584 normalize_invisible_parens( 

585 node, parens_after={"in"}, mode=self.mode, features=self.features 

586 ) 

587 yield from self.visit_default(node) 

588 

589 def visit_old_comp_for(self, node: Node) -> Iterator[Line]: 

590 yield from self.visit_comp_for(node) 

591 

592 def __post_init__(self) -> None: 

593 """You are in a twisty little maze of passages.""" 

594 self.current_line = Line(mode=self.mode) 

595 

596 v = self.visit_stmt 

597 Ø: set[str] = set() 

598 self.visit_assert_stmt = partial(v, keywords={"assert"}, parens={"assert", ","}) 

599 self.visit_if_stmt = partial( 

600 v, keywords={"if", "else", "elif"}, parens={"if", "elif"} 

601 ) 

602 self.visit_while_stmt = partial(v, keywords={"while", "else"}, parens={"while"}) 

603 self.visit_for_stmt = partial(v, keywords={"for", "else"}, parens={"for", "in"}) 

604 self.visit_try_stmt = partial( 

605 v, keywords={"try", "except", "else", "finally"}, parens=Ø 

606 ) 

607 self.visit_except_clause = partial(v, keywords={"except"}, parens={"except"}) 

608 self.visit_with_stmt = partial(v, keywords={"with"}, parens={"with"}) 

609 self.visit_classdef = partial(v, keywords={"class"}, parens=Ø) 

610 

611 self.visit_expr_stmt = partial(v, keywords=Ø, parens=ASSIGNMENTS) 

612 self.visit_return_stmt = partial(v, keywords={"return"}, parens={"return"}) 

613 self.visit_import_from = partial(v, keywords=Ø, parens={"import"}) 

614 self.visit_del_stmt = partial(v, keywords=Ø, parens={"del"}) 

615 self.visit_async_funcdef = self.visit_async_stmt 

616 self.visit_decorated = self.visit_decorators 

617 

618 # PEP 634 

619 self.visit_match_stmt = self.visit_match_case 

620 self.visit_case_block = self.visit_match_case 

621 self.visit_guard = partial(v, keywords=Ø, parens={"if"}) 

622 

623 

624def _hugging_power_ops_line_to_string( 

625 line: Line, 

626 features: Collection[Feature], 

627 mode: Mode, 

628) -> Optional[str]: 

629 try: 

630 return line_to_string(next(hug_power_op(line, features, mode))) 

631 except CannotTransform: 

632 return None 

633 

634 

635def transform_line( 

636 line: Line, mode: Mode, features: Collection[Feature] = () 

637) -> Iterator[Line]: 

638 """Transform a `line`, potentially splitting it into many lines. 

639 

640 They should fit in the allotted `line_length` but might not be able to. 

641 

642 `features` are syntactical features that may be used in the output. 

643 """ 

644 if line.is_comment: 

645 yield line 

646 return 

647 

648 line_str = line_to_string(line) 

649 

650 # We need the line string when power operators are hugging to determine if we should 

651 # split the line. Default to line_str if no power operators are present on the line.

652 line_str_hugging_power_ops = ( 

653 _hugging_power_ops_line_to_string(line, features, mode) or line_str 

654 ) 

655 

656 ll = mode.line_length 

657 sn = mode.string_normalization 

658 string_merge = StringMerger(ll, sn) 

659 string_paren_strip = StringParenStripper(ll, sn) 

660 string_split = StringSplitter(ll, sn) 

661 string_paren_wrap = StringParenWrapper(ll, sn) 

662 

663 transformers: list[Transformer] 

664 if ( 

665 not line.contains_uncollapsable_type_comments() 

666 and not line.should_split_rhs 

667 and not line.magic_trailing_comma 

668 and ( 

669 is_line_short_enough(line, mode=mode, line_str=line_str_hugging_power_ops) 

670 or line.contains_unsplittable_type_ignore() 

671 ) 

672 and not (line.inside_brackets and line.contains_standalone_comments()) 

673 and not line.contains_implicit_multiline_string_with_comments() 

674 ): 

675 # Only apply basic string preprocessing, since lines shouldn't be split here. 

676 if Preview.string_processing in mode: 

677 transformers = [string_merge, string_paren_strip] 

678 else: 

679 transformers = [] 

680 elif line.is_def and not should_split_funcdef_with_rhs(line, mode): 

681 transformers = [left_hand_split] 

682 else: 

683 

684 def _rhs( 

685 self: object, line: Line, features: Collection[Feature], mode: Mode 

686 ) -> Iterator[Line]: 

687 """Wraps calls to `right_hand_split`. 

688 

689 The calls increasingly `omit` right-hand trailers (bracket pairs with 

690 content), meaning the trailers get glued together to split on another 

691 bracket pair instead. 

692 """ 

693 for omit in generate_trailers_to_omit(line, mode.line_length): 

694 lines = list(right_hand_split(line, mode, features, omit=omit)) 

695 # Note: this check is only able to figure out if the first line of the 

696 # *current* transformation fits in the line length. This is true only 

697 # for simple cases. All others require running more transforms via 

698 # `transform_line()`. This check doesn't know if those would succeed. 

699 if is_line_short_enough(lines[0], mode=mode): 

700 yield from lines 

701 return 

702 

703 # All splits failed, best effort split with no omits. 

704 # This mostly happens to multiline strings that are by definition 

705 # reported as not fitting a single line, as well as lines that contain 

706 # trailing commas (those have to be exploded). 

707 yield from right_hand_split(line, mode, features=features) 

708 

709 # HACK: nested functions (like _rhs) compiled by mypyc don't retain their 

710 # __name__ attribute which is needed in `run_transformer` further down. 

711 # Unfortunately a nested class breaks mypyc too. So a class must be created 

712 # via type ... https://github.com/mypyc/mypyc/issues/884 

713 rhs = type("rhs", (), {"__call__": _rhs})() 

714 

715 if Preview.string_processing in mode: 

716 if line.inside_brackets: 

717 transformers = [ 

718 string_merge, 

719 string_paren_strip, 

720 string_split, 

721 delimiter_split, 

722 standalone_comment_split, 

723 string_paren_wrap, 

724 rhs, 

725 ] 

726 else: 

727 transformers = [ 

728 string_merge, 

729 string_paren_strip, 

730 string_split, 

731 string_paren_wrap, 

732 rhs, 

733 ] 

734 else: 

735 if line.inside_brackets: 

736 transformers = [delimiter_split, standalone_comment_split, rhs] 

737 else: 

738 transformers = [rhs] 

739 # It's always safe to attempt hugging of power operations and pretty much every line 

740 # could match. 

741 transformers.append(hug_power_op) 

742 

743 for transform in transformers: 

744 # We are accumulating lines in `result` because we might want to abort 

745 # mission and return the original line in the end, or attempt a different 

746 # split altogether. 

747 try: 

748 result = run_transformer(line, transform, mode, features, line_str=line_str) 

749 except CannotTransform: 

750 continue 

751 else: 

752 yield from result 

753 break 

754 

755 else: 

756 yield line 

757 

758 

759def should_split_funcdef_with_rhs(line: Line, mode: Mode) -> bool: 

760 """If a funcdef has a magic trailing comma in the return type, then we should first 

761 split the line with rhs to respect the comma. 

762 """ 

763 return_type_leaves: list[Leaf] = [] 

764 in_return_type = False 

765 

766 for leaf in line.leaves: 

767 if leaf.type == token.COLON: 

768 in_return_type = False 

769 if in_return_type: 

770 return_type_leaves.append(leaf) 

771 if leaf.type == token.RARROW: 

772 in_return_type = True 

773 

774 # using `bracket_split_build_line` will mess with whitespace, so we duplicate a 

775 # couple lines from it. 

776 result = Line(mode=line.mode, depth=line.depth) 

777 leaves_to_track = get_leaves_inside_matching_brackets(return_type_leaves) 

778 for leaf in return_type_leaves: 

779 result.append( 

780 leaf, 

781 preformatted=True, 

782 track_bracket=id(leaf) in leaves_to_track, 

783 ) 

784 

785 # we could also return true if the line is too long, and the return type is longer 

786 # than the param list. Or if `should_split_rhs` returns True. 

787 return result.magic_trailing_comma is not None 

788 

789 

790class _BracketSplitComponent(Enum): 

791 head = auto() 

792 body = auto() 

793 tail = auto() 

794 

795 

796def left_hand_split( 

797 line: Line, _features: Collection[Feature], mode: Mode 

798) -> Iterator[Line]: 

799 """Split line into many lines, starting with the first matching bracket pair. 

800 

801 Note: this usually looks weird; only use this for function definitions.

802 Prefer RHS otherwise. This is why this function is not symmetrical with 

803 :func:`right_hand_split` which also handles optional parentheses. 

804 """ 

805 for leaf_type in [token.LPAR, token.LSQB]: 

806 tail_leaves: list[Leaf] = [] 

807 body_leaves: list[Leaf] = [] 

808 head_leaves: list[Leaf] = [] 

809 current_leaves = head_leaves 

810 matching_bracket: Optional[Leaf] = None 

811 depth = 0 

812 for index, leaf in enumerate(line.leaves): 

813 if index == 2 and leaf.type == token.LSQB: 

814 # A [ at index 2 means this is a type param, so start 

815 # tracking the depth 

816 depth += 1 

817 elif depth > 0: 

818 if leaf.type == token.LSQB: 

819 depth += 1 

820 elif leaf.type == token.RSQB: 

821 depth -= 1 

822 if ( 

823 current_leaves is body_leaves 

824 and leaf.type in CLOSING_BRACKETS 

825 and leaf.opening_bracket is matching_bracket 

826 and isinstance(matching_bracket, Leaf) 

827 # If the code is still on LPAR and we are inside a type 

828 # param, ignore the match since this is searching 

829 # for the function arguments 

830 and not (leaf_type == token.LPAR and depth > 0) 

831 ): 

832 ensure_visible(leaf) 

833 ensure_visible(matching_bracket) 

834 current_leaves = tail_leaves if body_leaves else head_leaves 

835 current_leaves.append(leaf) 

836 if current_leaves is head_leaves: 

837 if leaf.type == leaf_type: 

838 matching_bracket = leaf 

839 current_leaves = body_leaves 

840 if matching_bracket and tail_leaves: 

841 break 

842 if not matching_bracket or not tail_leaves: 

843 raise CannotSplit("No brackets found") 

844 

845 head = bracket_split_build_line( 

846 head_leaves, line, matching_bracket, component=_BracketSplitComponent.head 

847 ) 

848 body = bracket_split_build_line( 

849 body_leaves, line, matching_bracket, component=_BracketSplitComponent.body 

850 ) 

851 tail = bracket_split_build_line( 

852 tail_leaves, line, matching_bracket, component=_BracketSplitComponent.tail 

853 ) 

854 bracket_split_succeeded_or_raise(head, body, tail) 

855 for result in (head, body, tail): 

856 if result: 

857 yield result 

858 

859 

860def right_hand_split( 

861 line: Line, 

862 mode: Mode, 

863 features: Collection[Feature] = (), 

864 omit: Collection[LeafID] = (), 

865) -> Iterator[Line]: 

866 """Split line into many lines, starting with the last matching bracket pair. 

867 

868 If the split was by optional parentheses, attempt splitting without them, too. 

869 `omit` is a collection of closing bracket IDs that shouldn't be considered for 

870 this split. 

871 

872 Note: running this function modifies `bracket_depth` on the leaves of `line`. 

873 """ 

874 rhs_result = _first_right_hand_split(line, omit=omit) 

875 yield from _maybe_split_omitting_optional_parens( 

876 rhs_result, line, mode, features=features, omit=omit 

877 ) 

878 

879 

880def _first_right_hand_split( 

881 line: Line, 

882 omit: Collection[LeafID] = (), 

883) -> RHSResult: 

884 """Split the line into head, body, tail starting with the last bracket pair. 

885 

886 Note: this function should not have side effects. It's relied upon by 

887 # _maybe_split_omitting_optional_parens to get an opinion on whether to prefer

888 splitting on the right side of an assignment statement. 

889 """ 

890 tail_leaves: list[Leaf] = [] 

891 body_leaves: list[Leaf] = [] 

892 head_leaves: list[Leaf] = [] 

893 current_leaves = tail_leaves 

894 opening_bracket: Optional[Leaf] = None 

895 closing_bracket: Optional[Leaf] = None 

896 for leaf in reversed(line.leaves): 

897 if current_leaves is body_leaves: 

898 if leaf is opening_bracket: 

899 current_leaves = head_leaves if body_leaves else tail_leaves 

900 current_leaves.append(leaf) 

901 if current_leaves is tail_leaves: 

902 if leaf.type in CLOSING_BRACKETS and id(leaf) not in omit: 

903 opening_bracket = leaf.opening_bracket 

904 closing_bracket = leaf 

905 current_leaves = body_leaves 

906 if not (opening_bracket and closing_bracket and head_leaves): 

907 # If there is no opening_bracket or closing_bracket, the split failed and

908 # all content is in the tail. Otherwise, if `head_leaves` are empty, it means 

909 # the matching `opening_bracket` wasn't available on `line` anymore. 

910 raise CannotSplit("No brackets found") 

911 

912 tail_leaves.reverse() 

913 body_leaves.reverse() 

914 head_leaves.reverse() 

915 

916 body: Optional[Line] = None 

917 if ( 

918 Preview.hug_parens_with_braces_and_square_brackets in line.mode 

919 and tail_leaves[0].value 

920 and tail_leaves[0].opening_bracket is head_leaves[-1] 

921 ): 

922 inner_body_leaves = list(body_leaves) 

923 hugged_opening_leaves: list[Leaf] = [] 

924 hugged_closing_leaves: list[Leaf] = [] 

925 is_unpacking = body_leaves[0].type in [token.STAR, token.DOUBLESTAR] 

926 unpacking_offset: int = 1 if is_unpacking else 0 

927 while ( 

928 len(inner_body_leaves) >= 2 + unpacking_offset 

929 and inner_body_leaves[-1].type in CLOSING_BRACKETS 

930 and inner_body_leaves[-1].opening_bracket 

931 is inner_body_leaves[unpacking_offset] 

932 ): 

933 if unpacking_offset: 

934 hugged_opening_leaves.append(inner_body_leaves.pop(0)) 

935 unpacking_offset = 0 

936 hugged_opening_leaves.append(inner_body_leaves.pop(0)) 

937 hugged_closing_leaves.insert(0, inner_body_leaves.pop()) 

938 

939 if hugged_opening_leaves and inner_body_leaves: 

940 inner_body = bracket_split_build_line( 

941 inner_body_leaves, 

942 line, 

943 hugged_opening_leaves[-1], 

944 component=_BracketSplitComponent.body, 

945 ) 

946 if ( 

947 line.mode.magic_trailing_comma 

948 and inner_body_leaves[-1].type == token.COMMA 

949 ): 

950 should_hug = True 

951 else: 

952 line_length = line.mode.line_length - sum( 

953 len(str(leaf)) 

954 for leaf in hugged_opening_leaves + hugged_closing_leaves 

955 ) 

956 if is_line_short_enough( 

957 inner_body, mode=replace(line.mode, line_length=line_length) 

958 ): 

959 # Do not hug if it fits on a single line. 

960 should_hug = False 

961 else: 

962 should_hug = True 

963 if should_hug: 

964 body_leaves = inner_body_leaves 

965 head_leaves.extend(hugged_opening_leaves) 

966 tail_leaves = hugged_closing_leaves + tail_leaves 

967 body = inner_body # No need to re-calculate the body again later. 

968 

969 head = bracket_split_build_line( 

970 head_leaves, line, opening_bracket, component=_BracketSplitComponent.head 

971 ) 

972 if body is None: 

973 body = bracket_split_build_line( 

974 body_leaves, line, opening_bracket, component=_BracketSplitComponent.body 

975 ) 

976 tail = bracket_split_build_line( 

977 tail_leaves, line, opening_bracket, component=_BracketSplitComponent.tail 

978 ) 

979 bracket_split_succeeded_or_raise(head, body, tail) 

980 return RHSResult(head, body, tail, opening_bracket, closing_bracket) 

981 

982 

983def _maybe_split_omitting_optional_parens( 

984 rhs: RHSResult, 

985 line: Line, 

986 mode: Mode, 

987 features: Collection[Feature] = (), 

988 omit: Collection[LeafID] = (), 

989) -> Iterator[Line]: 

990 if ( 

991 Feature.FORCE_OPTIONAL_PARENTHESES not in features 

992 # the opening bracket is an optional paren 

993 and rhs.opening_bracket.type == token.LPAR 

994 and not rhs.opening_bracket.value 

995 # the closing bracket is an optional paren 

996 and rhs.closing_bracket.type == token.RPAR 

997 and not rhs.closing_bracket.value 

998 # it's not an import (optional parens are the only thing we can split on 

999 # in this case; attempting a split without them is a waste of time) 

1000 and not line.is_import 

1001 # and we can actually remove the parens 

1002 and can_omit_invisible_parens(rhs, mode.line_length) 

1003 ): 

1004 omit = {id(rhs.closing_bracket), *omit} 

1005 try: 

1006 # The RHSResult Omitting Optional Parens. 

1007 rhs_oop = _first_right_hand_split(line, omit=omit) 

1008 if _prefer_split_rhs_oop_over_rhs(rhs_oop, rhs, mode): 

1009 yield from _maybe_split_omitting_optional_parens( 

1010 rhs_oop, line, mode, features=features, omit=omit 

1011 ) 

1012 return 

1013 

1014 except CannotSplit as e: 

1015 # For chained assignments we want to use the previous successful split 

1016 if line.is_chained_assignment: 

1017 pass 

1018 

1019 elif ( 

1020 not can_be_split(rhs.body) 

1021 and not is_line_short_enough(rhs.body, mode=mode) 

1022 and not ( 

1023 Preview.wrap_long_dict_values_in_parens 

1024 and rhs.opening_bracket.parent 

1025 and rhs.opening_bracket.parent.parent 

1026 and rhs.opening_bracket.parent.parent.type == syms.dictsetmaker 

1027 ) 

1028 ): 

1029 raise CannotSplit( 

1030 "Splitting failed, body is still too long and can't be split." 

1031 ) from e 

1032 

1033 elif ( 

1034 rhs.head.contains_multiline_strings() 

1035 or rhs.tail.contains_multiline_strings() 

1036 ): 

1037 raise CannotSplit( 

1038 "The current optional pair of parentheses is bound to fail to" 

1039 " satisfy the splitting algorithm because the head or the tail" 

1040 " contains multiline strings which by definition never fit one" 

1041 " line." 

1042 ) from e 

1043 

1044 ensure_visible(rhs.opening_bracket) 

1045 ensure_visible(rhs.closing_bracket) 

1046 for result in (rhs.head, rhs.body, rhs.tail): 

1047 if result: 

1048 yield result 

1049 

1050 

1051def _prefer_split_rhs_oop_over_rhs( 

1052 rhs_oop: RHSResult, rhs: RHSResult, mode: Mode 

1053) -> bool: 

1054 """ 

1055 Returns whether we should prefer the result from a split omitting optional parens 

1056 (rhs_oop) over the original (rhs). 

1057 """ 

1058 # contains unsplittable type ignore 

1059 if ( 

1060 rhs_oop.head.contains_unsplittable_type_ignore() 

1061 or rhs_oop.body.contains_unsplittable_type_ignore() 

1062 or rhs_oop.tail.contains_unsplittable_type_ignore() 

1063 ): 

1064 return True 

1065 

1066 # Retain optional parens around dictionary values 

1067 if ( 

1068 Preview.wrap_long_dict_values_in_parens 

1069 and rhs.opening_bracket.parent 

1070 and rhs.opening_bracket.parent.parent 

1071 and rhs.opening_bracket.parent.parent.type == syms.dictsetmaker 

1072 and rhs.body.bracket_tracker.delimiters 

1073 ): 

1074 # Unless the split is inside the key 

1075 return any(leaf.type == token.COLON for leaf in rhs_oop.tail.leaves) 

1076 

1077 # the split is right after `=` 

1078 if not (len(rhs.head.leaves) >= 2 and rhs.head.leaves[-2].type == token.EQUAL): 

1079 return True 

1080 

1081 # the left side of assignment contains brackets 

1082 if not any(leaf.type in BRACKETS for leaf in rhs.head.leaves[:-1]): 

1083 return True 

1084 

1085 # the left side of assignment is short enough (the -1 is for the ending optional 

1086 # paren) 

1087 if not is_line_short_enough( 

1088 rhs.head, mode=replace(mode, line_length=mode.line_length - 1) 

1089 ): 

1090 return True 

1091 

1092 # the left side of assignment won't explode further because of magic trailing comma 

1093 if rhs.head.magic_trailing_comma is not None: 

1094 return True 

1095 

1096 # If we have multiple targets, we prefer more `=`s on the head vs pushing them to 

1097 # the body 

1098 rhs_head_equal_count = [leaf.type for leaf in rhs.head.leaves].count(token.EQUAL) 

1099 rhs_oop_head_equal_count = [leaf.type for leaf in rhs_oop.head.leaves].count( 

1100 token.EQUAL 

1101 ) 

1102 if rhs_head_equal_count > 1 and rhs_head_equal_count > rhs_oop_head_equal_count: 

1103 return False 

1104 

1105 has_closing_bracket_after_assign = False 

1106 for leaf in reversed(rhs_oop.head.leaves): 

1107 if leaf.type == token.EQUAL: 

1108 break 

1109 if leaf.type in CLOSING_BRACKETS: 

1110 has_closing_bracket_after_assign = True 

1111 break 

1112 return ( 

1113 # contains matching brackets after the `=` (done by checking there is a 

1114 # closing bracket) 

1115 has_closing_bracket_after_assign 

1116 or ( 

1117 # the split is actually from inside the optional parens (done by checking 

1118 # the first line still contains the `=`) 

1119 any(leaf.type == token.EQUAL for leaf in rhs_oop.head.leaves) 

1120 # the first line is short enough 

1121 and is_line_short_enough(rhs_oop.head, mode=mode) 

1122 ) 

1123 ) 

1124 

1125 

1126def bracket_split_succeeded_or_raise(head: Line, body: Line, tail: Line) -> None: 

1127 """Raise :exc:`CannotSplit` if the last left- or right-hand split failed. 

1128 

1129 Do nothing otherwise. 

1130 

1131 A left- or right-hand split is based on a pair of brackets. Content before 

1132 (and including) the opening bracket is left on one line, content inside the 

1133 brackets is put on a separate line, and finally content starting with and 

1134 following the closing bracket is put on a separate line. 

1135 

1136 Those are called `head`, `body`, and `tail`, respectively. If the split 

1137 produced the same line (all content in `head`) or ended up with an empty `body` 

1138 and the `tail` is just the closing bracket, then it's considered failed. 

1139 """ 

1140 tail_len = len(str(tail).strip()) 

1141 if not body: 

1142 if tail_len == 0: 

1143 raise CannotSplit("Splitting brackets produced the same line") 

1144 

1145 elif tail_len < 3: 

1146 raise CannotSplit( 

1147 f"Splitting brackets on an empty body to save {tail_len} characters is" 

1148 " not worth it" 

1149 ) 

1150 

1151 

1152def _ensure_trailing_comma( 

1153 leaves: list[Leaf], original: Line, opening_bracket: Leaf 

1154) -> bool: 

1155 if not leaves: 

1156 return False 

1157 # Ensure a trailing comma for imports 

1158 if original.is_import: 

1159 return True 

1160 # ...and standalone function arguments 

1161 if not original.is_def: 

1162 return False 

1163 if opening_bracket.value != "(": 

1164 return False 

1165 # Don't add commas if we already have any commas 

1166 if any( 

1167 leaf.type == token.COMMA and not is_part_of_annotation(leaf) for leaf in leaves 

1168 ): 

1169 return False 

1170 

1171 # Find a leaf with a parent (comments don't have parents) 

1172 leaf_with_parent = next((leaf for leaf in leaves if leaf.parent), None) 

1173 if leaf_with_parent is None: 

1174 return True 

1175 # Don't add commas inside parenthesized return annotations 

1176 if get_annotation_type(leaf_with_parent) == "return": 

1177 return False 

1178 # Don't add commas inside PEP 604 unions 

1179 if ( 

1180 leaf_with_parent.parent 

1181 and leaf_with_parent.parent.next_sibling 

1182 and leaf_with_parent.parent.next_sibling.type == token.VBAR 

1183 ): 

1184 return False 

1185 return True 

1186 

1187 

1188def bracket_split_build_line( 

1189 leaves: list[Leaf], 

1190 original: Line, 

1191 opening_bracket: Leaf, 

1192 *, 

1193 component: _BracketSplitComponent, 

1194) -> Line: 

1195 """Return a new line with given `leaves` and respective comments from `original`. 

1196 

1197 If it's the head component, brackets will be tracked so trailing commas are 

1198 respected. 

1199 

1200 If it's the body component, the result line is one-indented inside brackets and as 

1201 such has its first leaf's prefix normalized and a trailing comma added when 

1202 expected. 

1203 """ 

1204 result = Line(mode=original.mode, depth=original.depth) 

1205 if component is _BracketSplitComponent.body: 

1206 result.inside_brackets = True 

1207 result.depth += 1 

1208 if _ensure_trailing_comma(leaves, original, opening_bracket): 

1209 for i in range(len(leaves) - 1, -1, -1): 

1210 if leaves[i].type == STANDALONE_COMMENT: 

1211 continue 

1212 

1213 if leaves[i].type != token.COMMA: 

1214 new_comma = Leaf(token.COMMA, ",") 

1215 leaves.insert(i + 1, new_comma) 

1216 break 

1217 

1218 leaves_to_track: set[LeafID] = set() 

1219 if component is _BracketSplitComponent.head: 

1220 leaves_to_track = get_leaves_inside_matching_brackets(leaves) 

1221 # Populate the line 

1222 for leaf in leaves: 

1223 result.append( 

1224 leaf, 

1225 preformatted=True, 

1226 track_bracket=id(leaf) in leaves_to_track, 

1227 ) 

1228 for comment_after in original.comments_after(leaf): 

1229 result.append(comment_after, preformatted=True) 

1230 if component is _BracketSplitComponent.body and should_split_line( 

1231 result, opening_bracket 

1232 ): 

1233 result.should_split_rhs = True 

1234 return result 

1235 

1236 

1237def dont_increase_indentation(split_func: Transformer) -> Transformer: 

1238 """Normalize prefix of the first leaf in every line returned by `split_func`. 

1239 

1240 This is a decorator over relevant split functions. 

1241 """ 

1242 

1243 @wraps(split_func) 

1244 def split_wrapper( 

1245 line: Line, features: Collection[Feature], mode: Mode 

1246 ) -> Iterator[Line]: 

1247 for split_line in split_func(line, features, mode): 

1248 split_line.leaves[0].prefix = "" 

1249 yield split_line 

1250 

1251 return split_wrapper 

1252 

1253 

1254def _get_last_non_comment_leaf(line: Line) -> Optional[int]: 

1255 for leaf_idx in range(len(line.leaves) - 1, 0, -1): 

1256 if line.leaves[leaf_idx].type != STANDALONE_COMMENT: 

1257 return leaf_idx 

1258 return None 

1259 

1260 

1261def _can_add_trailing_comma(leaf: Leaf, features: Collection[Feature]) -> bool: 

1262 if is_vararg(leaf, within={syms.typedargslist}): 

1263 return Feature.TRAILING_COMMA_IN_DEF in features 

1264 if is_vararg(leaf, within={syms.arglist, syms.argument}): 

1265 return Feature.TRAILING_COMMA_IN_CALL in features 

1266 return True 

1267 

1268 

1269def _safe_add_trailing_comma(safe: bool, delimiter_priority: int, line: Line) -> Line: 

1270 if ( 

1271 safe 

1272 and delimiter_priority == COMMA_PRIORITY 

1273 and line.leaves[-1].type != token.COMMA 

1274 and line.leaves[-1].type != STANDALONE_COMMENT 

1275 ): 

1276 new_comma = Leaf(token.COMMA, ",") 

1277 line.append(new_comma) 

1278 return line 

1279 

1280 

1281MIGRATE_COMMENT_DELIMITERS = {STRING_PRIORITY, COMMA_PRIORITY} 

1282 

1283 

1284@dont_increase_indentation 

1285def delimiter_split( 

1286 line: Line, features: Collection[Feature], mode: Mode 

1287) -> Iterator[Line]: 

1288 """Split according to delimiters of the highest priority. 

1289 

1290 If the appropriate Features are given, the split will add trailing commas 

1291 also in function signatures and calls that contain `*` and `**`. 

1292 """ 

1293 if len(line.leaves) == 0: 

1294 raise CannotSplit("Line empty") from None 

1295 last_leaf = line.leaves[-1] 

1296 

1297 bt = line.bracket_tracker 

1298 try: 

1299 delimiter_priority = bt.max_delimiter_priority(exclude={id(last_leaf)}) 

1300 except ValueError: 

1301 raise CannotSplit("No delimiters found") from None 

1302 

1303 if ( 

1304 delimiter_priority == DOT_PRIORITY 

1305 and bt.delimiter_count_with_priority(delimiter_priority) == 1 

1306 ): 

1307 raise CannotSplit("Splitting a single attribute from its owner looks wrong") 

1308 

1309 current_line = Line( 

1310 mode=line.mode, depth=line.depth, inside_brackets=line.inside_brackets 

1311 ) 

1312 lowest_depth = sys.maxsize 

1313 trailing_comma_safe = True 

1314 

1315 def append_to_line(leaf: Leaf) -> Iterator[Line]: 

1316 """Append `leaf` to current line or to new line if appending impossible.""" 

1317 nonlocal current_line 

1318 try: 

1319 current_line.append_safe(leaf, preformatted=True) 

1320 except ValueError: 

1321 yield current_line 

1322 

1323 current_line = Line( 

1324 mode=line.mode, depth=line.depth, inside_brackets=line.inside_brackets 

1325 ) 

1326 current_line.append(leaf) 

1327 

1328 def append_comments(leaf: Leaf) -> Iterator[Line]: 

1329 for comment_after in line.comments_after(leaf): 

1330 yield from append_to_line(comment_after) 

1331 

1332 last_non_comment_leaf = _get_last_non_comment_leaf(line) 

1333 for leaf_idx, leaf in enumerate(line.leaves): 

1334 yield from append_to_line(leaf) 

1335 

1336 previous_priority = leaf_idx > 0 and bt.delimiters.get( 

1337 id(line.leaves[leaf_idx - 1]) 

1338 ) 

1339 if ( 

1340 previous_priority != delimiter_priority 

1341 or delimiter_priority in MIGRATE_COMMENT_DELIMITERS 

1342 ): 

1343 yield from append_comments(leaf) 

1344 

1345 lowest_depth = min(lowest_depth, leaf.bracket_depth) 

1346 if trailing_comma_safe and leaf.bracket_depth == lowest_depth: 

1347 trailing_comma_safe = _can_add_trailing_comma(leaf, features) 

1348 

1349 if last_leaf.type == STANDALONE_COMMENT and leaf_idx == last_non_comment_leaf: 

1350 current_line = _safe_add_trailing_comma( 

1351 trailing_comma_safe, delimiter_priority, current_line 

1352 ) 

1353 

1354 leaf_priority = bt.delimiters.get(id(leaf)) 

1355 if leaf_priority == delimiter_priority: 

1356 if ( 

1357 leaf_idx + 1 < len(line.leaves) 

1358 and delimiter_priority not in MIGRATE_COMMENT_DELIMITERS 

1359 ): 

1360 yield from append_comments(line.leaves[leaf_idx + 1]) 

1361 

1362 yield current_line 

1363 current_line = Line( 

1364 mode=line.mode, depth=line.depth, inside_brackets=line.inside_brackets 

1365 ) 

1366 

1367 if current_line: 

1368 current_line = _safe_add_trailing_comma( 

1369 trailing_comma_safe, delimiter_priority, current_line 

1370 ) 

1371 yield current_line 

1372 

1373 

1374@dont_increase_indentation 

1375def standalone_comment_split( 

1376 line: Line, features: Collection[Feature], mode: Mode 

1377) -> Iterator[Line]: 

1378 """Split standalone comments from the rest of the line.""" 

1379 if not line.contains_standalone_comments(): 

1380 raise CannotSplit("Line does not have any standalone comments") 

1381 

1382 current_line = Line( 

1383 mode=line.mode, depth=line.depth, inside_brackets=line.inside_brackets 

1384 ) 

1385 

1386 def append_to_line(leaf: Leaf) -> Iterator[Line]: 

1387 """Append `leaf` to current line or to new line if appending impossible.""" 

1388 nonlocal current_line 

1389 try: 

1390 current_line.append_safe(leaf, preformatted=True) 

1391 except ValueError: 

1392 yield current_line 

1393 

1394 current_line = Line( 

1395 line.mode, depth=line.depth, inside_brackets=line.inside_brackets 

1396 ) 

1397 current_line.append(leaf) 

1398 

1399 for leaf in line.leaves: 

1400 yield from append_to_line(leaf) 

1401 

1402 for comment_after in line.comments_after(leaf): 

1403 yield from append_to_line(comment_after) 

1404 

1405 if current_line: 

1406 yield current_line 

1407 

1408 

1409def normalize_invisible_parens( # noqa: C901 

1410 node: Node, parens_after: set[str], *, mode: Mode, features: Collection[Feature] 

1411) -> None: 

1412 """Make existing optional parentheses invisible or create new ones. 

1413 

1414 `parens_after` is a set of string leaf values immediately after which parens 

1415 should be put. 

1416 

1417 Standardizes on visible parentheses for single-element tuples, and keeps 

1418 existing visible parentheses for other tuples and generator expressions. 

1419 """ 

1420 for pc in list_comments(node.prefix, is_endmarker=False): 

1421 if pc.value in FMT_OFF: 

1422 # This `node` has a prefix with `# fmt: off`, don't mess with parens. 

1423 return 

1424 

1425 # The multiple context managers grammar has a different pattern, thus this is 

1426 # separate from the for-loop below. This possibly wraps them in invisible parens, 

1427 # which remove_with_parens later removes when needed.

1428 if node.type == syms.with_stmt: 

1429 _maybe_wrap_cms_in_parens(node, mode, features) 

1430 

1431 check_lpar = False 

1432 for index, child in enumerate(list(node.children)): 

1433 # Fixes a bug where invisible parens are not properly stripped from 

1434 # assignment statements that contain type annotations. 

1435 if isinstance(child, Node) and child.type == syms.annassign: 

1436 normalize_invisible_parens( 

1437 child, parens_after=parens_after, mode=mode, features=features 

1438 ) 

1439 

1440 # Fixes a bug where invisible parens are not properly wrapped around 

1441 # case blocks. 

1442 if isinstance(child, Node) and child.type == syms.case_block: 

1443 normalize_invisible_parens( 

1444 child, parens_after={"case"}, mode=mode, features=features 

1445 ) 

1446 

1447 # Add parentheses around if guards in case blocks 

1448 if isinstance(child, Node) and child.type == syms.guard: 

1449 normalize_invisible_parens( 

1450 child, parens_after={"if"}, mode=mode, features=features 

1451 ) 

1452 

1453 # Add parentheses around long tuple unpacking in assignments. 

1454 if ( 

1455 index == 0 

1456 and isinstance(child, Node) 

1457 and child.type == syms.testlist_star_expr 

1458 ): 

1459 check_lpar = True 

1460 

1461 if check_lpar: 

1462 if ( 

1463 child.type == syms.atom 

1464 and node.type == syms.for_stmt 

1465 and isinstance(child.prev_sibling, Leaf) 

1466 and child.prev_sibling.type == token.NAME 

1467 and child.prev_sibling.value == "for" 

1468 ): 

1469 if maybe_make_parens_invisible_in_atom( 

1470 child, 

1471 parent=node, 

1472 mode=mode, 

1473 features=features, 

1474 remove_brackets_around_comma=True, 

1475 ): 

1476 wrap_in_parentheses(node, child, visible=False) 

1477 elif isinstance(child, Node) and node.type == syms.with_stmt: 

1478 remove_with_parens(child, node, mode=mode, features=features) 

1479 elif child.type == syms.atom and not ( 

1480 "in" in parens_after 

1481 and len(child.children) == 3 

1482 and is_lpar_token(child.children[0]) 

1483 and is_rpar_token(child.children[-1]) 

1484 and child.children[1].type == syms.test 

1485 ): 

1486 if maybe_make_parens_invisible_in_atom( 

1487 child, parent=node, mode=mode, features=features 

1488 ): 

1489 wrap_in_parentheses(node, child, visible=False) 

1490 elif is_one_tuple(child): 

1491 wrap_in_parentheses(node, child, visible=True) 

1492 elif node.type == syms.import_from: 

1493 _normalize_import_from(node, child, index) 

1494 break 

1495 elif ( 

1496 index == 1 

1497 and child.type == token.STAR 

1498 and node.type == syms.except_clause 

1499 ): 

1500 # In except* (PEP 654), the star is actually part of 

1501 # the keyword. So we need to skip the insertion of

1502 # invisible parentheses to work more precisely. 

1503 continue 

1504 

1505 elif ( 

1506 isinstance(child, Leaf) 

1507 and child.next_sibling is not None 

1508 and child.next_sibling.type == token.COLON 

1509 and child.value == "case" 

1510 ): 

1511 # A special patch for the "case case:" scenario, the second occurrence

1512 # of case will not be parsed as a Python keyword.

1513 break 

1514 

1515 elif not is_multiline_string(child): 

1516 wrap_in_parentheses(node, child, visible=False) 

1517 

1518 comma_check = child.type == token.COMMA 

1519 

1520 check_lpar = isinstance(child, Leaf) and ( 

1521 child.value in parens_after or comma_check 

1522 ) 

1523 

1524 

1525def _normalize_import_from(parent: Node, child: LN, index: int) -> None: 

1526 # "import from" nodes store parentheses directly as part of 

1527 # the statement 

1528 if is_lpar_token(child): 

1529 assert is_rpar_token(parent.children[-1]) 

1530 # make parentheses invisible 

1531 child.value = "" 

1532 parent.children[-1].value = "" 

1533 elif child.type != token.STAR: 

1534 # insert invisible parentheses 

1535 parent.insert_child(index, Leaf(token.LPAR, "")) 

1536 parent.append_child(Leaf(token.RPAR, "")) 

1537 

1538 

1539def remove_await_parens(node: Node, mode: Mode, features: Collection[Feature]) -> None: 

1540 if node.children[0].type == token.AWAIT and len(node.children) > 1: 

1541 if ( 

1542 node.children[1].type == syms.atom 

1543 and node.children[1].children[0].type == token.LPAR 

1544 ): 

1545 if maybe_make_parens_invisible_in_atom( 

1546 node.children[1], 

1547 parent=node, 

1548 mode=mode, 

1549 features=features, 

1550 remove_brackets_around_comma=True, 

1551 ): 

1552 wrap_in_parentheses(node, node.children[1], visible=False) 

1553 

1554 # Since await is an expression we shouldn't remove 

1555 # brackets in cases where this would change 

1556 # the AST due to operator precedence. 

1557 # Therefore we only aim to remove brackets around 

1558 # power nodes that aren't also await expressions themselves. 

1559 # https://peps.python.org/pep-0492/#updated-operator-precedence-table 

1560 # N.B. We've still removed any redundant nested brackets though :) 

1561 opening_bracket = cast(Leaf, node.children[1].children[0]) 

1562 closing_bracket = cast(Leaf, node.children[1].children[-1]) 

1563 bracket_contents = node.children[1].children[1] 

1564 if isinstance(bracket_contents, Node) and ( 

1565 bracket_contents.type != syms.power 

1566 or bracket_contents.children[0].type == token.AWAIT 

1567 or any( 

1568 isinstance(child, Leaf) and child.type == token.DOUBLESTAR 

1569 for child in bracket_contents.children 

1570 ) 

1571 ): 

1572 ensure_visible(opening_bracket) 

1573 ensure_visible(closing_bracket) 

1574 

1575 
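
An illustrative sketch of the precedence rule described in the comments above; the names are placeholders and the expected results noted in the comments are inferred from the logic, not asserted.

import black

src = (
    "async def f():\n"
    "    await (do_work())\n"  # plain call: parens expected to vanish
    "    await (x ** 2)\n"     # contains **: parens expected to stay visible
    "    await (a + b)\n"      # not a power node: parens expected to stay visible
)
print(black.format_str(src, mode=black.Mode()))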

1576def _maybe_wrap_cms_in_parens( 

1577 node: Node, mode: Mode, features: Collection[Feature] 

1578) -> None: 

1579 """When enabled and safe, wrap multiple context managers in invisible parens. 

1580 

1581 It is only safe when `features` contains Feature.PARENTHESIZED_CONTEXT_MANAGERS. 

1582 """ 

1583 if ( 

1584 Feature.PARENTHESIZED_CONTEXT_MANAGERS not in features 

1585 or len(node.children) <= 2 

1586 # If it's an atom, it's already wrapped in parens. 

1587 or node.children[1].type == syms.atom 

1588 ): 

1589 return 

1590 colon_index: Optional[int] = None 

1591 for i in range(2, len(node.children)): 

1592 if node.children[i].type == token.COLON: 

1593 colon_index = i 

1594 break 

1595 if colon_index is not None: 

1596 lpar = Leaf(token.LPAR, "") 

1597 rpar = Leaf(token.RPAR, "") 

1598 context_managers = node.children[1:colon_index] 

1599 for child in context_managers: 

1600 child.remove() 

1601 # After wrapping, the with_stmt will look like this: 

1602 # with_stmt 

1603 # NAME 'with' 

1604 # atom 

1605 # LPAR '' 

1606 # testlist_gexp 

1607 # ... <-- context_managers 

1608 # /testlist_gexp 

1609 # RPAR '' 

1610 # /atom 

1611 # COLON ':' 

1612 new_child = Node( 

1613 syms.atom, [lpar, Node(syms.testlist_gexp, context_managers), rpar] 

1614 ) 

1615 node.insert_child(1, new_child) 

1616 

1617 
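
A rough sketch of the observable effect, assuming a target version whose feature set includes PARENTHESIZED_CONTEXT_MANAGERS; the context-manager names are placeholders and the sketched output is an expectation, not a guarantee.

import black

mode = black.Mode(target_versions={black.TargetVersion.PY311})

src = (
    "with make_first_context_manager() as first, "
    "make_second_context_manager() as second, "
    "make_third_context_manager() as third:\n"
    "    pass\n"
)
# Too long for one line, so the context managers are expected to be wrapped in
# parentheses and split one per line:
#     with (
#         make_first_context_manager() as first,
#         make_second_context_manager() as second,
#         make_third_context_manager() as third,
#     ):
#         pass
print(black.format_str(src, mode=mode))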

1618def remove_with_parens( 

1619 node: Node, parent: Node, mode: Mode, features: Collection[Feature] 

1620) -> None: 

1621 """Recursively hide optional parens in `with` statements.""" 

1622 # Removing all unnecessary parentheses in with statements in one pass is a tad 

1623 # complex as different variations of bracketed statements result in pretty 

1624 # different parse trees: 

1625 # 

1626 # with (open("file")) as f: # this is an asexpr_test 

1627 # ... 

1628 # 

1629 # with (open("file") as f): # this is an atom containing an 

1630 # ... # asexpr_test 

1631 # 

1632 # with (open("file")) as f, (open("file")) as f: # this is asexpr_test, COMMA, 

1633 # ... # asexpr_test 

1634 # 

1635 # with (open("file") as f, open("file") as f): # an atom containing a 

1636 # ... # testlist_gexp which then 

1637 # # contains multiple asexpr_test(s) 

1638 if node.type == syms.atom: 

1639 if maybe_make_parens_invisible_in_atom( 

1640 node, 

1641 parent=parent, 

1642 mode=mode, 

1643 features=features, 

1644 remove_brackets_around_comma=True, 

1645 ): 

1646 wrap_in_parentheses(parent, node, visible=False) 

1647 if isinstance(node.children[1], Node): 

1648 remove_with_parens(node.children[1], node, mode=mode, features=features) 

1649 elif node.type == syms.testlist_gexp: 

1650 for child in node.children: 

1651 if isinstance(child, Node): 

1652 remove_with_parens(child, node, mode=mode, features=features) 

1653 elif node.type == syms.asexpr_test and not any( 

1654 leaf.type == token.COLONEQUAL for leaf in node.leaves() 

1655 ): 

1656 if maybe_make_parens_invisible_in_atom( 

1657 node.children[0], 

1658 parent=node, 

1659 mode=mode, 

1660 features=features, 

1661 remove_brackets_around_comma=True, 

1662 ): 

1663 wrap_in_parentheses(node, node.children[0], visible=False) 

1664 

1665 
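
A small sketch of the two branches that matter most in practice; the expected outputs in the comments are assumptions under default settings.

import black

mode = black.Mode()

# Redundant parentheses around a single "as" binding are hidden.
print(black.format_str('with (open("f") as f):\n    pass\n', mode=mode))
# expected: with open("f") as f:

# With a walrus inside, the guards above bail out and the (syntactically
# required) parentheses are kept.
print(black.format_str("with (ctx := make_ctx()):\n    pass\n", mode=mode))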

1666def maybe_make_parens_invisible_in_atom( 

1667 node: LN, 

1668 parent: LN, 

1669 mode: Mode, 

1670 features: Collection[Feature], 

1671 remove_brackets_around_comma: bool = False, 

1672) -> bool: 

1673 """If it's safe, make the parens in the atom `node` invisible, recursively. 

1674 Additionally, remove repeated, adjacent invisible parens from the atom `node` 

1675 as they are redundant. 

1676 

1677 Returns whether the node should itself be wrapped in invisible parentheses. 

1678 """ 

1679 if ( 

1680 node.type not in (syms.atom, syms.expr) 

1681 or is_empty_tuple(node) 

1682 or is_one_tuple(node) 

1683 or (is_tuple(node) and parent.type == syms.asexpr_test) 

1684 or ( 

1685 is_tuple(node) 

1686 and parent.type == syms.with_stmt 

1687 and has_sibling_with_type(node, token.COMMA) 

1688 ) 

1689 or (is_yield(node) and parent.type != syms.expr_stmt) 

1690 or ( 

1691 # This condition tries to prevent removing non-optional brackets 

1692 # around a tuple; however, it can be a bit overzealous, so we provide 

1693 # an option to skip this check for `for` and `with` statements. 

1694 not remove_brackets_around_comma 

1695 and max_delimiter_priority_in_atom(node) >= COMMA_PRIORITY 

1696 # Skip this check in Preview mode in order to 

1697 # remove parentheses around multiple exception types in except and 

1698 # except* without `as`. See PEP 758 for details. 

1699 and not ( 

1700 Preview.remove_parens_around_except_types in mode 

1701 and Feature.UNPARENTHESIZED_EXCEPT_TYPES in features 

1702 # is a tuple 

1703 and is_tuple(node) 

1704 # has a parent node 

1705 and node.parent is not None 

1706 # parent is an except clause 

1707 and node.parent.type == syms.except_clause 

1708 # is not immediately followed by as clause 

1709 and not ( 

1710 node.next_sibling is not None 

1711 and is_name_token(node.next_sibling) 

1712 and node.next_sibling.value == "as" 

1713 ) 

1714 ) 

1715 ) 

1716 or is_tuple_containing_walrus(node) 

1717 or is_tuple_containing_star(node) 

1718 or is_generator(node) 

1719 ): 

1720 return False 

1721 

1722 if is_walrus_assignment(node): 

1723 if parent.type in [ 

1724 syms.annassign, 

1725 syms.expr_stmt, 

1726 syms.assert_stmt, 

1727 syms.return_stmt, 

1728 syms.except_clause, 

1729 syms.funcdef, 

1730 syms.with_stmt, 

1731 syms.testlist_gexp, 

1732 syms.tname, 

1733 # these ones aren't useful to end users, but they do please fuzzers 

1734 syms.for_stmt, 

1735 syms.del_stmt, 

1737 ]: 

1738 return False 

1739 

1740 first = node.children[0] 

1741 last = node.children[-1] 

1742 if is_lpar_token(first) and is_rpar_token(last): 

1743 middle = node.children[1] 

1744 # make parentheses invisible 

1745 if ( 

1746 # If the prefix of `middle` includes a type comment with 

1747 # ignore annotation, then we do not remove the parentheses 

1748 not is_type_ignore_comment_string(middle.prefix.strip()) 

1749 ): 

1750 first.value = "" 

1751 last.value = "" 

1752 maybe_make_parens_invisible_in_atom( 

1753 middle, 

1754 parent=parent, 

1755 mode=mode, 

1756 features=features, 

1757 remove_brackets_around_comma=remove_brackets_around_comma, 

1758 ) 

1759 

1760 if is_atom_with_invisible_parens(middle): 

1761 # Strip the invisible parens from `middle` by replacing 

1762 # it with the child in-between the invisible parens 

1763 middle.replace(middle.children[1]) 

1764 

1765 if middle.children[0].prefix.strip(): 

1766 # Preserve comments before first paren 

1767 middle.children[1].prefix = ( 

1768 middle.children[0].prefix + middle.children[1].prefix 

1769 ) 

1770 

1771 if middle.children[-1].prefix.strip(): 

1772 # Preserve comments before last paren 

1773 last.prefix = middle.children[-1].prefix + last.prefix 

1774 

1775 return False 

1776 

1777 return True 

1778 

1779 
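
To ground the guard list above, a quick sketch of inputs that hit the early-return cases versus the happy path; the expected outputs are assumptions under default settings.

import black

mode = black.Mode()

# Happy path: parentheses around a single name become invisible.
print(black.format_str("value = (x)\n", mode=mode))
# expected: value = x

# One-tuples keep (or gain) visible parentheses.
print(black.format_str("t = 1,\n", mode=mode))
# expected: t = (1,)

# A walrus under an expr_stmt parent returns False above, so its parentheses stay.
print(black.format_str("y = (n := compute())\n", mode=mode))
# expected: unchanged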

1780def should_split_line(line: Line, opening_bracket: Leaf) -> bool: 

1781 """Should `line` be immediately split with `delimiter_split()` after RHS?""" 

1782 

1783 if not (opening_bracket.parent and opening_bracket.value in "[{("): 

1784 return False 

1785 

1786 # We're essentially checking if the body is delimited by commas and there's more 

1787 # than one of them (we exclude the trailing comma, and if the delimiter priority 

1788 # is still commas, that means there's more than one). 

1789 exclude = set() 

1790 trailing_comma = False 

1791 try: 

1792 last_leaf = line.leaves[-1] 

1793 if last_leaf.type == token.COMMA: 

1794 trailing_comma = True 

1795 exclude.add(id(last_leaf)) 

1796 max_priority = line.bracket_tracker.max_delimiter_priority(exclude=exclude) 

1797 except (IndexError, ValueError): 

1798 return False 

1799 

1800 return max_priority == COMMA_PRIORITY and ( 

1801 (line.mode.magic_trailing_comma and trailing_comma) 

1802 # always explode imports 

1803 or opening_bracket.parent.type in {syms.atom, syms.import_from} 

1804 ) 

1805 

1806 
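
A sketch of the magic trailing comma behavior this check feeds into; the call and argument names are placeholders, and the outputs in the comments are expectations.

import black

with_comma = "call(alpha, beta,)\n"

# Default mode honors the pre-existing trailing comma and explodes the call
# even though it would fit on one line.
print(black.format_str(with_comma, mode=black.Mode()))
# expected:
#     call(
#         alpha,
#         beta,
#     )

# With the magic trailing comma disabled, the same input is expected to collapse.
print(black.format_str(with_comma, mode=black.Mode(magic_trailing_comma=False)))
# expected: call(alpha, beta)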

1807def generate_trailers_to_omit(line: Line, line_length: int) -> Iterator[set[LeafID]]: 

1808 """Generate sets of closing bracket IDs that should be omitted in a RHS. 

1809 

1810 Brackets can be omitted if the entire trailer up to and including 

1811 a preceding closing bracket fits in one line. 

1812 

1813 Yielded sets are cumulative (contain results of previous yields, too). First 

1814 set is empty, unless the line should explode, in which case bracket pairs until 

1815 the one that needs to explode are omitted. 

1816 """ 

1817 

1818 omit: set[LeafID] = set() 

1819 if not line.magic_trailing_comma: 

1820 yield omit 

1821 

1822 length = 4 * line.depth 

1823 opening_bracket: Optional[Leaf] = None 

1824 closing_bracket: Optional[Leaf] = None 

1825 inner_brackets: set[LeafID] = set() 

1826 for index, leaf, leaf_length in line.enumerate_with_length(is_reversed=True): 

1827 length += leaf_length 

1828 if length > line_length: 

1829 break 

1830 

1831 has_inline_comment = leaf_length > len(leaf.value) + len(leaf.prefix) 

1832 if leaf.type == STANDALONE_COMMENT or has_inline_comment: 

1833 break 

1834 

1835 if opening_bracket: 

1836 if leaf is opening_bracket: 

1837 opening_bracket = None 

1838 elif leaf.type in CLOSING_BRACKETS: 

1839 prev = line.leaves[index - 1] if index > 0 else None 

1840 if ( 

1841 prev 

1842 and prev.type == token.COMMA 

1843 and leaf.opening_bracket is not None 

1844 and not is_one_sequence_between( 

1845 leaf.opening_bracket, leaf, line.leaves 

1846 ) 

1847 ): 

1848 # Never omit bracket pairs with trailing commas. 

1849 # We need to explode on those. 

1850 break 

1851 

1852 inner_brackets.add(id(leaf)) 

1853 elif leaf.type in CLOSING_BRACKETS: 

1854 prev = line.leaves[index - 1] if index > 0 else None 

1855 if prev and prev.type in OPENING_BRACKETS: 

1856 # Empty brackets would fail a split so treat them as "inner" 

1857 # brackets (i.e. only add them to the `omit` set if another 

1858 # pair of brackets was good enough). 

1859 inner_brackets.add(id(leaf)) 

1860 continue 

1861 

1862 if closing_bracket: 

1863 omit.add(id(closing_bracket)) 

1864 omit.update(inner_brackets) 

1865 inner_brackets.clear() 

1866 yield omit 

1867 

1868 if ( 

1869 prev 

1870 and prev.type == token.COMMA 

1871 and leaf.opening_bracket is not None 

1872 and not is_one_sequence_between(leaf.opening_bracket, leaf, line.leaves) 

1873 ): 

1874 # Never omit bracket pairs with trailing commas. 

1875 # We need to explode on those. 

1876 break 

1877 

1878 if leaf.value: 

1879 opening_bracket = leaf.opening_bracket 

1880 closing_bracket = leaf 

1881 

1882 
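
An illustrative sketch of the chained-call case this generator exists for; the names are placeholders, and since the exact split point depends on lengths, the comment only describes the tendency rather than asserting output.

import black

# For a long trailer chain, the right-hand-side splitter consults the omitted
# bracket sets yielded above so that trailers which still fit stay on one line
# and only the bracket pair that has to explode is split.
src = (
    "result = some_object.first_method(argument_one)"
    ".second_method(argument_two).third_method(argument_three, argument_four)\n"
)
print(black.format_str(src, mode=black.Mode()))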

1883def run_transformer( 

1884 line: Line, 

1885 transform: Transformer, 

1886 mode: Mode, 

1887 features: Collection[Feature], 

1888 *, 

1889 line_str: str = "", 

1890) -> list[Line]: 

1891 if not line_str: 

1892 line_str = line_to_string(line) 

1893 result: list[Line] = [] 

1894 for transformed_line in transform(line, features, mode): 

1895 if str(transformed_line).strip("\n") == line_str: 

1896 raise CannotTransform("Line transformer returned an unchanged result") 

1897 

1898 result.extend(transform_line(transformed_line, mode=mode, features=features)) 

1899 

1900 features_set = set(features) 

1901 if ( 

1902 Feature.FORCE_OPTIONAL_PARENTHESES in features_set 

1903 or transform.__class__.__name__ != "rhs" 

1904 or not line.bracket_tracker.invisible 

1905 or any(bracket.value for bracket in line.bracket_tracker.invisible) 

1906 or line.contains_multiline_strings() 

1907 or result[0].contains_uncollapsable_type_comments() 

1908 or result[0].contains_unsplittable_type_ignore() 

1909 or is_line_short_enough(result[0], mode=mode) 

1910 # If any leaves have no parents (which _can_ occur since 

1911 # `transform(line)` potentially destroys the line's underlying node 

1912 # structure), then we can't proceed. Doing so would cause the below 

1913 # call to `append_leaves()` to fail. 

1914 or any(leaf.parent is None for leaf in line.leaves) 

1915 ): 

1916 return result 

1917 

1918 line_copy = line.clone() 

1919 append_leaves(line_copy, line, line.leaves) 

1920 features_fop = features_set | {Feature.FORCE_OPTIONAL_PARENTHESES} 

1921 second_opinion = run_transformer( 

1922 line_copy, transform, mode, features_fop, line_str=line_str 

1923 ) 

1924 if all(is_line_short_enough(ln, mode=mode) for ln in second_opinion): 

1925 result = second_opinion 

1926 return result
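
Finally, a sketch of the kind of result the FORCE_OPTIONAL_PARENTHESES "second opinion" above tends to produce: when every line of the parenthesized variant fits, it is preferred over splitting inside the expression. The names are placeholders and the sketched output is an expectation, not a guarantee.

import black

src = (
    "some_descriptive_variable_name = another_long_name_here"
    " + yet_another_fairly_long_name + one_more_operand_name\n"
)
print(black.format_str(src, mode=black.Mode()))
# expected, roughly:
#     some_descriptive_variable_name = (
#         another_long_name_here + yet_another_fairly_long_name + one_more_operand_name
#     )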