Coverage for /pythoncovmergedfiles/medio/medio/usr/local/lib/python3.8/site-packages/black/linegen.py: 76%

659 statements  

coverage.py v7.2.7, created at 2023-06-07 06:15 +0000

1""" 

2Generating lines of code. 

3""" 

4import sys 

5from dataclasses import replace 

6from enum import Enum, auto 

7from functools import partial, wraps 

8from typing import Collection, Iterator, List, Optional, Set, Union, cast 

9 

10from black.brackets import ( 

11 COMMA_PRIORITY, 

12 DOT_PRIORITY, 

13 get_leaves_inside_matching_brackets, 

14 max_delimiter_priority_in_atom, 

15) 

16from black.comments import FMT_OFF, generate_comments, list_comments 

17from black.lines import ( 

18 Line, 

19 RHSResult, 

20 append_leaves, 

21 can_be_split, 

22 can_omit_invisible_parens, 

23 is_line_short_enough, 

24 line_to_string, 

25) 

26from black.mode import Feature, Mode, Preview 

27from black.nodes import ( 

28 ASSIGNMENTS, 

29 BRACKETS, 

30 CLOSING_BRACKETS, 

31 OPENING_BRACKETS, 

32 RARROW, 

33 STANDALONE_COMMENT, 

34 STATEMENT, 

35 WHITESPACE, 

36 Visitor, 

37 ensure_visible, 

38 is_arith_like, 

39 is_async_stmt_or_funcdef, 

40 is_atom_with_invisible_parens, 

41 is_docstring, 

42 is_empty_tuple, 

43 is_lpar_token, 

44 is_multiline_string, 

45 is_name_token, 

46 is_one_sequence_between, 

47 is_one_tuple, 

48 is_rpar_token, 

49 is_stub_body, 

50 is_stub_suite, 

51 is_tuple_containing_walrus, 

52 is_vararg, 

53 is_walrus_assignment, 

54 is_yield, 

55 syms, 

56 wrap_in_parentheses, 

57) 

58from black.numerics import normalize_numeric_literal 

59from black.strings import ( 

60 fix_docstring, 

61 get_string_prefix, 

62 normalize_string_prefix, 

63 normalize_string_quotes, 

64 normalize_unicode_escape_sequences, 

65) 

66from black.trans import ( 

67 CannotTransform, 

68 StringMerger, 

69 StringParenStripper, 

70 StringParenWrapper, 

71 StringSplitter, 

72 Transformer, 

73 hug_power_op, 

74) 

75from blib2to3.pgen2 import token 

76from blib2to3.pytree import Leaf, Node 

77 

78# types 

79LeafID = int 

80LN = Union[Leaf, Node] 

81 

82 

83class CannotSplit(CannotTransform): 

84 """A readable split that fits the allotted line length is impossible.""" 

85 

86 

87# This isn't a dataclass because @dataclass + Generic breaks mypyc. 

88# See also https://github.com/mypyc/mypyc/issues/827. 

89class LineGenerator(Visitor[Line]): 

90 """Generates reformatted Line objects. Empty lines are not emitted. 

91 

92 Note: destroys the tree it's visiting by mutating prefixes of its leaves 

93 in ways that will no longer stringify to valid Python code on the tree. 

94 """ 

95 

96 def __init__(self, mode: Mode, features: Collection[Feature]) -> None: 

97 self.mode = mode 

98 self.features = features 

99 self.current_line: Line 

100 self.__post_init__() 

101 

102 def line(self, indent: int = 0) -> Iterator[Line]: 

103 """Generate a line. 

104 

105 If the line is empty, only emit if it makes sense. 

106 If the line is too long, split it first and then generate. 

107 

108 If any lines were generated, set up a new current_line. 

109 """ 

110 if not self.current_line: 

111 self.current_line.depth += indent 

112 return # Line is empty, don't emit. Creating a new one is unnecessary.

113 

114 if ( 

115 Preview.improved_async_statements_handling in self.mode 

116 and len(self.current_line.leaves) == 1 

117 and is_async_stmt_or_funcdef(self.current_line.leaves[0]) 

118 ): 

119 # Special case for async def/for/with statements. `visit_async_stmt` 

120 # adds an `ASYNC` leaf then visits the child def/for/with statement 

121 # nodes. Lines yielded from those nodes shouldn't treat the former

122 # `ASYNC` leaf as a complete line. 

123 return 

124 

125 complete_line = self.current_line 

126 self.current_line = Line(mode=self.mode, depth=complete_line.depth + indent) 

127 yield complete_line 

128 

129 def visit_default(self, node: LN) -> Iterator[Line]: 

130 """Default `visit_*()` implementation. Recurses to children of `node`.""" 

131 if isinstance(node, Leaf): 

132 any_open_brackets = self.current_line.bracket_tracker.any_open_brackets() 

133 for comment in generate_comments(node): 

134 if any_open_brackets: 

135 # any comment within brackets is subject to splitting 

136 self.current_line.append(comment) 

137 elif comment.type == token.COMMENT: 

138 # regular trailing comment 

139 self.current_line.append(comment) 

140 yield from self.line() 

141 

142 else: 

143 # regular standalone comment 

144 yield from self.line() 

145 

146 self.current_line.append(comment) 

147 yield from self.line() 

148 

149 normalize_prefix(node, inside_brackets=any_open_brackets) 

150 if self.mode.string_normalization and node.type == token.STRING: 

151 node.value = normalize_string_prefix(node.value) 

152 node.value = normalize_string_quotes(node.value) 

153 if node.type == token.NUMBER: 

154 normalize_numeric_literal(node) 

155 if node.type not in WHITESPACE: 

156 self.current_line.append(node) 

157 yield from super().visit_default(node) 

158 

159 def visit_test(self, node: Node) -> Iterator[Line]: 

160 """Visit an `x if y else z` test""" 

161 

162 if Preview.parenthesize_conditional_expressions in self.mode: 

163 already_parenthesized = ( 

164 node.prev_sibling and node.prev_sibling.type == token.LPAR 

165 ) 

166 

167 if not already_parenthesized: 

168 lpar = Leaf(token.LPAR, "") 

169 rpar = Leaf(token.RPAR, "") 

170 node.insert_child(0, lpar) 

171 node.append_child(rpar) 

172 

173 yield from self.visit_default(node) 

174 

175 def visit_INDENT(self, node: Leaf) -> Iterator[Line]: 

176 """Increase indentation level, maybe yield a line.""" 

177 # In blib2to3 INDENT never holds comments. 

178 yield from self.line(+1) 

179 yield from self.visit_default(node) 

180 

181 def visit_DEDENT(self, node: Leaf) -> Iterator[Line]: 

182 """Decrease indentation level, maybe yield a line.""" 

183 # The current line might still wait for trailing comments. At DEDENT time 

184 # there won't be any (they would be prefixes on the preceding NEWLINE). 

185 # Emit the line then. 

186 yield from self.line() 

187 

188 # While DEDENT has no value, its prefix may contain standalone comments 

189 # that belong to the current indentation level. Get 'em. 

190 yield from self.visit_default(node) 

191 

192 # Finally, emit the dedent. 

193 yield from self.line(-1) 

194 

195 def visit_stmt( 

196 self, node: Node, keywords: Set[str], parens: Set[str] 

197 ) -> Iterator[Line]: 

198 """Visit a statement. 

199 

200 This implementation is shared for `if`, `while`, `for`, `try`, `except`, 

201 `def`, `with`, `class`, `assert`, and assignments. 

202 

203 The relevant Python language `keywords` for a given statement will be 

204 NAME leaves within it. This method puts those on a separate line.

205 

206 `parens` holds a set of string leaf values immediately after which 

207 invisible parens should be put. 

208 """ 

209 normalize_invisible_parens( 

210 node, parens_after=parens, mode=self.mode, features=self.features 

211 ) 

212 for child in node.children: 

213 if is_name_token(child) and child.value in keywords: 

214 yield from self.line() 

215 

216 yield from self.visit(child) 

217 

218 def visit_typeparams(self, node: Node) -> Iterator[Line]: 

219 yield from self.visit_default(node) 

220 node.children[0].prefix = "" 

221 

222 def visit_typevartuple(self, node: Node) -> Iterator[Line]: 

223 yield from self.visit_default(node) 

224 node.children[1].prefix = "" 

225 

226 def visit_paramspec(self, node: Node) -> Iterator[Line]: 

227 yield from self.visit_default(node) 

228 node.children[1].prefix = "" 

229 

230 def visit_dictsetmaker(self, node: Node) -> Iterator[Line]: 

231 if Preview.wrap_long_dict_values_in_parens in self.mode: 

232 for i, child in enumerate(node.children): 

233 if i == 0: 

234 continue 

235 if node.children[i - 1].type == token.COLON: 

236 if child.type == syms.atom and child.children[0].type == token.LPAR: 

237 if maybe_make_parens_invisible_in_atom( 

238 child, 

239 parent=node, 

240 remove_brackets_around_comma=False, 

241 ): 

242 wrap_in_parentheses(node, child, visible=False) 

243 else: 

244 wrap_in_parentheses(node, child, visible=False) 

245 yield from self.visit_default(node) 

246 

247 def visit_funcdef(self, node: Node) -> Iterator[Line]: 

248 """Visit function definition.""" 

249 yield from self.line() 

250 

251 # Remove redundant brackets around return type annotation. 

252 is_return_annotation = False 

253 for child in node.children: 

254 if child.type == token.RARROW: 

255 is_return_annotation = True 

256 elif is_return_annotation: 

257 if child.type == syms.atom and child.children[0].type == token.LPAR: 

258 if maybe_make_parens_invisible_in_atom( 

259 child, 

260 parent=node, 

261 remove_brackets_around_comma=False, 

262 ): 

263 wrap_in_parentheses(node, child, visible=False) 

264 else: 

265 wrap_in_parentheses(node, child, visible=False) 

266 is_return_annotation = False 

267 

268 for child in node.children: 

269 yield from self.visit(child) 

270 

271 def visit_match_case(self, node: Node) -> Iterator[Line]: 

272 """Visit either a match or case statement.""" 

273 normalize_invisible_parens( 

274 node, parens_after=set(), mode=self.mode, features=self.features 

275 ) 

276 

277 yield from self.line() 

278 for child in node.children: 

279 yield from self.visit(child) 

280 

281 def visit_suite(self, node: Node) -> Iterator[Line]: 

282 """Visit a suite.""" 

283 if self.mode.is_pyi and is_stub_suite(node): 

284 yield from self.visit(node.children[2]) 

285 else: 

286 yield from self.visit_default(node) 

287 

288 def visit_simple_stmt(self, node: Node) -> Iterator[Line]: 

289 """Visit a statement without nested statements.""" 

290 prev_type: Optional[int] = None 

291 for child in node.children: 

292 if (prev_type is None or prev_type == token.SEMI) and is_arith_like(child): 

293 wrap_in_parentheses(node, child, visible=False) 

294 prev_type = child.type 

295 

296 is_suite_like = node.parent and node.parent.type in STATEMENT 

297 if is_suite_like: 

298 if self.mode.is_pyi and is_stub_body(node): 

299 yield from self.visit_default(node) 

300 else: 

301 yield from self.line(+1) 

302 yield from self.visit_default(node) 

303 yield from self.line(-1) 

304 

305 else: 

306 if ( 

307 not self.mode.is_pyi 

308 or not node.parent 

309 or not is_stub_suite(node.parent) 

310 ): 

311 yield from self.line() 

312 yield from self.visit_default(node) 

313 

314 def visit_async_stmt(self, node: Node) -> Iterator[Line]: 

315 """Visit `async def`, `async for`, `async with`.""" 

316 yield from self.line() 

317 

318 children = iter(node.children) 

319 for child in children: 

320 yield from self.visit(child) 

321 

322 if child.type == token.ASYNC or child.type == STANDALONE_COMMENT: 

323 # STANDALONE_COMMENT happens when `# fmt: skip` is applied on the async 

324 # line. 

325 break 

326 

327 internal_stmt = next(children) 

328 if Preview.improved_async_statements_handling in self.mode: 

329 yield from self.visit(internal_stmt) 

330 else: 

331 for child in internal_stmt.children: 

332 yield from self.visit(child) 

333 

334 def visit_decorators(self, node: Node) -> Iterator[Line]: 

335 """Visit decorators.""" 

336 for child in node.children: 

337 yield from self.line() 

338 yield from self.visit(child) 

339 

340 def visit_power(self, node: Node) -> Iterator[Line]: 

341 for idx, leaf in enumerate(node.children[:-1]): 

342 next_leaf = node.children[idx + 1] 

343 

344 if not isinstance(leaf, Leaf): 

345 continue 

346 

347 value = leaf.value.lower() 

348 if ( 

349 leaf.type == token.NUMBER 

350 and next_leaf.type == syms.trailer 

351 # Ensure that we are in an attribute trailer 

352 and next_leaf.children[0].type == token.DOT 

353 # It shouldn't wrap hexadecimal, binary and octal literals 

354 and not value.startswith(("0x", "0b", "0o")) 

355 # It shouldn't wrap complex literals 

356 and "j" not in value 

357 ): 

358 wrap_in_parentheses(node, leaf) 

359 

360 remove_await_parens(node) 

361 

362 yield from self.visit_default(node) 

363 

364 def visit_SEMI(self, leaf: Leaf) -> Iterator[Line]: 

365 """Remove a semicolon and put the other statement on a separate line.""" 

366 yield from self.line() 

367 

368 def visit_ENDMARKER(self, leaf: Leaf) -> Iterator[Line]: 

369 """End of file. Process outstanding comments and end with a newline.""" 

370 yield from self.visit_default(leaf) 

371 yield from self.line() 

372 

373 def visit_STANDALONE_COMMENT(self, leaf: Leaf) -> Iterator[Line]: 

374 if not self.current_line.bracket_tracker.any_open_brackets(): 

375 yield from self.line() 

376 yield from self.visit_default(leaf) 

377 

378 def visit_factor(self, node: Node) -> Iterator[Line]: 

379 """Force parentheses between a unary op and a binary power: 

380 

381 -2 ** 8 -> -(2 ** 8) 

382 """ 

383 _operator, operand = node.children 

384 if ( 

385 operand.type == syms.power 

386 and len(operand.children) == 3 

387 and operand.children[1].type == token.DOUBLESTAR 

388 ): 

389 lpar = Leaf(token.LPAR, "(") 

390 rpar = Leaf(token.RPAR, ")") 

391 index = operand.remove() or 0 

392 node.insert_child(index, Node(syms.atom, [lpar, operand, rpar])) 

393 yield from self.visit_default(node) 

394 

395 def visit_STRING(self, leaf: Leaf) -> Iterator[Line]: 

396 if Preview.hex_codes_in_unicode_sequences in self.mode: 

397 normalize_unicode_escape_sequences(leaf) 

398 

399 if is_docstring(leaf) and "\\\n" not in leaf.value: 

400 # We're ignoring docstrings with backslash newline escapes because changing 

401 # indentation of those changes the AST representation of the code. 

402 if self.mode.string_normalization: 

403 docstring = normalize_string_prefix(leaf.value) 

404 # visit_default() does handle string normalization for us, but 

405 # since this method acts differently depending on quote style (ex. 

406 # see padding logic below), there's a possibility for unstable 

407 # formatting as visit_default() is called *after*. To avoid a 

408 # situation where this function formats a docstring differently on 

409 # the second pass, normalize it early. 

410 docstring = normalize_string_quotes(docstring) 

411 else: 

412 docstring = leaf.value 

413 prefix = get_string_prefix(docstring) 

414 docstring = docstring[len(prefix) :] # Remove the prefix 

415 quote_char = docstring[0] 

416 # A natural way to remove the outer quotes is to do: 

417 # docstring = docstring.strip(quote_char) 

418 # but that breaks on """""x""" (which is '""x'). 

419 # So we actually need to remove the first character and the next two 

420 # characters but only if they are the same as the first. 

421 quote_len = 1 if docstring[1] != quote_char else 3 

422 docstring = docstring[quote_len:-quote_len] 

423 docstring_started_empty = not docstring 

424 indent = " " * 4 * self.current_line.depth 

425 

426 if is_multiline_string(leaf): 

427 docstring = fix_docstring(docstring, indent) 

428 else: 

429 docstring = docstring.strip() 

430 

431 has_trailing_backslash = False 

432 if docstring: 

433 # Add some padding if the docstring starts / ends with a quote mark. 

434 if docstring[0] == quote_char: 

435 docstring = " " + docstring 

436 if docstring[-1] == quote_char: 

437 docstring += " " 

438 if docstring[-1] == "\\": 

439 backslash_count = len(docstring) - len(docstring.rstrip("\\")) 

440 if backslash_count % 2: 

441 # Odd number of trailing backslashes, add some padding to

442 # avoid escaping the closing string quote. 

443 docstring += " " 

444 has_trailing_backslash = True 

445 elif not docstring_started_empty: 

446 docstring = " " 

447 

448 # We could enforce triple quotes at this point. 

449 quote = quote_char * quote_len 

450 

451 # It's invalid to put closing single-character quotes on a new line. 

452 if self.mode and quote_len == 3: 

453 # We need to find the length of the last line of the docstring 

454 # to find if we can add the closing quotes to the line without 

455 # exceeding the maximum line length. 

456 # If docstring is one line, we don't put the closing quotes on a 

457 # separate line because it looks ugly (#3320). 

458 lines = docstring.splitlines() 

459 last_line_length = len(lines[-1]) if docstring else 0 

460 

461 # If adding closing quotes would cause the last line to exceed 

462 # the maximum line length then put a line break before the 

463 # closing quotes 

464 if ( 

465 len(lines) > 1 

466 and last_line_length + quote_len > self.mode.line_length 

467 and len(indent) + quote_len <= self.mode.line_length 

468 and not has_trailing_backslash 

469 ): 

470 leaf.value = prefix + quote + docstring + "\n" + indent + quote 

471 else: 

472 leaf.value = prefix + quote + docstring + quote 

473 else: 

474 leaf.value = prefix + quote + docstring + quote 

475 

476 yield from self.visit_default(leaf) 

477 
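# Illustrative sketch (not part of linegen.py): roughly what the docstring
# handling in visit_STRING() above does to a simple one-line docstring when
# string normalization is enabled. The before/after pair is an invented
# example, not output captured from Black itself.
#
#     def f():
#         '''   Compute things.   '''
#
# becomes
#
#     def f():
#         """Compute things."""
#
# (prefix and quotes normalized, surrounding whitespace stripped, and padding
# added only when the docstring starts or ends with a quote character).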

478 def __post_init__(self) -> None: 

479 """You are in a twisty little maze of passages.""" 

480 self.current_line = Line(mode=self.mode) 

481 

482 v = self.visit_stmt 

483 Ø: Set[str] = set() 

484 self.visit_assert_stmt = partial(v, keywords={"assert"}, parens={"assert", ","}) 

485 self.visit_if_stmt = partial( 

486 v, keywords={"if", "else", "elif"}, parens={"if", "elif"} 

487 ) 

488 self.visit_while_stmt = partial(v, keywords={"while", "else"}, parens={"while"}) 

489 self.visit_for_stmt = partial(v, keywords={"for", "else"}, parens={"for", "in"}) 

490 self.visit_try_stmt = partial( 

491 v, keywords={"try", "except", "else", "finally"}, parens=Ø 

492 ) 

493 self.visit_except_clause = partial(v, keywords={"except"}, parens={"except"}) 

494 self.visit_with_stmt = partial(v, keywords={"with"}, parens={"with"}) 

495 self.visit_classdef = partial(v, keywords={"class"}, parens=Ø) 

496 self.visit_expr_stmt = partial(v, keywords=Ø, parens=ASSIGNMENTS) 

497 self.visit_return_stmt = partial(v, keywords={"return"}, parens={"return"}) 

498 self.visit_import_from = partial(v, keywords=Ø, parens={"import"}) 

499 self.visit_del_stmt = partial(v, keywords=Ø, parens={"del"}) 

500 self.visit_async_funcdef = self.visit_async_stmt 

501 self.visit_decorated = self.visit_decorators 

502 

503 # PEP 634 

504 self.visit_match_stmt = self.visit_match_case 

505 self.visit_case_block = self.visit_match_case 

506 

507 
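# Illustrative sketch (not part of linegen.py): how a caller might drive
# LineGenerator, based only on the signatures defined above. `parsed` stands
# for an already-parsed blib2to3 Node; producing it is outside this module and
# is assumed here.
#
#     from black.mode import Mode
#
#     mode = Mode()
#     generator = LineGenerator(mode=mode, features=set())
#     for line in generator.visit(parsed):  # Visitor.visit() walks the tree
#         for formatted in transform_line(line, mode=mode, features=()):
#             print(line_to_string(formatted))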

508def transform_line( 

509 line: Line, mode: Mode, features: Collection[Feature] = () 

510) -> Iterator[Line]: 

511 """Transform a `line`, potentially splitting it into many lines. 

512 

513 They should fit in the allotted `line_length` but might not be able to. 

514 

515 `features` are syntactical features that may be used in the output. 

516 """ 

517 if line.is_comment: 

518 yield line 

519 return 

520 

521 line_str = line_to_string(line) 

522 

523 ll = mode.line_length 

524 sn = mode.string_normalization 

525 string_merge = StringMerger(ll, sn) 

526 string_paren_strip = StringParenStripper(ll, sn) 

527 string_split = StringSplitter(ll, sn) 

528 string_paren_wrap = StringParenWrapper(ll, sn) 

529 

530 transformers: List[Transformer] 

531 if ( 

532 not line.contains_uncollapsable_type_comments() 

533 and not line.should_split_rhs 

534 and not line.magic_trailing_comma 

535 and ( 

536 is_line_short_enough(line, mode=mode, line_str=line_str) 

537 or line.contains_unsplittable_type_ignore() 

538 ) 

539 and not (line.inside_brackets and line.contains_standalone_comments()) 

540 ): 

541 # Only apply basic string preprocessing, since lines shouldn't be split here. 

542 if Preview.string_processing in mode: 

543 transformers = [string_merge, string_paren_strip] 

544 else: 

545 transformers = [] 

546 elif line.is_def: 

547 transformers = [left_hand_split] 

548 else: 

549 

550 def _rhs( 

551 self: object, line: Line, features: Collection[Feature], mode: Mode 

552 ) -> Iterator[Line]: 

553 """Wraps calls to `right_hand_split`. 

554 

555 The calls increasingly `omit` right-hand trailers (bracket pairs with 

556 content), meaning the trailers get glued together to split on another 

557 bracket pair instead. 

558 """ 

559 for omit in generate_trailers_to_omit(line, mode.line_length): 

560 lines = list(right_hand_split(line, mode, features, omit=omit)) 

561 # Note: this check is only able to figure out if the first line of the 

562 # *current* transformation fits in the line length. This is true only 

563 # for simple cases. All others require running more transforms via 

564 # `transform_line()`. This check doesn't know if those would succeed. 

565 if is_line_short_enough(lines[0], mode=mode): 

566 yield from lines 

567 return 

568 

569 # All splits failed, best effort split with no omits. 

570 # This mostly happens to multiline strings that are by definition 

571 # reported as not fitting a single line, as well as lines that contain 

572 # trailing commas (those have to be exploded). 

573 yield from right_hand_split(line, mode, features=features) 

574 

575 # HACK: nested functions (like _rhs) compiled by mypyc don't retain their 

576 # __name__ attribute which is needed in `run_transformer` further down. 

577 # Unfortunately a nested class breaks mypyc too. So a class must be created 

578 # via type ... https://github.com/mypyc/mypyc/issues/884 

579 rhs = type("rhs", (), {"__call__": _rhs})() 

580 

581 if Preview.string_processing in mode: 

582 if line.inside_brackets: 

583 transformers = [ 

584 string_merge, 

585 string_paren_strip, 

586 string_split, 

587 delimiter_split, 

588 standalone_comment_split, 

589 string_paren_wrap, 

590 rhs, 

591 ] 

592 else: 

593 transformers = [ 

594 string_merge, 

595 string_paren_strip, 

596 string_split, 

597 string_paren_wrap, 

598 rhs, 

599 ] 

600 else: 

601 if line.inside_brackets: 

602 transformers = [delimiter_split, standalone_comment_split, rhs] 

603 else: 

604 transformers = [rhs] 

605 # It's always safe to attempt hugging of power operations and pretty much every line 

606 # could match. 

607 transformers.append(hug_power_op) 

608 

609 for transform in transformers: 

610 # We are accumulating lines in `result` because we might want to abort 

611 # mission and return the original line in the end, or attempt a different 

612 # split altogether. 

613 try: 

614 result = run_transformer(line, transform, mode, features, line_str=line_str) 

615 except CannotTransform: 

616 continue 

617 else: 

618 yield from result 

619 break 

620 

621 else: 

622 yield line 

623 

624 
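# Illustrative example (not part of linegen.py): when a line is too long,
# transform_line() above tries its transformers in order until one yields a
# changed result. For a plain call that overflows the line length, the `rhs`
# (right_hand_split) transformer typically produces output along these lines;
# the names below are invented for the example.
#
#     result = some_function(argument_one, argument_two, argument_three)
#
# becomes
#
#     result = some_function(
#         argument_one, argument_two, argument_three
#     )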

625class _BracketSplitComponent(Enum): 

626 head = auto() 

627 body = auto() 

628 tail = auto() 

629 

630 

631def left_hand_split( 

632 line: Line, _features: Collection[Feature], mode: Mode 

633) -> Iterator[Line]: 

634 """Split line into many lines, starting with the first matching bracket pair. 

635 

636 Note: this usually looks weird, only use this for function definitions. 

637 Prefer RHS otherwise. This is why this function is not symmetrical with 

638 :func:`right_hand_split` which also handles optional parentheses. 

639 """ 

640 tail_leaves: List[Leaf] = [] 

641 body_leaves: List[Leaf] = [] 

642 head_leaves: List[Leaf] = [] 

643 current_leaves = head_leaves 

644 matching_bracket: Optional[Leaf] = None 

645 for leaf in line.leaves: 

646 if ( 

647 current_leaves is body_leaves 

648 and leaf.type in CLOSING_BRACKETS 

649 and leaf.opening_bracket is matching_bracket 

650 and isinstance(matching_bracket, Leaf) 

651 ): 

652 ensure_visible(leaf) 

653 ensure_visible(matching_bracket) 

654 current_leaves = tail_leaves if body_leaves else head_leaves 

655 current_leaves.append(leaf) 

656 if current_leaves is head_leaves: 

657 if leaf.type in OPENING_BRACKETS: 

658 matching_bracket = leaf 

659 current_leaves = body_leaves 

660 if not matching_bracket: 

661 raise CannotSplit("No brackets found") 

662 

663 head = bracket_split_build_line( 

664 head_leaves, line, matching_bracket, component=_BracketSplitComponent.head 

665 ) 

666 body = bracket_split_build_line( 

667 body_leaves, line, matching_bracket, component=_BracketSplitComponent.body 

668 ) 

669 tail = bracket_split_build_line( 

670 tail_leaves, line, matching_bracket, component=_BracketSplitComponent.tail 

671 ) 

672 bracket_split_succeeded_or_raise(head, body, tail) 

673 for result in (head, body, tail): 

674 if result: 

675 yield result 

676 

677 
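# Illustrative example (not part of linegen.py): left_hand_split() splits on
# the *first* bracket pair, which is why it is reserved for function
# definitions. A long signature such as
#
#     def process(records: List[Record], *, strict: bool = False) -> Summary:
#
# is split into head / body / tail roughly as
#
#     def process(
#         records: List[Record], *, strict: bool = False
#     ) -> Summary:
#
# (the names here are invented for the example).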

678def right_hand_split( 

679 line: Line, 

680 mode: Mode, 

681 features: Collection[Feature] = (), 

682 omit: Collection[LeafID] = (), 

683) -> Iterator[Line]: 

684 """Split line into many lines, starting with the last matching bracket pair. 

685 

686 If the split was by optional parentheses, attempt splitting without them, too. 

687 `omit` is a collection of closing bracket IDs that shouldn't be considered for 

688 this split. 

689 

690 Note: running this function modifies `bracket_depth` on the leaves of `line`. 

691 """ 

692 rhs_result = _first_right_hand_split(line, omit=omit) 

693 yield from _maybe_split_omitting_optional_parens( 

694 rhs_result, line, mode, features=features, omit=omit 

695 ) 

696 

697 

698def _first_right_hand_split( 

699 line: Line, 

700 omit: Collection[LeafID] = (), 

701) -> RHSResult: 

702 """Split the line into head, body, tail starting with the last bracket pair. 

703 

704 Note: this function should not have side effects. It's relied upon by 

705 _maybe_split_omitting_optional_parens to get an opinion whether to prefer 

706 splitting on the right side of an assignment statement. 

707 """ 

708 tail_leaves: List[Leaf] = [] 

709 body_leaves: List[Leaf] = [] 

710 head_leaves: List[Leaf] = [] 

711 current_leaves = tail_leaves 

712 opening_bracket: Optional[Leaf] = None 

713 closing_bracket: Optional[Leaf] = None 

714 for leaf in reversed(line.leaves): 

715 if current_leaves is body_leaves: 

716 if leaf is opening_bracket: 

717 current_leaves = head_leaves if body_leaves else tail_leaves 

718 current_leaves.append(leaf) 

719 if current_leaves is tail_leaves: 

720 if leaf.type in CLOSING_BRACKETS and id(leaf) not in omit: 

721 opening_bracket = leaf.opening_bracket 

722 closing_bracket = leaf 

723 current_leaves = body_leaves 

724 if not (opening_bracket and closing_bracket and head_leaves): 

725 # If there is no opening_bracket or closing_bracket, that means the split failed and

726 # all content is in the tail. Otherwise, if `head_leaves` are empty, it means 

727 # the matching `opening_bracket` wasn't available on `line` anymore. 

728 raise CannotSplit("No brackets found") 

729 

730 tail_leaves.reverse() 

731 body_leaves.reverse() 

732 head_leaves.reverse() 

733 head = bracket_split_build_line( 

734 head_leaves, line, opening_bracket, component=_BracketSplitComponent.head 

735 ) 

736 body = bracket_split_build_line( 

737 body_leaves, line, opening_bracket, component=_BracketSplitComponent.body 

738 ) 

739 tail = bracket_split_build_line( 

740 tail_leaves, line, opening_bracket, component=_BracketSplitComponent.tail 

741 ) 

742 bracket_split_succeeded_or_raise(head, body, tail) 

743 return RHSResult(head, body, tail, opening_bracket, closing_bracket) 

744 

745 
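# Illustrative sketch (not part of linegen.py): _first_right_hand_split() works
# backwards from the *last* bracket pair. Given a made-up line like
#
#     value = lookup(table)[compute_key(record)]
#
# the first split uses the trailing `[...]` pair; if the caller then passes the
# id of that closing `]` in `omit`, the next attempt splits on `lookup(...)`
# instead. generate_trailers_to_omit() further down produces exactly such
# cumulative `omit` sets.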

746def _maybe_split_omitting_optional_parens( 

747 rhs: RHSResult, 

748 line: Line, 

749 mode: Mode, 

750 features: Collection[Feature] = (), 

751 omit: Collection[LeafID] = (), 

752) -> Iterator[Line]: 

753 if ( 

754 Feature.FORCE_OPTIONAL_PARENTHESES not in features 

755 # the opening bracket is an optional paren 

756 and rhs.opening_bracket.type == token.LPAR 

757 and not rhs.opening_bracket.value 

758 # the closing bracket is an optional paren 

759 and rhs.closing_bracket.type == token.RPAR 

760 and not rhs.closing_bracket.value 

761 # it's not an import (optional parens are the only thing we can split on 

762 # in this case; attempting a split without them is a waste of time) 

763 and not line.is_import 

764 # there are no standalone comments in the body 

765 and not rhs.body.contains_standalone_comments(0) 

766 # and we can actually remove the parens 

767 and can_omit_invisible_parens(rhs, mode.line_length) 

768 ): 

769 omit = {id(rhs.closing_bracket), *omit} 

770 try: 

771 # The RHSResult Omitting Optional Parens. 

772 rhs_oop = _first_right_hand_split(line, omit=omit) 

773 if not ( 

774 Preview.prefer_splitting_right_hand_side_of_assignments in line.mode 

775 # the split is right after `=` 

776 and len(rhs.head.leaves) >= 2 

777 and rhs.head.leaves[-2].type == token.EQUAL 

778 # the left side of assignment contains brackets 

779 and any(leaf.type in BRACKETS for leaf in rhs.head.leaves[:-1]) 

780 # the left side of assignment is short enough (the -1 is for the ending 

781 # optional paren) 

782 and is_line_short_enough( 

783 rhs.head, mode=replace(mode, line_length=mode.line_length - 1) 

784 ) 

785 # the left side of assignment won't explode further because of magic 

786 # trailing comma 

787 and rhs.head.magic_trailing_comma is None 

788 # the split by omitting optional parens isn't preferred by some other 

789 # reason 

790 and not _prefer_split_rhs_oop(rhs_oop, mode) 

791 ): 

792 yield from _maybe_split_omitting_optional_parens( 

793 rhs_oop, line, mode, features=features, omit=omit 

794 ) 

795 return 

796 

797 except CannotSplit as e: 

798 if not ( 

799 can_be_split(rhs.body) or is_line_short_enough(rhs.body, mode=mode) 

800 ): 

801 raise CannotSplit( 

802 "Splitting failed, body is still too long and can't be split." 

803 ) from e 

804 

805 elif ( 

806 rhs.head.contains_multiline_strings() 

807 or rhs.tail.contains_multiline_strings() 

808 ): 

809 raise CannotSplit( 

810 "The current optional pair of parentheses is bound to fail to" 

811 " satisfy the splitting algorithm because the head or the tail" 

812 " contains multiline strings which by definition never fit one" 

813 " line." 

814 ) from e 

815 

816 ensure_visible(rhs.opening_bracket) 

817 ensure_visible(rhs.closing_bracket) 

818 for result in (rhs.head, rhs.body, rhs.tail): 

819 if result: 

820 yield result 

821 

822 

823def _prefer_split_rhs_oop(rhs_oop: RHSResult, mode: Mode) -> bool: 

824 """ 

825 Returns whether we should prefer the result from a split omitting optional parens. 

826 """ 

827 has_closing_bracket_after_assign = False 

828 for leaf in reversed(rhs_oop.head.leaves): 

829 if leaf.type == token.EQUAL: 

830 break 

831 if leaf.type in CLOSING_BRACKETS: 

832 has_closing_bracket_after_assign = True 

833 break 

834 return ( 

835 # contains matching brackets after the `=` (done by checking there is a 

836 # closing bracket) 

837 has_closing_bracket_after_assign 

838 or ( 

839 # the split is actually from inside the optional parens (done by checking 

840 # the first line still contains the `=`) 

841 any(leaf.type == token.EQUAL for leaf in rhs_oop.head.leaves) 

842 # the first line is short enough 

843 and is_line_short_enough(rhs_oop.head, mode=mode) 

844 ) 

845 # contains unsplittable type ignore 

846 or rhs_oop.head.contains_unsplittable_type_ignore() 

847 or rhs_oop.body.contains_unsplittable_type_ignore() 

848 or rhs_oop.tail.contains_unsplittable_type_ignore() 

849 ) 

850 

851 

852def bracket_split_succeeded_or_raise(head: Line, body: Line, tail: Line) -> None: 

853 """Raise :exc:`CannotSplit` if the last left- or right-hand split failed. 

854 

855 Do nothing otherwise. 

856 

857 A left- or right-hand split is based on a pair of brackets. Content before 

858 (and including) the opening bracket is left on one line, content inside the 

859 brackets is put on a separate line, and finally content starting with and 

860 following the closing bracket is put on a separate line. 

861 

862 Those are called `head`, `body`, and `tail`, respectively. If the split 

863 produced the same line (all content in `head`) or ended up with an empty `body` 

864 and the `tail` is just the closing bracket, then it's considered failed. 

865 """ 

866 tail_len = len(str(tail).strip()) 

867 if not body: 

868 if tail_len == 0: 

869 raise CannotSplit("Splitting brackets produced the same line") 

870 

871 elif tail_len < 3: 

872 raise CannotSplit( 

873 f"Splitting brackets on an empty body to save {tail_len} characters is" 

874 " not worth it" 

875 ) 

876 

877 

878def bracket_split_build_line( 

879 leaves: List[Leaf], 

880 original: Line, 

881 opening_bracket: Leaf, 

882 *, 

883 component: _BracketSplitComponent, 

884) -> Line: 

885 """Return a new line with given `leaves` and respective comments from `original`. 

886 

887 If it's the head component, brackets will be tracked so trailing commas are 

888 respected. 

889 

890 If it's the body component, the result line is one-indented inside brackets and as 

891 such has its first leaf's prefix normalized and a trailing comma added when 

892 expected. 

893 """ 

894 result = Line(mode=original.mode, depth=original.depth) 

895 if component is _BracketSplitComponent.body: 

896 result.inside_brackets = True 

897 result.depth += 1 

898 if leaves: 

899 # Since body is a new indent level, remove spurious leading whitespace. 

900 normalize_prefix(leaves[0], inside_brackets=True) 

901 # Ensure a trailing comma for imports and standalone function arguments, but 

902 # be careful not to add one after any comments or within type annotations. 

903 no_commas = ( 

904 original.is_def 

905 and opening_bracket.value == "(" 

906 and not any(leaf.type == token.COMMA for leaf in leaves) 

907 # In particular, don't add one within a parenthesized return annotation. 

908 # Unfortunately the indicator we're in a return annotation (RARROW) may 

909 # be defined directly in the parent node, the parent of the parent ... 

910 # and so on depending on how complex the return annotation is. 

911 # This isn't perfect and there are some false negatives, but they are in

912 # contexts where a comma is actually fine.

913 and not any( 

914 node.prev_sibling.type == RARROW 

915 for node in ( 

916 leaves[0].parent, 

917 getattr(leaves[0].parent, "parent", None), 

918 ) 

919 if isinstance(node, Node) and isinstance(node.prev_sibling, Leaf) 

920 ) 

921 ) 

922 

923 if original.is_import or no_commas: 

924 for i in range(len(leaves) - 1, -1, -1): 

925 if leaves[i].type == STANDALONE_COMMENT: 

926 continue 

927 

928 if leaves[i].type != token.COMMA: 

929 new_comma = Leaf(token.COMMA, ",") 

930 leaves.insert(i + 1, new_comma) 

931 break 

932 

933 leaves_to_track: Set[LeafID] = set() 

934 if component is _BracketSplitComponent.head: 

935 leaves_to_track = get_leaves_inside_matching_brackets(leaves) 

936 # Populate the line 

937 for leaf in leaves: 

938 result.append( 

939 leaf, 

940 preformatted=True, 

941 track_bracket=id(leaf) in leaves_to_track, 

942 ) 

943 for comment_after in original.comments_after(leaf): 

944 result.append(comment_after, preformatted=True) 

945 if component is _BracketSplitComponent.body and should_split_line( 

946 result, opening_bracket 

947 ): 

948 result.should_split_rhs = True 

949 return result 

950 

951 
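# Illustrative example (not part of linegen.py): for a from-import that does
# not fit on one line, the body built by bracket_split_build_line() gets a
# trailing comma appended, so
#
#     from package import (name_one, name_two, ...)
#
# explodes as
#
#     from package import (
#         name_one,
#         name_two,
#         ...,
#     )
#
# (package and the imported names are placeholders for the example).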

952def dont_increase_indentation(split_func: Transformer) -> Transformer: 

953 """Normalize prefix of the first leaf in every line returned by `split_func`. 

954 

955 This is a decorator over relevant split functions. 

956 """ 

957 

958 @wraps(split_func) 

959 def split_wrapper( 

960 line: Line, features: Collection[Feature], mode: Mode 

961 ) -> Iterator[Line]: 

962 for split_line in split_func(line, features, mode): 

963 normalize_prefix(split_line.leaves[0], inside_brackets=True) 

964 yield split_line 

965 

966 return split_wrapper 

967 

968 

969def _get_last_non_comment_leaf(line: Line) -> Optional[int]: 

970 for leaf_idx in range(len(line.leaves) - 1, 0, -1): 

971 if line.leaves[leaf_idx].type != STANDALONE_COMMENT: 

972 return leaf_idx 

973 return None 

974 

975 

976def _safe_add_trailing_comma(safe: bool, delimiter_priority: int, line: Line) -> Line: 

977 if ( 

978 safe 

979 and delimiter_priority == COMMA_PRIORITY 

980 and line.leaves[-1].type != token.COMMA 

981 and line.leaves[-1].type != STANDALONE_COMMENT 

982 ): 

983 new_comma = Leaf(token.COMMA, ",") 

984 line.append(new_comma) 

985 return line 

986 

987 

988@dont_increase_indentation 

989def delimiter_split( 

990 line: Line, features: Collection[Feature], mode: Mode 

991) -> Iterator[Line]: 

992 """Split according to delimiters of the highest priority. 

993 

994 If the appropriate Features are given, the split will add trailing commas 

995 also in function signatures and calls that contain `*` and `**`. 

996 """ 

997 try: 

998 last_leaf = line.leaves[-1] 

999 except IndexError: 

1000 raise CannotSplit("Line empty") from None 

1001 

1002 bt = line.bracket_tracker 

1003 try: 

1004 delimiter_priority = bt.max_delimiter_priority(exclude={id(last_leaf)}) 

1005 except ValueError: 

1006 raise CannotSplit("No delimiters found") from None 

1007 

1008 if delimiter_priority == DOT_PRIORITY: 

1009 if bt.delimiter_count_with_priority(delimiter_priority) == 1: 

1010 raise CannotSplit("Splitting a single attribute from its owner looks wrong") 

1011 

1012 current_line = Line( 

1013 mode=line.mode, depth=line.depth, inside_brackets=line.inside_brackets 

1014 ) 

1015 lowest_depth = sys.maxsize 

1016 trailing_comma_safe = True 

1017 

1018 def append_to_line(leaf: Leaf) -> Iterator[Line]: 

1019 """Append `leaf` to current line or to new line if appending impossible.""" 

1020 nonlocal current_line 

1021 try: 

1022 current_line.append_safe(leaf, preformatted=True) 

1023 except ValueError: 

1024 yield current_line 

1025 

1026 current_line = Line( 

1027 mode=line.mode, depth=line.depth, inside_brackets=line.inside_brackets 

1028 ) 

1029 current_line.append(leaf) 

1030 

1031 last_non_comment_leaf = _get_last_non_comment_leaf(line) 

1032 for leaf_idx, leaf in enumerate(line.leaves): 

1033 yield from append_to_line(leaf) 

1034 

1035 for comment_after in line.comments_after(leaf): 

1036 yield from append_to_line(comment_after) 

1037 

1038 lowest_depth = min(lowest_depth, leaf.bracket_depth) 

1039 if leaf.bracket_depth == lowest_depth: 

1040 if is_vararg(leaf, within={syms.typedargslist}): 

1041 trailing_comma_safe = ( 

1042 trailing_comma_safe and Feature.TRAILING_COMMA_IN_DEF in features 

1043 ) 

1044 elif is_vararg(leaf, within={syms.arglist, syms.argument}): 

1045 trailing_comma_safe = ( 

1046 trailing_comma_safe and Feature.TRAILING_COMMA_IN_CALL in features 

1047 ) 

1048 

1049 if ( 

1050 Preview.add_trailing_comma_consistently in mode 

1051 and last_leaf.type == STANDALONE_COMMENT 

1052 and leaf_idx == last_non_comment_leaf 

1053 ): 

1054 current_line = _safe_add_trailing_comma( 

1055 trailing_comma_safe, delimiter_priority, current_line 

1056 ) 

1057 

1058 leaf_priority = bt.delimiters.get(id(leaf)) 

1059 if leaf_priority == delimiter_priority: 

1060 yield current_line 

1061 

1062 current_line = Line( 

1063 mode=line.mode, depth=line.depth, inside_brackets=line.inside_brackets 

1064 ) 

1065 if current_line: 

1066 current_line = _safe_add_trailing_comma( 

1067 trailing_comma_safe, delimiter_priority, current_line 

1068 ) 

1069 yield current_line 

1070 

1071 
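# Illustrative example (not part of linegen.py): delimiter_split() breaks a
# bracketed body on its highest-priority delimiter, usually commas, yielding
# one element per line and, when safe, a trailing comma. A bracketed body such
# as
#
#     element_one, element_two, element_three
#
# becomes
#
#     element_one,
#     element_two,
#     element_three,
#
# (names invented for the example).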

1072@dont_increase_indentation 

1073def standalone_comment_split( 

1074 line: Line, features: Collection[Feature], mode: Mode 

1075) -> Iterator[Line]: 

1076 """Split standalone comments from the rest of the line.""" 

1077 if not line.contains_standalone_comments(0): 

1078 raise CannotSplit("Line does not have any standalone comments") 

1079 

1080 current_line = Line( 

1081 mode=line.mode, depth=line.depth, inside_brackets=line.inside_brackets 

1082 ) 

1083 

1084 def append_to_line(leaf: Leaf) -> Iterator[Line]: 

1085 """Append `leaf` to current line or to new line if appending impossible.""" 

1086 nonlocal current_line 

1087 try: 

1088 current_line.append_safe(leaf, preformatted=True) 

1089 except ValueError: 

1090 yield current_line 

1091 

1092 current_line = Line( 

1093 line.mode, depth=line.depth, inside_brackets=line.inside_brackets 

1094 ) 

1095 current_line.append(leaf) 

1096 

1097 for leaf in line.leaves: 

1098 yield from append_to_line(leaf) 

1099 

1100 for comment_after in line.comments_after(leaf): 

1101 yield from append_to_line(comment_after) 

1102 

1103 if current_line: 

1104 yield current_line 

1105 

1106 

1107def normalize_prefix(leaf: Leaf, *, inside_brackets: bool) -> None: 

1108 """Leave existing extra newlines if not `inside_brackets`. Remove everything 

1109 else. 

1110 

1111 Note: don't use backslashes for formatting or you'll lose your voting rights. 

1112 """ 

1113 if not inside_brackets: 

1114 spl = leaf.prefix.split("#") 

1115 if "\\" not in spl[0]: 

1116 nl_count = spl[-1].count("\n") 

1117 if len(spl) > 1: 

1118 nl_count -= 1 

1119 leaf.prefix = "\n" * nl_count 

1120 return 

1121 

1122 leaf.prefix = "" 

1123 

1124 

1125def normalize_invisible_parens( 

1126 node: Node, parens_after: Set[str], *, mode: Mode, features: Collection[Feature] 

1127) -> None: 

1128 """Make existing optional parentheses invisible or create new ones. 

1129 

1130 `parens_after` is a set of string leaf values immediately after which parens 

1131 should be put. 

1132 

1133 Standardizes on visible parentheses for single-element tuples, and keeps 

1134 existing visible parentheses for other tuples and generator expressions. 

1135 """ 

1136 for pc in list_comments(node.prefix, is_endmarker=False): 

1137 if pc.value in FMT_OFF: 

1138 # This `node` has a prefix with `# fmt: off`, don't mess with parens. 

1139 return 

1140 

1141 # The multiple context managers grammar has a different pattern, thus this is 

1142 # separate from the for-loop below. This possibly wraps them in invisible parens, 

1143 # which will later be removed in remove_with_parens when needed.

1144 if node.type == syms.with_stmt: 

1145 _maybe_wrap_cms_in_parens(node, mode, features) 

1146 

1147 check_lpar = False 

1148 for index, child in enumerate(list(node.children)): 

1149 # Fixes a bug where invisible parens are not properly stripped from 

1150 # assignment statements that contain type annotations. 

1151 if isinstance(child, Node) and child.type == syms.annassign: 

1152 normalize_invisible_parens( 

1153 child, parens_after=parens_after, mode=mode, features=features 

1154 ) 

1155 

1156 # Add parentheses around long tuple unpacking in assignments. 

1157 if ( 

1158 index == 0 

1159 and isinstance(child, Node) 

1160 and child.type == syms.testlist_star_expr 

1161 ): 

1162 check_lpar = True 

1163 

1164 if check_lpar: 

1165 if ( 

1166 child.type == syms.atom 

1167 and node.type == syms.for_stmt 

1168 and isinstance(child.prev_sibling, Leaf) 

1169 and child.prev_sibling.type == token.NAME 

1170 and child.prev_sibling.value == "for" 

1171 ): 

1172 if maybe_make_parens_invisible_in_atom( 

1173 child, 

1174 parent=node, 

1175 remove_brackets_around_comma=True, 

1176 ): 

1177 wrap_in_parentheses(node, child, visible=False) 

1178 elif isinstance(child, Node) and node.type == syms.with_stmt: 

1179 remove_with_parens(child, node) 

1180 elif child.type == syms.atom: 

1181 if maybe_make_parens_invisible_in_atom( 

1182 child, 

1183 parent=node, 

1184 ): 

1185 wrap_in_parentheses(node, child, visible=False) 

1186 elif is_one_tuple(child): 

1187 wrap_in_parentheses(node, child, visible=True) 

1188 elif node.type == syms.import_from: 

1189 _normalize_import_from(node, child, index) 

1190 break 

1191 elif ( 

1192 index == 1 

1193 and child.type == token.STAR 

1194 and node.type == syms.except_clause 

1195 ): 

1196 # In except* (PEP 654), the star is actually part of 

1197 # the keyword. So we need to skip the insertion of

1198 # invisible parentheses to work more precisely. 

1199 continue 

1200 

1201 elif not (isinstance(child, Leaf) and is_multiline_string(child)): 

1202 wrap_in_parentheses(node, child, visible=False) 

1203 

1204 comma_check = child.type == token.COMMA 

1205 

1206 check_lpar = isinstance(child, Leaf) and ( 

1207 child.value in parens_after or comma_check 

1208 ) 

1209 

1210 
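# Illustrative example (not part of linegen.py): "invisible" parentheses are
# LPAR/RPAR leaves with an empty value. normalize_invisible_parens() above
# makes redundant user-written parens invisible, e.g.
#
#     return (x)        # stringifies as:  return x
#
# while still allowing a later split to make them visible again when the
# wrapped expression is too long:
#
#     return (
#         some_very_long_expression
#     )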

1211def _normalize_import_from(parent: Node, child: LN, index: int) -> None: 

1212 # "import from" nodes store parentheses directly as part of 

1213 # the statement 

1214 if is_lpar_token(child): 

1215 assert is_rpar_token(parent.children[-1]) 

1216 # make parentheses invisible 

1217 child.value = "" 

1218 parent.children[-1].value = "" 

1219 elif child.type != token.STAR: 

1220 # insert invisible parentheses 

1221 parent.insert_child(index, Leaf(token.LPAR, "")) 

1222 parent.append_child(Leaf(token.RPAR, "")) 

1223 

1224 

1225def remove_await_parens(node: Node) -> None: 

1226 if node.children[0].type == token.AWAIT and len(node.children) > 1: 

1227 if ( 

1228 node.children[1].type == syms.atom 

1229 and node.children[1].children[0].type == token.LPAR 

1230 ): 

1231 if maybe_make_parens_invisible_in_atom( 

1232 node.children[1], 

1233 parent=node, 

1234 remove_brackets_around_comma=True, 

1235 ): 

1236 wrap_in_parentheses(node, node.children[1], visible=False) 

1237 

1238 # Since await is an expression we shouldn't remove 

1239 # brackets in cases where this would change 

1240 # the AST due to operator precedence. 

1241 # Therefore we only aim to remove brackets around 

1242 # power nodes that aren't also await expressions themselves. 

1243 # https://peps.python.org/pep-0492/#updated-operator-precedence-table 

1244 # N.B. We've still removed any redundant nested brackets though :) 

1245 opening_bracket = cast(Leaf, node.children[1].children[0]) 

1246 closing_bracket = cast(Leaf, node.children[1].children[-1]) 

1247 bracket_contents = node.children[1].children[1] 

1248 if isinstance(bracket_contents, Node): 

1249 if bracket_contents.type != syms.power: 

1250 ensure_visible(opening_bracket) 

1251 ensure_visible(closing_bracket) 

1252 elif ( 

1253 bracket_contents.type == syms.power 

1254 and bracket_contents.children[0].type == token.AWAIT 

1255 ): 

1256 ensure_visible(opening_bracket) 

1257 ensure_visible(closing_bracket) 

1258 # If we are in a nested await then recurse down. 

1259 remove_await_parens(bracket_contents) 

1260 

1261 

1262def _maybe_wrap_cms_in_parens( 

1263 node: Node, mode: Mode, features: Collection[Feature] 

1264) -> None: 

1265 """When enabled and safe, wrap the multiple context managers in invisible parens. 

1266 

1267 It is only safe when `features` contain Feature.PARENTHESIZED_CONTEXT_MANAGERS. 

1268 """ 

1269 if ( 

1270 Feature.PARENTHESIZED_CONTEXT_MANAGERS not in features 

1271 or Preview.wrap_multiple_context_managers_in_parens not in mode 

1272 or len(node.children) <= 2 

1273 # If it's an atom, it's already wrapped in parens. 

1274 or node.children[1].type == syms.atom 

1275 ): 

1276 return 

1277 colon_index: Optional[int] = None 

1278 for i in range(2, len(node.children)): 

1279 if node.children[i].type == token.COLON: 

1280 colon_index = i 

1281 break 

1282 if colon_index is not None: 

1283 lpar = Leaf(token.LPAR, "") 

1284 rpar = Leaf(token.RPAR, "") 

1285 context_managers = node.children[1:colon_index] 

1286 for child in context_managers: 

1287 child.remove() 

1288 # After wrapping, the with_stmt will look like this: 

1289 # with_stmt 

1290 # NAME 'with' 

1291 # atom 

1292 # LPAR '' 

1293 # testlist_gexp 

1294 # ... <-- context_managers 

1295 # /testlist_gexp 

1296 # RPAR '' 

1297 # /atom 

1298 # COLON ':' 

1299 new_child = Node( 

1300 syms.atom, [lpar, Node(syms.testlist_gexp, context_managers), rpar] 

1301 ) 

1302 node.insert_child(1, new_child) 

1303 

1304 

1305def remove_with_parens(node: Node, parent: Node) -> None: 

1306 """Recursively hide optional parens in `with` statements.""" 

1307 # Removing all unnecessary parentheses in with statements in one pass is a tad 

1308 # complex as different variations of bracketed statements result in pretty 

1309 # different parse trees: 

1310 # 

1311 # with (open("file")) as f: # this is an asexpr_test 

1312 # ... 

1313 # 

1314 # with (open("file") as f): # this is an atom containing an 

1315 # ... # asexpr_test 

1316 # 

1317 # with (open("file")) as f, (open("file")) as f: # this is asexpr_test, COMMA, 

1318 # ... # asexpr_test 

1319 # 

1320 # with (open("file") as f, open("file") as f): # an atom containing a 

1321 # ... # testlist_gexp which then 

1322 # # contains multiple asexpr_test(s) 

1323 if node.type == syms.atom: 

1324 if maybe_make_parens_invisible_in_atom( 

1325 node, 

1326 parent=parent, 

1327 remove_brackets_around_comma=True, 

1328 ): 

1329 wrap_in_parentheses(parent, node, visible=False) 

1330 if isinstance(node.children[1], Node): 

1331 remove_with_parens(node.children[1], node) 

1332 elif node.type == syms.testlist_gexp: 

1333 for child in node.children: 

1334 if isinstance(child, Node): 

1335 remove_with_parens(child, node) 

1336 elif node.type == syms.asexpr_test and not any( 

1337 leaf.type == token.COLONEQUAL for leaf in node.leaves() 

1338 ): 

1339 if maybe_make_parens_invisible_in_atom( 

1340 node.children[0], 

1341 parent=node, 

1342 remove_brackets_around_comma=True, 

1343 ): 

1344 wrap_in_parentheses(node, node.children[0], visible=False) 

1345 

1346 

1347def maybe_make_parens_invisible_in_atom( 

1348 node: LN, 

1349 parent: LN, 

1350 remove_brackets_around_comma: bool = False, 

1351) -> bool: 

1352 """If it's safe, make the parens in the atom `node` invisible, recursively. 

1353 Additionally, remove repeated, adjacent invisible parens from the atom `node` 

1354 as they are redundant. 

1355 

1356 Returns whether the node should itself be wrapped in invisible parentheses. 

1357 """ 

1358 if ( 

1359 node.type != syms.atom 

1360 or is_empty_tuple(node) 

1361 or is_one_tuple(node) 

1362 or (is_yield(node) and parent.type != syms.expr_stmt) 

1363 or ( 

1364 # This condition tries to prevent removing non-optional brackets 

1365 # around a tuple; however, it can be a bit overzealous, so we provide

1366 # an option to skip this check for `for` and `with` statements.

1367 not remove_brackets_around_comma 

1368 and max_delimiter_priority_in_atom(node) >= COMMA_PRIORITY 

1369 ) 

1370 or is_tuple_containing_walrus(node) 

1371 ): 

1372 return False 

1373 

1374 if is_walrus_assignment(node): 

1375 if parent.type in [ 

1376 syms.annassign, 

1377 syms.expr_stmt, 

1378 syms.assert_stmt, 

1379 syms.return_stmt, 

1380 syms.except_clause, 

1381 syms.funcdef, 

1382 syms.with_stmt, 

1383 # these ones aren't useful to end users, but they do please fuzzers 

1384 syms.for_stmt, 

1385 syms.del_stmt, 

1386 syms.for_stmt, 

1387 ]: 

1388 return False 

1389 

1390 first = node.children[0] 

1391 last = node.children[-1] 

1392 if is_lpar_token(first) and is_rpar_token(last): 

1393 middle = node.children[1] 

1394 # make parentheses invisible 

1395 first.value = "" 

1396 last.value = "" 

1397 maybe_make_parens_invisible_in_atom( 

1398 middle, 

1399 parent=parent, 

1400 remove_brackets_around_comma=remove_brackets_around_comma, 

1401 ) 

1402 

1403 if is_atom_with_invisible_parens(middle): 

1404 # Strip the invisible parens from `middle` by replacing 

1405 # it with the child in-between the invisible parens 

1406 middle.replace(middle.children[1]) 

1407 

1408 return False 

1409 

1410 return True 

1411 

1412 

1413def should_split_line(line: Line, opening_bracket: Leaf) -> bool: 

1414 """Should `line` be immediately split with `delimiter_split()` after RHS?""" 

1415 

1416 if not (opening_bracket.parent and opening_bracket.value in "[{("): 

1417 return False 

1418 

1419 # We're essentially checking if the body is delimited by commas and there's more 

1420 # than one of them (we're excluding the trailing comma and if the delimiter priority 

1421 # is still commas, that means there's more). 

1422 exclude = set() 

1423 trailing_comma = False 

1424 try: 

1425 last_leaf = line.leaves[-1] 

1426 if last_leaf.type == token.COMMA: 

1427 trailing_comma = True 

1428 exclude.add(id(last_leaf)) 

1429 max_priority = line.bracket_tracker.max_delimiter_priority(exclude=exclude) 

1430 except (IndexError, ValueError): 

1431 return False 

1432 

1433 return max_priority == COMMA_PRIORITY and ( 

1434 (line.mode.magic_trailing_comma and trailing_comma) 

1435 # always explode imports 

1436 or opening_bracket.parent.type in {syms.atom, syms.import_from} 

1437 ) 

1438 

1439 
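# Illustrative example (not part of linegen.py): the magic-trailing-comma check
# in should_split_line() above is part of what keeps an already-exploded
# literal exploded. With a trailing comma present,
#
#     names = [alice, bob,]
#
# is kept one element per line even though it would fit on a single line:
#
#     names = [
#         alice,
#         bob,
#     ]
#
# (names invented for the example).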

1440def generate_trailers_to_omit(line: Line, line_length: int) -> Iterator[Set[LeafID]]: 

1441 """Generate sets of closing bracket IDs that should be omitted in a RHS. 

1442 

1443 Brackets can be omitted if the entire trailer up to and including 

1444 a preceding closing bracket fits in one line. 

1445 

1446 Yielded sets are cumulative (contain results of previous yields, too). First 

1447 set is empty, unless the line should explode, in which case bracket pairs until 

1448 the one that needs to explode are omitted. 

1449 """ 

1450 

1451 omit: Set[LeafID] = set() 

1452 if not line.magic_trailing_comma: 

1453 yield omit 

1454 

1455 length = 4 * line.depth 

1456 opening_bracket: Optional[Leaf] = None 

1457 closing_bracket: Optional[Leaf] = None 

1458 inner_brackets: Set[LeafID] = set() 

1459 for index, leaf, leaf_length in line.enumerate_with_length(reversed=True): 

1460 length += leaf_length 

1461 if length > line_length: 

1462 break 

1463 

1464 has_inline_comment = leaf_length > len(leaf.value) + len(leaf.prefix) 

1465 if leaf.type == STANDALONE_COMMENT or has_inline_comment: 

1466 break 

1467 

1468 if opening_bracket: 

1469 if leaf is opening_bracket: 

1470 opening_bracket = None 

1471 elif leaf.type in CLOSING_BRACKETS: 

1472 prev = line.leaves[index - 1] if index > 0 else None 

1473 if ( 

1474 prev 

1475 and prev.type == token.COMMA 

1476 and leaf.opening_bracket is not None 

1477 and not is_one_sequence_between( 

1478 leaf.opening_bracket, leaf, line.leaves 

1479 ) 

1480 ): 

1481 # Never omit bracket pairs with trailing commas. 

1482 # We need to explode on those. 

1483 break 

1484 

1485 inner_brackets.add(id(leaf)) 

1486 elif leaf.type in CLOSING_BRACKETS: 

1487 prev = line.leaves[index - 1] if index > 0 else None 

1488 if prev and prev.type in OPENING_BRACKETS: 

1489 # Empty brackets would fail a split so treat them as "inner" 

1490 # brackets (e.g. only add them to the `omit` set if another 

1491 # pair of brackets was good enough).

1492 inner_brackets.add(id(leaf)) 

1493 continue 

1494 

1495 if closing_bracket: 

1496 omit.add(id(closing_bracket)) 

1497 omit.update(inner_brackets) 

1498 inner_brackets.clear() 

1499 yield omit 

1500 

1501 if ( 

1502 prev 

1503 and prev.type == token.COMMA 

1504 and leaf.opening_bracket is not None 

1505 and not is_one_sequence_between(leaf.opening_bracket, leaf, line.leaves) 

1506 ): 

1507 # Never omit bracket pairs with trailing commas. 

1508 # We need to explode on those. 

1509 break 

1510 

1511 if leaf.value: 

1512 opening_bracket = leaf.opening_bracket 

1513 closing_bracket = leaf 

1514 

1515 

1516def run_transformer( 

1517 line: Line, 

1518 transform: Transformer, 

1519 mode: Mode, 

1520 features: Collection[Feature], 

1521 *, 

1522 line_str: str = "", 

1523) -> List[Line]: 

1524 if not line_str: 

1525 line_str = line_to_string(line) 

1526 result: List[Line] = [] 

1527 for transformed_line in transform(line, features, mode): 

1528 if str(transformed_line).strip("\n") == line_str: 

1529 raise CannotTransform("Line transformer returned an unchanged result") 

1530 

1531 result.extend(transform_line(transformed_line, mode=mode, features=features)) 

1532 

1533 features_set = set(features) 

1534 if ( 

1535 Feature.FORCE_OPTIONAL_PARENTHESES in features_set 

1536 or transform.__class__.__name__ != "rhs" 

1537 or not line.bracket_tracker.invisible 

1538 or any(bracket.value for bracket in line.bracket_tracker.invisible) 

1539 or line.contains_multiline_strings() 

1540 or result[0].contains_uncollapsable_type_comments() 

1541 or result[0].contains_unsplittable_type_ignore() 

1542 or is_line_short_enough(result[0], mode=mode) 

1543 # If any leaves have no parents (which _can_ occur since 

1544 # `transform(line)` potentially destroys the line's underlying node 

1545 # structure), then we can't proceed. Doing so would cause the below 

1546 # call to `append_leaves()` to fail. 

1547 or any(leaf.parent is None for leaf in line.leaves) 

1548 ): 

1549 return result 

1550 

1551 line_copy = line.clone() 

1552 append_leaves(line_copy, line, line.leaves) 

1553 features_fop = features_set | {Feature.FORCE_OPTIONAL_PARENTHESES} 

1554 second_opinion = run_transformer( 

1555 line_copy, transform, mode, features_fop, line_str=line_str 

1556 ) 

1557 if all(is_line_short_enough(ln, mode=mode) for ln in second_opinion): 

1558 result = second_opinion 

1559 return result