Coverage for /pythoncovmergedfiles/medio/medio/usr/local/lib/python3.11/site-packages/black/__init__.py: 18%

Shortcuts on this page

r m x   toggle line displays

j k   next/prev highlighted chunk

0   (zero) top of page

1   (one) first highlighted chunk

651 statements  

1import io 

2import json 

3import platform 

4import re 

5import sys 

6import tokenize 

7import traceback 

8from collections.abc import ( 

9 Collection, 

10 Generator, 

11 MutableMapping, 

12 Sequence, 

13) 

14from contextlib import nullcontext 

15from dataclasses import replace 

16from datetime import datetime, timezone 

17from enum import Enum 

18from json.decoder import JSONDecodeError 

19from pathlib import Path 

20from re import Pattern 

21from typing import Any 

22 

23import click 

24from click.core import ParameterSource 

25from mypy_extensions import mypyc_attr 

26from pathspec import GitIgnoreSpec 

27from pathspec.patterns.gitignore import GitIgnorePatternError 

28 

29from _black_version import version as __version__ 

30from black.cache import Cache 

31from black.comments import normalize_fmt_off 

32from black.const import ( 

33 DEFAULT_EXCLUDES, 

34 DEFAULT_INCLUDES, 

35 DEFAULT_LINE_LENGTH, 

36 STDIN_PLACEHOLDER, 

37) 

38from black.files import ( 

39 best_effort_relative_path, 

40 find_project_root, 

41 find_pyproject_toml, 

42 find_user_pyproject_toml, 

43 gen_python_files, 

44 get_gitignore, 

45 parse_pyproject_toml, 

46 path_is_excluded, 

47 resolves_outside_root_or_cannot_stat, 

48 wrap_stream_for_windows, 

49) 

50from black.handle_ipynb_magics import ( 

51 PYTHON_CELL_MAGICS, 

52 jupyter_dependencies_are_installed, 

53 mask_cell, 

54 put_trailing_semicolon_back, 

55 remove_trailing_semicolon, 

56 unmask_cell, 

57 validate_cell, 

58) 

59from black.linegen import LN, LineGenerator, transform_line 

60from black.lines import EmptyLineTracker, LinesBlock 

61from black.mode import FUTURE_FLAG_TO_FEATURE, VERSION_TO_FEATURES, Feature 

62from black.mode import Mode as Mode # re-exported 

63from black.mode import Preview, TargetVersion, supports_feature 

64from black.nodes import STARS, is_number_token, is_simple_decorator_expression, syms 

65from black.output import color_diff, diff, dump_to_file, err, ipynb_diff, out 

66from black.parsing import ( # noqa F401 

67 ASTSafetyError, 

68 InvalidInput, 

69 lib2to3_parse, 

70 parse_ast, 

71 stringify_ast, 

72) 

73from black.ranges import ( 

74 adjusted_lines, 

75 convert_unchanged_lines, 

76 parse_line_ranges, 

77 sanitized_lines, 

78) 

79from black.report import Changed, NothingChanged, Report 

80from blib2to3.pgen2 import token 

81from blib2to3.pytree import Leaf, Node 

82 

# True when this module runs as a mypyc-compiled extension (.pyd/.so);
# surfaced in the --version output of `main` below.
COMPILED = Path(__file__).suffix in (".pyd", ".so")

# types
# Semantic aliases for plain `str` used throughout this module's signatures.
FileContent = str
Encoding = str
NewLine = str

89 

90 

class WriteBack(Enum):
    """How formatting results should be reported or persisted."""

    NO = 0
    YES = 1
    DIFF = 2
    CHECK = 3
    COLOR_DIFF = 4

    @classmethod
    def from_configuration(
        cls, *, check: bool, diff: bool, color: bool = False
    ) -> "WriteBack":
        """Map the --check/--diff/--color flag combination onto a member.

        --diff wins over --check; --color only matters together with --diff.
        """
        if not diff:
            # Without --diff we either just report (--check) or write files back.
            return cls.CHECK if check else cls.YES
        return cls.COLOR_DIFF if color else cls.DIFF

109 

110 

# Legacy name, left for integrations.
# External tools import `black.FileMode`; new code should use `Mode` directly.
FileMode = Mode

113 

114 

def read_pyproject_toml(
    ctx: click.Context, param: click.Parameter, value: str | None
) -> str | None:
    """Inject Black configuration from "pyproject.toml" into defaults in `ctx`.

    Used as the click callback for the --config option. If the user did not
    pass --config explicitly, a pyproject.toml is discovered from the sources
    (and --stdin-filename) instead.

    Returns the path to a successfully found and read configuration file, None
    otherwise.

    Raises click.FileError when the file cannot be read or parsed, and
    click.BadOptionUsage when a config key has an unsupported shape.
    """
    if not value:
        value = find_pyproject_toml(
            ctx.params.get("src", ()), ctx.params.get("stdin_filename", None)
        )
        if value is None:
            return None

    try:
        config = parse_pyproject_toml(value)
    except (OSError, ValueError) as e:
        # `from None` hides the original traceback; click renders the hint.
        raise click.FileError(
            filename=value, hint=f"Error reading configuration file: {e}"
        ) from None

    if not config:
        return None
    else:
        # Warn (but do not fail) on keys that match no known CLI option.
        spellcheck_pyproject_toml_keys(ctx, list(config), value)
        # Sanitize the values to be Click friendly. For more information please see:
        # https://github.com/psf/black/issues/1458
        # https://github.com/pallets/click/issues/1567
        config = {
            k: str(v) if not isinstance(v, (list, dict)) else v
            for k, v in config.items()
        }

    # target-version must be a list in TOML (the CLI flag is `multiple=True`).
    target_version = config.get("target_version")
    if target_version is not None and not isinstance(target_version, list):
        raise click.BadOptionUsage(
            "target-version", "Config key target-version must be a list"
        )

    exclude = config.get("exclude")
    if exclude is not None and not isinstance(exclude, str):
        raise click.BadOptionUsage("exclude", "Config key exclude must be a string")

    extend_exclude = config.get("extend_exclude")
    if extend_exclude is not None and not isinstance(extend_exclude, str):
        raise click.BadOptionUsage(
            "extend-exclude", "Config key extend-exclude must be a string"
        )

    # line-ranges is CLI-only by design: it is inherently per-invocation.
    line_ranges = config.get("line_ranges")
    if line_ranges is not None:
        raise click.BadOptionUsage(
            "line-ranges", "Cannot use line-ranges in the pyproject.toml file."
        )

    # Layer the file's values on top of any existing defaults so explicit
    # command-line arguments still take precedence via click's normal rules.
    default_map: dict[str, Any] = {}
    if ctx.default_map:
        default_map.update(ctx.default_map)
    default_map.update(config)

    ctx.default_map = default_map
    return value

178 

179 

def spellcheck_pyproject_toml_keys(
    ctx: click.Context, config_keys: list[str], config_file_path: str
) -> None:
    """Warn (in red) about config keys that match no known command option.

    Purely informational: invalid keys are reported, never rejected.
    """
    known_options = {param.name for param in ctx.command.params}
    unknown_keys = [key for key in config_keys if key not in known_options]
    if not unknown_keys:
        return
    keys_str = ", ".join(repr(key) for key in unknown_keys)
    out(
        f"Invalid config keys detected: {keys_str} (in {config_file_path})",
        fg="red",
    )

192 

193 

def target_version_option_callback(
    c: click.Context, p: click.Option | click.Parameter, v: tuple[str, ...]
) -> list[TargetVersion]:
    """Compute the target versions from a --target-version flag.

    This is its own function because mypy couldn't infer the type correctly
    when it was a lambda, causing mypyc trouble.
    """
    versions: list[TargetVersion] = []
    for name in v:
        # CLI choices are lowercase; enum member names are uppercase.
        versions.append(TargetVersion[name.upper()])
    return versions

203 

204 

205def _target_versions_exceed_runtime( 

206 target_versions: set[TargetVersion], 

207) -> bool: 

208 if not target_versions: 

209 return False 

210 max_target_minor = max(tv.value for tv in target_versions) 

211 return max_target_minor > sys.version_info[1] 

212 

213 

214def _version_mismatch_message(target_versions: set[TargetVersion]) -> str: 

215 max_target = max(target_versions, key=lambda tv: tv.value) 

216 runtime = f"{sys.version_info[0]}.{sys.version_info[1]}" 

217 return ( 

218 f"Python {runtime} cannot parse code formatted for" 

219 f" {max_target.pretty()}. To fix this: run Black with" 

220 f" {max_target.pretty()}, set --target-version to" 

221 f" py3{sys.version_info[1]}, or use --fast to skip the safety" 

222 " check." 

223 ) 

224 

225 

def enable_unstable_feature_callback(
    c: click.Context, p: click.Option | click.Parameter, v: tuple[str, ...]
) -> list[Preview]:
    """Compute the features from an --enable-unstable-feature flag."""
    features: list[Preview] = []
    for name in v:
        features.append(Preview[name])
    return features

231 

232 

def re_compile_maybe_verbose(regex: str) -> Pattern[str]:
    """Compile a regular expression string in `regex`.

    If it contains newlines, use verbose mode.
    """
    # Multi-line patterns are assumed to be written for re.VERBOSE, which the
    # inline "(?x)" flag enables without a flags argument.
    source = "(?x)" + regex if "\n" in regex else regex
    compiled: Pattern[str] = re.compile(source)
    return compiled

242 

243 

def validate_regex(
    ctx: click.Context,
    param: click.Parameter,
    value: str | None,
) -> Pattern[str] | None:
    """Click callback: compile a regex option, mapping errors to BadParameter.

    A missing value passes through as None.
    """
    if value is None:
        return None
    try:
        return re_compile_maybe_verbose(value)
    except re.error as e:
        raise click.BadParameter(f"Not a valid regular expression: {e}") from None

253 

254 

@click.command(
    context_settings={"help_option_names": ["-h", "--help"]},
    # While Click does set this field automatically using the docstring, mypyc
    # (annoyingly) strips 'em so we need to set it here too.
    help="The uncompromising code formatter.",
)
@click.option("-c", "--code", type=str, help="Format the code passed in as a string.")
@click.option(
    "-l",
    "--line-length",
    type=int,
    default=DEFAULT_LINE_LENGTH,
    help="How many characters per line to allow.",
    show_default=True,
)
@click.option(
    "-t",
    "--target-version",
    type=click.Choice([v.name.lower() for v in TargetVersion]),
    callback=target_version_option_callback,
    multiple=True,
    help=(
        "Python versions that should be supported by Black's output. You should"
        " include all versions that your code supports. By default, Black will infer"
        " target versions from the project metadata in pyproject.toml. If this does"
        " not yield conclusive results, Black will use per-file auto-detection."
    ),
)
@click.option(
    "--pyi",
    is_flag=True,
    help=(
        "Format all input files like typing stubs regardless of file extension. This"
        " is useful when piping source on standard input."
    ),
)
@click.option(
    "--ipynb",
    is_flag=True,
    help=(
        "Format all input files like Jupyter Notebooks regardless of file extension."
        " This is useful when piping source on standard input."
    ),
)
@click.option(
    "--python-cell-magics",
    multiple=True,
    help=(
        "When processing Jupyter Notebooks, add the given magic to the list"
        f" of known python-magics ({', '.join(sorted(PYTHON_CELL_MAGICS))})."
        " Useful for formatting cells with custom python magics."
    ),
    default=[],
)
@click.option(
    "-x",
    "--skip-source-first-line",
    is_flag=True,
    help="Skip the first line of the source code.",
)
@click.option(
    "-S",
    "--skip-string-normalization",
    is_flag=True,
    help="Don't normalize string quotes or prefixes.",
)
@click.option(
    "-C",
    "--skip-magic-trailing-comma",
    is_flag=True,
    help="Don't use trailing commas as a reason to split lines.",
)
@click.option(
    "--preview",
    is_flag=True,
    help=(
        "Enable potentially disruptive style changes that may be added to Black's main"
        " functionality in the next major release."
    ),
)
@click.option(
    "--unstable",
    is_flag=True,
    help=(
        "Enable potentially disruptive style changes that have known bugs or are not"
        " currently expected to make it into the stable style Black's next major"
        " release. Implies --preview."
    ),
)
@click.option(
    "--enable-unstable-feature",
    type=click.Choice([v.name for v in Preview]),
    callback=enable_unstable_feature_callback,
    multiple=True,
    help=(
        "Enable specific features included in the `--unstable` style. Requires"
        " `--preview`. No compatibility guarantees are provided on the behavior"
        " or existence of any unstable features."
    ),
)
@click.option(
    "--check",
    is_flag=True,
    help=(
        "Don't write the files back, just return the status. Return code 0 means"
        " nothing would change. Return code 1 means some files would be reformatted."
        " Return code 123 means there was an internal error."
    ),
)
@click.option(
    "--diff",
    is_flag=True,
    help=(
        "Don't write the files back, just output a diff to indicate what changes"
        " Black would've made. They are printed to stdout so capturing them is simple."
    ),
)
@click.option(
    "--color/--no-color",
    is_flag=True,
    help="Show (or do not show) colored diff. Only applies when --diff is given.",
)
@click.option(
    "--line-ranges",
    multiple=True,
    metavar="START-END",
    help=(
        "When specified, Black will try its best to only format these lines. This"
        " option can be specified multiple times, and a union of the lines will be"
        " formatted. Each range must be specified as two integers connected by a `-`:"
        " `<START>-<END>`. The `<START>` and `<END>` integer indices are 1-based and"
        " inclusive on both ends."
    ),
    default=(),
)
@click.option(
    "--fast/--safe",
    is_flag=True,
    help=(
        "By default, Black performs an AST safety check after formatting your code."
        " The --fast flag turns off this check and the --safe flag explicitly enables"
        " it. [default: --safe]"
    ),
)
@click.option(
    "--required-version",
    type=str,
    help=(
        "Require a specific version of Black to be running. This is useful for"
        " ensuring that all contributors to your project are using the same"
        " version, because different versions of Black may format code a little"
        " differently. This option can be set in a configuration file for consistent"
        " results across environments."
    ),
)
@click.option(
    "--exclude",
    type=str,
    callback=validate_regex,
    help=(
        "A regular expression that matches files and directories that should be"
        " excluded on recursive searches. An empty value means no paths are excluded."
        " Use forward slashes for directories on all platforms (Windows, too)."
        " By default, Black also ignores all paths listed in .gitignore. Changing this"
        f" value will override all default exclusions. [default: {DEFAULT_EXCLUDES}]"
    ),
    show_default=False,
)
@click.option(
    "--extend-exclude",
    type=str,
    callback=validate_regex,
    help=(
        "Like --exclude, but adds additional files and directories on top of the"
        " default values instead of overriding them."
    ),
)
@click.option(
    "--force-exclude",
    type=str,
    callback=validate_regex,
    help=(
        "Like --exclude, but files and directories matching this regex will be excluded"
        " even when they are passed explicitly as arguments. This is useful when"
        " invoking Black programmatically on changed files, such as in a pre-commit"
        " hook or editor plugin."
    ),
)
@click.option(
    "--stdin-filename",
    type=str,
    is_eager=True,
    help=(
        "The name of the file when passing it through stdin. Useful to make sure Black"
        " will respect the --force-exclude option on some editors that rely on using"
        " stdin."
    ),
)
@click.option(
    "--include",
    type=str,
    default=DEFAULT_INCLUDES,
    callback=validate_regex,
    help=(
        "A regular expression that matches files and directories that should be"
        " included on recursive searches. An empty value means all files are included"
        " regardless of the name. Use forward slashes for directories on all platforms"
        " (Windows, too). Overrides all exclusions, including from .gitignore and"
        " command line options."
    ),
    show_default=True,
)
@click.option(
    "-W",
    "--workers",
    type=click.IntRange(min=1),
    default=None,
    help=(
        "When Black formats multiple files, it may use a process pool to speed up"
        " formatting. This option controls the number of parallel workers. This can"
        " also be specified via the BLACK_NUM_WORKERS environment variable. Defaults"
        " to the number of CPUs in the system."
    ),
)
@click.option(
    "-q",
    "--quiet",
    is_flag=True,
    help=(
        "Stop emitting all non-critical output. Error messages will still be emitted"
        " (which can silenced by 2>/dev/null)."
    ),
)
@click.option(
    "-v",
    "--verbose",
    is_flag=True,
    help=(
        "Emit messages about files that were not changed or were ignored due to"
        " exclusion patterns. If Black is using a configuration file, a message"
        " detailing which one it is using will be emitted."
    ),
)
@click.version_option(
    version=__version__,
    message=(
        f"%(prog)s, %(version)s (compiled: {'yes' if COMPILED else 'no'})\n"
        f"Python ({platform.python_implementation()}) {platform.python_version()}"
    ),
)
@click.argument(
    "src",
    nargs=-1,
    type=click.Path(
        exists=True, file_okay=True, dir_okay=True, readable=True, allow_dash=True
    ),
    is_eager=True,
    metavar="SRC ...",
)
@click.option(
    "--config",
    type=click.Path(
        exists=True,
        file_okay=True,
        dir_okay=False,
        readable=True,
        allow_dash=False,
        path_type=str,
    ),
    is_eager=True,
    # This callback injects pyproject.toml values into ctx.default_map before
    # the remaining options are resolved (hence is_eager on it and on `src`).
    callback=read_pyproject_toml,
    help="Read configuration options from a configuration file.",
)
@click.option(
    "--no-cache",
    is_flag=True,
    help=(
        "Skip reading and writing the cache, forcing Black to reformat all"
        " included files."
    ),
)
@click.pass_context
def main(
    ctx: click.Context,
    code: str | None,
    line_length: int,
    target_version: list[TargetVersion],
    check: bool,
    diff: bool,
    line_ranges: Sequence[str],
    color: bool,
    fast: bool,
    pyi: bool,
    ipynb: bool,
    python_cell_magics: Sequence[str],
    skip_source_first_line: bool,
    skip_string_normalization: bool,
    skip_magic_trailing_comma: bool,
    preview: bool,
    unstable: bool,
    enable_unstable_feature: list[Preview],
    quiet: bool,
    verbose: bool,
    required_version: str | None,
    include: Pattern[str],
    exclude: Pattern[str] | None,
    extend_exclude: Pattern[str] | None,
    force_exclude: Pattern[str] | None,
    stdin_filename: str | None,
    workers: int | None,
    src: tuple[str, ...],
    config: str | None,
    no_cache: bool,
) -> None:
    """The uncompromising code formatter."""
    ctx.ensure_object(dict)

    # Fail fast on unsupported/known-bad interpreters before doing any work.
    assert sys.version_info >= (3, 10), "Black requires Python 3.10+"
    if sys.version_info[:3] == (3, 12, 5):
        out(
            "Python 3.12.5 has a memory safety issue that can cause Black's "
            "AST safety checks to fail. "
            "Please upgrade to Python 3.12.6 or downgrade to Python 3.12.4"
        )
        ctx.exit(1)

    # Exactly one of SRC paths or --code must be provided.
    if src and code is not None:
        out(
            main.get_usage(ctx)
            + "\n\n'SRC' and 'code' cannot be passed simultaneously."
        )
        ctx.exit(1)
    if not src and code is None:
        out(main.get_usage(ctx) + "\n\nOne of 'SRC' or 'code' is required.")
        ctx.exit(1)

    # It doesn't do anything if --unstable is also passed, so just allow it.
    if enable_unstable_feature and not (preview or unstable):
        out(
            main.get_usage(ctx)
            + "\n\n'--enable-unstable-feature' requires '--preview'."
        )
        ctx.exit(1)

    # With --code there is no filesystem context, so no project root either.
    root, method = (
        find_project_root(src, stdin_filename) if code is None else (None, None)
    )
    ctx.obj["root"] = root

    if verbose:
        if root:
            out(
                f"Identified `{root}` as project root containing a {method}.",
                fg="blue",
            )

        if config:
            config_source = ctx.get_parameter_source("config")
            user_level_config = str(find_user_pyproject_toml())
            if config == user_level_config:
                out(
                    "Using configuration from user-level config at "
                    f"'{user_level_config}'.",
                    fg="blue",
                )
            elif config_source in (
                ParameterSource.DEFAULT,
                ParameterSource.DEFAULT_MAP,
            ):
                out("Using configuration from project root.", fg="blue")
            else:
                out(f"Using configuration in '{config}'.", fg="blue")
            if ctx.default_map:
                for param, value in ctx.default_map.items():
                    out(f"{param}: {value}")

    error_msg = "Oh no! 💥 💔 💥"
    # --required-version accepts either a full version or just the major part.
    if (
        required_version
        and required_version != __version__
        and required_version != __version__.split(".")[0]
    ):
        err(
            f"{error_msg} The required version `{required_version}` does not match"
            f" the running version `{__version__}`!"
        )
        ctx.exit(1)
    if ipynb and pyi:
        err("Cannot pass both `pyi` and `ipynb` flags!")
        ctx.exit(1)

    write_back = WriteBack.from_configuration(check=check, diff=diff, color=color)
    if target_version:
        versions = set(target_version)
    else:
        # We'll autodetect later.
        versions = set()
    mode = Mode(
        target_versions=versions,
        line_length=line_length,
        is_pyi=pyi,
        is_ipynb=ipynb,
        skip_source_first_line=skip_source_first_line,
        string_normalization=not skip_string_normalization,
        magic_trailing_comma=not skip_magic_trailing_comma,
        preview=preview,
        unstable=unstable,
        python_cell_magics=set(python_cell_magics),
        enabled_features=set(enable_unstable_feature),
    )

    # Targeting a Python newer than the runtime breaks the AST safety check;
    # warn unless the user opted out of the check with --fast.
    if not fast and _target_versions_exceed_runtime(versions):
        err(
            f"Warning: {_version_mismatch_message(versions)} Black's safety"
            " check verifies equivalence by parsing the AST, which fails"
            " when the running Python is older than the target version.",
            fg="yellow",
        )

    lines: list[tuple[int, int]] = []
    if line_ranges:
        if ipynb:
            err("Cannot use --line-ranges with ipynb files.")
            ctx.exit(1)

        try:
            lines = parse_line_ranges(line_ranges)
        except ValueError as e:
            err(str(e))
            ctx.exit(1)

    if code is not None:
        # Run in quiet mode by default with -c; the extra output isn't useful.
        # You can still pass -v to get verbose output.
        quiet = True

    report = Report(check=check, diff=diff, quiet=quiet, verbose=verbose)

    if code is not None:
        reformat_code(
            content=code,
            fast=fast,
            write_back=write_back,
            mode=mode,
            report=report,
            lines=lines,
        )
    else:
        assert root is not None  # root is only None if code is not None
        try:
            sources = get_sources(
                root=root,
                src=src,
                quiet=quiet,
                verbose=verbose,
                include=include,
                exclude=exclude,
                extend_exclude=extend_exclude,
                force_exclude=force_exclude,
                report=report,
                stdin_filename=stdin_filename,
            )
        except GitIgnorePatternError:
            ctx.exit(1)

        if not sources:
            if verbose or not quiet:
                out("No Python files are present to be formatted. Nothing to do 😴")
            # Pass stdin through untouched so pipelines keep working.
            if "-" in src:
                sys.stdout.write(sys.stdin.read())
            ctx.exit(0)

        if len(sources) == 1:
            reformat_one(
                src=sources.pop(),
                fast=fast,
                write_back=write_back,
                mode=mode,
                report=report,
                lines=lines,
                no_cache=no_cache,
            )
        else:
            # Imported lazily: the concurrency machinery is only needed for
            # multi-file runs.
            from black.concurrency import reformat_many

            if lines:
                err("Cannot use --line-ranges to format multiple files.")
                ctx.exit(1)
            reformat_many(
                sources=sources,
                fast=fast,
                write_back=write_back,
                mode=mode,
                report=report,
                workers=workers,
                no_cache=no_cache,
            )

    if verbose or not quiet:
        if code is None and (verbose or report.change_count or report.failure_count):
            out()
        out(error_msg if report.return_code else "All done! ✨ 🍰 ✨")
        if code is None:
            click.echo(str(report), err=True)
    ctx.exit(report.return_code)

760 

761 

def get_sources(
    *,
    root: Path,
    src: tuple[str, ...],
    quiet: bool,
    verbose: bool,
    include: Pattern[str],
    exclude: Pattern[str] | None,
    extend_exclude: Pattern[str] | None,
    force_exclude: Pattern[str] | None,
    report: "Report",
    stdin_filename: str | None,
) -> set[Path]:
    """Compute the set of files to be formatted.

    Expands directories recursively via `gen_python_files`, applies the
    include/exclude/force-exclude regexes, and maps "-" (stdin) either to a
    placeholder-prefixed path (when --stdin-filename is given) or to the
    literal "-" path. Ignored paths are recorded on `report`.
    """
    sources: set[Path] = set()

    assert root.is_absolute(), f"INTERNAL ERROR: `root` must be absolute but is {root}"
    # `exclude is None` means the user did not pass --exclude; only then do
    # the defaults and .gitignore apply.
    using_default_exclude = exclude is None
    exclude = re_compile_maybe_verbose(DEFAULT_EXCLUDES) if exclude is None else exclude
    gitignore: dict[Path, GitIgnoreSpec] | None = None
    root_gitignore = get_gitignore(root)

    for s in src:
        if s == "-" and stdin_filename:
            path = Path(stdin_filename)
            if path_is_excluded(stdin_filename, force_exclude):
                report.path_ignored(
                    path,
                    "--stdin-filename matches the --force-exclude regular expression",
                )
                continue
            is_stdin = True
        else:
            path = Path(s)
            is_stdin = False

        # Compare the logic here to the logic in `gen_python_files`.
        if is_stdin or path.is_file():
            if resolves_outside_root_or_cannot_stat(path, root, report):
                if verbose:
                    out(f'Skipping invalid source: "{path}"', fg="red")
                continue

            # Force-exclude matching works on root-relative, "/"-anchored
            # POSIX paths, mirroring gitignore semantics.
            root_relative_path = best_effort_relative_path(path, root).as_posix()
            root_relative_path = "/" + root_relative_path

            # Hard-exclude any files that matches the `--force-exclude` regex.
            if path_is_excluded(root_relative_path, force_exclude):
                report.path_ignored(
                    path, "matches the --force-exclude regular expression"
                )
                continue

            if is_stdin:
                # Tag the path so downstream code knows to read from stdin
                # while still reporting the user-visible filename.
                path = Path(f"{STDIN_PLACEHOLDER}{path}")

            if path.suffix == ".ipynb" and not jupyter_dependencies_are_installed(
                warn=verbose or not quiet
            ):
                continue

            if verbose:
                out(f'Found input source: "{path}"', fg="blue")
            sources.add(path)
        elif path.is_dir():
            path = root / (path.resolve().relative_to(root))
            if verbose:
                out(f'Found input source directory: "{path}"', fg="blue")

            if using_default_exclude:
                # .gitignore files are only honored with the default excludes.
                gitignore = {
                    root: root_gitignore,
                    path: get_gitignore(path),
                }
            sources.update(
                gen_python_files(
                    path.iterdir(),
                    root,
                    include,
                    exclude,
                    extend_exclude,
                    force_exclude,
                    report,
                    gitignore,
                    verbose=verbose,
                    quiet=quiet,
                )
            )
        elif s == "-":
            # Bare stdin without --stdin-filename: keep the literal "-" path.
            if verbose:
                out("Found input source stdin", fg="blue")
            sources.add(path)
        else:
            err(f"invalid path: {s}")

    return sources

858 

859 

def reformat_code(
    content: str,
    fast: bool,
    write_back: WriteBack,
    mode: Mode,
    report: Report,
    *,
    lines: Collection[tuple[int, int]] = (),
) -> None:
    """
    Reformat and print out `content` without spawning child processes.
    Similar to `reformat_one`, but for string content.

    `fast`, `write_back`, and `mode` options are passed to
    :func:`format_file_in_place` or :func:`format_stdin_to_stdout`.
    """
    path = Path("<string>")
    try:
        was_reformatted = format_stdin_to_stdout(
            content=content, fast=fast, write_back=write_back, mode=mode, lines=lines
        )
        report.done(path, Changed.YES if was_reformatted else Changed.NO)
    except Exception as exc:
        # Any failure is recorded on the report rather than propagated, so the
        # CLI can emit a summary and a non-zero exit code.
        if report.verbose:
            traceback.print_exc()
        report.failed(path, str(exc))

888 

889 

# diff-shades depends on being to monkeypatch this function to operate. I know it's
# not ideal, but this shouldn't cause any issues ... hopefully. ~ichard26
@mypyc_attr(patchable=True)
def reformat_one(
    src: Path,
    fast: bool,
    write_back: WriteBack,
    mode: Mode,
    report: "Report",
    *,
    lines: Collection[tuple[int, int]] = (),
    no_cache: bool = False,
) -> None:
    """Reformat a single file under `src` without spawning child processes.

    `fast`, `write_back`, and `mode` options are passed to
    :func:`format_file_in_place` or :func:`format_stdin_to_stdout`.

    The outcome (YES/NO/CACHED or a failure) is recorded on `report`;
    exceptions are caught and never propagated to the caller.
    """
    try:
        changed = Changed.NO

        # stdin comes in two spellings: a literal "-" or a placeholder-prefixed
        # path produced by `get_sources` when --stdin-filename was given.
        if str(src) == "-":
            is_stdin = True
        elif str(src).startswith(STDIN_PLACEHOLDER):
            is_stdin = True
            # Use the original name again in case we want to print something
            # to the user
            src = Path(str(src)[len(STDIN_PLACEHOLDER) :])
        else:
            is_stdin = False

        if is_stdin:
            # Infer stub/notebook mode from the (reported) filename suffix.
            if src.suffix == ".pyi":
                mode = replace(mode, is_pyi=True)
            elif src.suffix == ".ipynb":
                mode = replace(mode, is_ipynb=True)
            if format_stdin_to_stdout(
                fast=fast, write_back=write_back, mode=mode, lines=lines
            ):
                changed = Changed.YES
        else:
            cache = None if no_cache else Cache.read(mode)
            # Diff modes always reformat; the cache is only consulted when the
            # result could be skipped entirely.
            if cache is not None and write_back not in (
                WriteBack.DIFF,
                WriteBack.COLOR_DIFF,
            ):
                if not cache.is_changed(src):
                    changed = Changed.CACHED
            if changed is not Changed.CACHED and format_file_in_place(
                src, fast=fast, write_back=write_back, mode=mode, lines=lines
            ):
                changed = Changed.YES
            # Record the file as up to date after writing it back, or after
            # --check confirmed nothing would change.
            if cache is not None and (
                (write_back is WriteBack.YES and changed is not Changed.CACHED)
                or (write_back is WriteBack.CHECK and changed is Changed.NO)
            ):
                cache.write([src])
        report.done(src, changed)
    except Exception as exc:
        if report.verbose:
            traceback.print_exc()
        report.failed(src, str(exc))

952 

953 

def format_file_in_place(
    src: Path,
    fast: bool,
    mode: Mode,
    write_back: WriteBack = WriteBack.NO,
    lock: Any = None,  # multiprocessing.Manager().Lock() is some crazy proxy
    *,
    lines: Collection[tuple[int, int]] = (),
) -> bool:
    """Format file under `src` path. Return True if changed.

    If `write_back` is DIFF, write a diff to stdout. If it is YES, write reformatted
    code to the file.
    `mode` and `fast` options are passed to :func:`format_file_contents`.

    Raises ValueError when an .ipynb file is not valid JSON.
    """
    # File type is inferred from the suffix; an explicit --pyi/--ipynb flag
    # was already baked into `mode` by the caller.
    if src.suffix == ".pyi":
        mode = replace(mode, is_pyi=True)
    elif src.suffix == ".ipynb":
        mode = replace(mode, is_ipynb=True)

    # mtime of the original file, used as the "from" timestamp in diff headers.
    then = datetime.fromtimestamp(src.stat().st_mtime, timezone.utc)
    header = b""
    with open(src, "rb") as buf:
        if mode.skip_source_first_line:
            # Preserve the first line (e.g. a script shebang variant) verbatim;
            # it is re-attached below and never formatted.
            header = buf.readline()
        src_contents, encoding, newline = decode_bytes(buf.read(), mode)
    try:
        dst_contents = format_file_contents(
            src_contents, fast=fast, mode=mode, lines=lines
        )
    except NothingChanged:
        return False
    except JSONDecodeError:
        raise ValueError(
            f"File '{src}' cannot be parsed as valid Jupyter notebook."
        ) from None
    src_contents = header.decode(encoding) + src_contents
    dst_contents = header.decode(encoding) + dst_contents

    if write_back == WriteBack.YES:
        # Re-encode with the original encoding and newline style.
        with open(src, "w", encoding=encoding, newline=newline) as f:
            f.write(dst_contents)
    elif write_back in (WriteBack.DIFF, WriteBack.COLOR_DIFF):
        now = datetime.now(timezone.utc)
        src_name = f"{src}\t{then}"
        dst_name = f"{src}\t{now}"
        if mode.is_ipynb:
            diff_contents = ipynb_diff(src_contents, dst_contents, src_name, dst_name)
        else:
            diff_contents = diff(src_contents, dst_contents, src_name, dst_name)

        if write_back == WriteBack.COLOR_DIFF:
            diff_contents = color_diff(diff_contents)

        # `lock` serializes stdout access across worker processes; without one
        # (single-process runs) nullcontext() is a no-op.
        with lock or nullcontext():
            f = io.TextIOWrapper(
                sys.stdout.buffer,
                encoding=encoding,
                newline=newline,
                write_through=True,
            )
            f = wrap_stream_for_windows(f)
            f.write(diff_contents)
            # Detach so closing the wrapper doesn't close sys.stdout itself.
            f.detach()

    return True

1020 

1021 

def format_stdin_to_stdout(
    fast: bool,
    *,
    content: str | None = None,
    write_back: WriteBack = WriteBack.NO,
    mode: Mode,
    lines: Collection[tuple[int, int]] = (),
) -> bool:
    """Format file on stdin. Return True if changed.

    If content is None, it's read from sys.stdin.

    If `write_back` is YES, write reformatted code back to stdout. If it is DIFF,
    write a diff to stdout. The `mode` argument is passed to
    :func:`format_file_contents`.
    """
    then = datetime.now(timezone.utc)

    if content is None:
        src, encoding, newline = decode_bytes(sys.stdin.buffer.read(), mode)
    else:
        # Caller-provided content is assumed to already be utf-8 text with
        # LF newlines.
        src, encoding, newline = content, "utf-8", "\n"

    # Pre-seed `dst` with the original source so the `finally` block always
    # has something to echo, even when formatting raises NothingChanged.
    dst = src
    try:
        dst = format_file_contents(src, fast=fast, mode=mode, lines=lines)
        return True

    except NothingChanged:
        return False

    finally:
        # Output happens in `finally` so that both the changed and unchanged
        # paths write the requested result to stdout before returning.
        f = io.TextIOWrapper(
            sys.stdout.buffer, encoding=encoding, newline=newline, write_through=True
        )
        if write_back == WriteBack.YES:
            # Make sure there's a newline after the content
            if dst and dst[-1] != "\n" and dst[-1] != "\r":
                dst += newline
            f.write(dst)
        elif write_back in (WriteBack.DIFF, WriteBack.COLOR_DIFF):
            now = datetime.now(timezone.utc)
            src_name = f"STDIN\t{then}"
            dst_name = f"STDOUT\t{now}"
            d = diff(src, dst, src_name, dst_name)
            if write_back == WriteBack.COLOR_DIFF:
                d = color_diff(d)
                f = wrap_stream_for_windows(f)
            f.write(d)
        # Detach instead of close so the underlying sys.stdout.buffer stays
        # usable after this wrapper goes away.
        f.detach()

1072 

1073 

def check_stability_and_equivalence(
    src_contents: str,
    dst_contents: str,
    *,
    mode: Mode,
    lines: Collection[tuple[int, int]] = (),
) -> None:
    """Perform stability and equivalence checks.

    Raise AssertionError if source and destination contents are not
    equivalent, or if a second pass of the formatter would format the
    content differently.
    """
    try:
        assert_equivalent(src_contents, dst_contents)
    except ASTSafetyError:
        # When targeting a newer Python than the one running us, the AST
        # comparison may fail for reasons outside our control; report that
        # mismatch instead of the raw safety error.
        if not _target_versions_exceed_runtime(mode.target_versions):
            raise
        raise ASTSafetyError(
            "failed to verify equivalence of the formatted output:"
            f" {_version_mismatch_message(mode.target_versions)}"
        ) from None
    assert_stable(src_contents, dst_contents, mode=mode, lines=lines)

1097 

1098 

def format_file_contents(
    src_contents: str,
    *,
    fast: bool,
    mode: Mode,
    lines: Collection[tuple[int, int]] = (),
) -> FileContent:
    """Reformat contents of a file and return new contents.

    If `fast` is False, additionally confirm that the reformatted code is
    valid by calling :func:`assert_equivalent` and :func:`assert_stable` on it.
    `mode` is passed to :func:`format_str`.

    Raises NothingChanged when the output is identical to the input.
    """
    if mode.is_ipynb:
        dst_contents = format_ipynb_string(src_contents, fast=fast, mode=mode)
    else:
        dst_contents = format_str(src_contents, mode=mode, lines=lines)

    if dst_contents == src_contents:
        raise NothingChanged

    # Jupyter notebooks are already verified cell-by-cell above, so the
    # whole-file safety checks only apply to regular Python sources.
    needs_safety_checks = not fast and not mode.is_ipynb
    if needs_safety_checks:
        check_stability_and_equivalence(
            src_contents, dst_contents, mode=mode, lines=lines
        )
    return dst_contents

1125 

1126 

def format_cell(src: str, *, fast: bool, mode: Mode) -> str:
    """Format code in given cell of Jupyter notebook.

    General idea is:

      - if cell has trailing semicolon, remove it;
      - if cell has IPython magics, mask them;
      - format cell;
      - reinstate IPython magics;
      - reinstate trailing semicolon (if originally present);
      - strip trailing newlines.

    Cells with syntax errors will not be processed, as they
    could potentially be automagics or multi-line magics, which
    are currently not supported.
    """
    validate_cell(src, mode)
    stripped_src, had_trailing_semicolon = remove_trailing_semicolon(src)
    try:
        masked_src, magic_replacements = mask_cell(stripped_src)
    except SyntaxError:
        # Unparsable cells may contain automagics/multi-line magics; leave
        # them alone rather than risk mangling them.
        raise NothingChanged from None

    masked_dst = format_str(masked_src, mode=mode)
    if not fast:
        check_stability_and_equivalence(masked_src, masked_dst, mode=mode)

    unmasked_dst = unmask_cell(masked_dst, magic_replacements)
    restored = put_trailing_semicolon_back(unmasked_dst, had_trailing_semicolon)
    result = restored.rstrip("\n")
    if result == src:
        raise NothingChanged from None
    return result

1162 

1163 

def validate_metadata(nb: MutableMapping[str, Any]) -> None:
    """If notebook is marked as non-Python, don't format it.

    All notebook metadata fields are optional, see
    https://nbformat.readthedocs.io/en/latest/format_description.html. So
    if a notebook has empty metadata, we will try to parse it anyway.
    """
    language = nb.get("metadata", {}).get("language_info", {}).get("name", None)
    if language is None or language == "python":
        # Missing metadata is treated optimistically as Python.
        return
    raise NothingChanged from None

1174 

1175 

def format_ipynb_string(src_contents: str, *, fast: bool, mode: Mode) -> FileContent:
    """Format Jupyter notebook.

    Operate cell-by-cell, only on code cells, only for Python notebooks.
    If the ``.ipynb`` originally had a trailing newline, it'll be preserved.
    """
    if not src_contents:
        raise NothingChanged

    had_trailing_newline = src_contents.endswith("\n")
    notebook = json.loads(src_contents)
    validate_metadata(notebook)

    any_cell_changed = False
    for cell in notebook["cells"]:
        if cell.get("cell_type", None) != "code":
            continue
        cell_src = "".join(cell["source"])
        try:
            cell_dst = format_cell(cell_src, fast=fast, mode=mode)
        except NothingChanged:
            continue
        cell["source"] = cell_dst.splitlines(keepends=True)
        any_cell_changed = True

    if not any_cell_changed:
        raise NothingChanged

    result = json.dumps(notebook, indent=1, ensure_ascii=False)
    if had_trailing_newline:
        result = result + "\n"
    return result

1206 

1207 

def format_str(
    src_contents: str, *, mode: Mode, lines: Collection[tuple[int, int]] = ()
) -> str:
    """Reformat a string and return new contents.

    `mode` determines formatting options, such as how many characters per line are
    allowed. Example:

    >>> import black
    >>> print(black.format_str("def f(arg:str='')->None:...", mode=black.Mode()))
    def f(arg: str = "") -> None:
        ...

    A more complex example:

    >>> print(
    ...   black.format_str(
    ...     "def f(arg:str='')->None: hey",
    ...     mode=black.Mode(
    ...       target_versions={black.TargetVersion.PY36},
    ...       line_length=10,
    ...       string_normalization=False,
    ...       is_pyi=False,
    ...     ),
    ...   ),
    ... )
    def f(
        arg: str = '',
    ) -> None:
        hey

    """
    if lines:
        lines = sanitized_lines(lines, src_contents)
        if not lines:
            return src_contents  # Nothing to format
    first_pass = _format_str_once(src_contents, mode=mode, lines=lines)
    if first_pass == src_contents:
        return first_pass
    # Forced second pass to work around optional trailing commas (becoming
    # forced trailing commas on pass 2) interacting differently with optional
    # parentheses. Admittedly ugly.
    if lines:
        lines = adjusted_lines(lines, src_contents, first_pass)
    return _format_str_once(first_pass, mode=mode, lines=lines)

1253 

1254 

def _format_str_once(
    src_contents: str, *, mode: Mode, lines: Collection[tuple[int, int]] = ()
) -> str:
    """Run a single formatting pass over `src_contents` and return the result.

    This is the core pipeline: normalize newlines, parse to a lib2to3 tree,
    determine target versions/features, generate lines, then join them back
    using the original newline style.
    """
    # Use the encoding overwrite since the src_contents may contain a different
    # magic encoding comment than utf-8
    normalized_contents, _, newline_type = decode_bytes(
        src_contents.encode("utf-8"), mode, encoding_overwrite="utf-8"
    )

    src_node = lib2to3_parse(
        normalized_contents.lstrip(), target_versions=mode.target_versions
    )

    dst_blocks: list[LinesBlock] = []
    if mode.target_versions:
        versions = mode.target_versions
    else:
        # No explicit targets: infer them from syntax features actually used.
        future_imports = get_future_imports(src_node)
        versions = detect_target_versions(src_node, future_imports=future_imports)

    # Features that affect how lines are *generated* (as opposed to split).
    line_generation_features = {
        feature
        for feature in {
            Feature.PARENTHESIZED_CONTEXT_MANAGERS,
            Feature.UNPARENTHESIZED_EXCEPT_TYPES,
            Feature.T_STRINGS,
        }
        if supports_feature(versions, feature)
    }
    normalize_fmt_off(src_node, mode, lines)
    if lines:
        # This should be called after normalize_fmt_off.
        convert_unchanged_lines(src_node, lines)

    line_generator = LineGenerator(mode=mode, features=line_generation_features)
    elt = EmptyLineTracker(mode=mode)
    # Features that affect how long lines are *split*.
    split_line_features = {
        feature
        for feature in {
            Feature.TRAILING_COMMA_IN_CALL,
            Feature.TRAILING_COMMA_IN_DEF,
        }
        if supports_feature(versions, feature)
    }
    block: LinesBlock | None = None
    for current_line in line_generator.visit(src_node):
        block = elt.maybe_empty_lines(current_line)
        dst_blocks.append(block)
        for line in transform_line(
            current_line, mode=mode, features=split_line_features
        ):
            block.content_lines.append(str(line))
    if dst_blocks:
        # Never emit trailing empty lines after the last block.
        dst_blocks[-1].after = 0
    dst_contents = []
    for block in dst_blocks:
        dst_contents.extend(block.all_lines())
    if not dst_contents:
        # Whitespace-only input: preserve a single newline if one was present.
        if "\n" in normalized_contents:
            return newline_type
    return "".join(dst_contents).replace("\n", newline_type)

1316 

1317 

def decode_bytes(
    src: bytes, mode: Mode, *, encoding_overwrite: str | None = None
) -> tuple[FileContent, Encoding, NewLine]:
    """Return a tuple of (decoded_contents, encoding, newline).

    `newline` is either CRLF, LF, or CR, but `decoded_contents` is decoded with
    universal newlines (i.e. only contains LF).

    Use the keyword only encoding_overwrite argument if the bytes are encoded
    differently to their possible encoding magic comment.
    """
    buffer = io.BytesIO(src)

    # Run detection even when an overwrite is supplied: detect_encoding also
    # yields the first raw line(s), which the newline sniffing below needs.
    detected, first_lines = tokenize.detect_encoding(buffer.readline)
    encoding = detected if encoding_overwrite is None else encoding_overwrite

    if not first_lines:
        return "", encoding, "\n"

    head = first_lines[0]
    if head.endswith(b"\r\n"):
        # A stray CR earlier in the line means classic-Mac CR line endings.
        newline = "\r" if b"\r" in head[:-2] else "\r\n"
    elif head.endswith(b"\n"):
        newline = "\r" if b"\r" in head[:-1] else "\n"
    else:
        newline = "\r" if b"\r" in head else "\n"

    buffer.seek(0)
    with io.TextIOWrapper(buffer, encoding) as text_wrapper:
        return text_wrapper.read(), encoding, newline

1359 

1360 

def get_features_used(  # noqa: C901
    node: Node, *, future_imports: set[str] | None = None
) -> set[Feature]:
    """Return a set of (relatively) new Python features used in this file.

    Currently looking for:
    - f-strings;
    - self-documenting expressions in f-strings (f"{x=}");
    - underscores in numeric literals;
    - trailing commas after * or ** in function signatures and calls;
    - positional only arguments in function signatures and lambdas;
    - assignment expression;
    - relaxed decorator syntax;
    - usage of __future__ flags (annotations);
    - print / exec statements;
    - parenthesized context managers;
    - match statements;
    - except* clause;
    - variadic generics;
    - lazy imports;
    - starred or double-starred comprehensions.
    """
    features: set[Feature] = set()
    # __future__ flags map directly onto features (e.g. annotations).
    if future_imports:
        features |= {
            FUTURE_FLAG_TO_FEATURE[future_import]
            for future_import in future_imports
            if future_import in FUTURE_FLAG_TO_FEATURE
        }

    # One pre-order walk over the tree; each node matches at most one branch.
    for n in node.pre_order():
        if n.type == token.FSTRING_START:
            features.add(Feature.F_STRINGS)
        elif n.type == token.TSTRING_START:
            features.add(Feature.T_STRINGS)
        elif (
            n.type == token.RBRACE
            and n.parent is not None
            and any(child.type == token.EQUAL for child in n.parent.children)
        ):
            # An "=" sibling of a closing brace indicates f"{x=}".
            features.add(Feature.DEBUG_F_STRINGS)

        elif is_number_token(n):
            if "_" in n.value:
                features.add(Feature.NUMERIC_UNDERSCORES)

        elif n.type == token.SLASH:
            # "/" inside an argument list marks positional-only parameters.
            if n.parent and n.parent.type in {
                syms.typedargslist,
                syms.arglist,
                syms.varargslist,
            }:
                features.add(Feature.POS_ONLY_ARGUMENTS)

        elif n.type == token.COLONEQUAL:
            features.add(Feature.ASSIGNMENT_EXPRESSIONS)

        elif n.type == token.LAZY:
            features.add(Feature.LAZY_IMPORTS)

        elif n.type == syms.decorator:
            if len(n.children) > 1 and not is_simple_decorator_expression(
                n.children[1]
            ):
                features.add(Feature.RELAXED_DECORATORS)

        elif is_unpacking_comprehension(n):
            features.add(Feature.UNPACKING_IN_COMPREHENSIONS)

        elif (
            n.type in {syms.typedargslist, syms.arglist}
            and n.children
            and n.children[-1].type == token.COMMA
        ):
            if n.type == syms.typedargslist:
                feature = Feature.TRAILING_COMMA_IN_DEF
            else:
                feature = Feature.TRAILING_COMMA_IN_CALL

            # Only a trailing comma *after* a star-arg is version-sensitive.
            for ch in n.children:
                if ch.type in STARS:
                    features.add(feature)

                if ch.type == syms.argument:
                    for argch in ch.children:
                        if argch.type in STARS:
                            features.add(feature)

        elif (
            n.type in {syms.return_stmt, syms.yield_expr}
            and len(n.children) >= 2
            and n.children[1].type == syms.testlist_star_expr
            and any(child.type == syms.star_expr for child in n.children[1].children)
        ):
            features.add(Feature.UNPACKING_ON_FLOW)

        elif (
            n.type == syms.annassign
            and len(n.children) >= 4
            and n.children[3].type == syms.testlist_star_expr
        ):
            features.add(Feature.ANN_ASSIGN_EXTENDED_RHS)

        elif (
            n.type == syms.with_stmt
            and len(n.children) > 2
            and n.children[1].type == syms.atom
        ):
            # with (...): only counts as parenthesized context managers when
            # the parenthesized part actually contains an as-expression.
            atom_children = n.children[1].children
            if (
                len(atom_children) == 3
                and atom_children[0].type == token.LPAR
                and _contains_asexpr(atom_children[1])
                and atom_children[2].type == token.RPAR
            ):
                features.add(Feature.PARENTHESIZED_CONTEXT_MANAGERS)

        elif n.type == syms.match_stmt:
            features.add(Feature.PATTERN_MATCHING)

        elif n.type in {syms.subscriptlist, syms.trailer} and any(
            child.type == syms.star_expr for child in n.children
        ):
            features.add(Feature.VARIADIC_GENERICS)

        elif (
            n.type == syms.tname_star
            and len(n.children) == 3
            and n.children[2].type == syms.star_expr
        ):
            features.add(Feature.VARIADIC_GENERICS)

        elif n.type in (syms.type_stmt, syms.typeparams):
            features.add(Feature.TYPE_PARAMS)

        elif (
            n.type in (syms.typevartuple, syms.paramspec, syms.typevar)
            and n.children[-2].type == token.EQUAL
        ):
            features.add(Feature.TYPE_PARAM_DEFAULTS)

        elif (
            n.type == syms.except_clause
            and len(n.children) >= 2
            and (
                n.children[1].type == token.STAR or n.children[1].type == syms.testlist
            )
        ):
            is_star_except = n.children[1].type == token.STAR

            if is_star_except:
                features.add(Feature.EXCEPT_STAR)

            # Presence of except* pushes as clause 1 index back
            has_as_clause = (
                len(n.children) >= is_star_except + 3
                and n.children[is_star_except + 2].type == token.NAME
                and n.children[is_star_except + 2].value == "as"  # type: ignore
            )

            # If there's no 'as' clause and the except expression is a testlist.
            if not has_as_clause and (
                (is_star_except and n.children[2].type == syms.testlist)
                or (not is_star_except and n.children[1].type == syms.testlist)
            ):
                features.add(Feature.UNPARENTHESIZED_EXCEPT_TYPES)

    return features

1529 

1530 

def is_unpacking_comprehension(node: LN) -> bool:
    """Return True for a comprehension whose element is starred (*x / **x)."""
    if node.type not in {syms.listmaker, syms.testlist_gexp, syms.dictsetmaker}:
        return False

    has_comp_clause = any(
        child.type in {syms.comp_for, syms.old_comp_for} for child in node.children
    )
    if not has_comp_clause:
        return False

    leading = node.children[0]
    return leading.type in (syms.star_expr, token.DOUBLESTAR)

1542 

1543 

def _contains_asexpr(node: Node | Leaf) -> bool:
    """Return True if `node` contains an as-pattern."""
    if node.type == syms.asexpr_test:
        return True

    if node.type == syms.atom:
        # Recurse through one level of parentheses, if present.
        children = node.children
        parenthesized = (
            len(children) == 3
            and children[0].type == token.LPAR
            and children[2].type == token.RPAR
        )
        return parenthesized and _contains_asexpr(children[1])

    if node.type == syms.testlist_gexp:
        return any(_contains_asexpr(child) for child in node.children)

    return False

1558 

1559 

def detect_target_versions(
    node: Node, *, future_imports: set[str] | None = None
) -> set[TargetVersion]:
    """Detect the version to target based on the nodes used."""
    used_features = get_features_used(node, future_imports=future_imports)
    compatible: set[TargetVersion] = set()
    for version in TargetVersion:
        # A version is compatible when it supports every feature in use.
        if used_features <= VERSION_TO_FEATURES[version]:
            compatible.add(version)
    return compatible

1568 

1569 

def get_future_imports(node: Node) -> set[str]:
    """Return a set of __future__ imports in the file.

    Only statements at the very top of the module (after an optional
    docstring) are considered, matching Python's own placement rule for
    __future__ imports.
    """
    imports: set[str] = set()

    def get_imports_from_children(children: list[LN]) -> Generator[str, None, None]:
        # Yield the *original* names in an import list, unwrapping
        # "name as alias" pairs and nested import_as_names groups.
        for child in children:
            if isinstance(child, Leaf):
                if child.type == token.NAME:
                    yield child.value

            elif child.type == syms.import_as_name:
                orig_name = child.children[0]
                assert isinstance(orig_name, Leaf), "Invalid syntax parsing imports"
                assert orig_name.type == token.NAME, "Invalid syntax parsing imports"
                yield orig_name.value

            elif child.type == syms.import_as_names:
                yield from get_imports_from_children(child.children)

            else:
                raise AssertionError("Invalid syntax parsing imports")

    for child in node.children:
        if child.type != syms.simple_stmt:
            break

        first_child = child.children[0]
        if isinstance(first_child, Leaf):
            # Continue looking if we see a docstring; otherwise stop.
            if (
                len(child.children) == 2
                and first_child.type == token.STRING
                and child.children[1].type == token.NEWLINE
            ):
                continue

            break

        elif first_child.type == syms.import_from:
            # Lazy imports cannot be __future__ imports.
            if first_child.children[0].type == token.LAZY:
                break

            module_name = first_child.children[1]
            if not isinstance(module_name, Leaf) or module_name.value != "__future__":
                break

            # children[3:] skips "from", "__future__", and "import".
            imports |= set(get_imports_from_children(first_child.children[3:]))
        else:
            break

    return imports

1621 

1622 

def _black_info() -> str:
    """Return a one-line description of this Black version and interpreter."""
    implementation = platform.python_implementation()
    interpreter_version = platform.python_version()
    return f"Black {__version__} on Python ({implementation}) {interpreter_version}"

1628 

1629 

def assert_equivalent(src: str, dst: str) -> None:
    """Raise AssertionError if `src` and `dst` aren't equivalent.

    Both strings are parsed to ASTs and compared via their stringified
    forms; any difference means formatting changed program semantics.
    Raises ASTSafetyError on parse failure or AST mismatch.
    """
    try:
        src_ast = parse_ast(src)
    except Exception as exc:
        # The *source* failing to parse is a user/environment problem,
        # not a formatter bug.
        raise ASTSafetyError(
            "cannot use --safe with this file; failed to parse source file AST: "
            f"{exc}\n"
            "This could be caused by running Black with an older Python version "
            "that does not support new syntax used in your source file."
        ) from exc

    try:
        dst_ast = parse_ast(dst)
    except Exception as exc:
        # The *output* failing to parse is an internal error; dump the
        # traceback and invalid output to a file for the bug report.
        log = dump_to_file("".join(traceback.format_tb(exc.__traceback__)), dst)
        raise ASTSafetyError(
            f"INTERNAL ERROR: {_black_info()} produced invalid code: {exc}. "
            "Please report a bug on https://github.com/psf/black/issues. "
            f"This invalid output might be helpful: {log}"
        ) from None

    src_ast_str = "\n".join(stringify_ast(src_ast))
    dst_ast_str = "\n".join(stringify_ast(dst_ast))
    if src_ast_str != dst_ast_str:
        log = dump_to_file(diff(src_ast_str, dst_ast_str, "src", "dst"))
        raise ASTSafetyError(
            f"INTERNAL ERROR: {_black_info()} produced code that is not equivalent to"
            " the source. Please report a bug on https://github.com/psf/black/issues."
            f" This diff might be helpful: {log}"
        ) from None

1661 

1662 

def assert_stable(
    src: str, dst: str, mode: Mode, *, lines: Collection[tuple[int, int]] = ()
) -> None:
    """Raise AssertionError if `dst` reformats differently the second time."""
    if lines:
        # Formatting specified lines requires `adjusted_lines` to map original lines
        # to the formatted lines before re-formatting the previously formatted result.
        # Due to less-ideal diff algorithm, some edge cases produce incorrect new line
        # ranges. Hence for now, we skip the stable check.
        # See https://github.com/psf/black/issues/4033 for context.
        return
    # We shouldn't call format_str() here, because that formats the string
    # twice and may hide a bug where we bounce back and forth between two
    # versions.
    newdst = _format_str_once(dst, mode=mode, lines=lines)
    if dst != newdst:
        # Dump both diffs so the bug report shows what each pass changed.
        log = dump_to_file(
            str(mode),
            diff(src, dst, "source", "first pass"),
            diff(dst, newdst, "first pass", "second pass"),
        )
        raise AssertionError(
            f"INTERNAL ERROR: {_black_info()} produced different code on the second"
            " pass of the formatter. Please report a bug on"
            f" https://github.com/psf/black/issues. This diff might be helpful: {log}"
        ) from None

1689 

1690 

def patched_main() -> None:
    """Entry point that also works under frozen (PyInstaller-style) builds."""
    # PyInstaller patches multiprocessing to need freeze_support() even in
    # non-Windows environments, so just assume we always need to call it
    # whenever we are frozen.
    running_frozen = getattr(sys, "frozen", False)
    if running_frozen:
        from multiprocessing import freeze_support

        freeze_support()

    main()

1700 

1701 

if __name__ == "__main__":
    # Script execution entry point (e.g. `python -m black` / direct run).
    patched_main()