Coverage for /pythoncovmergedfiles/medio/medio/usr/local/lib/python3.11/site-packages/black/__init__.py: 18%
1import io
2import json
3import platform
4import re
5import sys
6import tokenize
7import traceback
8from collections.abc import (
9 Collection,
10 Generator,
11 MutableMapping,
12 Sequence,
13)
14from contextlib import nullcontext
15from dataclasses import replace
16from datetime import datetime, timezone
17from enum import Enum
18from json.decoder import JSONDecodeError
19from pathlib import Path
20from re import Pattern
21from typing import Any
23import click
24from click.core import ParameterSource
25from mypy_extensions import mypyc_attr
26from pathspec import GitIgnoreSpec
27from pathspec.patterns.gitignore import GitIgnorePatternError
29from _black_version import version as __version__
30from black.cache import Cache
31from black.comments import normalize_fmt_off
32from black.const import (
33 DEFAULT_EXCLUDES,
34 DEFAULT_INCLUDES,
35 DEFAULT_LINE_LENGTH,
36 STDIN_PLACEHOLDER,
37)
38from black.files import (
39 best_effort_relative_path,
40 find_project_root,
41 find_pyproject_toml,
42 find_user_pyproject_toml,
43 gen_python_files,
44 get_gitignore,
45 parse_pyproject_toml,
46 path_is_excluded,
47 resolves_outside_root_or_cannot_stat,
48 wrap_stream_for_windows,
49)
50from black.handle_ipynb_magics import (
51 PYTHON_CELL_MAGICS,
52 jupyter_dependencies_are_installed,
53 mask_cell,
54 put_trailing_semicolon_back,
55 remove_trailing_semicolon,
56 unmask_cell,
57 validate_cell,
58)
59from black.linegen import LN, LineGenerator, transform_line
60from black.lines import EmptyLineTracker, LinesBlock
61from black.mode import FUTURE_FLAG_TO_FEATURE, VERSION_TO_FEATURES, Feature
62from black.mode import Mode as Mode # re-exported
63from black.mode import Preview, TargetVersion, supports_feature
64from black.nodes import STARS, is_number_token, is_simple_decorator_expression, syms
65from black.output import color_diff, diff, dump_to_file, err, ipynb_diff, out
66from black.parsing import ( # noqa F401
67 ASTSafetyError,
68 InvalidInput,
69 lib2to3_parse,
70 parse_ast,
71 stringify_ast,
72)
73from black.ranges import (
74 adjusted_lines,
75 convert_unchanged_lines,
76 parse_line_ranges,
77 sanitized_lines,
78)
79from black.report import Changed, NothingChanged, Report
80from blib2to3.pgen2 import token
81from blib2to3.pytree import Leaf, Node
83COMPILED = Path(__file__).suffix in (".pyd", ".so")
85# types
86FileContent = str
87Encoding = str
88NewLine = str
91class WriteBack(Enum):
92 NO = 0
93 YES = 1
94 DIFF = 2
95 CHECK = 3
96 COLOR_DIFF = 4
98 @classmethod
99 def from_configuration(
100 cls, *, check: bool, diff: bool, color: bool = False
101 ) -> "WriteBack":
102 if check and not diff:
103 return cls.CHECK
105 if diff and color:
106 return cls.COLOR_DIFF
108 return cls.DIFF if diff else cls.YES
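# A rough summary of the mapping above (illustrative, not exhaustive):
#   --check            -> WriteBack.CHECK
#   --diff             -> WriteBack.DIFF (also when combined with --check)
#   --diff --color     -> WriteBack.COLOR_DIFF
#   no relevant flags  -> WriteBack.YES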
111# Legacy name, left for integrations.
112FileMode = Mode
115def read_pyproject_toml(
116 ctx: click.Context, param: click.Parameter, value: str | None
117) -> str | None:
118 """Inject Black configuration from "pyproject.toml" into defaults in `ctx`.
120 Returns the path to a successfully found and read configuration file, None
121 otherwise.
122 """
123 if not value:
124 value = find_pyproject_toml(
125 ctx.params.get("src", ()), ctx.params.get("stdin_filename", None)
126 )
127 if value is None:
128 return None
130 try:
131 config = parse_pyproject_toml(value)
132 except (OSError, ValueError) as e:
133 raise click.FileError(
134 filename=value, hint=f"Error reading configuration file: {e}"
135 ) from None
137 if not config:
138 return None
139 else:
140 spellcheck_pyproject_toml_keys(ctx, list(config), value)
141 # Sanitize the values to be Click friendly. For more information please see:
142 # https://github.com/psf/black/issues/1458
143 # https://github.com/pallets/click/issues/1567
144 config = {
145 k: str(v) if not isinstance(v, (list, dict)) else v
146 for k, v in config.items()
147 }
149 target_version = config.get("target_version")
150 if target_version is not None and not isinstance(target_version, list):
151 raise click.BadOptionUsage(
152 "target-version", "Config key target-version must be a list"
153 )
155 exclude = config.get("exclude")
156 if exclude is not None and not isinstance(exclude, str):
157 raise click.BadOptionUsage("exclude", "Config key exclude must be a string")
159 extend_exclude = config.get("extend_exclude")
160 if extend_exclude is not None and not isinstance(extend_exclude, str):
161 raise click.BadOptionUsage(
162 "extend-exclude", "Config key extend-exclude must be a string"
163 )
165 line_ranges = config.get("line_ranges")
166 if line_ranges is not None:
167 raise click.BadOptionUsage(
168 "line-ranges", "Cannot use line-ranges in the pyproject.toml file."
169 )
171 default_map: dict[str, Any] = {}
172 if ctx.default_map:
173 default_map.update(ctx.default_map)
174 default_map.update(config)
176 ctx.default_map = default_map
177 return value
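# Illustrative example of what the callback above feeds into Click. A pyproject.toml
# containing
#
#   [tool.black]
#   line-length = 100
#   target-version = ["py311"]
#
# ends up in ctx.default_map roughly as {"line_length": "100", "target_version":
# ["py311"]} after key normalization and Click-friendly stringification.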
180def spellcheck_pyproject_toml_keys(
181 ctx: click.Context, config_keys: list[str], config_file_path: str
182) -> None:
183 invalid_keys: list[str] = []
184 available_config_options = {param.name for param in ctx.command.params}
185 invalid_keys = [key for key in config_keys if key not in available_config_options]
186 if invalid_keys:
187 keys_str = ", ".join(map(repr, invalid_keys))
188 out(
189 f"Invalid config keys detected: {keys_str} (in {config_file_path})",
190 fg="red",
191 )
194def target_version_option_callback(
195 c: click.Context, p: click.Option | click.Parameter, v: tuple[str, ...]
196) -> list[TargetVersion]:
197 """Compute the target versions from a --target-version flag.
199 This is its own function because mypy couldn't infer the type correctly
200 when it was a lambda, causing mypyc trouble.
201 """
202 return [TargetVersion[val.upper()] for val in v]
205def enable_unstable_feature_callback(
206 c: click.Context, p: click.Option | click.Parameter, v: tuple[str, ...]
207) -> list[Preview]:
208 """Compute the features from an --enable-unstable-feature flag."""
209 return [Preview[val] for val in v]
212def re_compile_maybe_verbose(regex: str) -> Pattern[str]:
213 """Compile a regular expression string in `regex`.
215 If it contains newlines, use verbose mode.
216 """
217 if "\n" in regex:
218 regex = "(?x)" + regex
219 compiled: Pattern[str] = re.compile(regex)
220 return compiled
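# Illustrative: a single-line pattern like r"\.pyi?$" compiles as-is, while a
# multi-line pattern such as "build/\n|dist/" gets the "(?x)" prefix and is therefore
# compiled in verbose mode (whitespace is ignored and "#" starts a comment).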
223def validate_regex(
224 ctx: click.Context,
225 param: click.Parameter,
226 value: str | None,
227) -> Pattern[str] | None:
228 try:
229 return re_compile_maybe_verbose(value) if value is not None else None
230 except re.error as e:
231 raise click.BadParameter(f"Not a valid regular expression: {e}") from None
234@click.command(
235 context_settings={"help_option_names": ["-h", "--help"]},
236 # While Click does set this field automatically using the docstring, mypyc
237 # (annoyingly) strips 'em so we need to set it here too.
238 help="The uncompromising code formatter.",
239)
240@click.option("-c", "--code", type=str, help="Format the code passed in as a string.")
241@click.option(
242 "-l",
243 "--line-length",
244 type=int,
245 default=DEFAULT_LINE_LENGTH,
246 help="How many characters per line to allow.",
247 show_default=True,
248)
249@click.option(
250 "-t",
251 "--target-version",
252 type=click.Choice([v.name.lower() for v in TargetVersion]),
253 callback=target_version_option_callback,
254 multiple=True,
255 help=(
256 "Python versions that should be supported by Black's output. You should"
257 " include all versions that your code supports. By default, Black will infer"
258 " target versions from the project metadata in pyproject.toml. If this does"
259 " not yield conclusive results, Black will use per-file auto-detection."
260 ),
261)
262@click.option(
263 "--pyi",
264 is_flag=True,
265 help=(
266 "Format all input files like typing stubs regardless of file extension. This"
267 " is useful when piping source on standard input."
268 ),
269)
270@click.option(
271 "--ipynb",
272 is_flag=True,
273 help=(
274 "Format all input files like Jupyter Notebooks regardless of file extension."
275 " This is useful when piping source on standard input."
276 ),
277)
278@click.option(
279 "--python-cell-magics",
280 multiple=True,
281 help=(
282 "When processing Jupyter Notebooks, add the given magic to the list"
283 f" of known python-magics ({', '.join(sorted(PYTHON_CELL_MAGICS))})."
284 " Useful for formatting cells with custom python magics."
285 ),
286 default=[],
287)
288@click.option(
289 "-x",
290 "--skip-source-first-line",
291 is_flag=True,
292 help="Skip the first line of the source code.",
293)
294@click.option(
295 "-S",
296 "--skip-string-normalization",
297 is_flag=True,
298 help="Don't normalize string quotes or prefixes.",
299)
300@click.option(
301 "-C",
302 "--skip-magic-trailing-comma",
303 is_flag=True,
304 help="Don't use trailing commas as a reason to split lines.",
305)
306@click.option(
307 "--preview",
308 is_flag=True,
309 help=(
310 "Enable potentially disruptive style changes that may be added to Black's main"
311 " functionality in the next major release."
312 ),
313)
314@click.option(
315 "--unstable",
316 is_flag=True,
317 help=(
318 "Enable potentially disruptive style changes that have known bugs or are not"
319 " currently expected to make it into the stable style Black's next major"
320 " release. Implies --preview."
321 ),
322)
323@click.option(
324 "--enable-unstable-feature",
325 type=click.Choice([v.name for v in Preview]),
326 callback=enable_unstable_feature_callback,
327 multiple=True,
328 help=(
329 "Enable specific features included in the `--unstable` style. Requires"
330 " `--preview`. No compatibility guarantees are provided on the behavior"
331 " or existence of any unstable features."
332 ),
333)
334@click.option(
335 "--check",
336 is_flag=True,
337 help=(
338 "Don't write the files back, just return the status. Return code 0 means"
339 " nothing would change. Return code 1 means some files would be reformatted."
340 " Return code 123 means there was an internal error."
341 ),
342)
343@click.option(
344 "--diff",
345 is_flag=True,
346 help=(
347 "Don't write the files back, just output a diff to indicate what changes"
348 " Black would've made. They are printed to stdout so capturing them is simple."
349 ),
350)
351@click.option(
352 "--color/--no-color",
353 is_flag=True,
354 help="Show (or do not show) colored diff. Only applies when --diff is given.",
355)
356@click.option(
357 "--line-ranges",
358 multiple=True,
359 metavar="START-END",
360 help=(
361 "When specified, Black will try its best to only format these lines. This"
362 " option can be specified multiple times, and a union of the lines will be"
363 " formatted. Each range must be specified as two integers connected by a `-`:"
364 " `<START>-<END>`. The `<START>` and `<END>` integer indices are 1-based and"
365 " inclusive on both ends."
366 ),
367 default=(),
368)
369@click.option(
370 "--fast/--safe",
371 is_flag=True,
372 help=(
373 "By default, Black performs an AST safety check after formatting your code."
374 " The --fast flag turns off this check and the --safe flag explicitly enables"
375 " it. [default: --safe]"
376 ),
377)
378@click.option(
379 "--required-version",
380 type=str,
381 help=(
382 "Require a specific version of Black to be running. This is useful for"
383 " ensuring that all contributors to your project are using the same"
384 " version, because different versions of Black may format code a little"
385 " differently. This option can be set in a configuration file for consistent"
386 " results across environments."
387 ),
388)
389@click.option(
390 "--exclude",
391 type=str,
392 callback=validate_regex,
393 help=(
394 "A regular expression that matches files and directories that should be"
395 " excluded on recursive searches. An empty value means no paths are excluded."
396 " Use forward slashes for directories on all platforms (Windows, too)."
397 " By default, Black also ignores all paths listed in .gitignore. Changing this"
398 f" value will override all default exclusions. [default: {DEFAULT_EXCLUDES}]"
399 ),
400 show_default=False,
401)
402@click.option(
403 "--extend-exclude",
404 type=str,
405 callback=validate_regex,
406 help=(
407 "Like --exclude, but adds additional files and directories on top of the"
408 " default values instead of overriding them."
409 ),
410)
411@click.option(
412 "--force-exclude",
413 type=str,
414 callback=validate_regex,
415 help=(
416 "Like --exclude, but files and directories matching this regex will be excluded"
417 " even when they are passed explicitly as arguments. This is useful when"
418 " invoking Black programmatically on changed files, such as in a pre-commit"
419 " hook or editor plugin."
420 ),
421)
422@click.option(
423 "--stdin-filename",
424 type=str,
425 is_eager=True,
426 help=(
427 "The name of the file when passing it through stdin. Useful to make sure Black"
428 " will respect the --force-exclude option on some editors that rely on using"
429 " stdin."
430 ),
431)
432@click.option(
433 "--include",
434 type=str,
435 default=DEFAULT_INCLUDES,
436 callback=validate_regex,
437 help=(
438 "A regular expression that matches files and directories that should be"
439 " included on recursive searches. An empty value means all files are included"
440 " regardless of the name. Use forward slashes for directories on all platforms"
441 " (Windows, too). Overrides all exclusions, including from .gitignore and"
442 " command line options."
443 ),
444 show_default=True,
445)
446@click.option(
447 "-W",
448 "--workers",
449 type=click.IntRange(min=1),
450 default=None,
451 help=(
452 "When Black formats multiple files, it may use a process pool to speed up"
453 " formatting. This option controls the number of parallel workers. This can"
454 " also be specified via the BLACK_NUM_WORKERS environment variable. Defaults"
455 " to the number of CPUs in the system."
456 ),
457)
458@click.option(
459 "-q",
460 "--quiet",
461 is_flag=True,
462 help=(
463 "Stop emitting all non-critical output. Error messages will still be emitted"
464 " (which can silenced by 2>/dev/null)."
465 ),
466)
467@click.option(
468 "-v",
469 "--verbose",
470 is_flag=True,
471 help=(
472 "Emit messages about files that were not changed or were ignored due to"
473 " exclusion patterns. If Black is using a configuration file, a message"
474 " detailing which one it is using will be emitted."
475 ),
476)
477@click.version_option(
478 version=__version__,
479 message=(
480 f"%(prog)s, %(version)s (compiled: {'yes' if COMPILED else 'no'})\n"
481 f"Python ({platform.python_implementation()}) {platform.python_version()}"
482 ),
483)
484@click.argument(
485 "src",
486 nargs=-1,
487 type=click.Path(
488 exists=True, file_okay=True, dir_okay=True, readable=True, allow_dash=True
489 ),
490 is_eager=True,
491 metavar="SRC ...",
492)
493@click.option(
494 "--config",
495 type=click.Path(
496 exists=True,
497 file_okay=True,
498 dir_okay=False,
499 readable=True,
500 allow_dash=False,
501 path_type=str,
502 ),
503 is_eager=True,
504 callback=read_pyproject_toml,
505 help="Read configuration options from a configuration file.",
506)
507@click.option(
508 "--no-cache",
509 is_flag=True,
510 help=(
511 "Skip reading and writing the cache, forcing Black to reformat all"
512 " included files."
513 ),
514)
515@click.pass_context
516def main(
517 ctx: click.Context,
518 code: str | None,
519 line_length: int,
520 target_version: list[TargetVersion],
521 check: bool,
522 diff: bool,
523 line_ranges: Sequence[str],
524 color: bool,
525 fast: bool,
526 pyi: bool,
527 ipynb: bool,
528 python_cell_magics: Sequence[str],
529 skip_source_first_line: bool,
530 skip_string_normalization: bool,
531 skip_magic_trailing_comma: bool,
532 preview: bool,
533 unstable: bool,
534 enable_unstable_feature: list[Preview],
535 quiet: bool,
536 verbose: bool,
537 required_version: str | None,
538 include: Pattern[str],
539 exclude: Pattern[str] | None,
540 extend_exclude: Pattern[str] | None,
541 force_exclude: Pattern[str] | None,
542 stdin_filename: str | None,
543 workers: int | None,
544 src: tuple[str, ...],
545 config: str | None,
546 no_cache: bool,
547) -> None:
548 """The uncompromising code formatter."""
549 ctx.ensure_object(dict)
551 assert sys.version_info >= (3, 10), "Black requires Python 3.10+"
552 if sys.version_info[:3] == (3, 12, 5):
553 out(
554 "Python 3.12.5 has a memory safety issue that can cause Black's "
555 "AST safety checks to fail. "
556 "Please upgrade to Python 3.12.6 or downgrade to Python 3.12.4"
557 )
558 ctx.exit(1)
560 if src and code is not None:
561 out(
562 main.get_usage(ctx)
563 + "\n\n'SRC' and 'code' cannot be passed simultaneously."
564 )
565 ctx.exit(1)
566 if not src and code is None:
567 out(main.get_usage(ctx) + "\n\nOne of 'SRC' or 'code' is required.")
568 ctx.exit(1)
570 # It doesn't do anything if --unstable is also passed, so just allow it.
571 if enable_unstable_feature and not (preview or unstable):
572 out(
573 main.get_usage(ctx)
574 + "\n\n'--enable-unstable-feature' requires '--preview'."
575 )
576 ctx.exit(1)
578 root, method = (
579 find_project_root(src, stdin_filename) if code is None else (None, None)
580 )
581 ctx.obj["root"] = root
583 if verbose:
584 if root:
585 out(
586 f"Identified `{root}` as project root containing a {method}.",
587 fg="blue",
588 )
590 if config:
591 config_source = ctx.get_parameter_source("config")
592 user_level_config = str(find_user_pyproject_toml())
593 if config == user_level_config:
594 out(
595 "Using configuration from user-level config at "
596 f"'{user_level_config}'.",
597 fg="blue",
598 )
599 elif config_source in (
600 ParameterSource.DEFAULT,
601 ParameterSource.DEFAULT_MAP,
602 ):
603 out("Using configuration from project root.", fg="blue")
604 else:
605 out(f"Using configuration in '{config}'.", fg="blue")
606 if ctx.default_map:
607 for param, value in ctx.default_map.items():
608 out(f"{param}: {value}")
610 error_msg = "Oh no! 💥 💔 💥"
611 if (
612 required_version
613 and required_version != __version__
614 and required_version != __version__.split(".")[0]
615 ):
616 err(
617 f"{error_msg} The required version `{required_version}` does not match"
618 f" the running version `{__version__}`!"
619 )
620 ctx.exit(1)
621 if ipynb and pyi:
622 err("Cannot pass both `pyi` and `ipynb` flags!")
623 ctx.exit(1)
625 write_back = WriteBack.from_configuration(check=check, diff=diff, color=color)
626 if target_version:
627 versions = set(target_version)
628 else:
629 # We'll autodetect later.
630 versions = set()
631 mode = Mode(
632 target_versions=versions,
633 line_length=line_length,
634 is_pyi=pyi,
635 is_ipynb=ipynb,
636 skip_source_first_line=skip_source_first_line,
637 string_normalization=not skip_string_normalization,
638 magic_trailing_comma=not skip_magic_trailing_comma,
639 preview=preview,
640 unstable=unstable,
641 python_cell_magics=set(python_cell_magics),
642 enabled_features=set(enable_unstable_feature),
643 )
645 lines: list[tuple[int, int]] = []
646 if line_ranges:
647 if ipynb:
648 err("Cannot use --line-ranges with ipynb files.")
649 ctx.exit(1)
651 try:
652 lines = parse_line_ranges(line_ranges)
653 except ValueError as e:
654 err(str(e))
655 ctx.exit(1)
657 if code is not None:
658 # Run in quiet mode by default with -c; the extra output isn't useful.
659 # You can still pass -v to get verbose output.
660 quiet = True
662 report = Report(check=check, diff=diff, quiet=quiet, verbose=verbose)
664 if code is not None:
665 reformat_code(
666 content=code,
667 fast=fast,
668 write_back=write_back,
669 mode=mode,
670 report=report,
671 lines=lines,
672 )
673 else:
674 assert root is not None # root is only None if code is not None
675 try:
676 sources = get_sources(
677 root=root,
678 src=src,
679 quiet=quiet,
680 verbose=verbose,
681 include=include,
682 exclude=exclude,
683 extend_exclude=extend_exclude,
684 force_exclude=force_exclude,
685 report=report,
686 stdin_filename=stdin_filename,
687 )
688 except GitIgnorePatternError:
689 ctx.exit(1)
691 if not sources:
692 if verbose or not quiet:
693 out("No Python files are present to be formatted. Nothing to do 😴")
694 if "-" in src:
695 sys.stdout.write(sys.stdin.read())
696 ctx.exit(0)
698 if len(sources) == 1:
699 reformat_one(
700 src=sources.pop(),
701 fast=fast,
702 write_back=write_back,
703 mode=mode,
704 report=report,
705 lines=lines,
706 no_cache=no_cache,
707 )
708 else:
709 from black.concurrency import reformat_many
711 if lines:
712 err("Cannot use --line-ranges to format multiple files.")
713 ctx.exit(1)
714 reformat_many(
715 sources=sources,
716 fast=fast,
717 write_back=write_back,
718 mode=mode,
719 report=report,
720 workers=workers,
721 no_cache=no_cache,
722 )
724 if verbose or not quiet:
725 if code is None and (verbose or report.change_count or report.failure_count):
726 out()
727 out(error_msg if report.return_code else "All done! ✨ 🍰 ✨")
728 if code is None:
729 click.echo(str(report), err=True)
730 ctx.exit(report.return_code)
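# Typical invocations routed through main() above (illustrative):
#   black src/                 # reformat files in place
#   black --check --diff src/  # report and print diffs without writing
#   black -c "x=1"             # format a code string passed on the command line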
733def get_sources(
734 *,
735 root: Path,
736 src: tuple[str, ...],
737 quiet: bool,
738 verbose: bool,
739 include: Pattern[str],
740 exclude: Pattern[str] | None,
741 extend_exclude: Pattern[str] | None,
742 force_exclude: Pattern[str] | None,
743 report: "Report",
744 stdin_filename: str | None,
745) -> set[Path]:
746 """Compute the set of files to be formatted."""
747 sources: set[Path] = set()
749 assert root.is_absolute(), f"INTERNAL ERROR: `root` must be absolute but is {root}"
750 using_default_exclude = exclude is None
751 exclude = re_compile_maybe_verbose(DEFAULT_EXCLUDES) if exclude is None else exclude
752 gitignore: dict[Path, GitIgnoreSpec] | None = None
753 root_gitignore = get_gitignore(root)
755 for s in src:
756 if s == "-" and stdin_filename:
757 path = Path(stdin_filename)
758 if path_is_excluded(stdin_filename, force_exclude):
759 report.path_ignored(
760 path,
761 "--stdin-filename matches the --force-exclude regular expression",
762 )
763 continue
764 is_stdin = True
765 else:
766 path = Path(s)
767 is_stdin = False
769 # Compare the logic here to the logic in `gen_python_files`.
770 if is_stdin or path.is_file():
771 if resolves_outside_root_or_cannot_stat(path, root, report):
772 if verbose:
773 out(f'Skipping invalid source: "{path}"', fg="red")
774 continue
776 root_relative_path = best_effort_relative_path(path, root).as_posix()
777 root_relative_path = "/" + root_relative_path
779 # Hard-exclude any files that match the `--force-exclude` regex.
780 if path_is_excluded(root_relative_path, force_exclude):
781 report.path_ignored(
782 path, "matches the --force-exclude regular expression"
783 )
784 continue
786 if is_stdin:
787 path = Path(f"{STDIN_PLACEHOLDER}{path}")
789 if path.suffix == ".ipynb" and not jupyter_dependencies_are_installed(
790 warn=verbose or not quiet
791 ):
792 continue
794 if verbose:
795 out(f'Found input source: "{path}"', fg="blue")
796 sources.add(path)
797 elif path.is_dir():
798 path = root / (path.resolve().relative_to(root))
799 if verbose:
800 out(f'Found input source directory: "{path}"', fg="blue")
802 if using_default_exclude:
803 gitignore = {
804 root: root_gitignore,
805 path: get_gitignore(path),
806 }
807 sources.update(
808 gen_python_files(
809 path.iterdir(),
810 root,
811 include,
812 exclude,
813 extend_exclude,
814 force_exclude,
815 report,
816 gitignore,
817 verbose=verbose,
818 quiet=quiet,
819 )
820 )
821 elif s == "-":
822 if verbose:
823 out("Found input source stdin", fg="blue")
824 sources.add(path)
825 else:
826 err(f"invalid path: {s}")
828 return sources
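# Note on the logic above: files passed explicitly (or via stdin) are only filtered by
# --force-exclude, while directories are expanded through gen_python_files, which also
# applies --include/--exclude/--extend-exclude and the relevant .gitignore rules.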
831def reformat_code(
832 content: str,
833 fast: bool,
834 write_back: WriteBack,
835 mode: Mode,
836 report: Report,
837 *,
838 lines: Collection[tuple[int, int]] = (),
839) -> None:
840 """
841 Reformat and print out `content` without spawning child processes.
842 Similar to `reformat_one`, but for string content.
844 `fast`, `write_back`, and `mode` options are passed to
845 :func:`format_file_in_place` or :func:`format_stdin_to_stdout`.
846 """
847 path = Path("<string>")
848 try:
849 changed = Changed.NO
850 if format_stdin_to_stdout(
851 content=content, fast=fast, write_back=write_back, mode=mode, lines=lines
852 ):
853 changed = Changed.YES
854 report.done(path, changed)
855 except Exception as exc:
856 if report.verbose:
857 traceback.print_exc()
858 report.failed(path, str(exc))
861# diff-shades depends on being able to monkeypatch this function to operate. I know it's
862# not ideal, but this shouldn't cause any issues ... hopefully. ~ichard26
863@mypyc_attr(patchable=True)
864def reformat_one(
865 src: Path,
866 fast: bool,
867 write_back: WriteBack,
868 mode: Mode,
869 report: "Report",
870 *,
871 lines: Collection[tuple[int, int]] = (),
872 no_cache: bool = False,
873) -> None:
874 """Reformat a single file under `src` without spawning child processes.
876 `fast`, `write_back`, and `mode` options are passed to
877 :func:`format_file_in_place` or :func:`format_stdin_to_stdout`.
878 """
879 try:
880 changed = Changed.NO
882 if str(src) == "-":
883 is_stdin = True
884 elif str(src).startswith(STDIN_PLACEHOLDER):
885 is_stdin = True
886 # Use the original name again in case we want to print something
887 # to the user
888 src = Path(str(src)[len(STDIN_PLACEHOLDER) :])
889 else:
890 is_stdin = False
892 if is_stdin:
893 if src.suffix == ".pyi":
894 mode = replace(mode, is_pyi=True)
895 elif src.suffix == ".ipynb":
896 mode = replace(mode, is_ipynb=True)
897 if format_stdin_to_stdout(
898 fast=fast, write_back=write_back, mode=mode, lines=lines
899 ):
900 changed = Changed.YES
901 else:
902 cache = None if no_cache else Cache.read(mode)
903 if cache is not None and write_back not in (
904 WriteBack.DIFF,
905 WriteBack.COLOR_DIFF,
906 ):
907 if not cache.is_changed(src):
908 changed = Changed.CACHED
909 if changed is not Changed.CACHED and format_file_in_place(
910 src, fast=fast, write_back=write_back, mode=mode, lines=lines
911 ):
912 changed = Changed.YES
913 if cache is not None and (
914 (write_back is WriteBack.YES and changed is not Changed.CACHED)
915 or (write_back is WriteBack.CHECK and changed is Changed.NO)
916 ):
917 cache.write([src])
918 report.done(src, changed)
919 except Exception as exc:
920 if report.verbose:
921 traceback.print_exc()
922 report.failed(src, str(exc))
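# Caching behavior of reformat_one, as implemented above: the cache is bypassed for
# stdin input and for diff output; a file recorded as unchanged in the cache is
# reported as Changed.CACHED without reformatting, and the cache is updated after a
# write (or, with --check, for files that needed no changes).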
925def format_file_in_place(
926 src: Path,
927 fast: bool,
928 mode: Mode,
929 write_back: WriteBack = WriteBack.NO,
930 lock: Any = None, # multiprocessing.Manager().Lock() is some crazy proxy
931 *,
932 lines: Collection[tuple[int, int]] = (),
933) -> bool:
934 """Format file under `src` path. Return True if changed.
936 If `write_back` is DIFF, write a diff to stdout. If it is YES, write reformatted
937 code to the file.
938 `mode` and `fast` options are passed to :func:`format_file_contents`.
939 """
940 if src.suffix == ".pyi":
941 mode = replace(mode, is_pyi=True)
942 elif src.suffix == ".ipynb":
943 mode = replace(mode, is_ipynb=True)
945 then = datetime.fromtimestamp(src.stat().st_mtime, timezone.utc)
946 header = b""
947 with open(src, "rb") as buf:
948 if mode.skip_source_first_line:
949 header = buf.readline()
950 src_contents, encoding, newline = decode_bytes(buf.read(), mode)
951 try:
952 dst_contents = format_file_contents(
953 src_contents, fast=fast, mode=mode, lines=lines
954 )
955 except NothingChanged:
956 return False
957 except JSONDecodeError:
958 raise ValueError(
959 f"File '{src}' cannot be parsed as valid Jupyter notebook."
960 ) from None
961 src_contents = header.decode(encoding) + src_contents
962 dst_contents = header.decode(encoding) + dst_contents
964 if write_back == WriteBack.YES:
965 with open(src, "w", encoding=encoding, newline=newline) as f:
966 f.write(dst_contents)
967 elif write_back in (WriteBack.DIFF, WriteBack.COLOR_DIFF):
968 now = datetime.now(timezone.utc)
969 src_name = f"{src}\t{then}"
970 dst_name = f"{src}\t{now}"
971 if mode.is_ipynb:
972 diff_contents = ipynb_diff(src_contents, dst_contents, src_name, dst_name)
973 else:
974 diff_contents = diff(src_contents, dst_contents, src_name, dst_name)
976 if write_back == WriteBack.COLOR_DIFF:
977 diff_contents = color_diff(diff_contents)
979 with lock or nullcontext():
980 f = io.TextIOWrapper(
981 sys.stdout.buffer,
982 encoding=encoding,
983 newline=newline,
984 write_through=True,
985 )
986 f = wrap_stream_for_windows(f)
987 f.write(diff_contents)
988 f.detach()
990 return True
993def format_stdin_to_stdout(
994 fast: bool,
995 *,
996 content: str | None = None,
997 write_back: WriteBack = WriteBack.NO,
998 mode: Mode,
999 lines: Collection[tuple[int, int]] = (),
1000) -> bool:
1001 """Format file on stdin. Return True if changed.
1003 If content is None, it's read from sys.stdin.
1005 If `write_back` is YES, write reformatted code back to stdout. If it is DIFF,
1006 write a diff to stdout. The `mode` argument is passed to
1007 :func:`format_file_contents`.
1008 """
1009 then = datetime.now(timezone.utc)
1011 if content is None:
1012 src, encoding, newline = decode_bytes(sys.stdin.buffer.read(), mode)
1013 else:
1014 src, encoding, newline = content, "utf-8", "\n"
1016 dst = src
1017 try:
1018 dst = format_file_contents(src, fast=fast, mode=mode, lines=lines)
1019 return True
1021 except NothingChanged:
1022 return False
1024 finally:
1025 f = io.TextIOWrapper(
1026 sys.stdout.buffer, encoding=encoding, newline=newline, write_through=True
1027 )
1028 if write_back == WriteBack.YES:
1029 # Make sure there's a newline after the content
1030 if dst and dst[-1] != "\n" and dst[-1] != "\r":
1031 dst += newline
1032 f.write(dst)
1033 elif write_back in (WriteBack.DIFF, WriteBack.COLOR_DIFF):
1034 now = datetime.now(timezone.utc)
1035 src_name = f"STDIN\t{then}"
1036 dst_name = f"STDOUT\t{now}"
1037 d = diff(src, dst, src_name, dst_name)
1038 if write_back == WriteBack.COLOR_DIFF:
1039 d = color_diff(d)
1040 f = wrap_stream_for_windows(f)
1041 f.write(d)
1042 f.detach()
1045def check_stability_and_equivalence(
1046 src_contents: str,
1047 dst_contents: str,
1048 *,
1049 mode: Mode,
1050 lines: Collection[tuple[int, int]] = (),
1051) -> None:
1052 """Perform stability and equivalence checks.
1054 Raise AssertionError if source and destination contents are not
1055 equivalent, or if a second pass of the formatter would format the
1056 content differently.
1057 """
1058 assert_equivalent(src_contents, dst_contents)
1059 assert_stable(src_contents, dst_contents, mode=mode, lines=lines)
1062def format_file_contents(
1063 src_contents: str,
1064 *,
1065 fast: bool,
1066 mode: Mode,
1067 lines: Collection[tuple[int, int]] = (),
1068) -> FileContent:
1069 """Reformat contents of a file and return new contents.
1071 If `fast` is False, additionally confirm that the reformatted code is
1072 valid by calling :func:`assert_equivalent` and :func:`assert_stable` on it.
1073 `mode` is passed to :func:`format_str`.
1074 """
1075 if mode.is_ipynb:
1076 dst_contents = format_ipynb_string(src_contents, fast=fast, mode=mode)
1077 else:
1078 dst_contents = format_str(src_contents, mode=mode, lines=lines)
1079 if src_contents == dst_contents:
1080 raise NothingChanged
1082 if not fast and not mode.is_ipynb:
1083 # Jupyter notebooks will already have been checked above.
1084 check_stability_and_equivalence(
1085 src_contents, dst_contents, mode=mode, lines=lines
1086 )
1087 return dst_contents
1090def format_cell(src: str, *, fast: bool, mode: Mode) -> str:
1091 """Format code in given cell of Jupyter notebook.
1093 General idea is:
1095 - if cell has trailing semicolon, remove it;
1096 - if cell has IPython magics, mask them;
1097 - format cell;
1098 - reinstate IPython magics;
1099 - reinstate trailing semicolon (if originally present);
1100 - strip trailing newlines.
1102 Cells with syntax errors will not be processed, as they
1103 could potentially be automagics or multi-line magics, which
1104 are currently not supported.
1105 """
1106 validate_cell(src, mode)
1107 src_without_trailing_semicolon, has_trailing_semicolon = remove_trailing_semicolon(
1108 src
1109 )
1110 try:
1111 masked_src, replacements = mask_cell(src_without_trailing_semicolon)
1112 except SyntaxError:
1113 raise NothingChanged from None
1114 masked_dst = format_str(masked_src, mode=mode)
1115 if not fast:
1116 check_stability_and_equivalence(masked_src, masked_dst, mode=mode)
1117 dst_without_trailing_semicolon = unmask_cell(masked_dst, replacements)
1118 dst = put_trailing_semicolon_back(
1119 dst_without_trailing_semicolon, has_trailing_semicolon
1120 )
1121 dst = dst.rstrip("\n")
1122 if dst == src:
1123 raise NothingChanged from None
1124 return dst
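# Illustrative (assuming "time" stays in the known cell magics): a cell like
# "%%time\nx =1;" has its magic masked, the code reformatted to "x = 1", and the
# trailing semicolon and magic restored, yielding "%%time\nx = 1;".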
1127def validate_metadata(nb: MutableMapping[str, Any]) -> None:
1128 """If notebook is marked as non-Python, don't format it.
1130 All notebook metadata fields are optional, see
1131 https://nbformat.readthedocs.io/en/latest/format_description.html. So
1132 if a notebook has empty metadata, we will try to parse it anyway.
1133 """
1134 language = nb.get("metadata", {}).get("language_info", {}).get("name", None)
1135 if language is not None and language != "python":
1136 raise NothingChanged from None
1139def format_ipynb_string(src_contents: str, *, fast: bool, mode: Mode) -> FileContent:
1140 """Format Jupyter notebook.
1142 Operate cell-by-cell, only on code cells, only for Python notebooks.
1143 If the ``.ipynb`` originally had a trailing newline, it'll be preserved.
1144 """
1145 if not src_contents:
1146 raise NothingChanged
1148 trailing_newline = src_contents[-1] == "\n"
1149 modified = False
1150 nb = json.loads(src_contents)
1151 validate_metadata(nb)
1152 for cell in nb["cells"]:
1153 if cell.get("cell_type", None) == "code":
1154 try:
1155 src = "".join(cell["source"])
1156 dst = format_cell(src, fast=fast, mode=mode)
1157 except NothingChanged:
1158 pass
1159 else:
1160 cell["source"] = dst.splitlines(keepends=True)
1161 modified = True
1162 if modified:
1163 dst_contents = json.dumps(nb, indent=1, ensure_ascii=False)
1164 if trailing_newline:
1165 dst_contents = dst_contents + "\n"
1166 return dst_contents
1167 else:
1168 raise NothingChanged
1171def format_str(
1172 src_contents: str, *, mode: Mode, lines: Collection[tuple[int, int]] = ()
1173) -> str:
1174 """Reformat a string and return new contents.
1176 `mode` determines formatting options, such as how many characters per line are
1177 allowed. Example:
1179 >>> import black
1180 >>> print(black.format_str("def f(arg:str='')->None:...", mode=black.Mode()))
1181 def f(arg: str = "") -> None:
1182 ...
1184 A more complex example:
1186 >>> print(
1187 ... black.format_str(
1188 ... "def f(arg:str='')->None: hey",
1189 ... mode=black.Mode(
1190 ... target_versions={black.TargetVersion.PY36},
1191 ... line_length=10,
1192 ... string_normalization=False,
1193 ... is_pyi=False,
1194 ... ),
1195 ... ),
1196 ... )
1197 def f(
1198 arg: str = '',
1199 ) -> None:
1200 hey
1202 """
1203 if lines:
1204 lines = sanitized_lines(lines, src_contents)
1205 if not lines:
1206 return src_contents # Nothing to format
1207 dst_contents = _format_str_once(src_contents, mode=mode, lines=lines)
1208 # Forced second pass to work around optional trailing commas (becoming
1209 # forced trailing commas on pass 2) interacting differently with optional
1210 # parentheses. Admittedly ugly.
1211 if src_contents != dst_contents:
1212 if lines:
1213 lines = adjusted_lines(lines, src_contents, dst_contents)
1214 return _format_str_once(dst_contents, mode=mode, lines=lines)
1215 return dst_contents
1218def _format_str_once(
1219 src_contents: str, *, mode: Mode, lines: Collection[tuple[int, int]] = ()
1220) -> str:
1221 # Use the encoding overwrite since src_contents may contain a magic encoding
1222 # comment other than utf-8
1223 normalized_contents, _, newline_type = decode_bytes(
1224 src_contents.encode("utf-8"), mode, encoding_overwrite="utf-8"
1225 )
1227 src_node = lib2to3_parse(
1228 normalized_contents.lstrip(), target_versions=mode.target_versions
1229 )
1231 dst_blocks: list[LinesBlock] = []
1232 if mode.target_versions:
1233 versions = mode.target_versions
1234 else:
1235 future_imports = get_future_imports(src_node)
1236 versions = detect_target_versions(src_node, future_imports=future_imports)
1238 line_generation_features = {
1239 feature
1240 for feature in {
1241 Feature.PARENTHESIZED_CONTEXT_MANAGERS,
1242 Feature.UNPARENTHESIZED_EXCEPT_TYPES,
1243 Feature.T_STRINGS,
1244 }
1245 if supports_feature(versions, feature)
1246 }
1247 normalize_fmt_off(src_node, mode, lines)
1248 if lines:
1249 # This should be called after normalize_fmt_off.
1250 convert_unchanged_lines(src_node, lines)
1252 line_generator = LineGenerator(mode=mode, features=line_generation_features)
1253 elt = EmptyLineTracker(mode=mode)
1254 split_line_features = {
1255 feature
1256 for feature in {
1257 Feature.TRAILING_COMMA_IN_CALL,
1258 Feature.TRAILING_COMMA_IN_DEF,
1259 }
1260 if supports_feature(versions, feature)
1261 }
1262 block: LinesBlock | None = None
1263 for current_line in line_generator.visit(src_node):
1264 block = elt.maybe_empty_lines(current_line)
1265 dst_blocks.append(block)
1266 for line in transform_line(
1267 current_line, mode=mode, features=split_line_features
1268 ):
1269 block.content_lines.append(str(line))
1270 if dst_blocks:
1271 dst_blocks[-1].after = 0
1272 dst_contents = []
1273 for block in dst_blocks:
1274 dst_contents.extend(block.all_lines())
1275 if not dst_contents:
1276 if "\n" in normalized_contents:
1277 return newline_type
1278 return "".join(dst_contents).replace("\n", newline_type)
1281def decode_bytes(
1282 src: bytes, mode: Mode, *, encoding_overwrite: str | None = None
1283) -> tuple[FileContent, Encoding, NewLine]:
1284 """Return a tuple of (decoded_contents, encoding, newline).
1286 `newline` is either CRLF, LF, or CR, but `decoded_contents` is decoded with
1287 universal newlines (i.e. only contains LF).
1289 Use the keyword only encoding_overwrite argument if the bytes are encoded
1290 differently to their possible encoding magic comment.
1291 """
1292 srcbuf = io.BytesIO(src)
1294 # Still run detect_encoding even if an encoding overwrite is set, because
1295 # otherwise the lines used for newline detection might differ
1296 encoding, lines = tokenize.detect_encoding(srcbuf.readline)
1297 if encoding_overwrite is not None:
1298 encoding = encoding_overwrite
1300 if not lines:
1301 return "", encoding, "\n"
1303 if lines[0][-2:] == b"\r\n":
1304 if b"\r" in lines[0][:-2]:
1305 newline = "\r"
1306 else:
1307 newline = "\r\n"
1308 elif lines[0][-1:] == b"\n":
1309 if b"\r" in lines[0][:-1]:
1310 newline = "\r"
1311 else:
1312 newline = "\n"
1313 else:
1314 if b"\r" in lines[0]:
1315 newline = "\r"
1316 else:
1317 newline = "\n"
1319 srcbuf.seek(0)
1320 with io.TextIOWrapper(srcbuf, encoding) as tiow:
1321 return tiow.read(), encoding, newline
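# Illustrative: decode_bytes(b"x = 1\r\n", mode) returns ("x = 1\n", "utf-8", "\r\n"),
# i.e. the decoded contents use universal newlines while the original newline style is
# reported separately so it can be reproduced on write-back.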
1324def get_features_used(
1325 node: Node, *, future_imports: set[str] | None = None
1326) -> set[Feature]:
1327 """Return a set of (relatively) new Python features used in this file.
1329 Currently looking for:
1330 - f-strings;
1331 - self-documenting expressions in f-strings (f"{x=}");
1332 - underscores in numeric literals;
1333 - trailing commas after * or ** in function signatures and calls;
1334 - positional only arguments in function signatures and lambdas;
1335 - assignment expression;
1336 - relaxed decorator syntax;
1337 - usage of __future__ flags (annotations);
1338 - print / exec statements;
1339 - parenthesized context managers;
1340 - match statements;
1341 - except* clause;
1342 - variadic generics;
1343 """
1344 features: set[Feature] = set()
1345 if future_imports:
1346 features |= {
1347 FUTURE_FLAG_TO_FEATURE[future_import]
1348 for future_import in future_imports
1349 if future_import in FUTURE_FLAG_TO_FEATURE
1350 }
1352 for n in node.pre_order():
1353 if n.type == token.FSTRING_START:
1354 features.add(Feature.F_STRINGS)
1355 elif n.type == token.TSTRING_START:
1356 features.add(Feature.T_STRINGS)
1357 elif (
1358 n.type == token.RBRACE
1359 and n.parent is not None
1360 and any(child.type == token.EQUAL for child in n.parent.children)
1361 ):
1362 features.add(Feature.DEBUG_F_STRINGS)
1364 elif is_number_token(n):
1365 if "_" in n.value:
1366 features.add(Feature.NUMERIC_UNDERSCORES)
1368 elif n.type == token.SLASH:
1369 if n.parent and n.parent.type in {
1370 syms.typedargslist,
1371 syms.arglist,
1372 syms.varargslist,
1373 }:
1374 features.add(Feature.POS_ONLY_ARGUMENTS)
1376 elif n.type == token.COLONEQUAL:
1377 features.add(Feature.ASSIGNMENT_EXPRESSIONS)
1379 elif n.type == syms.decorator:
1380 if len(n.children) > 1 and not is_simple_decorator_expression(
1381 n.children[1]
1382 ):
1383 features.add(Feature.RELAXED_DECORATORS)
1385 elif (
1386 n.type in {syms.typedargslist, syms.arglist}
1387 and n.children
1388 and n.children[-1].type == token.COMMA
1389 ):
1390 if n.type == syms.typedargslist:
1391 feature = Feature.TRAILING_COMMA_IN_DEF
1392 else:
1393 feature = Feature.TRAILING_COMMA_IN_CALL
1395 for ch in n.children:
1396 if ch.type in STARS:
1397 features.add(feature)
1399 if ch.type == syms.argument:
1400 for argch in ch.children:
1401 if argch.type in STARS:
1402 features.add(feature)
1404 elif (
1405 n.type in {syms.return_stmt, syms.yield_expr}
1406 and len(n.children) >= 2
1407 and n.children[1].type == syms.testlist_star_expr
1408 and any(child.type == syms.star_expr for child in n.children[1].children)
1409 ):
1410 features.add(Feature.UNPACKING_ON_FLOW)
1412 elif (
1413 n.type == syms.annassign
1414 and len(n.children) >= 4
1415 and n.children[3].type == syms.testlist_star_expr
1416 ):
1417 features.add(Feature.ANN_ASSIGN_EXTENDED_RHS)
1419 elif (
1420 n.type == syms.with_stmt
1421 and len(n.children) > 2
1422 and n.children[1].type == syms.atom
1423 ):
1424 atom_children = n.children[1].children
1425 if (
1426 len(atom_children) == 3
1427 and atom_children[0].type == token.LPAR
1428 and _contains_asexpr(atom_children[1])
1429 and atom_children[2].type == token.RPAR
1430 ):
1431 features.add(Feature.PARENTHESIZED_CONTEXT_MANAGERS)
1433 elif n.type == syms.match_stmt:
1434 features.add(Feature.PATTERN_MATCHING)
1436 elif n.type in {syms.subscriptlist, syms.trailer} and any(
1437 child.type == syms.star_expr for child in n.children
1438 ):
1439 features.add(Feature.VARIADIC_GENERICS)
1441 elif (
1442 n.type == syms.tname_star
1443 and len(n.children) == 3
1444 and n.children[2].type == syms.star_expr
1445 ):
1446 features.add(Feature.VARIADIC_GENERICS)
1448 elif n.type in (syms.type_stmt, syms.typeparams):
1449 features.add(Feature.TYPE_PARAMS)
1451 elif (
1452 n.type in (syms.typevartuple, syms.paramspec, syms.typevar)
1453 and n.children[-2].type == token.EQUAL
1454 ):
1455 features.add(Feature.TYPE_PARAM_DEFAULTS)
1457 elif (
1458 n.type == syms.except_clause
1459 and len(n.children) >= 2
1460 and (
1461 n.children[1].type == token.STAR or n.children[1].type == syms.testlist
1462 )
1463 ):
1464 is_star_except = n.children[1].type == token.STAR
1466 if is_star_except:
1467 features.add(Feature.EXCEPT_STAR)
1469 # The presence of except* pushes the as-clause one index back
1470 has_as_clause = (
1471 len(n.children) >= is_star_except + 3
1472 and n.children[is_star_except + 2].type == token.NAME
1473 and n.children[is_star_except + 2].value == "as" # type: ignore
1474 )
1476 # If there's no 'as' clause and the except expression is a testlist.
1477 if not has_as_clause and (
1478 (is_star_except and n.children[2].type == syms.testlist)
1479 or (not is_star_except and n.children[1].type == syms.testlist)
1480 ):
1481 features.add(Feature.UNPARENTHESIZED_EXCEPT_TYPES)
1483 return features
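# Illustrative: source containing "if (n := len(data)) > 1_000:" would be reported with
# at least Feature.ASSIGNMENT_EXPRESSIONS and Feature.NUMERIC_UNDERSCORES.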
1486def _contains_asexpr(node: Node | Leaf) -> bool:
1487 """Return True if `node` contains an as-pattern."""
1488 if node.type == syms.asexpr_test:
1489 return True
1490 elif node.type == syms.atom:
1491 if (
1492 len(node.children) == 3
1493 and node.children[0].type == token.LPAR
1494 and node.children[2].type == token.RPAR
1495 ):
1496 return _contains_asexpr(node.children[1])
1497 elif node.type == syms.testlist_gexp:
1498 return any(_contains_asexpr(child) for child in node.children)
1499 return False
1502def detect_target_versions(
1503 node: Node, *, future_imports: set[str] | None = None
1504) -> set[TargetVersion]:
1505 """Detect the version to target based on the nodes used."""
1506 features = get_features_used(node, future_imports=future_imports)
1507 return {
1508 version for version in TargetVersion if features <= VERSION_TO_FEATURES[version]
1509 }
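# Illustrative: if the only "new" feature detected is the walrus operator, every
# TargetVersion from PY38 upward supports it, so all of those versions are returned
# and the pre-3.8 targets are ruled out.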
1512def get_future_imports(node: Node) -> set[str]:
1513 """Return a set of __future__ imports in the file."""
1514 imports: set[str] = set()
1516 def get_imports_from_children(children: list[LN]) -> Generator[str, None, None]:
1517 for child in children:
1518 if isinstance(child, Leaf):
1519 if child.type == token.NAME:
1520 yield child.value
1522 elif child.type == syms.import_as_name:
1523 orig_name = child.children[0]
1524 assert isinstance(orig_name, Leaf), "Invalid syntax parsing imports"
1525 assert orig_name.type == token.NAME, "Invalid syntax parsing imports"
1526 yield orig_name.value
1528 elif child.type == syms.import_as_names:
1529 yield from get_imports_from_children(child.children)
1531 else:
1532 raise AssertionError("Invalid syntax parsing imports")
1534 for child in node.children:
1535 if child.type != syms.simple_stmt:
1536 break
1538 first_child = child.children[0]
1539 if isinstance(first_child, Leaf):
1540 # Continue looking if we see a docstring; otherwise stop.
1541 if (
1542 len(child.children) == 2
1543 and first_child.type == token.STRING
1544 and child.children[1].type == token.NEWLINE
1545 ):
1546 continue
1548 break
1550 elif first_child.type == syms.import_from:
1551 module_name = first_child.children[1]
1552 if not isinstance(module_name, Leaf) or module_name.value != "__future__":
1553 break
1555 imports |= set(get_imports_from_children(first_child.children[3:]))
1556 else:
1557 break
1559 return imports
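# Illustrative: a module starting with 'from __future__ import annotations' yields
# {"annotations"}; the scan stops at the first statement that is neither a docstring
# nor a __future__ import.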
1562def _black_info() -> str:
1563 return (
1564 f"Black {__version__} on "
1565 f"Python ({platform.python_implementation()}) {platform.python_version()}"
1566 )
1569def assert_equivalent(src: str, dst: str) -> None:
1570 """Raise AssertionError if `src` and `dst` aren't equivalent."""
1571 try:
1572 src_ast = parse_ast(src)
1573 except Exception as exc:
1574 raise ASTSafetyError(
1575 "cannot use --safe with this file; failed to parse source file AST: "
1576 f"{exc}\n"
1577 "This could be caused by running Black with an older Python version "
1578 "that does not support new syntax used in your source file."
1579 ) from exc
1581 try:
1582 dst_ast = parse_ast(dst)
1583 except Exception as exc:
1584 log = dump_to_file("".join(traceback.format_tb(exc.__traceback__)), dst)
1585 raise ASTSafetyError(
1586 f"INTERNAL ERROR: {_black_info()} produced invalid code: {exc}. "
1587 "Please report a bug on https://github.com/psf/black/issues. "
1588 f"This invalid output might be helpful: {log}"
1589 ) from None
1591 src_ast_str = "\n".join(stringify_ast(src_ast))
1592 dst_ast_str = "\n".join(stringify_ast(dst_ast))
1593 if src_ast_str != dst_ast_str:
1594 log = dump_to_file(diff(src_ast_str, dst_ast_str, "src", "dst"))
1595 raise ASTSafetyError(
1596 f"INTERNAL ERROR: {_black_info()} produced code that is not equivalent to"
1597 " the source. Please report a bug on https://github.com/psf/black/issues."
1598 f" This diff might be helpful: {log}"
1599 ) from None
1602def assert_stable(
1603 src: str, dst: str, mode: Mode, *, lines: Collection[tuple[int, int]] = ()
1604) -> None:
1605 """Raise AssertionError if `dst` reformats differently the second time."""
1606 if lines:
1607 # Formatting specified lines requires `adjusted_lines` to map original lines
1608 # to the formatted lines before re-formatting the previously formatted result.
1609 # Due to a less-than-ideal diff algorithm, some edge cases produce incorrect new line
1610 # ranges. Hence for now, we skip the stable check.
1611 # See https://github.com/psf/black/issues/4033 for context.
1612 return
1613 # We shouldn't call format_str() here, because that formats the string
1614 # twice and may hide a bug where we bounce back and forth between two
1615 # versions.
1616 newdst = _format_str_once(dst, mode=mode, lines=lines)
1617 if dst != newdst:
1618 log = dump_to_file(
1619 str(mode),
1620 diff(src, dst, "source", "first pass"),
1621 diff(dst, newdst, "first pass", "second pass"),
1622 )
1623 raise AssertionError(
1624 f"INTERNAL ERROR: {_black_info()} produced different code on the second"
1625 " pass of the formatter. Please report a bug on"
1626 f" https://github.com/psf/black/issues. This diff might be helpful: {log}"
1627 ) from None
1630def patched_main() -> None:
1631 # PyInstaller patches multiprocessing to need freeze_support() even in non-Windows
1632 # environments, so just assume we always need to call it if frozen.
1633 if getattr(sys, "frozen", False):
1634 from multiprocessing import freeze_support
1636 freeze_support()
1638 main()
1641if __name__ == "__main__":
1642 patched_main()