Coverage for /pythoncovmergedfiles/medio/medio/usr/local/lib/python3.11/site-packages/black/__init__.py: 18%
import io
import json
import platform
import re
import sys
import tokenize
import traceback
from collections.abc import (
    Collection,
    Generator,
    MutableMapping,
    Sequence,
)
from contextlib import nullcontext
from dataclasses import replace
from datetime import datetime, timezone
from enum import Enum
from json.decoder import JSONDecodeError
from pathlib import Path
from re import Pattern
from typing import Any

import click
from click.core import ParameterSource
from mypy_extensions import mypyc_attr
from pathspec import PathSpec
from pathspec.patterns.gitwildmatch import GitWildMatchPatternError

from _black_version import version as __version__
from black.cache import Cache
from black.comments import normalize_fmt_off
from black.const import (
    DEFAULT_EXCLUDES,
    DEFAULT_INCLUDES,
    DEFAULT_LINE_LENGTH,
    STDIN_PLACEHOLDER,
)
from black.files import (
    best_effort_relative_path,
    find_project_root,
    find_pyproject_toml,
    find_user_pyproject_toml,
    gen_python_files,
    get_gitignore,
    parse_pyproject_toml,
    path_is_excluded,
    resolves_outside_root_or_cannot_stat,
    wrap_stream_for_windows,
)
from black.handle_ipynb_magics import (
    PYTHON_CELL_MAGICS,
    jupyter_dependencies_are_installed,
    mask_cell,
    put_trailing_semicolon_back,
    remove_trailing_semicolon,
    unmask_cell,
    validate_cell,
)
from black.linegen import LN, LineGenerator, transform_line
from black.lines import EmptyLineTracker, LinesBlock
from black.mode import FUTURE_FLAG_TO_FEATURE, VERSION_TO_FEATURES, Feature
from black.mode import Mode as Mode  # re-exported
from black.mode import Preview, TargetVersion, supports_feature
from black.nodes import STARS, is_number_token, is_simple_decorator_expression, syms
from black.output import color_diff, diff, dump_to_file, err, ipynb_diff, out
from black.parsing import (  # noqa F401
    ASTSafetyError,
    InvalidInput,
    lib2to3_parse,
    parse_ast,
    stringify_ast,
)
from black.ranges import (
    adjusted_lines,
    convert_unchanged_lines,
    parse_line_ranges,
    sanitized_lines,
)
from black.report import Changed, NothingChanged, Report
from blib2to3.pgen2 import token
from blib2to3.pytree import Leaf, Node

COMPILED = Path(__file__).suffix in (".pyd", ".so")

# types
FileContent = str
Encoding = str
NewLine = str


class WriteBack(Enum):
    NO = 0
    YES = 1
    DIFF = 2
    CHECK = 3
    COLOR_DIFF = 4

    @classmethod
    def from_configuration(
        cls, *, check: bool, diff: bool, color: bool = False
    ) -> "WriteBack":
        if check and not diff:
            return cls.CHECK

        if diff and color:
            return cls.COLOR_DIFF

        return cls.DIFF if diff else cls.YES


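# Illustrative sketch (not part of the module's API surface): how the CLI flags
# map onto WriteBack members via from_configuration(). The values shown assume
# the enum definition above.
#
#     >>> WriteBack.from_configuration(check=True, diff=False)
#     <WriteBack.CHECK: 3>
#     >>> WriteBack.from_configuration(check=True, diff=True)
#     <WriteBack.DIFF: 2>
#     >>> WriteBack.from_configuration(check=False, diff=True, color=True)
#     <WriteBack.COLOR_DIFF: 4>
#     >>> WriteBack.from_configuration(check=False, diff=False)
#     <WriteBack.YES: 1>

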
# Legacy name, left for integrations.
FileMode = Mode


def read_pyproject_toml(
    ctx: click.Context, param: click.Parameter, value: str | None
) -> str | None:
    """Inject Black configuration from "pyproject.toml" into defaults in `ctx`.

    Returns the path to a successfully found and read configuration file, None
    otherwise.
    """
    if not value:
        value = find_pyproject_toml(
            ctx.params.get("src", ()), ctx.params.get("stdin_filename", None)
        )
        if value is None:
            return None

    try:
        config = parse_pyproject_toml(value)
    except (OSError, ValueError) as e:
        raise click.FileError(
            filename=value, hint=f"Error reading configuration file: {e}"
        ) from None

    if not config:
        return None
    else:
        spellcheck_pyproject_toml_keys(ctx, list(config), value)
        # Sanitize the values to be Click friendly. For more information please see:
        # https://github.com/psf/black/issues/1458
        # https://github.com/pallets/click/issues/1567
        config = {
            k: str(v) if not isinstance(v, (list, dict)) else v
            for k, v in config.items()
        }

    target_version = config.get("target_version")
    if target_version is not None and not isinstance(target_version, list):
        raise click.BadOptionUsage(
            "target-version", "Config key target-version must be a list"
        )

    exclude = config.get("exclude")
    if exclude is not None and not isinstance(exclude, str):
        raise click.BadOptionUsage("exclude", "Config key exclude must be a string")

    extend_exclude = config.get("extend_exclude")
    if extend_exclude is not None and not isinstance(extend_exclude, str):
        raise click.BadOptionUsage(
            "extend-exclude", "Config key extend-exclude must be a string"
        )

    line_ranges = config.get("line_ranges")
    if line_ranges is not None:
        raise click.BadOptionUsage(
            "line-ranges", "Cannot use line-ranges in the pyproject.toml file."
        )

    default_map: dict[str, Any] = {}
    if ctx.default_map:
        default_map.update(ctx.default_map)
    default_map.update(config)

    ctx.default_map = default_map
    return value


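# Illustrative sketch (hypothetical pyproject.toml contents): a table such as
#
#     [tool.black]
#     line-length = 100
#     target-version = ["py311"]
#
# is parsed by parse_pyproject_toml() into {"line_length": 100,
# "target_version": ["py311"]}, sanitized above into {"line_length": "100",
# "target_version": ["py311"]} (scalars become strings, lists and dicts stay
# as they are), and merged into ctx.default_map so Click treats the values as
# option defaults.

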
def spellcheck_pyproject_toml_keys(
    ctx: click.Context, config_keys: list[str], config_file_path: str
) -> None:
    invalid_keys: list[str] = []
    available_config_options = {param.name for param in ctx.command.params}
    invalid_keys = [key for key in config_keys if key not in available_config_options]
    if invalid_keys:
        keys_str = ", ".join(map(repr, invalid_keys))
        out(
            f"Invalid config keys detected: {keys_str} (in {config_file_path})",
            fg="red",
        )


def target_version_option_callback(
    c: click.Context, p: click.Option | click.Parameter, v: tuple[str, ...]
) -> list[TargetVersion]:
    """Compute the target versions from a --target-version flag.

    This is its own function because mypy couldn't infer the type correctly
    when it was a lambda, causing mypyc trouble.
    """
    return [TargetVersion[val.upper()] for val in v]


def enable_unstable_feature_callback(
    c: click.Context, p: click.Option | click.Parameter, v: tuple[str, ...]
) -> list[Preview]:
    """Compute the features from an --enable-unstable-feature flag."""
    return [Preview[val] for val in v]


def re_compile_maybe_verbose(regex: str) -> Pattern[str]:
    """Compile a regular expression string in `regex`.

    If it contains newlines, use verbose mode.
    """
    if "\n" in regex:
        regex = "(?x)" + regex
    compiled: Pattern[str] = re.compile(regex)
    return compiled


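# Illustrative sketch: a multi-line exclude regex picks up re.VERBOSE via the
# "(?x)" prefix, so whitespace and "#" comments inside it are ignored.
#
#     >>> pattern = re_compile_maybe_verbose("foo/  # generated code\n|bar/")
#     >>> bool(pattern.search("a/foo/b")), bool(pattern.search("a/baz/b"))
#     (True, False)

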
def validate_regex(
    ctx: click.Context,
    param: click.Parameter,
    value: str | None,
) -> Pattern[str] | None:
    try:
        return re_compile_maybe_verbose(value) if value is not None else None
    except re.error as e:
        raise click.BadParameter(f"Not a valid regular expression: {e}") from None


@click.command(
    context_settings={"help_option_names": ["-h", "--help"]},
    # While Click does set this field automatically using the docstring, mypyc
    # (annoyingly) strips 'em so we need to set it here too.
    help="The uncompromising code formatter.",
)
@click.option("-c", "--code", type=str, help="Format the code passed in as a string.")
@click.option(
    "-l",
    "--line-length",
    type=int,
    default=DEFAULT_LINE_LENGTH,
    help="How many characters per line to allow.",
    show_default=True,
)
@click.option(
    "-t",
    "--target-version",
    type=click.Choice([v.name.lower() for v in TargetVersion]),
    callback=target_version_option_callback,
    multiple=True,
    help=(
        "Python versions that should be supported by Black's output. You should"
        " include all versions that your code supports. By default, Black will infer"
        " target versions from the project metadata in pyproject.toml. If this does"
        " not yield conclusive results, Black will use per-file auto-detection."
    ),
)
@click.option(
    "--pyi",
    is_flag=True,
    help=(
        "Format all input files like typing stubs regardless of file extension. This"
        " is useful when piping source on standard input."
    ),
)
@click.option(
    "--ipynb",
    is_flag=True,
    help=(
        "Format all input files like Jupyter Notebooks regardless of file extension."
        " This is useful when piping source on standard input."
    ),
)
@click.option(
    "--python-cell-magics",
    multiple=True,
    help=(
        "When processing Jupyter Notebooks, add the given magic to the list"
        f" of known python-magics ({', '.join(sorted(PYTHON_CELL_MAGICS))})."
        " Useful for formatting cells with custom python magics."
    ),
    default=[],
)
@click.option(
    "-x",
    "--skip-source-first-line",
    is_flag=True,
    help="Skip the first line of the source code.",
)
@click.option(
    "-S",
    "--skip-string-normalization",
    is_flag=True,
    help="Don't normalize string quotes or prefixes.",
)
@click.option(
    "-C",
    "--skip-magic-trailing-comma",
    is_flag=True,
    help="Don't use trailing commas as a reason to split lines.",
)
@click.option(
    "--preview",
    is_flag=True,
    help=(
        "Enable potentially disruptive style changes that may be added to Black's main"
        " functionality in the next major release."
    ),
)
@click.option(
    "--unstable",
    is_flag=True,
    help=(
        "Enable potentially disruptive style changes that have known bugs or are not"
        " currently expected to make it into the stable style in Black's next major"
        " release. Implies --preview."
    ),
)
@click.option(
    "--enable-unstable-feature",
    type=click.Choice([v.name for v in Preview]),
    callback=enable_unstable_feature_callback,
    multiple=True,
    help=(
        "Enable specific features included in the `--unstable` style. Requires"
        " `--preview`. No compatibility guarantees are provided on the behavior"
        " or existence of any unstable features."
    ),
)
@click.option(
    "--check",
    is_flag=True,
    help=(
        "Don't write the files back, just return the status. Return code 0 means"
        " nothing would change. Return code 1 means some files would be reformatted."
        " Return code 123 means there was an internal error."
    ),
)
@click.option(
    "--diff",
    is_flag=True,
    help=(
        "Don't write the files back, just output a diff to indicate what changes"
        " Black would've made. They are printed to stdout so capturing them is simple."
    ),
)
@click.option(
    "--color/--no-color",
    is_flag=True,
    help="Show (or do not show) colored diff. Only applies when --diff is given.",
)
@click.option(
    "--line-ranges",
    multiple=True,
    metavar="START-END",
    help=(
        "When specified, Black will try its best to only format these lines. This"
        " option can be specified multiple times, and a union of the lines will be"
        " formatted. Each range must be specified as two integers connected by a `-`:"
        " `<START>-<END>`. The `<START>` and `<END>` integer indices are 1-based and"
        " inclusive on both ends."
    ),
    default=(),
)
@click.option(
    "--fast/--safe",
    is_flag=True,
    help=(
        "By default, Black performs an AST safety check after formatting your code."
        " The --fast flag turns off this check and the --safe flag explicitly enables"
        " it. [default: --safe]"
    ),
)
@click.option(
    "--required-version",
    type=str,
    help=(
        "Require a specific version of Black to be running. This is useful for"
        " ensuring that all contributors to your project are using the same"
        " version, because different versions of Black may format code a little"
        " differently. This option can be set in a configuration file for consistent"
        " results across environments."
    ),
)
@click.option(
    "--exclude",
    type=str,
    callback=validate_regex,
    help=(
        "A regular expression that matches files and directories that should be"
        " excluded on recursive searches. An empty value means no paths are excluded."
        " Use forward slashes for directories on all platforms (Windows, too)."
        " By default, Black also ignores all paths listed in .gitignore. Changing this"
        f" value will override all default exclusions. [default: {DEFAULT_EXCLUDES}]"
    ),
    show_default=False,
)
@click.option(
    "--extend-exclude",
    type=str,
    callback=validate_regex,
    help=(
        "Like --exclude, but adds additional files and directories on top of the"
        " default values instead of overriding them."
    ),
)
@click.option(
    "--force-exclude",
    type=str,
    callback=validate_regex,
    help=(
        "Like --exclude, but files and directories matching this regex will be excluded"
        " even when they are passed explicitly as arguments. This is useful when"
        " invoking Black programmatically on changed files, such as in a pre-commit"
        " hook or editor plugin."
    ),
)
@click.option(
    "--stdin-filename",
    type=str,
    is_eager=True,
    help=(
        "The name of the file when passing it through stdin. Useful to make sure Black"
        " will respect the --force-exclude option on some editors that rely on using"
        " stdin."
    ),
)
@click.option(
    "--include",
    type=str,
    default=DEFAULT_INCLUDES,
    callback=validate_regex,
    help=(
        "A regular expression that matches files and directories that should be"
        " included on recursive searches. An empty value means all files are included"
        " regardless of the name. Use forward slashes for directories on all platforms"
        " (Windows, too). Overrides all exclusions, including from .gitignore and"
        " command line options."
    ),
    show_default=True,
)
@click.option(
    "-W",
    "--workers",
    type=click.IntRange(min=1),
    default=None,
    help=(
        "When Black formats multiple files, it may use a process pool to speed up"
        " formatting. This option controls the number of parallel workers. This can"
        " also be specified via the BLACK_NUM_WORKERS environment variable. Defaults"
        " to the number of CPUs in the system."
    ),
)
@click.option(
    "-q",
    "--quiet",
    is_flag=True,
    help=(
        "Stop emitting all non-critical output. Error messages will still be emitted"
        " (which can be silenced by 2>/dev/null)."
    ),
)
@click.option(
    "-v",
    "--verbose",
    is_flag=True,
    help=(
        "Emit messages about files that were not changed or were ignored due to"
        " exclusion patterns. If Black is using a configuration file, a message"
        " detailing which one it is using will be emitted."
    ),
)
@click.version_option(
    version=__version__,
    message=(
        f"%(prog)s, %(version)s (compiled: {'yes' if COMPILED else 'no'})\n"
        f"Python ({platform.python_implementation()}) {platform.python_version()}"
    ),
)
@click.argument(
    "src",
    nargs=-1,
    type=click.Path(
        exists=True, file_okay=True, dir_okay=True, readable=True, allow_dash=True
    ),
    is_eager=True,
    metavar="SRC ...",
)
@click.option(
    "--config",
    type=click.Path(
        exists=True,
        file_okay=True,
        dir_okay=False,
        readable=True,
        allow_dash=False,
        path_type=str,
    ),
    is_eager=True,
    callback=read_pyproject_toml,
    help="Read configuration options from a configuration file.",
)
@click.option(
    "--no-cache",
    is_flag=True,
    help=(
        "Skip reading and writing the cache, forcing Black to reformat all"
        " included files."
    ),
)
@click.pass_context
def main(
    ctx: click.Context,
    code: str | None,
    line_length: int,
    target_version: list[TargetVersion],
    check: bool,
    diff: bool,
    line_ranges: Sequence[str],
    color: bool,
    fast: bool,
    pyi: bool,
    ipynb: bool,
    python_cell_magics: Sequence[str],
    skip_source_first_line: bool,
    skip_string_normalization: bool,
    skip_magic_trailing_comma: bool,
    preview: bool,
    unstable: bool,
    enable_unstable_feature: list[Preview],
    quiet: bool,
    verbose: bool,
    required_version: str | None,
    include: Pattern[str],
    exclude: Pattern[str] | None,
    extend_exclude: Pattern[str] | None,
    force_exclude: Pattern[str] | None,
    stdin_filename: str | None,
    workers: int | None,
    src: tuple[str, ...],
    config: str | None,
    no_cache: bool,
) -> None:
    """The uncompromising code formatter."""
    ctx.ensure_object(dict)

    assert sys.version_info >= (3, 10), "Black requires Python 3.10+"
    if sys.version_info[:3] == (3, 12, 5):
        out(
            "Python 3.12.5 has a memory safety issue that can cause Black's "
            "AST safety checks to fail. "
            "Please upgrade to Python 3.12.6 or downgrade to Python 3.12.4"
        )
        ctx.exit(1)

    if src and code is not None:
        out(
            main.get_usage(ctx)
            + "\n\n'SRC' and 'code' cannot be passed simultaneously."
        )
        ctx.exit(1)
    if not src and code is None:
        out(main.get_usage(ctx) + "\n\nOne of 'SRC' or 'code' is required.")
        ctx.exit(1)

    # It doesn't do anything if --unstable is also passed, so just allow it.
    if enable_unstable_feature and not (preview or unstable):
        out(
            main.get_usage(ctx)
            + "\n\n'--enable-unstable-feature' requires '--preview'."
        )
        ctx.exit(1)

    root, method = (
        find_project_root(src, stdin_filename) if code is None else (None, None)
    )
    ctx.obj["root"] = root

    if verbose:
        if root:
            out(
                f"Identified `{root}` as project root containing a {method}.",
                fg="blue",
            )

        if config:
            config_source = ctx.get_parameter_source("config")
            user_level_config = str(find_user_pyproject_toml())
            if config == user_level_config:
                out(
                    "Using configuration from user-level config at "
                    f"'{user_level_config}'.",
                    fg="blue",
                )
            elif config_source in (
                ParameterSource.DEFAULT,
                ParameterSource.DEFAULT_MAP,
            ):
                out("Using configuration from project root.", fg="blue")
            else:
                out(f"Using configuration in '{config}'.", fg="blue")
            if ctx.default_map:
                for param, value in ctx.default_map.items():
                    out(f"{param}: {value}")

    error_msg = "Oh no! 💥 💔 💥"
    if (
        required_version
        and required_version != __version__
        and required_version != __version__.split(".")[0]
    ):
        err(
            f"{error_msg} The required version `{required_version}` does not match"
            f" the running version `{__version__}`!"
        )
        ctx.exit(1)
    if ipynb and pyi:
        err("Cannot pass both `pyi` and `ipynb` flags!")
        ctx.exit(1)

    write_back = WriteBack.from_configuration(check=check, diff=diff, color=color)
    if target_version:
        versions = set(target_version)
    else:
        # We'll autodetect later.
        versions = set()
    mode = Mode(
        target_versions=versions,
        line_length=line_length,
        is_pyi=pyi,
        is_ipynb=ipynb,
        skip_source_first_line=skip_source_first_line,
        string_normalization=not skip_string_normalization,
        magic_trailing_comma=not skip_magic_trailing_comma,
        preview=preview,
        unstable=unstable,
        python_cell_magics=set(python_cell_magics),
        enabled_features=set(enable_unstable_feature),
    )

    lines: list[tuple[int, int]] = []
    if line_ranges:
        if ipynb:
            err("Cannot use --line-ranges with ipynb files.")
            ctx.exit(1)

        try:
            lines = parse_line_ranges(line_ranges)
        except ValueError as e:
            err(str(e))
            ctx.exit(1)

    if code is not None:
        # Run in quiet mode by default with -c; the extra output isn't useful.
        # You can still pass -v to get verbose output.
        quiet = True

    report = Report(check=check, diff=diff, quiet=quiet, verbose=verbose)

    if code is not None:
        reformat_code(
            content=code,
            fast=fast,
            write_back=write_back,
            mode=mode,
            report=report,
            lines=lines,
        )
    else:
        assert root is not None  # root is only None if code is not None
        try:
            sources = get_sources(
                root=root,
                src=src,
                quiet=quiet,
                verbose=verbose,
                include=include,
                exclude=exclude,
                extend_exclude=extend_exclude,
                force_exclude=force_exclude,
                report=report,
                stdin_filename=stdin_filename,
            )
        except GitWildMatchPatternError:
            ctx.exit(1)

        if not sources:
            if verbose or not quiet:
                out("No Python files are present to be formatted. Nothing to do 😴")
            if "-" in src:
                sys.stdout.write(sys.stdin.read())
            ctx.exit(0)

        if len(sources) == 1:
            reformat_one(
                src=sources.pop(),
                fast=fast,
                write_back=write_back,
                mode=mode,
                report=report,
                lines=lines,
                no_cache=no_cache,
            )
        else:
            from black.concurrency import reformat_many

            if lines:
                err("Cannot use --line-ranges to format multiple files.")
                ctx.exit(1)
            reformat_many(
                sources=sources,
                fast=fast,
                write_back=write_back,
                mode=mode,
                report=report,
                workers=workers,
                no_cache=no_cache,
            )

    if verbose or not quiet:
        if code is None and (verbose or report.change_count or report.failure_count):
            out()
        out(error_msg if report.return_code else "All done! ✨ 🍰 ✨")
        if code is None:
            click.echo(str(report), err=True)
    ctx.exit(report.return_code)


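# Illustrative usage of the CLI defined above (all flags come from the click
# options declared on main(); paths are placeholders):
#
#     black src/ tests/                  # reformat in place
#     black --check --diff --color src/  # only report and show a colored diff
#     black -c "def f( ): pass"          # format a code string passed via -c
#     echo 'x=1' | black - --quiet       # read from stdin, write result to stdout

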
def get_sources(
    *,
    root: Path,
    src: tuple[str, ...],
    quiet: bool,
    verbose: bool,
    include: Pattern[str],
    exclude: Pattern[str] | None,
    extend_exclude: Pattern[str] | None,
    force_exclude: Pattern[str] | None,
    report: "Report",
    stdin_filename: str | None,
) -> set[Path]:
    """Compute the set of files to be formatted."""
    sources: set[Path] = set()

    assert root.is_absolute(), f"INTERNAL ERROR: `root` must be absolute but is {root}"
    using_default_exclude = exclude is None
    exclude = re_compile_maybe_verbose(DEFAULT_EXCLUDES) if exclude is None else exclude
    gitignore: dict[Path, PathSpec] | None = None
    root_gitignore = get_gitignore(root)

    for s in src:
        if s == "-" and stdin_filename:
            path = Path(stdin_filename)
            if path_is_excluded(stdin_filename, force_exclude):
                report.path_ignored(
                    path,
                    "--stdin-filename matches the --force-exclude regular expression",
                )
                continue
            is_stdin = True
        else:
            path = Path(s)
            is_stdin = False

        # Compare the logic here to the logic in `gen_python_files`.
        if is_stdin or path.is_file():
            if resolves_outside_root_or_cannot_stat(path, root, report):
                if verbose:
                    out(f'Skipping invalid source: "{path}"', fg="red")
                continue

            root_relative_path = best_effort_relative_path(path, root).as_posix()
            root_relative_path = "/" + root_relative_path

            # Hard-exclude any files that match the `--force-exclude` regex.
            if path_is_excluded(root_relative_path, force_exclude):
                report.path_ignored(
                    path, "matches the --force-exclude regular expression"
                )
                continue

            if is_stdin:
                path = Path(f"{STDIN_PLACEHOLDER}{path}")

            if path.suffix == ".ipynb" and not jupyter_dependencies_are_installed(
                warn=verbose or not quiet
            ):
                continue

            if verbose:
                out(f'Found input source: "{path}"', fg="blue")
            sources.add(path)
        elif path.is_dir():
            path = root / (path.resolve().relative_to(root))
            if verbose:
                out(f'Found input source directory: "{path}"', fg="blue")

            if using_default_exclude:
                gitignore = {
                    root: root_gitignore,
                    path: get_gitignore(path),
                }
            sources.update(
                gen_python_files(
                    path.iterdir(),
                    root,
                    include,
                    exclude,
                    extend_exclude,
                    force_exclude,
                    report,
                    gitignore,
                    verbose=verbose,
                    quiet=quiet,
                )
            )
        elif s == "-":
            if verbose:
                out("Found input source stdin", fg="blue")
            sources.add(path)
        else:
            err(f"invalid path: {s}")

    return sources


def reformat_code(
    content: str,
    fast: bool,
    write_back: WriteBack,
    mode: Mode,
    report: Report,
    *,
    lines: Collection[tuple[int, int]] = (),
) -> None:
    """
    Reformat and print out `content` without spawning child processes.
    Similar to `reformat_one`, but for string content.

    `fast`, `write_back`, and `mode` options are passed to
    :func:`format_file_in_place` or :func:`format_stdin_to_stdout`.
    """
    path = Path("<string>")
    try:
        changed = Changed.NO
        if format_stdin_to_stdout(
            content=content, fast=fast, write_back=write_back, mode=mode, lines=lines
        ):
            changed = Changed.YES
        report.done(path, changed)
    except Exception as exc:
        if report.verbose:
            traceback.print_exc()
        report.failed(path, str(exc))


# diff-shades depends on being able to monkeypatch this function to operate. I know
# it's not ideal, but this shouldn't cause any issues ... hopefully. ~ichard26
@mypyc_attr(patchable=True)
def reformat_one(
    src: Path,
    fast: bool,
    write_back: WriteBack,
    mode: Mode,
    report: "Report",
    *,
    lines: Collection[tuple[int, int]] = (),
    no_cache: bool = False,
) -> None:
    """Reformat a single file under `src` without spawning child processes.

    `fast`, `write_back`, and `mode` options are passed to
    :func:`format_file_in_place` or :func:`format_stdin_to_stdout`.
    """
    try:
        changed = Changed.NO

        if str(src) == "-":
            is_stdin = True
        elif str(src).startswith(STDIN_PLACEHOLDER):
            is_stdin = True
            # Use the original name again in case we want to print something
            # to the user
            src = Path(str(src)[len(STDIN_PLACEHOLDER) :])
        else:
            is_stdin = False

        if is_stdin:
            if src.suffix == ".pyi":
                mode = replace(mode, is_pyi=True)
            elif src.suffix == ".ipynb":
                mode = replace(mode, is_ipynb=True)
            if format_stdin_to_stdout(
                fast=fast, write_back=write_back, mode=mode, lines=lines
            ):
                changed = Changed.YES
        else:
            cache = None if no_cache else Cache.read(mode)
            if cache is not None and write_back not in (
                WriteBack.DIFF,
                WriteBack.COLOR_DIFF,
            ):
                if not cache.is_changed(src):
                    changed = Changed.CACHED
            if changed is not Changed.CACHED and format_file_in_place(
                src, fast=fast, write_back=write_back, mode=mode, lines=lines
            ):
                changed = Changed.YES
            if cache is not None and (
                (write_back is WriteBack.YES and changed is not Changed.CACHED)
                or (write_back is WriteBack.CHECK and changed is Changed.NO)
            ):
                cache.write([src])
        report.done(src, changed)
    except Exception as exc:
        if report.verbose:
            traceback.print_exc()
        report.failed(src, str(exc))


def format_file_in_place(
    src: Path,
    fast: bool,
    mode: Mode,
    write_back: WriteBack = WriteBack.NO,
    lock: Any = None,  # multiprocessing.Manager().Lock() is some crazy proxy
    *,
    lines: Collection[tuple[int, int]] = (),
) -> bool:
    """Format file under `src` path. Return True if changed.

    If `write_back` is DIFF, write a diff to stdout. If it is YES, write reformatted
    code to the file.
    `mode` and `fast` options are passed to :func:`format_file_contents`.
    """
    if src.suffix == ".pyi":
        mode = replace(mode, is_pyi=True)
    elif src.suffix == ".ipynb":
        mode = replace(mode, is_ipynb=True)

    then = datetime.fromtimestamp(src.stat().st_mtime, timezone.utc)
    header = b""
    with open(src, "rb") as buf:
        if mode.skip_source_first_line:
            header = buf.readline()
        src_contents, encoding, newline = decode_bytes(buf.read(), mode)
    try:
        dst_contents = format_file_contents(
            src_contents, fast=fast, mode=mode, lines=lines
        )
    except NothingChanged:
        return False
    except JSONDecodeError:
        raise ValueError(
            f"File '{src}' cannot be parsed as valid Jupyter notebook."
        ) from None
    src_contents = header.decode(encoding) + src_contents
    dst_contents = header.decode(encoding) + dst_contents

    if write_back == WriteBack.YES:
        with open(src, "w", encoding=encoding, newline=newline) as f:
            f.write(dst_contents)
    elif write_back in (WriteBack.DIFF, WriteBack.COLOR_DIFF):
        now = datetime.now(timezone.utc)
        src_name = f"{src}\t{then}"
        dst_name = f"{src}\t{now}"
        if mode.is_ipynb:
            diff_contents = ipynb_diff(src_contents, dst_contents, src_name, dst_name)
        else:
            diff_contents = diff(src_contents, dst_contents, src_name, dst_name)

        if write_back == WriteBack.COLOR_DIFF:
            diff_contents = color_diff(diff_contents)

        with lock or nullcontext():
            f = io.TextIOWrapper(
                sys.stdout.buffer,
                encoding=encoding,
                newline=newline,
                write_through=True,
            )
            f = wrap_stream_for_windows(f)
            f.write(diff_contents)
            f.detach()

    return True


def format_stdin_to_stdout(
    fast: bool,
    *,
    content: str | None = None,
    write_back: WriteBack = WriteBack.NO,
    mode: Mode,
    lines: Collection[tuple[int, int]] = (),
) -> bool:
    """Format file on stdin. Return True if changed.

    If content is None, it's read from sys.stdin.

    If `write_back` is YES, write reformatted code back to stdout. If it is DIFF,
    write a diff to stdout. The `mode` argument is passed to
    :func:`format_file_contents`.
    """
    then = datetime.now(timezone.utc)

    if content is None:
        src, encoding, newline = decode_bytes(sys.stdin.buffer.read(), mode)
    elif Preview.normalize_cr_newlines in mode:
        src, encoding, newline = content, "utf-8", "\n"
    else:
        src, encoding, newline = content, "utf-8", ""

    dst = src
    try:
        dst = format_file_contents(src, fast=fast, mode=mode, lines=lines)
        return True

    except NothingChanged:
        return False

    finally:
        f = io.TextIOWrapper(
            sys.stdout.buffer, encoding=encoding, newline=newline, write_through=True
        )
        if write_back == WriteBack.YES:
            # Make sure there's a newline after the content
            if Preview.normalize_cr_newlines in mode:
                if dst and dst[-1] != "\n" and dst[-1] != "\r":
                    dst += newline
            else:
                if dst and dst[-1] != "\n":
                    dst += "\n"
            f.write(dst)
        elif write_back in (WriteBack.DIFF, WriteBack.COLOR_DIFF):
            now = datetime.now(timezone.utc)
            src_name = f"STDIN\t{then}"
            dst_name = f"STDOUT\t{now}"
            d = diff(src, dst, src_name, dst_name)
            if write_back == WriteBack.COLOR_DIFF:
                d = color_diff(d)
                f = wrap_stream_for_windows(f)
            f.write(d)
            f.detach()


def check_stability_and_equivalence(
    src_contents: str,
    dst_contents: str,
    *,
    mode: Mode,
    lines: Collection[tuple[int, int]] = (),
) -> None:
    """Perform stability and equivalence checks.

    Raise AssertionError if source and destination contents are not
    equivalent, or if a second pass of the formatter would format the
    content differently.
    """
    assert_equivalent(src_contents, dst_contents)
    assert_stable(src_contents, dst_contents, mode=mode, lines=lines)


def format_file_contents(
    src_contents: str,
    *,
    fast: bool,
    mode: Mode,
    lines: Collection[tuple[int, int]] = (),
) -> FileContent:
    """Reformat contents of a file and return new contents.

    If `fast` is False, additionally confirm that the reformatted code is
    valid by calling :func:`assert_equivalent` and :func:`assert_stable` on it.
    `mode` is passed to :func:`format_str`.
    """
    if mode.is_ipynb:
        dst_contents = format_ipynb_string(src_contents, fast=fast, mode=mode)
    else:
        dst_contents = format_str(src_contents, mode=mode, lines=lines)
    if src_contents == dst_contents:
        raise NothingChanged

    if not fast and not mode.is_ipynb:
        # Jupyter notebooks will already have been checked above.
        check_stability_and_equivalence(
            src_contents, dst_contents, mode=mode, lines=lines
        )
    return dst_contents


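# Illustrative sketch: format_file_contents() either returns new contents or
# signals "no work" by raising NothingChanged, which callers above translate
# into Changed.NO / a False return value.
#
#     >>> format_file_contents("x=1\n", fast=True, mode=Mode())
#     'x = 1\n'
#     >>> format_file_contents("x = 1\n", fast=True, mode=Mode())
#     Traceback (most recent call last):
#         ...
#     black.report.NothingChanged

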
def format_cell(src: str, *, fast: bool, mode: Mode) -> str:
    """Format code in given cell of Jupyter notebook.

    General idea is:

      - if cell has trailing semicolon, remove it;
      - if cell has IPython magics, mask them;
      - format cell;
      - reinstate IPython magics;
      - reinstate trailing semicolon (if originally present);
      - strip trailing newlines.

    Cells with syntax errors will not be processed, as they
    could potentially be automagics or multi-line magics, which
    are currently not supported.
    """
    validate_cell(src, mode)
    src_without_trailing_semicolon, has_trailing_semicolon = remove_trailing_semicolon(
        src
    )
    try:
        masked_src, replacements = mask_cell(src_without_trailing_semicolon)
    except SyntaxError:
        raise NothingChanged from None
    masked_dst = format_str(masked_src, mode=mode)
    if not fast:
        check_stability_and_equivalence(masked_src, masked_dst, mode=mode)
    dst_without_trailing_semicolon = unmask_cell(masked_dst, replacements)
    dst = put_trailing_semicolon_back(
        dst_without_trailing_semicolon, has_trailing_semicolon
    )
    dst = dst.rstrip("\n")
    if dst == src:
        raise NothingChanged from None
    return dst


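# Illustrative sketch (requires Black's "jupyter" extra to be installed): a line
# magic is masked, the remaining Python is formatted, and the magic plus the
# trailing semicolon are restored, roughly:
#
#     format_cell("%matplotlib inline\nx=1;", fast=True, mode=Mode(is_ipynb=True))
#     # -> '%matplotlib inline\nx = 1;'

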
def validate_metadata(nb: MutableMapping[str, Any]) -> None:
    """If notebook is marked as non-Python, don't format it.

    All notebook metadata fields are optional, see
    https://nbformat.readthedocs.io/en/latest/format_description.html. So
    if a notebook has empty metadata, we will try to parse it anyway.
    """
    language = nb.get("metadata", {}).get("language_info", {}).get("name", None)
    if language is not None and language != "python":
        raise NothingChanged from None


def format_ipynb_string(src_contents: str, *, fast: bool, mode: Mode) -> FileContent:
    """Format Jupyter notebook.

    Operate cell-by-cell, only on code cells, only for Python notebooks.
    If the ``.ipynb`` originally had a trailing newline, it'll be preserved.
    """
    if not src_contents:
        raise NothingChanged

    trailing_newline = src_contents[-1] == "\n"
    modified = False
    nb = json.loads(src_contents)
    validate_metadata(nb)
    for cell in nb["cells"]:
        if cell.get("cell_type", None) == "code":
            try:
                src = "".join(cell["source"])
                dst = format_cell(src, fast=fast, mode=mode)
            except NothingChanged:
                pass
            else:
                cell["source"] = dst.splitlines(keepends=True)
                modified = True
    if modified:
        dst_contents = json.dumps(nb, indent=1, ensure_ascii=False)
        if trailing_newline:
            dst_contents = dst_contents + "\n"
        return dst_contents
    else:
        raise NothingChanged


def format_str(
    src_contents: str, *, mode: Mode, lines: Collection[tuple[int, int]] = ()
) -> str:
    """Reformat a string and return new contents.

    `mode` determines formatting options, such as how many characters per line are
    allowed. Example:

    >>> import black
    >>> print(black.format_str("def f(arg:str='')->None:...", mode=black.Mode()))
    def f(arg: str = "") -> None:
        ...

    A more complex example:

    >>> print(
    ...   black.format_str(
    ...     "def f(arg:str='')->None: hey",
    ...     mode=black.Mode(
    ...       target_versions={black.TargetVersion.PY36},
    ...       line_length=10,
    ...       string_normalization=False,
    ...       is_pyi=False,
    ...     ),
    ...   ),
    ... )
    def f(
        arg: str = '',
    ) -> None:
        hey

    """
    if lines:
        lines = sanitized_lines(lines, src_contents)
        if not lines:
            return src_contents  # Nothing to format
    dst_contents = _format_str_once(src_contents, mode=mode, lines=lines)
    # Forced second pass to work around optional trailing commas (becoming
    # forced trailing commas on pass 2) interacting differently with optional
    # parentheses. Admittedly ugly.
    if src_contents != dst_contents:
        if lines:
            lines = adjusted_lines(lines, src_contents, dst_contents)
        return _format_str_once(dst_contents, mode=mode, lines=lines)
    return dst_contents


def _format_str_once(
    src_contents: str, *, mode: Mode, lines: Collection[tuple[int, int]] = ()
) -> str:
    if Preview.normalize_cr_newlines in mode:
        normalized_contents, _, newline_type = decode_bytes(
            src_contents.encode("utf-8"), mode
        )

        src_node = lib2to3_parse(
            normalized_contents.lstrip(), target_versions=mode.target_versions
        )
    else:
        src_node = lib2to3_parse(src_contents.lstrip(), mode.target_versions)

    dst_blocks: list[LinesBlock] = []
    if mode.target_versions:
        versions = mode.target_versions
    else:
        future_imports = get_future_imports(src_node)
        versions = detect_target_versions(src_node, future_imports=future_imports)

    line_generation_features = {
        feature
        for feature in {
            Feature.PARENTHESIZED_CONTEXT_MANAGERS,
            Feature.UNPARENTHESIZED_EXCEPT_TYPES,
            Feature.T_STRINGS,
        }
        if supports_feature(versions, feature)
    }
    normalize_fmt_off(src_node, mode, lines)
    if lines:
        # This should be called after normalize_fmt_off.
        convert_unchanged_lines(src_node, lines)

    line_generator = LineGenerator(mode=mode, features=line_generation_features)
    elt = EmptyLineTracker(mode=mode)
    split_line_features = {
        feature
        for feature in {
            Feature.TRAILING_COMMA_IN_CALL,
            Feature.TRAILING_COMMA_IN_DEF,
        }
        if supports_feature(versions, feature)
    }
    block: LinesBlock | None = None
    for current_line in line_generator.visit(src_node):
        block = elt.maybe_empty_lines(current_line)
        dst_blocks.append(block)
        for line in transform_line(
            current_line, mode=mode, features=split_line_features
        ):
            block.content_lines.append(str(line))
    if dst_blocks:
        dst_blocks[-1].after = 0
    dst_contents = []
    for block in dst_blocks:
        dst_contents.extend(block.all_lines())
    if not dst_contents:
        if Preview.normalize_cr_newlines in mode:
            if "\n" in normalized_contents:
                return newline_type
        else:
            # Use decode_bytes to retrieve the correct source newline (CRLF or LF),
            # and check if normalized_content has more than one line
            normalized_content, _, newline = decode_bytes(
                src_contents.encode("utf-8"), mode
            )
            if "\n" in normalized_content:
                return newline
        return ""
    if Preview.normalize_cr_newlines in mode:
        return "".join(dst_contents).replace("\n", newline_type)
    else:
        return "".join(dst_contents)


def decode_bytes(src: bytes, mode: Mode) -> tuple[FileContent, Encoding, NewLine]:
    """Return a tuple of (decoded_contents, encoding, newline).

    `newline` is either CRLF or LF but `decoded_contents` is decoded with
    universal newlines (i.e. only contains LF).
    """
    srcbuf = io.BytesIO(src)
    encoding, lines = tokenize.detect_encoding(srcbuf.readline)
    if not lines:
        return "", encoding, "\n"

    if Preview.normalize_cr_newlines in mode:
        if lines[0][-2:] == b"\r\n":
            if b"\r" in lines[0][:-2]:
                newline = "\r"
            else:
                newline = "\r\n"
        elif lines[0][-1:] == b"\n":
            if b"\r" in lines[0][:-1]:
                newline = "\r"
            else:
                newline = "\n"
        else:
            if b"\r" in lines[0]:
                newline = "\r"
            else:
                newline = "\n"
    else:
        newline = "\r\n" if lines[0][-2:] == b"\r\n" else "\n"

    srcbuf.seek(0)
    with io.TextIOWrapper(srcbuf, encoding) as tiow:
        return tiow.read(), encoding, newline


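# Illustrative sketch: the returned contents always use "\n", while the detected
# newline records what the input used on disk (with default, non-preview Mode).
#
#     >>> decode_bytes(b"x = 1\r\ny = 2\r\n", Mode())
#     ('x = 1\ny = 2\n', 'utf-8', '\r\n')
#     >>> decode_bytes(b"", Mode())
#     ('', 'utf-8', '\n')

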
def get_features_used(
    node: Node, *, future_imports: set[str] | None = None
) -> set[Feature]:
    """Return a set of (relatively) new Python features used in this file.

    Currently looking for:
    - f-strings;
    - self-documenting expressions in f-strings (f"{x=}");
    - underscores in numeric literals;
    - trailing commas after * or ** in function signatures and calls;
    - positional only arguments in function signatures and lambdas;
    - assignment expression;
    - relaxed decorator syntax;
    - usage of __future__ flags (annotations);
    - print / exec statements;
    - parenthesized context managers;
    - match statements;
    - except* clause;
    - variadic generics;
    """
    features: set[Feature] = set()
    if future_imports:
        features |= {
            FUTURE_FLAG_TO_FEATURE[future_import]
            for future_import in future_imports
            if future_import in FUTURE_FLAG_TO_FEATURE
        }

    for n in node.pre_order():
        if n.type == token.FSTRING_START:
            features.add(Feature.F_STRINGS)
        elif n.type == token.TSTRING_START:
            features.add(Feature.T_STRINGS)
        elif (
            n.type == token.RBRACE
            and n.parent is not None
            and any(child.type == token.EQUAL for child in n.parent.children)
        ):
            features.add(Feature.DEBUG_F_STRINGS)

        elif is_number_token(n):
            if "_" in n.value:
                features.add(Feature.NUMERIC_UNDERSCORES)

        elif n.type == token.SLASH:
            if n.parent and n.parent.type in {
                syms.typedargslist,
                syms.arglist,
                syms.varargslist,
            }:
                features.add(Feature.POS_ONLY_ARGUMENTS)

        elif n.type == token.COLONEQUAL:
            features.add(Feature.ASSIGNMENT_EXPRESSIONS)

        elif n.type == syms.decorator:
            if len(n.children) > 1 and not is_simple_decorator_expression(
                n.children[1]
            ):
                features.add(Feature.RELAXED_DECORATORS)

        elif (
            n.type in {syms.typedargslist, syms.arglist}
            and n.children
            and n.children[-1].type == token.COMMA
        ):
            if n.type == syms.typedargslist:
                feature = Feature.TRAILING_COMMA_IN_DEF
            else:
                feature = Feature.TRAILING_COMMA_IN_CALL

            for ch in n.children:
                if ch.type in STARS:
                    features.add(feature)

                if ch.type == syms.argument:
                    for argch in ch.children:
                        if argch.type in STARS:
                            features.add(feature)

        elif (
            n.type in {syms.return_stmt, syms.yield_expr}
            and len(n.children) >= 2
            and n.children[1].type == syms.testlist_star_expr
            and any(child.type == syms.star_expr for child in n.children[1].children)
        ):
            features.add(Feature.UNPACKING_ON_FLOW)

        elif (
            n.type == syms.annassign
            and len(n.children) >= 4
            and n.children[3].type == syms.testlist_star_expr
        ):
            features.add(Feature.ANN_ASSIGN_EXTENDED_RHS)

        elif (
            n.type == syms.with_stmt
            and len(n.children) > 2
            and n.children[1].type == syms.atom
        ):
            atom_children = n.children[1].children
            if (
                len(atom_children) == 3
                and atom_children[0].type == token.LPAR
                and _contains_asexpr(atom_children[1])
                and atom_children[2].type == token.RPAR
            ):
                features.add(Feature.PARENTHESIZED_CONTEXT_MANAGERS)

        elif n.type == syms.match_stmt:
            features.add(Feature.PATTERN_MATCHING)

        elif n.type in {syms.subscriptlist, syms.trailer} and any(
            child.type == syms.star_expr for child in n.children
        ):
            features.add(Feature.VARIADIC_GENERICS)

        elif (
            n.type == syms.tname_star
            and len(n.children) == 3
            and n.children[2].type == syms.star_expr
        ):
            features.add(Feature.VARIADIC_GENERICS)

        elif n.type in (syms.type_stmt, syms.typeparams):
            features.add(Feature.TYPE_PARAMS)

        elif (
            n.type in (syms.typevartuple, syms.paramspec, syms.typevar)
            and n.children[-2].type == token.EQUAL
        ):
            features.add(Feature.TYPE_PARAM_DEFAULTS)

        elif (
            n.type == syms.except_clause
            and len(n.children) >= 2
            and (
                n.children[1].type == token.STAR or n.children[1].type == syms.testlist
            )
        ):
            is_star_except = n.children[1].type == token.STAR

            if is_star_except:
                features.add(Feature.EXCEPT_STAR)

            # Presence of except* pushes the `as` clause one index back.
            has_as_clause = (
                len(n.children) >= is_star_except + 3
                and n.children[is_star_except + 2].type == token.NAME
                and n.children[is_star_except + 2].value == "as"  # type: ignore
            )

            # If there's no 'as' clause and the except expression is a testlist.
            if not has_as_clause and (
                (is_star_except and n.children[2].type == syms.testlist)
                or (not is_star_except and n.children[1].type == syms.testlist)
            ):
                features.add(Feature.UNPARENTHESIZED_EXCEPT_TYPES)

    return features


def _contains_asexpr(node: Node | Leaf) -> bool:
    """Return True if `node` contains an as-pattern."""
    if node.type == syms.asexpr_test:
        return True
    elif node.type == syms.atom:
        if (
            len(node.children) == 3
            and node.children[0].type == token.LPAR
            and node.children[2].type == token.RPAR
        ):
            return _contains_asexpr(node.children[1])
    elif node.type == syms.testlist_gexp:
        return any(_contains_asexpr(child) for child in node.children)
    return False


def detect_target_versions(
    node: Node, *, future_imports: set[str] | None = None
) -> set[TargetVersion]:
    """Detect the version to target based on the nodes used."""
    features = get_features_used(node, future_imports=future_imports)
    return {
        version for version in TargetVersion if features <= VERSION_TO_FEATURES[version]
    }


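# Illustrative sketch: detection keeps every TargetVersion that supports all of
# the features the file uses, so a file whose only modern feature is the walrus
# operator is compatible with PY38 and later but not PY37.
#
#     >>> node = lib2to3_parse("if (n := 10) > 5:\n    pass\n")
#     >>> TargetVersion.PY38 in detect_target_versions(node)
#     True
#     >>> TargetVersion.PY37 in detect_target_versions(node)
#     False

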
def get_future_imports(node: Node) -> set[str]:
    """Return a set of __future__ imports in the file."""
    imports: set[str] = set()

    def get_imports_from_children(children: list[LN]) -> Generator[str, None, None]:
        for child in children:
            if isinstance(child, Leaf):
                if child.type == token.NAME:
                    yield child.value

            elif child.type == syms.import_as_name:
                orig_name = child.children[0]
                assert isinstance(orig_name, Leaf), "Invalid syntax parsing imports"
                assert orig_name.type == token.NAME, "Invalid syntax parsing imports"
                yield orig_name.value

            elif child.type == syms.import_as_names:
                yield from get_imports_from_children(child.children)

            else:
                raise AssertionError("Invalid syntax parsing imports")

    for child in node.children:
        if child.type != syms.simple_stmt:
            break

        first_child = child.children[0]
        if isinstance(first_child, Leaf):
            # Continue looking if we see a docstring; otherwise stop.
            if (
                len(child.children) == 2
                and first_child.type == token.STRING
                and child.children[1].type == token.NEWLINE
            ):
                continue

            break

        elif first_child.type == syms.import_from:
            module_name = first_child.children[1]
            if not isinstance(module_name, Leaf) or module_name.value != "__future__":
                break

            imports |= set(get_imports_from_children(first_child.children[3:]))
        else:
            break

    return imports


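# Illustrative sketch: only leading __future__ imports (optionally preceded by a
# module docstring) are collected; anything after other statements is ignored.
#
#     >>> src = '"""doc."""\nfrom __future__ import annotations\nx = 1\n'
#     >>> get_future_imports(lib2to3_parse(src))
#     {'annotations'}

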
def _black_info() -> str:
    return (
        f"Black {__version__} on "
        f"Python ({platform.python_implementation()}) {platform.python_version()}"
    )


def assert_equivalent(src: str, dst: str) -> None:
    """Raise AssertionError if `src` and `dst` aren't equivalent."""
    try:
        src_ast = parse_ast(src)
    except Exception as exc:
        raise ASTSafetyError(
            "cannot use --safe with this file; failed to parse source file AST: "
            f"{exc}\n"
            "This could be caused by running Black with an older Python version "
            "that does not support new syntax used in your source file."
        ) from exc

    try:
        dst_ast = parse_ast(dst)
    except Exception as exc:
        log = dump_to_file("".join(traceback.format_tb(exc.__traceback__)), dst)
        raise ASTSafetyError(
            f"INTERNAL ERROR: {_black_info()} produced invalid code: {exc}. "
            "Please report a bug on https://github.com/psf/black/issues. "
            f"This invalid output might be helpful: {log}"
        ) from None

    src_ast_str = "\n".join(stringify_ast(src_ast))
    dst_ast_str = "\n".join(stringify_ast(dst_ast))
    if src_ast_str != dst_ast_str:
        log = dump_to_file(diff(src_ast_str, dst_ast_str, "src", "dst"))
        raise ASTSafetyError(
            f"INTERNAL ERROR: {_black_info()} produced code that is not equivalent to"
            " the source. Please report a bug on https://github.com/psf/black/issues."
            f" This diff might be helpful: {log}"
        ) from None


def assert_stable(
    src: str, dst: str, mode: Mode, *, lines: Collection[tuple[int, int]] = ()
) -> None:
    """Raise AssertionError if `dst` reformats differently the second time."""
    if lines:
        # Formatting specified lines requires `adjusted_lines` to map original lines
        # to the formatted lines before re-formatting the previously formatted result.
        # Due to a less-than-ideal diff algorithm, some edge cases produce incorrect
        # new line ranges. Hence for now, we skip the stable check.
        # See https://github.com/psf/black/issues/4033 for context.
        return
    # We shouldn't call format_str() here, because that formats the string
    # twice and may hide a bug where we bounce back and forth between two
    # versions.
    newdst = _format_str_once(dst, mode=mode, lines=lines)
    if dst != newdst:
        log = dump_to_file(
            str(mode),
            diff(src, dst, "source", "first pass"),
            diff(dst, newdst, "first pass", "second pass"),
        )
        raise AssertionError(
            f"INTERNAL ERROR: {_black_info()} produced different code on the second"
            " pass of the formatter. Please report a bug on"
            f" https://github.com/psf/black/issues. This diff might be helpful: {log}"
        ) from None


def patched_main() -> None:
    # PyInstaller patches multiprocessing to need freeze_support() even in non-Windows
    # environments so just assume we always need to call it if frozen.
    if getattr(sys, "frozen", False):
        from multiprocessing import freeze_support

        freeze_support()

    main()


if __name__ == "__main__":
    patched_main()