Coverage for /pythoncovmergedfiles/medio/medio/usr/local/lib/python3.8/site-packages/pygments/lexers/nix.py: 62%
26 statements
« prev ^ index » next coverage.py v7.2.7, created at 2023-07-01 06:54 +0000
1"""
2 pygments.lexers.nix
3 ~~~~~~~~~~~~~~~~~~~
5 Lexers for the NixOS Nix language.
7 :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
8 :license: BSD, see LICENSE for details.
9"""
11import re
13from pygments.lexer import RegexLexer, include
14from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
15 Number, Punctuation, Literal
# Public API of this module: only the lexer class is exported.
__all__ = ['NixLexer']
class NixLexer(RegexLexer):
    """
    For the Nix language.

    .. versionadded:: 2.0
    """

    name = 'Nix'
    url = 'http://nixos.org/nix/'
    aliases = ['nixos', 'nix']
    filenames = ['*.nix']
    mimetypes = ['text/x-nix']

    keywords = ['rec', 'with', 'let', 'in', 'inherit', 'assert', 'if',
                'else', 'then', '...']
    builtins = ['import', 'abort', 'baseNameOf', 'dirOf', 'isNull', 'builtins',
                'map', 'removeAttrs', 'throw', 'toString', 'derivation']
    operators = ['++', '+', '?', '.', '!', '//', '==',
                 '!=', '&&', '||', '->', '=']

    punctuations = ["(", ")", "[", "]", ";", "{", "}", ":", ",", "@"]

    tokens = {
        'root': [
            # comments starting with #
            (r'#.*$', Comment.Single),

            # multiline comments
            (r'/\*', Comment.Multiline, 'comment'),

            # whitespace
            (r'\s+', Text),

            # keywords.  A trailing \b keeps e.g. 'in' from matching the
            # prefix of 'inherit', but \b only makes sense after a word
            # character: appending it to '...' would yield r'\.\.\.\b',
            # which never matches the ellipsis before whitespace or '}'.
            ('(%s)' % '|'.join(re.escape(entry) +
                               ('\\b' if entry[-1].isalnum() else '')
                               for entry in keywords), Keyword),

            # highlight the builtins
            ('(%s)' % '|'.join(re.escape(entry) + '\\b' for entry in builtins),
             Name.Builtin),

            (r'\b(true|false|null)\b', Name.Constant),

            # operators
            ('(%s)' % '|'.join(re.escape(entry) for entry in operators),
             Operator),

            # word operators
            (r'\b(or|and)\b', Operator.Word),

            # punctuations
            ('(%s)' % '|'.join(re.escape(entry) for entry in punctuations), Punctuation),

            # integers
            (r'[0-9]+', Number.Integer),

            # strings
            (r'"', String.Double, 'doublequote'),
            (r"''", String.Single, 'singlequote'),

            # paths
            (r'[\w.+-]*(\/[\w.+-]+)+', Literal),
            (r'\<[\w.+-]+(\/[\w.+-]+)*\>', Literal),

            # urls
            (r'[a-zA-Z][a-zA-Z0-9\+\-\.]*\:[\w%/?:@&=+$,\\.!~*\'-]+', Literal),

            # names of variables
            (r'[\w-]+\s*=', String.Symbol),
            (r'[a-zA-Z_][\w\'-]*', Text),

        ],
        'comment': [
            # eat everything that is neither '*' nor '/', then handle
            # nested comment delimiters, then lone '*' / '/' characters
            (r'[^/*]+', Comment.Multiline),
            (r'/\*', Comment.Multiline, '#push'),
            (r'\*/', Comment.Multiline, '#pop'),
            (r'[*/]', Comment.Multiline),
        ],
        'singlequote': [
            # the longer escapes must precede the bare '' terminator,
            # otherwise they would be unreachable
            (r"'''", String.Escape),
            (r"''\$\{", String.Escape),
            (r"''\n", String.Escape),
            (r"''\r", String.Escape),
            (r"''\t", String.Escape),
            (r"''", String.Single, '#pop'),
            (r'\$\{', String.Interpol, 'antiquote'),
            (r"['$]", String.Single),
            (r"[^'$]+", String.Single),
        ],
        'doublequote': [
            # Escape sequences.  The multi-character escapes must come
            # before the bare backslash rule, or they are unreachable and
            # an escaped quote \" would pop the state and end the string
            # prematurely.  The '$' must itself be escaped so that it is
            # a literal and not an end-of-line anchor.
            (r'\\\$\{', String.Escape),
            (r'\\"', String.Escape),
            (r'\\', String.Escape),
            (r'"', String.Double, '#pop'),
            (r'\$\{', String.Interpol, 'antiquote'),
            (r'[^"]', String.Double),
        ],
        'antiquote': [
            (r"\}", String.Interpol, '#pop'),
            # TODO: we should probably escape also here ''${ \${
            (r"\$\{", String.Interpol, '#push'),
            include('root'),
        ],
    }

    def analyse_text(text):
        """Heuristically score how likely *text* is Nix source (0.0-1.0+)."""
        rv = 0.0
        # TODO: let/in
        if re.search(r'import.+?<[^>]+>', text):
            rv += 0.4
        if re.search(r'mkDerivation\s+(\(|\{|rec)', text):
            rv += 0.4
        if re.search(r'=\s+mkIf\s+', text):
            rv += 0.4
        if re.search(r'\{[a-zA-Z,\s]+\}:', text):
            rv += 0.1
        return rv