1"""
2 pygments.lexers.shell
3 ~~~~~~~~~~~~~~~~~~~~~
4
5 Lexers for various shells.
6
7 :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS.
8 :license: BSD, see LICENSE for details.
9"""
10
11import re
12
13from pygments.lexer import Lexer, RegexLexer, do_insertions, bygroups, \
14 include, default, this, using, words, line_re
15from pygments.token import Punctuation, Whitespace, \
16 Text, Comment, Operator, Keyword, Name, String, Number, Generic
17from pygments.util import shebang_matches
18
19__all__ = ['BashLexer', 'BashSessionLexer', 'TcshLexer', 'BatchLexer',
20 'SlurmBashLexer', 'MSDOSSessionLexer', 'PowerShellLexer',
21 'PowerShellSessionLexer', 'TcshSessionLexer', 'FishShellLexer',
22 'ExeclineLexer']
23
24
25class BashLexer(RegexLexer):
26 """
27 Lexer for (ba|k|z|)sh shell scripts.
28 """
29
30 name = 'Bash'
31 aliases = ['bash', 'sh', 'ksh', 'zsh', 'shell', 'openrc']
32 filenames = ['*.sh', '*.ksh', '*.bash', '*.ebuild', '*.eclass',
33 '*.exheres-0', '*.exlib', '*.zsh',
34 '.bashrc', 'bashrc', '.bash_*', 'bash_*', 'zshrc', '.zshrc',
35 '.kshrc', 'kshrc',
36 'PKGBUILD']
37 mimetypes = ['application/x-sh', 'application/x-shellscript', 'text/x-shellscript']
38 url = 'https://en.wikipedia.org/wiki/Unix_shell'
39 version_added = '0.6'
40
41 tokens = {
42 'root': [
43 include('basic'),
44 (r'`', String.Backtick, 'backticks'),
45 include('data'),
46 include('interp'),
47 ],
48 'interp': [
49 (r'\$\(\(', Keyword, 'math'),
50 (r'\$\(', Keyword, 'paren'),
51 (r'\$\{#?', String.Interpol, 'curly'),
52 (r'\$[a-zA-Z_]\w*', Name.Variable), # user variable
53 (r'\$(?:\d+|[#$?!_*@-])', Name.Variable), # builtin
54 (r'\$', Text),
55 ],
56 'basic': [
57 (r'\b(if|fi|else|while|in|do|done|for|then|return|function|case|'
58 r'select|break|continue|until|esac|elif)(\s*)\b',
59 bygroups(Keyword, Whitespace)),
60 (r'\b(alias|bg|bind|builtin|caller|cd|command|compgen|'
61 r'complete|declare|dirs|disown|echo|enable|eval|exec|exit|'
62 r'export|false|fc|fg|getopts|hash|help|history|jobs|kill|let|'
63 r'local|logout|popd|printf|pushd|pwd|read|readonly|set|shift|'
64 r'shopt|source|suspend|test|time|times|trap|true|type|typeset|'
65 r'ulimit|umask|unalias|unset|wait)(?=[\s)`])',
66 Name.Builtin),
67 (r'\A#!.+\n', Comment.Hashbang),
68 (r'#.*\n', Comment.Single),
69 (r'\\[\w\W]', String.Escape),
70 (r'(\b\w+)(\s*)(\+?=)', bygroups(Name.Variable, Whitespace, Operator)),
71 (r'[\[\]{}()=]', Operator),
72 (r'<<<', Operator), # here-string
73 (r'<<-?\s*(\'?)\\?(\w+)[\w\W]+?\2', String),
74 (r'&&|\|\|', Operator),
75 ],
76 'data': [
77 (r'(?s)\$?"(\\.|[^"\\$])*"', String.Double),
78 (r'"', String.Double, 'string'),
79 (r"(?s)\$'(\\\\|\\[0-7]+|\\.|[^'\\])*'", String.Single),
80 (r"(?s)'.*?'", String.Single),
81 (r';', Punctuation),
82 (r'&', Punctuation),
83 (r'\|', Punctuation),
84 (r'\s+', Whitespace),
85 (r'\d+\b', Number),
86 (r'[^=\s\[\]{}()$"\'`\\<&|;]+', Text),
87 (r'<', Text),
88 ],
89 'string': [
90 (r'"', String.Double, '#pop'),
91 (r'(?s)(\\\\|\\[0-7]+|\\.|[^"\\$])+', String.Double),
92 include('interp'),
93 ],
94 'curly': [
95 (r'\}', String.Interpol, '#pop'),
96 (r':-', Keyword),
97 (r'\w+', Name.Variable),
98 (r'[^}:"\'`$\\]+', Punctuation),
99 (r':', Punctuation),
100 include('root'),
101 ],
102 'paren': [
103 (r'\)', Keyword, '#pop'),
104 include('root'),
105 ],
106 'math': [
107 (r'\)\)', Keyword, '#pop'),
108 (r'\*\*|\|\||<<|>>|[-+*/%^|&<>]', Operator),
109 (r'\d+#[\da-zA-Z]+', Number),
110 (r'\d+#(?! )', Number),
111 (r'0[xX][\da-fA-F]+', Number),
112 (r'\d+', Number),
113 (r'[a-zA-Z_]\w*', Name.Variable), # user variable
114 include('root'),
115 ],
116 'backticks': [
117 (r'`', String.Backtick, '#pop'),
118 include('root'),
119 ],
120 }
121
122 def analyse_text(text):
123 if shebang_matches(text, r'(ba|z|)sh'):
124 return 1
125 if text.startswith('$ '):
126 return 0.2
127
128
129class SlurmBashLexer(BashLexer):
130 """
131 Lexer for (ba|k|z|)sh Slurm scripts.
132 """
133
134 name = 'Slurm'
135 aliases = ['slurm', 'sbatch']
136 filenames = ['*.sl']
137 mimetypes = []
138 version_added = '2.4'
139 EXTRA_KEYWORDS = {'srun'}
140
141 def get_tokens_unprocessed(self, text):
142 for index, token, value in BashLexer.get_tokens_unprocessed(self, text):
143 if token is Text and value in self.EXTRA_KEYWORDS:
144 yield index, Name.Builtin, value
145 elif token is Comment.Single and 'SBATCH' in value:
146 yield index, Keyword.Pseudo, value
147 else:
148 yield index, token, value
149
150
151class ShellSessionBaseLexer(Lexer):
152 """
153 Base lexer for shell sessions.
154
155 .. versionadded:: 2.1
156 """
157
158 _bare_continuation = False
159 _venv = re.compile(r'^(\([^)]*\))(\s*)')
160
161 def get_tokens_unprocessed(self, text):
162 innerlexer = self._innerLexerCls(**self.options)
163
164 pos = 0
165 curcode = ''
166 insertions = []
167 backslash_continuation = False
168
169 for match in line_re.finditer(text):
170 line = match.group()
171
172 venv_match = self._venv.match(line)
173 if venv_match:
174 venv = venv_match.group(1)
175 venv_whitespace = venv_match.group(2)
176 insertions.append((len(curcode),
177 [(0, Generic.Prompt.VirtualEnv, venv)]))
178 if venv_whitespace:
179 insertions.append((len(curcode),
180 [(0, Text, venv_whitespace)]))
181 line = line[venv_match.end():]
182
183 m = self._ps1rgx.match(line)
184 if m:
185 # To support output lexers (say diff output), the output
186 # needs to be broken by prompts whenever the output lexer
187 # changes.
188 if not insertions:
189 pos = match.start()
190
191 insertions.append((len(curcode),
192 [(0, Generic.Prompt, m.group(1))]))
193 curcode += m.group(2)
194 backslash_continuation = curcode.endswith('\\\n')
195 elif backslash_continuation:
196 if line.startswith(self._ps2):
197 insertions.append((len(curcode),
198 [(0, Generic.Prompt,
199 line[:len(self._ps2)])]))
200 curcode += line[len(self._ps2):]
201 else:
202 curcode += line
203 backslash_continuation = curcode.endswith('\\\n')
204 elif self._bare_continuation and line.startswith(self._ps2):
205 insertions.append((len(curcode),
206 [(0, Generic.Prompt,
207 line[:len(self._ps2)])]))
208 curcode += line[len(self._ps2):]
209 else:
210 if insertions:
211 toks = innerlexer.get_tokens_unprocessed(curcode)
212 for i, t, v in do_insertions(insertions, toks):
213 yield pos+i, t, v
214 yield match.start(), Generic.Output, line
215 insertions = []
216 curcode = ''
217 if insertions:
218 for i, t, v in do_insertions(insertions,
219 innerlexer.get_tokens_unprocessed(curcode)):
220 yield pos+i, t, v
221
222
223class BashSessionLexer(ShellSessionBaseLexer):
224 """
225 Lexer for Bash shell sessions, i.e. command lines, including a
226 prompt, interspersed with output.
227 """
228
229 name = 'Bash Session'
230 aliases = ['console', 'shell-session']
231 filenames = ['*.sh-session', '*.shell-session']
232 mimetypes = ['application/x-shell-session', 'application/x-sh-session']
233 url = 'https://en.wikipedia.org/wiki/Unix_shell'
234 version_added = '1.1'
235 _example = "console/example.sh-session"
236
237 _innerLexerCls = BashLexer
238 _ps1rgx = re.compile(
239 r'^((?:(?:\[.*?\])|(?:\(\S+\))?(?:| |sh\S*?|\w+\S+[@:]\S+(?:\s+\S+)' \
240 r'?|\[\S+[@:][^\n]+\].+))\s*[$#%]\s*)(.*\n?)')
241 _ps2 = '> '
242
243
244class BatchLexer(RegexLexer):
245 """
246 Lexer for the DOS/Windows Batch file format.
247 """
248 name = 'Batchfile'
249 aliases = ['batch', 'bat', 'dosbatch', 'winbatch']
250 filenames = ['*.bat', '*.cmd']
251 mimetypes = ['application/x-dos-batch']
252 url = 'https://en.wikipedia.org/wiki/Batch_file'
253 version_added = '0.7'
254
255 flags = re.MULTILINE | re.IGNORECASE
256
257 _nl = r'\n\x1a'
258 _punct = r'&<>|'
259 _ws = r'\t\v\f\r ,;=\xa0'
260 _nlws = r'\s\x1a\xa0,;='
261 _space = rf'(?:(?:(?:\^[{_nl}])?[{_ws}])+)'
262 _keyword_terminator = (rf'(?=(?:\^[{_nl}]?)?[{_ws}+./:[\\\]]|[{_nl}{_punct}(])')
263 _token_terminator = rf'(?=\^?[{_ws}]|[{_punct}{_nl}])'
264 _start_label = rf'((?:(?<=^[^:])|^[^:]?)[{_ws}]*)(:)'
265 _label = rf'(?:(?:[^{_nlws}{_punct}+:^]|\^[{_nl}]?[\w\W])*)'
266 _label_compound = rf'(?:(?:[^{_nlws}{_punct}+:^)]|\^[{_nl}]?[^)])*)'
267 _number = rf'(?:-?(?:0[0-7]+|0x[\da-f]+|\d+){_token_terminator})'
268 _opword = r'(?:equ|geq|gtr|leq|lss|neq)'
269 _string = rf'(?:"[^{_nl}"]*(?:"|(?=[{_nl}])))'
270 _variable = (r'(?:(?:%(?:\*|(?:~[a-z]*(?:\$[^:]+:)?)?\d|'
271 rf'[^%:{_nl}]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^%{_nl}^]|'
272 rf'\^[^%{_nl}])[^={_nl}]*=(?:[^%{_nl}^]|\^[^%{_nl}])*)?)?%))|'
273 rf'(?:\^?![^!:{_nl}]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:'
274 rf'[^!{_nl}^]|\^[^!{_nl}])[^={_nl}]*=(?:[^!{_nl}^]|\^[^!{_nl}])*)?)?\^?!))')
275 _core_token = rf'(?:(?:(?:\^[{_nl}]?)?[^"{_nlws}{_punct}])+)'
276 _core_token_compound = rf'(?:(?:(?:\^[{_nl}]?)?[^"{_nlws}{_punct})])+)'
277 _token = rf'(?:[{_punct}]+|{_core_token})'
278 _token_compound = rf'(?:[{_punct}]+|{_core_token_compound})'
279 _stoken = (rf'(?:[{_punct}]+|(?:{_string}|{_variable}|{_core_token})+)')
280
281 def _make_begin_state(compound, _core_token=_core_token,
282 _core_token_compound=_core_token_compound,
283 _keyword_terminator=_keyword_terminator,
284 _nl=_nl, _punct=_punct, _string=_string,
285 _space=_space, _start_label=_start_label,
286 _stoken=_stoken, _token_terminator=_token_terminator,
287 _variable=_variable, _ws=_ws):
288 rest = '(?:{}|{}|[^"%{}{}{}])*'.format(_string, _variable, _nl, _punct,
289 ')' if compound else '')
290 rest_of_line = rf'(?:(?:[^{_nl}^]|\^[{_nl}]?[\w\W])*)'
291 rest_of_line_compound = rf'(?:(?:[^{_nl}^)]|\^[{_nl}]?[^)])*)'
292 set_space = rf'((?:(?:\^[{_nl}]?)?[^\S\n])*)'
293 suffix = ''
294 if compound:
295 _keyword_terminator = rf'(?:(?=\))|{_keyword_terminator})'
296 _token_terminator = rf'(?:(?=\))|{_token_terminator})'
297 suffix = '/compound'
298 return [
299 ((r'\)', Punctuation, '#pop') if compound else
300 (rf'\)((?=\()|{_token_terminator}){rest_of_line}',
301 Comment.Single)),
302 (rf'(?={_start_label})', Text, f'follow{suffix}'),
303 (_space, using(this, state='text')),
304 include(f'redirect{suffix}'),
305 (rf'[{_nl}]+', Text),
306 (r'\(', Punctuation, 'root/compound'),
307 (r'@+', Punctuation),
308 (rf'((?:for|if|rem)(?:(?=(?:\^[{_nl}]?)?/)|(?:(?!\^)|'
309 rf'(?<=m))(?:(?=\()|{_token_terminator})))({_space}?{_core_token_compound if compound else _core_token}?(?:\^[{_nl}]?)?/(?:\^[{_nl}]?)?\?)',
310 bygroups(Keyword, using(this, state='text')),
311 f'follow{suffix}'),
312 (rf'(goto{_keyword_terminator})({rest}(?:\^[{_nl}]?)?/(?:\^[{_nl}]?)?\?{rest})',
313 bygroups(Keyword, using(this, state='text')),
314 f'follow{suffix}'),
315 (words(('assoc', 'break', 'cd', 'chdir', 'cls', 'color', 'copy',
316 'date', 'del', 'dir', 'dpath', 'echo', 'endlocal', 'erase',
317 'exit', 'ftype', 'keys', 'md', 'mkdir', 'mklink', 'move',
318 'path', 'pause', 'popd', 'prompt', 'pushd', 'rd', 'ren',
319 'rename', 'rmdir', 'setlocal', 'shift', 'start', 'time',
320 'title', 'type', 'ver', 'verify', 'vol'),
321 suffix=_keyword_terminator), Keyword, f'follow{suffix}'),
322 (rf'(call)({_space}?)(:)',
323 bygroups(Keyword, using(this, state='text'), Punctuation),
324 f'call{suffix}'),
325 (rf'call{_keyword_terminator}', Keyword),
326 (rf'(for{_token_terminator}(?!\^))({_space})(/f{_token_terminator})',
327 bygroups(Keyword, using(this, state='text'), Keyword),
328 ('for/f', 'for')),
329 (rf'(for{_token_terminator}(?!\^))({_space})(/l{_token_terminator})',
330 bygroups(Keyword, using(this, state='text'), Keyword),
331 ('for/l', 'for')),
332 (rf'for{_token_terminator}(?!\^)', Keyword, ('for2', 'for')),
333 (rf'(goto{_keyword_terminator})({_space}?)(:?)',
334 bygroups(Keyword, using(this, state='text'), Punctuation),
335 f'label{suffix}'),
336 (rf'(if(?:(?=\()|{_token_terminator})(?!\^))({_space}?)((?:/i{_token_terminator})?)({_space}?)((?:not{_token_terminator})?)({_space}?)',
337 bygroups(Keyword, using(this, state='text'), Keyword,
338 using(this, state='text'), Keyword,
339 using(this, state='text')), ('(?', 'if')),
340 (rf'rem(((?=\()|{_token_terminator}){_space}?{_stoken}?.*|{_keyword_terminator}{rest_of_line_compound if compound else rest_of_line})',
341 Comment.Single, f'follow{suffix}'),
342 (rf'(set{_keyword_terminator}){set_space}(/a)',
343 bygroups(Keyword, using(this, state='text'), Keyword),
344 f'arithmetic{suffix}'),
345 (r'(set{}){}((?:/p)?){}((?:(?:(?:\^[{}]?)?[^"{}{}^={}]|'
346 r'\^[{}]?[^"=])+)?)((?:(?:\^[{}]?)?=)?)'.format(_keyword_terminator, set_space, set_space, _nl, _nl, _punct,
347 ')' if compound else '', _nl, _nl),
348 bygroups(Keyword, using(this, state='text'), Keyword,
349 using(this, state='text'), using(this, state='variable'),
350 Punctuation),
351 f'follow{suffix}'),
352 default(f'follow{suffix}')
353 ]
354
355 def _make_follow_state(compound, _label=_label,
356 _label_compound=_label_compound, _nl=_nl,
357 _space=_space, _start_label=_start_label,
358 _token=_token, _token_compound=_token_compound,
359 _ws=_ws):
360 suffix = '/compound' if compound else ''
361 state = []
362 if compound:
363 state.append((r'(?=\))', Text, '#pop'))
364 state += [
365 (rf'{_start_label}([{_ws}]*)({_label_compound if compound else _label})(.*)',
366 bygroups(Text, Punctuation, Text, Name.Label, Comment.Single)),
367 include(f'redirect{suffix}'),
368 (rf'(?=[{_nl}])', Text, '#pop'),
369 (r'\|\|?|&&?', Punctuation, '#pop'),
370 include('text')
371 ]
372 return state
373
374 def _make_arithmetic_state(compound, _nl=_nl, _punct=_punct,
375 _string=_string, _variable=_variable,
376 _ws=_ws, _nlws=_nlws):
377 op = r'=+\-*/!~'
378 state = []
379 if compound:
380 state.append((r'(?=\))', Text, '#pop'))
381 state += [
382 (r'0[0-7]+', Number.Oct),
383 (r'0x[\da-f]+', Number.Hex),
384 (r'\d+', Number.Integer),
385 (r'[(),]+', Punctuation),
386 (rf'([{op}]|%|\^\^)+', Operator),
387 (r'({}|{}|(\^[{}]?)?[^(){}%\^"{}{}]|\^[{}]?{})+'.format(_string, _variable, _nl, op, _nlws, _punct, _nlws,
388 r'[^)]' if compound else r'[\w\W]'),
389 using(this, state='variable')),
390 (r'(?=[\x00|&])', Text, '#pop'),
391 include('follow')
392 ]
393 return state
394
395 def _make_call_state(compound, _label=_label,
396 _label_compound=_label_compound):
397 state = []
398 if compound:
399 state.append((r'(?=\))', Text, '#pop'))
400 state.append((r'(:?)(%s)' % (_label_compound if compound else _label),
401 bygroups(Punctuation, Name.Label), '#pop'))
402 return state
403
404 def _make_label_state(compound, _label=_label,
405 _label_compound=_label_compound, _nl=_nl,
406 _punct=_punct, _string=_string, _variable=_variable):
407 state = []
408 if compound:
409 state.append((r'(?=\))', Text, '#pop'))
410 state.append((r'({}?)((?:{}|{}|\^[{}]?{}|[^"%^{}{}{}])*)'.format(_label_compound if compound else _label, _string,
411 _variable, _nl, r'[^)]' if compound else r'[\w\W]', _nl,
412 _punct, r')' if compound else ''),
413 bygroups(Name.Label, Comment.Single), '#pop'))
414 return state
415
416 def _make_redirect_state(compound,
417 _core_token_compound=_core_token_compound,
418 _nl=_nl, _punct=_punct, _stoken=_stoken,
419 _string=_string, _space=_space,
420 _variable=_variable, _nlws=_nlws):
421 stoken_compound = (rf'(?:[{_punct}]+|(?:{_string}|{_variable}|{_core_token_compound})+)')
422 return [
423 (rf'((?:(?<=[{_nlws}])\d)?)(>>?&|<&)([{_nlws}]*)(\d)',
424 bygroups(Number.Integer, Punctuation, Text, Number.Integer)),
425 (rf'((?:(?<=[{_nlws}])(?<!\^[{_nl}])\d)?)(>>?|<)({_space}?{stoken_compound if compound else _stoken})',
426 bygroups(Number.Integer, Punctuation, using(this, state='text')))
427 ]
428
429 tokens = {
430 'root': _make_begin_state(False),
431 'follow': _make_follow_state(False),
432 'arithmetic': _make_arithmetic_state(False),
433 'call': _make_call_state(False),
434 'label': _make_label_state(False),
435 'redirect': _make_redirect_state(False),
436 'root/compound': _make_begin_state(True),
437 'follow/compound': _make_follow_state(True),
438 'arithmetic/compound': _make_arithmetic_state(True),
439 'call/compound': _make_call_state(True),
440 'label/compound': _make_label_state(True),
441 'redirect/compound': _make_redirect_state(True),
442 'variable-or-escape': [
443 (_variable, Name.Variable),
444 (rf'%%|\^[{_nl}]?(\^!|[\w\W])', String.Escape)
445 ],
446 'string': [
447 (r'"', String.Double, '#pop'),
448 (_variable, Name.Variable),
449 (r'\^!|%%', String.Escape),
450 (rf'[^"%^{_nl}]+|[%^]', String.Double),
451 default('#pop')
452 ],
453 'sqstring': [
454 include('variable-or-escape'),
455 (r'[^%]+|%', String.Single)
456 ],
457 'bqstring': [
458 include('variable-or-escape'),
459 (r'[^%]+|%', String.Backtick)
460 ],
461 'text': [
462 (r'"', String.Double, 'string'),
463 include('variable-or-escape'),
464 (rf'[^"%^{_nlws}{_punct}\d)]+|.', Text)
465 ],
466 'variable': [
467 (r'"', String.Double, 'string'),
468 include('variable-or-escape'),
469 (rf'[^"%^{_nl}]+|.', Name.Variable)
470 ],
471 'for': [
472 (rf'({_space})(in)({_space})(\()',
473 bygroups(using(this, state='text'), Keyword,
474 using(this, state='text'), Punctuation), '#pop'),
475 include('follow')
476 ],
477 'for2': [
478 (r'\)', Punctuation),
479 (rf'({_space})(do{_token_terminator})',
480 bygroups(using(this, state='text'), Keyword), '#pop'),
481 (rf'[{_nl}]+', Text),
482 include('follow')
483 ],
484 'for/f': [
485 (rf'(")((?:{_variable}|[^"])*?")([{_nlws}]*)(\))',
486 bygroups(String.Double, using(this, state='string'), Text,
487 Punctuation)),
488 (r'"', String.Double, ('#pop', 'for2', 'string')),
489 (rf"('(?:%%|{_variable}|[\w\W])*?')([{_nlws}]*)(\))",
490 bygroups(using(this, state='sqstring'), Text, Punctuation)),
491 (rf'(`(?:%%|{_variable}|[\w\W])*?`)([{_nlws}]*)(\))',
492 bygroups(using(this, state='bqstring'), Text, Punctuation)),
493 include('for2')
494 ],
495 'for/l': [
496 (r'-?\d+', Number.Integer),
497 include('for2')
498 ],
499 'if': [
500 (rf'((?:cmdextversion|errorlevel){_token_terminator})({_space})(\d+)',
501 bygroups(Keyword, using(this, state='text'),
502 Number.Integer), '#pop'),
503 (rf'(defined{_token_terminator})({_space})({_stoken})',
504 bygroups(Keyword, using(this, state='text'),
505 using(this, state='variable')), '#pop'),
506 (rf'(exist{_token_terminator})({_space}{_stoken})',
507 bygroups(Keyword, using(this, state='text')), '#pop'),
508 (rf'({_number}{_space})({_opword})({_space}{_number})',
509 bygroups(using(this, state='arithmetic'), Operator.Word,
510 using(this, state='arithmetic')), '#pop'),
511 (_stoken, using(this, state='text'), ('#pop', 'if2')),
512 ],
513 'if2': [
514 (rf'({_space}?)(==)({_space}?{_stoken})',
515 bygroups(using(this, state='text'), Operator,
516 using(this, state='text')), '#pop'),
517 (rf'({_space})({_opword})({_space}{_stoken})',
518 bygroups(using(this, state='text'), Operator.Word,
519 using(this, state='text')), '#pop')
520 ],
521 '(?': [
522 (_space, using(this, state='text')),
523 (r'\(', Punctuation, ('#pop', 'else?', 'root/compound')),
524 default('#pop')
525 ],
526 'else?': [
527 (_space, using(this, state='text')),
528 (rf'else{_token_terminator}', Keyword, '#pop'),
529 default('#pop')
530 ]
531 }
532
533
534class MSDOSSessionLexer(ShellSessionBaseLexer):
535 """
536 Lexer for MS DOS shell sessions, i.e. command lines, including a
537 prompt, interspersed with output.
538 """
539
540 name = 'MSDOS Session'
541 aliases = ['doscon']
542 filenames = []
543 mimetypes = []
544 url = 'https://en.wikipedia.org/wiki/MS-DOS'
545 version_added = '2.1'
546 _example = "doscon/session"
547
548 _innerLexerCls = BatchLexer
549 _ps1rgx = re.compile(r'^([^>]*>)(.*\n?)')
550 _ps2 = 'More? '
551
552
553class TcshLexer(RegexLexer):
554 """
555 Lexer for tcsh scripts.
556 """
557
558 name = 'Tcsh'
559 aliases = ['tcsh', 'csh']
560 filenames = ['*.tcsh', '*.csh']
561 mimetypes = ['application/x-csh']
562 url = 'https://www.tcsh.org'
563 version_added = '0.10'
564
565 tokens = {
566 'root': [
567 include('basic'),
568 (r'\$\(', Keyword, 'paren'),
569 (r'\$\{#?', Keyword, 'curly'),
570 (r'`', String.Backtick, 'backticks'),
571 include('data'),
572 ],
573 'basic': [
574 (r'\b(if|endif|else|while|then|foreach|case|default|'
575 r'break|continue|goto|breaksw|end|switch|endsw)\s*\b',
576 Keyword),
577 (r'\b(alias|alloc|bg|bindkey|builtins|bye|caller|cd|chdir|'
578 r'complete|dirs|echo|echotc|eval|exec|exit|fg|filetest|getxvers|'
579 r'glob|getspath|hashstat|history|hup|inlib|jobs|kill|'
580 r'limit|log|login|logout|ls-F|migrate|newgrp|nice|nohup|notify|'
581 r'onintr|popd|printenv|pushd|rehash|repeat|rootnode|popd|pushd|'
582 r'set|shift|sched|setenv|setpath|settc|setty|setxvers|shift|'
583 r'source|stop|suspend|source|suspend|telltc|time|'
584 r'umask|unalias|uncomplete|unhash|universe|unlimit|unset|unsetenv|'
585 r'ver|wait|warp|watchlog|where|which)\s*\b',
586 Name.Builtin),
587 (r'#.*', Comment),
588 (r'\\[\w\W]', String.Escape),
589 (r'(\b\w+)(\s*)(=)', bygroups(Name.Variable, Text, Operator)),
590 (r'[\[\]{}()=]+', Operator),
591 (r'<<\s*(\'?)\\?(\w+)[\w\W]+?\2', String),
592 (r';', Punctuation),
593 ],
594 'data': [
595 (r'(?s)"(\\\\|\\[0-7]+|\\.|[^"\\])*"', String.Double),
596 (r"(?s)'(\\\\|\\[0-7]+|\\.|[^'\\])*'", String.Single),
597 (r'\s+', Text),
598 (r'[^=\s\[\]{}()$"\'`\\;#]+', Text),
599 (r'\d+(?= |\Z)', Number),
600 (r'\$#?(\w+|.)', Name.Variable),
601 ],
602 'curly': [
603 (r'\}', Keyword, '#pop'),
604 (r':-', Keyword),
605 (r'\w+', Name.Variable),
606 (r'[^}:"\'`$]+', Punctuation),
607 (r':', Punctuation),
608 include('root'),
609 ],
610 'paren': [
611 (r'\)', Keyword, '#pop'),
612 include('root'),
613 ],
614 'backticks': [
615 (r'`', String.Backtick, '#pop'),
616 include('root'),
617 ],
618 }
619
620
621class TcshSessionLexer(ShellSessionBaseLexer):
622 """
623 Lexer for Tcsh sessions, i.e. command lines, including a
624 prompt, interspersed with output.
625 """
626
627 name = 'Tcsh Session'
628 aliases = ['tcshcon']
629 filenames = []
630 mimetypes = []
631 url = 'https://www.tcsh.org'
632 version_added = '2.1'
633 _example = "tcshcon/session"
634
635 _innerLexerCls = TcshLexer
636 _ps1rgx = re.compile(r'^([^>]+>)(.*\n?)')
637 _ps2 = '? '
638
639
640class PowerShellLexer(RegexLexer):
641 """
642 For Windows PowerShell code.
643 """
644 name = 'PowerShell'
645 aliases = ['powershell', 'pwsh', 'posh', 'ps1', 'psm1']
646 filenames = ['*.ps1', '*.psm1']
647 mimetypes = ['text/x-powershell']
648 url = 'https://learn.microsoft.com/en-us/powershell'
649 version_added = '1.5'
650
651 flags = re.DOTALL | re.IGNORECASE | re.MULTILINE
652
653 keywords = (
654 'while validateset validaterange validatepattern validatelength '
655 'validatecount until trap switch return ref process param parameter in '
656 'if global: local: function foreach for finally filter end elseif else '
657 'dynamicparam do default continue cmdletbinding break begin alias \\? '
658 '% #script #private #local #global mandatory parametersetname position '
659 'valuefrompipeline valuefrompipelinebypropertyname '
660 'valuefromremainingarguments helpmessage try catch throw').split()
661
662 operators = (
663 'and as band bnot bor bxor casesensitive ccontains ceq cge cgt cle '
664 'clike clt cmatch cne cnotcontains cnotlike cnotmatch contains '
665 'creplace eq exact f file ge gt icontains ieq ige igt ile ilike ilt '
666 'imatch ine inotcontains inotlike inotmatch ireplace is isnot le like '
667 'lt match ne not notcontains notlike notmatch or regex replace '
668 'wildcard').split()
669
670 verbs = (
671 'write where watch wait use update unregister unpublish unprotect '
672 'unlock uninstall undo unblock trace test tee take sync switch '
673 'suspend submit stop step start split sort skip show set send select '
674 'search scroll save revoke resume restore restart resolve resize '
675 'reset request repair rename remove register redo receive read push '
676 'publish protect pop ping out optimize open new move mount merge '
677 'measure lock limit join invoke install initialize import hide group '
678 'grant get format foreach find export expand exit enter enable edit '
679 'dismount disconnect disable deny debug cxnew copy convertto '
680 'convertfrom convert connect confirm compress complete compare close '
681 'clear checkpoint block backup assert approve aggregate add').split()
682
683 aliases_ = (
684 'ac asnp cat cd cfs chdir clc clear clhy cli clp cls clv cnsn '
685 'compare copy cp cpi cpp curl cvpa dbp del diff dir dnsn ebp echo epal '
686 'epcsv epsn erase etsn exsn fc fhx fl foreach ft fw gal gbp gc gci gcm '
687 'gcs gdr ghy gi gjb gl gm gmo gp gps gpv group gsn gsnp gsv gu gv gwmi '
688 'h history icm iex ihy ii ipal ipcsv ipmo ipsn irm ise iwmi iwr kill lp '
689 'ls man md measure mi mount move mp mv nal ndr ni nmo npssc nsn nv ogv '
690 'oh popd ps pushd pwd r rbp rcjb rcsn rd rdr ren ri rjb rm rmdir rmo '
691 'rni rnp rp rsn rsnp rujb rv rvpa rwmi sajb sal saps sasv sbp sc select '
692 'set shcm si sl sleep sls sort sp spjb spps spsv start sujb sv swmi tee '
693 'trcm type wget where wjb write').split()
694
695 commenthelp = (
696 'component description example externalhelp forwardhelpcategory '
697 'forwardhelptargetname functionality inputs link '
698 'notes outputs parameter remotehelprunspace role synopsis').split()
699
700 tokens = {
701 'root': [
702 # we need to count pairs of parentheses for correct highlight
703 # of '$(...)' blocks in strings
704 (r'\(', Punctuation, 'child'),
705 (r'\s+', Text),
706 (r'^(\s*#[#\s]*)(\.(?:{}))([^\n]*$)'.format('|'.join(commenthelp)),
707 bygroups(Comment, String.Doc, Comment)),
708 (r'#[^\n]*?$', Comment),
709 (r'(<|<)#', Comment.Multiline, 'multline'),
710 (r'@"\n', String.Heredoc, 'heredoc-double'),
711 (r"@'\n.*?\n'@", String.Heredoc),
712 # escaped syntax
713 (r'`[\'"$@-]', Punctuation),
714 (r'"', String.Double, 'string'),
715 (r"'([^']|'')*'", String.Single),
716 (r'(\$|@@|@)((global|script|private|env):)?\w+',
717 Name.Variable),
718 (r'({})\b'.format('|'.join(keywords)), Keyword),
719 (r'-({})\b'.format('|'.join(operators)), Operator),
720 (r'({})-[a-z_]\w*\b'.format('|'.join(verbs)), Name.Builtin),
721 (r'({})\s'.format('|'.join(aliases_)), Name.Builtin),
722 (r'\[[a-z_\[][\w. `,\[\]]*\]', Name.Constant), # .net [type]s
723 (r'-[a-z_]\w*', Name),
724 (r'\w+', Name),
725 (r'[.,;:@{}\[\]$()=+*/\\&%!~?^`|<>-]', Punctuation),
726 ],
727 'child': [
728 (r'\)', Punctuation, '#pop'),
729 include('root'),
730 ],
731 'multline': [
732 (r'[^#&.]+', Comment.Multiline),
733 (r'#(>|>)', Comment.Multiline, '#pop'),
734 (r'\.({})'.format('|'.join(commenthelp)), String.Doc),
735 (r'[#&.]', Comment.Multiline),
736 ],
737 'string': [
738 (r"`[0abfnrtv'\"$`]", String.Escape),
739 (r'[^$`"]+', String.Double),
740 (r'\$\(', Punctuation, 'child'),
741 (r'""', String.Double),
742 (r'[`$]', String.Double),
743 (r'"', String.Double, '#pop'),
744 ],
745 'heredoc-double': [
746 (r'\n"@', String.Heredoc, '#pop'),
747 (r'\$\(', Punctuation, 'child'),
748 (r'[^@\n]+"]', String.Heredoc),
749 (r".", String.Heredoc),
750 ]
751 }
752
753
754class PowerShellSessionLexer(ShellSessionBaseLexer):
755 """
756 Lexer for PowerShell sessions, i.e. command lines, including a
757 prompt, interspersed with output.
758 """
759
760 name = 'PowerShell Session'
761 aliases = ['pwsh-session', 'ps1con']
762 filenames = []
763 mimetypes = []
764 url = 'https://learn.microsoft.com/en-us/powershell'
765 version_added = '2.1'
766 _example = "pwsh-session/session"
767
768 _innerLexerCls = PowerShellLexer
769 _bare_continuation = True
770 _ps1rgx = re.compile(r'^((?:\[[^]]+\]: )?PS[^>]*> ?)(.*\n?)')
771 _ps2 = '> '
772
773
774class FishShellLexer(RegexLexer):
775 """
776 Lexer for Fish shell scripts.
777 """
778
779 name = 'Fish'
780 aliases = ['fish', 'fishshell']
781 filenames = ['*.fish', '*.load']
782 mimetypes = ['application/x-fish']
783 url = 'https://fishshell.com'
784 version_added = '2.1'
785
786 tokens = {
787 'root': [
788 include('basic'),
789 include('data'),
790 include('interp'),
791 ],
792 'interp': [
793 (r'\$\(\(', Keyword, 'math'),
794 (r'\(', Keyword, 'paren'),
795 (r'\$#?(\w+|.)', Name.Variable),
796 ],
797 'basic': [
798 (r'\b(begin|end|if|else|while|break|for|in|return|function|block|'
799 r'case|continue|switch|not|and|or|set|echo|exit|pwd|true|false|'
800 r'cd|count|test)(\s*)\b',
801 bygroups(Keyword, Text)),
802 (r'\b(alias|bg|bind|breakpoint|builtin|command|commandline|'
803 r'complete|contains|dirh|dirs|emit|eval|exec|fg|fish|fish_config|'
804 r'fish_indent|fish_pager|fish_prompt|fish_right_prompt|'
805 r'fish_update_completions|fishd|funced|funcsave|functions|help|'
806 r'history|isatty|jobs|math|mimedb|nextd|open|popd|prevd|psub|'
807 r'pushd|random|read|set_color|source|status|trap|type|ulimit|'
808 r'umask|vared|fc|getopts|hash|kill|printf|time|wait)\s*\b(?!\.)',
809 Name.Builtin),
810 (r'#.*\n', Comment),
811 (r'\\[\w\W]', String.Escape),
812 (r'(\b\w+)(\s*)(=)', bygroups(Name.Variable, Whitespace, Operator)),
813 (r'[\[\]()=]', Operator),
814 (r'<<-?\s*(\'?)\\?(\w+)[\w\W]+?\2', String),
815 ],
816 'data': [
817 (r'(?s)\$?"(\\\\|\\[0-7]+|\\.|[^"\\$])*"', String.Double),
818 (r'"', String.Double, 'string'),
819 (r"(?s)\$'(\\\\|\\[0-7]+|\\.|[^'\\])*'", String.Single),
820 (r"(?s)'.*?'", String.Single),
821 (r';', Punctuation),
822 (r'&|\||\^|<|>', Operator),
823 (r'\s+', Text),
824 (r'\d+(?= |\Z)', Number),
825 (r'[^=\s\[\]{}()$"\'`\\<&|;]+', Text),
826 ],
827 'string': [
828 (r'"', String.Double, '#pop'),
829 (r'(?s)(\\\\|\\[0-7]+|\\.|[^"\\$])+', String.Double),
830 include('interp'),
831 ],
832 'paren': [
833 (r'\)', Keyword, '#pop'),
834 include('root'),
835 ],
836 'math': [
837 (r'\)\)', Keyword, '#pop'),
838 (r'[-+*/%^|&]|\*\*|\|\|', Operator),
839 (r'\d+#\d+', Number),
840 (r'\d+#(?! )', Number),
841 (r'\d+', Number),
842 include('root'),
843 ],
844 }
845
846class ExeclineLexer(RegexLexer):
847 """
848 Lexer for Laurent Bercot's execline language.
849 """
850
851 name = 'execline'
852 aliases = ['execline']
853 filenames = ['*.exec']
854 url = 'https://skarnet.org/software/execline'
855 version_added = '2.7'
856
857 tokens = {
858 'root': [
859 include('basic'),
860 include('data'),
861 include('interp')
862 ],
863 'interp': [
864 (r'\$\{', String.Interpol, 'curly'),
865 (r'\$[\w@#]+', Name.Variable), # user variable
866 (r'\$', Text),
867 ],
868 'basic': [
869 (r'\b(background|backtick|cd|define|dollarat|elgetopt|'
870 r'elgetpositionals|elglob|emptyenv|envfile|exec|execlineb|'
871 r'exit|export|fdblock|fdclose|fdmove|fdreserve|fdswap|'
872 r'forbacktickx|foreground|forstdin|forx|getcwd|getpid|heredoc|'
873 r'homeof|if|ifelse|ifte|ifthenelse|importas|loopwhilex|'
874 r'multidefine|multisubstitute|pipeline|piperw|posix-cd|'
875 r'redirfd|runblock|shift|trap|tryexec|umask|unexport|wait|'
876 r'withstdinas)\b', Name.Builtin),
877 (r'\A#!.+\n', Comment.Hashbang),
878 (r'#.*\n', Comment.Single),
879 (r'[{}]', Operator)
880 ],
881 'data': [
882 (r'(?s)"(\\.|[^"\\$])*"', String.Double),
883 (r'"', String.Double, 'string'),
884 (r'\s+', Text),
885 (r'[^\s{}$"\\]+', Text)
886 ],
887 'string': [
888 (r'"', String.Double, '#pop'),
889 (r'(?s)(\\\\|\\.|[^"\\$])+', String.Double),
890 include('interp'),
891 ],
892 'curly': [
893 (r'\}', String.Interpol, '#pop'),
894 (r'[\w#@]+', Name.Variable),
895 include('root')
896 ]
897
898 }
899
900 def analyse_text(text):
901 if shebang_matches(text, r'execlineb'):
902 return 1