Coverage for /pythoncovmergedfiles/medio/medio/usr/local/lib/python3.11/site-packages/pygments/lexers/algebra.py: 63%


84 statements  

1""" 

2 pygments.lexers.algebra 

3 ~~~~~~~~~~~~~~~~~~~~~~~ 

4 

5 Lexers for computer algebra systems. 

6 

7 :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS. 

8 :license: BSD, see LICENSE for details. 

9""" 

10 

11import re 

12 

13from pygments.lexer import Lexer, RegexLexer, bygroups, do_insertions, words 

14from pygments.token import Text, Comment, Operator, Keyword, Name, String, \ 

15 Number, Punctuation, Generic, Whitespace 

16 

17__all__ = ['GAPLexer', 'GAPConsoleLexer', 'MathematicaLexer', 'MuPADLexer', 

18 'BCLexer'] 

19 

20 

class GAPLexer(RegexLexer):
    """
    For GAP source code.
    """
    name = 'GAP'
    url = 'https://www.gap-system.org'
    aliases = ['gap']
    filenames = ['*.g', '*.gd', '*.gi', '*.gap']
    version_added = '2.0'

    tokens = {
        'root': [
            (r'#.*$', Comment.Single),
            (r'"(?:[^"\\]|\\.)*"', String),
            (r'\(|\)|\[|\]|\{|\}', Punctuation),
            (r'''(?x)\b(?:
                if|then|elif|else|fi|
                for|while|do|od|
                repeat|until|
                break|continue|
                function|local|return|end|
                rec|
                quit|QUIT|
                IsBound|Unbind|
                TryNextMethod|
                Info|Assert
              )\b''', Keyword),
            (r'''(?x)\b(?:
                true|false|fail|infinity
              )\b''',
             Name.Constant),
            (r'''(?x)\b(?:
                (Declare|Install)([A-Z][A-Za-z]+)|
                BindGlobal|BIND_GLOBAL
              )\b''',
             Name.Builtin),
            (r'\.|,|:=|;|=|\+|-|\*|/|\^|>|<', Operator),
            (r'''(?x)\b(?:
                and|or|not|mod|in
              )\b''',
             Operator.Word),
            (r'''(?x)
              (?:\w+|`[^`]*`)
              (?:::\w+|`[^`]*`)*''', Name.Variable),
            (r'[0-9]+(?:\.[0-9]*)?(?:e[0-9]+)?', Number),
            (r'\.[0-9]+(?:e[0-9]+)?', Number),
            (r'.', Text)
        ],
    }

    def analyse_text(text):
        score = 0.0

        # Declaration part
        if re.search(
            r"(InstallTrueMethod|Declare(Attribute|Category|Filter|Operation" +
            r"|GlobalFunction|Synonym|SynonymAttr|Property))", text
        ):
            score += 0.7

        # Implementation part
        if re.search(
            r"(DeclareRepresentation|Install(GlobalFunction|Method|" +
            r"ImmediateMethod|OtherMethod)|New(Family|Type)|Objectify)", text
        ):
            score += 0.7

        return min(score, 1.0)
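

# Usage sketch (not part of the upstream module): GAPLexer plugs into the
# standard Pygments highlight() pipeline like any other lexer. The helper
# name and the GAP snippet below are invented for illustration.
def _demo_gap_lexer():
    from pygments import highlight
    from pygments.formatters import HtmlFormatter
    code = 'Square := function(x) return x^2; end;;'
    return highlight(code, GAPLexer(), HtmlFormatter())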



class GAPConsoleLexer(Lexer):
    """
    For GAP console sessions. Modeled after JuliaConsoleLexer.
    """
    name = 'GAP session'
    aliases = ['gap-console', 'gap-repl']
    filenames = ['*.tst']
    url = 'https://www.gap-system.org'
    version_added = '2.14'
    _example = "gap-repl/euclidean.tst"

    def get_tokens_unprocessed(self, text):
        gaplexer = GAPLexer(**self.options)
        start = 0
        curcode = ''
        insertions = []
        output = False
        error = False

        for line in text.splitlines(keepends=True):
            if line.startswith('gap> ') or line.startswith('brk> '):
                insertions.append((len(curcode), [(0, Generic.Prompt, line[:5])]))
                curcode += line[5:]
                output = False
                error = False
            elif not output and line.startswith('> '):
                insertions.append((len(curcode), [(0, Generic.Prompt, line[:2])]))
                curcode += line[2:]
            else:
                if curcode:
                    yield from do_insertions(
                        insertions, gaplexer.get_tokens_unprocessed(curcode))
                    curcode = ''
                    insertions = []
                if line.startswith('Error, ') or error:
                    yield start, Generic.Error, line
                    error = True
                else:
                    yield start, Generic.Output, line
                    output = True
            start += len(line)

        if curcode:
            yield from do_insertions(
                insertions, gaplexer.get_tokens_unprocessed(curcode))

    # the following is needed to distinguish Scilab and GAP .tst files
    def analyse_text(text):
        # GAP prompts are a dead giveaway, although hypothetically a file in
        # another language could be comparing a variable "gap", as in
        # "gap> 0.1". But that this should happen at the start of a line
        # seems unlikely...
        if re.search(r"^gap> ", text):
            return 0.9
        else:
            return 0.0
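

# Usage sketch (not part of the upstream module): the console lexer tags the
# 'gap> ' and 'brk> ' prompts as Generic.Prompt, routes the input through
# GAPLexer, and marks everything else as output or error. The helper name and
# session text are invented for illustration.
def _demo_gap_console_lexer():
    session = 'gap> 1 + 1;\n2\n'
    return list(GAPConsoleLexer().get_tokens(session))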



class MathematicaLexer(RegexLexer):
    """
    Lexer for Mathematica source code.
    """
    name = 'Mathematica'
    url = 'http://www.wolfram.com/mathematica/'
    aliases = ['mathematica', 'mma', 'nb']
    filenames = ['*.nb', '*.cdf', '*.nbp', '*.ma']
    mimetypes = ['application/mathematica',
                 'application/vnd.wolfram.mathematica',
                 'application/vnd.wolfram.mathematica.package',
                 'application/vnd.wolfram.cdf']
    version_added = '2.0'

    # http://reference.wolfram.com/mathematica/guide/Syntax.html
    operators = (
        ";;", "=", "=.", "!=", "==", ":=", "->", ":>", "/.", "+", "-", "*",
        "/", "^", "&&", "||", "!", "<>", "|", "/;", "?", "@", "//", "/@", "@@",
        "@@@", "~~", "===", "&", "<", ">", "<=", ">=",
    )

    punctuation = (",", ";", "(", ")", "[", "]", "{", "}")

    def _multi_escape(entries):
        return '({})'.format('|'.join(re.escape(entry) for entry in entries))
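
    # For illustration: _multi_escape(('+', '*')) evaluates to r'(\+|\*)'.
    # The token rules below use words() instead, so this helper is not
    # referenced elsewhere in this file.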


    tokens = {
        'root': [
            (r'(?s)\(\*.*?\*\)', Comment),

            (r'([a-zA-Z]+[A-Za-z0-9]*`)', Name.Namespace),
            (r'([A-Za-z0-9]*_+[A-Za-z0-9]*)', Name.Variable),
            (r'#\d*', Name.Variable),
            (r'([a-zA-Z]+[a-zA-Z0-9]*)', Name),

            (r'-?\d+\.\d*', Number.Float),
            (r'-?\d*\.\d+', Number.Float),
            (r'-?\d+', Number.Integer),

            (words(operators), Operator),
            (words(punctuation), Punctuation),
            (r'".*?"', String),
            (r'\s+', Text.Whitespace),
        ],
    }
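

# Usage sketch (not part of the upstream module): tokenising a small
# Mathematica definition with the lexer above; `f[x_] := x^2 + 1` exercises
# the pattern-variable, operator and number rules. The helper name and input
# are invented for illustration.
def _demo_mathematica_lexer():
    code = 'f[x_] := x^2 + 1'
    return list(MathematicaLexer().get_tokens(code))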



class MuPADLexer(RegexLexer):
    """
    A MuPAD lexer.
    Contributed by Christopher Creutzig <christopher@creutzig.de>.
    """
    name = 'MuPAD'
    url = 'http://www.mupad.com'
    aliases = ['mupad']
    filenames = ['*.mu']
    version_added = '0.8'

    tokens = {
        'root': [
            (r'//.*?$', Comment.Single),
            (r'/\*', Comment.Multiline, 'comment'),
            (r'"(?:[^"\\]|\\.)*"', String),
            (r'\(|\)|\[|\]|\{|\}', Punctuation),
            (r'''(?x)\b(?:
                next|break|end|
                axiom|end_axiom|category|end_category|domain|end_domain|inherits|
                if|%if|then|elif|else|end_if|
                case|of|do|otherwise|end_case|
                while|end_while|
                repeat|until|end_repeat|
                for|from|to|downto|step|end_for|
                proc|local|option|save|begin|end_proc|
                delete|frame
              )\b''', Keyword),
            (r'''(?x)\b(?:
                DOM_ARRAY|DOM_BOOL|DOM_COMPLEX|DOM_DOMAIN|DOM_EXEC|DOM_EXPR|
                DOM_FAIL|DOM_FLOAT|DOM_FRAME|DOM_FUNC_ENV|DOM_HFARRAY|DOM_IDENT|
                DOM_INT|DOM_INTERVAL|DOM_LIST|DOM_NIL|DOM_NULL|DOM_POLY|DOM_PROC|
                DOM_PROC_ENV|DOM_RAT|DOM_SET|DOM_STRING|DOM_TABLE|DOM_VAR
              )\b''', Name.Class),
            (r'''(?x)\b(?:
                PI|EULER|E|CATALAN|
                NIL|FAIL|undefined|infinity|
                TRUE|FALSE|UNKNOWN
              )\b''',
             Name.Constant),
            (r'\b(?:dom|procname)\b', Name.Builtin.Pseudo),
            (r'\.|,|:|;|=|\+|-|\*|/|\^|@|>|<|\$|\||!|\'|%|~=', Operator),
            (r'''(?x)\b(?:
                and|or|not|xor|
                assuming|
                div|mod|
                union|minus|intersect|in|subset
              )\b''',
             Operator.Word),
            (r'\b(?:I|RDN_INF|RD_NINF|RD_NAN)\b', Number),
            # (r'\b(?:adt|linalg|newDomain|hold)\b', Name.Builtin),
            (r'''(?x)
              ((?:[a-zA-Z_#][\w#]*|`[^`]*`)
              (?:::[a-zA-Z_#][\w#]*|`[^`]*`)*)(\s*)([(])''',
             bygroups(Name.Function, Text, Punctuation)),
            (r'''(?x)
              (?:[a-zA-Z_#][\w#]*|`[^`]*`)
              (?:::[a-zA-Z_#][\w#]*|`[^`]*`)*''', Name.Variable),
            (r'[0-9]+(?:\.[0-9]*)?(?:e[0-9]+)?', Number),
            (r'\.[0-9]+(?:e[0-9]+)?', Number),
            (r'\s+', Whitespace),
            (r'.', Text)
        ],
        'comment': [
            (r'[^/*]+', Comment.Multiline),
            (r'/\*', Comment.Multiline, '#push'),
            (r'\*/', Comment.Multiline, '#pop'),
            (r'[*/]', Comment.Multiline)
        ],
    }
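

# Usage sketch (not part of the upstream module): a MuPAD procedure written
# with proc ... begin ... end_proc, which the keyword rule above recognises.
# The helper name and snippet are invented for illustration.
def _demo_mupad_lexer():
    code = 'square := proc(x) begin x^2 end_proc:'
    return list(MuPADLexer().get_tokens(code))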



class BCLexer(RegexLexer):
    """
    A BC lexer.
    """
    name = 'BC'
    url = 'https://www.gnu.org/software/bc/'
    aliases = ['bc']
    filenames = ['*.bc']
    version_added = '2.1'

    tokens = {
        'root': [
            (r'/\*', Comment.Multiline, 'comment'),
            (r'"(?:[^"\\]|\\.)*"', String),
            (r'[{}();,]', Punctuation),
            (words(('if', 'else', 'while', 'for', 'break', 'continue',
                    'halt', 'return', 'define', 'auto', 'print', 'read',
                    'length', 'scale', 'sqrt', 'limits', 'quit',
                    'warranty'), suffix=r'\b'), Keyword),
            (r'\+\+|--|\|\||&&|'
             r'([-<>+*%\^/!=])=?', Operator),
            # bc doesn't support exponential notation
            (r'[0-9]+(\.[0-9]*)?', Number),
            (r'\.[0-9]+', Number),
            (r'.', Text)
        ],
        'comment': [
            (r'[^*/]+', Comment.Multiline),
            (r'\*/', Comment.Multiline, '#pop'),
            (r'[*/]', Comment.Multiline)
        ],
    }
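

# Usage sketch (not part of the upstream module): highlighting a small bc
# program on the terminal via the standard Pygments highlight() entry point.
# The helper name and program text are invented for illustration.
def _demo_bc_lexer():
    from pygments import highlight
    from pygments.formatters import TerminalFormatter
    code = 'define square(x) { return x*x }\nsquare(7)\n'
    return highlight(code, BCLexer(), TerminalFormatter())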