Coverage for /pythoncovmergedfiles/medio/medio/usr/local/lib/python3.8/site-packages/pygments/lexers/make.py: 83%

47 statements  

coverage.py v7.2.7, created at 2023-07-01 06:54 +0000

1""" 

2 pygments.lexers.make 

3 ~~~~~~~~~~~~~~~~~~~~ 

4 

5 Lexers for Makefiles and similar. 

6 

7 :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS. 

8 :license: BSD, see LICENSE for details. 

9""" 

10 

11import re 

12 

13from pygments.lexer import Lexer, RegexLexer, include, bygroups, \ 

14 do_insertions, using 

15from pygments.token import Text, Comment, Operator, Keyword, Name, String, \ 

16 Punctuation, Whitespace 

17from pygments.lexers.shell import BashLexer 

18 

19__all__ = ['MakefileLexer', 'BaseMakefileLexer', 'CMakeLexer'] 

20 

21 

class MakefileLexer(Lexer):
    """
    Lexer for BSD and GNU make extensions (lenient enough to handle both in
    the same file even).

    *Rewritten in Pygments 0.10.*
    """

    name = 'Makefile'
    aliases = ['make', 'makefile', 'mf', 'bsdmake']
    filenames = ['*.mak', '*.mk', 'Makefile', 'makefile', 'Makefile.*', 'GNUmakefile']
    mimetypes = ['text/x-makefile']

    r_special = re.compile(
        r'^(?:'
        # BSD Make
        r'\.\s*(include|undef|error|warning|if|else|elif|endif|for|endfor)|'
        # GNU Make
        r'\s*(ifeq|ifneq|ifdef|ifndef|else|endif|-?include|define|endef|:|vpath)|'
        # GNU Automake
        r'\s*(if|else|endif))(?=\s)')
    r_comment = re.compile(r'^\s*@?#')

    def get_tokens_unprocessed(self, text):
        ins = []
        lines = text.splitlines(keepends=True)
        done = ''
        lex = BaseMakefileLexer(**self.options)
        backslashflag = False
        for line in lines:
            if self.r_special.match(line) or backslashflag:
                ins.append((len(done), [(0, Comment.Preproc, line)]))
                backslashflag = line.strip().endswith('\\')
            elif self.r_comment.match(line):
                ins.append((len(done), [(0, Comment, line)]))
            else:
                done += line
        yield from do_insertions(ins, lex.get_tokens_unprocessed(done))

    def analyse_text(text):
        # Many makefiles have $(BIG_CAPS) style variables
        if re.search(r'\$\([A-Z_]+\)', text):
            return 0.1
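# A minimal usage sketch (illustrative only, not part of the module above;
# the helper name and the sample makefile text are invented). It shows the
# split performed by MakefileLexer.get_tokens_unprocessed(): "ifeq"/"endif"
# lines come back as Comment.Preproc, while the remaining lines are handed
# to BaseMakefileLexer and merged back in via do_insertions().
def _example_makefile_tokens():
    sample = (
        "ifeq ($(CC),gcc)\n"
        "CFLAGS += -Wall\n"
        "endif\n"
        "all: main.o\n"
        "\tcc -o app main.o\n"
    )
    # get_tokens() yields (token_type, value) pairs.
    for token_type, value in MakefileLexer().get_tokens(sample):
        print(token_type, repr(value))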

class BaseMakefileLexer(RegexLexer):
    """
    Lexer for simple Makefiles (no preprocessing).

    .. versionadded:: 0.10
    """

    name = 'Base Makefile'
    aliases = ['basemake']
    filenames = []
    mimetypes = []

    tokens = {
        'root': [
            # recipes (need to allow spaces because of expandtabs)
            (r'^(?:[\t ]+.*\n|\n)+', using(BashLexer)),
            # special variables
            (r'\$[<@$+%?|*]', Keyword),
            (r'\s+', Whitespace),
            (r'#.*?\n', Comment),
            (r'((?:un)?export)(\s+)(?=[\w${}\t -]+\n)',
             bygroups(Keyword, Whitespace), 'export'),
            (r'(?:un)?export\s+', Keyword),
            # assignment
            (r'([\w${}().-]+)(\s*)([!?:+]?=)([ \t]*)((?:.*\\\n)+|.*\n)',
             bygroups(
                 Name.Variable, Whitespace, Operator, Whitespace,
                 using(BashLexer))),
            # strings
            (r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
            (r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
            # targets
            (r'([^\n:]+)(:+)([ \t]*)', bygroups(
                Name.Function, Operator, Whitespace),
                'block-header'),
            # expansions
            (r'\$\(', Keyword, 'expansion'),
        ],
        'expansion': [
            (r'[^\w$().-]+', Text),
            (r'[\w.-]+', Name.Variable),
            (r'\$', Keyword),
            (r'\(', Keyword, '#push'),
            (r'\)', Keyword, '#pop'),
        ],
        'export': [
            (r'[\w${}-]+', Name.Variable),
            (r'\n', Text, '#pop'),
            (r'\s+', Whitespace),
        ],
        'block-header': [
            (r'[,|]', Punctuation),
            (r'#.*?\n', Comment, '#pop'),
            (r'\\\n', Text),  # line continuation
            (r'\$\(', Keyword, 'expansion'),
            (r'[a-zA-Z_]+', Name),
            (r'\n', Whitespace, '#pop'),
            (r'.', Text),
        ],
    }
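# A small sketch (illustrative only; the helper name and input line are
# invented) of what the token table above produces. The assignment rule in
# 'root' splits "CFLAGS = -O2 -g" into Name.Variable / Whitespace / Operator /
# Whitespace and hands the right-hand side to BashLexer via using(BashLexer).
def _example_basemake_assignment():
    for token_type, value in BaseMakefileLexer().get_tokens("CFLAGS = -O2 -g\n"):
        print(token_type, repr(value))
    # The output is expected to include, among others:
    #   Token.Name.Variable 'CFLAGS'   and   Token.Operator '='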

class CMakeLexer(RegexLexer):
    """
    Lexer for CMake files.

    .. versionadded:: 1.2
    """
    name = 'CMake'
    url = 'https://cmake.org/documentation/'
    aliases = ['cmake']
    filenames = ['*.cmake', 'CMakeLists.txt']
    mimetypes = ['text/x-cmake']

    tokens = {
        'root': [
            # (r'(ADD_CUSTOM_COMMAND|ADD_CUSTOM_TARGET|ADD_DEFINITIONS|'
            #  r'ADD_DEPENDENCIES|ADD_EXECUTABLE|ADD_LIBRARY|ADD_SUBDIRECTORY|'
            #  r'ADD_TEST|AUX_SOURCE_DIRECTORY|BUILD_COMMAND|BUILD_NAME|'
            #  r'CMAKE_MINIMUM_REQUIRED|CONFIGURE_FILE|CREATE_TEST_SOURCELIST|'
            #  r'ELSE|ELSEIF|ENABLE_LANGUAGE|ENABLE_TESTING|ENDFOREACH|'
            #  r'ENDFUNCTION|ENDIF|ENDMACRO|ENDWHILE|EXEC_PROGRAM|'
            #  r'EXECUTE_PROCESS|EXPORT_LIBRARY_DEPENDENCIES|FILE|FIND_FILE|'
            #  r'FIND_LIBRARY|FIND_PACKAGE|FIND_PATH|FIND_PROGRAM|FLTK_WRAP_UI|'
            #  r'FOREACH|FUNCTION|GET_CMAKE_PROPERTY|GET_DIRECTORY_PROPERTY|'
            #  r'GET_FILENAME_COMPONENT|GET_SOURCE_FILE_PROPERTY|'
            #  r'GET_TARGET_PROPERTY|GET_TEST_PROPERTY|IF|INCLUDE|'
            #  r'INCLUDE_DIRECTORIES|INCLUDE_EXTERNAL_MSPROJECT|'
            #  r'INCLUDE_REGULAR_EXPRESSION|INSTALL|INSTALL_FILES|'
            #  r'INSTALL_PROGRAMS|INSTALL_TARGETS|LINK_DIRECTORIES|'
            #  r'LINK_LIBRARIES|LIST|LOAD_CACHE|LOAD_COMMAND|MACRO|'
            #  r'MAKE_DIRECTORY|MARK_AS_ADVANCED|MATH|MESSAGE|OPTION|'
            #  r'OUTPUT_REQUIRED_FILES|PROJECT|QT_WRAP_CPP|QT_WRAP_UI|REMOVE|'
            #  r'REMOVE_DEFINITIONS|SEPARATE_ARGUMENTS|SET|'
            #  r'SET_DIRECTORY_PROPERTIES|SET_SOURCE_FILES_PROPERTIES|'
            #  r'SET_TARGET_PROPERTIES|SET_TESTS_PROPERTIES|SITE_NAME|'
            #  r'SOURCE_GROUP|STRING|SUBDIR_DEPENDS|SUBDIRS|'
            #  r'TARGET_LINK_LIBRARIES|TRY_COMPILE|TRY_RUN|UNSET|'
            #  r'USE_MANGLED_MESA|UTILITY_SOURCE|VARIABLE_REQUIRES|'
            #  r'VTK_MAKE_INSTANTIATOR|VTK_WRAP_JAVA|VTK_WRAP_PYTHON|'
            #  r'VTK_WRAP_TCL|WHILE|WRITE_FILE|'
            #  r'COUNTARGS)\b', Name.Builtin, 'args'),
            (r'\b(\w+)([ \t]*)(\()', bygroups(Name.Builtin, Whitespace,
                                              Punctuation), 'args'),
            include('keywords'),
            include('ws')
        ],
        'args': [
            (r'\(', Punctuation, '#push'),
            (r'\)', Punctuation, '#pop'),
            (r'(\$\{)(.+?)(\})', bygroups(Operator, Name.Variable, Operator)),
            (r'(\$ENV\{)(.+?)(\})', bygroups(Operator, Name.Variable, Operator)),
            (r'(\$<)(.+?)(>)', bygroups(Operator, Name.Variable, Operator)),
            (r'(?s)".*?"', String.Double),
            (r'\\\S+', String),
            (r'[^)$"# \t\n]+', String),
            (r'\n', Whitespace),  # explicitly legal
            include('keywords'),
            include('ws')
        ],
        'string': [

        ],
        'keywords': [
            (r'\b(WIN32|UNIX|APPLE|CYGWIN|BORLAND|MINGW|MSVC|MSVC_IDE|MSVC60|'
             r'MSVC70|MSVC71|MSVC80|MSVC90)\b', Keyword),
        ],
        'ws': [
            (r'[ \t]+', Whitespace),
            (r'#\[(?P<level>=*)\[[\w\W]*?\](?P=level)\]', Comment),
            (r'#.*\n', Comment),
        ]
    }

    def analyse_text(text):
        exp = (
            r'^[ \t]*CMAKE_MINIMUM_REQUIRED[ \t]*'
            r'\([ \t]*VERSION[ \t]*\d+(\.\d+)*[ \t]*'
            r'([ \t]FATAL_ERROR)?[ \t]*\)[ \t]*'
            r'(#[^\n]*)?$'
        )
        if re.search(exp, text, flags=re.MULTILINE | re.IGNORECASE):
            return 0.8
        return 0.0
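# A short sketch (illustrative only; the helper name and the CMake snippet are
# invented) of how analyse_text() above feeds into pygments.lexers.guess_lexer(),
# plus explicit highlighting with CMakeLexer. NullFormatter simply echoes the
# lexed text, which keeps the example output readable.
def _example_cmake_guess():
    from pygments import highlight
    from pygments.formatters import NullFormatter
    from pygments.lexers import guess_lexer

    snippet = "cmake_minimum_required(VERSION 3.16)\nproject(demo C)\n"

    # analyse_text() scores this snippet at 0.8, so guess_lexer() should
    # normally return a CMakeLexer instance for it.
    print(type(guess_lexer(snippet)).__name__)

    print(highlight(snippet, CMakeLexer(), NullFormatter()))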