Coverage Report

Created: 2026-04-20 06:11

next uncovered line (L), next uncovered region (R), next uncovered branch (B)
/src/cpython/Parser/pegen_errors.c
Line
Count
Source
1
#include <Python.h>
2
#include <errcode.h>
3
4
#include "pycore_pyerrors.h"      // _PyErr_ProgramDecodedTextObject()
5
#include "pycore_runtime.h"       // _Py_ID()
6
#include "pycore_tuple.h"         // _PyTuple_FromPair
7
#include "lexer/state.h"
8
#include "lexer/lexer.h"
9
#include "pegen.h"
10
11
// TOKENIZER ERRORS
12
13
void
14
_PyPegen_raise_tokenizer_init_error(PyObject *filename)
15
2.48k
{
16
2.48k
    if (!(PyErr_ExceptionMatches(PyExc_LookupError)
17
2.31k
          || PyErr_ExceptionMatches(PyExc_SyntaxError)
18
1.73k
          || PyErr_ExceptionMatches(PyExc_ValueError)
19
65
          || PyErr_ExceptionMatches(PyExc_UnicodeDecodeError))) {
20
65
        return;
21
65
    }
22
2.41k
    PyObject *errstr = NULL;
23
2.41k
    PyObject *tuple = NULL;
24
2.41k
    PyObject *type;
25
2.41k
    PyObject *value;
26
2.41k
    PyObject *tback;
27
2.41k
    PyErr_Fetch(&type, &value, &tback);
28
2.41k
    if (PyErr_GivenExceptionMatches(value, PyExc_SyntaxError)) {
29
585
        if (PyObject_SetAttr(value, &_Py_ID(filename), filename)) {
30
0
            goto error;
31
0
        }
32
585
        PyErr_Restore(type, value, tback);
33
585
        return;
34
585
    }
35
1.83k
    errstr = PyObject_Str(value);
36
1.83k
    if (!errstr) {
37
0
        goto error;
38
0
    }
39
40
1.83k
    PyObject *tmp = Py_BuildValue("(OiiO)", filename, 0, -1, Py_None);
41
1.83k
    if (!tmp) {
42
0
        goto error;
43
0
    }
44
45
1.83k
    tuple = _PyTuple_FromPair(errstr, tmp);
46
1.83k
    Py_DECREF(tmp);
47
1.83k
    if (!tuple) {
48
0
        goto error;
49
0
    }
50
1.83k
    PyErr_SetObject(PyExc_SyntaxError, tuple);
51
52
1.83k
error:
53
1.83k
    Py_XDECREF(type);
54
1.83k
    Py_XDECREF(value);
55
1.83k
    Py_XDECREF(tback);
56
1.83k
    Py_XDECREF(errstr);
57
1.83k
    Py_XDECREF(tuple);
58
1.83k
}
59
60
static inline void
61
1.79k
raise_unclosed_parentheses_error(Parser *p) {
62
1.79k
       int error_lineno = p->tok->parenlinenostack[p->tok->level-1];
63
1.79k
       int error_col = p->tok->parencolstack[p->tok->level-1];
64
1.79k
       RAISE_ERROR_KNOWN_LOCATION(p, PyExc_SyntaxError,
65
1.79k
                                  error_lineno, error_col, error_lineno, -1,
66
1.79k
                                  "'%c' was never closed",
67
1.79k
                                  p->tok->parenstack[p->tok->level-1]);
68
1.79k
}
69
70
int
71
_Pypegen_tokenizer_error(Parser *p)
72
3.73k
{
73
3.73k
    if (PyErr_Occurred()) {
74
1.91k
        return -1;
75
1.91k
    }
76
77
1.82k
    const char *msg = NULL;
78
1.82k
    PyObject* errtype = PyExc_SyntaxError;
79
1.82k
    Py_ssize_t col_offset = -1;
80
1.82k
    p->error_indicator = 1;
81
1.82k
    switch (p->tok->done) {
82
0
        case E_TOKEN:
83
0
            msg = "invalid token";
84
0
            break;
85
1.76k
        case E_EOF:
86
1.76k
            if (p->tok->level) {
87
1.72k
                raise_unclosed_parentheses_error(p);
88
1.72k
            } else {
89
42
                RAISE_SYNTAX_ERROR("unexpected EOF while parsing");
90
42
            }
91
1.76k
            return -1;
92
8
        case E_DEDENT:
93
8
            RAISE_INDENTATION_ERROR("unindent does not match any outer indentation level");
94
8
            return -1;
95
0
        case E_INTR:
96
0
            if (!PyErr_Occurred()) {
97
0
                PyErr_SetNone(PyExc_KeyboardInterrupt);
98
0
            }
99
0
            return -1;
100
0
        case E_NOMEM:
101
0
            PyErr_NoMemory();
102
0
            return -1;
103
2
        case E_TABSPACE:
104
2
            errtype = PyExc_TabError;
105
2
            msg = "inconsistent use of tabs and spaces in indentation";
106
2
            break;
107
0
        case E_TOODEEP:
108
0
            errtype = PyExc_IndentationError;
109
0
            msg = "too many levels of indentation";
110
0
            break;
111
46
        case E_LINECONT: {
112
46
            col_offset = p->tok->cur - p->tok->buf - 1;
113
46
            msg = "unexpected character after line continuation character";
114
46
            break;
115
0
        }
116
0
        case E_COLUMNOVERFLOW:
117
0
            PyErr_SetString(PyExc_OverflowError,
118
0
                    "Parser column offset overflow - source line is too big");
119
0
            return -1;
120
0
        default:
121
0
            msg = "unknown parsing error";
122
1.82k
    }
123
124
48
    RAISE_ERROR_KNOWN_LOCATION(p, errtype, p->tok->lineno,
125
48
                               col_offset >= 0 ? col_offset : 0,
126
48
                               p->tok->lineno, -1, msg);
127
48
    return -1;
128
1.82k
}
129
130
int
131
_Pypegen_raise_decode_error(Parser *p)
132
119
{
133
119
    assert(PyErr_Occurred());
134
119
    const char *errtype = NULL;
135
119
    if (PyErr_ExceptionMatches(PyExc_UnicodeError)) {
136
107
        errtype = "unicode error";
137
107
    }
138
12
    else if (PyErr_ExceptionMatches(PyExc_ValueError)) {
139
4
        errtype = "value error";
140
4
    }
141
119
    if (errtype) {
142
111
        PyObject *type;
143
111
        PyObject *value;
144
111
        PyObject *tback;
145
111
        PyObject *errstr;
146
111
        PyErr_Fetch(&type, &value, &tback);
147
111
        errstr = PyObject_Str(value);
148
111
        if (errstr) {
149
111
            RAISE_SYNTAX_ERROR("(%s) %U", errtype, errstr);
150
111
            Py_DECREF(errstr);
151
111
        }
152
0
        else {
153
0
            PyErr_Clear();
154
0
            RAISE_SYNTAX_ERROR("(%s) unknown error", errtype);
155
0
        }
156
111
        Py_XDECREF(type);
157
111
        Py_XDECREF(value);
158
111
        Py_XDECREF(tback);
159
111
    }
160
161
119
    return -1;
162
119
}
163
164
static int
165
85.8k
_PyPegen_tokenize_full_source_to_check_for_errors(Parser *p) {
166
    // Tokenize the whole input to see if there are any tokenization
167
    // errors such as mismatching parentheses. These will get priority
168
    // over generic syntax errors only if the line number of the error is
169
    // before the one that we had for the generic error.
170
171
    // We don't want to tokenize to the end for interactive input
172
85.8k
    if (p->tok->prompt != NULL) {
173
0
        return 0;
174
0
    }
175
176
85.8k
    PyObject *type, *value, *traceback;
177
85.8k
    PyErr_Fetch(&type, &value, &traceback);
178
179
85.8k
    Token *current_token = p->known_err_token != NULL ? p->known_err_token : p->tokens[p->fill - 1];
180
85.8k
    Py_ssize_t current_err_line = current_token->lineno;
181
182
85.8k
    int ret = 0;
183
85.8k
    struct token new_token;
184
85.8k
    _PyToken_Init(&new_token);
185
186
364k
    for (;;) {
187
364k
        switch (_PyTokenizer_Get(p->tok, &new_token)) {
188
2.80k
            case ERRORTOKEN:
189
2.80k
                if (PyErr_Occurred()) {
190
593
                    ret = -1;
191
593
                    goto exit;
192
593
                }
193
2.21k
                if (p->tok->level != 0) {
194
2.18k
                    int error_lineno = p->tok->parenlinenostack[p->tok->level-1];
195
2.18k
                    if (current_err_line > error_lineno) {
196
73
                        raise_unclosed_parentheses_error(p);
197
73
                        ret = -1;
198
73
                        goto exit;
199
73
                    }
200
2.18k
                }
201
2.13k
                break;
202
83.0k
            case ENDMARKER:
203
83.0k
                break;
204
278k
            default:
205
278k
                continue;
206
364k
        }
207
85.2k
        break;
208
364k
    }
209
210
211
85.8k
exit:
212
85.8k
    _PyToken_Free(&new_token);
213
    // If we're in an f-string, we want the syntax error in the expression part
214
    // to propagate, so that tokenizer errors (like expecting '}') that happen afterwards
215
    // do not swallow it.
216
85.8k
    if (PyErr_Occurred() && p->tok->tok_mode_stack_index <= 0) {
217
479
        Py_XDECREF(value);
218
479
        Py_XDECREF(type);
219
479
        Py_XDECREF(traceback);
220
85.4k
    } else {
221
85.4k
        PyErr_Restore(type, value, traceback);
222
85.4k
    }
223
85.8k
    return ret;
224
85.8k
}
225
226
// PARSER ERRORS
227
228
void *
229
_PyPegen_raise_error(Parser *p, PyObject *errtype, int use_mark, const char *errmsg, ...)
230
998
{
231
    // Bail out if we already have an error set.
232
998
    if (p->error_indicator && PyErr_Occurred()) {
233
121
        return NULL;
234
121
    }
235
877
    if (p->fill == 0) {
236
0
        va_list va;
237
0
        va_start(va, errmsg);
238
0
        _PyPegen_raise_error_known_location(p, errtype, 0, 0, 0, -1, errmsg, va);
239
0
        va_end(va);
240
0
        return NULL;
241
0
    }
242
877
    if (use_mark && p->mark == p->fill && _PyPegen_fill_token(p) < 0) {
243
0
        p->error_indicator = 1;
244
0
        return NULL;
245
0
    }
246
877
    Token *t = p->known_err_token != NULL
247
877
                   ? p->known_err_token
248
877
                   : p->tokens[use_mark ? p->mark : p->fill - 1];
249
877
    Py_ssize_t col_offset;
250
877
    Py_ssize_t end_col_offset = -1;
251
877
    if (t->col_offset == -1) {
252
242
        if (p->tok->cur == p->tok->buf) {
253
4
            col_offset = 0;
254
238
        } else {
255
238
            const char* start = p->tok->buf  ? p->tok->line_start : p->tok->buf;
256
238
            col_offset = Py_SAFE_DOWNCAST(p->tok->cur - start, intptr_t, int);
257
238
        }
258
635
    } else {
259
635
        col_offset = t->col_offset + 1;
260
635
    }
261
262
877
    if (t->end_col_offset != -1) {
263
635
        end_col_offset = t->end_col_offset + 1;
264
635
    }
265
266
877
    va_list va;
267
877
    va_start(va, errmsg);
268
877
    _PyPegen_raise_error_known_location(p, errtype, t->lineno, col_offset, t->end_lineno, end_col_offset, errmsg, va);
269
877
    va_end(va);
270
271
877
    return NULL;
272
877
}
273
274
static PyObject *
275
get_error_line_from_tokenizer_buffers(Parser *p, Py_ssize_t lineno)
276
211
{
277
    /* If the file descriptor is interactive, the source lines of the current
278
     * (multi-line) statement are stored in p->tok->interactive_src_start.
279
     * If not, we're parsing from a string, which means that the whole source
280
     * is stored in p->tok->str. */
281
211
    assert((p->tok->fp == NULL && p->tok->str != NULL) || p->tok->fp != NULL);
282
283
211
    char *cur_line = p->tok->fp_interactive ? p->tok->interactive_src_start : p->tok->str;
284
211
    if (cur_line == NULL) {
285
0
        assert(p->tok->fp_interactive);
286
        // We can reach this point if the tokenizer buffers for interactive source have not been
287
        // initialized because we failed to decode the original source with the given locale.
288
0
        return Py_GetConstant(Py_CONSTANT_EMPTY_STR);
289
0
    }
290
291
211
    Py_ssize_t relative_lineno = p->starting_lineno ? lineno - p->starting_lineno + 1 : lineno;
292
211
    const char* buf_end = p->tok->fp_interactive ? p->tok->interactive_src_end : p->tok->inp;
293
294
211
    if (buf_end < cur_line) {
295
0
        buf_end = cur_line + strlen(cur_line);
296
0
    }
297
298
3.38k
    for (int i = 0; i < relative_lineno - 1; i++) {
299
3.17k
        char *new_line = strchr(cur_line, '\n');
300
        // The assert is here for debug builds but the conditional that
301
        // follows is there so in release builds we do not crash at the cost
302
        // to report a potentially wrong line.
303
3.17k
        assert(new_line != NULL && new_line + 1 < buf_end);
304
3.17k
        if (new_line == NULL || new_line + 1 > buf_end) {
305
0
            break;
306
0
        }
307
3.17k
        cur_line = new_line + 1;
308
3.17k
    }
309
310
211
    char *next_newline;
311
211
    if ((next_newline = strchr(cur_line, '\n')) == NULL) { // This is the last line
312
0
        next_newline = cur_line + strlen(cur_line);
313
0
    }
314
211
    return PyUnicode_DecodeUTF8(cur_line, next_newline - cur_line, "replace");
315
211
}
316
317
void *
318
_PyPegen_raise_error_known_location(Parser *p, PyObject *errtype,
319
                                    Py_ssize_t lineno, Py_ssize_t col_offset,
320
                                    Py_ssize_t end_lineno, Py_ssize_t end_col_offset,
321
                                    const char *errmsg, va_list va)
322
88.3k
{
323
    // Bail out if we already have an error set.
324
88.3k
    if (p->error_indicator && PyErr_Occurred()) {
325
422
        return NULL;
326
422
    }
327
87.9k
    PyObject *value = NULL;
328
87.9k
    PyObject *errstr = NULL;
329
87.9k
    PyObject *error_line = NULL;
330
87.9k
    PyObject *tmp = NULL;
331
87.9k
    p->error_indicator = 1;
332
333
87.9k
    if (end_lineno == CURRENT_POS) {
334
26
        end_lineno = p->tok->lineno;
335
26
    }
336
87.9k
    if (end_col_offset == CURRENT_POS) {
337
26
        end_col_offset = p->tok->cur - p->tok->line_start;
338
26
    }
339
340
87.9k
    errstr = PyUnicode_FromFormatV(errmsg, va);
341
87.9k
    if (!errstr) {
342
0
        goto error;
343
0
    }
344
345
87.9k
    if (p->tok->fp_interactive && p->tok->interactive_src_start != NULL) {
346
0
        error_line = get_error_line_from_tokenizer_buffers(p, lineno);
347
0
    }
348
87.9k
    else if (p->start_rule == Py_file_input) {
349
87.9k
        error_line = _PyErr_ProgramDecodedTextObject(p->tok->filename,
350
87.9k
                                                     (int) lineno, p->tok->encoding);
351
87.9k
    }
352
353
87.9k
    if (!error_line) {
354
        /* PyErr_ProgramTextObject was not called or returned NULL. If it was not called,
355
           then we need to find the error line from some other source, because
356
           p->start_rule != Py_file_input. If it returned NULL, then it either unexpectedly
357
           failed or we're parsing from a string or the REPL. There's a third edge case where
358
           we're actually parsing from a file, which has an E_EOF SyntaxError and in that case
359
           `PyErr_ProgramTextObject` fails because lineno points to last_file_line + 1, which
360
           does not physically exist */
361
87.9k
        assert(p->tok->fp == NULL || p->tok->fp == stdin || p->tok->done == E_EOF);
362
363
87.9k
        if (p->tok->lineno <= lineno && p->tok->inp > p->tok->buf) {
364
87.7k
            Py_ssize_t size = p->tok->inp - p->tok->line_start;
365
87.7k
            error_line = PyUnicode_DecodeUTF8(p->tok->line_start, size, "replace");
366
87.7k
        }
367
211
        else if (p->tok->fp == NULL || p->tok->fp == stdin) {
368
211
            error_line = get_error_line_from_tokenizer_buffers(p, lineno);
369
211
        }
370
0
        else {
371
0
            error_line = Py_GetConstant(Py_CONSTANT_EMPTY_STR);
372
0
        }
373
87.9k
        if (!error_line) {
374
0
            goto error;
375
0
        }
376
87.9k
    }
377
378
87.9k
    Py_ssize_t col_number = col_offset;
379
87.9k
    Py_ssize_t end_col_number = end_col_offset;
380
381
87.9k
    col_number = _PyPegen_byte_offset_to_character_offset(error_line, col_offset);
382
87.9k
    if (col_number < 0) {
383
0
        goto error;
384
0
    }
385
386
87.9k
    if (end_col_offset > 0) {
387
85.8k
        end_col_number = _PyPegen_byte_offset_to_character_offset(error_line, end_col_offset);
388
85.8k
        if (end_col_number < 0) {
389
0
            goto error;
390
0
        }
391
85.8k
    }
392
393
87.9k
    tmp = Py_BuildValue("(OnnNnn)", p->tok->filename, lineno, col_number, error_line, end_lineno, end_col_number);
394
87.9k
    if (!tmp) {
395
0
        goto error;
396
0
    }
397
87.9k
    value = _PyTuple_FromPair(errstr, tmp);
398
87.9k
    Py_DECREF(tmp);
399
87.9k
    if (!value) {
400
0
        goto error;
401
0
    }
402
87.9k
    PyErr_SetObject(errtype, value);
403
404
87.9k
    Py_DECREF(errstr);
405
87.9k
    Py_DECREF(value);
406
87.9k
    return NULL;
407
408
0
error:
409
0
    Py_XDECREF(errstr);
410
0
    Py_XDECREF(error_line);
411
0
    return NULL;
412
87.9k
}
413
414
void
415
89.8k
_Pypegen_set_syntax_error(Parser* p, Token* last_token) {
416
    // Existing syntax error
417
89.8k
    if (PyErr_Occurred()) {
418
        // Prioritize tokenizer errors to custom syntax errors raised
419
        // on the second phase only if the errors come from the parser.
420
5.57k
        int is_tok_ok = (p->tok->done == E_DONE || p->tok->done == E_OK);
421
5.57k
        if (is_tok_ok && PyErr_ExceptionMatches(PyExc_SyntaxError)) {
422
1.69k
            _PyPegen_tokenize_full_source_to_check_for_errors(p);
423
1.69k
        }
424
        // Propagate the existing syntax error.
425
5.57k
        return;
426
5.57k
    }
427
    // Initialization error
428
84.2k
    if (p->fill == 0) {
429
0
        RAISE_SYNTAX_ERROR("error at start before reading any input");
430
0
    }
431
    // Parser encountered EOF (End of File) unexpectedtly
432
84.2k
    if (last_token->type == ERRORTOKEN && p->tok->done == E_EOF) {
433
0
        if (p->tok->level) {
434
0
            raise_unclosed_parentheses_error(p);
435
0
        } else {
436
0
            RAISE_SYNTAX_ERROR("unexpected EOF while parsing");
437
0
        }
438
0
        return;
439
0
    }
440
    // Indentation error in the tokenizer
441
84.2k
    if (last_token->type == INDENT || last_token->type == DEDENT) {
442
89
        RAISE_INDENTATION_ERROR(last_token->type == INDENT ? "unexpected indent" : "unexpected unindent");
443
89
        return;
444
89
    }
445
    // Unknown error (generic case)
446
447
    // Use the last token we found on the first pass to avoid reporting
448
    // incorrect locations for generic syntax errors just because we reached
449
    // further away when trying to find specific syntax errors in the second
450
    // pass.
451
84.1k
    RAISE_SYNTAX_ERROR_KNOWN_LOCATION(last_token, "invalid syntax");
452
    // _PyPegen_tokenize_full_source_to_check_for_errors will override the existing
453
    // generic SyntaxError we just raised if errors are found.
454
84.1k
    _PyPegen_tokenize_full_source_to_check_for_errors(p);
455
84.1k
}
456
457
void
458
_Pypegen_stack_overflow(Parser *p)
459
50
{
460
50
    p->error_indicator = 1;
461
50
    PyErr_SetString(PyExc_MemoryError,
462
50
        "Parser stack overflowed - Python source too complex to parse");
463
50
}