Coverage Report

Created: 2025-07-18 06:09

/src/cpython/Parser/pegen_errors.c
Line |  Count | Source
-----+--------+------------------------------------------------------------------
   1 |        | #include <Python.h>
   2 |        | #include <errcode.h>
   3 |        |
   4 |        | #include "pycore_pyerrors.h"      // _PyErr_ProgramDecodedTextObject()
   5 |        | #include "lexer/state.h"
   6 |        | #include "lexer/lexer.h"
   7 |        | #include "pegen.h"
   8 |        |
   9 |        | // TOKENIZER ERRORS
  10 |        |
  11 |        | void
  12 |        | _PyPegen_raise_tokenizer_init_error(PyObject *filename)
  13 |  1.74k | {
  14 |  1.74k |     if (!(PyErr_ExceptionMatches(PyExc_LookupError)
  15 |  1.74k |           || PyErr_ExceptionMatches(PyExc_SyntaxError)
  16 |  1.74k |           || PyErr_ExceptionMatches(PyExc_ValueError)
  17 |  1.74k |           || PyErr_ExceptionMatches(PyExc_UnicodeDecodeError))) {
  18 |     60 |         return;
  19 |     60 |     }
  20 |  1.68k |     PyObject *errstr = NULL;
  21 |  1.68k |     PyObject *tuple = NULL;
  22 |  1.68k |     PyObject *type;
  23 |  1.68k |     PyObject *value;
  24 |  1.68k |     PyObject *tback;
  25 |  1.68k |     PyErr_Fetch(&type, &value, &tback);
  26 |  1.68k |     errstr = PyObject_Str(value);
  27 |  1.68k |     if (!errstr) {
  28 |      0 |         goto error;
  29 |      0 |     }
  30 |        |
  31 |  1.68k |     PyObject *tmp = Py_BuildValue("(OiiO)", filename, 0, -1, Py_None);
  32 |  1.68k |     if (!tmp) {
  33 |      0 |         goto error;
  34 |      0 |     }
  35 |        |
  36 |  1.68k |     tuple = PyTuple_Pack(2, errstr, tmp);
  37 |  1.68k |     Py_DECREF(tmp);
  38 |  1.68k |     if (!value) {
  39 |      0 |         goto error;
  40 |      0 |     }
  41 |  1.68k |     PyErr_SetObject(PyExc_SyntaxError, tuple);
  42 |        |
  43 |  1.68k | error:
  44 |  1.68k |     Py_XDECREF(type);
  45 |  1.68k |     Py_XDECREF(value);
  46 |  1.68k |     Py_XDECREF(tback);
  47 |  1.68k |     Py_XDECREF(errstr);
  48 |  1.68k |     Py_XDECREF(tuple);
  49 |  1.68k | }
  50 |        |
  51 |        | static inline void
  52 |  1.60k | raise_unclosed_parentheses_error(Parser *p) {
  53 |  1.60k |        int error_lineno = p->tok->parenlinenostack[p->tok->level-1];
  54 |  1.60k |        int error_col = p->tok->parencolstack[p->tok->level-1];
  55 |  1.60k |        RAISE_ERROR_KNOWN_LOCATION(p, PyExc_SyntaxError,
  56 |  1.60k |                                   error_lineno, error_col, error_lineno, -1,
  57 |  1.60k |                                   "'%c' was never closed",
  58 |  1.60k |                                   p->tok->parenstack[p->tok->level-1]);
  59 |  1.60k | }
  60 |        |
  61 |        | int
  62 |        | _Pypegen_tokenizer_error(Parser *p)
  63 |  3.35k | {
  64 |  3.35k |     if (PyErr_Occurred()) {
  65 |  1.77k |         return -1;
  66 |  1.77k |     }
  67 |        |
  68 |  1.57k |     const char *msg = NULL;
  69 |  1.57k |     PyObject* errtype = PyExc_SyntaxError;
  70 |  1.57k |     Py_ssize_t col_offset = -1;
  71 |  1.57k |     p->error_indicator = 1;
  72 |  1.57k |     switch (p->tok->done) {
  73 |      0 |         case E_TOKEN:
  74 |      0 |             msg = "invalid token";
  75 |      0 |             break;
  76 |  1.51k |         case E_EOF:
  77 |  1.51k |             if (p->tok->level) {
  78 |  1.48k |                 raise_unclosed_parentheses_error(p);
  79 |  1.48k |             } else {
  80 |     28 |                 RAISE_SYNTAX_ERROR("unexpected EOF while parsing");
  81 |     28 |             }
  82 |  1.51k |             return -1;
  83 |      6 |         case E_DEDENT:
  84 |      6 |             RAISE_INDENTATION_ERROR("unindent does not match any outer indentation level");
  85 |      6 |             return -1;
  86 |      0 |         case E_INTR:
  87 |      0 |             if (!PyErr_Occurred()) {
  88 |      0 |                 PyErr_SetNone(PyExc_KeyboardInterrupt);
  89 |      0 |             }
  90 |      0 |             return -1;
  91 |      0 |         case E_NOMEM:
  92 |      0 |             PyErr_NoMemory();
  93 |      0 |             return -1;
  94 |      2 |         case E_TABSPACE:
  95 |      2 |             errtype = PyExc_TabError;
  96 |      2 |             msg = "inconsistent use of tabs and spaces in indentation";
  97 |      2 |             break;
  98 |      0 |         case E_TOODEEP:
  99 |      0 |             errtype = PyExc_IndentationError;
 100 |      0 |             msg = "too many levels of indentation";
 101 |      0 |             break;
 102 |     60 |         case E_LINECONT: {
 103 |     60 |             col_offset = p->tok->cur - p->tok->buf - 1;
 104 |     60 |             msg = "unexpected character after line continuation character";
 105 |     60 |             break;
 106 |      0 |         }
 107 |      0 |         case E_COLUMNOVERFLOW:
 108 |      0 |             PyErr_SetString(PyExc_OverflowError,
 109 |      0 |                     "Parser column offset overflow - source line is too big");
 110 |      0 |             return -1;
 111 |      0 |         default:
 112 |      0 |             msg = "unknown parsing error";
 113 |  1.57k |     }
 114 |        |
 115 |     62 |     RAISE_ERROR_KNOWN_LOCATION(p, errtype, p->tok->lineno,
 116 |     62 |                                col_offset >= 0 ? col_offset : 0,
 117 |     62 |                                p->tok->lineno, -1, msg);
 118 |     62 |     return -1;
 119 |  1.57k | }
 120 |        |
 121 |        | int
 122 |        | _Pypegen_raise_decode_error(Parser *p)
 123 |  1.25k | {
 124 |  1.25k |     assert(PyErr_Occurred());
 125 |  1.25k |     const char *errtype = NULL;
 126 |  1.25k |     if (PyErr_ExceptionMatches(PyExc_UnicodeError)) {
 127 |  1.24k |         errtype = "unicode error";
 128 |  1.24k |     }
 129 |     14 |     else if (PyErr_ExceptionMatches(PyExc_ValueError)) {
 130 |      5 |         errtype = "value error";
 131 |      5 |     }
 132 |  1.25k |     if (errtype) {
 133 |  1.24k |         PyObject *type;
 134 |  1.24k |         PyObject *value;
 135 |  1.24k |         PyObject *tback;
 136 |  1.24k |         PyObject *errstr;
 137 |  1.24k |         PyErr_Fetch(&type, &value, &tback);
 138 |  1.24k |         errstr = PyObject_Str(value);
 139 |  1.24k |         if (errstr) {
 140 |  1.24k |             RAISE_SYNTAX_ERROR("(%s) %U", errtype, errstr);
 141 |  1.24k |             Py_DECREF(errstr);
 142 |  1.24k |         }
 143 |      0 |         else {
 144 |      0 |             PyErr_Clear();
 145 |      0 |             RAISE_SYNTAX_ERROR("(%s) unknown error", errtype);
 146 |      0 |         }
 147 |  1.24k |         Py_XDECREF(type);
 148 |  1.24k |         Py_XDECREF(value);
 149 |  1.24k |         Py_XDECREF(tback);
 150 |  1.24k |     }
 151 |        |
 152 |  1.25k |     return -1;
 153 |  1.25k | }
 154 |        |
 155 |        | static int
 156 |  7.60k | _PyPegen_tokenize_full_source_to_check_for_errors(Parser *p) {
 157 |        |     // Tokenize the whole input to see if there are any tokenization
 158 |        |     // errors such as mismatching parentheses. These will get priority
 159 |        |     // over generic syntax errors only if the line number of the error is
 160 |        |     // before the one that we had for the generic error.
 161 |        |
 162 |        |     // We don't want to tokenize to the end for interactive input
 163 |  7.60k |     if (p->tok->prompt != NULL) {
 164 |      0 |         return 0;
 165 |      0 |     }
 166 |        |
 167 |  7.60k |     PyObject *type, *value, *traceback;
 168 |  7.60k |     PyErr_Fetch(&type, &value, &traceback);
 169 |        |
 170 |  7.60k |     Token *current_token = p->known_err_token != NULL ? p->known_err_token : p->tokens[p->fill - 1];
 171 |  7.60k |     Py_ssize_t current_err_line = current_token->lineno;
 172 |        |
 173 |  7.60k |     int ret = 0;
 174 |  7.60k |     struct token new_token;
 175 |  7.60k |     _PyToken_Init(&new_token);
 176 |        |
 177 |  49.2k |     for (;;) {
 178 |  49.2k |         switch (_PyTokenizer_Get(p->tok, &new_token)) {
 179 |  3.25k |             case ERRORTOKEN:
 180 |  3.25k |                 if (PyErr_Occurred()) {
 181 |  1.09k |                     ret = -1;
 182 |  1.09k |                     goto exit;
 183 |  1.09k |                 }
 184 |  2.16k |                 if (p->tok->level != 0) {
 185 |  2.13k |                     int error_lineno = p->tok->parenlinenostack[p->tok->level-1];
 186 |  2.13k |                     if (current_err_line > error_lineno) {
 187 |    121 |                         raise_unclosed_parentheses_error(p);
 188 |    121 |                         ret = -1;
 189 |    121 |                         goto exit;
 190 |    121 |                     }
 191 |  2.13k |                 }
 192 |  2.03k |                 break;
 193 |  4.34k |             case ENDMARKER:
 194 |  4.34k |                 break;
 195 |  41.6k |             default:
 196 |  41.6k |                 continue;
 197 |  49.2k |         }
 198 |  6.38k |         break;
 199 |  49.2k |     }
 200 |        |
 201 |        |
 202 |  7.60k | exit:
 203 |  7.60k |     _PyToken_Free(&new_token);
 204 |        |     // If we're in an f-string, we want the syntax error in the expression part
 205 |        |     // to propagate, so that tokenizer errors (like expecting '}') that happen afterwards
 206 |        |     // do not swallow it.
 207 |  7.60k |     if (PyErr_Occurred() && p->tok->tok_mode_stack_index <= 0) {
 208 |    965 |         Py_XDECREF(value);
 209 |    965 |         Py_XDECREF(type);
 210 |    965 |         Py_XDECREF(traceback);
 211 |  6.63k |     } else {
 212 |  6.63k |         PyErr_Restore(type, value, traceback);
 213 |  6.63k |     }
 214 |  7.60k |     return ret;
 215 |  7.60k | }
 216 |        |
 217 |        | // PARSER ERRORS
 218 |        |
 219 |        | void *
 220 |        | _PyPegen_raise_error(Parser *p, PyObject *errtype, int use_mark, const char *errmsg, ...)
 221 |  2.23k | {
 222 |        |     // Bail out if we already have an error set.
 223 |  2.23k |     if (p->error_indicator && PyErr_Occurred()) {
 224 |    117 |         return NULL;
 225 |    117 |     }
 226 |  2.11k |     if (p->fill == 0) {
 227 |      0 |         va_list va;
 228 |      0 |         va_start(va, errmsg);
 229 |      0 |         _PyPegen_raise_error_known_location(p, errtype, 0, 0, 0, -1, errmsg, va);
 230 |      0 |         va_end(va);
 231 |      0 |         return NULL;
 232 |      0 |     }
 233 |  2.11k |     if (use_mark && p->mark == p->fill && _PyPegen_fill_token(p) < 0) {
 234 |      0 |         p->error_indicator = 1;
 235 |      0 |         return NULL;
 236 |      0 |     }
 237 |  2.11k |     Token *t = p->known_err_token != NULL
 238 |  2.11k |                    ? p->known_err_token
 239 |  2.11k |                    : p->tokens[use_mark ? p->mark : p->fill - 1];
 240 |  2.11k |     Py_ssize_t col_offset;
 241 |  2.11k |     Py_ssize_t end_col_offset = -1;
 242 |  2.11k |     if (t->col_offset == -1) {
 243 |  1.17k |         if (p->tok->cur == p->tok->buf) {
 244 |      4 |             col_offset = 0;
 245 |  1.16k |         } else {
 246 |  1.16k |             const char* start = p->tok->buf  ? p->tok->line_start : p->tok->buf;
 247 |  1.16k |             col_offset = Py_SAFE_DOWNCAST(p->tok->cur - start, intptr_t, int);
 248 |  1.16k |         }
 249 |  1.17k |     } else {
 250 |    941 |         col_offset = t->col_offset + 1;
 251 |    941 |     }
 252 |        |
 253 |  2.11k |     if (t->end_col_offset != -1) {
 254 |    941 |         end_col_offset = t->end_col_offset + 1;
 255 |    941 |     }
 256 |        |
 257 |  2.11k |     va_list va;
 258 |  2.11k |     va_start(va, errmsg);
 259 |  2.11k |     _PyPegen_raise_error_known_location(p, errtype, t->lineno, col_offset, t->end_lineno, end_col_offset, errmsg, va);
 260 |  2.11k |     va_end(va);
 261 |        |
 262 |  2.11k |     return NULL;
 263 |  2.11k | }
 264 |        |
 265 |        | static PyObject *
 266 |        | get_error_line_from_tokenizer_buffers(Parser *p, Py_ssize_t lineno)
 267 |    262 | {
 268 |        |     /* If the file descriptor is interactive, the source lines of the current
 269 |        |      * (multi-line) statement are stored in p->tok->interactive_src_start.
 270 |        |      * If not, we're parsing from a string, which means that the whole source
 271 |        |      * is stored in p->tok->str. */
 272 |    262 |     assert((p->tok->fp == NULL && p->tok->str != NULL) || p->tok->fp != NULL);
 273 |        |
 274 |    262 |     char *cur_line = p->tok->fp_interactive ? p->tok->interactive_src_start : p->tok->str;
 275 |    262 |     if (cur_line == NULL) {
 276 |      0 |         assert(p->tok->fp_interactive);
 277 |        |         // We can reach this point if the tokenizer buffers for interactive source have not been
 278 |        |         // initialized because we failed to decode the original source with the given locale.
 279 |      0 |         return Py_GetConstant(Py_CONSTANT_EMPTY_STR);
 280 |      0 |     }
 281 |        |
 282 |    262 |     Py_ssize_t relative_lineno = p->starting_lineno ? lineno - p->starting_lineno + 1 : lineno;
 283 |    262 |     const char* buf_end = p->tok->fp_interactive ? p->tok->interactive_src_end : p->tok->inp;
 284 |        |
 285 |    262 |     if (buf_end < cur_line) {
 286 |     18 |         buf_end = cur_line + strlen(cur_line);
 287 |     18 |     }
 288 |        |
 289 |  2.11k |     for (int i = 0; i < relative_lineno - 1; i++) {
 290 |  1.85k |         char *new_line = strchr(cur_line, '\n');
 291 |        |         // The assert is here for debug builds but the conditional that
 292 |        |         // follows is there so in release builds we do not crash at the cost
 293 |        |         // to report a potentially wrong line.
 294 |  1.85k |         assert(new_line != NULL && new_line + 1 < buf_end);
 295 |  1.85k |         if (new_line == NULL || new_line + 1 > buf_end) {
 296 |      0 |             break;
 297 |      0 |         }
 298 |  1.85k |         cur_line = new_line + 1;
 299 |  1.85k |     }
 300 |        |
 301 |    262 |     char *next_newline;
 302 |    262 |     if ((next_newline = strchr(cur_line, '\n')) == NULL) { // This is the last line
 303 |      0 |         next_newline = cur_line + strlen(cur_line);
 304 |      0 |     }
 305 |    262 |     return PyUnicode_DecodeUTF8(cur_line, next_newline - cur_line, "replace");
 306 |    262 | }
 307 |        |
 308 |        | void *
 309 |        | _PyPegen_raise_error_known_location(Parser *p, PyObject *errtype,
 310 |        |                                     Py_ssize_t lineno, Py_ssize_t col_offset,
 311 |        |                                     Py_ssize_t end_lineno, Py_ssize_t end_col_offset,
 312 |        |                                     const char *errmsg, va_list va)
 313 |  11.2k | {
 314 |        |     // Bail out if we already have an error set.
 315 |  11.2k |     if (p->error_indicator && PyErr_Occurred()) {
 316 |    851 |         return NULL;
 317 |    851 |     }
 318 |  10.4k |     PyObject *value = NULL;
 319 |  10.4k |     PyObject *errstr = NULL;
 320 |  10.4k |     PyObject *error_line = NULL;
 321 |  10.4k |     PyObject *tmp = NULL;
 322 |  10.4k |     p->error_indicator = 1;
 323 |        |
 324 |  10.4k |     if (end_lineno == CURRENT_POS) {
 325 |     27 |         end_lineno = p->tok->lineno;
 326 |     27 |     }
 327 |  10.4k |     if (end_col_offset == CURRENT_POS) {
 328 |     27 |         end_col_offset = p->tok->cur - p->tok->line_start;
 329 |     27 |     }
 330 |        |
 331 |  10.4k |     errstr = PyUnicode_FromFormatV(errmsg, va);
 332 |  10.4k |     if (!errstr) {
 333 |      0 |         goto error;
 334 |      0 |     }
 335 |        |
 336 |  10.4k |     if (p->tok->fp_interactive && p->tok->interactive_src_start != NULL) {
 337 |      0 |         error_line = get_error_line_from_tokenizer_buffers(p, lineno);
 338 |      0 |     }
 339 |  10.4k |     else if (p->start_rule == Py_file_input) {
 340 |  10.4k |         error_line = _PyErr_ProgramDecodedTextObject(p->tok->filename,
 341 |  10.4k |                                                      (int) lineno, p->tok->encoding);
 342 |  10.4k |     }
 343 |        |
 344 |  10.4k |     if (!error_line) {
 345 |        |         /* PyErr_ProgramTextObject was not called or returned NULL. If it was not called,
 346 |        |            then we need to find the error line from some other source, because
 347 |        |            p->start_rule != Py_file_input. If it returned NULL, then it either unexpectedly
 348 |        |            failed or we're parsing from a string or the REPL. There's a third edge case where
 349 |        |            we're actually parsing from a file, which has an E_EOF SyntaxError and in that case
 350 |        |            `PyErr_ProgramTextObject` fails because lineno points to last_file_line + 1, which
 351 |        |            does not physically exist */
 352 |  10.4k |         assert(p->tok->fp == NULL || p->tok->fp == stdin || p->tok->done == E_EOF);
 353 |        |
 354 |  10.4k |         if (p->tok->lineno <= lineno && p->tok->inp > p->tok->buf) {
 355 |  10.1k |             Py_ssize_t size = p->tok->inp - p->tok->line_start;
 356 |  10.1k |             error_line = PyUnicode_DecodeUTF8(p->tok->line_start, size, "replace");
 357 |  10.1k |         }
 358 |    262 |         else if (p->tok->fp == NULL || p->tok->fp == stdin) {
 359 |    262 |             error_line = get_error_line_from_tokenizer_buffers(p, lineno);
 360 |    262 |         }
 361 |      0 |         else {
 362 |      0 |             error_line = Py_GetConstant(Py_CONSTANT_EMPTY_STR);
 363 |      0 |         }
 364 |  10.4k |         if (!error_line) {
 365 |      0 |             goto error;
 366 |      0 |         }
 367 |  10.4k |     }
 368 |        |
 369 |  10.4k |     Py_ssize_t col_number = col_offset;
 370 |  10.4k |     Py_ssize_t end_col_number = end_col_offset;
 371 |        |
 372 |  10.4k |     col_number = _PyPegen_byte_offset_to_character_offset(error_line, col_offset);
 373 |  10.4k |     if (col_number < 0) {
 374 |      0 |         goto error;
 375 |      0 |     }
 376 |        |
 377 |  10.4k |     if (end_col_offset > 0) {
 378 |  7.48k |         end_col_number = _PyPegen_byte_offset_to_character_offset(error_line, end_col_offset);
 379 |  7.48k |         if (end_col_number < 0) {
 380 |      0 |             goto error;
 381 |      0 |         }
 382 |  7.48k |     }
 383 |        |
 384 |  10.4k |     tmp = Py_BuildValue("(OnnNnn)", p->tok->filename, lineno, col_number, error_line, end_lineno, end_col_number);
 385 |  10.4k |     if (!tmp) {
 386 |      0 |         goto error;
 387 |      0 |     }
 388 |  10.4k |     value = PyTuple_Pack(2, errstr, tmp);
 389 |  10.4k |     Py_DECREF(tmp);
 390 |  10.4k |     if (!value) {
 391 |      0 |         goto error;
 392 |      0 |     }
 393 |  10.4k |     PyErr_SetObject(errtype, value);
 394 |        |
 395 |  10.4k |     Py_DECREF(errstr);
 396 |  10.4k |     Py_DECREF(value);
 397 |  10.4k |     return NULL;
 398 |        |
 399 |      0 | error:
 400 |      0 |     Py_XDECREF(errstr);
 401 |      0 |     Py_XDECREF(error_line);
 402 |      0 |     return NULL;
 403 |  10.4k | }
 404 |        |
 405 |        | void
 406 |  12.1k | _Pypegen_set_syntax_error(Parser* p, Token* last_token) {
 407 |        |     // Existing syntax error
 408 |  12.1k |     if (PyErr_Occurred()) {
 409 |        |         // Prioritize tokenizer errors to custom syntax errors raised
 410 |        |         // on the second phase only if the errors come from the parser.
 411 |  6.27k |         int is_tok_ok = (p->tok->done == E_DONE || p->tok->done == E_OK);
 412 |  6.27k |         if (is_tok_ok && PyErr_ExceptionMatches(PyExc_SyntaxError)) {
 413 |  1.87k |             _PyPegen_tokenize_full_source_to_check_for_errors(p);
 414 |  1.87k |         }
 415 |        |         // Propagate the existing syntax error.
 416 |  6.27k |         return;
 417 |  6.27k |     }
 418 |        |     // Initialization error
 419 |  5.87k |     if (p->fill == 0) {
 420 |      0 |         RAISE_SYNTAX_ERROR("error at start before reading any input");
 421 |      0 |     }
 422 |        |     // Parser encountered EOF (End of File) unexpectedly
 423 |  5.87k |     if (last_token->type == ERRORTOKEN && p->tok->done == E_EOF) {
 424 |      0 |         if (p->tok->level) {
 425 |      0 |             raise_unclosed_parentheses_error(p);
 426 |      0 |         } else {
 427 |      0 |             RAISE_SYNTAX_ERROR("unexpected EOF while parsing");
 428 |      0 |         }
 429 |      0 |         return;
 430 |      0 |     }
 431 |        |     // Indentation error in the tokenizer
 432 |  5.87k |     if (last_token->type == INDENT || last_token->type == DEDENT) {
 433 |    146 |         RAISE_INDENTATION_ERROR(last_token->type == INDENT ? "unexpected indent" : "unexpected unindent");
 434 |    146 |         return;
 435 |    146 |     }
 436 |        |     // Unknown error (generic case)
 437 |        |
 438 |        |     // Use the last token we found on the first pass to avoid reporting
 439 |        |     // incorrect locations for generic syntax errors just because we reached
 440 |        |     // further away when trying to find specific syntax errors in the second
 441 |        |     // pass.
 442 |  5.72k |     RAISE_SYNTAX_ERROR_KNOWN_LOCATION(last_token, "invalid syntax");
 443 |        |     // _PyPegen_tokenize_full_source_to_check_for_errors will override the existing
 444 |        |     // generic SyntaxError we just raised if errors are found.
 445 |  5.72k |     _PyPegen_tokenize_full_source_to_check_for_errors(p);
 446 |  5.72k | }
 447 |        |
 448 |        | void
 449 |        | _Pypegen_stack_overflow(Parser *p)
 450 |     65 | {
 451 |     65 |     p->error_indicator = 1;
 452 |     65 |     PyErr_SetString(PyExc_MemoryError,
 453 |     65 |         "Parser stack overflowed - Python source too complex to parse");
 454 |     65 | }
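
For context, here is a minimal sketch (not part of the coverage run above) of how the most heavily exercised error path can be triggered through the public embedding API: compiling a string whose '(' is never closed makes the tokenizer report E_EOF with an open bracket level, so _Pypegen_tokenizer_error() ends up in raise_unclosed_parentheses_error(). The program below, the "<test>" filename, and the snippet being compiled are illustrative assumptions only.

    /* Illustrative sketch: exercise the unclosed-parenthesis path via Py_CompileString(). */
    #include <Python.h>

    int main(void)
    {
        Py_Initialize();
        /* "(1 + 2" never closes its parenthesis, so compilation fails. */
        PyObject *code = Py_CompileString("(1 + 2", "<test>", Py_file_input);
        if (code == NULL) {
            PyErr_Print();   /* reports a SyntaxError: '(' was never closed */
        }
        Py_XDECREF(code);
        Py_Finalize();
        return 0;
    }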