/*
 * Parser/tokenizer.h — from the allendowney/cpython repository
 * (path: Parser/tokenizer.h; web-viewer navigation chrome removed).
 */
#ifndef Py_TOKENIZER_H
#define Py_TOKENIZER_H
#ifdef __cplusplus
extern "C" {
#endif

#include "object.h"

/* Tokenizer interface */

#include "pycore_token.h" /* For token types */
#define MAXINDENT 100       /* Max indentation level */
#define MAXLEVEL 200        /* Max parentheses level */
#define MAXFSTRINGLEVEL 150 /* Max f-string nesting level */
/* Progress of the source-encoding (PEP 263 coding cookie) detection
 * for the current input. */
enum decoding_state {
    STATE_INIT,        /* detection not started yet */
    STATE_SEEK_CODING, /* still scanning for a coding declaration */
    STATE_NORMAL       /* encoding resolved; decode normally */
};
/* Policy applied when the tokenizer needs more input in interactive mode. */
enum interactive_underflow_t {
    /* Normal mode of operation: return a new token when asked in
     * interactive mode. */
    IUNDERFLOW_NORMAL,
    /* Forcefully return ENDMARKER when asked for a new token in
     * interactive mode.  This can be used to prevent the tokenizer from
     * prompting the user for new tokens. */
    IUNDERFLOW_STOP,
};
31
struct token {
32
int level;
33
int lineno, col_offset, end_lineno, end_col_offset;
34
const char *start, *end;
35
PyObject *metadata;
36
};
37
/* Which kind of text the tokenizer is currently scanning:
 * ordinary Python source, or the inside of an f-string. */
enum tokenizer_mode_kind_t {
    TOK_REGULAR_MODE,
    TOK_FSTRING_MODE,
};
/* Maximum nesting depth of expressions tracked per tokenizer mode
 * (presumably f-string replacement fields — confirm in tokenizer.c). */
#define MAX_EXPR_NESTING 3
45
typedef struct _tokenizer_mode {
46
enum tokenizer_mode_kind_t kind;
47
48
int curly_bracket_depth;
49
int curly_bracket_expr_start_depth;
50
51
char f_string_quote;
52
int f_string_quote_size;
53
int f_string_raw;
54
const char* f_string_start;
55
const char* f_string_multi_line_start;
56
int f_string_line_start;
57
58
Py_ssize_t f_string_start_offset;
59
Py_ssize_t f_string_multi_line_start_offset;
60
61
Py_ssize_t last_expr_size;
62
Py_ssize_t last_expr_end;
63
char* last_expr_buffer;
64
int f_string_debug;
65
} tokenizer_mode;
66
67
/* Tokenizer state */
68
struct tok_state {
69
/* Input state; buf <= cur <= inp <= end */
70
/* NB an entire line is held in the buffer */
71
char *buf; /* Input buffer, or NULL; malloc'ed if fp != NULL or readline != NULL */
72
char *cur; /* Next character in buffer */
73
char *inp; /* End of data in buffer */
74
int fp_interactive; /* If the file descriptor is interactive */
75
char *interactive_src_start; /* The start of the source parsed so far in interactive mode */
76
char *interactive_src_end; /* The end of the source parsed so far in interactive mode */
77
const char *end; /* End of input buffer if buf != NULL */
78
const char *start; /* Start of current token if not NULL */
79
int done; /* E_OK normally, E_EOF at EOF, otherwise error code */
80
/* NB If done != E_OK, cur must be == inp!!! */
81
FILE *fp; /* Rest of input; NULL if tokenizing a string */
82
int tabsize; /* Tab spacing */
83
int indent; /* Current indentation index */
84
int indstack[MAXINDENT]; /* Stack of indents */
85
int atbol; /* Nonzero if at begin of new line */
86
int pendin; /* Pending indents (if > 0) or dedents (if < 0) */
87
const char *prompt, *nextprompt; /* For interactive prompting */
88
int lineno; /* Current line number */
89
int first_lineno; /* First line of a single line or multi line string
90
expression (cf. issue 16806) */
91
int starting_col_offset; /* The column offset at the beginning of a token */
92
int col_offset; /* Current col offset */
93
int level; /* () [] {} Parentheses nesting level */
94
/* Used to allow free continuations inside them */
95
char parenstack[MAXLEVEL];
96
int parenlinenostack[MAXLEVEL];
97
int parencolstack[MAXLEVEL];
98
PyObject *filename;
99
/* Stuff for checking on different tab sizes */
100
int altindstack[MAXINDENT]; /* Stack of alternate indents */
101
/* Stuff for PEP 0263 */
102
enum decoding_state decoding_state;
103
int decoding_erred; /* whether erred in decoding */
104
char *encoding; /* Source encoding. */
105
int cont_line; /* whether we are in a continuation line. */
106
const char* line_start; /* pointer to start of current line */
107
const char* multi_line_start; /* pointer to start of first line of
108
a single line or multi line string
109
expression (cf. issue 16806) */
110
PyObject *decoding_readline; /* open(...).readline */
111
PyObject *decoding_buffer;
112
PyObject *readline; /* readline() function */
113
const char* enc; /* Encoding for the current str. */
114
char* str; /* Source string being tokenized (if tokenizing from a string)*/
115
char* input; /* Tokenizer's newline translated copy of the string. */
116
117
int type_comments; /* Whether to look for type comments */
118
119
/* async/await related fields (still needed depending on feature_version) */
120
int async_hacks; /* =1 if async/await aren't always keywords */
121
int async_def; /* =1 if tokens are inside an 'async def' body. */
122
int async_def_indent; /* Indentation level of the outermost 'async def'. */
123
int async_def_nl; /* =1 if the outermost 'async def' had at least one
124
NEWLINE token after it. */
125
/* How to proceed when asked for a new token in interactive mode */
126
enum interactive_underflow_t interactive_underflow;
127
int report_warnings;
128
// TODO: Factor this into its own thing
129
tokenizer_mode tok_mode_stack[MAXFSTRINGLEVEL];
130
int tok_mode_stack_index;
131
int tok_report_warnings;
132
int tok_extra_tokens;
133
int comment_newline;
134
int implicit_newline;
135
#ifdef Py_DEBUG
136
int debug;
137
#endif
138
};
139
140
extern struct tok_state *_PyTokenizer_FromString(const char *, int, int);
141
extern struct tok_state *_PyTokenizer_FromUTF8(const char *, int, int);
142
extern struct tok_state *_PyTokenizer_FromReadline(PyObject*, const char*, int, int);
143
extern struct tok_state *_PyTokenizer_FromFile(FILE *, const char*,
144
const char *, const char *);
145
extern void _PyTokenizer_Free(struct tok_state *);
146
extern void _PyToken_Free(struct token *);
147
extern void _PyToken_Init(struct token *);
148
extern int _PyTokenizer_Get(struct tok_state *, struct token *);
149
/* Internal alias for the token-dumping debug helper. */
#define tok_dump _Py_tok_dump

#ifdef __cplusplus
}
#endif
#endif /* !Py_TOKENIZER_H */