/* Parser-tokenizer link implementation */

#include "pgenheaders.h"
#include "tokenizer.h"
#include "node.h"
#include "grammar.h"
#include "parser.h"
#include "parsetok.h"
#include "errcode.h"
#include "graminit.h"

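/* Tab-check level: non-zero enables warnings about inconsistent tab/space
   indentation, and a value >= 2 turns those warnings into errors (see the
   uses below); presumably set from the interpreter's -t/-tt options. */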
int Py_TabcheckFlag;


/* Forward */
static node *parsetok(struct tok_state *, grammar *, int, perrdetail *, int);
static void initerr(perrdetail *err_ret, const char *filename);

/* Parse input coming from a string.  Return error code, print some errors. */
node *
PyParser_ParseString(const char *s, grammar *g, int start, perrdetail *err_ret)
{
    return PyParser_ParseStringFlagsFilename(s, NULL, g, start, err_ret, 0);
}

node *
PyParser_ParseStringFlags(const char *s, grammar *g, int start,
                          perrdetail *err_ret, int flags)
{
    return PyParser_ParseStringFlagsFilename(s, NULL,
                                             g, start, err_ret, flags);
}

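/* Workhorse for the two string entry points above: build a tokenizer from
   the string, apply the tab-check settings, and hand off to parsetok(). */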
node *
PyParser_ParseStringFlagsFilename(const char *s, const char *filename,
                                  grammar *g, int start,
                                  perrdetail *err_ret, int flags)
{
    struct tok_state *tok;

    initerr(err_ret, filename);

    if ((tok = PyTokenizer_FromString(s)) == NULL) {
        err_ret->error = PyErr_Occurred() ? E_DECODE : E_NOMEM;
        return NULL;
    }

    tok->filename = filename ? filename : "<string>";
    if (Py_TabcheckFlag || Py_VerboseFlag) {
        tok->altwarning = (tok->filename != NULL);
        if (Py_TabcheckFlag >= 2)
            tok->alterror++;
    }

    return parsetok(tok, g, start, err_ret, flags);
}

/* Parse input coming from a file.  Return error code, print some errors. */

node *
PyParser_ParseFile(FILE *fp, const char *filename, grammar *g, int start,
                   char *ps1, char *ps2, perrdetail *err_ret)
{
    return PyParser_ParseFileFlags(fp, filename, g, start, ps1, ps2,
                                   err_ret, 0);
}

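/* Same as above, but honouring the parse flags; ps1 and ps2 are the prompt
   strings (e.g. sys.ps1/sys.ps2) passed through to the tokenizer for
   interactive input. */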
node *
PyParser_ParseFileFlags(FILE *fp, const char *filename, grammar *g, int start,
                        char *ps1, char *ps2, perrdetail *err_ret, int flags)
{
    struct tok_state *tok;

    initerr(err_ret, filename);

    if ((tok = PyTokenizer_FromFile(fp, ps1, ps2)) == NULL) {
        err_ret->error = E_NOMEM;
        return NULL;
    }
    tok->filename = filename;
    if (Py_TabcheckFlag || Py_VerboseFlag) {
        tok->altwarning = (filename != NULL);
        if (Py_TabcheckFlag >= 2)
            tok->alterror++;
    }

    return parsetok(tok, g, start, err_ret, flags);
}

/* Parse input coming from the given tokenizer structure.
   Return error code. */

static char with_msg[] =
"%s:%d: Warning: 'with' will become a reserved keyword in Python 2.6\n";

static char as_msg[] =
"%s:%d: Warning: 'as' will become a reserved keyword in Python 2.6\n";

static void
warn(const char *msg, const char *filename, int lineno)
{
    if (filename == NULL)
        filename = "<string>";
    PySys_WriteStderr(msg, filename, lineno);
}

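/* parsetok() consumes the tok_state: PyTokenizer_Free() is called before
   returning, on success and on every error path. */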
static node *
parsetok(struct tok_state *tok, grammar *g, int start, perrdetail *err_ret,
         int flags)
{
    parser_state *ps;
    node *n;
    int started = 0, handling_import = 0, handling_with = 0;

    if ((ps = PyParser_New(g, start)) == NULL) {
        fprintf(stderr, "no mem for new parser\n");
        err_ret->error = E_NOMEM;
        PyTokenizer_Free(tok);
        return NULL;
    }
#ifdef PY_PARSER_REQUIRES_FUTURE_KEYWORD
    if (flags & PyPARSE_WITH_IS_KEYWORD)
        ps->p_flags |= CO_FUTURE_WITH_STATEMENT;
#endif

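    /* Pump tokens from the tokenizer into the parser until the parser
       reports E_DONE (start symbol complete) or an error occurs. */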
    for (;;) {
        char *a, *b;
        int type;
        size_t len;
        char *str;
        int col_offset;

        type = PyTokenizer_Get(tok, &a, &b);
        if (type == ERRORTOKEN) {
            err_ret->error = tok->done;
            break;
        }
        if (type == ENDMARKER && started) {
            type = NEWLINE; /* Add an extra newline */
            handling_with = handling_import = 0;
            started = 0;
            /* Add the right number of dedent tokens,
               except if a certain flag is given --
               codeop.py uses this. */
            if (tok->indent &&
                !(flags & PyPARSE_DONT_IMPLY_DEDENT))
            {
                tok->pendin = -tok->indent;
                tok->indent = 0;
            }
        }
        else
            started = 1;
        len = b - a; /* XXX this may compute NULL - NULL */
        str = (char *) PyObject_MALLOC(len + 1);
        if (str == NULL) {
            fprintf(stderr, "no mem for next token\n");
            err_ret->error = E_NOMEM;
            break;
        }
        if (len > 0)
            strncpy(str, a, len);
        str[len] = '\0';

#ifdef PY_PARSER_REQUIRES_FUTURE_KEYWORD
        /* This is only necessary to support the "as" warning, but
           we don't want to warn about "as" in import statements. */
        if (type == NAME &&
            len == 6 && str[0] == 'i' && strcmp(str, "import") == 0)
            handling_import = 1;

        /* Warn about "with" as NAME */
        if (type == NAME &&
            !(ps->p_flags & CO_FUTURE_WITH_STATEMENT)) {
            if (len == 4 && str[0] == 'w' && strcmp(str, "with") == 0)
                warn(with_msg, err_ret->filename, tok->lineno);
            else if (!(handling_import || handling_with) &&
                     len == 2 && str[0] == 'a' &&
                     strcmp(str, "as") == 0)
                warn(as_msg, err_ret->filename, tok->lineno);
        }
        else if (type == NAME &&
                 (ps->p_flags & CO_FUTURE_WITH_STATEMENT) &&
                 len == 4 && str[0] == 'w' && strcmp(str, "with") == 0)
            handling_with = 1;
#endif
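        /* Column offset of the token within the current line; -1 if the
           token text does not lie within the tokenizer's line buffer. */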
        if (a >= tok->line_start)
            col_offset = a - tok->line_start;
        else
            col_offset = -1;

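        /* Feed the token to the parser proper.  On success the tree takes
           ownership of str; on failure (other than E_DONE) we free it here.
           E_DONE means the start symbol has been completely matched. */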
        if ((err_ret->error =
             PyParser_AddToken(ps, (int)type, str, tok->lineno, col_offset,
                               &(err_ret->expected))) != E_OK) {
            if (err_ret->error != E_DONE) {
                PyObject_FREE(str);
                err_ret->token = type;
            }
            break;
        }
    }

    if (err_ret->error == E_DONE) {
        n = ps->p_tree;
        ps->p_tree = NULL;
    }
    else
        n = NULL;

    PyParser_Delete(ps);

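    /* On failure, record where things went wrong and copy the offending
       source line into err_ret->text for error reporting. */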
    if (n == NULL) {
        if (tok->lineno <= 1 && tok->done == E_EOF)
            err_ret->error = E_EOF;
        err_ret->lineno = tok->lineno;
        if (tok->buf != NULL) {
            size_t len;
            assert(tok->cur - tok->buf < INT_MAX);
            err_ret->offset = (int)(tok->cur - tok->buf);
            len = tok->inp - tok->buf;
            err_ret->text = (char *) PyObject_MALLOC(len + 1);
            if (err_ret->text != NULL) {
                if (len > 0)
                    strncpy(err_ret->text, tok->buf, len);
                err_ret->text[len] = '\0';
            }
        }
    } else if (tok->encoding != NULL) {
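        /* The tokenizer detected a source encoding: wrap the tree in an
           encoding_decl node so the caller can see it.  The new node takes
           over the encoding string from the tokenizer. */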
        node *r = PyNode_New(encoding_decl);
        if (!r) {
            err_ret->error = E_NOMEM;
            n = NULL;
            goto done;
        }
        r->n_str = tok->encoding;
        r->n_nchildren = 1;
        r->n_child = n;
        tok->encoding = NULL;
        n = r;
    }

done:
    PyTokenizer_Free(tok);

    return n;
}

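/* Reset the error-detail structure before a parse attempt. */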
static void
initerr(perrdetail *err_ret, const char *filename)
{
    err_ret->error = E_OK;
    err_ret->filename = filename;
    err_ret->lineno = 0;
    err_ret->offset = 0;
    err_ret->text = NULL;
    err_ret->token = -1;
    err_ret->expected = -1;
}