/* Parser-tokenizer link implementation */
#include "pgenheaders.h"
#include "tokenizer.h"
#include "node.h"
#include "grammar.h"
#include "parser.h"
#include "parsetok.h"
#include "errcode.h"

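/* Tab-check level: 1 warns about inconsistent tab/space indentation, 2 or
   more turns the warning into an error.  (Set elsewhere in the interpreter,
   typically via the -t command line option.) */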
int Py_TabcheckFlag;

/* Forward */
static node *parsetok(struct tok_state *, grammar *, int, perrdetail *);

/* Parse input coming from a string.  Return the root of the parse tree, or
   NULL on failure; error details go into *err_ret (out-of-memory errors are
   also printed to stderr). */

node *
PyParser_ParseString(char *s, grammar *g, int start, perrdetail *err_ret)
{
    struct tok_state *tok;

    err_ret->error = E_OK;
    err_ret->filename = NULL;
    err_ret->lineno = 0;
    err_ret->offset = 0;
    err_ret->text = NULL;
    err_ret->token = -1;
    err_ret->expected = -1;

    if ((tok = PyTokenizer_FromString(s)) == NULL) {
        err_ret->error = E_NOMEM;
        return NULL;
    }

    if (Py_TabcheckFlag || Py_VerboseFlag) {
        tok->filename = "<string>";
        tok->altwarning = (tok->filename != NULL);
        if (Py_TabcheckFlag >= 2)
            tok->alterror++;
    }

    return parsetok(tok, g, start, err_ret);
}
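
/* Usage sketch (illustrative): callers elsewhere in CPython, such as the
   simple-parse wrappers in pythonrun.c, pass the compiled-in grammar and a
   start symbol.  The names _PyParser_Grammar and Py_eval_input are assumed
   to be provided by those other files:

       perrdetail err;
       node *n = PyParser_ParseString("1+2\n", &_PyParser_Grammar,
                                      Py_eval_input, &err);
       if (n == NULL)
           ... report err.error, err.lineno, err.offset, err.text ...
       else
           ... use the tree, then release it with PyNode_Free(n) ...
*/
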
/* Parse input coming from a file.  Return the root of the parse tree, or
   NULL on failure; error details go into *err_ret (out-of-memory errors are
   also printed to stderr). */
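/* ps1 and ps2 are the primary and secondary prompts (normally ">>> " and
   "... ") passed through to the tokenizer for interactive input; filename is
   recorded in the error record and the tokenizer for use in messages. */
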
node *
PyParser_ParseFile(FILE *fp, char *filename, grammar *g, int start,
                   char *ps1, char *ps2, perrdetail *err_ret)
{
    struct tok_state *tok;

    err_ret->error = E_OK;
    err_ret->filename = filename;
    err_ret->lineno = 0;
    err_ret->offset = 0;
    err_ret->text = NULL;

    if ((tok = PyTokenizer_FromFile(fp, ps1, ps2)) == NULL) {
        err_ret->error = E_NOMEM;
        return NULL;
    }

    if (Py_TabcheckFlag || Py_VerboseFlag) {
        tok->filename = filename;
        tok->altwarning = (filename != NULL);
        if (Py_TabcheckFlag >= 2)
            tok->alterror++;
    }

    return parsetok(tok, g, start, err_ret);
}

/* Parse input coming from the given tokenizer structure.
   Return the root of the parse tree, or NULL on error, with details
   recorded in *err_ret. */

static node *
parsetok(struct tok_state *tok, grammar *g, int start, perrdetail *err_ret)
{
    parser_state *ps;
    node *n;
    int started = 0;

    if ((ps = PyParser_New(g, start)) == NULL) {
        fprintf(stderr, "no mem for new parser\n");
        err_ret->error = E_NOMEM;
        return NULL;
    }
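
    /* Main loop: pull tokens from the tokenizer one at a time and feed them
       to the parser until it either accepts the input (E_DONE) or reports an
       error. */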
    for (;;) {
        char *a, *b;
        int type;
        size_t len;
        char *str;

        type = PyTokenizer_Get(tok, &a, &b);
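        /* a and b now delimit the token's text inside the tokenizer's
           input buffer. */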
        if (type == ERRORTOKEN) {
            err_ret->error = tok->done;
            break;
        }
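        /* At end of input, first hand the parser one extra NEWLINE so that
           source lacking a final newline still parses; the tokenizer keeps
           returning ENDMARKER, which is passed through on the next
           iteration. */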
        if (type == ENDMARKER && started) {
            type = NEWLINE; /* Add an extra newline */
            started = 0;
        }
        else
            started = 1;

        len = b - a; /* XXX this may compute NULL - NULL */
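
        /* Copy the token text into its own buffer: on success the parser
           stores the string in the parse tree, while the tokenizer's buffer
           may be overwritten by the next line of input. */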
        str = PyMem_NEW(char, len + 1);
        if (str == NULL) {
            fprintf(stderr, "no mem for next token\n");
            err_ret->error = E_NOMEM;
            break;
        }
        if (len > 0)
            strncpy(str, a, len);
        str[len] = '\0';
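
        /* Feed the token to the parser.  On E_OK (and E_DONE, which ends the
           parse) the parser has taken ownership of str; on any other error
           the string is freed here. */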
        if ((err_ret->error =
             PyParser_AddToken(ps, (int)type, str, tok->lineno,
                               &(err_ret->expected))) != E_OK) {
            if (err_ret->error != E_DONE)
                PyMem_DEL(str);
            break;
        }
    }
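
    /* E_DONE means the start symbol was completely recognized: detach the
       finished tree from the parser state before the state is deleted. */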
    if (err_ret->error == E_DONE) {
        n = ps->p_tree;
        ps->p_tree = NULL;
    }
    else
        n = NULL;

    PyParser_Delete(ps);
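
    /* On failure, record where the parse stopped: line number, column offset,
       and a copy of whatever text is still in the tokenizer's buffer. */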
    if (n == NULL) {
        if (tok->lineno <= 1 && tok->done == E_EOF)
            err_ret->error = E_EOF;
        err_ret->lineno = tok->lineno;
        err_ret->offset = tok->cur - tok->buf;
        if (tok->buf != NULL) {
            size_t len = tok->inp - tok->buf;
            err_ret->text = PyMem_NEW(char, len + 1);
            if (err_ret->text != NULL) {
                if (len > 0)
                    strncpy(err_ret->text, tok->buf, len);
                err_ret->text[len] = '\0';
            }
        }
    }

    PyTokenizer_Free(tok);

    return n;
}