mirror of https://github.com/python/cpython
Preliminary support for "from __future__ import generators" to enable
the yield statement.  I figure we have to have this in before I can
release 2.2a1 on Wednesday.

Note: test_generators is currently broken; I'm counting on Tim to fix this.
This commit is contained in:
parent 045ca7ae72
commit b09f7ed623
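
For orientation, here is an illustrative sketch (not part of this commit) of what the change enables: once a module issues the future statement, the 2.2a1 parser treats "yield" as a keyword for the rest of that file, so a simple generator can be written like this.

    # Illustrative only: the future statement below is what flips the
    # parser's per-module p_generators flag and makes "yield" a keyword.
    from __future__ import generators

    def firstn(n):
        "Yield the integers 0, 1, ..., n-1, one at a time."
        i = 0
        while i < n:
            yield i
            i = i + 1

    for value in firstn(3):
        print value        # prints 0, then 1, then 2
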
@@ -65,6 +65,9 @@ DL_IMPORT(PyCodeObject *) PyNode_CompileFlags(struct _node *, char *,
 #define NESTED_SCOPES_DEFAULT 1
 #define FUTURE_NESTED_SCOPES "nested_scopes"
 
+#define GENERATORS_DEFAULT 0
+#define FUTURE_GENERATORS "generators"
+
 /* for internal use only */
 #define _PyCode_GETCODEPTR(co, pp) \
 	((*(co)->co_code->ob_type->tp_as_buffer->bf_getreadbuffer) \
@@ -67,3 +67,4 @@ class _Feature:
                `self.getMandatoryRelease()` + ")"
 
 nested_scopes = _Feature((2, 1, 0, "beta", 1), (2, 2, 0, "alpha", 0))
+generators = _Feature((2, 2, 0, "alpha", 1), (2, 3, 0, "final", 0))
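
A usage note on the new __future__ entry (a sketch relying only on the _Feature accessors already visible above, such as getMandatoryRelease()): the two tuples record that generators become optional in 2.2a1 and are planned to be mandatory in 2.3 final, and both can be read back at runtime.

    import __future__

    # Release in which "from __future__ import generators" first has an effect.
    print __future__.generators.getOptionalRelease()     # (2, 2, 0, 'alpha', 1)
    # Release in which the feature is planned to be on by default.
    print __future__.generators.getMandatoryRelease()    # (2, 3, 0, 'final', 0)
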
@@ -24,6 +24,8 @@ Here are some of the useful functions provided by this module:
 
 # This module is in the public domain. No warranties.
 
+from __future__ import generators
+
 __author__ = 'Ka-Ping Yee <ping@lfw.org>'
 __date__ = '1 Jan 2001'
 
@@ -22,6 +22,8 @@ are the same, except instead of generating tokens, tokeneater is a callback
 function to which the 5 fields described above are passed as 5 arguments,
 each time a new token is found."""
 
+from __future__ import generators
+
 __author__ = 'Ka-Ping Yee <ping@lfw.org>'
 __credits__ = \
     'GvR, ESR, Tim Peters, Thomas Wouters, Fred Drake, Skip Montanaro'
@@ -2,6 +2,7 @@
 
 Types that are part of optional modules (e.g. array) are not listed.
 """
+from __future__ import generators
 
 import sys
 
@@ -79,6 +79,7 @@ PyParser_New(grammar *g, int start)
 	if (ps == NULL)
 		return NULL;
 	ps->p_grammar = g;
+	ps->p_generators = 0;
 	ps->p_tree = PyNode_New(start);
 	if (ps->p_tree == NULL) {
 		PyMem_DEL(ps);
@@ -131,8 +132,9 @@ push(register stack *s, int type, dfa *d, int newstate, int lineno)
 /* PARSER PROPER */
 
 static int
-classify(grammar *g, int type, char *str)
+classify(parser_state *ps, int type, char *str)
 {
+	grammar *g = ps->p_grammar;
 	register int n = g->g_ll.ll_nlabels;
 
 	if (type == NAME) {
@@ -143,6 +145,10 @@ classify(grammar *g, int type, char *str)
 			if (l->lb_type == NAME && l->lb_str != NULL &&
 			    l->lb_str[0] == s[0] &&
 			    strcmp(l->lb_str, s) == 0) {
+				if (!ps->p_generators &&
+				    s[0] == 'y' &&
+				    strcmp(s, "yield") == 0)
+					break; /* not a keyword */
 				D(printf("It's a keyword\n"));
 				return n - i;
 			}
@@ -164,6 +170,22 @@ classify(grammar *g, int type, char *str)
 	return -1;
 }
 
+static void
+future_hack(parser_state *ps)
+{
+	node *n = ps->p_stack.s_top->s_parent;
+	node *ch;
+
+	if (strcmp(STR(CHILD(n, 0)), "from") != 0)
+		return;
+	ch = CHILD(n, 1);
+	if (strcmp(STR(CHILD(ch, 0)), "__future__") != 0)
+		return;
+	ch = CHILD(n, 3);
+	if (NCH(ch) == 1 && strcmp(STR(CHILD(ch, 0)), "generators") == 0)
+		ps->p_generators = 1;
+}
+
 int
 PyParser_AddToken(register parser_state *ps, register int type, char *str,
 		  int lineno, int *expected_ret)
@@ -174,7 +196,7 @@ PyParser_AddToken(register parser_state *ps, register int type, char *str,
 	D(printf("Token %s/'%s' ... ", _PyParser_TokenNames[type], str));
 
 	/* Find out which label this token is */
-	ilabel = classify(ps->p_grammar, type, str);
+	ilabel = classify(ps, type, str);
 	if (ilabel < 0)
 		return E_SYNTAX;
 
@@ -217,7 +239,14 @@ PyParser_AddToken(register parser_state *ps, register int type, char *str,
 			while (s = &d->d_state
 					[ps->p_stack.s_top->s_state],
 			       s->s_accept && s->s_narcs == 1) {
-				D(printf(" Direct pop.\n"));
+				D(printf(" DFA '%s', state %d: "
+					 "Direct pop.\n",
+					 d->d_name,
+					 ps->p_stack.s_top->s_state));
+				if (d->d_name[0] == 'i' &&
+				    strcmp(d->d_name,
+					   "import_stmt") == 0)
+					future_hack(ps);
 				s_pop(&ps->p_stack);
 				if (s_empty(&ps->p_stack)) {
 					D(printf(" ACCEPT.\n"));
@@ -230,6 +259,9 @@ PyParser_AddToken(register parser_state *ps, register int type, char *str,
 			}
 
 			if (s->s_accept) {
+				if (d->d_name[0] == 'i' &&
+				    strcmp(d->d_name, "import_stmt") == 0)
+					future_hack(ps);
 				/* Pop this dfa and try again */
 				s_pop(&ps->p_stack);
 				D(printf(" Pop ...\n"));
@@ -25,6 +25,7 @@ typedef struct {
 	stack		p_stack;	/* Stack of parser states */
 	grammar		*p_grammar;	/* Grammar to use */
 	node		*p_tree;	/* Top of parse tree */
+	int		p_generators;	/* 1 if yield is a keyword */
 } parser_state;
 
 parser_state *PyParser_New(grammar *g, int start);
@@ -31,6 +31,8 @@ future_check_features(PyFutureFeatures *ff, node *n, char *filename)
 		feature = STR(CHILD(ch, 0));
 		if (strcmp(feature, FUTURE_NESTED_SCOPES) == 0) {
 			ff->ff_nested_scopes = 1;
+		} else if (strcmp(feature, FUTURE_GENERATORS) == 0) {
+			/* OK; this is processed by the parser */
 		} else if (strcmp(feature, "braces") == 0) {
 			PyErr_SetString(PyExc_SyntaxError,
 					"not a chance");
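
The parser change is deliberately per-module: until future_hack() has seen "from __future__ import generators" while popping an import_stmt, classify() breaks out before matching the keyword label, so "yield" stays an ordinary name. A rough illustration (hypothetical module, not part of this commit):

    # Hypothetical 2.2a1 module WITHOUT the future statement: the parser
    # classifies "yield" as a plain NAME, so this still compiles and runs.
    yield = 42
    print yield * 2        # prints 84
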