mirror of https://github.com/python/cpython
Issue #2335: Backport set literals syntax from Python 3.x.
This commit is contained in: parent e365613528, commit ee936a2130
@@ -65,7 +65,7 @@ atoms is:

.. productionlist::
   atom: `identifier` | `literal` | `enclosure`
   enclosure: `parenth_form` | `list_display`
            : | `generator_expression` | `dict_display`
            : | `generator_expression` | `dict_display` | `set_display`
            : | `string_conversion` | `yield_atom`

@@ -281,6 +281,30 @@ datum (textually rightmost in the display) stored for a given key value

prevails.


.. _set:

Set displays
------------

.. index:: pair: set; display
           object: set

A set display is denoted by curly braces and distinguishable from dictionary
displays by the lack of colons separating keys and values:

.. productionlist::
   set_display: "{" (`expression_list` | `comprehension`) "}"

A set display yields a new mutable set object, the contents being specified by
either a sequence of expressions or a comprehension. When a comma-separated
list of expressions is supplied, its elements are evaluated from left to right
and added to the set object. When a comprehension is supplied, the set is
constructed from the elements resulting from the comprehension.

An empty set cannot be constructed with ``{}``; this literal constructs an empty
dictionary.


.. _string-conversions:

String conversions
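A quick sketch of what the documented behaviour above means in practice (illustrative only, on any Python 2.7 build that contains this patch):

    s = {1, 2, 3}            # set display -> set([1, 2, 3])
    t = {'one', 'two',}      # a trailing comma is allowed, as in list/tuple displays
    empty = {}               # still an empty dict, NOT an empty set
    empty_set = set()        # the only spelling for an empty set
    assert isinstance(s, set) and isinstance(empty, dict)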
@@ -102,7 +102,7 @@ factor: ('+'|'-'|'~') factor | power
power: atom trailer* ['**' factor]
atom: ('(' [yield_expr|testlist_gexp] ')' |
       '[' [listmaker] ']' |
       '{' [dictmaker] '}' |
       '{' [dictorsetmaker] '}' |
       '`' testlist1 '`' |
       NAME | NUMBER | STRING+)
listmaker: test ( list_for | (',' test)* [','] )

@@ -115,6 +115,8 @@ sliceop: ':' [test]
exprlist: expr (',' expr)* [',']
testlist: test (',' test)* [',']
dictmaker: test ':' test (',' test ':' test)* [',']
dictorsetmaker: ( (test ':' test (',' test ':' test)* [',']) |
                  (test (',' test)* [',']) )

classdef: 'class' NAME ['(' [testlist] ')'] ':' suite

@@ -185,11 +185,11 @@ struct _stmt {
};

enum _expr_kind {BoolOp_kind=1, BinOp_kind=2, UnaryOp_kind=3, Lambda_kind=4,
                 IfExp_kind=5, Dict_kind=6, ListComp_kind=7,
                 GeneratorExp_kind=8, Yield_kind=9, Compare_kind=10,
                 Call_kind=11, Repr_kind=12, Num_kind=13, Str_kind=14,
                 Attribute_kind=15, Subscript_kind=16, Name_kind=17,
                 List_kind=18, Tuple_kind=19};
                 IfExp_kind=5, Dict_kind=6, Set_kind=7, ListComp_kind=8,
                 GeneratorExp_kind=9, Yield_kind=10, Compare_kind=11,
                 Call_kind=12, Repr_kind=13, Num_kind=14, Str_kind=15,
                 Attribute_kind=16, Subscript_kind=17, Name_kind=18,
                 List_kind=19, Tuple_kind=20};
struct _expr {
    enum _expr_kind kind;
    union {

@@ -225,6 +225,10 @@ struct _expr {
            asdl_seq *values;
        } Dict;

        struct {
            asdl_seq *elts;
        } Set;

        struct {
            expr_ty elt;
            asdl_seq *generators;

@@ -449,6 +453,8 @@ expr_ty _Py_IfExp(expr_ty test, expr_ty body, expr_ty orelse, int lineno, int
#define Dict(a0, a1, a2, a3, a4) _Py_Dict(a0, a1, a2, a3, a4)
expr_ty _Py_Dict(asdl_seq * keys, asdl_seq * values, int lineno, int
                 col_offset, PyArena *arena);
#define Set(a0, a1, a2, a3) _Py_Set(a0, a1, a2, a3)
expr_ty _Py_Set(asdl_seq * elts, int lineno, int col_offset, PyArena *arena);
#define ListComp(a0, a1, a2, a3, a4) _Py_ListComp(a0, a1, a2, a3, a4)
expr_ty _Py_ListComp(expr_ty elt, asdl_seq * generators, int lineno, int
                     col_offset, PyArena *arena);

@@ -73,15 +73,16 @@
#define exprlist 326
#define testlist 327
#define dictmaker 328
#define classdef 329
#define arglist 330
#define argument 331
#define list_iter 332
#define list_for 333
#define list_if 334
#define gen_iter 335
#define gen_for 336
#define gen_if 337
#define testlist1 338
#define encoding_decl 339
#define yield_expr 340
#define dictorsetmaker 329
#define classdef 330
#define arglist 331
#define argument 332
#define list_iter 333
#define list_for 334
#define list_if 335
#define gen_iter 336
#define gen_for 337
#define gen_if 338
#define testlist1 339
#define encoding_decl 340
#define yield_expr 341

@@ -99,13 +99,14 @@ extern "C" {
#define LOAD_NAME 101 /* Index in name list */
#define BUILD_TUPLE 102 /* Number of tuple items */
#define BUILD_LIST 103 /* Number of list items */
#define BUILD_MAP 104 /* Always zero for now */
#define LOAD_ATTR 105 /* Index in name list */
#define COMPARE_OP 106 /* Comparison operator */
#define IMPORT_NAME 107 /* Index in name list */
#define IMPORT_FROM 108 /* Index in name list */

#define BUILD_SET 104 /* Number of set items */
#define BUILD_MAP 105 /* Always zero for now */
#define LOAD_ATTR 106 /* Index in name list */
#define COMPARE_OP 107 /* Comparison operator */
#define IMPORT_NAME 108 /* Index in name list */
#define IMPORT_FROM 109 /* Index in name list */
#define JUMP_FORWARD 110 /* Number of bytes to skip */

#define JUMP_IF_FALSE_OR_POP 111 /* Target byte offset from beginning
                                    of code */
#define JUMP_IF_TRUE_OR_POP 112 /* "" */

@@ -1107,6 +1107,22 @@ class RightShift(Node):
    def __repr__(self):
        return "RightShift((%s, %s))" % (repr(self.left), repr(self.right))

class Set(Node):
    def __init__(self, nodes, lineno=None):
        self.nodes = nodes
        self.lineno = lineno

    def getChildren(self):
        return tuple(flatten(self.nodes))

    def getChildNodes(self):
        nodelist = []
        nodelist.extend(flatten_nodes(self.nodes))
        return tuple(nodelist)

    def __repr__(self):
        return "Set(%s)" % (repr(self.nodes),)

class Slice(Node):
    def __init__(self, expr, flags, lower, upper, lineno=None):
        self.expr = expr

@@ -734,6 +734,8 @@ class StackDepthTracker:
        return -count+1
    def BUILD_LIST(self, count):
        return -count+1
    def BUILD_SET(self, count):
        return -count+1
    def CALL_FUNCTION(self, argc):
        hi, lo = divmod(argc, 256)
        return -(lo + hi * 2)

@@ -1215,6 +1215,12 @@ class CodeGenerator:
            self.visit(elt)
        self.emit('BUILD_LIST', len(node.nodes))

    def visitSet(self, node):
        self.set_lineno(node)
        for elt in node.nodes:
            self.visit(elt)
        self.emit('BUILD_SET', len(node.nodes))

    def visitSliceobj(self, node):
        for child in node.nodes:
            self.visit(child)

@@ -749,7 +749,7 @@ class Transformer:
    def atom_lbrace(self, nodelist):
        if nodelist[1][0] == token.RBRACE:
            return Dict((), lineno=nodelist[0][2])
        return self.com_dictmaker(nodelist[1])
        return self.com_dictorsetmaker(nodelist[1])

    def atom_backquote(self, nodelist):
        return Backquote(self.com_node(nodelist[1]))

@@ -1197,13 +1197,21 @@ class Transformer:
        assert node[0] == symbol.gen_iter
        return node[1]

    def com_dictmaker(self, nodelist):
        # dictmaker: test ':' test (',' test ':' value)* [',']
        items = []
        for i in range(1, len(nodelist), 4):
            items.append((self.com_node(nodelist[i]),
                          self.com_node(nodelist[i+2])))
        return Dict(items, lineno=items[0][0].lineno)
    def com_dictorsetmaker(self, nodelist):
        # dictorsetmaker: ( (test ':' test (',' test ':' test)* [',']) |
        #                   (test (',' test)* [',']) )
        assert nodelist[0] == symbol.dictorsetmaker
        if len(nodelist) == 2 or nodelist[2][0] == token.COMMA:
            items = []
            for i in range(1, len(nodelist), 2):
                items.append(self.com_node(nodelist[i]))
            return Set(items, lineno=items[0].lineno)
        else:
            items = []
            for i in range(1, len(nodelist), 4):
                items.append((self.com_node(nodelist[i]),
                              self.com_node(nodelist[i+2])))
            return Dict(items, lineno=items[0][0].lineno)

    def com_apply_trailer(self, primaryNode, nodelist):
        t = nodelist[1][0]
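The transformer hunk above gives the pure-Python compiler package the same dict/set disambiguation as the C parser. A rough interactive check (hypothetical session and output, run against a tree that includes this patch) would look like:

    import compiler
    print(compiler.parse("{1, 2}", "eval"))    # Expression(Set([Const(1), Const(2)]))
    print(compiler.parse("{1: 2}", "eval"))    # Expression(Dict([(Const(1), Const(2))]))
    print(compiler.parse("{}", "eval"))        # Expression(Dict(())) - empty braces stay a dict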
@@ -138,13 +138,13 @@ hasconst.append(100)
name_op('LOAD_NAME', 101)       # Index in name list
def_op('BUILD_TUPLE', 102)      # Number of tuple items
def_op('BUILD_LIST', 103)       # Number of list items
def_op('BUILD_MAP', 104)        # Number of dict entries (upto 255)
name_op('LOAD_ATTR', 105)       # Index in name list
def_op('COMPARE_OP', 106)       # Comparison operator
hascompare.append(106)
name_op('IMPORT_NAME', 107)     # Index in name list
name_op('IMPORT_FROM', 108)     # Index in name list

def_op('BUILD_SET', 104)        # Number of set items
def_op('BUILD_MAP', 105)        # Number of dict entries (upto 255)
name_op('LOAD_ATTR', 106)       # Index in name list
def_op('COMPARE_OP', 107)       # Comparison operator
hascompare.append(107)
name_op('IMPORT_NAME', 108)     # Index in name list
name_op('IMPORT_FROM', 109)     # Index in name list
jrel_op('JUMP_FORWARD', 110)    # Number of bytes to skip
jabs_op('JUMP_IF_FALSE_OR_POP', 111)  # Target byte offset from beginning of code
jabs_op('JUMP_IF_TRUE_OR_POP', 112)   # ""
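With BUILD_SET slotted in at 104 and the following opcodes renumbered, a set display compiles to a single new instruction. A sketch of the expected disassembly on an interpreter built with this patch (illustrative output only):

    import dis
    dis.dis(compile("{1, 2, 3}", "<example>", "eval"))
    #   1       0 LOAD_CONST               0 (1)
    #           3 LOAD_CONST               1 (2)
    #           6 LOAD_CONST               2 (3)
    #           9 BUILD_SET                3
    #          12 RETURN_VALUE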
@@ -83,18 +83,19 @@ sliceop = 325
exprlist = 326
testlist = 327
dictmaker = 328
classdef = 329
arglist = 330
argument = 331
list_iter = 332
list_for = 333
list_if = 334
gen_iter = 335
gen_for = 336
gen_if = 337
testlist1 = 338
encoding_decl = 339
yield_expr = 340
dictorsetmaker = 329
classdef = 330
arglist = 331
argument = 332
list_iter = 333
list_for = 334
list_if = 335
gen_iter = 336
gen_for = 337
gen_if = 338
testlist1 = 339
encoding_decl = 340
yield_expr = 341
#--end constants--

sym_name = {}

@@ -248,6 +248,7 @@ l[0]
l[3:4]
d = {'a': 2}
d = {}
s = {1}
t = ()
t = (1, 2)
l = []

@@ -749,7 +749,7 @@ hello world

    def testAtoms(self):
        ### atom: '(' [testlist] ')' | '[' [testlist] ']' | '{' [dictmaker] '}' | '`' testlist '`' | NAME | NUMBER | STRING
        ### dictmaker: test ':' test (',' test ':' test)* [',']
        ### dictorsetmaker: (test ':' test (',' test ':' test)* [',']) | (test (',' test)* [','])

        x = (1)
        x = (1 or 2 or 3)

@@ -769,6 +769,11 @@ hello world
        x = {'one': 1, 'two': 2,}
        x = {'one': 1, 'two': 2, 'three': 3, 'four': 4, 'five': 5, 'six': 6}

        x = {'one'}
        x = {'one', 1,}
        x = {'one', 'two', 'three'}
        x = {2, 3, 4,}

        x = `x`
        x = `1 or 2 or 3`
        self.assertEqual(`1,2`, '(1, 2)')

@@ -59,7 +59,20 @@ class RoundtripLegalSyntaxTestCase(unittest.TestCase):

    def test_expressions(self):
        self.check_expr("foo(1)")
        self.check_expr("{1:1}")
        self.check_expr("{1:1, 2:2, 3:3}")
        self.check_expr("{1:1, 2:2, 3:3,}")
        self.check_expr("{1}")
        self.check_expr("{1, 2, 3}")
        self.check_expr("{1, 2, 3,}")
        self.check_expr("[]")
        self.check_expr("[1]")
        self.check_expr("[1, 2, 3]")
        self.check_expr("[1, 2, 3,]")
        self.check_expr("()")
        self.check_expr("(1,)")
        self.check_expr("(1, 2, 3)")
        self.check_expr("(1, 2, 3,)")
        self.check_expr("[x**3 for x in range(20)]")
        self.check_expr("[x**3 for x in range(20) if x % 3]")
        self.check_expr("[x**3 for x in range(20) if x % 2 if x % 3]")

@@ -12,6 +12,8 @@ What's New in Python 2.7 alpha 3?
Core and Builtins
-----------------

- Issue #2335: Backport set literals syntax from Python 3.x.

Library
-------

@@ -935,7 +935,7 @@ VALIDATER(term); VALIDATER(factor);
VALIDATER(atom); VALIDATER(lambdef);
VALIDATER(trailer); VALIDATER(subscript);
VALIDATER(subscriptlist); VALIDATER(sliceop);
VALIDATER(exprlist); VALIDATER(dictmaker);
VALIDATER(exprlist); VALIDATER(dictorsetmaker);
VALIDATER(arglist); VALIDATER(argument);
VALIDATER(listmaker); VALIDATER(yield_stmt);
VALIDATER(testlist1); VALIDATER(gen_for);

@@ -2478,7 +2478,7 @@ validate_atom(node *tree)
               && validate_ntype(CHILD(tree, nch - 1), RBRACE));

        if (res && (nch == 3))
            res = validate_dictmaker(CHILD(tree, 1));
            res = validate_dictorsetmaker(CHILD(tree, 1));
        break;
      case BACKQUOTE:
        res = ((nch == 3)

@@ -2966,32 +2966,59 @@ validate_exprlist(node *tree)


static int
validate_dictmaker(node *tree)
validate_dictorsetmaker(node *tree)
{
    int nch = NCH(tree);
    int res = (validate_ntype(tree, dictmaker)
               && (nch >= 3)
               && validate_test(CHILD(tree, 0))
               && validate_colon(CHILD(tree, 1))
               && validate_test(CHILD(tree, 2)));
    int ok = validate_ntype(tree, dictorsetmaker);
    int i = 0;

    if (res && ((nch % 4) == 0))
        res = validate_comma(CHILD(tree, --nch));
    else if (res)
        res = ((nch % 4) == 3);
    assert(nch > 0);

    if (res && (nch > 3)) {
        int pos = 3;
        /* ( ',' test ':' test )* */
        while (res && (pos < nch)) {
            res = (validate_comma(CHILD(tree, pos))
                   && validate_test(CHILD(tree, pos + 1))
                   && validate_colon(CHILD(tree, pos + 2))
                   && validate_test(CHILD(tree, pos + 3)));
            pos += 4;
    if (ok && (nch == 1 || TYPE(CHILD(tree, 1)) == COMMA)) {
        /* We got a set:
         *  test (',' test)* [',']
         */
        ok = validate_test(CHILD(tree, i++));
        while (ok && nch - i >= 2) {
            ok = (validate_comma(CHILD(tree, i))
                  && validate_test(CHILD(tree, i+1)));
            i += 2;
        }
    }
    return (res);
    else if (ok) {
        /* We got a dict:
         *  test ':' test (',' test ':' test)* [',']
         */
        if (nch >= 3) {
            ok = (validate_test(CHILD(tree, i))
                  && validate_colon(CHILD(tree, i+1))
                  && validate_test(CHILD(tree, i+2)));
            i += 3;
        }
        else {
            ok = 0;
            err_string("illegal number of nodes for dictorsetmaker");
        }

        while (ok && nch - i >= 4) {
            ok = (validate_comma(CHILD(tree, i))
                  && validate_test(CHILD(tree, i+1))
                  && validate_colon(CHILD(tree, i+2))
                  && validate_test(CHILD(tree, i+3)));
            i += 4;
        }
    }
    /* Check for a trailing comma. */
    if (ok) {
        if (i == nch-1)
            ok = validate_comma(CHILD(tree, i));
        else if (i != nch) {
            ok = 0;
            err_string("illegal trailing nodes for dictorsetmaker");
        }
    }

    return ok;
}
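The validator rewrite above is exercised by the new test_parser cases; an equivalent manual round-trip through the parser module (a sketch mirroring what check_expr does) is:

    import parser
    st = parser.expr("{1, 2, 3}")                  # no longer raises SyntaxError
    tup = st.totuple()
    assert parser.sequence2st(tup).totuple() == tup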
@@ -56,6 +56,7 @@ module Python version "$Revision$"
         | Lambda(arguments args, expr body)
         | IfExp(expr test, expr body, expr orelse)
         | Dict(expr* keys, expr* values)
         | Set(expr* elts)
         | ListComp(expr elt, comprehension* generators)
         | GeneratorExp(expr elt, comprehension* generators)
         -- the grammar constrains where yield expressions can occur
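The ASDL line above is what gives the ast module its Set node, carrying a single elts sequence. Roughly, on an interpreter with this patch:

    import ast
    print(ast.dump(ast.parse("{1, 2, 3}", mode="eval")))
    # Expression(body=Set(elts=[Num(n=1), Num(n=2), Num(n=3)]))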
@@ -188,6 +188,10 @@ static char *Dict_fields[]={
        "keys",
        "values",
};
static PyTypeObject *Set_type;
static char *Set_fields[]={
        "elts",
};
static PyTypeObject *ListComp_type;
static char *ListComp_fields[]={
        "elt",

@@ -718,6 +722,8 @@ static int init_types(void)
        if (!IfExp_type) return 0;
        Dict_type = make_type("Dict", expr_type, Dict_fields, 2);
        if (!Dict_type) return 0;
        Set_type = make_type("Set", expr_type, Set_fields, 1);
        if (!Set_type) return 0;
        ListComp_type = make_type("ListComp", expr_type, ListComp_fields, 2);
        if (!ListComp_type) return 0;
        GeneratorExp_type = make_type("GeneratorExp", expr_type,

@@ -1589,6 +1595,20 @@ Dict(asdl_seq * keys, asdl_seq * values, int lineno, int col_offset, PyArena
        return p;
}

expr_ty
Set(asdl_seq * elts, int lineno, int col_offset, PyArena *arena)
{
        expr_ty p;
        p = (expr_ty)PyArena_Malloc(arena, sizeof(*p));
        if (!p)
                return NULL;
        p->kind = Set_kind;
        p->v.Set.elts = elts;
        p->lineno = lineno;
        p->col_offset = col_offset;
        return p;
}

expr_ty
ListComp(expr_ty elt, asdl_seq * generators, int lineno, int col_offset,
         PyArena *arena)

@@ -2566,6 +2586,15 @@ ast2obj_expr(void* _o)
                        goto failed;
                Py_DECREF(value);
                break;
        case Set_kind:
                result = PyType_GenericNew(Set_type, NULL, NULL);
                if (!result) goto failed;
                value = ast2obj_list(o->v.Set.elts, ast2obj_expr);
                if (!value) goto failed;
                if (PyObject_SetAttrString(result, "elts", value) == -1)
                        goto failed;
                Py_DECREF(value);
                break;
        case ListComp_kind:
                result = PyType_GenericNew(ListComp_type, NULL, NULL);
                if (!result) goto failed;

@@ -4860,6 +4889,42 @@ obj2ast_expr(PyObject* obj, expr_ty* out, PyArena* arena)
                if (*out == NULL) goto failed;
                return 0;
        }
        isinstance = PyObject_IsInstance(obj, (PyObject*)Set_type);
        if (isinstance == -1) {
                return 1;
        }
        if (isinstance) {
                asdl_seq* elts;

                if (PyObject_HasAttrString(obj, "elts")) {
                        int res;
                        Py_ssize_t len;
                        Py_ssize_t i;
                        tmp = PyObject_GetAttrString(obj, "elts");
                        if (tmp == NULL) goto failed;
                        if (!PyList_Check(tmp)) {
                                PyErr_Format(PyExc_TypeError, "Set field \"elts\" must be a list, not a %.200s", tmp->ob_type->tp_name);
                                goto failed;
                        }
                        len = PyList_GET_SIZE(tmp);
                        elts = asdl_seq_new(len, arena);
                        if (elts == NULL) goto failed;
                        for (i = 0; i < len; i++) {
                                expr_ty value;
                                res = obj2ast_expr(PyList_GET_ITEM(tmp, i), &value, arena);
                                if (res != 0) goto failed;
                                asdl_seq_SET(elts, i, value);
                        }
                        Py_XDECREF(tmp);
                        tmp = NULL;
                } else {
                        PyErr_SetString(PyExc_TypeError, "required field \"elts\" missing from Set");
                        return 1;
                }
                *out = Set(elts, lineno, col_offset, arena);
                if (*out == NULL) goto failed;
                return 0;
        }
        isinstance = PyObject_IsInstance(obj, (PyObject*)ListComp_type);
        if (isinstance == -1) {
                return 1;

@@ -6351,6 +6416,7 @@ init_ast(void)
                return;
        if (PyDict_SetItemString(d, "IfExp", (PyObject*)IfExp_type) < 0) return;
        if (PyDict_SetItemString(d, "Dict", (PyObject*)Dict_type) < 0) return;
        if (PyDict_SetItemString(d, "Set", (PyObject*)Set_type) < 0) return;
        if (PyDict_SetItemString(d, "ListComp", (PyObject*)ListComp_type) < 0)
                return;
        if (PyDict_SetItemString(d, "GeneratorExp",

Python/ast.c

@@ -1383,36 +1383,59 @@ ast_for_atom(struct compiling *c, const node *n)
        else
            return ast_for_listcomp(c, ch);
    case LBRACE: {
        /* dictmaker: test ':' test (',' test ':' test)* [','] */
        /* dictorsetmaker: test ':' test (',' test ':' test)* [','] |
         *                 test (',' test)* [','])
         */
        int i, size;
        asdl_seq *keys, *values;

        ch = CHILD(n, 1);
        size = (NCH(ch) + 1) / 4; /* +1 in case no trailing comma */
        keys = asdl_seq_new(size, c->c_arena);
        if (!keys)
            return NULL;

        values = asdl_seq_new(size, c->c_arena);
        if (!values)
            return NULL;

        for (i = 0; i < NCH(ch); i += 4) {
            expr_ty expression;
        if (TYPE(ch) == RBRACE) {
            /* it's an empty dict */
            return Dict(NULL, NULL, LINENO(n), n->n_col_offset, c->c_arena);
        } else if (NCH(ch) == 1 || TYPE(CHILD(ch, 1)) == COMMA) {
            /* it's a simple set */
            asdl_seq *elts;
            size = (NCH(ch) + 1) / 2; /* +1 in case no trailing comma */
            elts = asdl_seq_new(size, c->c_arena);
            if (!elts)
                return NULL;
            for (i = 0; i < NCH(ch); i += 2) {
                expr_ty expression;
                expression = ast_for_expr(c, CHILD(ch, i));
                if (!expression)
                    return NULL;
                asdl_seq_SET(elts, i / 2, expression);
            }
            return Set(elts, LINENO(n), n->n_col_offset, c->c_arena);
        } else {
            /* it's a dict */
            size = (NCH(ch) + 1) / 4; /* +1 in case no trailing comma */
            keys = asdl_seq_new(size, c->c_arena);
            if (!keys)
                return NULL;

            expression = ast_for_expr(c, CHILD(ch, i));
            if (!expression)
            values = asdl_seq_new(size, c->c_arena);
            if (!values)
                return NULL;

            for (i = 0; i < NCH(ch); i += 4) {
                expr_ty expression;

                expression = ast_for_expr(c, CHILD(ch, i));
                if (!expression)
                    return NULL;

                asdl_seq_SET(keys, i / 4, expression);
                asdl_seq_SET(keys, i / 4, expression);

                expression = ast_for_expr(c, CHILD(ch, i + 2));
                if (!expression)
                    return NULL;
                expression = ast_for_expr(c, CHILD(ch, i + 2));
                if (!expression)
                    return NULL;

                asdl_seq_SET(values, i / 4, expression);
                asdl_seq_SET(values, i / 4, expression);
            }
            return Dict(keys, values, LINENO(n), n->n_col_offset, c->c_arena);
        }
        return Dict(keys, values, LINENO(n), n->n_col_offset, c->c_arena);
    }
    case BACKQUOTE: { /* repr */
        expr_ty expression;
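ast_for_atom above is where the three brace forms are told apart: empty braces stay a dictionary, a colon after the first item selects the dict path, and anything else builds a Set node. A rough illustration at the ast level (on a build with this patch):

    import ast
    for src in ("{}", "{1: 2}", "{1, 2}"):
        print("%s -> %s" % (src, type(ast.parse(src, mode="eval").body).__name__))
    # {}     -> Dict
    # {1: 2} -> Dict
    # {1, 2} -> Set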
@@ -2186,6 +2186,25 @@ PyEval_EvalFrameEx(PyFrameObject *f, int throwflag)
            }
            break;

        case BUILD_SET:
            x = PySet_New(NULL);
            if (x != NULL) {
                for (; --oparg >= 0;) {
                    w = POP();
                    if (err == 0)
                        err = PySet_Add(x, w);
                    Py_DECREF(w);
                }
                if (err != 0) {
                    Py_DECREF(x);
                    break;
                }
                PUSH(x);
                continue;
            }
            break;


        case BUILD_MAP:
            x = _PyDict_NewPresized((Py_ssize_t)oparg);
            PUSH(x);

@@ -808,6 +808,7 @@ opcode_stack_effect(int opcode, int oparg)
            return 1;
        case BUILD_TUPLE:
        case BUILD_LIST:
        case BUILD_SET:
            return 1-oparg;
        case BUILD_MAP:
            return 1;

@@ -2894,6 +2895,11 @@ compiler_visit_expr(struct compiler *c, expr_ty e)
            ADDOP(c, STORE_MAP);
        }
        break;
    case Set_kind:
        n = asdl_seq_LEN(e->v.Set.elts);
        VISIT_SEQ(c, expr, e->v.Set.elts);
        ADDOP_I(c, BUILD_SET, n);
        break;
    case ListComp_kind:
        return compiler_listcomp(c, e);
    case GeneratorExp_kind:
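The new BUILD_SET case in opcode_stack_effect returns 1-oparg: the instruction pops its oparg element operands and pushes the one resulting set. A tiny worked check of that arithmetic (the helper name is purely illustrative):

    def build_set_stack_effect(oparg):
        # BUILD_SET pops `oparg` items and pushes a single set object
        return 1 - oparg

    assert build_set_stack_effect(3) == -2   # {1, 2, 3}: net stack change of -2
    assert build_set_stack_effect(1) == 0    # {x}: one pop, one push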
|
|
|
@ -1548,258 +1548,298 @@ static state states_72[5] = {
|
|||
{2, arcs_72_4},
|
||||
};
|
||||
static arc arcs_73_0[1] = {
|
||||
{161, 1},
|
||||
{28, 1},
|
||||
};
|
||||
static arc arcs_73_1[1] = {
|
||||
{21, 2},
|
||||
static arc arcs_73_1[3] = {
|
||||
{23, 2},
|
||||
{29, 3},
|
||||
{0, 1},
|
||||
};
|
||||
static arc arcs_73_2[2] = {
|
||||
{13, 3},
|
||||
{23, 4},
|
||||
static arc arcs_73_2[1] = {
|
||||
{28, 4},
|
||||
};
|
||||
static arc arcs_73_3[2] = {
|
||||
{9, 5},
|
||||
{15, 6},
|
||||
{28, 5},
|
||||
{0, 3},
|
||||
};
|
||||
static arc arcs_73_4[1] = {
|
||||
{24, 7},
|
||||
static arc arcs_73_4[2] = {
|
||||
{29, 6},
|
||||
{0, 4},
|
||||
};
|
||||
static arc arcs_73_5[1] = {
|
||||
{15, 6},
|
||||
static arc arcs_73_5[2] = {
|
||||
{29, 3},
|
||||
{0, 5},
|
||||
};
|
||||
static arc arcs_73_6[1] = {
|
||||
{23, 4},
|
||||
static arc arcs_73_6[2] = {
|
||||
{28, 7},
|
||||
{0, 6},
|
||||
};
|
||||
static arc arcs_73_7[1] = {
|
||||
{0, 7},
|
||||
{23, 2},
|
||||
};
|
||||
static state states_73[8] = {
|
||||
{1, arcs_73_0},
|
||||
{1, arcs_73_1},
|
||||
{2, arcs_73_2},
|
||||
{3, arcs_73_1},
|
||||
{1, arcs_73_2},
|
||||
{2, arcs_73_3},
|
||||
{1, arcs_73_4},
|
||||
{1, arcs_73_5},
|
||||
{1, arcs_73_6},
|
||||
{2, arcs_73_4},
|
||||
{2, arcs_73_5},
|
||||
{2, arcs_73_6},
|
||||
{1, arcs_73_7},
|
||||
};
|
||||
static arc arcs_74_0[3] = {
|
||||
static arc arcs_74_0[1] = {
|
||||
{162, 1},
|
||||
};
|
||||
static arc arcs_74_1[1] = {
|
||||
{21, 2},
|
||||
};
|
||||
static arc arcs_74_2[2] = {
|
||||
{13, 3},
|
||||
{23, 4},
|
||||
};
|
||||
static arc arcs_74_3[2] = {
|
||||
{9, 5},
|
||||
{15, 6},
|
||||
};
|
||||
static arc arcs_74_4[1] = {
|
||||
{24, 7},
|
||||
};
|
||||
static arc arcs_74_5[1] = {
|
||||
{15, 6},
|
||||
};
|
||||
static arc arcs_74_6[1] = {
|
||||
{23, 4},
|
||||
};
|
||||
static arc arcs_74_7[1] = {
|
||||
{0, 7},
|
||||
};
|
||||
static state states_74[8] = {
|
||||
{1, arcs_74_0},
|
||||
{1, arcs_74_1},
|
||||
{2, arcs_74_2},
|
||||
{2, arcs_74_3},
|
||||
{1, arcs_74_4},
|
||||
{1, arcs_74_5},
|
||||
{1, arcs_74_6},
|
||||
{1, arcs_74_7},
|
||||
};
|
||||
static arc arcs_75_0[3] = {
|
||||
{163, 1},
|
||||
{30, 2},
|
||||
{31, 3},
|
||||
};
|
||||
static arc arcs_74_1[2] = {
|
||||
static arc arcs_75_1[2] = {
|
||||
{29, 4},
|
||||
{0, 1},
|
||||
};
|
||||
static arc arcs_74_2[1] = {
|
||||
static arc arcs_75_2[1] = {
|
||||
{28, 5},
|
||||
};
|
||||
static arc arcs_74_3[1] = {
|
||||
static arc arcs_75_3[1] = {
|
||||
{28, 6},
|
||||
};
|
||||
static arc arcs_74_4[4] = {
|
||||
{162, 1},
|
||||
static arc arcs_75_4[4] = {
|
||||
{163, 1},
|
||||
{30, 2},
|
||||
{31, 3},
|
||||
{0, 4},
|
||||
};
|
||||
static arc arcs_74_5[2] = {
|
||||
static arc arcs_75_5[2] = {
|
||||
{29, 7},
|
||||
{0, 5},
|
||||
};
|
||||
static arc arcs_74_6[1] = {
|
||||
static arc arcs_75_6[1] = {
|
||||
{0, 6},
|
||||
};
|
||||
static arc arcs_74_7[2] = {
|
||||
{162, 5},
|
||||
static arc arcs_75_7[2] = {
|
||||
{163, 5},
|
||||
{31, 3},
|
||||
};
|
||||
static state states_74[8] = {
|
||||
{3, arcs_74_0},
|
||||
{2, arcs_74_1},
|
||||
{1, arcs_74_2},
|
||||
{1, arcs_74_3},
|
||||
{4, arcs_74_4},
|
||||
{2, arcs_74_5},
|
||||
{1, arcs_74_6},
|
||||
{2, arcs_74_7},
|
||||
static state states_75[8] = {
|
||||
{3, arcs_75_0},
|
||||
{2, arcs_75_1},
|
||||
{1, arcs_75_2},
|
||||
{1, arcs_75_3},
|
||||
{4, arcs_75_4},
|
||||
{2, arcs_75_5},
|
||||
{1, arcs_75_6},
|
||||
{2, arcs_75_7},
|
||||
};
|
||||
static arc arcs_75_0[1] = {
|
||||
static arc arcs_76_0[1] = {
|
||||
{28, 1},
|
||||
};
|
||||
static arc arcs_75_1[3] = {
|
||||
static arc arcs_76_1[3] = {
|
||||
{157, 2},
|
||||
{27, 3},
|
||||
{0, 1},
|
||||
};
|
||||
static arc arcs_75_2[1] = {
|
||||
static arc arcs_76_2[1] = {
|
||||
{0, 2},
|
||||
};
|
||||
static arc arcs_75_3[1] = {
|
||||
static arc arcs_76_3[1] = {
|
||||
{28, 2},
|
||||
};
|
||||
static state states_75[4] = {
|
||||
{1, arcs_75_0},
|
||||
{3, arcs_75_1},
|
||||
{1, arcs_75_2},
|
||||
{1, arcs_75_3},
|
||||
static state states_76[4] = {
|
||||
{1, arcs_76_0},
|
||||
{3, arcs_76_1},
|
||||
{1, arcs_76_2},
|
||||
{1, arcs_76_3},
|
||||
};
|
||||
static arc arcs_76_0[2] = {
|
||||
static arc arcs_77_0[2] = {
|
||||
{156, 1},
|
||||
{164, 1},
|
||||
};
|
||||
static arc arcs_76_1[1] = {
|
||||
{0, 1},
|
||||
};
|
||||
static state states_76[2] = {
|
||||
{2, arcs_76_0},
|
||||
{1, arcs_76_1},
|
||||
};
|
||||
static arc arcs_77_0[1] = {
|
||||
{96, 1},
|
||||
{165, 1},
|
||||
};
|
||||
static arc arcs_77_1[1] = {
|
||||
{61, 2},
|
||||
{0, 1},
|
||||
};
|
||||
static arc arcs_77_2[1] = {
|
||||
{85, 3},
|
||||
};
|
||||
static arc arcs_77_3[1] = {
|
||||
{105, 4},
|
||||
};
|
||||
static arc arcs_77_4[2] = {
|
||||
{163, 5},
|
||||
{0, 4},
|
||||
};
|
||||
static arc arcs_77_5[1] = {
|
||||
{0, 5},
|
||||
};
|
||||
static state states_77[6] = {
|
||||
{1, arcs_77_0},
|
||||
static state states_77[2] = {
|
||||
{2, arcs_77_0},
|
||||
{1, arcs_77_1},
|
||||
{1, arcs_77_2},
|
||||
{1, arcs_77_3},
|
||||
{2, arcs_77_4},
|
||||
{1, arcs_77_5},
|
||||
};
|
||||
static arc arcs_78_0[1] = {
|
||||
{92, 1},
|
||||
};
|
||||
static arc arcs_78_1[1] = {
|
||||
{106, 2},
|
||||
};
|
||||
static arc arcs_78_2[2] = {
|
||||
{163, 3},
|
||||
{0, 2},
|
||||
};
|
||||
static arc arcs_78_3[1] = {
|
||||
{0, 3},
|
||||
};
|
||||
static state states_78[4] = {
|
||||
{1, arcs_78_0},
|
||||
{1, arcs_78_1},
|
||||
{2, arcs_78_2},
|
||||
{1, arcs_78_3},
|
||||
};
|
||||
static arc arcs_79_0[2] = {
|
||||
{157, 1},
|
||||
{166, 1},
|
||||
};
|
||||
static arc arcs_79_1[1] = {
|
||||
{0, 1},
|
||||
};
|
||||
static state states_79[2] = {
|
||||
{2, arcs_79_0},
|
||||
{1, arcs_79_1},
|
||||
};
|
||||
static arc arcs_80_0[1] = {
|
||||
{96, 1},
|
||||
};
|
||||
static arc arcs_80_1[1] = {
|
||||
static arc arcs_78_1[1] = {
|
||||
{61, 2},
|
||||
};
|
||||
static arc arcs_80_2[1] = {
|
||||
static arc arcs_78_2[1] = {
|
||||
{85, 3},
|
||||
};
|
||||
static arc arcs_80_3[1] = {
|
||||
{107, 4},
|
||||
static arc arcs_78_3[1] = {
|
||||
{105, 4},
|
||||
};
|
||||
static arc arcs_80_4[2] = {
|
||||
{165, 5},
|
||||
static arc arcs_78_4[2] = {
|
||||
{164, 5},
|
||||
{0, 4},
|
||||
};
|
||||
static arc arcs_80_5[1] = {
|
||||
static arc arcs_78_5[1] = {
|
||||
{0, 5},
|
||||
};
|
||||
static state states_80[6] = {
|
||||
{1, arcs_80_0},
|
||||
{1, arcs_80_1},
|
||||
{1, arcs_80_2},
|
||||
{1, arcs_80_3},
|
||||
{2, arcs_80_4},
|
||||
{1, arcs_80_5},
|
||||
static state states_78[6] = {
|
||||
{1, arcs_78_0},
|
||||
{1, arcs_78_1},
|
||||
{1, arcs_78_2},
|
||||
{1, arcs_78_3},
|
||||
{2, arcs_78_4},
|
||||
{1, arcs_78_5},
|
||||
};
|
||||
static arc arcs_81_0[1] = {
|
||||
static arc arcs_79_0[1] = {
|
||||
{92, 1},
|
||||
};
|
||||
static arc arcs_81_1[1] = {
|
||||
static arc arcs_79_1[1] = {
|
||||
{106, 2},
|
||||
};
|
||||
static arc arcs_81_2[2] = {
|
||||
{165, 3},
|
||||
static arc arcs_79_2[2] = {
|
||||
{164, 3},
|
||||
{0, 2},
|
||||
};
|
||||
static arc arcs_81_3[1] = {
|
||||
static arc arcs_79_3[1] = {
|
||||
{0, 3},
|
||||
};
|
||||
static state states_81[4] = {
|
||||
{1, arcs_81_0},
|
||||
{1, arcs_81_1},
|
||||
{2, arcs_81_2},
|
||||
{1, arcs_81_3},
|
||||
static state states_79[4] = {
|
||||
{1, arcs_79_0},
|
||||
{1, arcs_79_1},
|
||||
{2, arcs_79_2},
|
||||
{1, arcs_79_3},
|
||||
};
|
||||
static arc arcs_82_0[1] = {
|
||||
{28, 1},
|
||||
static arc arcs_80_0[2] = {
|
||||
{157, 1},
|
||||
{167, 1},
|
||||
};
|
||||
static arc arcs_82_1[2] = {
|
||||
{29, 0},
|
||||
static arc arcs_80_1[1] = {
|
||||
{0, 1},
|
||||
};
|
||||
static state states_82[2] = {
|
||||
static state states_80[2] = {
|
||||
{2, arcs_80_0},
|
||||
{1, arcs_80_1},
|
||||
};
|
||||
static arc arcs_81_0[1] = {
|
||||
{96, 1},
|
||||
};
|
||||
static arc arcs_81_1[1] = {
|
||||
{61, 2},
|
||||
};
|
||||
static arc arcs_81_2[1] = {
|
||||
{85, 3},
|
||||
};
|
||||
static arc arcs_81_3[1] = {
|
||||
{107, 4},
|
||||
};
|
||||
static arc arcs_81_4[2] = {
|
||||
{166, 5},
|
||||
{0, 4},
|
||||
};
|
||||
static arc arcs_81_5[1] = {
|
||||
{0, 5},
|
||||
};
|
||||
static state states_81[6] = {
|
||||
{1, arcs_81_0},
|
||||
{1, arcs_81_1},
|
||||
{1, arcs_81_2},
|
||||
{1, arcs_81_3},
|
||||
{2, arcs_81_4},
|
||||
{1, arcs_81_5},
|
||||
};
|
||||
static arc arcs_82_0[1] = {
|
||||
{92, 1},
|
||||
};
|
||||
static arc arcs_82_1[1] = {
|
||||
{106, 2},
|
||||
};
|
||||
static arc arcs_82_2[2] = {
|
||||
{166, 3},
|
||||
{0, 2},
|
||||
};
|
||||
static arc arcs_82_3[1] = {
|
||||
{0, 3},
|
||||
};
|
||||
static state states_82[4] = {
|
||||
{1, arcs_82_0},
|
||||
{2, arcs_82_1},
|
||||
{1, arcs_82_1},
|
||||
{2, arcs_82_2},
|
||||
{1, arcs_82_3},
|
||||
};
|
||||
static arc arcs_83_0[1] = {
|
||||
{21, 1},
|
||||
{28, 1},
|
||||
};
|
||||
static arc arcs_83_1[1] = {
|
||||
static arc arcs_83_1[2] = {
|
||||
{29, 0},
|
||||
{0, 1},
|
||||
};
|
||||
static state states_83[2] = {
|
||||
{1, arcs_83_0},
|
||||
{1, arcs_83_1},
|
||||
{2, arcs_83_1},
|
||||
};
|
||||
static arc arcs_84_0[1] = {
|
||||
{168, 1},
|
||||
{21, 1},
|
||||
};
|
||||
static arc arcs_84_1[2] = {
|
||||
static arc arcs_84_1[1] = {
|
||||
{0, 1},
|
||||
};
|
||||
static state states_84[2] = {
|
||||
{1, arcs_84_0},
|
||||
{1, arcs_84_1},
|
||||
};
|
||||
static arc arcs_85_0[1] = {
|
||||
{169, 1},
|
||||
};
|
||||
static arc arcs_85_1[2] = {
|
||||
{9, 2},
|
||||
{0, 1},
|
||||
};
|
||||
static arc arcs_84_2[1] = {
|
||||
static arc arcs_85_2[1] = {
|
||||
{0, 2},
|
||||
};
|
||||
static state states_84[3] = {
|
||||
{1, arcs_84_0},
|
||||
{2, arcs_84_1},
|
||||
{1, arcs_84_2},
|
||||
static state states_85[3] = {
|
||||
{1, arcs_85_0},
|
||||
{2, arcs_85_1},
|
||||
{1, arcs_85_2},
|
||||
};
|
||||
static dfa dfas[85] = {
|
||||
static dfa dfas[86] = {
|
||||
{256, "single_input", 0, 3, states_0,
|
||||
"\004\050\060\000\000\000\000\124\360\024\114\220\023\040\010\000\200\041\044\015\002\001"},
|
||||
"\004\050\060\000\000\000\000\124\360\024\114\220\023\040\010\000\200\041\044\015\004\002"},
|
||||
{257, "file_input", 0, 2, states_1,
|
||||
"\204\050\060\000\000\000\000\124\360\024\114\220\023\040\010\000\200\041\044\015\002\001"},
|
||||
"\204\050\060\000\000\000\000\124\360\024\114\220\023\040\010\000\200\041\044\015\004\002"},
|
||||
{258, "eval_input", 0, 3, states_2,
|
||||
"\000\040\040\000\000\000\000\000\000\000\000\000\000\040\010\000\200\041\044\015\000\000"},
|
||||
{259, "decorator", 0, 7, states_3,
|
||||
|
@ -1819,11 +1859,11 @@ static dfa dfas[85] = {
|
|||
{266, "fplist", 0, 3, states_10,
|
||||
"\000\040\040\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
||||
{267, "stmt", 0, 2, states_11,
|
||||
"\000\050\060\000\000\000\000\124\360\024\114\220\023\040\010\000\200\041\044\015\002\001"},
|
||||
"\000\050\060\000\000\000\000\124\360\024\114\220\023\040\010\000\200\041\044\015\004\002"},
|
||||
{268, "simple_stmt", 0, 4, states_12,
|
||||
"\000\040\040\000\000\000\000\124\360\024\114\000\000\040\010\000\200\041\044\015\000\001"},
|
||||
"\000\040\040\000\000\000\000\124\360\024\114\000\000\040\010\000\200\041\044\015\000\002"},
|
||||
{269, "small_stmt", 0, 2, states_13,
|
||||
"\000\040\040\000\000\000\000\124\360\024\114\000\000\040\010\000\200\041\044\015\000\001"},
|
||||
"\000\040\040\000\000\000\000\124\360\024\114\000\000\040\010\000\200\041\044\015\000\002"},
|
||||
{270, "expr_stmt", 0, 6, states_14,
|
||||
"\000\040\040\000\000\000\000\000\000\000\000\000\000\040\010\000\200\041\044\015\000\000"},
|
||||
{271, "augassign", 0, 2, states_15,
|
||||
|
@ -1835,7 +1875,7 @@ static dfa dfas[85] = {
|
|||
{274, "pass_stmt", 0, 2, states_18,
|
||||
"\000\000\000\000\000\000\000\100\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
||||
{275, "flow_stmt", 0, 2, states_19,
|
||||
"\000\000\000\000\000\000\000\000\360\000\000\000\000\000\000\000\000\000\000\000\000\001"},
|
||||
"\000\000\000\000\000\000\000\000\360\000\000\000\000\000\000\000\000\000\000\000\000\002"},
|
||||
{276, "break_stmt", 0, 2, states_20,
|
||||
"\000\000\000\000\000\000\000\000\020\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
||||
{277, "continue_stmt", 0, 2, states_21,
|
||||
|
@ -1843,7 +1883,7 @@ static dfa dfas[85] = {
|
|||
{278, "return_stmt", 0, 3, states_22,
|
||||
"\000\000\000\000\000\000\000\000\100\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
||||
{279, "yield_stmt", 0, 2, states_23,
|
||||
"\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\001"},
|
||||
"\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\002"},
|
||||
{280, "raise_stmt", 0, 7, states_24,
|
||||
"\000\000\000\000\000\000\000\000\200\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
||||
{281, "import_stmt", 0, 2, states_25,
|
||||
|
@ -1869,7 +1909,7 @@ static dfa dfas[85] = {
|
|||
{291, "assert_stmt", 0, 5, states_35,
|
||||
"\000\000\000\000\000\000\000\000\000\000\100\000\000\000\000\000\000\000\000\000\000\000"},
|
||||
{292, "compound_stmt", 0, 2, states_36,
|
||||
"\000\010\020\000\000\000\000\000\000\000\000\220\023\000\000\000\000\000\000\000\002\000"},
|
||||
"\000\010\020\000\000\000\000\000\000\000\000\220\023\000\000\000\000\000\000\000\004\000"},
|
||||
{293, "if_stmt", 0, 8, states_37,
|
||||
"\000\000\000\000\000\000\000\000\000\000\000\020\000\000\000\000\000\000\000\000\000\000"},
|
||||
{294, "while_stmt", 0, 8, states_38,
|
||||
|
@ -1885,7 +1925,7 @@ static dfa dfas[85] = {
|
|||
{299, "except_clause", 0, 5, states_43,
|
||||
"\000\000\000\000\000\000\000\000\000\000\000\000\100\000\000\000\000\000\000\000\000\000"},
|
||||
{300, "suite", 0, 5, states_44,
|
||||
"\004\040\040\000\000\000\000\124\360\024\114\000\000\040\010\000\200\041\044\015\000\001"},
|
||||
"\004\040\040\000\000\000\000\124\360\024\114\000\000\040\010\000\200\041\044\015\000\002"},
|
||||
{301, "testlist_safe", 0, 5, states_45,
|
||||
"\000\040\040\000\000\000\000\000\000\000\000\000\000\040\010\000\200\041\044\015\000\000"},
|
||||
{302, "old_test", 0, 2, states_46,
|
||||
|
@ -1942,32 +1982,34 @@ static dfa dfas[85] = {
|
|||
"\000\040\040\000\000\000\000\000\000\000\000\000\000\040\010\000\200\041\044\015\000\000"},
|
||||
{328, "dictmaker", 0, 5, states_72,
|
||||
"\000\040\040\000\000\000\000\000\000\000\000\000\000\040\010\000\200\041\044\015\000\000"},
|
||||
{329, "classdef", 0, 8, states_73,
|
||||
"\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\002\000"},
|
||||
{330, "arglist", 0, 8, states_74,
|
||||
{329, "dictorsetmaker", 0, 8, states_73,
|
||||
"\000\040\040\000\000\000\000\000\000\000\000\000\000\040\010\000\200\041\044\015\000\000"},
|
||||
{330, "classdef", 0, 8, states_74,
|
||||
"\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\004\000"},
|
||||
{331, "arglist", 0, 8, states_75,
|
||||
"\000\040\040\300\000\000\000\000\000\000\000\000\000\040\010\000\200\041\044\015\000\000"},
|
||||
{331, "argument", 0, 4, states_75,
|
||||
{332, "argument", 0, 4, states_76,
|
||||
"\000\040\040\000\000\000\000\000\000\000\000\000\000\040\010\000\200\041\044\015\000\000"},
|
||||
{332, "list_iter", 0, 2, states_76,
|
||||
{333, "list_iter", 0, 2, states_77,
|
||||
"\000\000\000\000\000\000\000\000\000\000\000\020\001\000\000\000\000\000\000\000\000\000"},
|
||||
{333, "list_for", 0, 6, states_77,
|
||||
{334, "list_for", 0, 6, states_78,
|
||||
"\000\000\000\000\000\000\000\000\000\000\000\000\001\000\000\000\000\000\000\000\000\000"},
|
||||
{334, "list_if", 0, 4, states_78,
|
||||
{335, "list_if", 0, 4, states_79,
|
||||
"\000\000\000\000\000\000\000\000\000\000\000\020\000\000\000\000\000\000\000\000\000\000"},
|
||||
{335, "gen_iter", 0, 2, states_79,
|
||||
{336, "gen_iter", 0, 2, states_80,
|
||||
"\000\000\000\000\000\000\000\000\000\000\000\020\001\000\000\000\000\000\000\000\000\000"},
|
||||
{336, "gen_for", 0, 6, states_80,
|
||||
{337, "gen_for", 0, 6, states_81,
|
||||
"\000\000\000\000\000\000\000\000\000\000\000\000\001\000\000\000\000\000\000\000\000\000"},
|
||||
{337, "gen_if", 0, 4, states_81,
|
||||
{338, "gen_if", 0, 4, states_82,
|
||||
"\000\000\000\000\000\000\000\000\000\000\000\020\000\000\000\000\000\000\000\000\000\000"},
|
||||
{338, "testlist1", 0, 2, states_82,
|
||||
{339, "testlist1", 0, 2, states_83,
|
||||
"\000\040\040\000\000\000\000\000\000\000\000\000\000\040\010\000\200\041\044\015\000\000"},
|
||||
{339, "encoding_decl", 0, 2, states_83,
|
||||
{340, "encoding_decl", 0, 2, states_84,
|
||||
"\000\000\040\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
|
||||
{340, "yield_expr", 0, 3, states_84,
|
||||
"\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\001"},
|
||||
{341, "yield_expr", 0, 3, states_85,
|
||||
"\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\002"},
|
||||
};
|
||||
static label labels[169] = {
|
||||
static label labels[170] = {
|
||||
{0, "EMPTY"},
|
||||
{256, 0},
|
||||
{4, 0},
|
||||
|
@ -1982,11 +2024,11 @@ static label labels[169] = {
|
|||
{50, 0},
|
||||
{288, 0},
|
||||
{7, 0},
|
||||
{330, 0},
|
||||
{331, 0},
|
||||
{8, 0},
|
||||
{260, 0},
|
||||
{261, 0},
|
||||
{329, 0},
|
||||
{330, 0},
|
||||
{262, 0},
|
||||
{1, "def"},
|
||||
{1, 0},
|
||||
|
@ -2013,7 +2055,7 @@ static label labels[169] = {
|
|||
{290, 0},
|
||||
{291, 0},
|
||||
{271, 0},
|
||||
{340, 0},
|
||||
{341, 0},
|
||||
{37, 0},
|
||||
{38, 0},
|
||||
{39, 0},
|
||||
|
@ -2118,29 +2160,30 @@ static label labels[169] = {
|
|||
{319, 0},
|
||||
{10, 0},
|
||||
{26, 0},
|
||||
{328, 0},
|
||||
{329, 0},
|
||||
{27, 0},
|
||||
{25, 0},
|
||||
{338, 0},
|
||||
{339, 0},
|
||||
{2, 0},
|
||||
{3, 0},
|
||||
{333, 0},
|
||||
{336, 0},
|
||||
{334, 0},
|
||||
{337, 0},
|
||||
{323, 0},
|
||||
{324, 0},
|
||||
{325, 0},
|
||||
{328, 0},
|
||||
{1, "class"},
|
||||
{331, 0},
|
||||
{332, 0},
|
||||
{334, 0},
|
||||
{333, 0},
|
||||
{335, 0},
|
||||
{337, 0},
|
||||
{339, 0},
|
||||
{336, 0},
|
||||
{338, 0},
|
||||
{340, 0},
|
||||
{1, "yield"},
|
||||
};
|
||||
grammar _PyParser_Grammar = {
|
||||
85,
|
||||
86,
|
||||
dfas,
|
||||
{169, labels},
|
||||
{170, labels},
|
||||
256
|
||||
};
|
||||
|
@@ -75,9 +75,10 @@ typedef unsigned short mode_t;
       Python 2.7a0: 62181 (optimize conditional branches:
                            introduce POP_JUMP_IF_FALSE and POP_JUMP_IF_TRUE)
       Python 2.7a0 62191 (introduce SETUP_WITH)
       Python 2.7a0 62201 (introduce BUILD_SET)
       .
*/
#define MAGIC (62191 | ((long)'\r'<<16) | ((long)'\n'<<24))
#define MAGIC (62201 | ((long)'\r'<<16) | ((long)'\n'<<24))

/* Magic word as global; note that _PyImport_Init() can change the
   value of this global to accommodate for alterations of how the

@@ -1211,6 +1211,9 @@ symtable_visit_expr(struct symtable *st, expr_ty e)
        VISIT_SEQ(st, expr, e->v.Dict.keys);
        VISIT_SEQ(st, expr, e->v.Dict.values);
        break;
    case Set_kind:
        VISIT_SEQ(st, expr, e->v.Set.elts);
        break;
    case ListComp_kind:
        VISIT(st, expr, e->v.ListComp.elt);
        VISIT_SEQ(st, comprehension, e->v.ListComp.generators);