[pypy-svn] r65632 - in pypy/branch/parser-compiler/pypy/interpreter/pyparser: . test

benjamin at codespeak.net
Sat Jun 6 22:58:14 CEST 2009


Author: benjamin
Date: Sat Jun  6 22:58:14 2009
New Revision: 65632

Modified:
   pypy/branch/parser-compiler/pypy/interpreter/pyparser/metaparser.py
   pypy/branch/parser-compiler/pypy/interpreter/pyparser/parser.py
   pypy/branch/parser-compiler/pypy/interpreter/pyparser/test/test_metaparser.py
Log:
make labels a simple list of ints instead of a list of tuples with an unused field

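For context, a minimal sketch of the representation change (illustrative values only; KEYWORD_TOKEN is assumed to be the NAME token id, as in pgen):

    # Illustrative values, not the real grammar tables.
    KEYWORD_TOKEN = 1                         # assumed NAME token id

    labels_before = [(256, None),             # a nonterminal symbol id
                     (2, None),               # a plain token id
                     (KEYWORD_TOKEN, "for")]  # a keyword
    labels_after = [256, 2, KEYWORD_TOKEN]

    # The parser only ever read labels[i][0], and keyword strings remain
    # recoverable through grammar.keyword_ids, so the flat list of ints
    # carries the same information.
    assert [entry[0] for entry in labels_before] == labels_after
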
Modified: pypy/branch/parser-compiler/pypy/interpreter/pyparser/metaparser.py
==============================================================================
--- pypy/branch/parser-compiler/pypy/interpreter/pyparser/metaparser.py	(original)
+++ pypy/branch/parser-compiler/pypy/interpreter/pyparser/metaparser.py	Sat Jun  6 22:58:14 2009
@@ -156,7 +156,7 @@
                 if label in gram.symbol_to_label:
                     return gram.symbol_to_label[label]
                 else:
-                    gram.labels.append((gram.symbol_ids[label], None))
+                    gram.labels.append(gram.symbol_ids[label])
                     gram.symbol_to_label[label] = label_index
                     return label_index
             elif label.isupper():
@@ -164,7 +164,7 @@
                 if token_index in gram.token_ids:
                     return gram.token_ids[token_index]
                 else:
-                    gram.labels.append((token_index, None))
+                    gram.labels.append(token_index)
                     gram.token_ids[token_index] = label_index
                     return label_index
             else:
@@ -177,7 +177,7 @@
                 if value in gram.keyword_ids:
                     return gram.keyword_ids[value]
                 else:
-                    gram.labels.append((gram.KEYWORD_TOKEN, value))
+                    gram.labels.append(gram.KEYWORD_TOKEN)
                     gram.keyword_ids[value] = label_index
                     return label_index
             else:
@@ -188,7 +188,7 @@
                 if token_index in gram.token_ids:
                     return gram.token_ids[token_index]
                 else:
-                    gram.labels.append((token_index, None))
+                    gram.labels.append(token_index)
                     gram.token_ids[token_index] = label_index
                     return label_index
 

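For reference, a hedged sketch of the allocation pattern the hunks above follow, assuming (as in pgen) that label_index is len(gram.labels) at the time of the append; the helper name is hypothetical:

    def get_token_label(gram, token_index):
        # Return the existing label index for this token, or allocate one.
        if token_index in gram.token_ids:
            return gram.token_ids[token_index]
        label_index = len(gram.labels)
        gram.labels.append(token_index)   # a bare int after this commit
        gram.token_ids[token_index] = label_index
        return label_index
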
Modified: pypy/branch/parser-compiler/pypy/interpreter/pyparser/parser.py
==============================================================================
--- pypy/branch/parser-compiler/pypy/interpreter/pyparser/parser.py	(original)
+++ pypy/branch/parser-compiler/pypy/interpreter/pyparser/parser.py	Sat Jun  6 22:58:14 2009
@@ -85,7 +85,7 @@
             states, first = dfa
             arcs = states[state_index]
             for i, next_state in arcs:
-                t = self.grammar.labels[i][0]
+                sym_id = self.grammar.labels[i]
                 if label_index == i:
                     self.shift(next_state, token_type, value, lineno, column)
                     state_index = next_state
@@ -96,10 +96,11 @@
                         dfa, state_index, node = self.stack[-1]
                         states = dfa[0]
                     return False
-                elif t >= 256:
-                    sub_node_dfa = self.grammar.dfas[t]
+                elif sym_id >= 256:
+                    sub_node_dfa = self.grammar.dfas[sym_id]
                     if label_index in sub_node_dfa[1]:
-                        self.push(sub_node_dfa, next_state, t, lineno, column)
+                        self.push(sub_node_dfa, next_state, sym_id, lineno,
+                                  column)
                         break
             else:
                 if (0, state_index) in arcs:

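A simplified, hypothetical rendering of the arc loop touched above, showing what the renamed variable holds (the real code lives in the parser's token-handling method; this standalone helper is only illustrative):

    def follow_arcs(grammar, arcs, label_index):
        # Walk the arcs of the current DFA state: shift on an exact label
        # match, or push a sub-DFA when the label names a nonterminal.
        for i, next_state in arcs:
            sym_id = grammar.labels[i]    # was grammar.labels[i][0]
            if label_index == i:
                return ("shift", next_state)
            elif sym_id >= 256:           # ids >= 256 name nonterminals
                sub_node_dfa = grammar.dfas[sym_id]
                if label_index in sub_node_dfa[1]:
                    return ("push", sym_id, next_state)
        return None
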
Modified: pypy/branch/parser-compiler/pypy/interpreter/pyparser/test/test_metaparser.py
==============================================================================
--- pypy/branch/parser-compiler/pypy/interpreter/pyparser/test/test_metaparser.py	(original)
+++ pypy/branch/parser-compiler/pypy/interpreter/pyparser/test/test_metaparser.py	Sat Jun  6 22:58:14 2009
@@ -71,21 +71,15 @@
         g = self.gram_for("foo: 'some_keyword' 'for'")
         assert len(g.keyword_ids) == 2
         assert len(g.token_ids) == 0
-        for keyword in ("some_keyword", "for"):
-            label_index = g.keyword_ids[keyword]
-            assert g.labels[label_index][1] == keyword
 
     def test_token(self):
         g = self.gram_for("foo: NAME")
         assert len(g.token_ids) == 1
-        label_index = g.token_ids[token.NAME]
-        assert g.labels[label_index][1] is None
 
     def test_operator(self):
         g = self.gram_for("add: NUMBER '+' NUMBER")
         assert len(g.keyword_ids) == 0
         assert len(g.token_ids) == 2
-        assert g.labels[g.token_ids[token.OP]][1] is None
 
         exc = py.test.raises(PgenError, self.gram_for, "add: '/'").value
         assert str(exc) == "no such operator: '/'"
@@ -93,7 +87,7 @@
     def test_symbol(self):
         g = self.gram_for("foo: some_other_rule\nsome_other_rule: NAME")
         assert len(g.dfas) == 2
-        assert len(g.labels) == 3
+        assert len(g.labels) == 2
 
         exc = py.test.raises(PgenError, self.gram_for, "foo: no_rule").value
         assert str(exc) == "no such rule: 'no_rule'"


