Patchwork [2,of,5] parser: fill invalid infix and suffix actions with None

login
register
mail settings
Submitter Yuya Nishihara
Date July 14, 2015, 2:44 p.m.
Message ID <2a4c58a330b6a84a228a.1436885066@mimosa>
Download mbox | patch
Permalink /patch/9972/
State Accepted
Headers show

Comments

Yuya Nishihara - July 14, 2015, 2:44 p.m.
# HG changeset patch
# User Yuya Nishihara <yuya@tcha.org>
# Date 1436062642 -32400
#      Sun Jul 05 11:17:22 2015 +0900
# Node ID 2a4c58a330b6a84a228ac5a9ce008b5cf4572d3f
# Parent  72b91af07b4a9e52d88fce6f856a5c9f6739677f
parser: fill invalid infix and suffix actions with None

This can simplify the expansion of (prefix, infix, suffix) actions.

Patch

diff --git a/mercurial/fileset.py b/mercurial/fileset.py
--- a/mercurial/fileset.py
+++ b/mercurial/fileset.py
@@ -11,20 +11,20 @@  from i18n import _
 
 elements = {
     # token-type: binding-strength, prefix, infix, suffix
-    "(": (20, ("group", 1, ")"), ("func", 1, ")")),
-    "-": (5, ("negate", 19), ("minus", 5)),
-    "not": (10, ("not", 10)),
-    "!": (10, ("not", 10)),
-    "and": (5, None, ("and", 5)),
-    "&": (5, None, ("and", 5)),
-    "or": (4, None, ("or", 4)),
-    "|": (4, None, ("or", 4)),
-    "+": (4, None, ("or", 4)),
-    ",": (2, None, ("list", 2)),
-    ")": (0, None, None),
-    "symbol": (0, ("symbol",), None),
-    "string": (0, ("string",), None),
-    "end": (0, None, None),
+    "(": (20, ("group", 1, ")"), ("func", 1, ")"), None),
+    "-": (5, ("negate", 19), ("minus", 5), None),
+    "not": (10, ("not", 10), None, None),
+    "!": (10, ("not", 10), None, None),
+    "and": (5, None, ("and", 5), None),
+    "&": (5, None, ("and", 5), None),
+    "or": (4, None, ("or", 4), None),
+    "|": (4, None, ("or", 4), None),
+    "+": (4, None, ("or", 4), None),
+    ",": (2, None, ("list", 2), None),
+    ")": (0, None, None, None),
+    "symbol": (0, ("symbol",), None, None),
+    "string": (0, ("string",), None, None),
+    "end": (0, None, None, None),
 }
 
 keywords = set(['and', 'or', 'not'])
diff --git a/mercurial/parser.py b/mercurial/parser.py
--- a/mercurial/parser.py
+++ b/mercurial/parser.py
@@ -12,7 +12,7 @@ 
 # takes a tokenizer and elements
 # tokenizer is an iterator that returns (type, value, pos) tuples
 # elements is a mapping of types to binding strength, prefix, infix and
-# optional suffix actions
+# suffix actions
 # an action is a tree node name, a tree label, and an optional match
 # __call__(program) parses program into a labeled tree
 
@@ -54,16 +54,14 @@  class parser(object):
         # gather tokens until we meet a lower binding strength
         while bind < self._elements[self.current[0]][0]:
             token, value, pos = self._advance()
-            e = self._elements[token]
+            infix, suffix = self._elements[token][2:]
             # check for suffix - next token isn't a valid prefix
-            if len(e) == 4 and not self._elements[self.current[0]][1]:
-                suffix = e[3]
+            if suffix and not self._elements[self.current[0]][1]:
                 expr = (suffix[0], expr)
             else:
                 # handle infix rules
-                if len(e) < 3 or not e[2]:
+                if not infix:
                     raise error.ParseError(_("not an infix: %s") % token, pos)
-                infix = e[2]
                 if len(infix) == 3 and infix[2] == self.current[0]:
                     self._match(infix[2], pos)
                     expr = (infix[0], expr, (None))
diff --git a/mercurial/revset.py b/mercurial/revset.py
--- a/mercurial/revset.py
+++ b/mercurial/revset.py
@@ -116,30 +116,30 @@  def _revsbetween(repo, roots, heads):
 
 elements = {
     # token-type: binding-strength, prefix, infix, suffix
-    "(": (21, ("group", 1, ")"), ("func", 1, ")")),
-    "##": (20, None, ("_concat", 20)),
-    "~": (18, None, ("ancestor", 18)),
+    "(": (21, ("group", 1, ")"), ("func", 1, ")"), None),
+    "##": (20, None, ("_concat", 20), None),
+    "~": (18, None, ("ancestor", 18), None),
     "^": (18, None, ("parent", 18), ("parentpost", 18)),
-    "-": (5, ("negate", 19), ("minus", 5)),
+    "-": (5, ("negate", 19), ("minus", 5), None),
     "::": (17, ("dagrangepre", 17), ("dagrange", 17),
            ("dagrangepost", 17)),
     "..": (17, ("dagrangepre", 17), ("dagrange", 17),
            ("dagrangepost", 17)),
     ":": (15, ("rangepre", 15), ("range", 15), ("rangepost", 15)),
-    "not": (10, ("not", 10)),
-    "!": (10, ("not", 10)),
-    "and": (5, None, ("and", 5)),
-    "&": (5, None, ("and", 5)),
+    "not": (10, ("not", 10), None, None),
+    "!": (10, ("not", 10), None, None),
+    "and": (5, None, ("and", 5), None),
+    "&": (5, None, ("and", 5), None),
     "%": (5, None, ("only", 5), ("onlypost", 5)),
-    "or": (4, None, ("or", 4)),
-    "|": (4, None, ("or", 4)),
-    "+": (4, None, ("or", 4)),
-    "=": (3, None, ("keyvalue", 3)),
-    ",": (2, None, ("list", 2)),
-    ")": (0, None, None),
-    "symbol": (0, ("symbol",), None),
-    "string": (0, ("string",), None),
-    "end": (0, None, None),
+    "or": (4, None, ("or", 4), None),
+    "|": (4, None, ("or", 4), None),
+    "+": (4, None, ("or", 4), None),
+    "=": (3, None, ("keyvalue", 3), None),
+    ",": (2, None, ("list", 2), None),
+    ")": (0, None, None, None),
+    "symbol": (0, ("symbol",), None, None),
+    "string": (0, ("string",), None, None),
+    "end": (0, None, None, None),
 }
 
 keywords = set(['and', 'or', 'not'])
diff --git a/mercurial/templater.py b/mercurial/templater.py
--- a/mercurial/templater.py
+++ b/mercurial/templater.py
@@ -16,16 +16,16 @@  import minirst
 
 elements = {
     # token-type: binding-strength, prefix, infix, suffix
-    "(": (20, ("group", 1, ")"), ("func", 1, ")")),
-    ",": (2, None, ("list", 2)),
-    "|": (5, None, ("|", 5)),
-    "%": (6, None, ("%", 6)),
-    ")": (0, None, None),
-    "integer": (0, ("integer",), None),
-    "symbol": (0, ("symbol",), None),
-    "string": (0, ("string",), None),
-    "template": (0, ("template",), None),
-    "end": (0, None, None),
+    "(": (20, ("group", 1, ")"), ("func", 1, ")"), None),
+    ",": (2, None, ("list", 2), None),
+    "|": (5, None, ("|", 5), None),
+    "%": (6, None, ("%", 6), None),
+    ")": (0, None, None, None),
+    "integer": (0, ("integer",), None, None),
+    "symbol": (0, ("symbol",), None, None),
+    "string": (0, ("string",), None, None),
+    "template": (0, ("template",), None, None),
+    "end": (0, None, None, None),
 }
 
 def tokenize(program, start, end):