commit e959c08061185b7b7faf881e325ba29d68ffbff3
parent c2e0c02a3b62161b58a7777cc06f47322f8199ee
Author: simos.lists <simos.lists@70737e48-4f4a-0410-8df8-290828ad50c4>
Date: Thu, 15 May 2008 21:08:13 +0000
0.5 - Tree generation is go
git-svn-id: http://keyboardlayouteditor.googlecode.com/svn/trunk@22 70737e48-4f4a-0410-8df8-290828ad50c4
Diffstat:
7 files changed, 329 insertions(+), 302 deletions(-)
diff --git a/XKBGrammar/XKBGrammar.g b/XKBGrammar/XKBGrammar.g
@@ -1,6 +1,6 @@
// XKB Grammar (X.org)
// Written by Simos Xenitellis <simos.lists@googlemail.com>, 2008.
-// Version 0.4
+// Version 0.5
grammar XKBGrammar;
@@ -59,8 +59,8 @@ layout
;
section
- : mapType mapMaterial
- -> ^(SECTION mapType mapMaterial)
+ : mapType '{' mapMaterial+ '}' ';'
+ -> ^(SECTION mapType ^(MAPMATERIAL mapMaterial+))
;
mapType
@@ -69,12 +69,10 @@ mapType
;
mapMaterial
- : '{'
- ( line_include
+ : line_include
| line_name ';'!
| line_keytype ';'!
| line_key ';'!
- )+ '}' ';'!
;
line_include
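
Note: the grammar change above moves the '{' ... '}' ';' wrapper out of mapMaterial and up into section, and the rewrite now groups the section body under a dedicated MAPMATERIAL node. The driver below is a minimal sketch, not part of this commit: it assumes the ANTLR 3 Python runtime (antlr3) and the regenerated XKBGrammarLexer/XKBGrammarParser modules are importable, that whitespace and comments are routed to the hidden channel as in the full grammar, and the sample layout text is made up for illustration.

# check_tree.py -- illustrative sketch, not shipped with this commit.
import antlr3
from XKBGrammarLexer import XKBGrammarLexer
from XKBGrammarParser import XKBGrammarParser

# Hypothetical input; any symbols section the grammar accepts will do.
sample = '''partial alphanumeric_keys xkb_symbols "basic"
{
    include "gr(bare)"
    name[Group1] = "Greece";
    key <AD01> { [ q, Q ] };
};
'''

lexer = XKBGrammarLexer(antlr3.ANTLRStringStream(sample))
parser = XKBGrammarParser(antlr3.CommonTokenStream(lexer))
result = parser.layout()   # entry rule of the grammar
# With this commit each SECTION node carries a MAPMATERIAL child, roughly:
# (SECTION (MAPTYPE ...) (MAPMATERIAL (TOKEN_INCLUDE ...) (TOKEN_NAME ...) (TOKEN_KEY ...)))
print(result.tree.toStringTree())
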
diff --git a/XKBGrammar/XKBGrammar.tokens b/XKBGrammar/XKBGrammar.tokens
@@ -42,12 +42,12 @@ TOKEN_KEY=14
MAPNAME=16
TOKEN_KEY_TYPE=12
'alphanumeric_keys'=44
-'"'=27
-'}'=30
+'"'=30
+'}'=28
'alternate_group'=45
'key'=37
'partial'=43
-'{'=28
+'{'=27
'>'=39
'include'=31
'hidden'=42
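
Note: in the regenerated .tokens file the literal tokens are renumbered ('{' becomes 27, '}' becomes 28, ';' stays 29, '"' becomes 30), which is why the stream_27/stream_30 rewrite streams and the FOLLOW sets swap throughout the parser diff below. A quick, illustrative way to list the literal numbering, assuming the file layout shown in this hunk:

# Illustrative only: print the literal-token numbering from XKBGrammar.tokens.
for line in open("XKBGrammar.tokens"):
    name, _, num = line.rstrip("\n").rpartition("=")
    if name.startswith("'"):
        print(num, name)
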
diff --git a/XKBGrammar/XKBGrammarLexer.py b/XKBGrammar/XKBGrammarLexer.py
@@ -1,4 +1,4 @@
-# $ANTLR 3.1b1 XKBGrammar.g 2008-05-15 19:18:28
+# $ANTLR 3.1b1 XKBGrammar.g 2008-05-15 22:06:33
import sys
from antlr3 import *
@@ -87,9 +87,9 @@ class XKBGrammarLexer(Lexer):
_type = T__27
_channel = DEFAULT_CHANNEL
- # XKBGrammar.g:7:7: ( '\"' )
- # XKBGrammar.g:7:9: '\"'
- self.match(34)
+ # XKBGrammar.g:7:7: ( '{' )
+ # XKBGrammar.g:7:9: '{'
+ self.match(123)
@@ -112,9 +112,9 @@ class XKBGrammarLexer(Lexer):
_type = T__28
_channel = DEFAULT_CHANNEL
- # XKBGrammar.g:8:7: ( '{' )
- # XKBGrammar.g:8:9: '{'
- self.match(123)
+ # XKBGrammar.g:8:7: ( '}' )
+ # XKBGrammar.g:8:9: '}'
+ self.match(125)
@@ -162,9 +162,9 @@ class XKBGrammarLexer(Lexer):
_type = T__30
_channel = DEFAULT_CHANNEL
- # XKBGrammar.g:10:7: ( '}' )
- # XKBGrammar.g:10:9: '}'
- self.match(125)
+ # XKBGrammar.g:10:7: ( '\"' )
+ # XKBGrammar.g:10:9: '\"'
+ self.match(34)
@@ -587,9 +587,9 @@ class XKBGrammarLexer(Lexer):
_type = NAME
_channel = DEFAULT_CHANNEL
- # XKBGrammar.g:120:2: ( ( 'a' .. 'z' | 'A' .. 'Z' | '_' | '-' | '(' | ')' | '0' .. '9' )* )
- # XKBGrammar.g:120:4: ( 'a' .. 'z' | 'A' .. 'Z' | '_' | '-' | '(' | ')' | '0' .. '9' )*
- # XKBGrammar.g:120:4: ( 'a' .. 'z' | 'A' .. 'Z' | '_' | '-' | '(' | ')' | '0' .. '9' )*
+ # XKBGrammar.g:118:2: ( ( 'a' .. 'z' | 'A' .. 'Z' | '_' | '-' | '(' | ')' | '0' .. '9' )* )
+ # XKBGrammar.g:118:4: ( 'a' .. 'z' | 'A' .. 'Z' | '_' | '-' | '(' | ')' | '0' .. '9' )*
+ # XKBGrammar.g:118:4: ( 'a' .. 'z' | 'A' .. 'Z' | '_' | '-' | '(' | ')' | '0' .. '9' )*
while True: #loop1
alt1 = 2
LA1_0 = self.input.LA(1)
@@ -635,8 +635,8 @@ class XKBGrammarLexer(Lexer):
_type = WS
_channel = DEFAULT_CHANNEL
- # XKBGrammar.g:124:2: ( ( ' ' | '\\r' | '\\t' | '\\u000C' | '\\n' ) )
- # XKBGrammar.g:125:2: ( ' ' | '\\r' | '\\t' | '\\u000C' | '\\n' )
+ # XKBGrammar.g:122:2: ( ( ' ' | '\\r' | '\\t' | '\\u000C' | '\\n' ) )
+ # XKBGrammar.g:123:2: ( ' ' | '\\r' | '\\t' | '\\u000C' | '\\n' )
if (9 <= self.input.LA(1) <= 10) or (12 <= self.input.LA(1) <= 13) or self.input.LA(1) == 32:
self.input.consume();
else:
@@ -669,10 +669,10 @@ class XKBGrammarLexer(Lexer):
_type = COMMENT
_channel = DEFAULT_CHANNEL
- # XKBGrammar.g:130:6: ( '/*' ( . )* '*/' )
- # XKBGrammar.g:131:2: '/*' ( . )* '*/'
+ # XKBGrammar.g:128:6: ( '/*' ( . )* '*/' )
+ # XKBGrammar.g:129:2: '/*' ( . )* '*/'
self.match("/*")
- # XKBGrammar.g:131:7: ( . )*
+ # XKBGrammar.g:129:7: ( . )*
while True: #loop2
alt2 = 2
LA2_0 = self.input.LA(1)
@@ -691,7 +691,7 @@ class XKBGrammarLexer(Lexer):
if alt2 == 1:
- # XKBGrammar.g:131:7: .
+ # XKBGrammar.g:129:7: .
self.matchAny()
@@ -726,10 +726,10 @@ class XKBGrammarLexer(Lexer):
_type = LINE_COMMENT
_channel = DEFAULT_CHANNEL
- # XKBGrammar.g:135:6: ( '//' (~ ( '\\n' | '\\r' ) )* ( '\\r' )? '\\n' )
- # XKBGrammar.g:136:2: '//' (~ ( '\\n' | '\\r' ) )* ( '\\r' )? '\\n'
+ # XKBGrammar.g:133:6: ( '//' (~ ( '\\n' | '\\r' ) )* ( '\\r' )? '\\n' )
+ # XKBGrammar.g:134:2: '//' (~ ( '\\n' | '\\r' ) )* ( '\\r' )? '\\n'
self.match("//")
- # XKBGrammar.g:136:7: (~ ( '\\n' | '\\r' ) )*
+ # XKBGrammar.g:134:7: (~ ( '\\n' | '\\r' ) )*
while True: #loop3
alt3 = 2
LA3_0 = self.input.LA(1)
@@ -739,7 +739,7 @@ class XKBGrammarLexer(Lexer):
if alt3 == 1:
- # XKBGrammar.g:136:7: ~ ( '\\n' | '\\r' )
+ # XKBGrammar.g:134:7: ~ ( '\\n' | '\\r' )
if (0 <= self.input.LA(1) <= 9) or (11 <= self.input.LA(1) <= 12) or (14 <= self.input.LA(1) <= 65534):
self.input.consume();
else:
@@ -754,14 +754,14 @@ class XKBGrammarLexer(Lexer):
break #loop3
- # XKBGrammar.g:136:23: ( '\\r' )?
+ # XKBGrammar.g:134:23: ( '\\r' )?
alt4 = 2
LA4_0 = self.input.LA(1)
if (LA4_0 == 13) :
alt4 = 1
if alt4 == 1:
- # XKBGrammar.g:136:23: '\\r'
+ # XKBGrammar.g:134:23: '\\r'
self.match(13)
@@ -989,10 +989,10 @@ class XKBGrammarLexer(Lexer):
DFA5_transition = [
- DFA.unpack(u"\2\24\1\uffff\2\24\22\uffff\1\24\1\uffff\1\1\11\uffff"
+ DFA.unpack(u"\2\24\1\uffff\2\24\22\uffff\1\24\1\uffff\1\4\11\uffff"
u"\1\15\2\uffff\1\25\13\uffff\1\3\1\13\1\11\1\14\34\uffff\1\7\1\uffff"
u"\1\10\3\uffff\1\21\2\uffff\1\16\3\uffff\1\17\1\5\1\uffff\1\12\2"
- u"\uffff\1\6\1\uffff\1\20\7\uffff\1\22\2\uffff\1\2\1\uffff\1\4"),
+ u"\uffff\1\6\1\uffff\1\20\7\uffff\1\22\2\uffff\1\1\1\uffff\1\2"),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u""),
diff --git a/XKBGrammar/XKBGrammarLexer.pyc b/XKBGrammar/XKBGrammarLexer.pyc
Binary files differ.
diff --git a/XKBGrammar/XKBGrammarParser.py b/XKBGrammar/XKBGrammarParser.py
@@ -1,4 +1,4 @@
-# $ANTLR 3.1b1 XKBGrammar.g 2008-05-15 19:18:28
+# $ANTLR 3.1b1 XKBGrammar.g 2008-05-15 22:06:32
import sys
from antlr3 import *
@@ -64,8 +64,8 @@ tokenNames = [
"TOKEN_MODIFIER_KEYS", "TOKEN_ALTERNATE_GROUP", "TOKEN_XKB_SYMBOLS",
"TOKEN_INCLUDE", "TOKEN_KEY_TYPE", "TOKEN_NAME", "TOKEN_KEY", "MAPTYPE",
"MAPNAME", "MAPOPTIONS", "MAPMATERIAL", "SECTION", "KEYCODE", "KEYSYMS",
- "VALUE", "NAME", "WS", "COMMENT", "LINE_COMMENT", "'\"'", "'{'", "';'",
- "'}'", "'include'", "'name'", "'['", "']'", "'='", "'key.type'", "'key'",
+ "VALUE", "NAME", "WS", "COMMENT", "LINE_COMMENT", "'{'", "'}'", "';'",
+ "'\"'", "'include'", "'name'", "'['", "']'", "'='", "'key.type'", "'key'",
"'<'", "'>'", "','", "'default'", "'hidden'", "'partial'", "'alphanumeric_keys'",
"'alternate_group'", "'xkb_symbols'"
]
@@ -193,7 +193,7 @@ class XKBGrammarParser(Parser):
# $ANTLR start section
- # XKBGrammar.g:61:1: section : mapType mapMaterial -> ^( SECTION mapType mapMaterial ) ;
+ # XKBGrammar.g:61:1: section : mapType '{' ( mapMaterial )+ '}' ';' -> ^( SECTION mapType ^( MAPMATERIAL ( mapMaterial )+ ) ) ;
def section(self, ):
retval = self.section_return()
@@ -201,27 +201,70 @@ class XKBGrammarParser(Parser):
root_0 = None
+ char_literal4 = None
+ char_literal6 = None
+ char_literal7 = None
mapType3 = None
- mapMaterial4 = None
+ mapMaterial5 = None
+ char_literal4_tree = None
+ char_literal6_tree = None
+ char_literal7_tree = None
+ stream_27 = RewriteRuleTokenStream(self.adaptor, "token 27")
+ stream_28 = RewriteRuleTokenStream(self.adaptor, "token 28")
+ stream_29 = RewriteRuleTokenStream(self.adaptor, "token 29")
stream_mapMaterial = RewriteRuleSubtreeStream(self.adaptor, "rule mapMaterial")
stream_mapType = RewriteRuleSubtreeStream(self.adaptor, "rule mapType")
try:
try:
- # XKBGrammar.g:62:2: ( mapType mapMaterial -> ^( SECTION mapType mapMaterial ) )
- # XKBGrammar.g:62:4: mapType mapMaterial
+ # XKBGrammar.g:62:2: ( mapType '{' ( mapMaterial )+ '}' ';' -> ^( SECTION mapType ^( MAPMATERIAL ( mapMaterial )+ ) ) )
+ # XKBGrammar.g:62:4: mapType '{' ( mapMaterial )+ '}' ';'
self._state.following.append(self.FOLLOW_mapType_in_section161)
mapType3 = self.mapType()
self._state.following.pop()
stream_mapType.add(mapType3.tree)
- self._state.following.append(self.FOLLOW_mapMaterial_in_section163)
- mapMaterial4 = self.mapMaterial()
+ char_literal4 = self.input.LT(1)
+ self.match(self.input, 27, self.FOLLOW_27_in_section163)
+ stream_27.add(char_literal4)
+ # XKBGrammar.g:62:16: ( mapMaterial )+
+ cnt2 = 0
+ while True: #loop2
+ alt2 = 2
+ LA2_0 = self.input.LA(1)
- self._state.following.pop()
- stream_mapMaterial.add(mapMaterial4.tree)
+ if ((31 <= LA2_0 <= 32) or (36 <= LA2_0 <= 37)) :
+ alt2 = 1
+
+
+ if alt2 == 1:
+ # XKBGrammar.g:62:16: mapMaterial
+ self._state.following.append(self.FOLLOW_mapMaterial_in_section165)
+ mapMaterial5 = self.mapMaterial()
+
+ self._state.following.pop()
+ stream_mapMaterial.add(mapMaterial5.tree)
+
+
+
+ else:
+ if cnt2 >= 1:
+ break #loop2
+
+ eee = EarlyExitException(2, self.input)
+ raise eee
+
+ cnt2 += 1
+
+
+ char_literal6 = self.input.LT(1)
+ self.match(self.input, 28, self.FOLLOW_28_in_section168)
+ stream_28.add(char_literal6)
+ char_literal7 = self.input.LT(1)
+ self.match(self.input, 29, self.FOLLOW_29_in_section170)
+ stream_29.add(char_literal7)
# AST Rewrite
# elements: mapMaterial, mapType
# token labels:
@@ -238,13 +281,27 @@ class XKBGrammarParser(Parser):
root_0 = self.adaptor.nil()
- # 63:2: -> ^( SECTION mapType mapMaterial )
- # XKBGrammar.g:63:5: ^( SECTION mapType mapMaterial )
+ # 63:2: -> ^( SECTION mapType ^( MAPMATERIAL ( mapMaterial )+ ) )
+ # XKBGrammar.g:63:5: ^( SECTION mapType ^( MAPMATERIAL ( mapMaterial )+ ) )
root_1 = self.adaptor.nil()
root_1 = self.adaptor.becomeRoot(self.adaptor.createFromType(SECTION, "SECTION"), root_1)
self.adaptor.addChild(root_1, stream_mapType.nextTree())
- self.adaptor.addChild(root_1, stream_mapMaterial.nextTree())
+ # XKBGrammar.g:63:23: ^( MAPMATERIAL ( mapMaterial )+ )
+ root_2 = self.adaptor.nil()
+ root_2 = self.adaptor.becomeRoot(self.adaptor.createFromType(MAPMATERIAL, "MAPMATERIAL"), root_2)
+
+ # XKBGrammar.g:63:37: ( mapMaterial )+
+ if not (stream_mapMaterial.hasNext()):
+ raise RewriteEarlyExitException()
+
+ while stream_mapMaterial.hasNext():
+ self.adaptor.addChild(root_2, stream_mapMaterial.nextTree())
+
+
+ stream_mapMaterial.reset()
+
+ self.adaptor.addChild(root_1, root_2)
self.adaptor.addChild(root_0, root_1)
@@ -293,61 +350,61 @@ class XKBGrammarParser(Parser):
root_0 = None
- char_literal6 = None
- NAME7 = None
- char_literal8 = None
- mapOptions5 = None
+ char_literal9 = None
+ NAME10 = None
+ char_literal11 = None
+ mapOptions8 = None
- char_literal6_tree = None
- NAME7_tree = None
- char_literal8_tree = None
+ char_literal9_tree = None
+ NAME10_tree = None
+ char_literal11_tree = None
stream_NAME = RewriteRuleTokenStream(self.adaptor, "token NAME")
- stream_27 = RewriteRuleTokenStream(self.adaptor, "token 27")
+ stream_30 = RewriteRuleTokenStream(self.adaptor, "token 30")
stream_mapOptions = RewriteRuleSubtreeStream(self.adaptor, "rule mapOptions")
try:
try:
# XKBGrammar.g:67:2: ( ( mapOptions )+ '\"' NAME '\"' -> ^( MAPTYPE ^( MAPOPTIONS ( mapOptions )+ ) ^( MAPNAME NAME ) ) )
# XKBGrammar.g:67:4: ( mapOptions )+ '\"' NAME '\"'
# XKBGrammar.g:67:4: ( mapOptions )+
- cnt2 = 0
- while True: #loop2
- alt2 = 2
- LA2_0 = self.input.LA(1)
+ cnt3 = 0
+ while True: #loop3
+ alt3 = 2
+ LA3_0 = self.input.LA(1)
- if ((41 <= LA2_0 <= 46)) :
- alt2 = 1
+ if ((41 <= LA3_0 <= 46)) :
+ alt3 = 1
- if alt2 == 1:
+ if alt3 == 1:
# XKBGrammar.g:67:4: mapOptions
- self._state.following.append(self.FOLLOW_mapOptions_in_mapType186)
- mapOptions5 = self.mapOptions()
+ self._state.following.append(self.FOLLOW_mapOptions_in_mapType198)
+ mapOptions8 = self.mapOptions()
self._state.following.pop()
- stream_mapOptions.add(mapOptions5.tree)
+ stream_mapOptions.add(mapOptions8.tree)
else:
- if cnt2 >= 1:
- break #loop2
+ if cnt3 >= 1:
+ break #loop3
- eee = EarlyExitException(2, self.input)
+ eee = EarlyExitException(3, self.input)
raise eee
- cnt2 += 1
+ cnt3 += 1
- char_literal6 = self.input.LT(1)
- self.match(self.input, 27, self.FOLLOW_27_in_mapType189)
- stream_27.add(char_literal6)
- NAME7 = self.input.LT(1)
- self.match(self.input, NAME, self.FOLLOW_NAME_in_mapType191)
- stream_NAME.add(NAME7)
- char_literal8 = self.input.LT(1)
- self.match(self.input, 27, self.FOLLOW_27_in_mapType193)
- stream_27.add(char_literal8)
+ char_literal9 = self.input.LT(1)
+ self.match(self.input, 30, self.FOLLOW_30_in_mapType201)
+ stream_30.add(char_literal9)
+ NAME10 = self.input.LT(1)
+ self.match(self.input, NAME, self.FOLLOW_NAME_in_mapType203)
+ stream_NAME.add(NAME10)
+ char_literal11 = self.input.LT(1)
+ self.match(self.input, 30, self.FOLLOW_30_in_mapType205)
+ stream_30.add(char_literal11)
# AST Rewrite
# elements: mapOptions, NAME
# token labels:
@@ -431,7 +488,7 @@ class XKBGrammarParser(Parser):
# $ANTLR start mapMaterial
- # XKBGrammar.g:71:1: mapMaterial : '{' ( line_include | line_name ';' | line_keytype ';' | line_key ';' )+ '}' ';' ;
+ # XKBGrammar.g:71:1: mapMaterial : ( line_include | line_name ';' | line_keytype ';' | line_key ';' );
def mapMaterial(self, ):
retval = self.mapMaterial_return()
@@ -439,119 +496,91 @@ class XKBGrammarParser(Parser):
root_0 = None
- char_literal9 = None
- char_literal12 = None
char_literal14 = None
char_literal16 = None
- char_literal17 = None
char_literal18 = None
- line_include10 = None
+ line_include12 = None
- line_name11 = None
+ line_name13 = None
- line_keytype13 = None
+ line_keytype15 = None
- line_key15 = None
+ line_key17 = None
- char_literal9_tree = None
- char_literal12_tree = None
char_literal14_tree = None
char_literal16_tree = None
- char_literal17_tree = None
char_literal18_tree = None
try:
try:
- # XKBGrammar.g:72:2: ( '{' ( line_include | line_name ';' | line_keytype ';' | line_key ';' )+ '}' ';' )
- # XKBGrammar.g:72:4: '{' ( line_include | line_name ';' | line_keytype ';' | line_key ';' )+ '}' ';'
- root_0 = self.adaptor.nil()
-
- char_literal9 = self.input.LT(1)
- self.match(self.input, 28, self.FOLLOW_28_in_mapMaterial225)
-
- char_literal9_tree = self.adaptor.createWithPayload(char_literal9)
- self.adaptor.addChild(root_0, char_literal9_tree)
-
- # XKBGrammar.g:73:2: ( line_include | line_name ';' | line_keytype ';' | line_key ';' )+
- cnt3 = 0
- while True: #loop3
- alt3 = 5
- LA3 = self.input.LA(1)
- if LA3 == 31:
- alt3 = 1
- elif LA3 == 32:
- alt3 = 2
- elif LA3 == 36:
- alt3 = 3
- elif LA3 == 37:
- alt3 = 4
-
- if alt3 == 1:
- # XKBGrammar.g:73:4: line_include
- self._state.following.append(self.FOLLOW_line_include_in_mapMaterial230)
- line_include10 = self.line_include()
+ # XKBGrammar.g:72:2: ( line_include | line_name ';' | line_keytype ';' | line_key ';' )
+ alt4 = 4
+ LA4 = self.input.LA(1)
+ if LA4 == 31:
+ alt4 = 1
+ elif LA4 == 32:
+ alt4 = 2
+ elif LA4 == 36:
+ alt4 = 3
+ elif LA4 == 37:
+ alt4 = 4
+ else:
+ nvae = NoViableAltException("", 4, 0, self.input)
- self._state.following.pop()
- self.adaptor.addChild(root_0, line_include10.tree)
+ raise nvae
+ if alt4 == 1:
+ # XKBGrammar.g:72:4: line_include
+ root_0 = self.adaptor.nil()
+ self._state.following.append(self.FOLLOW_line_include_in_mapMaterial237)
+ line_include12 = self.line_include()
- elif alt3 == 2:
- # XKBGrammar.g:74:4: line_name ';'
- self._state.following.append(self.FOLLOW_line_name_in_mapMaterial236)
- line_name11 = self.line_name()
+ self._state.following.pop()
+ self.adaptor.addChild(root_0, line_include12.tree)
- self._state.following.pop()
- self.adaptor.addChild(root_0, line_name11.tree)
- char_literal12 = self.input.LT(1)
- self.match(self.input, 29, self.FOLLOW_29_in_mapMaterial238)
+ elif alt4 == 2:
+ # XKBGrammar.g:73:4: line_name ';'
+ root_0 = self.adaptor.nil()
- elif alt3 == 3:
- # XKBGrammar.g:75:4: line_keytype ';'
- self._state.following.append(self.FOLLOW_line_keytype_in_mapMaterial244)
- line_keytype13 = self.line_keytype()
+ self._state.following.append(self.FOLLOW_line_name_in_mapMaterial243)
+ line_name13 = self.line_name()
- self._state.following.pop()
- self.adaptor.addChild(root_0, line_keytype13.tree)
- char_literal14 = self.input.LT(1)
- self.match(self.input, 29, self.FOLLOW_29_in_mapMaterial246)
+ self._state.following.pop()
+ self.adaptor.addChild(root_0, line_name13.tree)
+ char_literal14 = self.input.LT(1)
+ self.match(self.input, 29, self.FOLLOW_29_in_mapMaterial245)
- elif alt3 == 4:
- # XKBGrammar.g:76:4: line_key ';'
- self._state.following.append(self.FOLLOW_line_key_in_mapMaterial252)
- line_key15 = self.line_key()
+ elif alt4 == 3:
+ # XKBGrammar.g:74:4: line_keytype ';'
+ root_0 = self.adaptor.nil()
- self._state.following.pop()
- self.adaptor.addChild(root_0, line_key15.tree)
- char_literal16 = self.input.LT(1)
- self.match(self.input, 29, self.FOLLOW_29_in_mapMaterial254)
+ self._state.following.append(self.FOLLOW_line_keytype_in_mapMaterial251)
+ line_keytype15 = self.line_keytype()
+ self._state.following.pop()
+ self.adaptor.addChild(root_0, line_keytype15.tree)
+ char_literal16 = self.input.LT(1)
+ self.match(self.input, 29, self.FOLLOW_29_in_mapMaterial253)
- else:
- if cnt3 >= 1:
- break #loop3
-
- eee = EarlyExitException(3, self.input)
- raise eee
- cnt3 += 1
+ elif alt4 == 4:
+ # XKBGrammar.g:75:4: line_key ';'
+ root_0 = self.adaptor.nil()
+ self._state.following.append(self.FOLLOW_line_key_in_mapMaterial259)
+ line_key17 = self.line_key()
- char_literal17 = self.input.LT(1)
- self.match(self.input, 30, self.FOLLOW_30_in_mapMaterial261)
-
- char_literal17_tree = self.adaptor.createWithPayload(char_literal17)
- self.adaptor.addChild(root_0, char_literal17_tree)
-
- char_literal18 = self.input.LT(1)
- self.match(self.input, 29, self.FOLLOW_29_in_mapMaterial263)
-
+ self._state.following.pop()
+ self.adaptor.addChild(root_0, line_key17.tree)
+ char_literal18 = self.input.LT(1)
+ self.match(self.input, 29, self.FOLLOW_29_in_mapMaterial261)
@@ -585,7 +614,7 @@ class XKBGrammarParser(Parser):
# $ANTLR start line_include
- # XKBGrammar.g:80:1: line_include : 'include' '\"' NAME '\"' -> ^( TOKEN_INCLUDE NAME ) ;
+ # XKBGrammar.g:78:1: line_include : 'include' '\"' NAME '\"' -> ^( TOKEN_INCLUDE NAME ) ;
def line_include(self, ):
retval = self.line_include_return()
@@ -603,25 +632,25 @@ class XKBGrammarParser(Parser):
NAME21_tree = None
char_literal22_tree = None
stream_NAME = RewriteRuleTokenStream(self.adaptor, "token NAME")
+ stream_30 = RewriteRuleTokenStream(self.adaptor, "token 30")
stream_31 = RewriteRuleTokenStream(self.adaptor, "token 31")
- stream_27 = RewriteRuleTokenStream(self.adaptor, "token 27")
try:
try:
- # XKBGrammar.g:81:2: ( 'include' '\"' NAME '\"' -> ^( TOKEN_INCLUDE NAME ) )
- # XKBGrammar.g:81:4: 'include' '\"' NAME '\"'
+ # XKBGrammar.g:79:2: ( 'include' '\"' NAME '\"' -> ^( TOKEN_INCLUDE NAME ) )
+ # XKBGrammar.g:79:4: 'include' '\"' NAME '\"'
string_literal19 = self.input.LT(1)
- self.match(self.input, 31, self.FOLLOW_31_in_line_include275)
+ self.match(self.input, 31, self.FOLLOW_31_in_line_include273)
stream_31.add(string_literal19)
char_literal20 = self.input.LT(1)
- self.match(self.input, 27, self.FOLLOW_27_in_line_include277)
- stream_27.add(char_literal20)
+ self.match(self.input, 30, self.FOLLOW_30_in_line_include275)
+ stream_30.add(char_literal20)
NAME21 = self.input.LT(1)
- self.match(self.input, NAME, self.FOLLOW_NAME_in_line_include279)
+ self.match(self.input, NAME, self.FOLLOW_NAME_in_line_include277)
stream_NAME.add(NAME21)
char_literal22 = self.input.LT(1)
- self.match(self.input, 27, self.FOLLOW_27_in_line_include281)
- stream_27.add(char_literal22)
+ self.match(self.input, 30, self.FOLLOW_30_in_line_include279)
+ stream_30.add(char_literal22)
# AST Rewrite
# elements: NAME
# token labels:
@@ -638,8 +667,8 @@ class XKBGrammarParser(Parser):
root_0 = self.adaptor.nil()
- # 82:2: -> ^( TOKEN_INCLUDE NAME )
- # XKBGrammar.g:82:5: ^( TOKEN_INCLUDE NAME )
+ # 80:2: -> ^( TOKEN_INCLUDE NAME )
+ # XKBGrammar.g:80:5: ^( TOKEN_INCLUDE NAME )
root_1 = self.adaptor.nil()
root_1 = self.adaptor.becomeRoot(self.adaptor.createFromType(TOKEN_INCLUDE, "TOKEN_INCLUDE"), root_1)
@@ -684,7 +713,7 @@ class XKBGrammarParser(Parser):
# $ANTLR start line_name
- # XKBGrammar.g:85:1: line_name : 'name' '[' n1= NAME ']' '=' '\"' n2= NAME '\"' -> ^( TOKEN_NAME $n1 ^( VALUE $n2) ) ;
+ # XKBGrammar.g:83:1: line_name : 'name' '[' n1= NAME ']' '=' '\"' n2= NAME '\"' -> ^( TOKEN_NAME $n1 ^( VALUE $n2) ) ;
def line_name(self, ):
retval = self.line_name_return()
@@ -710,40 +739,40 @@ class XKBGrammarParser(Parser):
char_literal27_tree = None
char_literal28_tree = None
stream_NAME = RewriteRuleTokenStream(self.adaptor, "token NAME")
+ stream_30 = RewriteRuleTokenStream(self.adaptor, "token 30")
stream_32 = RewriteRuleTokenStream(self.adaptor, "token 32")
stream_35 = RewriteRuleTokenStream(self.adaptor, "token 35")
stream_33 = RewriteRuleTokenStream(self.adaptor, "token 33")
stream_34 = RewriteRuleTokenStream(self.adaptor, "token 34")
- stream_27 = RewriteRuleTokenStream(self.adaptor, "token 27")
try:
try:
- # XKBGrammar.g:86:2: ( 'name' '[' n1= NAME ']' '=' '\"' n2= NAME '\"' -> ^( TOKEN_NAME $n1 ^( VALUE $n2) ) )
- # XKBGrammar.g:86:4: 'name' '[' n1= NAME ']' '=' '\"' n2= NAME '\"'
+ # XKBGrammar.g:84:2: ( 'name' '[' n1= NAME ']' '=' '\"' n2= NAME '\"' -> ^( TOKEN_NAME $n1 ^( VALUE $n2) ) )
+ # XKBGrammar.g:84:4: 'name' '[' n1= NAME ']' '=' '\"' n2= NAME '\"'
string_literal23 = self.input.LT(1)
- self.match(self.input, 32, self.FOLLOW_32_in_line_name301)
+ self.match(self.input, 32, self.FOLLOW_32_in_line_name299)
stream_32.add(string_literal23)
char_literal24 = self.input.LT(1)
- self.match(self.input, 33, self.FOLLOW_33_in_line_name303)
+ self.match(self.input, 33, self.FOLLOW_33_in_line_name301)
stream_33.add(char_literal24)
n1 = self.input.LT(1)
- self.match(self.input, NAME, self.FOLLOW_NAME_in_line_name307)
+ self.match(self.input, NAME, self.FOLLOW_NAME_in_line_name305)
stream_NAME.add(n1)
char_literal25 = self.input.LT(1)
- self.match(self.input, 34, self.FOLLOW_34_in_line_name309)
+ self.match(self.input, 34, self.FOLLOW_34_in_line_name307)
stream_34.add(char_literal25)
char_literal26 = self.input.LT(1)
- self.match(self.input, 35, self.FOLLOW_35_in_line_name311)
+ self.match(self.input, 35, self.FOLLOW_35_in_line_name309)
stream_35.add(char_literal26)
char_literal27 = self.input.LT(1)
- self.match(self.input, 27, self.FOLLOW_27_in_line_name313)
- stream_27.add(char_literal27)
+ self.match(self.input, 30, self.FOLLOW_30_in_line_name311)
+ stream_30.add(char_literal27)
n2 = self.input.LT(1)
- self.match(self.input, NAME, self.FOLLOW_NAME_in_line_name317)
+ self.match(self.input, NAME, self.FOLLOW_NAME_in_line_name315)
stream_NAME.add(n2)
char_literal28 = self.input.LT(1)
- self.match(self.input, 27, self.FOLLOW_27_in_line_name319)
- stream_27.add(char_literal28)
+ self.match(self.input, 30, self.FOLLOW_30_in_line_name317)
+ stream_30.add(char_literal28)
# AST Rewrite
# elements: n1, n2
# token labels: n1, n2
@@ -762,13 +791,13 @@ class XKBGrammarParser(Parser):
root_0 = self.adaptor.nil()
- # 87:2: -> ^( TOKEN_NAME $n1 ^( VALUE $n2) )
- # XKBGrammar.g:87:5: ^( TOKEN_NAME $n1 ^( VALUE $n2) )
+ # 85:2: -> ^( TOKEN_NAME $n1 ^( VALUE $n2) )
+ # XKBGrammar.g:85:5: ^( TOKEN_NAME $n1 ^( VALUE $n2) )
root_1 = self.adaptor.nil()
root_1 = self.adaptor.becomeRoot(self.adaptor.createFromType(TOKEN_NAME, "TOKEN_NAME"), root_1)
self.adaptor.addChild(root_1, stream_n1.nextNode())
- # XKBGrammar.g:87:22: ^( VALUE $n2)
+ # XKBGrammar.g:85:22: ^( VALUE $n2)
root_2 = self.adaptor.nil()
root_2 = self.adaptor.becomeRoot(self.adaptor.createFromType(VALUE, "VALUE"), root_2)
@@ -815,7 +844,7 @@ class XKBGrammarParser(Parser):
# $ANTLR start line_keytype
- # XKBGrammar.g:90:1: line_keytype : 'key.type' '[' n1= NAME ']' '=' '\"' n2= NAME '\"' -> ^( TOKEN_KEY_TYPE $n1 ^( VALUE $n2) ) ;
+ # XKBGrammar.g:88:1: line_keytype : 'key.type' '[' n1= NAME ']' '=' '\"' n2= NAME '\"' -> ^( TOKEN_KEY_TYPE $n1 ^( VALUE $n2) ) ;
def line_keytype(self, ):
retval = self.line_keytype_return()
@@ -841,40 +870,40 @@ class XKBGrammarParser(Parser):
char_literal33_tree = None
char_literal34_tree = None
stream_NAME = RewriteRuleTokenStream(self.adaptor, "token NAME")
+ stream_30 = RewriteRuleTokenStream(self.adaptor, "token 30")
stream_35 = RewriteRuleTokenStream(self.adaptor, "token 35")
stream_36 = RewriteRuleTokenStream(self.adaptor, "token 36")
stream_33 = RewriteRuleTokenStream(self.adaptor, "token 33")
stream_34 = RewriteRuleTokenStream(self.adaptor, "token 34")
- stream_27 = RewriteRuleTokenStream(self.adaptor, "token 27")
try:
try:
- # XKBGrammar.g:91:2: ( 'key.type' '[' n1= NAME ']' '=' '\"' n2= NAME '\"' -> ^( TOKEN_KEY_TYPE $n1 ^( VALUE $n2) ) )
- # XKBGrammar.g:91:4: 'key.type' '[' n1= NAME ']' '=' '\"' n2= NAME '\"'
+ # XKBGrammar.g:89:2: ( 'key.type' '[' n1= NAME ']' '=' '\"' n2= NAME '\"' -> ^( TOKEN_KEY_TYPE $n1 ^( VALUE $n2) ) )
+ # XKBGrammar.g:89:4: 'key.type' '[' n1= NAME ']' '=' '\"' n2= NAME '\"'
string_literal29 = self.input.LT(1)
- self.match(self.input, 36, self.FOLLOW_36_in_line_keytype347)
+ self.match(self.input, 36, self.FOLLOW_36_in_line_keytype345)
stream_36.add(string_literal29)
char_literal30 = self.input.LT(1)
- self.match(self.input, 33, self.FOLLOW_33_in_line_keytype349)
+ self.match(self.input, 33, self.FOLLOW_33_in_line_keytype347)
stream_33.add(char_literal30)
n1 = self.input.LT(1)
- self.match(self.input, NAME, self.FOLLOW_NAME_in_line_keytype353)
+ self.match(self.input, NAME, self.FOLLOW_NAME_in_line_keytype351)
stream_NAME.add(n1)
char_literal31 = self.input.LT(1)
- self.match(self.input, 34, self.FOLLOW_34_in_line_keytype355)
+ self.match(self.input, 34, self.FOLLOW_34_in_line_keytype353)
stream_34.add(char_literal31)
char_literal32 = self.input.LT(1)
- self.match(self.input, 35, self.FOLLOW_35_in_line_keytype357)
+ self.match(self.input, 35, self.FOLLOW_35_in_line_keytype355)
stream_35.add(char_literal32)
char_literal33 = self.input.LT(1)
- self.match(self.input, 27, self.FOLLOW_27_in_line_keytype359)
- stream_27.add(char_literal33)
+ self.match(self.input, 30, self.FOLLOW_30_in_line_keytype357)
+ stream_30.add(char_literal33)
n2 = self.input.LT(1)
- self.match(self.input, NAME, self.FOLLOW_NAME_in_line_keytype363)
+ self.match(self.input, NAME, self.FOLLOW_NAME_in_line_keytype361)
stream_NAME.add(n2)
char_literal34 = self.input.LT(1)
- self.match(self.input, 27, self.FOLLOW_27_in_line_keytype365)
- stream_27.add(char_literal34)
+ self.match(self.input, 30, self.FOLLOW_30_in_line_keytype363)
+ stream_30.add(char_literal34)
# AST Rewrite
# elements: n2, n1
# token labels: n1, n2
@@ -893,13 +922,13 @@ class XKBGrammarParser(Parser):
root_0 = self.adaptor.nil()
- # 92:2: -> ^( TOKEN_KEY_TYPE $n1 ^( VALUE $n2) )
- # XKBGrammar.g:92:5: ^( TOKEN_KEY_TYPE $n1 ^( VALUE $n2) )
+ # 90:2: -> ^( TOKEN_KEY_TYPE $n1 ^( VALUE $n2) )
+ # XKBGrammar.g:90:5: ^( TOKEN_KEY_TYPE $n1 ^( VALUE $n2) )
root_1 = self.adaptor.nil()
root_1 = self.adaptor.becomeRoot(self.adaptor.createFromType(TOKEN_KEY_TYPE, "TOKEN_KEY_TYPE"), root_1)
self.adaptor.addChild(root_1, stream_n1.nextNode())
- # XKBGrammar.g:92:26: ^( VALUE $n2)
+ # XKBGrammar.g:90:26: ^( VALUE $n2)
root_2 = self.adaptor.nil()
root_2 = self.adaptor.becomeRoot(self.adaptor.createFromType(VALUE, "VALUE"), root_2)
@@ -946,7 +975,7 @@ class XKBGrammarParser(Parser):
# $ANTLR start line_key
- # XKBGrammar.g:95:1: line_key : 'key' keycode keysyms -> ^( TOKEN_KEY keycode keysyms ) ;
+ # XKBGrammar.g:93:1: line_key : 'key' keycode keysyms -> ^( TOKEN_KEY keycode keysyms ) ;
def line_key(self, ):
retval = self.line_key_return()
@@ -966,17 +995,17 @@ class XKBGrammarParser(Parser):
stream_keycode = RewriteRuleSubtreeStream(self.adaptor, "rule keycode")
try:
try:
- # XKBGrammar.g:96:2: ( 'key' keycode keysyms -> ^( TOKEN_KEY keycode keysyms ) )
- # XKBGrammar.g:96:4: 'key' keycode keysyms
+ # XKBGrammar.g:94:2: ( 'key' keycode keysyms -> ^( TOKEN_KEY keycode keysyms ) )
+ # XKBGrammar.g:94:4: 'key' keycode keysyms
string_literal35 = self.input.LT(1)
- self.match(self.input, 37, self.FOLLOW_37_in_line_key394)
+ self.match(self.input, 37, self.FOLLOW_37_in_line_key392)
stream_37.add(string_literal35)
- self._state.following.append(self.FOLLOW_keycode_in_line_key396)
+ self._state.following.append(self.FOLLOW_keycode_in_line_key394)
keycode36 = self.keycode()
self._state.following.pop()
stream_keycode.add(keycode36.tree)
- self._state.following.append(self.FOLLOW_keysyms_in_line_key398)
+ self._state.following.append(self.FOLLOW_keysyms_in_line_key396)
keysyms37 = self.keysyms()
self._state.following.pop()
@@ -997,8 +1026,8 @@ class XKBGrammarParser(Parser):
root_0 = self.adaptor.nil()
- # 97:2: -> ^( TOKEN_KEY keycode keysyms )
- # XKBGrammar.g:97:5: ^( TOKEN_KEY keycode keysyms )
+ # 95:2: -> ^( TOKEN_KEY keycode keysyms )
+ # XKBGrammar.g:95:5: ^( TOKEN_KEY keycode keysyms )
root_1 = self.adaptor.nil()
root_1 = self.adaptor.becomeRoot(self.adaptor.createFromType(TOKEN_KEY, "TOKEN_KEY"), root_1)
@@ -1044,7 +1073,7 @@ class XKBGrammarParser(Parser):
# $ANTLR start keycode
- # XKBGrammar.g:100:1: keycode : '<' NAME '>' -> ^( KEYCODE NAME ) ;
+ # XKBGrammar.g:98:1: keycode : '<' NAME '>' -> ^( KEYCODE NAME ) ;
def keycode(self, ):
retval = self.keycode_return()
@@ -1065,16 +1094,16 @@ class XKBGrammarParser(Parser):
try:
try:
- # XKBGrammar.g:101:2: ( '<' NAME '>' -> ^( KEYCODE NAME ) )
- # XKBGrammar.g:101:4: '<' NAME '>'
+ # XKBGrammar.g:99:2: ( '<' NAME '>' -> ^( KEYCODE NAME ) )
+ # XKBGrammar.g:99:4: '<' NAME '>'
char_literal38 = self.input.LT(1)
- self.match(self.input, 38, self.FOLLOW_38_in_keycode422)
+ self.match(self.input, 38, self.FOLLOW_38_in_keycode420)
stream_38.add(char_literal38)
NAME39 = self.input.LT(1)
- self.match(self.input, NAME, self.FOLLOW_NAME_in_keycode424)
+ self.match(self.input, NAME, self.FOLLOW_NAME_in_keycode422)
stream_NAME.add(NAME39)
char_literal40 = self.input.LT(1)
- self.match(self.input, 39, self.FOLLOW_39_in_keycode426)
+ self.match(self.input, 39, self.FOLLOW_39_in_keycode424)
stream_39.add(char_literal40)
# AST Rewrite
# elements: NAME
@@ -1092,8 +1121,8 @@ class XKBGrammarParser(Parser):
root_0 = self.adaptor.nil()
- # 102:2: -> ^( KEYCODE NAME )
- # XKBGrammar.g:102:5: ^( KEYCODE NAME )
+ # 100:2: -> ^( KEYCODE NAME )
+ # XKBGrammar.g:100:5: ^( KEYCODE NAME )
root_1 = self.adaptor.nil()
root_1 = self.adaptor.becomeRoot(self.adaptor.createFromType(KEYCODE, "KEYCODE"), root_1)
@@ -1138,7 +1167,7 @@ class XKBGrammarParser(Parser):
# $ANTLR start keysyms
- # XKBGrammar.g:105:1: keysyms : '{' '[' keysym+= NAME ( ',' keysym+= NAME )* ']' '}' -> ^( KEYSYMS ( $keysym)+ ) ;
+ # XKBGrammar.g:103:1: keysyms : '{' '[' keysym+= NAME ( ',' keysym+= NAME )* ']' '}' -> ^( KEYSYMS ( $keysym)+ ) ;
def keysyms(self, ):
retval = self.keysyms_return()
@@ -1161,45 +1190,45 @@ class XKBGrammarParser(Parser):
char_literal45_tree = None
keysym_tree = None
stream_NAME = RewriteRuleTokenStream(self.adaptor, "token NAME")
- stream_30 = RewriteRuleTokenStream(self.adaptor, "token 30")
stream_40 = RewriteRuleTokenStream(self.adaptor, "token 40")
stream_33 = RewriteRuleTokenStream(self.adaptor, "token 33")
stream_34 = RewriteRuleTokenStream(self.adaptor, "token 34")
+ stream_27 = RewriteRuleTokenStream(self.adaptor, "token 27")
stream_28 = RewriteRuleTokenStream(self.adaptor, "token 28")
try:
try:
- # XKBGrammar.g:106:2: ( '{' '[' keysym+= NAME ( ',' keysym+= NAME )* ']' '}' -> ^( KEYSYMS ( $keysym)+ ) )
- # XKBGrammar.g:106:4: '{' '[' keysym+= NAME ( ',' keysym+= NAME )* ']' '}'
+ # XKBGrammar.g:104:2: ( '{' '[' keysym+= NAME ( ',' keysym+= NAME )* ']' '}' -> ^( KEYSYMS ( $keysym)+ ) )
+ # XKBGrammar.g:104:4: '{' '[' keysym+= NAME ( ',' keysym+= NAME )* ']' '}'
char_literal41 = self.input.LT(1)
- self.match(self.input, 28, self.FOLLOW_28_in_keysyms446)
- stream_28.add(char_literal41)
+ self.match(self.input, 27, self.FOLLOW_27_in_keysyms444)
+ stream_27.add(char_literal41)
char_literal42 = self.input.LT(1)
- self.match(self.input, 33, self.FOLLOW_33_in_keysyms448)
+ self.match(self.input, 33, self.FOLLOW_33_in_keysyms446)
stream_33.add(char_literal42)
keysym = self.input.LT(1)
- self.match(self.input, NAME, self.FOLLOW_NAME_in_keysyms452)
+ self.match(self.input, NAME, self.FOLLOW_NAME_in_keysyms450)
stream_NAME.add(keysym)
if list_keysym is None:
list_keysym = []
list_keysym.append(keysym)
- # XKBGrammar.g:106:25: ( ',' keysym+= NAME )*
- while True: #loop4
- alt4 = 2
- LA4_0 = self.input.LA(1)
+ # XKBGrammar.g:104:25: ( ',' keysym+= NAME )*
+ while True: #loop5
+ alt5 = 2
+ LA5_0 = self.input.LA(1)
- if (LA4_0 == 40) :
- alt4 = 1
+ if (LA5_0 == 40) :
+ alt5 = 1
- if alt4 == 1:
- # XKBGrammar.g:106:26: ',' keysym+= NAME
+ if alt5 == 1:
+ # XKBGrammar.g:104:26: ',' keysym+= NAME
char_literal43 = self.input.LT(1)
- self.match(self.input, 40, self.FOLLOW_40_in_keysyms455)
+ self.match(self.input, 40, self.FOLLOW_40_in_keysyms453)
stream_40.add(char_literal43)
keysym = self.input.LT(1)
- self.match(self.input, NAME, self.FOLLOW_NAME_in_keysyms459)
+ self.match(self.input, NAME, self.FOLLOW_NAME_in_keysyms457)
stream_NAME.add(keysym)
if list_keysym is None:
list_keysym = []
@@ -1209,15 +1238,15 @@ class XKBGrammarParser(Parser):
else:
- break #loop4
+ break #loop5
char_literal44 = self.input.LT(1)
- self.match(self.input, 34, self.FOLLOW_34_in_keysyms463)
+ self.match(self.input, 34, self.FOLLOW_34_in_keysyms461)
stream_34.add(char_literal44)
char_literal45 = self.input.LT(1)
- self.match(self.input, 30, self.FOLLOW_30_in_keysyms465)
- stream_30.add(char_literal45)
+ self.match(self.input, 28, self.FOLLOW_28_in_keysyms463)
+ stream_28.add(char_literal45)
# AST Rewrite
# elements: keysym
# token labels:
@@ -1235,12 +1264,12 @@ class XKBGrammarParser(Parser):
root_0 = self.adaptor.nil()
- # 107:2: -> ^( KEYSYMS ( $keysym)+ )
- # XKBGrammar.g:107:5: ^( KEYSYMS ( $keysym)+ )
+ # 105:2: -> ^( KEYSYMS ( $keysym)+ )
+ # XKBGrammar.g:105:5: ^( KEYSYMS ( $keysym)+ )
root_1 = self.adaptor.nil()
root_1 = self.adaptor.becomeRoot(self.adaptor.createFromType(KEYSYMS, "KEYSYMS"), root_1)
- # XKBGrammar.g:107:15: ( $keysym)+
+ # XKBGrammar.g:105:15: ( $keysym)+
if not (stream_keysym.hasNext()):
raise RewriteEarlyExitException()
@@ -1289,7 +1318,7 @@ class XKBGrammarParser(Parser):
# $ANTLR start mapOptions
- # XKBGrammar.g:110:1: mapOptions : ( 'default' | 'hidden' | 'partial' | 'alphanumeric_keys' | 'alternate_group' | 'xkb_symbols' );
+ # XKBGrammar.g:108:1: mapOptions : ( 'default' | 'hidden' | 'partial' | 'alphanumeric_keys' | 'alternate_group' | 'xkb_symbols' );
def mapOptions(self, ):
retval = self.mapOptions_return()
@@ -1303,7 +1332,7 @@ class XKBGrammarParser(Parser):
try:
try:
- # XKBGrammar.g:111:2: ( 'default' | 'hidden' | 'partial' | 'alphanumeric_keys' | 'alternate_group' | 'xkb_symbols' )
+ # XKBGrammar.g:109:2: ( 'default' | 'hidden' | 'partial' | 'alphanumeric_keys' | 'alternate_group' | 'xkb_symbols' )
# XKBGrammar.g:
root_0 = self.adaptor.nil()
@@ -1348,55 +1377,55 @@ class XKBGrammarParser(Parser):
FOLLOW_section_in_layout144 = frozenset([41, 42, 43, 44, 45, 46])
FOLLOW_EOF_in_layout147 = frozenset([1])
- FOLLOW_mapType_in_section161 = frozenset([28])
- FOLLOW_mapMaterial_in_section163 = frozenset([1])
- FOLLOW_mapOptions_in_mapType186 = frozenset([27, 41, 42, 43, 44, 45, 46])
- FOLLOW_27_in_mapType189 = frozenset([23])
- FOLLOW_NAME_in_mapType191 = frozenset([27])
- FOLLOW_27_in_mapType193 = frozenset([1])
- FOLLOW_28_in_mapMaterial225 = frozenset([31, 32, 36, 37])
- FOLLOW_line_include_in_mapMaterial230 = frozenset([30, 31, 32, 36, 37])
- FOLLOW_line_name_in_mapMaterial236 = frozenset([29])
- FOLLOW_29_in_mapMaterial238 = frozenset([30, 31, 32, 36, 37])
- FOLLOW_line_keytype_in_mapMaterial244 = frozenset([29])
- FOLLOW_29_in_mapMaterial246 = frozenset([30, 31, 32, 36, 37])
- FOLLOW_line_key_in_mapMaterial252 = frozenset([29])
- FOLLOW_29_in_mapMaterial254 = frozenset([30, 31, 32, 36, 37])
- FOLLOW_30_in_mapMaterial261 = frozenset([29])
- FOLLOW_29_in_mapMaterial263 = frozenset([1])
- FOLLOW_31_in_line_include275 = frozenset([27])
- FOLLOW_27_in_line_include277 = frozenset([23])
- FOLLOW_NAME_in_line_include279 = frozenset([27])
- FOLLOW_27_in_line_include281 = frozenset([1])
- FOLLOW_32_in_line_name301 = frozenset([33])
- FOLLOW_33_in_line_name303 = frozenset([23])
- FOLLOW_NAME_in_line_name307 = frozenset([34])
- FOLLOW_34_in_line_name309 = frozenset([35])
- FOLLOW_35_in_line_name311 = frozenset([27])
- FOLLOW_27_in_line_name313 = frozenset([23])
- FOLLOW_NAME_in_line_name317 = frozenset([27])
- FOLLOW_27_in_line_name319 = frozenset([1])
- FOLLOW_36_in_line_keytype347 = frozenset([33])
- FOLLOW_33_in_line_keytype349 = frozenset([23])
- FOLLOW_NAME_in_line_keytype353 = frozenset([34])
- FOLLOW_34_in_line_keytype355 = frozenset([35])
- FOLLOW_35_in_line_keytype357 = frozenset([27])
- FOLLOW_27_in_line_keytype359 = frozenset([23])
- FOLLOW_NAME_in_line_keytype363 = frozenset([27])
- FOLLOW_27_in_line_keytype365 = frozenset([1])
- FOLLOW_37_in_line_key394 = frozenset([38])
- FOLLOW_keycode_in_line_key396 = frozenset([28])
- FOLLOW_keysyms_in_line_key398 = frozenset([1])
- FOLLOW_38_in_keycode422 = frozenset([23])
- FOLLOW_NAME_in_keycode424 = frozenset([39])
- FOLLOW_39_in_keycode426 = frozenset([1])
- FOLLOW_28_in_keysyms446 = frozenset([33])
- FOLLOW_33_in_keysyms448 = frozenset([23])
- FOLLOW_NAME_in_keysyms452 = frozenset([34, 40])
- FOLLOW_40_in_keysyms455 = frozenset([23])
- FOLLOW_NAME_in_keysyms459 = frozenset([34, 40])
- FOLLOW_34_in_keysyms463 = frozenset([30])
- FOLLOW_30_in_keysyms465 = frozenset([1])
+ FOLLOW_mapType_in_section161 = frozenset([27])
+ FOLLOW_27_in_section163 = frozenset([31, 32, 36, 37])
+ FOLLOW_mapMaterial_in_section165 = frozenset([28, 31, 32, 36, 37])
+ FOLLOW_28_in_section168 = frozenset([29])
+ FOLLOW_29_in_section170 = frozenset([1])
+ FOLLOW_mapOptions_in_mapType198 = frozenset([30, 41, 42, 43, 44, 45, 46])
+ FOLLOW_30_in_mapType201 = frozenset([23])
+ FOLLOW_NAME_in_mapType203 = frozenset([30])
+ FOLLOW_30_in_mapType205 = frozenset([1])
+ FOLLOW_line_include_in_mapMaterial237 = frozenset([1])
+ FOLLOW_line_name_in_mapMaterial243 = frozenset([29])
+ FOLLOW_29_in_mapMaterial245 = frozenset([1])
+ FOLLOW_line_keytype_in_mapMaterial251 = frozenset([29])
+ FOLLOW_29_in_mapMaterial253 = frozenset([1])
+ FOLLOW_line_key_in_mapMaterial259 = frozenset([29])
+ FOLLOW_29_in_mapMaterial261 = frozenset([1])
+ FOLLOW_31_in_line_include273 = frozenset([30])
+ FOLLOW_30_in_line_include275 = frozenset([23])
+ FOLLOW_NAME_in_line_include277 = frozenset([30])
+ FOLLOW_30_in_line_include279 = frozenset([1])
+ FOLLOW_32_in_line_name299 = frozenset([33])
+ FOLLOW_33_in_line_name301 = frozenset([23])
+ FOLLOW_NAME_in_line_name305 = frozenset([34])
+ FOLLOW_34_in_line_name307 = frozenset([35])
+ FOLLOW_35_in_line_name309 = frozenset([30])
+ FOLLOW_30_in_line_name311 = frozenset([23])
+ FOLLOW_NAME_in_line_name315 = frozenset([30])
+ FOLLOW_30_in_line_name317 = frozenset([1])
+ FOLLOW_36_in_line_keytype345 = frozenset([33])
+ FOLLOW_33_in_line_keytype347 = frozenset([23])
+ FOLLOW_NAME_in_line_keytype351 = frozenset([34])
+ FOLLOW_34_in_line_keytype353 = frozenset([35])
+ FOLLOW_35_in_line_keytype355 = frozenset([30])
+ FOLLOW_30_in_line_keytype357 = frozenset([23])
+ FOLLOW_NAME_in_line_keytype361 = frozenset([30])
+ FOLLOW_30_in_line_keytype363 = frozenset([1])
+ FOLLOW_37_in_line_key392 = frozenset([38])
+ FOLLOW_keycode_in_line_key394 = frozenset([27])
+ FOLLOW_keysyms_in_line_key396 = frozenset([1])
+ FOLLOW_38_in_keycode420 = frozenset([23])
+ FOLLOW_NAME_in_keycode422 = frozenset([39])
+ FOLLOW_39_in_keycode424 = frozenset([1])
+ FOLLOW_27_in_keysyms444 = frozenset([33])
+ FOLLOW_33_in_keysyms446 = frozenset([23])
+ FOLLOW_NAME_in_keysyms450 = frozenset([34, 40])
+ FOLLOW_40_in_keysyms453 = frozenset([23])
+ FOLLOW_NAME_in_keysyms457 = frozenset([34, 40])
+ FOLLOW_34_in_keysyms461 = frozenset([28])
+ FOLLOW_28_in_keysyms463 = frozenset([1])
FOLLOW_set_in_mapOptions0 = frozenset([1])
diff --git a/XKBGrammar/XKBGrammarParser.pyc b/XKBGrammar/XKBGrammarParser.pyc
Binary files differ.
diff --git a/XKBGrammar/XKBGrammarWalker.g b/XKBGrammar/XKBGrammarWalker.g
@@ -1,6 +1,6 @@
// XKB Grammar (X.org)
// Written by Simos Xenitellis <simos.lists@googlemail.com>, 2008.
-// Version 0.4
+// Version 0.5
tree grammar XKBGrammarWalker;
@@ -32,7 +32,7 @@ layout
;
section
- : ^(SECTION mapType mapMaterial)
+ : ^(SECTION mapType ^(MAPMATERIAL mapMaterial+))
;
mapType
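
Note: the tree grammar now expects the same ^(MAPMATERIAL mapMaterial+) subtree the parser emits. No generated Python walker ships in this commit (only the .g tree grammar changes), so a hand-rolled traversal over the antlr3 CommonTree is one way to inspect the new shape. The sketch below is illustrative only; it assumes the SECTION and MAPMATERIAL token-type constants exported by the generated XKBGrammarParser module and a `result` object from a parse like the sketch after the XKBGrammar.g diff above.

# Hedged sketch, not part of this commit: traverse the new tree shape with
# the plain antlr3 CommonTree API instead of the tree grammar.
from XKBGrammarParser import SECTION, MAPMATERIAL

def dump_sections(root):
    # root is e.g. result.tree from the parse sketch above.
    for i in range(root.getChildCount()):
        section = root.getChild(i)
        if section.getType() != SECTION:
            continue                                  # skip EOF etc.
        print(section.getChild(0).toStringTree())     # the mapType subtree
        material = section.getChild(1)                # ^(MAPMATERIAL ...)
        for j in range(material.getChildCount()):
            print("  " + material.getChild(j).toStringTree())
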