commit 35a9ad844b37f5ab4833a5a913ea8d136adad144
parent 742101b340d0c06b17475b3f73c242ea0c87cf8d
Author: Joël Lupien (Jojolepro) <jojolepro@jojolepro.com>
Date: Tue, 25 Aug 2020 15:43:16 -0400
Add ANTLR-generated Python lexer, parser, and tree walker for the Keycodes.g (XKB keycodes) grammar
Diffstat:
11 files changed, 7664 insertions(+), 0 deletions(-)
diff --git a/Keycodes.tokens b/Keycodes.tokens
@@ -0,0 +1,40 @@
+KEYCODEDOC=4
+KEYCODELIST=5
+ALIAS=14
+KEYCODEMATERIAL=10
+INDICATOR=15
+DQSTRING=17
+COMMENT=20
+KEYCODELISTTYPE=6
+MINIMUM=12
+KEYCODE=16
+INCLUDE=11
+WS=19
+T__30=30
+KEYCODELISTNAME=9
+T__31=31
+T__32=32
+KEYCODELISTOPTS=8
+MAXIMUM=13
+NAME=18
+KEYCODELISTOPTIONS=7
+LINE_COMMENT=21
+T__26=26
+T__27=27
+T__28=28
+T__29=29
+T__22=22
+T__23=23
+T__24=24
+T__25=25
+'>'=31
+'indicator'=32
+'}'=23
+'='=27
+'alias'=29
+'<'=30
+'{'=22
+';'=24
+'maximum'=28
+'include'=25
+'minimum'=26
diff --git a/KeycodesLexer.py b/KeycodesLexer.py
@@ -0,0 +1,931 @@
+# $ANTLR 3.1.2 Keycodes.g 2019-08-13 08:28:51
+
+import sys
+from antlr3 import *
+from antlr3.compat import set, frozenset
+
+
+# for convenience in actions
+HIDDEN = BaseRecognizer.HIDDEN
+
+# token types
+KEYCODEDOC=4
+KEYCODELIST=5
+ALIAS=14
+KEYCODEMATERIAL=10
+INDICATOR=15
+DQSTRING=17
+COMMENT=20
+KEYCODELISTTYPE=6
+MINIMUM=12
+KEYCODE=16
+INCLUDE=11
+WS=19
+EOF=-1
+T__30=30
+KEYCODELISTNAME=9
+T__31=31
+T__32=32
+KEYCODELISTOPTS=8
+MAXIMUM=13
+NAME=18
+KEYCODELISTOPTIONS=7
+LINE_COMMENT=21
+T__26=26
+T__27=27
+T__28=28
+T__29=29
+T__22=22
+T__23=23
+T__24=24
+T__25=25
+
+
+class KeycodesLexer(Lexer):
+
+ grammarFileName = "Keycodes.g"
+ antlr_version = version_str_to_tuple("3.1.2")
+ antlr_version_str = "3.1.2"
+
+ def __init__(self, input=None, state=None):
+ if state is None:
+ state = RecognizerSharedState()
+ Lexer.__init__(self, input, state)
+
+ self.dfa8 = self.DFA8(
+ self, 8,
+ eot = self.DFA8_eot,
+ eof = self.DFA8_eof,
+ min = self.DFA8_min,
+ max = self.DFA8_max,
+ accept = self.DFA8_accept,
+ special = self.DFA8_special,
+ transition = self.DFA8_transition
+ )
+
+
+
+
+
+
+ # $ANTLR start "T__22"
+ def mT__22(self, ):
+
+ try:
+ _type = T__22
+ _channel = DEFAULT_CHANNEL
+
+ # Keycodes.g:7:7: ( '{' )
+ # Keycodes.g:7:9: '{'
+ pass
+ self.match(123)
+
+
+
+ self._state.type = _type
+ self._state.channel = _channel
+
+ finally:
+
+ pass
+
+ # $ANTLR end "T__22"
+
+
+
+ # $ANTLR start "T__23"
+ def mT__23(self, ):
+
+ try:
+ _type = T__23
+ _channel = DEFAULT_CHANNEL
+
+ # Keycodes.g:8:7: ( '}' )
+ # Keycodes.g:8:9: '}'
+ pass
+ self.match(125)
+
+
+
+ self._state.type = _type
+ self._state.channel = _channel
+
+ finally:
+
+ pass
+
+ # $ANTLR end "T__23"
+
+
+
+ # $ANTLR start "T__24"
+ def mT__24(self, ):
+
+ try:
+ _type = T__24
+ _channel = DEFAULT_CHANNEL
+
+ # Keycodes.g:9:7: ( ';' )
+ # Keycodes.g:9:9: ';'
+ pass
+ self.match(59)
+
+
+
+ self._state.type = _type
+ self._state.channel = _channel
+
+ finally:
+
+ pass
+
+ # $ANTLR end "T__24"
+
+
+
+ # $ANTLR start "T__25"
+ def mT__25(self, ):
+
+ try:
+ _type = T__25
+ _channel = DEFAULT_CHANNEL
+
+ # Keycodes.g:10:7: ( 'include' )
+ # Keycodes.g:10:9: 'include'
+ pass
+ self.match("include")
+
+
+
+ self._state.type = _type
+ self._state.channel = _channel
+
+ finally:
+
+ pass
+
+ # $ANTLR end "T__25"
+
+
+
+ # $ANTLR start "T__26"
+ def mT__26(self, ):
+
+ try:
+ _type = T__26
+ _channel = DEFAULT_CHANNEL
+
+ # Keycodes.g:11:7: ( 'minimum' )
+ # Keycodes.g:11:9: 'minimum'
+ pass
+ self.match("minimum")
+
+
+
+ self._state.type = _type
+ self._state.channel = _channel
+
+ finally:
+
+ pass
+
+ # $ANTLR end "T__26"
+
+
+
+ # $ANTLR start "T__27"
+ def mT__27(self, ):
+
+ try:
+ _type = T__27
+ _channel = DEFAULT_CHANNEL
+
+ # Keycodes.g:12:7: ( '=' )
+ # Keycodes.g:12:9: '='
+ pass
+ self.match(61)
+
+
+
+ self._state.type = _type
+ self._state.channel = _channel
+
+ finally:
+
+ pass
+
+ # $ANTLR end "T__27"
+
+
+
+ # $ANTLR start "T__28"
+ def mT__28(self, ):
+
+ try:
+ _type = T__28
+ _channel = DEFAULT_CHANNEL
+
+ # Keycodes.g:13:7: ( 'maximum' )
+ # Keycodes.g:13:9: 'maximum'
+ pass
+ self.match("maximum")
+
+
+
+ self._state.type = _type
+ self._state.channel = _channel
+
+ finally:
+
+ pass
+
+ # $ANTLR end "T__28"
+
+
+
+ # $ANTLR start "T__29"
+ def mT__29(self, ):
+
+ try:
+ _type = T__29
+ _channel = DEFAULT_CHANNEL
+
+ # Keycodes.g:14:7: ( 'alias' )
+ # Keycodes.g:14:9: 'alias'
+ pass
+ self.match("alias")
+
+
+
+ self._state.type = _type
+ self._state.channel = _channel
+
+ finally:
+
+ pass
+
+ # $ANTLR end "T__29"
+
+
+
+ # $ANTLR start "T__30"
+ def mT__30(self, ):
+
+ try:
+ _type = T__30
+ _channel = DEFAULT_CHANNEL
+
+ # Keycodes.g:15:7: ( '<' )
+ # Keycodes.g:15:9: '<'
+ pass
+ self.match(60)
+
+
+
+ self._state.type = _type
+ self._state.channel = _channel
+
+ finally:
+
+ pass
+
+ # $ANTLR end "T__30"
+
+
+
+ # $ANTLR start "T__31"
+ def mT__31(self, ):
+
+ try:
+ _type = T__31
+ _channel = DEFAULT_CHANNEL
+
+ # Keycodes.g:16:7: ( '>' )
+ # Keycodes.g:16:9: '>'
+ pass
+ self.match(62)
+
+
+
+ self._state.type = _type
+ self._state.channel = _channel
+
+ finally:
+
+ pass
+
+ # $ANTLR end "T__31"
+
+
+
+ # $ANTLR start "T__32"
+ def mT__32(self, ):
+
+ try:
+ _type = T__32
+ _channel = DEFAULT_CHANNEL
+
+ # Keycodes.g:17:7: ( 'indicator' )
+ # Keycodes.g:17:9: 'indicator'
+ pass
+ self.match("indicator")
+
+
+
+ self._state.type = _type
+ self._state.channel = _channel
+
+ finally:
+
+ pass
+
+ # $ANTLR end "T__32"
+
+
+
+ # $ANTLR start "KEYCODELISTOPTS"
+ def mKEYCODELISTOPTS(self, ):
+
+ try:
+ _type = KEYCODELISTOPTS
+ _channel = DEFAULT_CHANNEL
+
+ # Keycodes.g:79:2: ( 'default' | 'xkb_keycodes' )
+ alt1 = 2
+ LA1_0 = self.input.LA(1)
+
+ if (LA1_0 == 100) :
+ alt1 = 1
+ elif (LA1_0 == 120) :
+ alt1 = 2
+ else:
+ nvae = NoViableAltException("", 1, 0, self.input)
+
+ raise nvae
+
+ if alt1 == 1:
+ # Keycodes.g:79:4: 'default'
+ pass
+ self.match("default")
+
+
+ elif alt1 == 2:
+ # Keycodes.g:80:4: 'xkb_keycodes'
+ pass
+ self.match("xkb_keycodes")
+
+
+ self._state.type = _type
+ self._state.channel = _channel
+
+ finally:
+
+ pass
+
+ # $ANTLR end "KEYCODELISTOPTS"
+
+
+
+ # $ANTLR start "NAME"
+ def mNAME(self, ):
+
+ try:
+ _type = NAME
+ _channel = DEFAULT_CHANNEL
+
+ # Keycodes.g:84:2: ( ( 'a' .. 'z' | 'A' .. 'Z' | '_' | '0' .. '9' | '+' | '-' )* )
+ # Keycodes.g:84:4: ( 'a' .. 'z' | 'A' .. 'Z' | '_' | '0' .. '9' | '+' | '-' )*
+ pass
+ # Keycodes.g:84:4: ( 'a' .. 'z' | 'A' .. 'Z' | '_' | '0' .. '9' | '+' | '-' )*
+ while True: #loop2
+ alt2 = 2
+ LA2_0 = self.input.LA(1)
+
+ if (LA2_0 == 43 or LA2_0 == 45 or (48 <= LA2_0 <= 57) or (65 <= LA2_0 <= 90) or LA2_0 == 95 or (97 <= LA2_0 <= 122)) :
+ alt2 = 1
+
+
+ if alt2 == 1:
+ # Keycodes.g:
+ pass
+ if self.input.LA(1) == 43 or self.input.LA(1) == 45 or (48 <= self.input.LA(1) <= 57) or (65 <= self.input.LA(1) <= 90) or self.input.LA(1) == 95 or (97 <= self.input.LA(1) <= 122):
+ self.input.consume()
+ else:
+ mse = MismatchedSetException(None, self.input)
+ self.recover(mse)
+ raise mse
+
+
+
+ else:
+ break #loop2
+
+
+
+
+
+ self._state.type = _type
+ self._state.channel = _channel
+
+ finally:
+
+ pass
+
+ # $ANTLR end "NAME"
+
+
+
+ # $ANTLR start "WS"
+ def mWS(self, ):
+
+ try:
+ _type = WS
+ _channel = DEFAULT_CHANNEL
+
+ # Keycodes.g:88:2: ( ( ' ' | '\\r' | '\\t' | '\\u000C' | '\\n' ) )
+ # Keycodes.g:89:2: ( ' ' | '\\r' | '\\t' | '\\u000C' | '\\n' )
+ pass
+ if (9 <= self.input.LA(1) <= 10) or (12 <= self.input.LA(1) <= 13) or self.input.LA(1) == 32:
+ self.input.consume()
+ else:
+ mse = MismatchedSetException(None, self.input)
+ self.recover(mse)
+ raise mse
+
+ #action start
+ _channel=HIDDEN;
+ #action end
+
+
+
+ self._state.type = _type
+ self._state.channel = _channel
+
+ finally:
+
+ pass
+
+ # $ANTLR end "WS"
+
+
+
+ # $ANTLR start "COMMENT"
+ def mCOMMENT(self, ):
+
+ try:
+ _type = COMMENT
+ _channel = DEFAULT_CHANNEL
+
+ # Keycodes.g:94:6: ( '/*' ( . )* '*/' )
+ # Keycodes.g:95:2: '/*' ( . )* '*/'
+ pass
+ self.match("/*")
+ # Keycodes.g:95:7: ( . )*
+ while True: #loop3
+ alt3 = 2
+ LA3_0 = self.input.LA(1)
+
+ if (LA3_0 == 42) :
+ LA3_1 = self.input.LA(2)
+
+ if (LA3_1 == 47) :
+ alt3 = 2
+ elif ((0 <= LA3_1 <= 46) or (48 <= LA3_1 <= 65535)) :
+ alt3 = 1
+
+
+ elif ((0 <= LA3_0 <= 41) or (43 <= LA3_0 <= 65535)) :
+ alt3 = 1
+
+
+ if alt3 == 1:
+ # Keycodes.g:95:7: .
+ pass
+ self.matchAny()
+
+
+ else:
+ break #loop3
+
+
+ self.match("*/")
+ #action start
+ _channel=HIDDEN;
+ #action end
+
+
+
+ self._state.type = _type
+ self._state.channel = _channel
+
+ finally:
+
+ pass
+
+ # $ANTLR end "COMMENT"
+
+
+
+ # $ANTLR start "LINE_COMMENT"
+ def mLINE_COMMENT(self, ):
+
+ try:
+ _type = LINE_COMMENT
+ _channel = DEFAULT_CHANNEL
+
+ # Keycodes.g:99:6: ( ( '//' | '#' ) (~ ( '\\n' | '\\r' ) )* ( '\\r' )? '\\n' )
+ # Keycodes.g:100:2: ( '//' | '#' ) (~ ( '\\n' | '\\r' ) )* ( '\\r' )? '\\n'
+ pass
+ # Keycodes.g:100:2: ( '//' | '#' )
+ alt4 = 2
+ LA4_0 = self.input.LA(1)
+
+ if (LA4_0 == 47) :
+ alt4 = 1
+ elif (LA4_0 == 35) :
+ alt4 = 2
+ else:
+ nvae = NoViableAltException("", 4, 0, self.input)
+
+ raise nvae
+
+ if alt4 == 1:
+ # Keycodes.g:100:3: '//'
+ pass
+ self.match("//")
+
+
+ elif alt4 == 2:
+ # Keycodes.g:100:10: '#'
+ pass
+ self.match(35)
+
+
+
+ # Keycodes.g:100:16: (~ ( '\\n' | '\\r' ) )*
+ while True: #loop5
+ alt5 = 2
+ LA5_0 = self.input.LA(1)
+
+ if ((0 <= LA5_0 <= 9) or (11 <= LA5_0 <= 12) or (14 <= LA5_0 <= 65535)) :
+ alt5 = 1
+
+
+ if alt5 == 1:
+ # Keycodes.g:100:16: ~ ( '\\n' | '\\r' )
+ pass
+ if (0 <= self.input.LA(1) <= 9) or (11 <= self.input.LA(1) <= 12) or (14 <= self.input.LA(1) <= 65535):
+ self.input.consume()
+ else:
+ mse = MismatchedSetException(None, self.input)
+ self.recover(mse)
+ raise mse
+
+
+
+ else:
+ break #loop5
+
+
+ # Keycodes.g:100:32: ( '\\r' )?
+ alt6 = 2
+ LA6_0 = self.input.LA(1)
+
+ if (LA6_0 == 13) :
+ alt6 = 1
+ if alt6 == 1:
+ # Keycodes.g:100:32: '\\r'
+ pass
+ self.match(13)
+
+
+
+ self.match(10)
+ #action start
+ _channel=HIDDEN;
+ #action end
+
+
+
+ self._state.type = _type
+ self._state.channel = _channel
+
+ finally:
+
+ pass
+
+ # $ANTLR end "LINE_COMMENT"
+
+
+
+ # $ANTLR start "DQSTRING"
+ def mDQSTRING(self, ):
+
+ try:
+ _type = DQSTRING
+ _channel = DEFAULT_CHANNEL
+
+ # Keycodes.g:108:6: ( '\"' ( ( options {greedy=false; } : ~ ( '\"' ) )* ) '\"' )
+ # Keycodes.g:108:10: '\"' ( ( options {greedy=false; } : ~ ( '\"' ) )* ) '\"'
+ pass
+ self.match(34)
+ # Keycodes.g:108:14: ( ( options {greedy=false; } : ~ ( '\"' ) )* )
+ # Keycodes.g:108:15: ( options {greedy=false; } : ~ ( '\"' ) )*
+ pass
+ # Keycodes.g:108:15: ( options {greedy=false; } : ~ ( '\"' ) )*
+ while True: #loop7
+ alt7 = 2
+ LA7_0 = self.input.LA(1)
+
+ if ((0 <= LA7_0 <= 33) or (35 <= LA7_0 <= 65535)) :
+ alt7 = 1
+ elif (LA7_0 == 34) :
+ alt7 = 2
+
+
+ if alt7 == 1:
+ # Keycodes.g:108:40: ~ ( '\"' )
+ pass
+ if (0 <= self.input.LA(1) <= 33) or (35 <= self.input.LA(1) <= 65535):
+ self.input.consume()
+ else:
+ mse = MismatchedSetException(None, self.input)
+ self.recover(mse)
+ raise mse
+
+
+
+ else:
+ break #loop7
+
+
+
+
+
+ self.match(34)
+
+
+
+ self._state.type = _type
+ self._state.channel = _channel
+
+ finally:
+
+ pass
+
+ # $ANTLR end "DQSTRING"
+
+
+
+ def mTokens(self):
+ # Keycodes.g:1:8: ( T__22 | T__23 | T__24 | T__25 | T__26 | T__27 | T__28 | T__29 | T__30 | T__31 | T__32 | KEYCODELISTOPTS | NAME | WS | COMMENT | LINE_COMMENT | DQSTRING )
+ alt8 = 17
+ alt8 = self.dfa8.predict(self.input)
+ if alt8 == 1:
+ # Keycodes.g:1:10: T__22
+ pass
+ self.mT__22()
+
+
+ elif alt8 == 2:
+ # Keycodes.g:1:16: T__23
+ pass
+ self.mT__23()
+
+
+ elif alt8 == 3:
+ # Keycodes.g:1:22: T__24
+ pass
+ self.mT__24()
+
+
+ elif alt8 == 4:
+ # Keycodes.g:1:28: T__25
+ pass
+ self.mT__25()
+
+
+ elif alt8 == 5:
+ # Keycodes.g:1:34: T__26
+ pass
+ self.mT__26()
+
+
+ elif alt8 == 6:
+ # Keycodes.g:1:40: T__27
+ pass
+ self.mT__27()
+
+
+ elif alt8 == 7:
+ # Keycodes.g:1:46: T__28
+ pass
+ self.mT__28()
+
+
+ elif alt8 == 8:
+ # Keycodes.g:1:52: T__29
+ pass
+ self.mT__29()
+
+
+ elif alt8 == 9:
+ # Keycodes.g:1:58: T__30
+ pass
+ self.mT__30()
+
+
+ elif alt8 == 10:
+ # Keycodes.g:1:64: T__31
+ pass
+ self.mT__31()
+
+
+ elif alt8 == 11:
+ # Keycodes.g:1:70: T__32
+ pass
+ self.mT__32()
+
+
+ elif alt8 == 12:
+ # Keycodes.g:1:76: KEYCODELISTOPTS
+ pass
+ self.mKEYCODELISTOPTS()
+
+
+ elif alt8 == 13:
+ # Keycodes.g:1:92: NAME
+ pass
+ self.mNAME()
+
+
+ elif alt8 == 14:
+ # Keycodes.g:1:97: WS
+ pass
+ self.mWS()
+
+
+ elif alt8 == 15:
+ # Keycodes.g:1:100: COMMENT
+ pass
+ self.mCOMMENT()
+
+
+ elif alt8 == 16:
+ # Keycodes.g:1:108: LINE_COMMENT
+ pass
+ self.mLINE_COMMENT()
+
+
+ elif alt8 == 17:
+ # Keycodes.g:1:121: DQSTRING
+ pass
+ self.mDQSTRING()
+
+
+
+
+
+
+
+ # lookup tables for DFA #8
+
+ DFA8_eot = DFA.unpack(
+ u"\1\14\3\uffff\2\14\1\uffff\1\14\2\uffff\2\14\5\uffff\6\14\1\uffff"
+ u"\22\14\1\61\6\14\1\uffff\2\14\1\72\1\14\1\74\1\75\1\76\1\14\1\uffff"
+ u"\1\14\3\uffff\1\14\1\102\1\14\1\uffff\2\14\1\76"
+ )
+
+ DFA8_eof = DFA.unpack(
+ u"\106\uffff"
+ )
+
+ DFA8_min = DFA.unpack(
+ u"\1\11\3\uffff\1\156\1\141\1\uffff\1\154\2\uffff\1\145\1\153\2\uffff"
+ u"\1\52\2\uffff\1\143\1\156\1\170\1\151\1\146\1\142\1\uffff\1\154"
+ u"\3\151\2\141\1\137\1\165\1\143\2\155\1\163\1\165\1\153\1\144\1"
+ u"\141\2\165\1\53\1\154\2\145\1\164\2\155\1\uffff\1\164\1\171\1\53"
+ u"\1\157\3\53\1\143\1\uffff\1\162\3\uffff\1\157\1\53\1\144\1\uffff"
+ u"\1\145\1\163\1\53"
+ )
+
+ DFA8_max = DFA.unpack(
+ u"\1\175\3\uffff\1\156\1\151\1\uffff\1\154\2\uffff\1\145\1\153\2"
+ u"\uffff\1\57\2\uffff\1\144\1\156\1\170\1\151\1\146\1\142\1\uffff"
+ u"\1\154\3\151\2\141\1\137\1\165\1\143\2\155\1\163\1\165\1\153\1"
+ u"\144\1\141\2\165\1\172\1\154\2\145\1\164\2\155\1\uffff\1\164\1"
+ u"\171\1\172\1\157\3\172\1\143\1\uffff\1\162\3\uffff\1\157\1\172"
+ u"\1\144\1\uffff\1\145\1\163\1\172"
+ )
+
+ DFA8_accept = DFA.unpack(
+ u"\1\uffff\1\1\1\2\1\3\2\uffff\1\6\1\uffff\1\11\1\12\2\uffff\1\15"
+ u"\1\16\1\uffff\1\20\1\21\6\uffff\1\17\31\uffff\1\10\10\uffff\1\4"
+ u"\1\uffff\1\5\1\7\1\14\3\uffff\1\13\3\uffff"
+ )
+
+ DFA8_special = DFA.unpack(
+ u"\106\uffff"
+ )
+
+
+ DFA8_transition = [
+ DFA.unpack(u"\2\15\1\uffff\2\15\22\uffff\1\15\1\uffff\1\20\1\17\13"
+ u"\uffff\1\16\13\uffff\1\3\1\10\1\6\1\11\42\uffff\1\7\2\uffff\1\12"
+ u"\4\uffff\1\4\3\uffff\1\5\12\uffff\1\13\2\uffff\1\1\1\uffff\1\2"),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u"\1\21"),
+ DFA.unpack(u"\1\23\7\uffff\1\22"),
+ DFA.unpack(u""),
+ DFA.unpack(u"\1\24"),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u"\1\25"),
+ DFA.unpack(u"\1\26"),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u"\1\27\4\uffff\1\17"),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u"\1\30\1\31"),
+ DFA.unpack(u"\1\32"),
+ DFA.unpack(u"\1\33"),
+ DFA.unpack(u"\1\34"),
+ DFA.unpack(u"\1\35"),
+ DFA.unpack(u"\1\36"),
+ DFA.unpack(u""),
+ DFA.unpack(u"\1\37"),
+ DFA.unpack(u"\1\40"),
+ DFA.unpack(u"\1\41"),
+ DFA.unpack(u"\1\42"),
+ DFA.unpack(u"\1\43"),
+ DFA.unpack(u"\1\44"),
+ DFA.unpack(u"\1\45"),
+ DFA.unpack(u"\1\46"),
+ DFA.unpack(u"\1\47"),
+ DFA.unpack(u"\1\50"),
+ DFA.unpack(u"\1\51"),
+ DFA.unpack(u"\1\52"),
+ DFA.unpack(u"\1\53"),
+ DFA.unpack(u"\1\54"),
+ DFA.unpack(u"\1\55"),
+ DFA.unpack(u"\1\56"),
+ DFA.unpack(u"\1\57"),
+ DFA.unpack(u"\1\60"),
+ DFA.unpack(u"\1\14\1\uffff\1\14\2\uffff\12\14\7\uffff\32\14\4\uffff"
+ u"\1\14\1\uffff\32\14"),
+ DFA.unpack(u"\1\62"),
+ DFA.unpack(u"\1\63"),
+ DFA.unpack(u"\1\64"),
+ DFA.unpack(u"\1\65"),
+ DFA.unpack(u"\1\66"),
+ DFA.unpack(u"\1\67"),
+ DFA.unpack(u""),
+ DFA.unpack(u"\1\70"),
+ DFA.unpack(u"\1\71"),
+ DFA.unpack(u"\1\14\1\uffff\1\14\2\uffff\12\14\7\uffff\32\14\4\uffff"
+ u"\1\14\1\uffff\32\14"),
+ DFA.unpack(u"\1\73"),
+ DFA.unpack(u"\1\14\1\uffff\1\14\2\uffff\12\14\7\uffff\32\14\4\uffff"
+ u"\1\14\1\uffff\32\14"),
+ DFA.unpack(u"\1\14\1\uffff\1\14\2\uffff\12\14\7\uffff\32\14\4\uffff"
+ u"\1\14\1\uffff\32\14"),
+ DFA.unpack(u"\1\14\1\uffff\1\14\2\uffff\12\14\7\uffff\32\14\4\uffff"
+ u"\1\14\1\uffff\32\14"),
+ DFA.unpack(u"\1\77"),
+ DFA.unpack(u""),
+ DFA.unpack(u"\1\100"),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u"\1\101"),
+ DFA.unpack(u"\1\14\1\uffff\1\14\2\uffff\12\14\7\uffff\32\14\4\uffff"
+ u"\1\14\1\uffff\32\14"),
+ DFA.unpack(u"\1\103"),
+ DFA.unpack(u""),
+ DFA.unpack(u"\1\104"),
+ DFA.unpack(u"\1\105"),
+ DFA.unpack(u"\1\14\1\uffff\1\14\2\uffff\12\14\7\uffff\32\14\4\uffff"
+ u"\1\14\1\uffff\32\14")
+ ]
+
+ # class definition for DFA #8
+
+ DFA8 = DFA
+
+
+
+
+def main(argv, stdin=sys.stdin, stdout=sys.stdout, stderr=sys.stderr):
+ from antlr3.main import LexerMain
+ main = LexerMain(KeycodesLexer)
+ main.stdin = stdin
+ main.stdout = stdout
+ main.stderr = stderr
+ main.execute(argv)
+
+
+if __name__ == '__main__':
+ main(sys.argv)
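
For orientation, here is a minimal sketch of driving the generated lexer directly with the antlr3 Python 2 runtime that this file already imports. The sample keycodes text and the tokenize_keycodes helper are illustrative assumptions and not part of this commit; only KeycodesLexer, the module-level EOF constant, and antlr3.ANTLRStringStream are taken from the code above and the standard runtime.

# Illustrative sketch only; assumes the antlr3 (Python 2) runtime used by the generated code.
import antlr3
from KeycodesLexer import KeycodesLexer, EOF

# Hypothetical sample input in the shape the grammar accepts
# (KEYCODELISTOPTS+ DQSTRING '{' ... '}' ';').
SAMPLE = '''
default xkb_keycodes "basic" {
    minimum = 8;
    maximum = 255;
    <ESC> = 9;
    alias <LatA> = <AC01>;
    indicator 1 = "Caps Lock";
};
'''

def tokenize_keycodes(text):
    # ANTLRStringStream feeds raw characters; nextToken() returns one token at a
    # time until EOF. WS and comment tokens come back on the HIDDEN channel.
    lexer = KeycodesLexer(antlr3.ANTLRStringStream(text))
    tok = lexer.nextToken()
    while tok.type != EOF:
        yield tok
        tok = lexer.nextToken()

if __name__ == '__main__':
    for tok in tokenize_keycodes(SAMPLE):
        print("%d %r" % (tok.type, tok.text))
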
diff --git a/KeycodesParser.py b/KeycodesParser.py
@@ -0,0 +1,1279 @@
+# $ANTLR 3.1.2 Keycodes.g 2019-08-13 08:28:51
+
+import sys
+from antlr3 import *
+from antlr3.compat import set, frozenset
+
+from antlr3.tree import *
+
+
+
+# for convenience in actions
+HIDDEN = BaseRecognizer.HIDDEN
+
+# token types
+KEYCODEDOC=4
+KEYCODELIST=5
+ALIAS=14
+KEYCODEMATERIAL=10
+INDICATOR=15
+DQSTRING=17
+COMMENT=20
+KEYCODELISTTYPE=6
+MINIMUM=12
+KEYCODE=16
+INCLUDE=11
+WS=19
+EOF=-1
+T__30=30
+KEYCODELISTNAME=9
+T__31=31
+T__32=32
+KEYCODELISTOPTS=8
+MAXIMUM=13
+NAME=18
+KEYCODELISTOPTIONS=7
+LINE_COMMENT=21
+T__26=26
+T__27=27
+T__28=28
+T__29=29
+T__22=22
+T__23=23
+T__24=24
+T__25=25
+
+# token names
+tokenNames = [
+ "<invalid>", "<EOR>", "<DOWN>", "<UP>",
+ "KEYCODEDOC", "KEYCODELIST", "KEYCODELISTTYPE", "KEYCODELISTOPTIONS",
+ "KEYCODELISTOPTS", "KEYCODELISTNAME", "KEYCODEMATERIAL", "INCLUDE",
+ "MINIMUM", "MAXIMUM", "ALIAS", "INDICATOR", "KEYCODE", "DQSTRING", "NAME",
+ "WS", "COMMENT", "LINE_COMMENT", "'{'", "'}'", "';'", "'include'", "'minimum'",
+ "'='", "'maximum'", "'alias'", "'<'", "'>'", "'indicator'"
+]
+
+
+
+
+class KeycodesParser(Parser):
+ grammarFileName = "Keycodes.g"
+ antlr_version = version_str_to_tuple("3.1.2")
+ antlr_version_str = "3.1.2"
+ tokenNames = tokenNames
+
+ def __init__(self, input, state=None):
+ if state is None:
+ state = RecognizerSharedState()
+
+ Parser.__init__(self, input, state)
+
+
+
+
+
+
+
+
+ self._adaptor = CommonTreeAdaptor()
+
+
+
+ def getTreeAdaptor(self):
+ return self._adaptor
+
+ def setTreeAdaptor(self, adaptor):
+ self._adaptor = adaptor
+
+ adaptor = property(getTreeAdaptor, setTreeAdaptor)
+
+
+ class keycodedoc_return(ParserRuleReturnScope):
+ def __init__(self):
+ ParserRuleReturnScope.__init__(self)
+
+ self.tree = None
+
+
+
+
+ # $ANTLR start "keycodedoc"
+ # Keycodes.g:30:1: keycodedoc : ( keycodelist )+ EOF -> ^( KEYCODEDOC ( keycodelist )+ ) ;
+ def keycodedoc(self, ):
+
+ retval = self.keycodedoc_return()
+ retval.start = self.input.LT(1)
+
+ root_0 = None
+
+ EOF2 = None
+ keycodelist1 = None
+
+
+ EOF2_tree = None
+ stream_EOF = RewriteRuleTokenStream(self._adaptor, "token EOF")
+ stream_keycodelist = RewriteRuleSubtreeStream(self._adaptor, "rule keycodelist")
+ try:
+ try:
+ # Keycodes.g:31:2: ( ( keycodelist )+ EOF -> ^( KEYCODEDOC ( keycodelist )+ ) )
+ # Keycodes.g:31:4: ( keycodelist )+ EOF
+ pass
+ # Keycodes.g:31:4: ( keycodelist )+
+ cnt1 = 0
+ while True: #loop1
+ alt1 = 2
+ LA1_0 = self.input.LA(1)
+
+ if (LA1_0 == KEYCODELISTOPTS) :
+ alt1 = 1
+
+
+ if alt1 == 1:
+ # Keycodes.g:31:4: keycodelist
+ pass
+ self._state.following.append(self.FOLLOW_keycodelist_in_keycodedoc97)
+ keycodelist1 = self.keycodelist()
+
+ self._state.following.pop()
+ stream_keycodelist.add(keycodelist1.tree)
+
+
+ else:
+ if cnt1 >= 1:
+ break #loop1
+
+ eee = EarlyExitException(1, self.input)
+ raise eee
+
+ cnt1 += 1
+
+
+ EOF2=self.match(self.input, EOF, self.FOLLOW_EOF_in_keycodedoc100)
+ stream_EOF.add(EOF2)
+
+ # AST Rewrite
+ # elements: keycodelist
+ # token labels:
+ # rule labels: retval
+ # token list labels:
+ # rule list labels:
+ # wildcard labels:
+
+ retval.tree = root_0
+
+ if retval is not None:
+ stream_retval = RewriteRuleSubtreeStream(self._adaptor, "rule retval", retval.tree)
+ else:
+ stream_retval = RewriteRuleSubtreeStream(self._adaptor, "token retval", None)
+
+
+ root_0 = self._adaptor.nil()
+ # 32:2: -> ^( KEYCODEDOC ( keycodelist )+ )
+ # Keycodes.g:32:5: ^( KEYCODEDOC ( keycodelist )+ )
+ root_1 = self._adaptor.nil()
+ root_1 = self._adaptor.becomeRoot(self._adaptor.createFromType(KEYCODEDOC, "KEYCODEDOC"), root_1)
+
+ # Keycodes.g:32:18: ( keycodelist )+
+ if not (stream_keycodelist.hasNext()):
+ raise RewriteEarlyExitException()
+
+ while stream_keycodelist.hasNext():
+ self._adaptor.addChild(root_1, stream_keycodelist.nextTree())
+
+
+ stream_keycodelist.reset()
+
+ self._adaptor.addChild(root_0, root_1)
+
+
+
+ retval.tree = root_0
+
+
+
+ retval.stop = self.input.LT(-1)
+
+
+ retval.tree = self._adaptor.rulePostProcessing(root_0)
+ self._adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop)
+
+
+ except RecognitionException, re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ retval.tree = self._adaptor.errorNode(self.input, retval.start, self.input.LT(-1), re)
+ finally:
+
+ pass
+
+ return retval
+
+ # $ANTLR end "keycodedoc"
+
+ class keycodelist_return(ParserRuleReturnScope):
+ def __init__(self):
+ ParserRuleReturnScope.__init__(self)
+
+ self.tree = None
+
+
+
+
+ # $ANTLR start "keycodelist"
+ # Keycodes.g:35:1: keycodelist : keycodelisttype '{' ( keycodeMaterial )+ '}' ';' -> ^( KEYCODELIST keycodelisttype ^( KEYCODEMATERIAL ( keycodeMaterial )+ ) ) ;
+ def keycodelist(self, ):
+
+ retval = self.keycodelist_return()
+ retval.start = self.input.LT(1)
+
+ root_0 = None
+
+ char_literal4 = None
+ char_literal6 = None
+ char_literal7 = None
+ keycodelisttype3 = None
+
+ keycodeMaterial5 = None
+
+
+ char_literal4_tree = None
+ char_literal6_tree = None
+ char_literal7_tree = None
+ stream_22 = RewriteRuleTokenStream(self._adaptor, "token 22")
+ stream_23 = RewriteRuleTokenStream(self._adaptor, "token 23")
+ stream_24 = RewriteRuleTokenStream(self._adaptor, "token 24")
+ stream_keycodelisttype = RewriteRuleSubtreeStream(self._adaptor, "rule keycodelisttype")
+ stream_keycodeMaterial = RewriteRuleSubtreeStream(self._adaptor, "rule keycodeMaterial")
+ try:
+ try:
+ # Keycodes.g:36:2: ( keycodelisttype '{' ( keycodeMaterial )+ '}' ';' -> ^( KEYCODELIST keycodelisttype ^( KEYCODEMATERIAL ( keycodeMaterial )+ ) ) )
+ # Keycodes.g:36:4: keycodelisttype '{' ( keycodeMaterial )+ '}' ';'
+ pass
+ self._state.following.append(self.FOLLOW_keycodelisttype_in_keycodelist123)
+ keycodelisttype3 = self.keycodelisttype()
+
+ self._state.following.pop()
+ stream_keycodelisttype.add(keycodelisttype3.tree)
+ char_literal4=self.match(self.input, 22, self.FOLLOW_22_in_keycodelist125)
+ stream_22.add(char_literal4)
+ # Keycodes.g:36:24: ( keycodeMaterial )+
+ cnt2 = 0
+ while True: #loop2
+ alt2 = 2
+ LA2_0 = self.input.LA(1)
+
+ if ((25 <= LA2_0 <= 26) or (28 <= LA2_0 <= 30) or LA2_0 == 32) :
+ alt2 = 1
+
+
+ if alt2 == 1:
+ # Keycodes.g:36:24: keycodeMaterial
+ pass
+ self._state.following.append(self.FOLLOW_keycodeMaterial_in_keycodelist127)
+ keycodeMaterial5 = self.keycodeMaterial()
+
+ self._state.following.pop()
+ stream_keycodeMaterial.add(keycodeMaterial5.tree)
+
+
+ else:
+ if cnt2 >= 1:
+ break #loop2
+
+ eee = EarlyExitException(2, self.input)
+ raise eee
+
+ cnt2 += 1
+
+
+ char_literal6=self.match(self.input, 23, self.FOLLOW_23_in_keycodelist130)
+ stream_23.add(char_literal6)
+ char_literal7=self.match(self.input, 24, self.FOLLOW_24_in_keycodelist132)
+ stream_24.add(char_literal7)
+
+ # AST Rewrite
+ # elements: keycodeMaterial, keycodelisttype
+ # token labels:
+ # rule labels: retval
+ # token list labels:
+ # rule list labels:
+ # wildcard labels:
+
+ retval.tree = root_0
+
+ if retval is not None:
+ stream_retval = RewriteRuleSubtreeStream(self._adaptor, "rule retval", retval.tree)
+ else:
+ stream_retval = RewriteRuleSubtreeStream(self._adaptor, "token retval", None)
+
+
+ root_0 = self._adaptor.nil()
+ # 37:2: -> ^( KEYCODELIST keycodelisttype ^( KEYCODEMATERIAL ( keycodeMaterial )+ ) )
+ # Keycodes.g:37:5: ^( KEYCODELIST keycodelisttype ^( KEYCODEMATERIAL ( keycodeMaterial )+ ) )
+ root_1 = self._adaptor.nil()
+ root_1 = self._adaptor.becomeRoot(self._adaptor.createFromType(KEYCODELIST, "KEYCODELIST"), root_1)
+
+ self._adaptor.addChild(root_1, stream_keycodelisttype.nextTree())
+ # Keycodes.g:37:35: ^( KEYCODEMATERIAL ( keycodeMaterial )+ )
+ root_2 = self._adaptor.nil()
+ root_2 = self._adaptor.becomeRoot(self._adaptor.createFromType(KEYCODEMATERIAL, "KEYCODEMATERIAL"), root_2)
+
+ # Keycodes.g:37:53: ( keycodeMaterial )+
+ if not (stream_keycodeMaterial.hasNext()):
+ raise RewriteEarlyExitException()
+
+ while stream_keycodeMaterial.hasNext():
+ self._adaptor.addChild(root_2, stream_keycodeMaterial.nextTree())
+
+
+ stream_keycodeMaterial.reset()
+
+ self._adaptor.addChild(root_1, root_2)
+
+ self._adaptor.addChild(root_0, root_1)
+
+
+
+ retval.tree = root_0
+
+
+
+ retval.stop = self.input.LT(-1)
+
+
+ retval.tree = self._adaptor.rulePostProcessing(root_0)
+ self._adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop)
+
+
+ except RecognitionException, re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ retval.tree = self._adaptor.errorNode(self.input, retval.start, self.input.LT(-1), re)
+ finally:
+
+ pass
+
+ return retval
+
+ # $ANTLR end "keycodelist"
+
+ class keycodelisttype_return(ParserRuleReturnScope):
+ def __init__(self):
+ ParserRuleReturnScope.__init__(self)
+
+ self.tree = None
+
+
+
+
+ # $ANTLR start "keycodelisttype"
+ # Keycodes.g:40:1: keycodelisttype : ( KEYCODELISTOPTS )+ DQSTRING -> ^( KEYCODELISTTYPE ^( KEYCODELISTOPTIONS ( KEYCODELISTOPTS )+ ) ^( KEYCODELISTNAME DQSTRING ) ) ;
+ def keycodelisttype(self, ):
+
+ retval = self.keycodelisttype_return()
+ retval.start = self.input.LT(1)
+
+ root_0 = None
+
+ KEYCODELISTOPTS8 = None
+ DQSTRING9 = None
+
+ KEYCODELISTOPTS8_tree = None
+ DQSTRING9_tree = None
+ stream_KEYCODELISTOPTS = RewriteRuleTokenStream(self._adaptor, "token KEYCODELISTOPTS")
+ stream_DQSTRING = RewriteRuleTokenStream(self._adaptor, "token DQSTRING")
+
+ try:
+ try:
+ # Keycodes.g:41:2: ( ( KEYCODELISTOPTS )+ DQSTRING -> ^( KEYCODELISTTYPE ^( KEYCODELISTOPTIONS ( KEYCODELISTOPTS )+ ) ^( KEYCODELISTNAME DQSTRING ) ) )
+ # Keycodes.g:41:4: ( KEYCODELISTOPTS )+ DQSTRING
+ pass
+ # Keycodes.g:41:4: ( KEYCODELISTOPTS )+
+ cnt3 = 0
+ while True: #loop3
+ alt3 = 2
+ LA3_0 = self.input.LA(1)
+
+ if (LA3_0 == KEYCODELISTOPTS) :
+ alt3 = 1
+
+
+ if alt3 == 1:
+ # Keycodes.g:41:4: KEYCODELISTOPTS
+ pass
+ KEYCODELISTOPTS8=self.match(self.input, KEYCODELISTOPTS, self.FOLLOW_KEYCODELISTOPTS_in_keycodelisttype160)
+ stream_KEYCODELISTOPTS.add(KEYCODELISTOPTS8)
+
+
+ else:
+ if cnt3 >= 1:
+ break #loop3
+
+ eee = EarlyExitException(3, self.input)
+ raise eee
+
+ cnt3 += 1
+
+
+ DQSTRING9=self.match(self.input, DQSTRING, self.FOLLOW_DQSTRING_in_keycodelisttype163)
+ stream_DQSTRING.add(DQSTRING9)
+
+ # AST Rewrite
+ # elements: DQSTRING, KEYCODELISTOPTS
+ # token labels:
+ # rule labels: retval
+ # token list labels:
+ # rule list labels:
+ # wildcard labels:
+
+ retval.tree = root_0
+
+ if retval is not None:
+ stream_retval = RewriteRuleSubtreeStream(self._adaptor, "rule retval", retval.tree)
+ else:
+ stream_retval = RewriteRuleSubtreeStream(self._adaptor, "token retval", None)
+
+
+ root_0 = self._adaptor.nil()
+ # 42:2: -> ^( KEYCODELISTTYPE ^( KEYCODELISTOPTIONS ( KEYCODELISTOPTS )+ ) ^( KEYCODELISTNAME DQSTRING ) )
+ # Keycodes.g:42:5: ^( KEYCODELISTTYPE ^( KEYCODELISTOPTIONS ( KEYCODELISTOPTS )+ ) ^( KEYCODELISTNAME DQSTRING ) )
+ root_1 = self._adaptor.nil()
+ root_1 = self._adaptor.becomeRoot(self._adaptor.createFromType(KEYCODELISTTYPE, "KEYCODELISTTYPE"), root_1)
+
+ # Keycodes.g:42:23: ^( KEYCODELISTOPTIONS ( KEYCODELISTOPTS )+ )
+ root_2 = self._adaptor.nil()
+ root_2 = self._adaptor.becomeRoot(self._adaptor.createFromType(KEYCODELISTOPTIONS, "KEYCODELISTOPTIONS"), root_2)
+
+ # Keycodes.g:42:44: ( KEYCODELISTOPTS )+
+ if not (stream_KEYCODELISTOPTS.hasNext()):
+ raise RewriteEarlyExitException()
+
+ while stream_KEYCODELISTOPTS.hasNext():
+ self._adaptor.addChild(root_2, stream_KEYCODELISTOPTS.nextNode())
+
+
+ stream_KEYCODELISTOPTS.reset()
+
+ self._adaptor.addChild(root_1, root_2)
+ # Keycodes.g:42:62: ^( KEYCODELISTNAME DQSTRING )
+ root_2 = self._adaptor.nil()
+ root_2 = self._adaptor.becomeRoot(self._adaptor.createFromType(KEYCODELISTNAME, "KEYCODELISTNAME"), root_2)
+
+ self._adaptor.addChild(root_2, stream_DQSTRING.nextNode())
+
+ self._adaptor.addChild(root_1, root_2)
+
+ self._adaptor.addChild(root_0, root_1)
+
+
+
+ retval.tree = root_0
+
+
+
+ retval.stop = self.input.LT(-1)
+
+
+ retval.tree = self._adaptor.rulePostProcessing(root_0)
+ self._adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop)
+
+
+ except RecognitionException, re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ retval.tree = self._adaptor.errorNode(self.input, retval.start, self.input.LT(-1), re)
+ finally:
+
+ pass
+
+ return retval
+
+ # $ANTLR end "keycodelisttype"
+
+ class keycodeMaterial_return(ParserRuleReturnScope):
+ def __init__(self):
+ ParserRuleReturnScope.__init__(self)
+
+ self.tree = None
+
+
+
+
+ # $ANTLR start "keycodeMaterial"
+ # Keycodes.g:45:1: keycodeMaterial : ( line_include | line_minmax ';' | line_alias ';' | line_keycode ';' | line_indicator ';' );
+ def keycodeMaterial(self, ):
+
+ retval = self.keycodeMaterial_return()
+ retval.start = self.input.LT(1)
+
+ root_0 = None
+
+ char_literal12 = None
+ char_literal14 = None
+ char_literal16 = None
+ char_literal18 = None
+ line_include10 = None
+
+ line_minmax11 = None
+
+ line_alias13 = None
+
+ line_keycode15 = None
+
+ line_indicator17 = None
+
+
+ char_literal12_tree = None
+ char_literal14_tree = None
+ char_literal16_tree = None
+ char_literal18_tree = None
+
+ try:
+ try:
+ # Keycodes.g:46:2: ( line_include | line_minmax ';' | line_alias ';' | line_keycode ';' | line_indicator ';' )
+ alt4 = 5
+ LA4 = self.input.LA(1)
+ if LA4 == 25:
+ alt4 = 1
+ elif LA4 == 26 or LA4 == 28:
+ alt4 = 2
+ elif LA4 == 29:
+ alt4 = 3
+ elif LA4 == 30:
+ alt4 = 4
+ elif LA4 == 32:
+ alt4 = 5
+ else:
+ nvae = NoViableAltException("", 4, 0, self.input)
+
+ raise nvae
+
+ if alt4 == 1:
+ # Keycodes.g:46:4: line_include
+ pass
+ root_0 = self._adaptor.nil()
+
+ self._state.following.append(self.FOLLOW_line_include_in_keycodeMaterial195)
+ line_include10 = self.line_include()
+
+ self._state.following.pop()
+ self._adaptor.addChild(root_0, line_include10.tree)
+
+
+ elif alt4 == 2:
+ # Keycodes.g:47:4: line_minmax ';'
+ pass
+ root_0 = self._adaptor.nil()
+
+ self._state.following.append(self.FOLLOW_line_minmax_in_keycodeMaterial201)
+ line_minmax11 = self.line_minmax()
+
+ self._state.following.pop()
+ self._adaptor.addChild(root_0, line_minmax11.tree)
+ char_literal12=self.match(self.input, 24, self.FOLLOW_24_in_keycodeMaterial203)
+
+
+ elif alt4 == 3:
+ # Keycodes.g:48:4: line_alias ';'
+ pass
+ root_0 = self._adaptor.nil()
+
+ self._state.following.append(self.FOLLOW_line_alias_in_keycodeMaterial209)
+ line_alias13 = self.line_alias()
+
+ self._state.following.pop()
+ self._adaptor.addChild(root_0, line_alias13.tree)
+ char_literal14=self.match(self.input, 24, self.FOLLOW_24_in_keycodeMaterial211)
+
+
+ elif alt4 == 4:
+ # Keycodes.g:49:4: line_keycode ';'
+ pass
+ root_0 = self._adaptor.nil()
+
+ self._state.following.append(self.FOLLOW_line_keycode_in_keycodeMaterial217)
+ line_keycode15 = self.line_keycode()
+
+ self._state.following.pop()
+ self._adaptor.addChild(root_0, line_keycode15.tree)
+ char_literal16=self.match(self.input, 24, self.FOLLOW_24_in_keycodeMaterial219)
+
+
+ elif alt4 == 5:
+ # Keycodes.g:50:4: line_indicator ';'
+ pass
+ root_0 = self._adaptor.nil()
+
+ self._state.following.append(self.FOLLOW_line_indicator_in_keycodeMaterial225)
+ line_indicator17 = self.line_indicator()
+
+ self._state.following.pop()
+ self._adaptor.addChild(root_0, line_indicator17.tree)
+ char_literal18=self.match(self.input, 24, self.FOLLOW_24_in_keycodeMaterial227)
+
+
+ retval.stop = self.input.LT(-1)
+
+
+ retval.tree = self._adaptor.rulePostProcessing(root_0)
+ self._adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop)
+
+
+ except RecognitionException, re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ retval.tree = self._adaptor.errorNode(self.input, retval.start, self.input.LT(-1), re)
+ finally:
+
+ pass
+
+ return retval
+
+ # $ANTLR end "keycodeMaterial"
+
+ class line_include_return(ParserRuleReturnScope):
+ def __init__(self):
+ ParserRuleReturnScope.__init__(self)
+
+ self.tree = None
+
+
+
+
+ # $ANTLR start "line_include"
+ # Keycodes.g:53:1: line_include : 'include' DQSTRING -> ^( INCLUDE DQSTRING ) ;
+ def line_include(self, ):
+
+ retval = self.line_include_return()
+ retval.start = self.input.LT(1)
+
+ root_0 = None
+
+ string_literal19 = None
+ DQSTRING20 = None
+
+ string_literal19_tree = None
+ DQSTRING20_tree = None
+ stream_25 = RewriteRuleTokenStream(self._adaptor, "token 25")
+ stream_DQSTRING = RewriteRuleTokenStream(self._adaptor, "token DQSTRING")
+
+ try:
+ try:
+ # Keycodes.g:54:2: ( 'include' DQSTRING -> ^( INCLUDE DQSTRING ) )
+ # Keycodes.g:54:4: 'include' DQSTRING
+ pass
+ string_literal19=self.match(self.input, 25, self.FOLLOW_25_in_line_include239)
+ stream_25.add(string_literal19)
+ DQSTRING20=self.match(self.input, DQSTRING, self.FOLLOW_DQSTRING_in_line_include241)
+ stream_DQSTRING.add(DQSTRING20)
+
+ # AST Rewrite
+ # elements: DQSTRING
+ # token labels:
+ # rule labels: retval
+ # token list labels:
+ # rule list labels:
+ # wildcard labels:
+
+ retval.tree = root_0
+
+ if retval is not None:
+ stream_retval = RewriteRuleSubtreeStream(self._adaptor, "rule retval", retval.tree)
+ else:
+ stream_retval = RewriteRuleSubtreeStream(self._adaptor, "token retval", None)
+
+
+ root_0 = self._adaptor.nil()
+ # 55:2: -> ^( INCLUDE DQSTRING )
+ # Keycodes.g:55:5: ^( INCLUDE DQSTRING )
+ root_1 = self._adaptor.nil()
+ root_1 = self._adaptor.becomeRoot(self._adaptor.createFromType(INCLUDE, "INCLUDE"), root_1)
+
+ self._adaptor.addChild(root_1, stream_DQSTRING.nextNode())
+
+ self._adaptor.addChild(root_0, root_1)
+
+
+
+ retval.tree = root_0
+
+
+
+ retval.stop = self.input.LT(-1)
+
+
+ retval.tree = self._adaptor.rulePostProcessing(root_0)
+ self._adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop)
+
+
+ except RecognitionException, re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ retval.tree = self._adaptor.errorNode(self.input, retval.start, self.input.LT(-1), re)
+ finally:
+
+ pass
+
+ return retval
+
+ # $ANTLR end "line_include"
+
+ class line_minmax_return(ParserRuleReturnScope):
+ def __init__(self):
+ ParserRuleReturnScope.__init__(self)
+
+ self.tree = None
+
+
+
+
+ # $ANTLR start "line_minmax"
+ # Keycodes.g:58:1: line_minmax : ( 'minimum' '=' NAME -> ^( MINIMUM NAME ) | 'maximum' '=' NAME -> ^( MAXIMUM NAME ) );
+ def line_minmax(self, ):
+
+ retval = self.line_minmax_return()
+ retval.start = self.input.LT(1)
+
+ root_0 = None
+
+ string_literal21 = None
+ char_literal22 = None
+ NAME23 = None
+ string_literal24 = None
+ char_literal25 = None
+ NAME26 = None
+
+ string_literal21_tree = None
+ char_literal22_tree = None
+ NAME23_tree = None
+ string_literal24_tree = None
+ char_literal25_tree = None
+ NAME26_tree = None
+ stream_26 = RewriteRuleTokenStream(self._adaptor, "token 26")
+ stream_27 = RewriteRuleTokenStream(self._adaptor, "token 27")
+ stream_28 = RewriteRuleTokenStream(self._adaptor, "token 28")
+ stream_NAME = RewriteRuleTokenStream(self._adaptor, "token NAME")
+
+ try:
+ try:
+ # Keycodes.g:59:2: ( 'minimum' '=' NAME -> ^( MINIMUM NAME ) | 'maximum' '=' NAME -> ^( MAXIMUM NAME ) )
+ alt5 = 2
+ LA5_0 = self.input.LA(1)
+
+ if (LA5_0 == 26) :
+ alt5 = 1
+ elif (LA5_0 == 28) :
+ alt5 = 2
+ else:
+ nvae = NoViableAltException("", 5, 0, self.input)
+
+ raise nvae
+
+ if alt5 == 1:
+ # Keycodes.g:59:4: 'minimum' '=' NAME
+ pass
+ string_literal21=self.match(self.input, 26, self.FOLLOW_26_in_line_minmax261)
+ stream_26.add(string_literal21)
+ char_literal22=self.match(self.input, 27, self.FOLLOW_27_in_line_minmax263)
+ stream_27.add(char_literal22)
+ NAME23=self.match(self.input, NAME, self.FOLLOW_NAME_in_line_minmax265)
+ stream_NAME.add(NAME23)
+
+ # AST Rewrite
+ # elements: NAME
+ # token labels:
+ # rule labels: retval
+ # token list labels:
+ # rule list labels:
+ # wildcard labels:
+
+ retval.tree = root_0
+
+ if retval is not None:
+ stream_retval = RewriteRuleSubtreeStream(self._adaptor, "rule retval", retval.tree)
+ else:
+ stream_retval = RewriteRuleSubtreeStream(self._adaptor, "token retval", None)
+
+
+ root_0 = self._adaptor.nil()
+ # 59:23: -> ^( MINIMUM NAME )
+ # Keycodes.g:59:26: ^( MINIMUM NAME )
+ root_1 = self._adaptor.nil()
+ root_1 = self._adaptor.becomeRoot(self._adaptor.createFromType(MINIMUM, "MINIMUM"), root_1)
+
+ self._adaptor.addChild(root_1, stream_NAME.nextNode())
+
+ self._adaptor.addChild(root_0, root_1)
+
+
+
+ retval.tree = root_0
+
+
+ elif alt5 == 2:
+ # Keycodes.g:60:4: 'maximum' '=' NAME
+ pass
+ string_literal24=self.match(self.input, 28, self.FOLLOW_28_in_line_minmax278)
+ stream_28.add(string_literal24)
+ char_literal25=self.match(self.input, 27, self.FOLLOW_27_in_line_minmax280)
+ stream_27.add(char_literal25)
+ NAME26=self.match(self.input, NAME, self.FOLLOW_NAME_in_line_minmax282)
+ stream_NAME.add(NAME26)
+
+ # AST Rewrite
+ # elements: NAME
+ # token labels:
+ # rule labels: retval
+ # token list labels:
+ # rule list labels:
+ # wildcard labels:
+
+ retval.tree = root_0
+
+ if retval is not None:
+ stream_retval = RewriteRuleSubtreeStream(self._adaptor, "rule retval", retval.tree)
+ else:
+ stream_retval = RewriteRuleSubtreeStream(self._adaptor, "token retval", None)
+
+
+ root_0 = self._adaptor.nil()
+ # 60:23: -> ^( MAXIMUM NAME )
+ # Keycodes.g:60:26: ^( MAXIMUM NAME )
+ root_1 = self._adaptor.nil()
+ root_1 = self._adaptor.becomeRoot(self._adaptor.createFromType(MAXIMUM, "MAXIMUM"), root_1)
+
+ self._adaptor.addChild(root_1, stream_NAME.nextNode())
+
+ self._adaptor.addChild(root_0, root_1)
+
+
+
+ retval.tree = root_0
+
+
+ retval.stop = self.input.LT(-1)
+
+
+ retval.tree = self._adaptor.rulePostProcessing(root_0)
+ self._adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop)
+
+
+ except RecognitionException, re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ retval.tree = self._adaptor.errorNode(self.input, retval.start, self.input.LT(-1), re)
+ finally:
+
+ pass
+
+ return retval
+
+ # $ANTLR end "line_minmax"
+
+ class line_alias_return(ParserRuleReturnScope):
+ def __init__(self):
+ ParserRuleReturnScope.__init__(self)
+
+ self.tree = None
+
+
+
+
+ # $ANTLR start "line_alias"
+ # Keycodes.g:63:1: line_alias : 'alias' '<' val+= NAME '>' '=' '<' val+= NAME '>' -> ^( ALIAS ( $val)+ ) ;
+ def line_alias(self, ):
+
+ retval = self.line_alias_return()
+ retval.start = self.input.LT(1)
+
+ root_0 = None
+
+ string_literal27 = None
+ char_literal28 = None
+ char_literal29 = None
+ char_literal30 = None
+ char_literal31 = None
+ char_literal32 = None
+ val = None
+ list_val = None
+
+ string_literal27_tree = None
+ char_literal28_tree = None
+ char_literal29_tree = None
+ char_literal30_tree = None
+ char_literal31_tree = None
+ char_literal32_tree = None
+ val_tree = None
+ stream_27 = RewriteRuleTokenStream(self._adaptor, "token 27")
+ stream_29 = RewriteRuleTokenStream(self._adaptor, "token 29")
+ stream_30 = RewriteRuleTokenStream(self._adaptor, "token 30")
+ stream_31 = RewriteRuleTokenStream(self._adaptor, "token 31")
+ stream_NAME = RewriteRuleTokenStream(self._adaptor, "token NAME")
+
+ try:
+ try:
+ # Keycodes.g:64:2: ( 'alias' '<' val+= NAME '>' '=' '<' val+= NAME '>' -> ^( ALIAS ( $val)+ ) )
+ # Keycodes.g:64:4: 'alias' '<' val+= NAME '>' '=' '<' val+= NAME '>'
+ pass
+ string_literal27=self.match(self.input, 29, self.FOLLOW_29_in_line_alias301)
+ stream_29.add(string_literal27)
+ char_literal28=self.match(self.input, 30, self.FOLLOW_30_in_line_alias303)
+ stream_30.add(char_literal28)
+ val=self.match(self.input, NAME, self.FOLLOW_NAME_in_line_alias307)
+ stream_NAME.add(val)
+ if list_val is None:
+ list_val = []
+ list_val.append(val)
+
+ char_literal29=self.match(self.input, 31, self.FOLLOW_31_in_line_alias309)
+ stream_31.add(char_literal29)
+ char_literal30=self.match(self.input, 27, self.FOLLOW_27_in_line_alias311)
+ stream_27.add(char_literal30)
+ char_literal31=self.match(self.input, 30, self.FOLLOW_30_in_line_alias313)
+ stream_30.add(char_literal31)
+ val=self.match(self.input, NAME, self.FOLLOW_NAME_in_line_alias317)
+ stream_NAME.add(val)
+ if list_val is None:
+ list_val = []
+ list_val.append(val)
+
+ char_literal32=self.match(self.input, 31, self.FOLLOW_31_in_line_alias319)
+ stream_31.add(char_literal32)
+
+ # AST Rewrite
+ # elements: val
+ # token labels:
+ # rule labels: retval
+ # token list labels: val
+ # rule list labels:
+ # wildcard labels:
+
+ retval.tree = root_0
+ stream_val = RewriteRuleTokenStream(self._adaptor, "token val", list_val)
+
+ if retval is not None:
+ stream_retval = RewriteRuleSubtreeStream(self._adaptor, "rule retval", retval.tree)
+ else:
+ stream_retval = RewriteRuleSubtreeStream(self._adaptor, "token retval", None)
+
+
+ root_0 = self._adaptor.nil()
+ # 65:2: -> ^( ALIAS ( $val)+ )
+ # Keycodes.g:65:5: ^( ALIAS ( $val)+ )
+ root_1 = self._adaptor.nil()
+ root_1 = self._adaptor.becomeRoot(self._adaptor.createFromType(ALIAS, "ALIAS"), root_1)
+
+ # Keycodes.g:65:13: ( $val)+
+ if not (stream_val.hasNext()):
+ raise RewriteEarlyExitException()
+
+ while stream_val.hasNext():
+ self._adaptor.addChild(root_1, stream_val.nextNode())
+
+
+ stream_val.reset()
+
+ self._adaptor.addChild(root_0, root_1)
+
+
+
+ retval.tree = root_0
+
+
+
+ retval.stop = self.input.LT(-1)
+
+
+ retval.tree = self._adaptor.rulePostProcessing(root_0)
+ self._adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop)
+
+
+ except RecognitionException, re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ retval.tree = self._adaptor.errorNode(self.input, retval.start, self.input.LT(-1), re)
+ finally:
+
+ pass
+
+ return retval
+
+ # $ANTLR end "line_alias"
+
+ class line_keycode_return(ParserRuleReturnScope):
+ def __init__(self):
+ ParserRuleReturnScope.__init__(self)
+
+ self.tree = None
+
+
+
+
+ # $ANTLR start "line_keycode"
+ # Keycodes.g:68:1: line_keycode : '<' val+= NAME '>' '=' val+= NAME -> ^( KEYCODE ( NAME )+ ) ;
+ def line_keycode(self, ):
+
+ retval = self.line_keycode_return()
+ retval.start = self.input.LT(1)
+
+ root_0 = None
+
+ char_literal33 = None
+ char_literal34 = None
+ char_literal35 = None
+ val = None
+ list_val = None
+
+ char_literal33_tree = None
+ char_literal34_tree = None
+ char_literal35_tree = None
+ val_tree = None
+ stream_27 = RewriteRuleTokenStream(self._adaptor, "token 27")
+ stream_30 = RewriteRuleTokenStream(self._adaptor, "token 30")
+ stream_31 = RewriteRuleTokenStream(self._adaptor, "token 31")
+ stream_NAME = RewriteRuleTokenStream(self._adaptor, "token NAME")
+
+ try:
+ try:
+ # Keycodes.g:69:2: ( '<' val+= NAME '>' '=' val+= NAME -> ^( KEYCODE ( NAME )+ ) )
+ # Keycodes.g:69:4: '<' val+= NAME '>' '=' val+= NAME
+ pass
+ char_literal33=self.match(self.input, 30, self.FOLLOW_30_in_line_keycode341)
+ stream_30.add(char_literal33)
+ val=self.match(self.input, NAME, self.FOLLOW_NAME_in_line_keycode345)
+ stream_NAME.add(val)
+ if list_val is None:
+ list_val = []
+ list_val.append(val)
+
+ char_literal34=self.match(self.input, 31, self.FOLLOW_31_in_line_keycode347)
+ stream_31.add(char_literal34)
+ char_literal35=self.match(self.input, 27, self.FOLLOW_27_in_line_keycode349)
+ stream_27.add(char_literal35)
+ val=self.match(self.input, NAME, self.FOLLOW_NAME_in_line_keycode353)
+ stream_NAME.add(val)
+ if list_val is None:
+ list_val = []
+ list_val.append(val)
+
+
+ # AST Rewrite
+ # elements: NAME
+ # token labels:
+ # rule labels: retval
+ # token list labels:
+ # rule list labels:
+ # wildcard labels:
+
+ retval.tree = root_0
+
+ if retval is not None:
+ stream_retval = RewriteRuleSubtreeStream(self._adaptor, "rule retval", retval.tree)
+ else:
+ stream_retval = RewriteRuleSubtreeStream(self._adaptor, "token retval", None)
+
+
+ root_0 = self._adaptor.nil()
+ # 70:2: -> ^( KEYCODE ( NAME )+ )
+ # Keycodes.g:70:5: ^( KEYCODE ( NAME )+ )
+ root_1 = self._adaptor.nil()
+ root_1 = self._adaptor.becomeRoot(self._adaptor.createFromType(KEYCODE, "KEYCODE"), root_1)
+
+ # Keycodes.g:70:15: ( NAME )+
+ if not (stream_NAME.hasNext()):
+ raise RewriteEarlyExitException()
+
+ while stream_NAME.hasNext():
+ self._adaptor.addChild(root_1, stream_NAME.nextNode())
+
+
+ stream_NAME.reset()
+
+ self._adaptor.addChild(root_0, root_1)
+
+
+
+ retval.tree = root_0
+
+
+
+ retval.stop = self.input.LT(-1)
+
+
+ retval.tree = self._adaptor.rulePostProcessing(root_0)
+ self._adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop)
+
+
+ except RecognitionException, re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ retval.tree = self._adaptor.errorNode(self.input, retval.start, self.input.LT(-1), re)
+ finally:
+
+ pass
+
+ return retval
+
+ # $ANTLR end "line_keycode"
+
+ class line_indicator_return(ParserRuleReturnScope):
+ def __init__(self):
+ ParserRuleReturnScope.__init__(self)
+
+ self.tree = None
+
+
+
+
+ # $ANTLR start "line_indicator"
+ # Keycodes.g:73:1: line_indicator : 'indicator' NAME '=' DQSTRING -> ^( INDICATOR NAME DQSTRING ) ;
+ def line_indicator(self, ):
+
+ retval = self.line_indicator_return()
+ retval.start = self.input.LT(1)
+
+ root_0 = None
+
+ string_literal36 = None
+ NAME37 = None
+ char_literal38 = None
+ DQSTRING39 = None
+
+ string_literal36_tree = None
+ NAME37_tree = None
+ char_literal38_tree = None
+ DQSTRING39_tree = None
+ stream_27 = RewriteRuleTokenStream(self._adaptor, "token 27")
+ stream_DQSTRING = RewriteRuleTokenStream(self._adaptor, "token DQSTRING")
+ stream_32 = RewriteRuleTokenStream(self._adaptor, "token 32")
+ stream_NAME = RewriteRuleTokenStream(self._adaptor, "token NAME")
+
+ try:
+ try:
+ # Keycodes.g:74:2: ( 'indicator' NAME '=' DQSTRING -> ^( INDICATOR NAME DQSTRING ) )
+ # Keycodes.g:74:4: 'indicator' NAME '=' DQSTRING
+ pass
+ string_literal36=self.match(self.input, 32, self.FOLLOW_32_in_line_indicator374)
+ stream_32.add(string_literal36)
+ NAME37=self.match(self.input, NAME, self.FOLLOW_NAME_in_line_indicator376)
+ stream_NAME.add(NAME37)
+ char_literal38=self.match(self.input, 27, self.FOLLOW_27_in_line_indicator378)
+ stream_27.add(char_literal38)
+ DQSTRING39=self.match(self.input, DQSTRING, self.FOLLOW_DQSTRING_in_line_indicator380)
+ stream_DQSTRING.add(DQSTRING39)
+
+ # AST Rewrite
+ # elements: NAME, DQSTRING
+ # token labels:
+ # rule labels: retval
+ # token list labels:
+ # rule list labels:
+ # wildcard labels:
+
+ retval.tree = root_0
+
+ if retval is not None:
+ stream_retval = RewriteRuleSubtreeStream(self._adaptor, "rule retval", retval.tree)
+ else:
+ stream_retval = RewriteRuleSubtreeStream(self._adaptor, "token retval", None)
+
+
+ root_0 = self._adaptor.nil()
+ # 75:2: -> ^( INDICATOR NAME DQSTRING )
+ # Keycodes.g:75:5: ^( INDICATOR NAME DQSTRING )
+ root_1 = self._adaptor.nil()
+ root_1 = self._adaptor.becomeRoot(self._adaptor.createFromType(INDICATOR, "INDICATOR"), root_1)
+
+ self._adaptor.addChild(root_1, stream_NAME.nextNode())
+ self._adaptor.addChild(root_1, stream_DQSTRING.nextNode())
+
+ self._adaptor.addChild(root_0, root_1)
+
+
+
+ retval.tree = root_0
+
+
+
+ retval.stop = self.input.LT(-1)
+
+
+ retval.tree = self._adaptor.rulePostProcessing(root_0)
+ self._adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop)
+
+
+ except RecognitionException, re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ retval.tree = self._adaptor.errorNode(self.input, retval.start, self.input.LT(-1), re)
+ finally:
+
+ pass
+
+ return retval
+
+ # $ANTLR end "line_indicator"
+
+
+ # Delegated rules
+
+
+
+
+ FOLLOW_keycodelist_in_keycodedoc97 = frozenset([8])
+ FOLLOW_EOF_in_keycodedoc100 = frozenset([1])
+ FOLLOW_keycodelisttype_in_keycodelist123 = frozenset([22])
+ FOLLOW_22_in_keycodelist125 = frozenset([25, 26, 28, 29, 30, 32])
+ FOLLOW_keycodeMaterial_in_keycodelist127 = frozenset([23, 25, 26, 28, 29, 30, 32])
+ FOLLOW_23_in_keycodelist130 = frozenset([24])
+ FOLLOW_24_in_keycodelist132 = frozenset([1])
+ FOLLOW_KEYCODELISTOPTS_in_keycodelisttype160 = frozenset([8, 17])
+ FOLLOW_DQSTRING_in_keycodelisttype163 = frozenset([1])
+ FOLLOW_line_include_in_keycodeMaterial195 = frozenset([1])
+ FOLLOW_line_minmax_in_keycodeMaterial201 = frozenset([24])
+ FOLLOW_24_in_keycodeMaterial203 = frozenset([1])
+ FOLLOW_line_alias_in_keycodeMaterial209 = frozenset([24])
+ FOLLOW_24_in_keycodeMaterial211 = frozenset([1])
+ FOLLOW_line_keycode_in_keycodeMaterial217 = frozenset([24])
+ FOLLOW_24_in_keycodeMaterial219 = frozenset([1])
+ FOLLOW_line_indicator_in_keycodeMaterial225 = frozenset([24])
+ FOLLOW_24_in_keycodeMaterial227 = frozenset([1])
+ FOLLOW_25_in_line_include239 = frozenset([17])
+ FOLLOW_DQSTRING_in_line_include241 = frozenset([1])
+ FOLLOW_26_in_line_minmax261 = frozenset([27])
+ FOLLOW_27_in_line_minmax263 = frozenset([18])
+ FOLLOW_NAME_in_line_minmax265 = frozenset([1])
+ FOLLOW_28_in_line_minmax278 = frozenset([27])
+ FOLLOW_27_in_line_minmax280 = frozenset([18])
+ FOLLOW_NAME_in_line_minmax282 = frozenset([1])
+ FOLLOW_29_in_line_alias301 = frozenset([30])
+ FOLLOW_30_in_line_alias303 = frozenset([18])
+ FOLLOW_NAME_in_line_alias307 = frozenset([31])
+ FOLLOW_31_in_line_alias309 = frozenset([27])
+ FOLLOW_27_in_line_alias311 = frozenset([30])
+ FOLLOW_30_in_line_alias313 = frozenset([18])
+ FOLLOW_NAME_in_line_alias317 = frozenset([31])
+ FOLLOW_31_in_line_alias319 = frozenset([1])
+ FOLLOW_30_in_line_keycode341 = frozenset([18])
+ FOLLOW_NAME_in_line_keycode345 = frozenset([31])
+ FOLLOW_31_in_line_keycode347 = frozenset([27])
+ FOLLOW_27_in_line_keycode349 = frozenset([18])
+ FOLLOW_NAME_in_line_keycode353 = frozenset([1])
+ FOLLOW_32_in_line_indicator374 = frozenset([18])
+ FOLLOW_NAME_in_line_indicator376 = frozenset([27])
+ FOLLOW_27_in_line_indicator378 = frozenset([17])
+ FOLLOW_DQSTRING_in_line_indicator380 = frozenset([1])
+
+
+
+def main(argv, stdin=sys.stdin, stdout=sys.stdout, stderr=sys.stderr):
+ from antlr3.main import ParserMain
+ main = ParserMain("KeycodesLexer", KeycodesParser)
+ main.stdin = stdin
+ main.stdout = stdout
+ main.stderr = stderr
+ main.execute(argv)
+
+
+if __name__ == '__main__':
+ main(sys.argv)
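
And a minimal end-to-end sketch wiring the lexer, this parser, and the tree walker from the next file together. The helper names are illustrative assumptions; ANTLRStringStream, CommonTokenStream, and CommonTreeNodeStream are the standard antlr3 Python runtime drivers that the generated main() entry points also rely on, and keycodedoc is the entry rule visible in both the parser and the walker.

# Illustrative sketch only; assumes the antlr3 (Python 2) runtime and keycodes text on stdin.
import sys
import antlr3
from antlr3.tree import CommonTreeNodeStream
from KeycodesLexer import KeycodesLexer
from KeycodesParser import KeycodesParser
from KeycodesWalker import KeycodesWalker  # generated tree walker, added below in this commit

def build_ast(text):
    # keycodedoc() is the entry rule; its return scope carries the rewritten
    # ^(KEYCODEDOC (KEYCODELIST ...)+) AST in .tree.
    lexer = KeycodesLexer(antlr3.ANTLRStringStream(text))
    tokens = antlr3.CommonTokenStream(lexer)
    parser = KeycodesParser(tokens)
    return parser.keycodedoc().tree, tokens

def walk_ast(tree, tokens):
    nodes = CommonTreeNodeStream(tree)
    nodes.setTokenStream(tokens)  # lets the walker report errors against the source tokens
    KeycodesWalker(nodes).keycodedoc()

if __name__ == '__main__':
    tree, tokens = build_ast(sys.stdin.read())
    print(tree.toStringTree())
    walk_ast(tree, tokens)
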
diff --git a/KeycodesWalker.py b/KeycodesWalker.py
@@ -0,0 +1,474 @@
+# $ANTLR 3.1.2 KeycodesWalker.g 2019-08-13 08:28:51
+
+import sys
+from antlr3 import *
+from antlr3.tree import *
+from antlr3.compat import set, frozenset
+
+
+# for convenience in actions
+HIDDEN = BaseRecognizer.HIDDEN
+
+# token types
+KEYCODEDOC=4
+KEYCODELIST=5
+ALIAS=14
+KEYCODEMATERIAL=10
+INDICATOR=15
+DQSTRING=17
+COMMENT=20
+KEYCODELISTTYPE=6
+MINIMUM=12
+KEYCODE=16
+INCLUDE=11
+WS=19
+EOF=-1
+T__30=30
+KEYCODELISTNAME=9
+T__31=31
+T__32=32
+KEYCODELISTOPTS=8
+MAXIMUM=13
+NAME=18
+KEYCODELISTOPTIONS=7
+LINE_COMMENT=21
+T__26=26
+T__27=27
+T__28=28
+T__29=29
+T__22=22
+T__23=23
+T__24=24
+T__25=25
+
+# token names
+tokenNames = [
+ "<invalid>", "<EOR>", "<DOWN>", "<UP>",
+ "KEYCODEDOC", "KEYCODELIST", "KEYCODELISTTYPE", "KEYCODELISTOPTIONS",
+ "KEYCODELISTOPTS", "KEYCODELISTNAME", "KEYCODEMATERIAL", "INCLUDE",
+ "MINIMUM", "MAXIMUM", "ALIAS", "INDICATOR", "KEYCODE", "DQSTRING", "NAME",
+ "WS", "COMMENT", "LINE_COMMENT", "'{'", "'}'", "';'", "'include'", "'minimum'",
+ "'='", "'maximum'", "'alias'", "'<'", "'>'", "'indicator'"
+]
+
+
+
+
+class KeycodesWalker(TreeParser):
+ grammarFileName = "KeycodesWalker.g"
+ antlr_version = version_str_to_tuple("3.1.2")
+ antlr_version_str = "3.1.2"
+ tokenNames = tokenNames
+
+ def __init__(self, input, state=None):
+ if state is None:
+ state = RecognizerSharedState()
+
+ TreeParser.__init__(self, input, state)
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ # $ANTLR start "keycodedoc"
+ # KeycodesWalker.g:15:1: keycodedoc : ^( KEYCODEDOC ( keycodelist )+ ) ;
+ def keycodedoc(self, ):
+
+ try:
+ try:
+ # KeycodesWalker.g:16:2: ( ^( KEYCODEDOC ( keycodelist )+ ) )
+ # KeycodesWalker.g:16:4: ^( KEYCODEDOC ( keycodelist )+ )
+ pass
+ self.match(self.input, KEYCODEDOC, self.FOLLOW_KEYCODEDOC_in_keycodedoc72)
+
+ self.match(self.input, DOWN, None)
+ # KeycodesWalker.g:16:17: ( keycodelist )+
+ cnt1 = 0
+ while True: #loop1
+ alt1 = 2
+ LA1_0 = self.input.LA(1)
+
+ if (LA1_0 == KEYCODELIST) :
+ alt1 = 1
+
+
+ if alt1 == 1:
+ # KeycodesWalker.g:16:17: keycodelist
+ pass
+ self._state.following.append(self.FOLLOW_keycodelist_in_keycodedoc74)
+ self.keycodelist()
+
+ self._state.following.pop()
+
+
+ else:
+ if cnt1 >= 1:
+ break #loop1
+
+ eee = EarlyExitException(1, self.input)
+ raise eee
+
+ cnt1 += 1
+
+
+
+ self.match(self.input, UP, None)
+
+
+
+
+ except RecognitionException, re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+
+ pass
+
+ return
+
+ # $ANTLR end "keycodedoc"
+
+
+ # $ANTLR start "keycodelist"
+ # KeycodesWalker.g:19:1: keycodelist : ^( KEYCODELIST keycodelisttype ^( KEYCODEMATERIAL ( keycodeMaterial )+ ) ) ;
+ def keycodelist(self, ):
+
+ try:
+ try:
+ # KeycodesWalker.g:20:2: ( ^( KEYCODELIST keycodelisttype ^( KEYCODEMATERIAL ( keycodeMaterial )+ ) ) )
+ # KeycodesWalker.g:20:4: ^( KEYCODELIST keycodelisttype ^( KEYCODEMATERIAL ( keycodeMaterial )+ ) )
+ pass
+ self.match(self.input, KEYCODELIST, self.FOLLOW_KEYCODELIST_in_keycodelist90)
+
+ self.match(self.input, DOWN, None)
+ self._state.following.append(self.FOLLOW_keycodelisttype_in_keycodelist92)
+ self.keycodelisttype()
+
+ self._state.following.pop()
+ self.match(self.input, KEYCODEMATERIAL, self.FOLLOW_KEYCODEMATERIAL_in_keycodelist95)
+
+ self.match(self.input, DOWN, None)
+ # KeycodesWalker.g:20:52: ( keycodeMaterial )+
+ cnt2 = 0
+ while True: #loop2
+ alt2 = 2
+ LA2_0 = self.input.LA(1)
+
+ if ((INCLUDE <= LA2_0 <= KEYCODE)) :
+ alt2 = 1
+
+
+ if alt2 == 1:
+ # KeycodesWalker.g:20:52: keycodeMaterial
+ pass
+ self._state.following.append(self.FOLLOW_keycodeMaterial_in_keycodelist97)
+ self.keycodeMaterial()
+
+ self._state.following.pop()
+
+
+ else:
+ if cnt2 >= 1:
+ break #loop2
+
+ eee = EarlyExitException(2, self.input)
+ raise eee
+
+ cnt2 += 1
+
+
+
+ self.match(self.input, UP, None)
+
+ self.match(self.input, UP, None)
+
+
+
+
+ except RecognitionException, re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+
+ pass
+
+ return
+
+ # $ANTLR end "keycodelist"
+
+
+ # $ANTLR start "keycodelisttype"
+ # KeycodesWalker.g:23:1: keycodelisttype : ^( KEYCODELISTTYPE ^( KEYCODELISTOPTIONS ( KEYCODELISTOPTS )+ ) ^( KEYCODELISTNAME DQSTRING ) ) ;
+ def keycodelisttype(self, ):
+
+ try:
+ try:
+ # KeycodesWalker.g:24:2: ( ^( KEYCODELISTTYPE ^( KEYCODELISTOPTIONS ( KEYCODELISTOPTS )+ ) ^( KEYCODELISTNAME DQSTRING ) ) )
+ # KeycodesWalker.g:24:4: ^( KEYCODELISTTYPE ^( KEYCODELISTOPTIONS ( KEYCODELISTOPTS )+ ) ^( KEYCODELISTNAME DQSTRING ) )
+ pass
+ self.match(self.input, KEYCODELISTTYPE, self.FOLLOW_KEYCODELISTTYPE_in_keycodelisttype113)
+
+ self.match(self.input, DOWN, None)
+ self.match(self.input, KEYCODELISTOPTIONS, self.FOLLOW_KEYCODELISTOPTIONS_in_keycodelisttype116)
+
+ self.match(self.input, DOWN, None)
+ # KeycodesWalker.g:24:43: ( KEYCODELISTOPTS )+
+ cnt3 = 0
+ while True: #loop3
+ alt3 = 2
+ LA3_0 = self.input.LA(1)
+
+ if (LA3_0 == KEYCODELISTOPTS) :
+ alt3 = 1
+
+
+ if alt3 == 1:
+ # KeycodesWalker.g:24:43: KEYCODELISTOPTS
+ pass
+ self.match(self.input, KEYCODELISTOPTS, self.FOLLOW_KEYCODELISTOPTS_in_keycodelisttype118)
+
+
+ else:
+ if cnt3 >= 1:
+ break #loop3
+
+ eee = EarlyExitException(3, self.input)
+ raise eee
+
+ cnt3 += 1
+
+
+
+ self.match(self.input, UP, None)
+ self.match(self.input, KEYCODELISTNAME, self.FOLLOW_KEYCODELISTNAME_in_keycodelisttype123)
+
+ self.match(self.input, DOWN, None)
+ self.match(self.input, DQSTRING, self.FOLLOW_DQSTRING_in_keycodelisttype125)
+
+ self.match(self.input, UP, None)
+
+ self.match(self.input, UP, None)
+
+
+
+
+ except RecognitionException, re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+
+ pass
+
+ return
+
+ # $ANTLR end "keycodelisttype"
+
+
+ # $ANTLR start "keycodeMaterial"
+ # KeycodesWalker.g:27:1: keycodeMaterial : ( ^( INCLUDE DQSTRING ) | ^( MINIMUM NAME ) | ^( MAXIMUM NAME ) | ^( ALIAS ( NAME )+ ) | ^( KEYCODE ( NAME )+ ) | ^( INDICATOR NAME DQSTRING ) );
+ def keycodeMaterial(self, ):
+
+ try:
+ try:
+ # KeycodesWalker.g:28:2: ( ^( INCLUDE DQSTRING ) | ^( MINIMUM NAME ) | ^( MAXIMUM NAME ) | ^( ALIAS ( NAME )+ ) | ^( KEYCODE ( NAME )+ ) | ^( INDICATOR NAME DQSTRING ) )
+ alt6 = 6
+ LA6 = self.input.LA(1)
+ if LA6 == INCLUDE:
+ alt6 = 1
+ elif LA6 == MINIMUM:
+ alt6 = 2
+ elif LA6 == MAXIMUM:
+ alt6 = 3
+ elif LA6 == ALIAS:
+ alt6 = 4
+ elif LA6 == KEYCODE:
+ alt6 = 5
+ elif LA6 == INDICATOR:
+ alt6 = 6
+ else:
+ nvae = NoViableAltException("", 6, 0, self.input)
+
+ raise nvae
+
+ if alt6 == 1:
+ # KeycodesWalker.g:28:4: ^( INCLUDE DQSTRING )
+ pass
+ self.match(self.input, INCLUDE, self.FOLLOW_INCLUDE_in_keycodeMaterial140)
+
+ self.match(self.input, DOWN, None)
+ self.match(self.input, DQSTRING, self.FOLLOW_DQSTRING_in_keycodeMaterial142)
+
+ self.match(self.input, UP, None)
+
+
+ elif alt6 == 2:
+ # KeycodesWalker.g:29:4: ^( MINIMUM NAME )
+ pass
+ self.match(self.input, MINIMUM, self.FOLLOW_MINIMUM_in_keycodeMaterial149)
+
+ self.match(self.input, DOWN, None)
+ self.match(self.input, NAME, self.FOLLOW_NAME_in_keycodeMaterial151)
+
+ self.match(self.input, UP, None)
+
+
+ elif alt6 == 3:
+ # KeycodesWalker.g:30:4: ^( MAXIMUM NAME )
+ pass
+ self.match(self.input, MAXIMUM, self.FOLLOW_MAXIMUM_in_keycodeMaterial158)
+
+ self.match(self.input, DOWN, None)
+ self.match(self.input, NAME, self.FOLLOW_NAME_in_keycodeMaterial160)
+
+ self.match(self.input, UP, None)
+
+
+ elif alt6 == 4:
+ # KeycodesWalker.g:31:4: ^( ALIAS ( NAME )+ )
+ pass
+ self.match(self.input, ALIAS, self.FOLLOW_ALIAS_in_keycodeMaterial167)
+
+ self.match(self.input, DOWN, None)
+ # KeycodesWalker.g:31:12: ( NAME )+
+ cnt4 = 0
+ while True: #loop4
+ alt4 = 2
+ LA4_0 = self.input.LA(1)
+
+ if (LA4_0 == NAME) :
+ alt4 = 1
+
+
+ if alt4 == 1:
+ # KeycodesWalker.g:31:12: NAME
+ pass
+ self.match(self.input, NAME, self.FOLLOW_NAME_in_keycodeMaterial169)
+
+
+ else:
+ if cnt4 >= 1:
+ break #loop4
+
+ eee = EarlyExitException(4, self.input)
+ raise eee
+
+ cnt4 += 1
+
+
+
+ self.match(self.input, UP, None)
+
+
+ elif alt6 == 5:
+ # KeycodesWalker.g:32:4: ^( KEYCODE ( NAME )+ )
+ pass
+ self.match(self.input, KEYCODE, self.FOLLOW_KEYCODE_in_keycodeMaterial177)
+
+ self.match(self.input, DOWN, None)
+ # KeycodesWalker.g:32:14: ( NAME )+
+ cnt5 = 0
+ while True: #loop5
+ alt5 = 2
+ LA5_0 = self.input.LA(1)
+
+ if (LA5_0 == NAME) :
+ alt5 = 1
+
+
+ if alt5 == 1:
+ # KeycodesWalker.g:32:14: NAME
+ pass
+ self.match(self.input, NAME, self.FOLLOW_NAME_in_keycodeMaterial179)
+
+
+ else:
+ if cnt5 >= 1:
+ break #loop5
+
+ eee = EarlyExitException(5, self.input)
+ raise eee
+
+ cnt5 += 1
+
+
+
+ self.match(self.input, UP, None)
+
+
+ elif alt6 == 6:
+ # KeycodesWalker.g:33:4: ^( INDICATOR NAME DQSTRING )
+ pass
+ self.match(self.input, INDICATOR, self.FOLLOW_INDICATOR_in_keycodeMaterial187)
+
+ self.match(self.input, DOWN, None)
+ self.match(self.input, NAME, self.FOLLOW_NAME_in_keycodeMaterial189)
+ self.match(self.input, DQSTRING, self.FOLLOW_DQSTRING_in_keycodeMaterial191)
+
+ self.match(self.input, UP, None)
+
+
+
+ except RecognitionException, re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+
+ pass
+
+ return
+
+ # $ANTLR end "keycodeMaterial"
+
+
+ # Delegated rules
+
+
+
+
+ FOLLOW_KEYCODEDOC_in_keycodedoc72 = frozenset([2])
+ FOLLOW_keycodelist_in_keycodedoc74 = frozenset([3, 5])
+ FOLLOW_KEYCODELIST_in_keycodelist90 = frozenset([2])
+ FOLLOW_keycodelisttype_in_keycodelist92 = frozenset([10])
+ FOLLOW_KEYCODEMATERIAL_in_keycodelist95 = frozenset([2])
+ FOLLOW_keycodeMaterial_in_keycodelist97 = frozenset([3, 11, 12, 13, 14, 15, 16])
+ FOLLOW_KEYCODELISTTYPE_in_keycodelisttype113 = frozenset([2])
+ FOLLOW_KEYCODELISTOPTIONS_in_keycodelisttype116 = frozenset([2])
+ FOLLOW_KEYCODELISTOPTS_in_keycodelisttype118 = frozenset([3, 8])
+ FOLLOW_KEYCODELISTNAME_in_keycodelisttype123 = frozenset([2])
+ FOLLOW_DQSTRING_in_keycodelisttype125 = frozenset([3])
+ FOLLOW_INCLUDE_in_keycodeMaterial140 = frozenset([2])
+ FOLLOW_DQSTRING_in_keycodeMaterial142 = frozenset([3])
+ FOLLOW_MINIMUM_in_keycodeMaterial149 = frozenset([2])
+ FOLLOW_NAME_in_keycodeMaterial151 = frozenset([3])
+ FOLLOW_MAXIMUM_in_keycodeMaterial158 = frozenset([2])
+ FOLLOW_NAME_in_keycodeMaterial160 = frozenset([3])
+ FOLLOW_ALIAS_in_keycodeMaterial167 = frozenset([2])
+ FOLLOW_NAME_in_keycodeMaterial169 = frozenset([3, 18])
+ FOLLOW_KEYCODE_in_keycodeMaterial177 = frozenset([2])
+ FOLLOW_NAME_in_keycodeMaterial179 = frozenset([3, 18])
+ FOLLOW_INDICATOR_in_keycodeMaterial187 = frozenset([2])
+ FOLLOW_NAME_in_keycodeMaterial189 = frozenset([17])
+ FOLLOW_DQSTRING_in_keycodeMaterial191 = frozenset([3])
+
+
+
+def main(argv, stdin=sys.stdin, stdout=sys.stdout, stderr=sys.stderr):
+ from antlr3.main import WalkerMain
+ main = WalkerMain(KeycodesWalker)
+ main.stdin = stdin
+ main.stdout = stdout
+ main.stderr = stderr
+ main.execute(argv)
+
+
+if __name__ == '__main__':
+ main(sys.argv)
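For reference, a minimal sketch of how this generated tree walker is typically driven with the antlr3 Python runtime. It assumes a companion KeycodesParser generated from the same grammar (with a keycodedoc entry rule, matching the walker rule above); the input file name 'evdev' is only an example.

import antlr3
import antlr3.tree
from KeycodesLexer import KeycodesLexer
from KeycodesParser import KeycodesParser   # assumed companion parser from the same grammar
from KeycodesWalker import KeycodesWalker

# Lex and parse an xkb keycodes file into an AST, then walk the tree.
char_stream = antlr3.ANTLRFileStream('evdev')          # example input file
lexer = KeycodesLexer(char_stream)
tokens = antlr3.CommonTokenStream(lexer)
parser = KeycodesParser(tokens)
result = parser.keycodedoc()                           # assumed parser entry rule
nodes = antlr3.tree.CommonTreeNodeStream(result.tree)
nodes.setTokenStream(tokens)
walker = KeycodesWalker(nodes)
walker.keycodedoc()                                    # walk the AST with the rules above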
diff --git a/KeycodesWalker.tokens b/KeycodesWalker.tokens
@@ -0,0 +1,40 @@
+KEYCODEDOC=4
+KEYCODELIST=5
+ALIAS=14
+KEYCODEMATERIAL=10
+INDICATOR=15
+DQSTRING=17
+COMMENT=20
+KEYCODELISTTYPE=6
+MINIMUM=12
+KEYCODE=16
+INCLUDE=11
+WS=19
+T__30=30
+KEYCODELISTNAME=9
+T__31=31
+T__32=32
+KEYCODELISTOPTS=8
+MAXIMUM=13
+NAME=18
+KEYCODELISTOPTIONS=7
+LINE_COMMENT=21
+T__26=26
+T__27=27
+T__28=28
+T__29=29
+T__22=22
+T__23=23
+T__24=24
+T__25=25
+'>'=31
+'indicator'=32
+'}'=23
+'='=27
+'alias'=29
+'<'=30
+'{'=22
+';'=24
+'maximum'=28
+'include'=25
+'minimum'=26
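The .tokens tables keep token type numbers consistent between the generated lexer, parser and tree walker. A minimal sketch of reading such a table back into a dict, assuming only the NAME=ID / 'literal'=ID format shown above:

def load_tokens(path):
    # Map each token name (or quoted literal) to its numeric type.
    table = {}
    with open(path) as fh:
        for line in fh:
            line = line.strip()
            if not line:
                continue
            name, _, value = line.rpartition('=')
            table[name] = int(value)
    return table

# e.g. load_tokens('KeycodesWalker.tokens')['KEYCODE'] == 16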
diff --git a/XKBGrammar.tokens b/XKBGrammar.tokens
@@ -0,0 +1,67 @@
+ELEM_KEYSYMGROUP=22
+T__50=50
+TOKEN_NAME=6
+VALUE=20
+KEYELEMENTS=25
+OVERLAY=27
+TOKEN_KEY_TYPE=5
+KEYCODEX=19
+KEYCODE=18
+T__51=51
+MAPMATERIAL=17
+NAME=30
+LINE_COMMENT=33
+TOKEN_SYMBOL=10
+TOKEN_INCLUDE=4
+ELEM_VIRTUALMODS=24
+TOKEN_KEY=7
+LAYOUT=12
+STATE=21
+DQSTRING=29
+COMMENT=32
+MAPTYPE=14
+T__37=37
+T__38=38
+T__39=39
+T__34=34
+TOKEN_TYPE=8
+T__35=35
+T__36=36
+SYMBOLS=13
+WS=31
+TOKEN_VIRTUAL_MODIFIERS=11
+MAPOPTIONS=16
+MAPOPTS=28
+ELEM_KEYSYMS=23
+TOKEN_MODIFIER_MAP=9
+MAPNAME=15
+OVERRIDE=26
+T__48=48
+T__49=49
+T__44=44
+T__45=45
+T__46=46
+T__47=47
+T__40=40
+T__41=41
+T__42=42
+T__43=43
+','=46
+'include'=37
+'override'=26
+'virtual_modifiers'=48
+'symbols'=50
+'virtualMods'=51
+'type'=49
+'>'=45
+'}'=35
+']'=40
+'='=41
+'<'=44
+'{'=34
+';'=36
+'['=39
+'modifier_map'=47
+'key'=43
+'key.type'=42
+'name'=38
diff --git a/XKBGrammarLexer.py b/XKBGrammarLexer.py
@@ -0,0 +1,1599 @@
+# $ANTLR 3.1.2 XKBGrammar.g 2019-08-13 08:28:51
+
+import sys
+from antlr3 import *
+from antlr3.compat import set, frozenset
+
+
+# for convenience in actions
+HIDDEN = BaseRecognizer.HIDDEN
+
+# token types
+ELEM_KEYSYMGROUP=22
+T__50=50
+TOKEN_NAME=6
+VALUE=20
+KEYELEMENTS=25
+OVERLAY=27
+TOKEN_KEY_TYPE=5
+KEYCODEX=19
+KEYCODE=18
+T__51=51
+MAPMATERIAL=17
+NAME=30
+LINE_COMMENT=33
+TOKEN_SYMBOL=10
+TOKEN_INCLUDE=4
+ELEM_VIRTUALMODS=24
+TOKEN_KEY=7
+LAYOUT=12
+STATE=21
+DQSTRING=29
+COMMENT=32
+MAPTYPE=14
+T__37=37
+T__38=38
+T__39=39
+T__34=34
+TOKEN_TYPE=8
+T__35=35
+T__36=36
+SYMBOLS=13
+WS=31
+EOF=-1
+TOKEN_VIRTUAL_MODIFIERS=11
+MAPOPTIONS=16
+MAPOPTS=28
+ELEM_KEYSYMS=23
+TOKEN_MODIFIER_MAP=9
+MAPNAME=15
+OVERRIDE=26
+T__48=48
+T__49=49
+T__44=44
+T__45=45
+T__46=46
+T__47=47
+T__40=40
+T__41=41
+T__42=42
+T__43=43
+
+
+class XKBGrammarLexer(Lexer):
+
+ grammarFileName = "XKBGrammar.g"
+ antlr_version = version_str_to_tuple("3.1.2")
+ antlr_version_str = "3.1.2"
+
+ def __init__(self, input=None, state=None):
+ if state is None:
+ state = RecognizerSharedState()
+ Lexer.__init__(self, input, state)
+
+ self.dfa1 = self.DFA1(
+ self, 1,
+ eot = self.DFA1_eot,
+ eof = self.DFA1_eof,
+ min = self.DFA1_min,
+ max = self.DFA1_max,
+ accept = self.DFA1_accept,
+ special = self.DFA1_special,
+ transition = self.DFA1_transition
+ )
+
+ self.dfa2 = self.DFA2(
+ self, 2,
+ eot = self.DFA2_eot,
+ eof = self.DFA2_eof,
+ min = self.DFA2_min,
+ max = self.DFA2_max,
+ accept = self.DFA2_accept,
+ special = self.DFA2_special,
+ transition = self.DFA2_transition
+ )
+
+ self.dfa9 = self.DFA9(
+ self, 9,
+ eot = self.DFA9_eot,
+ eof = self.DFA9_eof,
+ min = self.DFA9_min,
+ max = self.DFA9_max,
+ accept = self.DFA9_accept,
+ special = self.DFA9_special,
+ transition = self.DFA9_transition
+ )
+
+
+
+
+
+
+ # $ANTLR start "T__34"
+ def mT__34(self, ):
+
+ try:
+ _type = T__34
+ _channel = DEFAULT_CHANNEL
+
+ # XKBGrammar.g:7:7: ( '{' )
+ # XKBGrammar.g:7:9: '{'
+ pass
+ self.match(123)
+
+
+
+ self._state.type = _type
+ self._state.channel = _channel
+
+ finally:
+
+ pass
+
+ # $ANTLR end "T__34"
+
+
+
+ # $ANTLR start "T__35"
+ def mT__35(self, ):
+
+ try:
+ _type = T__35
+ _channel = DEFAULT_CHANNEL
+
+ # XKBGrammar.g:8:7: ( '}' )
+ # XKBGrammar.g:8:9: '}'
+ pass
+ self.match(125)
+
+
+
+ self._state.type = _type
+ self._state.channel = _channel
+
+ finally:
+
+ pass
+
+ # $ANTLR end "T__35"
+
+
+
+ # $ANTLR start "T__36"
+ def mT__36(self, ):
+
+ try:
+ _type = T__36
+ _channel = DEFAULT_CHANNEL
+
+ # XKBGrammar.g:9:7: ( ';' )
+ # XKBGrammar.g:9:9: ';'
+ pass
+ self.match(59)
+
+
+
+ self._state.type = _type
+ self._state.channel = _channel
+
+ finally:
+
+ pass
+
+ # $ANTLR end "T__36"
+
+
+
+ # $ANTLR start "T__37"
+ def mT__37(self, ):
+
+ try:
+ _type = T__37
+ _channel = DEFAULT_CHANNEL
+
+ # XKBGrammar.g:10:7: ( 'include' )
+ # XKBGrammar.g:10:9: 'include'
+ pass
+ self.match("include")
+
+
+
+ self._state.type = _type
+ self._state.channel = _channel
+
+ finally:
+
+ pass
+
+ # $ANTLR end "T__37"
+
+
+
+ # $ANTLR start "T__38"
+ def mT__38(self, ):
+
+ try:
+ _type = T__38
+ _channel = DEFAULT_CHANNEL
+
+ # XKBGrammar.g:11:7: ( 'name' )
+ # XKBGrammar.g:11:9: 'name'
+ pass
+ self.match("name")
+
+
+
+ self._state.type = _type
+ self._state.channel = _channel
+
+ finally:
+
+ pass
+
+ # $ANTLR end "T__38"
+
+
+
+ # $ANTLR start "T__39"
+ def mT__39(self, ):
+
+ try:
+ _type = T__39
+ _channel = DEFAULT_CHANNEL
+
+ # XKBGrammar.g:12:7: ( '[' )
+ # XKBGrammar.g:12:9: '['
+ pass
+ self.match(91)
+
+
+
+ self._state.type = _type
+ self._state.channel = _channel
+
+ finally:
+
+ pass
+
+ # $ANTLR end "T__39"
+
+
+
+ # $ANTLR start "T__40"
+ def mT__40(self, ):
+
+ try:
+ _type = T__40
+ _channel = DEFAULT_CHANNEL
+
+ # XKBGrammar.g:13:7: ( ']' )
+ # XKBGrammar.g:13:9: ']'
+ pass
+ self.match(93)
+
+
+
+ self._state.type = _type
+ self._state.channel = _channel
+
+ finally:
+
+ pass
+
+ # $ANTLR end "T__40"
+
+
+
+ # $ANTLR start "T__41"
+ def mT__41(self, ):
+
+ try:
+ _type = T__41
+ _channel = DEFAULT_CHANNEL
+
+ # XKBGrammar.g:14:7: ( '=' )
+ # XKBGrammar.g:14:9: '='
+ pass
+ self.match(61)
+
+
+
+ self._state.type = _type
+ self._state.channel = _channel
+
+ finally:
+
+ pass
+
+ # $ANTLR end "T__41"
+
+
+
+ # $ANTLR start "T__42"
+ def mT__42(self, ):
+
+ try:
+ _type = T__42
+ _channel = DEFAULT_CHANNEL
+
+ # XKBGrammar.g:15:7: ( 'key.type' )
+ # XKBGrammar.g:15:9: 'key.type'
+ pass
+ self.match("key.type")
+
+
+
+ self._state.type = _type
+ self._state.channel = _channel
+
+ finally:
+
+ pass
+
+ # $ANTLR end "T__42"
+
+
+
+ # $ANTLR start "T__43"
+ def mT__43(self, ):
+
+ try:
+ _type = T__43
+ _channel = DEFAULT_CHANNEL
+
+ # XKBGrammar.g:16:7: ( 'key' )
+ # XKBGrammar.g:16:9: 'key'
+ pass
+ self.match("key")
+
+
+
+ self._state.type = _type
+ self._state.channel = _channel
+
+ finally:
+
+ pass
+
+ # $ANTLR end "T__43"
+
+
+
+ # $ANTLR start "T__44"
+ def mT__44(self, ):
+
+ try:
+ _type = T__44
+ _channel = DEFAULT_CHANNEL
+
+ # XKBGrammar.g:17:7: ( '<' )
+ # XKBGrammar.g:17:9: '<'
+ pass
+ self.match(60)
+
+
+
+ self._state.type = _type
+ self._state.channel = _channel
+
+ finally:
+
+ pass
+
+ # $ANTLR end "T__44"
+
+
+
+ # $ANTLR start "T__45"
+ def mT__45(self, ):
+
+ try:
+ _type = T__45
+ _channel = DEFAULT_CHANNEL
+
+ # XKBGrammar.g:18:7: ( '>' )
+ # XKBGrammar.g:18:9: '>'
+ pass
+ self.match(62)
+
+
+
+ self._state.type = _type
+ self._state.channel = _channel
+
+ finally:
+
+ pass
+
+ # $ANTLR end "T__45"
+
+
+
+ # $ANTLR start "T__46"
+ def mT__46(self, ):
+
+ try:
+ _type = T__46
+ _channel = DEFAULT_CHANNEL
+
+ # XKBGrammar.g:19:7: ( ',' )
+ # XKBGrammar.g:19:9: ','
+ pass
+ self.match(44)
+
+
+
+ self._state.type = _type
+ self._state.channel = _channel
+
+ finally:
+
+ pass
+
+ # $ANTLR end "T__46"
+
+
+
+ # $ANTLR start "T__47"
+ def mT__47(self, ):
+
+ try:
+ _type = T__47
+ _channel = DEFAULT_CHANNEL
+
+ # XKBGrammar.g:20:7: ( 'modifier_map' )
+ # XKBGrammar.g:20:9: 'modifier_map'
+ pass
+ self.match("modifier_map")
+
+
+
+ self._state.type = _type
+ self._state.channel = _channel
+
+ finally:
+
+ pass
+
+ # $ANTLR end "T__47"
+
+
+
+ # $ANTLR start "T__48"
+ def mT__48(self, ):
+
+ try:
+ _type = T__48
+ _channel = DEFAULT_CHANNEL
+
+ # XKBGrammar.g:21:7: ( 'virtual_modifiers' )
+ # XKBGrammar.g:21:9: 'virtual_modifiers'
+ pass
+ self.match("virtual_modifiers")
+
+
+
+ self._state.type = _type
+ self._state.channel = _channel
+
+ finally:
+
+ pass
+
+ # $ANTLR end "T__48"
+
+
+
+ # $ANTLR start "T__49"
+ def mT__49(self, ):
+
+ try:
+ _type = T__49
+ _channel = DEFAULT_CHANNEL
+
+ # XKBGrammar.g:22:7: ( 'type' )
+ # XKBGrammar.g:22:9: 'type'
+ pass
+ self.match("type")
+
+
+
+ self._state.type = _type
+ self._state.channel = _channel
+
+ finally:
+
+ pass
+
+ # $ANTLR end "T__49"
+
+
+
+ # $ANTLR start "T__50"
+ def mT__50(self, ):
+
+ try:
+ _type = T__50
+ _channel = DEFAULT_CHANNEL
+
+ # XKBGrammar.g:23:7: ( 'symbols' )
+ # XKBGrammar.g:23:9: 'symbols'
+ pass
+ self.match("symbols")
+
+
+
+ self._state.type = _type
+ self._state.channel = _channel
+
+ finally:
+
+ pass
+
+ # $ANTLR end "T__50"
+
+
+
+ # $ANTLR start "T__51"
+ def mT__51(self, ):
+
+ try:
+ _type = T__51
+ _channel = DEFAULT_CHANNEL
+
+ # XKBGrammar.g:24:7: ( 'virtualMods' )
+ # XKBGrammar.g:24:9: 'virtualMods'
+ pass
+ self.match("virtualMods")
+
+
+
+ self._state.type = _type
+ self._state.channel = _channel
+
+ finally:
+
+ pass
+
+ # $ANTLR end "T__51"
+
+
+
+ # $ANTLR start "MAPOPTS"
+ def mMAPOPTS(self, ):
+
+ try:
+ _type = MAPOPTS
+ _channel = DEFAULT_CHANNEL
+
+ # XKBGrammar.g:151:3: ( 'default' | 'hidden' | 'partial' | 'alphanumeric_keys' | 'keypad_keys' | 'function_keys' | 'modifier_keys' | 'alternate_group' | 'xkb_symbols' )
+ alt1 = 9
+ alt1 = self.dfa1.predict(self.input)
+ if alt1 == 1:
+ # XKBGrammar.g:151:5: 'default'
+ pass
+ self.match("default")
+
+
+ elif alt1 == 2:
+ # XKBGrammar.g:152:5: 'hidden'
+ pass
+ self.match("hidden")
+
+
+ elif alt1 == 3:
+ # XKBGrammar.g:153:5: 'partial'
+ pass
+ self.match("partial")
+
+
+ elif alt1 == 4:
+ # XKBGrammar.g:154:5: 'alphanumeric_keys'
+ pass
+ self.match("alphanumeric_keys")
+
+
+ elif alt1 == 5:
+ # XKBGrammar.g:155:5: 'keypad_keys'
+ pass
+ self.match("keypad_keys")
+
+
+ elif alt1 == 6:
+ # XKBGrammar.g:156:5: 'function_keys'
+ pass
+ self.match("function_keys")
+
+
+ elif alt1 == 7:
+ # XKBGrammar.g:157:5: 'modifier_keys'
+ pass
+ self.match("modifier_keys")
+
+
+ elif alt1 == 8:
+ # XKBGrammar.g:158:5: 'alternate_group'
+ pass
+ self.match("alternate_group")
+
+
+ elif alt1 == 9:
+ # XKBGrammar.g:159:5: 'xkb_symbols'
+ pass
+ self.match("xkb_symbols")
+
+
+ self._state.type = _type
+ self._state.channel = _channel
+
+ finally:
+
+ pass
+
+ # $ANTLR end "MAPOPTS"
+
+
+
+ # $ANTLR start "STATE"
+ def mSTATE(self, ):
+
+ try:
+ _type = STATE
+ _channel = DEFAULT_CHANNEL
+
+ # XKBGrammar.g:163:3: ( 'Shift' | 'Control' | 'Lock' | 'Mod1' | 'Mod2' | 'Mod3' | 'Mod4' | 'Mod5' )
+ alt2 = 8
+ alt2 = self.dfa2.predict(self.input)
+ if alt2 == 1:
+ # XKBGrammar.g:163:5: 'Shift'
+ pass
+ self.match("Shift")
+
+
+ elif alt2 == 2:
+ # XKBGrammar.g:164:5: 'Control'
+ pass
+ self.match("Control")
+
+
+ elif alt2 == 3:
+ # XKBGrammar.g:165:5: 'Lock'
+ pass
+ self.match("Lock")
+
+
+ elif alt2 == 4:
+ # XKBGrammar.g:166:5: 'Mod1'
+ pass
+ self.match("Mod1")
+
+
+ elif alt2 == 5:
+ # XKBGrammar.g:167:5: 'Mod2'
+ pass
+ self.match("Mod2")
+
+
+ elif alt2 == 6:
+ # XKBGrammar.g:168:5: 'Mod3'
+ pass
+ self.match("Mod3")
+
+
+ elif alt2 == 7:
+ # XKBGrammar.g:169:5: 'Mod4'
+ pass
+ self.match("Mod4")
+
+
+ elif alt2 == 8:
+ # XKBGrammar.g:170:5: 'Mod5'
+ pass
+ self.match("Mod5")
+
+
+ self._state.type = _type
+ self._state.channel = _channel
+
+ finally:
+
+ pass
+
+ # $ANTLR end "STATE"
+
+
+
+ # $ANTLR start "OVERRIDE"
+ def mOVERRIDE(self, ):
+
+ try:
+ _type = OVERRIDE
+ _channel = DEFAULT_CHANNEL
+
+ # XKBGrammar.g:174:3: ( 'override' )
+ # XKBGrammar.g:174:5: 'override'
+ pass
+ self.match("override")
+
+
+
+ self._state.type = _type
+ self._state.channel = _channel
+
+ finally:
+
+ pass
+
+ # $ANTLR end "OVERRIDE"
+
+
+
+ # $ANTLR start "NAME"
+ def mNAME(self, ):
+
+ try:
+ _type = NAME
+ _channel = DEFAULT_CHANNEL
+
+ # XKBGrammar.g:178:3: ( ( 'a' .. 'z' | 'A' .. 'Z' | '_' | '0' .. '9' | '+' | '-' )* )
+ # XKBGrammar.g:178:5: ( 'a' .. 'z' | 'A' .. 'Z' | '_' | '0' .. '9' | '+' | '-' )*
+ pass
+ # XKBGrammar.g:178:5: ( 'a' .. 'z' | 'A' .. 'Z' | '_' | '0' .. '9' | '+' | '-' )*
+ while True: #loop3
+ alt3 = 2
+ LA3_0 = self.input.LA(1)
+
+ if (LA3_0 == 43 or LA3_0 == 45 or (48 <= LA3_0 <= 57) or (65 <= LA3_0 <= 90) or LA3_0 == 95 or (97 <= LA3_0 <= 122)) :
+ alt3 = 1
+
+
+ if alt3 == 1:
+ # XKBGrammar.g:
+ pass
+ if self.input.LA(1) == 43 or self.input.LA(1) == 45 or (48 <= self.input.LA(1) <= 57) or (65 <= self.input.LA(1) <= 90) or self.input.LA(1) == 95 or (97 <= self.input.LA(1) <= 122):
+ self.input.consume()
+ else:
+ mse = MismatchedSetException(None, self.input)
+ self.recover(mse)
+ raise mse
+
+
+
+ else:
+ break #loop3
+
+
+
+
+
+ self._state.type = _type
+ self._state.channel = _channel
+
+ finally:
+
+ pass
+
+ # $ANTLR end "NAME"
+
+
+
+ # $ANTLR start "WS"
+ def mWS(self, ):
+
+ try:
+ _type = WS
+ _channel = DEFAULT_CHANNEL
+
+ # XKBGrammar.g:182:3: ( ( ' ' | '\\r' | '\\t' | '\\u000C' | '\\n' ) )
+ # XKBGrammar.g:183:3: ( ' ' | '\\r' | '\\t' | '\\u000C' | '\\n' )
+ pass
+ if (9 <= self.input.LA(1) <= 10) or (12 <= self.input.LA(1) <= 13) or self.input.LA(1) == 32:
+ self.input.consume()
+ else:
+ mse = MismatchedSetException(None, self.input)
+ self.recover(mse)
+ raise mse
+
+ #action start
+ _channel=HIDDEN;
+ #action end
+
+
+
+ self._state.type = _type
+ self._state.channel = _channel
+
+ finally:
+
+ pass
+
+ # $ANTLR end "WS"
+
+
+
+ # $ANTLR start "COMMENT"
+ def mCOMMENT(self, ):
+
+ try:
+ _type = COMMENT
+ _channel = DEFAULT_CHANNEL
+
+ # XKBGrammar.g:188:7: ( '/*' ( . )* '*/' )
+ # XKBGrammar.g:189:3: '/*' ( . )* '*/'
+ pass
+ self.match("/*")
+ # XKBGrammar.g:189:8: ( . )*
+ while True: #loop4
+ alt4 = 2
+ LA4_0 = self.input.LA(1)
+
+ if (LA4_0 == 42) :
+ LA4_1 = self.input.LA(2)
+
+ if (LA4_1 == 47) :
+ alt4 = 2
+ elif ((0 <= LA4_1 <= 46) or (48 <= LA4_1 <= 65535)) :
+ alt4 = 1
+
+
+ elif ((0 <= LA4_0 <= 41) or (43 <= LA4_0 <= 65535)) :
+ alt4 = 1
+
+
+ if alt4 == 1:
+ # XKBGrammar.g:189:8: .
+ pass
+ self.matchAny()
+
+
+ else:
+ break #loop4
+
+
+ self.match("*/")
+ #action start
+ _channel=HIDDEN;
+ #action end
+
+
+
+ self._state.type = _type
+ self._state.channel = _channel
+
+ finally:
+
+ pass
+
+ # $ANTLR end "COMMENT"
+
+
+
+ # $ANTLR start "LINE_COMMENT"
+ def mLINE_COMMENT(self, ):
+
+ try:
+ _type = LINE_COMMENT
+ _channel = DEFAULT_CHANNEL
+
+ # XKBGrammar.g:193:7: ( ( '//' | '#' ) (~ ( '\\n' | '\\r' ) )* ( '\\r' )? '\\n' )
+ # XKBGrammar.g:194:3: ( '//' | '#' ) (~ ( '\\n' | '\\r' ) )* ( '\\r' )? '\\n'
+ pass
+ # XKBGrammar.g:194:3: ( '//' | '#' )
+ alt5 = 2
+ LA5_0 = self.input.LA(1)
+
+ if (LA5_0 == 47) :
+ alt5 = 1
+ elif (LA5_0 == 35) :
+ alt5 = 2
+ else:
+ nvae = NoViableAltException("", 5, 0, self.input)
+
+ raise nvae
+
+ if alt5 == 1:
+ # XKBGrammar.g:194:4: '//'
+ pass
+ self.match("//")
+
+
+ elif alt5 == 2:
+ # XKBGrammar.g:194:11: '#'
+ pass
+ self.match(35)
+
+
+
+ # XKBGrammar.g:194:17: (~ ( '\\n' | '\\r' ) )*
+ while True: #loop6
+ alt6 = 2
+ LA6_0 = self.input.LA(1)
+
+ if ((0 <= LA6_0 <= 9) or (11 <= LA6_0 <= 12) or (14 <= LA6_0 <= 65535)) :
+ alt6 = 1
+
+
+ if alt6 == 1:
+ # XKBGrammar.g:194:17: ~ ( '\\n' | '\\r' )
+ pass
+ if (0 <= self.input.LA(1) <= 9) or (11 <= self.input.LA(1) <= 12) or (14 <= self.input.LA(1) <= 65535):
+ self.input.consume()
+ else:
+ mse = MismatchedSetException(None, self.input)
+ self.recover(mse)
+ raise mse
+
+
+
+ else:
+ break #loop6
+
+
+ # XKBGrammar.g:194:33: ( '\\r' )?
+ alt7 = 2
+ LA7_0 = self.input.LA(1)
+
+ if (LA7_0 == 13) :
+ alt7 = 1
+ if alt7 == 1:
+ # XKBGrammar.g:194:33: '\\r'
+ pass
+ self.match(13)
+
+
+
+ self.match(10)
+ #action start
+ _channel=HIDDEN;
+ #action end
+
+
+
+ self._state.type = _type
+ self._state.channel = _channel
+
+ finally:
+
+ pass
+
+ # $ANTLR end "LINE_COMMENT"
+
+
+
+ # $ANTLR start "DQSTRING"
+ def mDQSTRING(self, ):
+
+ try:
+ _type = DQSTRING
+ _channel = DEFAULT_CHANNEL
+
+ # XKBGrammar.g:202:7: ( '\"' ( options {greedy=false; } : ~ ( '\"' ) )* '\"' )
+ # XKBGrammar.g:202:11: '\"' ( options {greedy=false; } : ~ ( '\"' ) )* '\"'
+ pass
+ self.match(34)
+ # XKBGrammar.g:202:15: ( options {greedy=false; } : ~ ( '\"' ) )*
+ while True: #loop8
+ alt8 = 2
+ LA8_0 = self.input.LA(1)
+
+ if ((0 <= LA8_0 <= 33) or (35 <= LA8_0 <= 65535)) :
+ alt8 = 1
+ elif (LA8_0 == 34) :
+ alt8 = 2
+
+
+ if alt8 == 1:
+ # XKBGrammar.g:202:40: ~ ( '\"' )
+ pass
+ if (0 <= self.input.LA(1) <= 33) or (35 <= self.input.LA(1) <= 65535):
+ self.input.consume()
+ else:
+ mse = MismatchedSetException(None, self.input)
+ self.recover(mse)
+ raise mse
+
+
+
+ else:
+ break #loop8
+
+
+ self.match(34)
+
+
+
+ self._state.type = _type
+ self._state.channel = _channel
+
+ finally:
+
+ pass
+
+ # $ANTLR end "DQSTRING"
+
+
+
+ def mTokens(self):
+ # XKBGrammar.g:1:8: ( T__34 | T__35 | T__36 | T__37 | T__38 | T__39 | T__40 | T__41 | T__42 | T__43 | T__44 | T__45 | T__46 | T__47 | T__48 | T__49 | T__50 | T__51 | MAPOPTS | STATE | OVERRIDE | NAME | WS | COMMENT | LINE_COMMENT | DQSTRING )
+ alt9 = 26
+ alt9 = self.dfa9.predict(self.input)
+ if alt9 == 1:
+ # XKBGrammar.g:1:10: T__34
+ pass
+ self.mT__34()
+
+
+ elif alt9 == 2:
+ # XKBGrammar.g:1:16: T__35
+ pass
+ self.mT__35()
+
+
+ elif alt9 == 3:
+ # XKBGrammar.g:1:22: T__36
+ pass
+ self.mT__36()
+
+
+ elif alt9 == 4:
+ # XKBGrammar.g:1:28: T__37
+ pass
+ self.mT__37()
+
+
+ elif alt9 == 5:
+ # XKBGrammar.g:1:34: T__38
+ pass
+ self.mT__38()
+
+
+ elif alt9 == 6:
+ # XKBGrammar.g:1:40: T__39
+ pass
+ self.mT__39()
+
+
+ elif alt9 == 7:
+ # XKBGrammar.g:1:46: T__40
+ pass
+ self.mT__40()
+
+
+ elif alt9 == 8:
+ # XKBGrammar.g:1:52: T__41
+ pass
+ self.mT__41()
+
+
+ elif alt9 == 9:
+ # XKBGrammar.g:1:58: T__42
+ pass
+ self.mT__42()
+
+
+ elif alt9 == 10:
+ # XKBGrammar.g:1:64: T__43
+ pass
+ self.mT__43()
+
+
+ elif alt9 == 11:
+ # XKBGrammar.g:1:70: T__44
+ pass
+ self.mT__44()
+
+
+ elif alt9 == 12:
+ # XKBGrammar.g:1:76: T__45
+ pass
+ self.mT__45()
+
+
+ elif alt9 == 13:
+ # XKBGrammar.g:1:82: T__46
+ pass
+ self.mT__46()
+
+
+ elif alt9 == 14:
+ # XKBGrammar.g:1:88: T__47
+ pass
+ self.mT__47()
+
+
+ elif alt9 == 15:
+ # XKBGrammar.g:1:94: T__48
+ pass
+ self.mT__48()
+
+
+ elif alt9 == 16:
+ # XKBGrammar.g:1:100: T__49
+ pass
+ self.mT__49()
+
+
+ elif alt9 == 17:
+ # XKBGrammar.g:1:106: T__50
+ pass
+ self.mT__50()
+
+
+ elif alt9 == 18:
+ # XKBGrammar.g:1:112: T__51
+ pass
+ self.mT__51()
+
+
+ elif alt9 == 19:
+ # XKBGrammar.g:1:118: MAPOPTS
+ pass
+ self.mMAPOPTS()
+
+
+ elif alt9 == 20:
+ # XKBGrammar.g:1:126: STATE
+ pass
+ self.mSTATE()
+
+
+ elif alt9 == 21:
+ # XKBGrammar.g:1:132: OVERRIDE
+ pass
+ self.mOVERRIDE()
+
+
+ elif alt9 == 22:
+ # XKBGrammar.g:1:141: NAME
+ pass
+ self.mNAME()
+
+
+ elif alt9 == 23:
+ # XKBGrammar.g:1:146: WS
+ pass
+ self.mWS()
+
+
+ elif alt9 == 24:
+ # XKBGrammar.g:1:149: COMMENT
+ pass
+ self.mCOMMENT()
+
+
+ elif alt9 == 25:
+ # XKBGrammar.g:1:157: LINE_COMMENT
+ pass
+ self.mLINE_COMMENT()
+
+
+ elif alt9 == 26:
+ # XKBGrammar.g:1:170: DQSTRING
+ pass
+ self.mDQSTRING()
+
+
+
+
+
+
+
+ # lookup tables for DFA #1
+
+ DFA1_eot = DFA.unpack(
+ u"\14\uffff"
+ )
+
+ DFA1_eof = DFA.unpack(
+ u"\14\uffff"
+ )
+
+ DFA1_min = DFA.unpack(
+ u"\1\141\3\uffff\1\154\4\uffff\1\160\2\uffff"
+ )
+
+ DFA1_max = DFA.unpack(
+ u"\1\170\3\uffff\1\154\4\uffff\1\164\2\uffff"
+ )
+
+ DFA1_accept = DFA.unpack(
+ u"\1\uffff\1\1\1\2\1\3\1\uffff\1\5\1\6\1\7\1\11\1\uffff\1\4\1\10"
+ )
+
+ DFA1_special = DFA.unpack(
+ u"\14\uffff"
+ )
+
+
+ DFA1_transition = [
+ DFA.unpack(u"\1\4\2\uffff\1\1\1\uffff\1\6\1\uffff\1\2\2\uffff\1\5"
+ u"\1\uffff\1\7\2\uffff\1\3\7\uffff\1\10"),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u"\1\11"),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u"\1\12\3\uffff\1\13"),
+ DFA.unpack(u""),
+ DFA.unpack(u"")
+ ]
+
+ # class definition for DFA #1
+
+ DFA1 = DFA
+ # lookup tables for DFA #2
+
+ DFA2_eot = DFA.unpack(
+ u"\14\uffff"
+ )
+
+ DFA2_eof = DFA.unpack(
+ u"\14\uffff"
+ )
+
+ DFA2_min = DFA.unpack(
+ u"\1\103\3\uffff\1\157\1\144\1\61\5\uffff"
+ )
+
+ DFA2_max = DFA.unpack(
+ u"\1\123\3\uffff\1\157\1\144\1\65\5\uffff"
+ )
+
+ DFA2_accept = DFA.unpack(
+ u"\1\uffff\1\1\1\2\1\3\3\uffff\1\4\1\5\1\6\1\7\1\10"
+ )
+
+ DFA2_special = DFA.unpack(
+ u"\14\uffff"
+ )
+
+
+ DFA2_transition = [
+ DFA.unpack(u"\1\2\10\uffff\1\3\1\4\5\uffff\1\1"),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u"\1\5"),
+ DFA.unpack(u"\1\6"),
+ DFA.unpack(u"\1\7\1\10\1\11\1\12\1\13"),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u"")
+ ]
+
+ # class definition for DFA #2
+
+ DFA2 = DFA
+ # lookup tables for DFA #9
+
+ DFA9_eot = DFA.unpack(
+ u"\1\34\3\uffff\2\34\3\uffff\1\34\3\uffff\17\34\5\uffff\22\34\1\uffff"
+ u"\2\34\1\113\21\34\1\141\1\uffff\1\34\1\uffff\2\34\1\145\12\34\6"
+ u"\160\2\34\1\uffff\3\34\1\uffff\10\34\1\160\1\34\1\uffff\7\34\1"
+ u"\u0086\7\34\1\u008e\3\34\1\u0093\1\u0086\1\uffff\1\u0086\4\34\1"
+ u"\160\1\34\1\uffff\4\34\1\uffff\4\34\1\u00a1\10\34\1\uffff\11\34"
+ u"\1\u0086\3\34\1\u00b7\3\34\1\u0086\1\u00bb\2\34\1\uffff\3\34\1"
+ u"\uffff\1\u0086\3\34\1\u0086\5\34\1\u0086\2\34\1\u00cb\1\u0086\1"
+ u"\uffff"
+ )
+
+ DFA9_eof = DFA.unpack(
+ u"\u00cc\uffff"
+ )
+
+ DFA9_min = DFA.unpack(
+ u"\1\11\3\uffff\1\156\1\141\3\uffff\1\145\3\uffff\1\157\1\151\2\171"
+ u"\1\145\1\151\1\141\1\154\1\165\1\153\1\150\3\157\1\166\2\uffff"
+ u"\1\52\2\uffff\1\143\1\155\1\171\1\144\1\162\1\160\1\155\1\146\1"
+ u"\144\1\162\1\160\1\156\1\142\1\151\1\156\1\143\1\144\1\145\1\uffff"
+ u"\1\154\1\145\1\53\1\151\1\164\1\145\1\142\1\141\1\144\1\164\1\150"
+ u"\1\145\1\143\1\137\1\146\1\164\1\153\1\61\1\162\1\165\1\53\1\uffff"
+ u"\1\141\1\uffff\1\146\1\165\1\53\1\157\1\165\1\145\1\151\1\141\1"
+ u"\162\1\164\1\163\1\164\1\162\6\53\1\162\1\144\1\uffff\1\144\1\151"
+ u"\1\141\1\uffff\2\154\1\156\1\141\2\156\1\151\1\171\1\53\1\157\1"
+ u"\uffff\1\151\1\145\1\137\1\145\1\154\1\163\1\164\1\53\1\154\1\165"
+ u"\1\141\1\157\1\155\1\154\1\144\1\53\1\153\1\162\1\115\2\53\1\uffff"
+ u"\1\53\1\155\1\164\1\156\1\142\1\53\1\145\1\uffff\1\145\1\137\1"
+ u"\155\1\157\1\uffff\2\145\1\137\1\157\1\53\1\171\1\153\1\157\1\144"
+ u"\1\162\1\137\1\153\1\154\1\uffff\1\163\1\141\1\145\1\144\1\163"
+ u"\1\151\1\147\1\145\1\163\1\53\1\160\1\171\1\151\1\53\1\143\1\162"
+ u"\1\171\2\53\1\163\1\146\1\uffff\1\137\1\157\1\163\1\uffff\1\53"
+ u"\1\151\1\153\1\165\1\53\2\145\1\160\1\162\1\171\1\53\2\163\2\53"
+ u"\1\uffff"
+ )
+
+ DFA9_max = DFA.unpack(
+ u"\1\175\3\uffff\1\156\1\141\3\uffff\1\145\3\uffff\1\157\1\151\2"
+ u"\171\1\145\1\151\1\141\1\154\1\165\1\153\1\150\3\157\1\166\2\uffff"
+ u"\1\57\2\uffff\1\143\1\155\1\171\1\144\1\162\1\160\1\155\1\146\1"
+ u"\144\1\162\1\164\1\156\1\142\1\151\1\156\1\143\1\144\1\145\1\uffff"
+ u"\1\154\1\145\1\172\1\151\1\164\1\145\1\142\1\141\1\144\1\164\1"
+ u"\150\1\145\1\143\1\137\1\146\1\164\1\153\1\65\1\162\1\165\1\172"
+ u"\1\uffff\1\141\1\uffff\1\146\1\165\1\172\1\157\1\165\1\145\1\151"
+ u"\1\141\1\162\1\164\1\163\1\164\1\162\6\172\1\162\1\144\1\uffff"
+ u"\1\144\1\151\1\141\1\uffff\2\154\1\156\1\141\2\156\1\151\1\171"
+ u"\1\172\1\157\1\uffff\1\151\1\145\1\137\1\145\1\154\1\163\1\164"
+ u"\1\172\1\154\1\165\1\141\1\157\1\155\1\154\1\144\1\172\1\153\1"
+ u"\162\1\137\2\172\1\uffff\1\172\1\155\1\164\1\156\1\142\1\172\1"
+ u"\145\1\uffff\1\145\1\137\1\155\1\157\1\uffff\2\145\1\137\1\157"
+ u"\1\172\1\171\1\155\1\157\1\144\1\162\1\137\1\153\1\154\1\uffff"
+ u"\1\163\1\141\1\145\1\144\1\163\1\151\1\147\1\145\1\163\1\172\1"
+ u"\160\1\171\1\151\1\172\1\143\1\162\1\171\2\172\1\163\1\146\1\uffff"
+ u"\1\137\1\157\1\163\1\uffff\1\172\1\151\1\153\1\165\1\172\2\145"
+ u"\1\160\1\162\1\171\1\172\2\163\2\172\1\uffff"
+ )
+
+ DFA9_accept = DFA.unpack(
+ u"\1\uffff\1\1\1\2\1\3\2\uffff\1\6\1\7\1\10\1\uffff\1\13\1\14\1\15"
+ u"\17\uffff\1\26\1\27\1\uffff\1\31\1\32\22\uffff\1\30\25\uffff\1"
+ u"\11\1\uffff\1\12\25\uffff\1\5\3\uffff\1\20\12\uffff\1\24\25\uffff"
+ u"\1\23\7\uffff\1\4\4\uffff\1\21\15\uffff\1\25\25\uffff\1\22\3\uffff"
+ u"\1\16\17\uffff\1\17"
+ )
+
+ DFA9_special = DFA.unpack(
+ u"\u00cc\uffff"
+ )
+
+
+ DFA9_transition = [
+ DFA.unpack(u"\2\35\1\uffff\2\35\22\uffff\1\35\1\uffff\1\40\1\37\10"
+ u"\uffff\1\14\2\uffff\1\36\13\uffff\1\3\1\12\1\10\1\13\4\uffff\1"
+ u"\30\10\uffff\1\31\1\32\5\uffff\1\27\7\uffff\1\6\1\uffff\1\7\3\uffff"
+ u"\1\24\2\uffff\1\21\1\uffff\1\25\1\uffff\1\22\1\4\1\uffff\1\11\1"
+ u"\uffff\1\15\1\5\1\33\1\23\2\uffff\1\20\1\17\1\uffff\1\16\1\uffff"
+ u"\1\26\2\uffff\1\1\1\uffff\1\2"),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u"\1\41"),
+ DFA.unpack(u"\1\42"),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u"\1\43"),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u"\1\44"),
+ DFA.unpack(u"\1\45"),
+ DFA.unpack(u"\1\46"),
+ DFA.unpack(u"\1\47"),
+ DFA.unpack(u"\1\50"),
+ DFA.unpack(u"\1\51"),
+ DFA.unpack(u"\1\52"),
+ DFA.unpack(u"\1\53"),
+ DFA.unpack(u"\1\54"),
+ DFA.unpack(u"\1\55"),
+ DFA.unpack(u"\1\56"),
+ DFA.unpack(u"\1\57"),
+ DFA.unpack(u"\1\60"),
+ DFA.unpack(u"\1\61"),
+ DFA.unpack(u"\1\62"),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u"\1\63\4\uffff\1\37"),
+ DFA.unpack(u""),
+ DFA.unpack(u""),
+ DFA.unpack(u"\1\64"),
+ DFA.unpack(u"\1\65"),
+ DFA.unpack(u"\1\66"),
+ DFA.unpack(u"\1\67"),
+ DFA.unpack(u"\1\70"),
+ DFA.unpack(u"\1\71"),
+ DFA.unpack(u"\1\72"),
+ DFA.unpack(u"\1\73"),
+ DFA.unpack(u"\1\74"),
+ DFA.unpack(u"\1\75"),
+ DFA.unpack(u"\1\76\3\uffff\1\77"),
+ DFA.unpack(u"\1\100"),
+ DFA.unpack(u"\1\101"),
+ DFA.unpack(u"\1\102"),
+ DFA.unpack(u"\1\103"),
+ DFA.unpack(u"\1\104"),
+ DFA.unpack(u"\1\105"),
+ DFA.unpack(u"\1\106"),
+ DFA.unpack(u""),
+ DFA.unpack(u"\1\107"),
+ DFA.unpack(u"\1\110"),
+ DFA.unpack(u"\1\34\1\uffff\1\34\1\111\1\uffff\12\34\7\uffff\32\34"
+ u"\4\uffff\1\34\1\uffff\17\34\1\112\12\34"),
+ DFA.unpack(u"\1\114"),
+ DFA.unpack(u"\1\115"),
+ DFA.unpack(u"\1\116"),
+ DFA.unpack(u"\1\117"),
+ DFA.unpack(u"\1\120"),
+ DFA.unpack(u"\1\121"),
+ DFA.unpack(u"\1\122"),
+ DFA.unpack(u"\1\123"),
+ DFA.unpack(u"\1\124"),
+ DFA.unpack(u"\1\125"),
+ DFA.unpack(u"\1\126"),
+ DFA.unpack(u"\1\127"),
+ DFA.unpack(u"\1\130"),
+ DFA.unpack(u"\1\131"),
+ DFA.unpack(u"\1\132\1\133\1\134\1\135\1\136"),
+ DFA.unpack(u"\1\137"),
+ DFA.unpack(u"\1\140"),
+ DFA.unpack(u"\1\34\1\uffff\1\34\2\uffff\12\34\7\uffff\32\34\4\uffff"
+ u"\1\34\1\uffff\32\34"),
+ DFA.unpack(u""),
+ DFA.unpack(u"\1\142"),
+ DFA.unpack(u""),
+ DFA.unpack(u"\1\143"),
+ DFA.unpack(u"\1\144"),
+ DFA.unpack(u"\1\34\1\uffff\1\34\2\uffff\12\34\7\uffff\32\34\4\uffff"
+ u"\1\34\1\uffff\32\34"),
+ DFA.unpack(u"\1\146"),
+ DFA.unpack(u"\1\147"),
+ DFA.unpack(u"\1\150"),
+ DFA.unpack(u"\1\151"),
+ DFA.unpack(u"\1\152"),
+ DFA.unpack(u"\1\153"),
+ DFA.unpack(u"\1\154"),
+ DFA.unpack(u"\1\155"),
+ DFA.unpack(u"\1\156"),
+ DFA.unpack(u"\1\157"),
+ DFA.unpack(u"\1\34\1\uffff\1\34\2\uffff\12\34\7\uffff\32\34\4\uffff"
+ u"\1\34\1\uffff\32\34"),
+ DFA.unpack(u"\1\34\1\uffff\1\34\2\uffff\12\34\7\uffff\32\34\4\uffff"
+ u"\1\34\1\uffff\32\34"),
+ DFA.unpack(u"\1\34\1\uffff\1\34\2\uffff\12\34\7\uffff\32\34\4\uffff"
+ u"\1\34\1\uffff\32\34"),
+ DFA.unpack(u"\1\34\1\uffff\1\34\2\uffff\12\34\7\uffff\32\34\4\uffff"
+ u"\1\34\1\uffff\32\34"),
+ DFA.unpack(u"\1\34\1\uffff\1\34\2\uffff\12\34\7\uffff\32\34\4\uffff"
+ u"\1\34\1\uffff\32\34"),
+ DFA.unpack(u"\1\34\1\uffff\1\34\2\uffff\12\34\7\uffff\32\34\4\uffff"
+ u"\1\34\1\uffff\32\34"),
+ DFA.unpack(u"\1\161"),
+ DFA.unpack(u"\1\162"),
+ DFA.unpack(u""),
+ DFA.unpack(u"\1\163"),
+ DFA.unpack(u"\1\164"),
+ DFA.unpack(u"\1\165"),
+ DFA.unpack(u""),
+ DFA.unpack(u"\1\166"),
+ DFA.unpack(u"\1\167"),
+ DFA.unpack(u"\1\170"),
+ DFA.unpack(u"\1\171"),
+ DFA.unpack(u"\1\172"),
+ DFA.unpack(u"\1\173"),
+ DFA.unpack(u"\1\174"),
+ DFA.unpack(u"\1\175"),
+ DFA.unpack(u"\1\34\1\uffff\1\34\2\uffff\12\34\7\uffff\32\34\4\uffff"
+ u"\1\34\1\uffff\32\34"),
+ DFA.unpack(u"\1\176"),
+ DFA.unpack(u""),
+ DFA.unpack(u"\1\177"),
+ DFA.unpack(u"\1\u0080"),
+ DFA.unpack(u"\1\u0081"),
+ DFA.unpack(u"\1\u0082"),
+ DFA.unpack(u"\1\u0083"),
+ DFA.unpack(u"\1\u0084"),
+ DFA.unpack(u"\1\u0085"),
+ DFA.unpack(u"\1\34\1\uffff\1\34\2\uffff\12\34\7\uffff\32\34\4\uffff"
+ u"\1\34\1\uffff\32\34"),
+ DFA.unpack(u"\1\u0087"),
+ DFA.unpack(u"\1\u0088"),
+ DFA.unpack(u"\1\u0089"),
+ DFA.unpack(u"\1\u008a"),
+ DFA.unpack(u"\1\u008b"),
+ DFA.unpack(u"\1\u008c"),
+ DFA.unpack(u"\1\u008d"),
+ DFA.unpack(u"\1\34\1\uffff\1\34\2\uffff\12\34\7\uffff\32\34\4\uffff"
+ u"\1\34\1\uffff\32\34"),
+ DFA.unpack(u"\1\u008f"),
+ DFA.unpack(u"\1\u0090"),
+ DFA.unpack(u"\1\u0092\21\uffff\1\u0091"),
+ DFA.unpack(u"\1\34\1\uffff\1\34\2\uffff\12\34\7\uffff\32\34\4\uffff"
+ u"\1\34\1\uffff\32\34"),
+ DFA.unpack(u"\1\34\1\uffff\1\34\2\uffff\12\34\7\uffff\32\34\4\uffff"
+ u"\1\34\1\uffff\32\34"),
+ DFA.unpack(u""),
+ DFA.unpack(u"\1\34\1\uffff\1\34\2\uffff\12\34\7\uffff\32\34\4\uffff"
+ u"\1\34\1\uffff\32\34"),
+ DFA.unpack(u"\1\u0094"),
+ DFA.unpack(u"\1\u0095"),
+ DFA.unpack(u"\1\u0096"),
+ DFA.unpack(u"\1\u0097"),
+ DFA.unpack(u"\1\34\1\uffff\1\34\2\uffff\12\34\7\uffff\32\34\4\uffff"
+ u"\1\34\1\uffff\32\34"),
+ DFA.unpack(u"\1\u0098"),
+ DFA.unpack(u""),
+ DFA.unpack(u"\1\u0099"),
+ DFA.unpack(u"\1\u009a"),
+ DFA.unpack(u"\1\u009b"),
+ DFA.unpack(u"\1\u009c"),
+ DFA.unpack(u""),
+ DFA.unpack(u"\1\u009d"),
+ DFA.unpack(u"\1\u009e"),
+ DFA.unpack(u"\1\u009f"),
+ DFA.unpack(u"\1\u00a0"),
+ DFA.unpack(u"\1\34\1\uffff\1\34\2\uffff\12\34\7\uffff\32\34\4\uffff"
+ u"\1\34\1\uffff\32\34"),
+ DFA.unpack(u"\1\u00a2"),
+ DFA.unpack(u"\1\u00a4\1\uffff\1\u00a3"),
+ DFA.unpack(u"\1\u00a5"),
+ DFA.unpack(u"\1\u00a6"),
+ DFA.unpack(u"\1\u00a7"),
+ DFA.unpack(u"\1\u00a8"),
+ DFA.unpack(u"\1\u00a9"),
+ DFA.unpack(u"\1\u00aa"),
+ DFA.unpack(u""),
+ DFA.unpack(u"\1\u00ab"),
+ DFA.unpack(u"\1\u00ac"),
+ DFA.unpack(u"\1\u00ad"),
+ DFA.unpack(u"\1\u00ae"),
+ DFA.unpack(u"\1\u00af"),
+ DFA.unpack(u"\1\u00b0"),
+ DFA.unpack(u"\1\u00b1"),
+ DFA.unpack(u"\1\u00b2"),
+ DFA.unpack(u"\1\u00b3"),
+ DFA.unpack(u"\1\34\1\uffff\1\34\2\uffff\12\34\7\uffff\32\34\4\uffff"
+ u"\1\34\1\uffff\32\34"),
+ DFA.unpack(u"\1\u00b4"),
+ DFA.unpack(u"\1\u00b5"),
+ DFA.unpack(u"\1\u00b6"),
+ DFA.unpack(u"\1\34\1\uffff\1\34\2\uffff\12\34\7\uffff\32\34\4\uffff"
+ u"\1\34\1\uffff\32\34"),
+ DFA.unpack(u"\1\u00b8"),
+ DFA.unpack(u"\1\u00b9"),
+ DFA.unpack(u"\1\u00ba"),
+ DFA.unpack(u"\1\34\1\uffff\1\34\2\uffff\12\34\7\uffff\32\34\4\uffff"
+ u"\1\34\1\uffff\32\34"),
+ DFA.unpack(u"\1\34\1\uffff\1\34\2\uffff\12\34\7\uffff\32\34\4\uffff"
+ u"\1\34\1\uffff\32\34"),
+ DFA.unpack(u"\1\u00bc"),
+ DFA.unpack(u"\1\u00bd"),
+ DFA.unpack(u""),
+ DFA.unpack(u"\1\u00be"),
+ DFA.unpack(u"\1\u00bf"),
+ DFA.unpack(u"\1\u00c0"),
+ DFA.unpack(u""),
+ DFA.unpack(u"\1\34\1\uffff\1\34\2\uffff\12\34\7\uffff\32\34\4\uffff"
+ u"\1\34\1\uffff\32\34"),
+ DFA.unpack(u"\1\u00c1"),
+ DFA.unpack(u"\1\u00c2"),
+ DFA.unpack(u"\1\u00c3"),
+ DFA.unpack(u"\1\34\1\uffff\1\34\2\uffff\12\34\7\uffff\32\34\4\uffff"
+ u"\1\34\1\uffff\32\34"),
+ DFA.unpack(u"\1\u00c4"),
+ DFA.unpack(u"\1\u00c5"),
+ DFA.unpack(u"\1\u00c6"),
+ DFA.unpack(u"\1\u00c7"),
+ DFA.unpack(u"\1\u00c8"),
+ DFA.unpack(u"\1\34\1\uffff\1\34\2\uffff\12\34\7\uffff\32\34\4\uffff"
+ u"\1\34\1\uffff\32\34"),
+ DFA.unpack(u"\1\u00c9"),
+ DFA.unpack(u"\1\u00ca"),
+ DFA.unpack(u"\1\34\1\uffff\1\34\2\uffff\12\34\7\uffff\32\34\4\uffff"
+ u"\1\34\1\uffff\32\34"),
+ DFA.unpack(u"\1\34\1\uffff\1\34\2\uffff\12\34\7\uffff\32\34\4\uffff"
+ u"\1\34\1\uffff\32\34"),
+ DFA.unpack(u"")
+ ]
+
+ # class definition for DFA #9
+
+ DFA9 = DFA
+
+
+
+
+def main(argv, stdin=sys.stdin, stdout=sys.stdout, stderr=sys.stderr):
+ from antlr3.main import LexerMain
+ main = LexerMain(XKBGrammarLexer)
+ main.stdin = stdin
+ main.stdout = stdout
+ main.stderr = stderr
+ main.execute(argv)
+
+
+if __name__ == '__main__':
+ main(sys.argv)
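A minimal sketch of tokenizing an xkb symbols fragment with the generated lexer, assuming the antlr3 Python runtime these files target; the sample source string is only an illustration:

import antlr3
from XKBGrammarLexer import XKBGrammarLexer

source = 'partial alphanumeric_keys\nxkb_symbols "basic" { include "latin" };\n'
lexer = XKBGrammarLexer(antlr3.ANTLRStringStream(source))
tokens = antlr3.CommonTokenStream(lexer)
tokens.fillBuffer()                       # tokenize the whole input up front
for token in tokens.getTokens():
    print token.text, token.type          # Python 2 print, matching the generated code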
diff --git a/XKBGrammarParser.py b/XKBGrammarParser.py
@@ -0,0 +1,2351 @@
+# $ANTLR 3.1.2 XKBGrammar.g 2019-08-13 08:28:51
+
+import sys
+from antlr3 import *
+from antlr3.compat import set, frozenset
+
+from antlr3.tree import *
+
+
+
+# for convenience in actions
+HIDDEN = BaseRecognizer.HIDDEN
+
+# token types
+ELEM_KEYSYMGROUP=22
+T__50=50
+TOKEN_NAME=6
+VALUE=20
+KEYELEMENTS=25
+OVERLAY=27
+TOKEN_KEY_TYPE=5
+KEYCODEX=19
+KEYCODE=18
+T__51=51
+MAPMATERIAL=17
+NAME=30
+LINE_COMMENT=33
+TOKEN_SYMBOL=10
+TOKEN_INCLUDE=4
+ELEM_VIRTUALMODS=24
+TOKEN_KEY=7
+LAYOUT=12
+STATE=21
+DQSTRING=29
+COMMENT=32
+MAPTYPE=14
+T__37=37
+T__38=38
+T__39=39
+T__34=34
+TOKEN_TYPE=8
+T__35=35
+T__36=36
+SYMBOLS=13
+WS=31
+EOF=-1
+TOKEN_VIRTUAL_MODIFIERS=11
+MAPOPTIONS=16
+MAPOPTS=28
+ELEM_KEYSYMS=23
+TOKEN_MODIFIER_MAP=9
+MAPNAME=15
+OVERRIDE=26
+T__48=48
+T__49=49
+T__44=44
+T__45=45
+T__46=46
+T__47=47
+T__40=40
+T__41=41
+T__42=42
+T__43=43
+
+# token names
+tokenNames = [
+ "<invalid>", "<EOR>", "<DOWN>", "<UP>",
+ "TOKEN_INCLUDE", "TOKEN_KEY_TYPE", "TOKEN_NAME", "TOKEN_KEY", "TOKEN_TYPE",
+ "TOKEN_MODIFIER_MAP", "TOKEN_SYMBOL", "TOKEN_VIRTUAL_MODIFIERS", "LAYOUT",
+ "SYMBOLS", "MAPTYPE", "MAPNAME", "MAPOPTIONS", "MAPMATERIAL", "KEYCODE",
+ "KEYCODEX", "VALUE", "STATE", "ELEM_KEYSYMGROUP", "ELEM_KEYSYMS", "ELEM_VIRTUALMODS",
+ "KEYELEMENTS", "OVERRIDE", "OVERLAY", "MAPOPTS", "DQSTRING", "NAME",
+ "WS", "COMMENT", "LINE_COMMENT", "'{'", "'}'", "';'", "'include'", "'name'",
+ "'['", "']'", "'='", "'key.type'", "'key'", "'<'", "'>'", "','", "'modifier_map'",
+ "'virtual_modifiers'", "'type'", "'symbols'", "'virtualMods'"
+]
+
+
+
+
+class XKBGrammarParser(Parser):
+ grammarFileName = "XKBGrammar.g"
+ antlr_version = version_str_to_tuple("3.1.2")
+ antlr_version_str = "3.1.2"
+ tokenNames = tokenNames
+
+ def __init__(self, input, state=None):
+ if state is None:
+ state = RecognizerSharedState()
+
+ Parser.__init__(self, input, state)
+
+
+
+
+
+
+
+
+ self._adaptor = CommonTreeAdaptor()
+
+
+
+ def getTreeAdaptor(self):
+ return self._adaptor
+
+ def setTreeAdaptor(self, adaptor):
+ self._adaptor = adaptor
+
+ adaptor = property(getTreeAdaptor, setTreeAdaptor)
+
+
+ class layout_return(ParserRuleReturnScope):
+ def __init__(self):
+ ParserRuleReturnScope.__init__(self)
+
+ self.tree = None
+
+
+
+
+ # $ANTLR start "layout"
+ # XKBGrammar.g:60:1: layout : ( symbols )+ EOF -> ^( LAYOUT ( symbols )+ ) ;
+ def layout(self, ):
+
+ retval = self.layout_return()
+ retval.start = self.input.LT(1)
+
+ root_0 = None
+
+ EOF2 = None
+ symbols1 = None
+
+
+ EOF2_tree = None
+ stream_EOF = RewriteRuleTokenStream(self._adaptor, "token EOF")
+ stream_symbols = RewriteRuleSubtreeStream(self._adaptor, "rule symbols")
+ try:
+ try:
+ # XKBGrammar.g:61:3: ( ( symbols )+ EOF -> ^( LAYOUT ( symbols )+ ) )
+ # XKBGrammar.g:61:5: ( symbols )+ EOF
+ pass
+ # XKBGrammar.g:61:5: ( symbols )+
+ cnt1 = 0
+ while True: #loop1
+ alt1 = 2
+ LA1_0 = self.input.LA(1)
+
+ if (LA1_0 == MAPOPTS) :
+ alt1 = 1
+
+
+ if alt1 == 1:
+ # XKBGrammar.g:61:5: symbols
+ pass
+ self._state.following.append(self.FOLLOW_symbols_in_layout191)
+ symbols1 = self.symbols()
+
+ self._state.following.pop()
+ stream_symbols.add(symbols1.tree)
+
+
+ else:
+ if cnt1 >= 1:
+ break #loop1
+
+ eee = EarlyExitException(1, self.input)
+ raise eee
+
+ cnt1 += 1
+
+
+ EOF2=self.match(self.input, EOF, self.FOLLOW_EOF_in_layout194)
+ stream_EOF.add(EOF2)
+
+ # AST Rewrite
+ # elements: symbols
+ # token labels:
+ # rule labels: retval
+ # token list labels:
+ # rule list labels:
+ # wildcard labels:
+
+ retval.tree = root_0
+
+ if retval is not None:
+ stream_retval = RewriteRuleSubtreeStream(self._adaptor, "rule retval", retval.tree)
+ else:
+ stream_retval = RewriteRuleSubtreeStream(self._adaptor, "token retval", None)
+
+
+ root_0 = self._adaptor.nil()
+ # 62:3: -> ^( LAYOUT ( symbols )+ )
+ # XKBGrammar.g:62:6: ^( LAYOUT ( symbols )+ )
+ root_1 = self._adaptor.nil()
+ root_1 = self._adaptor.becomeRoot(self._adaptor.createFromType(LAYOUT, "LAYOUT"), root_1)
+
+ # XKBGrammar.g:62:15: ( symbols )+
+ if not (stream_symbols.hasNext()):
+ raise RewriteEarlyExitException()
+
+ while stream_symbols.hasNext():
+ self._adaptor.addChild(root_1, stream_symbols.nextTree())
+
+
+ stream_symbols.reset()
+
+ self._adaptor.addChild(root_0, root_1)
+
+
+
+ retval.tree = root_0
+
+
+
+ retval.stop = self.input.LT(-1)
+
+
+ retval.tree = self._adaptor.rulePostProcessing(root_0)
+ self._adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop)
+
+
+ except RecognitionException, re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ retval.tree = self._adaptor.errorNode(self.input, retval.start, self.input.LT(-1), re)
+ finally:
+
+ pass
+
+ return retval
+
+ # $ANTLR end "layout"
+
+ class symbols_return(ParserRuleReturnScope):
+ def __init__(self):
+ ParserRuleReturnScope.__init__(self)
+
+ self.tree = None
+
+
+
+
+ # $ANTLR start "symbols"
+ # XKBGrammar.g:65:1: symbols : mapType '{' ( mapMaterial )+ '}' ';' -> ^( SYMBOLS mapType ^( MAPMATERIAL ( mapMaterial )+ ) ) ;
+ def symbols(self, ):
+
+ retval = self.symbols_return()
+ retval.start = self.input.LT(1)
+
+ root_0 = None
+
+ char_literal4 = None
+ char_literal6 = None
+ char_literal7 = None
+ mapType3 = None
+
+ mapMaterial5 = None
+
+
+ char_literal4_tree = None
+ char_literal6_tree = None
+ char_literal7_tree = None
+ stream_34 = RewriteRuleTokenStream(self._adaptor, "token 34")
+ stream_35 = RewriteRuleTokenStream(self._adaptor, "token 35")
+ stream_36 = RewriteRuleTokenStream(self._adaptor, "token 36")
+ stream_mapMaterial = RewriteRuleSubtreeStream(self._adaptor, "rule mapMaterial")
+ stream_mapType = RewriteRuleSubtreeStream(self._adaptor, "rule mapType")
+ try:
+ try:
+ # XKBGrammar.g:66:3: ( mapType '{' ( mapMaterial )+ '}' ';' -> ^( SYMBOLS mapType ^( MAPMATERIAL ( mapMaterial )+ ) ) )
+ # XKBGrammar.g:66:5: mapType '{' ( mapMaterial )+ '}' ';'
+ pass
+ self._state.following.append(self.FOLLOW_mapType_in_symbols221)
+ mapType3 = self.mapType()
+
+ self._state.following.pop()
+ stream_mapType.add(mapType3.tree)
+ char_literal4=self.match(self.input, 34, self.FOLLOW_34_in_symbols223)
+ stream_34.add(char_literal4)
+ # XKBGrammar.g:66:17: ( mapMaterial )+
+ cnt2 = 0
+ while True: #loop2
+ alt2 = 2
+ LA2_0 = self.input.LA(1)
+
+ if (LA2_0 == OVERRIDE or (37 <= LA2_0 <= 38) or (42 <= LA2_0 <= 43) or (47 <= LA2_0 <= 48)) :
+ alt2 = 1
+
+
+ if alt2 == 1:
+ # XKBGrammar.g:66:17: mapMaterial
+ pass
+ self._state.following.append(self.FOLLOW_mapMaterial_in_symbols225)
+ mapMaterial5 = self.mapMaterial()
+
+ self._state.following.pop()
+ stream_mapMaterial.add(mapMaterial5.tree)
+
+
+ else:
+ if cnt2 >= 1:
+ break #loop2
+
+ eee = EarlyExitException(2, self.input)
+ raise eee
+
+ cnt2 += 1
+
+
+ char_literal6=self.match(self.input, 35, self.FOLLOW_35_in_symbols228)
+ stream_35.add(char_literal6)
+ char_literal7=self.match(self.input, 36, self.FOLLOW_36_in_symbols230)
+ stream_36.add(char_literal7)
+
+ # AST Rewrite
+ # elements: mapMaterial, mapType
+ # token labels:
+ # rule labels: retval
+ # token list labels:
+ # rule list labels:
+ # wildcard labels:
+
+ retval.tree = root_0
+
+ if retval is not None:
+ stream_retval = RewriteRuleSubtreeStream(self._adaptor, "rule retval", retval.tree)
+ else:
+ stream_retval = RewriteRuleSubtreeStream(self._adaptor, "token retval", None)
+
+
+ root_0 = self._adaptor.nil()
+ # 67:3: -> ^( SYMBOLS mapType ^( MAPMATERIAL ( mapMaterial )+ ) )
+ # XKBGrammar.g:67:6: ^( SYMBOLS mapType ^( MAPMATERIAL ( mapMaterial )+ ) )
+ root_1 = self._adaptor.nil()
+ root_1 = self._adaptor.becomeRoot(self._adaptor.createFromType(SYMBOLS, "SYMBOLS"), root_1)
+
+ self._adaptor.addChild(root_1, stream_mapType.nextTree())
+ # XKBGrammar.g:67:24: ^( MAPMATERIAL ( mapMaterial )+ )
+ root_2 = self._adaptor.nil()
+ root_2 = self._adaptor.becomeRoot(self._adaptor.createFromType(MAPMATERIAL, "MAPMATERIAL"), root_2)
+
+ # XKBGrammar.g:67:38: ( mapMaterial )+
+ if not (stream_mapMaterial.hasNext()):
+ raise RewriteEarlyExitException()
+
+ while stream_mapMaterial.hasNext():
+ self._adaptor.addChild(root_2, stream_mapMaterial.nextTree())
+
+
+ stream_mapMaterial.reset()
+
+ self._adaptor.addChild(root_1, root_2)
+
+ self._adaptor.addChild(root_0, root_1)
+
+
+
+ retval.tree = root_0
+
+
+
+ retval.stop = self.input.LT(-1)
+
+
+ retval.tree = self._adaptor.rulePostProcessing(root_0)
+ self._adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop)
+
+
+ except RecognitionException, re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ retval.tree = self._adaptor.errorNode(self.input, retval.start, self.input.LT(-1), re)
+ finally:
+
+ pass
+
+ return retval
+
+ # $ANTLR end "symbols"
+
+ class mapType_return(ParserRuleReturnScope):
+ def __init__(self):
+ ParserRuleReturnScope.__init__(self)
+
+ self.tree = None
+
+
+
+
+ # $ANTLR start "mapType"
+ # XKBGrammar.g:70:1: mapType : ( MAPOPTS )+ DQSTRING -> ^( MAPTYPE ^( MAPOPTIONS ( MAPOPTS )+ ) ^( MAPNAME DQSTRING ) ) ;
+ def mapType(self, ):
+
+ retval = self.mapType_return()
+ retval.start = self.input.LT(1)
+
+ root_0 = None
+
+ MAPOPTS8 = None
+ DQSTRING9 = None
+
+ MAPOPTS8_tree = None
+ DQSTRING9_tree = None
+ stream_MAPOPTS = RewriteRuleTokenStream(self._adaptor, "token MAPOPTS")
+ stream_DQSTRING = RewriteRuleTokenStream(self._adaptor, "token DQSTRING")
+
+ try:
+ try:
+ # XKBGrammar.g:71:3: ( ( MAPOPTS )+ DQSTRING -> ^( MAPTYPE ^( MAPOPTIONS ( MAPOPTS )+ ) ^( MAPNAME DQSTRING ) ) )
+ # XKBGrammar.g:71:5: ( MAPOPTS )+ DQSTRING
+ pass
+ # XKBGrammar.g:71:5: ( MAPOPTS )+
+ cnt3 = 0
+ while True: #loop3
+ alt3 = 2
+ LA3_0 = self.input.LA(1)
+
+ if (LA3_0 == MAPOPTS) :
+ alt3 = 1
+
+
+ if alt3 == 1:
+ # XKBGrammar.g:71:5: MAPOPTS
+ pass
+ MAPOPTS8=self.match(self.input, MAPOPTS, self.FOLLOW_MAPOPTS_in_mapType260)
+ stream_MAPOPTS.add(MAPOPTS8)
+
+
+ else:
+ if cnt3 >= 1:
+ break #loop3
+
+ eee = EarlyExitException(3, self.input)
+ raise eee
+
+ cnt3 += 1
+
+
+ DQSTRING9=self.match(self.input, DQSTRING, self.FOLLOW_DQSTRING_in_mapType263)
+ stream_DQSTRING.add(DQSTRING9)
+
+ # AST Rewrite
+ # elements: MAPOPTS, DQSTRING
+ # token labels:
+ # rule labels: retval
+ # token list labels:
+ # rule list labels:
+ # wildcard labels:
+
+ retval.tree = root_0
+
+ if retval is not None:
+ stream_retval = RewriteRuleSubtreeStream(self._adaptor, "rule retval", retval.tree)
+ else:
+ stream_retval = RewriteRuleSubtreeStream(self._adaptor, "token retval", None)
+
+
+ root_0 = self._adaptor.nil()
+ # 72:3: -> ^( MAPTYPE ^( MAPOPTIONS ( MAPOPTS )+ ) ^( MAPNAME DQSTRING ) )
+ # XKBGrammar.g:72:6: ^( MAPTYPE ^( MAPOPTIONS ( MAPOPTS )+ ) ^( MAPNAME DQSTRING ) )
+ root_1 = self._adaptor.nil()
+ root_1 = self._adaptor.becomeRoot(self._adaptor.createFromType(MAPTYPE, "MAPTYPE"), root_1)
+
+ # XKBGrammar.g:72:16: ^( MAPOPTIONS ( MAPOPTS )+ )
+ root_2 = self._adaptor.nil()
+ root_2 = self._adaptor.becomeRoot(self._adaptor.createFromType(MAPOPTIONS, "MAPOPTIONS"), root_2)
+
+ # XKBGrammar.g:72:29: ( MAPOPTS )+
+ if not (stream_MAPOPTS.hasNext()):
+ raise RewriteEarlyExitException()
+
+ while stream_MAPOPTS.hasNext():
+ self._adaptor.addChild(root_2, stream_MAPOPTS.nextNode())
+
+
+ stream_MAPOPTS.reset()
+
+ self._adaptor.addChild(root_1, root_2)
+ # XKBGrammar.g:72:39: ^( MAPNAME DQSTRING )
+ root_2 = self._adaptor.nil()
+ root_2 = self._adaptor.becomeRoot(self._adaptor.createFromType(MAPNAME, "MAPNAME"), root_2)
+
+ self._adaptor.addChild(root_2, stream_DQSTRING.nextNode())
+
+ self._adaptor.addChild(root_1, root_2)
+
+ self._adaptor.addChild(root_0, root_1)
+
+
+
+ retval.tree = root_0
+
+
+
+ retval.stop = self.input.LT(-1)
+
+
+ retval.tree = self._adaptor.rulePostProcessing(root_0)
+ self._adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop)
+
+
+ except RecognitionException, re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ retval.tree = self._adaptor.errorNode(self.input, retval.start, self.input.LT(-1), re)
+ finally:
+
+ pass
+
+ return retval
+
+ # $ANTLR end "mapType"
+
+ class mapMaterial_return(ParserRuleReturnScope):
+ def __init__(self):
+ ParserRuleReturnScope.__init__(self)
+
+ self.tree = None
+
+
+
+
+ # $ANTLR start "mapMaterial"
+ # XKBGrammar.g:75:1: mapMaterial : ( line_include | line_name ';' | line_keytype ';' | line_key ';' | line_modifier_map ';' | line_virtual_modifiers ';' );
+ def mapMaterial(self, ):
+
+ retval = self.mapMaterial_return()
+ retval.start = self.input.LT(1)
+
+ root_0 = None
+
+ char_literal12 = None
+ char_literal14 = None
+ char_literal16 = None
+ char_literal18 = None
+ char_literal20 = None
+ line_include10 = None
+
+ line_name11 = None
+
+ line_keytype13 = None
+
+ line_key15 = None
+
+ line_modifier_map17 = None
+
+ line_virtual_modifiers19 = None
+
+
+ char_literal12_tree = None
+ char_literal14_tree = None
+ char_literal16_tree = None
+ char_literal18_tree = None
+ char_literal20_tree = None
+
+ try:
+ try:
+ # XKBGrammar.g:76:3: ( line_include | line_name ';' | line_keytype ';' | line_key ';' | line_modifier_map ';' | line_virtual_modifiers ';' )
+ alt4 = 6
+ LA4 = self.input.LA(1)
+ if LA4 == 37:
+ alt4 = 1
+ elif LA4 == 38:
+ alt4 = 2
+ elif LA4 == 42:
+ alt4 = 3
+ elif LA4 == OVERRIDE or LA4 == 43:
+ alt4 = 4
+ elif LA4 == 47:
+ alt4 = 5
+ elif LA4 == 48:
+ alt4 = 6
+ else:
+ nvae = NoViableAltException("", 4, 0, self.input)
+
+ raise nvae
+
+ if alt4 == 1:
+ # XKBGrammar.g:76:5: line_include
+ pass
+ root_0 = self._adaptor.nil()
+
+ self._state.following.append(self.FOLLOW_line_include_in_mapMaterial298)
+ line_include10 = self.line_include()
+
+ self._state.following.pop()
+ self._adaptor.addChild(root_0, line_include10.tree)
+
+
+ elif alt4 == 2:
+ # XKBGrammar.g:77:5: line_name ';'
+ pass
+ root_0 = self._adaptor.nil()
+
+ self._state.following.append(self.FOLLOW_line_name_in_mapMaterial305)
+ line_name11 = self.line_name()
+
+ self._state.following.pop()
+ self._adaptor.addChild(root_0, line_name11.tree)
+ char_literal12=self.match(self.input, 36, self.FOLLOW_36_in_mapMaterial307)
+
+
+ elif alt4 == 3:
+ # XKBGrammar.g:78:5: line_keytype ';'
+ pass
+ root_0 = self._adaptor.nil()
+
+ self._state.following.append(self.FOLLOW_line_keytype_in_mapMaterial314)
+ line_keytype13 = self.line_keytype()
+
+ self._state.following.pop()
+ self._adaptor.addChild(root_0, line_keytype13.tree)
+ char_literal14=self.match(self.input, 36, self.FOLLOW_36_in_mapMaterial316)
+
+
+ elif alt4 == 4:
+ # XKBGrammar.g:79:5: line_key ';'
+ pass
+ root_0 = self._adaptor.nil()
+
+ self._state.following.append(self.FOLLOW_line_key_in_mapMaterial323)
+ line_key15 = self.line_key()
+
+ self._state.following.pop()
+ self._adaptor.addChild(root_0, line_key15.tree)
+ char_literal16=self.match(self.input, 36, self.FOLLOW_36_in_mapMaterial325)
+
+
+ elif alt4 == 5:
+ # XKBGrammar.g:80:5: line_modifier_map ';'
+ pass
+ root_0 = self._adaptor.nil()
+
+ self._state.following.append(self.FOLLOW_line_modifier_map_in_mapMaterial332)
+ line_modifier_map17 = self.line_modifier_map()
+
+ self._state.following.pop()
+ self._adaptor.addChild(root_0, line_modifier_map17.tree)
+ char_literal18=self.match(self.input, 36, self.FOLLOW_36_in_mapMaterial334)
+
+
+ elif alt4 == 6:
+ # XKBGrammar.g:81:5: line_virtual_modifiers ';'
+ pass
+ root_0 = self._adaptor.nil()
+
+ self._state.following.append(self.FOLLOW_line_virtual_modifiers_in_mapMaterial341)
+ line_virtual_modifiers19 = self.line_virtual_modifiers()
+
+ self._state.following.pop()
+ self._adaptor.addChild(root_0, line_virtual_modifiers19.tree)
+ char_literal20=self.match(self.input, 36, self.FOLLOW_36_in_mapMaterial343)
+
+
+ retval.stop = self.input.LT(-1)
+
+
+ retval.tree = self._adaptor.rulePostProcessing(root_0)
+ self._adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop)
+
+
+ except RecognitionException, re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ retval.tree = self._adaptor.errorNode(self.input, retval.start, self.input.LT(-1), re)
+ finally:
+
+ pass
+
+ return retval
+
+ # $ANTLR end "mapMaterial"
+
+ class line_include_return(ParserRuleReturnScope):
+ def __init__(self):
+ ParserRuleReturnScope.__init__(self)
+
+ self.tree = None
+
+
+
+
+ # $ANTLR start "line_include"
+ # XKBGrammar.g:84:1: line_include : 'include' DQSTRING -> ^( TOKEN_INCLUDE DQSTRING ) ;
+ def line_include(self, ):
+
+ retval = self.line_include_return()
+ retval.start = self.input.LT(1)
+
+ root_0 = None
+
+ string_literal21 = None
+ DQSTRING22 = None
+
+ string_literal21_tree = None
+ DQSTRING22_tree = None
+ stream_37 = RewriteRuleTokenStream(self._adaptor, "token 37")
+ stream_DQSTRING = RewriteRuleTokenStream(self._adaptor, "token DQSTRING")
+
+ try:
+ try:
+ # XKBGrammar.g:85:3: ( 'include' DQSTRING -> ^( TOKEN_INCLUDE DQSTRING ) )
+ # XKBGrammar.g:85:5: 'include' DQSTRING
+ pass
+ string_literal21=self.match(self.input, 37, self.FOLLOW_37_in_line_include357)
+ stream_37.add(string_literal21)
+ DQSTRING22=self.match(self.input, DQSTRING, self.FOLLOW_DQSTRING_in_line_include359)
+ stream_DQSTRING.add(DQSTRING22)
+
+ # AST Rewrite
+ # elements: DQSTRING
+ # token labels:
+ # rule labels: retval
+ # token list labels:
+ # rule list labels:
+ # wildcard labels:
+
+ retval.tree = root_0
+
+ if retval is not None:
+ stream_retval = RewriteRuleSubtreeStream(self._adaptor, "rule retval", retval.tree)
+ else:
+ stream_retval = RewriteRuleSubtreeStream(self._adaptor, "token retval", None)
+
+
+ root_0 = self._adaptor.nil()
+ # 86:3: -> ^( TOKEN_INCLUDE DQSTRING )
+ # XKBGrammar.g:86:6: ^( TOKEN_INCLUDE DQSTRING )
+ root_1 = self._adaptor.nil()
+ root_1 = self._adaptor.becomeRoot(self._adaptor.createFromType(TOKEN_INCLUDE, "TOKEN_INCLUDE"), root_1)
+
+ self._adaptor.addChild(root_1, stream_DQSTRING.nextNode())
+
+ self._adaptor.addChild(root_0, root_1)
+
+
+
+ retval.tree = root_0
+
+
+
+ retval.stop = self.input.LT(-1)
+
+
+ retval.tree = self._adaptor.rulePostProcessing(root_0)
+ self._adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop)
+
+
+ except RecognitionException, re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ retval.tree = self._adaptor.errorNode(self.input, retval.start, self.input.LT(-1), re)
+ finally:
+
+ pass
+
+ return retval
+
+ # $ANTLR end "line_include"
+
+ class line_name_return(ParserRuleReturnScope):
+ def __init__(self):
+ ParserRuleReturnScope.__init__(self)
+
+ self.tree = None
+
+
+
+
+ # $ANTLR start "line_name"
+ # XKBGrammar.g:89:1: line_name : 'name' ( '[' NAME ']' )? '=' DQSTRING -> ^( TOKEN_NAME DQSTRING ) ;
+ def line_name(self, ):
+
+ retval = self.line_name_return()
+ retval.start = self.input.LT(1)
+
+ root_0 = None
+
+ string_literal23 = None
+ char_literal24 = None
+ NAME25 = None
+ char_literal26 = None
+ char_literal27 = None
+ DQSTRING28 = None
+
+ string_literal23_tree = None
+ char_literal24_tree = None
+ NAME25_tree = None
+ char_literal26_tree = None
+ char_literal27_tree = None
+ DQSTRING28_tree = None
+ stream_38 = RewriteRuleTokenStream(self._adaptor, "token 38")
+ stream_39 = RewriteRuleTokenStream(self._adaptor, "token 39")
+ stream_40 = RewriteRuleTokenStream(self._adaptor, "token 40")
+ stream_41 = RewriteRuleTokenStream(self._adaptor, "token 41")
+ stream_DQSTRING = RewriteRuleTokenStream(self._adaptor, "token DQSTRING")
+ stream_NAME = RewriteRuleTokenStream(self._adaptor, "token NAME")
+
+ try:
+ try:
+ # XKBGrammar.g:90:3: ( 'name' ( '[' NAME ']' )? '=' DQSTRING -> ^( TOKEN_NAME DQSTRING ) )
+ # XKBGrammar.g:90:5: 'name' ( '[' NAME ']' )? '=' DQSTRING
+ pass
+ string_literal23=self.match(self.input, 38, self.FOLLOW_38_in_line_name382)
+ stream_38.add(string_literal23)
+ # XKBGrammar.g:90:12: ( '[' NAME ']' )?
+ alt5 = 2
+ LA5_0 = self.input.LA(1)
+
+ if (LA5_0 == 39) :
+ alt5 = 1
+ if alt5 == 1:
+ # XKBGrammar.g:90:13: '[' NAME ']'
+ pass
+ char_literal24=self.match(self.input, 39, self.FOLLOW_39_in_line_name385)
+ stream_39.add(char_literal24)
+ NAME25=self.match(self.input, NAME, self.FOLLOW_NAME_in_line_name387)
+ stream_NAME.add(NAME25)
+ char_literal26=self.match(self.input, 40, self.FOLLOW_40_in_line_name389)
+ stream_40.add(char_literal26)
+
+
+
+ char_literal27=self.match(self.input, 41, self.FOLLOW_41_in_line_name393)
+ stream_41.add(char_literal27)
+ DQSTRING28=self.match(self.input, DQSTRING, self.FOLLOW_DQSTRING_in_line_name395)
+ stream_DQSTRING.add(DQSTRING28)
+
+ # AST Rewrite
+ # elements: DQSTRING
+ # token labels:
+ # rule labels: retval
+ # token list labels:
+ # rule list labels:
+ # wildcard labels:
+
+ retval.tree = root_0
+
+ if retval is not None:
+ stream_retval = RewriteRuleSubtreeStream(self._adaptor, "rule retval", retval.tree)
+ else:
+ stream_retval = RewriteRuleSubtreeStream(self._adaptor, "token retval", None)
+
+
+ root_0 = self._adaptor.nil()
+ # 91:3: -> ^( TOKEN_NAME DQSTRING )
+ # XKBGrammar.g:91:6: ^( TOKEN_NAME DQSTRING )
+ root_1 = self._adaptor.nil()
+ root_1 = self._adaptor.becomeRoot(self._adaptor.createFromType(TOKEN_NAME, "TOKEN_NAME"), root_1)
+
+ self._adaptor.addChild(root_1, stream_DQSTRING.nextNode())
+
+ self._adaptor.addChild(root_0, root_1)
+
+
+
+ retval.tree = root_0
+
+
+
+ retval.stop = self.input.LT(-1)
+
+
+ retval.tree = self._adaptor.rulePostProcessing(root_0)
+ self._adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop)
+
+
+ except RecognitionException, re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ retval.tree = self._adaptor.errorNode(self.input, retval.start, self.input.LT(-1), re)
+ finally:
+
+ pass
+
+ return retval
+
+ # $ANTLR end "line_name"
+
+ class line_keytype_return(ParserRuleReturnScope):
+ def __init__(self):
+ ParserRuleReturnScope.__init__(self)
+
+ self.tree = None
+
+
+
+
+ # $ANTLR start "line_keytype"
+ # XKBGrammar.g:94:1: line_keytype : 'key.type' ( '[' NAME ']' )? '=' DQSTRING -> ^( TOKEN_KEY_TYPE DQSTRING ) ;
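+ # Illustrative note (not ANTLR output): matches e.g.
+ #     key.type[Group1] = "FOUR_LEVEL"
+ # and rewrites it to ^(TOKEN_KEY_TYPE "FOUR_LEVEL"), again dropping the
+ # optional group index from the tree. The example values are hypothetical.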
+ def line_keytype(self, ):
+
+ retval = self.line_keytype_return()
+ retval.start = self.input.LT(1)
+
+ root_0 = None
+
+ string_literal29 = None
+ char_literal30 = None
+ NAME31 = None
+ char_literal32 = None
+ char_literal33 = None
+ DQSTRING34 = None
+
+ string_literal29_tree = None
+ char_literal30_tree = None
+ NAME31_tree = None
+ char_literal32_tree = None
+ char_literal33_tree = None
+ DQSTRING34_tree = None
+ stream_39 = RewriteRuleTokenStream(self._adaptor, "token 39")
+ stream_40 = RewriteRuleTokenStream(self._adaptor, "token 40")
+ stream_41 = RewriteRuleTokenStream(self._adaptor, "token 41")
+ stream_42 = RewriteRuleTokenStream(self._adaptor, "token 42")
+ stream_DQSTRING = RewriteRuleTokenStream(self._adaptor, "token DQSTRING")
+ stream_NAME = RewriteRuleTokenStream(self._adaptor, "token NAME")
+
+ try:
+ try:
+ # XKBGrammar.g:95:3: ( 'key.type' ( '[' NAME ']' )? '=' DQSTRING -> ^( TOKEN_KEY_TYPE DQSTRING ) )
+ # XKBGrammar.g:95:5: 'key.type' ( '[' NAME ']' )? '=' DQSTRING
+ pass
+ string_literal29=self.match(self.input, 42, self.FOLLOW_42_in_line_keytype418)
+ stream_42.add(string_literal29)
+ # XKBGrammar.g:95:16: ( '[' NAME ']' )?
+ alt6 = 2
+ LA6_0 = self.input.LA(1)
+
+ if (LA6_0 == 39) :
+ alt6 = 1
+ if alt6 == 1:
+ # XKBGrammar.g:95:17: '[' NAME ']'
+ pass
+ char_literal30=self.match(self.input, 39, self.FOLLOW_39_in_line_keytype421)
+ stream_39.add(char_literal30)
+ NAME31=self.match(self.input, NAME, self.FOLLOW_NAME_in_line_keytype423)
+ stream_NAME.add(NAME31)
+ char_literal32=self.match(self.input, 40, self.FOLLOW_40_in_line_keytype425)
+ stream_40.add(char_literal32)
+
+
+
+ char_literal33=self.match(self.input, 41, self.FOLLOW_41_in_line_keytype429)
+ stream_41.add(char_literal33)
+ DQSTRING34=self.match(self.input, DQSTRING, self.FOLLOW_DQSTRING_in_line_keytype431)
+ stream_DQSTRING.add(DQSTRING34)
+
+ # AST Rewrite
+ # elements: DQSTRING
+ # token labels:
+ # rule labels: retval
+ # token list labels:
+ # rule list labels:
+ # wildcard labels:
+
+ retval.tree = root_0
+
+ if retval is not None:
+ stream_retval = RewriteRuleSubtreeStream(self._adaptor, "rule retval", retval.tree)
+ else:
+ stream_retval = RewriteRuleSubtreeStream(self._adaptor, "token retval", None)
+
+
+ root_0 = self._adaptor.nil()
+ # 96:3: -> ^( TOKEN_KEY_TYPE DQSTRING )
+ # XKBGrammar.g:96:6: ^( TOKEN_KEY_TYPE DQSTRING )
+ root_1 = self._adaptor.nil()
+ root_1 = self._adaptor.becomeRoot(self._adaptor.createFromType(TOKEN_KEY_TYPE, "TOKEN_KEY_TYPE"), root_1)
+
+ self._adaptor.addChild(root_1, stream_DQSTRING.nextNode())
+
+ self._adaptor.addChild(root_0, root_1)
+
+
+
+ retval.tree = root_0
+
+
+
+ retval.stop = self.input.LT(-1)
+
+
+ retval.tree = self._adaptor.rulePostProcessing(root_0)
+ self._adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop)
+
+
+ except RecognitionException, re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ retval.tree = self._adaptor.errorNode(self.input, retval.start, self.input.LT(-1), re)
+ finally:
+
+ pass
+
+ return retval
+
+ # $ANTLR end "line_keytype"
+
+ class line_key_return(ParserRuleReturnScope):
+ def __init__(self):
+ ParserRuleReturnScope.__init__(self)
+
+ self.tree = None
+
+
+
+
+ # $ANTLR start "line_key"
+ # XKBGrammar.g:99:1: line_key : ( OVERRIDE )? 'key' '<' NAME '>' '{' keyelements ( ',' keyelements )* '}' -> ^( TOKEN_KEY ( OVERRIDE )? ^( KEYCODEX NAME ) ( keyelements )+ ) ;
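+ # Illustrative note (not ANTLR output): matches a key definition such as
+ #     key <AC01> { [ a, A ] }
+ # and rewrites it to ^(TOKEN_KEY ^(KEYCODEX AC01) ^(ELEM_KEYSYMGROUP ^(VALUE a A))),
+ # keeping an optional leading OVERRIDE token as the first child when present.
+ # The example keycode and keysyms are hypothetical.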
+ def line_key(self, ):
+
+ retval = self.line_key_return()
+ retval.start = self.input.LT(1)
+
+ root_0 = None
+
+ OVERRIDE35 = None
+ string_literal36 = None
+ char_literal37 = None
+ NAME38 = None
+ char_literal39 = None
+ char_literal40 = None
+ char_literal42 = None
+ char_literal44 = None
+ keyelements41 = None
+
+ keyelements43 = None
+
+
+ OVERRIDE35_tree = None
+ string_literal36_tree = None
+ char_literal37_tree = None
+ NAME38_tree = None
+ char_literal39_tree = None
+ char_literal40_tree = None
+ char_literal42_tree = None
+ char_literal44_tree = None
+ stream_44 = RewriteRuleTokenStream(self._adaptor, "token 44")
+ stream_45 = RewriteRuleTokenStream(self._adaptor, "token 45")
+ stream_OVERRIDE = RewriteRuleTokenStream(self._adaptor, "token OVERRIDE")
+ stream_34 = RewriteRuleTokenStream(self._adaptor, "token 34")
+ stream_35 = RewriteRuleTokenStream(self._adaptor, "token 35")
+ stream_46 = RewriteRuleTokenStream(self._adaptor, "token 46")
+ stream_43 = RewriteRuleTokenStream(self._adaptor, "token 43")
+ stream_NAME = RewriteRuleTokenStream(self._adaptor, "token NAME")
+ stream_keyelements = RewriteRuleSubtreeStream(self._adaptor, "rule keyelements")
+ try:
+ try:
+ # XKBGrammar.g:100:3: ( ( OVERRIDE )? 'key' '<' NAME '>' '{' keyelements ( ',' keyelements )* '}' -> ^( TOKEN_KEY ( OVERRIDE )? ^( KEYCODEX NAME ) ( keyelements )+ ) )
+ # XKBGrammar.g:100:5: ( OVERRIDE )? 'key' '<' NAME '>' '{' keyelements ( ',' keyelements )* '}'
+ pass
+ # XKBGrammar.g:100:5: ( OVERRIDE )?
+ alt7 = 2
+ LA7_0 = self.input.LA(1)
+
+ if (LA7_0 == OVERRIDE) :
+ alt7 = 1
+ if alt7 == 1:
+ # XKBGrammar.g:100:5: OVERRIDE
+ pass
+ OVERRIDE35=self.match(self.input, OVERRIDE, self.FOLLOW_OVERRIDE_in_line_key454)
+ stream_OVERRIDE.add(OVERRIDE35)
+
+
+
+ string_literal36=self.match(self.input, 43, self.FOLLOW_43_in_line_key457)
+ stream_43.add(string_literal36)
+ char_literal37=self.match(self.input, 44, self.FOLLOW_44_in_line_key459)
+ stream_44.add(char_literal37)
+ NAME38=self.match(self.input, NAME, self.FOLLOW_NAME_in_line_key461)
+ stream_NAME.add(NAME38)
+ char_literal39=self.match(self.input, 45, self.FOLLOW_45_in_line_key463)
+ stream_45.add(char_literal39)
+ char_literal40=self.match(self.input, 34, self.FOLLOW_34_in_line_key465)
+ stream_34.add(char_literal40)
+ self._state.following.append(self.FOLLOW_keyelements_in_line_key467)
+ keyelements41 = self.keyelements()
+
+ self._state.following.pop()
+ stream_keyelements.add(keyelements41.tree)
+ # XKBGrammar.g:100:50: ( ',' keyelements )*
+ while True: #loop8
+ alt8 = 2
+ LA8_0 = self.input.LA(1)
+
+ if (LA8_0 == 46) :
+ alt8 = 1
+
+
+ if alt8 == 1:
+ # XKBGrammar.g:100:51: ',' keyelements
+ pass
+ char_literal42=self.match(self.input, 46, self.FOLLOW_46_in_line_key470)
+ stream_46.add(char_literal42)
+ self._state.following.append(self.FOLLOW_keyelements_in_line_key472)
+ keyelements43 = self.keyelements()
+
+ self._state.following.pop()
+ stream_keyelements.add(keyelements43.tree)
+
+
+ else:
+ break #loop8
+
+
+ char_literal44=self.match(self.input, 35, self.FOLLOW_35_in_line_key476)
+ stream_35.add(char_literal44)
+
+ # AST Rewrite
+ # elements: NAME, keyelements, OVERRIDE
+ # token labels:
+ # rule labels: retval
+ # token list labels:
+ # rule list labels:
+ # wildcard labels:
+
+ retval.tree = root_0
+
+ if retval is not None:
+ stream_retval = RewriteRuleSubtreeStream(self._adaptor, "rule retval", retval.tree)
+ else:
+ stream_retval = RewriteRuleSubtreeStream(self._adaptor, "token retval", None)
+
+
+ root_0 = self._adaptor.nil()
+ # 101:3: -> ^( TOKEN_KEY ( OVERRIDE )? ^( KEYCODEX NAME ) ( keyelements )+ )
+ # XKBGrammar.g:101:6: ^( TOKEN_KEY ( OVERRIDE )? ^( KEYCODEX NAME ) ( keyelements )+ )
+ root_1 = self._adaptor.nil()
+ root_1 = self._adaptor.becomeRoot(self._adaptor.createFromType(TOKEN_KEY, "TOKEN_KEY"), root_1)
+
+ # XKBGrammar.g:101:18: ( OVERRIDE )?
+ if stream_OVERRIDE.hasNext():
+ self._adaptor.addChild(root_1, stream_OVERRIDE.nextNode())
+
+
+ stream_OVERRIDE.reset();
+ # XKBGrammar.g:101:28: ^( KEYCODEX NAME )
+ root_2 = self._adaptor.nil()
+ root_2 = self._adaptor.becomeRoot(self._adaptor.createFromType(KEYCODEX, "KEYCODEX"), root_2)
+
+ self._adaptor.addChild(root_2, stream_NAME.nextNode())
+
+ self._adaptor.addChild(root_1, root_2)
+ # XKBGrammar.g:101:45: ( keyelements )+
+ if not (stream_keyelements.hasNext()):
+ raise RewriteEarlyExitException()
+
+ while stream_keyelements.hasNext():
+ self._adaptor.addChild(root_1, stream_keyelements.nextTree())
+
+
+ stream_keyelements.reset()
+
+ self._adaptor.addChild(root_0, root_1)
+
+
+
+ retval.tree = root_0
+
+
+
+ retval.stop = self.input.LT(-1)
+
+
+ retval.tree = self._adaptor.rulePostProcessing(root_0)
+ self._adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop)
+
+
+ except RecognitionException, re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ retval.tree = self._adaptor.errorNode(self.input, retval.start, self.input.LT(-1), re)
+ finally:
+
+ pass
+
+ return retval
+
+ # $ANTLR end "line_key"
+
+ class line_modifier_map_return(ParserRuleReturnScope):
+ def __init__(self):
+ ParserRuleReturnScope.__init__(self)
+
+ self.tree = None
+
+
+
+
+ # $ANTLR start "line_modifier_map"
+ # XKBGrammar.g:104:1: line_modifier_map : 'modifier_map' STATE '{' keycode ( ',' keycode )* '}' -> ^( TOKEN_MODIFIER_MAP STATE ( keycode )+ ) ;
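+ # Illustrative note (not ANTLR output): matches e.g.
+ #     modifier_map Shift { <LFSH>, <RTSH> }
+ # (assuming 'Shift' lexes as STATE) and rewrites it to
+ #     ^(TOKEN_MODIFIER_MAP Shift ^(KEYCODEX LFSH) ^(KEYCODEX RTSH)).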
+ def line_modifier_map(self, ):
+
+ retval = self.line_modifier_map_return()
+ retval.start = self.input.LT(1)
+
+ root_0 = None
+
+ string_literal45 = None
+ STATE46 = None
+ char_literal47 = None
+ char_literal49 = None
+ char_literal51 = None
+ keycode48 = None
+
+ keycode50 = None
+
+
+ string_literal45_tree = None
+ STATE46_tree = None
+ char_literal47_tree = None
+ char_literal49_tree = None
+ char_literal51_tree = None
+ stream_34 = RewriteRuleTokenStream(self._adaptor, "token 34")
+ stream_35 = RewriteRuleTokenStream(self._adaptor, "token 35")
+ stream_46 = RewriteRuleTokenStream(self._adaptor, "token 46")
+ stream_47 = RewriteRuleTokenStream(self._adaptor, "token 47")
+ stream_STATE = RewriteRuleTokenStream(self._adaptor, "token STATE")
+ stream_keycode = RewriteRuleSubtreeStream(self._adaptor, "rule keycode")
+ try:
+ try:
+ # XKBGrammar.g:105:3: ( 'modifier_map' STATE '{' keycode ( ',' keycode )* '}' -> ^( TOKEN_MODIFIER_MAP STATE ( keycode )+ ) )
+ # XKBGrammar.g:105:5: 'modifier_map' STATE '{' keycode ( ',' keycode )* '}'
+ pass
+ string_literal45=self.match(self.input, 47, self.FOLLOW_47_in_line_modifier_map509)
+ stream_47.add(string_literal45)
+ STATE46=self.match(self.input, STATE, self.FOLLOW_STATE_in_line_modifier_map511)
+ stream_STATE.add(STATE46)
+ char_literal47=self.match(self.input, 34, self.FOLLOW_34_in_line_modifier_map513)
+ stream_34.add(char_literal47)
+ self._state.following.append(self.FOLLOW_keycode_in_line_modifier_map515)
+ keycode48 = self.keycode()
+
+ self._state.following.pop()
+ stream_keycode.add(keycode48.tree)
+ # XKBGrammar.g:105:38: ( ',' keycode )*
+ while True: #loop9
+ alt9 = 2
+ LA9_0 = self.input.LA(1)
+
+ if (LA9_0 == 46) :
+ alt9 = 1
+
+
+ if alt9 == 1:
+ # XKBGrammar.g:105:39: ',' keycode
+ pass
+ char_literal49=self.match(self.input, 46, self.FOLLOW_46_in_line_modifier_map518)
+ stream_46.add(char_literal49)
+ self._state.following.append(self.FOLLOW_keycode_in_line_modifier_map520)
+ keycode50 = self.keycode()
+
+ self._state.following.pop()
+ stream_keycode.add(keycode50.tree)
+
+
+ else:
+ break #loop9
+
+
+ char_literal51=self.match(self.input, 35, self.FOLLOW_35_in_line_modifier_map524)
+ stream_35.add(char_literal51)
+
+ # AST Rewrite
+ # elements: keycode, STATE
+ # token labels:
+ # rule labels: retval
+ # token list labels:
+ # rule list labels:
+ # wildcard labels:
+
+ retval.tree = root_0
+
+ if retval is not None:
+ stream_retval = RewriteRuleSubtreeStream(self._adaptor, "rule retval", retval.tree)
+ else:
+ stream_retval = RewriteRuleSubtreeStream(self._adaptor, "token retval", None)
+
+
+ root_0 = self._adaptor.nil()
+ # 106:3: -> ^( TOKEN_MODIFIER_MAP STATE ( keycode )+ )
+ # XKBGrammar.g:106:6: ^( TOKEN_MODIFIER_MAP STATE ( keycode )+ )
+ root_1 = self._adaptor.nil()
+ root_1 = self._adaptor.becomeRoot(self._adaptor.createFromType(TOKEN_MODIFIER_MAP, "TOKEN_MODIFIER_MAP"), root_1)
+
+ self._adaptor.addChild(root_1, stream_STATE.nextNode())
+ # XKBGrammar.g:106:33: ( keycode )+
+ if not (stream_keycode.hasNext()):
+ raise RewriteEarlyExitException()
+
+ while stream_keycode.hasNext():
+ self._adaptor.addChild(root_1, stream_keycode.nextTree())
+
+
+ stream_keycode.reset()
+
+ self._adaptor.addChild(root_0, root_1)
+
+
+
+ retval.tree = root_0
+
+
+
+ retval.stop = self.input.LT(-1)
+
+
+ retval.tree = self._adaptor.rulePostProcessing(root_0)
+ self._adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop)
+
+
+ except RecognitionException, re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ retval.tree = self._adaptor.errorNode(self.input, retval.start, self.input.LT(-1), re)
+ finally:
+
+ pass
+
+ return retval
+
+ # $ANTLR end "line_modifier_map"
+
+ class line_virtual_modifiers_return(ParserRuleReturnScope):
+ def __init__(self):
+ ParserRuleReturnScope.__init__(self)
+
+ self.tree = None
+
+
+
+
+ # $ANTLR start "line_virtual_modifiers"
+ # XKBGrammar.g:109:1: line_virtual_modifiers : 'virtual_modifiers' NAME ( ',' NAME )* -> ^( TOKEN_VIRTUAL_MODIFIERS ( NAME )+ ) ;
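+ # Illustrative note (not ANTLR output): matches e.g.
+ #     virtual_modifiers NumLock, AltGr
+ # and rewrites it to ^(TOKEN_VIRTUAL_MODIFIERS NumLock AltGr), assuming the
+ # modifier names lex as NAME tokens.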
+ def line_virtual_modifiers(self, ):
+
+ retval = self.line_virtual_modifiers_return()
+ retval.start = self.input.LT(1)
+
+ root_0 = None
+
+ string_literal52 = None
+ NAME53 = None
+ char_literal54 = None
+ NAME55 = None
+
+ string_literal52_tree = None
+ NAME53_tree = None
+ char_literal54_tree = None
+ NAME55_tree = None
+ stream_46 = RewriteRuleTokenStream(self._adaptor, "token 46")
+ stream_48 = RewriteRuleTokenStream(self._adaptor, "token 48")
+ stream_NAME = RewriteRuleTokenStream(self._adaptor, "token NAME")
+
+ try:
+ try:
+ # XKBGrammar.g:110:3: ( 'virtual_modifiers' NAME ( ',' NAME )* -> ^( TOKEN_VIRTUAL_MODIFIERS ( NAME )+ ) )
+ # XKBGrammar.g:110:5: 'virtual_modifiers' NAME ( ',' NAME )*
+ pass
+ string_literal52=self.match(self.input, 48, self.FOLLOW_48_in_line_virtual_modifiers550)
+ stream_48.add(string_literal52)
+ NAME53=self.match(self.input, NAME, self.FOLLOW_NAME_in_line_virtual_modifiers552)
+ stream_NAME.add(NAME53)
+ # XKBGrammar.g:110:30: ( ',' NAME )*
+ while True: #loop10
+ alt10 = 2
+ LA10_0 = self.input.LA(1)
+
+ if (LA10_0 == 46) :
+ alt10 = 1
+
+
+ if alt10 == 1:
+ # XKBGrammar.g:110:31: ',' NAME
+ pass
+ char_literal54=self.match(self.input, 46, self.FOLLOW_46_in_line_virtual_modifiers555)
+ stream_46.add(char_literal54)
+ NAME55=self.match(self.input, NAME, self.FOLLOW_NAME_in_line_virtual_modifiers557)
+ stream_NAME.add(NAME55)
+
+
+ else:
+ break #loop10
+
+
+
+ # AST Rewrite
+ # elements: NAME
+ # token labels:
+ # rule labels: retval
+ # token list labels:
+ # rule list labels:
+ # wildcard labels:
+
+ retval.tree = root_0
+
+ if retval is not None:
+ stream_retval = RewriteRuleSubtreeStream(self._adaptor, "rule retval", retval.tree)
+ else:
+ stream_retval = RewriteRuleSubtreeStream(self._adaptor, "token retval", None)
+
+
+ root_0 = self._adaptor.nil()
+ # 111:3: -> ^( TOKEN_VIRTUAL_MODIFIERS ( NAME )+ )
+ # XKBGrammar.g:111:6: ^( TOKEN_VIRTUAL_MODIFIERS ( NAME )+ )
+ root_1 = self._adaptor.nil()
+ root_1 = self._adaptor.becomeRoot(self._adaptor.createFromType(TOKEN_VIRTUAL_MODIFIERS, "TOKEN_VIRTUAL_MODIFIERS"), root_1)
+
+ # XKBGrammar.g:111:32: ( NAME )+
+ if not (stream_NAME.hasNext()):
+ raise RewriteEarlyExitException()
+
+ while stream_NAME.hasNext():
+ self._adaptor.addChild(root_1, stream_NAME.nextNode())
+
+
+ stream_NAME.reset()
+
+ self._adaptor.addChild(root_0, root_1)
+
+
+
+ retval.tree = root_0
+
+
+
+ retval.stop = self.input.LT(-1)
+
+
+ retval.tree = self._adaptor.rulePostProcessing(root_0)
+ self._adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop)
+
+
+ except RecognitionException, re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ retval.tree = self._adaptor.errorNode(self.input, retval.start, self.input.LT(-1), re)
+ finally:
+
+ pass
+
+ return retval
+
+ # $ANTLR end "line_virtual_modifiers"
+
+ class keycode_return(ParserRuleReturnScope):
+ def __init__(self):
+ ParserRuleReturnScope.__init__(self)
+
+ self.tree = None
+
+
+
+
+ # $ANTLR start "keycode"
+ # XKBGrammar.g:114:1: keycode : ( '<' NAME '>' -> ^( KEYCODEX NAME ) | NAME -> ^( KEYCODE NAME ) );
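+ # Illustrative note (not ANTLR output): a bracketed reference such as <AE01>
+ # becomes ^(KEYCODEX AE01), while a bare name such as AE01 becomes
+ # ^(KEYCODE AE01).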
+ def keycode(self, ):
+
+ retval = self.keycode_return()
+ retval.start = self.input.LT(1)
+
+ root_0 = None
+
+ char_literal56 = None
+ NAME57 = None
+ char_literal58 = None
+ NAME59 = None
+
+ char_literal56_tree = None
+ NAME57_tree = None
+ char_literal58_tree = None
+ NAME59_tree = None
+ stream_44 = RewriteRuleTokenStream(self._adaptor, "token 44")
+ stream_45 = RewriteRuleTokenStream(self._adaptor, "token 45")
+ stream_NAME = RewriteRuleTokenStream(self._adaptor, "token NAME")
+
+ try:
+ try:
+ # XKBGrammar.g:115:3: ( '<' NAME '>' -> ^( KEYCODEX NAME ) | NAME -> ^( KEYCODE NAME ) )
+ alt11 = 2
+ LA11_0 = self.input.LA(1)
+
+ if (LA11_0 == 44) :
+ alt11 = 1
+ elif (LA11_0 == NAME) :
+ alt11 = 2
+ else:
+ nvae = NoViableAltException("", 11, 0, self.input)
+
+ raise nvae
+
+ if alt11 == 1:
+ # XKBGrammar.g:115:5: '<' NAME '>'
+ pass
+ char_literal56=self.match(self.input, 44, self.FOLLOW_44_in_keycode584)
+ stream_44.add(char_literal56)
+ NAME57=self.match(self.input, NAME, self.FOLLOW_NAME_in_keycode586)
+ stream_NAME.add(NAME57)
+ char_literal58=self.match(self.input, 45, self.FOLLOW_45_in_keycode588)
+ stream_45.add(char_literal58)
+
+ # AST Rewrite
+ # elements: NAME
+ # token labels:
+ # rule labels: retval
+ # token list labels:
+ # rule list labels:
+ # wildcard labels:
+
+ retval.tree = root_0
+
+ if retval is not None:
+ stream_retval = RewriteRuleSubtreeStream(self._adaptor, "rule retval", retval.tree)
+ else:
+ stream_retval = RewriteRuleSubtreeStream(self._adaptor, "token retval", None)
+
+
+ root_0 = self._adaptor.nil()
+ # 115:18: -> ^( KEYCODEX NAME )
+ # XKBGrammar.g:115:21: ^( KEYCODEX NAME )
+ root_1 = self._adaptor.nil()
+ root_1 = self._adaptor.becomeRoot(self._adaptor.createFromType(KEYCODEX, "KEYCODEX"), root_1)
+
+ self._adaptor.addChild(root_1, stream_NAME.nextNode())
+
+ self._adaptor.addChild(root_0, root_1)
+
+
+
+ retval.tree = root_0
+
+
+ elif alt11 == 2:
+ # XKBGrammar.g:116:5: NAME
+ pass
+ NAME59=self.match(self.input, NAME, self.FOLLOW_NAME_in_keycode602)
+ stream_NAME.add(NAME59)
+
+ # AST Rewrite
+ # elements: NAME
+ # token labels:
+ # rule labels: retval
+ # token list labels:
+ # rule list labels:
+ # wildcard labels:
+
+ retval.tree = root_0
+
+ if retval is not None:
+ stream_retval = RewriteRuleSubtreeStream(self._adaptor, "rule retval", retval.tree)
+ else:
+ stream_retval = RewriteRuleSubtreeStream(self._adaptor, "token retval", None)
+
+
+ root_0 = self._adaptor.nil()
+ # 116:10: -> ^( KEYCODE NAME )
+ # XKBGrammar.g:116:13: ^( KEYCODE NAME )
+ root_1 = self._adaptor.nil()
+ root_1 = self._adaptor.becomeRoot(self._adaptor.createFromType(KEYCODE, "KEYCODE"), root_1)
+
+ self._adaptor.addChild(root_1, stream_NAME.nextNode())
+
+ self._adaptor.addChild(root_0, root_1)
+
+
+
+ retval.tree = root_0
+
+
+ retval.stop = self.input.LT(-1)
+
+
+ retval.tree = self._adaptor.rulePostProcessing(root_0)
+ self._adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop)
+
+
+ except RecognitionException, re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ retval.tree = self._adaptor.errorNode(self.input, retval.start, self.input.LT(-1), re)
+ finally:
+
+ pass
+
+ return retval
+
+ # $ANTLR end "keycode"
+
+ class override_return(ParserRuleReturnScope):
+ def __init__(self):
+ ParserRuleReturnScope.__init__(self)
+
+ self.tree = None
+
+
+
+
+ # $ANTLR start "override"
+ # XKBGrammar.g:119:1: override : 'override' ;
+ def override(self, ):
+
+ retval = self.override_return()
+ retval.start = self.input.LT(1)
+
+ root_0 = None
+
+ string_literal60 = None
+
+ string_literal60_tree = None
+
+ try:
+ try:
+ # XKBGrammar.g:120:3: ( 'override' )
+ # XKBGrammar.g:120:5: 'override'
+ pass
+ root_0 = self._adaptor.nil()
+
+ string_literal60=self.match(self.input, OVERRIDE, self.FOLLOW_OVERRIDE_in_override623)
+
+ string_literal60_tree = self._adaptor.createWithPayload(string_literal60)
+ self._adaptor.addChild(root_0, string_literal60_tree)
+
+
+
+
+ retval.stop = self.input.LT(-1)
+
+
+ retval.tree = self._adaptor.rulePostProcessing(root_0)
+ self._adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop)
+
+
+ except RecognitionException, re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ retval.tree = self._adaptor.errorNode(self.input, retval.start, self.input.LT(-1), re)
+ finally:
+
+ pass
+
+ return retval
+
+ # $ANTLR end "override"
+
+ class keyelements_return(ParserRuleReturnScope):
+ def __init__(self):
+ ParserRuleReturnScope.__init__(self)
+
+ self.tree = None
+
+
+
+
+ # $ANTLR start "keyelements"
+ # XKBGrammar.g:123:1: keyelements : ( elem_keysyms | elem_keysymgroup | elem_virtualmods | elem_overlay );
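+ # Illustrative note (not ANTLR output): dispatches on the first token of an
+ # element inside a key block: 'type' starts elem_keysyms, '[' or 'symbols'
+ # starts elem_keysymgroup, 'virtualMods' starts elem_virtualmods, and a bare
+ # NAME starts elem_overlay (see the LA(1) switch below).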
+ def keyelements(self, ):
+
+ retval = self.keyelements_return()
+ retval.start = self.input.LT(1)
+
+ root_0 = None
+
+ elem_keysyms61 = None
+
+ elem_keysymgroup62 = None
+
+ elem_virtualmods63 = None
+
+ elem_overlay64 = None
+
+
+
+ try:
+ try:
+ # XKBGrammar.g:124:3: ( elem_keysyms | elem_keysymgroup | elem_virtualmods | elem_overlay )
+ alt12 = 4
+ LA12 = self.input.LA(1)
+ if LA12 == 49:
+ alt12 = 1
+ elif LA12 == 39 or LA12 == 50:
+ alt12 = 2
+ elif LA12 == 51:
+ alt12 = 3
+ elif LA12 == NAME:
+ alt12 = 4
+ else:
+ nvae = NoViableAltException("", 12, 0, self.input)
+
+ raise nvae
+
+ if alt12 == 1:
+ # XKBGrammar.g:124:5: elem_keysyms
+ pass
+ root_0 = self._adaptor.nil()
+
+ self._state.following.append(self.FOLLOW_elem_keysyms_in_keyelements636)
+ elem_keysyms61 = self.elem_keysyms()
+
+ self._state.following.pop()
+ self._adaptor.addChild(root_0, elem_keysyms61.tree)
+
+
+ elif alt12 == 2:
+ # XKBGrammar.g:125:5: elem_keysymgroup
+ pass
+ root_0 = self._adaptor.nil()
+
+ self._state.following.append(self.FOLLOW_elem_keysymgroup_in_keyelements643)
+ elem_keysymgroup62 = self.elem_keysymgroup()
+
+ self._state.following.pop()
+ self._adaptor.addChild(root_0, elem_keysymgroup62.tree)
+
+
+ elif alt12 == 3:
+ # XKBGrammar.g:126:5: elem_virtualmods
+ pass
+ root_0 = self._adaptor.nil()
+
+ self._state.following.append(self.FOLLOW_elem_virtualmods_in_keyelements649)
+ elem_virtualmods63 = self.elem_virtualmods()
+
+ self._state.following.pop()
+ self._adaptor.addChild(root_0, elem_virtualmods63.tree)
+
+
+ elif alt12 == 4:
+ # XKBGrammar.g:127:5: elem_overlay
+ pass
+ root_0 = self._adaptor.nil()
+
+ self._state.following.append(self.FOLLOW_elem_overlay_in_keyelements655)
+ elem_overlay64 = self.elem_overlay()
+
+ self._state.following.pop()
+ self._adaptor.addChild(root_0, elem_overlay64.tree)
+
+
+ retval.stop = self.input.LT(-1)
+
+
+ retval.tree = self._adaptor.rulePostProcessing(root_0)
+ self._adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop)
+
+
+ except RecognitionException, re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ retval.tree = self._adaptor.errorNode(self.input, retval.start, self.input.LT(-1), re)
+ finally:
+
+ pass
+
+ return retval
+
+ # $ANTLR end "keyelements"
+
+ class elem_keysyms_return(ParserRuleReturnScope):
+ def __init__(self):
+ ParserRuleReturnScope.__init__(self)
+
+ self.tree = None
+
+
+
+
+ # $ANTLR start "elem_keysyms"
+ # XKBGrammar.g:130:1: elem_keysyms : 'type' ( '[' NAME ']' )? '=' DQSTRING -> ^( ELEM_KEYSYMS DQSTRING ) ;
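+ # Illustrative note (not ANTLR output): matches e.g.
+ #     type[Group1] = "TWO_LEVEL"
+ # inside a key block and rewrites it to ^(ELEM_KEYSYMS "TWO_LEVEL"). The
+ # example values are hypothetical.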
+ def elem_keysyms(self, ):
+
+ retval = self.elem_keysyms_return()
+ retval.start = self.input.LT(1)
+
+ root_0 = None
+
+ string_literal65 = None
+ char_literal66 = None
+ NAME67 = None
+ char_literal68 = None
+ char_literal69 = None
+ DQSTRING70 = None
+
+ string_literal65_tree = None
+ char_literal66_tree = None
+ NAME67_tree = None
+ char_literal68_tree = None
+ char_literal69_tree = None
+ DQSTRING70_tree = None
+ stream_49 = RewriteRuleTokenStream(self._adaptor, "token 49")
+ stream_39 = RewriteRuleTokenStream(self._adaptor, "token 39")
+ stream_40 = RewriteRuleTokenStream(self._adaptor, "token 40")
+ stream_41 = RewriteRuleTokenStream(self._adaptor, "token 41")
+ stream_DQSTRING = RewriteRuleTokenStream(self._adaptor, "token DQSTRING")
+ stream_NAME = RewriteRuleTokenStream(self._adaptor, "token NAME")
+
+ try:
+ try:
+ # XKBGrammar.g:131:3: ( 'type' ( '[' NAME ']' )? '=' DQSTRING -> ^( ELEM_KEYSYMS DQSTRING ) )
+ # XKBGrammar.g:131:5: 'type' ( '[' NAME ']' )? '=' DQSTRING
+ pass
+ string_literal65=self.match(self.input, 49, self.FOLLOW_49_in_elem_keysyms668)
+ stream_49.add(string_literal65)
+ # XKBGrammar.g:131:12: ( '[' NAME ']' )?
+ alt13 = 2
+ LA13_0 = self.input.LA(1)
+
+ if (LA13_0 == 39) :
+ alt13 = 1
+ if alt13 == 1:
+ # XKBGrammar.g:131:13: '[' NAME ']'
+ pass
+ char_literal66=self.match(self.input, 39, self.FOLLOW_39_in_elem_keysyms671)
+ stream_39.add(char_literal66)
+ NAME67=self.match(self.input, NAME, self.FOLLOW_NAME_in_elem_keysyms673)
+ stream_NAME.add(NAME67)
+ char_literal68=self.match(self.input, 40, self.FOLLOW_40_in_elem_keysyms675)
+ stream_40.add(char_literal68)
+
+
+
+ char_literal69=self.match(self.input, 41, self.FOLLOW_41_in_elem_keysyms679)
+ stream_41.add(char_literal69)
+ DQSTRING70=self.match(self.input, DQSTRING, self.FOLLOW_DQSTRING_in_elem_keysyms681)
+ stream_DQSTRING.add(DQSTRING70)
+
+ # AST Rewrite
+ # elements: DQSTRING
+ # token labels:
+ # rule labels: retval
+ # token list labels:
+ # rule list labels:
+ # wildcard labels:
+
+ retval.tree = root_0
+
+ if retval is not None:
+ stream_retval = RewriteRuleSubtreeStream(self._adaptor, "rule retval", retval.tree)
+ else:
+ stream_retval = RewriteRuleSubtreeStream(self._adaptor, "token retval", None)
+
+
+ root_0 = self._adaptor.nil()
+ # 132:3: -> ^( ELEM_KEYSYMS DQSTRING )
+ # XKBGrammar.g:132:6: ^( ELEM_KEYSYMS DQSTRING )
+ root_1 = self._adaptor.nil()
+ root_1 = self._adaptor.becomeRoot(self._adaptor.createFromType(ELEM_KEYSYMS, "ELEM_KEYSYMS"), root_1)
+
+ self._adaptor.addChild(root_1, stream_DQSTRING.nextNode())
+
+ self._adaptor.addChild(root_0, root_1)
+
+
+
+ retval.tree = root_0
+
+
+
+ retval.stop = self.input.LT(-1)
+
+
+ retval.tree = self._adaptor.rulePostProcessing(root_0)
+ self._adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop)
+
+
+ except RecognitionException, re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ retval.tree = self._adaptor.errorNode(self.input, retval.start, self.input.LT(-1), re)
+ finally:
+
+ pass
+
+ return retval
+
+ # $ANTLR end "elem_keysyms"
+
+ class elem_keysymgroup_return(ParserRuleReturnScope):
+ def __init__(self):
+ ParserRuleReturnScope.__init__(self)
+
+ self.tree = None
+
+
+
+
+ # $ANTLR start "elem_keysymgroup"
+ # XKBGrammar.g:135:1: elem_keysymgroup : ( 'symbols' '[' NAME ']' '=' )? '[' keysym+= NAME ( ',' keysym+= NAME )* ']' -> ^( ELEM_KEYSYMGROUP ^( VALUE ( $keysym)+ ) ) ;
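+ # Illustrative note (not ANTLR output): matches a keysym list such as
+ #     [ a, A ]   or   symbols[Group1] = [ a, A ]
+ # and rewrites it to ^(ELEM_KEYSYMGROUP ^(VALUE a A)); the optional
+ # 'symbols[NAME]=' prefix is consumed but not kept in the tree.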
+ def elem_keysymgroup(self, ):
+
+ retval = self.elem_keysymgroup_return()
+ retval.start = self.input.LT(1)
+
+ root_0 = None
+
+ string_literal71 = None
+ char_literal72 = None
+ NAME73 = None
+ char_literal74 = None
+ char_literal75 = None
+ char_literal76 = None
+ char_literal77 = None
+ char_literal78 = None
+ keysym = None
+ list_keysym = None
+
+ string_literal71_tree = None
+ char_literal72_tree = None
+ NAME73_tree = None
+ char_literal74_tree = None
+ char_literal75_tree = None
+ char_literal76_tree = None
+ char_literal77_tree = None
+ char_literal78_tree = None
+ keysym_tree = None
+ stream_46 = RewriteRuleTokenStream(self._adaptor, "token 46")
+ stream_39 = RewriteRuleTokenStream(self._adaptor, "token 39")
+ stream_50 = RewriteRuleTokenStream(self._adaptor, "token 50")
+ stream_40 = RewriteRuleTokenStream(self._adaptor, "token 40")
+ stream_41 = RewriteRuleTokenStream(self._adaptor, "token 41")
+ stream_NAME = RewriteRuleTokenStream(self._adaptor, "token NAME")
+
+ try:
+ try:
+ # XKBGrammar.g:136:3: ( ( 'symbols' '[' NAME ']' '=' )? '[' keysym+= NAME ( ',' keysym+= NAME )* ']' -> ^( ELEM_KEYSYMGROUP ^( VALUE ( $keysym)+ ) ) )
+ # XKBGrammar.g:136:5: ( 'symbols' '[' NAME ']' '=' )? '[' keysym+= NAME ( ',' keysym+= NAME )* ']'
+ pass
+ # XKBGrammar.g:136:5: ( 'symbols' '[' NAME ']' '=' )?
+ alt14 = 2
+ LA14_0 = self.input.LA(1)
+
+ if (LA14_0 == 50) :
+ alt14 = 1
+ if alt14 == 1:
+ # XKBGrammar.g:136:6: 'symbols' '[' NAME ']' '='
+ pass
+ string_literal71=self.match(self.input, 50, self.FOLLOW_50_in_elem_keysymgroup705)
+ stream_50.add(string_literal71)
+ char_literal72=self.match(self.input, 39, self.FOLLOW_39_in_elem_keysymgroup707)
+ stream_39.add(char_literal72)
+ NAME73=self.match(self.input, NAME, self.FOLLOW_NAME_in_elem_keysymgroup709)
+ stream_NAME.add(NAME73)
+ char_literal74=self.match(self.input, 40, self.FOLLOW_40_in_elem_keysymgroup711)
+ stream_40.add(char_literal74)
+ char_literal75=self.match(self.input, 41, self.FOLLOW_41_in_elem_keysymgroup713)
+ stream_41.add(char_literal75)
+
+
+
+ char_literal76=self.match(self.input, 39, self.FOLLOW_39_in_elem_keysymgroup717)
+ stream_39.add(char_literal76)
+ keysym=self.match(self.input, NAME, self.FOLLOW_NAME_in_elem_keysymgroup721)
+ stream_NAME.add(keysym)
+ if list_keysym is None:
+ list_keysym = []
+ list_keysym.append(keysym)
+
+ # XKBGrammar.g:136:52: ( ',' keysym+= NAME )*
+ while True: #loop15
+ alt15 = 2
+ LA15_0 = self.input.LA(1)
+
+ if (LA15_0 == 46) :
+ alt15 = 1
+
+
+ if alt15 == 1:
+ # XKBGrammar.g:136:53: ',' keysym+= NAME
+ pass
+ char_literal77=self.match(self.input, 46, self.FOLLOW_46_in_elem_keysymgroup724)
+ stream_46.add(char_literal77)
+ keysym=self.match(self.input, NAME, self.FOLLOW_NAME_in_elem_keysymgroup728)
+ stream_NAME.add(keysym)
+ if list_keysym is None:
+ list_keysym = []
+ list_keysym.append(keysym)
+
+
+
+ else:
+ break #loop15
+
+
+ char_literal78=self.match(self.input, 40, self.FOLLOW_40_in_elem_keysymgroup732)
+ stream_40.add(char_literal78)
+
+ # AST Rewrite
+ # elements: keysym
+ # token labels:
+ # rule labels: retval
+ # token list labels: keysym
+ # rule list labels:
+ # wildcard labels:
+
+ retval.tree = root_0
+ stream_keysym = RewriteRuleTokenStream(self._adaptor, "token keysym", list_keysym)
+
+ if retval is not None:
+ stream_retval = RewriteRuleSubtreeStream(self._adaptor, "rule retval", retval.tree)
+ else:
+ stream_retval = RewriteRuleSubtreeStream(self._adaptor, "token retval", None)
+
+
+ root_0 = self._adaptor.nil()
+ # 137:3: -> ^( ELEM_KEYSYMGROUP ^( VALUE ( $keysym)+ ) )
+ # XKBGrammar.g:137:6: ^( ELEM_KEYSYMGROUP ^( VALUE ( $keysym)+ ) )
+ root_1 = self._adaptor.nil()
+ root_1 = self._adaptor.becomeRoot(self._adaptor.createFromType(ELEM_KEYSYMGROUP, "ELEM_KEYSYMGROUP"), root_1)
+
+ # XKBGrammar.g:137:25: ^( VALUE ( $keysym)+ )
+ root_2 = self._adaptor.nil()
+ root_2 = self._adaptor.becomeRoot(self._adaptor.createFromType(VALUE, "VALUE"), root_2)
+
+ # XKBGrammar.g:137:33: ( $keysym)+
+ if not (stream_keysym.hasNext()):
+ raise RewriteEarlyExitException()
+
+ while stream_keysym.hasNext():
+ self._adaptor.addChild(root_2, stream_keysym.nextNode())
+
+
+ stream_keysym.reset()
+
+ self._adaptor.addChild(root_1, root_2)
+
+ self._adaptor.addChild(root_0, root_1)
+
+
+
+ retval.tree = root_0
+
+
+
+ retval.stop = self.input.LT(-1)
+
+
+ retval.tree = self._adaptor.rulePostProcessing(root_0)
+ self._adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop)
+
+
+ except RecognitionException, re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ retval.tree = self._adaptor.errorNode(self.input, retval.start, self.input.LT(-1), re)
+ finally:
+
+ pass
+
+ return retval
+
+ # $ANTLR end "elem_keysymgroup"
+
+ class elem_virtualmods_return(ParserRuleReturnScope):
+ def __init__(self):
+ ParserRuleReturnScope.__init__(self)
+
+ self.tree = None
+
+
+
+
+ # $ANTLR start "elem_virtualmods"
+ # XKBGrammar.g:140:1: elem_virtualmods : ( 'virtualMods' '=' NAME ) -> ^( ELEM_VIRTUALMODS NAME ) ;
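+ # Illustrative note (not ANTLR output): matches e.g.
+ #     virtualMods = NumLock
+ # and rewrites it to ^(ELEM_VIRTUALMODS NumLock).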
+ def elem_virtualmods(self, ):
+
+ retval = self.elem_virtualmods_return()
+ retval.start = self.input.LT(1)
+
+ root_0 = None
+
+ string_literal79 = None
+ char_literal80 = None
+ NAME81 = None
+
+ string_literal79_tree = None
+ char_literal80_tree = None
+ NAME81_tree = None
+ stream_51 = RewriteRuleTokenStream(self._adaptor, "token 51")
+ stream_41 = RewriteRuleTokenStream(self._adaptor, "token 41")
+ stream_NAME = RewriteRuleTokenStream(self._adaptor, "token NAME")
+
+ try:
+ try:
+ # XKBGrammar.g:141:3: ( ( 'virtualMods' '=' NAME ) -> ^( ELEM_VIRTUALMODS NAME ) )
+ # XKBGrammar.g:141:5: ( 'virtualMods' '=' NAME )
+ pass
+ # XKBGrammar.g:141:5: ( 'virtualMods' '=' NAME )
+ # XKBGrammar.g:141:6: 'virtualMods' '=' NAME
+ pass
+ string_literal79=self.match(self.input, 51, self.FOLLOW_51_in_elem_virtualmods763)
+ stream_51.add(string_literal79)
+ char_literal80=self.match(self.input, 41, self.FOLLOW_41_in_elem_virtualmods765)
+ stream_41.add(char_literal80)
+ NAME81=self.match(self.input, NAME, self.FOLLOW_NAME_in_elem_virtualmods767)
+ stream_NAME.add(NAME81)
+
+
+
+
+ # AST Rewrite
+ # elements: NAME
+ # token labels:
+ # rule labels: retval
+ # token list labels:
+ # rule list labels:
+ # wildcard labels:
+
+ retval.tree = root_0
+
+ if retval is not None:
+ stream_retval = RewriteRuleSubtreeStream(self._adaptor, "rule retval", retval.tree)
+ else:
+ stream_retval = RewriteRuleSubtreeStream(self._adaptor, "token retval", None)
+
+
+ root_0 = self._adaptor.nil()
+ # 142:3: -> ^( ELEM_VIRTUALMODS NAME )
+ # XKBGrammar.g:142:6: ^( ELEM_VIRTUALMODS NAME )
+ root_1 = self._adaptor.nil()
+ root_1 = self._adaptor.becomeRoot(self._adaptor.createFromType(ELEM_VIRTUALMODS, "ELEM_VIRTUALMODS"), root_1)
+
+ self._adaptor.addChild(root_1, stream_NAME.nextNode())
+
+ self._adaptor.addChild(root_0, root_1)
+
+
+
+ retval.tree = root_0
+
+
+
+ retval.stop = self.input.LT(-1)
+
+
+ retval.tree = self._adaptor.rulePostProcessing(root_0)
+ self._adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop)
+
+
+ except RecognitionException, re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ retval.tree = self._adaptor.errorNode(self.input, retval.start, self.input.LT(-1), re)
+ finally:
+
+ pass
+
+ return retval
+
+ # $ANTLR end "elem_virtualmods"
+
+ class elem_overlay_return(ParserRuleReturnScope):
+ def __init__(self):
+ ParserRuleReturnScope.__init__(self)
+
+ self.tree = None
+
+
+
+
+ # $ANTLR start "elem_overlay"
+ # XKBGrammar.g:145:1: elem_overlay : NAME '=' keycode -> ^( OVERLAY NAME keycode ) ;
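+ # Illustrative note (not ANTLR output): matches an overlay assignment such as
+ #     overlay1 = <KO7>
+ # (assuming 'overlay1' lexes as NAME) and rewrites it to
+ #     ^(OVERLAY overlay1 ^(KEYCODEX KO7)). The example names are hypothetical.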
+ def elem_overlay(self, ):
+
+ retval = self.elem_overlay_return()
+ retval.start = self.input.LT(1)
+
+ root_0 = None
+
+ NAME82 = None
+ char_literal83 = None
+ keycode84 = None
+
+
+ NAME82_tree = None
+ char_literal83_tree = None
+ stream_41 = RewriteRuleTokenStream(self._adaptor, "token 41")
+ stream_NAME = RewriteRuleTokenStream(self._adaptor, "token NAME")
+ stream_keycode = RewriteRuleSubtreeStream(self._adaptor, "rule keycode")
+ try:
+ try:
+ # XKBGrammar.g:146:3: ( NAME '=' keycode -> ^( OVERLAY NAME keycode ) )
+ # XKBGrammar.g:146:5: NAME '=' keycode
+ pass
+ NAME82=self.match(self.input, NAME, self.FOLLOW_NAME_in_elem_overlay791)
+ stream_NAME.add(NAME82)
+ char_literal83=self.match(self.input, 41, self.FOLLOW_41_in_elem_overlay793)
+ stream_41.add(char_literal83)
+ self._state.following.append(self.FOLLOW_keycode_in_elem_overlay795)
+ keycode84 = self.keycode()
+
+ self._state.following.pop()
+ stream_keycode.add(keycode84.tree)
+
+ # AST Rewrite
+ # elements: NAME, keycode
+ # token labels:
+ # rule labels: retval
+ # token list labels:
+ # rule list labels:
+ # wildcard labels:
+
+ retval.tree = root_0
+
+ if retval is not None:
+ stream_retval = RewriteRuleSubtreeStream(self._adaptor, "rule retval", retval.tree)
+ else:
+ stream_retval = RewriteRuleSubtreeStream(self._adaptor, "token retval", None)
+
+
+ root_0 = self._adaptor.nil()
+ # 147:3: -> ^( OVERLAY NAME keycode )
+ # XKBGrammar.g:147:6: ^( OVERLAY NAME keycode )
+ root_1 = self._adaptor.nil()
+ root_1 = self._adaptor.becomeRoot(self._adaptor.createFromType(OVERLAY, "OVERLAY"), root_1)
+
+ self._adaptor.addChild(root_1, stream_NAME.nextNode())
+ self._adaptor.addChild(root_1, stream_keycode.nextTree())
+
+ self._adaptor.addChild(root_0, root_1)
+
+
+
+ retval.tree = root_0
+
+
+
+ retval.stop = self.input.LT(-1)
+
+
+ retval.tree = self._adaptor.rulePostProcessing(root_0)
+ self._adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop)
+
+
+ except RecognitionException, re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ retval.tree = self._adaptor.errorNode(self.input, retval.start, self.input.LT(-1), re)
+ finally:
+
+ pass
+
+ return retval
+
+ # $ANTLR end "elem_overlay"
+
+
+ # Delegated rules
+
+
+
+
+ FOLLOW_symbols_in_layout191 = frozenset([28])
+ FOLLOW_EOF_in_layout194 = frozenset([1])
+ FOLLOW_mapType_in_symbols221 = frozenset([34])
+ FOLLOW_34_in_symbols223 = frozenset([26, 37, 38, 42, 43, 47, 48])
+ FOLLOW_mapMaterial_in_symbols225 = frozenset([26, 35, 37, 38, 42, 43, 47, 48])
+ FOLLOW_35_in_symbols228 = frozenset([36])
+ FOLLOW_36_in_symbols230 = frozenset([1])
+ FOLLOW_MAPOPTS_in_mapType260 = frozenset([28, 29])
+ FOLLOW_DQSTRING_in_mapType263 = frozenset([1])
+ FOLLOW_line_include_in_mapMaterial298 = frozenset([1])
+ FOLLOW_line_name_in_mapMaterial305 = frozenset([36])
+ FOLLOW_36_in_mapMaterial307 = frozenset([1])
+ FOLLOW_line_keytype_in_mapMaterial314 = frozenset([36])
+ FOLLOW_36_in_mapMaterial316 = frozenset([1])
+ FOLLOW_line_key_in_mapMaterial323 = frozenset([36])
+ FOLLOW_36_in_mapMaterial325 = frozenset([1])
+ FOLLOW_line_modifier_map_in_mapMaterial332 = frozenset([36])
+ FOLLOW_36_in_mapMaterial334 = frozenset([1])
+ FOLLOW_line_virtual_modifiers_in_mapMaterial341 = frozenset([36])
+ FOLLOW_36_in_mapMaterial343 = frozenset([1])
+ FOLLOW_37_in_line_include357 = frozenset([29])
+ FOLLOW_DQSTRING_in_line_include359 = frozenset([1])
+ FOLLOW_38_in_line_name382 = frozenset([39, 41])
+ FOLLOW_39_in_line_name385 = frozenset([30])
+ FOLLOW_NAME_in_line_name387 = frozenset([40])
+ FOLLOW_40_in_line_name389 = frozenset([41])
+ FOLLOW_41_in_line_name393 = frozenset([29])
+ FOLLOW_DQSTRING_in_line_name395 = frozenset([1])
+ FOLLOW_42_in_line_keytype418 = frozenset([39, 41])
+ FOLLOW_39_in_line_keytype421 = frozenset([30])
+ FOLLOW_NAME_in_line_keytype423 = frozenset([40])
+ FOLLOW_40_in_line_keytype425 = frozenset([41])
+ FOLLOW_41_in_line_keytype429 = frozenset([29])
+ FOLLOW_DQSTRING_in_line_keytype431 = frozenset([1])
+ FOLLOW_OVERRIDE_in_line_key454 = frozenset([43])
+ FOLLOW_43_in_line_key457 = frozenset([44])
+ FOLLOW_44_in_line_key459 = frozenset([30])
+ FOLLOW_NAME_in_line_key461 = frozenset([45])
+ FOLLOW_45_in_line_key463 = frozenset([34])
+ FOLLOW_34_in_line_key465 = frozenset([30, 39, 49, 50, 51])
+ FOLLOW_keyelements_in_line_key467 = frozenset([35, 46])
+ FOLLOW_46_in_line_key470 = frozenset([30, 39, 49, 50, 51])
+ FOLLOW_keyelements_in_line_key472 = frozenset([35, 46])
+ FOLLOW_35_in_line_key476 = frozenset([1])
+ FOLLOW_47_in_line_modifier_map509 = frozenset([21])
+ FOLLOW_STATE_in_line_modifier_map511 = frozenset([34])
+ FOLLOW_34_in_line_modifier_map513 = frozenset([30, 44])
+ FOLLOW_keycode_in_line_modifier_map515 = frozenset([35, 46])
+ FOLLOW_46_in_line_modifier_map518 = frozenset([30, 44])
+ FOLLOW_keycode_in_line_modifier_map520 = frozenset([35, 46])
+ FOLLOW_35_in_line_modifier_map524 = frozenset([1])
+ FOLLOW_48_in_line_virtual_modifiers550 = frozenset([30])
+ FOLLOW_NAME_in_line_virtual_modifiers552 = frozenset([1, 46])
+ FOLLOW_46_in_line_virtual_modifiers555 = frozenset([30])
+ FOLLOW_NAME_in_line_virtual_modifiers557 = frozenset([1, 46])
+ FOLLOW_44_in_keycode584 = frozenset([30])
+ FOLLOW_NAME_in_keycode586 = frozenset([45])
+ FOLLOW_45_in_keycode588 = frozenset([1])
+ FOLLOW_NAME_in_keycode602 = frozenset([1])
+ FOLLOW_OVERRIDE_in_override623 = frozenset([1])
+ FOLLOW_elem_keysyms_in_keyelements636 = frozenset([1])
+ FOLLOW_elem_keysymgroup_in_keyelements643 = frozenset([1])
+ FOLLOW_elem_virtualmods_in_keyelements649 = frozenset([1])
+ FOLLOW_elem_overlay_in_keyelements655 = frozenset([1])
+ FOLLOW_49_in_elem_keysyms668 = frozenset([39, 41])
+ FOLLOW_39_in_elem_keysyms671 = frozenset([30])
+ FOLLOW_NAME_in_elem_keysyms673 = frozenset([40])
+ FOLLOW_40_in_elem_keysyms675 = frozenset([41])
+ FOLLOW_41_in_elem_keysyms679 = frozenset([29])
+ FOLLOW_DQSTRING_in_elem_keysyms681 = frozenset([1])
+ FOLLOW_50_in_elem_keysymgroup705 = frozenset([39])
+ FOLLOW_39_in_elem_keysymgroup707 = frozenset([30])
+ FOLLOW_NAME_in_elem_keysymgroup709 = frozenset([40])
+ FOLLOW_40_in_elem_keysymgroup711 = frozenset([41])
+ FOLLOW_41_in_elem_keysymgroup713 = frozenset([39])
+ FOLLOW_39_in_elem_keysymgroup717 = frozenset([30])
+ FOLLOW_NAME_in_elem_keysymgroup721 = frozenset([40, 46])
+ FOLLOW_46_in_elem_keysymgroup724 = frozenset([30])
+ FOLLOW_NAME_in_elem_keysymgroup728 = frozenset([40, 46])
+ FOLLOW_40_in_elem_keysymgroup732 = frozenset([1])
+ FOLLOW_51_in_elem_virtualmods763 = frozenset([41])
+ FOLLOW_41_in_elem_virtualmods765 = frozenset([30])
+ FOLLOW_NAME_in_elem_virtualmods767 = frozenset([1])
+ FOLLOW_NAME_in_elem_overlay791 = frozenset([41])
+ FOLLOW_41_in_elem_overlay793 = frozenset([30, 44])
+ FOLLOW_keycode_in_elem_overlay795 = frozenset([1])
+
+
+
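+# Illustrative note (not ANTLR output): besides the ParserMain CLI entry point
+# below, the generated parser can be driven programmatically with the ANTLR 3
+# Python runtime. A minimal sketch, assuming the companion XKBGrammarLexer
+# module generated from the same grammar, the usual 'from antlr3 import *'
+# names, and that 'layout' is the start rule (it is the only rule here that
+# matches EOF):
+#
+#     from XKBGrammarLexer import XKBGrammarLexer
+#     chars = ANTLRFileStream('gr')                # hypothetical xkb_symbols file
+#     tokens = CommonTokenStream(XKBGrammarLexer(chars))
+#     result = XKBGrammarParser(tokens).layout()
+#     print result.tree.toStringTree()             # dump the rewritten AST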
+def main(argv, stdin=sys.stdin, stdout=sys.stdout, stderr=sys.stderr):
+ from antlr3.main import ParserMain
+ main = ParserMain("XKBGrammarLexer", XKBGrammarParser)
+ main.stdin = stdin
+ main.stdout = stdout
+ main.stderr = stderr
+ main.execute(argv)
+
+
+if __name__ == '__main__':
+ main(sys.argv)
diff --git a/XKBGrammarWalker.py b/XKBGrammarWalker.py
@@ -0,0 +1,797 @@
+# $ANTLR 3.1.2 XKBGrammarWalker.g 2019-08-13 08:28:51
+
+import sys
+from antlr3 import *
+from antlr3.tree import *
+from antlr3.compat import set, frozenset
+
+
+# for convenience in actions
+HIDDEN = BaseRecognizer.HIDDEN
+
+# token types
+ELEM_KEYSYMGROUP=22
+T__50=50
+TOKEN_NAME=6
+VALUE=20
+KEYELEMENTS=25
+OVERLAY=27
+TOKEN_KEY_TYPE=5
+T__59=59
+KEYCODEX=19
+T__55=55
+T__56=56
+T__57=57
+KEYCODE=18
+T__58=58
+T__51=51
+T__53=53
+T__54=54
+T__60=60
+T__61=61
+MAPMATERIAL=17
+NAME=30
+LINE_COMMENT=33
+TOKEN_SYMBOL=10
+TOKEN_INCLUDE=4
+ELEM_VIRTUALMODS=24
+TOKEN_KEY=7
+LAYOUT=12
+STATE=21
+DQSTRING=29
+COMMENT=32
+MAPTYPE=14
+T__37=37
+T__38=38
+T__39=39
+T__34=34
+TOKEN_TYPE=8
+T__35=35
+T__36=36
+SYMBOLS=13
+WS=31
+EOF=-1
+TOKEN_VIRTUAL_MODIFIERS=11
+MAPOPTIONS=16
+MAPOPTS=28
+ELEM_KEYSYMS=23
+TOKEN_MODIFIER_MAP=9
+MAPNAME=15
+OVERRIDE=26
+T__48=48
+T__49=49
+ELEM_OVERLAY=52
+T__44=44
+T__45=45
+T__46=46
+T__47=47
+T__40=40
+T__41=41
+T__42=42
+T__43=43
+
+# token names
+tokenNames = [
+ "<invalid>", "<EOR>", "<DOWN>", "<UP>",
+ "TOKEN_INCLUDE", "TOKEN_KEY_TYPE", "TOKEN_NAME", "TOKEN_KEY", "TOKEN_TYPE",
+ "TOKEN_MODIFIER_MAP", "TOKEN_SYMBOL", "TOKEN_VIRTUAL_MODIFIERS", "LAYOUT",
+ "SYMBOLS", "MAPTYPE", "MAPNAME", "MAPOPTIONS", "MAPMATERIAL", "KEYCODE",
+ "KEYCODEX", "VALUE", "STATE", "ELEM_KEYSYMGROUP", "ELEM_KEYSYMS", "ELEM_VIRTUALMODS",
+ "KEYELEMENTS", "OVERRIDE", "OVERLAY", "MAPOPTS", "DQSTRING", "NAME",
+ "WS", "COMMENT", "LINE_COMMENT", "'{'", "'}'", "';'", "'include'", "'name'",
+ "'['", "']'", "'='", "'key.type'", "'key'", "'<'", "'>'", "','", "'modifier_map'",
+ "'virtual_modifiers'", "'type'", "'symbols'", "'virtualMods'", "ELEM_OVERLAY",
+ "'default'", "'hidden'", "'partial'", "'alphanumeric_keys'", "'keypad_keys'",
+ "'function_keys'", "'modifier_keys'", "'alternate_group'", "'xkb_symbols'"
+]
+
+
+
+
+class XKBGrammarWalker(TreeParser):
+ grammarFileName = "XKBGrammarWalker.g"
+ antlr_version = version_str_to_tuple("3.1.2")
+ antlr_version_str = "3.1.2"
+ tokenNames = tokenNames
+
+ def __init__(self, input, state=None):
+ if state is None:
+ state = RecognizerSharedState()
+
+ TreeParser.__init__(self, input, state)
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ # $ANTLR start "layout"
+ # XKBGrammarWalker.g:14:1: layout : ^( LAYOUT ( symbols )+ ) ;
+ def layout(self, ):
+
+ try:
+ try:
+ # XKBGrammarWalker.g:15:3: ( ^( LAYOUT ( symbols )+ ) )
+ # XKBGrammarWalker.g:15:5: ^( LAYOUT ( symbols )+ )
+ pass
+ self.match(self.input, LAYOUT, self.FOLLOW_LAYOUT_in_layout73)
+
+ self.match(self.input, DOWN, None)
+ # XKBGrammarWalker.g:15:14: ( symbols )+
+ cnt1 = 0
+ while True: #loop1
+ alt1 = 2
+ LA1_0 = self.input.LA(1)
+
+ if (LA1_0 == SYMBOLS) :
+ alt1 = 1
+
+
+ if alt1 == 1:
+ # XKBGrammarWalker.g:15:14: symbols
+ pass
+ self._state.following.append(self.FOLLOW_symbols_in_layout75)
+ self.symbols()
+
+ self._state.following.pop()
+
+
+ else:
+ if cnt1 >= 1:
+ break #loop1
+
+ eee = EarlyExitException(1, self.input)
+ raise eee
+
+ cnt1 += 1
+
+
+
+ self.match(self.input, UP, None)
+
+
+
+
+ except RecognitionException, re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+
+ pass
+
+ return
+
+ # $ANTLR end "layout"
+
+
+ # $ANTLR start "symbols"
+ # XKBGrammarWalker.g:18:1: symbols : ^( SYMBOLS mapType ^( MAPMATERIAL ( mapMaterial )+ ) ) ;
+ def symbols(self, ):
+
+ try:
+ try:
+ # XKBGrammarWalker.g:19:3: ( ^( SYMBOLS mapType ^( MAPMATERIAL ( mapMaterial )+ ) ) )
+ # XKBGrammarWalker.g:19:5: ^( SYMBOLS mapType ^( MAPMATERIAL ( mapMaterial )+ ) )
+ pass
+ self.match(self.input, SYMBOLS, self.FOLLOW_SYMBOLS_in_symbols94)
+
+ self.match(self.input, DOWN, None)
+ self._state.following.append(self.FOLLOW_mapType_in_symbols96)
+ self.mapType()
+
+ self._state.following.pop()
+ self.match(self.input, MAPMATERIAL, self.FOLLOW_MAPMATERIAL_in_symbols99)
+
+ self.match(self.input, DOWN, None)
+ # XKBGrammarWalker.g:19:37: ( mapMaterial )+
+ cnt2 = 0
+ while True: #loop2
+ alt2 = 2
+ LA2_0 = self.input.LA(1)
+
+ if ((TOKEN_INCLUDE <= LA2_0 <= TOKEN_KEY) or LA2_0 == TOKEN_MODIFIER_MAP or LA2_0 == TOKEN_VIRTUAL_MODIFIERS) :
+ alt2 = 1
+
+
+ if alt2 == 1:
+ # XKBGrammarWalker.g:19:37: mapMaterial
+ pass
+ self._state.following.append(self.FOLLOW_mapMaterial_in_symbols101)
+ self.mapMaterial()
+
+ self._state.following.pop()
+
+
+ else:
+ if cnt2 >= 1:
+ break #loop2
+
+ eee = EarlyExitException(2, self.input)
+ raise eee
+
+ cnt2 += 1
+
+
+
+ self.match(self.input, UP, None)
+
+ self.match(self.input, UP, None)
+
+
+
+
+ except RecognitionException, re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+
+ pass
+
+ return
+
+ # $ANTLR end "symbols"
+
+
+ # $ANTLR start "mapType"
+ # XKBGrammarWalker.g:22:1: mapType : ^( MAPTYPE ^( MAPOPTIONS ( MAPOPTS )+ ) ^( MAPNAME DQSTRING ) ) ;
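+ # Illustrative note (not ANTLR output): walks a map-header subtree of the shape
+ #     ^(MAPTYPE ^(MAPOPTIONS MAPOPTS+) ^(MAPNAME DQSTRING))
+ # which corresponds to an xkb_symbols header line such as
+ #     partial alphanumeric_keys xkb_symbols "basic"
+ # (the concrete header text is an assumed example).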
+ def mapType(self, ):
+
+ try:
+ try:
+ # XKBGrammarWalker.g:23:3: ( ^( MAPTYPE ^( MAPOPTIONS ( MAPOPTS )+ ) ^( MAPNAME DQSTRING ) ) )
+ # XKBGrammarWalker.g:23:5: ^( MAPTYPE ^( MAPOPTIONS ( MAPOPTS )+ ) ^( MAPNAME DQSTRING ) )
+ pass
+ self.match(self.input, MAPTYPE, self.FOLLOW_MAPTYPE_in_mapType118)
+
+ self.match(self.input, DOWN, None)
+ self.match(self.input, MAPOPTIONS, self.FOLLOW_MAPOPTIONS_in_mapType121)
+
+ self.match(self.input, DOWN, None)
+ # XKBGrammarWalker.g:23:28: ( MAPOPTS )+
+ cnt3 = 0
+ while True: #loop3
+ alt3 = 2
+ LA3_0 = self.input.LA(1)
+
+ if (LA3_0 == MAPOPTS) :
+ alt3 = 1
+
+
+ if alt3 == 1:
+ # XKBGrammarWalker.g:23:28: MAPOPTS
+ pass
+ self.match(self.input, MAPOPTS, self.FOLLOW_MAPOPTS_in_mapType123)
+
+
+ else:
+ if cnt3 >= 1:
+ break #loop3
+
+ eee = EarlyExitException(3, self.input)
+ raise eee
+
+ cnt3 += 1
+
+
+
+ self.match(self.input, UP, None)
+ self.match(self.input, MAPNAME, self.FOLLOW_MAPNAME_in_mapType128)
+
+ self.match(self.input, DOWN, None)
+ self.match(self.input, DQSTRING, self.FOLLOW_DQSTRING_in_mapType130)
+
+ self.match(self.input, UP, None)
+
+ self.match(self.input, UP, None)
+
+
+
+
+ except RecognitionException, re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+
+ pass
+
+ return
+
+ # $ANTLR end "mapType"
+
+
+ # $ANTLR start "mapMaterial"
+ # XKBGrammarWalker.g:26:1: mapMaterial : ( ^( TOKEN_INCLUDE DQSTRING ) | ^( TOKEN_NAME DQSTRING ) | ^( TOKEN_KEY_TYPE ( NAME )? ^( VALUE DQSTRING ) ) | ^( TOKEN_KEY ( OVERRIDE )? ^( KEYCODEX NAME ) ( keyelements )+ ) | ^( TOKEN_MODIFIER_MAP STATE ( keycode )+ ) | ^( TOKEN_VIRTUAL_MODIFIERS ( NAME )+ ) );
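+ # Illustrative note (not ANTLR output): the six alternatives mirror the tree
+ # shapes produced by the parser's line_include, line_name, line_keytype,
+ # line_key, line_modifier_map and line_virtual_modifiers rewrites. Note that
+ # line_keytype (XKBGrammar.g:94) rewrites to ^(TOKEN_KEY_TYPE DQSTRING) only,
+ # so the richer ^(TOKEN_KEY_TYPE (NAME)? ^(VALUE DQSTRING)) shape expected by
+ # alternative 3 here does not appear to match what that parser rule emits.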
+ def mapMaterial(self, ):
+
+ try:
+ try:
+ # XKBGrammarWalker.g:27:3: ( ^( TOKEN_INCLUDE DQSTRING ) | ^( TOKEN_NAME DQSTRING ) | ^( TOKEN_KEY_TYPE ( NAME )? ^( VALUE DQSTRING ) ) | ^( TOKEN_KEY ( OVERRIDE )? ^( KEYCODEX NAME ) ( keyelements )+ ) | ^( TOKEN_MODIFIER_MAP STATE ( keycode )+ ) | ^( TOKEN_VIRTUAL_MODIFIERS ( NAME )+ ) )
+ alt9 = 6
+ LA9 = self.input.LA(1)
+ if LA9 == TOKEN_INCLUDE:
+ alt9 = 1
+ elif LA9 == TOKEN_NAME:
+ alt9 = 2
+ elif LA9 == TOKEN_KEY_TYPE:
+ alt9 = 3
+ elif LA9 == TOKEN_KEY:
+ alt9 = 4
+ elif LA9 == TOKEN_MODIFIER_MAP:
+ alt9 = 5
+ elif LA9 == TOKEN_VIRTUAL_MODIFIERS:
+ alt9 = 6
+ else:
+ nvae = NoViableAltException("", 9, 0, self.input)
+
+ raise nvae
+
+ if alt9 == 1:
+ # XKBGrammarWalker.g:27:5: ^( TOKEN_INCLUDE DQSTRING )
+ pass
+ self.match(self.input, TOKEN_INCLUDE, self.FOLLOW_TOKEN_INCLUDE_in_mapMaterial147)
+
+ self.match(self.input, DOWN, None)
+ self.match(self.input, DQSTRING, self.FOLLOW_DQSTRING_in_mapMaterial149)
+
+ self.match(self.input, UP, None)
+
+
+ elif alt9 == 2:
+ # XKBGrammarWalker.g:28:5: ^( TOKEN_NAME DQSTRING )
+ pass
+ self.match(self.input, TOKEN_NAME, self.FOLLOW_TOKEN_NAME_in_mapMaterial157)
+
+ self.match(self.input, DOWN, None)
+ self.match(self.input, DQSTRING, self.FOLLOW_DQSTRING_in_mapMaterial159)
+
+ self.match(self.input, UP, None)
+
+
+ elif alt9 == 3:
+ # XKBGrammarWalker.g:29:5: ^( TOKEN_KEY_TYPE ( NAME )? ^( VALUE DQSTRING ) )
+ pass
+ self.match(self.input, TOKEN_KEY_TYPE, self.FOLLOW_TOKEN_KEY_TYPE_in_mapMaterial167)
+
+ self.match(self.input, DOWN, None)
+ # XKBGrammarWalker.g:29:22: ( NAME )?
+ alt4 = 2
+ LA4_0 = self.input.LA(1)
+
+ if (LA4_0 == NAME) :
+ alt4 = 1
+ if alt4 == 1:
+ # XKBGrammarWalker.g:29:22: NAME
+ pass
+ self.match(self.input, NAME, self.FOLLOW_NAME_in_mapMaterial169)
+
+
+
+ self.match(self.input, VALUE, self.FOLLOW_VALUE_in_mapMaterial173)
+
+ self.match(self.input, DOWN, None)
+ self.match(self.input, DQSTRING, self.FOLLOW_DQSTRING_in_mapMaterial175)
+
+ self.match(self.input, UP, None)
+
+ self.match(self.input, UP, None)
+
+
+ elif alt9 == 4:
+ # XKBGrammarWalker.g:30:5: ^( TOKEN_KEY ( OVERRIDE )? ^( KEYCODEX NAME ) ( keyelements )+ )
+ pass
+ self.match(self.input, TOKEN_KEY, self.FOLLOW_TOKEN_KEY_in_mapMaterial184)
+
+ self.match(self.input, DOWN, None)
+ # XKBGrammarWalker.g:30:17: ( OVERRIDE )?
+ alt5 = 2
+ LA5_0 = self.input.LA(1)
+
+ if (LA5_0 == OVERRIDE) :
+ alt5 = 1
+ if alt5 == 1:
+ # XKBGrammarWalker.g:30:17: OVERRIDE
+ pass
+ self.match(self.input, OVERRIDE, self.FOLLOW_OVERRIDE_in_mapMaterial186)
+
+
+
+ self.match(self.input, KEYCODEX, self.FOLLOW_KEYCODEX_in_mapMaterial190)
+
+ self.match(self.input, DOWN, None)
+ self.match(self.input, NAME, self.FOLLOW_NAME_in_mapMaterial192)
+
+ self.match(self.input, UP, None)
+ # XKBGrammarWalker.g:30:44: ( keyelements )+
+ cnt6 = 0
+ while True: #loop6
+ alt6 = 2
+ LA6_0 = self.input.LA(1)
+
+ if ((ELEM_KEYSYMGROUP <= LA6_0 <= ELEM_VIRTUALMODS) or LA6_0 == ELEM_OVERLAY) :
+ alt6 = 1
+
+
+ if alt6 == 1:
+ # XKBGrammarWalker.g:30:44: keyelements
+ pass
+ self._state.following.append(self.FOLLOW_keyelements_in_mapMaterial195)
+ self.keyelements()
+
+ self._state.following.pop()
+
+
+ else:
+ if cnt6 >= 1:
+ break #loop6
+
+ eee = EarlyExitException(6, self.input)
+ raise eee
+
+ cnt6 += 1
+
+
+
+ self.match(self.input, UP, None)
+
+
+ elif alt9 == 5:
+ # XKBGrammarWalker.g:31:5: ^( TOKEN_MODIFIER_MAP STATE ( keycode )+ )
+ pass
+ self.match(self.input, TOKEN_MODIFIER_MAP, self.FOLLOW_TOKEN_MODIFIER_MAP_in_mapMaterial204)
+
+ self.match(self.input, DOWN, None)
+ self.match(self.input, STATE, self.FOLLOW_STATE_in_mapMaterial206)
+ # XKBGrammarWalker.g:31:32: ( keycode )+
+ cnt7 = 0
+ while True: #loop7
+ alt7 = 2
+ LA7_0 = self.input.LA(1)
+
+ if ((KEYCODE <= LA7_0 <= KEYCODEX)) :
+ alt7 = 1
+
+
+ if alt7 == 1:
+ # XKBGrammarWalker.g:31:32: keycode
+ pass
+ self._state.following.append(self.FOLLOW_keycode_in_mapMaterial208)
+ self.keycode()
+
+ self._state.following.pop()
+
+
+ else:
+ if cnt7 >= 1:
+ break #loop7
+
+ eee = EarlyExitException(7, self.input)
+ raise eee
+
+ cnt7 += 1
+
+
+
+ self.match(self.input, UP, None)
+
+
+ elif alt9 == 6:
+ # XKBGrammarWalker.g:32:5: ^( TOKEN_VIRTUAL_MODIFIERS ( NAME )+ )
+ pass
+ self.match(self.input, TOKEN_VIRTUAL_MODIFIERS, self.FOLLOW_TOKEN_VIRTUAL_MODIFIERS_in_mapMaterial217)
+
+ self.match(self.input, DOWN, None)
+ # XKBGrammarWalker.g:32:31: ( NAME )+
+ cnt8 = 0
+ while True: #loop8
+ alt8 = 2
+ LA8_0 = self.input.LA(1)
+
+ if (LA8_0 == NAME) :
+ alt8 = 1
+
+
+ if alt8 == 1:
+ # XKBGrammarWalker.g:32:31: NAME
+ pass
+ self.match(self.input, NAME, self.FOLLOW_NAME_in_mapMaterial219)
+
+
+ else:
+ if cnt8 >= 1:
+ break #loop8
+
+ eee = EarlyExitException(8, self.input)
+ raise eee
+
+ cnt8 += 1
+
+
+
+ self.match(self.input, UP, None)
+
+
+
+ except RecognitionException, re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+
+ pass
+
+ return
+
+ # $ANTLR end "mapMaterial"
+
+
+ # $ANTLR start "keycode"
+ # XKBGrammarWalker.g:35:1: keycode : ( ^( KEYCODE NAME ) | ^( KEYCODEX NAME ) );
+ def keycode(self, ):
+
+ try:
+ try:
+ # XKBGrammarWalker.g:36:3: ( ^( KEYCODE NAME ) | ^( KEYCODEX NAME ) )
+ alt10 = 2
+ LA10_0 = self.input.LA(1)
+
+ if (LA10_0 == KEYCODE) :
+ alt10 = 1
+ elif (LA10_0 == KEYCODEX) :
+ alt10 = 2
+ else:
+ nvae = NoViableAltException("", 10, 0, self.input)
+
+ raise nvae
+
+ if alt10 == 1:
+ # XKBGrammarWalker.g:36:5: ^( KEYCODE NAME )
+ pass
+ self.match(self.input, KEYCODE, self.FOLLOW_KEYCODE_in_keycode236)
+
+ self.match(self.input, DOWN, None)
+ self.match(self.input, NAME, self.FOLLOW_NAME_in_keycode238)
+
+ self.match(self.input, UP, None)
+
+
+ elif alt10 == 2:
+ # XKBGrammarWalker.g:37:5: ^( KEYCODEX NAME )
+ pass
+ self.match(self.input, KEYCODEX, self.FOLLOW_KEYCODEX_in_keycode246)
+
+ self.match(self.input, DOWN, None)
+ self.match(self.input, NAME, self.FOLLOW_NAME_in_keycode248)
+
+ self.match(self.input, UP, None)
+
+
+
+ except RecognitionException, re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+
+ pass
+
+ return
+
+ # $ANTLR end "keycode"
+
+
+ # $ANTLR start "keyelements"
+ # XKBGrammarWalker.g:40:1: keyelements : ( ^( ELEM_KEYSYMS DQSTRING ) | ^( ELEM_KEYSYMGROUP ^( VALUE ( NAME )+ ) ) | ^( ELEM_VIRTUALMODS NAME ) | ^( ELEM_OVERLAY NAME keycode ) );
+ def keyelements(self, ):
+
+ try:
+ try:
+ # XKBGrammarWalker.g:41:3: ( ^( ELEM_KEYSYMS DQSTRING ) | ^( ELEM_KEYSYMGROUP ^( VALUE ( NAME )+ ) ) | ^( ELEM_VIRTUALMODS NAME ) | ^( ELEM_OVERLAY NAME keycode ) )
+ alt12 = 4
+ LA12 = self.input.LA(1)
+ if LA12 == ELEM_KEYSYMS:
+ alt12 = 1
+ elif LA12 == ELEM_KEYSYMGROUP:
+ alt12 = 2
+ elif LA12 == ELEM_VIRTUALMODS:
+ alt12 = 3
+ elif LA12 == ELEM_OVERLAY:
+ alt12 = 4
+ else:
+ nvae = NoViableAltException("", 12, 0, self.input)
+
+ raise nvae
+
+ if alt12 == 1:
+ # XKBGrammarWalker.g:41:5: ^( ELEM_KEYSYMS DQSTRING )
+ pass
+ self.match(self.input, ELEM_KEYSYMS, self.FOLLOW_ELEM_KEYSYMS_in_keyelements263)
+
+ self.match(self.input, DOWN, None)
+ self.match(self.input, DQSTRING, self.FOLLOW_DQSTRING_in_keyelements265)
+
+ self.match(self.input, UP, None)
+
+
+ elif alt12 == 2:
+ # XKBGrammarWalker.g:42:5: ^( ELEM_KEYSYMGROUP ^( VALUE ( NAME )+ ) )
+ pass
+ self.match(self.input, ELEM_KEYSYMGROUP, self.FOLLOW_ELEM_KEYSYMGROUP_in_keyelements273)
+
+ self.match(self.input, DOWN, None)
+ self.match(self.input, VALUE, self.FOLLOW_VALUE_in_keyelements276)
+
+ self.match(self.input, DOWN, None)
+ # XKBGrammarWalker.g:42:32: ( NAME )+
+ cnt11 = 0
+ while True: #loop11
+ alt11 = 2
+ LA11_0 = self.input.LA(1)
+
+ if (LA11_0 == NAME) :
+ alt11 = 1
+
+
+ if alt11 == 1:
+ # XKBGrammarWalker.g:42:32: NAME
+ pass
+ self.match(self.input, NAME, self.FOLLOW_NAME_in_keyelements278)
+
+
+ else:
+ if cnt11 >= 1:
+ break #loop11
+
+ eee = EarlyExitException(11, self.input)
+ raise eee
+
+ cnt11 += 1
+
+
+
+ self.match(self.input, UP, None)
+
+ self.match(self.input, UP, None)
+
+
+ elif alt12 == 3:
+ # XKBGrammarWalker.g:43:5: ^( ELEM_VIRTUALMODS NAME )
+ pass
+ self.match(self.input, ELEM_VIRTUALMODS, self.FOLLOW_ELEM_VIRTUALMODS_in_keyelements288)
+
+ self.match(self.input, DOWN, None)
+ self.match(self.input, NAME, self.FOLLOW_NAME_in_keyelements290)
+
+ self.match(self.input, UP, None)
+
+
+ elif alt12 == 4:
+ # XKBGrammarWalker.g:44:5: ^( ELEM_OVERLAY NAME keycode )
+ pass
+ self.match(self.input, ELEM_OVERLAY, self.FOLLOW_ELEM_OVERLAY_in_keyelements298)
+
+ self.match(self.input, DOWN, None)
+ self.match(self.input, NAME, self.FOLLOW_NAME_in_keyelements300)
+ self._state.following.append(self.FOLLOW_keycode_in_keyelements302)
+ self.keycode()
+
+ self._state.following.pop()
+
+ self.match(self.input, UP, None)
+
+
+
+ except RecognitionException, re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+
+ pass
+
+ return
+
+ # $ANTLR end "keyelements"
+
+
+ # $ANTLR start "mapopts"
+ # XKBGrammarWalker.g:47:1: mapopts : ( 'default' | 'hidden' | 'partial' | 'alphanumeric_keys' | 'keypad_keys' | 'function_keys' | 'modifier_keys' | 'alternate_group' | 'xkb_symbols' );
+ def mapopts(self, ):
+
+ try:
+ try:
+ # XKBGrammarWalker.g:48:9: ( 'default' | 'hidden' | 'partial' | 'alphanumeric_keys' | 'keypad_keys' | 'function_keys' | 'modifier_keys' | 'alternate_group' | 'xkb_symbols' )
+ # XKBGrammarWalker.g:
+ pass
+ if (53 <= self.input.LA(1) <= 61):
+ self.input.consume()
+ self._state.errorRecovery = False
+
+ else:
+ mse = MismatchedSetException(None, self.input)
+ raise mse
+
+
+
+
+
+
+ except RecognitionException, re:
+ self.reportError(re)
+ self.recover(self.input, re)
+ finally:
+
+ pass
+
+ return
+
+ # $ANTLR end "mapopts"
+
+
+ # Delegated rules
+
+
+
+
+ FOLLOW_LAYOUT_in_layout73 = frozenset([2])
+ FOLLOW_symbols_in_layout75 = frozenset([3, 13])
+ FOLLOW_SYMBOLS_in_symbols94 = frozenset([2])
+ FOLLOW_mapType_in_symbols96 = frozenset([17])
+ FOLLOW_MAPMATERIAL_in_symbols99 = frozenset([2])
+ FOLLOW_mapMaterial_in_symbols101 = frozenset([3, 4, 5, 6, 7, 9, 11])
+ FOLLOW_MAPTYPE_in_mapType118 = frozenset([2])
+ FOLLOW_MAPOPTIONS_in_mapType121 = frozenset([2])
+ FOLLOW_MAPOPTS_in_mapType123 = frozenset([3, 28])
+ FOLLOW_MAPNAME_in_mapType128 = frozenset([2])
+ FOLLOW_DQSTRING_in_mapType130 = frozenset([3])
+ FOLLOW_TOKEN_INCLUDE_in_mapMaterial147 = frozenset([2])
+ FOLLOW_DQSTRING_in_mapMaterial149 = frozenset([3])
+ FOLLOW_TOKEN_NAME_in_mapMaterial157 = frozenset([2])
+ FOLLOW_DQSTRING_in_mapMaterial159 = frozenset([3])
+ FOLLOW_TOKEN_KEY_TYPE_in_mapMaterial167 = frozenset([2])
+ FOLLOW_NAME_in_mapMaterial169 = frozenset([20])
+ FOLLOW_VALUE_in_mapMaterial173 = frozenset([2])
+ FOLLOW_DQSTRING_in_mapMaterial175 = frozenset([3])
+ FOLLOW_TOKEN_KEY_in_mapMaterial184 = frozenset([2])
+ FOLLOW_OVERRIDE_in_mapMaterial186 = frozenset([19])
+ FOLLOW_KEYCODEX_in_mapMaterial190 = frozenset([2])
+ FOLLOW_NAME_in_mapMaterial192 = frozenset([3])
+ FOLLOW_keyelements_in_mapMaterial195 = frozenset([3, 22, 23, 24, 52])
+ FOLLOW_TOKEN_MODIFIER_MAP_in_mapMaterial204 = frozenset([2])
+ FOLLOW_STATE_in_mapMaterial206 = frozenset([18, 19])
+ FOLLOW_keycode_in_mapMaterial208 = frozenset([3, 18, 19])
+ FOLLOW_TOKEN_VIRTUAL_MODIFIERS_in_mapMaterial217 = frozenset([2])
+ FOLLOW_NAME_in_mapMaterial219 = frozenset([3, 30])
+ FOLLOW_KEYCODE_in_keycode236 = frozenset([2])
+ FOLLOW_NAME_in_keycode238 = frozenset([3])
+ FOLLOW_KEYCODEX_in_keycode246 = frozenset([2])
+ FOLLOW_NAME_in_keycode248 = frozenset([3])
+ FOLLOW_ELEM_KEYSYMS_in_keyelements263 = frozenset([2])
+ FOLLOW_DQSTRING_in_keyelements265 = frozenset([3])
+ FOLLOW_ELEM_KEYSYMGROUP_in_keyelements273 = frozenset([2])
+ FOLLOW_VALUE_in_keyelements276 = frozenset([2])
+ FOLLOW_NAME_in_keyelements278 = frozenset([3, 30])
+ FOLLOW_ELEM_VIRTUALMODS_in_keyelements288 = frozenset([2])
+ FOLLOW_NAME_in_keyelements290 = frozenset([3])
+ FOLLOW_ELEM_OVERLAY_in_keyelements298 = frozenset([2])
+ FOLLOW_NAME_in_keyelements300 = frozenset([18, 19])
+ FOLLOW_keycode_in_keyelements302 = frozenset([3])
+ FOLLOW_set_in_mapopts0 = frozenset([1])
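+    # Note (hand-written, not ANTLR output): the frozensets above are the
+    # FOLLOW bitsets the ANTLR runtime consults during error recovery; each
+    # one lists the token types that may legally follow the corresponding
+    # match point inside its rule.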
+
+
+
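+# --- Usage sketch (hand-written, not ANTLR output) ---------------------------
+# A minimal illustration of how a tree walker like this one is typically
+# driven with the ANTLR 3 Python runtime.  The lexer/parser module and class
+# names (XKBGrammarLexer, XKBGrammarParser) and the 'layout' entry rule are
+# assumptions inferred from the FOLLOW sets above; adjust them to the real
+# generated artifacts before relying on this.
+def walk_xkb_file(path):
+    import antlr3
+    import antlr3.tree
+    from XKBGrammarLexer import XKBGrammarLexer    # assumed module/class name
+    from XKBGrammarParser import XKBGrammarParser  # assumed module/class name
+
+    # Lex and parse the symbols file, letting the parser build an AST.
+    char_stream = antlr3.ANTLRFileStream(path)
+    tokens = antlr3.CommonTokenStream(XKBGrammarLexer(char_stream))
+    result = XKBGrammarParser(tokens).layout()     # assumed entry rule
+
+    # Wrap the AST in a node stream and run this walker over it.
+    nodes = antlr3.tree.CommonTreeNodeStream(result.tree)
+    nodes.setTokenStream(tokens)
+    XKBGrammarWalker(nodes).layout()
+
+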
+def main(argv, stdin=sys.stdin, stdout=sys.stdout, stderr=sys.stderr):
+ from antlr3.main import WalkerMain
+ main = WalkerMain(XKBGrammarWalker)
+ main.stdin = stdin
+ main.stdout = stdout
+ main.stderr = stderr
+ main.execute(argv)
+
+
+if __name__ == '__main__':
+ main(sys.argv)
diff --git a/XKBGrammarWalker.tokens b/XKBGrammarWalker.tokens
@@ -0,0 +1,86 @@
+ELEM_KEYSYMGROUP=22
+T__50=50
+TOKEN_NAME=6
+VALUE=20
+KEYELEMENTS=25
+OVERLAY=27
+TOKEN_KEY_TYPE=5
+T__59=59
+KEYCODEX=19
+T__55=55
+T__56=56
+T__57=57
+KEYCODE=18
+T__58=58
+T__51=51
+T__53=53
+T__54=54
+T__60=60
+T__61=61
+MAPMATERIAL=17
+NAME=30
+LINE_COMMENT=33
+TOKEN_SYMBOL=10
+TOKEN_INCLUDE=4
+ELEM_VIRTUALMODS=24
+TOKEN_KEY=7
+LAYOUT=12
+STATE=21
+DQSTRING=29
+COMMENT=32
+MAPTYPE=14
+T__37=37
+T__38=38
+T__39=39
+T__34=34
+TOKEN_TYPE=8
+T__35=35
+T__36=36
+SYMBOLS=13
+WS=31
+TOKEN_VIRTUAL_MODIFIERS=11
+MAPOPTIONS=16
+MAPOPTS=28
+ELEM_KEYSYMS=23
+TOKEN_MODIFIER_MAP=9
+MAPNAME=15
+OVERRIDE=26
+T__48=48
+T__49=49
+ELEM_OVERLAY=52
+T__44=44
+T__45=45
+T__46=46
+T__47=47
+T__40=40
+T__41=41
+T__42=42
+T__43=43
+'hidden'=54
+'virtualMods'=51
+'alphanumeric_keys'=56
+'modifier_keys'=59
+'type'=49
+'>'=45
+']'=40
+'<'=44
+'['=39
+'modifier_map'=47
+'key'=43
+'key.type'=42
+','=46
+'partial'=55
+'xkb_symbols'=61
+'keypad_keys'=57
+'include'=37
+'override'=26
+'virtual_modifiers'=48
+'symbols'=50
+'}'=35
+'='=41
+'{'=34
+';'=36
+'alternate_group'=60
+'name'=38
+'default'=53
+'function_keys'=58
diff --git a/antlr-3.1.2.jar b/antlr-3.1.2.jar
Binary files differ.