@@ -29,16 +29,14 @@ def processDoctype(self, token):
         self.outputTokens.append([u"DOCTYPE", token["name"], token["publicId"], token["systemId"], token["correct"]])
 
     def processStartTag(self, token):
-        self.outputTokens.append([u"StartTag", token["name"], token["data"]])
+        self.outputTokens.append([u"StartTag", token["name"], dict(token["data"][::-1])])
 
     def processEmptyTag(self, token):
         if token["name"] not in constants.voidElements:
            self.outputTokens.append(u"ParseError")
-        self.outputTokens.append([u"StartTag", token["name"], token["data"]])
+        self.outputTokens.append([u"StartTag", token["name"], dict(token["data"][::-1])])
 
     def processEndTag(self, token):
-        if token["data"]:
-            self.processParseError(None)
         self.outputTokens.append([u"EndTag", token["name"]])
 
     def processComment(self, token):
@@ -55,7 +53,7 @@ def processEOF(self, token):
         pass
 
     def processParseError(self, token):
-        self.outputTokens.append(u"ParseError")
+        self.outputTokens.append([u"ParseError", token["data"]])
 
 def concatenateCharacterTokens(tokens):
     outputTokens = []
@@ -73,9 +71,10 @@ def concatenateCharacterTokens(tokens):
 def normalizeTokens(tokens):
     """ convert array of attributes to a dictionary """
     # TODO: convert tests to reflect arrays
-    for token in tokens:
-        if token[0] == 'StartTag':
-            token[2] = dict(token[2][::-1])
+    for i, token in enumerate(tokens):
+        if token[0] == u'ParseError':
+            tokens[i] = token[0]
+            #token[2] = dict(token[2][::-1])
     return tokens
 
 def tokensMatch(expectedTokens, recievedTokens):
@@ -102,14 +101,14 @@ def runTokenizerTest(self, test):
         test['lastStartTag'] = None
     parser = TokenizerTestParser(test['contentModelFlag'],
                                  test['lastStartTag'])
-
-    tokens = normalizeTokens(parser.parse(test['input']))
+    tokens = parser.parse(test['input'])
     tokens = concatenateCharacterTokens(tokens)
     errorMsg = "\n".join(["\n\nContent Model Flag:",
                          test['contentModelFlag'],
                          "\nInput:", str(test['input']),
                          "\nExpected:", str(output),
                          "\nRecieved:", str(tokens)])
+    tokens = normalizeTokens(tokens)
     self.assertEquals(tokensMatch(tokens, output), True, errorMsg)
 
 def buildTestSuite():
0 commit comments