# Normalize a tokenizer token in place before handing it to the tree phases.
#
# - StartTag/EmptyTag: collapse the attribute pair list into a Hash,
#   keeping the FIRST occurrence of any duplicated attribute name.
#   An EmptyTag is additionally processed as a StartTag right away and
#   then rewritten into the matching EndTag, so the caller emits both.
# - Characters: while in the :CDATA content model, un-escape the entities
#   the tokenizer escaped (&lt; &gt; &amp;).
# - EndTag: attributes on an end tag are reported as a parse error.
# - Comment: a <![CDATA[...]]> section mis-lexed as a comment is
#   converted back into a Characters token.
#
# Returns the (mutated) token hash.
def normalize_token(token)
  case token[:type]
  when :StartTag, :EmptyTag
    # [["x","y"], ["x","z"]] => {"x"=>"y"}: reversing first makes the
    # earliest duplicate win, since Hash[] keeps the last pair it sees.
    token[:data] = Hash[*token[:data].reverse.flatten]
    if token[:type] == :EmptyTag
      # Process the start-tag half now, preserving the tokenizer's
      # content model across the call, then morph this token into
      # the corresponding end tag (with no attributes).
      save = @tokenizer.content_model_flag
      @phase.processStartTag(token[:name], token[:data])
      @tokenizer.content_model_flag = save
      token[:data] = {}
      token[:type] = :EndTag
    end
  when :Characters
    if @tokenizer.content_model_flag == :CDATA
      # BUG FIX: these replacements were self-referential no-ops
      # (e.g. gsub('<','<')) — the entity names had been decoded away.
      # Restore un-escaping of the escaped rcdata characters.
      token[:data] = token[:data].
        gsub('&lt;', '<').gsub('&gt;', '>').gsub('&amp;', '&')
    end
  when :EndTag
    if token[:data]
      parse_error("attributes-in-end-tag")
    end
  when :Comment
    # Rescue CDATA sections that the tokenizer reported as comments.
    if token[:data][0..6] == "[CDATA[" && token[:data][-2..-1] == "]]"
      token[:type] = :Characters
      token[:data] = token[:data][7...-2]
    end
  end
  return token
end