First steps towards making the lexer stream tokens

Yorick Peterse 2014-04-09 19:32:06 +02:00
parent 10d0ec1573
commit e9bb97d261
1 changed file with 28 additions and 9 deletions

@@ -80,6 +80,8 @@ module Oga
       @tokens = []
       @stack = []
       @top = 0
+      @cs = self.class.lexer_start
+      @act = 0
       @elements = []

       @buffer_start_position = nil
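
The two new instance variables take over part of the job of Ragel's `%% write init;` directive: `cs` holds the machine's current state and `act` is used by Ragel's scanner mode to track the most recently matched pattern, and both are normally re-initialised on every `write init`. Moving them onto the instance means the machine's state survives between calls, which is the prerequisite for feeding input in chunks. As a hypothetical illustration (plain Ruby, not Oga code or generated Ragel output), a hand-rolled machine with the same property looks like this:

    # Hypothetical sketch: like the patched lexer, this machine keeps its
    # current state in an instance variable so input can arrive in chunks.
    class TinyMachine
      def initialize
        @state = :outside # analogous to @cs = self.class.lexer_start
      end

      # Processes one chunk of input, resuming from whatever state the
      # previous call left behind.
      def feed(chunk)
        chunk.each_char do |char|
          case @state
          when :outside
            @state = :inside if char == '<'
          when :inside
            @state = :outside if char == '>'
          end
        end

        @state
      end
    end

    machine = TinyMachine.new
    machine.feed('<p') # => :inside, the tag is still open
    machine.feed('>')  # => :outside, state carried over from the first call
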
@@ -93,24 +95,41 @@ module Oga
     #
     # The type is a symbol, the value is either nil or a String.
     #
-    # @param [String] data The string to lex.
+    # This method resets the internal state of the lexer after consuming the
+    # input.
+    #
+    # @param [String] data The string to consume.
     # @return [Array]
+    # @see #advance
     #
     def lex(data)
-      @data = data.unpack('U*')
-      lexer_start = self.class.lexer_start
-      eof = data.length
-
-      %% write init;
-      %% write exec;
-
-      tokens = @tokens
+      tokens = advance(data)

       reset

       return tokens
     end

+    ##
+    # Advances through the input and generates the corresponding tokens.
+    #
+    # This method does *not* reset the internal state of the lexer.
+    #
+    # @param [String] data The String to consume.
+    # @return [Array]
+    #
+    def advance(data)
+      @data = data.unpack('U*')
+      eof = data.length
+      p = 0
+      pe = eof
+
+      %% write exec; # % fix highlight
+
+      return @tokens
+    end
+
     ##
     # @return [TrueClass|FalseClass]
     #
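
After this change the two methods split responsibilities: `lex` remains the one-shot entry point that consumes a string and then resets the lexer, while `advance` runs the Ragel machine without resetting, accumulating results in `@tokens`. A hedged usage sketch of that split, assuming the class is `Oga::Lexer` (the diff only shows `module Oga`), that the constructor takes no required arguments at this point, and that `reset` is callable from outside:

    lexer = Oga::Lexer.new

    # One-shot: lexes the whole string and resets internal state afterwards.
    tokens = lexer.lex('<p>Hello</p>')

    # Streaming direction: advance keeps @cs/@act between calls, so the
    # machine resumes from its previous state against each new chunk; the
    # caller resets explicitly when done.
    lexer.advance('<p>Hello')
    tokens = lexer.advance('</p>') # returns the accumulated @tokens
    lexer.reset
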