First steps towards making the lexer stream tokens

Yorick Peterse 2014-04-09 19:32:06 +02:00
parent 10d0ec1573
commit e9bb97d261
1 changed file with 28 additions and 9 deletions

@@ -80,6 +80,8 @@ module Oga
       @tokens = []
       @stack  = []
       @top    = 0
+      @cs     = self.class.lexer_start
+      @act    = 0
       @elements = []
 
       @buffer_start_position = nil
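
The first hunk makes the Ragel machine state persistent: @cs (the current machine state) and @act (the scanner's last matched action) become instance variables set up in reset, rather than locals recreated by "%% write init;" on every lex call. As a rough sketch (the exact generated code varies between Ragel releases), "write init" for a Ruby scanner expands to something like the following, which is also why the old lex assigned lexer_start = self.class.lexer_start beforehand:

    # Approximate expansion of `%% write init;` for a Ragel scanner
    # targeting Ruby; the real output differs per Ragel version.
    p   = 0             # input position
    pe  = data.length   # end-of-input marker
    cs  = lexer_start   # current machine state
    ts  = nil           # token start (scanner machines only)
    te  = nil           # token end (scanner machines only)
    act = 0             # last matched scanner action

Keeping @cs on the instance means a later run of the machine can resume from whatever state it stopped in, instead of always starting from lexer_start.
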
@@ -93,24 +95,41 @@ module Oga
     #
     # The type is a symbol, the value is either nil or a String.
     #
-    # @param [String] data The string to lex.
+    # This method resets the internal state of the lexer after consuming the
+    # input.
+    #
+    # @param [String] data The string to consume.
     # @return [Array]
+    # @see #advance
     #
     def lex(data)
-      @data = data.unpack('U*')
-      lexer_start = self.class.lexer_start
-      eof = data.length
-
-      %% write init;
-      %% write exec;
-
-      tokens = @tokens
+      tokens = advance(data)
 
       reset
 
       return tokens
     end
 
+    ##
+    # Advances through the input and generates the corresponding tokens.
+    #
+    # This method does *not* reset the internal state of the lexer.
+    #
+    # @param [String] data The String to consume.
+    # @return [Array]
+    #
+    def advance(data)
+      @data = data.unpack('U*')
+      eof = data.length
+
+      p = 0
+      pe = eof
+
+      %% write exec; # % fix highlight
+
+      return @tokens
+    end
+
     ##
     # @return [TrueClass|FalseClass]
     #
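
After this change, lex is a thin wrapper: it delegates to advance and resets afterwards, while advance leaves @cs, @act, and the accumulated @tokens intact between calls. A hypothetical usage sketch (the Oga::Lexer class name, its constructor, and the public visibility of reset are assumptions, not confirmed by this diff):

    lexer = Oga::Lexer.new          # assumed class name

    # One-shot API: consumes the input, returns the tokens, resets state.
    tokens = lexer.lex('<p>Hello</p>')

    # Streaming direction: advance keeps machine state across calls, so
    # input could eventually be fed in chunks. This commit is only a
    # first step: each call still rewinds p to 0 for its own chunk.
    lexer.advance('<p>Hello')
    tokens = lexer.advance('</p>')  # @tokens accumulate across calls

    lexer.reset                     # the caller resets when done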