Parent

Class Index [+]

Quicksearch

RKelly::Tokenizer

Constants

KEYWORDS
(Not documented)
RESERVED
(Not documented)
LITERALS
(Not documented)

Public Class Methods

new(&block) click to toggle source

(Not documented)

# File lib/rkelly/tokenizer.rb, line 48
    # Builds the ordered lexeme table used by the tokenizer.  Registration
    # order matters: when two lexemes match runs of equal length, the one
    # registered first wins (see #raw_tokens).
    #
    # +block+ is forwarded to each Lexeme as an optional post-processor
    # for the matched (type, value) pair.
    def initialize(&block)
      @lexemes = []

      token(:COMMENT, /\A\/(?:\*(?:.)*?\*\/|\/[^\n]*)/m)
      token(:STRING, /\A"(?:[^"\\]*(?:\\.[^"\\]*)*)"|\A'(?:[^'\\]*(?:\\.[^'\\]*)*)'/m)

      # A regexp to match floating point literals (but not integer literals).
      token(:NUMBER, /\A\d+\.\d*(?:[eE][-+]?\d+)?|\A\d+(?:\.\d*)?[eE][-+]?\d+|\A\.\d+(?:[eE][-+]?\d+)?/m) do |type, value|
        # Normalize forms Float() cannot parse directly: "1.e5" -> "1.0e5",
        # "1." -> "1.0", ".5" -> "0.5".
        value.gsub!(/\.(\D)/, '.0\1') if value =~ /\.\w/
        value.gsub!(/\.$/, '.0') if value =~ /\.$/
        value.gsub!(/^\./, '0.') if value =~ /^\./
        # Kernel#Float yields the same result as eval for every string this
        # regexp can produce, without executing tokenizer input as Ruby code.
        [type, Float(value)]
      end
      token(:NUMBER, /\A0[xX][\da-fA-F]+|\A0[0-7]*|\A\d+/) do |type, value|
        # Kernel#Integer honors the same 0x (hex) and leading-zero (octal)
        # prefixes eval did, but cannot execute arbitrary code.
        [type, Integer(value)]
      end

      token(:LITERALS,
        Regexp.new(LITERALS.keys.sort_by { |x|
          x.length
        }.reverse.map { |x| "\\A#{x.gsub(/([|+*^])/, '\\\\\1')}" }.join('|')
      )) do |type, value|
        [LITERALS[value], value]
      end

      token(:IDENT, /\A(\w|\$)+/) do |type,value|
        if KEYWORDS.include?(value)
          # Keywords become their own token type, e.g. "var" -> :VAR.
          [value.upcase.to_sym, value]
        elsif RESERVED.include?(value)
          [:RESERVED, value]
        else
          [type, value]
        end
      end

      token(:REGEXP, /\A\/(?:[^\/\r\n\\]*(?:\\[^\r\n][^\/\r\n\\]*)*)\/[gi]*/)
      token(:S, /\A[\s\r\n]*/m)

      # Fallback: any single character becomes its own token.
      token(:SINGLE_CHAR, /\A./) do |type, value|
        [value, value]
      end
    end

Public Instance Methods

raw_tokens(string) click to toggle source

(Not documented)

# File lib/rkelly/tokenizer.rb, line 95
    # Scans +string+ from the front, repeatedly taking the longest match
    # among all registered lexemes (ties go to the lexeme registered
    # first), and returns the resulting array of token objects with their
    # line numbers filled in.
    def raw_tokens(string)
      tokens = []
      line_number = 1
      until string.empty?
        best = nil
        @lexemes.each do |lexeme|
          candidate = lexeme.match(string)
          next unless candidate
          # Strictly longer matches replace the current best; equal-length
          # matches do not, preserving registration-order precedence.
          best = candidate if best.nil? || candidate.value.length > best.value.length
        end

        best.line = line_number
        line_number += best.value.count("\n")
        string = string[best.value.length..-1]
        tokens << best
      end
      tokens
    end
tokenize(string) click to toggle source

(Not documented)

# File lib/rkelly/tokenizer.rb, line 91
    # Tokenizes +string+ and converts each token to the form the racc
    # parser expects (via #to_racc_token).
    def tokenize(string)
      raw_tokens(string).map(&:to_racc_token)
    end

Private Instance Methods

token(name, pattern = nil, &block) click to toggle source

(Not documented)

# File lib/rkelly/tokenizer.rb, line 118
    # Registers a lexeme: +name+ is the token type, +pattern+ the regexp
    # it matches, and +block+ an optional post-processor for the matched
    # value.  Registration order is the tie-breaking order used by
    # #raw_tokens.
    def token(name, pattern = nil, &block)
      @lexemes.push(Lexeme.new(name, pattern, &block))
    end

Disabled; run with --debug to generate this.

[Validate]

Generated with the Darkfish Rdoc Generator 1.1.6.