Class: YARD::Parser::Ruby::Legacy::TokenList

Relationships & Source Files
Super Chains via Extension / Inclusion / Inheritance
Class Chain:
  self, ::Array
Instance Chain:
  self, RubyToken, ::Array
Inherits: Array
  • Object
Defined in: lib/yard/parser/ruby/legacy/token_list.rb

Constant Summary

RubyToken - Included

EXPR_ARG, EXPR_BEG, EXPR_CLASS, EXPR_DOT, EXPR_END, EXPR_FNAME, EXPR_MID, NEWLINE_TOKEN, TkReading2Token, TkSymbol2Token, TokenDefinitions

Class Method Summary

  • .new(content = nil) ⇒ TokenList

Instance Method Summary

  • #<<(*tokens) (alias for #push)
  • #push(*tokens) (also: #<<)
  • #squeeze(type = TkSPACE)
  • #to_s(full_statement = false, show_block = true)
  • #convert_token(lex, tk) (private)
  • #parse_content(content) (private)

RubyToken - Included

::Array - Inherited

#place

Places values before or after another object (by value) in an array.
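
A hedged sketch of #place (Array#place is a YARD core extension; the insertion object it returns responds to before and after):

[1, 2, 4].place(3).before(4)  # => [1, 2, 3, 4]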

Constructor Details

.new(content = nil) ⇒ TokenList

Creates a new TokenList, optionally seeding it by pushing content: a Token, TokenList, Array, or String of code to tokenize.

# File 'lib/yard/parser/ruby/legacy/token_list.rb', line 7

def initialize(content = nil)
  self << content if content
end
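
A minimal usage sketch (assumes YARD is installed and its legacy parser support is autoloaded; the token class shown is indicative only):

require 'yard'
include YARD::Parser::Ruby::Legacy

list = TokenList.new("def foo; end")  # the String is tokenized with RubyLex
list.first                            # e.g. a RubyToken::TkDEF token
list.to_s                             # should round-trip to "def foo; end"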

Instance Method Details

#<<(*tokens)

Alias for #push.

# File 'lib/yard/parser/ruby/legacy/token_list.rb', line 35

alias << push

#convert_token(lex, tk) (private)

Merges a label or symbol-start token with the token that follows it, so constructs such as key: and :sym arrive as a single TkLABEL or TkSYMBOL token; any other token is returned unchanged.

# File 'lib/yard/parser/ruby/legacy/token_list.rb', line 53

def convert_token(lex, tk)
  if TkIDENTIFIER === tk && lex.peek == ':'
    next_tk = lex.token
    sym = TkLABEL.new(tk.line_no, tk.char_no, nil)
    sym.lex_state = lex.lex_state
    sym.set_text(tk.text + next_tk.text)
  elsif TkSYMBEG === tk
    next_tk = lex.token
    if next_tk
      sym = TkSYMBOL.new(tk.line_no, tk.char_no, nil)
      sym.lex_state = lex.lex_state
      sym.set_text(tk.text + next_tk.text)
    else
      tk
    end
  else
    tk
  end
end
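
A hedged illustration of the effect (exact token classes depend on the legacy lexer's output):

list = YARD::Parser::Ruby::Legacy::TokenList.new(":foo")
list.map {|tk| [tk.class, tk.text] }
# e.g. => [[YARD::Parser::Ruby::Legacy::RubyToken::TkSYMBOL, ":foo"]]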

#parse_content(content) (private)

Tokenizes a string of Ruby code with RubyLex and appends each converted token to the list.

# File 'lib/yard/parser/ruby/legacy/token_list.rb', line 44

def parse_content(content)
  lex = RubyLex.new(content)
  loop do
    tk = lex.token
    break if tk.nil?
    self << convert_token(lex, tk)
  end
end

#push(*tokens) Also known as: #<<

Parameters:

  • tokens (TokenList, Token, String)

    A list of tokens. If an entry is a String, it is parsed with RubyLex.

# File 'lib/yard/parser/ruby/legacy/token_list.rb', line 21

def push(*tokens)
  tokens.each do |tok|
    if tok.is_a?(TokenList) || tok.is_a?(Array)
      concat tok
    elsif tok.is_a?(Token)
      super tok
    elsif tok.is_a?(String)
      parse_content(tok)
    else
      raise ArgumentError, "Expecting token, list of tokens or string of code to be tokenized. Got #{tok.class}"
    end
  end
  self
end
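
A hedged sketch of the three accepted argument forms (results are indicative only):

include YARD::Parser::Ruby::Legacy

list = TokenList.new
list.push("x = 1")                   # String: tokenized via parse_content / RubyLex
list << TokenList.new(" + y")        # TokenList or Array: concatenated element-wise
list.push(RubyToken::NEWLINE_TOKEN)  # single Token: appended as-is
list.to_s                            # should read "x = 1 + y\n"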

#squeeze(type = TkSPACE)

Returns a new TokenList with runs of consecutive tokens of the given type (TkSPACE by default) collapsed, keeping only the first token of each run.

# File 'lib/yard/parser/ruby/legacy/token_list.rb', line 37

def squeeze(type = TkSPACE)
  last = nil
  TokenList.new(map {|t| x = t.is_a?(type) && last.is_a?(type) ? nil : t; last = t; x })
end

#to_s(full_statement = false, show_block = true)

Joins the text of each token back into a single string. Building stops at the first TkStatementEnd token unless full_statement is true, and TkBlockContents tokens are rendered as empty strings when show_block is false.

# File 'lib/yard/parser/ruby/legacy/token_list.rb', line 11

def to_s(full_statement = false, show_block = true)
  inject([]) do |acc, token|
    break acc if !full_statement && TkStatementEnd === token
    acc << (!show_block && TkBlockContents === token ? "" : token.text)
  end.join
end