Module: YARD::Parser::Ruby::Legacy::RubyToken

Overview

Legacy lexical tokenizer module (part of the ::YARD::Parser::Ruby::Legacy namespace).
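The Tk* classes defined in this module are what the legacy lexer emits for each piece of source text. As a minimal, hedged sketch (assuming the legacy TokenList/RubyLex classes are available and autoloaded in your YARD version), the tokens for a small snippet can be inspected like this:

require 'yard'

# Tokenize a snippet with the legacy TokenList (it drives RubyLex internally);
# each element is an instance of a RubyToken::Token subclass such as TkDEF.
tokens = YARD::Parser::Ruby::Legacy::TokenList.new("def foo; 42; end")
tokens.each do |tk|
  puts "#{tk.class.name.split('::').last}  #{tk.text.inspect}"
end
# Prints (roughly): TkDEF "def", TkSPACE " ", TkIDENTIFIER "foo", TkSEMICOLON ";", ...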
Constant Summary

- EXPR_ARG =
  # File 'lib/yard/parser/ruby/legacy/ruby_lex.rb', line 10
  :EXPR_ARG

- EXPR_BEG =
  # File 'lib/yard/parser/ruby/legacy/ruby_lex.rb', line 7
  :EXPR_BEG

- EXPR_CLASS =
  # File 'lib/yard/parser/ruby/legacy/ruby_lex.rb', line 13
  :EXPR_CLASS

- EXPR_DOT =
  # File 'lib/yard/parser/ruby/legacy/ruby_lex.rb', line 12
  :EXPR_DOT

- EXPR_END =
  # File 'lib/yard/parser/ruby/legacy/ruby_lex.rb', line 9
  :EXPR_END

- EXPR_FNAME =
  # File 'lib/yard/parser/ruby/legacy/ruby_lex.rb', line 11
  :EXPR_FNAME

- EXPR_MID =
  # File 'lib/yard/parser/ruby/legacy/ruby_lex.rb', line 8
  :EXPR_MID

- NEWLINE_TOKEN =
  # File 'lib/yard/parser/ruby/legacy/ruby_lex.rb', line 308
  TkNL.new(0, 0)

- TkReading2Token =
  { reading => token_class }
  { reading => [token_class, *opt] }
  {}

- TkSymbol2Token =
  # File 'lib/yard/parser/ruby/legacy/ruby_lex.rb', line 272
  {}
- TokenDefinitions =
  Internal use only
  # File 'lib/yard/parser/ruby/legacy/ruby_lex.rb', line 147
  [
    [:TkCLASS, TkKW, "class", EXPR_CLASS], [:TkMODULE, TkKW, "module", EXPR_BEG],
    [:TkDEF, TkKW, "def", EXPR_FNAME], [:TkUNDEF, TkKW, "undef", EXPR_FNAME],
    [:TkBEGIN, TkKW, "begin", EXPR_BEG], [:TkRESCUE, TkKW, "rescue", EXPR_MID],
    [:TkENSURE, TkKW, "ensure", EXPR_BEG], [:TkEND, TkKW, "end", EXPR_END],
    [:TkIF, TkKW, "if", EXPR_BEG, :TkIF_MOD], [:TkUNLESS, TkKW, "unless", EXPR_BEG, :TkUNLESS_MOD],
    [:TkTHEN, TkKW, "then", EXPR_BEG], [:TkELSIF, TkKW, "elsif", EXPR_BEG],
    [:TkELSE, TkKW, "else", EXPR_BEG], [:TkCASE, TkKW, "case", EXPR_BEG],
    [:TkWHEN, TkKW, "when", EXPR_BEG], [:TkWHILE, TkKW, "while", EXPR_BEG, :TkWHILE_MOD],
    [:TkUNTIL, TkKW, "until", EXPR_BEG, :TkUNTIL_MOD], [:TkFOR, TkKW, "for", EXPR_BEG],
    [:TkBREAK, TkKW, "break", EXPR_END], [:TkNEXT, TkKW, "next", EXPR_END],
    [:TkREDO, TkKW, "redo", EXPR_END], [:TkRETRY, TkKW, "retry", EXPR_END],
    [:TkIN, TkKW, "in", EXPR_BEG], [:TkDO, TkKW, "do", EXPR_BEG],
    [:TkRETURN, TkKW, "return", EXPR_MID], [:TkYIELD, TkKW, "yield", EXPR_END],
    [:TkSUPER, TkKW, "super", EXPR_END], [:TkSELF, TkKW, "self", EXPR_END],
    [:TkNIL, TkKW, "nil", EXPR_END], [:TkTRUE, TkKW, "true", EXPR_END],
    [:TkFALSE, TkKW, "false", EXPR_END], [:TkAND, TkKW, "and", EXPR_BEG],
    [:TkOR, TkKW, "or", EXPR_BEG], [:TkNOT, TkKW, "not", EXPR_BEG],
    [:TkIF_MOD, TkKW], [:TkUNLESS_MOD, TkKW], [:TkWHILE_MOD, TkKW], [:TkUNTIL_MOD, TkKW],
    [:TkALIAS, TkKW, "alias", EXPR_FNAME], [:TkDEFINED, TkKW, "defined?", EXPR_END],
    [:TklBEGIN, TkKW, "BEGIN", EXPR_END], [:TklEND, TkKW, "END", EXPR_END],
    [:Tk__LINE__, TkKW, "__LINE__", EXPR_END], [:Tk__FILE__, TkKW, "__FILE__", EXPR_END],

    [:TkIDENTIFIER, TkId], [:TkFID, TkId], [:TkGVAR, TkId], [:TkIVAR, TkId], [:TkCONSTANT, TkId],

    [:TkINTEGER, TkVal], [:TkFLOAT, TkVal], [:TkSYMBOL, TkVal], [:TkLABEL, TkVal],
    [:TkSTRING, TkVal], [:TkXSTRING, TkVal], [:TkREGEXP, TkVal], [:TkCOMMENT, TkVal],

    [:TkDSTRING, TkNode], [:TkDXSTRING, TkNode], [:TkDREGEXP, TkNode],
    [:TkNTH_REF, TkId], [:TkBACK_REF, TkId],

    [:TkUPLUS, TkOp, "+@"], [:TkUMINUS, TkOp, "-@"], [:TkPOW, TkOp, "**"],
    [:TkCMP, TkOp, "<=>"], [:TkEQ, TkOp, "=="], [:TkEQQ, TkOp, "==="],
    [:TkNEQ, TkOp, "!="], [:TkGEQ, TkOp, ">="], [:TkLEQ, TkOp, "<="],
    [:TkANDOP, TkOp, "&&"], [:TkOROP, TkOp, "||"],
    [:TkMATCH, TkOp, "=~"], [:TkNMATCH, TkOp, "!~"],
    [:TkDOT2, TkOp, ".."], [:TkDOT3, TkOp, "..."],
    [:TkAREF, TkOp, "[]"], [:TkASET, TkOp, "[]="],
    [:TkLSHFT, TkOp, "<<"], [:TkRSHFT, TkOp, ">>"],
    [:TkCOLON2, TkOp], [:TkCOLON3, TkOp],
    [:OPASGN, TkOp],           # +=, -= etc. #
    [:TkASSOC, TkOp, "=>"],
    [:TkQUESTION, TkOp, "?"],  #?
    [:TkCOLON, TkOp, ":"],     #:

    [:TkSTAR],   # *arg
    [:TkAMPER],  # &arg #

    [:TkSYMBEG, TkId],
    [:TkGT, TkOp, ">"], [:TkLT, TkOp, "<"],
    [:TkPLUS, TkOp, "+"], [:TkMINUS, TkOp, "-"],
    [:TkMULT, TkOp, "*"], [:TkDIV, TkOp, "/"], [:TkMOD, TkOp, "%"],
    [:TkBITOR, TkOp, "|"], [:TkBITXOR, TkOp, "^"], [:TkBITAND, TkOp, "&"],
    [:TkBITNOT, TkOp, "~"], [:TkNOTOP, TkOp, "!"],
    [:TkBACKQUOTE, TkOp, "`"],

    [:TkASSIGN, Token, "="], [:TkDOT, Token, "."],
    [:TkLPAREN, Token, "("],  # (exp)
    [:TkLBRACK, Token, "["],  # [arry]
    [:TkLBRACE, Token, "{"],  # {hash}
    [:TkRPAREN, Token, ")"], [:TkRBRACK, Token, "]"], [:TkRBRACE, Token, "}"],
    [:TkCOMMA, Token, ","], [:TkSEMICOLON, Token, ";"],

    [:TkSPACE, TkWhitespace], [:TkNL, TkWhitespace], [:TkEND_OF_SCRIPT, TkWhitespace],

    [:TkBACKSLASH, TkUnknownChar, "\\"], [:TkAT, TkUnknownChar, "@"],
    [:TkDOLLAR, TkUnknownChar, "\$"]
  ]
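Each row of this table is splatted into .def_token; ruby_lex.rb applies the table at load time (roughly TokenDefinitions.each { |defn| def_token(*defn) }), which generates the Tk* token classes and fills the two lookup hashes. A hedged sketch of inspecting the result, assuming the legacy parser is autoloaded by require 'yard':

require 'yard'

rt = YARD::Parser::Ruby::Legacy::RubyToken

rt::TkCLASS.ancestors.include?(rt::TkKW)  #=> true  (the "class" keyword token)
rt::TkPLUS.op_name                        #=> "+"   (TkOp subclasses gain .op_name)
rt::TkReading2Token["if"]                 #=> [TkIF, :TkIF_MOD]
rt::TkSymbol2Token[:TkDEF]                #=> TkDEF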
Class Method Summary

- .def_token(token_n, super_token = Token, reading = nil, *opts)  Internal use only

Instance Method Summary

- #set_token_position(line, char)  Internal use only
- #Token(token, value = nil)  Internal use only
Class Method Details
.def_token(token_n, super_token = Token, reading = nil, *opts)
This method is for internal use only.
# File 'lib/yard/parser/ruby/legacy/ruby_lex.rb', line 275
def self.def_token(token_n, super_token = Token, reading = nil, *opts)
  token_n = token_n.id2name unless token_n.is_a?(String)
  if RubyToken.const_defined?(token_n)
    # IRB.fail AlreadyDefinedToken, token_n
  end
  token_c = Class.new super_token
  RubyToken.const_set token_n, token_c
  # token_c.inspect
  if reading
    if TkReading2Token[reading]
      raise "duplicate #{token_n} #{reading}"
    end
    if opts.empty?
      TkReading2Token[reading] = [token_c]
    else
      TkReading2Token[reading] = [token_c].concat(opts)
    end
  end
  TkSymbol2Token[token_n.intern] = token_c
  if token_c <= TkOp
    token_c.class_eval %{
      def self.op_name; "#{reading}"; end
    }
  end
end
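As a hedged illustration of this registration flow (the token name and operator below are invented for the example and are not part of YARD), defining a new operator token populates the same constant, lookup hashes, and .op_name helper as the TokenDefinitions rows do:

require 'yard'

rt = YARD::Parser::Ruby::Legacy::RubyToken

# Hypothetical operator token "<~>" that leaves the lexer in EXPR_BEG state.
rt.def_token(:TkSPACESHIP_DEMO, rt::TkOp, "<~>", rt::EXPR_BEG)

rt::TkSPACESHIP_DEMO.op_name           #=> "<~>"  (defined because the superclass is TkOp)
rt::TkReading2Token["<~>"]             #=> [TkSPACESHIP_DEMO, :EXPR_BEG]
rt::TkSymbol2Token[:TkSPACESHIP_DEMO]  #=> TkSPACESHIP_DEMO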
Instance Method Details
#set_token_position(line, char)
This method is for internal use only.
# File 'lib/yard/parser/ruby/legacy/ruby_lex.rb', line 119
def set_token_position(line, char)
  @prev_line_no = line
  @prev_char_no = char
end
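RubyToken is designed as a mixin (in YARD itself, RubyLex includes it). A hedged sketch with a made-up host class, showing that the recorded position is stamped onto tokens built afterwards via Token():

require 'yard'

class PositionDemo # hypothetical host class, for illustration only
  include YARD::Parser::Ruby::Legacy::RubyToken
end

lexer = PositionDemo.new
lexer.set_token_position(3, 14)  # remember line 3, character 14

tk = lexer.Token("def")          # see #Token below
tk.line_no                       #=> 3
tk.char_no                       #=> 14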
#Token(token, value = nil)
This method is for internal use only.
# File 'lib/yard/parser/ruby/legacy/ruby_lex.rb', line 125
def Token(token, value = nil) # rubocop:disable Style/MethodName
  tk = nil
  case token
  when String, Symbol
    source = token.is_a?(String) ? TkReading2Token : TkSymbol2Token
    if (tk = source[token]).nil?
      raise "no key #{token}"
    end
    tk = Token(tk[0], value)
  else
    if token
      tk = if (token.ancestors & [TkId, TkVal, TkOPASGN, TkUnknownChar]).empty?
             token.new(@prev_line_no, @prev_char_no)
           else
             token.new(@prev_line_no, @prev_char_no, value)
           end
    end
  end
  tk
end
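A hedged sketch using the same made-up host class pattern as above: Token() accepts a reading string (resolved through TkReading2Token) or a token class directly, and the resulting instance carries the last recorded position; TkVal/TkId subclasses additionally receive the value argument:

require 'yard'

rt = YARD::Parser::Ruby::Legacy::RubyToken

class TokenDemo # hypothetical includer, for illustration only
  include YARD::Parser::Ruby::Legacy::RubyToken
end

factory = TokenDemo.new
factory.set_token_position(1, 0)

factory.Token("class").class        #=> TkCLASS (looked up in TkReading2Token)
factory.Token("+").class            #=> TkPLUS
factory.Token(rt::TkINTEGER, "42")  # a TkVal subclass; the "42" value is passed through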