Diffstat (limited to 'lib/puppet')
-rw-r--r--  lib/puppet/application.rb  |  2
-rw-r--r--  lib/puppet/parser/lexer.rb | 10
2 files changed, 12 insertions, 0 deletions
diff --git a/lib/puppet/application.rb b/lib/puppet/application.rb
index df46f6e8c..38fea336d 100644
--- a/lib/puppet/application.rb
+++ b/lib/puppet/application.rb
@@ -382,10 +382,12 @@ class Application
     if Puppet.features.usage?
       # RH:FIXME: My goodness, this is ugly.
       ::RDoc.const_set("PuppetSourceFile", name)
+      #:stopdoc: # Issue #4161
       def (::RDoc).caller
         docfile = `grep -l 'Puppet::Application\\[:#{::RDoc::PuppetSourceFile}\\]' #{BINDIRS}`.chomp
         super << "#{docfile}:0"
       end
+      #:startdoc:
       ::RDoc::usage && exit
     else
       puts "No help available unless you have RDoc::usage installed"
diff --git a/lib/puppet/parser/lexer.rb b/lib/puppet/parser/lexer.rb
index 092323e62..3ac16b56a 100644
--- a/lib/puppet/parser/lexer.rb
+++ b/lib/puppet/parser/lexer.rb
@@ -161,9 +161,11 @@ class Puppet::Parser::Lexer
   TOKENS.add_token :NUMBER, %r{\b(?:0[xX][0-9A-Fa-f]+|0?\d+(?:\.\d+)?(?:[eE]-?\d+)?)\b} do |lexer, value|
     [TOKENS[:NAME], value]
   end
+  #:stopdoc: # Issue #4161
   def (TOKENS[:NUMBER]).acceptable?(context={})
     ![:DQPRE,:DQMID].include? context[:after]
   end
+  #:startdoc:
 
   TOKENS.add_token :NAME, %r{[a-z0-9][-\w]*} do |lexer, value|
     string_token = self
@@ -178,9 +180,11 @@ class Puppet::Parser::Lexer
     [string_token, value]
   end
   [:NAME,:CLASSNAME,:CLASSREF].each { |name_token|
+    #:stopdoc: # Issue #4161
     def (TOKENS[name_token]).acceptable?(context={})
       ![:DQPRE,:DQMID].include? context[:after]
     end
+    #:startdoc:
   }
 
   TOKENS.add_token :COMMENT, %r{#.*}, :accumulate => true, :skip => true do |lexer,value|
@@ -205,9 +209,11 @@ class Puppet::Parser::Lexer
     [self, Regexp.new(regex)]
   end
 
+  #:stopdoc: # Issue #4161
   def (TOKENS[:REGEX]).acceptable?(context={})
     [:NODE,:LBRACE,:RBRACE,:MATCH,:NOMATCH,:COMMA].include? context[:after]
   end
+  #:startdoc:
 
   TOKENS.add_token :RETURN, "\n", :skip => true, :incr_line => true, :skip_text => true
 
@@ -225,18 +231,22 @@ class Puppet::Parser::Lexer
   TOKENS.add_token :DQCONT, /\}/ do |lexer, value|
     lexer.tokenize_interpolated_string(DQ_continuation_token_types)
   end
+  #:stopdoc: # Issue #4161
   def (TOKENS[:DQCONT]).acceptable?(context={})
     context[:string_interpolation_depth] > 0
   end
+  #:startdoc:
 
   TOKENS.add_token :DOLLAR_VAR, %r{\$(\w*::)*\w+} do |lexer, value|
     [TOKENS[:VARIABLE],value[1..-1]]
   end
 
   TOKENS.add_token :VARIABLE, %r{(\w*::)*\w+}
+  #:stopdoc: # Issue #4161
   def (TOKENS[:VARIABLE]).acceptable?(context={})
     [:DQPRE,:DQMID].include? context[:after]
   end
+  #:startdoc:
 
   TOKENS.sort_tokens
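
Note: every change above is the same pattern. Singleton methods defined on an expression receiver (def (TOKENS[:NUMBER]).acceptable?, def (::RDoc).caller, and so on) are wrapped in RDoc's standard #:stopdoc: / #:startdoc: directive pair, evidently because that construct tripped up RDoc's source parser (issue #4161); documentation collection resumes right after each block. A minimal sketch of the directive pattern, using a made-up TokenRegistry class rather than Puppet's own code:

    # registry.rb -- illustrative only; the class and token are hypothetical,
    # not part of the patch.
    class TokenRegistry
      TOKENS = {}

      def self.add_token(name)
        TOKENS[name] = Object.new
      end

      add_token :NUMBER

      #:stopdoc:
      # RDoc ignores everything between :stopdoc: and :startdoc:, so this
      # singleton definition on an expression receiver never reaches its parser.
      def (TOKENS[:NUMBER]).acceptable?(context = {})
        ![:DQPRE, :DQMID].include?(context[:after])
      end
      #:startdoc:
    end

The directives only affect documentation generation; the methods are still defined and callable at runtime exactly as before.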