# Parses each file, computing cyclomatic complexity and per-line token
# counts, and writes an HTML report per file into output_dir.
#
# files                - Enumerable of file paths to analyze.
# state_formater       - formatter for the cyclomatic complexity report,
#                        or nil/false to skip that report.
# token_count_formater - formatter for the token-count report, or
#                        nil/false to skip that report.
# output_dir           - directory under which report files are written.
#
# Returns [idx_states, idx_tokens]: each is an array of
# [report_filename, warnings, errors] triples, one per generated report.
# Errors during parsing of a single file are logged to STDOUT and the
# remaining files are still processed.
def Saikuro.analyze(files, state_formater, token_count_formater, output_dir)
idx_states = Array.new
idx_tokens = Array.new
files.each do |file|
input = nil
begin
STDOUT.puts "Parsing #{file}"
top = ParseState.make_top_state
STDOUT.puts "TOP State made" if $VERBOSE
token_counter = TokenCounter.new
ParseState.set_token_counter(token_counter)
token_counter.set_current_file(file)
STDOUT.puts "Setting up Lexer" if $VERBOSE
lexer = RubyLex.new
lexer.exception_on_syntax_error = false
# Keep a reference to the handle so it can be closed in the ensure
# below (the original leaked one File handle per analyzed file).
input = File.new(file,"r")
lexer.set_input(input)
top.lexer = lexer
STDOUT.puts "Parsing" if $VERBOSE
top.parse
fdir_path = seperate_file_from_path(file)
FileUtils.makedirs("#{output_dir}/#{fdir_path}")
if state_formater
# Render the cyclomatic complexity report into memory, then write
# it out in one shot.
state_io = StringIO.new
state_formater.start(state_io)
top.compute_state(state_formater)
state_formater.end
fname = "#{file}_cyclo.html"
puts "writing cyclomatic #{file}" if $VERBOSE
File.open("#{output_dir}/#{fname}","w") do |f|
f.write state_io.string
end
idx_states << [
fname,
state_formater.warnings.dup,
state_formater.errors.dup,
]
end
if token_count_formater
# Same pattern for the per-line token-count report.
token_io = StringIO.new
token_count_formater.start(token_io)
token_counter.list_tokens_per_line(token_count_formater)
token_count_formater.end
fname = "#{file}_token.html"
puts "writing token #{file}" if $VERBOSE
File.open("#{output_dir}/#{fname}","w") do |f|
f.write token_io.string
end
idx_tokens << [
fname,
token_count_formater.warnings.dup,
token_count_formater.errors.dup,
]
end
rescue RubyLex::SyntaxError => synerr
STDOUT.puts "Lexer error for file #{file} on line #{lexer.line_no}"
STDOUT.puts "#{synerr.class.name} : #{synerr.message}"
rescue StandardError => err
STDOUT.puts "Error while parsing file : #{file}"
STDOUT.puts err.class,err.message,err.backtrace.join("\n")
rescue Exception => ex
# NOTE(review): rescuing Exception is normally discouraged (it also
# traps SignalException/SystemExit); kept here deliberately so one
# bad file cannot abort a whole batch run — confirm this is intended.
STDOUT.puts "Error while parsing file : #{file}"
STDOUT.puts ex.class,ex.message,ex.backtrace.join("\n")
ensure
# Fix for the original resource leak: close the source file handle
# whether parsing succeeded or raised.
input.close if input && !input.closed?
end
end
[idx_states, idx_tokens]
end