I have a program that I use as a pentesting tool; it searches for web sites that are vulnerable to SQL injection. While it was crawling I ran into a Timeout::Error. I already try to rescue that error, but there are a few other errors that need to be rescued in the same place as well. So my question is: is there a limit to how many errors can be rescued in a single rescue block? And if not, why is this Timeout::Error not being rescued?
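
For the first part: there is no such limit. A single rescue clause can list any number of exception classes, and several rescue clauses can be chained. A minimal sketch (the raise simply simulates one of the listed errors):

require 'timeout'

begin
  # simulate one of the listed errors; in real code this would be the risky call
  raise Timeout::Error, "execution expired"
rescue Timeout::Error, SocketError, Errno::ECONNREFUSED => e
  # one clause, several classes -- whichever one matches is handled here
  warn "handled #{e.class}: #{e.message}"
rescue StandardError => e
  # further clauses can be chained for anything else
  warn "anything else ends up here: #{e.message}"
end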

Source:

def get_urls
  info("Searching for possible SQL vulnerable sites.")
  @agent = Mechanize.new
  page = @agent.get('http://www.google.com/')
  google_form = page.form('f')
  google_form.q = "#{SEARCH}"
  url = @agent.submit(google_form, google_form.buttons.first)
  url.links.each do |link|
    if link.href.to_s =~ /url.q/
      str = link.href.to_s
      str_list = str.split(%r{=|&})
      urls = str_list[1]
      next if str_list[1].split('/')[2] == "webcache.googleusercontent.com"
      urls_to_log = urls.gsub("%3F", '?').gsub("%3D", '=')
      success("Site found: #{urls_to_log}")
      File.open("#{PATH}/temp/SQL_sites_to_check.txt", "a+") { |s| s.puts("#{urls_to_log}'") }
    end
  end
  info("Possible vulnerable sites dumped into #{PATH}/temp/SQL_sites.txt")
end

def check_if_vulnerable
  info("Checking if sites are vulnerable.")
  IO.read("#{PATH}/temp/SQL_sites_to_check.txt").each_line do |parse|
    Timeout::timeout(5) do
      begin
        @parsing = Nokogiri::HTML(RestClient.get("#{parse.chomp}"))
      rescue Timeout::Error, RestClient::ResourceNotFound, RestClient::SSLCertificateNotVerified
        if RestClient::ResourceNotFound
          warn("URL: #{parse.chomp} returned 404 error, URL dumped into 404 bin")
          File.open("#{PATH}/lib/404_bin.txt", "a+") { |s| s.puts(parse) }
        elsif RestClient::SSLCertificateNotVerified
          err("URL: #{parse.chomp} requires SSL cert, url dumped into SSL bin")
          File.open("#{PATH}/lib/SSL_bin.txt", "a+") { |s| s.puts(parse) }
        elsif Timeout::Error
          warn("URL: #{parse.chomp} failed to load resulting in time out after 10 seconds. URL dumped into TIMEOUT bin")
          File.open("#{PATH}/lib/TIMEOUT_bin.txt", "a+") { |s| s.puts(parse) }
        end
      end
    end
  end
end

Error:

C:/Ruby22/lib/ruby/2.2.0/net/http.rb:892:in `new': execution expired (Timeout::Error)
     from C:/Ruby22/lib/ruby/2.2.0/net/http.rb:892:in `connect'
     from C:/Ruby22/lib/ruby/2.2.0/net/http.rb:863:in `do_start'
     from C:/Ruby22/lib/ruby/2.2.0/net/http.rb:852:in `start'
     from C:/Ruby22/lib/ruby/gems/2.2.0/gems/rest-client-1.8.0-x86-mingw32/lib/restclient/request.rb:413:in `transmit'
     from C:/Ruby22/lib/ruby/gems/2.2.0/gems/rest-client-1.8.0-x86-mingw32/lib/restclient/request.rb:176:in `execute'
     from C:/Ruby22/lib/ruby/gems/2.2.0/gems/rest-client-1.8.0-x86-mingw32/lib/restclient/request.rb:41:in `execute'
     from C:/Ruby22/lib/ruby/gems/2.2.0/gems/rest-client-1.8.0-x86-mingw32/lib/restclient.rb:65:in `get'
     from whitewidow.rb:94:in `block (2 levels) in check_if_vulnerable'
     from C:/Ruby22/lib/ruby/2.2.0/timeout.rb:88:in `block in timeout'
     from C:/Ruby22/lib/ruby/2.2.0/timeout.rb:32:in `block in catch'
     from C:/Ruby22/lib/ruby/2.2.0/timeout.rb:32:in `catch'
     from C:/Ruby22/lib/ruby/2.2.0/timeout.rb:32:in `catch'
     from C:/Ruby22/lib/ruby/2.2.0/timeout.rb:103:in `timeout'
     from whitewidow.rb:92:in `block in check_if_vulnerable'
     from whitewidow.rb:91:in `each_line'
     from whitewidow.rb:91:in `check_if_vulnerable'
     from whitewidow.rb:113:in `<main>'

As you can see in the check_if_vulnerable method, I do rescue Timeout::Error. So why does it still time out instead of moving on to the next URL? I tried adding a next inside the rescue, but that does not work either. Please help.
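
The behaviour can be isolated from Mechanize and RestClient entirely. A minimal sketch, with sleep standing in for the hanging HTTP request; on the Ruby 2.2 build from the trace above the expectation is that the inner message is never printed and the script aborts with execution expired, just like the full program:

require 'timeout'

Timeout.timeout(1) do
  begin
    sleep 5                                  # stands in for the hanging RestClient.get
  rescue Timeout::Error
    puts "rescued inside the timed block"    # not reached in the failing case
  end
end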

Answer


Simply move the Timeout::timeout call inside the begin block:

def check_if_vulnerable
  info("Checking if sites are vulnerable.")
  IO.read("#{PATH}/temp/SQL_sites_to_check.txt").each_line do |parse|
    begin
      Timeout::timeout(5) do
        @parsing = Nokogiri::HTML(RestClient.get("#{parse.chomp}"))
      end
    rescue Timeout::Error, RestClient::ResourceNotFound, RestClient::SSLCertificateNotVerified
      if RestClient::ResourceNotFound
        warn("URL: #{parse.chomp} returned 404 error, URL dumped into 404 bin")
        File.open("#{PATH}/lib/404_bin.txt", "a+") { |s| s.puts(parse) }
      elsif RestClient::SSLCertificateNotVerified
        err("URL: #{parse.chomp} requires SSL cert, url dumped into SSL bin")
        File.open("#{PATH}/lib/SSL_bin.txt", "a+") { |s| s.puts(parse) }
      elsif Timeout::Error
        warn("URL: #{parse.chomp} failed to load resulting in time out after 10 seconds. URL dumped into TIMEOUT bin")
        File.open("#{PATH}/lib/TIMEOUT_bin.txt", "a+") { |s| s.puts(parse) }
      end
    end
  end
end
This will rescue the Timeout::Error, because the rescue clause now sits outside the timed block, which is where the error actually surfaces.
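
Why this works: look at the backtrace again. The error only shows up as a rescuable Timeout::Error at the edge of the Timeout::timeout(5) call itself (the timeout.rb:103 timeout frame); on this Ruby version the interrupt that cuts the block short is not catchable as Timeout::Error from within the block, so a rescue placed inside it never runs. With begin/rescue wrapped around the Timeout::timeout call, the rescue sits exactly where the Timeout::Error is raised.

One more thing worth noting: "if RestClient::ResourceNotFound" tests the class constant itself, which is always truthy, so the 404 branch runs no matter which exception was rescued. A sketch of a version with separate rescue clauses instead; it reuses PATH and the info/warn/err helpers from the original code, so those are assumed to be in scope:

def check_if_vulnerable
  info("Checking if sites are vulnerable.")
  IO.read("#{PATH}/temp/SQL_sites_to_check.txt").each_line do |parse|
    url = parse.chomp
    begin
      Timeout.timeout(5) do
        @parsing = Nokogiri::HTML(RestClient.get(url))
      end
    rescue RestClient::ResourceNotFound
      warn("URL: #{url} returned 404 error, URL dumped into 404 bin")
      File.open("#{PATH}/lib/404_bin.txt", "a+") { |s| s.puts(parse) }
    rescue RestClient::SSLCertificateNotVerified
      err("URL: #{url} requires SSL cert, url dumped into SSL bin")
      File.open("#{PATH}/lib/SSL_bin.txt", "a+") { |s| s.puts(parse) }
    rescue Timeout::Error
      warn("URL: #{url} timed out after 5 seconds, URL dumped into TIMEOUT bin")
      File.open("#{PATH}/lib/TIMEOUT_bin.txt", "a+") { |s| s.puts(parse) }
    end
  end
end

Each exception class now gets its own branch and bin, and after any rescued error the each_line loop simply carries on with the next URL, so no explicit next is needed.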