require 'msf/core'
require 'openssl'
require 'rinda/tuplespace'
require 'pathname'
require 'uri'
class MetasploitModule < Msf::Auxiliary
include Msf::Auxiliary::Scanner
include Msf::Auxiliary::Report
# Set up module metadata and register the basic / advanced options
# that drive the crawl (start path, port, limits, timeouts, threads).
def initialize(info = {})
super(update_info(info,
'Name' => 'Metasploit Web Crawler',
# Fixed typo: "conjuntion" -> "conjunction".
'Description' => 'This auxiliary module is a modular web crawler, to be used in conjunction with wmap (someday) or standalone.',
'Author' => 'et',
'License' => MSF_LICENSE
))
register_options([
OptString.new('PATH', [true, "Starting crawling path", '/']),
OptInt.new('RPORT', [true, "Remote port", 80 ])
], self.class)
register_advanced_options([
OptPath.new('CrawlerModulesDir', [true, 'The base directory containing the crawler modules',
File.join(Msf::Config.data_directory, "msfcrawler")
]),
OptBool.new('EnableUl', [ false, "Enable maximum number of request per URI", true ]),
OptBool.new('StoreDB', [ false, "Store requests in database", false ]),
OptInt.new('MaxUriLimit', [ true, "Number max. request per URI", 10]),
OptInt.new('SleepTime', [ true, "Sleep time (secs) between requests", 0]),
OptInt.new('TakeTimeout', [ true, "Timeout for loop ending", 15]),
OptInt.new('ReadTimeout', [ true, "Read timeout (-1 forever)", 3]),
OptInt.new('ThreadNum', [ true, "Threads number", 20]),
# Fixed typo: "Filestypes" -> "Filetypes".
OptString.new('DontCrawl', [true, "Filetypes not to crawl", '.exe,.zip,.tar,.bz2,.run,.asc,.gz'])
], self.class)
end
# Crawl target host, port, and SSL flag; populated from the datastore in #run.
attr_accessor :ctarget, :cport, :cssl
# Entry point: seeds the request queue with the starting path, loads the
# crawler modules, then pulls requests off a Rinda::TupleSpace until the
# take times out (queue drained), dispatching each new URI over HTTP.
def run
self.ctarget = datastore['RHOSTS']
self.cport = datastore['RPORT']
self.cssl = datastore['SSL']
inipath = datastore['PATH']
cinipath = (inipath.nil? || inipath.empty?) ? '/' : inipath

# Initial request tuple used to seed the not-yet-visited queue.
inireq = {
'rhost' => ctarget,
'rport' => cport,
'uri' => cinipath,
'method' => 'GET',
'ctype' => 'text/plain',
'ssl' => cssl,
'query' => nil,
'data' => nil
}

@NotViewedQueue = Rinda::TupleSpace.new
@ViewedQueue = Hash.new
@UriLimits = Hash.new
# BUG FIX: was "@curent_site" (typo), which left @current_site nil when
# storedb built its report_web_page info hash.
@current_site = self.ctarget

insertnewpath(inireq)

print_status("Loading modules: #{datastore['CrawlerModulesDir']}")
load_modules(datastore['CrawlerModulesDir'])
print_status("OK")

if datastore['EnableUl']
print_status("URI LIMITS ENABLED: #{datastore['MaxUriLimit']} (Maximum number of requests per uri)")
end

print_status("Target: #{self.ctarget} Port: #{self.cport} Path: #{cinipath} SSL: #{self.cssl}")

begin
# nil fields in the template act as wildcards for TupleSpace#take.
reqfilter = reqtemplate(self.ctarget, self.cport, self.cssl)

loop do
# Blocks until a request tuple is available or TakeTimeout expires
# (the timeout raises Rinda::RequestExpiredError, ending the crawl).
hashreq = @NotViewedQueue.take(reqfilter, datastore['TakeTimeout'])

# Enforce the per-URI request cap when EnableUl is set.
ul = false
if @UriLimits.include?(hashreq['uri']) && datastore['EnableUl']
if @UriLimits[hashreq['uri']] >= datastore['MaxUriLimit']
ul = true
end
else
@UriLimits[hashreq['uri']] = 0
end

if !@ViewedQueue.include?(hashsig(hashreq)) && !ul
@ViewedQueue[hashsig(hashreq)] = Time.now
@UriLimits[hashreq['uri']] += 1

# Skip file extensions listed in DontCrawl (.exe, .zip, ...).
if !File.extname(hashreq['uri']).empty? && datastore['DontCrawl'].include?(File.extname(hashreq['uri']))
vprint_status "URI not crawled #{hashreq['uri']}"
else
prx = nil
c = Rex::Proto::Http::Client.new(
self.ctarget,
self.cport.to_i,
{},
self.cssl,
nil,
prx
)
sendreq(c, hashreq)
end
else
vprint_line "#{hashreq['uri']} already visited. "
end
end
rescue Rinda::RequestExpiredError
# The take timed out: the queue has drained, so the crawl is complete.
print_status("END.")
return
end

# NOTE(review): unreachable — the loop above only exits via the rescue,
# which returns. Kept for parity with the original control flow.
print_status("Finished crawling")
end
# Build a Rinda tuple template scoped to the given host/port/ssl combo.
# Fields left as nil act as wildcards when passed to TupleSpace#take,
# so the template matches any queued request for this target.
def reqtemplate(target, port, ssl)
{
'rhost' => target,
'rport' => port,
'uri' => nil,
'method' => nil,
'ctype' => nil,
'ssl' => ssl,
'query' => nil,
'data' => nil
}
end
# Persist one crawled page to the Metasploit database via the Report
# mixin's report_web_page.
#
# hashreq  - request hash ('uri', 'query', 'data' keys are used)
# response - response hash-like ('code', 'body', 'headers' keys are used)
# dbpath   - unused here; kept for interface compatibility
def storedb(hashreq, response, dbpath)
page = {
web_site: @current_site,
path: hashreq['uri'],
query: hashreq['query'],
data: hashreq['data'],
code: response['code'],
body: response['body'],
headers: response['headers']
}
report_web_page(page)
end