
added dns_cache_scraper and useful wordlists

zeknox 2013-12-12 20:18:18 -06:00
parent 7ab1369515
commit 554cd41403
3 changed files with 4109 additions and 0 deletions


@@ -0,0 +1,28 @@
www.es-web.sophos.com
www.es-web.sophos.com.edgesuite.net
www.es-web-2.sophos.com
www.es-web-2.sophos.com.edgesuite.net
www.dnl-01.geo.kaspersky.com
www.downloads2.kaspersky-labs.com
www.liveupdate.symantecliveupdate.com
www.liveupdate.symantec.com
www.update.symantec.com
www.update.nai.com
www.download797.avast.com
www.guru.avg.com
www.osce8-p.activeupdate.trendmicro.com
www.forefrontdl.microsoft.com
es-web.sophos.com
es-web.sophos.com.edgesuite.net
es-web-2.sophos.com
es-web-2.sophos.com.edgesuite.net
dnl-01.geo.kaspersky.com
downloads2.kaspersky-labs.com
liveupdate.symantecliveupdate.com
liveupdate.symantec.com
update.symantec.com
update.nai.com
download797.avast.com
guru.avg.com
osce8-p.activeupdate.trendmicro.com
forefrontdl.microsoft.com
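
The hostnames above are antivirus and endpoint-protection update servers. The module added in this commit (shown below) queries each name against a target nameserver with recursion disabled, so an answer served from cache suggests a machine behind that resolver has recently checked for updates from that vendor. A minimal standalone sketch of the same check, assuming the net-dns gem is installed and using placeholder values for the nameserver and hostname:

require 'net/dns/resolver'

# Non-recursive "cache snooping" check against a single resolver.
# 192.0.2.53 is a placeholder nameserver, not a value from this commit.
resolver = Net::DNS::Resolver.new(
  :nameservers => "192.0.2.53",
  :recursive   => false   # only return answers already in the cache
)

packet = resolver.send("liveupdate.symantec.com")
if packet.answer.empty?
  puts "liveupdate.symantec.com - not cached"
else
  puts "liveupdate.symantec.com - cached (recently resolved by a client)"
end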

File diff suppressed because it is too large


@@ -0,0 +1,111 @@
##
# This file is part of the Metasploit Framework and may be subject to
# redistribution and commercial restrictions. Please see the Metasploit
# web site for more information on licensing and terms of use.
# http://metasploit.com/
##

require 'msf/core'
require 'net/dns/resolver'

class Metasploit3 < Msf::Auxiliary
  include Msf::Auxiliary::Report

  def initialize(info = {})
    super(update_info(info,
      'Name'        => 'DNS Non-Recursive Record Scraper',
      'Description' => %q{
        This module can be used to scrape records that have been cached
        by a specific nameserver. The module allows the tester to check
        every record from a specified wordlist.
      },
      'Author'      => [
        'Brandon McCann "zeknox" <bmccann[at]accuvant.com>',
        'Rob Dixon "304geek" <rob.dixon[at]accuvant.com>'
      ],
      'License'     => MSF_LICENSE,
      'References'  => [
        ['URL', 'http://304geeks.blogspot.com/2013/01/dns-scraping-for-corporate-av-detection.html']
      ]))

    register_options([
      OptString.new('DOMAIN', [ false, "Domain name to query for" ]),
      OptPath.new('WORDLIST', [ true, "Wordlist for domain name queries", ::File.join(Msf::Config.install_root, "data", "wordlists", "av-update-urls.txt") ]),
      OptAddress.new('NS', [ true, "Specify the nameserver to use for queries" ]),
    ], self.class)

    register_advanced_options([
      OptBool.new('TCP_DNS', [ false, "Run queries over TCP", false ]),
    ], self.class)
  end

  # Issue a non-recursive DNS query for a single domain
  def scrape_dns(domain)
    # dns request with recursion disabled
    use_tcp = datastore['TCP_DNS'] == true
    res = Net::DNS::Resolver.new(:nameservers => "#{datastore['NS']}", :recursive => false, :use_tcp => use_tcp)

    # query dns
    begin
      query = res.send(domain)
    rescue
      print_error("Issues with #{domain}")
      return
    end

    # found or not found
    if query.answer.empty?
      vprint_status("#{domain} - Not Found")
      return
    end

    print_good("#{domain} - Found")
    report_goods(domain)
  end

  # Read each line from the wordlist and query it
  def read_file
    ::File.open("#{datastore['WORDLIST']}", "rb").each_line do |line|
      scrape_dns(line.chomp)
    end
  end

  # Log results to the database
  def report_goods(domain)
    report_service(
      :host  => datastore['NS'],
      :name  => "dns",
      :port  => 53,
      :proto => "udp",
      :info  => "#{domain} cached"
    )

    report_note(
      :host  => datastore['NS'],
      :name  => "dns",
      :port  => 53,
      :proto => "udp",
      :type  => "dns.cache.scrape",
      :data  => "#{domain} cached"
    )

    report_host(
      :address  => datastore['NS'],
      :info     => "#{domain} cached",
      :comments => "DNS Cache Scraper"
    )
  end

  # Main control method
  def run
    print_status("Making queries against #{datastore['NS']}")

    if datastore['DOMAIN'].blank?
      read_file
    else
      scrape_dns(datastore['DOMAIN'])
    end
  end
end
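
A hedged usage sketch: assuming the module loads as auxiliary/gather/dns_cache_scraper (the file path is not visible in this diff), a run against a target resolver only needs the NS option, since WORDLIST defaults to data/wordlists/av-update-urls.txt; 192.0.2.53 below is a placeholder address.

msf > use auxiliary/gather/dns_cache_scraper
msf auxiliary(dns_cache_scraper) > set NS 192.0.2.53
msf auxiliary(dns_cache_scraper) > run

Cached names are printed via print_good and recorded through report_service, report_note, and report_host, so hits also land in the database for later reporting.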