Adds a Burp Session XML parser and importer. Also includes fix to Rex's HTTP header parser to ensure that it uses only unencoded regexes.
git-svn-id: file:///home/svn/framework3/trunk@12846 4d416f70-5f16-0410-b530-b9f4589650da
branch: unstable
parent
89a12add6f
commit
7b4c7d44db
|
@ -6,6 +6,7 @@ require 'rex/parser/foundstone_nokogiri'
|
|||
require 'rex/parser/mbsa_nokogiri'
|
||||
require 'rex/parser/acunetix_nokogiri'
|
||||
require 'rex/parser/appscan_nokogiri'
|
||||
require 'rex/parser/burp_session_nokogiri'
|
||||
|
||||
# Legacy XML parsers -- these will be converted some day
|
||||
|
||||
|
@ -1472,7 +1473,6 @@ class DBManager
|
|||
ret[:web_site] = site
|
||||
end
|
||||
|
||||
|
||||
#
|
||||
# Report a Web Page to the database. WebPage must be tied to an existing Web Site
|
||||
#
|
||||
|
@ -2027,6 +2027,7 @@ class DBManager
|
|||
# Returns one of: :nexpose_simplexml :nexpose_rawxml :nmap_xml :openvas_xml
|
||||
# :nessus_xml :nessus_xml_v2 :qualys_scan_xml, :qualys_asset_xml, :msf_xml :nessus_nbe :amap_mlog
|
||||
# :amap_log :ip_list, :msf_zip, :libpcap, :foundstone_xml, :acunetix_xml, :appscan_xml
|
||||
# :burp_session
|
||||
# If there is no match, an error is raised instead.
|
||||
def import_filetype_detect(data)
|
||||
|
||||
|
@ -2079,6 +2080,9 @@ class DBManager
|
|||
elsif (firstline.index("<SecScan ID="))
|
||||
@import_filedata[:type] = "Microsoft Baseline Security Analyzer"
|
||||
return :mbsa_xml
|
||||
elsif (data[0,1024] =~ /<!ATTLIST\s+items\s+burpVersion/)
|
||||
@import_filedata[:type] = "Burp Session XML"
|
||||
return :burp_session_xml
|
||||
elsif (firstline.index("<?xml"))
|
||||
# it's xml, check for root tags we can handle
|
||||
line_count = 0
|
||||
|
@ -4493,7 +4497,6 @@ class DBManager
|
|||
bl = validate_ips(args[:blacklist]) ? args[:blacklist].split : []
|
||||
wspace = args[:wspace] || workspace
|
||||
if Rex::Parser.nokogiri_loaded
|
||||
# Rex::Parser.reload("appscan_nokogiri.rb")
|
||||
parser = "Nokogiri v#{::Nokogiri::VERSION}"
|
||||
noko_args = args.dup
|
||||
noko_args[:blacklist] = bl
|
||||
|
@ -4520,6 +4523,37 @@ class DBManager
|
|||
parser.parse(args[:data])
|
||||
end
|
||||
|
||||
# Imports a Burp Session XML file via the Nokogiri-based SAX parser.
#
# Honors :data (the XML), :wspace (target workspace), and :blacklist
# (space-separated IPs to skip) in +args+. When a block is given, it
# receives (type, data) progress events.
#
# Raises DBImportError when Nokogiri is unavailable; returns true on
# success.
def import_burp_session_xml(args={}, &block)
  blacklist = validate_ips(args[:blacklist]) ? args[:blacklist].split : []
  target_wspace = args[:wspace] || workspace
  unless Rex::Parser.nokogiri_loaded
    # Sorry
    raise DBImportError.new("Could not import due to missing Nokogiri parser. Try 'gem install nokogiri'.")
  end
  # Rex::Parser.reload("burp_session_nokogiri.rb")
  parser = "Nokogiri v#{::Nokogiri::VERSION}"
  noko_args = args.dup
  noko_args[:blacklist] = blacklist
  noko_args[:wspace] = target_wspace
  if block
    yield(:parser, parser)
    import_burp_session_noko_stream(noko_args) { |type, data| yield type, data }
  else
    import_burp_session_noko_stream(noko_args)
  end
  true
end
|
||||
|
||||
# Drives Nokogiri's SAX parser over Burp Session XML data. With a
# block, events are streamed back through it against the framework's
# db; without one, results are reported directly against this
# DBManager instance.
def import_burp_session_noko_stream(args={}, &block)
  doc =
    if block
      Rex::Parser::BurpSessionDocument.new(args, framework.db) { |type, data| yield type, data }
    else
      Rex::Parser::BurpSessionDocument.new(args, self)
    end
  ::Nokogiri::XML::SAX::Parser.new(doc).parse(args[:data])
end
|
||||
|
||||
#
|
||||
# Import IP360's xml output
|
||||
#
|
||||
|
|
|
@ -0,0 +1,290 @@
|
|||
require File.join(File.expand_path(File.dirname(__FILE__)),"nokogiri_doc_mixin")
|
||||
|
||||
module Rex
|
||||
module Parser
|
||||
|
||||
# If Nokogiri is available, define Burp Session document class.
|
||||
#
|
||||
# Burp Session XML files actually provide a lot, but since it also
|
||||
# provides the originating url, we can pull most of the detail from
|
||||
# the URI object.
|
||||
load_nokogiri && class BurpSessionDocument < Nokogiri::XML::SAX::Document
|
||||
|
||||
include NokogiriDocMixin
|
||||
|
||||
# The resolver prefers your local /etc/hosts (or windows equiv), but will
|
||||
# fall back to regular DNS. It retains a cache for the import to avoid
|
||||
# spamming your network with DNS requests.
|
||||
attr_reader :resolv_cache
|
||||
|
||||
# Since we try to resolve every time we hit a new web page, need to
|
||||
# hang on to our misses. Presume that it's a permanent enough failure
|
||||
# that it won't get fixed during this particular import
|
||||
attr_reader :missed_cache
|
||||
|
||||
# If name resolution of the host fails out completely, you will not be
|
||||
# able to import that Scan task. Other scan tasks in the same report
|
||||
# should be unaffected.
|
||||
attr_reader :parse_warning
|
||||
|
||||
def start_document
|
||||
@parse_warnings = []
|
||||
@parse_warned = []
|
||||
@resolv_cache = {}
|
||||
@missed_cache = []
|
||||
end
|
||||
|
||||
# SAX callback fired when an element opens. Marks the tag as open in
# @state[:current_tag] and, for the tags whose character data we
# collect (host/port/protocol/path/status/response), arms text
# capture via @state[:has_text].
def start_element(name=nil, attrs=[])
  attrs = normalize_attrs(attrs)
  block = @block
  @state[:current_tag][name] = true
  if %w[host port protocol path status response].include?(name)
    @state[:has_text] = true
  end
end
|
||||
|
||||
# SAX callback fired when an element closes. For data-bearing tags the
# buffered character text is folded into @state; closing an <item>
# triggers all of the database reporting for that item.
def end_element(name=nil)
  block = @block
  case name
  when "item" # Wrap up this item, but keep resolved web sites
    # Order matters here: report_web_site stores @state[:web_site],
    # which the page/service/host reporters below all require.
    collect_uri
    report_web_site(&block)
    handle_parse_warnings(&block)
    report_web_page(&block)
    report_web_service_info
    report_web_host_info
    # Reset the state once we close a host
    # NOTE(review): Hash#select returns an Array on Ruby 1.8 but a
    # Hash on 1.9+; this reset assumes Hash semantics -- confirm the
    # supported interpreter range.
    @state = @state.select {|k| [:current_tag, :web_sites].include? k}
  when "host"
    @state[:has_text] = false
    collect_host
    @text = nil
  when "port"
    @state[:has_text] = false
    collect_port
    @text = nil
  when "protocol"
    @state[:has_text] = false
    collect_protocol
    @text = nil
  when "path"
    @state[:has_text] = false
    collect_path_and_query
    @text = nil
  when "status"
    @state[:has_text] = false
    collect_status
    @text = nil
  when "response"
    @state[:has_text] = false
    collect_response
    @text = nil
  end
  # The tag is no longer open.
  @state[:current_tag].delete name
end
|
||||
|
||||
# Records the <host> tag's text for the current item.
def collect_host
  return unless in_item && has_text
  @state[:host] = @text
end
|
||||
|
||||
# Records the <port> tag's text as an Integer, accepting only strictly
# numeric strings (the to_i/to_s round-trip rejects e.g. "80x").
def collect_port
  return unless in_item
  return unless has_text
  candidate = @text
  return unless candidate.to_i.to_s == candidate.to_s
  @state[:port] = candidate.to_i
end
|
||||
|
||||
# Records the <protocol> tag's text (the URL scheme, e.g. "http").
def collect_protocol
  @state[:protocol] = @text if in_item && has_text
end
|
||||
|
||||
# Splits the <path> tag's text into a path and an optional query
# string. Burp sometimes records a full absolute URL here rather than
# a bare path, so any scheme://host prefix is stripped first. The
# query, when present, is stored with its leading "?" attached.
def collect_path_and_query
  return unless in_item
  return unless has_text
  path, query = @text.split(/\?+/, 2)
  return unless path
  if query
    @state[:query] = "?#{query}"
  end
  # \x5c and \x2f are backslash and forward slash; Burp may emit either.
  if path =~ /https?:[\x5c\x2f][\x5c\x2f]+[^\x5c\x2f][^\x5c\x2f]+([^?]+)/
    # $1 already begins with a path separator, so strip any leading
    # slashes before re-adding exactly one; prepending "/" blindly
    # used to turn "/foo" into "//foo".
    real_path = "/#{$1.sub(/\A[\x5c\x2f]+/, "")}"
  else
    real_path = path
  end
  @state[:path] = real_path
end
|
||||
|
||||
# Records the <status> tag's text as an HTTP status code, accepting
# only strictly numeric strings.
def collect_status
  return unless in_item && has_text
  code = @text
  @state[:status] = code.to_i if code.to_i.to_s == code
end
|
||||
|
||||
def collect_uri
|
||||
return unless in_item
|
||||
return unless @state[:host]
|
||||
return unless @state[:port]
|
||||
return unless @state[:protocol]
|
||||
return unless @state[:path]
|
||||
url = @state[:protocol].to_s
|
||||
url << "://"
|
||||
url << @state[:host].to_s
|
||||
url << ":"
|
||||
url << @state[:port].to_s
|
||||
url << @state[:path]
|
||||
if @state[:query]
|
||||
url << "?"
|
||||
url << @state[:query]
|
||||
end
|
||||
@state[:uri] = URI.parse(url) rescue nil
|
||||
end
|
||||
|
||||
# Reports a hostname for the web site's underlying host record when
# that host does not already have a name. Only fires after a web site
# has been reported and the item's URI parsed as HTTP(S).
def report_web_host_info
  return unless @state[:web_site]
  return unless @state[:uri].kind_of? URI::HTTP
  return unless @state[:web_site].service.host.name.to_s.empty?
  host_info = {:workspace => @args[:wspace]}
  host_info[:address] = @state[:web_site].service.host.address
  host_info[:name] = @state[:uri].host
  # Was report_db(...), a typo -- every sibling reporter in this
  # class uses db_report, and report_db is not defined by the mixin.
  db_report(:host, host_info)
end
|
||||
|
||||
# Backfills the service record's info field from the Server header
# captured by report_web_page, but only when the service has no info
# recorded yet.
def report_web_service_info
  site = @state[:web_site]
  return unless site
  return unless @state[:service_info]
  return unless site.service.info.to_s.empty?
  db_report(:service, {
    :host  => site.service.host,
    :port  => site.service.port,
    :proto => site.service.proto,
    :info  => @state[:service_info]
  })
end
|
||||
|
||||
# Reports the current item as a WebPage tied to the already-reported
# web site. Requires a parsed HTTP(S) URI, a numeric status, and a
# header object from collect_response (Rex's Packet::Header subclasses
# Hash, satisfying the kind_of? check). As a side effect, stashes the
# Server header value in @state[:service_info] for
# report_web_service_info.
def report_web_page(&block)
  return unless @state[:uri].kind_of? URI::HTTP
  return unless @state[:status]
  return unless @state[:web_site]
  return unless @state[:response_headers].kind_of? Hash
  # Re-key the headers by downcased name, each mapping to an Array of
  # values, since headers can legitimately repeat.
  headers = {}
  @state[:response_headers].each do |k,v|
    headers[k.to_s.downcase] ||= []
    headers[k.to_s.downcase] << v
  end
  if headers["server"].kind_of? Array
    @state[:service_info] = headers["server"].first
  end
  # Pages without a response body are not recorded.
  return unless @state[:response_body]
  web_page_info = {:workspace => @args[:wspace]}
  web_page_info[:web_site] = @state[:web_site]
  web_page_info[:code] = @state[:status]
  web_page_info[:path] = @state[:uri].path
  web_page_info[:headers] = headers
  web_page_info[:body] = @state[:response_body]
  web_page_info[:query] = @state[:uri].query
  # The progress-reporting URL drops the query string; the full query
  # is still stored on the record above.
  url = @state[:uri].to_s.gsub(/\?.*/,"")
  db.emit(:web_page, url, &block) if block
  db_report(:web_page, web_page_info)
end
|
||||
|
||||
# Reports the web site for the current item's URI, resolving the
# vhost to an address first. Newly-seen sites are announced through
# the import callback exactly once; the resulting record is cached in
# @state[:web_sites] (which survives the per-item state reset in
# end_element) and exposed as @state[:web_site] for the other
# reporters.
def report_web_site(&block)
  return unless @state[:uri].kind_of? URI::HTTP
  vhost = @state[:uri].host
  web_site_info = {:workspace => @args[:wspace]}
  web_site_info[:vhost] = vhost
  address = resolve_vhost_address(@state[:uri])
  # false/nil means resolution failed; the warning was already queued
  # by resolve_vhost_address.
  return unless address
  web_site_info[:host] = address
  web_site_info[:port] = @state[:uri].port
  # URI.parse yields URI::HTTPS (a URI::HTTP subclass) for https
  # URLs, so this kind_of? check flags SSL sites.
  web_site_info[:ssl] = @state[:uri].kind_of? URI::HTTPS
  web_site_obj = db_report(:web_site, web_site_info)
  return unless web_site_obj
  @state[:web_sites] ||= []
  url = "#{@state[:uri].scheme}://#{@state[:uri].host}:#{@state[:uri].port}"
  # Only announce each distinct site once per import.
  unless @state[:web_sites].include? web_site_obj
    db.emit(:web_site, url, &block)
    @state[:web_sites] << web_site_obj
  end
  @state[:web_site] = web_site_obj
end
|
||||
|
||||
# Splits the raw <response> text into header and body at the first
# blank line, parses the header block with Rex's HTTP header parser,
# and stashes both pieces for report_web_page.
def collect_response
  return unless in_item
  return unless has_text
  raw = @text.dup
  # The /n flag forces a binary regexp; response bodies can contain
  # arbitrary bytes.
  head, body = raw.split(/\r*\n\r*\n/n, 2)
  return unless head
  parsed_headers = Rex::Proto::Http::Packet::Header.new
  parsed_headers.from_s head
  @state[:response_headers] = parsed_headers
  @state[:response_body] = body
end
|
||||
|
||||
# True only while the parser is inside <items><item> ... </item></items>.
def in_item
  (in_tag("item") && in_tag("items")) ? true : false
end
|
||||
|
||||
# Truthy only when @text holds non-whitespace content. As a side
# effect, strips @text in place; returns the stripped string itself
# (not literal true), or false otherwise.
def has_text
  return false unless @text
  stripped = @text.strip
  return false if stripped.empty?
  @text = stripped
end
|
||||
|
||||
# Emits each accumulated parse warning through the import callback,
# at most once per warning (indices already emitted are tracked in
# @parse_warned). No-op without a callback block.
def handle_parse_warnings(&block)
  return if @parse_warnings.empty? || block.nil?
  @parse_warnings.each_with_index do |warning, idx|
    next if @parse_warned.include?(idx)
    db.emit(:warning, warning, &block)
    @parse_warned << idx
  end
end
|
||||
|
||||
# Resolves a hostname to a dotted-quad, caching hits in @resolv_cache
# and permanent misses in @missed_cache. Returns the address string on
# success, false for a host already known to be unresolvable, or nil
# on a fresh resolution failure. Emits an :address event on success
# when a callback block is held.
def resolve_address(host)
  cached = @resolv_cache[host]
  return cached if cached
  return false if @missed_cache.include?(host)
  address = begin
    Rex::Socket.resolv_to_dotted(host)
  rescue StandardError
    # Same swallow as the original rescue-modifier: any resolution
    # error is treated as a miss.
    nil
  end
  @resolv_cache[host] = address
  if address
    db.emit(:address, address, &@block) if @block
  else
    @missed_cache << host
  end
  address
end
|
||||
|
||||
# Alias this
#
# Resolves the address backing a vhost URI, queueing a parse warning
# when the URI has no host or when resolution fails outright. Returns
# the address string, false for a cached miss, or nil when the host
# is missing or unresolvable.
def resolve_vhost_address(uri)
  unless uri.host
    @parse_warnings << "Could not determine a host for this import."
    return nil
  end
  address = resolve_address(uri.host)
  if address == false
    return false
  elsif address.nil?
    @parse_warnings << "Could not resolve address for '#{uri.host}', skipping."
  end
  address
end
|
||||
|
||||
end
|
||||
|
||||
end
|
||||
end
|
||||
|
|
@ -37,16 +37,16 @@ class Packet::Header < Hash
|
|||
|
||||
# put the non-standard line terminations back to normal
|
||||
# gah. not having look behinds suck,
|
||||
header.gsub!(/([^\r])\n/,'\1' + "\r\n")
|
||||
header.gsub!(/([^\r])\n/n,'\1' + "\r\n")
|
||||
|
||||
# undo folding, kinda ugly but works for now.
|
||||
header.gsub!(/:\s*\r\n\s+/smi,': ')
|
||||
header.gsub!(/:\s*\r\n\s+/smni,': ')
|
||||
|
||||
# Extract the command string
|
||||
self.cmd_string = header.slice!(/.+\r\n/)
|
||||
|
||||
# Extract each header value pair
|
||||
header.split(/\r\n/m).each { |str|
|
||||
header.split(/\r\n/mn).each { |str|
|
||||
if (md = str.match(/^(.+?): (.+?)$/))
|
||||
if (self[md[1]])
|
||||
self[md[1]] << ", " + md[2]
|
||||
|
|
Loading…
Reference in New Issue