# Check Rex::Parser.nokogiri_loaded for status of the Nokogiri parsers
require 'rex/parser/nmap_nokogiri'
require 'rex/parser/nexpose_simple_nokogiri'
require 'rex/parser/nexpose_raw_nokogiri'
require 'rex/parser/foundstone_nokogiri'
require 'rex/parser/mbsa_nokogiri'
require 'rex/parser/acunetix_nokogiri'
require 'rex/parser/appscan_nokogiri'
require 'rex/parser/burp_session_nokogiri'
require 'rex/parser/ci_nokogiri'

# Legacy XML parsers -- these will be converted some day
require 'rex/parser/nmap_xml'
require 'rex/parser/nexpose_xml'
require 'rex/parser/retina_xml'
require 'rex/parser/netsparker_xml'
require 'rex/parser/nessus_xml'
require 'rex/parser/ip360_xml'
require 'rex/parser/ip360_aspl_xml'

require 'rex/socket'
require 'zip'
require 'packetfu'
require 'uri'
require 'tmpdir'

module Msf

###
#
# The states that a host can be in.
#
###
module HostState
  #
  # The host is alive.
  #
  Alive   = "alive"
  #
  # The host is dead.
  #
  Dead    = "down"
  #
  # The host state is unknown.
  #
  Unknown = "unknown"
end

###
#
# The states that a service can be in.
#
###
module ServiceState
  Open     = "open"
  Closed   = "closed"
  Filtered = "filtered"
  Unknown  = "unknown"
end

###
#
# Events that can occur in the host/service database.
#
###
module DatabaseEvent

  #
  # Called when an existing host's state changes
  #
  def on_db_host_state(host, ostate)
  end

  #
  # Called when an existing service's state changes
  #
  def on_db_service_state(host, port, ostate)
  end

  #
  # Called when a new host is added to the database.  The host parameter is
  # of type Host.
  #
  def on_db_host(host)
  end

  #
  # Called when a new client is added to the database.  The client
  # parameter is of type Client.
  #
  def on_db_client(client)
  end

  #
  # Called when a new service is added to the database.  The service
  # parameter is of type Service.
  #
  def on_db_service(service)
  end

  #
  # Called when an applicable vulnerability is found for a service.  The vuln
  # parameter is of type Vuln.
  #
  def on_db_vuln(vuln)
  end

  #
  # Called when a new reference is created.
  #
  def on_db_ref(ref)
  end

end

class DBImportError < RuntimeError
end

###
#
# The DB module ActiveRecord definitions for the DBManager
#
###
class DBManager

  def rfc3330_reserved(ip)
    case ip.class.to_s
    when "PacketFu::Octets"
      ip_x = ip.to_x
      ip_i = ip.to_i
    when "String"
      if ipv4_validator(ip)
        ip_x = ip
        ip_i = Rex::Socket.addr_atoi(ip)
      else
        raise ArgumentError, "Invalid IP address: #{ip.inspect}"
      end
    when "Fixnum"
      if (0..2**32-1).include? ip
        ip_x = Rex::Socket.addr_itoa(ip)
        ip_i = ip
      else
        raise ArgumentError, "Invalid IP address: #{ip.inspect}"
      end
    else
      raise ArgumentError, "Invalid IP address: #{ip.inspect}"
    end
    return true if Rex::Socket::RangeWalker.new("0.0.0.0-0.255.255.255").include? ip_x
    return true if Rex::Socket::RangeWalker.new("127.0.0.0-127.255.255.255").include? ip_x
    return true if Rex::Socket::RangeWalker.new("169.254.0.0-169.254.255.255").include? ip_x
    return true if Rex::Socket::RangeWalker.new("224.0.0.0-239.255.255.255").include? ip_x
    return true if Rex::Socket::RangeWalker.new("255.255.255.255-255.255.255.255").include? ip_x
    return false
  end
  def ipv4_validator(addr)
    return false unless addr.kind_of? String
    addr =~ /^(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$/
  end
  # Takes a space-delimited set of ips and ranges, and subjects
  # them to RangeWalker for validation. Returns true or false.
  def validate_ips(ips)
    ret = true
    begin
      ips.split(' ').each { |ip|
        unless Rex::Socket::RangeWalker.new(ip).ranges
          ret = false
          break
        end
      }
    rescue
      ret = false
    end
    return ret
  end
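
  # Illustrative usage of the address helpers above (a sketch, not part of the
  # original source; assumes framework.db is an active DBManager):
  #
  #   framework.db.ipv4_validator("192.168.0.1")         # truthy (regex match index)
  #   framework.db.ipv4_validator("not-an-ip")           # nil
  #   framework.db.validate_ips("10.0.0.0/24 10.1.1.1")  # => true
  #   framework.db.rfc3330_reserved("127.0.0.1")         # => true
  #   framework.db.rfc3330_reserved("8.8.8.8")           # => false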
  #
  # Determines if the database is functional
  #
  def check
    res = Host.find(:first)
  end

  def default_workspace
    Workspace.default
  end

  def find_workspace(name)
    Workspace.find_by_name(name)
  end

  #
  # Creates a new workspace in the database
  #
  def add_workspace(name)
    Workspace.find_or_create_by_name(name)
  end

  def workspaces
    Workspace.find(:all)
  end

  #
  # Wait for all pending writes to finish
  #
  def sync
    # There is no more queue.
  end
  #
  # Find a host.  Performs no database writes.
  #
  def get_host(opts)
    if opts.kind_of? Host
      return opts
    elsif opts.kind_of? String
      raise RuntimeError, "This invocation of get_host is no longer supported: #{caller}"
    else
      address = opts[:addr] || opts[:address] || opts[:host] || return
      return address if address.kind_of? Host
    end
    wspace = opts.delete(:workspace) || workspace
    if wspace.kind_of? String
      wspace = find_workspace(wspace)
    end
    return wspace.hosts.find_by_address(address)
  end
  #
  # Exactly like report_host but waits for the database to create a host and returns it.
  #
  def find_or_create_host(opts)
    report_host(opts)
  end

  #
  # Report a host's attributes such as operating system and service pack
  #
  # The opts parameter MUST contain
  #   :host       -- the host's ip address
  #
  # The opts parameter can contain:
  #   :state      -- one of the Msf::HostState constants
  #   :os_name    -- one of the Msf::OperatingSystems constants
  #   :os_flavor  -- something like "XP" or "Gentoo"
  #   :os_sp      -- something like "SP2"
  #   :os_lang    -- something like "English", "French", or "en-US"
  #   :arch       -- one of the ARCH_* constants
  #   :mac        -- the host's MAC address
  #
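  # Illustrative usage (a sketch, not from the original source; assumes an
  # active database connection and the current workspace):
  #
  #   framework.db.report_host(
  #     :host      => '192.168.0.1',
  #     :state     => Msf::HostState::Alive,
  #     :os_name   => 'Microsoft Windows',
  #     :os_flavor => 'XP',
  #     :os_sp     => 'SP2'
  #   )
  #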
  def report_host(opts)

    return if not active
    addr = opts.delete(:host) || return

    # Sometimes a host setup through a pivot will see the address as "Remote Pipe"
    if addr.eql? "Remote Pipe"
      return
    end

    wspace = opts.delete(:workspace) || workspace

    ret = { }

    if not addr.kind_of? Host
      addr = normalize_host(addr)
      unless ipv4_validator(addr)
        raise ::ArgumentError, "Invalid IP address in report_host(): #{addr}"
      end

      if opts[:comm] and opts[:comm].length > 0
        host = wspace.hosts.find_or_initialize_by_address_and_comm(addr, opts[:comm])
      else
        host = wspace.hosts.find_or_initialize_by_address(addr)
      end
    else
      host = addr
    end

    # Truncate the info field at the maximum field length
    if opts[:info]
      opts[:info] = opts[:info][0,65535]
    end

    # Truncate the name field at the maximum field length
    if opts[:name]
      opts[:name] = opts[:name][0,255]
    end

    opts.each { |k,v|
      if (host.attribute_names.include?(k.to_s))
        unless host.attribute_locked?(k.to_s)
          host[k] = v.to_s.gsub(/[\x00-\x1f]/, '')
        end
      else
        dlog("Unknown attribute for Host: #{k}")
      end
    }
    host.info = host.info[0,Host.columns_hash["info"].limit] if host.info

    # Set default fields if needed
    host.state     = HostState::Alive if not host.state
    host.comm      = ''     if not host.comm
    host.workspace = wspace if not host.workspace

    if host.changed?
      msf_import_timestamps(opts, host)
      host.save!
    end

    host
  end
  #
  # Iterates over the hosts table calling the supplied block with the host
  # instance of each entry.
  #
  def each_host(wspace=workspace, &block)
    wspace.hosts.each do |host|
      block.call(host)
    end
  end

  #
  # Returns a list of all hosts in the database
  #
  def hosts(wspace = workspace, only_up = false, addresses = nil)
    conditions = {}
    conditions[:state] = [Msf::HostState::Alive, Msf::HostState::Unknown] if only_up
    conditions[:address] = addresses if addresses
    wspace.hosts.all(:conditions => conditions, :order => :address)
  end
  def find_or_create_service(opts)
    report_service(opts)
  end

  #
  # Record a service in the database.
  #
  # opts must contain
  #   :host  -- the host where this service is running
  #   :port  -- the port where this service listens
  #   :proto -- the transport layer protocol (e.g. tcp, udp)
  #
  # opts may contain
  #   :name  -- the application layer protocol (e.g. ssh, mssql, smb)
  #
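  # Illustrative usage (a sketch, not from the original source):
  #
  #   framework.db.report_service(
  #     :host  => '192.168.0.1',
  #     :port  => 445,
  #     :proto => 'tcp',
  #     :name  => 'smb'
  #   )
  #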
  def report_service(opts)
    return if not active
    addr   = opts.delete(:host) || return
    hname  = opts.delete(:host_name)
    hmac   = opts.delete(:mac)
    host   = nil
    wspace = opts.delete(:workspace) || workspace

    hopts = {:workspace => wspace, :host => addr}
    hopts[:name] = hname if hname
    hopts[:mac]  = hmac  if hmac

    if addr.kind_of? Host
      host = addr
      addr = host.address
    else
      host = report_host(hopts)
    end

    ret = {}
=begin
    host = get_host(:workspace => wspace, :address => addr)
    if host
      host.updated_at = host.created_at
      host.state      = HostState::Alive
      host.save!
    end
=end

    proto = opts[:proto] || 'tcp'
    opts[:name].downcase! if (opts[:name]) # XXX: shouldn't modify this in place, might be frozen by caller

    service = host.services.find_or_initialize_by_port_and_proto(opts[:port].to_i, proto)
    opts.each { |k,v|
      if (service.attribute_names.include?(k.to_s))
        service[k] = v
      else
        dlog("Unknown attribute for Service: #{k}")
      end
    }
    if (service.state == nil)
      service.state = ServiceState::Open
    end
    if (service and service.changed?)
      msf_import_timestamps(opts, service)
      service.save!
    end
    ret[:service] = service
  end

  def get_service(wspace, host, proto, port)
    host = get_host(:workspace => wspace, :address => host)
    return if not host
    return host.services.find_by_proto_and_port(proto, port)
  end
  #
  # Iterates over the services table calling the supplied block with the
  # service instance of each entry.
  #
  def each_service(wspace=workspace, &block)
    services(wspace).each do |service|
      block.call(service)
    end
  end

  #
  # Returns a list of all services in the database
  #
  def services(wspace = workspace, only_up = false, proto = nil, addresses = nil, ports = nil, names = nil)
    conditions = {}
    conditions[:state] = [ServiceState::Open] if only_up
    conditions[:proto] = proto if proto
    conditions["hosts.address"] = addresses if addresses
    conditions[:port] = ports if ports
    conditions[:name] = names if names
    wspace.services.all(:include => :host, :conditions => conditions, :order => "hosts.address, port")
  end
  # Returns a session based on opened_time, host address, and workspace
  # (or returns nil)
  def get_session(opts)
    return if not active
    wspace = opts[:workspace] || opts[:wspace] || workspace
    addr   = opts[:addr] || opts[:address] || opts[:host] || return
    host = get_host(:workspace => wspace, :host => addr)
    time = opts[:opened_at] || opts[:created_at] || opts[:time] || return
    Msf::DBManager::Session.find_by_host_id_and_opened_at(host.id, time)
  end

  # Record a new session in the database
  #
  # opts must contain either
  #   :session -- the Msf::Session object we are reporting
  #   :host    -- the Host object we are reporting a session on.
  #
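  # Illustrative usage (a sketch, not from the original source; +session+ is
  # assumed to be a live Msf::Session):
  #
  #   framework.db.report_session(:session => session)
  #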
  def report_session(opts)
    return if not active
    if opts[:session]
      raise ArgumentError.new("Invalid :session, expected Msf::Session") unless opts[:session].kind_of? Msf::Session
      session = opts[:session]
      wspace = opts[:workspace] || find_workspace(session.workspace)
      h_opts = { }
      h_opts[:host]      = normalize_host(session)
      h_opts[:arch]      = session.arch if session.respond_to?(:arch) and session.arch
      h_opts[:workspace] = wspace
      host = find_or_create_host(h_opts)
      sess_data = {
        :host_id     => host.id,
        :stype       => session.type,
        :desc        => session.info,
        :platform    => session.platform,
        :via_payload => session.via_payload,
        :via_exploit => session.via_exploit,
        :routes      => [],
        :datastore   => session.exploit_datastore.to_h,
        :opened_at   => Time.now.utc,
        :last_seen   => Time.now.utc,
        :local_id    => session.sid
      }
    elsif opts[:host]
      raise ArgumentError.new("Invalid :host, expected Host object") unless opts[:host].kind_of? Host
      host = opts[:host]
      sess_data = {
        :host_id      => host.id,
        :stype        => opts[:stype],
        :desc         => opts[:desc],
        :platform     => opts[:platform],
        :via_payload  => opts[:via_payload],
        :via_exploit  => opts[:via_exploit],
        :routes       => opts[:routes] || [],
        :datastore    => opts[:datastore],
        :opened_at    => opts[:opened_at],
        :closed_at    => opts[:closed_at],
        :last_seen    => opts[:last_seen] || opts[:closed_at],
        :close_reason => opts[:close_reason],
      }
    else
      raise ArgumentError.new("Missing option :session or :host")
    end
    ret = {}

    # Truncate the session data if necessary
    if sess_data[:desc]
      sess_data[:desc] = sess_data[:desc][0,255]
    end

    s = Msf::DBManager::Session.new(sess_data)
    s.save!

    if opts[:session]
      session.db_record = s
    end

    # If this is a live session, we know the host is vulnerable to something.
    # If the exploit used was multi/handler, though, we don't know what
    # it's vulnerable to, so it isn't really useful to save it.
    if opts[:session] and session.via_exploit and session.via_exploit != "exploit/multi/handler"
      return unless host
      port    = session.exploit_datastore["RPORT"]
      service = (port ? host.services.find_by_port(port) : nil)
      mod     = framework.modules.create(session.via_exploit)
      vuln_info = {
        :host         => host.address,
        :name         => session.via_exploit,
        :refs         => mod.references,
        :workspace    => wspace,
        :exploited_at => Time.now.utc
      }
      framework.db.report_vuln(vuln_info)
    end

    s
  end
  #
  # Record a session event in the database
  #
  # opts must contain:
  #   :session -- the Msf::Session OR the Msf::DBManager::Session we are reporting
  #   :etype   -- event type, enum: command, output, upload, download, filedelete
  #
  # opts may contain
  #   :output      -- the data for an output event
  #   :command     -- the data for a command event
  #   :remote_path -- path to the associated file for upload, download, and filedelete events
  #   :local_path  -- path to the associated file for upload and download events
  #
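  # Illustrative usage (a sketch, not from the original source; +session+ is
  # assumed to be a live Msf::Session with a db_record):
  #
  #   framework.db.report_session_event(
  #     :session => session,
  #     :etype   => 'command',
  #     :command => 'sysinfo'
  #   )
  #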
  def report_session_event(opts)
    return if not active
    raise ArgumentError.new("Missing required option :session") if opts[:session].nil?
    raise ArgumentError.new("Expected an :etype") unless opts[:etype]

    if opts[:session].respond_to? :db_record
      session = opts[:session].db_record
      if session.nil?
        # The session doesn't have a db_record which means
        #  a) the database wasn't connected at session registration time
        # or
        #  b) something awful happened and the report_session call failed
        #
        # Either way, we can't do anything with this session as is, so
        # log a warning and punt.
        wlog("Warning: trying to report a session_event for a session with no db_record (#{opts[:session].sid})")
        return
      end
      event_data = { :created_at => Time.now }
    else
      session = opts[:session]
      event_data = { :created_at => opts[:created_at] }
    end

    unless session.kind_of? Msf::DBManager::Session
      raise ArgumentError.new("Invalid :session, expected Session object got #{session.class}")
    end
    event_data[:session_id] = session.id
    [:remote_path, :local_path, :output, :command, :etype].each do |attr|
      event_data[attr] = opts[attr] if opts[attr]
    end

    s = Msf::DBManager::SessionEvent.create(event_data)
  end
  def report_session_route(session, route)
    return if not active
    if session.respond_to? :db_record
      s = session.db_record
    else
      s = session
    end
    unless s.kind_of? Msf::DBManager::Session
      raise ArgumentError.new("Invalid :session, expected Session object got #{session.class}")
    end

    subnet, netmask = route.split("/")
    s.routes.create(:subnet => subnet, :netmask => netmask)
  end

  def report_session_route_remove(session, route)
    return if not active
    if session.respond_to? :db_record
      s = session.db_record
    else
      s = session
    end
    unless s.kind_of? Msf::DBManager::Session
      raise ArgumentError.new("Invalid :session, expected Session object got #{session.class}")
    end

    subnet, netmask = route.split("/")
    r = s.routes.find_by_subnet_and_netmask(subnet, netmask)
    r.destroy if r
  end
  def get_client(opts)
    wspace = opts.delete(:workspace) || workspace
    host = get_host(:workspace => wspace, :host => opts[:host]) || return
    client = host.clients.find(:first, :conditions => {:ua_string => opts[:ua_string]})
    return client
  end

  def find_or_create_client(opts)
    report_client(opts)
  end

  #
  # Report a client running on a host.
  #
  # opts must contain
  #   :ua_string -- the value of the User-Agent header
  #   :host      -- the host where this client connected from, can be an ip address or a Host object
  #
  # opts can contain
  #   :ua_name   -- one of the Msf::HttpClients constants
  #   :ua_ver    -- detected version of the given client
  #   :campaign  -- an id or Campaign object
  #
  # Returns a Client.
  #
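  # Illustrative usage (a sketch, not from the original source):
  #
  #   framework.db.report_client(
  #     :host      => '192.168.0.1',
  #     :ua_string => 'Mozilla/5.0 (Windows NT 5.1; rv:5.0) Gecko/20100101 Firefox/5.0',
  #     :ua_name   => 'Firefox',
  #     :ua_ver    => '5.0'
  #   )
  #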
  def report_client(opts)
    return if not active
    addr = opts.delete(:host) || return
    wspace = opts.delete(:workspace) || workspace
    report_host(:workspace => wspace, :host => addr)

    ret = {}

    host = get_host(:workspace => wspace, :host => addr)
    client = host.clients.find_or_initialize_by_ua_string(opts[:ua_string])

    opts[:ua_string] = opts[:ua_string].to_s

    campaign = opts.delete(:campaign)
    if campaign
      case campaign
      when Campaign
        opts[:campaign_id] = campaign.id
      else
        opts[:campaign_id] = campaign
      end
    end

    opts.each { |k,v|
      if (client.attribute_names.include?(k.to_s))
        client[k] = v
      else
        dlog("Unknown attribute for Client: #{k}")
      end
    }
    if (client and client.changed?)
      client.save!
    end
    ret[:client] = client
  end
  #
  # This method iterates the vulns table calling the supplied block with the
  # vuln instance of each entry.
  #
  def each_vuln(wspace=workspace, &block)
    wspace.vulns.each do |vulns|
      block.call(vulns)
    end
  end

  #
  # This method returns a list of all vulnerabilities in the database
  #
  def vulns(wspace=workspace)
    wspace.vulns
  end

  #
  # This method returns a list of all credentials in the database
  #
  def creds(wspace=workspace)
    Cred.find(
      :all,
      :include => {:service => :host}, # That's some magic right there.
      :conditions => ["hosts.workspace_id = ?", wspace.id]
    )
  end

  #
  # This method returns a list of all exploited hosts in the database.
  #
  def exploited_hosts(wspace=workspace)
    wspace.exploited_hosts
  end
  #
  # This method iterates the notes table calling the supplied block with the
  # note instance of each entry.
  #
  def each_note(wspace=workspace, &block)
    wspace.notes.each do |note|
      block.call(note)
    end
  end

  #
  # Find or create a note matching this type/data
  #
  def find_or_create_note(opts)
    report_note(opts)
  end
  #
  # Report a Note to the database.  Notes can be tied to a Workspace, Host, or Service.
  #
  # opts MUST contain
  #   :data  -- whatever it is you're making a note of
  #   :type  -- The type of note, e.g. smb_peer_os
  #
  # opts can contain
  #   :workspace -- the workspace to associate with this Note
  #   :host      -- an IP address or a Host object to associate with this Note
  #   :service   -- a Service object to associate with this Note
  #   :port      -- along with :host and :proto, a service to associate with this Note
  #   :proto     -- along with :host and :port, a service to associate with this Note
  #   :update    -- what to do in case a similar Note exists, see below
  #
  # The :update option can have the following values:
  #   :unique      -- allow only a single Note per +host+/+type+ pair
  #   :unique_data -- like :unique, but also compare +data+
  #   :insert      -- always insert a new Note even if one with identical values exists
  #
  # If the provided :host is an IP address and does not exist in the
  # database, it will be created.  If :workspace, :host and :service are all
  # omitted, the new Note will be associated with the current workspace.
  #
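  # Illustrative usage (a sketch, not from the original source):
  #
  #   framework.db.report_note(
  #     :host   => '192.168.0.1',
  #     :port   => 445,
  #     :proto  => 'tcp',
  #     :type   => 'smb.fingerprint',
  #     :data   => { :native_os => 'Windows 5.1' },
  #     :update => :unique_data
  #   )
  #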
  def report_note(opts)
    return if not active
    wspace = opts.delete(:workspace) || workspace
    if wspace.kind_of? String
      wspace = find_workspace(wspace)
    end
    seen = opts.delete(:seen) || false
    crit = opts.delete(:critical) || false
    host = nil
    addr = nil
    # Report the host so it's there for the Proc to use below
    if opts[:host]
      if opts[:host].kind_of? Host
        host = opts[:host]
      else
        addr = normalize_host(opts[:host])
        host = report_host({:workspace => wspace, :host => addr})
      end
      # Do the same for a service if that's also included.
      if (opts[:port])
        proto = nil
        sname = nil
        case opts[:proto].to_s.downcase # Catch incorrect usages
        when 'tcp','udp'
          proto = opts[:proto]
          sname = opts[:sname] if opts[:sname]
        when 'dns','snmp','dhcp'
          proto = 'udp'
          sname = opts[:proto]
        else
          proto = 'tcp'
          sname = opts[:proto]
        end
        sopts = {
          :workspace => wspace,
          :host      => host,
          :port      => opts[:port],
          :proto     => proto
        }
        sopts[:name] = sname if sname
        report_service(sopts)
      end
    end
    # Update Modes can be :unique, :unique_data, :insert
    mode = opts[:update] || :unique

    ret = {}

    if addr and not host
      host = get_host(:workspace => wspace, :host => addr)
    end
    if host and (opts[:port] and opts[:proto])
      service = get_service(wspace, host, opts[:proto], opts[:port])
    elsif opts[:service] and opts[:service].kind_of? Service
      service = opts[:service]
    end
=begin
    if host
      host.updated_at = host.created_at
      host.state      = HostState::Alive
      host.save!
    end
=end
    ntype  = opts.delete(:type) || opts.delete(:ntype) || (raise RuntimeError, "A note :type or :ntype is required")
    data   = opts[:data] || (raise RuntimeError, "Note :data is required")
    method = nil
    args   = []
    note   = nil

    conditions = { :ntype => ntype }
    conditions[:host_id] = host[:id] if host
    conditions[:service_id] = service[:id] if service

    case mode
    when :unique
      notes = wspace.notes.find(:all, :conditions => conditions)

      # Only one note of this type should exist, make a new one if it
      # isn't there. If it is, grab it and overwrite its data.
      if notes.empty?
        note = wspace.notes.new(conditions)
      else
        note = notes[0]
      end
      note.data = data
    when :unique_data
      notes = wspace.notes.find(:all, :conditions => conditions)

      # Don't make a new Note with the same data as one that already
      # exists for the given: type and (host or service)
      notes.each do |n|
        # Compare the deserialized data from the table to the raw
        # data we're looking for.  Because of the serialization we
        # can't do this easily or reliably in SQL.
        if n.data == data
          note = n
          break
        end
      end
      if not note
        # We didn't find one with the data we're looking for, make
        # a new one.
        note = wspace.notes.new(conditions.merge(:data => data))
      end
    else
      # Otherwise, assume :insert, which means always make a new one
      note = wspace.notes.new
      if host
        note.host_id = host[:id]
      end
      if opts[:service] and opts[:service].kind_of? Service
        note.service_id = opts[:service][:id]
      end
      note.seen     = seen
      note.critical = crit
      note.ntype    = ntype
      note.data     = data
    end
    msf_import_timestamps(opts, note)
    note.save!
    ret[:note] = note
  end
  #
  # This method returns a list of all notes in the database
  #
  def notes(wspace=workspace)
    wspace.notes
  end
  # This is only exercised by MSF3 XML importing for now. Needs the wait
  # conditions and return hash as well.
  def report_host_tag(opts)
    name = opts.delete(:name)
    raise DBImportError.new("Missing required option :name") unless name
    addr = opts.delete(:addr)
    raise DBImportError.new("Missing required option :addr") unless addr
    wspace = opts.delete(:wspace)
    raise DBImportError.new("Missing required option :wspace") unless wspace

    host = nil
    report_host(:workspace => wspace, :address => addr)

    host = get_host(:workspace => wspace, :address => addr)
    desc = opts.delete(:desc)
    summary = opts.delete(:summary)
    detail = opts.delete(:detail)
    crit = opts.delete(:crit)
    possible_tag = Tag.find(:all,
      :include => :hosts,
      :conditions => ["hosts.workspace_id = ? and tags.name = ?",
        wspace.id,
        name
      ]
    ).first
    tag = possible_tag || Tag.new
    tag.name = name
    tag.desc = desc
    tag.report_summary = !!summary
    tag.report_detail = !!detail
    tag.critical = !!crit
    tag.hosts = tag.hosts | [host]
    tag.save! if tag.changed?
  end
  # report_auth_info used to create a note, now it creates
  # an entry in the creds table. It's much more akin to
  # report_vuln() now.
  #
  # opts must contain
  #   :host   -- an IP address or Host object reference
  #   :port   -- a port number
  #
  # opts can contain
  #   :user   -- the username
  #   :pass   -- the password, or path to ssh_key
  #   :ptype  -- the type of password (password(ish), hash, or ssh_key)
  #   :proto  -- a transport name for the port
  #   :sname  -- service name
  #   :active -- by default, a cred is active, unless explicitly false
  #   :proof  -- data used to prove the account is actually active.
  #
  # Sources: Credentials can be sourced from another credential, or from
  # a vulnerability. For example, if an exploit was used to dump the
  # smb_hashes, and this credential comes from there, the source_id would
  # be the Vuln id (as reported by report_vuln) and the type would be "Vuln".
  #
  #   :source_id   -- The Vuln or Cred id of the source of this cred.
  #   :source_type -- Either Vuln or Cred
  #
  # TODO: This is written somewhat host-centric, when really the
  # Service is the thing. Need to revisit someday.
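  #
  # Illustrative usage (a sketch, not from the original source):
  #
  #   framework.db.report_auth_info(
  #     :host  => '192.168.0.1',
  #     :port  => 22,
  #     :sname => 'ssh',
  #     :user  => 'root',
  #     :pass  => 'toor',
  #     :proof => 'uid=0(root) gid=0(root)'
  #   )
  #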
  def report_auth_info(opts={})
    return if not active
    raise ArgumentError.new("Missing required option :host") if opts[:host].nil?
    raise ArgumentError.new("Missing required option :port") if (opts[:port].nil? and opts[:service].nil?)

    if (not opts[:host].kind_of?(Host)) and (not validate_ips(opts[:host]))
      raise ArgumentError.new("Invalid address or object for :host (#{opts[:host].inspect})")
    end

    host = opts.delete(:host)
    ptype = opts.delete(:type) || "password"
    token = [opts.delete(:user), opts.delete(:pass)]
    sname = opts.delete(:sname)
    port = opts.delete(:port)
    proto = opts.delete(:proto) || "tcp"
    proof = opts.delete(:proof)
    source_id = opts.delete(:source_id)
    source_type = opts.delete(:source_type)
    duplicate_ok = opts.delete(:duplicate_ok)
    # Nil is true for active.
    active = (opts[:active] || opts[:active].nil?) ? true : false

    wspace = opts.delete(:workspace) || workspace

    # Service management; assume the user knows what
    # he's talking about.
    service = opts.delete(:service) || report_service(:host => host, :port => port, :proto => proto, :name => sname, :workspace => wspace)

    # Non-US-ASCII usernames are tripping up the database at the moment, this is a temporary fix until we update the tables
    (token[0] = token[0].gsub(/[\x00-\x1f\x7f-\xff]/) { |m| "\\x%.2x" % m.unpack("C")[0] }) if token[0]
    (token[1] = token[1].gsub(/[\x00-\x1f\x7f-\xff]/) { |m| "\\x%.2x" % m.unpack("C")[0] }) if token[1]

    ret = {}

    # If duplicate usernames are okay, find by both user and password (allows
    # for actual duplicates to get modified updated_at, sources, etc)
    if duplicate_ok
      cred = service.creds.find_or_initialize_by_user_and_ptype_and_pass(token[0] || "", ptype, token[1] || "")
    else
      # Create the cred by username only (so we can change passwords)
      cred = service.creds.find_or_initialize_by_user_and_ptype(token[0] || "", ptype)
    end

    # Update with the password
    cred.pass = (token[1] || "")

    # Annotate the credential
    cred.ptype = ptype
    cred.active = active

    # Update the source ID only if there wasn't already one.
    if source_id and !cred.source_id
      cred.source_id = source_id
      cred.source_type = source_type if source_type
    end

    # Safe proof (lazy way) -- doesn't chop expanded
    # characters correctly, but shouldn't ever be a problem.
    unless proof.nil?
      proof = Rex::Text.to_hex_ascii(proof)
      proof = proof[0,4096]
    end
    cred.proof = proof

    # Update the timestamp
    if cred.changed?
      msf_import_timestamps(opts, cred)
      cred.save!
    end

    # Ensure the updated_at is touched any time report_auth_info is called
    # except when it's set explicitly (as it is for imports)
    unless opts[:updated_at] || opts["updated_at"]
      cred.updated_at = Time.now.utc
      cred.save!
    end

    ret[:cred] = cred
  end
  alias :report_cred :report_auth_info
  alias :report_auth :report_auth_info

  #
  # Find or create a credential matching this type/data
  #
  def find_or_create_cred(opts)
    report_auth_info(opts)
  end

  #
  # This method iterates the creds table calling the supplied block with the
  # cred instance of each entry.
  #
  def each_cred(wspace=workspace, &block)
    wspace.creds.each do |cred|
      block.call(cred)
    end
  end
  def each_exploited_host(wspace=workspace, &block)
    wspace.exploited_hosts.each do |eh|
      block.call(eh)
    end
  end

  #
  # Find or create a vuln matching this service/name
  #
  def find_or_create_vuln(opts)
    report_vuln(opts)
  end
  #
  # opts must contain
  #   :host  -- the host where this vulnerability resides
  #   :name  -- the scanner-specific id of the vuln (e.g. NEXPOSE-cifs-acct-password-never-expires)
  #
  # opts can contain
  #   :info  -- a human readable description of the vuln, free-form text
  #   :refs  -- an array of Ref objects or string names of references
  #
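  # Illustrative usage (a sketch, not from the original source):
  #
  #   framework.db.report_vuln(
  #     :host  => '192.168.0.1',
  #     :port  => 445,
  #     :proto => 'tcp',
  #     :name  => 'MS08-067',
  #     :info  => 'Server service relative path stack corruption',
  #     :refs  => ['CVE-2008-4250', 'MSB-MS08-067']
  #   )
  #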
  def report_vuln(opts)
    return if not active
    raise ArgumentError.new("Missing required option :host") if opts[:host].nil?
    raise ArgumentError.new("Deprecated data column for vuln, use .info instead") if opts[:data]
    name = opts[:name] || return
    info = opts[:info]
    wspace = opts.delete(:workspace) || workspace
    exploited_at = opts[:exploited_at] || opts["exploited_at"]
    rids = nil
    if opts[:refs]
      rids = []
      opts[:refs].each do |r|
        if r.respond_to? :ctx_id
          r = r.ctx_id + '-' + r.ctx_val
        end
        rids << find_or_create_ref(:name => r)
      end
    end

    host = nil
    addr = nil
    if opts[:host].kind_of? Host
      host = opts[:host]
    else
      host = report_host({:workspace => wspace, :host => opts[:host]})
      addr = normalize_host(opts[:host])
    end

    ret = {}

=begin
    if host
      host.updated_at = host.created_at
      host.state      = HostState::Alive
      host.save!
    else
      host = get_host(:workspace => wspace, :address => addr)
    end
=end

    # Truncate the info field at the maximum field length
    if info
      info = info[0,65535]
    end

    # Truncate the name field at the maximum field length
    name = name[0,255]

    if info and name !~ /^NEXPOSE-/
      vuln = host.vulns.find_or_initialize_by_name_and_info(name, info, :include => :refs)
    else
      vuln = host.vulns.find_or_initialize_by_name(name, :include => :refs)
    end

    vuln.info = info.to_s if info
    vuln.exploited_at = exploited_at if exploited_at

    if opts[:port]
      proto = nil
      case opts[:proto].to_s.downcase # Catch incorrect usages, as in report_note
      when 'tcp','udp'
        proto = opts[:proto]
      when 'dns','snmp','dhcp'
        proto = 'udp'
        sname = opts[:proto]
      else
        proto = 'tcp'
        sname = opts[:proto]
      end
      vuln.service = host.services.find_or_create_by_port_and_proto(opts[:port], proto)
    end

    if rids
      vuln.refs << (rids - vuln.refs)
    end

    if vuln.changed?
      msf_import_timestamps(opts, vuln)
      vuln.save!
    end
  end
  def get_vuln(wspace, host, service, name, data='')
    raise RuntimeError, "Not workspace safe: #{caller.inspect}"
    vuln = nil
    if (service)
      vuln = Vuln.find(:first, :conditions => ["name = ? and service_id = ? and host_id = ?", name, service.id, host.id])
    else
      vuln = Vuln.find(:first, :conditions => ["name = ? and host_id = ?", name, host.id])
    end
    return vuln
  end
  #
  # Find or create a reference matching this name
  #
  def find_or_create_ref(opts)
    ret = {}
    ret[:ref] = get_ref(opts[:name])
    return ret[:ref] if ret[:ref]

    ref = Ref.find_or_initialize_by_name(opts[:name])
    if ref and ref.changed?
      ref.save!
    end
    ret[:ref] = ref
  end

  def get_ref(name)
    Ref.find_by_name(name)
  end
  # report_exploit() used to be used to track sessions and which modules
  # opened them. That information is now available with the session table
  # directly. TODO: kill this completely some day -- for now just warn if
  # some other UI is actually using it.
  def report_exploit(opts={})
    wlog("Deprecated method call: report_exploit()\n" +
      "report_exploit() options: #{opts.inspect}\n" +
      "report_exploit() call stack:\n\t#{caller.join("\n\t")}"
    )
  end
  #
  # Deletes a host and associated data matching this address/comm
  #
  def del_host(wspace, address, comm='')
    host = wspace.hosts.find_by_address_and_comm(address, comm)
    host.destroy if host
  end

  #
  # Deletes a port and associated vulns matching this port
  #
  def del_service(wspace, address, proto, port, comm='')
    host = get_host(:workspace => wspace, :address => address)
    return unless host
    host.services.all(:conditions => {:proto => proto, :port => port}).each { |s| s.destroy }
  end
  #
  # Find a reference matching this name
  #
  def has_ref?(name)
    Ref.find_by_name(name)
  end

  #
  # Find a vulnerability matching this name
  #
  def has_vuln?(name)
    Vuln.find_by_name(name)
  end

  #
  # Look for an address across all comms
  #
  def has_host?(wspace, addr)
    wspace.hosts.find_by_address(addr)
  end

  def events(wspace=workspace)
    wspace.events.find :all, :order => 'created_at ASC'
  end
  def report_event(opts = {})
    return if not active
    wspace = opts.delete(:workspace) || workspace
    return if not wspace # Temp fix?
    uname  = opts.delete(:username)

    if !opts[:host].kind_of?(Host) and opts[:host]
      opts[:host] = report_host(:workspace => wspace, :host => opts[:host])
    end

    Event.create(opts.merge(:workspace_id => wspace[:id], :username => uname))
  end
  #
  # Loot collection
  #
  #
  # This method iterates the loot table calling the supplied block with the
  # instance of each entry.
  #
  def each_loot(wspace=workspace, &block)
    wspace.loots.each do |note|
      block.call(note)
    end
  end

  #
  # Find or create a loot matching this type/data
  #
  def find_or_create_loot(opts)
    report_loot(opts)
  end
  def report_loot(opts)
    return if not active
    wspace = opts.delete(:workspace) || workspace
    path = opts.delete(:path) || (raise RuntimeError, "A loot :path is required")

    host = nil
    addr = nil

    # Report the host so it's there for the Proc to use below
    if opts[:host]
      if opts[:host].kind_of? Host
        host = opts[:host]
      else
        host = report_host({:workspace => wspace, :host => opts[:host]})
        addr = normalize_host(opts[:host])
      end
    end

    ret = {}

    ltype = opts.delete(:type) || opts.delete(:ltype) || (raise RuntimeError, "A loot :type or :ltype is required")
    ctype = opts.delete(:ctype) || opts.delete(:content_type) || 'text/plain'
    name  = opts.delete(:name)
    info  = opts.delete(:info)
    data  = opts[:data]
    loot  = wspace.loots.new

    if host
      loot.host_id = host[:id]
    end
    if opts[:service] and opts[:service].kind_of? Service
      loot.service_id = opts[:service][:id]
    end

    loot.path  = path
    loot.ltype = ltype
    loot.content_type = ctype
    loot.data  = data
    loot.name  = name if name
    loot.info  = info if info
    msf_import_timestamps(opts, loot)
    loot.save!

    if !opts[:created_at]
=begin
      if host
        host.updated_at = host.created_at
        host.state      = HostState::Alive
        host.save!
      end
=end
    end

    ret[:loot] = loot
  end
  #
  # This method returns a list of all loot in the database
  #
  def loots(wspace=workspace)
    wspace.loots
  end
  #
  # Find or create a task matching this type/data
  #
  def find_or_create_task(opts)
    report_task(opts)
  end

  def report_task(opts)
    return if not active
    wspace = opts.delete(:workspace) || workspace
    path = opts.delete(:path) || (raise RuntimeError, "A task :path is required")

    ret = {}

    user    = opts.delete(:user)
    desc    = opts.delete(:desc)
    error   = opts.delete(:error)
    info    = opts.delete(:info)
    mod     = opts.delete(:mod)
    options = opts.delete(:options)
    prog    = opts.delete(:prog)
    result  = opts.delete(:result)
    completed_at = opts.delete(:completed_at)
    task    = wspace.tasks.new

    task.created_by = user
    task.description = desc
    task.error = error if error
    task.info = info
    task.module = mod
    task.options = options
    task.path = path
    task.progress = prog
    task.result = result if result
    msf_import_timestamps(opts, task)
    # Having blank completed_ats, while accurate, will cause unstoppable tasks.
    if completed_at.nil? || completed_at.empty?
      task.completed_at = opts[:updated_at]
    else
      task.completed_at = completed_at
    end
    task.save!
    ret[:task] = task
  end

  #
  # This method returns a list of all tasks in the database
  #
  def tasks(wspace=workspace)
    wspace.tasks
  end
  #
  # Find or create a report matching this type/data
  #
  def find_or_create_report(opts)
    report_report(opts)
  end

  def report_report(opts)
    return if not active
    wspace = opts.delete(:workspace) || workspace
    path = opts.delete(:path) || (raise RuntimeError, "A report :path is required")

    ret = {}

    user    = opts.delete(:user)
    options = opts.delete(:options)
    rtype   = opts.delete(:rtype)
    report  = wspace.reports.new
    report.created_by = user
    report.options = options
    report.rtype = rtype
    report.path = path
    msf_import_timestamps(opts, report)
    report.save!

    ret[:task] = report
  end

  #
  # This method returns a list of all reports in the database
  #
  def reports(wspace=workspace)
    wspace.reports
  end
  #
  # WMAP
  # Support methods
  #

  #
  # Report a Web Site to the database.  WebSites must be tied to an existing Service
  #
  # opts MUST contain
  #   :service* -- the service object this site should be associated with
  #   :vhost    -- the virtual host name for this particular web site
  #
  # If service is NOT specified, the following values are mandatory
  #   :host -- the ip address of the server hosting the web site
  #   :port -- the port number of the associated web site
  #   :ssl  -- whether or not SSL is in use on this port
  #
  # These values will be used to create new host and service records
  #
  # opts can contain
  #   :options -- a hash of options for accessing this particular web site
  #
  # Duplicate records for a given host, port, vhost combination will be overwritten
  #
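  # Illustrative usage (a sketch, not from the original source):
  #
  #   framework.db.report_web_site(
  #     :host  => '192.168.0.1',
  #     :port  => 80,
  #     :ssl   => false,
  #     :vhost => 'www.example.com'
  #   )
  #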
  def report_web_site(opts)
    return if not active
    wspace = opts.delete(:workspace) || workspace
    vhost  = opts.delete(:vhost)

    addr = nil
    port = nil
    name = nil
    serv = nil

    if opts[:service] and opts[:service].kind_of?(Service)
      serv = opts[:service]
    else
      addr = opts[:host]
      port = opts[:port]
      name = opts[:ssl] ? 'https' : 'http'
      if not (addr and port)
        raise ArgumentError, "report_web_site requires service OR host/port/ssl"
      end

      # Force addr to be the address and not hostname
      addr = Rex::Socket.getaddress(addr)
    end

    ret = {}

    host = serv ? serv.host : find_or_create_host(
      :workspace => wspace,
      :host      => addr,
      :state     => Msf::HostState::Alive
    )

    if host.name.to_s.empty?
      host.name = vhost
      host.save!
    end

    serv = serv ? serv : find_or_create_service(
      :workspace => wspace,
      :host      => host,
      :port      => port,
      :proto     => 'tcp',
      :state     => 'open'
    )

    # Change the service name if it is blank or it has
    # been explicitly specified.
    if opts.keys.include?(:ssl) or serv.name.to_s.empty?
      name = opts[:ssl] ? 'https' : 'http'
      serv.name = name
      serv.save!
    end
=begin
    host.updated_at = host.created_at
    host.state      = HostState::Alive
    host.save!
=end

    vhost ||= host.address
    site = WebSite.find_or_initialize_by_vhost_and_service_id(vhost, serv[:id])
    site.options = opts[:options] if opts[:options]

    # XXX:
    msf_import_timestamps(opts, site)
    site.save!

    ret[:web_site] = site
  end
2009-12-13 05:24:48 +00:00
2008-10-12 03:46:49 +00:00
#
2010-10-06 18:00:21 +00:00
# Report a Web Page to the database. WebPage must be tied to an existing Web Site
2008-10-12 03:46:49 +00:00
#
2010-10-06 18:00:21 +00:00
# opts MUST contain
# :web_site* -- the web site object that this page should be associated with
# :path -- the virtual host name for this particular web site
# :code -- the http status code from requesting this page
# :headers -- this is a HASH of headers (lowercase name as key) of ARRAYs of values
# :body -- the document body of the server response
2011-06-15 00:51:51 +00:00
# :query -- the query string after the path
2010-10-06 18:00:21 +00:00
# If web_site is NOT specified, the following values are mandatory
# :host -- the ip address of the server hosting the web site
# :port -- the port number of the associated web site
# :vhost -- the virtual host for this particular web site
# :ssl -- whether or not SSL is in use on this port
2008-10-12 03:46:49 +00:00
#
2010-10-06 18:00:21 +00:00
# These values will be used to create new host, service, and web_site records
2008-10-12 03:46:49 +00:00
#
2010-10-06 18:00:21 +00:00
# opts can contain
# :cookie -- the Set-Cookie headers, merged into a string
# :auth -- the Authorization headers, merged into a string
# :ctype -- the Content-Type headers, merged into a string
# :mtime -- the timestamp returned from the server of the last modification time
# :location -- the URL that a redirect points to
2011-06-15 00:51:51 +00:00
#
2010-10-06 18:00:21 +00:00
# Duplicate records for a given web_site, path, and query combination will be overwritten
#
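#
# A minimal illustrative call (a sketch only -- the host, port, path, and
# content values below are hypothetical):
#
#   report_web_page(
#     :host    => '192.168.0.1',
#     :port    => 80,
#     :ssl     => false,
#     :path    => '/index.html',
#     :query   => 'id=1',
#     :code    => 200,
#     :headers => { 'content-type' => ['text/html'] },
#     :body    => '<html>...</html>'
#   )
#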
2011-06-15 00:51:51 +00:00
2010-10-06 18:00:21 +00:00
def report_web_page ( opts )
return if not active
wspace = opts . delete ( :workspace ) || workspace
2011-06-15 00:51:51 +00:00
2010-10-06 18:00:21 +00:00
path = opts [ :path ]
code = opts [ :code ] . to_i
body = opts [ :body ] . to_s
query = opts [ :query ] . to_s
2011-06-15 00:51:51 +00:00
headers = opts [ :headers ]
2010-10-06 18:00:21 +00:00
site = nil
2011-06-15 00:51:51 +00:00
2010-10-06 18:00:21 +00:00
if not ( path and code and body and headers )
raise ArgumentError , " report_web_page requires the path, query, code, body, and headers parameters "
end
2011-06-15 00:51:51 +00:00
2010-10-06 18:00:21 +00:00
if opts [ :web_site ] and opts [ :web_site ] . kind_of? ( WebSite )
site = opts . delete ( :web_site )
else
site = report_web_site (
2010-10-06 20:24:26 +00:00
:workspace = > wspace ,
2011-06-15 00:51:51 +00:00
:host = > opts [ :host ] , :port = > opts [ :port ] ,
2011-04-22 20:16:55 +00:00
:vhost = > opts [ :host ] , :ssl = > opts [ :ssl ]
2010-10-06 18:00:21 +00:00
)
if not site
raise ArgumentError , " report_web_page was unable to create the associated web site "
end
2008-10-12 03:46:49 +00:00
end
2009-12-13 05:24:48 +00:00
2010-10-06 18:00:21 +00:00
ret = { }
2011-04-22 20:16:55 +00:00
page = WebPage . find_or_initialize_by_web_site_id_and_path_and_query ( site [ :id ] , path , query )
page . code = code
page . body = body
2011-06-15 00:51:51 +00:00
page . headers = headers
2011-04-22 20:16:55 +00:00
page . cookie = opts [ :cookie ] if opts [ :cookie ]
page . auth = opts [ :auth ] if opts [ :auth ]
page . mtime = opts [ :mtime ] if opts [ :mtime ]
page . ctype = opts [ :ctype ] if opts [ :ctype ]
page . location = opts [ :location ] if opts [ :location ]
msf_import_timestamps ( opts , page )
page . save!
ret [ :web_page ] = page
2008-10-12 03:46:49 +00:00
end
2011-06-15 00:51:51 +00:00
2008-10-12 03:46:49 +00:00
#
2010-10-06 18:00:21 +00:00
# Report a Web Form to the database. WebForm must be tied to an existing Web Site
2008-10-12 03:46:49 +00:00
#
2010-10-06 18:00:21 +00:00
# opts MUST contain
# :web_site* -- the web site object that this form should be associated with
# :path -- the path portion of the URL for this form
# :query -- the query string that is appended to the path (not valid for GET)
# :method -- the form method, one of GET, POST, or PATH
# :params -- an ARRAY of all parameters and values specified in the form
2008-10-12 03:46:49 +00:00
#
2010-10-06 18:00:21 +00:00
# If web_site is NOT specified, the following values are mandatory
# :host -- the ip address of the server hosting the web site
# :port -- the port number of the associated web site
# :vhost -- the virtual host for this particular web site
# :ssl -- whether or not SSL is in use on this port
2008-10-12 03:46:49 +00:00
#
2011-06-15 00:51:51 +00:00
#
2010-10-06 18:00:21 +00:00
# Duplicate records for a given web_site, path, method, and params combination will be overwritten
#
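#
# A minimal illustrative call (a sketch only -- the values, including the
# shape of :params as an array of name/value pairs, are hypothetical):
#
#   report_web_form(
#     :host   => '192.168.0.1',
#     :port   => 80,
#     :ssl    => false,
#     :path   => '/login',
#     :method => 'POST',
#     :params => [ ['username', 'admin'], ['password', ''] ]
#   )
#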
2011-06-15 00:51:51 +00:00
2010-10-06 18:00:21 +00:00
def report_web_form ( opts )
return if not active
wspace = opts . delete ( :workspace ) || workspace
2011-06-15 00:51:51 +00:00
2010-10-06 18:00:21 +00:00
path = opts [ :path ]
meth = opts [ :method ] . to_s . upcase
para = opts [ :params ]
quer = opts [ :query ] . to_s
site = nil
if not ( path and meth )
raise ArgumentError , " report_web_form requires the path and method parameters "
end
2011-06-15 00:51:51 +00:00
2010-10-06 18:00:21 +00:00
if not %W{ GET POST PATH } . include? ( meth )
raise ArgumentError , " report_web_form requires the method to be one of GET, POST, PATH "
end
if opts [ :web_site ] and opts [ :web_site ] . kind_of? ( WebSite )
site = opts . delete ( :web_site )
else
site = report_web_site (
2010-10-06 20:24:26 +00:00
:workspace = > wspace ,
2011-06-15 00:51:51 +00:00
:host = > opts [ :host ] , :port = > opts [ :port ] ,
:vhost = > opts [ :host ] , :ssl = > opts [ :ssl ]
2010-10-06 18:00:21 +00:00
)
if not site
raise ArgumentError , " report_web_form was unable to create the associated web site "
end
end
ret = { }
2011-04-22 20:16:55 +00:00
# Since one of our serialized fields is used as a unique parameter, we must do the final
# comparisons through ruby and not SQL.
2011-06-15 00:51:51 +00:00
2011-04-22 20:16:55 +00:00
form = nil
WebForm . find_all_by_web_site_id_and_path_and_method_and_query ( site [ :id ] , path , meth , quer ) . each do | xform |
if xform . params == para
form = xform
break
2011-06-15 00:51:51 +00:00
end
2008-10-12 03:46:49 +00:00
end
2011-04-22 20:16:55 +00:00
if not form
form = WebForm . new
form . web_site_id = site [ :id ]
form . path = path
form . method = meth
form . params = para
form . query = quer
2011-06-15 00:51:51 +00:00
end
2011-04-22 20:16:55 +00:00
msf_import_timestamps ( opts , form )
form . save!
ret [ :web_form ] = form
2008-10-12 03:46:49 +00:00
end
2009-12-13 05:24:48 +00:00
2010-10-06 18:00:21 +00:00
2008-12-04 03:42:43 +00:00
#
2010-10-06 18:00:21 +00:00
# Report a Web Vuln to the database. WebVuln must be tied to an existing Web Site
2008-12-04 03:42:43 +00:00
#
2010-10-06 18:00:21 +00:00
# opts MUST contain
# :web_site* -- the web site object that this vuln should be associated with
# :path -- the path portion of the URL where the vulnerability occurs
# :query -- the query string appended to the path (not valid for GET method flaws)
# :method -- the form method, one of GET, POST, or PATH
# :params -- an ARRAY of all parameters and values specified in the form
# :pname -- the specific field where the vulnerability occurs
# :proof -- the string showing proof of the vulnerability
# :risk -- an INTEGER value from 0 to 5 indicating the risk (5 is highest)
# :name -- the string indicating the type of vulnerability
#
# If web_site is NOT specified, the following values are mandatory
# :host -- the ip address of the server hosting the web site
# :port -- the port number of the associated web site
# :vhost -- the virtual host for this particular web site
# :ssl -- whether or not SSL is in use on this port
#
2011-06-15 00:51:51 +00:00
#
2010-10-06 18:00:21 +00:00
# Duplicate records for a given web_site, path, method, pname, and name combination will be overwritten
#
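#
# A minimal illustrative call (a sketch only -- all values are hypothetical):
#
#   report_web_vuln(
#     :host       => '192.168.0.1',
#     :port       => 80,
#     :ssl        => false,
#     :path       => '/search',
#     :method     => 'GET',
#     :query      => 'q=test',
#     :params     => [ ['q', 'test'] ],
#     :pname      => 'q',
#     :proof      => "<script>alert(1)</script>",
#     :risk       => 3,
#     :confidence => 90,
#     :category   => 'XSS',
#     :name       => 'Cross-Site Scripting'
#   )
#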
2011-06-15 00:51:51 +00:00
2010-10-06 18:00:21 +00:00
def report_web_vuln ( opts )
return if not active
wspace = opts . delete ( :workspace ) || workspace
2011-06-15 00:51:51 +00:00
2010-10-06 18:00:21 +00:00
path = opts [ :path ]
2010-10-10 19:36:31 +00:00
meth = opts [ :method ]
2010-10-06 18:00:21 +00:00
para = opts [ :params ] || [ ]
quer = opts [ :query ] . to_s
pname = opts [ :pname ]
proof = opts [ :proof ]
risk = opts [ :risk ] . to_i
name = opts [ :name ] . to_s . strip
2010-10-10 19:36:31 +00:00
blame = opts [ :blame ] . to_s . strip
desc = opts [ :description ] . to_s . strip
conf = opts [ :confidence ] . to_i
2011-06-15 00:51:51 +00:00
cat = opts [ :category ] . to_s . strip
2010-10-06 18:00:21 +00:00
site = nil
if not ( path and meth and proof and pname )
2010-10-11 02:57:07 +00:00
raise ArgumentError , " report_web_vuln requires the path, method, proof, risk, name, params, and pname parameters. Received #{ opts . inspect } "
2010-10-06 18:00:21 +00:00
end
2011-06-15 00:51:51 +00:00
2010-10-06 18:00:21 +00:00
if not %W{ GET POST PATH } . include? ( meth )
2010-10-11 02:57:07 +00:00
raise ArgumentError , " report_web_vuln requires the method to be one of GET, POST, PATH. Received ' #{ meth } ' "
2010-10-06 18:00:21 +00:00
end
2011-06-15 00:51:51 +00:00
2010-10-06 18:00:21 +00:00
if risk < 0 or risk > 5
2010-10-11 02:57:07 +00:00
raise ArgumentError , " report_web_vuln requires the risk to be between 0 and 5 (inclusive). Received ' #{ risk } ' "
2010-10-06 18:00:21 +00:00
end
2010-10-10 19:36:31 +00:00
if conf < 0 or conf > 100
2010-10-11 02:57:07 +00:00
raise ArgumentError , " report_web_vuln requires the confidence to be between 1 and 100 (inclusive). Received ' #{ conf } ' "
2010-10-10 19:36:31 +00:00
end
if cat . empty?
raise ArgumentError , " report_web_vuln requires the category to be a valid string "
end
2011-06-15 00:51:51 +00:00
2010-10-06 18:00:21 +00:00
if name . empty?
raise ArgumentError , " report_web_vuln requires the name to be a valid string "
end
2011-06-15 00:51:51 +00:00
2010-10-06 18:00:21 +00:00
if opts [ :web_site ] and opts [ :web_site ] . kind_of? ( WebSite )
site = opts . delete ( :web_site )
else
site = report_web_site (
2010-10-06 20:24:26 +00:00
:workspace = > wspace ,
2011-06-15 00:51:51 +00:00
:host = > opts [ :host ] , :port = > opts [ :port ] ,
2011-04-22 20:16:55 +00:00
:vhost = > opts [ :host ] , :ssl = > opts [ :ssl ]
2010-10-06 18:00:21 +00:00
)
if not site
raise ArgumentError , " report_web_form was unable to create the associated web site "
end
end
ret = { }
2011-06-15 00:51:51 +00:00
2011-04-22 20:16:55 +00:00
meth = meth . to_s . upcase
2011-06-15 00:51:51 +00:00
2011-06-02 22:16:08 +00:00
vuln = WebVuln . find_or_initialize_by_web_site_id_and_path_and_method_and_pname_and_name_and_category_and_query ( site [ :id ] , path , meth , pname , name , cat , quer )
2011-06-15 00:51:51 +00:00
vuln . name = name
2011-04-22 20:16:55 +00:00
vuln . risk = risk
vuln . params = para
2011-06-15 00:51:51 +00:00
vuln . proof = proof . to_s
2011-04-22 20:16:55 +00:00
vuln . category = cat
vuln . blame = blame
vuln . description = desc
vuln . confidence = conf
msf_import_timestamps ( opts , vuln )
vuln . save!
2011-06-15 00:51:51 +00:00
2011-04-22 20:16:55 +00:00
ret [ :web_vuln ] = vuln
2008-12-04 03:42:43 +00:00
end
2009-12-13 05:24:48 +00:00
2008-10-12 03:46:49 +00:00
#
# WMAP
2010-10-06 18:00:21 +00:00
# Selected host
2008-10-12 03:46:49 +00:00
#
2010-10-06 18:00:21 +00:00
def selected_host
selhost = WmapTarget . find ( :first , :conditions = > [ " selected != 0 " ] )
if selhost
return selhost . host
else
return
end
2008-10-12 03:46:49 +00:00
end
2009-12-13 05:24:48 +00:00
2008-10-12 03:46:49 +00:00
#
# WMAP
2010-10-06 18:00:21 +00:00
# Selected port
2008-10-12 03:46:49 +00:00
#
2010-10-06 18:00:21 +00:00
def selected_port
WmapTarget . find ( :first , :conditions = > [ " selected != 0 " ] ) . port
2008-10-12 03:46:49 +00:00
end
2009-12-13 05:24:48 +00:00
2008-10-12 03:46:49 +00:00
#
# WMAP
2010-10-06 18:00:21 +00:00
# Selected ssl
2008-10-12 03:46:49 +00:00
#
2010-10-06 18:00:21 +00:00
def selected_ssl
WmapTarget . find ( :first , :conditions = > [ " selected != 0 " ] ) . ssl
2008-10-12 03:46:49 +00:00
end
2006-09-17 00:39:23 +00:00
2008-10-12 03:46:49 +00:00
#
# WMAP
2010-10-06 18:00:21 +00:00
# Selected id
2008-10-12 03:46:49 +00:00
#
2010-10-06 18:00:21 +00:00
def selected_id
WmapTarget . find ( :first , :conditions = > [ " selected != 0 " ] ) . object_id
2008-10-12 03:46:49 +00:00
end
2009-12-13 05:24:48 +00:00
2008-10-12 03:46:49 +00:00
#
# WMAP
2010-10-06 18:00:21 +00:00
# This method iterates the requests table identifying possible targets.
# This method will be removed in the second phase of the db merging.
2008-10-12 03:46:49 +00:00
#
2010-10-06 18:00:21 +00:00
def each_distinct_target ( & block )
request_distinct_targets . each do | target |
block . call ( target )
end
2008-10-12 03:46:49 +00:00
end
2009-12-13 05:24:48 +00:00
2008-10-12 03:46:49 +00:00
#
# WMAP
2010-10-06 18:00:21 +00:00
# This method returns a list of all possible targets available in requests.
# This method will be removed in the second phase of the db merging.
2008-10-12 03:46:49 +00:00
#
2010-10-06 18:00:21 +00:00
def request_distinct_targets
WmapRequest . find ( :all , :select = > 'DISTINCT host,address,port,ssl' )
2008-10-12 03:46:49 +00:00
end
2009-12-13 05:24:48 +00:00
2008-11-30 22:41:09 +00:00
#
# WMAP
2010-10-06 18:00:21 +00:00
# This method iterates the requests table, yielding each request of the selected target that has a path.
2008-11-30 22:41:09 +00:00
#
2010-10-06 18:00:21 +00:00
def each_request_target_with_path ( & block )
target_requests ( 'AND wmap_requests.path IS NOT NULL' ) . each do | req |
block . call ( req )
end
2008-11-30 22:41:09 +00:00
end
2009-12-13 05:24:48 +00:00
2009-05-28 03:26:27 +00:00
#
# WMAP
2010-10-06 18:00:21 +00:00
# This method iterates the requests table, yielding each request of the selected target that has a query string.
2009-05-28 03:26:27 +00:00
#
2010-10-06 18:00:21 +00:00
def each_request_target_with_query ( & block )
target_requests ( 'AND wmap_requests.query IS NOT NULL' ) . each do | req |
block . call ( req )
end
2009-05-28 03:26:27 +00:00
end
2009-12-13 05:24:48 +00:00
2010-10-06 18:00:21 +00:00
#
# WMAP
# This method iterates the requests table, yielding each request of the selected target that has a body.
#
def each_request_target_with_body ( & block )
target_requests ( 'AND wmap_requests.body IS NOT NULL' ) . each do | req |
block . call ( req )
end
end
#
# WMAP
# This method iterates the requests table, yielding each request of the selected target that has headers.
#
def each_request_target_with_headers ( & block )
target_requests ( 'AND wmap_requests.headers IS NOT NULL' ) . each do | req |
block . call ( req )
end
end
#
# WMAP
# This method iterates the requests table, yielding each request of the selected target.
#
def each_request_target ( & block )
target_requests ( '' ) . each do | req |
block . call ( req )
end
end
#
# WMAP
# This method returns a list of all requests for the selected target.
#
def target_requests ( extra_condition )
WmapRequest . find ( :all , :conditions = > [ " wmap_requests.host = ? AND wmap_requests.port = ? #{ extra_condition } " , selected_host , selected_port ] )
end
#
# WMAP
# This method iterates the requests table calling the supplied block with the
# request instance of each entry.
#
def each_request ( & block )
requests . each do | request |
block . call ( request )
end
end
#
# WMAP
# This method allows direct queries against the requests table. It is intended mainly for use by modules.
#
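# Illustrative use from a module (the host, port, and extra condition are
# hypothetical; the condition is appended verbatim to the SQL WHERE clause):
#
#   request_sql('192.168.0.1', 80, "AND wmap_requests.path LIKE '%admin%'")
#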
def request_sql ( host , port , extra_condition )
WmapRequest . find ( :all , :conditions = > [ " wmap_requests.host = ? AND wmap_requests.port = ? #{ extra_condition } " , host , port ] )
end
#
# WMAP
# This method returns a list of all requests in the database
#
def requests
WmapRequest . find ( :all )
end
#
# WMAP
# This method iterates the targets table calling the supplied block with the
# target instance of each entry.
#
def each_target ( & block )
targets . each do | target |
block . call ( target )
end
end
#
# WMAP
# This method returns a list of all targets in the database
#
def targets
WmapTarget . find ( :all )
end
#
# WMAP
# This method deletes all targets from the targets table in the database
#
def delete_all_targets
WmapTarget . delete_all
end
#
# WMAP
# Find a target matching this id
#
def get_target ( id )
target = WmapTarget . find ( :first , :conditions = > [ " id = ? " , id ] )
return target
end
#
# WMAP
# Create a target
#
def create_target ( host , port , ssl , sel )
tar = WmapTarget . create (
:host = > host ,
:address = > host ,
:port = > port ,
:ssl = > ssl ,
:selected = > sel
)
#framework.events.on_db_target(rec)
end
#
# WMAP
# Create a request (by hand)
#
def create_request ( host , port , ssl , meth , path , headers , query , body , respcode , resphead , response )
req = WmapRequest . create (
:host = > host ,
:address = > host ,
:port = > port ,
:ssl = > ssl ,
:meth = > meth ,
:path = > path ,
:headers = > headers ,
:query = > query ,
:body = > body ,
:respcode = > respcode ,
:resphead = > resphead ,
:response = > response
)
#framework.events.on_db_request(rec)
end
#
# WMAP
# Quick way to query the database (used by wmap_sql)
#
def sql_query ( sqlquery )
ActiveRecord :: Base . connection . select_all ( sqlquery )
end
# Returns a REXML::Document from the given data.
def rexmlify ( data )
2010-10-19 22:51:37 +00:00
if data . kind_of? ( REXML :: Document )
return data
else
# Make an attempt to recover from a REXML import fail, since
# it's better than dying outright.
begin
return REXML :: Document . new ( data )
rescue REXML :: ParseException = > e
dlog ( " REXML error: Badly formatted XML, attempting to recover. Error was: #{ e . inspect } " )
2011-04-04 22:19:59 +00:00
return REXML :: Document . new ( data . gsub ( / ([ \ x00- \ x08 \ x0b \ x0c \ x0e- \ x1f \ x80- \ xff]) / ) { | x | " \\ x%.2x " % x . unpack ( " C* " ) [ 0 ] } )
2010-10-19 22:51:37 +00:00
end
end
2010-06-04 14:57:58 +00:00
end
2011-04-22 20:16:55 +00:00
# Handles timestamps from Metasploit Express/Pro imports.
2010-10-06 15:55:28 +00:00
def msf_import_timestamps ( opts , obj )
2010-06-04 14:57:58 +00:00
obj . created_at = opts [ " created_at " ] if opts [ " created_at " ]
2010-06-11 18:56:16 +00:00
obj . created_at = opts [ :created_at ] if opts [ :created_at ]
2010-06-04 14:57:58 +00:00
obj . updated_at = opts [ " updated_at " ] ? opts [ " updated_at " ] : obj . created_at
2010-06-11 18:56:16 +00:00
obj . updated_at = opts [ :updated_at ] ? opts [ :updated_at ] : obj . created_at
2010-06-04 14:57:58 +00:00
return obj
end
2010-01-07 19:06:29 +00:00
##
#
# Import methods
#
##
#
# Generic importer that automatically determines the file type being
# imported. Since this looks for vendor-specific strings in the given
# file, there shouldn't be any false detections, but no guarantees.
#
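#
# Illustrative use (the path is hypothetical; the optional block receives
# progress events from the underlying importer):
#
#   import_file(:filename => '/tmp/nmap_scan.xml') do |type, data|
#     puts "#{type}: #{data}"
#   end
#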
2010-06-08 19:16:20 +00:00
def import_file ( args = { } , & block )
2010-06-04 14:57:58 +00:00
filename = args [ :filename ] || args [ 'filename' ]
wspace = args [ :wspace ] || args [ 'wspace' ] || workspace
2010-05-05 19:45:48 +00:00
@import_filedata = { }
@import_filedata [ :filename ] = filename
2010-06-10 21:06:06 +00:00
2010-11-04 21:44:16 +00:00
data = " "
:: File . open ( filename , 'rb' ) do | f |
data = f . read ( f . stat . size )
end
2010-12-12 17:44:48 +00:00
case data [ 0 , 4 ]
when " PK \x03 \x04 "
2010-06-11 21:21:59 +00:00
data = Zip :: ZipFile . open ( filename )
2010-12-12 17:44:48 +00:00
when " \xd4 \xc3 \xb2 \xa1 " , " \xa1 \xb2 \xc3 \xd4 "
data = PacketFu :: PcapFile . new . readfile ( filename )
2010-06-10 21:06:06 +00:00
end
2010-06-08 19:16:20 +00:00
if block
import ( args . merge ( :data = > data ) ) { | type , data | yield type , data }
else
2010-07-06 16:10:05 +00:00
import ( args . merge ( :data = > data ) )
2010-06-08 19:16:20 +00:00
end
2010-06-10 21:06:06 +00:00
2010-01-07 19:06:29 +00:00
end
2010-02-14 18:40:27 +00:00
2010-06-04 14:57:58 +00:00
# A dispatcher method that figures out the data's file type,
# and sends it off to the appropriate importer. Note that
# import_filetype_detect will raise an error if the filetype
# is unknown.
2010-06-08 19:16:20 +00:00
def import ( args = { } , & block )
2010-06-04 14:57:58 +00:00
data = args [ :data ] || args [ 'data' ]
wspace = args [ :wspace ] || args [ 'wspace' ] || workspace
ftype = import_filetype_detect ( data )
2010-06-08 19:16:20 +00:00
yield ( :filetype , @import_filedata [ :type ] ) if block
self . send " import_ #{ ftype } " . to_sym , args , & block
2010-04-26 20:25:42 +00:00
end
2010-06-04 14:57:58 +00:00
# Returns one of: :nexpose_simplexml :nexpose_rawxml :nmap_xml :openvas_xml
2011-05-12 20:08:33 +00:00
# :nessus_xml :nessus_xml_v2 :qualys_scan_xml, :qualys_asset_xml, :msf_xml :nessus_nbe :amap_mlog
2011-06-02 22:16:08 +00:00
# :amap_log :ip_list, :msf_zip, :libpcap, :foundstone_xml, :acunetix_xml, :appscan_xml
2011-06-17 14:46:54 +00:00
# :burp_session, :ip360_xml_v3, :ip360_aspl_xml
2010-06-04 14:57:58 +00:00
# If there is no match, an error is raised instead.
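#
# Illustrative use (the path is hypothetical; validate_import_file, defined
# further below, wraps this detection in a simple boolean check):
#
#   data = ::File.open('/tmp/report.xml', 'rb') { |f| f.read(f.stat.size) }
#   import_filetype_detect(data)   # => e.g. :nmap_xml
#   validate_import_file(data)     # => true
#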
def import_filetype_detect ( data )
2011-06-15 00:51:51 +00:00
2010-12-27 16:30:08 +00:00
if data and data . kind_of? Zip :: ZipFile
2010-10-28 15:59:40 +00:00
raise DBImportError . new ( " The zip file provided is empty. " ) if data . entries . empty?
2010-06-10 21:06:06 +00:00
@import_filedata || = { }
@import_filedata [ :zip_filename ] = File . split ( data . to_s ) . last
@import_filedata [ :zip_basename ] = @import_filedata [ :zip_filename ] . gsub ( / \ .zip$ / , " " )
@import_filedata [ :zip_entry_names ] = data . entries . map { | x | x . name }
2011-04-30 01:49:55 +00:00
begin
@import_filedata [ :zip_xml ] = @import_filedata [ :zip_entry_names ] . grep ( / ^(.*)_[0-9]+ \ .xml$ / ) . first || raise
@import_filedata [ :zip_wspace ] = @import_filedata [ :zip_xml ] . to_s . match ( / ^(.*)_[0-9]+ \ .xml$ / ) [ 1 ]
@import_filedata [ :type ] = " Metasploit ZIP Report "
2010-10-28 15:59:40 +00:00
return :msf_zip
2011-04-30 03:33:45 +00:00
rescue :: Interrupt
raise $!
rescue :: Exception
2010-10-28 15:59:40 +00:00
raise DBImportError . new ( " The zip file provided is not a Metasploit ZIP report " )
end
2010-06-10 21:06:06 +00:00
end
2010-12-12 17:44:48 +00:00
2010-12-27 16:30:08 +00:00
if data and data . kind_of? PacketFu :: PcapFile
2010-12-12 17:44:48 +00:00
raise DBImportError . new ( " The pcap file provided is empty. " ) if data . body . empty?
@import_filedata || = { }
@import_filedata [ :type ] = " Libpcap Packet Capture "
return :libpcap
end
2011-07-30 18:31:12 +00:00
# This is a text string, let's make sure it's treated as binary
data = data . unpack ( " C* " ) . pack ( " C* " )
if data and data . to_s . strip . length == 0
2010-12-27 16:30:08 +00:00
raise DBImportError . new ( " The data provided to the import function was empty " )
end
2011-07-14 15:11:17 +00:00
# Parse the first line or 4k of data from the file
di = data . index ( " \n " ) || 4096
2011-07-27 13:25:55 +00:00
2010-02-14 18:40:27 +00:00
firstline = data [ 0 , di ]
2010-06-04 14:57:58 +00:00
@import_filedata || = { }
2010-01-07 19:06:29 +00:00
if ( firstline . index ( " <NeXposeSimpleXML " ) )
2010-07-06 16:10:05 +00:00
@import_filedata [ :type ] = " NeXpose Simple XML "
2010-06-04 14:57:58 +00:00
return :nexpose_simplexml
2010-04-07 20:51:05 +00:00
elsif ( firstline . index ( " <NexposeReport " ) )
2010-07-06 16:10:05 +00:00
@import_filedata [ :type ] = " NeXpose XML Report "
2010-06-04 14:57:58 +00:00
return :nexpose_rawxml
2010-10-07 02:33:57 +00:00
elsif ( firstline . index ( " <scanJob> " ) )
@import_filedata [ :type ] = " Retina XML "
2011-06-15 00:51:51 +00:00
return :retina_xml
2010-12-28 22:07:41 +00:00
elsif ( firstline . index ( " <NessusClientData> " ) )
@import_filedata [ :type ] = " Nessus XML (v1) "
return :nessus_xml
2011-06-01 00:01:46 +00:00
elsif ( firstline . index ( " <SecScan ID= " ) )
@import_filedata [ :type ] = " Microsoft Baseline Security Analyzer "
return :mbsa_xml
2011-06-03 21:17:57 +00:00
elsif ( data [ 0 , 1024 ] =~ / <!ATTLIST \ s+items \ s+burpVersion / )
@import_filedata [ :type ] = " Burp Session XML "
return :burp_session_xml
2010-01-07 19:06:29 +00:00
elsif ( firstline . index ( " <?xml " ) )
# it's xml, check for root tags we can handle
line_count = 0
data . each_line { | line |
2010-01-14 15:26:20 +00:00
line =~ / <([a-zA-Z0-9 \ - \ _]+)[ >] /
2010-01-07 19:06:29 +00:00
case $1
when " nmaprun "
2010-07-06 16:10:05 +00:00
@import_filedata [ :type ] = " Nmap XML "
2010-06-04 14:57:58 +00:00
return :nmap_xml
2010-01-07 19:06:29 +00:00
when " openvas-report "
2010-07-06 16:10:05 +00:00
@import_filedata [ :type ] = " OpenVAS Report "
2010-06-04 14:57:58 +00:00
return :openvas_xml
2010-01-07 19:06:29 +00:00
when " NessusClientData "
2010-07-06 16:10:05 +00:00
@import_filedata [ :type ] = " Nessus XML (v1) "
2010-06-04 14:57:58 +00:00
return :nessus_xml
2010-01-14 15:26:20 +00:00
when " NessusClientData_v2 "
2010-07-06 16:10:05 +00:00
@import_filedata [ :type ] = " Nessus XML (v2) "
2010-06-04 14:57:58 +00:00
return :nessus_xml_v2
2010-03-28 23:02:28 +00:00
when " SCAN "
2011-05-12 20:08:33 +00:00
@import_filedata [ :type ] = " Qualys Scan XML "
return :qualys_scan_xml
when " ASSET_DATA_REPORT "
@import_filedata [ :type ] = " Qualys Asset XML "
return :qualys_asset_xml
2010-10-06 18:10:39 +00:00
when / MetasploitExpressV[1234] /
2010-10-06 15:55:28 +00:00
@import_filedata [ :type ] = " Metasploit XML "
return :msf_xml
2010-10-06 18:10:39 +00:00
when / MetasploitV4 /
@import_filedata [ :type ] = " Metasploit XML "
2011-06-15 00:51:51 +00:00
return :msf_xml
2010-10-07 06:24:26 +00:00
when / netsparker /
@import_filedata [ :type ] = " NetSparker XML "
2011-06-15 00:51:51 +00:00
return :netsparker_xml
2011-06-17 20:08:27 +00:00
when / audits? / # <audit> and <audits> are both valid for nCircle. wtfmate.
2010-11-04 21:44:16 +00:00
@import_filedata [ :type ] = " IP360 XML v3 "
return :ip360_xml_v3
2011-06-15 00:51:51 +00:00
when / ontology /
@import_filedata [ :type ] = " IP360 ASPL "
return :ip360_aspl_xml
2011-05-31 15:24:42 +00:00
when / ReportInfo /
@import_filedata [ :type ] = " Foundstone "
return :foundstone_xml
2011-06-02 06:17:38 +00:00
when / ScanGroup /
@import_filedata [ :type ] = " Acunetix "
return :acunetix_xml
2011-06-02 22:16:08 +00:00
when / AppScanInfo / # Actually the second line
@import_filedata [ :type ] = " Appscan "
return :appscan_xml
2011-07-28 21:17:30 +00:00
when " entities "
if line =~ / creator.* \ x43 \ x4f \ x52 \ x45 \ x20 \ x49 \ x4d \ x50 \ x41 \ x43 \ x54 /i
@import_filedata [ :type ] = " CI "
return :ci_xml
end
2010-01-07 19:06:29 +00:00
else
# Give up if we haven't hit the root tag in the first few lines
break if line_count > 10
end
line_count += 1
}
elsif ( firstline . index ( " timestamps|||scan_start " ) )
2010-07-06 16:10:05 +00:00
@import_filedata [ :type ] = " Nessus NBE Report "
2010-01-07 19:06:29 +00:00
# then it's a nessus nbe
2010-06-04 14:57:58 +00:00
return :nessus_nbe
2010-01-07 19:06:29 +00:00
elsif ( firstline . index ( " # amap v " ) )
# then it's an amap mlog
2010-07-06 16:10:05 +00:00
@import_filedata [ :type ] = " Amap Log -m "
2010-06-04 14:57:58 +00:00
return :amap_mlog
2010-06-08 22:14:25 +00:00
elsif ( firstline . index ( " amap v " ) )
# then it's an amap log
2010-07-06 16:10:05 +00:00
@import_filedata [ :type ] = " Amap Log "
2010-06-08 22:14:25 +00:00
return :amap_log
2010-03-28 23:07:52 +00:00
elsif ( firstline =~ / ^ \ d{1,3} \ . \ d{1,3} \ . \ d{1,3} \ . \ d{1,3} / )
# then it's an IP list
2010-07-06 16:10:05 +00:00
@import_filedata [ :type ] = " IP Address List "
2010-06-04 14:57:58 +00:00
return :ip_list
2010-10-07 06:24:26 +00:00
elsif ( data [ 0 , 1024 ] . index ( " <netsparker " ) )
@import_filedata [ :type ] = " NetSparker XML "
2011-06-15 00:51:51 +00:00
return :netsparker_xml
2010-11-02 21:27:26 +00:00
elsif ( firstline . index ( " # Metasploit PWDump Export " ) )
# then it's a Metasploit PWDump export
@import_filedata [ :type ] = " msf_pwdump "
return :msf_pwdump
2010-01-07 19:06:29 +00:00
end
2011-06-15 00:51:51 +00:00
2010-01-07 21:30:14 +00:00
raise DBImportError . new ( " Could not automatically determine file type " )
2010-01-07 19:06:29 +00:00
end
2010-06-30 15:28:49 +00:00
# Boils down the import file type detection to a boolean
def validate_import_file ( data )
begin
import_filetype_detect ( data )
rescue DBImportError
return false
end
return true
end
2010-12-12 17:44:48 +00:00
def import_libpcap_file ( args = { } )
filename = args [ :filename ]
wspace = args [ :wspace ] || workspace
data = PacketFu :: PcapFile . new . readfile ( filename )
import_libpcap ( args . merge ( :data = > data ) )
end
# The libpcap file format is handled by PacketFu for data
# extraction. TODO: Make this its own mixin, and possibly
# extend PacketFu to do better stream analysis on the fly.
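#
# Illustrative use (the capture path is hypothetical):
#
#   import_libpcap_file(:filename => '/tmp/capture.pcap')
#
# or, with the capture already read by PacketFu:
#
#   import_libpcap(:data => PacketFu::PcapFile.new.readfile('/tmp/capture.pcap'))
#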
def import_libpcap ( args = { } , & block )
data = args [ :data ]
wspace = args [ :wspace ] || workspace
bl = validate_ips ( args [ :blacklist ] ) ? args [ :blacklist ] . split : [ ]
# seen_hosts is only used for determining when to yield an address. Once we get
# some packet analysis going, the values will have all sorts of info. The plan
# is to run through all the packets as a first pass and report host and service,
# then, once we have everything parsed, we can reconstruct sessions and ngrep
# out things like authentication sequences, examine ttl's and window sizes, all
# kinds of crazy awesome stuff like that.
2010-12-14 19:04:12 +00:00
seen_hosts = { }
2010-12-17 03:20:47 +00:00
decoded_packets = 0
2011-06-15 00:51:51 +00:00
last_count = 0
2010-12-12 17:44:48 +00:00
data . body . map { | p | p . data } . each do | p |
2010-12-17 03:20:47 +00:00
if ( decoded_packets > = last_count + 1000 ) and block
2011-06-15 00:51:51 +00:00
yield ( :pcap_count , decoded_packets )
2010-12-17 03:20:47 +00:00
last_count = decoded_packets
2010-12-16 21:36:00 +00:00
end
2010-12-17 03:20:47 +00:00
decoded_packets += 1
2010-12-16 21:36:00 +00:00
2010-12-12 17:44:48 +00:00
pkt = PacketFu :: Packet . parse ( p ) rescue next # Just silently skip bad packets
2010-12-16 21:36:00 +00:00
2010-12-17 03:20:47 +00:00
next unless pkt . is_ip? # Skip anything that's not IP. Technically, not Ethernet::Ip
2011-07-25 13:24:41 +00:00
next if pkt . is_tcp? && ( pkt . tcp_src == 0 || pkt . tcp_dst == 0 ) # Skip port 0
next if pkt . is_udp? && ( pkt . udp_src == 0 || pkt . udp_dst == 0 ) # Skip port 0
2010-12-17 03:20:47 +00:00
saddr = pkt . ip_saddr
daddr = pkt . ip_daddr
2010-12-12 17:44:48 +00:00
2010-12-17 03:20:47 +00:00
# Handle blacklists and obviously useless IP addresses, and report the host.
next if ( bl | [ saddr , daddr ] ) . size == bl . size # Both hosts are blacklisted, skip everything.
unless ( bl . include? ( saddr ) || rfc3330_reserved ( saddr ) )
2011-06-15 00:51:51 +00:00
yield ( :address , saddr ) if block and ! seen_hosts . keys . include? ( saddr )
2010-12-17 03:20:47 +00:00
report_host ( :workspace = > wspace , :host = > saddr , :state = > Msf :: HostState :: Alive ) unless seen_hosts [ saddr ]
seen_hosts [ saddr ] || = [ ]
2010-12-14 19:04:12 +00:00
2010-12-17 03:20:47 +00:00
end
unless ( bl . include? ( daddr ) || rfc3330_reserved ( daddr ) )
yield ( :address , daddr ) if block and ! seen_hosts . keys . include? ( daddr )
report_host ( :workspace = > wspace , :host = > daddr , :state = > Msf :: HostState :: Alive ) unless seen_hosts [ daddr ]
2011-06-15 00:51:51 +00:00
seen_hosts [ daddr ] || = [ ]
2010-12-17 03:20:47 +00:00
end
2010-12-12 17:44:48 +00:00
2010-12-17 03:20:47 +00:00
if pkt . is_tcp? # First pass on TCP packets
if ( pkt . tcp_flags . syn == 1 and pkt . tcp_flags . ack == 1 ) or # Oh, this kills me
pkt . tcp_src < 1024 # If it's a low port, assume it's a proper service.
if seen_hosts [ saddr ]
unless seen_hosts [ saddr ] . include? [ pkt . tcp_src , " tcp " ]
report_service (
2011-06-15 00:51:51 +00:00
:workspace = > wspace , :host = > saddr ,
:proto = > " tcp " , :port = > pkt . tcp_src ,
2010-12-17 03:20:47 +00:00
:state = > Msf :: ServiceState :: Open
2011-06-15 00:51:51 +00:00
)
2010-12-17 03:20:47 +00:00
seen_hosts [ saddr ] << [ pkt . tcp_src , " tcp " ]
yield ( :service , " %s:%d/%s " % [ saddr , pkt . tcp_src , " tcp " ] )
2010-12-12 17:44:48 +00:00
end
end
2010-12-17 03:20:47 +00:00
end
elsif pkt . is_udp? # First pass on UDP packets
if pkt . udp_src == pkt . udp_dst # Very basic p2p detection.
[ saddr , daddr ] . each do | xaddr |
if seen_hosts [ xaddr ]
unless seen_hosts [ xaddr ] . include? [ pkt . udp_src , " udp " ]
2010-12-12 17:44:48 +00:00
report_service (
2011-06-15 00:51:51 +00:00
:workspace = > wspace , :host = > xaddr ,
:proto = > " udp " , :port = > pkt . udp_src ,
2010-12-12 17:44:48 +00:00
:state = > Msf :: ServiceState :: Open
)
2010-12-17 03:20:47 +00:00
seen_hosts [ xaddr ] << [ pkt . udp_src , " udp " ]
yield ( :service , " %s:%d/%s " % [ xaddr , pkt . udp_src , " udp " ] )
2010-12-12 17:44:48 +00:00
end
end
end
2011-06-15 00:51:51 +00:00
elsif pkt . udp_src < 1024 # Probably a service
2010-12-17 03:20:47 +00:00
if seen_hosts [ saddr ]
unless seen_hosts [ saddr ] . include? [ pkt . udp_src , " udp " ]
report_service (
2011-06-15 00:51:51 +00:00
:workspace = > wspace , :host = > saddr ,
:proto = > " udp " , :port = > pkt . udp_src ,
2010-12-17 03:20:47 +00:00
:state = > Msf :: ServiceState :: Open
)
seen_hosts [ saddr ] << [ pkt . udp_src , " udp " ]
yield ( :service , " %s:%d/%s " % [ saddr , pkt . udp_src , " udp " ] )
end
end
2010-12-12 17:44:48 +00:00
end
2010-12-17 03:20:47 +00:00
end # tcp or udp
inspect_single_packet ( pkt , wspace )
2010-12-12 17:44:48 +00:00
end # data.body.map
2010-12-17 03:20:47 +00:00
# Right about here, we should have built up some streams for some stream analysis.
# Not sure what form that will take, but people like shoving many hundreds of
# thousands of packets through this thing, so it'll need to be memory efficient.
2010-12-14 19:04:12 +00:00
end
2010-12-17 03:20:47 +00:00
# Do all the single packet analysis we can while churning through the pcap
# the first time. Multiple packet inspection will come later, where we can
# do stream analysis, compare requests and responses, etc.
def inspect_single_packet ( pkt , wspace )
if pkt . is_tcp? or pkt . is_udp?
inspect_single_packet_http ( pkt , wspace )
2010-12-14 19:04:12 +00:00
end
end
# Checks for packets that are headed towards port 80, are TCP, contain an HTTP/1.x
# request line and an Authorization header with a b64-encoded credential, and
# extracts that credential. Reports the credential and solidifies the service as HTTP.
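# For example, a header of "Authorization: Basic dXNlcjpwYXNz" decodes via
# "dXNlcjpwYXNz".unpack("m*").first.split(/:/, 2) to ["user", "pass"], which
# is what gets reported below.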
2010-12-17 03:20:47 +00:00
def inspect_single_packet_http ( pkt , wspace )
2010-12-14 19:04:12 +00:00
# First, check the server side (data from port 80).
if pkt . is_tcp? and pkt . tcp_src == 80 and ! pkt . payload . nil? and ! pkt . payload . empty?
if pkt . payload =~ / ^HTTP \ x2f1 \ x2e[01] /
http_server_match = pkt . payload . match ( / \ nServer: \ s+([^ \ r \ n]+)[ \ r \ n] / )
if http_server_match . kind_of? ( MatchData ) and http_server_match [ 1 ]
report_service (
:workspace = > wspace ,
:host = > pkt . ip_saddr ,
:port = > pkt . tcp_src ,
:proto = > " tcp " ,
:name = > " http " ,
:info = > http_server_match [ 1 ] ,
:state = > Msf :: ServiceState :: Open
)
# That's all we want to know from this service.
return :something_significant
end
end
2010-12-12 17:44:48 +00:00
end
2010-12-14 19:04:12 +00:00
# Next, check the client side (data to port 80)
if pkt . is_tcp? and pkt . tcp_dst == 80 and ! pkt . payload . nil? and ! pkt . payload . empty?
if pkt . payload . match ( / [ \ x00- \ x20]HTTP \ x2f1 \ x2e[10] / )
auth_match = pkt . payload . match ( / \ nAuthorization: \ s+Basic \ s+([A-Za-z0-9= \ x2b]+) / )
if auth_match . kind_of? ( MatchData ) and auth_match [ 1 ]
2011-06-15 00:51:51 +00:00
b64_cred = auth_match [ 1 ]
2010-12-14 19:04:12 +00:00
else
return false
end
# If we're this far, we can surmise that at least the client is a web browser,
# he thinks the server is HTTP and he just made an authentication attempt. At
# this point, we'll just believe everything the packet says -- validation ought
# to come later.
user , pass = b64_cred . unpack ( " m* " ) . first . split ( / : / , 2 )
report_service (
:workspace = > wspace ,
:host = > pkt . ip_daddr ,
:port = > pkt . tcp_dst ,
:proto = > " tcp " ,
:name = > " http "
)
report_auth_info (
:workspace = > wspace ,
:host = > pkt . ip_daddr ,
:port = > pkt . tcp_dst ,
:proto = > " tcp " ,
:type = > " password " ,
:active = > true , # Once we can build a stream, determine if the auth was successful. For now, assume it is.
:user = > user ,
:pass = > pass
)
# That's all we want to know from this service.
return :something_significant
end
end
2010-12-12 17:44:48 +00:00
end
2011-06-15 00:51:51 +00:00
#
2010-11-02 21:27:26 +00:00
# Metasploit PWDump Export
#
# This file format is generated by the db_export -f pwdump and
# the Metasploit Express and Pro report types of "PWDump."
#
2011-06-15 00:51:51 +00:00
# This particular block scheme is temporary, since someone is
2010-11-02 21:27:26 +00:00
# bound to want to import gigantic lists, so we'll want a
# stream parser eventually (just like the other non-nmap formats).
#
# The file format is:
# # 1.2.3.4:23/tcp (telnet)
# username password
# user2 p\x01a\x02ss2
# <BLANK> pass3
# user3 <BLANK>
# smbuser:sid:lmhash:nthash:::
#
# Note the leading hash for the host:port line. Note also all usernames
# and passwords must be in 7-bit ASCII (character sequences of "\x01"
# will be interpolated -- this includes spaces, which must be notated
# as "\x20"). Blank usernames or passwords should be <BLANK>.
#
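# Illustrative use (the export path is hypothetical):
#
#   import_msf_pwdump(:data => ::File.read('/tmp/creds.pwdump')) do |type, addr|
#     puts "Parsing credentials for #{addr}" if type == :address
#   end
#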
def import_msf_pwdump ( args = { } , & block )
data = args [ :data ]
wspace = args [ :wspace ] || workspace
bl = validate_ips ( args [ :blacklist ] ) ? args [ :blacklist ] . split : [ ]
2010-11-02 22:11:45 +00:00
last_host = nil
2010-11-02 21:27:26 +00:00
addr = nil
port = nil
proto = nil
sname = nil
ptype = nil
active = false # Are there cases where imported creds are good? I just hate trusting the import right away.
data . each_line do | line |
case line
when / ^[ \ s]* # / # Comment lines
if line [ / ^ # [ \ s]*([0-9.]+):([0-9]+)( \ x2f(tcp|udp))?[ \ s]*( \ x28([^ \ x29]*) \ x29)? / ]
addr = $1
port = $2
proto = $4
sname = $6
end
when / ^[ \ s]*Warning: /
next # Discard warning messages.
when / ^[ \ s]*([^ \ s:]+):[0-9]+:([A-Fa-f0-9]+:[A-Fa-f0-9]+):[^ \ s]*$ / # SMB Hash
user = ( [ nil , " <BLANK> " ] . include? ( $1 ) ) ? " " : $1
pass = ( [ nil , " <BLANK> " ] . include? ( $2 ) ) ? " " : $2
ptype = " smb_hash "
when / ^[ \ s]*([^ \ s:]+):([0-9]+):NO PASSWORD \ *+:NO PASSWORD \ *+[^ \ s]*$ / # SMB Hash
user = ( [ nil , " <BLANK> " ] . include? ( $1 ) ) ? " " : $1
pass = " "
ptype = " smb_hash "
when / ^[ \ s]*([ \ x21- \ x7f]+)[ \ s]+([ \ x21- \ x7f]+)? / # Must be a user pass
user = ( [ nil , " <BLANK> " ] . include? ( $1 ) ) ? " " : dehex ( $1 )
pass = ( [ nil , " <BLANK> " ] . include? ( $2 ) ) ? " " : dehex ( $2 )
ptype = " password "
else # Some unknown line not broken by a space.
next
end
next unless [ addr , port , user , pass ] . compact . size == 4
next unless ipv4_validator ( addr ) # Skip Malformed addrs
next unless port [ / ^[0-9]+$ / ] # Skip malformed ports
if bl . include? addr
next
else
2010-11-02 22:11:45 +00:00
yield ( :address , addr ) if block and addr != last_host
last_host = addr
2010-11-02 21:27:26 +00:00
end
cred_info = {
:host = > addr ,
:port = > port ,
:user = > user ,
:pass = > pass ,
:type = > ptype ,
:workspace = > wspace
}
cred_info [ :proto ] = proto if proto
cred_info [ :sname ] = sname if sname
cred_info [ :active ] = active
report_auth_info ( cred_info )
user = pass = ptype = nil
end
2011-06-15 00:51:51 +00:00
2010-11-02 21:27:26 +00:00
end
# If hex notation is present, turn each "\xNN" escape into the corresponding character.
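# For example, dehex('pa\x20ss') returns "pa ss" -- each literal "\xNN"
# escape in the imported text is replaced by the corresponding byte.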
def dehex ( str )
hexen = str . scan ( / \ x5cx[0-9a-fA-F]{2} / )
hexen . each { | h |
str . gsub! ( h , h [ 2 , 2 ] . to_i ( 16 ) . chr )
}
return str
end
2010-01-14 15:26:20 +00:00
#
2010-01-07 19:06:29 +00:00
# Nexpose Simple XML
#
# XXX At some point we'll want to make this a stream parser for dealing
# with large results files
#
2010-06-04 14:57:58 +00:00
def import_nexpose_simplexml_file ( args = { } )
filename = args [ :filename ]
wspace = args [ :wspace ] || workspace
2010-11-04 21:44:16 +00:00
data = " "
:: File . open ( filename , 'rb' ) do | f |
data = f . read ( f . stat . size )
end
2010-06-04 14:57:58 +00:00
import_nexpose_simplexml ( args . merge ( :data = > data ) )
2010-01-07 19:06:29 +00:00
end
2010-02-18 06:40:38 +00:00
2010-10-06 15:55:28 +00:00
# Import a Metasploit XML file.
def import_msf_file ( args = { } )
2010-06-04 14:57:58 +00:00
filename = args [ :filename ]
wspace = args [ :wspace ] || workspace
2010-11-04 21:44:16 +00:00
data = " "
:: File . open ( filename , 'rb' ) do | f |
data = f . read ( f . stat . size )
end
2010-10-06 15:55:28 +00:00
import_msf_xml ( args . merge ( :data = > data ) )
2010-04-26 18:40:49 +00:00
end
2010-06-10 21:06:06 +00:00
# Import a Metasploit Express ZIP file. Note that this requires
# a fair bit of filesystem manipulation, and is very much tied
2010-07-06 16:10:05 +00:00
# up with the Metasploit Express ZIP file format export (for
2010-06-10 21:06:06 +00:00
# obvious reasons). In the event directories exist, they will
# be reused. If target files exist, they will be overwritten.
#
2010-07-06 16:10:05 +00:00
# XXX: Refactor so it's not quite as sanity-blasting.
2010-10-06 15:55:28 +00:00
def import_msf_zip ( args = { } , & block )
2010-06-10 21:06:06 +00:00
data = args [ :data ]
wspace = args [ :wspace ] || workspace
bl = validate_ips ( args [ :blacklist ] ) ? args [ :blacklist ] . split : [ ]
2010-07-06 16:10:05 +00:00
2011-01-20 17:32:48 +00:00
new_tmp = :: File . join ( Dir :: tmpdir , " msf " , " imp_ #{ Rex :: Text :: rand_text_alphanumeric ( 4 ) } " , @import_filedata [ :zip_basename ] )
2010-10-06 18:00:21 +00:00
if :: File . exists? new_tmp
unless ( :: File . directory? ( new_tmp ) && :: File . writable? ( new_tmp ) )
2010-06-10 21:06:06 +00:00
raise DBImportError . new ( " Could not extract zip file to #{ new_tmp } " )
end
else
FileUtils . mkdir_p ( new_tmp )
end
@import_filedata [ :zip_tmp ] = new_tmp
2010-10-06 18:00:21 +00:00
@import_filedata [ :zip_tmp_subdirs ] = @import_filedata [ :zip_entry_names ] . map { | x | :: File . split ( x ) } . map { | x | x [ 0 ] } . uniq . reject { | x | x == " . " }
2010-06-10 21:06:06 +00:00
2010-07-06 16:10:05 +00:00
@import_filedata [ :zip_tmp_subdirs ] . each { | sub |
2010-10-06 18:00:21 +00:00
tmp_subdirs = :: File . join ( @import_filedata [ :zip_tmp ] , sub )
2010-06-11 18:56:16 +00:00
if File . exists? tmp_subdirs
2010-10-06 18:00:21 +00:00
unless ( :: File . directory? ( tmp_subdirs ) && File . writable? ( tmp_subdirs ) )
2010-06-11 18:56:16 +00:00
raise DBImportError . new ( " Could not extract zip file to #{ tmp_subdirs } " )
2010-06-10 21:06:06 +00:00
end
else
2010-10-06 18:00:21 +00:00
:: FileUtils . mkdir ( tmp_subdirs )
2010-06-10 21:06:06 +00:00
end
}
2011-01-20 17:32:48 +00:00
2010-06-10 21:06:06 +00:00
data . entries . each do | e |
2010-10-06 18:00:21 +00:00
target = :: File . join ( @import_filedata [ :zip_tmp ] , e . name )
:: File . unlink target if :: File . exists? ( target ) # Yep. Deleted.
2010-07-06 16:10:05 +00:00
data . extract ( e , target )
2010-06-10 21:06:06 +00:00
if target =~ / ^.*.xml$ /
2011-01-20 17:32:48 +00:00
target_data = :: File . open ( target ) { | f | f . read 1024 }
if import_filetype_detect ( target_data ) == :msf_xml
@import_filedata [ :zip_extracted_xml ] = target
break
end
2010-06-10 21:06:06 +00:00
end
end
# This will kick the newly-extracted XML file through
# the import_file process all over again.
if @import_filedata [ :zip_extracted_xml ]
new_args = args . dup
new_args [ :filename ] = @import_filedata [ :zip_extracted_xml ]
new_args [ :data ] = nil
2010-06-11 18:56:16 +00:00
new_args [ :ifd ] = @import_filedata . dup
2010-06-10 21:06:06 +00:00
if block
import_file ( new_args , & block )
else
2010-06-11 18:56:16 +00:00
import_file ( new_args )
2010-06-10 21:06:06 +00:00
end
end
2010-06-11 18:56:16 +00:00
# Kick down to all the MSFX ZIP specific items
if block
2010-10-06 15:55:28 +00:00
import_msf_collateral ( new_args , & block )
2010-06-11 18:56:16 +00:00
else
2010-10-06 15:55:28 +00:00
import_msf_collateral ( new_args )
2010-06-11 18:56:16 +00:00
end
2010-06-10 21:06:06 +00:00
end
2010-10-06 15:55:28 +00:00
# Imports loot, tasks, and reports from an MSF ZIP report.
2010-06-11 18:56:16 +00:00
# XXX: This function is stupidly long. It needs to be refactored.
2010-10-06 15:55:28 +00:00
def import_msf_collateral ( args = { } , & block )
2010-10-06 18:00:21 +00:00
data = :: File . open ( args [ :filename ] , " rb " ) { | f | f . read ( f . stat . size ) }
2010-06-11 18:56:16 +00:00
wspace = args [ :wspace ] || args [ 'wspace' ] || workspace
bl = validate_ips ( args [ :blacklist ] ) ? args [ :blacklist ] . split : [ ]
2010-10-06 18:00:21 +00:00
basedir = args [ :basedir ] || args [ 'basedir' ] || :: File . join ( Msf :: Config . install_root , " data " , " msf " )
2010-07-07 00:28:34 +00:00
allow_yaml = false
2010-10-06 18:09:06 +00:00
btag = nil
2010-07-07 00:28:34 +00:00
2010-06-11 18:56:16 +00:00
doc = rexmlify ( data )
if doc . elements [ " MetasploitExpressV1 " ]
m_ver = 1
2010-07-07 00:28:34 +00:00
allow_yaml = true
2010-10-06 18:09:06 +00:00
btag = " MetasploitExpressV1 "
2010-06-11 18:56:16 +00:00
elsif doc . elements [ " MetasploitExpressV2 " ]
m_ver = 2
2010-07-07 00:28:34 +00:00
allow_yaml = true
2010-10-06 18:09:06 +00:00
btag = " MetasploitExpressV2 "
2010-07-07 00:28:34 +00:00
elsif doc . elements [ " MetasploitExpressV3 " ]
m_ver = 3
2010-10-06 18:09:06 +00:00
btag = " MetasploitExpressV3 "
2010-10-06 18:00:21 +00:00
elsif doc . elements [ " MetasploitExpressV4 " ]
2010-10-06 18:09:06 +00:00
m_ver = 4
btag = " MetasploitExpressV4 "
elsif doc . elements [ " MetasploitV4 " ]
m_ver = 4
2011-06-15 00:51:51 +00:00
btag = " MetasploitV4 "
2010-06-11 18:56:16 +00:00
else
m_ver = nil
end
2010-10-06 18:09:06 +00:00
unless m_ver and btag
raise DBImportError . new ( " Unsupported Metasploit XML document format " )
2010-06-11 18:56:16 +00:00
end
host_info = { }
2010-10-06 18:09:06 +00:00
doc . elements . each ( " / #{ btag } /hosts/host " ) do | host |
2010-10-06 05:10:16 +00:00
host_info [ host . elements [ " id " ] . text . to_s . strip ] = nils_for_nulls ( host . elements [ " address " ] . text . to_s . strip )
2010-06-11 18:56:16 +00:00
end
# Import Loot
2010-10-06 18:09:06 +00:00
doc . elements . each ( " / #{ btag } /loots/loot " ) do | loot |
2010-06-11 18:56:16 +00:00
next if bl . include? host_info [ loot . elements [ " host-id " ] . text . to_s . strip ]
loot_info = { }
loot_info [ :host ] = host_info [ loot . elements [ " host-id " ] . text . to_s . strip ]
loot_info [ :workspace ] = args [ :wspace ]
2010-10-06 05:10:16 +00:00
loot_info [ :ctype ] = nils_for_nulls ( loot . elements [ " content-type " ] . text . to_s . strip )
loot_info [ :info ] = nils_for_nulls ( unserialize_object ( loot . elements [ " info " ] , allow_yaml ) )
loot_info [ :ltype ] = nils_for_nulls ( loot . elements [ " ltype " ] . text . to_s . strip )
loot_info [ :name ] = nils_for_nulls ( loot . elements [ " name " ] . text . to_s . strip )
loot_info [ :created_at ] = nils_for_nulls ( loot . elements [ " created-at " ] . text . to_s . strip )
loot_info [ :updated_at ] = nils_for_nulls ( loot . elements [ " updated-at " ] . text . to_s . strip )
loot_info [ :name ] = nils_for_nulls ( loot . elements [ " name " ] . text . to_s . strip )
loot_info [ :orig_path ] = nils_for_nulls ( loot . elements [ " path " ] . text . to_s . strip )
2010-06-11 18:56:16 +00:00
tmp = args [ :ifd ] [ :zip_tmp ]
2010-10-06 05:10:16 +00:00
loot_info [ :orig_path ] . gsub! ( / ^ \ . / , tmp ) if loot_info [ :orig_path ]
if ! loot . elements [ " service-id " ] . text . to_s . strip . empty?
unless loot . elements [ " service-id " ] . text . to_s . strip == " NULL "
loot_info [ :service ] = loot . elements [ " service-id " ] . text . to_s . strip
end
2010-06-11 18:56:16 +00:00
end
# Only report loot if we actually have it.
# TODO: Copypasta. Separate this out.
2010-10-06 18:00:21 +00:00
if :: File . exists? loot_info [ :orig_path ]
loot_dir = :: File . join ( basedir , " loot " )
loot_file = :: File . split ( loot_info [ :orig_path ] ) . last
if :: File . exists? loot_dir
unless ( :: File . directory? ( loot_dir ) && :: File . writable? ( loot_dir ) )
2010-06-11 18:56:16 +00:00
raise DBImportError . new ( " Could not move files to #{ loot_dir } " )
end
else
2010-10-06 18:00:21 +00:00
:: FileUtils . mkdir_p ( loot_dir )
2010-06-11 18:56:16 +00:00
end
2010-10-06 18:00:21 +00:00
new_loot = :: File . join ( loot_dir , loot_file )
2010-06-11 18:56:16 +00:00
loot_info [ :path ] = new_loot
2010-10-06 18:00:21 +00:00
if :: File . exists? ( new_loot )
:: File . unlink new_loot # Delete it, and don't report it.
2010-06-22 20:30:43 +00:00
else
report_loot ( loot_info ) # It's new, so report it.
end
2010-10-06 18:00:21 +00:00
:: FileUtils . copy ( loot_info [ :orig_path ] , new_loot )
2010-10-06 15:55:28 +00:00
yield ( :msf_loot , new_loot ) if block
2010-06-11 18:56:16 +00:00
end
end
# Import Tasks
2010-10-06 18:09:06 +00:00
doc . elements . each ( " / #{ btag } /tasks/task " ) do | task |
2010-06-11 18:56:16 +00:00
task_info = { }
task_info [ :workspace ] = args [ :wspace ]
# Should user be imported (original) or declared (the importing user)?
2010-10-06 05:10:16 +00:00
task_info [ :user ] = nils_for_nulls ( task . elements [ " created-by " ] . text . to_s . strip )
task_info [ :desc ] = nils_for_nulls ( task . elements [ " description " ] . text . to_s . strip )
task_info [ :info ] = nils_for_nulls ( unserialize_object ( task . elements [ " info " ] , allow_yaml ) )
task_info [ :mod ] = nils_for_nulls ( task . elements [ " module " ] . text . to_s . strip )
task_info [ :options ] = nils_for_nulls ( task . elements [ " options " ] . text . to_s . strip )
task_info [ :prog ] = nils_for_nulls ( task . elements [ " progress " ] . text . to_s . strip ) . to_i
task_info [ :created_at ] = nils_for_nulls ( task . elements [ " created-at " ] . text . to_s . strip )
task_info [ :updated_at ] = nils_for_nulls ( task . elements [ " updated-at " ] . text . to_s . strip )
2010-06-11 18:56:16 +00:00
if ! task . elements [ " completed-at " ] . text . to_s . empty?
2010-10-06 05:10:16 +00:00
task_info [ :completed_at ] = nils_for_nulls ( task . elements [ " completed-at " ] . text . to_s . strip )
2010-06-11 18:56:16 +00:00
end
if ! task . elements [ " error " ] . text . to_s . empty?
2010-10-06 05:10:16 +00:00
task_info [ :error ] = nils_for_nulls ( task . elements [ " error " ] . text . to_s . strip )
2010-06-11 18:56:16 +00:00
end
if ! task . elements [ " result " ] . text . to_s . empty?
2010-10-06 05:10:16 +00:00
task_info [ :result ] = nils_for_nulls ( task . elements [ " result " ] . text . to_s . strip )
2010-06-11 18:56:16 +00:00
end
2010-10-06 05:10:16 +00:00
task_info [ :orig_path ] = nils_for_nulls ( task . elements [ " path " ] . text . to_s . strip )
2010-06-11 18:56:16 +00:00
tmp = args [ :ifd ] [ :zip_tmp ]
2010-10-06 05:10:16 +00:00
task_info [ :orig_path ] . gsub! ( / ^ \ . / , tmp ) if task_info [ :orig_path ]
2010-06-11 18:56:16 +00:00
# Only report a task if we actually have it.
# TODO: Copypasta. Separate this out.
2010-10-06 18:00:21 +00:00
if :: File . exists? task_info [ :orig_path ]
tasks_dir = :: File . join ( basedir , " tasks " )
task_file = :: File . split ( task_info [ :orig_path ] ) . last
if :: File . exists? tasks_dir
unless ( :: File . directory? ( tasks_dir ) && :: File . writable? ( tasks_dir ) )
2010-06-11 18:56:16 +00:00
raise DBImportError . new ( " Could not move files to #{ tasks_dir } " )
end
else
2010-10-06 18:00:21 +00:00
:: FileUtils . mkdir_p ( tasks_dir )
2010-06-11 18:56:16 +00:00
end
2010-10-06 18:00:21 +00:00
new_task = :: File . join ( tasks_dir , task_file )
2010-06-11 18:56:16 +00:00
task_info [ :path ] = new_task
2010-10-06 18:00:21 +00:00
if :: File . exists? ( new_task )
:: File . unlink new_task # Delete it, and don't report it.
2010-06-22 20:30:43 +00:00
else
report_task ( task_info ) # It's new, so report it.
end
2010-10-06 18:00:21 +00:00
:: FileUtils . copy ( task_info [ :orig_path ] , new_task )
2010-10-06 15:55:28 +00:00
yield ( :msf_task , new_task ) if block
2010-06-11 18:56:16 +00:00
end
end
# Import Reports
2010-10-06 18:09:06 +00:00
doc . elements . each ( " / #{ btag } /reports/report " ) do | report |
2010-06-11 18:56:16 +00:00
report_info = { }
report_info [ :workspace ] = args [ :wspace ]
# Should user be imported (original) or declared (the importing user)?
2010-10-06 05:10:16 +00:00
report_info [ :user ] = nils_for_nulls ( report . elements [ " created-by " ] . text . to_s . strip )
report_info [ :options ] = nils_for_nulls ( report . elements [ " options " ] . text . to_s . strip )
report_info [ :rtype ] = nils_for_nulls ( report . elements [ " rtype " ] . text . to_s . strip )
report_info [ :created_at ] = nils_for_nulls ( report . elements [ " created-at " ] . text . to_s . strip )
report_info [ :updated_at ] = nils_for_nulls ( report . elements [ " updated-at " ] . text . to_s . strip )
2010-06-11 18:56:16 +00:00
2010-10-06 05:10:16 +00:00
report_info [ :orig_path ] = nils_for_nulls ( report . elements [ " path " ] . text . to_s . strip )
2010-06-11 18:56:16 +00:00
tmp = args [ :ifd ] [ :zip_tmp ]
2010-10-06 05:10:16 +00:00
report_info [ :orig_path ] . gsub! ( / ^ \ . / , tmp ) if report_info [ :orig_path ]
2010-06-11 18:56:16 +00:00
# Only report a report if we actually have it.
# TODO: Copypasta. Separate this out.
2010-10-06 18:00:21 +00:00
if :: File . exists? report_info [ :orig_path ]
reports_dir = :: File . join ( basedir , " reports " )
report_file = :: File . split ( report_info [ :orig_path ] ) . last
if :: File . exists? reports_dir
unless ( :: File . directory? ( reports_dir ) && :: File . writable? ( reports_dir ) )
2010-06-11 18:56:16 +00:00
raise DBImportError . new ( " Could not move files to #{ reports_dir } " )
end
else
2010-10-06 18:00:21 +00:00
:: FileUtils . mkdir_p ( reports_dir )
2010-06-11 18:56:16 +00:00
end
2010-10-06 18:00:21 +00:00
new_report = :: File . join ( reports_dir , report_file )
2010-06-11 18:56:16 +00:00
report_info [ :path ] = new_report
2010-10-06 18:00:21 +00:00
if :: File . exists? ( new_report )
:: File . unlink new_report
2010-06-22 20:30:43 +00:00
else
report_report ( report_info )
end
2010-10-06 18:00:21 +00:00
:: FileUtils . copy ( report_info [ :orig_path ] , new_report )
2010-10-06 15:55:28 +00:00
yield ( :msf_report , new_report ) if block
2010-06-11 18:56:16 +00:00
end
end
end
2010-06-10 21:06:06 +00:00
2010-04-26 18:40:49 +00:00
# For each host, step through services, notes, and vulns, and import
2010-04-27 14:46:00 +00:00
# them.
2010-04-26 18:40:49 +00:00
# TODO: loot, tasks, and reports
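#
# A minimal sketch of the expected document shape (element names are taken
# from the parsing below; the address value is hypothetical):
#
#   <MetasploitV4>
#     <hosts>
#       <host>
#         <address>192.168.0.1</address>
#         <services><service>...</service></services>
#         <notes><note>...</note></notes>
#         <vulns><vuln>...</vuln></vulns>
#       </host>
#     </hosts>
#   </MetasploitV4>
#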
2010-10-06 15:55:28 +00:00
def import_msf_xml ( args = { } , & block )
2010-06-04 14:57:58 +00:00
data = args [ :data ]
wspace = args [ :wspace ] || workspace
bl = validate_ips ( args [ :blacklist ] ) ? args [ :blacklist ] . split : [ ]
2010-07-06 19:33:27 +00:00
allow_yaml = false
2010-10-06 18:09:06 +00:00
btag = nil
2011-06-15 00:51:51 +00:00
2010-04-26 20:25:42 +00:00
doc = rexmlify ( data )
2010-06-04 18:54:50 +00:00
if doc . elements [ " MetasploitExpressV1 " ]
m_ver = 1
2010-07-06 19:33:27 +00:00
allow_yaml = true
2010-10-06 18:09:06 +00:00
btag = " MetasploitExpressV1 "
2010-06-04 18:54:50 +00:00
elsif doc . elements [ " MetasploitExpressV2 " ]
m_ver = 2
2010-07-06 19:33:27 +00:00
allow_yaml = true
2011-06-15 00:51:51 +00:00
btag = " MetasploitExpressV2 "
2010-07-06 19:33:27 +00:00
elsif doc . elements [ " MetasploitExpressV3 " ]
m_ver = 3
2011-06-15 00:51:51 +00:00
btag = " MetasploitExpressV3 "
2010-10-06 18:00:21 +00:00
elsif doc . elements [ " MetasploitExpressV4 " ]
2011-06-15 00:51:51 +00:00
m_ver = 4
2010-10-06 18:09:06 +00:00
btag = " MetasploitExpressV4 "
elsif doc . elements [ " MetasploitV4 " ]
2011-06-15 00:51:51 +00:00
m_ver = 4
btag = " MetasploitV4 "
2010-06-04 18:54:50 +00:00
else
m_ver = nil
end
2010-10-06 18:09:06 +00:00
unless m_ver and btag
raise DBImportError . new ( " Unsupported Metasploit XML document format " )
2010-06-04 18:54:50 +00:00
end
2010-10-06 18:09:06 +00:00
doc . elements . each ( " / #{ btag } /hosts/host " ) do | host |
2010-04-26 18:40:49 +00:00
host_data = { }
host_data [ :workspace ] = wspace
2010-10-06 05:10:16 +00:00
host_data [ :host ] = nils_for_nulls ( host . elements [ " address " ] . text . to_s . strip )
2010-06-04 14:57:58 +00:00
if bl . include? host_data [ :host ]
next
else
2010-06-08 19:16:20 +00:00
yield ( :address , host_data [ :host ] ) if block
2010-06-04 14:57:58 +00:00
end
2011-05-03 03:03:38 +00:00
host_data [ :mac ] = nils_for_nulls ( host . elements [ " mac " ] . text . to_s . strip )
2010-04-26 18:40:49 +00:00
if host . elements [ " comm " ] . text
2010-10-06 05:10:16 +00:00
host_data [ :comm ] = nils_for_nulls ( host . elements [ " comm " ] . text . to_s . strip )
2010-04-26 18:40:49 +00:00
end
2010-10-06 20:24:26 +00:00
%W{ created-at updated-at name state os-flavor os-lang os-name os-sp purpose } . each { | datum |
2010-04-26 20:25:42 +00:00
if host . elements [ datum ] . text
2010-10-06 05:10:16 +00:00
host_data [ datum . gsub ( '-' , '_' ) ] = nils_for_nulls ( host . elements [ datum ] . text . to_s . strip )
2010-04-26 20:25:42 +00:00
end
2010-04-26 18:40:49 +00:00
}
host_address = host_data [ :host ] . dup # Preserve after report_host() deletes
2011-04-22 20:16:55 +00:00
hobj = report_host ( host_data )
2010-04-27 14:46:00 +00:00
host . elements . each ( 'services/service' ) do | service |
2010-04-26 18:40:49 +00:00
service_data = { }
service_data [ :workspace ] = wspace
2011-04-22 20:16:55 +00:00
service_data [ :host ] = hobj
2010-10-06 05:10:16 +00:00
service_data [ :port ] = nils_for_nulls ( service . elements [ " port " ] . text . to_s . strip ) . to_i
service_data [ :proto ] = nils_for_nulls ( service . elements [ " proto " ] . text . to_s . strip )
2010-10-06 20:24:26 +00:00
%W{ created-at updated-at name state info } . each { | datum |
2010-04-26 20:25:42 +00:00
if service . elements [ datum ] . text
2010-07-06 16:33:27 +00:00
if datum == " info "
2010-10-06 05:10:16 +00:00
service_data [ " info " ] = nils_for_nulls ( unserialize_object ( service . elements [ datum ] , false ) )
2010-07-06 16:33:27 +00:00
else
2010-10-06 05:10:16 +00:00
service_data [ datum . gsub ( " - " , " _ " ) ] = nils_for_nulls ( service . elements [ datum ] . text . to_s . strip )
2010-07-06 16:33:27 +00:00
end
2010-04-26 20:25:42 +00:00
end
2010-04-26 18:40:49 +00:00
}
report_service ( service_data )
end
host . elements . each ( 'notes/note' ) do | note |
note_data = { }
note_data [ :workspace ] = wspace
2011-04-22 20:16:55 +00:00
note_data [ :host ] = hobj
2010-10-06 05:10:16 +00:00
note_data [ :type ] = nils_for_nulls ( note . elements [ " ntype " ] . text . to_s . strip )
note_data [ :data ] = nils_for_nulls ( unserialize_object ( note . elements [ " data " ] , allow_yaml ) )
2010-07-06 19:33:27 +00:00
2010-04-26 18:40:49 +00:00
if note . elements [ " critical " ] . text
2010-10-06 05:10:16 +00:00
note_data [ :critical ] = true unless note . elements [ " critical " ] . text . to_s . strip == " NULL "
2010-04-26 18:40:49 +00:00
end
if note . elements [ " seen " ] . text
2010-10-06 05:10:16 +00:00
note_data [ :seen ] = true unless note . elements [ " seen " ] . text . to_s . strip == " NULL "
2010-04-26 18:40:49 +00:00
end
2010-10-06 20:24:26 +00:00
%W{ created-at updated-at } . each { | datum |
2010-04-26 21:51:29 +00:00
if note . elements [ datum ] . text
2010-10-06 05:10:16 +00:00
note_data [ datum . gsub ( " - " , " _ " ) ] = nils_for_nulls ( note . elements [ datum ] . text . to_s . strip )
2010-04-26 21:51:29 +00:00
end
}
2010-04-26 18:40:49 +00:00
report_note ( note_data )
end
2011-02-10 00:17:20 +00:00
host . elements . each ( 'tags/tag' ) do | tag |
tag_data = { }
tag_data [ :addr ] = host_address
tag_data [ :wspace ] = wspace
tag_data [ :name ] = tag . elements [ " name " ] . text . to_s . strip
tag_data [ :desc ] = tag . elements [ " desc " ] . text . to_s . strip
if tag . elements [ " report-summary " ] . text
tag_data [ :summary ] = tag . elements [ " report-summary " ] . text . to_s . strip
end
if tag . elements [ " report-detail " ] . text
tag_data [ :detail ] = tag . elements [ " report-detail " ] . text . to_s . strip
end
if tag . elements [ " critical " ] . text
tag_data [ :crit ] = true unless tag . elements [ " critical " ] . text . to_s . strip == " NULL "
end
report_host_tag ( tag_data )
end
2010-04-26 18:40:49 +00:00
host . elements . each ( 'vulns/vuln' ) do | vuln |
vuln_data = { }
vuln_data [ :workspace ] = wspace
2011-04-22 20:16:55 +00:00
vuln_data [ :host ] = hobj
2010-10-06 05:10:16 +00:00
vuln_data [ :info ] = nils_for_nulls ( unserialize_object ( vuln . elements [ " info " ] , allow_yaml ) )
vuln_data [ :name ] = nils_for_nulls ( vuln . elements [ " name " ] . text . to_s . strip )
2011-05-15 22:19:00 +00:00
%W{ created-at updated-at exploited-at } . each { | datum |
if vuln . elements [ datum ] and vuln . elements [ datum ] . text
2010-10-06 05:10:16 +00:00
vuln_data [ datum . gsub ( " - " , " _ " ) ] = nils_for_nulls ( vuln . elements [ datum ] . text . to_s . strip )
2010-04-26 21:51:29 +00:00
end
}
2011-05-15 22:19:00 +00:00
if vuln . elements [ " refs " ]
vuln_data [ :refs ] = [ ]
vuln . elements . each ( " refs/ref " ) do | ref |
vuln_data [ :refs ] << nils_for_nulls ( ref . text . to_s . strip )
end
end
2010-04-26 18:40:49 +00:00
report_vuln ( vuln_data )
end
2010-08-23 17:45:36 +00:00
host . elements . each ( 'creds/cred' ) do | cred |
cred_data = { }
cred_data [ :workspace ] = wspace
2011-04-22 20:16:55 +00:00
cred_data [ :host ] = hobj
2010-10-06 20:24:26 +00:00
%W{ port ptype sname proto proof active user pass } . each { | datum |
2010-08-23 17:45:36 +00:00
if cred . elements [ datum ] . respond_to? :text
2010-10-06 05:10:16 +00:00
cred_data [ datum . intern ] = nils_for_nulls ( cred . elements [ datum ] . text . to_s . strip )
2010-08-23 17:45:36 +00:00
end
}
2010-10-06 20:24:26 +00:00
%W{ created-at updated-at } . each { | datum |
2010-08-23 17:45:36 +00:00
if cred . elements [ datum ] . respond_to? :text
2010-10-06 05:10:16 +00:00
cred_data [ datum . gsub ( " - " , " _ " ) ] = nils_for_nulls ( cred . elements [ datum ] . text . to_s . strip )
2010-08-23 17:45:36 +00:00
end
}
if cred_data [ :pass ] == " <masked> "
cred_data [ :pass ] = " "
cred_data [ :active ] = false
elsif cred_data [ :pass ] == " *BLANK PASSWORD* "
cred_data [ :pass ] = " "
end
2011-04-22 20:16:55 +00:00
report_cred ( cred_data )
2010-08-23 17:45:36 +00:00
end
2011-04-07 21:59:32 +00:00
host . elements . each ( 'sessions/session' ) do | sess |
sess_id = nils_for_nulls ( sess . elements [ " id " ] . text . to_s . strip . to_i )
sess_data = { }
2011-04-22 20:16:55 +00:00
sess_data [ :host ] = hobj
2011-04-07 21:59:32 +00:00
%W{ desc platform port stype } . each { | datum |
if sess . elements [ datum ] . respond_to? :text
sess_data [ datum . intern ] = nils_for_nulls ( sess . elements [ datum ] . text . to_s . strip )
end
}
%W{ opened-at close-reason closed-at via-exploit via-payload } . each { | datum |
if sess . elements [ datum ] . respond_to? :text
sess_data [ datum . gsub ( " - " , " _ " ) . intern ] = nils_for_nulls ( sess . elements [ datum ] . text . to_s . strip )
end
}
sess_data [ :datastore ] = nils_for_nulls ( unserialize_object ( sess . elements [ " datastore " ] , allow_yaml ) )
2011-04-29 04:10:46 +00:00
if sess . elements [ " routes " ]
sess_data [ :routes ] = nils_for_nulls ( unserialize_object ( sess . elements [ " routes " ] , allow_yaml ) ) || [ ]
end
2011-04-07 21:59:32 +00:00
if not sess_data [ :closed_at ] # Fake a close if we don't already have one
2011-06-15 00:51:51 +00:00
sess_data [ :closed_at ] = Time . now
2011-04-07 21:59:32 +00:00
sess_data [ :close_reason ] = " Imported at #{ Time . now } "
end
existing_session = get_session (
:workspace = > sess_data [ :host ] . workspace ,
:addr = > sess_data [ :host ] . address ,
:time = > sess_data [ :opened_at ]
2011-06-15 00:51:51 +00:00
)
2011-04-22 20:16:55 +00:00
this_session = existing_session || report_session ( sess_data )
2011-04-07 21:59:32 +00:00
next if existing_session
sess . elements . each ( 'events/event' ) do | sess_event |
sess_event_data = { }
sess_event_data [ :session ] = this_session
%W{ created-at etype local-path remote-path } . each { | datum |
if sess_event . elements [ datum ] . respond_to? :text
sess_event_data [ datum . gsub ( " - " , " _ " ) . intern ] = nils_for_nulls ( sess_event . elements [ datum ] . text . to_s . strip )
end
}
%W{ command output } . each { | datum |
if sess_event . elements [ datum ] . respond_to? :text
sess_event_data [ datum . gsub ( " - " , " _ " ) . intern ] = nils_for_nulls ( unserialize_object ( sess_event . elements [ datum ] , allow_yaml ) )
end
}
2011-06-15 00:51:51 +00:00
report_session_event ( sess_event_data )
2011-04-07 21:59:32 +00:00
end
end
2010-04-26 18:40:49 +00:00
end
2011-04-07 21:59:32 +00:00
2010-10-06 18:00:21 +00:00
# Import web sites
2010-10-06 20:24:26 +00:00
doc . elements . each ( " / #{ btag } /web_sites/web_site " ) do | web |
2010-10-06 18:00:21 +00:00
info = { }
info [ :workspace ] = wspace
2011-04-07 21:59:32 +00:00
2010-10-06 20:24:26 +00:00
%W{ host port vhost ssl comments } . each do | datum |
if web . elements [ datum ] . respond_to? :text
info [ datum . intern ] = nils_for_nulls ( web . elements [ datum ] . text . to_s . strip )
2011-06-15 00:51:51 +00:00
end
2010-10-06 20:24:26 +00:00
end
2011-04-07 21:59:32 +00:00
2010-10-06 20:24:26 +00:00
info [ :options ] = nils_for_nulls ( unserialize_object ( web . elements [ " options " ] , allow_yaml ) ) if web . elements [ " options " ] . respond_to? ( :text )
info [ :ssl ] = ( info [ :ssl ] and info [ :ssl ] . to_s . strip . downcase == " true " ) ? true : false
2011-04-07 21:59:32 +00:00
2010-10-06 20:24:26 +00:00
%W{ created-at updated-at } . each { | datum |
2010-10-06 18:00:21 +00:00
if web . elements [ datum ] . text
2010-10-06 20:24:26 +00:00
info [ datum . gsub ( " - " , " _ " ) ] = nils_for_nulls ( web . elements [ datum ] . text . to_s . strip )
2010-10-06 18:00:21 +00:00
end
}
2011-04-07 21:59:32 +00:00
2010-10-06 18:00:21 +00:00
report_web_site ( info )
2010-10-20 01:13:26 +00:00
yield ( :web_site , " #{ info [ :host ] } : #{ info [ :port ] } ( #{ info [ :vhost ] } ) " ) if block
2010-10-06 18:00:21 +00:00
end
2011-04-07 21:59:32 +00:00
2010-10-06 18:00:21 +00:00
%W{ page form vuln } . each do | wtype |
2010-10-06 20:24:26 +00:00
doc . elements . each ( " / #{ btag } /web_ #{ wtype } s/web_ #{ wtype } " ) do | web |
2010-10-06 18:00:21 +00:00
info = { }
info [ :workspace ] = wspace
2010-10-06 20:24:26 +00:00
info [ :host ] = nils_for_nulls ( web . elements [ " host " ] . text . to_s . strip ) if web . elements [ " host " ] . respond_to? ( :text )
info [ :port ] = nils_for_nulls ( web . elements [ " port " ] . text . to_s . strip ) if web . elements [ " port " ] . respond_to? ( :text )
info [ :ssl ] = nils_for_nulls ( web . elements [ " ssl " ] . text . to_s . strip ) if web . elements [ " ssl " ] . respond_to? ( :text )
info [ :vhost ] = nils_for_nulls ( web . elements [ " vhost " ] . text . to_s . strip ) if web . elements [ " vhost " ] . respond_to? ( :text )
2011-04-07 21:59:32 +00:00
2010-10-06 20:24:26 +00:00
info [ :ssl ] = ( info [ :ssl ] and info [ :ssl ] . to_s . strip . downcase == " true " ) ? true : false
2011-04-07 21:59:32 +00:00
2010-10-06 18:00:21 +00:00
case wtype
when " page "
2010-10-06 20:24:26 +00:00
%W{ path code body query cookie auth ctype mtime location } . each do | datum |
2010-10-06 18:00:21 +00:00
if web . elements [ datum ] . respond_to? :text
info [ datum . intern ] = nils_for_nulls ( web . elements [ datum ] . text . to_s . strip )
2011-06-15 00:51:51 +00:00
end
2010-10-06 18:00:21 +00:00
end
info [ :headers ] = nils_for_nulls ( unserialize_object ( web . elements [ " headers " ] , allow_yaml ) )
when " form "
2010-10-06 20:24:26 +00:00
%W{ path query method } . each do | datum |
2010-10-06 18:00:21 +00:00
if web . elements [ datum ] . respond_to? :text
info [ datum . intern ] = nils_for_nulls ( web . elements [ datum ] . text . to_s . strip )
2011-06-15 00:51:51 +00:00
end
2010-10-06 18:00:21 +00:00
end
2011-06-15 00:51:51 +00:00
info [ :params ] = nils_for_nulls ( unserialize_object ( web . elements [ " params " ] , allow_yaml ) )
2010-10-06 18:00:21 +00:00
when " vuln "
2010-10-11 03:35:15 +00:00
%W{ path query method pname proof risk name blame description category confidence } . each do | datum |
2010-10-06 18:00:21 +00:00
if web . elements [ datum ] . respond_to? :text
info [ datum . intern ] = nils_for_nulls ( web . elements [ datum ] . text . to_s . strip )
2011-06-15 00:51:51 +00:00
end
2010-10-06 18:00:21 +00:00
end
2011-06-15 00:51:51 +00:00
info [ :params ] = nils_for_nulls ( unserialize_object ( web . elements [ " params " ] , allow_yaml ) )
info [ :risk ] = info [ :risk ] . to_i
info [ :confidence ] = info [ :confidence ] . to_i
2010-10-06 18:00:21 +00:00
end
2011-04-07 21:59:32 +00:00
2010-10-06 20:24:26 +00:00
%W{ created-at updated-at } . each { | datum |
2010-10-06 18:00:21 +00:00
if web . elements [ datum ] . text
2010-10-06 20:24:26 +00:00
info [ datum . gsub ( " - " , " _ " ) ] = nils_for_nulls ( web . elements [ datum ] . text . to_s . strip )
2010-10-06 18:00:21 +00:00
end
}
2010-10-06 20:24:26 +00:00
self . send ( " report_web_ #{ wtype } " , info )
2011-04-07 21:59:32 +00:00
2010-10-20 01:13:26 +00:00
yield ( " web_ #{ wtype } " . intern , info [ :path ] ) if block
2010-10-06 18:00:21 +00:00
end
end
2010-04-26 18:40:49 +00:00
end
2010-10-06 18:00:21 +00:00
# Convert the string "NULL" to actual nil
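# Illustrative usage (hypothetical values):
#   nils_for_nulls("NULL")      # => nil
#   nils_for_nulls("10.1.1.1")  # => "10.1.1.1"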
2010-10-06 05:10:16 +00:00
def nils_for_nulls ( str )
str == " NULL " ? nil : str
end
2010-06-08 19:16:20 +00:00
def import_nexpose_simplexml ( args = { } , & block )
2010-06-04 14:57:58 +00:00
bl = validate_ips ( args [ :blacklist ] ) ? args [ :blacklist ] . split : [ ]
2011-05-26 20:29:47 +00:00
wspace = args [ :wspace ] || workspace
2011-06-15 00:51:51 +00:00
if Rex :: Parser . nokogiri_loaded
2011-05-26 20:29:47 +00:00
parser = " Nokogiri v #{ :: Nokogiri :: VERSION } "
noko_args = args . dup
noko_args [ :blacklist ] = bl
noko_args [ :wspace ] = wspace
if block
yield ( :parser , parser )
import_nexpose_noko_stream ( noko_args ) { | type , data | yield type , data }
else
2011-06-15 00:51:51 +00:00
import_nexpose_noko_stream ( noko_args )
2011-05-26 20:29:47 +00:00
end
return true
end
data = args [ :data ]
2010-06-04 14:57:58 +00:00
2010-04-26 20:25:42 +00:00
doc = rexmlify ( data )
2010-01-07 19:06:29 +00:00
doc . elements . each ( '/NeXposeSimpleXML/devices/device' ) do | dev |
addr = dev . attributes [ 'address' ] . to_s
2010-06-04 14:57:58 +00:00
if bl . include? addr
next
else
2010-06-08 19:16:20 +00:00
yield ( :address , addr ) if block
2010-06-04 14:57:58 +00:00
end
2010-02-17 06:01:53 +00:00
fprint = { }
dev . elements . each ( 'fingerprint/description' ) do | str |
fprint [ :desc ] = str . text . to_s . strip
end
dev . elements . each ( 'fingerprint/vendor' ) do | str |
fprint [ :vendor ] = str . text . to_s . strip
end
dev . elements . each ( 'fingerprint/family' ) do | str |
fprint [ :family ] = str . text . to_s . strip
end
dev . elements . each ( 'fingerprint/product' ) do | str |
fprint [ :product ] = str . text . to_s . strip
end
dev . elements . each ( 'fingerprint/version' ) do | str |
fprint [ :version ] = str . text . to_s . strip
end
dev . elements . each ( 'fingerprint/architecture' ) do | str |
fprint [ :arch ] = str . text . to_s . upcase . strip
2010-01-07 19:06:29 +00:00
end
2010-02-17 06:01:53 +00:00
conf = {
2010-02-18 06:40:38 +00:00
:workspace = > wspace ,
2010-02-17 06:01:53 +00:00
:host = > addr ,
2010-03-22 00:11:43 +00:00
:state = > Msf :: HostState :: Alive
2010-02-17 06:01:53 +00:00
}
2011-04-22 20:16:55 +00:00
host = report_host ( conf )
report_import_note ( wspace , host )
2010-02-17 06:01:53 +00:00
report_note (
2010-02-18 06:40:38 +00:00
:workspace = > wspace ,
2011-04-22 20:16:55 +00:00
:host = > host ,
2010-02-17 06:01:53 +00:00
:type = > 'host.os.nexpose_fingerprint' ,
:data = > fprint
)
2010-01-07 19:06:29 +00:00
# Load vulnerabilities not associated with a service
dev . elements . each ( 'vulnerabilities/vulnerability' ) do | vuln |
vid = vuln . attributes [ 'id' ] . to_s . downcase
refs = process_nexpose_data_sxml_refs ( vuln )
next if not refs
2010-01-10 17:53:12 +00:00
report_vuln (
2010-02-18 06:40:38 +00:00
:workspace = > wspace ,
2011-04-22 20:16:55 +00:00
:host = > host ,
2010-02-18 06:40:38 +00:00
:name = > 'NEXPOSE-' + vid ,
2010-05-10 16:16:38 +00:00
:info = > vid ,
2011-05-26 20:29:47 +00:00
:refs = > refs
)
2010-01-07 19:06:29 +00:00
end
# Load the services
dev . elements . each ( 'services/service' ) do | svc |
sname = svc . attributes [ 'name' ] . to_s
sprot = svc . attributes [ 'protocol' ] . to_s . downcase
sport = svc . attributes [ 'port' ] . to_s . to_i
2010-03-27 02:31:14 +00:00
next if sport == 0
2010-01-07 19:06:29 +00:00
name = sname . split ( '(' ) [ 0 ] . strip
2010-02-17 06:01:53 +00:00
info = ''
svc . elements . each ( 'fingerprint/description' ) do | str |
info = str . text . to_s . strip
end
2010-01-20 00:35:44 +00:00
2010-01-07 19:06:29 +00:00
if ( sname . downcase != '<unknown>' )
2011-04-22 20:16:55 +00:00
report_service ( :workspace = > wspace , :host = > host , :proto = > sprot , :port = > sport , :name = > name , :info = > info )
2010-01-07 19:06:29 +00:00
else
2011-04-22 20:16:55 +00:00
report_service ( :workspace = > wspace , :host = > host , :proto = > sprot , :port = > sport , :info = > info )
2010-01-07 19:06:29 +00:00
end
# Load vulnerabilities associated with this service
svc . elements . each ( 'vulnerabilities/vulnerability' ) do | vuln |
vid = vuln . attributes [ 'id' ] . to_s . downcase
refs = process_nexpose_data_sxml_refs ( vuln )
next if not refs
2010-01-20 01:01:54 +00:00
report_vuln (
2010-02-18 06:40:38 +00:00
:workspace = > wspace ,
2011-04-22 20:16:55 +00:00
:host = > host ,
2010-01-27 22:13:48 +00:00
:port = > sport ,
:proto = > sprot ,
:name = > 'NEXPOSE-' + vid ,
2010-05-10 16:16:38 +00:00
:info = > vid ,
2011-05-26 20:29:47 +00:00
:refs = > refs
)
2010-01-07 19:06:29 +00:00
end
end
end
end
#
# Nexpose Raw XML
#
2010-06-04 14:57:58 +00:00
def import_nexpose_rawxml_file ( args = { } )
filename = args [ :filename ]
wspace = args [ :wspace ] || workspace
2010-11-04 21:44:16 +00:00
data = " "
:: File . open ( filename , 'rb' ) do | f |
data = f . read ( f . stat . size )
end
2010-06-04 14:57:58 +00:00
import_nexpose_rawxml ( args . merge ( :data = > data ) )
2010-01-07 19:06:29 +00:00
end
2010-04-26 20:25:42 +00:00
2010-06-08 19:16:20 +00:00
def import_nexpose_rawxml ( args = { } , & block )
2010-06-04 14:57:58 +00:00
bl = validate_ips ( args [ :blacklist ] ) ? args [ :blacklist ] . split : [ ]
2011-05-27 17:30:11 +00:00
wspace = args [ :wspace ] || workspace
2011-06-15 00:51:51 +00:00
if Rex :: Parser . nokogiri_loaded
2011-05-27 17:30:11 +00:00
parser = " Nokogiri v #{ :: Nokogiri :: VERSION } "
noko_args = args . dup
noko_args [ :blacklist ] = bl
noko_args [ :wspace ] = wspace
if block
yield ( :parser , parser )
import_nexpose_raw_noko_stream ( noko_args ) { | type , data | yield type , data }
else
2011-06-15 00:51:51 +00:00
import_nexpose_raw_noko_stream ( noko_args )
2011-05-27 17:30:11 +00:00
end
return true
end
data = args [ :data ]
2010-06-04 14:57:58 +00:00
2010-05-02 19:16:52 +00:00
# Use a stream parser instead of a tree parser so we can deal with
# huge results files without running out of memory.
parser = Rex :: Parser :: NexposeXMLStreamParser . new
# Since all the Refs have to be in the database before we can use them
# in a Vuln, we store all the hosts until we finish parsing and only
# then put everything in the database. This is memory-intensive for
2010-05-02 23:13:21 +00:00
# large files, but should be much less so than a tree parser.
2010-05-02 19:16:52 +00:00
#
# This method is also considerably faster than parsing through the tree
# looking for references every time we hit a vuln.
hosts = [ ]
vulns = [ ]
# The callback merely populates our in-memory table of hosts and vulns
parser . callback = Proc . new { | type , value |
case type
when :host
2011-04-01 01:01:11 +00:00
# XXX: Blacklist should be checked here instead of saving a
# host we're just going to throw away later
2010-05-02 19:16:52 +00:00
hosts . push ( value )
when :vuln
2011-02-01 23:42:07 +00:00
value [ " id " ] = value [ " id " ] . downcase if value [ " id " ]
2010-05-02 19:16:52 +00:00
vulns . push ( value )
end
}
REXML :: Document . parse_stream ( data , parser )
2011-03-29 22:22:58 +00:00
vuln_refs = nexpose_refs_to_struct ( vulns )
2010-05-02 19:16:52 +00:00
hosts . each do | host |
2010-06-04 14:57:58 +00:00
if bl . include? host [ " addr " ]
next
else
2010-06-08 19:16:20 +00:00
yield ( :address , host [ " addr " ] ) if block
2010-06-04 14:57:58 +00:00
end
2011-03-29 22:22:58 +00:00
nexpose_host_from_rawxml ( host , vuln_refs , wspace )
2010-05-02 19:16:52 +00:00
end
end
#
# Takes an array of vuln hashes, as returned by the NeXpose rawxml stream
# parser, like:
# [
# {"id"=>"winreg-notes-protocol-handler", severity="8", "refs"=>[{"source"=>"BID", "value"=>"10600"}, ...]}
# {"id"=>"windows-zotob-c", severity="8", "refs"=>[{"source"=>"BID", "value"=>"14513"}, ...]}
# ]
2011-03-29 22:22:58 +00:00
# and transforms it into a struct, containing :id, :refs, :title, and :severity
2010-05-02 23:13:21 +00:00
#
2011-03-29 22:22:58 +00:00
# Other attributes can be added later, as needed.
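# A minimal illustrative sketch (hypothetical input, not from a real scan):
#   nexpose_refs_to_struct([
#     { "id" => "windows-zotob-c", "title" => "Zotob", "severity" => "8",
#       "refs" => [ { "source" => "BID", "value" => "14513" } ] }
#   ])
#   # => [ #<struct id="windows-zotob-c", refs=["BID-14513"], title="Zotob", severity="8"> ]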
def nexpose_refs_to_struct ( vulns )
ret = [ ]
2010-05-02 19:16:52 +00:00
vulns . each do | vuln |
2011-06-15 00:51:51 +00:00
next if ret . map { | v | v . id } . include? vuln [ " id " ]
2011-03-29 22:22:58 +00:00
vstruct = Struct . new ( :id , :refs , :title , :severity ) . new
vstruct . id = vuln [ " id " ]
vstruct . title = vuln [ " title " ]
vstruct . severity = vuln [ " severity " ]
vstruct . refs = [ ]
2010-05-02 19:16:52 +00:00
vuln [ " refs " ] . each do | ref |
if ref [ 'source' ] == 'BID'
2011-03-29 22:22:58 +00:00
vstruct . refs . push ( 'BID-' + ref [ " value " ] )
2010-05-02 19:16:52 +00:00
elsif ref [ 'source' ] == 'CVE'
# value is CVE-$ID
2011-03-29 22:22:58 +00:00
vstruct . refs . push ( ref [ " value " ] )
2010-05-02 19:16:52 +00:00
elsif ref [ 'source' ] == 'MS'
2011-03-29 22:22:58 +00:00
vstruct . refs . push ( 'MSB-' + ref [ " value " ] )
2010-05-02 19:16:52 +00:00
elsif ref [ 'source' ] == 'URL'
2011-03-29 22:22:58 +00:00
vstruct . refs . push ( 'URL-' + ref [ " value " ] )
#else
# $stdout.puts("Unknown source: #{ref["source"]}")
2010-05-02 19:16:52 +00:00
end
end
2011-03-29 22:22:58 +00:00
ret . push vstruct
2010-05-02 19:16:52 +00:00
end
2011-03-29 22:22:58 +00:00
return ret
2010-05-02 19:16:52 +00:00
end
2011-03-29 22:22:58 +00:00
# Takes a Host object, an array of vuln structs (generated by nexpose_refs_to_struct()),
# and a workspace, and reports the vulns on that host.
def nexpose_host_from_rawxml ( h , vstructs , wspace )
2011-04-22 20:16:55 +00:00
hobj = nil
2010-05-02 19:16:52 +00:00
data = { :workspace = > wspace }
if h [ " addr " ]
addr = h [ " addr " ]
else
# Can't report it if it doesn't have an IP
return
end
data [ :host ] = addr
if ( h [ " hardware-address " ] )
2010-05-02 23:43:41 +00:00
# Strip any existing colons from the MAC, then re-insert one between each octet
data [ :mac ] = h [ " hardware-address " ] . gsub ( ':' , '' ) . scan ( / .. / ) . join ( ':' )
2010-05-02 19:16:52 +00:00
end
data [ :state ] = ( h [ " status " ] == " alive " ) ? Msf :: HostState :: Alive : Msf :: HostState :: Dead
# Since we only have one name field per host in the database, just
# take the first one.
if ( h [ " names " ] and h [ " names " ] . first )
data [ :name ] = h [ " names " ] . first
end
if ( data [ :state ] != Msf :: HostState :: Dead )
2011-04-22 20:16:55 +00:00
hobj = report_host ( data )
report_import_note ( wspace , hobj )
2010-05-02 19:16:52 +00:00
end
2011-04-26 17:31:26 +00:00
if h [ " notes " ]
note = {
:workspace = > wspace ,
:host = > ( hobj || addr ) ,
:type = > " host.vuln.nexpose_keys " ,
:data = > { } ,
:mode = > :unique_data
}
h [ " notes " ] . each do | v , k |
note [ :data ] [ v ] || = [ ]
next if note [ :data ] [ v ] . include? k
note [ :data ] [ v ] << k
end
report_note ( note )
end
2010-05-02 22:23:43 +00:00
if h [ " os_family " ]
2010-05-02 19:16:52 +00:00
note = {
:workspace = > wspace ,
2011-04-22 20:16:55 +00:00
:host = > hobj || addr ,
2010-05-02 19:16:52 +00:00
:type = > 'host.os.nexpose_fingerprint' ,
:data = > {
2010-05-02 22:23:43 +00:00
:family = > h [ " os_family " ] ,
:certainty = > h [ " os_certainty " ]
2010-05-02 19:16:52 +00:00
}
}
2010-05-02 22:23:43 +00:00
note [ :data ] [ :vendor ] = h [ " os_vendor " ] if h [ " os_vendor " ]
note [ :data ] [ :product ] = h [ " os_product " ] if h [ " os_product " ]
2011-04-11 22:29:53 +00:00
note [ :data ] [ :version ] = h [ " os_version " ] if h [ " os_version " ]
2010-05-02 22:23:43 +00:00
note [ :data ] [ :arch ] = h [ " arch " ] if h [ " arch " ]
2010-05-02 19:16:52 +00:00
report_note ( note )
end
h [ " endpoints " ] . each { | p |
extra = " "
extra << p [ " product " ] + " " if p [ " product " ]
2010-05-02 22:23:43 +00:00
extra << p [ " version " ] + " " if p [ " version " ]
2010-05-02 23:50:41 +00:00
2010-05-02 23:51:54 +00:00
# Skip port-0 endpoints
next if p [ " port " ] . to_i == 0
2010-05-02 22:23:43 +00:00
# XXX This should probably be handled in a more standard way
2010-05-02 23:50:41 +00:00
# extra << "(" + p["certainty"] + " certainty) " if p["certainty"]
2010-05-02 19:16:52 +00:00
data = { }
data [ :workspace ] = wspace
data [ :proto ] = p [ " protocol " ] . downcase
data [ :port ] = p [ " port " ] . to_i
data [ :state ] = p [ " status " ]
2011-04-22 20:16:55 +00:00
data [ :host ] = hobj || addr
2010-05-02 19:16:52 +00:00
data [ :info ] = extra if not extra . empty?
if p [ " name " ] != " <unknown> "
data [ :name ] = p [ " name " ]
end
report_service ( data )
}
h [ " vulns " ] . each_pair { | k , v |
2011-06-15 00:51:51 +00:00
2011-02-01 23:42:07 +00:00
next if v [ " status " ] !~ / ^vulnerable /
2011-03-29 22:22:58 +00:00
vstruct = vstructs . select { | vs | vs . id . to_s . downcase == v [ " id " ] . to_s . downcase } . first
2011-04-30 04:33:52 +00:00
next unless vstruct
2010-05-02 19:16:52 +00:00
data = { }
data [ :workspace ] = wspace
2011-04-22 20:16:55 +00:00
data [ :host ] = hobj || addr
2010-05-02 22:23:43 +00:00
data [ :proto ] = v [ " protocol " ] . downcase if v [ " protocol " ]
data [ :port ] = v [ " port " ] . to_i if v [ " port " ]
2010-05-02 19:16:52 +00:00
data [ :name ] = " NEXPOSE- " + v [ " id " ]
2011-03-29 22:22:58 +00:00
data [ :info ] = vstruct . title
data [ :refs ] = vstruct . refs
2010-05-02 19:16:52 +00:00
report_vuln ( data )
}
end
2010-04-07 20:51:05 +00:00
2010-10-07 02:33:57 +00:00
#
# Retina XML
#
# Process a Retina XML file
def import_retina_xml_file ( args = { } )
filename = args [ :filename ]
wspace = args [ :wspace ] || workspace
2010-11-04 21:44:16 +00:00
data = " "
:: File . open ( filename , 'rb' ) do | f |
data = f . read ( f . stat . size )
end
2010-10-07 02:33:57 +00:00
import_retina_xml ( args . merge ( :data = > data ) )
end
# Process Retina XML
def import_retina_xml ( args = { } , & block )
data = args [ :data ]
wspace = args [ :wspace ] || workspace
bl = validate_ips ( args [ :blacklist ] ) ? args [ :blacklist ] . split : [ ]
2011-05-31 16:30:07 +00:00
msg = " Warning: The Retina XML format does not associate vulnerabilities with the \n "
msg << " specific service on which they were found. \n "
msg << " This makes it impossible to correlate exploits to discovered vulnerabilities \n "
msg << " in a reliable fashion. "
2011-06-15 00:51:51 +00:00
2011-05-31 16:30:07 +00:00
yield ( :warning , msg ) if block
2011-06-15 00:51:51 +00:00
2010-10-07 02:33:57 +00:00
parser = Rex :: Parser :: RetinaXMLStreamParser . new
parser . on_found_host = Proc . new do | host |
2011-04-22 20:16:55 +00:00
hobj = nil
2010-10-07 02:33:57 +00:00
data = { :workspace = > wspace }
addr = host [ 'address' ]
next if not addr
2011-06-15 00:51:51 +00:00
2010-10-07 02:33:57 +00:00
next if bl . include? addr
data [ :host ] = addr
2011-06-15 00:51:51 +00:00
2010-10-07 02:33:57 +00:00
if host [ 'mac' ]
data [ :mac ] = host [ 'mac' ]
end
2011-06-15 00:51:51 +00:00
2010-10-07 02:33:57 +00:00
data [ :state ] = Msf :: HostState :: Alive
if host [ 'hostname' ]
data [ :name ] = host [ 'hostname' ]
end
if host [ 'netbios' ]
data [ :name ] = host [ 'netbios' ]
end
2011-06-15 00:51:51 +00:00
2010-10-07 02:33:57 +00:00
yield ( :address , data [ :host ] ) if block
2011-06-15 00:51:51 +00:00
2010-10-07 02:33:57 +00:00
# Import Host
2011-04-22 20:16:55 +00:00
hobj = report_host ( data )
report_import_note ( wspace , hobj )
2011-06-15 00:51:51 +00:00
2010-10-07 02:33:57 +00:00
# Import OS fingerprint
if host [ " os " ]
note = {
:workspace = > wspace ,
:host = > addr ,
:type = > 'host.os.retina_fingerprint' ,
:data = > {
:os = > host [ " os " ]
}
}
report_note ( note )
end
2011-06-15 00:51:51 +00:00
2010-10-07 02:33:57 +00:00
# Import vulnerabilities
host [ 'vulns' ] . each do | vuln |
refs = vuln [ 'refs' ] . map { | v | v . join ( " - " ) }
refs << " RETINA- #{ vuln [ 'rthid' ] } " if vuln [ 'rthid' ]
vuln_info = {
:workspace = > wspace ,
:host = > addr ,
:name = > vuln [ 'name' ] ,
:info = > vuln [ 'description' ] ,
:refs = > refs
}
2011-06-15 00:51:51 +00:00
2010-10-07 02:33:57 +00:00
report_vuln ( vuln_info )
end
end
REXML :: Document . parse_stream ( data , parser )
end
2010-10-07 06:24:26 +00:00
#
# NetSparker XML
#
# Process a NetSparker XML file
def import_netsparker_xml_file ( args = { } )
filename = args [ :filename ]
wspace = args [ :wspace ] || workspace
2010-11-04 21:44:16 +00:00
data = " "
:: File . open ( filename , 'rb' ) do | f |
data = f . read ( f . stat . size )
end
2010-10-07 06:24:26 +00:00
import_netsparker_xml ( args . merge ( :data = > data ) )
end
2010-10-11 02:57:07 +00:00
# Process NetSparker XML
2010-10-07 06:24:26 +00:00
def import_netsparker_xml ( args = { } , & block )
data = args [ :data ]
wspace = args [ :wspace ] || workspace
bl = validate_ips ( args [ :blacklist ] ) ? args [ :blacklist ] . split : [ ]
addr = nil
parser = Rex :: Parser :: NetSparkerXMLStreamParser . new
parser . on_found_vuln = Proc . new do | vuln |
data = { :workspace = > wspace }
# Parse the URL
url = vuln [ 'url' ]
return if not url
2011-06-15 00:51:51 +00:00
# Crack the URL into a URI
2010-10-07 06:24:26 +00:00
uri = URI ( url ) rescue nil
return if not uri
2011-06-15 00:51:51 +00:00
2010-10-07 06:24:26 +00:00
# Resolve the host and cache the IP
if not addr
baddr = Rex :: Socket . addr_aton ( uri . host ) rescue nil
if baddr
addr = Rex :: Socket . addr_ntoa ( baddr )
2011-04-22 20:16:55 +00:00
yield ( :address , addr ) if block
2010-10-07 06:24:26 +00:00
end
end
2011-06-15 00:51:51 +00:00
2010-10-07 06:24:26 +00:00
# Bail early if we have no IP address
if not addr
raise Interrupt , " Not a valid IP address "
end
2011-06-15 00:51:51 +00:00
2010-10-07 06:24:26 +00:00
if bl . include? ( addr )
raise Interrupt , " IP address is on the blacklist "
end
data [ :host ] = addr
data [ :vhost ] = uri . host
data [ :port ] = uri . port
data [ :ssl ] = ( uri . scheme == " ssl " )
2011-06-15 00:51:51 +00:00
2010-10-07 06:24:26 +00:00
body = nil
# First report a web page
if vuln [ 'response' ]
headers = { }
code = 200
head , body = vuln [ 'response' ] . to_s . split ( / \ r? \ n \ r? \ n / , 2 )
if body
2011-06-15 00:51:51 +00:00
2010-10-07 06:24:26 +00:00
if head =~ / ^HTTP \/ \ d+ \ . \ d+ \ s+( \ d+) \ s* /
code = $1 . to_i
end
2011-06-15 00:51:51 +00:00
2010-10-07 06:24:26 +00:00
headers = { }
head . split ( / \ r? \ n / ) . each do | line |
hname , hval = line . strip . split ( / \ s*: \ s* / , 2 )
next if hval . to_s . strip . empty?
headers [ hname . downcase ] || = [ ]
headers [ hname . downcase ] << hval
end
2011-06-15 00:51:51 +00:00
info = {
2010-10-07 06:24:26 +00:00
:path = > uri . path ,
:query = > uri . query ,
:code = > code ,
:body = > body ,
:headers = > headers
}
info . merge! ( data )
2011-06-15 00:51:51 +00:00
2010-10-07 06:24:26 +00:00
if headers [ 'content-type' ]
info [ :ctype ] = headers [ 'content-type' ] [ 0 ]
end
2011-06-15 00:51:51 +00:00
2010-10-07 06:24:26 +00:00
if headers [ 'set-cookie' ]
info [ :cookie ] = headers [ 'set-cookie' ] . join ( " \n " )
end
if headers [ 'authorization' ]
info [ :auth ] = headers [ 'authorization' ] . join ( " \n " )
end
if headers [ 'location' ]
info [ :location ] = headers [ 'location' ] [ 0 ]
end
2011-06-15 00:51:51 +00:00
2010-10-07 06:24:26 +00:00
if headers [ 'last-modified' ]
info [ :mtime ] = headers [ 'last-modified' ] [ 0 ]
end
2011-06-15 00:51:51 +00:00
2010-10-07 06:24:26 +00:00
# Report the web page to the database
report_web_page ( info )
2011-06-15 00:51:51 +00:00
2010-10-07 06:24:26 +00:00
yield ( :web_page , url ) if block
end
end # End web_page reporting
2011-06-15 00:51:51 +00:00
2010-10-11 02:57:07 +00:00
details = netsparker_vulnerability_map ( vuln )
2011-06-15 00:51:51 +00:00
2010-10-07 06:24:26 +00:00
method = netsparker_method_map ( vuln )
pname = netsparker_pname_map ( vuln )
params = netsparker_params_map ( vuln )
2011-06-15 00:51:51 +00:00
2010-10-11 02:57:07 +00:00
proof = ''
2011-06-15 00:51:51 +00:00
2010-10-11 02:57:07 +00:00
if vuln [ 'info' ] and vuln [ 'info' ] . length > 0
proof << vuln [ 'info' ] . map { | x | " #{ x [ 0 ] } : #{ x [ 1 ] } \n " } . join + " \n "
end
2011-06-15 00:51:51 +00:00
2010-10-11 02:57:07 +00:00
if proof . empty?
if body
proof << body + " \n "
else
proof << vuln [ 'response' ] . to_s + " \n "
end
end
2011-06-15 00:51:51 +00:00
2010-10-11 02:57:07 +00:00
if params . empty? and pname
params = [ [ pname , vuln [ 'vparam_name' ] . to_s ] ]
end
2010-10-07 06:24:26 +00:00
info = {
:path = > uri . path ,
:query = > uri . query ,
:method = > method ,
:params = > params ,
:pname = > pname . to_s ,
2010-10-11 02:57:07 +00:00
:proof = > proof ,
:risk = > details [ :risk ] ,
:name = > details [ :name ] ,
:blame = > details [ :blame ] ,
:category = > details [ :category ] ,
:description = > details [ :description ] ,
2011-06-15 00:51:51 +00:00
:confidence = > details [ :confidence ] ,
2010-10-07 06:24:26 +00:00
}
info . merge! ( data )
2011-06-15 00:51:51 +00:00
2010-10-07 06:24:26 +00:00
next if vuln [ 'type' ] . to_s . empty?
2011-06-15 00:51:51 +00:00
2010-10-07 06:24:26 +00:00
report_web_vuln ( info )
2011-06-15 00:51:51 +00:00
yield ( :web_vuln , url ) if block
2010-10-07 06:24:26 +00:00
end
# We throw interrupts in our parser when the job is hopeless
begin
REXML :: Document . parse_stream ( data , parser )
2010-10-09 22:09:35 +00:00
rescue :: Interrupt = > e
2010-10-07 06:24:26 +00:00
wlog ( " The netsparker_xml_import() job was interrupted: #{ e } " )
end
end
2011-06-15 00:51:51 +00:00
2010-10-07 06:24:26 +00:00
def netsparker_method_map ( vuln )
case vuln [ 'vparam_type' ]
when " FullQueryString "
" GET "
when " Querystring "
" GET "
when " Post "
" POST "
when " RawUrlInjection "
" GET "
else
" GET "
end
end
2011-06-15 00:51:51 +00:00
2010-10-07 06:24:26 +00:00
def netsparker_pname_map ( vuln )
case vuln [ 'vparam_name' ]
2010-10-11 02:57:07 +00:00
when " URI-BASED " , " Query Based "
" PATH "
2010-10-07 06:24:26 +00:00
else
vuln [ 'vparam_name' ]
end
end
2011-06-15 00:51:51 +00:00
2010-10-07 06:24:26 +00:00
def netsparker_params_map ( vuln )
[ ]
end
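# Illustrative results for the three small mapping helpers above (hypothetical input):
#   netsparker_method_map({ 'vparam_type' => 'Post' })      # => "POST"
#   netsparker_pname_map({ 'vparam_name' => 'URI-BASED' })  # => "PATH"
#   netsparker_params_map({})                               # => []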
2011-06-15 00:51:51 +00:00
2010-10-11 02:57:07 +00:00
def netsparker_vulnerability_map ( vuln )
res = {
:risk = > 1 ,
:name = > 'Information Disclosure' ,
:blame = > 'System Administrator' ,
:category = > 'info' ,
:description = > " This is an information leak " ,
:confidence = > 100
}
2011-06-15 00:51:51 +00:00
2010-10-11 02:57:07 +00:00
# Risk is a value from 1-5 indicating the severity of the issue
# Examples: 1, 4, 5
2011-06-15 00:51:51 +00:00
2010-10-11 02:57:07 +00:00
# Name is a descriptive name for this vulnerability.
# Examples: XSS, ReflectiveXSS, PersistentXSS
2011-06-15 00:51:51 +00:00
2010-10-11 02:57:07 +00:00
# Blame indicates who is at fault for the vulnerability
# Examples: App Developer, Server Developer, System Administrator
# Category indicates the general class of vulnerability
# Examples: info, xss, sql, rfi, lfi, cmd
2011-06-15 00:51:51 +00:00
2010-10-11 02:57:07 +00:00
# Description is a textual summary of the vulnerability
# Examples: "A reflective cross-site scripting attack"
# "The web server leaks the internal IP address"
# "The cookie is not set to HTTP-only"
2011-06-15 00:51:51 +00:00
2010-10-11 02:57:07 +00:00
#
2011-06-15 00:51:51 +00:00
# Confidence is a value from 1 to 100 indicating how confident the
2010-10-11 02:57:07 +00:00
# software is that the results are valid.
# Examples: 100, 90, 75, 15, 10, 0
case vuln [ 'type' ] . to_s
2010-10-07 06:24:26 +00:00
when " ApacheDirectoryListing "
2010-10-11 02:57:07 +00:00
res = {
:risk = > 1 ,
:name = > 'Directory Listing' ,
:blame = > 'System Administrator' ,
:category = > 'info' ,
:description = > " " ,
:confidence = > 100
}
2010-10-07 06:24:26 +00:00
when " ApacheMultiViewsEnabled "
2010-10-11 02:57:07 +00:00
res = {
:risk = > 1 ,
:name = > 'Apache MultiViews Enabled' ,
:blame = > 'System Administrator' ,
:category = > 'info' ,
:description = > " " ,
:confidence = > 100
}
2010-10-07 06:24:26 +00:00
when " ApacheVersion "
2010-10-11 02:57:07 +00:00
res = {
:risk = > 1 ,
:name = > 'Web Server Version' ,
:blame = > 'System Administrator' ,
:category = > 'info' ,
:description = > " " ,
:confidence = > 100
}
when " PHPVersion "
res = {
:risk = > 1 ,
:name = > 'PHP Module Version' ,
:blame = > 'System Administrator' ,
:category = > 'info' ,
:description = > " " ,
:confidence = > 100
}
2010-10-07 06:24:26 +00:00
when " AutoCompleteEnabled "
2010-10-11 02:57:07 +00:00
res = {
:risk = > 1 ,
:name = > 'Form AutoComplete Enabled' ,
:blame = > 'App Developer' ,
:category = > 'info' ,
:description = > " " ,
:confidence = > 100
}
2010-10-07 06:24:26 +00:00
when " CookieNotMarkedAsHttpOnly "
2010-10-11 02:57:07 +00:00
res = {
:risk = > 1 ,
:name = > 'Cookie Not HttpOnly' ,
:blame = > 'App Developer' ,
:category = > 'info' ,
:description = > " " ,
:confidence = > 100
}
2010-10-07 06:24:26 +00:00
when " EmailDisclosure "
2010-10-11 02:57:07 +00:00
res = {
:risk = > 1 ,
:name = > 'Email Address Disclosure' ,
:blame = > 'App Developer' ,
:category = > 'info' ,
:description = > " " ,
:confidence = > 100
}
2010-10-07 06:24:26 +00:00
when " ForbiddenResource "
2010-10-11 02:57:07 +00:00
res = {
:risk = > 1 ,
:name = > 'Forbidden Resource' ,
:blame = > 'App Developer' ,
:category = > 'info' ,
:description = > " " ,
:confidence = > 100
}
when " FileUploadFound "
res = {
:risk = > 1 ,
:name = > 'File Upload Form' ,
:blame = > 'App Developer' ,
:category = > 'info' ,
:description = > " " ,
:confidence = > 100
}
2010-10-07 06:24:26 +00:00
when " PasswordOverHTTP "
2010-10-11 02:57:07 +00:00
res = {
:risk = > 2 ,
:name = > 'Password Over HTTP' ,
:blame = > 'App Developer' ,
:category = > 'info' ,
:description = > " " ,
:confidence = > 100
}
when " MySQL5Identified "
res = {
:risk = > 1 ,
:name = > 'MySQL 5 Identified' ,
:blame = > 'App Developer' ,
:category = > 'info' ,
:description = > " " ,
:confidence = > 100
}
2010-10-07 06:24:26 +00:00
when " PossibleInternalWindowsPathLeakage "
2010-10-11 02:57:07 +00:00
res = {
:risk = > 1 ,
:name = > 'Path Leakage - Windows' ,
:blame = > 'App Developer' ,
:category = > 'info' ,
:description = > " " ,
:confidence = > 100
}
when " PossibleInternalUnixPathLeakage "
res = {
:risk = > 1 ,
:name = > 'Path Leakage - Unix' ,
:blame = > 'App Developer' ,
:category = > 'info' ,
:description = > " " ,
:confidence = > 100
2011-06-15 00:51:51 +00:00
}
when " PossibleXSS " , " LowPossibilityPermanentXSS " , " XSS " , " PermanentXSS "
2010-10-11 02:57:07 +00:00
conf = 100
conf = 25 if vuln [ 'type' ] . to_s == " LowPossibilityPermanentXSS "
conf = 50 if vuln [ 'type' ] . to_s == " PossibleXSS "
res = {
:risk = > 3 ,
:name = > 'Cross-Site Scripting' ,
:blame = > 'App Developer' ,
:category = > 'xss' ,
:description = > " " ,
:confidence = > conf
2011-06-15 00:51:51 +00:00
}
2010-10-11 02:57:07 +00:00
when " ConfirmedBlindSQLInjection " , " ConfirmedSQLInjection " , " HighlyPossibleSqlInjection " , " DatabaseErrorMessages "
conf = 100
conf = 90 if vuln [ 'type' ] . to_s == " HighlyPossibleSqlInjection "
conf = 25 if vuln [ 'type' ] . to_s == " DatabaseErrorMessages "
res = {
:risk = > 5 ,
:name = > 'SQL Injection' ,
:blame = > 'App Developer' ,
:category = > 'sql' ,
:description = > " " ,
:confidence = > conf
2011-06-15 00:51:51 +00:00
}
2010-10-07 06:24:26 +00:00
else
2010-10-11 02:57:07 +00:00
conf = 100
res = {
:risk = > 1 ,
:name = > vuln [ 'type' ] . to_s ,
:blame = > 'App Developer' ,
:category = > 'info' ,
:description = > " " ,
:confidence = > conf
2011-06-15 00:51:51 +00:00
}
2010-10-07 06:24:26 +00:00
end
2011-06-15 00:51:51 +00:00
2010-10-11 02:57:07 +00:00
res
2010-10-07 06:24:26 +00:00
end
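# An illustrative mapping (hypothetical vuln hash):
#   netsparker_vulnerability_map({ 'type' => 'PossibleXSS' })
#   # => { :risk => 3, :name => 'Cross-Site Scripting', :blame => 'App Developer',
#   #      :category => 'xss', :description => "", :confidence => 50 }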
2010-10-11 02:57:07 +00:00
2010-01-07 19:06:29 +00:00
#
# Import Nmap's -oX XML output
#
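# Illustrative usage (hypothetical path):
#   import_nmap_xml_file(:filename => "/tmp/scan.xml")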
2010-06-04 14:57:58 +00:00
def import_nmap_xml_file ( args = { } )
filename = args [ :filename ]
wspace = args [ :wspace ] || workspace
2010-11-04 21:44:16 +00:00
data = " "
:: File . open ( filename , 'rb' ) do | f |
data = f . read ( f . stat . size )
end
2010-06-04 14:57:58 +00:00
import_nmap_xml ( args . merge ( :data = > data ) )
2010-01-07 19:06:29 +00:00
end
2010-02-18 06:40:38 +00:00
2011-05-27 17:30:11 +00:00
def import_nexpose_raw_noko_stream ( args , & block )
if block
doc = Rex :: Parser :: NexposeRawDocument . new ( args , framework . db ) { | type , data | yield type , data }
else
doc = Rex :: Parser :: NexposeRawDocument . new ( args , self )
end
parser = :: Nokogiri :: XML :: SAX :: Parser . new ( doc )
parser . parse ( args [ :data ] )
end
2011-05-26 20:29:47 +00:00
def import_nexpose_noko_stream ( args , & block )
if block
doc = Rex :: Parser :: NexposeSimpleDocument . new ( args , framework . db ) { | type , data | yield type , data }
else
doc = Rex :: Parser :: NexposeSimpleDocument . new ( args , self )
end
parser = :: Nokogiri :: XML :: SAX :: Parser . new ( doc )
parser . parse ( args [ :data ] )
end
2011-05-24 19:40:50 +00:00
def import_nmap_noko_stream ( args , & block )
if block
doc = Rex :: Parser :: NmapDocument . new ( args , framework . db ) { | type , data | yield type , data }
else
doc = Rex :: Parser :: NmapDocument . new ( args , self )
end
parser = :: Nokogiri :: XML :: SAX :: Parser . new ( doc )
parser . parse ( args [ :data ] )
end
# If you have Nokogiri installed, you'll be shunted over to
# that. Otherwise, you'll hit the old NmapXMLStreamParser.
2010-06-08 19:16:20 +00:00
def import_nmap_xml ( args = { } , & block )
2010-06-04 14:57:58 +00:00
wspace = args [ :wspace ] || workspace
bl = validate_ips ( args [ :blacklist ] ) ? args [ :blacklist ] . split : [ ]
2011-05-24 19:40:50 +00:00
if Rex :: Parser . nokogiri_loaded
noko_args = args . dup
noko_args [ :blacklist ] = bl
noko_args [ :wspace ] = wspace
if block
yield ( :parser , " Nokogiri v #{ :: Nokogiri :: VERSION } " )
import_nmap_noko_stream ( noko_args ) { | type , data | yield type , data }
else
import_nmap_noko_stream ( noko_args )
end
return true
end
# XXX: Legacy nmap xml parser starts here.
2010-06-16 18:22:42 +00:00
fix_services = args [ :fix_services ]
2011-05-24 19:40:50 +00:00
data = args [ :data ]
2010-06-04 14:57:58 +00:00
2010-01-07 19:06:29 +00:00
# Use a stream parser instead of a tree parser so we can deal with
# huge results files without running out of memory.
parser = Rex :: Parser :: NmapXMLStreamParser . new
2011-05-24 19:40:50 +00:00
yield ( :parser , parser . class . name ) if block
2010-01-07 19:06:29 +00:00
# Whenever the parser pulls a host out of the nmap results, store
# it, along with any associated services, in the database.
parser . on_found_host = Proc . new { | h |
2011-04-22 20:16:55 +00:00
hobj = nil
2010-02-26 13:40:32 +00:00
data = { :workspace = > wspace }
2010-01-07 19:06:29 +00:00
if ( h [ " addrs " ] . has_key? ( " ipv4 " ) )
2010-01-10 17:53:12 +00:00
addr = h [ " addrs " ] [ " ipv4 " ]
2010-01-07 19:06:29 +00:00
elsif ( h [ " addrs " ] . has_key? ( " ipv6 " ) )
2010-01-10 17:53:12 +00:00
addr = h [ " addrs " ] [ " ipv6 " ]
2010-01-07 19:06:29 +00:00
else
# Can't report it if it doesn't have an IP
2010-05-03 01:03:49 +00:00
raise RuntimeError , " At least one IPv4 or IPv6 address is required "
2010-01-07 19:06:29 +00:00
end
2010-06-08 19:16:20 +00:00
next if bl . include? addr
2010-01-10 17:53:12 +00:00
data [ :host ] = addr
2010-01-07 19:06:29 +00:00
if ( h [ " addrs " ] . has_key? ( " mac " ) )
2010-02-14 18:32:37 +00:00
data [ :mac ] = h [ " addrs " ] [ " mac " ]
2010-01-07 19:06:29 +00:00
end
2010-02-05 15:51:46 +00:00
data [ :state ] = ( h [ " status " ] == " up " ) ? Msf :: HostState :: Alive : Msf :: HostState :: Dead
2010-02-14 18:32:37 +00:00
if ( h [ " reverse_dns " ] )
data [ :name ] = h [ " reverse_dns " ]
end
2010-05-07 17:35:49 +00:00
# Only report alive hosts with ports to speak of.
2010-02-15 22:59:55 +00:00
if ( data [ :state ] != Msf :: HostState :: Dead )
2010-05-07 17:35:49 +00:00
if h [ " ports " ] . size > 0
2010-06-22 22:37:14 +00:00
if fix_services
port_states = h [ " ports " ] . map { | p | p [ " state " ] } . reject { | p | p == " filtered " }
next if port_states . compact . empty?
end
2010-06-08 19:16:20 +00:00
yield ( :address , data [ :host ] ) if block
2011-04-22 20:16:55 +00:00
hobj = report_host ( data )
report_import_note ( wspace , hobj )
2010-05-07 17:35:49 +00:00
end
2010-02-15 22:59:55 +00:00
end
2010-01-07 19:06:29 +00:00
2010-03-22 00:36:59 +00:00
if ( h [ " os_vendor " ] )
2010-02-14 19:07:15 +00:00
note = {
2010-02-18 06:40:38 +00:00
:workspace = > wspace ,
2011-04-22 20:16:55 +00:00
:host = > hobj || addr ,
2010-02-14 19:07:15 +00:00
:type = > 'host.os.nmap_fingerprint' ,
:data = > {
:os_vendor = > h [ " os_vendor " ] ,
:os_family = > h [ " os_family " ] ,
:os_version = > h [ " os_version " ] ,
:os_accuracy = > h [ " os_accuracy " ]
}
}
if ( h [ " os_match " ] )
note [ :data ] [ :os_match ] = h [ 'os_match' ]
end
report_note ( note )
end
if ( h [ " last_boot " ] )
report_note (
2010-02-18 06:40:38 +00:00
:workspace = > wspace ,
2011-04-22 20:16:55 +00:00
:host = > hobj || addr ,
2010-02-14 19:07:15 +00:00
:type = > 'host.last_boot' ,
:data = > {
:time = > h [ " last_boot " ]
}
)
end
2011-01-28 23:32:14 +00:00
if ( h [ " trace " ] )
hops = [ ]
h [ " trace " ] [ " hops " ] . each do | hop |
2011-06-15 00:51:51 +00:00
hops << {
2011-01-28 23:32:14 +00:00
" ttl " = > hop [ " ttl " ] . to_i ,
" address " = > hop [ " ipaddr " ] . to_s ,
" rtt " = > hop [ " rtt " ] . to_f ,
" name " = > hop [ " host " ] . to_s
}
end
report_note (
:workspace = > wspace ,
2011-04-22 20:16:55 +00:00
:host = > hobj || addr ,
2011-01-28 23:32:14 +00:00
:type = > 'host.nmap.traceroute' ,
:data = > {
'port' = > h [ " trace " ] [ " port " ] . to_i ,
'proto' = > h [ " trace " ] [ " proto " ] . to_s ,
'hops' = > hops
}
)
end
2011-06-15 00:51:51 +00:00
2010-05-05 19:45:48 +00:00
2010-01-07 19:06:29 +00:00
# Put all the ports, regardless of state, into the db.
h [ " ports " ] . each { | p |
2010-06-18 03:03:11 +00:00
# Localhost port results are pretty unreliable -- if it's
# unknown, it's no good (possibly Windows-only)
if (
p [ " state " ] == " unknown " &&
h [ " status_reason " ] == " localhost-response "
)
next
end
2010-01-07 19:06:29 +00:00
extra = " "
extra << p [ " product " ] + " " if p [ " product " ]
2010-01-14 15:26:20 +00:00
extra << p [ " version " ] + " " if p [ " version " ]
2010-01-07 19:06:29 +00:00
extra << p [ " extrainfo " ] + " " if p [ " extrainfo " ]
data = { }
2010-02-18 06:40:38 +00:00
data [ :workspace ] = wspace
2010-06-16 18:22:42 +00:00
if fix_services
2010-10-06 15:55:28 +00:00
data [ :proto ] = nmap_msf_service_map ( p [ " protocol " ] )
2010-06-22 22:37:14 +00:00
else
data [ :proto ] = p [ " protocol " ] . downcase
2010-06-16 18:22:42 +00:00
end
2010-01-07 19:06:29 +00:00
data [ :port ] = p [ " portid " ] . to_i
data [ :state ] = p [ " state " ]
2011-04-22 20:16:55 +00:00
data [ :host ] = hobj || addr
2010-01-07 19:06:29 +00:00
data [ :info ] = extra if not extra . empty?
if p [ " name " ] != " unknown "
data [ :name ] = p [ " name " ]
end
2010-01-10 17:53:12 +00:00
report_service ( data )
2010-01-07 19:06:29 +00:00
}
2011-05-27 18:51:09 +00:00
# Parse the NSE script output
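# A hypothetical entry in h["scripts"] might look like:
#   { "smb-check-vulns" => "MS08-067: VULNERABLE\n..." }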
if h [ " scripts " ]
2011-06-15 00:51:51 +00:00
h [ " scripts " ] . each do | key , val |
2011-05-27 18:51:09 +00:00
if key == " smb-check-vulns "
if val =~ / MS08-067: VULNERABLE /
vuln_info = {
:workspace = > wspace ,
:host = > hobj || addr ,
:port = > 445 ,
:proto = > 'tcp' ,
2011-06-15 00:51:51 +00:00
:name = > 'MS08-067' ,
2011-05-27 18:51:09 +00:00
:info = > 'Microsoft Windows Server Service Crafted RPC Request Handling Unspecified Remote Code Execution' ,
:refs = > [ 'CVE-2008-4250' ,
'BID-31874' ,
'OSVDB-49243' ,
'CWE-94' ,
'MSFT-MS08-067' ,
'MSF-Microsoft Server Service Relative Path Stack Corruption' ,
'NSS-34476' ]
}
report_vuln ( vuln_info )
end
2011-05-27 23:34:12 +00:00
if val =~ / MS06-025: VULNERABLE /
vuln_info = {
:workspace = > wspace ,
:host = > hobj || addr ,
:port = > 445 ,
:proto = > 'tcp' ,
2011-06-15 00:51:51 +00:00
:name = > 'MS06-025' ,
2011-05-27 23:34:12 +00:00
:info = > 'Vulnerability in Routing and Remote Access Could Allow Remote Code Execution' ,
:refs = > [ 'CVE-2006-2370' ,
'CVE-2006-2371' ,
'BID-18325' ,
'BID-18358' ,
'BID-18424' ,
'OSVDB-26436' ,
'OSVDB-26437' ,
'MSFT-MS06-025' ,
'MSF-Microsoft RRAS Service RASMAN Registry Overflow' ,
'NSS-21689' ]
}
report_vuln ( vuln_info )
end
# This one has NOT been tested; remove this comment once confirmed working
if val =~ / MS07-029: VULNERABLE /
vuln_info = {
:workspace = > wspace ,
:host = > hobj || addr ,
:port = > 445 ,
:proto = > 'tcp' ,
2011-06-15 00:51:51 +00:00
:name = > 'MS07-029' ,
2011-05-27 23:34:12 +00:00
:info = > 'Vulnerability in Windows DNS RPC Interface Could Allow Remote Code Execution' ,
# TODO: Add more refs based on Nessus/Nexpose results
:refs = > [ 'CVE-2007-1748' ,
'OSVDB-34100' ,
'MSF-Microsoft DNS RPC Service extractQuotedChar()' ,
'NSS-25168' ]
}
report_vuln ( vuln_info )
end
2011-05-27 18:51:09 +00:00
end
end
end
2010-01-07 19:06:29 +00:00
}
2011-05-24 19:40:50 +00:00
# XXX: Legacy nmap xml parser ends here.
2011-06-15 00:51:51 +00:00
2010-01-07 19:06:29 +00:00
REXML :: Document . parse_stream ( data , parser )
end
2011-05-31 19:27:22 +00:00
# This is starting to be more than just nmap -> msf, other
# things are creeping in here. Consider renaming the method
# and intentionally making it more general.
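# Illustrative mappings (hypothetical input):
#   nmap_msf_service_map("netbios-ssn")  # => "smb"
#   nmap_msf_service_map("http-proxy")   # => "http"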
2010-10-06 15:55:28 +00:00
def nmap_msf_service_map ( proto )
2010-06-16 18:22:42 +00:00
return proto unless proto . kind_of? String
case proto . downcase
when " msrpc " , " nfs-or-iis " ; " dcerpc "
when " netbios-ns " ; " netbios "
when " netbios-ssn " , " microsoft-ds " ; " smb "
when " ms-sql-s " ; " mssql "
when " ms-sql-m " ; " mssql-m "
when " postgresql " ; " postgres "
when " http-proxy " ; " http "
when " iiimsf " ; " db2 "
2011-05-31 15:24:42 +00:00
when " oracle-tns " ; " oracle "
2011-05-31 19:27:22 +00:00
when / ^dns-(udp|tcp)$ / ; " dns "
when / ^dce[ \ s+]rpc$ / ; " dcerpc "
2010-06-16 18:22:42 +00:00
else
2010-06-22 22:37:14 +00:00
proto . downcase
2010-06-16 18:22:42 +00:00
end
end
2010-05-05 19:45:48 +00:00
def report_import_note ( wspace , addr )
2010-05-07 17:35:49 +00:00
if @import_filedata . kind_of? ( Hash ) && @import_filedata [ :filename ] && @import_filedata [ :filename ] !~ / msfe-nmap[0-9]{8} /
2010-05-05 19:45:48 +00:00
report_note (
:workspace = > wspace ,
:host = > addr ,
:type = > 'host.imported' ,
:data = > @import_filedata . merge ( :time = > Time . now . utc )
)
end
end
2010-01-07 19:06:29 +00:00
#
# Import Nessus NBE files
#
2010-06-04 14:57:58 +00:00
def import_nessus_nbe_file ( args = { } )
filename = args [ :filename ]
wspace = args [ :wspace ] || workspace
2010-11-04 21:44:16 +00:00
data = " "
:: File . open ( filename , 'rb' ) do | f |
data = f . read ( f . stat . size )
end
2010-06-04 14:57:58 +00:00
import_nessus_nbe ( args . merge ( :data = > data ) )
2010-01-07 19:06:29 +00:00
end
2010-05-20 14:08:29 +00:00
2011-05-31 16:22:42 +00:00
# There is no place the NBE actually stores the plugin name used to
# scan. You get "Security Note" or "Security Warning," and that's it.
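# An NBE "results" line is pipe-delimited; a hypothetical example:
#   results|192.168.1|192.168.1.20|ssh (22/tcp)|10267|Security Note|SSH banner ...
# After split('|'): r[0] is the record type, r[2] the hostname, r[3] the port,
# r[4] the NASL plugin id, r[5] the type, and r[6] the data.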
2010-06-08 19:16:20 +00:00
def import_nessus_nbe ( args = { } , & block )
2010-06-04 14:57:58 +00:00
data = args [ :data ]
wspace = args [ :wspace ] || workspace
bl = validate_ips ( args [ :blacklist ] ) ? args [ :blacklist ] . split : [ ]
nbe_copy = data . dup
2010-07-06 16:10:05 +00:00
# First pass, just to build the address map.
2010-05-12 23:00:21 +00:00
addr_map = { }
2011-06-15 00:51:51 +00:00
2011-04-24 00:54:43 +00:00
# Cache host objects before passing into handle_nessus()
2011-04-24 02:32:43 +00:00
hobj_map = { }
2010-05-12 23:00:21 +00:00
nbe_copy . each_line do | line |
2010-01-07 19:06:29 +00:00
r = line . split ( '|' )
next if r [ 0 ] != 'results'
2010-05-12 23:00:21 +00:00
next if r [ 4 ] != " 12053 "
data = r [ 6 ]
addr , hname = data . match ( / ([0-9 \ x2e]+) resolves as (.+) \ x2e \\ n / ) [ 1 , 2 ]
addr_map [ hname ] = addr
end
2010-06-04 14:57:58 +00:00
data . each_line do | line |
2010-05-12 23:00:21 +00:00
r = line . split ( '|' )
next if r [ 0 ] != 'results'
hname = r [ 2 ]
2010-05-20 14:08:29 +00:00
if addr_map [ hname ]
addr = addr_map [ hname ]
else
addr = hname # Must be unresolved, probably an IP address.
end
2010-01-07 19:06:29 +00:00
port = r [ 3 ]
nasl = r [ 4 ]
type = r [ 5 ]
data = r [ 6 ]
2010-05-20 14:08:29 +00:00
# If there's no resolution, or if it's malformed, skip it.
2010-05-12 22:23:40 +00:00
next unless ipv4_validator ( addr )
2010-06-04 14:57:58 +00:00
if bl . include? addr
next
else
2010-06-08 19:16:20 +00:00
yield ( :address , addr ) if block
2010-06-04 14:57:58 +00:00
end
2011-06-15 00:51:51 +00:00
2011-04-24 00:54:43 +00:00
hobj_map [ addr ] || = report_host ( :host = > addr , :workspace = > wspace )
2010-06-04 14:57:58 +00:00
2010-01-07 19:06:29 +00:00
# Match the NBE types with the XML severity ratings
case type
# log messages don't actually have any data, they are just
# complaints about not being able to perform this or that test
# because such-and-such was missing
when " Log Message " ; next
when " Security Hole " ; severity = 3
when " Security Warning " ; severity = 2
when " Security Note " ; severity = 1
# a severity 0 means there's no extra data, it's just an open port
else ; severity = 0
end
2010-05-12 23:00:21 +00:00
if nasl == " 11936 "
os = data . match ( / The remote host is running (.*) \\ n / ) [ 1 ]
report_note (
:workspace = > wspace ,
2011-04-24 00:54:43 +00:00
:host = > hobj_map [ addr ] ,
2010-05-12 23:00:21 +00:00
:type = > 'host.os.nessus_fingerprint' ,
:data = > {
:os = > os . to_s . strip
}
)
end
2011-06-15 00:51:51 +00:00
2011-05-02 05:20:07 +00:00
next if nasl . to_s . strip . empty?
2011-05-31 16:22:42 +00:00
plugin_name = nil # NBE doesn't ever populate this
handle_nessus ( wspace , hobj_map [ addr ] , port , nasl , plugin_name , severity , data )
2010-01-07 19:06:29 +00:00
end
end
#
# Of course they had to change the nessus format.
#
2010-06-08 19:16:20 +00:00
def import_openvas_xml ( args = { } , & block )
2010-06-04 14:57:58 +00:00
filename = args [ :filename ]
wspace = args [ :wspace ] || workspace
2010-01-14 15:26:20 +00:00
raise DBImportError . new ( " No OpenVAS XML support. Please submit a patch to msfdev[at]metasploit.com " )
2010-01-07 19:06:29 +00:00
end
2010-11-04 21:44:16 +00:00
#
# Import IP360 XML v3 output
#
def import_ip360_xml_file ( args = { } )
filename = args [ :filename ]
wspace = args [ :wspace ] || workspace
data = " "
:: File . open ( filename , 'rb' ) do | f |
data = f . read ( f . stat . size )
end
import_ip360_xml_v3 ( args . merge ( :data = > data ) )
end
2010-01-07 19:06:29 +00:00
#
2010-01-14 12:57:26 +00:00
# Import Nessus XML v1 and v2 output
2010-01-07 19:06:29 +00:00
#
# Old versions of openvas exported this as well
#
2010-06-04 14:57:58 +00:00
def import_nessus_xml_file ( args = { } )
filename = args [ :filename ]
wspace = args [ :wspace ] || workspace
2010-11-04 21:44:16 +00:00
data = " "
:: File . open ( filename , 'rb' ) do | f |
data = f . read ( f . stat . size )
end
2010-01-14 12:57:26 +00:00
if data . index ( " NessusClientData_v2 " )
2010-06-04 14:57:58 +00:00
import_nessus_xml_v2 ( args . merge ( :data = > data ) )
2010-01-14 12:57:26 +00:00
else
2010-06-04 14:57:58 +00:00
import_nessus_xml ( args . merge ( :data = > data ) )
2010-01-14 12:57:26 +00:00
end
2010-01-07 19:06:29 +00:00
end
2010-01-14 12:57:26 +00:00
2010-06-08 19:16:20 +00:00
def import_nessus_xml ( args = { } , & block )
2010-06-04 14:57:58 +00:00
data = args [ :data ]
wspace = args [ :wspace ] || workspace
bl = validate_ips ( args [ :blacklist ] ) ? args [ :blacklist ] . split : [ ]
2010-01-07 19:06:29 +00:00
2010-04-26 20:25:42 +00:00
doc = rexmlify ( data )
2010-01-07 19:06:29 +00:00
doc . elements . each ( '/NessusClientData/Report/ReportHost' ) do | host |
2011-04-22 20:16:55 +00:00
hobj = nil
2010-05-12 22:23:40 +00:00
addr = nil
hname = nil
os = nil
2010-07-06 16:10:05 +00:00
# If the name is resolved, the Nessus plugin for DNS
2010-05-12 22:23:40 +00:00
# resolution should be there. If not, fall back to the
# HostName
host . elements . each ( 'ReportItem' ) do | item |
next unless item . elements [ 'pluginID' ] . text == " 12053 "
addr = item . elements [ 'data' ] . text . match ( / ([0-9 \ x2e]+) resolves as / ) [ 1 ]
hname = host . elements [ 'HostName' ] . text
end
addr || = host . elements [ 'HostName' ] . text
2010-05-12 18:10:37 +00:00
next unless ipv4_validator ( addr ) # Skip resolved names and SCAN-ERROR.
2010-06-04 14:57:58 +00:00
if bl . include? addr
next
else
2010-06-08 19:16:20 +00:00
yield ( :address , addr ) if block
2010-06-04 14:57:58 +00:00
end
2010-05-12 18:10:37 +00:00
2010-05-12 22:23:40 +00:00
hinfo = {
:workspace = > wspace ,
:host = > addr
}
# Record the hostname
hinfo . merge! ( :name = > hname . to_s . strip ) if hname
2011-04-22 20:16:55 +00:00
hobj = report_host ( hinfo )
report_import_note ( wspace , hobj )
2010-07-06 16:10:05 +00:00
2010-05-12 22:23:40 +00:00
# Record the OS
os || = host . elements [ " os_name " ]
if os
report_note (
:workspace = > wspace ,
2011-04-22 20:16:55 +00:00
:host = > hobj ,
2010-05-12 22:23:40 +00:00
:type = > 'host.os.nessus_fingerprint' ,
:data = > {
:os = > os . text . to_s . strip
}
)
end
2010-01-07 19:06:29 +00:00
host . elements . each ( 'ReportItem' ) do | item |
nasl = item . elements [ 'pluginID' ] . text
2011-05-31 16:22:42 +00:00
plugin_name = item . elements [ 'pluginName' ] . text
2010-01-07 19:06:29 +00:00
port = item . elements [ 'port' ] . text
data = item . elements [ 'data' ] . text
severity = item . elements [ 'severity' ] . text
2011-05-31 16:22:42 +00:00
handle_nessus ( wspace , hobj , port , nasl , plugin_name , severity , data )
2010-01-07 19:06:29 +00:00
end
end
end
2010-06-08 19:16:20 +00:00
def import_nessus_xml_v2 ( args = { } , & block )
2010-06-04 14:57:58 +00:00
data = args [ :data ]
wspace = args [ :wspace ] || workspace
bl = validate_ips ( args [ :blacklist ] ) ? args [ :blacklist ] . split : [ ]
2011-06-15 00:51:51 +00:00
2010-10-14 18:54:35 +00:00
#@host = {
#'hname' => nil,
#'addr' => nil,
#'mac' => nil,
#'os' => nil,
#'ports' => [ 'port' => { 'port' => nil,
# 'svc_name' => nil,
# 'proto' => nil,
# 'severity' => nil,
# 'nasl' => nil,
# 'description' => nil,
# 'cve' => [],
# 'bid' => [],
# 'xref' => []
# }
# ]
#}
parser = Rex :: Parser :: NessusXMLStreamParser . new
parser . on_found_host = Proc . new { | host |
2011-06-15 00:51:51 +00:00
2011-04-22 20:16:55 +00:00
hobj = nil
2010-10-14 18:54:35 +00:00
addr = host [ 'addr' ] || host [ 'hname' ]
2011-06-15 00:51:51 +00:00
2010-05-12 18:10:37 +00:00
next unless ipv4_validator ( addr ) # Catches SCAN-ERROR, among others.
2011-06-15 00:51:51 +00:00
2010-06-04 14:57:58 +00:00
if bl . include? addr
next
else
2010-06-08 19:16:20 +00:00
yield ( :address , addr ) if block
2010-06-04 14:57:58 +00:00
end
2011-04-01 01:01:11 +00:00
os = host [ 'os' ]
hname = host [ 'hname' ]
mac = host [ 'mac' ]
host_info = {
:workspace = > wspace ,
:host = > addr ,
}
host_info [ :name ] = hname . to_s . strip if hname
2011-07-19 21:27:02 +00:00
# Nessus sometimes saves several whitespace-separated MAC addresses;
# we can only use one, so take just the first.
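# Hypothetical example: "00:0c:29:aa:bb:cc 00:50:56:dd:ee:ff" is stored
# as just "00:0C:29:AA:BB:CC".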
host_info [ :mac ] = mac . to_s . strip . upcase . split ( / \ s+ / ) . first if mac
2011-04-01 01:01:11 +00:00
2011-04-22 20:16:55 +00:00
hobj = report_host ( host_info )
report_import_note ( wspace , hobj )
2011-06-15 00:51:51 +00:00
2010-10-14 18:54:35 +00:00
os = host [ 'os' ]
yield ( :os , os ) if block
2010-05-03 01:49:00 +00:00
if os
report_note (
:workspace = > wspace ,
2011-04-22 20:16:55 +00:00
:host = > hobj ,
2010-05-03 01:49:00 +00:00
:type = > 'host.os.nessus_fingerprint' ,
:data = > {
2010-10-14 18:54:35 +00:00
:os = > os . to_s . strip
2010-05-03 01:49:00 +00:00
}
)
end
2011-06-15 00:51:51 +00:00
2010-10-14 18:54:35 +00:00
host [ 'ports' ] . each do | item |
next if item [ 'port' ] == 0
msf = nil
nasl = item [ 'nasl' ] . to_s
2011-05-17 22:18:43 +00:00
nasl_name = item [ 'nasl_name' ] . to_s
2010-10-14 18:54:35 +00:00
port = item [ 'port' ] . to_s
proto = item [ 'proto' ] || " tcp "
2011-04-28 18:52:35 +00:00
sname = item [ 'svc_name' ]
2010-10-14 18:54:35 +00:00
severity = item [ 'severity' ]
description = item [ 'description' ]
2011-06-15 00:51:51 +00:00
cve = item [ 'cve' ]
2010-10-14 18:54:35 +00:00
bid = item [ 'bid' ]
xref = item [ 'xref' ]
msf = item [ 'msf' ]
2011-06-15 00:51:51 +00:00
2010-10-14 18:54:35 +00:00
yield ( :port , port ) if block
2011-06-15 00:51:51 +00:00
2011-05-17 22:18:43 +00:00
handle_nessus_v2 ( wspace , hobj , port , proto , sname , nasl , nasl_name , severity , description , cve , bid , xref , msf )
2011-06-15 00:51:51 +00:00
2010-01-14 12:57:26 +00:00
end
2010-10-14 18:54:35 +00:00
yield ( :end , hname ) if block
}
2011-06-15 00:51:51 +00:00
2010-10-14 18:54:35 +00:00
REXML :: Document . parse_stream ( data , parser )
2011-06-15 00:51:51 +00:00
2010-01-14 12:57:26 +00:00
end
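# The Nokogiri-backed importers below (MBSA, Foundstone, Acunetix, CI,
# AppScan, Burp) all follow the same pattern: bail out unless the Nokogiri
# parsers loaded, copy the blacklist and workspace into the args, and hand
# the raw XML to a SAX document class via a *_noko_stream helper.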
2010-01-14 15:26:20 +00:00
2011-06-01 00:01:46 +00:00
def import_mbsa_xml ( args = { } , & block )
bl = validate_ips ( args [ :blacklist ] ) ? args [ :blacklist ] . split : [ ]
wspace = args [ :wspace ] || workspace
2011-06-15 00:51:51 +00:00
if Rex :: Parser . nokogiri_loaded
2011-06-01 00:01:46 +00:00
parser = " Nokogiri v #{ :: Nokogiri :: VERSION } "
noko_args = args . dup
noko_args [ :blacklist ] = bl
noko_args [ :wspace ] = wspace
if block
yield ( :parser , parser )
import_mbsa_noko_stream ( noko_args ) { | type , data | yield type , data }
else
2011-06-15 00:51:51 +00:00
import_mbsa_noko_stream ( noko_args )
2011-06-01 00:01:46 +00:00
end
return true
2011-06-15 00:51:51 +00:00
else # Sorry
2011-06-01 00:01:46 +00:00
raise DBImportError . new ( " Could not import due to missing Nokogiri parser. Try 'gem install nokogiri'. " )
end
end
def import_mbsa_noko_stream ( args = { } , & block )
if block
doc = Rex :: Parser :: MbsaDocument . new ( args , framework . db ) { | type , data | yield type , data }
else
doc = Rex :: Parser :: MbsaDocument . new ( args , self )
end
parser = :: Nokogiri :: XML :: SAX :: Parser . new ( doc )
parser . parse ( args [ :data ] )
end
2011-05-31 15:24:42 +00:00
def import_foundstone_xml ( args = { } , & block )
bl = validate_ips ( args [ :blacklist ] ) ? args [ :blacklist ] . split : [ ]
wspace = args [ :wspace ] || workspace
2011-06-15 00:51:51 +00:00
if Rex :: Parser . nokogiri_loaded
2011-05-31 15:24:42 +00:00
parser = " Nokogiri v #{ :: Nokogiri :: VERSION } "
noko_args = args . dup
noko_args [ :blacklist ] = bl
noko_args [ :wspace ] = wspace
if block
yield ( :parser , parser )
import_foundstone_noko_stream ( noko_args ) { | type , data | yield type , data }
else
2011-06-15 00:51:51 +00:00
import_foundstone_noko_stream ( noko_args )
2011-05-31 15:24:42 +00:00
end
return true
2011-06-01 00:01:46 +00:00
else # Sorry
2011-05-31 15:24:42 +00:00
raise DBImportError . new ( " Could not import due to missing Nokogiri parser. Try 'gem install nokogiri'. " )
end
end
def import_foundstone_noko_stream ( args = { } , & block )
if block
doc = Rex :: Parser :: FoundstoneDocument . new ( args , framework . db ) { | type , data | yield type , data }
else
doc = Rex :: Parser :: FoundstoneDocument . new ( args , self )
end
parser = :: Nokogiri :: XML :: SAX :: Parser . new ( doc )
parser . parse ( args [ :data ] )
end
2011-06-02 06:17:38 +00:00
def import_acunetix_xml ( args = { } , & block )
bl = validate_ips ( args [ :blacklist ] ) ? args [ :blacklist ] . split : [ ]
wspace = args [ :wspace ] || workspace
2011-06-15 00:51:51 +00:00
if Rex :: Parser . nokogiri_loaded
2011-06-02 06:17:38 +00:00
parser = " Nokogiri v #{ :: Nokogiri :: VERSION } "
noko_args = args . dup
noko_args [ :blacklist ] = bl
noko_args [ :wspace ] = wspace
if block
yield ( :parser , parser )
import_acunetix_noko_stream ( noko_args ) { | type , data | yield type , data }
else
2011-06-15 00:51:51 +00:00
import_acunetix_noko_stream ( noko_args )
2011-06-02 06:17:38 +00:00
end
return true
else # Sorry
raise DBImportError . new ( " Could not import due to missing Nokogiri parser. Try 'gem install nokogiri'. " )
end
end
2011-07-28 21:17:30 +00:00
def import_ci_xml ( args = { } , & block )
bl = validate_ips ( args [ :blacklist ] ) ? args [ :blacklist ] . split : [ ]
wspace = args [ :wspace ] || workspace
if Rex :: Parser . nokogiri_loaded
parser = " Nokogiri v #{ :: Nokogiri :: VERSION } "
noko_args = args . dup
noko_args [ :blacklist ] = bl
noko_args [ :wspace ] = wspace
if block
yield ( :parser , parser )
import_ci_noko_stream ( noko_args ) { | type , data | yield type , data }
else
import_ci_noko_stream ( noko_args )
end
return true
else # Sorry
raise DBImportError . new ( " Could not import due to missing Nokogiri parser. Try 'gem install nokogiri'. " )
end
end
2011-06-02 06:17:38 +00:00
def import_acunetix_noko_stream ( args = { } , & block )
if block
doc = Rex :: Parser :: AcunetixDocument . new ( args , framework . db ) { | type , data | yield type , data }
else
doc = Rex :: Parser :: AcunetixDocument . new ( args , self )
end
parser = :: Nokogiri :: XML :: SAX :: Parser . new ( doc )
parser . parse ( args [ :data ] )
end
2011-06-02 22:16:08 +00:00
def import_appscan_xml ( args = { } , & block )
bl = validate_ips ( args [ :blacklist ] ) ? args [ :blacklist ] . split : [ ]
wspace = args [ :wspace ] || workspace
2011-06-15 00:51:51 +00:00
if Rex :: Parser . nokogiri_loaded
2011-06-02 22:16:08 +00:00
parser = " Nokogiri v #{ :: Nokogiri :: VERSION } "
noko_args = args . dup
noko_args [ :blacklist ] = bl
noko_args [ :wspace ] = wspace
if block
yield ( :parser , parser )
import_appscan_noko_stream ( noko_args ) { | type , data | yield type , data }
else
2011-06-15 00:51:51 +00:00
import_appscan_noko_stream ( noko_args )
2011-06-02 22:16:08 +00:00
end
return true
else # Sorry
raise DBImportError . new ( " Could not import due to missing Nokogiri parser. Try 'gem install nokogiri'. " )
end
end
def import_appscan_noko_stream ( args = { } , & block )
if block
doc = Rex :: Parser :: AppscanDocument . new ( args , framework . db ) { | type , data | yield type , data }
else
doc = Rex :: Parser :: AppscanDocument . new ( args , self )
end
parser = :: Nokogiri :: XML :: SAX :: Parser . new ( doc )
parser . parse ( args [ :data ] )
end
2011-06-03 21:17:57 +00:00
def import_burp_session_xml ( args = { } , & block )
bl = validate_ips ( args [ :blacklist ] ) ? args [ :blacklist ] . split : [ ]
wspace = args [ :wspace ] || workspace
2011-06-15 00:51:51 +00:00
if Rex :: Parser . nokogiri_loaded
2011-06-03 21:17:57 +00:00
# Rex::Parser.reload("burp_session_nokogiri.rb")
parser = " Nokogiri v #{ :: Nokogiri :: VERSION } "
noko_args = args . dup
noko_args [ :blacklist ] = bl
noko_args [ :wspace ] = wspace
if block
yield ( :parser , parser )
import_burp_session_noko_stream ( noko_args ) { | type , data | yield type , data }
else
2011-06-15 00:51:51 +00:00
import_burp_session_noko_stream ( noko_args )
2011-06-03 21:17:57 +00:00
end
return true
else # Sorry
raise DBImportError . new ( " Could not import due to missing Nokogiri parser. Try 'gem install nokogiri'. " )
end
end
def import_burp_session_noko_stream ( args = { } , & block )
if block
doc = Rex :: Parser :: BurpSessionDocument . new ( args , framework . db ) { | type , data | yield type , data }
else
doc = Rex :: Parser :: BurpSessionDocument . new ( args , self )
end
parser = :: Nokogiri :: XML :: SAX :: Parser . new ( doc )
parser . parse ( args [ :data ] )
end
2011-06-15 00:51:51 +00:00
#
# Import IP360's ASPL database
#
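# The raw ASPL XML is cached under #{Msf::Config.config_directory}/data/ncircle/
# so that import_ip360_xml_v3 below can resolve vuln names, CVEs, and BIDs.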
def import_ip360_aspl_xml ( args = { } , & block )
data = args [ :data ]
wspace = args [ :wspace ] || workspace
bl = validate_ips ( args [ :blacklist ] ) ? args [ :blacklist ] . split : [ ]
if not data . index ( " <ontology " )
raise DBImportError . new ( " The ASPL file does not appear to be valid or may still be compressed " )
end
base = :: File . join ( Msf :: Config . config_directory , " data " , " ncircle " )
:: FileUtils . mkdir_p ( base )
:: File . open ( :: File . join ( base , " ip360.aspl " ) , " w " ) do | fd |
fd . write ( data )
end
yield ( :notice , " Saved the IP360 ASPL database to #{ base } ... " )
end
2010-11-04 21:44:16 +00:00
#
# Import IP360's XML v3 output
#
def import_ip360_xml_v3 ( args = { } , & block )
data = args [ :data ]
wspace = args [ :wspace ] || workspace
bl = validate_ips ( args [ :blacklist ] ) ? args [ :blacklist ] . split : [ ]
2011-06-15 00:51:51 +00:00
# @aspl = {'vulns' => {'name' => { }, 'cve' => { }, 'bid' => { } }
2010-11-04 21:44:16 +00:00
# 'oses' => {'name' } }
2011-06-15 00:51:51 +00:00
aspl_path = nil
aspl_paths = [
:: File . join ( Msf :: Config . config_directory , " data " , " ncircle " , " ip360.aspl " ) ,
:: File . join ( Msf :: Config . data_directory , " ncircle " , " ip360.aspl " )
]
aspl_paths . each do | tpath |
next if not ( :: File . exist? ( tpath ) and :: File . readable? ( tpath ) )
aspl_path = tpath
break
2010-11-04 21:44:16 +00:00
end
2011-06-15 00:51:51 +00:00
if not aspl_path
raise DBImportError . new ( " The nCircle IP360 ASPL file is not present. \n Download ASPL from nCircle VNE | Administer | Support | Resources, unzip it, and import it first " )
2010-11-04 21:44:16 +00:00
end
# parse nCircle ASPL file
aspl = " "
:: File . open ( aspl_path , " rb " ) do | f |
aspl = f . read ( f . stat . size )
end
2011-06-15 00:51:51 +00:00
2010-11-04 21:44:16 +00:00
@asplhash = nil
parser = Rex :: Parser :: IP360ASPLXMLStreamParser . new
2011-06-15 00:51:51 +00:00
parser . on_found_aspl = Proc . new { | asplh |
2010-11-04 21:44:16 +00:00
@asplhash = asplh
}
REXML :: Document . parse_stream ( aspl , parser )
#@host = {'hname' => nil, 'addr' => nil, 'mac' => nil, 'os' => nil, 'hid' => nil,
# 'vulns' => ['vuln' => {'vulnid' => nil, 'port' => nil, 'proto' => nil } ],
# 'apps' => ['app' => {'appid' => nil, 'svcid' => nil, 'port' => nil, 'proto' => nil } ],
# 'shares' => []
# }
# nCircle escapes some quotes, which breaks the parser;
# we don't need the escapes, so just replace \" with "
data . gsub! ( / \\ " / , '"' )
# parse nCircle Scan Output
parser = Rex :: Parser :: IP360XMLStreamParser . new
parser . on_found_host = Proc . new { | host |
2011-04-22 20:16:55 +00:00
hobj = nil
2010-11-04 21:44:16 +00:00
addr = host [ 'addr' ] || host [ 'hname' ]
2011-06-15 00:51:51 +00:00
2010-11-04 21:44:16 +00:00
next unless ipv4_validator ( addr ) # Catches SCAN-ERROR, among others.
2011-06-15 00:51:51 +00:00
2010-11-04 21:44:16 +00:00
if bl . include? addr
next
else
yield ( :address , addr ) if block
end
2011-04-01 01:01:11 +00:00
2010-11-04 21:44:16 +00:00
os = host [ 'os' ]
2011-04-01 01:01:11 +00:00
hname = host [ 'hname' ]
mac = host [ 'mac' ]
host_hash = {
:workspace = > wspace ,
:host = > addr ,
}
host_hash [ :name ] = hname . to_s . strip if hname
host_hash [ :mac ] = mac . to_s . strip . upcase if mac
2011-04-22 20:16:55 +00:00
hobj = report_host ( host_hash )
2011-06-15 00:51:51 +00:00
2010-11-04 21:44:16 +00:00
yield ( :os , os ) if block
if os
report_note (
:workspace = > wspace ,
2011-04-22 20:16:55 +00:00
:host = > hobj ,
2010-11-04 21:44:16 +00:00
:type = > 'host.os.ip360_fingerprint' ,
:data = > {
:os = > @asplhash [ 'oses' ] [ os ] . to_s . strip
}
)
end
2011-06-15 00:51:51 +00:00
2010-11-04 21:44:16 +00:00
host [ 'apps' ] . each do | item |
port = item [ 'port' ] . to_s
proto = item [ 'proto' ] . to_s
2011-04-22 20:16:55 +00:00
handle_ip360_v3_svc ( wspace , hobj , port , proto , hname )
2010-11-04 21:44:16 +00:00
end
2011-06-15 00:51:51 +00:00
2010-11-04 21:44:16 +00:00
host [ 'vulns' ] . each do | item |
vulnid = item [ 'vulnid' ] . to_s
port = item [ 'port' ] . to_s
proto = item [ 'proto' ] || " tcp "
vulnname = @asplhash [ 'vulns' ] [ 'name' ] [ vulnid ]
cves = @asplhash [ 'vulns' ] [ 'cve' ] [ vulnid ]
bids = @asplhash [ 'vulns' ] [ 'bid' ] [ vulnid ]
2011-06-15 00:51:51 +00:00
2010-11-04 21:44:16 +00:00
yield ( :port , port ) if block
2011-06-15 00:51:51 +00:00
2011-04-22 20:16:55 +00:00
handle_ip360_v3_vuln ( wspace , hobj , port , proto , hname , vulnid , vulnname , cves , bids )
2011-06-15 00:51:51 +00:00
2010-11-04 21:44:16 +00:00
end
yield ( :end , hname ) if block
}
2011-06-15 00:51:51 +00:00
2010-11-04 21:44:16 +00:00
REXML :: Document . parse_stream ( data , parser )
end
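# Builds a map of Qualys QID => [CVE/BID reference strings] from the
# Asset Data Report glossary; import_qualys_asset_xml uses it below to
# decide which findings are real vulns rather than audit information.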
2011-05-12 20:08:33 +00:00
def find_qualys_asset_vuln_refs ( doc )
vuln_refs = { }
doc . elements . each ( " /ASSET_DATA_REPORT/GLOSSARY/VULN_DETAILS_LIST/VULN_DETAILS " ) do | vuln |
next unless vuln . elements [ 'QID' ] && vuln . elements [ 'QID' ] . first
qid = vuln . elements [ 'QID' ] . first . to_s
vuln_refs [ qid ] || = [ ]
if vuln . elements [ " CVE_ID_LIST/CVE_ID/ID " ]
vuln . elements [ " CVE_ID_LIST/CVE_ID/ID " ] . each do | ref |
next unless ref
next unless ref . to_s [ / ^C..-[0-9 \ -]{9} / ]
vuln_refs [ qid ] << ref . to_s . gsub ( / ^C.. / , " CVE " )
end
end
if vuln . elements [ " BUGTRAQ_ID_LIST/BUGTRAQ_ID/ID " ]
vuln . elements [ " BUGTRAQ_ID_LIST/BUGTRAQ_ID/ID " ] . each do | ref |
next unless ref
next unless ref . to_s [ / ^[0-9]{1,9} / ]
vuln_refs [ qid ] << " BID- #{ ref } "
end
end
end
return vuln_refs
end
# Pull out vulnerabilities that have at least one matching
# ref -- many "vulns" are not vulns, just audit information.
def find_qualys_asset_vulns ( host , wspace , hobj , vuln_refs , & block )
host . elements . each ( " VULN_INFO_LIST/VULN_INFO " ) do | vi |
next unless vi . elements [ " QID " ]
vi . elements . each ( " QID " ) do | qid |
next if vuln_refs [ qid . text ] . nil? || vuln_refs [ qid . text ] . empty?
2011-06-15 00:51:51 +00:00
handle_qualys ( wspace , hobj , nil , nil , qid . text , nil , vuln_refs [ qid . text ] , nil )
2011-05-12 20:08:33 +00:00
end
end
end
# Takes QID numbers and finds the discovered services in
2011-06-15 00:51:51 +00:00
# a qualys_asset_xml.
2011-05-12 20:08:33 +00:00
def find_qualys_asset_ports ( i , host , wspace , hobj )
return unless ( i == 82023 || i == 82004 )
proto = i == 82023 ? 'tcp' : 'udp'
qid = host . elements [ " VULN_INFO_LIST/VULN_INFO/QID[@id='qid_ #{ i } '] " ]
qid_result = qid . parent . elements [ " RESULT[@format='table'] " ] if qid
hports = qid_result . first . to_s if qid_result
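# Each RESULT row is tab-separated, roughly
# "port<TAB>iana name<TAB>description<TAB>detected service" (hypothetical
# layout); the detected service is preferred, falling back to the IANA
# name when it is missing or 'unknown'.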
if hports
hports . scan ( / ([0-9]+) \ t(.*?) \ t.*? \ t([^ \ t \ n]*) / ) do | match |
if match [ 2 ] == nil or match [ 2 ] . strip == 'unknown'
name = match [ 1 ] . strip
else
name = match [ 2 ] . strip
end
handle_qualys ( wspace , hobj , match [ 0 ] . to_s , proto , 0 , nil , nil , name )
end
end
end
#
# Import Qualys' Asset Data Report format
2011-06-15 00:51:51 +00:00
#
2011-05-12 20:08:33 +00:00
def import_qualys_asset_xml ( args = { } , & block )
data = args [ :data ]
wspace = args [ :wspace ] || workspace
bl = validate_ips ( args [ :blacklist ] ) ? args [ :blacklist ] . split : [ ]
doc = rexmlify ( data )
vuln_refs = find_qualys_asset_vuln_refs ( doc )
# 2nd pass, actually grab the hosts.
doc . elements . each ( " /ASSET_DATA_REPORT/HOST_LIST/HOST " ) do | host |
hobj = nil
addr = host . elements [ " IP " ] . text if host . elements [ " IP " ]
next unless validate_ips ( addr )
if bl . include? addr
next
else
yield ( :address , addr ) if block
end
hname = ( # Prefer NetBIOS over DNS
( host . elements [ " NETBIOS " ] . text if host . elements [ " NETBIOS " ] ) ||
( host . elements [ " DNS " ] . text if host . elements [ " DNS " ] ) ||
2011-06-15 00:51:51 +00:00
" " )
2011-05-12 20:08:33 +00:00
hobj = report_host ( :workspace = > wspace , :host = > addr , :name = > hname , :state = > Msf :: HostState :: Alive )
report_import_note ( wspace , hobj )
if host . elements [ " OPERATING_SYSTEM " ]
hos = host . elements [ " OPERATING_SYSTEM " ] . text
report_note (
:workspace = > wspace ,
:host = > hobj ,
:type = > 'host.os.qualys_fingerprint' ,
:data = > { :os = > hos }
)
end
# Report open ports.
find_qualys_asset_ports ( 82023 , host , wspace , hobj ) # TCP
find_qualys_asset_ports ( 82004 , host , wspace , hobj ) # UDP
# Report vulns
find_qualys_asset_vulns ( host , wspace , hobj , vuln_refs , & block )
end # host
end
2010-03-28 23:02:28 +00:00
#
2011-05-12 20:08:33 +00:00
# Import Qualys' scan XML output
2010-03-28 23:02:28 +00:00
#
2011-05-12 20:08:33 +00:00
def import_qualys_scan_xml_file ( args = { } )
2010-06-04 14:57:58 +00:00
filename = args [ :filename ]
wspace = args [ :wspace ] || workspace
2010-11-04 21:44:16 +00:00
data = " "
:: File . open ( filename , 'rb' ) do | f |
data = f . read ( f . stat . size )
end
2011-05-12 20:08:33 +00:00
import_qualys_scan_xml ( args . merge ( :data = > data ) )
2010-03-28 23:02:28 +00:00
end
2011-05-12 20:08:33 +00:00
def import_qualys_scan_xml ( args = { } , & block )
2010-06-04 14:57:58 +00:00
data = args [ :data ]
wspace = args [ :wspace ] || workspace
bl = validate_ips ( args [ :blacklist ] ) ? args [ :blacklist ] . split : [ ]
2010-03-28 23:02:28 +00:00
2010-04-26 20:25:42 +00:00
doc = rexmlify ( data )
2010-03-28 23:02:28 +00:00
doc . elements . each ( '/SCAN/IP' ) do | host |
2011-04-22 20:16:55 +00:00
hobj = nil
2010-03-28 23:02:28 +00:00
addr = host . attributes [ 'value' ]
2010-06-04 14:57:58 +00:00
if bl . include? addr
next
else
2010-06-08 19:16:20 +00:00
yield ( :address , addr ) if block
2010-06-04 14:57:58 +00:00
end
2010-03-28 23:02:28 +00:00
hname = host . attributes [ 'name' ] || ''
2011-04-22 20:16:55 +00:00
hobj = report_host ( :workspace = > wspace , :host = > addr , :name = > hname , :state = > Msf :: HostState :: Alive )
report_import_note ( wspace , hobj )
2010-03-28 23:02:28 +00:00
if host . elements [ " OS " ]
hos = host . elements [ " OS " ] . text
report_note (
:workspace = > wspace ,
2011-04-22 20:16:55 +00:00
:host = > hobj ,
2010-03-28 23:02:28 +00:00
:type = > 'host.os.qualys_fingerprint' ,
:data = > {
:os = > hos
}
)
end
# Open TCP Services List (Qualys ID 82023)
services_tcp = host . elements [ " SERVICES/CAT/SERVICE[@number='82023']/RESULT " ]
if services_tcp
services_tcp . text . scan ( / ([0-9]+) \ t(.*?) \ t.*? \ t([^ \ t \ n]*) / ) do | match |
if match [ 2 ] == nil or match [ 2 ] . strip == 'unknown'
name = match [ 1 ] . strip
else
name = match [ 2 ] . strip
end
2011-04-22 20:16:55 +00:00
handle_qualys ( wspace , hobj , match [ 0 ] . to_s , 'tcp' , 0 , nil , nil , name )
2010-03-28 23:02:28 +00:00
end
end
# Open UDP Services List (Qualys ID 82004)
services_udp = host . elements [ " SERVICES/CAT/SERVICE[@number='82004']/RESULT " ]
if services_udp
services_udp . text . scan ( / ([0-9]+) \ t(.*?) \ t.*? \ t([^ \ t \ n]*) / ) do | match |
if match [ 2 ] == nil or match [ 2 ] . strip == 'unknown'
name = match [ 1 ] . strip
else
name = match [ 2 ] . strip
end
2011-04-22 20:16:55 +00:00
handle_qualys ( wspace , hobj , match [ 0 ] . to_s , 'udp' , 0 , nil , nil , name )
2010-03-28 23:02:28 +00:00
end
end
# VULNS are confirmed, PRACTICES are unconfirmed vulnerabilities
host . elements . each ( 'VULNS/CAT | PRACTICES/CAT' ) do | cat |
port = cat . attributes [ 'port' ]
protocol = cat . attributes [ 'protocol' ]
cat . elements . each ( 'VULN | PRACTICE' ) do | vuln |
refs = [ ]
qid = vuln . attributes [ 'number' ]
severity = vuln . attributes [ 'severity' ]
vuln . elements . each ( 'VENDOR_REFERENCE_LIST/VENDOR_REFERENCE' ) do | ref |
refs . push ( ref . elements [ 'ID' ] . text . to_s )
end
vuln . elements . each ( 'CVE_ID_LIST/CVE_ID' ) do | ref |
refs . push ( 'CVE-' + / C..-([0-9 \ -]{9}) / . match ( ref . elements [ 'ID' ] . text . to_s ) [ 1 ] )
end
vuln . elements . each ( 'BUGTRAQ_ID_LIST/BUGTRAQ_ID' ) do | ref |
refs . push ( 'BID-' + ref . elements [ 'ID' ] . text . to_s )
end
2011-04-22 20:16:55 +00:00
handle_qualys ( wspace , hobj , port , protocol , qid , severity , refs )
2010-03-28 23:02:28 +00:00
end
end
end
end
2010-06-04 14:57:58 +00:00
def import_ip_list_file ( args = { } )
filename = args [ :filename ]
wspace = args [ :wspace ] || workspace
2010-11-04 21:44:16 +00:00
data = " "
:: File . open ( filename , 'rb' ) do | f |
data = f . read ( f . stat . size )
end
2010-06-04 14:57:58 +00:00
import_ip_list ( args . merge ( :data = > data ) )
2010-03-03 00:34:16 +00:00
end
2010-03-08 14:17:34 +00:00
2010-06-08 19:16:20 +00:00
def import_ip_list ( args = { } , & block )
2010-06-04 14:57:58 +00:00
data = args [ :data ]
wspace = args [ :wspace ] || workspace
bl = validate_ips ( args [ :blacklist ] ) ? args [ :blacklist ] . split : [ ]
2010-09-03 15:04:09 +00:00
data . each_line do | ip |
ip . strip!
if bl . include? ip
2010-06-04 14:57:58 +00:00
next
else
2010-09-03 15:04:09 +00:00
yield ( :address , ip ) if block
2010-06-04 14:57:58 +00:00
end
2010-09-03 15:04:09 +00:00
host = find_or_create_host ( :workspace = > wspace , :host = > ip , :state = > Msf :: HostState :: Alive )
2010-03-03 00:34:16 +00:00
end
end
2010-06-04 14:57:58 +00:00
def import_amap_log_file ( args = { } )
filename = args [ :filename ]
wspace = args [ :wspace ] || workspace
2010-11-04 21:44:16 +00:00
data = " "
:: File . open ( filename , 'rb' ) do | f |
data = f . read ( f . stat . size )
end
2011-06-15 00:51:51 +00:00
2010-06-08 22:14:25 +00:00
case import_filetype_detect ( data )
when :amap_log
import_amap_log ( args . merge ( :data = > data ) )
when :amap_mlog
import_amap_mlog ( args . merge ( :data = > data ) )
else
raise DBImportError . new ( " Could not determine file type " )
end
end
def import_amap_log ( args = { } , & block )
data = args [ :data ]
wspace = args [ :wspace ] || workspace
bl = validate_ips ( args [ :blacklist ] ) ? args [ :blacklist ] . split : [ ]
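# Human-readable amap log lines look roughly like (hypothetical host):
#   Protocol on 192.0.2.5:22/tcp matches ssh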
data . each_line do | line |
next if line =~ / ^ # /
next if line !~ / ^Protocol on ([^:]+):([^ \ x5c \ x2f]+)[ \ x5c \ x2f](tcp|udp) matches (.*)$ /
addr = $1
next if bl . include? addr
port = $2 . to_i
proto = $3 . downcase
name = $4
host = find_or_create_host ( :workspace = > wspace , :host = > addr , :state = > Msf :: HostState :: Alive )
next if not host
yield ( :address , addr ) if block
info = {
:workspace = > wspace ,
:host = > host ,
:proto = > proto ,
:port = > port
}
if name != " unidentified "
info [ :name ] = name
end
service = find_or_create_service ( info )
end
2010-01-07 19:06:29 +00:00
end
2010-03-28 23:02:28 +00:00
2010-06-08 19:16:20 +00:00
def import_amap_mlog ( args = { } , & block )
2010-06-04 14:57:58 +00:00
data = args [ :data ]
wspace = args [ :wspace ] || workspace
bl = validate_ips ( args [ :blacklist ] ) ? args [ :blacklist ] . split : [ ]
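# Machine-readable (-m) amap lines are colon-separated, roughly
#   192.0.2.5:22:tcp:open::ssh:...
# (hypothetical); only entries with an "open" status are recorded.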
2010-01-07 19:06:29 +00:00
data . each_line do | line |
next if line =~ / ^ # /
r = line . split ( ':' )
next if r . length < 6
addr = r [ 0 ]
2010-06-08 22:14:25 +00:00
next if bl . include? addr
2010-01-07 19:06:29 +00:00
port = r [ 1 ] . to_i
proto = r [ 2 ] . downcase
status = r [ 3 ]
name = r [ 5 ]
next if status != " open "
2010-02-18 06:40:38 +00:00
host = find_or_create_host ( :workspace = > wspace , :host = > addr , :state = > Msf :: HostState :: Alive )
2010-01-07 19:06:29 +00:00
next if not host
2010-06-08 22:14:25 +00:00
yield ( :address , addr ) if block
2010-01-07 19:06:29 +00:00
info = {
2010-02-18 06:40:38 +00:00
:workspace = > wspace ,
2010-01-14 15:26:20 +00:00
:host = > host ,
:proto = > proto ,
2010-01-07 19:06:29 +00:00
:port = > port
}
if name != " unidentified "
info [ :name ] = name
end
service = find_or_create_service ( info )
end
end
2011-07-28 21:17:30 +00:00
def import_ci_noko_stream ( args , & block )
if block
doc = Rex :: Parser :: CIDocument . new ( args , framework . db ) { | type , data | yield type , data }
else
doc = Rex :: Parser :: CIDocument . new ( args , self )
end
parser = :: Nokogiri :: XML :: SAX :: Parser . new ( doc )
parser . parse ( args [ :data ] )
end
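# Deserializes the text of a REXML element: base64-encoded Marshal data is
# unmarshalled, YAML is loaded when allow_yaml is set, and anything else is
# returned as the raw string. Hypothetical usage:
#   unserialize_object(element, true)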
2010-09-28 17:52:48 +00:00
def unserialize_object ( xml_elem , allow_yaml = false )
2011-04-30 04:33:52 +00:00
return nil unless xml_elem
2010-09-28 17:52:48 +00:00
string = xml_elem . text . to_s . strip
2010-07-06 16:33:27 +00:00
return string unless string . is_a? ( String )
2011-04-30 04:33:52 +00:00
return nil if ( string . empty? || string . nil? )
2010-07-06 19:33:27 +00:00
2010-07-06 16:33:27 +00:00
begin
2010-07-06 18:35:49 +00:00
# Validate that it is properly formed base64 first
if string . gsub ( / \ s+ / , '' ) =~ / ^([a-z0-9A-Z \ + \/ =]+)$ /
Marshal . load ( $1 . unpack ( " m " ) [ 0 ] )
else
2010-07-06 19:33:27 +00:00
if allow_yaml
2010-09-28 17:52:48 +00:00
begin
YAML . load ( string )
rescue
dlog ( " Badly formatted YAML: ' #{ string } ' " )
string
end
2010-07-06 19:33:27 +00:00
else
string
end
2010-07-06 18:35:49 +00:00
end
2010-07-06 16:33:27 +00:00
rescue :: Exception = > e
2010-07-06 18:17:14 +00:00
if allow_yaml
YAML . load ( string ) rescue string
else
string
end
2010-07-06 16:33:27 +00:00
end
end
2011-04-07 21:59:32 +00:00
#
# Returns something suitable for the +:host+ parameter to the various report_* methods
#
# Takes a Host object, a Session object, an Msf::Session object or a String
2011-06-15 00:51:51 +00:00
# address
2011-04-07 21:59:32 +00:00
#
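# A hypothetical example: normalize_host("192.0.2.1:4444") returns
# "192.0.2.1", while passing a live Msf::Session returns the address of
# its target (or tunnel peer) instead.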
2011-01-30 19:11:27 +00:00
def normalize_host ( host )
2011-04-07 21:59:32 +00:00
return host if host . kind_of? Host
norm_host = nil
2011-06-15 00:51:51 +00:00
2011-04-07 21:59:32 +00:00
if ( host . kind_of? String )
# If it's an IPv4 addr with a host on the end, strip the port
if host =~ / (( \ d{1,3} \ .){3} \ d{1,3}): \ d+ /
norm_host = $1
else
norm_host = host
end
elsif host . kind_of? Session
norm_host = host . host
elsif host . respond_to? ( :target_host )
# Then it's an Msf::Session object with a target but target_host
# won't be set in some cases, so try tunnel_peer as well
2011-01-30 19:11:27 +00:00
thost = host . target_host
2011-04-07 21:59:32 +00:00
if host . tunnel_peer and ( ! thost or thost . empty? )
# tunnel_peer is of the form ip:port, so strip off the port to
# get the addr by itself
thost = host . tunnel_peer . split ( " : " ) [ 0 ]
2011-01-30 19:11:27 +00:00
end
2011-04-07 21:59:32 +00:00
norm_host = thost
end
# If we got here and don't have a norm_host yet, it could be a
# Msf::Session object with an empty or nil tunnel_host and tunnel_peer;
# see if it has a socket and use its peerhost if so.
if (
norm_host . nil? and
host . respond_to? ( :sock ) and
host . sock . respond_to? ( :peerhost ) and
host . sock . peerhost . to_s . length > 0
)
norm_host = host . sock . peerhost
end
# If We got here and still don't have a real host, there's nothing left
# to try, just log it and return what we were given
if not norm_host
dlog ( " Host could not be normalized: #{ host . inspect } " )
norm_host = host
2011-01-30 19:11:27 +00:00
end
2011-04-07 21:59:32 +00:00
norm_host
2011-01-30 19:11:27 +00:00
end
2011-06-15 00:51:51 +00:00
# A way to sneak the yield back into the db importer.
2011-05-31 16:22:42 +00:00
# Used by the SAX parsers.
def emit ( sym , data , & block )
2011-06-15 00:51:51 +00:00
yield ( sym , data )
2011-05-31 16:22:42 +00:00
end
# Debug logger
def xxx ( desc , thing )
$stderr . puts " **** #{ desc } **** "
$stderr . puts thing . inspect
end
2010-01-07 19:06:29 +00:00
protected
2010-01-14 15:26:20 +00:00
2010-01-07 19:06:29 +00:00
#
# This holds all of the shared parsing/handling used by the
2010-01-14 12:57:26 +00:00
# Nessus NBE and Nessus v1 import methods
2010-01-07 19:06:29 +00:00
#
2011-05-31 16:22:42 +00:00
def handle_nessus ( wspace , hobj , port , nasl , plugin_name , severity , data )
2011-04-22 20:16:55 +00:00
addr = hobj . address
2010-01-07 19:06:29 +00:00
# The port section looks like:
# http (80/tcp)
p = port . match ( / ^([^ \ (]+) \ (( \ d+) \/ ([^ \ )]+) \ ) / )
return if not p
2011-04-01 01:01:11 +00:00
# Unnecessary as the caller should already have reported this host
#report_host(:workspace => wspace, :host => addr, :state => Msf::HostState::Alive)
2010-01-07 19:06:29 +00:00
name = p [ 1 ] . strip
2010-01-20 00:35:44 +00:00
port = p [ 2 ] . to_i
proto = p [ 3 ] . downcase
2011-04-22 20:16:55 +00:00
info = { :workspace = > wspace , :host = > hobj , :port = > port , :proto = > proto }
2010-01-14 15:26:20 +00:00
if name != " unknown " and name [ - 1 , 1 ] != " ? "
2010-01-07 19:06:29 +00:00
info [ :name ] = name
end
2010-01-20 00:35:44 +00:00
report_service ( info )
2011-05-13 17:00:03 +00:00
if nasl . nil? || nasl . empty? || nasl == 0 || nasl == " 0 "
return
end
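# The free-form plugin data is scanned for reference lines that look
# roughly like (hypothetical values):
#   CVE : CVE-2001-0001, CAN-2001-0002
#   BID : 12345
#   Other references : OSVDB:1234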
2010-01-07 19:06:29 +00:00
data . gsub! ( " \\ n " , " \n " )
refs = [ ]
if ( data =~ / ^CVE : (.*)$ / )
$1 . gsub ( / C(VE|AN) \ - / , '' ) . split ( ',' ) . map { | r | r . strip } . each do | r |
2010-01-14 15:26:20 +00:00
refs . push ( 'CVE-' + r )
2010-01-07 19:06:29 +00:00
end
end
if ( data =~ / ^BID : (.*)$ / )
$1 . split ( ',' ) . map { | r | r . strip } . each do | r |
refs . push ( 'BID-' + r )
end
end
if ( data =~ / ^Other references : (.*)$ / )
$1 . split ( ',' ) . map { | r | r . strip } . each do | r |
ref_id , ref_val = r . split ( ':' )
ref_val ? refs . push ( ref_id + '-' + ref_val ) : refs . push ( ref_id )
end
end
2011-05-02 05:20:07 +00:00
nss = 'NSS-' + nasl . to_s . strip
2011-05-17 22:18:43 +00:00
refs << nss
2011-05-13 17:00:03 +00:00
2011-05-31 16:22:42 +00:00
if plugin_name . to_s . strip . empty?
vuln_name = nss
else
vuln_name = plugin_name
end
2010-05-12 22:23:40 +00:00
vuln_info = {
2010-02-18 06:40:38 +00:00
:workspace = > wspace ,
2011-04-22 20:16:55 +00:00
:host = > hobj ,
2010-01-20 00:35:44 +00:00
:port = > port ,
:proto = > proto ,
2011-05-31 16:22:42 +00:00
:name = > vuln_name ,
2010-05-12 22:23:40 +00:00
:info = > data ,
2010-05-03 01:49:00 +00:00
:refs = > refs
2010-05-12 22:23:40 +00:00
}
report_vuln ( vuln_info )
2010-01-07 19:06:29 +00:00
end
2010-01-14 12:57:26 +00:00
#
# NESSUS v2 file format has a dramatically different layout
# for ReportItem data
#
2011-05-17 22:18:43 +00:00
def handle_nessus_v2 ( wspace , hobj , port , proto , name , nasl , nasl_name , severity , description , cve , bid , xref , msf )
2011-04-22 20:16:55 +00:00
addr = hobj . address
2010-01-14 15:26:20 +00:00
2011-04-22 20:16:55 +00:00
info = { :workspace = > wspace , :host = > hobj , :port = > port , :proto = > proto }
2011-04-28 18:52:35 +00:00
unless name =~ / ^unknown$| \ ?$ /
2010-01-14 12:57:26 +00:00
info [ :name ] = name
end
2010-01-14 15:26:20 +00:00
2010-05-03 01:49:00 +00:00
if port . to_i != 0
report_service ( info )
end
2010-01-14 15:26:20 +00:00
2011-05-13 17:00:03 +00:00
if nasl . nil? || nasl . empty? || nasl == 0 || nasl == " 0 "
return
end
2010-01-14 15:26:20 +00:00
2010-01-14 12:57:26 +00:00
refs = [ ]
2010-01-14 15:26:20 +00:00
2010-10-14 18:54:35 +00:00
cve . each do | r |
2010-01-14 12:57:26 +00:00
r . to_s . gsub! ( / C(VE|AN) \ - / , '' )
refs . push ( 'CVE-' + r . to_s )
end if cve
2010-01-14 15:26:20 +00:00
2010-10-14 18:54:35 +00:00
bid . each do | r |
2010-01-14 12:57:26 +00:00
refs . push ( 'BID-' + r . to_s )
end if bid
2010-01-14 15:26:20 +00:00
2010-10-14 18:54:35 +00:00
xref . each do | r |
2010-01-14 12:57:26 +00:00
ref_id , ref_val = r . to_s . split ( ':' )
ref_val ? refs . push ( ref_id + '-' + ref_val ) : refs . push ( ref_id )
end if xref
2011-06-15 00:51:51 +00:00
2010-10-14 18:54:35 +00:00
msfref = " MSF- " << msf if msf
refs . push msfref if msfref
2011-06-15 00:51:51 +00:00
2010-01-14 12:57:26 +00:00
nss = 'NSS-' + nasl
2011-05-17 22:18:43 +00:00
if nasl_name . nil? || nasl_name . empty?
vuln_name = nss
else
vuln_name = nasl_name
end
2010-01-14 15:26:20 +00:00
2011-05-17 22:18:43 +00:00
refs << nss . strip
2011-05-13 17:00:03 +00:00
2010-05-03 01:49:00 +00:00
vuln = {
2010-02-18 06:40:38 +00:00
:workspace = > wspace ,
2011-04-22 20:16:55 +00:00
:host = > hobj ,
2011-05-17 22:18:43 +00:00
:name = > vuln_name ,
2010-10-14 18:54:35 +00:00
:info = > description ? description : " " ,
2010-05-03 01:49:00 +00:00
:refs = > refs
}
if port . to_i != 0
vuln [ :port ] = port
vuln [ :proto ] = proto
end
report_vuln ( vuln )
2010-01-14 12:57:26 +00:00
end
2010-11-04 21:44:16 +00:00
#
2011-06-15 00:51:51 +00:00
# IP360 v3 service
2010-11-04 21:44:16 +00:00
#
2011-04-22 20:16:55 +00:00
def handle_ip360_v3_svc ( wspace , hobj , port , proto , hname )
addr = hobj . address
report_host ( :workspace = > wspace , :host = > hobj , :state = > Msf :: HostState :: Alive )
2010-11-04 21:44:16 +00:00
2011-04-22 20:16:55 +00:00
info = { :workspace = > wspace , :host = > hobj , :port = > port , :proto = > proto }
2010-11-04 21:44:16 +00:00
if hname != " unknown " and hname [ - 1 , 1 ] != " ? "
info [ :name ] = hname
end
if port . to_i != 0
report_service ( info )
end
end #handle_ip360_v3_svc
#
2011-06-15 00:51:51 +00:00
# IP360 v3 vuln
2010-11-04 21:44:16 +00:00
#
2011-04-22 20:16:55 +00:00
def handle_ip360_v3_vuln ( wspace , hobj , port , proto , hname , vulnid , vulnname , cves , bids )
info = { :workspace = > wspace , :host = > hobj , :port = > port , :proto = > proto }
2010-11-04 21:44:16 +00:00
if hname != " unknown " and hname [ - 1 , 1 ] != " ? "
info [ :name ] = hname
end
if port . to_i != 0
report_service ( info )
end
refs = [ ]
cves . split ( / , / ) . each do | cve |
refs . push ( cve . to_s )
end if cves
bids . split ( / , / ) . each do | bid |
refs . push ( 'BID-' + bid . to_s )
end if bids
description = nil # not working yet
vuln = {
:workspace = > wspace ,
2011-04-22 20:16:55 +00:00
:host = > hobj ,
2010-11-04 21:44:16 +00:00
:name = > vulnname ,
:info = > description ? description : " " ,
:refs = > refs
}
if port . to_i != 0
vuln [ :port ] = port
vuln [ :proto ] = proto
end
report_vuln ( vuln )
end #handle_ip360_v3_vuln
2010-10-14 18:54:35 +00:00
2010-03-28 23:02:28 +00:00
#
# Qualys report parsing/handling
#
2011-04-22 20:16:55 +00:00
def handle_qualys ( wspace , hobj , port , protocol , qid , severity , refs , name = nil )
addr = hobj . address
2011-05-12 20:08:33 +00:00
port = port . to_i if port
2010-03-28 23:02:28 +00:00
2011-04-22 20:16:55 +00:00
info = { :workspace = > wspace , :host = > hobj , :port = > port , :proto = > protocol }
2011-06-02 20:56:42 +00:00
if name and name != 'unknown' and name != 'No registered hostname'
2010-03-28 23:02:28 +00:00
info [ :name ] = name
end
2010-07-07 14:53:16 +00:00
if info [ :host ] && info [ :port ] && info [ :proto ]
report_service ( info )
end
2010-03-28 23:02:28 +00:00
2011-06-01 20:14:25 +00:00
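# Normalize Microsoft bulletin references: a bare "MS08-067" (hypothetical)
# becomes "MSB-MS08-067" to match the framework's reference naming.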
fixed_refs = [ ]
if refs
refs . each do | ref |
case ref
when / ^MS[0-9]{2}-[0-9]{3} /
fixed_refs << " MSB- #{ ref } "
else
fixed_refs << ref
end
end
end
2010-03-28 23:02:28 +00:00
return if qid == 0
2010-07-07 14:53:16 +00:00
if addr
report_vuln (
:workspace = > wspace ,
2011-04-22 20:16:55 +00:00
:host = > hobj ,
2010-07-07 14:53:16 +00:00
:port = > port ,
:proto = > protocol ,
:name = > 'QUALYS-' + qid ,
2011-06-01 20:14:25 +00:00
:refs = > fixed_refs
2010-07-07 14:53:16 +00:00
)
end
2010-03-28 23:02:28 +00:00
end
2010-01-07 19:06:29 +00:00
def process_nexpose_data_sxml_refs ( vuln )
refs = [ ]
vid = vuln . attributes [ 'id' ] . to_s . downcase
vry = vuln . attributes [ 'resultCode' ] . to_s . upcase
# Only process vuln-exploitable and vuln-version statuses
return if vry !~ / ^V[VE]$ /
refs = [ ]
vuln . elements . each ( 'id' ) do | ref |
rtyp = ref . attributes [ 'type' ] . to_s . upcase
rval = ref . text . to_s . strip
case rtyp
when 'CVE'
refs << rval . gsub ( 'CAN' , 'CVE' )
when 'MS' # obsolete?
refs << " MSB-MS- #{ rval } "
else
refs << " #{ rtyp } - #{ rval } "
end
end
refs << " NEXPOSE- #{ vid } "
refs
end
2006-03-21 04:37:48 +00:00
end
2008-10-23 04:23:54 +00:00
end
2009-12-13 05:24:48 +00:00