2010-01-14 20:29:55 +00:00
|
|
|
require 'rex/parser/nmap_xml'
|
2010-05-02 19:16:52 +00:00
|
|
|
require 'rex/parser/nexpose_xml'
|
2010-10-07 02:33:57 +00:00
|
|
|
require 'rex/parser/retina_xml'
|
2010-10-07 06:24:26 +00:00
|
|
|
require 'rex/parser/netsparker_xml'
|
2010-10-14 18:54:35 +00:00
|
|
|
require 'rex/parser/nessus_xml'
|
2010-06-04 14:57:58 +00:00
|
|
|
require 'rex/socket'
|
2010-06-11 21:21:59 +00:00
|
|
|
require 'zip'
|
2010-10-07 06:24:26 +00:00
|
|
|
require 'uri'
|
2010-06-11 21:21:59 +00:00
|
|
|
require 'tmpdir'
|
|
|
|
require 'fileutils'
|
2010-06-04 14:57:58 +00:00
|
|
|
|
2006-03-21 04:37:48 +00:00
|
|
|
module Msf
|
|
|
|
|
|
|
|
###
#
# Symbolic liveness states that can be recorded for a host.
#
###
module HostState
  # The host responded and is considered up.
  Alive   = "alive"

  # The host is confirmed unreachable.
  Dead    = "down"

  # Liveness has not (yet) been determined.
  Unknown = "unknown"
end
|
|
|
|
|
|
|
|
###
#
# Symbolic states that a discovered network service can be in.
#
###
module ServiceState
  # Service accepted a connection / responded.
  Open     = "open"

  # Port actively refused the connection.
  Closed   = "closed"

  # No response; likely blocked by a packet filter.
  Filtered = "filtered"

  # State could not be determined.
  Unknown  = "unknown"
end
|
|
|
|
|
2007-02-25 21:25:41 +00:00
|
|
|
###
#
# Mixin of no-op callbacks fired for events in the host/service database.
# Include this module and override the hooks you care about; every default
# implementation intentionally does nothing and returns nil.
#
###
module DatabaseEvent

  # Fired when an existing host changes state; +ostate+ is the old state.
  def on_db_host_state(host, ostate)
  end

  # Fired when an existing service on +host+/+port+ changes state;
  # +ostate+ is the old state.
  def on_db_service_state(host, port, ostate)
  end

  # Fired when a new host is added to the database. +host+ is a Host.
  def on_db_host(host)
  end

  # Fired when a new client is added to the database. +client+ is a Client.
  def on_db_client(client)
  end

  # Fired when a new service is added to the database. +service+ is a
  # Service.
  def on_db_service(service)
  end

  # Fired when an applicable vulnerability is found for a service.
  # +vuln+ is a Vuln.
  def on_db_vuln(vuln)
  end

  # Fired when a new reference is created.
  def on_db_ref(ref)
  end

end
|
2006-03-21 04:37:48 +00:00
|
|
|
|
2010-01-07 21:30:14 +00:00
|
|
|
# Raised when an external scan/import file cannot be parsed or processed.
class DBImportError < RuntimeError
end
|
|
|
|
|
2006-03-21 04:37:48 +00:00
|
|
|
###
|
|
|
|
#
|
|
|
|
# The DB module ActiveRecord definitions for the DBManager
|
|
|
|
#
|
|
|
|
###
|
|
|
|
class DBManager
|
|
|
|
|
2010-05-12 18:10:37 +00:00
|
|
|
def ipv4_validator(addr)
|
|
|
|
return false unless addr.kind_of? String
|
|
|
|
addr =~ /^(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$/
|
|
|
|
end
|
2010-07-06 16:10:05 +00:00
|
|
|
|
2010-06-04 14:57:58 +00:00
|
|
|
# Takes a space-delimited set of ips and ranges, and subjects
# them to Rex::Socket::RangeWalker for validation. Returns true only when
# every token parses into at least one range; any parse failure or
# exception yields false.
def validate_ips(ips)
  ips.split(' ').each do |ip|
    return false unless Rex::Socket::RangeWalker.new(ip).ranges
  end
  true
rescue
  false
end
|
|
|
|
|
2010-05-12 18:10:37 +00:00
|
|
|
|
2006-09-16 06:45:06 +00:00
|
|
|
#
# Determines if the database is functional
#
# Performs a trivial read against the hosts table; raises if the
# connection or schema is unusable.
#
def check
  res = Host.find(:first)
end
|
|
|
|
|
2009-12-13 05:24:48 +00:00
|
|
|
|
2009-12-29 23:48:45 +00:00
|
|
|
# Returns the default Workspace record.
def default_workspace
  Workspace.default
end
|
2009-12-13 05:24:48 +00:00
|
|
|
|
2009-12-29 23:48:45 +00:00
|
|
|
# Looks up a workspace by name; returns nil when no such workspace exists.
def find_workspace(name)
  Workspace.find_by_name(name)
end
|
|
|
|
|
2008-07-22 07:28:05 +00:00
|
|
|
#
# Creates a new workspace in the database
#
# Idempotent: returns the existing workspace if the name is already taken.
#
def add_workspace(name)
  Workspace.find_or_create_by_name(name)
end
|
|
|
|
|
|
|
|
# Returns all workspaces in the database.
def workspaces
  Workspace.find(:all)
end
|
|
|
|
|
2010-03-08 14:17:34 +00:00
|
|
|
#
# Wait for all pending write to finish
#
# Works by queueing an empty Proc on the write queue and blocking until it
# runs -- once it does, every previously queued write has completed.
#
def sync
  task = queue( Proc.new { } )
  task.wait
end
|
2009-12-29 23:48:45 +00:00
|
|
|
|
2010-01-02 23:14:37 +00:00
|
|
|
#
# Find a host.  Performs no database writes.
#
# +opts+ may be a Host (returned unchanged) or a hash containing one of
# :addr / :address / :host (which may itself be a Host object) plus an
# optional :workspace (defaults to the current workspace). Returns the
# matching Host or nil. NOTE: :workspace is delete()d from opts.
#
def get_host(opts)
  if opts.kind_of? Host
    return opts
  elsif opts.kind_of? String
    # The legacy bare-string calling convention is deliberately rejected
    # so stale callers are flushed out with a backtrace.
    raise RuntimeError, "This invokation of get_host is no longer supported: #{caller}"
  else
    # Bail out (returning nil) when no address key is present at all.
    address = opts[:addr] || opts[:address] || opts[:host] || return
    return address if address.kind_of? Host
  end
  wspace = opts.delete(:workspace) || workspace
  host = wspace.hosts.find_by_address(address)
  return host
end
|
2008-07-22 07:28:05 +00:00
|
|
|
|
2010-01-02 23:14:37 +00:00
|
|
|
#
# Exactly like report_host but waits for the database to create a host and returns it.
#
def find_or_create_host(opts)
  report_host(opts.merge({:wait => true}))
end
|
|
|
|
|
2006-03-21 04:37:48 +00:00
|
|
|
#
# Report a host's attributes such as operating system and service pack
#
# The opts parameter MUST contain
#	:host       -- the host's ip address
#
# The opts parameter can contain:
#	:state      -- one of the Msf::HostState constants
#	:os_name    -- one of the Msf::OperatingSystems constants
#	:os_flavor  -- something like "XP" or "Gentoo"
#	:os_sp      -- something like "SP2"
#	:os_lang    -- something like "English", "French", or "en-US"
#	:arch       -- one of the ARCH_* constants
#	:mac        -- the host's MAC address
#	:wait       -- when true, block until the write completes and return
#	               the Host instead of the queued task
#	:workspace  -- the workspace to report into (defaults to current)
#
def report_host(opts)
  return if not active
  addr = opts.delete(:host) || return

  # Ensure the host field updated_at is changed on each report_host()
  if addr.kind_of? Host
    # NOTE(review): updated_at is assigned from created_at before save --
    # presumably to force a touch/dirty record; confirm against the Host
    # model's timestamp handling before changing.
    queue( Proc.new { addr.updated_at = addr.created_at; addr.save! } )
    return addr
  end

  wait = opts.delete(:wait)
  wspace = opts.delete(:workspace) || workspace

  # Normalize the legacy :host_mac key to :mac.
  if opts[:host_mac]
    opts[:mac] = opts.delete(:host_mac)
  end

  unless ipv4_validator(addr)
    raise ::ArgumentError, "Invalid IP address in report_host(): #{addr}"
  end

  ret = {}
  # All writes happen inside the queued Proc so they serialize with other
  # database writers.
  task = queue( Proc.new {
    if opts[:comm] and opts[:comm].length > 0
      host = wspace.hosts.find_or_initialize_by_address_and_comm(addr, opts[:comm])
    else
      host = wspace.hosts.find_or_initialize_by_address(addr)
    end

    # Copy every recognized attribute; log and skip anything unknown.
    opts.each { |k,v|
      if (host.attribute_names.include?(k.to_s))
        host[k] = v
      else
        dlog("Unknown attribute for Host: #{k}")
      end
    }
    # Truncate info to the column's declared limit to avoid DB errors.
    host.info = host.info[0,Host.columns_hash["info"].limit] if host.info

    # Set default fields if needed
    host.state = HostState::Alive if not host.state
    host.comm = '' if not host.comm
    host.workspace = wspace if not host.workspace

    # Always save the host, helps track updates
    msf_import_timestamps(opts,host)
    host.save!

    ret[:host] = host
  } )
  if wait
    return nil if task.wait != :done
    return ret[:host]
  end
  return task
end
|
2006-03-21 04:37:48 +00:00
|
|
|
|
|
|
|
#
# Iterates over the hosts table calling the supplied block with the host
# instance of each entry.
#
def each_host(wspace=workspace, &block)
  wspace.hosts.each(&block)
end
|
|
|
|
|
|
|
|
#
# Returns a list of all hosts in the database
#
# only_up restricts results to Alive/Unknown states; addresses (an array)
# filters by address. Results are ordered by address.
#
def hosts(wspace = workspace, only_up = false, addresses = nil)
  conditions = {}
  conditions[:state] = [Msf::HostState::Alive, Msf::HostState::Unknown] if only_up
  conditions[:address] = addresses if addresses
  wspace.hosts.all(:conditions => conditions, :order => :address)
end
|
|
|
|
|
2009-12-29 23:48:45 +00:00
|
|
|
|
|
|
|
|
|
|
|
# Exactly like report_service, but blocks until the write completes and
# returns the Service record.
def find_or_create_service(opts)
  report_service(opts.merge({:wait => true}))
end
|
|
|
|
|
2010-01-02 23:14:37 +00:00
|
|
|
#
# Record a service in the database.
#
# opts must contain
#	:host  -- the host where this service is running
#	:port  -- the port where this service listens
#	:proto -- the transport layer protocol (e.g. tcp, udp)
#
# opts may contain
#	:name      -- the application layer protocol (e.g. ssh, mssql, smb)
#	:host_name -- hostname to report along with the host
#	:host_mac  -- MAC address to report along with the host
#	:wait      -- block until written and return the Service
#	:workspace -- workspace to report into (defaults to current)
#
def report_service(opts)
  return if not active
  addr = opts.delete(:host) || return
  hname = opts.delete(:host_name)
  hmac = opts.delete(:host_mac)

  wait = opts.delete(:wait)
  wspace = opts.delete(:workspace) || workspace

  # Make sure the owning host exists before the service is written.
  hopts = {:workspace => wspace, :host => addr}
  hopts[:name] = hname if hname
  hopts[:mac] = hmac if hmac
  report_host(hopts)

  ret = {}

  task = queue(Proc.new {
    host = get_host(:workspace => wspace, :address => addr)
    if host
      # Touch the host and mark it alive since a service implies liveness.
      host.updated_at = host.created_at
      host.state = HostState::Alive
      host.save!
    end

    proto = opts[:proto] || 'tcp'
    # Service names are normalized to lowercase.
    opts[:name].downcase! if (opts[:name])

    service = host.services.find_or_initialize_by_port_and_proto(opts[:port].to_i, proto)
    # Copy recognized attributes; log and skip unknown keys.
    opts.each { |k,v|
      if (service.attribute_names.include?(k.to_s))
        service[k] = v
      else
        dlog("Unknown attribute for Service: #{k}")
      end
    }
    if (service.state == nil)
      service.state = ServiceState::Open
    end
    if (service and service.changed?)
      msf_import_timestamps(opts,service)
      service.save!
    end
    ret[:service] = service
  })
  if wait
    return nil if task.wait() != :done
    return ret[:service]
  end
  return task
end
|
|
|
|
|
2010-02-18 06:40:38 +00:00
|
|
|
# Returns the Service for proto/port on the host at address +host+ within
# +wspace+, or nil when the host or service is not present. Read-only.
def get_service(wspace, host, proto, port)
  host = get_host(:workspace => wspace, :address => host)
  return if not host
  return host.services.find_by_proto_and_port(proto, port)
end
|
|
|
|
|
2006-03-21 04:37:48 +00:00
|
|
|
#
# Iterates over the services table calling the supplied block with the
# service instance of each entry.
#
def each_service(wspace=workspace, &block)
  services(wspace).each(&block)
end
|
2009-12-13 05:24:48 +00:00
|
|
|
|
2006-03-21 04:37:48 +00:00
|
|
|
#
# Returns a list of all services in the database
#
# Optional filters: only_up (state Open only), proto, addresses (matched
# against the joined hosts table), ports, and names. Ordered by host
# address then port.
#
def services(wspace = workspace, only_up = false, proto = nil, addresses = nil, ports = nil, names = nil)
  conditions = {}
  conditions[:state] = [ServiceState::Open] if only_up
  conditions[:proto] = proto if proto
  conditions["hosts.address"] = addresses if addresses
  conditions[:port] = ports if ports
  conditions[:name] = names if names
  wspace.services.all(:include => :host, :conditions => conditions, :order => "hosts.address, port")
end
|
2006-04-02 22:33:34 +00:00
|
|
|
|
2009-12-29 23:48:45 +00:00
|
|
|
|
|
|
|
# Finds the Client on opts[:host] whose ua_string matches
# opts[:ua_string]; returns nil when the host or client does not exist.
# NOTE: :workspace is delete()d from opts.
def get_client(opts)
  wspace = opts.delete(:workspace) || workspace
  host = get_host(:workspace => wspace, :host => opts[:host]) || return
  client = host.clients.find(:first, :conditions => {:ua_string => opts[:ua_string]})
  return client
end
|
|
|
|
|
|
|
|
# Exactly like report_client, but blocks until the write completes and
# returns the Client record.
def find_or_create_client(opts)
  report_client(opts.merge({:wait => true}))
end
|
|
|
|
|
|
|
|
#
# Report a client running on a host.
#
# opts must contain
#	:ua_string  -- the value of the User-Agent header
#	:host       -- the host where this client connected from, can be an ip address or a Host object
#
# opts can contain
#	:ua_name    -- one of the Msf::HttpClients constants
#	:ua_ver     -- detected version of the given client
#	:campaign   -- an id or Campaign object
#	:wait       -- block until written and return the Client
#	:workspace  -- workspace to report into (defaults to current)
#
# Returns a Client.
#
def report_client(opts)
  return if not active
  addr = opts.delete(:host) || return
  wspace = opts.delete(:workspace) || workspace
  # Make sure the originating host exists first.
  report_host(:workspace => wspace, :host => addr)
  wait = opts.delete(:wait)

  ret = {}
  task = queue(Proc.new {
    host = get_host(:workspace => wspace, :host => addr)
    client = host.clients.find_or_initialize_by_ua_string(opts[:ua_string])

    # Accept either a Campaign object or a raw id for :campaign.
    campaign = opts.delete(:campaign)
    if campaign
      case campaign
      when Campaign
        opts[:campaign_id] = campaign.id
      else
        opts[:campaign_id] = campaign
      end
    end

    # Copy recognized attributes; log and skip unknown keys.
    opts.each { |k,v|
      if (client.attribute_names.include?(k.to_s))
        client[k] = v
      else
        dlog("Unknown attribute for Client: #{k}")
      end
    }
    if (client and client.changed?)
      client.save!
    end
    ret[:client] = client
  })
  if wait
    return nil if task.wait() != :done
    return ret[:client]
  end
  return task
end
|
|
|
|
|
2006-04-02 22:33:34 +00:00
|
|
|
#
# This method iterates the vulns table calling the supplied block with the
# vuln instance of each entry.
#
def each_vuln(wspace=workspace,&block)
  wspace.vulns.each(&block)
end
|
2009-12-13 05:24:48 +00:00
|
|
|
|
2006-04-02 22:33:34 +00:00
|
|
|
#
# This methods returns a list of all vulnerabilities in the database
#
def vulns(wspace=workspace)
  wspace.vulns
end
|
2008-03-02 04:46:13 +00:00
|
|
|
|
2010-08-18 00:58:20 +00:00
|
|
|
#
# This methods returns a list of all credentials in the database
#
# Joins creds -> services -> hosts so results can be scoped to the
# given workspace's hosts.
#
def creds(wspace=workspace)
  Cred.find(
    :all,
    :include => {:service => :host}, # That's some magic right there.
    :conditions => ["hosts.workspace_id = ?", wspace.id]
  )
end
|
|
|
|
|
2010-08-24 21:57:04 +00:00
|
|
|
#
# This method returns a list of all exploited hosts in the database.
#
def exploited_hosts(wspace=workspace)
  wspace.exploited_hosts
end
|
|
|
|
|
2008-03-02 04:46:13 +00:00
|
|
|
#
# This method iterates the notes table calling the supplied block with the
# note instance of each entry.
#
def each_note(wspace=workspace, &block)
  wspace.notes.each(&block)
end
|
2009-06-23 03:49:25 +00:00
|
|
|
|
|
|
|
#
# Find or create a note matching this type/data
#
# Blocks until the write completes and returns the Note record.
#
def find_or_create_note(opts)
  report_note(opts.merge({:wait => true}))
end
|
|
|
|
|
2010-09-19 22:25:56 +00:00
|
|
|
#
# Report a Note to the database.  Notes can be tied to a Workspace, Host, or Service.
#
# opts MUST contain
#	:data  -- whatever it is you're making a note of
#	:type  -- The type of note, e.g. smb_peer_os
#
# opts can contain
#	:workspace  -- the workspace to associate with this Note
#	:host       -- an IP address or a Host object to associate with this Note
#	:service    -- a Service object to associate with this Note
#	:port       -- along with :host and proto, a service to associate with this Note
#	:proto      -- along with :host and port, a service to associate with this Note
#	:update     -- what to do in case a similar Note exists, see below
#
# The :update option can have the following values:
#	:unique       -- allow only a single Note per +host+/+type+ pair
#	:unique_data  -- like :unique, but also compare +data+
#	:insert       -- always insert a new Note even if one with identical values exists
#
# If the provided :host is an IP address and does not exist in the
# database, it will be created.  If :workspace, :host and :service are all
# omitted, the new Note will be associated with the current workspace.
#
def report_note(opts)
  return if not active
  wait = opts.delete(:wait)
  wspace = opts.delete(:workspace) || workspace
  seen = opts.delete(:seen) || false
  crit = opts.delete(:critical) || false
  host = nil
  addr = nil
  # Report the host so it's there for the Proc to use below
  if opts[:host]
    if opts[:host].kind_of? Host
      host = opts[:host]
    else
      report_host({:workspace => wspace, :host => opts[:host]})
      addr = opts[:host]
    end
    # Do the same for a service if that's also included.
    if (opts[:port])
      proto = nil
      case opts[:proto].to_s.downcase # Catch incorrect usages
      when 'tcp','udp'
        proto = opts[:proto]
      when 'dns','snmp','dhcp'
        # These are service names, not transports; map them to udp and
        # keep the original value as the service name.
        proto = 'udp'
        sname = opts[:proto]
      else
        proto = 'tcp'
        sname = opts[:proto]
      end
      sopts = {
        :workspace => wspace,
        :host => opts[:host],
        :port => opts[:port],
        :proto => proto
      }
      sopts[:name] = sname if sname
      report_service(sopts)
    end
  end
  # Update Modes can be :unique, :unique_data, :insert
  mode = opts[:update] || :unique

  ret = {}
  task = queue(Proc.new {
    # Resolve the Host record now that report_host has had a chance to run.
    if addr and not host
      host = get_host(:workspace => wspace, :host => addr)
    end
    if host and (opts[:port] and opts[:proto])
      service = get_service(wspace, host, opts[:proto], opts[:port])
    elsif opts[:service] and opts[:service].kind_of? Service
      service = opts[:service]
    end

    if host
      # Touch the host and mark it alive.
      host.updated_at = host.created_at
      host.state = HostState::Alive
      host.save!
    end

    ntype = opts.delete(:type) || opts.delete(:ntype) || (raise RuntimeError, "A note :type or :ntype is required")
    data = opts[:data] || (raise RuntimeError, "Note :data is required")
    method = nil
    args = []
    note = nil

    # Notes are scoped by type plus (optionally) host and service.
    conditions = { :ntype => ntype }
    conditions[:host_id] = host[:id] if host
    conditions[:service_id] = service[:id] if service

    notes = wspace.notes.find(:all, :conditions => conditions)

    case mode
    when :unique
      # Only one note of this type should exist, make a new one if it
      # isn't there. If it is, grab it and overwrite its data.
      if notes.empty?
        note = wspace.notes.new(conditions)
      else
        note = notes[0]
      end
      note.data = data
    when :unique_data
      # Don't make a new Note with the same data as one that already
      # exists for the given: type and (host or service)
      notes.each do |n|
        # Compare the deserialized data from the table to the raw
        # data we're looking for.  Because of the serialization we
        # can't do this easily or reliably in SQL.
        if n.data == data
          note = n
          break
        end
      end
      if not note
        # We didn't find one with the data we're looking for, make
        # a new one.
        note = wspace.notes.new(conditions.merge(:data => data))
      end
    else
      # Otherwise, assume :insert, which means always make a new one
      note = wspace.notes.new
      if host
        note.host_id = host[:id]
      end
      if opts[:service] and opts[:service].kind_of? Service
        note.service_id = opts[:service][:id]
      end
      note.seen = seen
      note.critical = crit
      note.ntype = ntype
      note.data = data
    end
    msf_import_timestamps(opts,note)
    note.save!

    ret[:note] = note
  })
  if wait
    return nil if task.wait() != :done
    return ret[:note]
  end
  return task
end
|
|
|
|
|
2008-03-02 04:46:13 +00:00
|
|
|
#
# This methods returns a list of all notes in the database
#
def notes(wspace=workspace)
  wspace.notes
end
|
2009-12-13 05:24:48 +00:00
|
|
|
|
2010-08-18 00:58:20 +00:00
|
|
|
# report_auth_info used to create a note, now it creates
# an entry in the creds table. It's much more akin to
# report_vuln() now.
#
# opts must contain
#	:host  -- an IP address
#	:port  -- a port number
#
# opts can contain
#	:user   -- the username
#	:pass   -- the password, or path to ssh_key
#	:ptype  -- the type of password (password, hash, or ssh_key)
#	:proto  -- a transport name for the port
#	:sname  -- service name
#	:active -- by default, a cred is active, unless explicitly false
#	:proof  -- data used to prove the account is actually active.
#
# Sources: Credentials can be sourced from another credential, or from
# a vulnerability. For example, if an exploit was used to dump the
# smb_hashes, and this credential comes from there, the source_id would
# be the Vuln id (as reported by report_vuln) and the type would be "Vuln".
#
#	:source_id   -- The Vuln or Cred id of the source of this cred.
#	:source_type -- Either Vuln or Cred
#
# TODO: This is written somewhat host-centric, when really the
# Service is the thing. Need to revisit someday.
def report_auth_info(opts={})
  return if not active
  raise ArgumentError.new("Missing required option :host") if opts[:host].nil?
  raise ArgumentError.new("Invalid address for :host") unless validate_ips(opts[:host])
  raise ArgumentError.new("Missing required option :port") if opts[:port].nil?
  host = opts.delete(:host)
  # NOTE(review): the password type is read from :type here even though the
  # doc above says :ptype -- confirm which key callers actually pass.
  ptype = opts.delete(:type) || "password"
  token = [opts.delete(:user), opts.delete(:pass)]
  sname = opts.delete(:sname)
  port = opts.delete(:port)
  proto = opts.delete(:proto) || "tcp"
  proof = opts.delete(:proof)
  source_id = opts.delete(:source_id)
  source_type = opts.delete(:source_type)
  # Nil is true for active.
  active = (opts[:active] || opts[:active].nil?) ? true : false

  wait = opts.delete(:wait)
  wspace = opts.delete(:workspace) || workspace

  # Service management; assume the user knows what
  # he's talking about.
  unless service = get_service(wspace, host, proto, port)
    report_service(:host => host, :port => port, :proto => proto, :name => sname, :workspace => wspace)
  end

  ret = {}
  task = queue( Proc.new {

    # Get the service
    service ||= get_service(wspace, host, proto, port)

    # Create the cred by username only (so we can change passwords)
    cred = service.creds.find_or_initialize_by_user_and_ptype(token[0] || "", ptype)

    # Update with the password
    cred.pass = (token[1] || "")

    # Annotate the credential
    cred.ptype = ptype
    cred.active = active

    # Update the source ID only if there wasn't already one.
    if source_id and !cred.source_id
      cred.source_id = source_id
      cred.source_type = source_type if source_type
    end

    # Safe proof (lazy way) -- doesn't chop expanded
    # characters correctly, but shouldn't ever be a problem.
    unless proof.nil?
      proof = Rex::Text.to_hex_ascii(proof)
      proof = proof[0,4096]
    end
    cred.proof = proof

    # Update the timestamp
    if cred.changed?
      msf_import_timestamps(opts,cred)
      cred.save!
    end

    # Ensure the updated_at is touched any time report_auth_info is called
    # except when it's set explicitly (as it is for imports)
    unless opts[:updated_at] || opts["updated_at"]
      cred.updated_at = Time.now.utc
      cred.save!
    end

    ret[:cred] = cred
  })
  if wait
    return nil if task.wait() != :done
    return ret[:cred]
  end
  return task
end
|
|
|
|
|
2010-08-18 00:58:20 +00:00
|
|
|
alias :report_cred :report_auth_info
|
2009-12-29 23:48:45 +00:00
|
|
|
|
2010-08-18 00:58:20 +00:00
|
|
|
#
# Find or create a credential matching this type/data
#
# Blocks until the write completes and returns the Cred record.
#
def find_or_create_cred(opts)
  report_auth_info(opts.merge({:wait => true}))
end
|
2009-12-29 23:48:45 +00:00
|
|
|
|
2010-08-18 00:58:20 +00:00
|
|
|
#
# Iterates over the creds table for the given workspace, invoking the
# supplied block once with each credential record.
#
def each_cred(wspace=workspace,&block)
	wspace.creds.each { |cred| block.call(cred) }
end
|
2006-09-16 20:08:13 +00:00
|
|
|
|
2010-08-24 21:57:04 +00:00
|
|
|
# Iterates over the exploited_hosts table for the given workspace, invoking
# the supplied block once with each ExploitedHost record.
def each_exploited_host(wspace=workspace,&block)
	wspace.exploited_hosts.each { |exploited| block.call(exploited) }
end
|
|
|
|
|
2009-07-22 20:14:35 +00:00
|
|
|
#
# Find or create a vuln matching this service/name.
#
# Delegates to report_vuln in synchronous mode (:wait => true) so the Vuln
# record is returned directly instead of a queued task.
#
def find_or_create_vuln(opts)
	report_vuln(opts.merge(:wait => true))
end
|
|
|
|
|
2006-09-16 20:08:13 +00:00
|
|
|
#
# Records a vulnerability against a host (and optionally a service).
#
# opts must contain
#	:host  -- the host where this vulnerability resides
#	:name  -- the scanner-specific id of the vuln (e.g. NEXPOSE-cifs-acct-password-never-expires)
#
# opts can contain
#	:info  -- a human readable description of the vuln, free-form text
#	:refs  -- an array of Ref objects or string names of references
#	:port / :proto -- associate the vuln with a service on that port
#	:wait  -- block until the DB write finishes and return the Vuln record
#
# Returns the queued task, or the Vuln record when :wait is set (nil if the
# queued task did not complete). Returns early with nil when the database is
# inactive or :name is missing.
#
def report_vuln(opts)
	return if not active
	raise ArgumentError.new("Missing required option :host") if opts[:host].nil?
	raise ArgumentError.new("Deprecated data column for vuln, use .info instead") if opts[:data]
	# Bail out quietly (implicit nil) when no vuln name was supplied.
	name = opts[:name] || return
	info = opts[:info]
	wait = opts.delete(:wait)
	wspace = opts.delete(:workspace) || workspace
	rids = nil
	if opts[:refs]
		rids = []
		opts[:refs].each do |r|
			# Ref-like objects (e.g. module references) expose ctx_id/ctx_val;
			# normalize them to the "ID-VAL" string form used by Ref names.
			if r.respond_to? :ctx_id
				r = r.ctx_id + '-' + r.ctx_val
			end
			rids << find_or_create_ref(:name => r)
		end
	end

	host = nil
	addr = nil
	# Report the host up-front so the queued Proc below can look it up by address.
	if opts[:host].kind_of? Host
		host = opts[:host]
	else
		report_host({:workspace => wspace, :host => opts[:host]})
		addr = opts[:host]
	end

	ret = {}
	task = queue( Proc.new {
		if host
			# Preserve the original timestamp semantics: mark the host as not
			# manually updated (updated_at pinned to created_at) but alive.
			host.updated_at = host.created_at
			host.state      = HostState::Alive
			host.save!
		else
			host = get_host(:workspace => wspace, :address => addr)
		end

		# Dedup key includes :info only when it was provided.
		if info
			vuln = host.vulns.find_or_initialize_by_name_and_info(name, info, :include => :refs)
		else
			vuln = host.vulns.find_or_initialize_by_name(name, :include => :refs)
		end

		if opts[:port]
			proto = nil
			case opts[:proto].to_s.downcase # Catch incorrect usages, as in report_note
			when 'tcp','udp'
				proto = opts[:proto]
			when 'dns','snmp','dhcp'
				# Service-name-as-proto mistakes: these ride over UDP.
				proto = 'udp'
				sname = opts[:proto]
			else
				proto = 'tcp'
				sname = opts[:proto]
			end
			vuln.service = host.services.find_or_create_by_port_and_proto(opts[:port], proto)
		end

		# Append only the references not already associated with this vuln.
		if rids
			vuln.refs << (rids - vuln.refs)
		end

		if vuln.changed?
			msf_import_timestamps(opts,vuln)
			vuln.save!
		end
		ret[:vuln] = vuln
	})
	if wait
		return nil if task.wait() != :done
		return ret[:vuln]
	end
	return task
end
|
|
|
|
|
2010-02-18 06:40:38 +00:00
|
|
|
# Deprecated lookup of a vuln by host/service/name.
#
# NOTE: the unconditional raise below makes every call fail with the caller's
# backtrace; all code after it is intentionally unreachable (kept for
# reference). The `wspace` and `data` parameters are unused by the lookup.
def get_vuln(wspace, host, service, name, data='')
	raise RuntimeError, "Not workspace safe: #{caller.inspect}"
	# --- unreachable: original lookup logic retained below ---
	vuln = nil
	if (service)
		vuln = Vuln.find(:first, :conditions => [ "name = ? and service_id = ? and host_id = ?", name, service.id, host.id])
	else
		vuln = Vuln.find(:first, :conditions => [ "name = ? and host_id = ?", name, host.id])
	end

	return vuln
end
|
2006-09-16 20:08:13 +00:00
|
|
|
|
|
|
|
#
# Find or create a reference matching this name.
#
# Fast path: return an existing Ref without touching the work queue. Slow
# path: queue a find-or-initialize and wait for it synchronously. Returns
# the Ref record, or nil when the queued task does not finish cleanly.
#
def find_or_create_ref(opts)
	ret = {}
	ret[:ref] = get_ref(opts[:name])
	return ret[:ref] if ret[:ref]

	task = queue(Proc.new {
		ref = Ref.find_or_initialize_by_name(opts[:name])
		# Only hit the database when the record is new or modified.
		if ref and ref.changed?
			ref.save!
		end
		ret[:ref] = ref
	})
	# This method is always synchronous (no :wait option).
	return nil if task.wait() != :done
	return ret[:ref]
end
|
2009-12-29 23:48:45 +00:00
|
|
|
# Looks up a Ref record by its canonical reference name (e.g. "CVE-...").
# Returns the Ref or nil when no such reference exists.
def get_ref(name)
	return Ref.find_by_name(name)
end
|
2009-12-13 05:24:48 +00:00
|
|
|
|
2010-08-24 21:57:04 +00:00
|
|
|
# Records a successful exploitation event (ExploitedHost) for a host and,
# optionally, a service.
#
# opts must contain
#	:host -- a Host object or address string
# opts can contain
#	:service      -- a Service object to associate
#	:port/:proto/:sname -- service details used when :service is absent
#	:name         -- name of the exploit module
#	:payload      -- payload reference name used
#	:session_uuid -- UUID of the resulting session
#	:wait         -- block and return the ExploitedHost record
#
# Returns the queued task, or the ExploitedHost record when :wait is set.
def report_exploit(opts={})
	return if not active
	raise ArgumentError.new("Missing required option :host") if opts[:host].nil?
	# NOTE(review): :wait is read (not deleted) here, unlike sibling report_*
	# methods; harmless since opts is not forwarded wholesale.
	wait = opts[:wait]
	wspace = opts.delete(:workspace) || workspace
	host = nil
	addr = nil
	sname = opts.delete(:sname)
	port = opts.delete(:port)
	proto = opts.delete(:proto) || "tcp"
	name = opts.delete(:name)
	payload = opts.delete(:payload)
	session_uuid = opts.delete(:session_uuid)

	# Ensure the host exists so the queued Proc can resolve it by address.
	if opts[:host].kind_of? Host
		host = opts[:host]
	else
		report_host({:workspace => wspace, :host => opts[:host]})
		addr = opts[:host]
	end

	# Resolve the service: passed object, looked up by port, or none at all.
	if opts[:service].kind_of? Service
		service = opts[:service]
	elsif port
		report_service(:host => host, :port => port, :proto => proto, :name => sname)
		service = get_service(wspace, host, proto, port)
	else
		service = nil
	end

	ret = {}

	task = queue(
		Proc.new {
			if host
				# Pin updated_at to created_at and mark the host alive.
				host.updated_at = host.created_at
				host.state      = HostState::Alive
				host.save!
			else
				host = get_host(:workspace => wspace, :address => addr)
			end
			exploit_info = {
				:workspace => wspace,
				:host_id   => host.id,
				:name      => name,
				:payload   => payload,
			}
			exploit_info[:service_id] = service.id if service
			exploit_info[:session_uuid] = session_uuid if session_uuid
			exploit_record = ExploitedHost.create(exploit_info)
			exploit_record.save!

			ret[:exploit] = exploit_record
		}
	)

	if wait
		return nil if task.wait() != :done
		return ret[:exploit]
	end
	return task
end
|
|
|
|
|
2009-12-29 23:48:45 +00:00
|
|
|
|
2008-12-22 03:19:39 +00:00
|
|
|
#
# Deletes a host and associated data matching this address/comm.
# A no-op when no matching host exists in the workspace.
#
def del_host(wspace, address, comm='')
	doomed = wspace.hosts.find_by_address_and_comm(address, comm)
	doomed.destroy if doomed
end
|
2009-10-16 18:27:18 +00:00
|
|
|
|
|
|
|
#
# Deletes a port and associated vulns matching this port.
# Note: `comm` is accepted for interface compatibility but is not used
# in the lookup.
#
def del_service(wspace, address, proto, port, comm='')

	# Nothing to delete when the host itself is unknown.
	host = get_host(:workspace => wspace, :address => address)
	return if not host

	host.services.all(:conditions => {:proto => proto, :port => port}).each do |svc|
		svc.destroy
	end
end
|
2008-12-22 03:19:39 +00:00
|
|
|
|
2006-09-16 20:08:13 +00:00
|
|
|
#
# Find a reference matching this name.
# Returns the Ref record (truthy) or nil, so it doubles as a predicate.
#
def has_ref?(name)
	return Ref.find_by_name(name)
end
|
2006-09-17 00:39:23 +00:00
|
|
|
|
|
|
|
#
# Find a vulnerability matching this name.
# Returns the Vuln record (truthy) or nil, so it doubles as a predicate.
#
def has_vuln?(name)
	return Vuln.find_by_name(name)
end
|
2009-12-13 05:24:48 +00:00
|
|
|
|
2006-09-16 20:08:13 +00:00
|
|
|
#
# Look for an address across all comms.
# Returns the Host record (truthy) or nil, so it doubles as a predicate.
#
def has_host?(wspace,addr)
	return wspace.hosts.find_by_address(addr)
end
|
2006-09-17 00:39:23 +00:00
|
|
|
|
2010-03-11 19:38:19 +00:00
|
|
|
# Returns every Event record in the workspace, oldest first.
def events(wspace=workspace)
	wspace.events.find(:all, :order => 'created_at ASC')
end
|
2009-12-13 06:56:01 +00:00
|
|
|
|
2010-01-15 00:32:48 +00:00
|
|
|
# Records a framework Event in the database.
#
# opts can contain
#	:workspace -- workspace to attach the event to (defaults to current)
#	:username  -- user responsible for the event
#	:host      -- host (object or address) the event concerns
# Remaining opts keys are passed straight through to Event.create.
#
# Returns nil immediately when the database is inactive; the write itself
# happens asynchronously on the db queue.
def report_event(opts = {})
	return if not active
	wspace = opts.delete(:workspace) || workspace
	uname  = opts.delete(:username)

	# Ensure the host row exists before the queued Proc resolves it.
	if opts[:host]
		report_host(:workspace => wspace, :host => opts[:host])
	end
	framework.db.queue(Proc.new {
		# Swap the address/object in :host for the actual Host record.
		opts[:host] = get_host(:workspace => wspace, :host => opts[:host]) if opts[:host]
		Event.create(opts.merge(:workspace_id => wspace[:id], :username => uname))
	})
end
|
|
|
|
|
2010-02-22 23:45:43 +00:00
|
|
|
#
# Loot collection
#
#
# Iterates the loot table for the given workspace, invoking the supplied
# block once with each Loot record.
#
def each_loot(wspace=workspace, &block)
	wspace.loots.each { |loot| block.call(loot) }
end
|
|
|
|
|
|
|
|
#
# Find or create a loot entry matching this type/data.
#
# Delegates to report_loot in synchronous mode (:wait => true) so the Loot
# record is returned directly instead of a queued task.
#
def find_or_create_loot(opts)
	report_loot(opts.merge(:wait => true))
end
|
|
|
|
|
|
|
|
# Records a piece of collected loot (file/data) in the database.
#
# opts must contain
#	:path -- filesystem path where the loot contents are stored
#	:type or :ltype -- the loot type string
# opts can contain
#	:host, :service, :ctype/:content_type, :name, :info, :data, :wait
#
# Returns the queued task, or the Loot record when :wait is set.
def report_loot(opts)
	return if not active
	wait = opts.delete(:wait)
	wspace = opts.delete(:workspace) || workspace
	path = opts.delete(:path) || (raise RuntimeError, "A loot :path is required")

	host = nil
	addr = nil

	# Report the host so it's there for the Proc to use below
	if opts[:host]
		if opts[:host].kind_of? Host
			host = opts[:host]
		else
			report_host({:workspace => wspace, :host => opts[:host]})
			addr = opts[:host]
		end
	end

	ret = {}
	task = queue(Proc.new {

		if addr and not host
			host = get_host(:workspace => wspace, :host => addr)
		end

		# :type/:ltype is validated lazily inside the queued Proc, so a
		# missing type raises from the worker rather than the caller.
		ltype = opts.delete(:type) || opts.delete(:ltype) || (raise RuntimeError, "A loot :type or :ltype is required")
		ctype = opts.delete(:ctype) || opts.delete(:content_type) || 'text/plain'
		name = opts.delete(:name)
		info = opts.delete(:info)
		data = opts[:data]
		loot = wspace.loots.new

		if host
			loot.host_id = host[:id]
		end
		if opts[:service] and opts[:service].kind_of? Service
			loot.service_id = opts[:service][:id]
		end

		loot.path  = path
		loot.ltype = ltype
		loot.content_type = ctype
		loot.data  = data
		loot.name  = name if name
		loot.info  = info if info
		msf_import_timestamps(opts,loot)
		loot.save!

		# Live capture (no imported :created_at): refresh the host's
		# liveness, pinning updated_at back to created_at as elsewhere.
		if !opts[:created_at]
			if host
				host.updated_at = host.created_at
				host.state      = HostState::Alive
				host.save!
			end
		end

		ret[:loot] = loot
	})

	if wait
		return nil if task.wait() != :done
		return ret[:loot]
	end
	return task
end
|
|
|
|
|
|
|
|
#
# Returns the list of all loot entries in the given workspace.
#
def loots(wspace=workspace)
	return wspace.loots
end
|
|
|
|
|
2010-06-11 18:56:16 +00:00
|
|
|
#
# Find or create a task matching this type/data.
#
# Delegates to report_task in synchronous mode (:wait => true) so the Task
# record is returned directly instead of a queued task.
#
def find_or_create_task(opts)
	report_task(opts.merge(:wait => true))
end
|
|
|
|
|
|
|
|
# Records a background Task in the database.
#
# opts must contain
#	:path -- filesystem path associated with the task
# opts can contain
#	:user, :desc, :error, :info, :mod, :options, :prog, :result,
#	:completed_at, :wait, plus import timestamps (:created_at/:updated_at)
#
# Returns the queued db task, or the Task record when :wait is set.
def report_task(opts)
	return if not active
	wait = opts.delete(:wait)
	wspace = opts.delete(:workspace) || workspace
	path = opts.delete(:path) || (raise RuntimeError, "A task :path is required")

	ret = {}
	this_task = queue(Proc.new {

		user = opts.delete(:user)
		desc = opts.delete(:desc)
		error = opts.delete(:error)
		info = opts.delete(:info)
		mod = opts.delete(:mod)
		options = opts.delete(:options)
		prog = opts.delete(:prog)
		result = opts.delete(:result)
		completed_at = opts.delete(:completed_at)
		task = wspace.tasks.new

		task.created_by = user
		task.description = desc
		task.error = error if error
		task.info = info
		task.module = mod
		task.options = options
		task.path = path
		task.progress = prog
		task.result = result if result
		msf_import_timestamps(opts,task)
		# Having blank completed_ats, while accurate, will cause unstoppable tasks.
		# NOTE(review): calling .empty? assumes completed_at is a String (or
		# nil) — a Time value here would raise NoMethodError; confirm callers.
		if completed_at.nil? || completed_at.empty?
			task.completed_at = opts[:updated_at]
		else
			task.completed_at = completed_at
		end
		task.save!

		ret[:task] = task
	})

	if wait
		return nil if this_task.wait() != :done
		return ret[:task]
	end
	return this_task
end
|
|
|
|
|
|
|
|
#
# Returns the list of all tasks in the given workspace.
#
def tasks(wspace=workspace)
	return wspace.tasks
end
|
|
|
|
|
|
|
|
|
|
|
|
#
# Find or create a report matching this type/data.
#
# Delegates to report_report in synchronous mode (:wait => true) so the
# Report record is returned directly instead of a queued task.
#
def find_or_create_report(opts)
	report_report(opts.merge(:wait => true))
end
|
|
|
|
|
|
|
|
# Records a generated Report in the database.
#
# opts must contain
#	:path -- filesystem path of the generated report
# opts can contain
#	:user, :options, :rtype, :wait, plus import timestamps
#
# Returns the queued db task, or the Report record when :wait is set.
def report_report(opts)
	return if not active
	wait = opts.delete(:wait)
	wspace = opts.delete(:workspace) || workspace
	path = opts.delete(:path) || (raise RuntimeError, "A report :path is required")

	ret = {}
	this_task = queue(Proc.new {

		user = opts.delete(:user)
		options = opts.delete(:options)
		rtype = opts.delete(:rtype)
		report = wspace.reports.new

		report.created_by = user
		report.options = options
		report.rtype = rtype
		report.path = path
		msf_import_timestamps(opts,report)
		report.save!

		# Note: the result is stashed under :task (not :report); the return
		# below reads the same key, so this is consistent if oddly named.
		ret[:task] = report
	})

	if wait
		return nil if this_task.wait() != :done
		return ret[:task]
	end
	return this_task
end
|
|
|
|
|
|
|
|
#
# Returns the list of all reports in the given workspace.
#
def reports(wspace=workspace)
	return wspace.reports
end
|
2010-02-22 23:45:43 +00:00
|
|
|
|
2008-10-12 03:46:49 +00:00
|
|
|
#
|
|
|
|
# WMAP
|
|
|
|
# Support methods
|
|
|
|
#
|
2009-12-13 05:24:48 +00:00
|
|
|
|
2008-10-12 03:46:49 +00:00
|
|
|
#
# Report a Web Site to the database. WebSites must be tied to an existing Service
#
# opts MUST contain
#	:service* -- the service object this site should be associated with
#	:vhost    -- the virtual host name for this particular web site
#
# If service is NOT specified, the following values are mandatory
#	:host -- the ip address of the server hosting the web site
#	:port -- the port number of the associated web site
#	:ssl  -- whether or not SSL is in use on this port
#
# These values will be used to create new host and service records
#
# opts can contain
#	:options -- a hash of options for accessing this particular web site
#
# Duplicate records for a given host, port, vhost combination will be overwritten
#
def report_web_site(opts)
	return if not active
	wait = opts.delete(:wait)
	wspace = opts.delete(:workspace) || workspace
	vhost  = opts.delete(:vhost)

	addr = nil
	port = nil
	name = nil
	serv = nil

	if opts[:service] and opts[:service].kind_of?(Service)
		serv = opts[:service]
	else
		addr = opts[:host]
		port = opts[:port]
		name = opts[:ssl] ? 'https' : 'http'
		if not (addr and port)
			raise ArgumentError, "report_web_site requires service OR host/port/ssl"
		end
	end

	ret = {}
	task = queue(Proc.new {

		# Resolve (or create) the host backing this site.
		host = serv ? serv.host : find_or_create_host(
			:workspace => wspace,
			:host      => addr,
			:state     => Msf::HostState::Alive
		)

		# Use the vhost as the host name when no name is known yet.
		if host.name.to_s.empty?
			host.name = vhost
			host.save!
		end

		serv = serv ? serv : find_or_create_service(
			:workspace => wspace,
			:host      => host,
			:port      => port,
			:proto     => 'tcp',
			:state     => 'open'
		)

		# Change the service name if it is blank or it has
		# been explicitly specified.
		if opts.keys.include?(:ssl) or serv.name.to_s.empty?
			name = opts[:ssl] ? 'https' : 'http'
			serv.name = name
			serv.save!
		end

		# Refresh host liveness, pinning updated_at to created_at.
		host.updated_at = host.created_at
		host.state      = HostState::Alive
		host.save!

		# Fall back to the host address as the vhost when none was given.
		vhost ||= host.address

		site = WebSite.find_or_initialize_by_vhost_and_service_id(vhost, serv[:id])
		site.options = opts[:options] if opts[:options]

		# XXX:
		msf_import_timestamps(opts, site)
		site.save!

		ret[:web_site] = site
	})
	if wait
		return nil if task.wait() != :done
		return ret[:web_site]
	end
	return task
end
|
2009-12-13 05:24:48 +00:00
|
|
|
|
2010-10-06 18:00:21 +00:00
|
|
|
|
2008-10-12 03:46:49 +00:00
|
|
|
#
# Report a Web Page to the database. WebPage must be tied to an existing Web Site
#
# opts MUST contain
#	:web_site* -- the web site object that this page should be associated with
#	:path      -- the virtual host name for this particular web site
#	:code      -- the http status code from requesting this page
#	:headers   -- this is a HASH of headers (lowercase name as key) of ARRAYs of values
#	:body      -- the document body of the server response
#	:query     -- the query string after the path
#
# If web_site is NOT specified, the following values are mandatory
#	:host  -- the ip address of the server hosting the web site
#	:port  -- the port number of the associated web site
#	:vhost -- the virtual host for this particular web site
#	:ssl   -- whether or not SSL is in use on this port
#
# These values will be used to create new host, service, and web_site records
#
# opts can contain
#	:cookie   -- the Set-Cookie headers, merged into a string
#	:auth     -- the Authorization headers, merged into a string
#	:ctype    -- the Content-Type headers, merged into a string
#	:mtime    -- the timestamp returned from the server of the last modification time
#	:location -- the URL that a redirect points to
#
# Duplicate records for a given web_site, path, and query combination will be overwritten
#
def report_web_page(opts)
	return if not active
	wait = opts.delete(:wait)
	wspace = opts.delete(:workspace) || workspace

	path    = opts[:path]
	code    = opts[:code].to_i
	body    = opts[:body].to_s
	query   = opts[:query].to_s
	headers = opts[:headers]
	site    = nil

	if not (path and code and body and headers)
		raise ArgumentError, "report_web_page requires the path, query, code, body, and headers parameters"
	end

	if opts[:web_site] and opts[:web_site].kind_of?(WebSite)
		site = opts.delete(:web_site)
	else
		# No site object given: synchronously create/find one from host data.
		site = report_web_site(
			:workspace => wspace,
			:host => opts[:host], :port => opts[:port],
			:vhost => opts[:host], :ssl => opts[:ssl],
			:wait => true
		)
		if not site
			raise ArgumentError, "report_web_page was unable to create the associated web site"
		end
	end

	ret = {}
	task = queue(Proc.new {
		# (site_id, path, query) is the dedup key for web pages.
		page = WebPage.find_or_initialize_by_web_site_id_and_path_and_query(site[:id], path, query)
		page.code     = code
		page.body     = body
		page.headers  = headers
		page.cookie   = opts[:cookie] if opts[:cookie]
		page.auth     = opts[:auth] if opts[:auth]
		page.mtime    = opts[:mtime] if opts[:mtime]
		page.ctype    = opts[:ctype] if opts[:ctype]
		page.location = opts[:location] if opts[:location]
		msf_import_timestamps(opts, page)
		page.save!

		ret[:web_page] = page
	})
	if wait
		return nil if task.wait() != :done
		return ret[:web_page]
	end
	return task
end
|
2010-10-06 18:00:21 +00:00
|
|
|
|
|
|
|
|
2008-10-12 03:46:49 +00:00
|
|
|
#
# Report a Web Form to the database. WebForm must be tied to an existing Web Site
#
# opts MUST contain
#	:web_site* -- the web site object that this page should be associated with
#	:path      -- the virtual host name for this particular web site
#	:query     -- the query string that is appended to the path (not valid for GET)
#	:method    -- the form method, one of GET, POST, or PATH
#	:params    -- an ARRAY of all parameters and values specified in the form
#
# If web_site is NOT specified, the following values are mandatory
#	:host  -- the ip address of the server hosting the web site
#	:port  -- the port number of the associated web site
#	:vhost -- the virtual host for this particular web site
#	:ssl   -- whether or not SSL is in use on this port
#
# Duplicate records for a given web_site, path, method, and params combination will be overwritten
#
def report_web_form(opts)
	return if not active
	wait = opts.delete(:wait)
	wspace = opts.delete(:workspace) || workspace

	path    = opts[:path]
	meth    = opts[:method].to_s.upcase
	para    = opts[:params]
	quer    = opts[:query].to_s
	site    = nil

	if not (path and meth)
		raise ArgumentError, "report_web_form requires the path and method parameters"
	end

	if not %W{GET POST PATH}.include?(meth)
		raise ArgumentError, "report_web_form requires the method to be one of GET, POST, PATH"
	end

	if opts[:web_site] and opts[:web_site].kind_of?(WebSite)
		site = opts.delete(:web_site)
	else
		# No site object given: synchronously create/find one from host data.
		site = report_web_site(
			:workspace => wspace,
			:host => opts[:host], :port => opts[:port],
			:vhost => opts[:host], :ssl => opts[:ssl],
			:wait => true
		)
		if not site
			raise ArgumentError, "report_web_form was unable to create the associated web site"
		end
	end

	ret = {}
	task = queue(Proc.new {

		# Since one of our serialized fields is used as a unique parameter, we must do the final
		# comparisons through ruby and not SQL.

		form = nil
		WebForm.find_all_by_web_site_id_and_path_and_method_and_query(site[:id], path, meth, quer).each do |xform|
			if xform.params == para
				form = xform
				break
			end
		end

		if not form
			form = WebForm.new
			form.web_site_id = site[:id]
			form.path        = path
			form.method      = meth
			form.params      = para
			form.query       = quer
		end

		msf_import_timestamps(opts, form)
		form.save!

		ret[:web_form] = form
	})
	if wait
		return nil if task.wait() != :done
		return ret[:web_form]
	end
	return task
end
|
2009-12-13 05:24:48 +00:00
|
|
|
|
2010-10-06 18:00:21 +00:00
|
|
|
|
2008-12-04 03:42:43 +00:00
|
|
|
#
# Report a Web Vuln to the database. WebVuln must be tied to an existing Web Site
#
# opts MUST contain
#	:web_site* -- the web site object that this page should be associated with
#	:path      -- the virtual host name for this particular web site
#	:query     -- the query string appended to the path (not valid for GET method flaws)
#	:method    -- the form method, one of GET, POST, or PATH
#	:params    -- an ARRAY of all parameters and values specified in the form
#	:pname     -- the specific field where the vulnerability occurs
#	:proof     -- the string showing proof of the vulnerability
#	:risk      -- an INTEGER value from 0 to 5 indicating the risk (5 is highest)
#	:name      -- the string indicating the type of vulnerability
#
# If web_site is NOT specified, the following values are mandatory
#	:host  -- the ip address of the server hosting the web site
#	:port  -- the port number of the associated web site
#	:vhost -- the virtual host for this particular web site
#	:ssl   -- whether or not SSL is in use on this port
#
# Duplicate records for a given web_site, path, method, pname, and name combination will be overwritten
#
def report_web_vuln(opts)
	return if not active
	wait = opts.delete(:wait)
	wspace = opts.delete(:workspace) || workspace

	path    = opts[:path]
	meth    = opts[:method]
	para    = opts[:params] || []
	quer    = opts[:query].to_s
	pname   = opts[:pname]
	proof   = opts[:proof]
	risk    = opts[:risk].to_i
	name    = opts[:name].to_s.strip
	blame   = opts[:blame].to_s.strip
	desc    = opts[:description].to_s.strip
	conf    = opts[:confidence].to_i

	cat     = opts[:category].to_s.strip

	if not (path and meth and proof and pname)
		raise ArgumentError, "report_web_vuln requires the path, method, proof, risk, name, params, and pname parameters. Received #{opts.inspect}"
	end

	# NOTE(review): meth is checked here BEFORE the upcase performed inside
	# the queued Proc below, so a lowercase :method ("get") is rejected —
	# confirm whether callers always pass uppercase.
	if not %W{GET POST PATH}.include?(meth)
		raise ArgumentError, "report_web_vuln requires the method to be one of GET, POST, PATH. Received '#{meth}'"
	end

	if risk < 0 or risk > 5
		raise ArgumentError, "report_web_vuln requires the risk to be between 0 and 5 (inclusive). Received '#{risk}'"
	end

	# NOTE(review): the check accepts 0..100 but the message says 1..100 —
	# one of the two looks wrong; confirm intended lower bound.
	if conf < 0 or conf > 100
		raise ArgumentError, "report_web_vuln requires the confidence to be between 1 and 100 (inclusive). Received '#{conf}'"
	end

	if cat.empty?
		raise ArgumentError, "report_web_vuln requires the category to be a valid string"
	end

	if name.empty?
		raise ArgumentError, "report_web_vuln requires the name to be a valid string"
	end

	site = nil
	if opts[:web_site] and opts[:web_site].kind_of?(WebSite)
		site = opts.delete(:web_site)
	else
		# No site object given: synchronously create/find one from host data.
		site = report_web_site(
			:workspace => wspace,
			:host => opts[:host], :port => opts[:port],
			:vhost => opts[:host], :ssl => opts[:ssl],
			:wait => true
		)
		if not site
			raise ArgumentError, "report_web_form was unable to create the associated web site"
		end
	end

	ret = {}
	task = queue(Proc.new {

		meth = meth.to_s.upcase

		# (site_id, path, method, pname, category, query) is the dedup key.
		vuln = WebVuln.find_or_initialize_by_web_site_id_and_path_and_method_and_pname_and_category_and_query(site[:id], path, meth, pname, cat, quer)
		vuln.name     = name
		vuln.risk     = risk
		vuln.params   = para
		vuln.proof    = proof.to_s
		vuln.category = cat
		vuln.blame    = blame
		vuln.description = desc
		vuln.confidence  = conf
		msf_import_timestamps(opts, vuln)
		vuln.save!

		ret[:web_vuln] = vuln
	})
	if wait
		return nil if task.wait() != :done
		return ret[:web_vuln]
	end
	return task
end
|
2009-12-13 05:24:48 +00:00
|
|
|
|
2008-10-12 03:46:49 +00:00
|
|
|
#
# WMAP
# Selected host
#
# Returns the host of the currently-selected WMAP target, or nil when no
# target row is marked selected.
#
def selected_host
	target = WmapTarget.find(:first, :conditions => ["selected != 0"] )
	return target.host if target
	return
end
|
2009-12-13 05:24:48 +00:00
|
|
|
|
2008-10-12 03:46:49 +00:00
|
|
|
#
|
|
|
|
# WMAP
|
2010-10-06 18:00:21 +00:00
|
|
|
# Selected port
|
2008-10-12 03:46:49 +00:00
|
|
|
#
|
2010-10-06 18:00:21 +00:00
|
|
|
def selected_port
|
|
|
|
WmapTarget.find(:first, :conditions => ["selected != 0"] ).port
|
2008-10-12 03:46:49 +00:00
|
|
|
end
|
2009-12-13 05:24:48 +00:00
|
|
|
|
2008-10-12 03:46:49 +00:00
|
|
|
#
|
|
|
|
# WMAP
|
2010-10-06 18:00:21 +00:00
|
|
|
# Selected ssl
|
2008-10-12 03:46:49 +00:00
|
|
|
#
|
2010-10-06 18:00:21 +00:00
|
|
|
def selected_ssl
|
|
|
|
WmapTarget.find(:first, :conditions => ["selected != 0"] ).ssl
|
2008-10-12 03:46:49 +00:00
|
|
|
end
|
2006-09-17 00:39:23 +00:00
|
|
|
|
2008-10-12 03:46:49 +00:00
|
|
|
#
|
|
|
|
# WMAP
|
2010-10-06 18:00:21 +00:00
|
|
|
# Selected id
|
2008-10-12 03:46:49 +00:00
|
|
|
#
|
2010-10-06 18:00:21 +00:00
|
|
|
def selected_id
|
|
|
|
WmapTarget.find(:first, :conditions => ["selected != 0"] ).object_id
|
2008-10-12 03:46:49 +00:00
|
|
|
end
|
2009-12-13 05:24:48 +00:00
|
|
|
|
2008-10-12 03:46:49 +00:00
|
|
|
#
|
|
|
|
# WMAP
|
2010-10-06 18:00:21 +00:00
|
|
|
# This method iterates the requests table identifiying possible targets
|
|
|
|
# This method wiil be remove on second phase of db merging.
|
2008-10-12 03:46:49 +00:00
|
|
|
#
|
2010-10-06 18:00:21 +00:00
|
|
|
def each_distinct_target(&block)
|
|
|
|
request_distinct_targets.each do |target|
|
|
|
|
block.call(target)
|
|
|
|
end
|
2008-10-12 03:46:49 +00:00
|
|
|
end
|
2009-12-13 05:24:48 +00:00
|
|
|
|
2008-10-12 03:46:49 +00:00
|
|
|
#
|
|
|
|
# WMAP
|
2010-10-06 18:00:21 +00:00
|
|
|
# This method returns a list of all possible targets available in requests
|
|
|
|
# This method wiil be remove on second phase of db merging.
|
2008-10-12 03:46:49 +00:00
|
|
|
#
|
2010-10-06 18:00:21 +00:00
|
|
|
def request_distinct_targets
|
|
|
|
WmapRequest.find(:all, :select => 'DISTINCT host,address,port,ssl')
|
2008-10-12 03:46:49 +00:00
|
|
|
end
|
2009-12-13 05:24:48 +00:00
|
|
|
|
2008-11-30 22:41:09 +00:00
|
|
|
#
|
|
|
|
# WMAP
|
2010-10-06 18:00:21 +00:00
|
|
|
# This method iterates the requests table returning a list of all requests of a specific target
|
2008-11-30 22:41:09 +00:00
|
|
|
#
|
2010-10-06 18:00:21 +00:00
|
|
|
def each_request_target_with_path(&block)
|
|
|
|
target_requests('AND wmap_requests.path IS NOT NULL').each do |req|
|
|
|
|
block.call(req)
|
|
|
|
end
|
2008-11-30 22:41:09 +00:00
|
|
|
end
|
2009-12-13 05:24:48 +00:00
|
|
|
|
2009-05-28 03:26:27 +00:00
|
|
|
#
|
|
|
|
# WMAP
|
2010-10-06 18:00:21 +00:00
|
|
|
# This method iterates the requests table returning a list of all requests of a specific target
|
2009-05-28 03:26:27 +00:00
|
|
|
#
|
2010-10-06 18:00:21 +00:00
|
|
|
def each_request_target_with_query(&block)
|
|
|
|
target_requests('AND wmap_requests.query IS NOT NULL').each do |req|
|
|
|
|
block.call(req)
|
|
|
|
end
|
2009-05-28 03:26:27 +00:00
|
|
|
end
|
2009-12-13 05:24:48 +00:00
|
|
|
|
2010-10-06 18:00:21 +00:00
|
|
|
#
|
|
|
|
# WMAP
|
|
|
|
# This method iterates the requests table returning a list of all requests of a specific target
|
|
|
|
#
|
|
|
|
def each_request_target_with_body(&block)
|
|
|
|
target_requests('AND wmap_requests.body IS NOT NULL').each do |req|
|
|
|
|
block.call(req)
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
#
|
|
|
|
# WMAP
|
|
|
|
# This method iterates the requests table returning a list of all requests of a specific target
|
|
|
|
#
|
|
|
|
def each_request_target_with_headers(&block)
|
|
|
|
target_requests('AND wmap_requests.headers IS NOT NULL').each do |req|
|
|
|
|
block.call(req)
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
#
|
|
|
|
# WMAP
|
|
|
|
# This method iterates the requests table returning a list of all requests of a specific target
|
|
|
|
#
|
|
|
|
def each_request_target(&block)
|
|
|
|
target_requests('').each do |req|
|
|
|
|
block.call(req)
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
#
|
|
|
|
# WMAP
|
|
|
|
# This method returns a list of all requests from target
|
|
|
|
#
|
|
|
|
def target_requests(extra_condition)
|
|
|
|
WmapRequest.find(:all, :conditions => ["wmap_requests.host = ? AND wmap_requests.port = ? #{extra_condition}",selected_host,selected_port])
|
|
|
|
end
|
|
|
|
|
|
|
|
#
|
|
|
|
# WMAP
|
|
|
|
# This method iterates the requests table calling the supplied block with the
|
|
|
|
# request instance of each entry.
|
|
|
|
#
|
|
|
|
def each_request(&block)
|
|
|
|
requests.each do |request|
|
|
|
|
block.call(request)
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
#
|
|
|
|
# WMAP
|
|
|
|
# This method allows to query directly the requests table. To be used mainly by modules
|
|
|
|
#
|
|
|
|
def request_sql(host,port,extra_condition)
|
|
|
|
WmapRequest.find(:all, :conditions => ["wmap_requests.host = ? AND wmap_requests.port = ? #{extra_condition}",host,port])
|
|
|
|
end
|
|
|
|
|
|
|
|
#
|
|
|
|
# WMAP
|
|
|
|
# This methods returns a list of all targets in the database
|
|
|
|
#
|
|
|
|
def requests
|
|
|
|
WmapRequest.find(:all)
|
|
|
|
end
|
|
|
|
|
|
|
|
#
|
|
|
|
# WMAP
|
|
|
|
# This method iterates the targets table calling the supplied block with the
|
|
|
|
# target instance of each entry.
|
|
|
|
#
|
|
|
|
def each_target(&block)
|
|
|
|
targets.each do |target|
|
|
|
|
block.call(target)
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
#
|
|
|
|
# WMAP
|
|
|
|
# This methods returns a list of all targets in the database
|
|
|
|
#
|
|
|
|
def targets
|
|
|
|
WmapTarget.find(:all)
|
|
|
|
end
|
|
|
|
|
|
|
|
#
|
|
|
|
# WMAP
|
|
|
|
# This methods deletes all targets from targets table in the database
|
|
|
|
#
|
|
|
|
def delete_all_targets
|
|
|
|
WmapTarget.delete_all
|
|
|
|
end
|
|
|
|
|
|
|
|
#
|
|
|
|
# WMAP
|
|
|
|
# Find a target matching this id
|
|
|
|
#
|
|
|
|
def get_target(id)
|
|
|
|
target = WmapTarget.find(:first, :conditions => [ "id = ?", id])
|
|
|
|
return target
|
|
|
|
end
|
|
|
|
|
|
|
|
#
|
|
|
|
# WMAP
|
|
|
|
# Create a target
|
|
|
|
#
|
|
|
|
def create_target(host,port,ssl,sel)
|
|
|
|
tar = WmapTarget.create(
|
|
|
|
:host => host,
|
|
|
|
:address => host,
|
|
|
|
:port => port,
|
|
|
|
:ssl => ssl,
|
|
|
|
:selected => sel
|
|
|
|
)
|
|
|
|
#framework.events.on_db_target(rec)
|
|
|
|
end
|
|
|
|
|
|
|
|
|
|
|
|
#
|
|
|
|
# WMAP
|
|
|
|
# Create a request (by hand)
|
|
|
|
#
|
|
|
|
def create_request(host,port,ssl,meth,path,headers,query,body,respcode,resphead,response)
|
|
|
|
req = WmapRequest.create(
|
|
|
|
:host => host,
|
|
|
|
:address => host,
|
|
|
|
:port => port,
|
|
|
|
:ssl => ssl,
|
|
|
|
:meth => meth,
|
|
|
|
:path => path,
|
|
|
|
:headers => headers,
|
|
|
|
:query => query,
|
|
|
|
:body => body,
|
|
|
|
:respcode => respcode,
|
|
|
|
:resphead => resphead,
|
|
|
|
:response => response
|
|
|
|
)
|
|
|
|
#framework.events.on_db_request(rec)
|
|
|
|
end
|
|
|
|
|
|
|
|
#
|
|
|
|
# WMAP
|
|
|
|
# Quick way to query the database (used by wmap_sql)
|
|
|
|
#
|
|
|
|
def sql_query(sqlquery)
|
|
|
|
ActiveRecord::Base.connection.select_all(sqlquery)
|
|
|
|
end
|
|
|
|
|
|
|
|
|
|
|
|
# Returns a REXML::Document from the given data.
|
|
|
|
def rexmlify(data)
|
2010-10-19 22:51:37 +00:00
|
|
|
if data.kind_of?(REXML::Document)
|
|
|
|
return data
|
|
|
|
else
|
|
|
|
# Make an attempt to recover from a REXML import fail, since
|
|
|
|
# it's better than dying outright.
|
|
|
|
begin
|
|
|
|
return REXML::Document.new(data)
|
|
|
|
rescue REXML::ParseException => e
|
|
|
|
dlog("REXML error: Badly formatted XML, attempting to recover. Error was: #{e.inspect}")
|
|
|
|
return REXML::Document.new(data.gsub(/([\x00-\x08\x0b\x0c\x0e-\x19\x80-\xff])/){ |x| "\\x%.2x" % x.unpack("C*")[0] })
|
|
|
|
end
|
|
|
|
end
|
2010-06-04 14:57:58 +00:00
|
|
|
end
|
|
|
|
|
|
|
|
# Handles timestamps from Metasploit Express imports.
def msf_import_timestamps(opts,obj)
	obj.created_at = opts["created_at"] if opts["created_at"]
	obj.created_at = opts[:created_at] if opts[:created_at]
	obj.updated_at = opts["updated_at"] ? opts["updated_at"] : obj.created_at
	obj.updated_at = opts[:updated_at] ? opts[:updated_at] : obj.created_at
	return obj
end
|
2010-01-07 19:06:29 +00:00
|
|
|
|
|
|
|
##
|
|
|
|
#
|
|
|
|
# Import methods
|
|
|
|
#
|
|
|
|
##
|
|
|
|
|
|
|
|
#
|
|
|
|
# Generic importer that automatically determines the file type being
|
|
|
|
# imported. Since this looks for vendor-specific strings in the given
|
|
|
|
# file, there shouldn't be any false detections, but no guarantees.
|
|
|
|
#
|
2010-06-08 19:16:20 +00:00
|
|
|
def import_file(args={}, &block)
|
2010-06-04 14:57:58 +00:00
|
|
|
filename = args[:filename] || args['filename']
|
|
|
|
wspace = args[:wspace] || args['wspace'] || workspace
|
2010-05-05 19:45:48 +00:00
|
|
|
@import_filedata = {}
|
|
|
|
@import_filedata[:filename] = filename
|
2010-06-10 21:06:06 +00:00
|
|
|
|
2010-05-03 01:17:20 +00:00
|
|
|
f = File.open(filename, 'rb')
|
2010-01-07 19:06:29 +00:00
|
|
|
data = f.read(f.stat.size)
|
2010-06-10 21:06:06 +00:00
|
|
|
if data[0,4] == "PK\x03\x04"
|
2010-06-11 21:21:59 +00:00
|
|
|
data = Zip::ZipFile.open(filename)
|
2010-06-10 21:06:06 +00:00
|
|
|
end
|
2010-06-08 19:16:20 +00:00
|
|
|
if block
|
|
|
|
import(args.merge(:data => data)) { |type,data| yield type,data }
|
|
|
|
else
|
2010-07-06 16:10:05 +00:00
|
|
|
import(args.merge(:data => data))
|
2010-06-08 19:16:20 +00:00
|
|
|
end
|
2010-06-10 21:06:06 +00:00
|
|
|
|
2010-01-07 19:06:29 +00:00
|
|
|
end
|
2010-02-14 18:40:27 +00:00
|
|
|
|
2010-06-04 14:57:58 +00:00
|
|
|
# A dispatcher method that figures out the data's file type,
|
|
|
|
# and sends it off to the appropriate importer. Note that
|
|
|
|
# import_file_detect will raise an error if the filetype
|
|
|
|
# is unknown.
|
2010-06-08 19:16:20 +00:00
|
|
|
def import(args={}, &block)
|
2010-06-04 14:57:58 +00:00
|
|
|
data = args[:data] || args['data']
|
|
|
|
wspace = args[:wspace] || args['wspace'] || workspace
|
2010-06-10 21:06:06 +00:00
|
|
|
unless data.kind_of? Zip::ZipFile
|
|
|
|
di = data.index("\n")
|
|
|
|
raise DBImportError.new("Could not automatically determine file type") if not di
|
|
|
|
end
|
2010-06-04 14:57:58 +00:00
|
|
|
ftype = import_filetype_detect(data)
|
2010-06-08 19:16:20 +00:00
|
|
|
yield(:filetype, @import_filedata[:type]) if block
|
2010-10-06 21:04:16 +00:00
|
|
|
|
2010-06-08 19:16:20 +00:00
|
|
|
self.send "import_#{ftype}".to_sym, args, &block
|
2010-04-26 20:25:42 +00:00
|
|
|
end
|
|
|
|
|
2010-04-26 21:51:29 +00:00
|
|
|
|
2010-06-04 14:57:58 +00:00
|
|
|
# Returns one of: :nexpose_simplexml :nexpose_rawxml :nmap_xml :openvas_xml
|
2010-10-06 15:55:28 +00:00
|
|
|
# :nessus_xml :nessus_xml_v2 :qualys_xml :msf_xml :nessus_nbe :amap_mlog
|
|
|
|
# :amap_log :ip_list, :msf_zip
|
2010-06-04 14:57:58 +00:00
|
|
|
# If there is no match, an error is raised instead.
|
|
|
|
def import_filetype_detect(data)
|
2010-06-10 21:06:06 +00:00
|
|
|
if data.kind_of? Zip::ZipFile
|
2010-10-28 15:59:40 +00:00
|
|
|
raise DBImportError.new("The zip file provided is empty.") if data.entries.empty?
|
2010-06-10 21:06:06 +00:00
|
|
|
@import_filedata ||= {}
|
|
|
|
@import_filedata[:zip_filename] = File.split(data.to_s).last
|
|
|
|
@import_filedata[:zip_basename] = @import_filedata[:zip_filename].gsub(/\.zip$/,"")
|
|
|
|
@import_filedata[:zip_entry_names] = data.entries.map {|x| x.name}
|
|
|
|
@import_filedata[:zip_xml] = @import_filedata[:zip_entry_names].grep(/^(.*)_[0-9]+\.xml$/).first
|
|
|
|
@import_filedata[:zip_wspace] = $1
|
2010-10-06 15:55:28 +00:00
|
|
|
@import_filedata[:type] = "Metasploit ZIP Report"
|
2010-10-28 15:59:40 +00:00
|
|
|
if @import_filedata[:zip_xml]
|
|
|
|
return :msf_zip
|
|
|
|
else
|
|
|
|
raise DBImportError.new("The zip file provided is not a Metasploit ZIP report")
|
|
|
|
end
|
2010-06-10 21:06:06 +00:00
|
|
|
end
|
2010-02-14 18:40:27 +00:00
|
|
|
di = data.index("\n")
|
|
|
|
firstline = data[0, di]
|
2010-06-04 14:57:58 +00:00
|
|
|
@import_filedata ||= {}
|
2010-01-07 19:06:29 +00:00
|
|
|
if (firstline.index("<NeXposeSimpleXML"))
|
2010-07-06 16:10:05 +00:00
|
|
|
@import_filedata[:type] = "NeXpose Simple XML"
|
2010-06-04 14:57:58 +00:00
|
|
|
return :nexpose_simplexml
|
2010-04-07 20:51:05 +00:00
|
|
|
elsif (firstline.index("<NexposeReport"))
|
2010-07-06 16:10:05 +00:00
|
|
|
@import_filedata[:type] = "NeXpose XML Report"
|
2010-06-04 14:57:58 +00:00
|
|
|
return :nexpose_rawxml
|
2010-10-07 02:33:57 +00:00
|
|
|
elsif (firstline.index("<scanJob>"))
|
|
|
|
@import_filedata[:type] = "Retina XML"
|
|
|
|
return :retina_xml
|
2010-01-07 19:06:29 +00:00
|
|
|
elsif (firstline.index("<?xml"))
|
|
|
|
# it's xml, check for root tags we can handle
|
|
|
|
line_count = 0
|
|
|
|
data.each_line { |line|
|
2010-01-14 15:26:20 +00:00
|
|
|
line =~ /<([a-zA-Z0-9\-\_]+)[ >]/
|
2010-01-07 19:06:29 +00:00
|
|
|
case $1
|
|
|
|
when "nmaprun"
|
2010-07-06 16:10:05 +00:00
|
|
|
@import_filedata[:type] = "Nmap XML"
|
2010-06-04 14:57:58 +00:00
|
|
|
return :nmap_xml
|
2010-01-07 19:06:29 +00:00
|
|
|
when "openvas-report"
|
2010-07-06 16:10:05 +00:00
|
|
|
@import_filedata[:type] = "OpenVAS Report"
|
2010-06-04 14:57:58 +00:00
|
|
|
return :openvas_xml
|
2010-01-07 19:06:29 +00:00
|
|
|
when "NessusClientData"
|
2010-07-06 16:10:05 +00:00
|
|
|
@import_filedata[:type] = "Nessus XML (v1)"
|
2010-06-04 14:57:58 +00:00
|
|
|
return :nessus_xml
|
2010-01-14 15:26:20 +00:00
|
|
|
when "NessusClientData_v2"
|
2010-07-06 16:10:05 +00:00
|
|
|
@import_filedata[:type] = "Nessus XML (v2)"
|
2010-06-04 14:57:58 +00:00
|
|
|
return :nessus_xml_v2
|
2010-03-28 23:02:28 +00:00
|
|
|
when "SCAN"
|
2010-07-06 16:10:05 +00:00
|
|
|
@import_filedata[:type] = "Qualys XML"
|
2010-06-04 14:57:58 +00:00
|
|
|
return :qualys_xml
|
2010-10-06 18:10:39 +00:00
|
|
|
when /MetasploitExpressV[1234]/
|
2010-10-06 15:55:28 +00:00
|
|
|
@import_filedata[:type] = "Metasploit XML"
|
|
|
|
return :msf_xml
|
2010-10-06 18:10:39 +00:00
|
|
|
when /MetasploitV4/
|
|
|
|
@import_filedata[:type] = "Metasploit XML"
|
2010-10-06 20:24:26 +00:00
|
|
|
return :msf_xml
|
2010-10-07 06:24:26 +00:00
|
|
|
when /netsparker/
|
|
|
|
@import_filedata[:type] = "NetSparker XML"
|
|
|
|
return :netsparker_xml
|
2010-01-07 19:06:29 +00:00
|
|
|
else
|
|
|
|
# Give up if we haven't hit the root tag in the first few lines
|
|
|
|
break if line_count > 10
|
|
|
|
end
|
|
|
|
line_count += 1
|
|
|
|
}
|
|
|
|
elsif (firstline.index("timestamps|||scan_start"))
|
2010-07-06 16:10:05 +00:00
|
|
|
@import_filedata[:type] = "Nessus NBE Report"
|
2010-01-07 19:06:29 +00:00
|
|
|
# then it's a nessus nbe
|
2010-06-04 14:57:58 +00:00
|
|
|
return :nessus_nbe
|
2010-01-07 19:06:29 +00:00
|
|
|
elsif (firstline.index("# amap v"))
|
|
|
|
# then it's an amap mlog
|
2010-07-06 16:10:05 +00:00
|
|
|
@import_filedata[:type] = "Amap Log -m"
|
2010-06-04 14:57:58 +00:00
|
|
|
return :amap_mlog
|
2010-06-08 22:14:25 +00:00
|
|
|
elsif (firstline.index("amap v"))
|
|
|
|
# then it's an amap log
|
2010-07-06 16:10:05 +00:00
|
|
|
@import_filedata[:type] = "Amap Log"
|
2010-06-08 22:14:25 +00:00
|
|
|
return :amap_log
|
2010-03-28 23:07:52 +00:00
|
|
|
elsif (firstline =~ /^\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}/)
|
|
|
|
# then its an IP list
|
2010-07-06 16:10:05 +00:00
|
|
|
@import_filedata[:type] = "IP Address List"
|
2010-06-04 14:57:58 +00:00
|
|
|
return :ip_list
|
2010-10-07 06:24:26 +00:00
|
|
|
elsif (data[0,1024].index("<netsparker"))
|
|
|
|
@import_filedata[:type] = "NetSparker XML"
|
|
|
|
return :netsparker_xml
|
2010-01-07 19:06:29 +00:00
|
|
|
end
|
2010-10-07 06:24:26 +00:00
|
|
|
|
2010-01-07 21:30:14 +00:00
|
|
|
raise DBImportError.new("Could not automatically determine file type")
|
2010-01-07 19:06:29 +00:00
|
|
|
end
|
|
|
|
|
2010-06-30 15:28:49 +00:00
|
|
|
# Boils down the validate_import_file to a boolean
|
|
|
|
def validate_import_file(data)
|
|
|
|
begin
|
|
|
|
import_filetype_detect(data)
|
|
|
|
rescue DBImportError
|
|
|
|
return false
|
|
|
|
end
|
|
|
|
return true
|
|
|
|
end
|
|
|
|
|
2010-01-14 15:26:20 +00:00
|
|
|
#
|
2010-01-07 19:06:29 +00:00
|
|
|
# Nexpose Simple XML
|
|
|
|
#
|
|
|
|
# XXX At some point we'll want to make this a stream parser for dealing
|
|
|
|
# with large results files
|
|
|
|
#
|
2010-06-04 14:57:58 +00:00
|
|
|
def import_nexpose_simplexml_file(args={})
|
|
|
|
filename = args[:filename]
|
|
|
|
wspace = args[:wspace] || workspace
|
|
|
|
|
2010-05-03 01:17:20 +00:00
|
|
|
f = File.open(filename, 'rb')
|
2010-01-07 19:06:29 +00:00
|
|
|
data = f.read(f.stat.size)
|
2010-06-04 14:57:58 +00:00
|
|
|
import_nexpose_simplexml(args.merge(:data => data))
|
2010-01-07 19:06:29 +00:00
|
|
|
end
|
2010-02-18 06:40:38 +00:00
|
|
|
|
2010-10-06 15:55:28 +00:00
|
|
|
# Import a Metasploit XML file.
|
|
|
|
def import_msf_file(args={})
|
2010-06-04 14:57:58 +00:00
|
|
|
filename = args[:filename]
|
|
|
|
wspace = args[:wspace] || workspace
|
|
|
|
|
2010-05-03 01:17:20 +00:00
|
|
|
f = File.open(filename, 'rb')
|
2010-04-26 18:40:49 +00:00
|
|
|
data = f.read(f.stat.size)
|
2010-10-06 15:55:28 +00:00
|
|
|
import_msf_xml(args.merge(:data => data))
|
2010-04-26 18:40:49 +00:00
|
|
|
end
|
|
|
|
|
2010-06-10 21:06:06 +00:00
|
|
|
# Import a Metasploit Express ZIP file. Note that this requires
|
|
|
|
# a fair bit of filesystem manipulation, and is very much tied
|
2010-07-06 16:10:05 +00:00
|
|
|
# up with the Metasploit Express ZIP file format export (for
|
2010-06-10 21:06:06 +00:00
|
|
|
# obvious reasons). In the event directories exist, they will
|
|
|
|
# be reused. If target files exist, they will be overwritten.
|
|
|
|
#
|
2010-07-06 16:10:05 +00:00
|
|
|
# XXX: Refactor so it's not quite as sanity-blasting.
|
2010-10-06 15:55:28 +00:00
|
|
|
def import_msf_zip(args={}, &block)
|
2010-06-10 21:06:06 +00:00
|
|
|
data = args[:data]
|
|
|
|
wpsace = args[:wspace] || workspace
|
|
|
|
bl = validate_ips(args[:blacklist]) ? args[:blacklist].split : []
|
2010-07-06 16:10:05 +00:00
|
|
|
|
2010-10-06 18:00:21 +00:00
|
|
|
new_tmp = ::File.join(Dir::tmpdir,"msf",@import_filedata[:zip_basename])
|
|
|
|
if ::File.exists? new_tmp
|
|
|
|
unless (::File.directory?(new_tmp) && ::File.writable?(new_tmp))
|
2010-06-10 21:06:06 +00:00
|
|
|
raise DBImportError.new("Could not extract zip file to #{new_tmp}")
|
|
|
|
end
|
|
|
|
else
|
|
|
|
FileUtils.mkdir_p(new_tmp)
|
|
|
|
end
|
|
|
|
@import_filedata[:zip_tmp] = new_tmp
|
|
|
|
|
2010-10-06 18:00:21 +00:00
|
|
|
@import_filedata[:zip_tmp_subdirs] = @import_filedata[:zip_entry_names].map {|x| ::File.split(x)}.map {|x| x[0]}.uniq.reject {|x| x == "."}
|
2010-06-10 21:06:06 +00:00
|
|
|
|
2010-07-06 16:10:05 +00:00
|
|
|
@import_filedata[:zip_tmp_subdirs].each {|sub|
|
2010-10-06 18:00:21 +00:00
|
|
|
tmp_subdirs = ::File.join(@import_filedata[:zip_tmp],sub)
|
2010-06-11 18:56:16 +00:00
|
|
|
if File.exists? tmp_subdirs
|
2010-10-06 18:00:21 +00:00
|
|
|
unless (::File.directory?(tmp_subdirs) && File.writable?(tmp_subdirs))
|
2010-06-11 18:56:16 +00:00
|
|
|
raise DBImportError.new("Could not extract zip file to #{tmp_subdirs}")
|
2010-06-10 21:06:06 +00:00
|
|
|
end
|
|
|
|
else
|
2010-10-06 18:00:21 +00:00
|
|
|
::FileUtils.mkdir(tmp_subdirs)
|
2010-06-10 21:06:06 +00:00
|
|
|
end
|
|
|
|
}
|
|
|
|
|
|
|
|
data.entries.each do |e|
|
2010-10-06 18:00:21 +00:00
|
|
|
target = ::File.join(@import_filedata[:zip_tmp],e.name)
|
|
|
|
::File.unlink target if ::File.exists?(target) # Yep. Deleted.
|
2010-07-06 16:10:05 +00:00
|
|
|
data.extract(e,target)
|
2010-06-10 21:06:06 +00:00
|
|
|
if target =~ /^.*.xml$/
|
|
|
|
@import_filedata[:zip_extracted_xml] = target
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
# This will kick the newly-extracted XML file through
|
|
|
|
# the import_file process all over again.
|
|
|
|
if @import_filedata[:zip_extracted_xml]
|
|
|
|
new_args = args.dup
|
|
|
|
new_args[:filename] = @import_filedata[:zip_extracted_xml]
|
|
|
|
new_args[:data] = nil
|
2010-06-11 18:56:16 +00:00
|
|
|
new_args[:ifd] = @import_filedata.dup
|
2010-06-10 21:06:06 +00:00
|
|
|
if block
|
|
|
|
import_file(new_args, &block)
|
|
|
|
else
|
2010-06-11 18:56:16 +00:00
|
|
|
import_file(new_args)
|
2010-06-10 21:06:06 +00:00
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2010-06-11 18:56:16 +00:00
|
|
|
# Kick down to all the MSFX ZIP specific items
|
|
|
|
if block
|
2010-10-06 15:55:28 +00:00
|
|
|
import_msf_collateral(new_args, &block)
|
2010-06-11 18:56:16 +00:00
|
|
|
else
|
2010-10-06 15:55:28 +00:00
|
|
|
import_msf_collateral(new_args)
|
2010-06-11 18:56:16 +00:00
|
|
|
end
|
2010-06-10 21:06:06 +00:00
|
|
|
end
|
|
|
|
|
2010-10-06 15:55:28 +00:00
|
|
|
# Imports loot, tasks, and reports from an MSF ZIP report.
|
2010-06-11 18:56:16 +00:00
|
|
|
# XXX: This function is stupidly long. It needs to be refactored.
|
2010-10-06 15:55:28 +00:00
|
|
|
def import_msf_collateral(args={}, &block)
|
2010-10-06 18:00:21 +00:00
|
|
|
data = ::File.open(args[:filename], "rb") {|f| f.read(f.stat.size)}
|
2010-06-11 18:56:16 +00:00
|
|
|
wspace = args[:wspace] || args['wspace'] || workspace
|
|
|
|
bl = validate_ips(args[:blacklist]) ? args[:blacklist].split : []
|
2010-10-06 18:00:21 +00:00
|
|
|
basedir = args[:basedir] || args['basedir'] || ::File.join(Msf::Config.install_root, "data", "msf")
|
2010-07-07 00:28:34 +00:00
|
|
|
|
|
|
|
allow_yaml = false
|
2010-10-06 18:09:06 +00:00
|
|
|
btag = nil
|
2010-07-07 00:28:34 +00:00
|
|
|
|
2010-06-11 18:56:16 +00:00
|
|
|
doc = rexmlify(data)
|
|
|
|
if doc.elements["MetasploitExpressV1"]
|
|
|
|
m_ver = 1
|
2010-07-07 00:28:34 +00:00
|
|
|
allow_yaml = true
|
2010-10-06 18:09:06 +00:00
|
|
|
btag = "MetasploitExpressV1"
|
2010-06-11 18:56:16 +00:00
|
|
|
elsif doc.elements["MetasploitExpressV2"]
|
|
|
|
m_ver = 2
|
2010-07-07 00:28:34 +00:00
|
|
|
allow_yaml = true
|
2010-10-06 18:09:06 +00:00
|
|
|
btag = "MetasploitExpressV2"
|
2010-07-07 00:28:34 +00:00
|
|
|
elsif doc.elements["MetasploitExpressV3"]
|
|
|
|
m_ver = 3
|
2010-10-06 18:09:06 +00:00
|
|
|
btag = "MetasploitExpressV3"
|
2010-10-06 18:00:21 +00:00
|
|
|
elsif doc.elements["MetasploitExpressV4"]
|
2010-10-06 18:09:06 +00:00
|
|
|
m_ver = 4
|
|
|
|
btag = "MetasploitExpressV4"
|
|
|
|
elsif doc.elements["MetasploitV4"]
|
|
|
|
m_ver = 4
|
|
|
|
btag = "MetasploitV4"
|
2010-06-11 18:56:16 +00:00
|
|
|
else
|
|
|
|
m_ver = nil
|
|
|
|
end
|
2010-10-06 18:09:06 +00:00
|
|
|
unless m_ver and btag
|
|
|
|
raise DBImportError.new("Unsupported Metasploit XML document format")
|
2010-06-11 18:56:16 +00:00
|
|
|
end
|
|
|
|
|
|
|
|
host_info = {}
|
2010-10-06 18:09:06 +00:00
|
|
|
doc.elements.each("/#{btag}/hosts/host") do |host|
|
2010-10-06 05:10:16 +00:00
|
|
|
host_info[host.elements["id"].text.to_s.strip] = nils_for_nulls(host.elements["address"].text.to_s.strip)
|
2010-06-11 18:56:16 +00:00
|
|
|
end
|
|
|
|
|
|
|
|
# Import Loot
|
2010-10-06 18:09:06 +00:00
|
|
|
doc.elements.each("/#{btag}/loots/loot") do |loot|
|
2010-06-11 18:56:16 +00:00
|
|
|
next if bl.include? host_info[loot.elements["host-id"].text.to_s.strip]
|
|
|
|
loot_info = {}
|
|
|
|
loot_info[:host] = host_info[loot.elements["host-id"].text.to_s.strip]
|
|
|
|
loot_info[:workspace] = args[:wspace]
|
2010-10-06 05:10:16 +00:00
|
|
|
loot_info[:ctype] = nils_for_nulls(loot.elements["content-type"].text.to_s.strip)
|
|
|
|
loot_info[:info] = nils_for_nulls(unserialize_object(loot.elements["info"], allow_yaml))
|
|
|
|
loot_info[:ltype] = nils_for_nulls(loot.elements["ltype"].text.to_s.strip)
|
|
|
|
loot_info[:name] = nils_for_nulls(loot.elements["name"].text.to_s.strip)
|
|
|
|
loot_info[:created_at] = nils_for_nulls(loot.elements["created-at"].text.to_s.strip)
|
|
|
|
loot_info[:updated_at] = nils_for_nulls(loot.elements["updated-at"].text.to_s.strip)
|
|
|
|
loot_info[:name] = nils_for_nulls(loot.elements["name"].text.to_s.strip)
|
|
|
|
loot_info[:orig_path] = nils_for_nulls(loot.elements["path"].text.to_s.strip)
|
2010-06-11 18:56:16 +00:00
|
|
|
tmp = args[:ifd][:zip_tmp]
|
2010-10-06 05:10:16 +00:00
|
|
|
loot_info[:orig_path].gsub!(/^\./,tmp) if loot_info[:orig_path]
|
|
|
|
if !loot.elements["service-id"].text.to_s.strip.empty?
|
|
|
|
unless loot.elements["service-id"].text.to_s.strip == "NULL"
|
|
|
|
loot_info[:service] = loot.elements["service-id"].text.to_s.strip
|
|
|
|
end
|
2010-06-11 18:56:16 +00:00
|
|
|
end
|
|
|
|
|
|
|
|
# Only report loot if we actually have it.
|
|
|
|
# TODO: Copypasta. Seperate this out.
|
2010-10-06 18:00:21 +00:00
|
|
|
if ::File.exists? loot_info[:orig_path]
|
|
|
|
loot_dir = ::File.join(basedir,"loot")
|
|
|
|
loot_file = ::File.split(loot_info[:orig_path]).last
|
|
|
|
if ::File.exists? loot_dir
|
|
|
|
unless (::File.directory?(loot_dir) && ::File.writable?(loot_dir))
|
2010-06-11 18:56:16 +00:00
|
|
|
raise DBImportError.new("Could not move files to #{loot_dir}")
|
|
|
|
end
|
|
|
|
else
|
2010-10-06 18:00:21 +00:00
|
|
|
::FileUtils.mkdir_p(loot_dir)
|
2010-06-11 18:56:16 +00:00
|
|
|
end
|
2010-10-06 18:00:21 +00:00
|
|
|
new_loot = ::File.join(loot_dir,loot_file)
|
2010-06-11 18:56:16 +00:00
|
|
|
loot_info[:path] = new_loot
|
2010-10-06 18:00:21 +00:00
|
|
|
if ::File.exists?(new_loot)
|
|
|
|
::File.unlink new_loot # Delete it, and don't report it.
|
2010-06-22 20:30:43 +00:00
|
|
|
else
|
|
|
|
report_loot(loot_info) # It's new, so report it.
|
|
|
|
end
|
2010-10-06 18:00:21 +00:00
|
|
|
::FileUtils.copy(loot_info[:orig_path], new_loot)
|
2010-10-06 15:55:28 +00:00
|
|
|
yield(:msf_loot, new_loot) if block
|
2010-06-11 18:56:16 +00:00
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
# Import Tasks
|
2010-10-06 18:09:06 +00:00
|
|
|
doc.elements.each("/#{btag}/tasks/task") do |task|
|
2010-06-11 18:56:16 +00:00
|
|
|
task_info = {}
|
|
|
|
task_info[:workspace] = args[:wspace]
|
|
|
|
# Should user be imported (original) or declared (the importing user)?
|
2010-10-06 05:10:16 +00:00
|
|
|
task_info[:user] = nils_for_nulls(task.elements["created-by"].text.to_s.strip)
|
|
|
|
task_info[:desc] = nils_for_nulls(task.elements["description"].text.to_s.strip)
|
|
|
|
task_info[:info] = nils_for_nulls(unserialize_object(task.elements["info"], allow_yaml))
|
|
|
|
task_info[:mod] = nils_for_nulls(task.elements["module"].text.to_s.strip)
|
|
|
|
task_info[:options] = nils_for_nulls(task.elements["options"].text.to_s.strip)
|
|
|
|
task_info[:prog] = nils_for_nulls(task.elements["progress"].text.to_s.strip).to_i
|
|
|
|
task_info[:created_at] = nils_for_nulls(task.elements["created-at"].text.to_s.strip)
|
|
|
|
task_info[:updated_at] = nils_for_nulls(task.elements["updated-at"].text.to_s.strip)
|
2010-06-11 18:56:16 +00:00
|
|
|
if !task.elements["completed-at"].text.to_s.empty?
|
2010-10-06 05:10:16 +00:00
|
|
|
task_info[:completed_at] = nils_for_nulls(task.elements["completed-at"].text.to_s.strip)
|
2010-06-11 18:56:16 +00:00
|
|
|
end
|
|
|
|
if !task.elements["error"].text.to_s.empty?
|
2010-10-06 05:10:16 +00:00
|
|
|
task_info[:error] = nils_for_nulls(task.elements["error"].text.to_s.strip)
|
2010-06-11 18:56:16 +00:00
|
|
|
end
|
|
|
|
if !task.elements["result"].text.to_s.empty?
|
2010-10-06 05:10:16 +00:00
|
|
|
task_info[:result] = nils_for_nulls(task.elements["result"].text.to_s.strip)
|
2010-06-11 18:56:16 +00:00
|
|
|
end
|
2010-10-06 05:10:16 +00:00
|
|
|
task_info[:orig_path] = nils_for_nulls(task.elements["path"].text.to_s.strip)
|
2010-06-11 18:56:16 +00:00
|
|
|
tmp = args[:ifd][:zip_tmp]
|
2010-10-06 05:10:16 +00:00
|
|
|
task_info[:orig_path].gsub!(/^\./,tmp) if task_info[:orig_path]
|
2010-06-11 18:56:16 +00:00
|
|
|
|
|
|
|
# Only report a task if we actually have it.
|
|
|
|
# TODO: Copypasta. Seperate this out.
|
2010-10-06 18:00:21 +00:00
|
|
|
if ::File.exists? task_info[:orig_path]
|
|
|
|
tasks_dir = ::File.join(basedir,"tasks")
|
|
|
|
task_file = ::File.split(task_info[:orig_path]).last
|
|
|
|
if ::File.exists? tasks_dir
|
|
|
|
unless (::File.directory?(tasks_dir) && ::File.writable?(tasks_dir))
|
2010-06-11 18:56:16 +00:00
|
|
|
raise DBImportError.new("Could not move files to #{tasks_dir}")
|
|
|
|
end
|
|
|
|
else
|
2010-10-06 18:00:21 +00:00
|
|
|
::FileUtils.mkdir_p(tasks_dir)
|
2010-06-11 18:56:16 +00:00
|
|
|
end
|
2010-10-06 18:00:21 +00:00
|
|
|
new_task = ::File.join(tasks_dir,task_file)
|
2010-06-11 18:56:16 +00:00
|
|
|
task_info[:path] = new_task
|
2010-10-06 18:00:21 +00:00
|
|
|
if ::File.exists?(new_task)
|
|
|
|
::File.unlink new_task # Delete it, and don't report it.
|
2010-06-22 20:30:43 +00:00
|
|
|
else
|
|
|
|
report_task(task_info) # It's new, so report it.
|
|
|
|
end
|
2010-10-06 18:00:21 +00:00
|
|
|
::FileUtils.copy(task_info[:orig_path], new_task)
|
2010-10-06 15:55:28 +00:00
|
|
|
yield(:msf_task, new_task) if block
|
2010-06-11 18:56:16 +00:00
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
# Import Reports
|
2010-10-06 18:09:06 +00:00
|
|
|
doc.elements.each("/#{btag}/reports/report") do |report|
|
2010-06-11 18:56:16 +00:00
|
|
|
report_info = {}
|
|
|
|
report_info[:workspace] = args[:wspace]
|
|
|
|
# Should user be imported (original) or declared (the importing user)?
|
2010-10-06 05:10:16 +00:00
|
|
|
report_info[:user] = nils_for_nulls(report.elements["created-by"].text.to_s.strip)
|
|
|
|
report_info[:options] = nils_for_nulls(report.elements["options"].text.to_s.strip)
|
|
|
|
report_info[:rtype] = nils_for_nulls(report.elements["rtype"].text.to_s.strip)
|
|
|
|
report_info[:created_at] = nils_for_nulls(report.elements["created-at"].text.to_s.strip)
|
|
|
|
report_info[:updated_at] = nils_for_nulls(report.elements["updated-at"].text.to_s.strip)
|
2010-06-11 18:56:16 +00:00
|
|
|
|
2010-10-06 05:10:16 +00:00
|
|
|
report_info[:orig_path] = nils_for_nulls(report.elements["path"].text.to_s.strip)
|
2010-06-11 18:56:16 +00:00
|
|
|
tmp = args[:ifd][:zip_tmp]
|
2010-10-06 05:10:16 +00:00
|
|
|
report_info[:orig_path].gsub!(/^\./,tmp) if report_info[:orig_path]
|
2010-06-11 18:56:16 +00:00
|
|
|
|
|
|
|
# Only report a report if we actually have it.
|
|
|
|
# TODO: Copypasta. Seperate this out.
|
2010-10-06 18:00:21 +00:00
|
|
|
if ::File.exists? report_info[:orig_path]
|
|
|
|
reports_dir = ::File.join(basedir,"reports")
|
|
|
|
report_file = ::File.split(report_info[:orig_path]).last
|
|
|
|
if ::File.exists? reports_dir
|
|
|
|
unless (::File.directory?(reports_dir) && ::File.writable?(reports_dir))
|
2010-06-11 18:56:16 +00:00
|
|
|
raise DBImportError.new("Could not move files to #{reports_dir}")
|
|
|
|
end
|
|
|
|
else
|
2010-10-06 18:00:21 +00:00
|
|
|
::FileUtils.mkdir_p(reports_dir)
|
2010-06-11 18:56:16 +00:00
|
|
|
end
|
2010-10-06 18:00:21 +00:00
|
|
|
new_report = ::File.join(reports_dir,report_file)
|
2010-06-11 18:56:16 +00:00
|
|
|
report_info[:path] = new_report
|
2010-10-06 18:00:21 +00:00
|
|
|
if ::File.exists?(new_report)
|
|
|
|
::File.unlink new_report
|
2010-06-22 20:30:43 +00:00
|
|
|
else
|
|
|
|
report_report(report_info)
|
|
|
|
end
|
2010-10-06 18:00:21 +00:00
|
|
|
::FileUtils.copy(report_info[:orig_path], new_report)
|
2010-10-06 15:55:28 +00:00
|
|
|
yield(:msf_report, new_report) if block
|
2010-06-11 18:56:16 +00:00
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
end
|
2010-06-10 21:06:06 +00:00
|
|
|
|
2010-04-26 18:40:49 +00:00
|
|
|
# For each host, step through services, notes, and vulns, and import
|
2010-04-27 14:46:00 +00:00
|
|
|
# them.
|
2010-04-26 18:40:49 +00:00
|
|
|
# TODO: loot, tasks, and reports
|
2010-10-06 15:55:28 +00:00
|
|
|
# Import data from a Metasploit / Metasploit Express XML export
# document: hosts (with their services, notes, vulns, and creds),
# web sites, and web pages/forms/vulns.
#
# args:
#   :data      - the XML document as a String
#   :wspace    - workspace to import into (defaults to the current one)
#   :blacklist - whitespace-separated list of IP addresses to skip
#
# Yields progress tuples such as (:address, ip) and (:web_site, desc)
# when a block is given.
#
# Raises DBImportError when the document root is not a recognized
# Metasploit export tag.
def import_msf_xml(args={}, &block)
  data = args[:data]
  wspace = args[:wspace] || workspace
  bl = validate_ips(args[:blacklist]) ? args[:blacklist].split : []

  allow_yaml = false
  btag = nil

  doc = rexmlify(data)
  # Determine the export format version. V1/V2 exports may contain
  # YAML-serialized objects, so only those enable YAML deserialization.
  if doc.elements["MetasploitExpressV1"]
    m_ver = 1
    allow_yaml = true
    btag = "MetasploitExpressV1"
  elsif doc.elements["MetasploitExpressV2"]
    m_ver = 2
    allow_yaml = true
    btag = "MetasploitExpressV2"
  elsif doc.elements["MetasploitExpressV3"]
    m_ver = 3
    btag = "MetasploitExpressV3"
  elsif doc.elements["MetasploitExpressV4"]
    m_ver = 4
    btag = "MetasploitExpressV4"
  elsif doc.elements["MetasploitV4"]
    m_ver = 4
    btag = "MetasploitV4"
  else
    m_ver = nil
  end
  unless m_ver and btag
    raise DBImportError.new("Unsupported Metasploit XML document format")
  end

  doc.elements.each("/#{btag}/hosts/host") do |host|
    host_data = {}
    host_data[:workspace] = wspace
    host_data[:host] = nils_for_nulls(host.elements["address"].text.to_s.strip)
    if bl.include? host_data[:host]
      next
    else
      yield(:address,host_data[:host]) if block
    end
    host_data[:host_mac] = nils_for_nulls(host.elements["mac"].text.to_s.strip)
    if host.elements["comm"].text
      host_data[:comm] = nils_for_nulls(host.elements["comm"].text.to_s.strip)
    end
    %W{created-at updated-at name state os-flavor os-lang os-name os-sp purpose}.each { |datum|
      if host.elements[datum].text
        host_data[datum.gsub('-','_')] = nils_for_nulls(host.elements[datum].text.to_s.strip)
      end
    }
    host_address = host_data[:host].dup # Preserve after report_host() deletes
    report_host(host_data)

    host.elements.each('services/service') do |service|
      service_data = {}
      service_data[:workspace] = wspace
      service_data[:host] = host_address
      service_data[:port] = nils_for_nulls(service.elements["port"].text.to_s.strip).to_i
      service_data[:proto] = nils_for_nulls(service.elements["proto"].text.to_s.strip)
      %W{created-at updated-at name state info}.each { |datum|
        if service.elements[datum].text
          if datum == "info"
            # "info" may be a serialized object rather than plain text
            service_data["info"] = nils_for_nulls(unserialize_object(service.elements[datum], false))
          else
            service_data[datum.gsub("-","_")] = nils_for_nulls(service.elements[datum].text.to_s.strip)
          end
        end
      }
      report_service(service_data)
    end

    host.elements.each('notes/note') do |note|
      note_data = {}
      note_data[:workspace] = wspace
      note_data[:host] = host_address
      note_data[:type] = nils_for_nulls(note.elements["ntype"].text.to_s.strip)
      note_data[:data] = nils_for_nulls(unserialize_object(note.elements["data"], allow_yaml))

      if note.elements["critical"].text
        note_data[:critical] = true unless note.elements["critical"].text.to_s.strip == "NULL"
      end
      if note.elements["seen"].text
        # Bug fix: this previously inspected the "critical" element
        # when deciding whether to mark the note as seen.
        note_data[:seen] = true unless note.elements["seen"].text.to_s.strip == "NULL"
      end
      %W{created-at updated-at}.each { |datum|
        if note.elements[datum].text
          note_data[datum.gsub("-","_")] = nils_for_nulls(note.elements[datum].text.to_s.strip)
        end
      }
      report_note(note_data)
    end

    host.elements.each('vulns/vuln') do |vuln|
      vuln_data = {}
      vuln_data[:workspace] = wspace
      vuln_data[:host] = host_address
      vuln_data[:info] = nils_for_nulls(unserialize_object(vuln.elements["info"], allow_yaml))
      vuln_data[:name] = nils_for_nulls(vuln.elements["name"].text.to_s.strip)
      %W{created-at updated-at}.each { |datum|
        if vuln.elements[datum].text
          vuln_data[datum.gsub("-","_")] = nils_for_nulls(vuln.elements[datum].text.to_s.strip)
        end
      }
      report_vuln(vuln_data)
    end

    host.elements.each('creds/cred') do |cred|
      cred_data = {}
      cred_data[:workspace] = wspace
      cred_data[:host] = host_address
      %W{port ptype sname proto proof active user pass}.each {|datum|
        if cred.elements[datum].respond_to? :text
          cred_data[datum.intern] = nils_for_nulls(cred.elements[datum].text.to_s.strip)
        end
      }
      %W{created-at updated-at}.each { |datum|
        if cred.elements[datum].respond_to? :text
          cred_data[datum.gsub("-","_")] = nils_for_nulls(cred.elements[datum].text.to_s.strip)
        end
      }
      # Masked and explicitly-blank passwords are normalized to the
      # empty string; masked creds are additionally marked inactive.
      if cred_data[:pass] == "<masked>"
        cred_data[:pass] = ""
        cred_data[:active] = false
      elsif cred_data[:pass] == "*BLANK PASSWORD*"
        cred_data[:pass] = ""
      end
      report_cred(cred_data.merge(:wait => true))
    end
  end

  # Import web sites
  doc.elements.each("/#{btag}/web_sites/web_site") do |web|
    info = {}
    info[:workspace] = wspace

    %W{host port vhost ssl comments}.each do |datum|
      if web.elements[datum].respond_to? :text
        info[datum.intern] = nils_for_nulls(web.elements[datum].text.to_s.strip)
      end
    end

    info[:options] = nils_for_nulls(unserialize_object(web.elements["options"], allow_yaml)) if web.elements["options"].respond_to?(:text)
    info[:ssl] = (info[:ssl] and info[:ssl].to_s.strip.downcase == "true") ? true : false

    %W{created-at updated-at}.each { |datum|
      # Robustness: guard against a missing element (previously .text
      # was called on nil when the element was absent).
      if web.elements[datum] and web.elements[datum].text
        info[datum.gsub("-","_")] = nils_for_nulls(web.elements[datum].text.to_s.strip)
      end
    }

    report_web_site(info)
    yield(:web_site, "#{info[:host]}:#{info[:port]} (#{info[:vhost]})") if block
  end

  # Import web pages, forms, and vulns
  %W{page form vuln}.each do |wtype|
    doc.elements.each("/#{btag}/web_#{wtype}s/web_#{wtype}") do |web|
      info = {}
      info[:workspace] = wspace
      info[:host] = nils_for_nulls(web.elements["host"].text.to_s.strip) if web.elements["host"].respond_to?(:text)
      info[:port] = nils_for_nulls(web.elements["port"].text.to_s.strip) if web.elements["port"].respond_to?(:text)
      info[:ssl] = nils_for_nulls(web.elements["ssl"].text.to_s.strip) if web.elements["ssl"].respond_to?(:text)
      info[:vhost] = nils_for_nulls(web.elements["vhost"].text.to_s.strip) if web.elements["vhost"].respond_to?(:text)

      info[:ssl] = (info[:ssl] and info[:ssl].to_s.strip.downcase == "true") ? true : false

      case wtype
      when "page"
        %W{path code body query cookie auth ctype mtime location}.each do |datum|
          if web.elements[datum].respond_to? :text
            info[datum.intern] = nils_for_nulls(web.elements[datum].text.to_s.strip)
          end
        end
        info[:headers] = nils_for_nulls(unserialize_object(web.elements["headers"], allow_yaml))
      when "form"
        %W{path query method}.each do |datum|
          if web.elements[datum].respond_to? :text
            info[datum.intern] = nils_for_nulls(web.elements[datum].text.to_s.strip)
          end
        end
        info[:params] = nils_for_nulls(unserialize_object(web.elements["params"], allow_yaml))
      when "vuln"
        %W{path query method pname proof risk name blame description category confidence}.each do |datum|
          if web.elements[datum].respond_to? :text
            info[datum.intern] = nils_for_nulls(web.elements[datum].text.to_s.strip)
          end
        end
        info[:params] = nils_for_nulls(unserialize_object(web.elements["params"], allow_yaml))
        info[:risk] = info[:risk].to_i
        info[:confidence] = info[:confidence].to_i
      end

      %W{created-at updated-at}.each { |datum|
        # Same missing-element guard as for web_sites above.
        if web.elements[datum] and web.elements[datum].text
          info[datum.gsub("-","_")] = nils_for_nulls(web.elements[datum].text.to_s.strip)
        end
      }
      self.send("report_web_#{wtype}", info)
      yield("web_#{wtype}".intern, info[:path]) if block
    end
  end
end
|
|
|
|
|
2010-10-06 18:00:21 +00:00
|
|
|
# Convert the string "NULL" to actual nil
|
2010-10-06 05:10:16 +00:00
|
|
|
# Convert the literal string "NULL" (the export format's marker for
# "no value") into an actual nil; any other value passes through.
def nils_for_nulls(str)
  return nil if str == "NULL"
  str
end
|
|
|
|
|
2010-06-08 19:16:20 +00:00
|
|
|
# Import a NeXpose "simple" XML report: for each device, reports the
# host, an OS fingerprint note, its services, and its vulnerabilities.
def import_nexpose_simplexml(args={}, &block)
  data = args[:data]
  wspace = args[:wspace] || workspace
  bl = validate_ips(args[:blacklist]) ? args[:blacklist].split : []

  doc = rexmlify(data)
  doc.elements.each('/NeXposeSimpleXML/devices/device') do |dev|
    addr = dev.attributes['address'].to_s
    next if bl.include?(addr)
    yield(:address, addr) if block

    # Collect the OS fingerprint fields into a note payload
    fprint = {}
    {
      'description' => :desc,
      'vendor'      => :vendor,
      'family'      => :family,
      'product'     => :product,
      'version'     => :version
    }.each do |elem, key|
      dev.elements.each("fingerprint/#{elem}") do |str|
        fprint[key] = str.text.to_s.strip
      end
    end
    # Architecture is additionally upcased
    dev.elements.each('fingerprint/architecture') do |str|
      fprint[:arch] = str.text.to_s.upcase.strip
    end

    report_host(
      :workspace => wspace,
      :host => addr,
      :state => Msf::HostState::Alive
    )

    report_note(
      :workspace => wspace,
      :host => addr,
      :type => 'host.os.nexpose_fingerprint',
      :data => fprint
    )

    # Load vulnerabilities not associated with a service
    dev.elements.each('vulnerabilities/vulnerability') do |vuln|
      vid = vuln.attributes['id'].to_s.downcase
      refs = process_nexpose_data_sxml_refs(vuln)
      next unless refs
      report_vuln(
        :workspace => wspace,
        :host => addr,
        :name => 'NEXPOSE-' + vid,
        :info => vid,
        :refs => refs)
    end

    # Load the services
    dev.elements.each('services/service') do |svc|
      sname = svc.attributes['name'].to_s
      sprot = svc.attributes['protocol'].to_s.downcase
      sport = svc.attributes['port'].to_s.to_i
      next if sport == 0

      name = sname.split('(')[0].strip
      info = ''
      svc.elements.each('fingerprint/description') do |str|
        info = str.text.to_s.strip
      end

      sdata = {
        :workspace => wspace,
        :host => addr,
        :proto => sprot,
        :port => sport,
        :info => info
      }
      # Only record a service name when the scanner actually knew one
      sdata[:name] = name unless sname.downcase == '<unknown>'
      report_service(sdata)

      # Load vulnerabilities associated with this service
      svc.elements.each('vulnerabilities/vulnerability') do |vuln|
        vid = vuln.attributes['id'].to_s.downcase
        refs = process_nexpose_data_sxml_refs(vuln)
        next unless refs
        report_vuln(
          :workspace => wspace,
          :host => addr,
          :port => sport,
          :proto => sprot,
          :name => 'NEXPOSE-' + vid,
          :info => vid,
          :refs => refs)
      end
    end
  end
end
|
|
|
|
|
|
|
|
|
|
|
|
#
|
|
|
|
# Nexpose Raw XML
|
|
|
|
#
|
2010-06-04 14:57:58 +00:00
|
|
|
# Read a NeXpose raw XML export from disk and import it.
#
# Bug fix: the file handle is now closed deterministically via the
# block form of File.open (the handle was previously leaked).
def import_nexpose_rawxml_file(args={})
  filename = args[:filename]
  wspace = args[:wspace] || workspace

  data = ::File.open(filename, 'rb') { |f| f.read(f.stat.size) }
  import_nexpose_rawxml(args.merge(:data => data))
end
|
2010-04-26 20:25:42 +00:00
|
|
|
|
2010-06-08 19:16:20 +00:00
|
|
|
# Import a NeXpose raw ("rawxml") report.
#
# A stream parser is used instead of a tree parser so huge result
# files do not exhaust memory. Hosts and vulns are accumulated in
# memory first because all Refs must exist in the database before any
# Vuln can reference them; this also avoids re-walking the document
# for references on every vuln.
def import_nexpose_rawxml(args={}, &block)
  data = args[:data]
  wspace = args[:wspace] || workspace
  bl = validate_ips(args[:blacklist]) ? args[:blacklist].split : []

  parser = Rex::Parser::NexposeXMLStreamParser.new

  hosts = []
  vulns = []

  # The callback merely populates the in-memory host and vuln tables
  parser.callback = Proc.new { |type, value|
    case type
    when :host
      hosts << value
    when :vuln
      vulns << value
    end
  }

  REXML::Document.parse_stream(data, parser)

  vuln_refs = nexpose_refs_to_hash(vulns)
  hosts.each do |host|
    next if bl.include?(host["addr"])
    yield(:address, host["addr"]) if block
    nexpose_host(host, vuln_refs, wspace)
  end
end
|
|
|
|
|
|
|
|
#
|
|
|
|
# Takes an array of vuln hashes, as returned by the NeXpose rawxml stream
|
|
|
|
# parser, like:
|
|
|
|
# [
|
|
|
|
# {"id"=>"winreg-notes-protocol-handler", severity="8", "refs"=>[{"source"=>"BID", "value"=>"10600"}, ...]}
|
|
|
|
# {"id"=>"windows-zotob-c", severity="8", "refs"=>[{"source"=>"BID", "value"=>"14513"}, ...]}
|
|
|
|
# ]
|
|
|
|
# and transforms it into a hash of vuln references keyed on vuln id, like:
|
|
|
|
# { "windows-zotob-c" => [{"source"=>"BID", "value"=>"14513"}, ...] }
|
2010-05-02 23:13:21 +00:00
|
|
|
#
|
2010-05-02 19:16:52 +00:00
|
|
|
# This method ignores all attributes other than the vuln's NeXpose ID and
|
|
|
|
# references (including title, severity, et cetera).
|
|
|
|
#
|
|
|
|
# Takes an array of vuln hashes, as produced by the NeXpose rawxml
# stream parser, e.g.:
#   [{"id"=>"windows-zotob-c", "refs"=>[{"source"=>"BID", "value"=>"14513"}, ...]}, ...]
# and transforms it into a hash of reference lists keyed on vuln id:
#   { "windows-zotob-c" => ["BID-14513", ...] }
#
# Only the NeXpose ID and references are considered; all other
# attributes (title, severity, etc.) are ignored. References from
# unrecognized sources are dropped, though the vuln id still gets an
# (empty) entry.
def nexpose_refs_to_hash(vulns)
  refs = {}
  vulns.each do |vuln|
    vuln["refs"].each do |ref|
      list = (refs[vuln['id']] ||= [])
      case ref['source']
      when 'BID'
        list << ('BID-' + ref["value"])
      when 'CVE'
        # value is already in CVE-$ID form
        list << ref["value"]
      when 'MS'
        list << ('MSB-MS-' + ref["value"])
      when 'URL'
        list << ('URL-' + ref["value"])
      end
    end
  end
  refs
end
|
|
|
|
|
2010-05-02 22:23:43 +00:00
|
|
|
# Report a single host parsed from a NeXpose rawxml document into the
# given workspace, along with its OS fingerprint note, services, and
# vulns. vuln_refs is the id => refs mapping from nexpose_refs_to_hash.
def nexpose_host(h, vuln_refs, wspace)
  addr = h["addr"]
  # Can't report it if it doesn't have an IP
  return unless addr

  data = { :workspace => wspace, :host => addr }

  if h["hardware-address"]
    # Put colons between each octet of the MAC address
    data[:mac] = h["hardware-address"].gsub(':', '').scan(/../).join(':')
  end
  data[:state] = (h["status"] == "alive") ? Msf::HostState::Alive : Msf::HostState::Dead

  # Since we only have one name field per host in the database, just
  # take the first one.
  if h["names"] and h["names"].first
    data[:name] = h["names"].first
  end

  report_host(data) if data[:state] != Msf::HostState::Dead

  if h["os_family"]
    note = {
      :workspace => wspace,
      :host => addr,
      :type => 'host.os.nexpose_fingerprint',
      :data => {
        :family => h["os_family"],
        :certainty => h["os_certainty"]
      }
    }
    note[:data][:vendor]  = h["os_vendor"]  if h["os_vendor"]
    note[:data][:product] = h["os_product"] if h["os_product"]
    note[:data][:arch]    = h["arch"]       if h["arch"]
    report_note(note)
  end

  h["endpoints"].each { |p|
    # Skip port-0 endpoints
    next if p["port"].to_i == 0

    extra = ""
    extra << p["product"] + " " if p["product"]
    extra << p["version"] + " " if p["version"]
    # XXX This should probably be handled in a more standard way
    # extra << "(" + p["certainty"] + " certainty) " if p["certainty"]

    sdata = {
      :workspace => wspace,
      :host => addr,
      :proto => p["protocol"].downcase,
      :port => p["port"].to_i,
      :state => p["status"]
    }
    sdata[:info] = extra unless extra.empty?
    sdata[:name] = p["name"] if p["name"] != "<unknown>"
    report_service(sdata)
  }

  h["vulns"].each_pair { |k, v|
    # Only confirmed-vulnerable results are imported
    next if v["status"] != "vulnerable-exploited" and v["status"] != "vulnerable-version"

    vdata = {
      :workspace => wspace,
      :host => addr,
      :name => "NEXPOSE-" + v["id"],
      :refs => vuln_refs[v["id"]]
    }
    vdata[:proto] = v["protocol"].downcase if v["protocol"]
    vdata[:port]  = v["port"].to_i if v["port"]
    report_vuln(vdata)
  }
end
|
2010-04-07 20:51:05 +00:00
|
|
|
|
2010-10-07 02:33:57 +00:00
|
|
|
|
|
|
|
#
|
|
|
|
# Retina XML
|
|
|
|
#
|
|
|
|
|
|
|
|
# Process a Retina XML file
|
|
|
|
# Read a Retina XML report from disk and import it.
#
# Bug fix: the file handle is now closed deterministically via the
# block form of File.open (the handle was previously leaked).
def import_retina_xml_file(args={})
  filename = args[:filename]
  wspace = args[:wspace] || workspace

  data = ::File.open(filename, 'rb') { |f| f.read(f.stat.size) }
  import_retina_xml(args.merge(:data => data))
end
|
|
|
|
|
|
|
|
# Process Retina XML
|
|
|
|
# Process Retina XML: for each discovered host, reports the host, an
# OS fingerprint note, an import note, and its vulnerabilities.
def import_retina_xml(args={}, &block)
  data = args[:data]
  wspace = args[:wspace] || workspace
  bl = validate_ips(args[:blacklist]) ? args[:blacklist].split : []

  parser = Rex::Parser::RetinaXMLStreamParser.new
  parser.on_found_host = Proc.new do |host|
    hobj = { :workspace => wspace }
    addr = host['address']
    next unless addr
    next if bl.include?(addr)

    hobj[:host] = addr
    hobj[:mac] = host['mac'] if host['mac']
    hobj[:state] = Msf::HostState::Alive
    # The NetBIOS name wins over the DNS hostname when both exist
    hobj[:name] = host['hostname'] if host['hostname']
    hobj[:name] = host['netbios'] if host['netbios']

    yield(:address, hobj[:host]) if block

    # Import Host
    report_host(hobj)
    report_import_note(wspace, addr)

    # Import OS fingerprint
    if host["os"]
      report_note(
        :workspace => wspace,
        :host => addr,
        :type => 'host.os.retina_fingerprint',
        :data => { :os => host["os"] }
      )
    end

    # Import vulnerabilities
    host['vulns'].each do |vuln|
      refs = vuln['refs'].map { |v| v.join("-") }
      refs << "RETINA-#{vuln['rthid']}" if vuln['rthid']

      report_vuln(
        :workspace => wspace,
        :host => addr,
        :name => vuln['name'],
        :info => vuln['description'],
        :refs => refs
      )
    end
  end

  REXML::Document.parse_stream(data, parser)
end
|
|
|
|
|
2010-10-07 06:24:26 +00:00
|
|
|
#
|
|
|
|
# NetSparker XML
|
|
|
|
#
|
|
|
|
|
|
|
|
# Process a NetSparker XML file
|
|
|
|
# Read a NetSparker XML report from disk and import it.
#
# Bug fix: the file handle is now closed deterministically via the
# block form of File.open (the handle was previously leaked).
def import_netsparker_xml_file(args={})
  filename = args[:filename]
  wspace = args[:wspace] || workspace

  data = ::File.open(filename, 'rb') { |f| f.read(f.stat.size) }
  import_netsparker_xml(args.merge(:data => data))
end
|
|
|
|
|
2010-10-11 02:57:07 +00:00
|
|
|
# Process NetSparker XML
|
2010-10-07 06:24:26 +00:00
|
|
|
# Process NetSparker XML: reports web pages and web vulns from a
# NetSparker scan export. A NetSparker scan targets a single site, so
# the resolved IP address is cached after the first entry.
#
# Fixes over the previous version:
# * Entries without a URL/URI are skipped with `next`; a `return`
#   inside the Proc callback aborted the entire import.
# * yield(:address, ...) now yields the resolved address; it
#   previously yielded data[:host], which is not assigned until later
#   and was always nil at that point.
# * The HTTP status-line regex was missing the slash
#   (/^HTTP\d+\.\d+/) and could never match "HTTP/1.1 200 ...".
# * The SSL flag compared uri.scheme against "ssl", which URI never
#   produces; it now checks for "https".
def import_netsparker_xml(args={}, &block)
  data = args[:data]
  wspace = args[:wspace] || workspace
  bl = validate_ips(args[:blacklist]) ? args[:blacklist].split : []
  addr = nil
  parser = Rex::Parser::NetSparkerXMLStreamParser.new
  parser.on_found_vuln = Proc.new do |vuln|
    data = {:workspace => wspace}

    # Parse the URL; skip entries that don't have one
    url = vuln['url']
    next if not url

    # Crack the URL into a URI
    uri = URI(url) rescue nil
    next if not uri

    # Resolve the host and cache the IP
    if not addr
      baddr = Rex::Socket.addr_aton(uri.host) rescue nil
      if baddr
        addr = Rex::Socket.addr_ntoa(baddr)
        yield(:address, addr) if block
      end
    end

    # Bail early if we have no IP address
    if not addr
      raise Interrupt, "Not a valid IP address"
    end

    if bl.include?(addr)
      raise Interrupt, "IP address is on the blacklist"
    end

    data[:host]  = addr
    data[:vhost] = uri.host
    data[:port]  = uri.port
    data[:ssl]   = (uri.scheme == "https")

    body = nil
    # First report a web page
    if vuln['response']
      headers = {}
      code = 200
      head,body = vuln['response'].to_s.split(/\r?\n\r?\n/, 2)
      if body

        if head =~ /^HTTP\/\d+\.\d+\s+(\d+)\s*/
          code = $1.to_i
        end

        headers = {}
        head.split(/\r?\n/).each do |line|
          hname,hval = line.strip.split(/\s*:\s*/, 2)
          next if hval.to_s.strip.empty?
          headers[hname.downcase] ||= []
          headers[hname.downcase] << hval
        end

        info = {
          :path => uri.path,
          :query => uri.query,
          :code => code,
          :body => body,
          :headers => headers
        }
        info.merge!(data)

        if headers['content-type']
          info[:ctype] = headers['content-type'][0]
        end

        if headers['set-cookie']
          info[:cookie] = headers['set-cookie'].join("\n")
        end

        if headers['authorization']
          info[:auth] = headers['authorization'].join("\n")
        end

        if headers['location']
          info[:location] = headers['location'][0]
        end

        if headers['last-modified']
          info[:mtime] = headers['last-modified'][0]
        end

        # Report the web page to the database
        report_web_page(info)

        yield(:web_page, url) if block
      end
    end # End web_page reporting

    details = netsparker_vulnerability_map(vuln)

    method = netsparker_method_map(vuln)
    pname = netsparker_pname_map(vuln)
    params = netsparker_params_map(vuln)

    proof = ''

    if vuln['info'] and vuln['info'].length > 0
      proof << vuln['info'].map{|x| "#{x[0]}: #{x[1]}\n" }.join + "\n"
    end

    if proof.empty?
      if body
        proof << body + "\n"
      else
        proof << vuln['response'].to_s + "\n"
      end
    end

    if params.empty? and pname
      params = [[pname, vuln['vparam_name'].to_s]]
    end

    info = {
      :path => uri.path,
      :query => uri.query,
      :method => method,
      :params => params,
      :pname => pname.to_s,
      :proof => proof,
      :risk => details[:risk],
      :name => details[:name],
      :blame => details[:blame],
      :category => details[:category],
      :description => details[:description],
      :confidence => details[:confidence],
    }
    info.merge!(data)

    next if vuln['type'].to_s.empty?

    report_web_vuln(info)
    yield(:web_vuln, url) if block
  end

  # We throw interrupts in our parser when the job is hopeless
  begin
    REXML::Document.parse_stream(data, parser)
  rescue ::Interrupt => e
    wlog("The netsparker_xml_import() job was interrupted: #{e}")
  end
end
|
|
|
|
|
|
|
|
# Map a NetSparker vparam_type to an HTTP method name. Only
# Post-injected parameters map to POST; every other injection point
# (FullQueryString, Querystring, RawUrlInjection, or anything else)
# is treated as a GET.
def netsparker_method_map(vuln)
  vuln['vparam_type'] == "Post" ? "POST" : "GET"
end
|
|
|
|
|
|
|
|
# Map a NetSparker vparam_name to a parameter name for the database.
# URI/query-path injection points are normalized to "PATH"; any other
# value passes through unchanged.
def netsparker_pname_map(vuln)
  pname = vuln['vparam_name']
  { "URI-BASED" => "PATH", "Query Based" => "PATH" }.fetch(pname, pname)
end
|
|
|
|
|
|
|
|
# NetSparker exports do not break out individual request parameters,
# so the parameter list is always empty; the caller synthesizes a
# single [pname, value] pair when one is available.
def netsparker_params_map(vuln)
  []
end
|
|
|
|
|
2010-10-11 02:57:07 +00:00
|
|
|
def netsparker_vulnerability_map(vuln)
	# Translate a NetSparker vulnerability record into the attribute hash
	# consumed by report_web_vuln. Returned keys:
	#   :risk        - severity from 1 (low) to 5 (critical)
	#   :name        - descriptive name (e.g. XSS, PersistentXSS)
	#   :blame       - responsible party (App Developer, Server Developer,
	#                  System Administrator)
	#   :category    - general class (info, xss, sql, rfi, lfi, cmd)
	#   :description - textual summary of the issue
	#   :confidence  - scanner confidence, 1 to 100

	vtype = vuln['type'].to_s

	# Informational findings that differ only in risk, name and blame.
	info_map = {
		"ApacheDirectoryListing"             => [1, 'Directory Listing',         'System Administrator'],
		"ApacheMultiViewsEnabled"            => [1, 'Apache MultiViews Enabled', 'System Administrator'],
		"ApacheVersion"                      => [1, 'Web Server Version',        'System Administrator'],
		"PHPVersion"                         => [1, 'PHP Module Version',        'System Administrator'],
		"AutoCompleteEnabled"                => [1, 'Form AutoComplete Enabled', 'App Developer'],
		"CookieNotMarkedAsHttpOnly"          => [1, 'Cookie Not HttpOnly',       'App Developer'],
		"EmailDisclosure"                    => [1, 'Email Address Disclosure',  'App Developer'],
		"ForbiddenResource"                  => [1, 'Forbidden Resource',        'App Developer'],
		"FileUploadFound"                    => [1, 'File Upload Form',          'App Developer'],
		"PasswordOverHTTP"                   => [2, 'Password Over HTTP',        'App Developer'],
		"MySQL5Identified"                   => [1, 'MySQL 5 Identified',        'App Developer'],
		"PossibleInternalWindowsPathLeakage" => [1, 'Path Leakage - Windows',    'App Developer'],
		"PossibleInternalUnixPathLeakage"    => [1, 'Path Leakage - Unix',       'App Developer']
	}

	if info_map.key?(vtype)
		risk, name, blame = info_map[vtype]
		return {
			:risk => risk,
			:name => name,
			:blame => blame,
			:category => 'info',
			:description => "",
			:confidence => 100
		}
	end

	case vtype
	when "PossibleXSS", "LowPossibilityPermanentXSS", "XSS", "PermanentXSS"
		# Confidence drops for the unconfirmed variants.
		conf = 100
		conf = 25 if vtype == "LowPossibilityPermanentXSS"
		conf = 50 if vtype == "PossibleXSS"
		{
			:risk => 3,
			:name => 'Cross-Site Scripting',
			:blame => 'App Developer',
			:category => 'xss',
			:description => "",
			:confidence => conf
		}
	when "ConfirmedBlindSQLInjection", "ConfirmedSQLInjection", "HighlyPossibleSqlInjection", "DatabaseErrorMessages"
		# Confidence drops for the unconfirmed variants.
		conf = 100
		conf = 90 if vtype == "HighlyPossibleSqlInjection"
		conf = 25 if vtype == "DatabaseErrorMessages"
		{
			:risk => 5,
			:name => 'SQL Injection',
			:blame => 'App Developer',
			:category => 'sql',
			:description => "",
			:confidence => conf
		}
	else
		# Unrecognized types are reported as low-risk informational
		# findings named after the raw NetSparker type string.
		{
			:risk => 1,
			:name => vtype,
			:blame => 'App Developer',
			:category => 'info',
			:description => "",
			:confidence => 100
		}
	end
end
|
2010-10-11 02:57:07 +00:00
|
|
|
|
2010-10-07 02:33:57 +00:00
|
|
|
|
2010-01-07 19:06:29 +00:00
|
|
|
#
|
|
|
|
# Import Nmap's -oX xml output
|
|
|
|
#
|
2010-06-04 14:57:58 +00:00
|
|
|
def import_nmap_xml_file(args={})
	# Read an Nmap -oX XML report from disk and hand its contents to
	# import_nmap_xml.
	#
	# args:
	#   :filename - path of the XML file (required)
	#   :wspace   - workspace to import into (defaults to the current one)
	# Remaining options are passed through to import_nmap_xml.
	filename = args[:filename]
	wspace = args[:wspace] || workspace

	# Use the block form of File.open so the descriptor is closed even if
	# the read raises (the old code leaked the handle).
	data = File.open(filename, 'rb') { |f| f.read(f.stat.size) }
	import_nmap_xml(args.merge(:data => data))
end
|
2010-02-18 06:40:38 +00:00
|
|
|
|
2010-09-27 15:40:33 +00:00
|
|
|
# Too many functions in one def! Refactor this.
|
2010-06-08 19:16:20 +00:00
|
|
|
def import_nmap_xml(args={}, &block)
	# Import the contents of an Nmap -oX XML report (passed as a String in
	# args[:data]) into the database.
	#
	# args:
	#   :data         - the XML document as a String
	#   :wspace       - workspace to report into (defaults to the current one)
	#   :blacklist    - whitespace-separated addresses to skip (only used if
	#                   it passes validate_ips)
	#   :fix_services - when set, normalize Nmap service names via
	#                   nmap_msf_service_map and skip hosts whose ports all
	#                   came back filtered
	# Yields (:address, addr) for each reported host when a block is given.
	data = args[:data]
	wspace = args[:wspace] || workspace
	bl = validate_ips(args[:blacklist]) ? args[:blacklist].split : []
	fix_services = args[:fix_services]

	# Use a stream parser instead of a tree parser so we can deal with
	# huge results files without running out of memory.
	parser = Rex::Parser::NmapXMLStreamParser.new

	# Whenever the parser pulls a host out of the nmap results, store
	# it, along with any associated services, in the database.
	#
	# NOTE(review): this Proc reassigns the method-local +data+ variable.
	# That is harmless only because parse_stream() below has already
	# captured the XML string by the time the callback fires; renaming one
	# of the two would make this less fragile.
	parser.on_found_host = Proc.new { |h|
		data = {:workspace => wspace}
		# Prefer the IPv4 address, fall back to IPv6; a host with neither
		# cannot be recorded.
		if (h["addrs"].has_key?("ipv4"))
			addr = h["addrs"]["ipv4"]
		elsif (h["addrs"].has_key?("ipv6"))
			addr = h["addrs"]["ipv6"]
		else
			# Can't report it if it doesn't have an IP
			raise RuntimeError, "At least one IPv4 or IPv6 address is required"
		end
		next if bl.include? addr
		data[:host] = addr
		if (h["addrs"].has_key?("mac"))
			data[:mac] = h["addrs"]["mac"]
		end
		data[:state] = (h["status"] == "up") ? Msf::HostState::Alive : Msf::HostState::Dead

		if ( h["reverse_dns"] )
			data[:name] = h["reverse_dns"]
		end

		# Only report alive hosts with ports to speak of.
		if(data[:state] != Msf::HostState::Dead)
			if h["ports"].size > 0
				if fix_services
					# Skip hosts where every port state is "filtered" (or nil).
					port_states = h["ports"].map {|p| p["state"]}.reject {|p| p == "filtered"}
					next if port_states.compact.empty?
				end
				yield(:address,data[:host]) if block
				report_host(data)
				report_import_note(wspace,addr)
			end
		end

		# Record Nmap's OS fingerprint guess as a note, when present.
		if( h["os_vendor"] )
			note = {
				:workspace => wspace,
				:host => addr,
				:type => 'host.os.nmap_fingerprint',
				:data => {
					:os_vendor => h["os_vendor"],
					:os_family => h["os_family"],
					:os_version => h["os_version"],
					:os_accuracy => h["os_accuracy"]
				}
			}

			if(h["os_match"])
				note[:data][:os_match] = h['os_match']
			end

			report_note(note)
		end

		# Record the reported last boot time, when present.
		if (h["last_boot"])
			report_note(
				:workspace => wspace,
				:host => addr,
				:type => 'host.last_boot',
				:data => {
					:time => h["last_boot"]
				}
			)
		end

		# Put all the ports, regardless of state, into the db.
		h["ports"].each { |p|
			# Localhost port results are pretty unreliable -- if it's
			# unknown, it's no good (possibly Windows-only)
			if (
				p["state"] == "unknown" &&
				h["status_reason"] == "localhost-response"
			)
				next
			end
			# Fold product/version/extrainfo into a single info string.
			extra = ""
			extra << p["product"] + " " if p["product"]
			extra << p["version"] + " " if p["version"]
			extra << p["extrainfo"] + " " if p["extrainfo"]

			data = {}
			data[:workspace] = wspace
			if fix_services
				data[:proto] = nmap_msf_service_map(p["protocol"])
			else
				data[:proto] = p["protocol"].downcase
			end
			data[:port] = p["portid"].to_i
			data[:state] = p["state"]
			data[:host] = addr
			data[:info] = extra if not extra.empty?
			if p["name"] != "unknown"
				data[:name] = p["name"]
			end
			report_service(data)
		}
	}

	REXML::Document.parse_stream(data, parser)
end
|
|
|
|
|
2010-10-06 15:55:28 +00:00
|
|
|
def nmap_msf_service_map(proto)
	# Translate an Nmap service name into the name Metasploit uses for the
	# same service. Non-String inputs are returned untouched; unrecognized
	# names are simply downcased.
	return proto unless proto.kind_of? String
	lookup = {
		"msrpc"        => "dcerpc",
		"nfs-or-iis"   => "dcerpc",
		"netbios-ns"   => "netbios",
		"netbios-ssn"  => "smb",
		"microsoft-ds" => "smb",
		"ms-sql-s"     => "mssql",
		"ms-sql-m"     => "mssql-m",
		"postgresql"   => "postgres",
		"http-proxy"   => "http",
		"iiimsf"       => "db2"
	}
	name = proto.downcase
	lookup[name] || name
end
|
|
|
|
|
2010-05-05 19:45:48 +00:00
|
|
|
def report_import_note(wspace,addr)
	# Attach a 'host.imported' note to +addr+ recording which file the host
	# came from, unless the import metadata is missing or the filename looks
	# like one of our own auto-generated nmap files (msfe-nmapYYYYMMDD).
	return unless @import_filedata.kind_of?(Hash)
	fname = @import_filedata[:filename]
	return unless fname
	return if fname =~ /msfe-nmap[0-9]{8}/
	report_note(
		:workspace => wspace,
		:host => addr,
		:type => 'host.imported',
		:data => @import_filedata.merge(:time=> Time.now.utc)
	)
end
|
|
|
|
|
2010-01-07 19:06:29 +00:00
|
|
|
#
|
|
|
|
# Import Nessus NBE files
|
|
|
|
#
|
2010-06-04 14:57:58 +00:00
|
|
|
def import_nessus_nbe_file(args={})
	# Read a Nessus NBE report from disk and hand its contents to
	# import_nessus_nbe.
	#
	# args:
	#   :filename - path of the NBE file (required)
	#   :wspace   - workspace to import into (defaults to the current one)
	filename = args[:filename]
	wspace = args[:wspace] || workspace

	# Use the block form of File.open so the descriptor is closed even if
	# the read raises (the old code leaked the handle).
	data = File.open(filename, 'rb') { |f| f.read(f.stat.size) }
	import_nessus_nbe(args.merge(:data => data))
end
|
2010-05-20 14:08:29 +00:00
|
|
|
|
2010-06-08 19:16:20 +00:00
|
|
|
def import_nessus_nbe(args={}, &block)
	# Import a Nessus NBE (pipe-delimited) report passed as a String in
	# args[:data].
	#
	# args:
	#   :data      - the NBE report body
	#   :wspace    - workspace to report into (defaults to the current one)
	#   :blacklist - whitespace-separated addresses to skip
	# Yields (:address, addr) for each imported result when a block is given.
	data = args[:data]
	wspace = args[:wspace] || workspace
	bl = validate_ips(args[:blacklist]) ? args[:blacklist].split : []

	nbe_copy = data.dup
	# First pass, just to build the hostname -> address map from the DNS
	# resolution plugin (NASL 12053) results.
	addr_map = {}

	nbe_copy.each_line do |line|
		r = line.split('|')
		next if r[0] != 'results'
		next if r[4] != "12053"
		# BUG FIX: the old code assigned this field to the method-local
		# +data+ variable, clobbering the full NBE buffer before the second
		# pass below ever ran. Use a pass-local name instead.
		res = r[6]
		m = res.match(/([0-9\x2e]+) resolves as (.+)\x2e\\n/)
		next unless m # Skip malformed resolution entries instead of crashing.
		addr_map[m[2]] = m[1]
	end

	# Second pass: import every result line.
	data.each_line do |line|
		r = line.split('|')
		next if r[0] != 'results'
		hname = r[2]
		if addr_map[hname]
			addr = addr_map[hname]
		else
			addr = hname # Must be unresolved, probably an IP address.
		end
		port = r[3]
		nasl = r[4]
		type = r[5]
		result_text = r[6]

		# If there's no resolution, or if it's malformed, skip it.
		next unless ipv4_validator(addr)

		if bl.include? addr
			next
		else
			yield(:address,addr) if block
		end

		# Match the NBE types with the XML severity ratings
		case type
		# log messages don't actually have any data, they are just
		# complaints about not being able to perform this or that test
		# because such-and-such was missing
		when "Log Message"; next
		when "Security Hole"; severity = 3
		when "Security Warning"; severity = 2
		when "Security Note"; severity = 1
		# a severity 0 means there's no extra data, it's just an open port
		else; severity = 0
		end

		# NASL 11936 is the OS identification plugin; record its guess.
		if nasl == "11936"
			m = result_text.match(/The remote host is running (.*)\\n/)
			if m # Guard against malformed plugin output.
				report_note(
					:workspace => wspace,
					:host => addr,
					:type => 'host.os.nessus_fingerprint',
					:data => {
						:os => m[1].to_s.strip
					}
				)
			end
		end

		handle_nessus(wspace, addr, port, nasl, severity, result_text)
	end
end
|
|
|
|
|
|
|
|
#
|
|
|
|
# Of course they had to change the nessus format.
|
|
|
|
#
|
2010-06-08 19:16:20 +00:00
|
|
|
def import_openvas_xml(args={}, &block)
	# OpenVAS XML import is not implemented; this always raises
	# DBImportError. The argument handling mirrors the other import_*
	# methods so callers can invoke it uniformly before hitting the error.
	filename = args[:filename]
	wspace = args[:wspace] || workspace

	raise DBImportError.new("No OpenVAS XML support. Please submit a patch to msfdev[at]metasploit.com")
end
|
|
|
|
|
|
|
|
#
|
2010-01-14 12:57:26 +00:00
|
|
|
# Import Nessus XML v1 and v2 output
|
2010-01-07 19:06:29 +00:00
|
|
|
#
|
|
|
|
# Old versions of openvas exported this as well
|
|
|
|
#
|
2010-06-04 14:57:58 +00:00
|
|
|
def import_nessus_xml_file(args={})
	# Read a Nessus XML report from disk, detect whether it is the v1 or v2
	# format (v2 files contain the "NessusClientData_v2" tag), and dispatch
	# to the matching importer.
	#
	# args:
	#   :filename - path of the XML file (required)
	#   :wspace   - workspace to import into (defaults to the current one)
	filename = args[:filename]
	wspace = args[:wspace] || workspace

	# Use the block form of File.open so the descriptor is closed even if
	# the read raises (the old code leaked the handle).
	data = File.open(filename, 'rb') { |f| f.read(f.stat.size) }

	if data.index("NessusClientData_v2")
		import_nessus_xml_v2(args.merge(:data => data))
	else
		import_nessus_xml(args.merge(:data => data))
	end
end
|
2010-01-14 12:57:26 +00:00
|
|
|
|
2010-06-08 19:16:20 +00:00
|
|
|
def import_nessus_xml(args={}, &block)
	# Import a Nessus XML v1 report passed as a String in args[:data].
	#
	# args:
	#   :data      - the XML document body
	#   :wspace    - workspace to report into (defaults to the current one)
	#   :blacklist - whitespace-separated addresses to skip
	# Yields (:address, addr) for each imported host when a block is given.
	data = args[:data]
	wspace = args[:wspace] || workspace
	bl = validate_ips(args[:blacklist]) ? args[:blacklist].split : []

	doc = rexmlify(data)
	doc.elements.each('/NessusClientData/Report/ReportHost') do |host|

		addr = nil
		hname = nil
		os = nil
		# If the name is resolved, the Nessus plugin for DNS
		# resolution should be there. If not, fall back to the
		# HostName
		# NOTE(review): the match below assumes plugin 12053 output always
		# contains "<ip> resolves as"; malformed data would raise here.
		host.elements.each('ReportItem') do |item|
			next unless item.elements['pluginID'].text == "12053"
			addr = item.elements['data'].text.match(/([0-9\x2e]+) resolves as/)[1]
			hname = host.elements['HostName'].text
		end
		addr ||= host.elements['HostName'].text
		next unless ipv4_validator(addr) # Skip resolved names and SCAN-ERROR.
		if bl.include? addr
			next
		else
			yield(:address,addr) if block
		end

		hinfo = {
			:workspace => wspace,
			:host => addr
		}

		# Record the hostname
		hinfo.merge!(:name => hname.to_s.strip) if hname
		report_host(hinfo)

		# Record the OS
		os ||= host.elements["os_name"]
		if os
			report_note(
				:workspace => wspace,
				:host => addr,
				:type => 'host.os.nessus_fingerprint',
				:data => {
					:os => os.text.to_s.strip
				}
			)
		end

		# Hand every ReportItem (finding) off to the shared Nessus handler.
		host.elements.each('ReportItem') do |item|
			nasl = item.elements['pluginID'].text
			port = item.elements['port'].text
			data = item.elements['data'].text
			severity = item.elements['severity'].text

			handle_nessus(wspace, addr, port, nasl, severity, data)
		end
	end
end
|
|
|
|
|
2010-06-08 19:16:20 +00:00
|
|
|
def import_nessus_xml_v2(args={}, &block)
	# Import a Nessus XML v2 (NessusClientData_v2) report passed as a String
	# in args[:data], using the stream parser so large files do not have to
	# be held in memory as a DOM tree.
	#
	# args:
	#   :data      - the XML document body
	#   :wspace    - workspace to report into (defaults to the current one)
	#   :blacklist - whitespace-separated addresses to skip
	# Yields (:address, addr), (:os, os), (:port, port) and (:end, hname)
	# progress events when a block is given.
	data = args[:data]
	wspace = args[:wspace] || workspace
	bl = validate_ips(args[:blacklist]) ? args[:blacklist].split : []

	# Shape of the host hash produced by the stream parser:
	#@host = {
	#'hname' => nil,
	#'addr' => nil,
	#'mac' => nil,
	#'os' => nil,
	#'ports' => [ 'port' => { 'port' => nil,
	# 'svc_name' => nil,
	# 'proto' => nil,
	# 'severity' => nil,
	# 'nasl' => nil,
	# 'description' => nil,
	# 'cve' => [],
	# 'bid' => [],
	# 'xref' => []
	# }
	# ]
	#}
	parser = Rex::Parser::NessusXMLStreamParser.new
	parser.on_found_host = Proc.new { |host|

		# Fall back to the hostname when no address was reported.
		addr = host['addr'] || host['hname']

		next unless ipv4_validator(addr) # Catches SCAN-ERROR, among others.

		if bl.include? addr
			next
		else
			yield(:address,addr) if block
		end

		# Record the scanner's OS guess as a fingerprint note.
		os = host['os']
		yield(:os,os) if block
		if os

			report_note(
				:workspace => wspace,
				:host => addr,
				:type => 'host.os.nessus_fingerprint',
				:data => {
					:os => os.to_s.strip
				}
			)
		end

		hname = host['hname']

		if hname
			report_host(
				:workspace => wspace,
				:host => addr,
				:name => hname.to_s.strip
			)
		end

		mac = host['mac']

		if mac
			report_host(
				:workspace => wspace,
				:host => addr,
				:mac => mac.to_s.strip.upcase
			)
		end

		# Import each finding; port 0 entries are host-level results with no
		# service attached.
		# NOTE(review): this compares item['port'] against Integer 0 -- if
		# the parser yields the port as a String the guard never fires;
		# confirm against NessusXMLStreamParser.
		host['ports'].each do |item|
			next if item['port'] == 0
			msf = nil
			nasl = item['nasl'].to_s
			port = item['port'].to_s
			proto = item['proto'] || "tcp"
			name = item['svc_name']
			severity = item['severity']
			description = item['description']
			cve = item['cve']
			bid = item['bid']
			xref = item['xref']
			msf = item['msf']

			yield(:port,port) if block

			handle_nessus_v2(wspace, addr, port, proto, hname, nasl, severity, description, cve, bid, xref, msf)

		end
		yield(:end,hname) if block
	}

	REXML::Document.parse_stream(data, parser)

end
|
2010-01-14 15:26:20 +00:00
|
|
|
|
2010-03-28 23:02:28 +00:00
|
|
|
#
|
|
|
|
# Import Qualys' xml output
|
|
|
|
#
|
2010-06-04 14:57:58 +00:00
|
|
|
def import_qualys_xml_file(args={})
	# Read a Qualys XML report from disk and hand its contents to
	# import_qualys_xml.
	#
	# args:
	#   :filename - path of the XML file (required)
	#   :wspace   - workspace to import into (defaults to the current one)
	filename = args[:filename]
	wspace = args[:wspace] || workspace

	# Use the block form of File.open so the descriptor is closed even if
	# the read raises (the old code leaked the handle).
	data = File.open(filename, 'rb') { |f| f.read(f.stat.size) }
	import_qualys_xml(args.merge(:data => data))
end
|
|
|
|
|
2010-06-08 19:16:20 +00:00
|
|
|
def import_qualys_xml(args={}, &block)
	# Import a Qualys scan report (XML) passed as a String in args[:data].
	#
	# args:
	#   :data      - the XML document body
	#   :wspace    - workspace to report into (defaults to the current one)
	#   :blacklist - whitespace-separated addresses to skip
	# Yields (:address, addr) for each imported host when a block is given.
	data = args[:data]
	wspace = args[:wspace] || workspace
	bl = validate_ips(args[:blacklist]) ? args[:blacklist].split : []

	doc = rexmlify(data)
	doc.elements.each('/SCAN/IP') do |host|
		addr = host.attributes['value']
		if bl.include? addr
			next
		else
			yield(:address,addr) if block
		end
		hname = host.attributes['name'] || ''

		report_host(:workspace => wspace, :host => addr, :name => hname, :state => Msf::HostState::Alive)

		# Record the scanner's OS guess as a fingerprint note.
		if host.elements["OS"]
			hos = host.elements["OS"].text
			report_note(
				:workspace => wspace,
				:host => addr,
				:type => 'host.os.qualys_fingerprint',
				:data => {
					:os => hos
				}
			)
		end

		# Open TCP Services List (Qualys ID 82023)
		# The RESULT text is a tab-separated table; prefer the description
		# column as the service name, fall back to the protocol column when
		# it is missing or 'unknown'.
		services_tcp = host.elements["SERVICES/CAT/SERVICE[@number='82023']/RESULT"]
		if services_tcp
			services_tcp.text.scan(/([0-9]+)\t(.*?)\t.*?\t([^\t\n]*)/) do |match|
				if match[2] == nil or match[2].strip == 'unknown'
					name = match[1].strip
				else
					name = match[2].strip
				end
				handle_qualys(wspace, addr, match[0].to_s, 'tcp', 0, nil, nil, name)
			end
		end
		# Open UDP Services List (Qualys ID 82004)
		services_udp = host.elements["SERVICES/CAT/SERVICE[@number='82004']/RESULT"]
		if services_udp
			services_udp.text.scan(/([0-9]+)\t(.*?)\t.*?\t([^\t\n]*)/) do |match|
				if match[2] == nil or match[2].strip == 'unknown'
					name = match[1].strip
				else
					name = match[2].strip
				end
				handle_qualys(wspace, addr, match[0].to_s, 'udp', 0, nil, nil, name)
			end
		end

		# VULNS are confirmed, PRACTICES are unconfirmed vulnerabilities
		host.elements.each('VULNS/CAT | PRACTICES/CAT') do |cat|
			port = cat.attributes['port']
			protocol = cat.attributes['protocol']
			cat.elements.each('VULN | PRACTICE') do |vuln|
				refs = []
				qid = vuln.attributes['number']
				severity = vuln.attributes['severity']
				vuln.elements.each('VENDOR_REFERENCE_LIST/VENDOR_REFERENCE') do |ref|
					refs.push(ref.elements['ID'].text.to_s)
				end
				# NOTE(review): this regex assumes a fixed-width 9-character
				# CVE suffix (e.g. 2010-1234) and raises on anything else --
				# it will not handle the longer post-2014 CVE ID format.
				vuln.elements.each('CVE_ID_LIST/CVE_ID') do |ref|
					refs.push('CVE-' + /C..-([0-9\-]{9})/.match(ref.elements['ID'].text.to_s)[1])
				end
				vuln.elements.each('BUGTRAQ_ID_LIST/BUGTRAQ_ID') do |ref|
					refs.push('BID-' + ref.elements['ID'].text.to_s)
				end

				handle_qualys(wspace, addr, port, protocol, qid, severity, refs)
			end
		end
	end
end
|
|
|
|
|
2010-06-04 14:57:58 +00:00
|
|
|
def import_ip_list_file(args={})
	# Read a newline-separated list of IP addresses from disk and hand it
	# to import_ip_list.
	#
	# args:
	#   :filename - path of the list file (required)
	#   :wspace   - workspace to import into (defaults to the current one)
	filename = args[:filename]
	wspace = args[:wspace] || workspace

	# Use the block form of File.open so the descriptor is closed even if
	# the read raises (the old code leaked the handle).
	data = File.open(filename, 'rb') { |f| f.read(f.stat.size) }
	import_ip_list(args.merge(:data => data))
end
|
2010-03-08 14:17:34 +00:00
|
|
|
|
2010-06-08 19:16:20 +00:00
|
|
|
def import_ip_list(args={}, &block)
	# Import a newline-separated list of IP addresses (args[:data]) as
	# alive hosts, skipping any that appear on the blacklist.
	# Yields (:address, ip) for each imported address when a block is given.
	data = args[:data]
	wspace = args[:wspace] || workspace
	bl = validate_ips(args[:blacklist]) ? args[:blacklist].split : []

	data.each_line do |raw|
		address = raw.strip
		next if bl.include? address
		yield(:address,address) if block
		find_or_create_host(:workspace => wspace, :host=> address, :state => Msf::HostState::Alive)
	end
end
|
|
|
|
|
2010-06-04 14:57:58 +00:00
|
|
|
def import_amap_log_file(args={})
	# Read an Amap log from disk, detect whether it is the plain (-o) or
	# machine-readable (-m) format, and dispatch to the matching importer.
	#
	# args:
	#   :filename - path of the log file (required)
	#   :wspace   - workspace to import into (defaults to the current one)
	#
	# Raises DBImportError if the file is neither Amap format.
	filename = args[:filename]
	wspace = args[:wspace] || workspace
	# Use the block form of File.open so the descriptor is closed even if
	# the read raises (the old code leaked the handle).
	data = File.open(filename, 'rb') { |f| f.read(f.stat.size) }
	case import_filetype_detect(data)
	when :amap_log
		import_amap_log(args.merge(:data => data))
	when :amap_mlog
		import_amap_mlog(args.merge(:data => data))
	else
		raise DBImportError.new("Could not determine file type")
	end
end
|
|
|
|
|
|
|
|
def import_amap_log(args={}, &block)
	# Import a plain-format Amap log (args[:data]): one
	# "Protocol on <addr>:<port>/<proto> matches <name>" line per service.
	# Yields (:address, addr) for each imported host when a block is given.
	data = args[:data]
	wspace = args[:wspace] || workspace
	bl = validate_ips(args[:blacklist]) ? args[:blacklist].split : []

	data.each_line do |line|
		next if line =~ /^#/
		m = line.match(/^Protocol on ([^:]+):([^\x5c\x2f]+)[\x5c\x2f](tcp|udp) matches (.*)$/)
		next unless m
		addr = m[1]
		next if bl.include? addr
		port = m[2].to_i
		proto = m[3].downcase
		name = m[4]
		host = find_or_create_host(:workspace => wspace, :host => addr, :state => Msf::HostState::Alive)
		next if not host
		yield(:address,addr) if block
		svc = {
			:workspace => wspace,
			:host => host,
			:proto => proto,
			:port => port
		}
		# "unidentified" means Amap could not name the service.
		svc[:name] = name unless name == "unidentified"
		find_or_create_service(svc)
	end
end
|
2010-03-28 23:02:28 +00:00
|
|
|
|
2010-06-08 19:16:20 +00:00
|
|
|
def import_amap_mlog(args={}, &block)
	# Import a machine-readable (-m) Amap log (args[:data]): colon-delimited
	# records of addr:port:proto:status:...:name. Only "open" services are
	# recorded. Yields (:address, addr) for each imported host when a block
	# is given.
	data = args[:data]
	wspace = args[:wspace] || workspace
	bl = validate_ips(args[:blacklist]) ? args[:blacklist].split : []

	data.each_line do |line|
		next if line =~ /^#/
		fields = line.split(':')
		next if fields.length < 6

		addr = fields[0]
		next if bl.include? addr
		port = fields[1].to_i
		proto = fields[2].downcase
		status = fields[3]
		name = fields[5]
		next if status != "open"

		host = find_or_create_host(:workspace => wspace, :host => addr, :state => Msf::HostState::Alive)
		next if not host
		yield(:address,addr) if block
		svc = {
			:workspace => wspace,
			:host => host,
			:proto => proto,
			:port => port
		}
		# "unidentified" means Amap could not name the service.
		svc[:name] = name unless name == "unidentified"
		find_or_create_service(svc)
	end
end
|
|
|
|
|
2010-09-28 17:52:48 +00:00
|
|
|
def unserialize_object(xml_elem, allow_yaml = false)
	# Convert a serialized XML element body back into a Ruby object.
	#
	# The element text is expected to be either a base64-encoded Marshal
	# blob or (when allow_yaml is true) a YAML document. Anything that
	# cannot be decoded falls back to the raw (stripped) string.
	#
	# xml_elem   - any object responding to #text
	# allow_yaml - whether to attempt YAML deserialization for non-base64
	#              payloads and as a last-resort fallback
	#
	# Returns the deserialized object, the raw string, or nil for an
	# empty element.
	#
	# SECURITY NOTE: Marshal.load (and YAML deserialization of arbitrary
	# documents) can instantiate attacker-controlled objects -- only feed
	# this method data from trusted sources.
	string = xml_elem.text.to_s.strip
	# (The old guards for a nil/non-String value were dead code: after
	# .to_s, +string+ is always a non-nil String.)
	return nil if string.empty?

	begin
		# Validate that it is properly formed base64 first
		if string.gsub(/\s+/, '') =~ /^([a-z0-9A-Z\+\/=]+)$/
			Marshal.load($1.unpack("m")[0])
		else
			if allow_yaml
				begin
					YAML.load(string)
				rescue
					dlog("Badly formatted YAML: '#{string}'")
					string
				end
			else
				string
			end
		end
	rescue ::Exception => e
		# Deliberately broad: any decode failure degrades to the raw
		# string (optionally trying YAML first).
		if allow_yaml
			YAML.load(string) rescue string
		else
			string
		end
	end
end
|
|
|
|
|
2010-10-02 18:48:29 +00:00
|
|
|
|
2010-01-07 19:06:29 +00:00
|
|
|
protected
|
2010-01-14 15:26:20 +00:00
|
|
|
|
2010-01-07 19:06:29 +00:00
|
|
|
#
# Shared parsing/handling used by the Nessus NBE and NESSUS v1 import
# methods. Reports the host, its service, and (when a plugin id is
# present) a vulnerability with CVE/BID/other references extracted from
# the plugin output.
#
def handle_nessus(wspace, addr, port, nasl, severity, data)
	# The port section looks like:
	# http (80/tcp)
	match = port.match(/^([^\(]+)\((\d+)\/([^\)]+)\)/)
	return unless match

	report_host(:workspace => wspace, :host => addr, :state => Msf::HostState::Alive)

	svc_name = match[1].strip
	port     = match[2].to_i
	proto    = match[3].downcase

	info = { :workspace => wspace, :host => addr, :port => port, :proto => proto }
	# Skip useless names: "unknown" or Nessus guesses ending in "?"
	info[:name] = svc_name if svc_name != "unknown" && svc_name[-1,1] != "?"
	report_service(info)

	# Plugin id of nil/false means there is no finding to record.
	return unless nasl

	# Nessus escapes newlines in plugin output as the two characters "\n"
	data.gsub!("\\n", "\n")

	refs = []

	if (m = data.match(/^CVE : (.*)$/))
		m[1].gsub(/C(VE|AN)\-/, '').split(',').each do |r|
			refs << ('CVE-' + r.strip)
		end
	end

	if (m = data.match(/^BID : (.*)$/))
		m[1].split(',').each do |r|
			refs << ('BID-' + r.strip)
		end
	end

	if (m = data.match(/^Other references : (.*)$/))
		m[1].split(',').each do |r|
			ref_id, ref_val = r.strip.split(':')
			refs << (ref_val ? (ref_id + '-' + ref_val) : ref_id)
		end
	end

	nss = 'NSS-' + nasl.to_s

	report_vuln(
		:workspace => wspace,
		:host      => addr,
		:port      => port,
		:proto     => proto,
		:name      => nss,
		:info      => data,
		:refs      => refs
	)
end
|
|
|
|
|
2010-01-14 12:57:26 +00:00
|
|
|
#
# NESSUS v2 file format has a dramatically different layout
# for ReportItem data. Reports the host, its service (when a real port
# is present), and a vulnerability with normalized CVE/BID/xref/MSF
# references.
#
def handle_nessus_v2(wspace,addr,port,proto,name,nasl,severity,description,cve,bid,xref,msf)
	# Every report item implies the host is up.
	report_host(:workspace => wspace, :host => addr, :state => Msf::HostState::Alive)

	info = { :workspace => wspace, :host => addr, :port => port, :proto => proto }
	# Skip useless names: "unknown" or Nessus guesses ending in "?"
	if name != "unknown" and name[-1,1] != "?"
		info[:name] = name
	end

	# Port 0 means the finding applies to the host itself, not a service.
	if port.to_i != 0
		report_service(info)
	end

	# Plugin id "0" is a placeholder, not a real finding.
	return if nasl == "0"

	refs = []

	cve.each do |r|
		# Normalize CVE/CAN ids with a non-destructive gsub. The old in-place
		# gsub! mutated the caller's strings and silently failed to strip the
		# prefix when the element was not already a String (the temp created
		# by to_s was discarded), yielding ids like "CVE-CVE-...".
		refs.push('CVE-' + r.to_s.gsub(/C(VE|AN)\-/, ''))
	end if cve

	bid.each do |r|
		refs.push('BID-' + r.to_s)
	end if bid

	xref.each do |r|
		# Cross references arrive as "SOURCE:ID"; normalize to "SOURCE-ID".
		ref_id, ref_val = r.to_s.split(':')
		ref_val ? refs.push(ref_id + '-' + ref_val) : refs.push(ref_id)
	end if xref

	msfref = "MSF-" << msf if msf
	refs.push msfref if msfref

	nss = 'NSS-' + nasl

	vuln = {
		:workspace => wspace,
		:host => addr,
		:name => nss,
		:info => description ? description : "",
		:refs => refs
	}

	# Only attach port/proto when the finding is service-scoped.
	if port.to_i != 0
		vuln[:port]  = port
		vuln[:proto] = proto
	end

	report_vuln(vuln)
end
|
|
|
|
|
2010-10-14 18:54:35 +00:00
|
|
|
|
2010-03-28 23:02:28 +00:00
|
|
|
#
# Qualys report parsing/handling. Reports the service (when fully
# specified) and a QUALYS-<qid> vulnerability for real findings.
#
def handle_qualys(wspace, addr, port, protocol, qid, severity, refs, name=nil)
	port = port.to_i

	info = { :workspace => wspace, :host => addr, :port => port, :proto => protocol }
	if name and name != 'unknown'
		info[:name] = name
	end

	if info[:host] && info[:port] && info[:proto]
		report_service(info)
	end

	# QID 0 is a placeholder, not a real vulnerability. qid arrives as a
	# String (it is concatenated below), so compare numerically -- the old
	# `qid == 0` could never be true and let bogus QUALYS-0 records through.
	return if qid.to_i == 0

	if addr
		report_vuln(
			:workspace => wspace,
			:host => addr,
			:port => port,
			:proto => protocol,
			:name => 'QUALYS-' + qid,
			:refs => refs
		)
	end
end
|
|
|
|
|
2010-01-07 19:06:29 +00:00
|
|
|
# Extract normalized reference identifiers from a NeXpose "simple XML"
# vulnerability element.
#
# @param vuln [REXML::Element] element with id/resultCode attributes and
#   child <id> reference elements
# @return [Array<String>, nil] reference names (always ending with the
#   NEXPOSE-<id> self reference), or nil when the result code is not
#   vuln-exploitable ("VE") or vuln-version ("VV")
def process_nexpose_data_sxml_refs(vuln)
	vid = vuln.attributes['id'].to_s.downcase
	vry = vuln.attributes['resultCode'].to_s.upcase

	# Only process vuln-exploitable and vuln-version statuses
	return if vry !~ /^V[VE]$/

	# (A second, redundant `refs = []` preceded the status check in the old
	# code; a single initialization after the guard is sufficient.)
	refs = []
	vuln.elements.each('id') do |ref|
		rtyp = ref.attributes['type'].to_s.upcase
		rval = ref.text.to_s.strip
		case rtyp
		when 'CVE'
			# Old CAN-prefixed candidates are renamed to CVE
			refs << rval.gsub('CAN', 'CVE')
		when 'MS' # obsolete?
			refs << "MSB-MS-#{rval}"
		else
			refs << "#{rtyp}-#{rval}"
		end
	end

	refs << "NEXPOSE-#{vid}"
	refs
end
|
|
|
|
|
|
|
|
#
# NeXpose vuln lookup: finds the vulnerability definition for +vid+ in
# the raw report document, accumulates its BID/CVE/MS references into
# the shared +refs+ hash, and records the vulnerability against +host+
# (and optionally +serv+).
#
def nexpose_vuln_lookup(wspace, doc, vid, refs, host, serv=nil)
	# Match the definition by its id attribute. (The previous XPath carried
	# a stray trailing "]" -- an invalid predicate that broke the lookup.)
	doc.elements.each("/NexposeReport/VulnerabilityDefinitions/vulnerability[@id = '#{vid}']") do |vulndef|

		title = vulndef.attributes['title']
		pciSeverity = vulndef.attributes['pciSeverity']
		cvss_score = vulndef.attributes['cvssScore']
		cvss_vector = vulndef.attributes['cvssVector']

		# Collect normalized references; the hash de-duplicates across calls.
		vulndef.elements['references'].elements.each('reference') do |ref|
			if ref.attributes['source'] == 'BID'
				refs[ 'BID-' + ref.text ] = true
			elsif ref.attributes['source'] == 'CVE'
				# ref.text is CVE-$ID
				refs[ ref.text ] = true
			elsif ref.attributes['source'] == 'MS'
				refs[ 'MSB-MS-' + ref.text ] = true
			end
		end

		refs[ 'NEXPOSE-' + vid.downcase ] = true

		vuln = find_or_create_vuln(
			:workspace => wspace,
			:host => host,
			:service => serv,
			:name => 'NEXPOSE-' + vid.downcase,
			:info => title)

		rids = []
		refs.keys.each do |r|
			rids << find_or_create_ref(:name => r)
		end

		# Associate only the refs not already attached to this vuln.
		vuln.refs << (rids - vuln.refs)
	end
end
|
|
|
|
|
2006-03-21 04:37:48 +00:00
|
|
|
end
|
|
|
|
|
2008-10-23 04:23:54 +00:00
|
|
|
end
|
2009-12-13 05:24:48 +00:00
|
|
|
|