Add framework.uuid_db as a JSONHashFile

bug/bundler_fix
HD Moore 2015-05-20 00:28:32 -05:00
parent 9d7e54f360
commit 513a81e340
3 changed files with 103 additions and 116 deletions

View File

@ -73,7 +73,7 @@ class Framework
require 'msf/core/plugin_manager'
require 'msf/core/db_manager'
require 'msf/core/event_dispatcher'
require 'rex/json_hash_file'
#
# Creates an instance of the framework context.
@ -91,6 +91,7 @@ class Framework
self.datastore = DataStore.new
self.jobs = Rex::JobContainer.new
self.plugins = PluginManager.new(self)
self.uuid_db = Rex::JSONHashFile.new(::File.join(Msf::Config.config_directory, "payloads.json"))
# Configure the thread factory
Rex::ThreadFactory.provider = Metasploit::Framework::ThreadFactoryProvider.new(framework: self)
@ -187,6 +188,12 @@ class Framework
# unloading of plugins.
#
attr_reader :plugins
#
# The framework instance's payload uuid database. The payload uuid
# database is used to record and match the unique ID values embedded
# into generated payloads.
#
attr_reader :uuid_db
# The framework instance's db manager. The db manager
# maintains the database db and handles db events
@ -243,6 +250,7 @@ protected
attr_writer :jobs # :nodoc:
attr_writer :plugins # :nodoc:
attr_writer :db # :nodoc:
attr_writer :uuid_db # :nodoc:
end
class FrameworkEventSubscriber

View File

@ -1,115 +0,0 @@
# -*- coding: binary -*-
require 'msf/core'
require 'msf/core/payload/uuid'
require 'json'
#
# This module provides a flat file database interface for managing UUIDs
#
# Flat-file (JSON) database used to record generated payload UUID and
# URL records so that incoming connections can be matched back to the
# payload that produced them. Access is serialized with an in-process
# Mutex plus an exclusive advisory file lock so multiple threads and
# processes can safely share one backing file.
class Msf::Payload::UUID::DB

  attr_accessor :info, :path

  # @param path [String] path to the JSON database file (created on demand)
  def initialize(path)
    self.info = {}
    self.path = path
    @lock = Mutex.new
    @last = 0
    reload
  end

  # Save the file, but prevent thread & process contention
  #
  # @param action [Hash] optional mutation applied while the file is locked:
  #   :register_uuid / :register_url (with :params) add records,
  #   :remove_uuid / :remove_url delete records.
  def save(action={})
    @lock.synchronize do
      ::File.open(path, ::File::RDWR|::File::CREAT) do |fd|
        fd.flock(::File::LOCK_EX)

        # Reload and merge if the file has changed recently, letting the
        # in-memory entries win on conflict
        if fd.stat.mtime.to_f > @last
          self.info = parse_data(fd.read).merge(self.info)
        end

        if action[:register_uuid]
          params = (action[:params] || {}).merge({ type: 'uuid' })
          self.info[ action[:register_uuid] ] = params
        end

        if action[:register_url]
          params = (action[:params] || {}).merge({ type: 'url' })
          # Bugfix: previously read action[:register_uurl] (typo), which
          # stored the params under a nil key instead of the URL
          self.info[ action[:register_url] ] = params
        end

        # Bugfix: previously read action[:delete_uuid], which never matched
        # the :remove_uuid key passed in by #remove_uuid
        if action[:remove_uuid]
          self.info.delete(action[:remove_uuid])
        end

        # Bugfix: :remove_url was previously ignored entirely, making
        # #remove_url a silent no-op
        if action[:remove_url]
          self.info.delete(action[:remove_url])
        end

        fd.rewind
        fd.write(JSON.pretty_generate(self.info))
        fd.sync
        fd.truncate(fd.pos)
        @last = Time.now.to_f
      end
    end
  end

  # Load the file from disk, replacing the in-memory state
  def load
    @lock.synchronize do
      ::File.open(path, ::File::RDWR|::File::CREAT) do |fd|
        fd.flock(::File::LOCK_EX)
        @last = fd.stat.mtime.to_f
        self.info = parse_data(fd.read(fd.stat.size))
      end
    end
  end

  # Reload if the file has changed since the last sync
  def reload
    # File.exists? is deprecated in favor of File.exist?
    return unless ::File.exist?(path)
    return unless ::File.stat(path).mtime.to_f > @last
    load
  end

  # Record a UUID along with its associated parameters
  def register_uuid(uuid, params)
    save(register_uuid: uuid, params: params)
  end

  # Remove a previously registered UUID
  def remove_uuid(uuid)
    save(remove_uuid: uuid)
  end

  # Look up the parameters for a UUID, refreshing from disk first
  def find_uuid(uuid)
    reload
    self.info[uuid]
  end

  # Record a URL along with its associated parameters
  def register_url(url, params)
    save(register_url: url, params: params)
  end

  # Remove a previously registered URL
  def remove_url(url)
    save(remove_url: url)
  end

  # Look up the parameters for a URL, refreshing from disk first
  def find_url(url)
    reload
    self.info[url]
  end

private

  # Parse raw JSON content, treating blank input as an empty hash
  #
  # @raise [JSON::ParserError] when the file content is corrupt
  def parse_data(data)
    return {} if data.to_s.strip.length == 0
    begin
      JSON.parse(data)
    rescue JSON::ParserError => e
      # TODO: Figure out the appropriate error handling path
      raise e
    end
  end
end

94
lib/rex/json_hash_file.rb Normal file
View File

@ -0,0 +1,94 @@
# -*- coding: binary -*-
require 'json'
#
# This class provides a thread-friendly hash file store in JSON format
#
module Rex

# Thread-friendly hash store persisted to disk as pretty-printed JSON.
# Every operation is serialized through an in-process Mutex and an
# exclusive advisory file lock, and newer on-disk data is merged back
# into memory whenever the file's mtime advances past our last sync,
# so multiple threads (and processes) can share one backing file.
class JSONHashFile

  attr_accessor :path

  # @param path [String] backing JSON file, created if it does not exist
  def initialize(path)
    self.path = path
    @lock = Mutex.new
    @hash = {}
    @last = 0
    transaction
  end

  # Fetch the value stored under +key+, refreshing from disk first.
  def [](key)
    transaction
    @hash[key]
  end

  # Store +val+ under +key+ and persist the change.
  def []=(key, val)
    transaction { @hash[key] = val }
  end

  # All keys currently known, refreshed from disk first.
  def keys
    transaction
    @hash.keys
  end

  # Remove +key+ and persist the change; returns the removed value.
  def delete(key)
    transaction { @hash.delete(key) }
  end

  # Drop every entry and persist the now-empty hash.
  def clear
    transaction { @hash.clear }
  end

private

  # Serialize access to the backing file (save/load without thread or
  # process contention). Always merges newer on-disk data into memory,
  # letting in-memory entries win on conflict; when a block is given,
  # runs it under the lock and writes the updated hash back to disk.
  # Returns the block's value (nil when no block is given).
  def transaction(&block)
    @lock.synchronize do
      ::File.open(path, ::File::RDWR | ::File::CREAT) do |fd|
        fd.flock(::File::LOCK_EX)

        # Pick up changes written by other holders of the lock since we
        # last synced
        if fd.stat.mtime.to_f > @last
          decode(fd.read).merge(@hash).each_pair do |k, v|
            @hash[k] = v
          end
        end

        res = nil

        # Update the file on disk only when new data may have been written
        if block
          res = block.call
          fd.rewind
          fd.write(JSON.pretty_generate(@hash))
          fd.sync
          fd.truncate(fd.pos)
        end

        @last = fd.stat.mtime.to_f
        res
      end
    end
  end

  # Parse raw JSON content; blank or corrupt input yields an empty hash.
  def decode(raw)
    return {} if raw.to_s.strip.length == 0
    begin
      JSON.parse(raw)
    rescue JSON::ParserError
      # NOTE(review): corrupt content is silently discarded here; a log
      # call (elog) was left commented out in the original for the same
      # situation.
      {}
    end
  end

end
end