# frozen_string_literal: true
require_dependency "backup_restore/local_backup_store"
require_dependency "backup_restore/backup_store"
module Jobs
  # Merges the chunks of a chunked backup upload into the final backup file
  # once every chunk has been received, then pushes the refreshed backup file
  # list to staff clients so the admin UI updates.
  class BackupChunksMerger < Jobs::Base
    sidekiq_options queue: 'critical', retry: false

    # @param args [Hash] job arguments:
    #   - :filename   [String]  name of the final backup file
    #   - :identifier [String]  unique identifier of the chunked upload
    #   - :chunks     [Integer] number of chunks to merge (coerced via to_i)
    # @raise [Discourse::InvalidParameters] if filename/identifier is blank
    #   or chunks is not a positive integer
    def execute(args)
      filename = args[:filename]
      identifier = args[:identifier]
      chunks = args[:chunks].to_i

      raise Discourse::InvalidParameters.new(:filename) if filename.blank?
      raise Discourse::InvalidParameters.new(:identifier) if identifier.blank?
      raise Discourse::InvalidParameters.new(:chunks) if chunks <= 0

      # final destination of the merged backup, plus a temporary path used
      # while the merge is in progress
      backup_path = "#{BackupRestore::LocalBackupStore.base_directory}/#{filename}"
      tmp_backup_path = "#{backup_path}.tmp"

      # path to the directory holding the uploaded chunks (derived from the
      # location of chunk 0)
      tmp_directory = File.dirname(BackupRestore::LocalBackupStore.chunk_path(identifier, filename, 0))

      # merge all chunks into the final backup file
      HandleChunkUpload.merge_chunks(
        chunks,
        upload_path: backup_path,
        tmp_upload_path: tmp_backup_path,
        identifier: identifier,
        filename: filename,
        tmp_directory: tmp_directory
      )

      # push an updated list of backup files to staff clients
      store = BackupRestore::BackupStore.create
      data = ActiveModel::ArraySerializer.new(store.files, each_serializer: BackupFileSerializer).as_json
      MessageBus.publish("/admin/backups", data, user_ids: User.staff.pluck(:id))
    end
  end
end