Revert "FEATURE: Support backup uploads/downloads directly to/from S3."

This reverts commit c29a4dddc1.

We're doing a beta bump soon, so un-revert this after that is done.
Guo Xiang Tan
2018-10-11 11:08:12 +08:00
parent c92bda95e1
commit 3c59106bac
52 changed files with 419 additions and 1078 deletions

View File

@@ -1,5 +1,5 @@
require_dependency "backup_restore/backuper"
require_dependency "backup_restore/restorer"
require "backup_restore/backuper"
require "backup_restore/restorer"
module BackupRestore

View File

@@ -1,74 +0,0 @@
-module BackupRestore
-  # @abstract
-  class BackupStore
-    class BackupFileExists < RuntimeError; end
-    class StorageError < RuntimeError; end
-    # @return [BackupStore]
-    def self.create(opts = {})
-      case SiteSetting.backup_location
-      when BackupLocationSiteSetting::LOCAL
-        require_dependency "backup_restore/local_backup_store"
-        BackupRestore::LocalBackupStore.new(opts)
-      when BackupLocationSiteSetting::S3
-        require_dependency "backup_restore/s3_backup_store"
-        BackupRestore::S3BackupStore.new(opts)
-      end
-    end
-    # @return [Array<BackupFile>]
-    def files
-      unsorted_files.sort_by { |file| -file.last_modified.to_i }
-    end
-    # @return [BackupFile]
-    def latest_file
-      files.first
-    end
-    def delete_old
-      return unless cleanup_allowed?
-      return if (backup_files = files).size <= SiteSetting.maximum_backups
-      backup_files[SiteSetting.maximum_backups..-1].each do |file|
-        delete_file(file.filename)
-      end
-    end
-    def remote?
-      fail NotImplementedError
-    end
-    # @return [BackupFile]
-    def file(filename, include_download_source: false)
-      fail NotImplementedError
-    end
-    def delete_file(filename)
-      fail NotImplementedError
-    end
-    def download_file(filename, destination, failure_message = nil)
-      fail NotImplementedError
-    end
-    def upload_file(filename, source_path, content_type)
-      fail NotImplementedError
-    end
-    def generate_upload_url(filename)
-      fail NotImplementedError
-    end
-    private
-    # @return [Array<BackupFile>]
-    def unsorted_files
-      fail NotImplementedError
-    end
-    def cleanup_allowed?
-      true
-    end
-  end
-end

View File

@@ -1,5 +1,4 @@
require "disk_space"
require "mini_mime"
require 'disk_space'
module BackupRestore
@@ -11,7 +10,6 @@ module BackupRestore
       @client_id = opts[:client_id]
       @publish_to_message_bus = opts[:publish_to_message_bus] || false
       @with_uploads = opts[:with_uploads].nil? ? true : opts[:with_uploads]
-      @filename_override = opts[:filename]
       ensure_no_operation_is_running
       ensure_we_have_a_user
@@ -44,7 +42,6 @@ module BackupRestore
log "Finalizing backup..."
@with_uploads ? create_archive : move_dump_backup
upload_archive
after_create_hook
rescue SystemExit
@@ -55,9 +52,9 @@ module BackupRestore
       @success = false
     else
       @success = true
-      @backup_filename
+      File.join(@archive_directory, @backup_filename)
     ensure
-      delete_old
+      remove_old
       clean_up
       notify_user
       log "Finished!"
@@ -78,14 +75,12 @@ module BackupRestore
     def initialize_state
       @success = false
-      @store = BackupRestore::BackupStore.create
       @current_db = RailsMultisite::ConnectionManagement.current_db
       @timestamp = Time.now.strftime("%Y-%m-%d-%H%M%S")
       @tmp_directory = File.join(Rails.root, "tmp", "backups", @current_db, @timestamp)
       @dump_filename = File.join(@tmp_directory, BackupRestore::DUMP_FILE)
-      @archive_directory = BackupRestore::LocalBackupStore.base_directory(@current_db)
-      filename = @filename_override || "#{SiteSetting.title.parameterize}-#{@timestamp}"
-      @archive_basename = File.join(@archive_directory, "#{filename}-#{BackupRestore::VERSION_PREFIX}#{BackupRestore.current_version}")
+      @archive_directory = File.join(Rails.root, "public", "backups", @current_db)
+      @archive_basename = File.join(@archive_directory, "#{SiteSetting.title.parameterize}-#{@timestamp}-#{BackupRestore::VERSION_PREFIX}#{BackupRestore.current_version}")
       @backup_filename =
         if @with_uploads
@@ -200,10 +195,8 @@ module BackupRestore
     def move_dump_backup
       log "Finalizing database dump file: #{@backup_filename}"
-      archive_filename = File.join(@archive_directory, @backup_filename)
       Discourse::Utils.execute_command(
-        'mv', @dump_filename, archive_filename,
+        'mv', @dump_filename, File.join(@archive_directory, @backup_filename),
         failure_message: "Failed to move database dump file."
       )
@@ -250,30 +243,17 @@ module BackupRestore
       Discourse::Utils.execute_command('gzip', '-5', tar_filename, failure_message: "Failed to gzip archive.")
     end
-    def upload_archive
-      return unless @store.remote?
-      log "Uploading archive..."
-      content_type = MiniMime.lookup_by_filename(@backup_filename).content_type
-      archive_path = File.join(@archive_directory, @backup_filename)
-      @store.upload_file(@backup_filename, archive_path, content_type)
-    ensure
-      log "Removing archive from local storage..."
-      FileUtils.remove_file(archive_path, force: true)
-    end
     def after_create_hook
       log "Executing the after_create_hook for the backup..."
-      DiscourseEvent.trigger(:backup_created)
+      backup = Backup.create_from_filename(@backup_filename)
+      backup.after_create_hook
     end
-    def delete_old
-      return if Rails.env.development?
-      log "Deleting old backups..."
-      @store.delete_old
+    def remove_old
+      log "Removing old backups..."
+      Backup.remove_old
     rescue => ex
-      log "Something went wrong while deleting old backups.", ex
+      log "Something went wrong while removing old backups.", ex
     end
     def notify_user

View File

@@ -1,65 +0,0 @@
require_dependency "backup_restore/backup_store"
require_dependency "disk_space"
module BackupRestore
class LocalBackupStore < BackupStore
def self.base_directory(current_db = nil)
current_db ||= RailsMultisite::ConnectionManagement.current_db
base_directory = File.join(Rails.root, "public", "backups", current_db)
FileUtils.mkdir_p(base_directory) unless Dir.exists?(base_directory)
base_directory
end
def self.chunk_path(identifier, filename, chunk_number)
File.join(LocalBackupStore.base_directory, "tmp", identifier, "#{filename}.part#{chunk_number}")
end
def initialize(opts = {})
@base_directory = opts[:base_directory] || LocalBackupStore.base_directory
end
def remote?
false
end
def file(filename, include_download_source: false)
path = path_from_filename(filename)
create_file_from_path(path, include_download_source) if File.exists?(path)
end
def delete_file(filename)
path = path_from_filename(filename)
if File.exists?(path)
FileUtils.remove_file(path, force: true)
DiskSpace.reset_cached_stats
end
end
def download_file(filename, destination, failure_message = "")
path = path_from_filename(filename)
Discourse::Utils.execute_command('cp', path, destination, failure_message: failure_message)
end
private
def unsorted_files
files = Dir.glob(File.join(@base_directory, "*.{gz,tgz}"))
files.map! { |filename| create_file_from_path(filename) }
files
end
def path_from_filename(filename)
File.join(@base_directory, filename)
end
def create_file_from_path(path, include_download_source = false)
BackupFile.new(
filename: File.basename(path),
size: File.size(path),
last_modified: File.mtime(path).utc,
source: include_download_source ? path : nil
)
end
end
end

View File

@@ -133,12 +133,12 @@ module BackupRestore
     def initialize_state
       @success = false
-      @store = BackupRestore::BackupStore.create
       @db_was_changed = false
       @current_db = RailsMultisite::ConnectionManagement.current_db
       @current_version = BackupRestore.current_version
       @timestamp = Time.now.strftime("%Y-%m-%d-%H%M%S")
       @tmp_directory = File.join(Rails.root, "tmp", "restores", @current_db, @timestamp)
+      @source_filename = File.join(Backup.base_directory, @filename)
       @archive_filename = File.join(@tmp_directory, @filename)
       @tar_filename = @archive_filename[0...-3]
       @meta_filename = File.join(@tmp_directory, BackupRestore::METADATA_FILE)
@@ -195,15 +195,8 @@ module BackupRestore
     end
     def copy_archive_to_tmp_directory
-      if @store.remote?
-        log "Downloading archive to tmp directory..."
-        failure_message = "Failed to download archive to tmp directory."
-      else
-        log "Copying archive to tmp directory..."
-        failure_message = "Failed to copy archive to tmp directory."
-      end
-      @store.download_file(@filename, @archive_filename, failure_message)
+      log "Copying archive to tmp directory..."
+      Discourse::Utils.execute_command('cp', @source_filename, @archive_filename, failure_message: "Failed to copy archive to tmp directory.")
     end
     def unzip_archive

View File

@@ -1,95 +0,0 @@
require_dependency "backup_restore/backup_store"
require_dependency "s3_helper"
module BackupRestore
class S3BackupStore < BackupStore
DOWNLOAD_URL_EXPIRES_AFTER_SECONDS ||= 15
UPLOAD_URL_EXPIRES_AFTER_SECONDS ||= 21_600 # 6 hours
def initialize(opts = {})
s3_options = S3Helper.s3_options(SiteSetting)
s3_options.merge!(opts[:s3_options]) if opts[:s3_options]
@s3_helper = S3Helper.new(SiteSetting.s3_backup_bucket, '', s3_options)
end
def remote?
true
end
def file(filename, include_download_source: false)
obj = @s3_helper.object(filename)
create_file_from_object(obj, include_download_source) if obj.exists?
end
def delete_file(filename)
obj = @s3_helper.object(filename)
obj.delete if obj.exists?
end
def download_file(filename, destination_path, failure_message = nil)
unless @s3_helper.object(filename).download_file(destination_path)
raise failure_message&.to_s || "Failed to download file"
end
end
def upload_file(filename, source_path, content_type)
obj = @s3_helper.object(filename)
raise BackupFileExists.new if obj.exists?
obj.upload_file(source_path, content_type: content_type)
end
def generate_upload_url(filename)
obj = @s3_helper.object(filename)
raise BackupFileExists.new if obj.exists?
presigned_url(obj, :put, UPLOAD_URL_EXPIRES_AFTER_SECONDS)
end
private
def unsorted_files
objects = []
@s3_helper.list.each do |obj|
if obj.key.match?(/\.t?gz$/i)
objects << create_file_from_object(obj)
end
end
objects
rescue Aws::Errors::ServiceError => e
Rails.logger.warn("Failed to list backups from S3: #{e.message.presence || e.class.name}")
raise StorageError
end
def create_file_from_object(obj, include_download_source = false)
BackupFile.new(
filename: File.basename(obj.key),
size: obj.size,
last_modified: obj.last_modified,
source: include_download_source ? presigned_url(obj, :get, DOWNLOAD_URL_EXPIRES_AFTER_SECONDS) : nil
)
end
def presigned_url(obj, method, expires_in_seconds)
ensure_cors!
obj.presigned_url(method, expires_in: expires_in_seconds)
end
def ensure_cors!
rule = {
allowed_headers: ["*"],
allowed_methods: ["PUT"],
allowed_origins: [Discourse.base_url_no_prefix],
max_age_seconds: 3000
}
@s3_helper.ensure_cors!([rule])
end
def cleanup_allowed?
!SiteSetting.s3_disable_cleanup
end
end
end